diff --git a/logs_sharpness_pure/adam_lr_search/avg_loss_log_vs_steps.png b/logs_sharpness_pure/adam_lr_search/avg_loss_log_vs_steps.png new file mode 100644 index 0000000000000000000000000000000000000000..cc8f4cacfb4966e1349c4d6e0a173b71d5ddc002 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/avg_loss_log_vs_steps.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d4d32db66e8ff21981801de41f5b63ee28f6050661d0d719b68264276aa22ec3 +size 113865 diff --git a/logs_sharpness_pure/adam_lr_search/avg_loss_vs_steps.png b/logs_sharpness_pure/adam_lr_search/avg_loss_vs_steps.png new file mode 100644 index 0000000000000000000000000000000000000000..85721e74130c12df41e90e23c00a1dacc6d92472 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/avg_loss_vs_steps.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a38999d84f0f67addae8fc854bcdaf8c557e38c8025c48c514cf8b7905e5f5c +size 105971 diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/config.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..e0212dd51f8fdcaaab17be2177722d7a08782db8 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/adam_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.0005, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "adam", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "440b358c-fb44-4486-8955-bb82764c60e7", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..c2308d97f1317fc77b8c3e2b248e9b57c4d4cdbc --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 0.9994379878044128, + "total_l1_linf_norm": 8816.41796875, + "total_spectral_norm": 0.9994380474090576, + "layer_1_update_fnorm": 0.20055457949638367, + "layer_1_max_l1_linf_norm": 0.27178364992141724, + "layer_1_max_spectral_norm": 0.045446209609508514, + "layer_2_update_fnorm": 0.2132144272327423, + "layer_2_max_l1_linf_norm": 0.3791329562664032, + "layer_2_max_spectral_norm": 0.06540011614561081, + "layer_3_update_fnorm": 0.23929628729820251, + "layer_3_max_l1_linf_norm": 0.42553627490997314, + "layer_3_max_spectral_norm": 
0.08652688562870026, + "layer_4_update_fnorm": 0.2532736361026764, + "layer_4_max_l1_linf_norm": 0.4240434169769287, + "layer_4_max_spectral_norm": 0.08241494745016098, + "layer_5_update_fnorm": 0.25287923216819763, + "layer_5_max_l1_linf_norm": 0.32915329933166504, + "layer_5_max_spectral_norm": 0.07779402285814285, + "layer_6_update_fnorm": 0.22581353783607483, + "layer_6_max_l1_linf_norm": 0.2852100133895874, + "layer_6_max_spectral_norm": 0.05817290395498276, + "layer_7_update_fnorm": 0.21208643913269043, + "layer_7_max_l1_linf_norm": 0.2346884161233902, + "layer_7_max_spectral_norm": 0.037785936146974564, + "layer_8_update_fnorm": 0.20874446630477905, + "layer_8_max_l1_linf_norm": 0.2599324584007263, + "layer_8_max_spectral_norm": 0.03620052710175514, + "layer_9_update_fnorm": 0.21104156970977783, + "layer_9_max_l1_linf_norm": 0.23932214081287384, + "layer_9_max_spectral_norm": 0.036400388926267624, + "layer_10_update_fnorm": 0.21192313730716705, + "layer_10_max_l1_linf_norm": 0.24356618523597717, + "layer_10_max_spectral_norm": 0.037692777812480927, + "layer_11_update_fnorm": 0.20805121958255768, + "layer_11_max_l1_linf_norm": 0.2478422373533249, + "layer_11_max_spectral_norm": 0.039110008627176285, + "layer_12_update_fnorm": 0.18934932351112366, + "layer_12_max_l1_linf_norm": 0.22743889689445496, + "layer_12_max_spectral_norm": 0.04727723449468613, + "total_sharpness": 0.18861103057861328, + "ip_v_neg_g": 0.14531825482845306, + "cos_v_neg_g": 0.07977639883756638, + "v_norm": 0.9994379878044128, + "g_norm": 1.8225938081741333, + "hv_norm": 2.398933172225952, + "cos_v_hv": 0.07857869565486908, + "hg_norm": 52.671932220458984, + "cos_g_hg": 0.6413846611976624, + "v_parallel_norm": 0.0039682709611952305, + "v_perp_norm": 0.9994301199913025, + "layer_1_v_norm": 0.20055457949638367, + "layer_1_cos_v_neg_g": 0.2069990038871765, + "layer_2_v_norm": 0.2132144272327423, + "layer_2_cos_v_neg_g": 0.20684131979942322, + "layer_3_v_norm": 0.23929628729820251, + "layer_3_cos_v_neg_g": 0.13905395567417145, + "layer_4_v_norm": 0.2532736361026764, + "layer_4_cos_v_neg_g": 0.09664247930049896, + "layer_5_v_norm": 0.25287923216819763, + "layer_5_cos_v_neg_g": 0.08652731776237488, + "layer_6_v_norm": 0.22581352293491364, + "layer_6_cos_v_neg_g": 0.09300487488508224, + "layer_7_v_norm": 0.21208643913269043, + "layer_7_cos_v_neg_g": 0.07262764126062393, + "layer_8_v_norm": 0.20874446630477905, + "layer_8_cos_v_neg_g": 0.06213324889540672, + "layer_9_v_norm": 0.21104156970977783, + "layer_9_cos_v_neg_g": 0.05445292964577675, + "layer_10_v_norm": 0.21192313730716705, + "layer_10_cos_v_neg_g": 0.05512494221329689, + "layer_11_v_norm": 0.20805121958255768, + "layer_11_cos_v_neg_g": 0.059419356286525726, + "layer_12_v_norm": 0.18934932351112366, + "layer_12_cos_v_neg_g": 0.0689568892121315, + "layer_1_sharpness": 0.9778797030448914, + "layer_2_sharpness": 0.2935989797115326, + "layer_3_sharpness": 0.0533040352165699, + "layer_4_sharpness": 0.01776401698589325, + "layer_5_sharpness": 0.02765183337032795, + "layer_6_sharpness": 0.036189060658216476, + "layer_7_sharpness": 0.026273099705576897, + "layer_8_sharpness": 0.01950560323894024, + "layer_9_sharpness": 0.011595518328249454, + "layer_10_sharpness": 0.009736076928675175, + "layer_11_sharpness": 0.012458978220820427, + "layer_12_sharpness": 0.03287578001618385 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_10000.json 
b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..54132344492c1a4b798f9592a1b3fe0a1929a96f --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.1202287673950195, + "total_l1_linf_norm": 9994.47265625, + "total_spectral_norm": 1.120228886604309, + "layer_1_update_fnorm": 0.2505336403846741, + "layer_1_max_l1_linf_norm": 0.3463025689125061, + "layer_1_max_spectral_norm": 0.04854340851306915, + "layer_2_update_fnorm": 0.25005313754081726, + "layer_2_max_l1_linf_norm": 0.34252217411994934, + "layer_2_max_spectral_norm": 0.048715826123952866, + "layer_3_update_fnorm": 0.253276526927948, + "layer_3_max_l1_linf_norm": 0.3580211102962494, + "layer_3_max_spectral_norm": 0.051392316818237305, + "layer_4_update_fnorm": 0.2628088891506195, + "layer_4_max_l1_linf_norm": 0.3665893077850342, + "layer_4_max_spectral_norm": 0.0484045073390007, + "layer_5_update_fnorm": 0.26357656717300415, + "layer_5_max_l1_linf_norm": 0.3227192163467407, + "layer_5_max_spectral_norm": 0.04762628674507141, + "layer_6_update_fnorm": 0.2645808160305023, + "layer_6_max_l1_linf_norm": 0.29753345251083374, + "layer_6_max_spectral_norm": 0.03945126757025719, + "layer_7_update_fnorm": 0.2606527805328369, + "layer_7_max_l1_linf_norm": 0.2867283225059509, + "layer_7_max_spectral_norm": 0.03453891724348068, + "layer_8_update_fnorm": 0.26219674944877625, + "layer_8_max_l1_linf_norm": 0.2756144404411316, + "layer_8_max_spectral_norm": 0.028118716552853584, + "layer_9_update_fnorm": 0.26659104228019714, + "layer_9_max_l1_linf_norm": 0.28837430477142334, + "layer_9_max_spectral_norm": 0.027500400319695473, + "layer_10_update_fnorm": 0.2669183611869812, + "layer_10_max_l1_linf_norm": 0.293255478143692, + "layer_10_max_spectral_norm": 0.028095250949263573, + "layer_11_update_fnorm": 0.2677024304866791, + "layer_11_max_l1_linf_norm": 0.31352129578590393, + "layer_11_max_spectral_norm": 0.03677600249648094, + "layer_12_update_fnorm": 0.25738605856895447, + "layer_12_max_l1_linf_norm": 0.30829066038131714, + "layer_12_max_spectral_norm": 0.04079150781035423, + "total_sharpness": 0.02023336850106716, + "ip_v_neg_g": 0.014116114005446434, + "cos_v_neg_g": 0.010077182203531265, + "v_norm": 1.1202287673950195, + "g_norm": 1.2504585981369019, + "hv_norm": 0.5663976669311523, + "cos_v_hv": 0.04001782089471817, + "hg_norm": 15.004913330078125, + "cos_g_hg": 0.5767911076545715, + "v_parallel_norm": 0.0003612139553297311, + "v_perp_norm": 1.12022864818573, + "layer_1_v_norm": 0.2505336403846741, + "layer_1_cos_v_neg_g": 0.021277185529470444, + "layer_2_v_norm": 0.25005313754081726, + "layer_2_cos_v_neg_g": 0.026237789541482925, + "layer_3_v_norm": 0.253276526927948, + "layer_3_cos_v_neg_g": 0.015211713500320911, + "layer_4_v_norm": 0.2628088891506195, + "layer_4_cos_v_neg_g": 0.007456223014742136, + "layer_5_v_norm": 0.26357656717300415, + "layer_5_cos_v_neg_g": 0.011009813286364079, + "layer_6_v_norm": 0.2645808458328247, + "layer_6_cos_v_neg_g": 0.01230990793555975, + "layer_7_v_norm": 0.2606527805328369, + "layer_7_cos_v_neg_g": 0.0124882273375988, + "layer_8_v_norm": 0.26219674944877625, + "layer_8_cos_v_neg_g": 0.014509228989481926, + "layer_9_v_norm": 0.26659104228019714, + "layer_9_cos_v_neg_g": 0.010535404086112976, + "layer_10_v_norm": 0.2669183611869812, + "layer_10_cos_v_neg_g": 0.008605850860476494, + 
"layer_11_v_norm": 0.2677024304866791, + "layer_11_cos_v_neg_g": 0.00633964454755187, + "layer_12_v_norm": 0.25738605856895447, + "layer_12_cos_v_neg_g": 0.008168226107954979, + "layer_1_sharpness": 0.0214700847864151, + "layer_2_sharpness": 0.03487631678581238, + "layer_3_sharpness": 0.006486807018518448, + "layer_4_sharpness": 0.0019747831393033266, + "layer_5_sharpness": 0.0032221961300820112, + "layer_6_sharpness": 0.003996896091848612, + "layer_7_sharpness": 0.004701865371316671, + "layer_8_sharpness": 0.004314451478421688, + "layer_9_sharpness": 0.0030894868541508913, + "layer_10_sharpness": 0.001975157530978322, + "layer_11_sharpness": 0.0023666401393711567, + "layer_12_sharpness": 0.006001608446240425 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..a8ab780a5e7b56ae2b326f3ca943f5b9e6c1cb0d --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 0.9566032290458679, + "total_l1_linf_norm": 8313.8935546875, + "total_spectral_norm": 0.9566032886505127, + "layer_1_update_fnorm": 0.17019520699977875, + "layer_1_max_l1_linf_norm": 0.2197989523410797, + "layer_1_max_spectral_norm": 0.03422505408525467, + "layer_2_update_fnorm": 0.1589529663324356, + "layer_2_max_l1_linf_norm": 0.27991583943367004, + "layer_2_max_spectral_norm": 0.03878429904580116, + "layer_3_update_fnorm": 0.16766375303268433, + "layer_3_max_l1_linf_norm": 0.2796263098716736, + "layer_3_max_spectral_norm": 0.0428864024579525, + "layer_4_update_fnorm": 0.1884792149066925, + "layer_4_max_l1_linf_norm": 0.25784406065940857, + "layer_4_max_spectral_norm": 0.042135562747716904, + "layer_5_update_fnorm": 0.19383934140205383, + "layer_5_max_l1_linf_norm": 0.2589515447616577, + "layer_5_max_spectral_norm": 0.03861325979232788, + "layer_6_update_fnorm": 0.20561683177947998, + "layer_6_max_l1_linf_norm": 0.24908597767353058, + "layer_6_max_spectral_norm": 0.03218749538064003, + "layer_7_update_fnorm": 0.20624591410160065, + "layer_7_max_l1_linf_norm": 0.2369450479745865, + "layer_7_max_spectral_norm": 0.03016282618045807, + "layer_8_update_fnorm": 0.21276141703128815, + "layer_8_max_l1_linf_norm": 0.25326573848724365, + "layer_8_max_spectral_norm": 0.030955763533711433, + "layer_9_update_fnorm": 0.21676766872406006, + "layer_9_max_l1_linf_norm": 0.23366045951843262, + "layer_9_max_spectral_norm": 0.031908463686704636, + "layer_10_update_fnorm": 0.222751185297966, + "layer_10_max_l1_linf_norm": 0.24712339043617249, + "layer_10_max_spectral_norm": 0.03329596668481827, + "layer_11_update_fnorm": 0.22269171476364136, + "layer_11_max_l1_linf_norm": 0.24859115481376648, + "layer_11_max_spectral_norm": 0.03464613854885101, + "layer_12_update_fnorm": 0.21052347123622894, + "layer_12_max_l1_linf_norm": 0.22751960158348083, + "layer_12_max_spectral_norm": 0.043026551604270935, + "total_sharpness": 0.058464415371418, + "ip_v_neg_g": 0.031035050749778748, + "cos_v_neg_g": 0.028326280415058136, + "v_norm": 0.9566032290458679, + "g_norm": 1.1453311443328857, + "hv_norm": 0.5791908502578735, + "cos_v_hv": 0.0965610072016716, + "hg_norm": 4.699522972106934, + "cos_g_hg": 0.45414718985557556, + "v_parallel_norm": 0.000915282522328198, + "v_perp_norm": 0.9566028118133545, + "layer_1_v_norm": 
0.17019520699977875, + "layer_1_cos_v_neg_g": 0.05649230629205704, + "layer_2_v_norm": 0.1589529663324356, + "layer_2_cos_v_neg_g": 0.07275579124689102, + "layer_3_v_norm": 0.16766375303268433, + "layer_3_cos_v_neg_g": 0.06270360946655273, + "layer_4_v_norm": 0.1884792149066925, + "layer_4_cos_v_neg_g": 0.045535605400800705, + "layer_5_v_norm": 0.19383934140205383, + "layer_5_cos_v_neg_g": 0.040924374014139175, + "layer_6_v_norm": 0.20561683177947998, + "layer_6_cos_v_neg_g": 0.03866966813802719, + "layer_7_v_norm": 0.20624591410160065, + "layer_7_cos_v_neg_g": 0.035283852368593216, + "layer_8_v_norm": 0.21276141703128815, + "layer_8_cos_v_neg_g": 0.03618966042995453, + "layer_9_v_norm": 0.21676766872406006, + "layer_9_cos_v_neg_g": 0.036124322563409805, + "layer_10_v_norm": 0.222751185297966, + "layer_10_cos_v_neg_g": 0.03344431146979332, + "layer_11_v_norm": 0.22269171476364136, + "layer_11_cos_v_neg_g": 0.03486303240060806, + "layer_12_v_norm": 0.21052347123622894, + "layer_12_cos_v_neg_g": 0.04118311405181885, + "layer_1_sharpness": 0.11184472590684891, + "layer_2_sharpness": 0.05024897679686546, + "layer_3_sharpness": 0.0325869657099247, + "layer_4_sharpness": 0.009807909838855267, + "layer_5_sharpness": 0.01306726224720478, + "layer_6_sharpness": 0.013985946774482727, + "layer_7_sharpness": 0.01206858642399311, + "layer_8_sharpness": 0.009478067979216576, + "layer_9_sharpness": 0.007229510694742203, + "layer_10_sharpness": 0.005532876122742891, + "layer_11_sharpness": 0.007128313183784485, + "layer_12_sharpness": 0.013697681948542595 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..b6bef9c9888761ae586a7fa2acfc9fe4c53e132f --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.0108085870742798, + "total_l1_linf_norm": 8899.681640625, + "total_spectral_norm": 1.0108087062835693, + "layer_1_update_fnorm": 0.1929156482219696, + "layer_1_max_l1_linf_norm": 0.2700032591819763, + "layer_1_max_spectral_norm": 0.03992825374007225, + "layer_2_update_fnorm": 0.18920785188674927, + "layer_2_max_l1_linf_norm": 0.3324818015098572, + "layer_2_max_spectral_norm": 0.05742060765624046, + "layer_3_update_fnorm": 0.19870774447917938, + "layer_3_max_l1_linf_norm": 0.29673540592193604, + "layer_3_max_spectral_norm": 0.04886457324028015, + "layer_4_update_fnorm": 0.21775171160697937, + "layer_4_max_l1_linf_norm": 0.29750514030456543, + "layer_4_max_spectral_norm": 0.04634855315089226, + "layer_5_update_fnorm": 0.21898342669010162, + "layer_5_max_l1_linf_norm": 0.2637355625629425, + "layer_5_max_spectral_norm": 0.03982267901301384, + "layer_6_update_fnorm": 0.22372043132781982, + "layer_6_max_l1_linf_norm": 0.2650526762008667, + "layer_6_max_spectral_norm": 0.03365721181035042, + "layer_7_update_fnorm": 0.22309012711048126, + "layer_7_max_l1_linf_norm": 0.25647908449172974, + "layer_7_max_spectral_norm": 0.030489087104797363, + "layer_8_update_fnorm": 0.22838100790977478, + "layer_8_max_l1_linf_norm": 0.25400596857070923, + "layer_8_max_spectral_norm": 0.03210154175758362, + "layer_9_update_fnorm": 0.2338394820690155, + "layer_9_max_l1_linf_norm": 0.2571723461151123, + "layer_9_max_spectral_norm": 0.03372207656502724, + "layer_10_update_fnorm": 
0.2378387600183487, + "layer_10_max_l1_linf_norm": 0.259728342294693, + "layer_10_max_spectral_norm": 0.03259627893567085, + "layer_11_update_fnorm": 0.2373380810022354, + "layer_11_max_l1_linf_norm": 0.26678645610809326, + "layer_11_max_spectral_norm": 0.0342777743935585, + "layer_12_update_fnorm": 0.22738862037658691, + "layer_12_max_l1_linf_norm": 0.2499677538871765, + "layer_12_max_spectral_norm": 0.04545656591653824, + "total_sharpness": 0.06735413521528244, + "ip_v_neg_g": 0.03562192618846893, + "cos_v_neg_g": 0.02840598113834858, + "v_norm": 1.0108085870742798, + "g_norm": 1.2406196594238281, + "hv_norm": 0.9052815437316895, + "cos_v_hv": 0.0752054750919342, + "hg_norm": 9.347806930541992, + "cos_g_hg": 0.455485999584198, + "v_parallel_norm": 0.0010113412281498313, + "v_perp_norm": 1.010807991027832, + "layer_1_v_norm": 0.1929156482219696, + "layer_1_cos_v_neg_g": 0.06608304381370544, + "layer_2_v_norm": 0.18920785188674927, + "layer_2_cos_v_neg_g": 0.12303347140550613, + "layer_3_v_norm": 0.19870774447917938, + "layer_3_cos_v_neg_g": 0.07183290272951126, + "layer_4_v_norm": 0.21775171160697937, + "layer_4_cos_v_neg_g": 0.042130470275878906, + "layer_5_v_norm": 0.21898342669010162, + "layer_5_cos_v_neg_g": 0.0343150869011879, + "layer_6_v_norm": 0.22372043132781982, + "layer_6_cos_v_neg_g": 0.027061201632022858, + "layer_7_v_norm": 0.22309012711048126, + "layer_7_cos_v_neg_g": 0.027552036568522453, + "layer_8_v_norm": 0.22838100790977478, + "layer_8_cos_v_neg_g": 0.025301923975348473, + "layer_9_v_norm": 0.2338394820690155, + "layer_9_cos_v_neg_g": 0.022286832332611084, + "layer_10_v_norm": 0.2378387600183487, + "layer_10_cos_v_neg_g": 0.01733667403459549, + "layer_11_v_norm": 0.2373380810022354, + "layer_11_cos_v_neg_g": 0.016171906143426895, + "layer_12_v_norm": 0.22738862037658691, + "layer_12_cos_v_neg_g": 0.014076512306928635, + "layer_1_sharpness": 0.1757020354270935, + "layer_2_sharpness": 0.17485001683235168, + "layer_3_sharpness": 0.03937452659010887, + "layer_4_sharpness": 0.010917775332927704, + "layer_5_sharpness": 0.01396460272371769, + "layer_6_sharpness": 0.011354224756360054, + "layer_7_sharpness": 0.01053369790315628, + "layer_8_sharpness": 0.008246843703091145, + "layer_9_sharpness": 0.005834114737808704, + "layer_10_sharpness": 0.004426868632435799, + "layer_11_sharpness": 0.004652546718716621, + "layer_12_sharpness": 0.012396580539643764 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..eed92d96134faf592ce75006b444b0fe40eb4e53 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.0382567644119263, + "total_l1_linf_norm": 9172.53125, + "total_spectral_norm": 1.0382567644119263, + "layer_1_update_fnorm": 0.20756851136684418, + "layer_1_max_l1_linf_norm": 0.3010532855987549, + "layer_1_max_spectral_norm": 0.0432560071349144, + "layer_2_update_fnorm": 0.1991766393184662, + "layer_2_max_l1_linf_norm": 0.3304210603237152, + "layer_2_max_spectral_norm": 0.049846090376377106, + "layer_3_update_fnorm": 0.2113972306251526, + "layer_3_max_l1_linf_norm": 0.30442094802856445, + "layer_3_max_spectral_norm": 0.04896470904350281, + "layer_4_update_fnorm": 0.23089374601840973, + "layer_4_max_l1_linf_norm": 
0.29684388637542725, + "layer_4_max_spectral_norm": 0.043329596519470215, + "layer_5_update_fnorm": 0.22992078959941864, + "layer_5_max_l1_linf_norm": 0.29291847348213196, + "layer_5_max_spectral_norm": 0.04198354110121727, + "layer_6_update_fnorm": 0.23627862334251404, + "layer_6_max_l1_linf_norm": 0.28403013944625854, + "layer_6_max_spectral_norm": 0.03599558770656586, + "layer_7_update_fnorm": 0.2364729642868042, + "layer_7_max_l1_linf_norm": 0.2701289653778076, + "layer_7_max_spectral_norm": 0.03377242386341095, + "layer_8_update_fnorm": 0.24101735651493073, + "layer_8_max_l1_linf_norm": 0.2791979908943176, + "layer_8_max_spectral_norm": 0.03420253098011017, + "layer_9_update_fnorm": 0.2446380853652954, + "layer_9_max_l1_linf_norm": 0.258586585521698, + "layer_9_max_spectral_norm": 0.03384382277727127, + "layer_10_update_fnorm": 0.2481764853000641, + "layer_10_max_l1_linf_norm": 0.26041650772094727, + "layer_10_max_spectral_norm": 0.03387173265218735, + "layer_11_update_fnorm": 0.24713020026683807, + "layer_11_max_l1_linf_norm": 0.26981115341186523, + "layer_11_max_spectral_norm": 0.03532543405890465, + "layer_12_update_fnorm": 0.2363150715827942, + "layer_12_max_l1_linf_norm": 0.2500186860561371, + "layer_12_max_spectral_norm": 0.04293745011091232, + "total_sharpness": 0.04579589515924454, + "ip_v_neg_g": 0.025089602917432785, + "cos_v_neg_g": 0.022163940593600273, + "v_norm": 1.0382567644119263, + "g_norm": 1.0902900695800781, + "hv_norm": 0.5400744080543518, + "cos_v_hv": 0.08803953230381012, + "hg_norm": 3.7550206184387207, + "cos_g_hg": 0.4596455991268158, + "v_parallel_norm": 0.0006918584695085883, + "v_perp_norm": 1.0382565259933472, + "layer_1_v_norm": 0.20756851136684418, + "layer_1_cos_v_neg_g": 0.03898325562477112, + "layer_2_v_norm": 0.1991766393184662, + "layer_2_cos_v_neg_g": 0.058399636298418045, + "layer_3_v_norm": 0.2113972306251526, + "layer_3_cos_v_neg_g": 0.044026460498571396, + "layer_4_v_norm": 0.23089374601840973, + "layer_4_cos_v_neg_g": 0.031702809035778046, + "layer_5_v_norm": 0.22992078959941864, + "layer_5_cos_v_neg_g": 0.032112330198287964, + "layer_6_v_norm": 0.23627862334251404, + "layer_6_cos_v_neg_g": 0.027242746204137802, + "layer_7_v_norm": 0.2364729642868042, + "layer_7_cos_v_neg_g": 0.025627925992012024, + "layer_8_v_norm": 0.24101735651493073, + "layer_8_cos_v_neg_g": 0.026250656694173813, + "layer_9_v_norm": 0.2446380853652954, + "layer_9_cos_v_neg_g": 0.026569802314043045, + "layer_10_v_norm": 0.2481764853000641, + "layer_10_cos_v_neg_g": 0.02057718299329281, + "layer_11_v_norm": 0.24713020026683807, + "layer_11_cos_v_neg_g": 0.021967949345707893, + "layer_12_v_norm": 0.2363150715827942, + "layer_12_cos_v_neg_g": 0.025185102596879005, + "layer_1_sharpness": 0.05192335322499275, + "layer_2_sharpness": 0.04786686599254608, + "layer_3_sharpness": 0.016581067815423012, + "layer_4_sharpness": 0.00609760545194149, + "layer_5_sharpness": 0.008441274054348469, + "layer_6_sharpness": 0.010864099487662315, + "layer_7_sharpness": 0.011721686460077763, + "layer_8_sharpness": 0.008684649132192135, + "layer_9_sharpness": 0.006897444371134043, + "layer_10_sharpness": 0.00436302600428462, + "layer_11_sharpness": 0.005065144505351782, + "layer_12_sharpness": 0.01106425654143095 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_3000.json new file mode 100644 index 
0000000000000000000000000000000000000000..40174ceffa2d23b0735ab87c4f071cbbc3fef0ad --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.0701643228530884, + "total_l1_linf_norm": 9479.0166015625, + "total_spectral_norm": 1.070164442062378, + "layer_1_update_fnorm": 0.2172134667634964, + "layer_1_max_l1_linf_norm": 0.2933937609195709, + "layer_1_max_spectral_norm": 0.04256433621048927, + "layer_2_update_fnorm": 0.20265930891036987, + "layer_2_max_l1_linf_norm": 0.28372982144355774, + "layer_2_max_spectral_norm": 0.044810742139816284, + "layer_3_update_fnorm": 0.21636922657489777, + "layer_3_max_l1_linf_norm": 0.3037418723106384, + "layer_3_max_spectral_norm": 0.04631135240197182, + "layer_4_update_fnorm": 0.2390339970588684, + "layer_4_max_l1_linf_norm": 0.30542150139808655, + "layer_4_max_spectral_norm": 0.04553716257214546, + "layer_5_update_fnorm": 0.2402467578649521, + "layer_5_max_l1_linf_norm": 0.3185480833053589, + "layer_5_max_spectral_norm": 0.04133784770965576, + "layer_6_update_fnorm": 0.24738773703575134, + "layer_6_max_l1_linf_norm": 0.2803424000740051, + "layer_6_max_spectral_norm": 0.034263238310813904, + "layer_7_update_fnorm": 0.24697701632976532, + "layer_7_max_l1_linf_norm": 0.27563661336898804, + "layer_7_max_spectral_norm": 0.03431142866611481, + "layer_8_update_fnorm": 0.253416508436203, + "layer_8_max_l1_linf_norm": 0.2810683250427246, + "layer_8_max_spectral_norm": 0.034543830901384354, + "layer_9_update_fnorm": 0.26089614629745483, + "layer_9_max_l1_linf_norm": 0.2838515639305115, + "layer_9_max_spectral_norm": 0.03717881441116333, + "layer_10_update_fnorm": 0.2655562162399292, + "layer_10_max_l1_linf_norm": 0.3010123670101166, + "layer_10_max_spectral_norm": 0.0385124534368515, + "layer_11_update_fnorm": 0.263788640499115, + "layer_11_max_l1_linf_norm": 0.29750680923461914, + "layer_11_max_spectral_norm": 0.03962210193276405, + "layer_12_update_fnorm": 0.2565019428730011, + "layer_12_max_l1_linf_norm": 0.29914015531539917, + "layer_12_max_spectral_norm": 0.0484115332365036, + "total_sharpness": 0.031827401369810104, + "ip_v_neg_g": 0.02577211894094944, + "cos_v_neg_g": 0.02366938628256321, + "v_norm": 1.0701643228530884, + "g_norm": 1.0174490213394165, + "hv_norm": 0.4131031334400177, + "cos_v_hv": 0.08245047181844711, + "hg_norm": 8.862105369567871, + "cos_g_hg": 0.19307413697242737, + "v_parallel_norm": 0.0006996482843533158, + "v_perp_norm": 1.0701640844345093, + "layer_1_v_norm": 0.2172134667634964, + "layer_1_cos_v_neg_g": 0.04136871546506882, + "layer_2_v_norm": 0.20265930891036987, + "layer_2_cos_v_neg_g": 0.04950815439224243, + "layer_3_v_norm": 0.21636922657489777, + "layer_3_cos_v_neg_g": 0.044622812420129776, + "layer_4_v_norm": 0.2390339970588684, + "layer_4_cos_v_neg_g": 0.031218132004141808, + "layer_5_v_norm": 0.2402467578649521, + "layer_5_cos_v_neg_g": 0.030715692788362503, + "layer_6_v_norm": 0.24738772213459015, + "layer_6_cos_v_neg_g": 0.026992453262209892, + "layer_7_v_norm": 0.24697701632976532, + "layer_7_cos_v_neg_g": 0.03246087580919266, + "layer_8_v_norm": 0.253416508436203, + "layer_8_cos_v_neg_g": 0.029053615406155586, + "layer_9_v_norm": 0.26089614629745483, + "layer_9_cos_v_neg_g": 0.025984644889831543, + "layer_10_v_norm": 0.2655562162399292, + "layer_10_cos_v_neg_g": 0.026966353878378868, + "layer_11_v_norm": 0.2637886106967926, + "layer_11_cos_v_neg_g": 0.025584330782294273, + "layer_12_v_norm": 0.2565019428730011, + 
"layer_12_cos_v_neg_g": 0.032011307775974274, + "layer_1_sharpness": 0.03939611464738846, + "layer_2_sharpness": 0.018835032358765602, + "layer_3_sharpness": 0.012563010677695274, + "layer_4_sharpness": 0.004663423169404268, + "layer_5_sharpness": 0.006455670576542616, + "layer_6_sharpness": 0.008078540675342083, + "layer_7_sharpness": 0.009943719953298569, + "layer_8_sharpness": 0.006684653460979462, + "layer_9_sharpness": 0.004579039756208658, + "layer_10_sharpness": 0.0038771936669945717, + "layer_11_sharpness": 0.0037197936326265335, + "layer_12_sharpness": 0.012487279251217842 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..255dadcfb7b00f720a2d3a1f1cf66b671ee3c704 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.0655032396316528, + "total_l1_linf_norm": 9443.7109375, + "total_spectral_norm": 1.0655031204223633, + "layer_1_update_fnorm": 0.21758587658405304, + "layer_1_max_l1_linf_norm": 0.29740333557128906, + "layer_1_max_spectral_norm": 0.04345599561929703, + "layer_2_update_fnorm": 0.21050120890140533, + "layer_2_max_l1_linf_norm": 0.3147680163383484, + "layer_2_max_spectral_norm": 0.04606155306100845, + "layer_3_update_fnorm": 0.22599667310714722, + "layer_3_max_l1_linf_norm": 0.31639933586120605, + "layer_3_max_spectral_norm": 0.04705042392015457, + "layer_4_update_fnorm": 0.24317489564418793, + "layer_4_max_l1_linf_norm": 0.3165961802005768, + "layer_4_max_spectral_norm": 0.04064476862549782, + "layer_5_update_fnorm": 0.2425118237733841, + "layer_5_max_l1_linf_norm": 0.2764861583709717, + "layer_5_max_spectral_norm": 0.038294557482004166, + "layer_6_update_fnorm": 0.2468963861465454, + "layer_6_max_l1_linf_norm": 0.262956440448761, + "layer_6_max_spectral_norm": 0.03223656117916107, + "layer_7_update_fnorm": 0.24380908906459808, + "layer_7_max_l1_linf_norm": 0.25735822319984436, + "layer_7_max_spectral_norm": 0.03094611130654812, + "layer_8_update_fnorm": 0.2464238554239273, + "layer_8_max_l1_linf_norm": 0.26881057024002075, + "layer_8_max_spectral_norm": 0.031774796545505524, + "layer_9_update_fnorm": 0.252817839384079, + "layer_9_max_l1_linf_norm": 0.2702566087245941, + "layer_9_max_spectral_norm": 0.032759349793195724, + "layer_10_update_fnorm": 0.2545493245124817, + "layer_10_max_l1_linf_norm": 0.27059149742126465, + "layer_10_max_spectral_norm": 0.0325947105884552, + "layer_11_update_fnorm": 0.25521257519721985, + "layer_11_max_l1_linf_norm": 0.2733551859855652, + "layer_11_max_spectral_norm": 0.03343995660543442, + "layer_12_update_fnorm": 0.24603018164634705, + "layer_12_max_l1_linf_norm": 0.2664644122123718, + "layer_12_max_spectral_norm": 0.04745369032025337, + "total_sharpness": 0.027906056493520737, + "ip_v_neg_g": 0.018942419439554214, + "cos_v_neg_g": 0.016526512801647186, + "v_norm": 1.0655032396316528, + "g_norm": 1.0757205486297607, + "hv_norm": 0.4269696772098541, + "cos_v_hv": 0.06963958591222763, + "hg_norm": 4.917377471923828, + "cos_g_hg": 0.4955134093761444, + "v_parallel_norm": 0.0005322037031874061, + "v_perp_norm": 1.0655031204223633, + "layer_1_v_norm": 0.21758587658405304, + "layer_1_cos_v_neg_g": 0.028209928423166275, + "layer_2_v_norm": 0.21050120890140533, + "layer_2_cos_v_neg_g": 
0.03818730264902115, + "layer_3_v_norm": 0.22599667310714722, + "layer_3_cos_v_neg_g": 0.033214129507541656, + "layer_4_v_norm": 0.24317489564418793, + "layer_4_cos_v_neg_g": 0.023841217160224915, + "layer_5_v_norm": 0.2425118237733841, + "layer_5_cos_v_neg_g": 0.022539660334587097, + "layer_6_v_norm": 0.2468963861465454, + "layer_6_cos_v_neg_g": 0.02073063515126705, + "layer_7_v_norm": 0.24380908906459808, + "layer_7_cos_v_neg_g": 0.01997566595673561, + "layer_8_v_norm": 0.2464238554239273, + "layer_8_cos_v_neg_g": 0.02025756984949112, + "layer_9_v_norm": 0.252817839384079, + "layer_9_cos_v_neg_g": 0.020491033792495728, + "layer_10_v_norm": 0.2545493245124817, + "layer_10_cos_v_neg_g": 0.01607806421816349, + "layer_11_v_norm": 0.25521257519721985, + "layer_11_cos_v_neg_g": 0.013996207155287266, + "layer_12_v_norm": 0.24603018164634705, + "layer_12_cos_v_neg_g": 0.01881512813270092, + "layer_1_sharpness": 0.03226345032453537, + "layer_2_sharpness": 0.016675908118486404, + "layer_3_sharpness": 0.0146652115508914, + "layer_4_sharpness": 0.005150127690285444, + "layer_5_sharpness": 0.007428191602230072, + "layer_6_sharpness": 0.006649847608059645, + "layer_7_sharpness": 0.007295332849025726, + "layer_8_sharpness": 0.005637091584503651, + "layer_9_sharpness": 0.004541359376162291, + "layer_10_sharpness": 0.0031950781121850014, + "layer_11_sharpness": 0.004071253817528486, + "layer_12_sharpness": 0.010087664239108562 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..87f93e3a2a66290e80e44d51e69173d50a8a3740 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.061941385269165, + "total_l1_linf_norm": 9414.712890625, + "total_spectral_norm": 1.0619415044784546, + "layer_1_update_fnorm": 0.21320249140262604, + "layer_1_max_l1_linf_norm": 0.33228886127471924, + "layer_1_max_spectral_norm": 0.04587074741721153, + "layer_2_update_fnorm": 0.21728305518627167, + "layer_2_max_l1_linf_norm": 0.30499449372291565, + "layer_2_max_spectral_norm": 0.049808286130428314, + "layer_3_update_fnorm": 0.2196386754512787, + "layer_3_max_l1_linf_norm": 0.2841688394546509, + "layer_3_max_spectral_norm": 0.044933248311281204, + "layer_4_update_fnorm": 0.236786887049675, + "layer_4_max_l1_linf_norm": 0.2751556634902954, + "layer_4_max_spectral_norm": 0.04060149937868118, + "layer_5_update_fnorm": 0.2375914603471756, + "layer_5_max_l1_linf_norm": 0.30254700779914856, + "layer_5_max_spectral_norm": 0.039886705577373505, + "layer_6_update_fnorm": 0.2442454993724823, + "layer_6_max_l1_linf_norm": 0.27476614713668823, + "layer_6_max_spectral_norm": 0.030866965651512146, + "layer_7_update_fnorm": 0.24547317624092102, + "layer_7_max_l1_linf_norm": 0.26693466305732727, + "layer_7_max_spectral_norm": 0.03234809264540672, + "layer_8_update_fnorm": 0.24928107857704163, + "layer_8_max_l1_linf_norm": 0.267231285572052, + "layer_8_max_spectral_norm": 0.03204385191202164, + "layer_9_update_fnorm": 0.25348225235939026, + "layer_9_max_l1_linf_norm": 0.2692354917526245, + "layer_9_max_spectral_norm": 0.031592246145009995, + "layer_10_update_fnorm": 0.2547309398651123, + "layer_10_max_l1_linf_norm": 0.27218788862228394, + "layer_10_max_spectral_norm": 0.03114587999880314, + 
"layer_11_update_fnorm": 0.2559455335140228, + "layer_11_max_l1_linf_norm": 0.27176350355148315, + "layer_11_max_spectral_norm": 0.03176482021808624, + "layer_12_update_fnorm": 0.24473965167999268, + "layer_12_max_l1_linf_norm": 0.26416584849357605, + "layer_12_max_spectral_norm": 0.04751014709472656, + "total_sharpness": 0.03923434391617775, + "ip_v_neg_g": 0.029720425605773926, + "cos_v_neg_g": 0.027915844693779945, + "v_norm": 1.061941385269165, + "g_norm": 1.002544641494751, + "hv_norm": 0.6061627268791199, + "cos_v_hv": 0.06873496621847153, + "hg_norm": 4.169869899749756, + "cos_g_hg": 0.4002912640571594, + "v_parallel_norm": 0.0009871110087260604, + "v_perp_norm": 1.0619409084320068, + "layer_1_v_norm": 0.21320249140262604, + "layer_1_cos_v_neg_g": 0.06933744251728058, + "layer_2_v_norm": 0.21728305518627167, + "layer_2_cos_v_neg_g": 0.1265483796596527, + "layer_3_v_norm": 0.2196386754512787, + "layer_3_cos_v_neg_g": 0.05908823758363724, + "layer_4_v_norm": 0.236786887049675, + "layer_4_cos_v_neg_g": 0.028526023030281067, + "layer_5_v_norm": 0.2375914603471756, + "layer_5_cos_v_neg_g": 0.029559725895524025, + "layer_6_v_norm": 0.2442454993724823, + "layer_6_cos_v_neg_g": 0.021322375163435936, + "layer_7_v_norm": 0.24547317624092102, + "layer_7_cos_v_neg_g": 0.02262771874666214, + "layer_8_v_norm": 0.24928107857704163, + "layer_8_cos_v_neg_g": 0.024512235075235367, + "layer_9_v_norm": 0.25348225235939026, + "layer_9_cos_v_neg_g": 0.01978703960776329, + "layer_10_v_norm": 0.2547309398651123, + "layer_10_cos_v_neg_g": 0.014271039515733719, + "layer_11_v_norm": 0.2559455335140228, + "layer_11_cos_v_neg_g": 0.013547027483582497, + "layer_12_v_norm": 0.24473965167999268, + "layer_12_cos_v_neg_g": 0.011849826201796532, + "layer_1_sharpness": 0.07910864800214767, + "layer_2_sharpness": 0.08959963917732239, + "layer_3_sharpness": 0.020596623420715332, + "layer_4_sharpness": 0.00534820044413209, + "layer_5_sharpness": 0.0073819952085614204, + "layer_6_sharpness": 0.00665050745010376, + "layer_7_sharpness": 0.008633814752101898, + "layer_8_sharpness": 0.006496136542409658, + "layer_9_sharpness": 0.004118786193430424, + "layer_10_sharpness": 0.0023020890075713396, + "layer_11_sharpness": 0.0026201035361737013, + "layer_12_sharpness": 0.005960922688245773 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..108e5d18b0f524762e1bea0a051d437628406661 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.0981005430221558, + "total_l1_linf_norm": 9764.75, + "total_spectral_norm": 1.0981004238128662, + "layer_1_update_fnorm": 0.23514872789382935, + "layer_1_max_l1_linf_norm": 0.31189730763435364, + "layer_1_max_spectral_norm": 0.050989266484975815, + "layer_2_update_fnorm": 0.22841276228427887, + "layer_2_max_l1_linf_norm": 0.3666676878929138, + "layer_2_max_spectral_norm": 0.052505988627672195, + "layer_3_update_fnorm": 0.24067291617393494, + "layer_3_max_l1_linf_norm": 0.3662817180156708, + "layer_3_max_spectral_norm": 0.05343190208077431, + "layer_4_update_fnorm": 0.2537985146045685, + "layer_4_max_l1_linf_norm": 0.3480689823627472, + "layer_4_max_spectral_norm": 0.04339737072587013, + "layer_5_update_fnorm": 0.2513248026371002, + 
"layer_5_max_l1_linf_norm": 0.30237632989883423, + "layer_5_max_spectral_norm": 0.03995725139975548, + "layer_6_update_fnorm": 0.2541588246822357, + "layer_6_max_l1_linf_norm": 0.2864009141921997, + "layer_6_max_spectral_norm": 0.03227235749363899, + "layer_7_update_fnorm": 0.25177547335624695, + "layer_7_max_l1_linf_norm": 0.277525395154953, + "layer_7_max_spectral_norm": 0.032447461038827896, + "layer_8_update_fnorm": 0.25408294796943665, + "layer_8_max_l1_linf_norm": 0.27323535084724426, + "layer_8_max_spectral_norm": 0.03234070912003517, + "layer_9_update_fnorm": 0.25954484939575195, + "layer_9_max_l1_linf_norm": 0.284968763589859, + "layer_9_max_spectral_norm": 0.03393365442752838, + "layer_10_update_fnorm": 0.26394712924957275, + "layer_10_max_l1_linf_norm": 0.2822752892971039, + "layer_10_max_spectral_norm": 0.03517917916178703, + "layer_11_update_fnorm": 0.26397567987442017, + "layer_11_max_l1_linf_norm": 0.285993367433548, + "layer_11_max_spectral_norm": 0.035196784883737564, + "layer_12_update_fnorm": 0.2590484023094177, + "layer_12_max_l1_linf_norm": 0.28222543001174927, + "layer_12_max_spectral_norm": 0.04920313134789467, + "total_sharpness": 0.028964297845959663, + "ip_v_neg_g": 0.01969177834689617, + "cos_v_neg_g": 0.018817583099007607, + "v_norm": 1.0981005430221558, + "g_norm": 0.952969491481781, + "hv_norm": 0.41312727332115173, + "cos_v_hv": 0.07698767632246017, + "hg_norm": 11.645522117614746, + "cos_g_hg": 0.1979219764471054, + "v_parallel_norm": 0.0006052335374988616, + "v_perp_norm": 1.0981003046035767, + "layer_1_v_norm": 0.23514872789382935, + "layer_1_cos_v_neg_g": 0.03599977865815163, + "layer_2_v_norm": 0.22841276228427887, + "layer_2_cos_v_neg_g": 0.026414230465888977, + "layer_3_v_norm": 0.24067291617393494, + "layer_3_cos_v_neg_g": 0.03241899609565735, + "layer_4_v_norm": 0.2537985146045685, + "layer_4_cos_v_neg_g": 0.01934724859893322, + "layer_5_v_norm": 0.2513248026371002, + "layer_5_cos_v_neg_g": 0.020191656425595284, + "layer_6_v_norm": 0.2541588246822357, + "layer_6_cos_v_neg_g": 0.021080585196614265, + "layer_7_v_norm": 0.25177547335624695, + "layer_7_cos_v_neg_g": 0.024163734167814255, + "layer_8_v_norm": 0.25408297777175903, + "layer_8_cos_v_neg_g": 0.024833885952830315, + "layer_9_v_norm": 0.25954484939575195, + "layer_9_cos_v_neg_g": 0.025582516565918922, + "layer_10_v_norm": 0.26394712924957275, + "layer_10_cos_v_neg_g": 0.022503627464175224, + "layer_11_v_norm": 0.26397570967674255, + "layer_11_cos_v_neg_g": 0.02395334281027317, + "layer_12_v_norm": 0.2590484023094177, + "layer_12_cos_v_neg_g": 0.02576163411140442, + "layer_1_sharpness": 0.03422437235713005, + "layer_2_sharpness": 0.012296924367547035, + "layer_3_sharpness": 0.011699153110384941, + "layer_4_sharpness": 0.004170697648078203, + "layer_5_sharpness": 0.0052665662951767445, + "layer_6_sharpness": 0.006240926682949066, + "layer_7_sharpness": 0.0075734276324510574, + "layer_8_sharpness": 0.005844456609338522, + "layer_9_sharpness": 0.004647924564778805, + "layer_10_sharpness": 0.003371833823621273, + "layer_11_sharpness": 0.0036657066084444523, + "layer_12_sharpness": 0.00947635993361473 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..2081fd64908a5ed863055a4364deb4eaafdb5752 --- /dev/null +++ 
b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 0.6788142323493958, + "total_l1_linf_norm": 5753.8291015625, + "total_spectral_norm": 0.6788142323493958, + "layer_1_update_fnorm": 0.11270620673894882, + "layer_1_max_l1_linf_norm": 0.17138773202896118, + "layer_1_max_spectral_norm": 0.025072529911994934, + "layer_2_update_fnorm": 0.1056872308254242, + "layer_2_max_l1_linf_norm": 0.14760041236877441, + "layer_2_max_spectral_norm": 0.02351200580596924, + "layer_3_update_fnorm": 0.10896353423595428, + "layer_3_max_l1_linf_norm": 0.167713463306427, + "layer_3_max_spectral_norm": 0.026850422844290733, + "layer_4_update_fnorm": 0.11968553066253662, + "layer_4_max_l1_linf_norm": 0.150476336479187, + "layer_4_max_spectral_norm": 0.02587083913385868, + "layer_5_update_fnorm": 0.12321449816226959, + "layer_5_max_l1_linf_norm": 0.153897225856781, + "layer_5_max_spectral_norm": 0.02604972943663597, + "layer_6_update_fnorm": 0.13295574486255646, + "layer_6_max_l1_linf_norm": 0.15974247455596924, + "layer_6_max_spectral_norm": 0.026828153058886528, + "layer_7_update_fnorm": 0.13019852340221405, + "layer_7_max_l1_linf_norm": 0.16141197085380554, + "layer_7_max_spectral_norm": 0.028423210605978966, + "layer_8_update_fnorm": 0.13219691812992096, + "layer_8_max_l1_linf_norm": 0.16747191548347473, + "layer_8_max_spectral_norm": 0.02793438546359539, + "layer_9_update_fnorm": 0.13099339604377747, + "layer_9_max_l1_linf_norm": 0.1626499742269516, + "layer_9_max_spectral_norm": 0.02918870933353901, + "layer_10_update_fnorm": 0.1323796808719635, + "layer_10_max_l1_linf_norm": 0.17038387060165405, + "layer_10_max_spectral_norm": 0.029081370681524277, + "layer_11_update_fnorm": 0.1282045841217041, + "layer_11_max_l1_linf_norm": 0.1728706657886505, + "layer_11_max_spectral_norm": 0.03103490173816681, + "layer_12_update_fnorm": 0.11779171973466873, + "layer_12_max_l1_linf_norm": 0.16222411394119263, + "layer_12_max_spectral_norm": 0.031065817922353745, + "total_sharpness": 0.24141931533813477, + "ip_v_neg_g": 0.09052520245313644, + "cos_v_neg_g": 0.07040482759475708, + "v_norm": 0.6788142323493958, + "g_norm": 1.8941576480865479, + "hv_norm": 1.5533429384231567, + "cos_v_hv": 0.10550077259540558, + "hg_norm": 44.16615295410156, + "cos_g_hg": 0.6397707462310791, + "v_parallel_norm": 0.0032009920105338097, + "v_perp_norm": 0.678806722164154, + "layer_1_v_norm": 0.11270620673894882, + "layer_1_cos_v_neg_g": 0.18228508532047272, + "layer_2_v_norm": 0.1056872308254242, + "layer_2_cos_v_neg_g": 0.21590831875801086, + "layer_3_v_norm": 0.10896352678537369, + "layer_3_cos_v_neg_g": 0.227610781788826, + "layer_4_v_norm": 0.11968553066253662, + "layer_4_cos_v_neg_g": 0.16211651265621185, + "layer_5_v_norm": 0.12321449816226959, + "layer_5_cos_v_neg_g": 0.1318286657333374, + "layer_6_v_norm": 0.13295574486255646, + "layer_6_cos_v_neg_g": 0.11599113792181015, + "layer_7_v_norm": 0.13019852340221405, + "layer_7_cos_v_neg_g": 0.1156042218208313, + "layer_8_v_norm": 0.13219693303108215, + "layer_8_cos_v_neg_g": 0.10850836336612701, + "layer_9_v_norm": 0.13099339604377747, + "layer_9_cos_v_neg_g": 0.09702208638191223, + "layer_10_v_norm": 0.1323796808719635, + "layer_10_cos_v_neg_g": 0.0994643121957779, + "layer_11_v_norm": 0.1282045841217041, + "layer_11_cos_v_neg_g": 0.10859399288892746, + "layer_12_v_norm": 0.11779171973466873, + "layer_12_cos_v_neg_g": 0.10857785493135452, + "layer_1_sharpness": 1.2150473594665527, + "layer_2_sharpness": 
0.29943200945854187, + "layer_3_sharpness": 0.22702530026435852, + "layer_4_sharpness": 0.07476428151130676, + "layer_5_sharpness": 0.04790399596095085, + "layer_6_sharpness": 0.028056418523192406, + "layer_7_sharpness": 0.02168143354356289, + "layer_8_sharpness": 0.01573491282761097, + "layer_9_sharpness": 0.013965937308967113, + "layer_10_sharpness": 0.013017418794333935, + "layer_11_sharpness": 0.016251126304268837, + "layer_12_sharpness": 0.044140078127384186 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..00734033b0e67d853bc7d090ac5fde0852736b35 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.0903258323669434, + "total_l1_linf_norm": 9689.6875, + "total_spectral_norm": 1.0903257131576538, + "layer_1_update_fnorm": 0.2311437726020813, + "layer_1_max_l1_linf_norm": 0.32913145422935486, + "layer_1_max_spectral_norm": 0.04530400037765503, + "layer_2_update_fnorm": 0.22772285342216492, + "layer_2_max_l1_linf_norm": 0.3080098628997803, + "layer_2_max_spectral_norm": 0.046871334314346313, + "layer_3_update_fnorm": 0.23787744343280792, + "layer_3_max_l1_linf_norm": 0.3493804633617401, + "layer_3_max_spectral_norm": 0.045778147876262665, + "layer_4_update_fnorm": 0.2536981701850891, + "layer_4_max_l1_linf_norm": 0.3145834803581238, + "layer_4_max_spectral_norm": 0.042931102216243744, + "layer_5_update_fnorm": 0.253456711769104, + "layer_5_max_l1_linf_norm": 0.30508551001548767, + "layer_5_max_spectral_norm": 0.03950740024447441, + "layer_6_update_fnorm": 0.25538456439971924, + "layer_6_max_l1_linf_norm": 0.28029102087020874, + "layer_6_max_spectral_norm": 0.03199439495801926, + "layer_7_update_fnorm": 0.2527664005756378, + "layer_7_max_l1_linf_norm": 0.2694498300552368, + "layer_7_max_spectral_norm": 0.031437236815690994, + "layer_8_update_fnorm": 0.25511547923088074, + "layer_8_max_l1_linf_norm": 0.26515766978263855, + "layer_8_max_spectral_norm": 0.03170809894800186, + "layer_9_update_fnorm": 0.2570345401763916, + "layer_9_max_l1_linf_norm": 0.26969847083091736, + "layer_9_max_spectral_norm": 0.031687889248132706, + "layer_10_update_fnorm": 0.2602217495441437, + "layer_10_max_l1_linf_norm": 0.27852144837379456, + "layer_10_max_spectral_norm": 0.032443173229694366, + "layer_11_update_fnorm": 0.2599414587020874, + "layer_11_max_l1_linf_norm": 0.27762559056282043, + "layer_11_max_spectral_norm": 0.0335426926612854, + "layer_12_update_fnorm": 0.2508401572704315, + "layer_12_max_l1_linf_norm": 0.2814471423625946, + "layer_12_max_spectral_norm": 0.04544948413968086, + "total_sharpness": 0.02058742567896843, + "ip_v_neg_g": 0.011587174609303474, + "cos_v_neg_g": 0.009219341911375523, + "v_norm": 1.0903258323669434, + "g_norm": 1.152713418006897, + "hv_norm": 0.4221859574317932, + "cos_v_hv": 0.053168512880802155, + "hg_norm": 16.6918888092041, + "cos_g_hg": 0.5371459722518921, + "v_parallel_norm": 0.0003545553481671959, + "v_perp_norm": 1.0903257131576538, + "layer_1_v_norm": 0.2311437726020813, + "layer_1_cos_v_neg_g": 0.017778094857931137, + "layer_2_v_norm": 0.22772285342216492, + "layer_2_cos_v_neg_g": 0.023723293095827103, + "layer_3_v_norm": 0.23787744343280792, + "layer_3_cos_v_neg_g": 0.015056881122291088, + 
"layer_4_v_norm": 0.2536981701850891, + "layer_4_cos_v_neg_g": 0.010312161408364773, + "layer_5_v_norm": 0.253456711769104, + "layer_5_cos_v_neg_g": 0.013828899711370468, + "layer_6_v_norm": 0.25538456439971924, + "layer_6_cos_v_neg_g": 0.00836160872131586, + "layer_7_v_norm": 0.2527664005756378, + "layer_7_cos_v_neg_g": 0.013545666821300983, + "layer_8_v_norm": 0.25511547923088074, + "layer_8_cos_v_neg_g": 0.013152974657714367, + "layer_9_v_norm": 0.2570345401763916, + "layer_9_cos_v_neg_g": 0.009782569482922554, + "layer_10_v_norm": 0.2602217495441437, + "layer_10_cos_v_neg_g": 0.008444605395197868, + "layer_11_v_norm": 0.2599414587020874, + "layer_11_cos_v_neg_g": 0.009991873055696487, + "layer_12_v_norm": 0.2508401572704315, + "layer_12_cos_v_neg_g": 0.006047016941010952, + "layer_1_sharpness": 0.022691510617733, + "layer_2_sharpness": 0.02534029632806778, + "layer_3_sharpness": 0.007253281306475401, + "layer_4_sharpness": 0.003056609071791172, + "layer_5_sharpness": 0.005177769344300032, + "layer_6_sharpness": 0.005173583514988422, + "layer_7_sharpness": 0.006655597127974033, + "layer_8_sharpness": 0.004509426653385162, + "layer_9_sharpness": 0.0028891870751976967, + "layer_10_sharpness": 0.002384549705311656, + "layer_11_sharpness": 0.0027744590770453215, + "layer_12_sharpness": 0.005820120684802532 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..3ba7f96412bf6071eabba8aa1685415d0ea5312d --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.094035267829895, + "total_l1_linf_norm": 9724.384765625, + "total_spectral_norm": 1.0940351486206055, + "layer_1_update_fnorm": 0.23342068493366241, + "layer_1_max_l1_linf_norm": 0.33104604482650757, + "layer_1_max_spectral_norm": 0.04144606366753578, + "layer_2_update_fnorm": 0.23062388598918915, + "layer_2_max_l1_linf_norm": 0.3293197751045227, + "layer_2_max_spectral_norm": 0.04674110561609268, + "layer_3_update_fnorm": 0.24137790501117706, + "layer_3_max_l1_linf_norm": 0.32368308305740356, + "layer_3_max_spectral_norm": 0.04351391643285751, + "layer_4_update_fnorm": 0.25370773673057556, + "layer_4_max_l1_linf_norm": 0.3083859980106354, + "layer_4_max_spectral_norm": 0.042104631662368774, + "layer_5_update_fnorm": 0.25472694635391235, + "layer_5_max_l1_linf_norm": 0.29389840364456177, + "layer_5_max_spectral_norm": 0.036866143345832825, + "layer_6_update_fnorm": 0.25586625933647156, + "layer_6_max_l1_linf_norm": 0.28681236505508423, + "layer_6_max_spectral_norm": 0.030871491879224777, + "layer_7_update_fnorm": 0.25327959656715393, + "layer_7_max_l1_linf_norm": 0.2670113444328308, + "layer_7_max_spectral_norm": 0.029387162998318672, + "layer_8_update_fnorm": 0.2558627724647522, + "layer_8_max_l1_linf_norm": 0.26352956891059875, + "layer_8_max_spectral_norm": 0.029059242457151413, + "layer_9_update_fnorm": 0.25922244787216187, + "layer_9_max_l1_linf_norm": 0.2781163156032562, + "layer_9_max_spectral_norm": 0.029219333082437515, + "layer_10_update_fnorm": 0.26077428460121155, + "layer_10_max_l1_linf_norm": 0.26962459087371826, + "layer_10_max_spectral_norm": 0.030743760988116264, + "layer_11_update_fnorm": 0.26013147830963135, + "layer_11_max_l1_linf_norm": 0.28156760334968567, + 
"layer_11_max_spectral_norm": 0.03235696256160736, + "layer_12_update_fnorm": 0.2520892918109894, + "layer_12_max_l1_linf_norm": 0.26904720067977905, + "layer_12_max_spectral_norm": 0.04369966685771942, + "total_sharpness": 0.010889801196753979, + "ip_v_neg_g": 0.0038700508885085583, + "cos_v_neg_g": 0.0018916300032287836, + "v_norm": 1.094035267829895, + "g_norm": 1.870032548904419, + "hv_norm": 0.5774075388908386, + "cos_v_hv": 0.020633306354284286, + "hg_norm": 275.0255432128906, + "cos_g_hg": 0.4500371217727661, + "v_parallel_norm": 0.00041558666271157563, + "v_perp_norm": 1.0940351486206055, + "layer_1_v_norm": 0.23342068493366241, + "layer_1_cos_v_neg_g": -0.0023509704042226076, + "layer_2_v_norm": 0.23062388598918915, + "layer_2_cos_v_neg_g": 0.002561495639383793, + "layer_3_v_norm": 0.24137790501117706, + "layer_3_cos_v_neg_g": 0.005618992727249861, + "layer_4_v_norm": 0.25370773673057556, + "layer_4_cos_v_neg_g": 0.0047216168604791164, + "layer_5_v_norm": 0.25472694635391235, + "layer_5_cos_v_neg_g": 0.0010140207596123219, + "layer_6_v_norm": 0.25586625933647156, + "layer_6_cos_v_neg_g": 0.0024153708945959806, + "layer_7_v_norm": 0.25327959656715393, + "layer_7_cos_v_neg_g": 0.004632301162928343, + "layer_8_v_norm": 0.2558627724647522, + "layer_8_cos_v_neg_g": 0.0051274714060127735, + "layer_9_v_norm": 0.25922244787216187, + "layer_9_cos_v_neg_g": 0.005904581863433123, + "layer_10_v_norm": 0.26077428460121155, + "layer_10_cos_v_neg_g": 0.004601493943482637, + "layer_11_v_norm": 0.26013147830963135, + "layer_11_cos_v_neg_g": 0.003735421923920512, + "layer_12_v_norm": 0.2520892918109894, + "layer_12_cos_v_neg_g": 0.006551493424922228, + "layer_1_sharpness": 0.025258580222725868, + "layer_2_sharpness": 0.004593135789036751, + "layer_3_sharpness": 0.004478386137634516, + "layer_4_sharpness": 0.001962638460099697, + "layer_5_sharpness": 0.0031429571099579334, + "layer_6_sharpness": 0.003119850531220436, + "layer_7_sharpness": 0.0035872766748070717, + "layer_8_sharpness": 0.0025496622547507286, + "layer_9_sharpness": 0.0018735789926722646, + "layer_10_sharpness": 0.0014110396150499582, + "layer_11_sharpness": 0.001760945888236165, + "layer_12_sharpness": 0.003313256660476327 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..0d4a9d440af40f47bb897e20d98873febecc4276 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.1120368242263794, + "total_l1_linf_norm": 9910.978515625, + "total_spectral_norm": 1.1120368242263794, + "layer_1_update_fnorm": 0.2434566766023636, + "layer_1_max_l1_linf_norm": 0.2965802252292633, + "layer_1_max_spectral_norm": 0.041701700538396835, + "layer_2_update_fnorm": 0.2407534122467041, + "layer_2_max_l1_linf_norm": 0.3078678548336029, + "layer_2_max_spectral_norm": 0.04832831770181656, + "layer_3_update_fnorm": 0.24973630905151367, + "layer_3_max_l1_linf_norm": 0.3396543860435486, + "layer_3_max_spectral_norm": 0.04949473217129707, + "layer_4_update_fnorm": 0.2606332004070282, + "layer_4_max_l1_linf_norm": 0.321319043636322, + "layer_4_max_spectral_norm": 0.042260751128196716, + "layer_5_update_fnorm": 0.2599484920501709, + "layer_5_max_l1_linf_norm": 0.32769715785980225, + "layer_5_max_spectral_norm": 
0.039932020008563995, + "layer_6_update_fnorm": 0.2619404196739197, + "layer_6_max_l1_linf_norm": 0.28531989455223083, + "layer_6_max_spectral_norm": 0.03410859405994415, + "layer_7_update_fnorm": 0.2596154808998108, + "layer_7_max_l1_linf_norm": 0.2832871973514557, + "layer_7_max_spectral_norm": 0.031027648597955704, + "layer_8_update_fnorm": 0.2621481120586395, + "layer_8_max_l1_linf_norm": 0.27194303274154663, + "layer_8_max_spectral_norm": 0.03129982575774193, + "layer_9_update_fnorm": 0.26404687762260437, + "layer_9_max_l1_linf_norm": 0.2759286165237427, + "layer_9_max_spectral_norm": 0.030864138156175613, + "layer_10_update_fnorm": 0.2660159766674042, + "layer_10_max_l1_linf_norm": 0.28308674693107605, + "layer_10_max_spectral_norm": 0.030306419357657433, + "layer_11_update_fnorm": 0.26518309116363525, + "layer_11_max_l1_linf_norm": 0.2878260612487793, + "layer_11_max_spectral_norm": 0.03213753551244736, + "layer_12_update_fnorm": 0.25770196318626404, + "layer_12_max_l1_linf_norm": 0.28333479166030884, + "layer_12_max_spectral_norm": 0.04235665127635002, + "total_sharpness": 0.0218244269490242, + "ip_v_neg_g": 0.014681234955787659, + "cos_v_neg_g": 0.01431156974285841, + "v_norm": 1.1120368242263794, + "g_norm": 0.9224782586097717, + "hv_norm": 0.3720608353614807, + "cos_v_hv": 0.0652301162481308, + "hg_norm": 3.513052225112915, + "cos_g_hg": 0.4224020838737488, + "v_parallel_norm": 0.00041576754301786423, + "v_perp_norm": 1.1120368242263794, + "layer_1_v_norm": 0.2434566766023636, + "layer_1_cos_v_neg_g": 0.01953912153840065, + "layer_2_v_norm": 0.2407534122467041, + "layer_2_cos_v_neg_g": 0.02187742106616497, + "layer_3_v_norm": 0.24973630905151367, + "layer_3_cos_v_neg_g": 0.029646556824445724, + "layer_4_v_norm": 0.2606332004070282, + "layer_4_cos_v_neg_g": 0.021282413974404335, + "layer_5_v_norm": 0.2599484920501709, + "layer_5_cos_v_neg_g": 0.021071873605251312, + "layer_6_v_norm": 0.2619404196739197, + "layer_6_cos_v_neg_g": 0.018859989941120148, + "layer_7_v_norm": 0.2596154808998108, + "layer_7_cos_v_neg_g": 0.021153388544917107, + "layer_8_v_norm": 0.2621481120586395, + "layer_8_cos_v_neg_g": 0.01846008189022541, + "layer_9_v_norm": 0.26404687762260437, + "layer_9_cos_v_neg_g": 0.014379586093127728, + "layer_10_v_norm": 0.2660159766674042, + "layer_10_cos_v_neg_g": 0.012513595633208752, + "layer_11_v_norm": 0.26518309116363525, + "layer_11_cos_v_neg_g": 0.011281775310635567, + "layer_12_v_norm": 0.25770196318626404, + "layer_12_cos_v_neg_g": 0.017269620671868324, + "layer_1_sharpness": 0.013976728543639183, + "layer_2_sharpness": 0.007708670571446419, + "layer_3_sharpness": 0.009233415126800537, + "layer_4_sharpness": 0.0038011488504707813, + "layer_5_sharpness": 0.005366782657802105, + "layer_6_sharpness": 0.006181610282510519, + "layer_7_sharpness": 0.007489002775400877, + "layer_8_sharpness": 0.005740005988627672, + "layer_9_sharpness": 0.0032918057404458523, + "layer_10_sharpness": 0.0025480822660028934, + "layer_11_sharpness": 0.003145185299217701, + "layer_12_sharpness": 0.01126586738973856 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..4f452c7dd08c6abeadb482f390c669f73fec815e --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ 
+ "total_update_fnorm": 1.120757818222046, + "total_l1_linf_norm": 9995.8994140625, + "total_spectral_norm": 1.120757818222046, + "layer_1_update_fnorm": 0.2476659119129181, + "layer_1_max_l1_linf_norm": 0.34238988161087036, + "layer_1_max_spectral_norm": 0.04669385775923729, + "layer_2_update_fnorm": 0.2437649667263031, + "layer_2_max_l1_linf_norm": 0.3315202593803406, + "layer_2_max_spectral_norm": 0.05004188045859337, + "layer_3_update_fnorm": 0.2489408254623413, + "layer_3_max_l1_linf_norm": 0.333627849817276, + "layer_3_max_spectral_norm": 0.04947082698345184, + "layer_4_update_fnorm": 0.26174843311309814, + "layer_4_max_l1_linf_norm": 0.3533805012702942, + "layer_4_max_spectral_norm": 0.04589853435754776, + "layer_5_update_fnorm": 0.2603231370449066, + "layer_5_max_l1_linf_norm": 0.3191486597061157, + "layer_5_max_spectral_norm": 0.041479796171188354, + "layer_6_update_fnorm": 0.2616256773471832, + "layer_6_max_l1_linf_norm": 0.2804158627986908, + "layer_6_max_spectral_norm": 0.03457602858543396, + "layer_7_update_fnorm": 0.25978395342826843, + "layer_7_max_l1_linf_norm": 0.27971696853637695, + "layer_7_max_spectral_norm": 0.0319005586206913, + "layer_8_update_fnorm": 0.2626936733722687, + "layer_8_max_l1_linf_norm": 0.2749263048171997, + "layer_8_max_spectral_norm": 0.030339360237121582, + "layer_9_update_fnorm": 0.2656312584877014, + "layer_9_max_l1_linf_norm": 0.27770763635635376, + "layer_9_max_spectral_norm": 0.02921205945312977, + "layer_10_update_fnorm": 0.2688790559768677, + "layer_10_max_l1_linf_norm": 0.27708369493484497, + "layer_10_max_spectral_norm": 0.03138679638504982, + "layer_11_update_fnorm": 0.26891228556632996, + "layer_11_max_l1_linf_norm": 0.2860105633735657, + "layer_11_max_spectral_norm": 0.03270963579416275, + "layer_12_update_fnorm": 0.2606135904788971, + "layer_12_max_l1_linf_norm": 0.285122811794281, + "layer_12_max_spectral_norm": 0.057005252689123154, + "total_sharpness": 0.019053466618061066, + "ip_v_neg_g": 0.013076279312372208, + "cos_v_neg_g": 0.012399458326399326, + "v_norm": 1.120757818222046, + "g_norm": 0.9409568309783936, + "hv_norm": 0.3721907138824463, + "cos_v_hv": 0.05737467482686043, + "hg_norm": 3.7080302238464355, + "cos_g_hg": 0.4720830023288727, + "v_parallel_norm": 0.0003994718717876822, + "v_perp_norm": 1.120757818222046, + "layer_1_v_norm": 0.2476659119129181, + "layer_1_cos_v_neg_g": 0.0151943014934659, + "layer_2_v_norm": 0.2437649667263031, + "layer_2_cos_v_neg_g": 0.03274793550372124, + "layer_3_v_norm": 0.2489408254623413, + "layer_3_cos_v_neg_g": 0.021143855527043343, + "layer_4_v_norm": 0.26174843311309814, + "layer_4_cos_v_neg_g": 0.010700218379497528, + "layer_5_v_norm": 0.2603231370449066, + "layer_5_cos_v_neg_g": 0.012933268211781979, + "layer_6_v_norm": 0.2616256773471832, + "layer_6_cos_v_neg_g": 0.010342827066779137, + "layer_7_v_norm": 0.25978395342826843, + "layer_7_cos_v_neg_g": 0.014727562665939331, + "layer_8_v_norm": 0.2626936733722687, + "layer_8_cos_v_neg_g": 0.014261549338698387, + "layer_9_v_norm": 0.2656312584877014, + "layer_9_cos_v_neg_g": 0.011630131863057613, + "layer_10_v_norm": 0.2688790559768677, + "layer_10_cos_v_neg_g": 0.011767464689910412, + "layer_11_v_norm": 0.26891231536865234, + "layer_11_cos_v_neg_g": 0.014319597743451595, + "layer_12_v_norm": 0.2606135904788971, + "layer_12_cos_v_neg_g": 0.032300468534231186, + "layer_1_sharpness": 0.013278614729642868, + "layer_2_sharpness": 0.018434010446071625, + "layer_3_sharpness": 0.008330016396939754, + "layer_4_sharpness": 0.002647267421707511, + 
"layer_5_sharpness": 0.0034880400635302067, + "layer_6_sharpness": 0.00417145574465394, + "layer_7_sharpness": 0.006346435286104679, + "layer_8_sharpness": 0.004362577106803656, + "layer_9_sharpness": 0.002739155199378729, + "layer_10_sharpness": 0.0019115327158942819, + "layer_11_sharpness": 0.00259420950897038, + "layer_12_sharpness": 0.018764188513159752 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..a0b2e275b8b2303ea62f4fb124f3d952a2171174 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.1194968223571777, + "total_l1_linf_norm": 9986.2529296875, + "total_spectral_norm": 1.1194968223571777, + "layer_1_update_fnorm": 0.25717294216156006, + "layer_1_max_l1_linf_norm": 0.32548800110816956, + "layer_1_max_spectral_norm": 0.05533670634031296, + "layer_2_update_fnorm": 0.24594393372535706, + "layer_2_max_l1_linf_norm": 0.31247520446777344, + "layer_2_max_spectral_norm": 0.047773074358701706, + "layer_3_update_fnorm": 0.2503860294818878, + "layer_3_max_l1_linf_norm": 0.3190259337425232, + "layer_3_max_spectral_norm": 0.049497634172439575, + "layer_4_update_fnorm": 0.26164937019348145, + "layer_4_max_l1_linf_norm": 0.32298508286476135, + "layer_4_max_spectral_norm": 0.04427934065461159, + "layer_5_update_fnorm": 0.2607120871543884, + "layer_5_max_l1_linf_norm": 0.3120023310184479, + "layer_5_max_spectral_norm": 0.03995155915617943, + "layer_6_update_fnorm": 0.2611928880214691, + "layer_6_max_l1_linf_norm": 0.2798100411891937, + "layer_6_max_spectral_norm": 0.03326765075325966, + "layer_7_update_fnorm": 0.25983449816703796, + "layer_7_max_l1_linf_norm": 0.27788639068603516, + "layer_7_max_spectral_norm": 0.02888832800090313, + "layer_8_update_fnorm": 0.2606445252895355, + "layer_8_max_l1_linf_norm": 0.27177318930625916, + "layer_8_max_spectral_norm": 0.029120253399014473, + "layer_9_update_fnorm": 0.2653975188732147, + "layer_9_max_l1_linf_norm": 0.2782445251941681, + "layer_9_max_spectral_norm": 0.028200428932905197, + "layer_10_update_fnorm": 0.2664651870727539, + "layer_10_max_l1_linf_norm": 0.28865382075309753, + "layer_10_max_spectral_norm": 0.02955334261059761, + "layer_11_update_fnorm": 0.2656322419643402, + "layer_11_max_l1_linf_norm": 0.287533700466156, + "layer_11_max_spectral_norm": 0.030967094004154205, + "layer_12_update_fnorm": 0.2593071758747101, + "layer_12_max_l1_linf_norm": 0.2838270962238312, + "layer_12_max_spectral_norm": 0.04507666826248169, + "total_sharpness": 0.017906928434967995, + "ip_v_neg_g": 0.009775453247129917, + "cos_v_neg_g": 0.00893108919262886, + "v_norm": 1.1194968223571777, + "g_norm": 0.9777090549468994, + "hv_norm": 0.4717317819595337, + "cos_v_hv": 0.04249607399106026, + "hg_norm": 10.343273162841797, + "cos_g_hg": 0.31354349851608276, + "v_parallel_norm": 0.0003034051042050123, + "v_perp_norm": 1.1194967031478882, + "layer_1_v_norm": 0.25717294216156006, + "layer_1_cos_v_neg_g": 0.017896898090839386, + "layer_2_v_norm": 0.24594393372535706, + "layer_2_cos_v_neg_g": 0.023650186136364937, + "layer_3_v_norm": 0.2503860294818878, + "layer_3_cos_v_neg_g": 0.017221778631210327, + "layer_4_v_norm": 0.26164937019348145, + "layer_4_cos_v_neg_g": 0.008537805639207363, + "layer_5_v_norm": 
0.2607120871543884, + "layer_5_cos_v_neg_g": 0.007754848338663578, + "layer_6_v_norm": 0.2611928880214691, + "layer_6_cos_v_neg_g": 0.009434881620109081, + "layer_7_v_norm": 0.25983449816703796, + "layer_7_cos_v_neg_g": 0.0085794348269701, + "layer_8_v_norm": 0.26064449548721313, + "layer_8_cos_v_neg_g": 0.009108897298574448, + "layer_9_v_norm": 0.2653975188732147, + "layer_9_cos_v_neg_g": 0.007674973923712969, + "layer_10_v_norm": 0.2664651870727539, + "layer_10_cos_v_neg_g": 0.008407382294535637, + "layer_11_v_norm": 0.2656322419643402, + "layer_11_cos_v_neg_g": 0.00929124653339386, + "layer_12_v_norm": 0.2593071758747101, + "layer_12_cos_v_neg_g": 0.007965857163071632, + "layer_1_sharpness": 0.019272146746516228, + "layer_2_sharpness": 0.018476394936442375, + "layer_3_sharpness": 0.006358639802783728, + "layer_4_sharpness": 0.0028842457104474306, + "layer_5_sharpness": 0.003648590063676238, + "layer_6_sharpness": 0.004154852591454983, + "layer_7_sharpness": 0.005111983977258205, + "layer_8_sharpness": 0.004104990977793932, + "layer_9_sharpness": 0.002782618859782815, + "layer_10_sharpness": 0.0019344250904396176, + "layer_11_sharpness": 0.0022415819112211466, + "layer_12_sharpness": 0.008284158073365688 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..e1fa6b393456bd5ec3cbaedad5aaa1c88cb28bf6 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.1163781881332397, + "total_l1_linf_norm": 9943.587890625, + "total_spectral_norm": 1.1163785457611084, + "layer_1_update_fnorm": 0.2530510425567627, + "layer_1_max_l1_linf_norm": 0.3329455256462097, + "layer_1_max_spectral_norm": 0.04459354653954506, + "layer_2_update_fnorm": 0.24706339836120605, + "layer_2_max_l1_linf_norm": 0.31831079721450806, + "layer_2_max_spectral_norm": 0.04875841364264488, + "layer_3_update_fnorm": 0.24980804324150085, + "layer_3_max_l1_linf_norm": 0.35881710052490234, + "layer_3_max_spectral_norm": 0.04843524843454361, + "layer_4_update_fnorm": 0.2593933939933777, + "layer_4_max_l1_linf_norm": 0.3040340542793274, + "layer_4_max_spectral_norm": 0.043223313987255096, + "layer_5_update_fnorm": 0.2592882812023163, + "layer_5_max_l1_linf_norm": 0.2982392907142639, + "layer_5_max_spectral_norm": 0.03779314458370209, + "layer_6_update_fnorm": 0.26070383191108704, + "layer_6_max_l1_linf_norm": 0.2886424660682678, + "layer_6_max_spectral_norm": 0.03565102070569992, + "layer_7_update_fnorm": 0.2590341567993164, + "layer_7_max_l1_linf_norm": 0.28362491726875305, + "layer_7_max_spectral_norm": 0.030978646129369736, + "layer_8_update_fnorm": 0.26084184646606445, + "layer_8_max_l1_linf_norm": 0.27831992506980896, + "layer_8_max_spectral_norm": 0.028504032641649246, + "layer_9_update_fnorm": 0.26541024446487427, + "layer_9_max_l1_linf_norm": 0.2740483283996582, + "layer_9_max_spectral_norm": 0.028315717354416847, + "layer_10_update_fnorm": 0.2660135328769684, + "layer_10_max_l1_linf_norm": 0.27474892139434814, + "layer_10_max_spectral_norm": 0.028653843328356743, + "layer_11_update_fnorm": 0.26473551988601685, + "layer_11_max_l1_linf_norm": 0.2973332405090332, + "layer_11_max_spectral_norm": 0.029588723555207253, + "layer_12_update_fnorm": 0.25468796491622925, + 
"layer_12_max_l1_linf_norm": 0.2867622375488281, + "layer_12_max_spectral_norm": 0.04307639226317406, + "total_sharpness": 0.015616237185895443, + "ip_v_neg_g": 0.01024535670876503, + "cos_v_neg_g": 0.00873036403208971, + "v_norm": 1.1163781881332397, + "g_norm": 1.0511952638626099, + "hv_norm": 0.3734554350376129, + "cos_v_hv": 0.046681951731443405, + "hg_norm": 6.572267532348633, + "cos_g_hg": 0.5015509724617004, + "v_parallel_norm": 0.00026328637613914907, + "v_perp_norm": 1.1163781881332397, + "layer_1_v_norm": 0.2530510425567627, + "layer_1_cos_v_neg_g": 0.01308517251163721, + "layer_2_v_norm": 0.24706339836120605, + "layer_2_cos_v_neg_g": 0.015412909910082817, + "layer_3_v_norm": 0.24980804324150085, + "layer_3_cos_v_neg_g": 0.01655644364655018, + "layer_4_v_norm": 0.2593933939933777, + "layer_4_cos_v_neg_g": 0.009404993616044521, + "layer_5_v_norm": 0.2592882812023163, + "layer_5_cos_v_neg_g": 0.007371424697339535, + "layer_6_v_norm": 0.26070383191108704, + "layer_6_cos_v_neg_g": 0.009520185180008411, + "layer_7_v_norm": 0.2590341567993164, + "layer_7_cos_v_neg_g": 0.011381831020116806, + "layer_8_v_norm": 0.26084184646606445, + "layer_8_cos_v_neg_g": 0.011974059045314789, + "layer_9_v_norm": 0.26541024446487427, + "layer_9_cos_v_neg_g": 0.012314927764236927, + "layer_10_v_norm": 0.2660135328769684, + "layer_10_cos_v_neg_g": 0.011275868862867355, + "layer_11_v_norm": 0.26473551988601685, + "layer_11_cos_v_neg_g": 0.009265949949622154, + "layer_12_v_norm": 0.25468796491622925, + "layer_12_cos_v_neg_g": 0.010428600944578648, + "layer_1_sharpness": 0.013085383921861649, + "layer_2_sharpness": 0.009558046236634254, + "layer_3_sharpness": 0.005709432065486908, + "layer_4_sharpness": 0.002175848698243499, + "layer_5_sharpness": 0.0030163160990923643, + "layer_6_sharpness": 0.004307260271161795, + "layer_7_sharpness": 0.005800645332783461, + "layer_8_sharpness": 0.003982942551374435, + "layer_9_sharpness": 0.0029757751617580652, + "layer_10_sharpness": 0.001844826270826161, + "layer_11_sharpness": 0.002110213041305542, + "layer_12_sharpness": 0.004434769973158836 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_8000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..ca921dab4a709cdf39b081453945bccdcbdaefe5 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.1135246753692627, + "total_l1_linf_norm": 9926.2265625, + "total_spectral_norm": 1.1135247945785522, + "layer_1_update_fnorm": 0.24763156473636627, + "layer_1_max_l1_linf_norm": 0.3062736392021179, + "layer_1_max_spectral_norm": 0.042256444692611694, + "layer_2_update_fnorm": 0.24429205060005188, + "layer_2_max_l1_linf_norm": 0.3106994330883026, + "layer_2_max_spectral_norm": 0.0478147529065609, + "layer_3_update_fnorm": 0.24960213899612427, + "layer_3_max_l1_linf_norm": 0.3248298466205597, + "layer_3_max_spectral_norm": 0.04382866621017456, + "layer_4_update_fnorm": 0.2611639201641083, + "layer_4_max_l1_linf_norm": 0.32564371824264526, + "layer_4_max_spectral_norm": 0.04057054594159126, + "layer_5_update_fnorm": 0.2608652710914612, + "layer_5_max_l1_linf_norm": 0.30214762687683105, + "layer_5_max_spectral_norm": 0.03699731454253197, + "layer_6_update_fnorm": 0.2611634433269501, + "layer_6_max_l1_linf_norm": 0.2871863842010498, 
+ "layer_6_max_spectral_norm": 0.029540274292230606, + "layer_7_update_fnorm": 0.2584001421928406, + "layer_7_max_l1_linf_norm": 0.277610719203949, + "layer_7_max_spectral_norm": 0.027046410366892815, + "layer_8_update_fnorm": 0.26023972034454346, + "layer_8_max_l1_linf_norm": 0.2625691294670105, + "layer_8_max_spectral_norm": 0.02592632919549942, + "layer_9_update_fnorm": 0.2642362415790558, + "layer_9_max_l1_linf_norm": 0.26967042684555054, + "layer_9_max_spectral_norm": 0.026014890521764755, + "layer_10_update_fnorm": 0.26662594079971313, + "layer_10_max_l1_linf_norm": 0.27671048045158386, + "layer_10_max_spectral_norm": 0.027020197361707687, + "layer_11_update_fnorm": 0.26556676626205444, + "layer_11_max_l1_linf_norm": 0.27888554334640503, + "layer_11_max_spectral_norm": 0.028175145387649536, + "layer_12_update_fnorm": 0.2587375342845917, + "layer_12_max_l1_linf_norm": 0.2828112542629242, + "layer_12_max_spectral_norm": 0.04544755071401596, + "total_sharpness": 0.009844634681940079, + "ip_v_neg_g": 0.005040885880589485, + "cos_v_neg_g": 0.004712923429906368, + "v_norm": 1.1135246753692627, + "g_norm": 0.9605426788330078, + "hv_norm": 0.2532932162284851, + "cos_v_hv": 0.04327886924147606, + "hg_norm": 6.215693473815918, + "cos_g_hg": 0.42956990003585815, + "v_parallel_norm": 0.00019702779536601156, + "v_perp_norm": 1.1135246753692627, + "layer_1_v_norm": 0.24763156473636627, + "layer_1_cos_v_neg_g": 0.006438896991312504, + "layer_2_v_norm": 0.24429205060005188, + "layer_2_cos_v_neg_g": 0.007837488315999508, + "layer_3_v_norm": 0.24960213899612427, + "layer_3_cos_v_neg_g": 0.006438135169446468, + "layer_4_v_norm": 0.2611639201641083, + "layer_4_cos_v_neg_g": 0.003998824395239353, + "layer_5_v_norm": 0.2608652710914612, + "layer_5_cos_v_neg_g": 0.0052037290297448635, + "layer_6_v_norm": 0.2611634433269501, + "layer_6_cos_v_neg_g": 0.0043947440572083, + "layer_7_v_norm": 0.2584001421928406, + "layer_7_cos_v_neg_g": 0.005523670930415392, + "layer_8_v_norm": 0.26023972034454346, + "layer_8_cos_v_neg_g": 0.0044180466793477535, + "layer_9_v_norm": 0.2642362415790558, + "layer_9_cos_v_neg_g": 0.004418130032718182, + "layer_10_v_norm": 0.26662594079971313, + "layer_10_cos_v_neg_g": 0.0045570675283670425, + "layer_11_v_norm": 0.26556679606437683, + "layer_11_cos_v_neg_g": 0.005392624065279961, + "layer_12_v_norm": 0.2587375342845917, + "layer_12_cos_v_neg_g": 0.013951637782156467, + "layer_1_sharpness": 0.009665731340646744, + "layer_2_sharpness": 0.004007413517683744, + "layer_3_sharpness": 0.0036665117368102074, + "layer_4_sharpness": 0.0019638733938336372, + "layer_5_sharpness": 0.0026269396767020226, + "layer_6_sharpness": 0.002525940304622054, + "layer_7_sharpness": 0.0028254911303520203, + "layer_8_sharpness": 0.002303967485204339, + "layer_9_sharpness": 0.0016530024586245418, + "layer_10_sharpness": 0.0013161345850676298, + "layer_11_sharpness": 0.0017392217414453626, + "layer_12_sharpness": 0.007986483164131641 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..255ca8d0ba845be6d0b26027c01d69867a630caf --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.0835051536560059, + "total_l1_linf_norm": 9599.966796875, + 
"total_spectral_norm": 1.083505392074585, + "layer_1_update_fnorm": 0.23354308307170868, + "layer_1_max_l1_linf_norm": 0.3117724657058716, + "layer_1_max_spectral_norm": 0.04182633012533188, + "layer_2_update_fnorm": 0.23231624066829681, + "layer_2_max_l1_linf_norm": 0.3227694630622864, + "layer_2_max_spectral_norm": 0.04693714156746864, + "layer_3_update_fnorm": 0.24212291836738586, + "layer_3_max_l1_linf_norm": 0.30497950315475464, + "layer_3_max_spectral_norm": 0.04168697074055672, + "layer_4_update_fnorm": 0.25301259756088257, + "layer_4_max_l1_linf_norm": 0.290172278881073, + "layer_4_max_spectral_norm": 0.04186726734042168, + "layer_5_update_fnorm": 0.25311800837516785, + "layer_5_max_l1_linf_norm": 0.309566468000412, + "layer_5_max_spectral_norm": 0.038733091205358505, + "layer_6_update_fnorm": 0.25341707468032837, + "layer_6_max_l1_linf_norm": 0.26735562086105347, + "layer_6_max_spectral_norm": 0.033711835741996765, + "layer_7_update_fnorm": 0.25091204047203064, + "layer_7_max_l1_linf_norm": 0.283582866191864, + "layer_7_max_spectral_norm": 0.03176094591617584, + "layer_8_update_fnorm": 0.2512013912200928, + "layer_8_max_l1_linf_norm": 0.2714805603027344, + "layer_8_max_spectral_norm": 0.02844427153468132, + "layer_9_update_fnorm": 0.2539193034172058, + "layer_9_max_l1_linf_norm": 0.2750430405139923, + "layer_9_max_spectral_norm": 0.027935262769460678, + "layer_10_update_fnorm": 0.2513543367385864, + "layer_10_max_l1_linf_norm": 0.2659711539745331, + "layer_10_max_spectral_norm": 0.026787463575601578, + "layer_11_update_fnorm": 0.25139012932777405, + "layer_11_max_l1_linf_norm": 0.2866349518299103, + "layer_11_max_spectral_norm": 0.030180560424923897, + "layer_12_update_fnorm": 0.24467968940734863, + "layer_12_max_l1_linf_norm": 0.29116785526275635, + "layer_12_max_spectral_norm": 0.04276953637599945, + "total_sharpness": 0.0124109648168087, + "ip_v_neg_g": 0.0063414741307497025, + "cos_v_neg_g": 0.006066424772143364, + "v_norm": 1.0835051536560059, + "g_norm": 0.964775800704956, + "hv_norm": 0.2881239056587219, + "cos_v_hv": 0.046672090888023376, + "hg_norm": 4.495278358459473, + "cos_g_hg": 0.46409595012664795, + "v_parallel_norm": 0.00019225523283239454, + "v_perp_norm": 1.0835051536560059, + "layer_1_v_norm": 0.23354308307170868, + "layer_1_cos_v_neg_g": 0.008482562378048897, + "layer_2_v_norm": 0.23231624066829681, + "layer_2_cos_v_neg_g": 0.009415003471076488, + "layer_3_v_norm": 0.24212291836738586, + "layer_3_cos_v_neg_g": 0.007516610436141491, + "layer_4_v_norm": 0.25301259756088257, + "layer_4_cos_v_neg_g": 0.00806819275021553, + "layer_5_v_norm": 0.25311800837516785, + "layer_5_cos_v_neg_g": 0.008021692745387554, + "layer_6_v_norm": 0.25341707468032837, + "layer_6_cos_v_neg_g": 0.00984003022313118, + "layer_7_v_norm": 0.25091204047203064, + "layer_7_cos_v_neg_g": 0.008271725848317146, + "layer_8_v_norm": 0.2512013912200928, + "layer_8_cos_v_neg_g": 0.009059238247573376, + "layer_9_v_norm": 0.2539193034172058, + "layer_9_cos_v_neg_g": 0.009236970916390419, + "layer_10_v_norm": 0.2513543367385864, + "layer_10_cos_v_neg_g": 0.007637942209839821, + "layer_11_v_norm": 0.25139012932777405, + "layer_11_cos_v_neg_g": 0.005947345867753029, + "layer_12_v_norm": 0.24467968940734863, + "layer_12_cos_v_neg_g": 0.0018872099462896585, + "layer_1_sharpness": 0.010297644883394241, + "layer_2_sharpness": 0.006556202191859484, + "layer_3_sharpness": 0.004593019373714924, + "layer_4_sharpness": 0.001829013112001121, + "layer_5_sharpness": 0.0032445620745420456, + "layer_6_sharpness": 
0.003243145765736699, + "layer_7_sharpness": 0.0042036608792841434, + "layer_8_sharpness": 0.003860814031213522, + "layer_9_sharpness": 0.002896767109632492, + "layer_10_sharpness": 0.001688790158368647, + "layer_11_sharpness": 0.0018439044943079352, + "layer_12_sharpness": 0.007646720856428146 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..a5670f58c6ee9120d52bbb93ef37b80e96977f52 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.1115198135375977, + "total_l1_linf_norm": 9896.71875, + "total_spectral_norm": 1.1115198135375977, + "layer_1_update_fnorm": 0.23800498247146606, + "layer_1_max_l1_linf_norm": 0.32479870319366455, + "layer_1_max_spectral_norm": 0.042666010558605194, + "layer_2_update_fnorm": 0.23756656050682068, + "layer_2_max_l1_linf_norm": 0.29388970136642456, + "layer_2_max_spectral_norm": 0.047873955219984055, + "layer_3_update_fnorm": 0.24838650226593018, + "layer_3_max_l1_linf_norm": 0.3143423795700073, + "layer_3_max_spectral_norm": 0.04875531792640686, + "layer_4_update_fnorm": 0.261013388633728, + "layer_4_max_l1_linf_norm": 0.34075573086738586, + "layer_4_max_spectral_norm": 0.04311222955584526, + "layer_5_update_fnorm": 0.26146814227104187, + "layer_5_max_l1_linf_norm": 0.3529907464981079, + "layer_5_max_spectral_norm": 0.04067551717162132, + "layer_6_update_fnorm": 0.26124629378318787, + "layer_6_max_l1_linf_norm": 0.30175700783729553, + "layer_6_max_spectral_norm": 0.03440380096435547, + "layer_7_update_fnorm": 0.25893211364746094, + "layer_7_max_l1_linf_norm": 0.29572609066963196, + "layer_7_max_spectral_norm": 0.031470488756895065, + "layer_8_update_fnorm": 0.2608037292957306, + "layer_8_max_l1_linf_norm": 0.2732388377189636, + "layer_8_max_spectral_norm": 0.02627212181687355, + "layer_9_update_fnorm": 0.2651941478252411, + "layer_9_max_l1_linf_norm": 0.2737981677055359, + "layer_9_max_spectral_norm": 0.025355583056807518, + "layer_10_update_fnorm": 0.26623088121414185, + "layer_10_max_l1_linf_norm": 0.2776816785335541, + "layer_10_max_spectral_norm": 0.026775019243359566, + "layer_11_update_fnorm": 0.2666591703891754, + "layer_11_max_l1_linf_norm": 0.28226447105407715, + "layer_11_max_spectral_norm": 0.030242202803492546, + "layer_12_update_fnorm": 0.2607784569263458, + "layer_12_max_l1_linf_norm": 0.28663456439971924, + "layer_12_max_spectral_norm": 0.04726535454392433, + "total_sharpness": 0.012141101062297821, + "ip_v_neg_g": 0.006504545919597149, + "cos_v_neg_g": 0.005627172067761421, + "v_norm": 1.1115198135375977, + "g_norm": 1.0399430990219116, + "hv_norm": 0.3280918598175049, + "cos_v_hv": 0.04113200306892395, + "hg_norm": 7.141217231750488, + "cos_g_hg": 0.5269321799278259, + "v_parallel_norm": 0.0002091705973725766, + "v_perp_norm": 1.1115198135375977, + "layer_1_v_norm": 0.23800498247146606, + "layer_1_cos_v_neg_g": 0.008541679009795189, + "layer_2_v_norm": 0.23756656050682068, + "layer_2_cos_v_neg_g": 0.008768831379711628, + "layer_3_v_norm": 0.24838651716709137, + "layer_3_cos_v_neg_g": 0.009565172716975212, + "layer_4_v_norm": 0.261013388633728, + "layer_4_cos_v_neg_g": 0.005840600933879614, + "layer_5_v_norm": 0.26146814227104187, + "layer_5_cos_v_neg_g": 0.005529489368200302, + 
"layer_6_v_norm": 0.26124629378318787, + "layer_6_cos_v_neg_g": 0.007500015199184418, + "layer_7_v_norm": 0.25893211364746094, + "layer_7_cos_v_neg_g": 0.006433262024074793, + "layer_8_v_norm": 0.2608037292957306, + "layer_8_cos_v_neg_g": 0.007229223381727934, + "layer_9_v_norm": 0.2651941478252411, + "layer_9_cos_v_neg_g": 0.008516346104443073, + "layer_10_v_norm": 0.26623088121414185, + "layer_10_cos_v_neg_g": 0.0072008417919278145, + "layer_11_v_norm": 0.2666591703891754, + "layer_11_cos_v_neg_g": 0.005040226504206657, + "layer_12_v_norm": 0.2607784569263458, + "layer_12_cos_v_neg_g": 0.008043010719120502, + "layer_1_sharpness": 0.010107113048434258, + "layer_2_sharpness": 0.0060367584228515625, + "layer_3_sharpness": 0.0049723293632268906, + "layer_4_sharpness": 0.0018588827224448323, + "layer_5_sharpness": 0.0032710623927414417, + "layer_6_sharpness": 0.002910591894760728, + "layer_7_sharpness": 0.0035612517967820168, + "layer_8_sharpness": 0.0029730936512351036, + "layer_9_sharpness": 0.0021255887113511562, + "layer_10_sharpness": 0.0016230691689997911, + "layer_11_sharpness": 0.0018556431168690324, + "layer_12_sharpness": 0.008983186446130276 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..d80e4564b58cbacdd0f6e476719481c6cdae8e4d --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.1092569828033447, + "total_l1_linf_norm": 9874.205078125, + "total_spectral_norm": 1.1092569828033447, + "layer_1_update_fnorm": 0.24888797104358673, + "layer_1_max_l1_linf_norm": 0.31031930446624756, + "layer_1_max_spectral_norm": 0.044232077896595, + "layer_2_update_fnorm": 0.2402222603559494, + "layer_2_max_l1_linf_norm": 0.3019203543663025, + "layer_2_max_spectral_norm": 0.04861653968691826, + "layer_3_update_fnorm": 0.24637463688850403, + "layer_3_max_l1_linf_norm": 0.28930139541625977, + "layer_3_max_spectral_norm": 0.04410361126065254, + "layer_4_update_fnorm": 0.2592525780200958, + "layer_4_max_l1_linf_norm": 0.3387379050254822, + "layer_4_max_spectral_norm": 0.04376550018787384, + "layer_5_update_fnorm": 0.2613775134086609, + "layer_5_max_l1_linf_norm": 0.33109116554260254, + "layer_5_max_spectral_norm": 0.040844306349754333, + "layer_6_update_fnorm": 0.26046428084373474, + "layer_6_max_l1_linf_norm": 0.2790064215660095, + "layer_6_max_spectral_norm": 0.033289529383182526, + "layer_7_update_fnorm": 0.2574388384819031, + "layer_7_max_l1_linf_norm": 0.28155434131622314, + "layer_7_max_spectral_norm": 0.031533289700746536, + "layer_8_update_fnorm": 0.2581997215747833, + "layer_8_max_l1_linf_norm": 0.27590394020080566, + "layer_8_max_spectral_norm": 0.02576851099729538, + "layer_9_update_fnorm": 0.26305946707725525, + "layer_9_max_l1_linf_norm": 0.27127912640571594, + "layer_9_max_spectral_norm": 0.024895576760172844, + "layer_10_update_fnorm": 0.2653194069862366, + "layer_10_max_l1_linf_norm": 0.2750301957130432, + "layer_10_max_spectral_norm": 0.026009833440184593, + "layer_11_update_fnorm": 0.26495906710624695, + "layer_11_max_l1_linf_norm": 0.302049458026886, + "layer_11_max_spectral_norm": 0.030741875991225243, + "layer_12_update_fnorm": 0.25835150480270386, + "layer_12_max_l1_linf_norm": 0.2804790735244751, + 
"layer_12_max_spectral_norm": 0.04229551553726196, + "total_sharpness": 0.012473810464143753, + "ip_v_neg_g": 0.006630114279687405, + "cos_v_neg_g": 0.005893408786505461, + "v_norm": 1.1092569828033447, + "g_norm": 1.0141968727111816, + "hv_norm": 0.38774093985557556, + "cos_v_hv": 0.035685326904058456, + "hg_norm": 10.013707160949707, + "cos_g_hg": 0.4107072949409485, + "v_parallel_norm": 0.00019622767285909504, + "v_perp_norm": 1.1092569828033447, + "layer_1_v_norm": 0.24888797104358673, + "layer_1_cos_v_neg_g": 0.011410245671868324, + "layer_2_v_norm": 0.2402222603559494, + "layer_2_cos_v_neg_g": 0.00702416617423296, + "layer_3_v_norm": 0.24637465178966522, + "layer_3_cos_v_neg_g": 0.004146074876189232, + "layer_4_v_norm": 0.2592525780200958, + "layer_4_cos_v_neg_g": 0.0030561243183910847, + "layer_5_v_norm": 0.2613775134086609, + "layer_5_cos_v_neg_g": 0.007225987035781145, + "layer_6_v_norm": 0.26046428084373474, + "layer_6_cos_v_neg_g": 0.005729703698307276, + "layer_7_v_norm": 0.2574388384819031, + "layer_7_cos_v_neg_g": 0.007922496646642685, + "layer_8_v_norm": 0.2581997215747833, + "layer_8_cos_v_neg_g": 0.008034324273467064, + "layer_9_v_norm": 0.26305946707725525, + "layer_9_cos_v_neg_g": 0.00877557322382927, + "layer_10_v_norm": 0.2653194069862366, + "layer_10_cos_v_neg_g": 0.008175780065357685, + "layer_11_v_norm": 0.26495903730392456, + "layer_11_cos_v_neg_g": 0.00848754495382309, + "layer_12_v_norm": 0.25835150480270386, + "layer_12_cos_v_neg_g": 0.010973993688821793, + "layer_1_sharpness": 0.017537035048007965, + "layer_2_sharpness": 0.008221188560128212, + "layer_3_sharpness": 0.004174663219600916, + "layer_4_sharpness": 0.0017081773839890957, + "layer_5_sharpness": 0.0034677942749112844, + "layer_6_sharpness": 0.003053293563425541, + "layer_7_sharpness": 0.0038031984586268663, + "layer_8_sharpness": 0.0026794569566845894, + "layer_9_sharpness": 0.0019165714038535953, + "layer_10_sharpness": 0.0014192188391461968, + "layer_11_sharpness": 0.001961775589734316, + "layer_12_sharpness": 0.006132913753390312 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/training_log.txt b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..4326eb5c8cc409b4436d7f5f8bbb59606e69df7c --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.0005_mlr_0.01_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. 
+ +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" 
+ return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in 
range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. 
+ # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. 
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
+ loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = <v, -g>; cos_v_neg_g = <v, -g> / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm =
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameter that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, 
+ device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026603 +step:0 train loss:11.019236 +step:1 train loss:10.984179 +step:2 train loss:10.899072 +step:3 train loss:10.782001 +step:4 train loss:10.647367 +step:5 train loss:10.500494 +step:6 train loss:10.370033 +step:7 train loss:10.237843 +step:8 train loss:10.155497 +step:9 train loss:10.052963 +step:10 train loss:9.973687 +step:11 train loss:9.908942 +step:12 train loss:9.824200 +step:13 train loss:9.763720 +step:14 train loss:9.718290 +step:15 train loss:9.694456 +step:16 train loss:9.662695 +step:17 train loss:9.628262 +step:18 train loss:9.618961 +step:19 train loss:9.588086 +step:20 train loss:9.560263 +step:21 train loss:9.542135 +step:22 train loss:9.473760 +step:23 train loss:9.477495 +step:24 train loss:9.415913 +step:25 train loss:9.407367 +step:26 train loss:9.359302 +step:27 train loss:9.305637 +step:28 train loss:9.305487 +step:29 train loss:9.266809 +step:30 train loss:9.240658 +step:31 train loss:9.180003 +step:32 train loss:9.139183 +step:33 train loss:9.110190 +step:34 train loss:9.116907 +step:35 train loss:9.043916 +step:36 train loss:9.027126 +step:37 train loss:8.972307 +step:38 train loss:8.978395 
+step:39 train loss:8.934462 +step:40 train loss:8.911207 +step:41 train loss:8.844534 +step:42 train loss:8.848594 +step:43 train loss:8.773499 +step:44 train loss:8.739384 +step:45 train loss:8.734095 +step:46 train loss:8.711760 +step:47 train loss:8.706924 +step:48 train loss:8.630234 +step:49 train loss:8.601296 +step:50 train loss:8.530943 +step:51 train loss:8.538498 +step:52 train loss:8.501981 +step:53 train loss:8.485594 +step:54 train loss:8.435383 +step:55 train loss:8.404305 +step:56 train loss:8.344446 +step:57 train loss:8.349399 +step:58 train loss:8.280663 +step:59 train loss:8.286101 +step:60 train loss:8.233448 +step:61 train loss:8.202339 +step:62 train loss:8.163420 +step:63 train loss:8.181179 +step:64 train loss:8.086758 +step:65 train loss:8.097939 +step:66 train loss:8.056900 +step:67 train loss:8.056993 +step:68 train loss:7.998500 +step:69 train loss:7.969890 +step:70 train loss:7.906733 +step:71 train loss:7.901424 +step:72 train loss:7.898109 +step:73 train loss:7.841643 +step:74 train loss:7.828317 +step:75 train loss:7.767147 +step:76 train loss:7.827910 +step:77 train loss:7.761956 +step:78 train loss:7.514393 +step:79 train loss:7.673117 +step:80 train loss:7.622960 +step:81 train loss:7.663232 +step:82 train loss:7.624333 +step:83 train loss:7.589827 +step:84 train loss:7.535296 +step:85 train loss:7.508885 +step:86 train loss:7.489910 +step:87 train loss:7.440001 +step:88 train loss:7.425511 +step:89 train loss:7.388698 +step:90 train loss:7.407055 +step:91 train loss:7.397779 +step:92 train loss:7.380470 +step:93 train loss:7.321068 +step:94 train loss:7.290444 +step:95 train loss:7.222616 +step:96 train loss:7.307501 +step:97 train loss:7.243371 +step:98 train loss:7.223853 +step:99 train loss:7.187064 +step:100 train loss:7.221176 +step:101 train loss:7.126766 +step:102 train loss:7.120817 +step:103 train loss:7.092525 +step:104 train loss:7.133571 +step:105 train loss:7.178358 +step:106 train loss:7.115777 +step:107 train loss:7.062912 +step:108 train loss:7.076936 +step:109 train loss:7.107559 +step:110 train loss:7.027490 +step:111 train loss:7.036371 +step:112 train loss:7.024679 +step:113 train loss:6.988386 +step:114 train loss:7.049751 +step:115 train loss:6.982197 +step:116 train loss:6.963199 +step:117 train loss:6.907516 +step:118 train loss:6.965391 +step:119 train loss:6.905347 +step:120 train loss:6.912918 +step:121 train loss:6.837617 +step:122 train loss:6.929713 +step:123 train loss:6.852123 +step:124 train loss:6.831995 +step:125 train loss:6.811581 +step:126 train loss:6.898519 +step:127 train loss:6.814914 +step:128 train loss:6.845755 +step:129 train loss:6.831797 +step:130 train loss:6.869724 +step:131 train loss:6.805963 +step:132 train loss:6.728343 +step:133 train loss:6.798869 +step:134 train loss:6.759259 +step:135 train loss:6.672851 +step:136 train loss:6.711963 +step:137 train loss:6.714262 +step:138 train loss:6.656945 +step:139 train loss:6.726337 +step:140 train loss:6.639026 +step:141 train loss:6.735714 +step:142 train loss:6.685653 +step:143 train loss:6.694513 +step:144 train loss:6.665595 +step:145 train loss:6.611762 +step:146 train loss:6.620260 +step:147 train loss:6.672632 +step:148 train loss:6.677407 +step:149 train loss:6.627509 +step:150 train loss:6.644113 +step:151 train loss:6.546781 +step:152 train loss:6.578778 +step:153 train loss:6.560413 +step:154 train loss:6.636101 +step:155 train loss:6.613058 +step:156 train loss:6.645256 +step:157 train loss:6.548901 +step:158 train loss:6.540185 +step:159 train 
loss:6.582786 +step:160 train loss:6.554414 +step:161 train loss:6.540492 +step:162 train loss:6.516109 +step:163 train loss:6.531860 +step:164 train loss:6.554300 +step:165 train loss:6.549726 +step:166 train loss:6.513391 +step:167 train loss:6.508713 +step:168 train loss:6.472920 +step:169 train loss:6.427412 +step:170 train loss:6.402400 +step:171 train loss:6.515480 +step:172 train loss:6.450202 +step:173 train loss:6.494309 +step:174 train loss:6.493180 +step:175 train loss:6.455366 +step:176 train loss:6.404957 +step:177 train loss:6.458576 +step:178 train loss:6.462486 +step:179 train loss:6.418333 +step:180 train loss:6.403376 +step:181 train loss:6.434826 +step:182 train loss:6.371561 +step:183 train loss:6.464499 +step:184 train loss:6.431846 +step:185 train loss:6.342300 +step:186 train loss:6.501578 +step:187 train loss:6.444722 +step:188 train loss:6.250274 +step:189 train loss:6.426685 +step:190 train loss:6.403035 +step:191 train loss:6.338290 +step:192 train loss:6.230541 +step:193 train loss:6.399723 +step:194 train loss:6.422791 +step:195 train loss:6.421739 +step:196 train loss:6.384496 +step:197 train loss:6.381555 +step:198 train loss:6.327935 +step:199 train loss:6.408882 +step:200 train loss:6.451968 +step:201 train loss:6.374215 +step:202 train loss:6.381957 +step:203 train loss:6.335304 +step:204 train loss:6.369170 +step:205 train loss:6.223967 +step:206 train loss:6.364477 +step:207 train loss:6.328036 +step:208 train loss:6.292068 +step:209 train loss:6.281466 +step:210 train loss:6.282086 +step:211 train loss:6.362781 +step:212 train loss:6.298917 +step:213 train loss:6.326395 +step:214 train loss:6.311051 +step:215 train loss:6.330130 +step:216 train loss:6.270106 +step:217 train loss:6.270668 +step:218 train loss:6.259153 +step:219 train loss:6.230759 +step:220 train loss:6.286850 +step:221 train loss:6.233253 +step:222 train loss:6.278460 +step:223 train loss:6.294567 +step:224 train loss:6.275804 +step:225 train loss:6.224158 +step:226 train loss:6.218645 +step:227 train loss:6.285364 +step:228 train loss:6.255214 +step:229 train loss:6.338775 +step:230 train loss:6.196092 +step:231 train loss:6.255785 +step:232 train loss:6.237275 +step:233 train loss:6.196990 +step:234 train loss:6.212633 +step:235 train loss:6.287139 +step:236 train loss:6.234074 +step:237 train loss:6.262532 +step:238 train loss:6.270058 +step:239 train loss:6.193700 +step:240 train loss:6.249683 +step:241 train loss:6.282819 +step:242 train loss:6.264236 +step:243 train loss:6.177961 +step:244 train loss:6.216415 +step:245 train loss:6.195317 +step:246 train loss:6.198963 +step:247 train loss:6.191261 +step:248 train loss:6.150650 +step:249 train loss:6.209391 +step:250 validation loss:6.191790 +step:250 train loss:6.175989 +step:251 train loss:6.197844 +step:252 train loss:6.164354 +step:253 train loss:6.171415 +step:254 train loss:6.133955 +step:255 train loss:6.168437 +step:256 train loss:6.177054 +step:257 train loss:6.213777 +step:258 train loss:6.109875 +step:259 train loss:6.149591 +step:260 train loss:6.125142 +step:261 train loss:6.118462 +step:262 train loss:6.196522 +step:263 train loss:6.155692 +step:264 train loss:6.104962 +step:265 train loss:6.139582 +step:266 train loss:6.092293 +step:267 train loss:6.133449 +step:268 train loss:6.098980 +step:269 train loss:6.116509 +step:270 train loss:6.141114 +step:271 train loss:6.121449 +step:272 train loss:6.075552 +step:273 train loss:6.161743 +step:274 train loss:6.076080 +step:275 train loss:6.093923 +step:276 train 
loss:6.079506 +step:277 train loss:6.077212 +step:278 train loss:6.051039 +step:279 train loss:6.025740 +step:280 train loss:6.101641 +step:281 train loss:6.183949 +step:282 train loss:6.104287 +step:283 train loss:6.067371 +step:284 train loss:6.046015 +step:285 train loss:6.107750 +step:286 train loss:6.078572 +step:287 train loss:6.056639 +step:288 train loss:6.025382 +step:289 train loss:6.073673 +step:290 train loss:6.120440 +step:291 train loss:6.040455 +step:292 train loss:6.091835 +step:293 train loss:6.044109 +step:294 train loss:6.132360 +step:295 train loss:6.020099 +step:296 train loss:6.087117 +step:297 train loss:6.128336 +step:298 train loss:6.011855 +step:299 train loss:6.111253 +step:300 train loss:6.003210 +step:301 train loss:6.039811 +step:302 train loss:6.010830 +step:303 train loss:6.029673 +step:304 train loss:6.053967 +step:305 train loss:5.984185 +step:306 train loss:6.001936 +step:307 train loss:6.033658 +step:308 train loss:5.931831 +step:309 train loss:6.081161 +step:310 train loss:6.063515 +step:311 train loss:6.027825 +step:312 train loss:6.015593 +step:313 train loss:6.033382 +step:314 train loss:6.017970 +step:315 train loss:5.981627 +step:316 train loss:5.964814 +step:317 train loss:5.941742 +step:318 train loss:5.933872 +step:319 train loss:6.010875 +step:320 train loss:5.927151 +step:321 train loss:5.990099 +step:322 train loss:5.976774 +step:323 train loss:6.050587 +step:324 train loss:5.984024 +step:325 train loss:6.002604 +step:326 train loss:6.011949 +step:327 train loss:5.985177 +step:328 train loss:5.962210 +step:329 train loss:5.970059 +step:330 train loss:5.906372 +step:331 train loss:5.946519 +step:332 train loss:5.914743 +step:333 train loss:5.854621 +step:334 train loss:5.954484 +step:335 train loss:5.990294 +step:336 train loss:6.117530 +step:337 train loss:6.026893 +step:338 train loss:5.939546 +step:339 train loss:5.880973 +step:340 train loss:5.887162 +step:341 train loss:5.887149 +step:342 train loss:5.958385 +step:343 train loss:5.928702 +step:344 train loss:5.888598 +step:345 train loss:5.867092 +step:346 train loss:5.896543 +step:347 train loss:5.841074 +step:348 train loss:5.862554 +step:349 train loss:5.782456 +step:350 train loss:5.839451 +step:351 train loss:5.900620 +step:352 train loss:5.862803 +step:353 train loss:5.889583 +step:354 train loss:5.851737 +step:355 train loss:5.887778 +step:356 train loss:5.857818 +step:357 train loss:5.920109 +step:358 train loss:5.973989 +step:359 train loss:5.806483 +step:360 train loss:5.914064 +step:361 train loss:5.910300 +step:362 train loss:5.885675 +step:363 train loss:5.846967 +step:364 train loss:5.945179 +step:365 train loss:5.877029 +step:366 train loss:5.847912 +step:367 train loss:5.867992 +step:368 train loss:5.853193 +step:369 train loss:5.819454 +step:370 train loss:5.898717 +step:371 train loss:5.824347 +step:372 train loss:5.889819 +step:373 train loss:5.846293 +step:374 train loss:5.816600 +step:375 train loss:5.848502 +step:376 train loss:5.835476 +step:377 train loss:5.710048 +step:378 train loss:5.795945 +step:379 train loss:5.872369 +step:380 train loss:5.788171 +step:381 train loss:5.871436 +step:382 train loss:5.852758 +step:383 train loss:5.833727 +step:384 train loss:5.801882 +step:385 train loss:5.789619 +step:386 train loss:5.806949 +step:387 train loss:5.809599 +step:388 train loss:5.789161 +step:389 train loss:5.795129 +step:390 train loss:5.793613 +step:391 train loss:5.791716 +step:392 train loss:5.784256 +step:393 train loss:5.772443 +step:394 train loss:5.812742 
+step:395 train loss:5.755798 +step:396 train loss:5.695360 +step:397 train loss:5.794315 +step:398 train loss:5.787237 +step:399 train loss:5.794125 +step:400 train loss:5.765986 +step:401 train loss:5.789311 +step:402 train loss:5.758480 +step:403 train loss:5.760013 +step:404 train loss:5.741500 +step:405 train loss:5.758194 +step:406 train loss:5.764244 +step:407 train loss:5.762200 +step:408 train loss:5.811057 +step:409 train loss:5.744071 +step:410 train loss:5.712366 +step:411 train loss:5.707468 +step:412 train loss:5.789514 +step:413 train loss:5.684803 +step:414 train loss:5.760926 +step:415 train loss:5.733868 +step:416 train loss:5.733667 +step:417 train loss:5.749438 +step:418 train loss:5.706136 +step:419 train loss:5.691648 +step:420 train loss:5.696025 +step:421 train loss:5.669323 +step:422 train loss:5.686572 +step:423 train loss:5.695246 +step:424 train loss:5.679473 +step:425 train loss:5.729053 +step:426 train loss:5.744878 +step:427 train loss:5.669703 +step:428 train loss:5.732908 +step:429 train loss:5.639594 +step:430 train loss:5.678759 +step:431 train loss:5.703413 +step:432 train loss:5.715851 +step:433 train loss:5.702472 +step:434 train loss:5.669836 +step:435 train loss:5.723326 +step:436 train loss:5.744304 +step:437 train loss:5.685703 +step:438 train loss:5.643055 +step:439 train loss:5.652362 +step:440 train loss:5.684113 +step:441 train loss:5.637129 +step:442 train loss:5.635447 +step:443 train loss:5.660633 +step:444 train loss:5.690380 +step:445 train loss:5.690190 +step:446 train loss:5.627802 +step:447 train loss:5.662203 +step:448 train loss:5.703998 +step:449 train loss:5.676313 +step:450 train loss:5.640424 +step:451 train loss:5.620831 +step:452 train loss:5.680248 +step:453 train loss:5.606618 +step:454 train loss:5.590204 +step:455 train loss:5.698707 +step:456 train loss:5.650356 +step:457 train loss:5.630277 +step:458 train loss:5.627741 +step:459 train loss:5.587867 +step:460 train loss:5.683547 +step:461 train loss:5.630459 +step:462 train loss:5.537820 +step:463 train loss:5.608839 +step:464 train loss:5.641560 +step:465 train loss:5.598310 +step:466 train loss:5.636695 +step:467 train loss:5.584609 +step:468 train loss:5.641172 +step:469 train loss:5.614374 +step:470 train loss:5.565541 +step:471 train loss:5.685600 +step:472 train loss:5.573132 +step:473 train loss:5.648471 +step:474 train loss:5.633831 +step:475 train loss:5.641624 +step:476 train loss:5.607351 +step:477 train loss:5.567690 +step:478 train loss:5.586474 +step:479 train loss:5.578551 +step:480 train loss:5.599998 +step:481 train loss:5.600073 +step:482 train loss:5.554497 +step:483 train loss:5.613921 +step:484 train loss:5.564900 +step:485 train loss:5.550149 +step:486 train loss:5.602809 +step:487 train loss:5.575869 +step:488 train loss:5.559407 +step:489 train loss:5.551297 +step:490 train loss:5.543590 +step:491 train loss:5.545033 +step:492 train loss:5.541418 +step:493 train loss:5.542801 +step:494 train loss:5.547553 +step:495 train loss:5.501400 +step:496 train loss:5.621688 +step:497 train loss:5.482793 +step:498 train loss:5.593599 +step:499 train loss:5.558738 +step:500 validation loss:5.549487 total_sharp:2.4142e-01 L1_sharp:1.2150e+00 L2_sharp:2.9943e-01 L3_sharp:2.2703e-01 L4_sharp:7.4764e-02 L5_sharp:4.7904e-02 L6_sharp:2.8056e-02 L7_sharp:2.1681e-02 L8_sharp:1.5735e-02 L9_sharp:1.3966e-02 L10_sharp:1.3017e-02 L11_sharp:1.6251e-02 L12_sharp:4.4140e-02 total_fnorm:6.7881e-01 total_l1_linf:5.7538e+03 total_spectral:6.7881e-01 L1_fnorm:1.1271e-01 
L2_fnorm:1.0569e-01 L3_fnorm:1.0896e-01 L4_fnorm:1.1969e-01 L5_fnorm:1.2321e-01 L6_fnorm:1.3296e-01 L7_fnorm:1.3020e-01 L8_fnorm:1.3220e-01 L9_fnorm:1.3099e-01 L10_fnorm:1.3238e-01 L11_fnorm:1.2820e-01 L12_fnorm:1.1779e-01 L1_l1linf:1.7139e-01 L2_l1linf:1.4760e-01 L3_l1linf:1.6771e-01 L4_l1linf:1.5048e-01 L5_l1linf:1.5390e-01 L6_l1linf:1.5974e-01 L7_l1linf:1.6141e-01 L8_l1linf:1.6747e-01 L9_l1linf:1.6265e-01 L10_l1linf:1.7038e-01 L11_l1linf:1.7287e-01 L12_l1linf:1.6222e-01 L1_spectral:2.5073e-02 L2_spectral:2.3512e-02 L3_spectral:2.6850e-02 L4_spectral:2.5871e-02 L5_spectral:2.6050e-02 L6_spectral:2.6828e-02 L7_spectral:2.8423e-02 L8_spectral:2.7934e-02 L9_spectral:2.9189e-02 L10_spectral:2.9081e-02 L11_spectral:3.1035e-02 L12_spectral:3.1066e-02 ip_v_neg_g:9.0525e-02 cos_v_neg_g:7.0405e-02 v_norm:6.7881e-01 g_norm:1.8942e+00 hv_norm:1.5533e+00 cos_v_hv:1.0550e-01 hg_norm:4.4166e+01 cos_g_hg:6.3977e-01 v_par:3.2010e-03 v_perp:6.7881e-01 L1_cos_v_neg_g:1.8229e-01 L1_v_norm:1.1271e-01 L2_cos_v_neg_g:2.1591e-01 L2_v_norm:1.0569e-01 L3_cos_v_neg_g:2.2761e-01 L3_v_norm:1.0896e-01 L4_cos_v_neg_g:1.6212e-01 L4_v_norm:1.1969e-01 L5_cos_v_neg_g:1.3183e-01 L5_v_norm:1.2321e-01 L6_cos_v_neg_g:1.1599e-01 L6_v_norm:1.3296e-01 L7_cos_v_neg_g:1.1560e-01 L7_v_norm:1.3020e-01 L8_cos_v_neg_g:1.0851e-01 L8_v_norm:1.3220e-01 L9_cos_v_neg_g:9.7022e-02 L9_v_norm:1.3099e-01 L10_cos_v_neg_g:9.9464e-02 L10_v_norm:1.3238e-01 L11_cos_v_neg_g:1.0859e-01 L11_v_norm:1.2820e-01 L12_cos_v_neg_g:1.0858e-01 L12_v_norm:1.1779e-01 +step:500 train loss:5.578399 +step:501 train loss:5.520987 +step:502 train loss:5.555736 +step:503 train loss:5.501077 +step:504 train loss:5.592916 +step:505 train loss:5.521355 +step:506 train loss:5.521141 +step:507 train loss:5.530503 +step:508 train loss:5.576062 +step:509 train loss:5.576734 +step:510 train loss:5.497619 +step:511 train loss:5.486809 +step:512 train loss:5.495351 +step:513 train loss:5.522908 +step:514 train loss:5.568620 +step:515 train loss:5.529445 +step:516 train loss:5.590048 +step:517 train loss:5.526498 +step:518 train loss:5.505650 +step:519 train loss:5.561447 +step:520 train loss:5.502283 +step:521 train loss:5.474198 +step:522 train loss:5.509474 +step:523 train loss:5.502035 +step:524 train loss:5.449284 +step:525 train loss:5.455871 +step:526 train loss:5.466797 +step:527 train loss:5.464754 +step:528 train loss:5.456203 +step:529 train loss:5.484454 +step:530 train loss:5.460005 +step:531 train loss:5.487788 +step:532 train loss:5.473397 +step:533 train loss:5.420222 +step:534 train loss:5.485191 +step:535 train loss:5.470809 +step:536 train loss:5.541510 +step:537 train loss:5.449858 +step:538 train loss:5.400906 +step:539 train loss:5.478341 +step:540 train loss:5.502427 +step:541 train loss:5.416163 +step:542 train loss:5.457010 +step:543 train loss:5.454683 +step:544 train loss:5.448669 +step:545 train loss:5.443094 +step:546 train loss:5.406954 +step:547 train loss:5.429632 +step:548 train loss:5.411389 +step:549 train loss:5.443735 +step:550 train loss:5.433627 +step:551 train loss:5.421127 +step:552 train loss:5.518030 +step:553 train loss:5.487573 +step:554 train loss:5.414386 +step:555 train loss:5.493810 +step:556 train loss:5.430257 +step:557 train loss:5.417936 +step:558 train loss:5.387027 +step:559 train loss:5.442604 +step:560 train loss:5.488332 +step:561 train loss:5.355435 +step:562 train loss:5.344311 +step:563 train loss:5.420131 +step:564 train loss:5.400173 +step:565 train loss:5.402341 +step:566 train loss:5.411245 +step:567 train 
loss:5.428232 +step:568 train loss:5.423373 +step:569 train loss:5.425847 +step:570 train loss:5.339509 +step:571 train loss:5.397365 +step:572 train loss:5.386362 +step:573 train loss:5.384433 +step:574 train loss:5.416098 +step:575 train loss:5.395603 +step:576 train loss:5.392040 +step:577 train loss:5.417794 +step:578 train loss:5.391089 +step:579 train loss:5.440418 +step:580 train loss:5.371278 +step:581 train loss:5.408587 +step:582 train loss:5.373894 +step:583 train loss:5.381732 +step:584 train loss:5.367843 +step:585 train loss:5.381953 +step:586 train loss:5.354960 +step:587 train loss:5.416706 +step:588 train loss:5.329486 +step:589 train loss:5.384378 +step:590 train loss:5.384628 +step:591 train loss:5.321324 +step:592 train loss:5.296402 +step:593 train loss:5.323369 +step:594 train loss:5.296294 +step:595 train loss:5.328242 +step:596 train loss:5.322661 +step:597 train loss:5.355021 +step:598 train loss:5.345051 +step:599 train loss:5.332129 +step:600 train loss:5.310105 +step:601 train loss:5.290606 +step:602 train loss:5.305052 +step:603 train loss:5.368985 +step:604 train loss:5.315388 +step:605 train loss:5.351071 +step:606 train loss:5.299041 +step:607 train loss:5.289192 +step:608 train loss:5.287452 +step:609 train loss:5.277647 +step:610 train loss:5.284323 +step:611 train loss:5.301308 +step:612 train loss:5.352498 +step:613 train loss:5.279399 +step:614 train loss:5.312316 +step:615 train loss:5.368978 +step:616 train loss:5.283353 +step:617 train loss:5.318670 +step:618 train loss:5.275560 +step:619 train loss:5.322447 +step:620 train loss:5.329891 +step:621 train loss:5.261765 +step:622 train loss:5.327673 +step:623 train loss:5.314029 +step:624 train loss:5.302732 +step:625 train loss:5.294490 +step:626 train loss:5.322343 +step:627 train loss:5.273357 +step:628 train loss:5.304468 +step:629 train loss:5.254363 +step:630 train loss:5.255250 +step:631 train loss:5.255469 +step:632 train loss:5.272059 +step:633 train loss:5.292416 +step:634 train loss:5.288928 +step:635 train loss:5.214181 +step:636 train loss:5.298518 +step:637 train loss:5.220643 +step:638 train loss:5.158485 +step:639 train loss:5.271852 +step:640 train loss:5.238876 +step:641 train loss:5.260242 +step:642 train loss:5.291909 +step:643 train loss:5.210773 +step:644 train loss:5.278345 +step:645 train loss:5.235646 +step:646 train loss:5.217818 +step:647 train loss:5.230781 +step:648 train loss:5.327709 +step:649 train loss:5.212050 +step:650 train loss:5.291528 +step:651 train loss:5.147229 +step:652 train loss:5.191409 +step:653 train loss:5.169065 +step:654 train loss:5.177220 +step:655 train loss:5.229651 +step:656 train loss:5.160584 +step:657 train loss:5.222734 +step:658 train loss:5.160940 +step:659 train loss:5.260571 +step:660 train loss:5.194127 +step:661 train loss:5.248175 +step:662 train loss:5.241113 +step:663 train loss:5.246011 +step:664 train loss:5.162845 +step:665 train loss:5.182604 +step:666 train loss:5.180420 +step:667 train loss:5.237585 +step:668 train loss:5.200697 +step:669 train loss:5.178746 +step:670 train loss:5.192315 +step:671 train loss:5.161052 +step:672 train loss:5.124350 +step:673 train loss:5.213032 +step:674 train loss:5.209834 +step:675 train loss:5.112894 +step:676 train loss:5.188197 +step:677 train loss:5.130250 +step:678 train loss:5.123927 +step:679 train loss:5.164112 +step:680 train loss:5.119220 +step:681 train loss:5.168166 +step:682 train loss:5.087120 +step:683 train loss:5.173456 +step:684 train loss:5.191753 +step:685 train loss:5.152830 
+step:686 train loss:5.257953 +step:687 train loss:5.172913 +step:688 train loss:5.110111 +step:689 train loss:5.153534 +step:690 train loss:5.128434 +step:691 train loss:5.133124 +step:692 train loss:5.138705 +step:693 train loss:5.125808 +step:694 train loss:5.110542 +step:695 train loss:5.055688 +step:696 train loss:5.040454 +step:697 train loss:5.174201 +step:698 train loss:5.100305 +step:699 train loss:5.129230 +step:700 train loss:5.180647 +step:701 train loss:5.086174 +step:702 train loss:5.165395 +step:703 train loss:5.077418 +step:704 train loss:5.051752 +step:705 train loss:5.121543 +step:706 train loss:4.985122 +step:707 train loss:5.061106 +step:708 train loss:5.165480 +step:709 train loss:5.130442 +step:710 train loss:5.080650 +step:711 train loss:5.161493 +step:712 train loss:5.097013 +step:713 train loss:5.073039 +step:714 train loss:5.141101 +step:715 train loss:5.065322 +step:716 train loss:5.186028 +step:717 train loss:5.056327 +step:718 train loss:5.129825 +step:719 train loss:5.065982 +step:720 train loss:5.051733 +step:721 train loss:5.065467 +step:722 train loss:5.076190 +step:723 train loss:5.125265 +step:724 train loss:5.100667 +step:725 train loss:5.057494 +step:726 train loss:5.055816 +step:727 train loss:5.073840 +step:728 train loss:5.050269 +step:729 train loss:4.976570 +step:730 train loss:5.075572 +step:731 train loss:5.092604 +step:732 train loss:5.063157 +step:733 train loss:5.034345 +step:734 train loss:5.024629 +step:735 train loss:5.101860 +step:736 train loss:5.041357 +step:737 train loss:5.029124 +step:738 train loss:5.050713 +step:739 train loss:5.016791 +step:740 train loss:5.022969 +step:741 train loss:5.089653 +step:742 train loss:5.006865 +step:743 train loss:5.003253 +step:744 train loss:5.063103 +step:745 train loss:5.012092 +step:746 train loss:4.998672 +step:747 train loss:5.032676 +step:748 train loss:5.003023 +step:749 train loss:5.037397 +step:750 validation loss:5.001674 +step:750 train loss:4.981711 +step:751 train loss:5.003444 +step:752 train loss:4.950808 +step:753 train loss:5.000449 +step:754 train loss:5.003653 +step:755 train loss:5.045730 +step:756 train loss:5.025269 +step:757 train loss:5.116924 +step:758 train loss:4.992708 +step:759 train loss:4.989302 +step:760 train loss:4.950323 +step:761 train loss:4.990069 +step:762 train loss:4.964764 +step:763 train loss:4.958467 +step:764 train loss:4.933930 +step:765 train loss:4.936622 +step:766 train loss:5.015696 +step:767 train loss:5.130838 +step:768 train loss:4.953393 +step:769 train loss:5.009208 +step:770 train loss:5.027749 +step:771 train loss:5.067642 +step:772 train loss:5.009396 +step:773 train loss:4.951609 +step:774 train loss:4.994460 +step:775 train loss:4.972248 +step:776 train loss:4.986457 +step:777 train loss:4.940032 +step:778 train loss:4.951912 +step:779 train loss:4.922773 +step:780 train loss:4.968879 +step:781 train loss:4.900014 +step:782 train loss:4.923340 +step:783 train loss:4.893850 +step:784 train loss:4.909803 +step:785 train loss:4.872667 +step:786 train loss:4.904707 +step:787 train loss:4.851171 +step:788 train loss:4.912681 +step:789 train loss:4.906594 +step:790 train loss:4.873806 +step:791 train loss:4.984779 +step:792 train loss:4.982537 +step:793 train loss:4.943670 +step:794 train loss:4.929552 +step:795 train loss:4.881020 +step:796 train loss:5.140738 +step:797 train loss:4.937536 +step:798 train loss:4.937400 +step:799 train loss:4.935740 +step:800 train loss:5.009078 +step:801 train loss:4.936089 +step:802 train loss:5.040815 +step:803 
train loss:4.934211 +step:804 train loss:4.879156 +step:805 train loss:4.932328 +step:806 train loss:4.824529 +step:807 train loss:4.905571 +step:808 train loss:4.904088 +step:809 train loss:4.878896 +step:810 train loss:4.837762 +step:811 train loss:4.936322 +step:812 train loss:4.892707 +step:813 train loss:4.906921 +step:814 train loss:4.946128 +step:815 train loss:4.903276 +step:816 train loss:4.832685 +step:817 train loss:4.866387 +step:818 train loss:4.853777 +step:819 train loss:4.894760 +step:820 train loss:4.893161 +step:821 train loss:4.853072 +step:822 train loss:4.859043 +step:823 train loss:4.933596 +step:824 train loss:4.849440 +step:825 train loss:4.820274 +step:826 train loss:4.859807 +step:827 train loss:4.804893 +step:828 train loss:4.872799 +step:829 train loss:4.859515 +step:830 train loss:4.865600 +step:831 train loss:4.885859 +step:832 train loss:4.931986 +step:833 train loss:4.882705 +step:834 train loss:4.863784 +step:835 train loss:4.836325 +step:836 train loss:4.807611 +step:837 train loss:4.781370 +step:838 train loss:4.782632 +step:839 train loss:4.798611 +step:840 train loss:4.852800 +step:841 train loss:4.823374 +step:842 train loss:4.838464 +step:843 train loss:4.816809 +step:844 train loss:4.826708 +step:845 train loss:4.790380 +step:846 train loss:4.881222 +step:847 train loss:4.827034 +step:848 train loss:4.790842 +step:849 train loss:4.840795 +step:850 train loss:4.830806 +step:851 train loss:4.802952 +step:852 train loss:4.870010 +step:853 train loss:4.764232 +step:854 train loss:4.822264 +step:855 train loss:4.786405 +step:856 train loss:4.738760 +step:857 train loss:4.780990 +step:858 train loss:4.804742 +step:859 train loss:4.763350 +step:860 train loss:4.760590 +step:861 train loss:4.794832 +step:862 train loss:4.732015 +step:863 train loss:4.756058 +step:864 train loss:4.726712 +step:865 train loss:4.751543 +step:866 train loss:4.769467 +step:867 train loss:4.856668 +step:868 train loss:4.759727 +step:869 train loss:4.777985 +step:870 train loss:4.738281 +step:871 train loss:4.737934 +step:872 train loss:4.760260 +step:873 train loss:4.747471 +step:874 train loss:4.761809 +step:875 train loss:4.654826 +step:876 train loss:4.772825 +step:877 train loss:4.685996 +step:878 train loss:4.770049 +step:879 train loss:4.699578 +step:880 train loss:4.778402 +step:881 train loss:4.730645 +step:882 train loss:4.694767 +step:883 train loss:4.720642 +step:884 train loss:4.733278 +step:885 train loss:4.683756 +step:886 train loss:4.668566 +step:887 train loss:4.695421 +step:888 train loss:4.792746 +step:889 train loss:4.727594 +step:890 train loss:4.681715 +step:891 train loss:4.649944 +step:892 train loss:4.646337 +step:893 train loss:4.709483 +step:894 train loss:4.681543 +step:895 train loss:4.661726 +step:896 train loss:4.750852 +step:897 train loss:4.688558 +step:898 train loss:4.693890 +step:899 train loss:4.694054 +step:900 train loss:4.737274 +step:901 train loss:4.654550 +step:902 train loss:4.686278 +step:903 train loss:4.764553 +step:904 train loss:4.774427 +step:905 train loss:4.662212 +step:906 train loss:4.670218 +step:907 train loss:4.689664 +step:908 train loss:4.719904 +step:909 train loss:4.673513 +step:910 train loss:4.692793 +step:911 train loss:4.804060 +step:912 train loss:4.620212 +step:913 train loss:4.667298 +step:914 train loss:4.639293 +step:915 train loss:4.643931 +step:916 train loss:4.722346 +step:917 train loss:4.653780 +step:918 train loss:4.740630 +step:919 train loss:4.821887 +step:920 train loss:4.618196 +step:921 train 
loss:4.704039 +step:922 train loss:4.699840 +step:923 train loss:4.629580 +step:924 train loss:4.663432 +step:925 train loss:4.609958 +step:926 train loss:4.706087 +step:927 train loss:4.604196 +step:928 train loss:4.673266 +step:929 train loss:4.651255 +step:930 train loss:4.644240 +step:931 train loss:4.682763 +step:932 train loss:4.633568 +step:933 train loss:4.654496 +step:934 train loss:4.683069 +step:935 train loss:4.654161 +step:936 train loss:4.639200 +step:937 train loss:4.641533 +step:938 train loss:4.634838 +step:939 train loss:4.532320 +step:940 train loss:4.636346 +step:941 train loss:4.578104 +step:942 train loss:4.556174 +step:943 train loss:4.657418 +step:944 train loss:4.603311 +step:945 train loss:4.608929 +step:946 train loss:4.640780 +step:947 train loss:4.755994 +step:948 train loss:4.580446 +step:949 train loss:4.636497 +step:950 train loss:4.573100 +step:951 train loss:4.589948 +step:952 train loss:4.656068 +step:953 train loss:4.584025 +step:954 train loss:4.601110 +step:955 train loss:4.539730 +step:956 train loss:4.555140 +step:957 train loss:4.555137 +step:958 train loss:4.631577 +step:959 train loss:4.567127 +step:960 train loss:4.655854 +step:961 train loss:4.610013 +step:962 train loss:4.567360 +step:963 train loss:4.567772 +step:964 train loss:4.600372 +step:965 train loss:4.538205 +step:966 train loss:4.542690 +step:967 train loss:4.591553 +step:968 train loss:4.588303 +step:969 train loss:4.540740 +step:970 train loss:4.607134 +step:971 train loss:4.573118 +step:972 train loss:4.497440 +step:973 train loss:4.582543 +step:974 train loss:4.519722 +step:975 train loss:4.590577 +step:976 train loss:4.552719 +step:977 train loss:4.549174 +step:978 train loss:4.552021 +step:979 train loss:4.533773 +step:980 train loss:4.538334 +step:981 train loss:4.520470 +step:982 train loss:4.522245 +step:983 train loss:4.536368 +step:984 train loss:4.562033 +step:985 train loss:4.530817 +step:986 train loss:4.539861 +step:987 train loss:4.570942 +step:988 train loss:4.549308 +step:989 train loss:4.522143 +step:990 train loss:4.507214 +step:991 train loss:4.449067 +step:992 train loss:4.507043 +step:993 train loss:4.530623 +step:994 train loss:4.471999 +step:995 train loss:4.491559 +step:996 train loss:4.543116 +step:997 train loss:4.532298 +step:998 train loss:4.532431 +step:999 train loss:4.594016 +step:1000 validation loss:4.530319 total_sharp:1.8861e-01 L1_sharp:9.7788e-01 L2_sharp:2.9360e-01 L3_sharp:5.3304e-02 L4_sharp:1.7764e-02 L5_sharp:2.7652e-02 L6_sharp:3.6189e-02 L7_sharp:2.6273e-02 L8_sharp:1.9506e-02 L9_sharp:1.1596e-02 L10_sharp:9.7361e-03 L11_sharp:1.2459e-02 L12_sharp:3.2876e-02 total_fnorm:9.9944e-01 total_l1_linf:8.8164e+03 total_spectral:9.9944e-01 L1_fnorm:2.0055e-01 L2_fnorm:2.1321e-01 L3_fnorm:2.3930e-01 L4_fnorm:2.5327e-01 L5_fnorm:2.5288e-01 L6_fnorm:2.2581e-01 L7_fnorm:2.1209e-01 L8_fnorm:2.0874e-01 L9_fnorm:2.1104e-01 L10_fnorm:2.1192e-01 L11_fnorm:2.0805e-01 L12_fnorm:1.8935e-01 L1_l1linf:2.7178e-01 L2_l1linf:3.7913e-01 L3_l1linf:4.2554e-01 L4_l1linf:4.2404e-01 L5_l1linf:3.2915e-01 L6_l1linf:2.8521e-01 L7_l1linf:2.3469e-01 L8_l1linf:2.5993e-01 L9_l1linf:2.3932e-01 L10_l1linf:2.4357e-01 L11_l1linf:2.4784e-01 L12_l1linf:2.2744e-01 L1_spectral:4.5446e-02 L2_spectral:6.5400e-02 L3_spectral:8.6527e-02 L4_spectral:8.2415e-02 L5_spectral:7.7794e-02 L6_spectral:5.8173e-02 L7_spectral:3.7786e-02 L8_spectral:3.6201e-02 L9_spectral:3.6400e-02 L10_spectral:3.7693e-02 L11_spectral:3.9110e-02 L12_spectral:4.7277e-02 ip_v_neg_g:1.4532e-01 cos_v_neg_g:7.9776e-02 
v_norm:9.9944e-01 g_norm:1.8226e+00 hv_norm:2.3989e+00 cos_v_hv:7.8579e-02 hg_norm:5.2672e+01 cos_g_hg:6.4138e-01 v_par:3.9683e-03 v_perp:9.9943e-01 L1_cos_v_neg_g:2.0700e-01 L1_v_norm:2.0055e-01 L2_cos_v_neg_g:2.0684e-01 L2_v_norm:2.1321e-01 L3_cos_v_neg_g:1.3905e-01 L3_v_norm:2.3930e-01 L4_cos_v_neg_g:9.6642e-02 L4_v_norm:2.5327e-01 L5_cos_v_neg_g:8.6527e-02 L5_v_norm:2.5288e-01 L6_cos_v_neg_g:9.3005e-02 L6_v_norm:2.2581e-01 L7_cos_v_neg_g:7.2628e-02 L7_v_norm:2.1209e-01 L8_cos_v_neg_g:6.2133e-02 L8_v_norm:2.0874e-01 L9_cos_v_neg_g:5.4453e-02 L9_v_norm:2.1104e-01 L10_cos_v_neg_g:5.5125e-02 L10_v_norm:2.1192e-01 L11_cos_v_neg_g:5.9419e-02 L11_v_norm:2.0805e-01 L12_cos_v_neg_g:6.8957e-02 L12_v_norm:1.8935e-01 +step:1000 train loss:4.599127 +step:1001 train loss:4.592134 +step:1002 train loss:4.578403 +step:1003 train loss:4.568493 +step:1004 train loss:4.529735 +step:1005 train loss:4.529229 +step:1006 train loss:4.592211 +step:1007 train loss:4.553664 +step:1008 train loss:4.512777 +step:1009 train loss:4.569490 +step:1010 train loss:4.533716 +step:1011 train loss:4.558639 +step:1012 train loss:4.499356 +step:1013 train loss:4.468273 +step:1014 train loss:4.466839 +step:1015 train loss:4.507705 +step:1016 train loss:4.514816 +step:1017 train loss:4.470804 +step:1018 train loss:4.520321 +step:1019 train loss:4.503451 +step:1020 train loss:4.476322 +step:1021 train loss:4.562974 +step:1022 train loss:4.475569 +step:1023 train loss:4.482920 +step:1024 train loss:4.567457 +step:1025 train loss:4.510738 +step:1026 train loss:4.460610 +step:1027 train loss:4.503724 +step:1028 train loss:4.505808 +step:1029 train loss:4.451034 +step:1030 train loss:4.537300 +step:1031 train loss:4.522497 +step:1032 train loss:4.479750 +step:1033 train loss:4.452727 +step:1034 train loss:4.508869 +step:1035 train loss:4.506904 +step:1036 train loss:4.422963 +step:1037 train loss:4.477084 +step:1038 train loss:4.496486 +step:1039 train loss:4.625941 +step:1040 train loss:4.470223 +step:1041 train loss:4.468170 +step:1042 train loss:4.490536 +step:1043 train loss:4.492876 +step:1044 train loss:4.474761 +step:1045 train loss:4.477719 +step:1046 train loss:4.422404 +step:1047 train loss:4.449438 +step:1048 train loss:4.448387 +step:1049 train loss:4.506175 +step:1050 train loss:4.463428 +step:1051 train loss:4.439816 +step:1052 train loss:4.529702 +step:1053 train loss:4.427511 +step:1054 train loss:4.424611 +step:1055 train loss:4.490430 +step:1056 train loss:4.437366 +step:1057 train loss:4.335421 +step:1058 train loss:4.439073 +step:1059 train loss:4.426228 +step:1060 train loss:4.414144 +step:1061 train loss:4.466035 +step:1062 train loss:4.431618 +step:1063 train loss:4.436201 +step:1064 train loss:4.424899 +step:1065 train loss:4.444851 +step:1066 train loss:4.425713 +step:1067 train loss:4.461407 +step:1068 train loss:4.429887 +step:1069 train loss:4.448067 +step:1070 train loss:4.436693 +step:1071 train loss:4.459568 +step:1072 train loss:4.483521 +step:1073 train loss:4.392434 +step:1074 train loss:4.404482 +step:1075 train loss:4.421724 +step:1076 train loss:4.472783 +step:1077 train loss:4.404948 +step:1078 train loss:4.449985 +step:1079 train loss:4.505754 +step:1080 train loss:4.376410 +step:1081 train loss:4.433301 +step:1082 train loss:4.427655 +step:1083 train loss:4.393009 +step:1084 train loss:4.361627 +step:1085 train loss:4.427506 +step:1086 train loss:4.417819 +step:1087 train loss:4.396945 +step:1088 train loss:4.394209 +step:1089 train loss:4.406847 +step:1090 train loss:4.345954 +step:1091 
train loss:4.340079 +step:1092 train loss:4.460875 +step:1093 train loss:4.353802 +step:1094 train loss:4.413427 +step:1095 train loss:4.449640 +step:1096 train loss:4.403234 +step:1097 train loss:4.394906 +step:1098 train loss:4.366366 +step:1099 train loss:4.414142 +step:1100 train loss:4.459229 +step:1101 train loss:4.449142 +step:1102 train loss:4.456285 +step:1103 train loss:4.386988 +step:1104 train loss:4.412875 +step:1105 train loss:4.459406 +step:1106 train loss:4.393012 +step:1107 train loss:4.505268 +step:1108 train loss:4.454902 +step:1109 train loss:4.418532 +step:1110 train loss:4.371109 +step:1111 train loss:4.425489 +step:1112 train loss:4.344814 +step:1113 train loss:4.332759 +step:1114 train loss:4.315651 +step:1115 train loss:4.351385 +step:1116 train loss:4.418242 +step:1117 train loss:4.436625 +step:1118 train loss:4.466932 +step:1119 train loss:4.410951 +step:1120 train loss:4.418793 +step:1121 train loss:4.450509 +step:1122 train loss:4.406040 +step:1123 train loss:4.521211 +step:1124 train loss:4.427435 +step:1125 train loss:4.417106 +step:1126 train loss:4.377503 +step:1127 train loss:4.389798 +step:1128 train loss:4.385918 +step:1129 train loss:4.444924 +step:1130 train loss:4.352311 +step:1131 train loss:4.441504 +step:1132 train loss:4.387648 +step:1133 train loss:4.393670 +step:1134 train loss:4.364036 +step:1135 train loss:4.403559 +step:1136 train loss:4.417861 +step:1137 train loss:4.340163 +step:1138 train loss:4.408269 +step:1139 train loss:4.362844 +step:1140 train loss:4.429319 +step:1141 train loss:4.385819 +step:1142 train loss:4.326669 +step:1143 train loss:4.395134 +step:1144 train loss:4.419117 +step:1145 train loss:4.371799 +step:1146 train loss:4.320934 +step:1147 train loss:4.335592 +step:1148 train loss:4.357682 +step:1149 train loss:4.400627 +step:1150 train loss:4.416124 +step:1151 train loss:4.424952 +step:1152 train loss:4.325531 +step:1153 train loss:4.319060 +step:1154 train loss:4.303750 +step:1155 train loss:4.408511 +step:1156 train loss:4.312754 +step:1157 train loss:4.335179 +step:1158 train loss:4.393938 +step:1159 train loss:4.384820 +step:1160 train loss:4.316890 +step:1161 train loss:4.410503 +step:1162 train loss:4.350814 +step:1163 train loss:4.333343 +step:1164 train loss:4.242838 +step:1165 train loss:4.379250 +step:1166 train loss:4.308635 +step:1167 train loss:4.315443 +step:1168 train loss:4.372932 +step:1169 train loss:4.335304 +step:1170 train loss:4.336249 +step:1171 train loss:4.356506 +step:1172 train loss:4.323696 +step:1173 train loss:4.356566 +step:1174 train loss:4.288735 +step:1175 train loss:4.317672 +step:1176 train loss:4.428458 +step:1177 train loss:4.283525 +step:1178 train loss:4.339511 +step:1179 train loss:4.302812 +step:1180 train loss:4.330457 +step:1181 train loss:4.326590 +step:1182 train loss:4.415084 +step:1183 train loss:4.380322 +step:1184 train loss:4.316489 +step:1185 train loss:4.345076 +step:1186 train loss:4.332238 +step:1187 train loss:4.296569 +step:1188 train loss:4.324664 +step:1189 train loss:4.256658 +step:1190 train loss:4.324238 +step:1191 train loss:4.384005 +step:1192 train loss:4.334505 +step:1193 train loss:4.333646 +step:1194 train loss:4.435317 +step:1195 train loss:4.414474 +step:1196 train loss:4.307844 +step:1197 train loss:4.324724 +step:1198 train loss:4.297478 +step:1199 train loss:4.306892 +step:1200 train loss:4.361179 +step:1201 train loss:4.331090 +step:1202 train loss:4.274486 +step:1203 train loss:4.268878 +step:1204 train loss:4.305470 +step:1205 train loss:4.301648 
+step:1206 train loss:4.273715 +step:1207 train loss:4.384073 +step:1208 train loss:4.368566 +step:1209 train loss:4.262685 +step:1210 train loss:4.354667 +step:1211 train loss:4.306415 +step:1212 train loss:4.326795 +step:1213 train loss:4.256789 +step:1214 train loss:4.348478 +step:1215 train loss:4.306573 +step:1216 train loss:4.311979 +step:1217 train loss:4.266770 +step:1218 train loss:4.329637 +step:1219 train loss:4.263700 +step:1220 train loss:4.292775 +step:1221 train loss:4.307099 +step:1222 train loss:4.347923 +step:1223 train loss:4.316801 +step:1224 train loss:4.285042 +step:1225 train loss:4.329225 +step:1226 train loss:4.267452 +step:1227 train loss:4.283694 +step:1228 train loss:4.283362 +step:1229 train loss:4.253146 +step:1230 train loss:4.251340 +step:1231 train loss:4.302402 +step:1232 train loss:4.254449 +step:1233 train loss:4.253995 +step:1234 train loss:4.339755 +step:1235 train loss:4.312565 +step:1236 train loss:4.221556 +step:1237 train loss:4.315473 +step:1238 train loss:4.275383 +step:1239 train loss:4.317412 +step:1240 train loss:4.216640 +step:1241 train loss:4.270811 +step:1242 train loss:4.300100 +step:1243 train loss:4.259754 +step:1244 train loss:4.365811 +step:1245 train loss:4.370234 +step:1246 train loss:4.306096 +step:1247 train loss:4.275728 +step:1248 train loss:4.301358 +step:1249 train loss:4.238144 +step:1250 validation loss:4.240165 +step:1250 train loss:4.253236 +step:1251 train loss:4.311787 +step:1252 train loss:4.259327 +step:1253 train loss:4.218978 +step:1254 train loss:4.243081 +step:1255 train loss:4.237339 +step:1256 train loss:4.282884 +step:1257 train loss:4.265178 +step:1258 train loss:4.309648 +step:1259 train loss:4.303678 +step:1260 train loss:4.202766 +step:1261 train loss:4.430423 +step:1262 train loss:4.286144 +step:1263 train loss:4.251625 +step:1264 train loss:4.250623 +step:1265 train loss:4.315962 +step:1266 train loss:4.255971 +step:1267 train loss:4.266085 +step:1268 train loss:4.274717 +step:1269 train loss:4.264093 +step:1270 train loss:4.193600 +step:1271 train loss:4.194469 +step:1272 train loss:4.221966 +step:1273 train loss:4.273949 +step:1274 train loss:4.240526 +step:1275 train loss:4.268223 +step:1276 train loss:4.264299 +step:1277 train loss:4.271004 +step:1278 train loss:4.215966 +step:1279 train loss:4.234807 +step:1280 train loss:4.257033 +step:1281 train loss:4.315812 +step:1282 train loss:4.230815 +step:1283 train loss:4.314633 +step:1284 train loss:4.268083 +step:1285 train loss:4.311015 +step:1286 train loss:4.216926 +step:1287 train loss:4.248437 +step:1288 train loss:4.277034 +step:1289 train loss:4.332820 +step:1290 train loss:4.282773 +step:1291 train loss:4.247960 +step:1292 train loss:4.228276 +step:1293 train loss:4.213256 +step:1294 train loss:4.261330 +step:1295 train loss:4.236954 +step:1296 train loss:4.284181 +step:1297 train loss:4.243556 +step:1298 train loss:4.259120 +step:1299 train loss:4.292129 +step:1300 train loss:4.213468 +step:1301 train loss:4.259791 +step:1302 train loss:4.223975 +step:1303 train loss:4.254960 +step:1304 train loss:4.285257 +step:1305 train loss:4.259874 +step:1306 train loss:4.248358 +step:1307 train loss:4.234820 +step:1308 train loss:4.190346 +step:1309 train loss:4.200625 +step:1310 train loss:4.188902 +step:1311 train loss:4.196038 +step:1312 train loss:4.262899 +step:1313 train loss:4.187930 +step:1314 train loss:4.192240 +step:1315 train loss:4.241547 +step:1316 train loss:4.207474 +step:1317 train loss:4.105092 +step:1318 train loss:4.263500 +step:1319 
train loss:4.306216 +step:1320 train loss:4.215250 +step:1321 train loss:4.191060 +step:1322 train loss:4.300492 +step:1323 train loss:4.257292 +step:1324 train loss:4.350303 +step:1325 train loss:4.246291 +step:1326 train loss:4.279610 +step:1327 train loss:4.291399 +step:1328 train loss:4.203662 +step:1329 train loss:4.229895 +step:1330 train loss:4.245381 +step:1331 train loss:4.095413 +step:1332 train loss:4.289141 +step:1333 train loss:4.241055 +step:1334 train loss:4.256430 +step:1335 train loss:4.269164 +step:1336 train loss:4.275485 +step:1337 train loss:4.251803 +step:1338 train loss:4.225908 +step:1339 train loss:4.305896 +step:1340 train loss:4.271091 +step:1341 train loss:4.249706 +step:1342 train loss:4.219248 +step:1343 train loss:4.201756 +step:1344 train loss:4.265683 +step:1345 train loss:4.223623 +step:1346 train loss:4.303525 +step:1347 train loss:4.228928 +step:1348 train loss:4.200217 +step:1349 train loss:4.139775 +step:1350 train loss:4.168305 +step:1351 train loss:4.244640 +step:1352 train loss:4.209396 +step:1353 train loss:4.183638 +step:1354 train loss:4.191695 +step:1355 train loss:4.263480 +step:1356 train loss:4.174966 +step:1357 train loss:4.202874 +step:1358 train loss:4.192063 +step:1359 train loss:4.191258 +step:1360 train loss:4.219345 +step:1361 train loss:4.335056 +step:1362 train loss:4.256942 +step:1363 train loss:4.144980 +step:1364 train loss:4.173649 +step:1365 train loss:4.180432 +step:1366 train loss:4.279557 +step:1367 train loss:4.190158 +step:1368 train loss:4.216745 +step:1369 train loss:4.301745 +step:1370 train loss:4.320939 +step:1371 train loss:4.266689 +step:1372 train loss:4.282814 +step:1373 train loss:4.329239 +step:1374 train loss:4.326891 +step:1375 train loss:4.284513 +step:1376 train loss:4.304967 +step:1377 train loss:4.338218 +step:1378 train loss:4.290907 +step:1379 train loss:4.250693 +step:1380 train loss:4.310068 +step:1381 train loss:4.248477 +step:1382 train loss:4.224918 +step:1383 train loss:4.207558 +step:1384 train loss:4.294140 +step:1385 train loss:4.186659 +step:1386 train loss:4.246403 +step:1387 train loss:4.247079 +step:1388 train loss:4.209371 +step:1389 train loss:4.182715 +step:1390 train loss:4.212508 +step:1391 train loss:4.240878 +step:1392 train loss:4.216730 +step:1393 train loss:4.268742 +step:1394 train loss:4.197397 +step:1395 train loss:4.231845 +step:1396 train loss:4.215107 +step:1397 train loss:4.225764 +step:1398 train loss:4.232785 +step:1399 train loss:4.205914 +step:1400 train loss:4.179193 +step:1401 train loss:4.173017 +step:1402 train loss:4.176307 +step:1403 train loss:4.137191 +step:1404 train loss:4.197163 +step:1405 train loss:4.157310 +step:1406 train loss:4.182170 +step:1407 train loss:4.177896 +step:1408 train loss:4.160816 +step:1409 train loss:4.145921 +step:1410 train loss:4.161954 +step:1411 train loss:4.194402 +step:1412 train loss:4.254266 +step:1413 train loss:4.172406 +step:1414 train loss:4.199773 +step:1415 train loss:4.159533 +step:1416 train loss:4.214760 +step:1417 train loss:4.184276 +step:1418 train loss:4.129305 +step:1419 train loss:4.127752 +step:1420 train loss:4.159436 +step:1421 train loss:4.197619 +step:1422 train loss:4.173657 +step:1423 train loss:4.268488 +step:1424 train loss:4.162940 +step:1425 train loss:4.131224 +step:1426 train loss:4.157763 +step:1427 train loss:4.150253 +step:1428 train loss:4.136487 +step:1429 train loss:4.162671 +step:1430 train loss:4.164429 +step:1431 train loss:4.204673 +step:1432 train loss:4.180506 +step:1433 train loss:4.159472 
+step:1434 train loss:4.131476 +step:1435 train loss:4.126426 +step:1436 train loss:4.194017 +step:1437 train loss:4.126477 +step:1438 train loss:4.135435 +step:1439 train loss:4.119741 +step:1440 train loss:4.165303 +step:1441 train loss:4.232645 +step:1442 train loss:4.198000 +step:1443 train loss:4.124639 +step:1444 train loss:4.129416 +step:1445 train loss:4.135423 +step:1446 train loss:4.160149 +step:1447 train loss:4.168031 +step:1448 train loss:4.134008 +step:1449 train loss:4.163238 +step:1450 train loss:4.182476 +step:1451 train loss:4.104882 +step:1452 train loss:4.154028 +step:1453 train loss:4.152318 +step:1454 train loss:4.157934 +step:1455 train loss:4.078356 +step:1456 train loss:4.158773 +step:1457 train loss:4.092956 +step:1458 train loss:4.232662 +step:1459 train loss:4.154535 +step:1460 train loss:4.129732 +step:1461 train loss:4.185899 +step:1462 train loss:4.190919 +step:1463 train loss:4.152110 +step:1464 train loss:4.130938 +step:1465 train loss:4.127170 +step:1466 train loss:4.089038 +step:1467 train loss:4.222898 +step:1468 train loss:4.109559 +step:1469 train loss:4.190212 +step:1470 train loss:4.120152 +step:1471 train loss:4.123568 +step:1472 train loss:4.127970 +step:1473 train loss:4.122542 +step:1474 train loss:4.068928 +step:1475 train loss:4.127578 +step:1476 train loss:4.209212 +step:1477 train loss:4.161085 +step:1478 train loss:4.096242 +step:1479 train loss:4.124745 +step:1480 train loss:4.117532 +step:1481 train loss:4.088714 +step:1482 train loss:4.154955 +step:1483 train loss:4.138431 +step:1484 train loss:4.172204 +step:1485 train loss:4.181678 +step:1486 train loss:4.130073 +step:1487 train loss:4.113163 +step:1488 train loss:4.124527 +step:1489 train loss:4.108289 +step:1490 train loss:4.164861 +step:1491 train loss:4.152406 +step:1492 train loss:4.150077 +step:1493 train loss:4.092140 +step:1494 train loss:4.127483 +step:1495 train loss:4.105425 +step:1496 train loss:4.082251 +step:1497 train loss:4.154746 +step:1498 train loss:4.060439 +step:1499 train loss:4.098637 +step:1500 validation loss:4.078059 total_sharp:5.8464e-02 L1_sharp:1.1184e-01 L2_sharp:5.0249e-02 L3_sharp:3.2587e-02 L4_sharp:9.8079e-03 L5_sharp:1.3067e-02 L6_sharp:1.3986e-02 L7_sharp:1.2069e-02 L8_sharp:9.4781e-03 L9_sharp:7.2295e-03 L10_sharp:5.5329e-03 L11_sharp:7.1283e-03 L12_sharp:1.3698e-02 total_fnorm:9.5660e-01 total_l1_linf:8.3139e+03 total_spectral:9.5660e-01 L1_fnorm:1.7020e-01 L2_fnorm:1.5895e-01 L3_fnorm:1.6766e-01 L4_fnorm:1.8848e-01 L5_fnorm:1.9384e-01 L6_fnorm:2.0562e-01 L7_fnorm:2.0625e-01 L8_fnorm:2.1276e-01 L9_fnorm:2.1677e-01 L10_fnorm:2.2275e-01 L11_fnorm:2.2269e-01 L12_fnorm:2.1052e-01 L1_l1linf:2.1980e-01 L2_l1linf:2.7992e-01 L3_l1linf:2.7963e-01 L4_l1linf:2.5784e-01 L5_l1linf:2.5895e-01 L6_l1linf:2.4909e-01 L7_l1linf:2.3695e-01 L8_l1linf:2.5327e-01 L9_l1linf:2.3366e-01 L10_l1linf:2.4712e-01 L11_l1linf:2.4859e-01 L12_l1linf:2.2752e-01 L1_spectral:3.4225e-02 L2_spectral:3.8784e-02 L3_spectral:4.2886e-02 L4_spectral:4.2136e-02 L5_spectral:3.8613e-02 L6_spectral:3.2187e-02 L7_spectral:3.0163e-02 L8_spectral:3.0956e-02 L9_spectral:3.1908e-02 L10_spectral:3.3296e-02 L11_spectral:3.4646e-02 L12_spectral:4.3027e-02 ip_v_neg_g:3.1035e-02 cos_v_neg_g:2.8326e-02 v_norm:9.5660e-01 g_norm:1.1453e+00 hv_norm:5.7919e-01 cos_v_hv:9.6561e-02 hg_norm:4.6995e+00 cos_g_hg:4.5415e-01 v_par:9.1528e-04 v_perp:9.5660e-01 L1_cos_v_neg_g:5.6492e-02 L1_v_norm:1.7020e-01 L2_cos_v_neg_g:7.2756e-02 L2_v_norm:1.5895e-01 L3_cos_v_neg_g:6.2704e-02 L3_v_norm:1.6766e-01 
L4_cos_v_neg_g:4.5536e-02 L4_v_norm:1.8848e-01 L5_cos_v_neg_g:4.0924e-02 L5_v_norm:1.9384e-01 L6_cos_v_neg_g:3.8670e-02 L6_v_norm:2.0562e-01 L7_cos_v_neg_g:3.5284e-02 L7_v_norm:2.0625e-01 L8_cos_v_neg_g:3.6190e-02 L8_v_norm:2.1276e-01 L9_cos_v_neg_g:3.6124e-02 L9_v_norm:2.1677e-01 L10_cos_v_neg_g:3.3444e-02 L10_v_norm:2.2275e-01 L11_cos_v_neg_g:3.4863e-02 L11_v_norm:2.2269e-01 L12_cos_v_neg_g:4.1183e-02 L12_v_norm:2.1052e-01 +step:1500 train loss:4.101642 +step:1501 train loss:4.114648 +step:1502 train loss:4.057210 +step:1503 train loss:4.108573 +step:1504 train loss:4.073835 +step:1505 train loss:4.046987 +step:1506 train loss:4.038103 +step:1507 train loss:4.066318 +step:1508 train loss:4.076097 +step:1509 train loss:4.134475 +step:1510 train loss:4.075425 +step:1511 train loss:4.102208 +step:1512 train loss:4.072406 +step:1513 train loss:4.144885 +step:1514 train loss:4.094374 +step:1515 train loss:4.161065 +step:1516 train loss:4.083301 +step:1517 train loss:4.100660 +step:1518 train loss:4.175409 +step:1519 train loss:4.136741 +step:1520 train loss:4.175845 +step:1521 train loss:4.088819 +step:1522 train loss:4.141398 +step:1523 train loss:4.142403 +step:1524 train loss:4.065655 +step:1525 train loss:4.144278 +step:1526 train loss:4.055929 +step:1527 train loss:4.119401 +step:1528 train loss:4.158980 +step:1529 train loss:4.120375 +step:1530 train loss:4.156987 +step:1531 train loss:4.082510 +step:1532 train loss:4.149443 +step:1533 train loss:4.122825 +step:1534 train loss:4.063831 +step:1535 train loss:4.124173 +step:1536 train loss:4.149054 +step:1537 train loss:4.101151 +step:1538 train loss:4.101897 +step:1539 train loss:4.105564 +step:1540 train loss:4.116751 +step:1541 train loss:4.084390 +step:1542 train loss:4.174806 +step:1543 train loss:4.194862 +step:1544 train loss:4.073171 +step:1545 train loss:4.059389 +step:1546 train loss:4.089479 +step:1547 train loss:4.081534 +step:1548 train loss:4.120614 +step:1549 train loss:4.052843 +step:1550 train loss:4.156540 +step:1551 train loss:4.092979 +step:1552 train loss:4.124111 +step:1553 train loss:4.137777 +step:1554 train loss:4.149135 +step:1555 train loss:4.098923 +step:1556 train loss:4.076617 +step:1557 train loss:4.082809 +step:1558 train loss:4.106162 +step:1559 train loss:4.073592 +step:1560 train loss:4.153089 +step:1561 train loss:4.122985 +step:1562 train loss:4.019474 +step:1563 train loss:3.999115 +step:1564 train loss:4.117277 +step:1565 train loss:4.104864 +step:1566 train loss:4.120845 +step:1567 train loss:4.122958 +step:1568 train loss:4.067776 +step:1569 train loss:4.071518 +step:1570 train loss:4.085731 +step:1571 train loss:4.063452 +step:1572 train loss:4.066871 +step:1573 train loss:4.114532 +step:1574 train loss:4.066819 +step:1575 train loss:4.090706 +step:1576 train loss:4.051726 +step:1577 train loss:4.073841 +step:1578 train loss:4.060423 +step:1579 train loss:4.134844 +step:1580 train loss:4.089094 +step:1581 train loss:4.116210 +step:1582 train loss:4.117844 +step:1583 train loss:4.091431 +step:1584 train loss:4.017541 +step:1585 train loss:4.100327 +step:1586 train loss:4.069895 +step:1587 train loss:4.094608 +step:1588 train loss:4.083571 +step:1589 train loss:4.119368 +step:1590 train loss:4.029949 +step:1591 train loss:4.083333 +step:1592 train loss:4.030380 +step:1593 train loss:4.069521 +step:1594 train loss:4.067271 +step:1595 train loss:4.060090 +step:1596 train loss:4.063770 +step:1597 train loss:3.997855 +step:1598 train loss:4.093088 +step:1599 train loss:4.107549 +step:1600 train 
loss:3.989462 +step:1601 train loss:4.059757 +step:1602 train loss:4.117445 +step:1603 train loss:4.116755 +step:1604 train loss:4.045672 +step:1605 train loss:4.094833 +step:1606 train loss:4.146784 +step:1607 train loss:4.038286 +step:1608 train loss:4.067063 +step:1609 train loss:4.076574 +step:1610 train loss:4.134852 +step:1611 train loss:4.062525 +step:1612 train loss:3.986844 +step:1613 train loss:4.053473 +step:1614 train loss:4.157394 +step:1615 train loss:4.117475 +step:1616 train loss:4.123458 +step:1617 train loss:4.118426 +step:1618 train loss:4.126687 +step:1619 train loss:4.288641 +step:1620 train loss:4.066787 +step:1621 train loss:4.130587 +step:1622 train loss:4.054210 +step:1623 train loss:4.109951 +step:1624 train loss:4.080925 +step:1625 train loss:4.153726 +step:1626 train loss:4.041463 +step:1627 train loss:4.045482 +step:1628 train loss:4.066652 +step:1629 train loss:4.092379 +step:1630 train loss:4.116290 +step:1631 train loss:4.059478 +step:1632 train loss:4.035235 +step:1633 train loss:4.045020 +step:1634 train loss:4.095211 +step:1635 train loss:4.038206 +step:1636 train loss:4.025680 +step:1637 train loss:4.096802 +step:1638 train loss:4.197463 +step:1639 train loss:4.008952 +step:1640 train loss:4.081394 +step:1641 train loss:4.044834 +step:1642 train loss:4.133014 +step:1643 train loss:4.035801 +step:1644 train loss:4.053023 +step:1645 train loss:4.027031 +step:1646 train loss:4.099049 +step:1647 train loss:3.997505 +step:1648 train loss:4.059371 +step:1649 train loss:4.026413 +step:1650 train loss:4.034980 +step:1651 train loss:4.059381 +step:1652 train loss:4.067450 +step:1653 train loss:4.075856 +step:1654 train loss:4.069527 +step:1655 train loss:4.045269 +step:1656 train loss:4.035869 +step:1657 train loss:4.043839 +step:1658 train loss:4.018705 +step:1659 train loss:4.092370 +step:1660 train loss:3.991743 +step:1661 train loss:4.106067 +step:1662 train loss:4.048573 +step:1663 train loss:4.034816 +step:1664 train loss:4.129986 +step:1665 train loss:4.051135 +step:1666 train loss:4.062685 +step:1667 train loss:4.080256 +step:1668 train loss:4.054847 +step:1669 train loss:4.015353 +step:1670 train loss:4.062119 +step:1671 train loss:4.061399 +step:1672 train loss:4.060949 +step:1673 train loss:4.019215 +step:1674 train loss:4.012544 +step:1675 train loss:4.061205 +step:1676 train loss:4.312373 +step:1677 train loss:4.076869 +step:1678 train loss:3.995517 +step:1679 train loss:4.118245 +step:1680 train loss:4.046494 +step:1681 train loss:4.095041 +step:1682 train loss:4.043724 +step:1683 train loss:4.042070 +step:1684 train loss:3.997702 +step:1685 train loss:4.051568 +step:1686 train loss:4.038977 +step:1687 train loss:4.050132 +step:1688 train loss:4.036900 +step:1689 train loss:4.019669 +step:1690 train loss:4.052403 +step:1691 train loss:4.031506 +step:1692 train loss:4.061089 +step:1693 train loss:4.023499 +step:1694 train loss:3.981805 +step:1695 train loss:4.000873 +step:1696 train loss:4.013135 +step:1697 train loss:4.051957 +step:1698 train loss:4.043141 +step:1699 train loss:4.005736 +step:1700 train loss:4.085567 +step:1701 train loss:4.023777 +step:1702 train loss:4.016547 +step:1703 train loss:4.035761 +step:1704 train loss:4.042149 +step:1705 train loss:4.054406 +step:1706 train loss:4.067403 +step:1707 train loss:4.061555 +step:1708 train loss:3.986708 +step:1709 train loss:4.080932 +step:1710 train loss:4.003965 +step:1711 train loss:4.008439 +step:1712 train loss:4.037573 +step:1713 train loss:4.004432 +step:1714 train loss:4.357713 
+step:1715 train loss:4.021656 +step:1716 train loss:4.001873 +step:1717 train loss:4.004376 +step:1718 train loss:4.080239 +step:1719 train loss:3.988196 +step:1720 train loss:4.065619 +step:1721 train loss:4.009228 +step:1722 train loss:3.980901 +step:1723 train loss:4.076726 +step:1724 train loss:4.030876 +step:1725 train loss:4.025435 +step:1726 train loss:4.027905 +step:1727 train loss:4.064355 +step:1728 train loss:4.072845 +step:1729 train loss:3.990714 +step:1730 train loss:4.070560 +step:1731 train loss:3.999635 +step:1732 train loss:4.015052 +step:1733 train loss:4.026525 +step:1734 train loss:4.054144 +step:1735 train loss:4.124895 +step:1736 train loss:4.024817 +step:1737 train loss:4.061175 +step:1738 train loss:4.016364 +step:1739 train loss:4.074256 +step:1740 train loss:4.066286 +step:1741 train loss:4.115305 +step:1742 train loss:4.107498 +step:1743 train loss:4.000757 +step:1744 train loss:4.014475 +step:1745 train loss:4.001405 +step:1746 train loss:3.985716 +step:1747 train loss:4.020973 +step:1748 train loss:3.965683 +step:1749 train loss:4.019444 +step:1750 validation loss:3.993258 +step:1750 train loss:4.046189 +step:1751 train loss:4.067852 +step:1752 train loss:4.024499 +step:1753 train loss:4.050354 +step:1754 train loss:4.037419 +step:1755 train loss:4.036438 +step:1756 train loss:4.060513 +step:1757 train loss:4.064970 +step:1758 train loss:3.981184 +step:1759 train loss:4.068536 +step:1760 train loss:4.022836 +step:1761 train loss:3.993853 +step:1762 train loss:3.997487 +step:1763 train loss:3.995267 +step:1764 train loss:4.281832 +step:1765 train loss:3.999236 +step:1766 train loss:4.086779 +step:1767 train loss:4.002896 +step:1768 train loss:3.978981 +step:1769 train loss:3.995867 +step:1770 train loss:4.011315 +step:1771 train loss:3.985352 +step:1772 train loss:4.096366 +step:1773 train loss:4.020974 +step:1774 train loss:4.024017 +step:1775 train loss:4.142449 +step:1776 train loss:4.031910 +step:1777 train loss:4.020796 +step:1778 train loss:4.074961 +step:1779 train loss:3.991349 +step:1780 train loss:4.048822 +step:1781 train loss:4.058030 +step:1782 train loss:4.080585 +step:1783 train loss:4.012122 +step:1784 train loss:4.098001 +step:1785 train loss:4.009492 +step:1786 train loss:4.004254 +step:1787 train loss:4.002060 +step:1788 train loss:4.026753 +step:1789 train loss:3.976169 +step:1790 train loss:3.990540 +step:1791 train loss:4.069444 +step:1792 train loss:4.069605 +step:1793 train loss:3.986139 +step:1794 train loss:4.034964 +step:1795 train loss:3.991961 +step:1796 train loss:3.969842 +step:1797 train loss:4.031012 +step:1798 train loss:3.968505 +step:1799 train loss:4.026699 +step:1800 train loss:4.048984 +step:1801 train loss:4.039317 +step:1802 train loss:4.047526 +step:1803 train loss:4.039380 +step:1804 train loss:4.039893 +step:1805 train loss:4.027926 +step:1806 train loss:4.042073 +step:1807 train loss:3.970424 +step:1808 train loss:4.033497 +step:1809 train loss:4.015651 +step:1810 train loss:4.011707 +step:1811 train loss:4.021751 +step:1812 train loss:4.006462 +step:1813 train loss:4.019705 +step:1814 train loss:4.074381 +step:1815 train loss:4.017187 +step:1816 train loss:3.970335 +step:1817 train loss:3.962175 +step:1818 train loss:4.020744 +step:1819 train loss:3.989084 +step:1820 train loss:4.032944 +step:1821 train loss:3.990794 +step:1822 train loss:3.971234 +step:1823 train loss:3.969082 +step:1824 train loss:4.044384 +step:1825 train loss:3.961131 +step:1826 train loss:4.009343 +step:1827 train loss:3.972502 +step:1828 
train loss:4.012774 +step:1829 train loss:3.975313 +step:1830 train loss:4.168653 +step:1831 train loss:3.932706 +step:1832 train loss:3.975262 +step:1833 train loss:4.024964 +step:1834 train loss:3.976559 +step:1835 train loss:3.994238 +step:1836 train loss:4.029906 +step:1837 train loss:3.953336 +step:1838 train loss:4.048295 +step:1839 train loss:4.021278 +step:1840 train loss:3.995332 +step:1841 train loss:4.019459 +step:1842 train loss:3.998730 +step:1843 train loss:3.940910 +step:1844 train loss:4.005765 +step:1845 train loss:3.972797 +step:1846 train loss:4.025953 +step:1847 train loss:4.081326 +step:1848 train loss:3.873876 +step:1849 train loss:3.979630 +step:1850 train loss:3.958724 +step:1851 train loss:3.993184 +step:1852 train loss:3.981358 +step:1853 train loss:4.031728 +step:1854 train loss:4.003044 +step:1855 train loss:3.985499 +step:1856 train loss:3.987850 +step:1857 train loss:3.992128 +step:1858 train loss:4.035273 +step:1859 train loss:3.990673 +step:1860 train loss:3.955286 +step:1861 train loss:3.973817 +step:1862 train loss:4.012274 +step:1863 train loss:4.048233 +step:1864 train loss:3.946614 +step:1865 train loss:3.967559 +step:1866 train loss:3.976356 +step:1867 train loss:4.002269 +step:1868 train loss:4.051136 +step:1869 train loss:3.968823 +step:1870 train loss:3.997767 +step:1871 train loss:3.934273 +step:1872 train loss:4.006565 +step:1873 train loss:4.072964 +step:1874 train loss:3.935531 +step:1875 train loss:4.023305 +step:1876 train loss:3.983447 +step:1877 train loss:4.020488 +step:1878 train loss:3.944721 +step:1879 train loss:4.002584 +step:1880 train loss:4.078114 +step:1881 train loss:4.001578 +step:1882 train loss:4.017970 +step:1883 train loss:4.046061 +step:1884 train loss:4.062088 +step:1885 train loss:4.016795 +step:1886 train loss:3.947621 +step:1887 train loss:3.968318 +step:1888 train loss:3.967710 +step:1889 train loss:3.990708 +step:1890 train loss:3.995545 +step:1891 train loss:3.924666 +step:1892 train loss:4.015749 +step:1893 train loss:3.939115 +step:1894 train loss:3.954216 +step:1895 train loss:3.993351 +step:1896 train loss:4.038518 +step:1897 train loss:3.934810 +step:1898 train loss:3.983901 +step:1899 train loss:3.997754 +step:1900 train loss:3.949412 +step:1901 train loss:4.019030 +step:1902 train loss:4.013252 +step:1903 train loss:3.951250 +step:1904 train loss:3.944209 +step:1905 train loss:3.942566 +step:1906 train loss:3.997484 +step:1907 train loss:3.945376 +step:1908 train loss:3.956204 +step:1909 train loss:4.051930 +step:1910 train loss:3.954066 +step:1911 train loss:3.948243 +step:1912 train loss:3.998058 +step:1913 train loss:3.940453 +step:1914 train loss:3.974258 +step:1915 train loss:3.937968 +step:1916 train loss:3.983796 +step:1917 train loss:3.971680 +step:1918 train loss:3.882936 +step:1919 train loss:4.032524 +step:1920 train loss:4.135202 +step:1921 train loss:3.917020 +step:1922 train loss:3.899981 +step:1923 train loss:4.001968 +step:1924 train loss:4.038841 +step:1925 train loss:3.985147 +step:1926 train loss:3.920811 +step:1927 train loss:4.002947 +step:1928 train loss:3.915077 +step:1929 train loss:3.942125 +step:1930 train loss:4.013862 +step:1931 train loss:3.921878 +step:1932 train loss:3.974993 +step:1933 train loss:3.971649 +step:1934 train loss:4.042589 +step:1935 train loss:3.994870 +step:1936 train loss:3.965344 +step:1937 train loss:3.908727 +step:1938 train loss:4.265546 +step:1939 train loss:4.030598 +step:1940 train loss:4.011436 +step:1941 train loss:4.012997 +step:1942 train loss:3.997538 
+step:1943 train loss:3.993860 +step:1944 train loss:3.950064 +step:1945 train loss:3.950375 +step:1946 train loss:3.969923 +step:1947 train loss:3.994754 +step:1948 train loss:3.898983 +step:1949 train loss:4.011451 +step:1950 train loss:3.948281 +step:1951 train loss:3.966853 +step:1952 train loss:3.995224 +step:1953 train loss:3.930431 +step:1954 train loss:3.965953 +step:1955 train loss:3.925873 +step:1956 train loss:4.000819 +step:1957 train loss:4.030020 +step:1958 train loss:4.039954 +step:1959 train loss:3.913784 +step:1960 train loss:3.965200 +step:1961 train loss:4.002741 +step:1962 train loss:3.990029 +step:1963 train loss:3.961742 +step:1964 train loss:3.992981 +step:1965 train loss:4.031697 +step:1966 train loss:3.934924 +step:1967 train loss:3.996395 +step:1968 train loss:3.933911 +step:1969 train loss:3.949388 +step:1970 train loss:4.005669 +step:1971 train loss:3.907697 +step:1972 train loss:4.018908 +step:1973 train loss:3.911828 +step:1974 train loss:3.961749 +step:1975 train loss:3.921682 +step:1976 train loss:3.942854 +step:1977 train loss:3.985860 +step:1978 train loss:3.927695 +step:1979 train loss:3.905941 +step:1980 train loss:3.944705 +step:1981 train loss:3.924649 +step:1982 train loss:4.008976 +step:1983 train loss:3.952956 +step:1984 train loss:3.992269 +step:1985 train loss:3.980759 +step:1986 train loss:3.966712 +step:1987 train loss:3.920968 +step:1988 train loss:3.950764 +step:1989 train loss:4.085760 +step:1990 train loss:3.928657 +step:1991 train loss:3.921670 +step:1992 train loss:3.931051 +step:1993 train loss:3.965888 +step:1994 train loss:3.957654 +step:1995 train loss:3.915381 +step:1996 train loss:3.962027 +step:1997 train loss:3.965302 +step:1998 train loss:3.919517 +step:1999 train loss:4.029160 +step:2000 validation loss:3.901007 total_sharp:6.7354e-02 L1_sharp:1.7570e-01 L2_sharp:1.7485e-01 L3_sharp:3.9375e-02 L4_sharp:1.0918e-02 L5_sharp:1.3965e-02 L6_sharp:1.1354e-02 L7_sharp:1.0534e-02 L8_sharp:8.2468e-03 L9_sharp:5.8341e-03 L10_sharp:4.4269e-03 L11_sharp:4.6525e-03 L12_sharp:1.2397e-02 total_fnorm:1.0108e+00 total_l1_linf:8.8997e+03 total_spectral:1.0108e+00 L1_fnorm:1.9292e-01 L2_fnorm:1.8921e-01 L3_fnorm:1.9871e-01 L4_fnorm:2.1775e-01 L5_fnorm:2.1898e-01 L6_fnorm:2.2372e-01 L7_fnorm:2.2309e-01 L8_fnorm:2.2838e-01 L9_fnorm:2.3384e-01 L10_fnorm:2.3784e-01 L11_fnorm:2.3734e-01 L12_fnorm:2.2739e-01 L1_l1linf:2.7000e-01 L2_l1linf:3.3248e-01 L3_l1linf:2.9674e-01 L4_l1linf:2.9751e-01 L5_l1linf:2.6374e-01 L6_l1linf:2.6505e-01 L7_l1linf:2.5648e-01 L8_l1linf:2.5401e-01 L9_l1linf:2.5717e-01 L10_l1linf:2.5973e-01 L11_l1linf:2.6679e-01 L12_l1linf:2.4997e-01 L1_spectral:3.9928e-02 L2_spectral:5.7421e-02 L3_spectral:4.8865e-02 L4_spectral:4.6349e-02 L5_spectral:3.9823e-02 L6_spectral:3.3657e-02 L7_spectral:3.0489e-02 L8_spectral:3.2102e-02 L9_spectral:3.3722e-02 L10_spectral:3.2596e-02 L11_spectral:3.4278e-02 L12_spectral:4.5457e-02 ip_v_neg_g:3.5622e-02 cos_v_neg_g:2.8406e-02 v_norm:1.0108e+00 g_norm:1.2406e+00 hv_norm:9.0528e-01 cos_v_hv:7.5205e-02 hg_norm:9.3478e+00 cos_g_hg:4.5549e-01 v_par:1.0113e-03 v_perp:1.0108e+00 L1_cos_v_neg_g:6.6083e-02 L1_v_norm:1.9292e-01 L2_cos_v_neg_g:1.2303e-01 L2_v_norm:1.8921e-01 L3_cos_v_neg_g:7.1833e-02 L3_v_norm:1.9871e-01 L4_cos_v_neg_g:4.2130e-02 L4_v_norm:2.1775e-01 L5_cos_v_neg_g:3.4315e-02 L5_v_norm:2.1898e-01 L6_cos_v_neg_g:2.7061e-02 L6_v_norm:2.2372e-01 L7_cos_v_neg_g:2.7552e-02 L7_v_norm:2.2309e-01 L8_cos_v_neg_g:2.5302e-02 L8_v_norm:2.2838e-01 L9_cos_v_neg_g:2.2287e-02 L9_v_norm:2.3384e-01 
L10_cos_v_neg_g:1.7337e-02 L10_v_norm:2.3784e-01 L11_cos_v_neg_g:1.6172e-02 L11_v_norm:2.3734e-01 L12_cos_v_neg_g:1.4077e-02 L12_v_norm:2.2739e-01 +step:2000 train loss:3.999853 +step:2001 train loss:3.923471 +step:2002 train loss:4.022082 +step:2003 train loss:4.065225 +step:2004 train loss:3.934965 +step:2005 train loss:4.031925 +step:2006 train loss:3.923634 +step:2007 train loss:3.996791 +step:2008 train loss:3.942014 +step:2009 train loss:3.941649 +step:2010 train loss:4.068688 +step:2011 train loss:3.919075 +step:2012 train loss:3.944723 +step:2013 train loss:3.957354 +step:2014 train loss:3.846262 +step:2015 train loss:3.977951 +step:2016 train loss:3.961659 +step:2017 train loss:3.966843 +step:2018 train loss:3.937021 +step:2019 train loss:3.958559 +step:2020 train loss:3.973510 +step:2021 train loss:3.932232 +step:2022 train loss:3.975040 +step:2023 train loss:3.950537 +step:2024 train loss:3.995980 +step:2025 train loss:3.946015 +step:2026 train loss:3.924860 +step:2027 train loss:3.955456 +step:2028 train loss:3.885769 +step:2029 train loss:3.919094 +step:2030 train loss:3.914331 +step:2031 train loss:3.883714 +step:2032 train loss:3.927549 +step:2033 train loss:3.930313 +step:2034 train loss:3.923594 +step:2035 train loss:3.964693 +step:2036 train loss:3.956252 +step:2037 train loss:3.941598 +step:2038 train loss:3.940970 +step:2039 train loss:3.931019 +step:2040 train loss:3.956378 +step:2041 train loss:3.963209 +step:2042 train loss:3.892895 +step:2043 train loss:4.047327 +step:2044 train loss:3.910734 +step:2045 train loss:3.933534 +step:2046 train loss:3.943802 +step:2047 train loss:3.909855 +step:2048 train loss:3.955039 +step:2049 train loss:3.911615 +step:2050 train loss:3.938710 +step:2051 train loss:3.899303 +step:2052 train loss:3.943875 +step:2053 train loss:3.954930 +step:2054 train loss:3.920215 +step:2055 train loss:3.935925 +step:2056 train loss:3.977723 +step:2057 train loss:3.974067 +step:2058 train loss:3.941771 +step:2059 train loss:4.019501 +step:2060 train loss:3.968628 +step:2061 train loss:3.917687 +step:2062 train loss:3.944678 +step:2063 train loss:3.850090 +step:2064 train loss:3.962942 +step:2065 train loss:3.972536 +step:2066 train loss:3.838850 +step:2067 train loss:3.877352 +step:2068 train loss:3.989028 +step:2069 train loss:3.923407 +step:2070 train loss:3.923040 +step:2071 train loss:3.971078 +step:2072 train loss:3.900021 +step:2073 train loss:3.949055 +step:2074 train loss:3.924137 +step:2075 train loss:4.009624 +step:2076 train loss:3.954391 +step:2077 train loss:3.966974 +step:2078 train loss:3.922018 +step:2079 train loss:4.072892 +step:2080 train loss:3.896685 +step:2081 train loss:4.003151 +step:2082 train loss:3.934982 +step:2083 train loss:3.917331 +step:2084 train loss:3.899230 +step:2085 train loss:3.940831 +step:2086 train loss:3.952820 +step:2087 train loss:3.991724 +step:2088 train loss:3.856676 +step:2089 train loss:3.888994 +step:2090 train loss:3.928361 +step:2091 train loss:3.949328 +step:2092 train loss:3.927354 +step:2093 train loss:3.912557 +step:2094 train loss:3.949243 +step:2095 train loss:3.896106 +step:2096 train loss:3.880061 +step:2097 train loss:3.913842 +step:2098 train loss:3.913050 +step:2099 train loss:3.888229 +step:2100 train loss:3.962742 +step:2101 train loss:3.950338 +step:2102 train loss:3.922490 +step:2103 train loss:3.938373 +step:2104 train loss:3.914006 +step:2105 train loss:3.923967 +step:2106 train loss:3.918482 +step:2107 train loss:3.982389 +step:2108 train loss:3.902162 +step:2109 train 
loss:3.864470 +step:2110 train loss:3.962899 +step:2111 train loss:3.912728 +step:2112 train loss:3.975272 +step:2113 train loss:3.911057 +step:2114 train loss:3.918565 +step:2115 train loss:3.969203 +step:2116 train loss:3.902137 +step:2117 train loss:3.918307 +step:2118 train loss:3.908612 +step:2119 train loss:3.841803 +step:2120 train loss:3.929758 +step:2121 train loss:3.917884 +step:2122 train loss:3.923377 +step:2123 train loss:3.979570 +step:2124 train loss:3.978356 +step:2125 train loss:3.888014 +step:2126 train loss:3.895734 +step:2127 train loss:3.887613 +step:2128 train loss:3.881190 +step:2129 train loss:3.907545 +step:2130 train loss:3.908553 +step:2131 train loss:3.933452 +step:2132 train loss:3.862740 +step:2133 train loss:3.972785 +step:2134 train loss:3.923765 +step:2135 train loss:3.886618 +step:2136 train loss:3.980300 +step:2137 train loss:3.945128 +step:2138 train loss:3.899196 +step:2139 train loss:3.902067 +step:2140 train loss:3.906982 +step:2141 train loss:3.951570 +step:2142 train loss:3.923689 +step:2143 train loss:3.845801 +step:2144 train loss:3.949773 +step:2145 train loss:3.916739 +step:2146 train loss:3.954507 +step:2147 train loss:4.052541 +step:2148 train loss:3.858718 +step:2149 train loss:3.867292 +step:2150 train loss:3.894380 +step:2151 train loss:3.930324 +step:2152 train loss:3.925137 +step:2153 train loss:3.967120 +step:2154 train loss:3.886210 +step:2155 train loss:3.970613 +step:2156 train loss:3.885391 +step:2157 train loss:3.960362 +step:2158 train loss:3.995409 +step:2159 train loss:3.922822 +step:2160 train loss:3.996697 +step:2161 train loss:3.897900 +step:2162 train loss:3.903479 +step:2163 train loss:3.877675 +step:2164 train loss:3.900106 +step:2165 train loss:3.881545 +step:2166 train loss:3.998163 +step:2167 train loss:3.905213 +step:2168 train loss:3.923527 +step:2169 train loss:3.869205 +step:2170 train loss:4.011355 +step:2171 train loss:3.971729 +step:2172 train loss:3.913824 +step:2173 train loss:3.896737 +step:2174 train loss:3.961757 +step:2175 train loss:3.897979 +step:2176 train loss:3.971619 +step:2177 train loss:3.941008 +step:2178 train loss:3.871368 +step:2179 train loss:3.933288 +step:2180 train loss:3.955081 +step:2181 train loss:3.883301 +step:2182 train loss:3.933630 +step:2183 train loss:3.925796 +step:2184 train loss:3.883387 +step:2185 train loss:3.855924 +step:2186 train loss:3.897600 +step:2187 train loss:3.911625 +step:2188 train loss:3.960371 +step:2189 train loss:3.860533 +step:2190 train loss:3.907796 +step:2191 train loss:3.961593 +step:2192 train loss:3.885380 +step:2193 train loss:3.851603 +step:2194 train loss:3.864743 +step:2195 train loss:3.882886 +step:2196 train loss:3.885703 +step:2197 train loss:3.870430 +step:2198 train loss:3.890305 +step:2199 train loss:3.965079 +step:2200 train loss:3.896605 +step:2201 train loss:3.902089 +step:2202 train loss:3.863712 +step:2203 train loss:3.883960 +step:2204 train loss:3.916000 +step:2205 train loss:3.894160 +step:2206 train loss:3.896082 +step:2207 train loss:3.889173 +step:2208 train loss:3.870267 +step:2209 train loss:4.145256 +step:2210 train loss:3.917213 +step:2211 train loss:3.914199 +step:2212 train loss:3.885463 +step:2213 train loss:3.975407 +step:2214 train loss:3.960199 +step:2215 train loss:3.889583 +step:2216 train loss:3.853254 +step:2217 train loss:3.883293 +step:2218 train loss:3.881222 +step:2219 train loss:3.918420 +step:2220 train loss:3.861501 +step:2221 train loss:3.893109 +step:2222 train loss:3.911587 +step:2223 train loss:3.940717 
+step:2224 train loss:3.920445 +step:2225 train loss:3.859068 +step:2226 train loss:3.927554 +step:2227 train loss:3.928452 +step:2228 train loss:3.926948 +step:2229 train loss:3.864163 +step:2230 train loss:3.990027 +step:2231 train loss:3.905415 +step:2232 train loss:3.905832 +step:2233 train loss:3.946774 +step:2234 train loss:3.844817 +step:2235 train loss:3.935659 +step:2236 train loss:3.875737 +step:2237 train loss:4.008339 +step:2238 train loss:3.811928 +step:2239 train loss:3.888952 +step:2240 train loss:3.902447 +step:2241 train loss:3.817073 +step:2242 train loss:3.954229 +step:2243 train loss:3.995275 +step:2244 train loss:3.874436 +step:2245 train loss:3.874377 +step:2246 train loss:3.845806 +step:2247 train loss:3.846630 +step:2248 train loss:3.898200 +step:2249 train loss:3.881218 +step:2250 validation loss:3.834185 +step:2250 train loss:3.891563 +step:2251 train loss:3.855853 +step:2252 train loss:3.858047 +step:2253 train loss:3.885262 +step:2254 train loss:3.890426 +step:2255 train loss:3.846910 +step:2256 train loss:3.904127 +step:2257 train loss:3.889253 +step:2258 train loss:3.880048 +step:2259 train loss:3.892976 +step:2260 train loss:3.849701 +step:2261 train loss:3.925476 +step:2262 train loss:3.944573 +step:2263 train loss:3.902767 +step:2264 train loss:4.013002 +step:2265 train loss:3.862408 +step:2266 train loss:3.910214 +step:2267 train loss:3.867498 +step:2268 train loss:3.869537 +step:2269 train loss:3.879922 +step:2270 train loss:3.874011 +step:2271 train loss:3.876957 +step:2272 train loss:3.917236 +step:2273 train loss:3.838196 +step:2274 train loss:3.866405 +step:2275 train loss:3.826266 +step:2276 train loss:3.899316 +step:2277 train loss:3.910471 +step:2278 train loss:3.891221 +step:2279 train loss:3.874049 +step:2280 train loss:3.786278 +step:2281 train loss:3.926544 +step:2282 train loss:3.860361 +step:2283 train loss:3.844395 +step:2284 train loss:3.864543 +step:2285 train loss:3.918805 +step:2286 train loss:3.873197 +step:2287 train loss:3.911467 +step:2288 train loss:3.884410 +step:2289 train loss:3.879258 +step:2290 train loss:3.882199 +step:2291 train loss:3.872753 +step:2292 train loss:3.912549 +step:2293 train loss:3.893173 +step:2294 train loss:3.889071 +step:2295 train loss:3.946671 +step:2296 train loss:3.881325 +step:2297 train loss:3.856714 +step:2298 train loss:3.909670 +step:2299 train loss:3.884624 +step:2300 train loss:3.802628 +step:2301 train loss:3.894750 +step:2302 train loss:3.914086 +step:2303 train loss:3.876827 +step:2304 train loss:3.869106 +step:2305 train loss:3.910810 +step:2306 train loss:3.905176 +step:2307 train loss:3.877523 +step:2308 train loss:3.901006 +step:2309 train loss:3.857229 +step:2310 train loss:3.845020 +step:2311 train loss:3.832976 +step:2312 train loss:3.896713 +step:2313 train loss:3.813967 +step:2314 train loss:3.884501 +step:2315 train loss:3.904092 +step:2316 train loss:3.940878 +step:2317 train loss:3.811950 +step:2318 train loss:3.854253 +step:2319 train loss:3.908213 +step:2320 train loss:3.875080 +step:2321 train loss:3.849622 +step:2322 train loss:3.861737 +step:2323 train loss:3.860776 +step:2324 train loss:3.886073 +step:2325 train loss:3.825499 +step:2326 train loss:3.851285 +step:2327 train loss:3.971966 +step:2328 train loss:3.925334 +step:2329 train loss:3.882800 +step:2330 train loss:3.836694 +step:2331 train loss:3.881731 +step:2332 train loss:3.803730 +step:2333 train loss:3.863529 +step:2334 train loss:3.843667 +step:2335 train loss:3.826904 +step:2336 train loss:4.079283 +step:2337 
train loss:3.860569 +step:2338 train loss:3.898212 +step:2339 train loss:3.898987 +step:2340 train loss:3.914025 +step:2341 train loss:3.898636 +step:2342 train loss:3.853112 +step:2343 train loss:3.872705 +step:2344 train loss:3.912186 +step:2345 train loss:3.870646 +step:2346 train loss:3.900039 +step:2347 train loss:3.820923 +step:2348 train loss:3.882645 +step:2349 train loss:3.832897 +step:2350 train loss:3.889314 +step:2351 train loss:3.897999 +step:2352 train loss:3.898755 +step:2353 train loss:3.854565 +step:2354 train loss:3.905359 +step:2355 train loss:3.891380 +step:2356 train loss:3.927525 +step:2357 train loss:3.836541 +step:2358 train loss:3.850678 +step:2359 train loss:3.873582 +step:2360 train loss:3.895442 +step:2361 train loss:3.930511 +step:2362 train loss:3.758139 +step:2363 train loss:3.953452 +step:2364 train loss:3.901303 +step:2365 train loss:3.872804 +step:2366 train loss:3.826471 +step:2367 train loss:3.885471 +step:2368 train loss:3.878309 +step:2369 train loss:3.863558 +step:2370 train loss:3.881458 +step:2371 train loss:3.936740 +step:2372 train loss:3.793833 +step:2373 train loss:3.934921 +step:2374 train loss:3.914812 +step:2375 train loss:3.901291 +step:2376 train loss:3.886176 +step:2377 train loss:3.834902 +step:2378 train loss:3.881186 +step:2379 train loss:3.865160 +step:2380 train loss:3.922775 +step:2381 train loss:4.013005 +step:2382 train loss:3.809139 +step:2383 train loss:3.864592 +step:2384 train loss:3.897214 +step:2385 train loss:3.790747 +step:2386 train loss:3.948369 +step:2387 train loss:3.828522 +step:2388 train loss:3.875371 +step:2389 train loss:3.894918 +step:2390 train loss:3.846498 +step:2391 train loss:3.867346 +step:2392 train loss:3.893066 +step:2393 train loss:3.850152 +step:2394 train loss:3.872885 +step:2395 train loss:3.859406 +step:2396 train loss:3.864427 +step:2397 train loss:3.844418 +step:2398 train loss:3.895462 +step:2399 train loss:3.862390 +step:2400 train loss:3.838498 +step:2401 train loss:3.879760 +step:2402 train loss:3.829952 +step:2403 train loss:3.884152 +step:2404 train loss:3.841409 +step:2405 train loss:3.845793 +step:2406 train loss:3.870202 +step:2407 train loss:3.813169 +step:2408 train loss:3.857921 +step:2409 train loss:3.850231 +step:2410 train loss:3.847907 +step:2411 train loss:3.919986 +step:2412 train loss:3.908968 +step:2413 train loss:3.944422 +step:2414 train loss:3.835572 +step:2415 train loss:3.830469 +step:2416 train loss:3.847606 +step:2417 train loss:3.881347 +step:2418 train loss:3.900707 +step:2419 train loss:3.828869 +step:2420 train loss:3.849285 +step:2421 train loss:3.875448 +step:2422 train loss:3.927402 +step:2423 train loss:3.867350 +step:2424 train loss:3.834607 +step:2425 train loss:3.892796 +step:2426 train loss:3.834242 +step:2427 train loss:3.861294 +step:2428 train loss:3.936226 +step:2429 train loss:3.888140 +step:2430 train loss:3.979579 +step:2431 train loss:3.890638 +step:2432 train loss:3.859188 +step:2433 train loss:3.838596 +step:2434 train loss:3.822000 +step:2435 train loss:3.876438 +step:2436 train loss:3.838973 +step:2437 train loss:3.871407 +step:2438 train loss:3.913674 +step:2439 train loss:3.898916 +step:2440 train loss:3.839318 +step:2441 train loss:3.873891 +step:2442 train loss:3.865767 +step:2443 train loss:3.832056 +step:2444 train loss:3.867528 +step:2445 train loss:3.863556 +step:2446 train loss:3.833799 +step:2447 train loss:3.817037 +step:2448 train loss:3.866091 +step:2449 train loss:3.894065 +step:2450 train loss:3.854905 +step:2451 train loss:3.773107 
+step:2452 train loss:3.874810 +step:2453 train loss:3.843842 +step:2454 train loss:3.843185 +step:2455 train loss:3.892883 +step:2456 train loss:3.846254 +step:2457 train loss:3.903555 +step:2458 train loss:3.883679 +step:2459 train loss:3.859451 +step:2460 train loss:3.863418 +step:2461 train loss:3.898654 +step:2462 train loss:3.870315 +step:2463 train loss:3.846333 +step:2464 train loss:3.866864 +step:2465 train loss:3.938517 +step:2466 train loss:4.021287 +step:2467 train loss:3.934806 +step:2468 train loss:3.827343 +step:2469 train loss:3.894276 +step:2470 train loss:3.944626 +step:2471 train loss:3.943028 +step:2472 train loss:3.936779 +step:2473 train loss:3.868358 +step:2474 train loss:3.828726 +step:2475 train loss:3.876059 +step:2476 train loss:3.956938 +step:2477 train loss:3.872082 +step:2478 train loss:3.828552 +step:2479 train loss:3.867481 +step:2480 train loss:3.853863 +step:2481 train loss:4.044313 +step:2482 train loss:3.854749 +step:2483 train loss:3.886203 +step:2484 train loss:3.833404 +step:2485 train loss:3.818439 +step:2486 train loss:3.857768 +step:2487 train loss:3.891218 +step:2488 train loss:3.806942 +step:2489 train loss:3.913925 +step:2490 train loss:3.835985 +step:2491 train loss:3.846201 +step:2492 train loss:3.888237 +step:2493 train loss:3.922842 +step:2494 train loss:3.846719 +step:2495 train loss:3.875338 +step:2496 train loss:3.853550 +step:2497 train loss:3.869435 +step:2498 train loss:3.874512 +step:2499 train loss:3.869740 +step:2500 validation loss:3.791480 total_sharp:4.5796e-02 L1_sharp:5.1923e-02 L2_sharp:4.7867e-02 L3_sharp:1.6581e-02 L4_sharp:6.0976e-03 L5_sharp:8.4413e-03 L6_sharp:1.0864e-02 L7_sharp:1.1722e-02 L8_sharp:8.6846e-03 L9_sharp:6.8974e-03 L10_sharp:4.3630e-03 L11_sharp:5.0651e-03 L12_sharp:1.1064e-02 total_fnorm:1.0383e+00 total_l1_linf:9.1725e+03 total_spectral:1.0383e+00 L1_fnorm:2.0757e-01 L2_fnorm:1.9918e-01 L3_fnorm:2.1140e-01 L4_fnorm:2.3089e-01 L5_fnorm:2.2992e-01 L6_fnorm:2.3628e-01 L7_fnorm:2.3647e-01 L8_fnorm:2.4102e-01 L9_fnorm:2.4464e-01 L10_fnorm:2.4818e-01 L11_fnorm:2.4713e-01 L12_fnorm:2.3632e-01 L1_l1linf:3.0105e-01 L2_l1linf:3.3042e-01 L3_l1linf:3.0442e-01 L4_l1linf:2.9684e-01 L5_l1linf:2.9292e-01 L6_l1linf:2.8403e-01 L7_l1linf:2.7013e-01 L8_l1linf:2.7920e-01 L9_l1linf:2.5859e-01 L10_l1linf:2.6042e-01 L11_l1linf:2.6981e-01 L12_l1linf:2.5002e-01 L1_spectral:4.3256e-02 L2_spectral:4.9846e-02 L3_spectral:4.8965e-02 L4_spectral:4.3330e-02 L5_spectral:4.1984e-02 L6_spectral:3.5996e-02 L7_spectral:3.3772e-02 L8_spectral:3.4203e-02 L9_spectral:3.3844e-02 L10_spectral:3.3872e-02 L11_spectral:3.5325e-02 L12_spectral:4.2937e-02 ip_v_neg_g:2.5090e-02 cos_v_neg_g:2.2164e-02 v_norm:1.0383e+00 g_norm:1.0903e+00 hv_norm:5.4007e-01 cos_v_hv:8.8040e-02 hg_norm:3.7550e+00 cos_g_hg:4.5965e-01 v_par:6.9186e-04 v_perp:1.0383e+00 L1_cos_v_neg_g:3.8983e-02 L1_v_norm:2.0757e-01 L2_cos_v_neg_g:5.8400e-02 L2_v_norm:1.9918e-01 L3_cos_v_neg_g:4.4026e-02 L3_v_norm:2.1140e-01 L4_cos_v_neg_g:3.1703e-02 L4_v_norm:2.3089e-01 L5_cos_v_neg_g:3.2112e-02 L5_v_norm:2.2992e-01 L6_cos_v_neg_g:2.7243e-02 L6_v_norm:2.3628e-01 L7_cos_v_neg_g:2.5628e-02 L7_v_norm:2.3647e-01 L8_cos_v_neg_g:2.6251e-02 L8_v_norm:2.4102e-01 L9_cos_v_neg_g:2.6570e-02 L9_v_norm:2.4464e-01 L10_cos_v_neg_g:2.0577e-02 L10_v_norm:2.4818e-01 L11_cos_v_neg_g:2.1968e-02 L11_v_norm:2.4713e-01 L12_cos_v_neg_g:2.5185e-02 L12_v_norm:2.3632e-01 +step:2500 train loss:3.817795 +step:2501 train loss:3.879816 +step:2502 train loss:3.871118 +step:2503 train loss:3.804152 +step:2504 train 
loss:3.836018 +step:2505 train loss:3.861268 +step:2506 train loss:3.820864 +step:2507 train loss:3.846316 +step:2508 train loss:3.799335 +step:2509 train loss:3.816948 +step:2510 train loss:3.815483 +step:2511 train loss:3.864836 +step:2512 train loss:3.907652 +step:2513 train loss:3.854968 +step:2514 train loss:3.844083 +step:2515 train loss:3.990126 +step:2516 train loss:3.873468 +step:2517 train loss:3.939395 +step:2518 train loss:3.908107 +step:2519 train loss:3.873305 +step:2520 train loss:3.881655 +step:2521 train loss:3.854831 +step:2522 train loss:3.890328 +step:2523 train loss:3.806376 +step:2524 train loss:3.864449 +step:2525 train loss:3.855532 +step:2526 train loss:3.902053 +step:2527 train loss:3.889929 +step:2528 train loss:3.874581 +step:2529 train loss:3.892566 +step:2530 train loss:3.870624 +step:2531 train loss:3.808784 +step:2532 train loss:3.906883 +step:2533 train loss:3.801158 +step:2534 train loss:3.895660 +step:2535 train loss:3.849485 +step:2536 train loss:3.770780 +step:2537 train loss:3.887099 +step:2538 train loss:3.864904 +step:2539 train loss:3.882035 +step:2540 train loss:3.820482 +step:2541 train loss:3.843717 +step:2542 train loss:3.854465 +step:2543 train loss:3.845867 +step:2544 train loss:3.832218 +step:2545 train loss:3.819770 +step:2546 train loss:3.784977 +step:2547 train loss:3.831605 +step:2548 train loss:3.852989 +step:2549 train loss:3.854498 +step:2550 train loss:3.982563 +step:2551 train loss:4.058302 +step:2552 train loss:3.793406 +step:2553 train loss:3.828180 +step:2554 train loss:3.973789 +step:2555 train loss:3.867888 +step:2556 train loss:3.794770 +step:2557 train loss:3.883864 +step:2558 train loss:3.879991 +step:2559 train loss:3.831690 +step:2560 train loss:3.816549 +step:2561 train loss:3.912765 +step:2562 train loss:3.866306 +step:2563 train loss:3.801921 +step:2564 train loss:3.875864 +step:2565 train loss:3.855827 +step:2566 train loss:3.839839 +step:2567 train loss:3.811733 +step:2568 train loss:3.869112 +step:2569 train loss:3.873313 +step:2570 train loss:3.824359 +step:2571 train loss:3.905118 +step:2572 train loss:3.866444 +step:2573 train loss:3.800147 +step:2574 train loss:3.841712 +step:2575 train loss:3.888929 +step:2576 train loss:3.841447 +step:2577 train loss:3.802556 +step:2578 train loss:3.842527 +step:2579 train loss:3.822802 +step:2580 train loss:3.795485 +step:2581 train loss:3.806906 +step:2582 train loss:3.812295 +step:2583 train loss:3.842542 +step:2584 train loss:3.851801 +step:2585 train loss:3.818332 +step:2586 train loss:3.843328 +step:2587 train loss:3.774724 +step:2588 train loss:3.803755 +step:2589 train loss:3.880973 +step:2590 train loss:3.804096 +step:2591 train loss:3.862428 +step:2592 train loss:3.910998 +step:2593 train loss:3.869231 +step:2594 train loss:3.830345 +step:2595 train loss:3.837340 +step:2596 train loss:3.881550 +step:2597 train loss:3.763189 +step:2598 train loss:3.918827 +step:2599 train loss:3.865234 +step:2600 train loss:3.896614 +step:2601 train loss:3.835207 +step:2602 train loss:3.865732 +step:2603 train loss:3.862335 +step:2604 train loss:3.781711 +step:2605 train loss:3.909943 +step:2606 train loss:3.858852 +step:2607 train loss:3.818757 +step:2608 train loss:3.789228 +step:2609 train loss:3.815107 +step:2610 train loss:3.841480 +step:2611 train loss:3.881928 +step:2612 train loss:3.841785 +step:2613 train loss:3.815534 +step:2614 train loss:3.807154 +step:2615 train loss:3.801718 +step:2616 train loss:3.875173 +step:2617 train loss:3.833599 +step:2618 train loss:3.801199 
+step:2619 train loss:3.820387 +step:2620 train loss:3.814817 +step:2621 train loss:3.822551 +step:2622 train loss:3.896826 +step:2623 train loss:3.770778 +step:2624 train loss:3.787270 +step:2625 train loss:3.857820 +step:2626 train loss:3.852652 +step:2627 train loss:3.831681 +step:2628 train loss:3.881601 +step:2629 train loss:3.832071 +step:2630 train loss:3.823665 +step:2631 train loss:3.852311 +step:2632 train loss:3.823791 +step:2633 train loss:3.805901 +step:2634 train loss:3.854206 +step:2635 train loss:3.835021 +step:2636 train loss:3.887245 +step:2637 train loss:3.836754 +step:2638 train loss:3.821549 +step:2639 train loss:3.877230 +step:2640 train loss:3.792200 +step:2641 train loss:3.852046 +step:2642 train loss:3.774277 +step:2643 train loss:3.777609 +step:2644 train loss:3.868300 +step:2645 train loss:3.800586 +step:2646 train loss:3.835770 +step:2647 train loss:3.853695 +step:2648 train loss:3.884132 +step:2649 train loss:3.795065 +step:2650 train loss:3.786357 +step:2651 train loss:3.827230 +step:2652 train loss:3.799747 +step:2653 train loss:3.868960 +step:2654 train loss:3.831729 +step:2655 train loss:3.822926 +step:2656 train loss:3.841246 +step:2657 train loss:3.865126 +step:2658 train loss:3.872496 +step:2659 train loss:3.850896 +step:2660 train loss:3.838320 +step:2661 train loss:3.880175 +step:2662 train loss:3.858831 +step:2663 train loss:3.833351 +step:2664 train loss:3.845832 +step:2665 train loss:3.793670 +step:2666 train loss:3.824317 +step:2667 train loss:3.828072 +step:2668 train loss:3.806164 +step:2669 train loss:3.817785 +step:2670 train loss:3.839832 +step:2671 train loss:3.817135 +step:2672 train loss:3.837653 +step:2673 train loss:3.772429 +step:2674 train loss:3.865174 +step:2675 train loss:3.839355 +step:2676 train loss:3.860481 +step:2677 train loss:3.840806 +step:2678 train loss:3.822626 +step:2679 train loss:3.809611 +step:2680 train loss:3.793457 +step:2681 train loss:3.768047 +step:2682 train loss:3.849556 +step:2683 train loss:3.822640 +step:2684 train loss:3.853048 +step:2685 train loss:3.783737 +step:2686 train loss:3.789143 +step:2687 train loss:3.868807 +step:2688 train loss:3.885655 +step:2689 train loss:3.788679 +step:2690 train loss:3.872536 +step:2691 train loss:3.841497 +step:2692 train loss:3.867992 +step:2693 train loss:3.917756 +step:2694 train loss:3.816092 +step:2695 train loss:3.836101 +step:2696 train loss:3.836426 +step:2697 train loss:3.832657 +step:2698 train loss:3.838328 +step:2699 train loss:3.854831 +step:2700 train loss:3.827744 +step:2701 train loss:3.893684 +step:2702 train loss:3.829531 +step:2703 train loss:3.788365 +step:2704 train loss:3.873651 +step:2705 train loss:3.861048 +step:2706 train loss:3.806509 +step:2707 train loss:3.764743 +step:2708 train loss:3.855090 +step:2709 train loss:3.838672 +step:2710 train loss:3.838636 +step:2711 train loss:3.806812 +step:2712 train loss:3.865528 +step:2713 train loss:3.871161 +step:2714 train loss:3.810366 +step:2715 train loss:3.806051 +step:2716 train loss:3.870086 +step:2717 train loss:3.837307 +step:2718 train loss:3.831193 +step:2719 train loss:3.834645 +step:2720 train loss:3.796072 +step:2721 train loss:3.875230 +step:2722 train loss:3.805335 +step:2723 train loss:3.790151 +step:2724 train loss:3.811637 +step:2725 train loss:3.813026 +step:2726 train loss:3.787113 +step:2727 train loss:3.847092 +step:2728 train loss:3.783303 +step:2729 train loss:3.913278 +step:2730 train loss:3.854349 +step:2731 train loss:3.892526 +step:2732 train loss:3.804788 +step:2733 train 
loss:3.801428 +step:2734 train loss:3.846348 +step:2735 train loss:3.847232 +step:2736 train loss:3.770074 +step:2737 train loss:3.827296 +step:2738 train loss:3.882611 +step:2739 train loss:3.799778 +step:2740 train loss:3.804753 +step:2741 train loss:3.792268 +step:2742 train loss:3.713408 +step:2743 train loss:3.827981 +step:2744 train loss:3.853138 +step:2745 train loss:3.802294 +step:2746 train loss:3.819715 +step:2747 train loss:3.803308 +step:2748 train loss:3.763874 +step:2749 train loss:3.833152 +step:2750 validation loss:3.755907 +step:2750 train loss:3.840364 +step:2751 train loss:3.860944 +step:2752 train loss:3.844314 +step:2753 train loss:3.836622 +step:2754 train loss:3.775138 +step:2755 train loss:3.841026 +step:2756 train loss:3.814252 +step:2757 train loss:3.803789 +step:2758 train loss:3.828990 +step:2759 train loss:3.839015 +step:2760 train loss:3.750341 +step:2761 train loss:3.764573 +step:2762 train loss:3.781948 +step:2763 train loss:3.804119 +step:2764 train loss:3.751349 +step:2765 train loss:3.792114 +step:2766 train loss:3.889155 +step:2767 train loss:3.764067 +step:2768 train loss:3.827505 +step:2769 train loss:3.804244 +step:2770 train loss:3.824070 +step:2771 train loss:3.846436 +step:2772 train loss:3.810621 +step:2773 train loss:3.807713 +step:2774 train loss:3.800911 +step:2775 train loss:3.817981 +step:2776 train loss:3.769262 +step:2777 train loss:3.800256 +step:2778 train loss:3.810349 +step:2779 train loss:3.835051 +step:2780 train loss:3.807389 +step:2781 train loss:3.793003 +step:2782 train loss:3.780695 +step:2783 train loss:3.813345 +step:2784 train loss:3.820344 +step:2785 train loss:3.891699 +step:2786 train loss:3.856692 +step:2787 train loss:3.817758 +step:2788 train loss:3.809808 +step:2789 train loss:3.808922 +step:2790 train loss:3.744740 +step:2791 train loss:3.845965 +step:2792 train loss:3.836741 +step:2793 train loss:3.798178 +step:2794 train loss:3.811197 +step:2795 train loss:3.828728 +step:2796 train loss:3.815881 +step:2797 train loss:3.865547 +step:2798 train loss:3.851998 +step:2799 train loss:3.761882 +step:2800 train loss:3.804939 +step:2801 train loss:3.842481 +step:2802 train loss:3.868017 +step:2803 train loss:3.835951 +step:2804 train loss:3.771894 +step:2805 train loss:3.811468 +step:2806 train loss:3.803054 +step:2807 train loss:3.834628 +step:2808 train loss:3.776601 +step:2809 train loss:3.842822 +step:2810 train loss:3.830767 +step:2811 train loss:3.820651 +step:2812 train loss:3.867465 +step:2813 train loss:3.838504 +step:2814 train loss:3.827978 +step:2815 train loss:3.838594 +step:2816 train loss:3.839821 +step:2817 train loss:3.776634 +step:2818 train loss:3.882780 +step:2819 train loss:3.805340 +step:2820 train loss:3.801382 +step:2821 train loss:3.779708 +step:2822 train loss:3.823964 +step:2823 train loss:3.772231 +step:2824 train loss:3.668075 +step:2825 train loss:3.823860 +step:2826 train loss:3.822175 +step:2827 train loss:3.847225 +step:2828 train loss:3.838446 +step:2829 train loss:3.827984 +step:2830 train loss:3.859106 +step:2831 train loss:3.797791 +step:2832 train loss:3.769929 +step:2833 train loss:3.827726 +step:2834 train loss:3.778970 +step:2835 train loss:3.812888 +step:2836 train loss:3.816906 +step:2837 train loss:3.814507 +step:2838 train loss:3.754631 +step:2839 train loss:3.851634 +step:2840 train loss:3.816066 +step:2841 train loss:3.897786 +step:2842 train loss:3.842216 +step:2843 train loss:3.833415 +step:2844 train loss:3.863281 +step:2845 train loss:3.815663 +step:2846 train loss:3.763860 
+step:2847 train loss:3.854510 +step:2848 train loss:3.810444 +step:2849 train loss:3.796884 +step:2850 train loss:3.858285 +step:2851 train loss:3.810363 +step:2852 train loss:3.891032 +step:2853 train loss:3.803685 +step:2854 train loss:3.748614 +step:2855 train loss:3.830847 +step:2856 train loss:3.750205 +step:2857 train loss:3.857009 +step:2858 train loss:3.810782 +step:2859 train loss:3.809907 +step:2860 train loss:3.790918 +step:2861 train loss:3.772980 +step:2862 train loss:3.800189 +step:2863 train loss:3.782672 +step:2864 train loss:3.792665 +step:2865 train loss:3.866318 +step:2866 train loss:3.882874 +step:2867 train loss:3.822968 +step:2868 train loss:3.815684 +step:2869 train loss:3.778778 +step:2870 train loss:3.860678 +step:2871 train loss:3.864648 +step:2872 train loss:3.823930 +step:2873 train loss:3.833337 +step:2874 train loss:3.809709 +step:2875 train loss:3.760699 +step:2876 train loss:3.811022 +step:2877 train loss:3.790058 +step:2878 train loss:3.804640 +step:2879 train loss:3.774508 +step:2880 train loss:3.787090 +step:2881 train loss:3.781648 +step:2882 train loss:3.713933 +step:2883 train loss:3.797657 +step:2884 train loss:3.865498 +step:2885 train loss:3.763443 +step:2886 train loss:3.813233 +step:2887 train loss:3.834380 +step:2888 train loss:3.811696 +step:2889 train loss:3.792678 +step:2890 train loss:3.767297 +step:2891 train loss:3.807499 +step:2892 train loss:3.816571 +step:2893 train loss:3.799569 +step:2894 train loss:3.768393 +step:2895 train loss:3.816136 +step:2896 train loss:3.864575 +step:2897 train loss:3.845719 +step:2898 train loss:3.973003 +step:2899 train loss:3.735330 +step:2900 train loss:3.814278 +step:2901 train loss:3.758899 +step:2902 train loss:3.757454 +step:2903 train loss:3.772130 +step:2904 train loss:3.800217 +step:2905 train loss:3.861129 +step:2906 train loss:3.830575 +step:2907 train loss:4.003300 +step:2908 train loss:3.758242 +step:2909 train loss:3.835445 +step:2910 train loss:3.804557 +step:2911 train loss:3.832268 +step:2912 train loss:3.791970 +step:2913 train loss:3.821515 +step:2914 train loss:3.850306 +step:2915 train loss:3.846863 +step:2916 train loss:3.805541 +step:2917 train loss:3.839118 +step:2918 train loss:3.828475 +step:2919 train loss:3.771706 +step:2920 train loss:3.829123 +step:2921 train loss:3.781238 +step:2922 train loss:3.805691 +step:2923 train loss:3.872150 +step:2924 train loss:3.804815 +step:2925 train loss:3.759244 +step:2926 train loss:3.847243 +step:2927 train loss:3.759096 +step:2928 train loss:3.723967 +step:2929 train loss:3.741761 +step:2930 train loss:3.761237 +step:2931 train loss:3.918682 +step:2932 train loss:3.837328 +step:2933 train loss:3.804274 +step:2934 train loss:3.798489 +step:2935 train loss:3.817653 +step:2936 train loss:3.770240 +step:2937 train loss:3.786984 +step:2938 train loss:3.805408 +step:2939 train loss:3.877650 +step:2940 train loss:3.776028 +step:2941 train loss:3.810231 +step:2942 train loss:3.772563 +step:2943 train loss:4.047801 +step:2944 train loss:3.879568 +step:2945 train loss:3.837664 +step:2946 train loss:3.849113 +step:2947 train loss:3.806846 +step:2948 train loss:3.767925 +step:2949 train loss:3.859833 +step:2950 train loss:3.809373 +step:2951 train loss:3.707325 +step:2952 train loss:3.776117 +step:2953 train loss:3.692400 +step:2954 train loss:3.779282 +step:2955 train loss:3.855022 +step:2956 train loss:3.798670 +step:2957 train loss:3.798581 +step:2958 train loss:3.752347 +step:2959 train loss:3.776586 +step:2960 train loss:3.874139 +step:2961 train 
loss:3.729764 +step:2962 train loss:3.811326 +step:2963 train loss:3.799612 +step:2964 train loss:3.778684 +step:2965 train loss:3.809003 +step:2966 train loss:3.781903 +step:2967 train loss:3.780037 +step:2968 train loss:3.757646 +step:2969 train loss:3.771829 +step:2970 train loss:3.854239 +step:2971 train loss:3.779469 +step:2972 train loss:3.761646 +step:2973 train loss:3.757992 +step:2974 train loss:3.799041 +step:2975 train loss:3.758337 +step:2976 train loss:3.796053 +step:2977 train loss:3.788413 +step:2978 train loss:3.869492 +step:2979 train loss:3.846637 +step:2980 train loss:3.851141 +step:2981 train loss:3.811337 +step:2982 train loss:3.799976 +step:2983 train loss:3.753529 +step:2984 train loss:3.722485 +step:2985 train loss:3.835917 +step:2986 train loss:3.731390 +step:2987 train loss:3.860192 +step:2988 train loss:3.781982 +step:2989 train loss:3.815322 +step:2990 train loss:3.765446 +step:2991 train loss:3.835417 +step:2992 train loss:3.828880 +step:2993 train loss:3.795151 +step:2994 train loss:3.781051 +step:2995 train loss:3.851048 +step:2996 train loss:3.774843 +step:2997 train loss:3.687873 +step:2998 train loss:3.800871 +step:2999 train loss:3.836823 +step:3000 validation loss:3.724199 total_sharp:3.1827e-02 L1_sharp:3.9396e-02 L2_sharp:1.8835e-02 L3_sharp:1.2563e-02 L4_sharp:4.6634e-03 L5_sharp:6.4557e-03 L6_sharp:8.0785e-03 L7_sharp:9.9437e-03 L8_sharp:6.6847e-03 L9_sharp:4.5790e-03 L10_sharp:3.8772e-03 L11_sharp:3.7198e-03 L12_sharp:1.2487e-02 total_fnorm:1.0702e+00 total_l1_linf:9.4790e+03 total_spectral:1.0702e+00 L1_fnorm:2.1721e-01 L2_fnorm:2.0266e-01 L3_fnorm:2.1637e-01 L4_fnorm:2.3903e-01 L5_fnorm:2.4025e-01 L6_fnorm:2.4739e-01 L7_fnorm:2.4698e-01 L8_fnorm:2.5342e-01 L9_fnorm:2.6090e-01 L10_fnorm:2.6556e-01 L11_fnorm:2.6379e-01 L12_fnorm:2.5650e-01 L1_l1linf:2.9339e-01 L2_l1linf:2.8373e-01 L3_l1linf:3.0374e-01 L4_l1linf:3.0542e-01 L5_l1linf:3.1855e-01 L6_l1linf:2.8034e-01 L7_l1linf:2.7564e-01 L8_l1linf:2.8107e-01 L9_l1linf:2.8385e-01 L10_l1linf:3.0101e-01 L11_l1linf:2.9751e-01 L12_l1linf:2.9914e-01 L1_spectral:4.2564e-02 L2_spectral:4.4811e-02 L3_spectral:4.6311e-02 L4_spectral:4.5537e-02 L5_spectral:4.1338e-02 L6_spectral:3.4263e-02 L7_spectral:3.4311e-02 L8_spectral:3.4544e-02 L9_spectral:3.7179e-02 L10_spectral:3.8512e-02 L11_spectral:3.9622e-02 L12_spectral:4.8412e-02 ip_v_neg_g:2.5772e-02 cos_v_neg_g:2.3669e-02 v_norm:1.0702e+00 g_norm:1.0174e+00 hv_norm:4.1310e-01 cos_v_hv:8.2450e-02 hg_norm:8.8621e+00 cos_g_hg:1.9307e-01 v_par:6.9965e-04 v_perp:1.0702e+00 L1_cos_v_neg_g:4.1369e-02 L1_v_norm:2.1721e-01 L2_cos_v_neg_g:4.9508e-02 L2_v_norm:2.0266e-01 L3_cos_v_neg_g:4.4623e-02 L3_v_norm:2.1637e-01 L4_cos_v_neg_g:3.1218e-02 L4_v_norm:2.3903e-01 L5_cos_v_neg_g:3.0716e-02 L5_v_norm:2.4025e-01 L6_cos_v_neg_g:2.6992e-02 L6_v_norm:2.4739e-01 L7_cos_v_neg_g:3.2461e-02 L7_v_norm:2.4698e-01 L8_cos_v_neg_g:2.9054e-02 L8_v_norm:2.5342e-01 L9_cos_v_neg_g:2.5985e-02 L9_v_norm:2.6090e-01 L10_cos_v_neg_g:2.6966e-02 L10_v_norm:2.6556e-01 L11_cos_v_neg_g:2.5584e-02 L11_v_norm:2.6379e-01 L12_cos_v_neg_g:3.2011e-02 L12_v_norm:2.5650e-01 +step:3000 train loss:3.735289 +step:3001 train loss:3.788594 +step:3002 train loss:3.787051 +step:3003 train loss:3.783655 +step:3004 train loss:3.818237 +step:3005 train loss:3.709985 +step:3006 train loss:3.763385 +step:3007 train loss:3.794816 +step:3008 train loss:3.834363 +step:3009 train loss:3.794134 +step:3010 train loss:3.806684 +step:3011 train loss:3.800260 +step:3012 train loss:3.778536 +step:3013 train loss:3.817107 +step:3014 
train loss:3.772993 +step:3015 train loss:3.770177 +step:3016 train loss:3.794886 +step:3017 train loss:3.813286 +step:3018 train loss:3.742422 +step:3019 train loss:3.782535 +step:3020 train loss:3.801502 +step:3021 train loss:3.767667 +step:3022 train loss:3.856827 +step:3023 train loss:3.804234 +step:3024 train loss:3.790212 +step:3025 train loss:3.797302 +step:3026 train loss:3.777610 +step:3027 train loss:3.751449 +step:3028 train loss:3.803111 +step:3029 train loss:3.791132 +step:3030 train loss:3.765379 +step:3031 train loss:3.746998 +step:3032 train loss:3.738308 +step:3033 train loss:3.766645 +step:3034 train loss:3.810994 +step:3035 train loss:3.790605 +step:3036 train loss:3.748165 +step:3037 train loss:3.709608 +step:3038 train loss:3.827802 +step:3039 train loss:3.708070 +step:3040 train loss:3.695136 +step:3041 train loss:3.824053 +step:3042 train loss:3.756920 +step:3043 train loss:3.814683 +step:3044 train loss:3.712538 +step:3045 train loss:3.757966 +step:3046 train loss:3.727865 +step:3047 train loss:3.762831 +step:3048 train loss:3.726951 +step:3049 train loss:3.805954 +step:3050 train loss:3.690255 +step:3051 train loss:3.710772 +step:3052 train loss:3.728080 +step:3053 train loss:3.797272 +step:3054 train loss:3.870355 +step:3055 train loss:3.710727 +step:3056 train loss:3.743942 +step:3057 train loss:3.775500 +step:3058 train loss:3.725568 +step:3059 train loss:3.754907 +step:3060 train loss:3.753318 +step:3061 train loss:3.738840 +step:3062 train loss:3.790247 +step:3063 train loss:3.773664 +step:3064 train loss:3.797977 +step:3065 train loss:3.813199 +step:3066 train loss:3.717122 +step:3067 train loss:3.763572 +step:3068 train loss:3.813022 +step:3069 train loss:3.828657 +step:3070 train loss:3.760948 +step:3071 train loss:3.782270 +step:3072 train loss:3.782855 +step:3073 train loss:3.818691 +step:3074 train loss:3.753923 +step:3075 train loss:3.785724 +step:3076 train loss:3.719287 +step:3077 train loss:3.721249 +step:3078 train loss:3.747540 +step:3079 train loss:3.796041 +step:3080 train loss:3.790493 +step:3081 train loss:3.837220 +step:3082 train loss:3.805012 +step:3083 train loss:3.738425 +step:3084 train loss:3.817775 +step:3085 train loss:3.748236 +step:3086 train loss:3.807067 +step:3087 train loss:3.772823 +step:3088 train loss:3.854224 +step:3089 train loss:3.732280 +step:3090 train loss:3.804618 +step:3091 train loss:3.726765 +step:3092 train loss:3.752801 +step:3093 train loss:3.774640 +step:3094 train loss:3.760885 +step:3095 train loss:3.838637 +step:3096 train loss:3.773425 +step:3097 train loss:3.780761 +step:3098 train loss:3.756649 +step:3099 train loss:3.769325 +step:3100 train loss:3.791803 +step:3101 train loss:3.875958 +step:3102 train loss:3.801753 +step:3103 train loss:3.726908 +step:3104 train loss:3.811037 +step:3105 train loss:3.783489 +step:3106 train loss:3.777282 +step:3107 train loss:3.761691 +step:3108 train loss:3.737010 +step:3109 train loss:3.788377 +step:3110 train loss:3.721707 +step:3111 train loss:3.753996 +step:3112 train loss:3.692933 +step:3113 train loss:3.811520 +step:3114 train loss:3.724886 +step:3115 train loss:3.766313 +step:3116 train loss:3.645095 +step:3117 train loss:3.669566 +step:3118 train loss:3.765316 +step:3119 train loss:3.780007 +step:3120 train loss:3.775421 +step:3121 train loss:3.723563 +step:3122 train loss:3.806444 +step:3123 train loss:3.721085 +step:3124 train loss:3.783585 +step:3125 train loss:3.796417 +step:3126 train loss:3.895769 +step:3127 train loss:3.748519 +step:3128 train loss:3.774270 
+step:3129 train loss:3.756929 +step:3130 train loss:3.739844 +step:3131 train loss:3.810595 +step:3132 train loss:3.798552 +step:3133 train loss:3.767486 +step:3134 train loss:3.665050 +step:3135 train loss:3.758059 +step:3136 train loss:3.728306 +step:3137 train loss:3.862508 +step:3138 train loss:3.763338 +step:3139 train loss:3.741927 +step:3140 train loss:3.761596 +step:3141 train loss:3.767782 +step:3142 train loss:3.703483 +step:3143 train loss:3.787657 +step:3144 train loss:3.734332 +step:3145 train loss:3.722686 +step:3146 train loss:3.731785 +step:3147 train loss:3.845044 +step:3148 train loss:3.747635 +step:3149 train loss:3.804135 +step:3150 train loss:3.789525 +step:3151 train loss:3.762133 +step:3152 train loss:3.757041 +step:3153 train loss:3.717519 +step:3154 train loss:3.798387 +step:3155 train loss:3.747416 +step:3156 train loss:3.794745 +step:3157 train loss:3.800156 +step:3158 train loss:3.769714 +step:3159 train loss:3.710704 +step:3160 train loss:3.756610 +step:3161 train loss:3.723926 +step:3162 train loss:3.783260 +step:3163 train loss:3.765779 +step:3164 train loss:3.746486 +step:3165 train loss:3.758107 +step:3166 train loss:3.799051 +step:3167 train loss:3.760533 +step:3168 train loss:3.837331 +step:3169 train loss:3.750364 +step:3170 train loss:3.736636 +step:3171 train loss:3.721380 +step:3172 train loss:3.728506 +step:3173 train loss:3.671313 +step:3174 train loss:3.790857 +step:3175 train loss:3.754682 +step:3176 train loss:3.770531 +step:3177 train loss:3.731529 +step:3178 train loss:3.716691 +step:3179 train loss:3.787540 +step:3180 train loss:3.719960 +step:3181 train loss:3.798135 +step:3182 train loss:3.802465 +step:3183 train loss:3.745467 +step:3184 train loss:3.744509 +step:3185 train loss:3.803992 +step:3186 train loss:3.762101 +step:3187 train loss:3.781435 +step:3188 train loss:3.822985 +step:3189 train loss:3.772331 +step:3190 train loss:3.724010 +step:3191 train loss:3.732277 +step:3192 train loss:3.692826 +step:3193 train loss:3.772148 +step:3194 train loss:3.734243 +step:3195 train loss:3.721933 +step:3196 train loss:3.769320 +step:3197 train loss:3.734455 +step:3198 train loss:3.757726 +step:3199 train loss:3.747923 +step:3200 train loss:3.754240 +step:3201 train loss:3.719107 +step:3202 train loss:3.776187 +step:3203 train loss:3.840838 +step:3204 train loss:3.805789 +step:3205 train loss:3.658034 +step:3206 train loss:3.935119 +step:3207 train loss:3.698014 +step:3208 train loss:3.759892 +step:3209 train loss:3.751932 +step:3210 train loss:3.734567 +step:3211 train loss:3.759402 +step:3212 train loss:3.773666 +step:3213 train loss:3.708797 +step:3214 train loss:3.820329 +step:3215 train loss:3.820387 +step:3216 train loss:3.692662 +step:3217 train loss:3.767150 +step:3218 train loss:3.813061 +step:3219 train loss:3.726578 +step:3220 train loss:3.796766 +step:3221 train loss:3.709655 +step:3222 train loss:3.753520 +step:3223 train loss:3.767249 +step:3224 train loss:3.780722 +step:3225 train loss:3.701469 +step:3226 train loss:3.735168 +step:3227 train loss:3.765265 +step:3228 train loss:3.759567 +step:3229 train loss:3.795459 +step:3230 train loss:3.805979 +step:3231 train loss:3.744641 +step:3232 train loss:3.757686 +step:3233 train loss:3.725633 +step:3234 train loss:3.716707 +step:3235 train loss:3.718726 +step:3236 train loss:3.742786 +step:3237 train loss:3.744645 +step:3238 train loss:3.768531 +step:3239 train loss:3.661067 +step:3240 train loss:3.775855 +step:3241 train loss:3.769976 +step:3242 train loss:3.826076 +step:3243 train 
loss:3.770571 +step:3244 train loss:3.783912 +step:3245 train loss:3.685758 +step:3246 train loss:3.812045 +step:3247 train loss:3.750976 +step:3248 train loss:3.774704 +step:3249 train loss:3.720213 +step:3250 validation loss:3.687448 +step:3250 train loss:3.722526 +step:3251 train loss:3.831876 +step:3252 train loss:3.760816 +step:3253 train loss:3.761883 +step:3254 train loss:3.830930 +step:3255 train loss:3.768738 +step:3256 train loss:3.766202 +step:3257 train loss:3.745491 +step:3258 train loss:3.677114 +step:3259 train loss:3.657728 +step:3260 train loss:3.774285 +step:3261 train loss:3.764091 +step:3262 train loss:3.744479 +step:3263 train loss:3.732474 +step:3264 train loss:3.842283 +step:3265 train loss:3.750660 +step:3266 train loss:3.776422 +step:3267 train loss:3.743399 +step:3268 train loss:3.742207 +step:3269 train loss:3.755310 +step:3270 train loss:3.783387 +step:3271 train loss:3.747478 +step:3272 train loss:3.723352 +step:3273 train loss:3.742232 +step:3274 train loss:3.868501 +step:3275 train loss:3.742666 +step:3276 train loss:3.806869 +step:3277 train loss:3.746272 +step:3278 train loss:3.723713 +step:3279 train loss:3.752432 +step:3280 train loss:3.776117 +step:3281 train loss:3.702484 +step:3282 train loss:3.771860 +step:3283 train loss:3.743534 +step:3284 train loss:3.704222 +step:3285 train loss:3.723646 +step:3286 train loss:3.754335 +step:3287 train loss:3.689267 +step:3288 train loss:3.770648 +step:3289 train loss:3.715287 +step:3290 train loss:3.750449 +step:3291 train loss:3.712717 +step:3292 train loss:3.730308 +step:3293 train loss:3.771714 +step:3294 train loss:3.784337 +step:3295 train loss:3.699370 +step:3296 train loss:3.754582 +step:3297 train loss:3.713308 +step:3298 train loss:3.713877 +step:3299 train loss:3.843019 +step:3300 train loss:3.688654 +step:3301 train loss:3.771070 +step:3302 train loss:3.737310 +step:3303 train loss:3.781177 +step:3304 train loss:3.754845 +step:3305 train loss:3.840592 +step:3306 train loss:3.765191 +step:3307 train loss:3.786827 +step:3308 train loss:3.738701 +step:3309 train loss:3.798220 +step:3310 train loss:3.714749 +step:3311 train loss:3.763414 +step:3312 train loss:3.728932 +step:3313 train loss:3.761205 +step:3314 train loss:3.756777 +step:3315 train loss:3.832844 +step:3316 train loss:3.685661 +step:3317 train loss:3.776739 +step:3318 train loss:3.784108 +step:3319 train loss:3.713095 +step:3320 train loss:3.867615 +step:3321 train loss:3.773278 +step:3322 train loss:3.772034 +step:3323 train loss:3.874024 +step:3324 train loss:3.793617 +step:3325 train loss:3.769540 +step:3326 train loss:3.756891 +step:3327 train loss:3.773316 +step:3328 train loss:3.748509 +step:3329 train loss:3.748012 +step:3330 train loss:3.741245 +step:3331 train loss:3.783718 +step:3332 train loss:3.808426 +step:3333 train loss:3.771124 +step:3334 train loss:3.705880 +step:3335 train loss:3.714813 +step:3336 train loss:3.753109 +step:3337 train loss:3.748830 +step:3338 train loss:3.737905 +step:3339 train loss:3.732551 +step:3340 train loss:3.770262 +step:3341 train loss:3.723584 +step:3342 train loss:3.766663 +step:3343 train loss:3.704049 +step:3344 train loss:3.757982 +step:3345 train loss:3.714645 +step:3346 train loss:3.726969 +step:3347 train loss:3.733744 +step:3348 train loss:3.756280 +step:3349 train loss:3.744346 +step:3350 train loss:3.769083 +step:3351 train loss:3.824257 +step:3352 train loss:3.762955 +step:3353 train loss:3.862025 +step:3354 train loss:3.704502 +step:3355 train loss:3.814304 +step:3356 train loss:3.762330 
+step:3357 train loss:3.772538 +step:3358 train loss:3.712132 +step:3359 train loss:3.742946 +step:3360 train loss:3.737263 +step:3361 train loss:3.738348 +step:3362 train loss:3.729917 +step:3363 train loss:3.726175 +step:3364 train loss:3.711077 +step:3365 train loss:3.747373 +step:3366 train loss:3.779628 +step:3367 train loss:3.733712 +step:3368 train loss:3.831142 +step:3369 train loss:3.739238 +step:3370 train loss:3.830352 +step:3371 train loss:3.793636 +step:3372 train loss:3.761055 +step:3373 train loss:3.766711 +step:3374 train loss:3.818074 +step:3375 train loss:3.751341 +step:3376 train loss:3.760981 +step:3377 train loss:3.740392 +step:3378 train loss:3.715777 +step:3379 train loss:3.797694 +step:3380 train loss:3.774199 +step:3381 train loss:3.759665 +step:3382 train loss:3.772771 +step:3383 train loss:3.787910 +step:3384 train loss:3.717618 +step:3385 train loss:3.763991 +step:3386 train loss:3.741789 +step:3387 train loss:3.814158 +step:3388 train loss:3.718996 +step:3389 train loss:3.914796 +step:3390 train loss:3.678737 +step:3391 train loss:3.764339 +step:3392 train loss:3.751040 +step:3393 train loss:3.780196 +step:3394 train loss:3.727519 +step:3395 train loss:3.802551 +step:3396 train loss:3.709576 +step:3397 train loss:3.785224 +step:3398 train loss:3.748041 +step:3399 train loss:3.763940 +step:3400 train loss:3.712099 +step:3401 train loss:3.748585 +step:3402 train loss:3.903728 +step:3403 train loss:3.790593 +step:3404 train loss:3.907990 +step:3405 train loss:3.763140 +step:3406 train loss:3.740055 +step:3407 train loss:3.737849 +step:3408 train loss:3.719918 +step:3409 train loss:3.684153 +step:3410 train loss:3.715458 +step:3411 train loss:3.783350 +step:3412 train loss:3.707746 +step:3413 train loss:3.698146 +step:3414 train loss:3.736492 +step:3415 train loss:3.710330 +step:3416 train loss:3.712008 +step:3417 train loss:3.795445 +step:3418 train loss:3.794374 +step:3419 train loss:3.752609 +step:3420 train loss:3.725718 +step:3421 train loss:3.758956 +step:3422 train loss:3.776557 +step:3423 train loss:3.795811 +step:3424 train loss:3.676042 +step:3425 train loss:3.697207 +step:3426 train loss:3.693482 +step:3427 train loss:3.754185 +step:3428 train loss:3.678906 +step:3429 train loss:3.741843 +step:3430 train loss:3.709614 +step:3431 train loss:3.763210 +step:3432 train loss:3.745920 +step:3433 train loss:3.708766 +step:3434 train loss:3.796865 +step:3435 train loss:3.735399 +step:3436 train loss:3.825758 +step:3437 train loss:3.653667 +step:3438 train loss:3.759122 +step:3439 train loss:3.734185 +step:3440 train loss:3.826460 +step:3441 train loss:3.722689 +step:3442 train loss:3.789064 +step:3443 train loss:3.723165 +step:3444 train loss:3.741011 +step:3445 train loss:3.789206 +step:3446 train loss:3.694436 +step:3447 train loss:3.769093 +step:3448 train loss:3.723314 +step:3449 train loss:3.754497 +step:3450 train loss:3.666987 +step:3451 train loss:3.785989 +step:3452 train loss:3.733441 +step:3453 train loss:3.785863 +step:3454 train loss:3.811025 +step:3455 train loss:3.871370 +step:3456 train loss:3.814861 +step:3457 train loss:3.807989 +step:3458 train loss:3.732112 +step:3459 train loss:3.745293 +step:3460 train loss:3.690124 +step:3461 train loss:3.751939 +step:3462 train loss:3.750849 +step:3463 train loss:3.723651 +step:3464 train loss:3.769204 +step:3465 train loss:3.703923 +step:3466 train loss:3.770063 +step:3467 train loss:3.725517 +step:3468 train loss:3.740392 +step:3469 train loss:3.754162 +step:3470 train loss:3.734898 +step:3471 train 
loss:3.774642 +step:3472 train loss:3.661551 +step:3473 train loss:3.780295 +step:3474 train loss:3.679931 +step:3475 train loss:3.759068 +step:3476 train loss:3.730706 +step:3477 train loss:3.751251 +step:3478 train loss:3.727081 +step:3479 train loss:3.753137 +step:3480 train loss:3.769240 +step:3481 train loss:3.752255 +step:3482 train loss:3.734932 +step:3483 train loss:3.877747 +step:3484 train loss:3.720228 +step:3485 train loss:3.707049 +step:3486 train loss:3.756095 +step:3487 train loss:3.805095 +step:3488 train loss:3.705513 +step:3489 train loss:3.758516 +step:3490 train loss:3.726565 +step:3491 train loss:3.760845 +step:3492 train loss:3.799205 +step:3493 train loss:3.769312 +step:3494 train loss:3.764167 +step:3495 train loss:3.739725 +step:3496 train loss:3.707705 +step:3497 train loss:3.821064 +step:3498 train loss:3.764175 +step:3499 train loss:3.699214 +step:3500 validation loss:3.666644 total_sharp:2.7906e-02 L1_sharp:3.2263e-02 L2_sharp:1.6676e-02 L3_sharp:1.4665e-02 L4_sharp:5.1501e-03 L5_sharp:7.4282e-03 L6_sharp:6.6498e-03 L7_sharp:7.2953e-03 L8_sharp:5.6371e-03 L9_sharp:4.5414e-03 L10_sharp:3.1951e-03 L11_sharp:4.0713e-03 L12_sharp:1.0088e-02 total_fnorm:1.0655e+00 total_l1_linf:9.4437e+03 total_spectral:1.0655e+00 L1_fnorm:2.1759e-01 L2_fnorm:2.1050e-01 L3_fnorm:2.2600e-01 L4_fnorm:2.4317e-01 L5_fnorm:2.4251e-01 L6_fnorm:2.4690e-01 L7_fnorm:2.4381e-01 L8_fnorm:2.4642e-01 L9_fnorm:2.5282e-01 L10_fnorm:2.5455e-01 L11_fnorm:2.5521e-01 L12_fnorm:2.4603e-01 L1_l1linf:2.9740e-01 L2_l1linf:3.1477e-01 L3_l1linf:3.1640e-01 L4_l1linf:3.1660e-01 L5_l1linf:2.7649e-01 L6_l1linf:2.6296e-01 L7_l1linf:2.5736e-01 L8_l1linf:2.6881e-01 L9_l1linf:2.7026e-01 L10_l1linf:2.7059e-01 L11_l1linf:2.7336e-01 L12_l1linf:2.6646e-01 L1_spectral:4.3456e-02 L2_spectral:4.6062e-02 L3_spectral:4.7050e-02 L4_spectral:4.0645e-02 L5_spectral:3.8295e-02 L6_spectral:3.2237e-02 L7_spectral:3.0946e-02 L8_spectral:3.1775e-02 L9_spectral:3.2759e-02 L10_spectral:3.2595e-02 L11_spectral:3.3440e-02 L12_spectral:4.7454e-02 ip_v_neg_g:1.8942e-02 cos_v_neg_g:1.6527e-02 v_norm:1.0655e+00 g_norm:1.0757e+00 hv_norm:4.2697e-01 cos_v_hv:6.9640e-02 hg_norm:4.9174e+00 cos_g_hg:4.9551e-01 v_par:5.3220e-04 v_perp:1.0655e+00 L1_cos_v_neg_g:2.8210e-02 L1_v_norm:2.1759e-01 L2_cos_v_neg_g:3.8187e-02 L2_v_norm:2.1050e-01 L3_cos_v_neg_g:3.3214e-02 L3_v_norm:2.2600e-01 L4_cos_v_neg_g:2.3841e-02 L4_v_norm:2.4317e-01 L5_cos_v_neg_g:2.2540e-02 L5_v_norm:2.4251e-01 L6_cos_v_neg_g:2.0731e-02 L6_v_norm:2.4690e-01 L7_cos_v_neg_g:1.9976e-02 L7_v_norm:2.4381e-01 L8_cos_v_neg_g:2.0258e-02 L8_v_norm:2.4642e-01 L9_cos_v_neg_g:2.0491e-02 L9_v_norm:2.5282e-01 L10_cos_v_neg_g:1.6078e-02 L10_v_norm:2.5455e-01 L11_cos_v_neg_g:1.3996e-02 L11_v_norm:2.5521e-01 L12_cos_v_neg_g:1.8815e-02 L12_v_norm:2.4603e-01 +step:3500 train loss:3.716858 +step:3501 train loss:3.845452 +step:3502 train loss:3.823778 +step:3503 train loss:3.776152 +step:3504 train loss:3.725124 +step:3505 train loss:3.741096 +step:3506 train loss:3.638829 +step:3507 train loss:3.757710 +step:3508 train loss:3.703416 +step:3509 train loss:3.771053 +step:3510 train loss:3.704713 +step:3511 train loss:3.735919 +step:3512 train loss:3.878037 +step:3513 train loss:3.696943 +step:3514 train loss:3.712747 +step:3515 train loss:3.963567 +step:3516 train loss:3.759804 +step:3517 train loss:3.722334 +step:3518 train loss:3.726160 +step:3519 train loss:3.715749 +step:3520 train loss:3.750507 +step:3521 train loss:3.738391 +step:3522 train loss:3.647255 +step:3523 train loss:3.748998 +step:3524 
train loss:3.733325 +step:3525 train loss:3.723195 +step:3526 train loss:3.746354 +step:3527 train loss:3.695175 +step:3528 train loss:3.745979 +step:3529 train loss:3.722833 +step:3530 train loss:3.717125 +step:3531 train loss:3.712464 +step:3532 train loss:3.896104 +step:3533 train loss:3.722092 +step:3534 train loss:3.737815 +step:3535 train loss:3.709431 +step:3536 train loss:3.709784 +step:3537 train loss:3.716764 +step:3538 train loss:3.751186 +step:3539 train loss:3.698123 +step:3540 train loss:3.761860 +step:3541 train loss:3.730813 +step:3542 train loss:3.741798 +step:3543 train loss:3.664995 +step:3544 train loss:3.686059 +step:3545 train loss:3.684759 +step:3546 train loss:3.748895 +step:3547 train loss:3.760529 +step:3548 train loss:3.732583 +step:3549 train loss:3.729889 +step:3550 train loss:3.716671 +step:3551 train loss:3.747501 +step:3552 train loss:3.648717 +step:3553 train loss:3.763948 +step:3554 train loss:3.758006 +step:3555 train loss:3.742796 +step:3556 train loss:3.770064 +step:3557 train loss:3.751251 +step:3558 train loss:3.725468 +step:3559 train loss:3.675212 +step:3560 train loss:3.766424 +step:3561 train loss:3.759987 +step:3562 train loss:3.935442 +step:3563 train loss:3.796089 +step:3564 train loss:3.760192 +step:3565 train loss:3.758662 +step:3566 train loss:3.729685 +step:3567 train loss:3.670052 +step:3568 train loss:3.693746 +step:3569 train loss:3.781385 +step:3570 train loss:3.803794 +step:3571 train loss:3.781733 +step:3572 train loss:3.770365 +step:3573 train loss:3.728003 +step:3574 train loss:3.726416 +step:3575 train loss:3.717022 +step:3576 train loss:3.701125 +step:3577 train loss:3.707358 +step:3578 train loss:3.792608 +step:3579 train loss:3.700832 +step:3580 train loss:3.781225 +step:3581 train loss:3.721728 +step:3582 train loss:3.772830 +step:3583 train loss:3.718071 +step:3584 train loss:3.688519 +step:3585 train loss:3.741368 +step:3586 train loss:3.687572 +step:3587 train loss:3.784497 +step:3588 train loss:3.914158 +step:3589 train loss:3.744622 +step:3590 train loss:3.732524 +step:3591 train loss:3.737646 +step:3592 train loss:3.700941 +step:3593 train loss:3.672423 +step:3594 train loss:3.724437 +step:3595 train loss:3.701330 +step:3596 train loss:3.782321 +step:3597 train loss:3.751204 +step:3598 train loss:3.705964 +step:3599 train loss:3.755174 +step:3600 train loss:3.698040 +step:3601 train loss:3.709810 +step:3602 train loss:3.701000 +step:3603 train loss:3.717282 +step:3604 train loss:3.740766 +step:3605 train loss:3.847152 +step:3606 train loss:3.748307 +step:3607 train loss:3.730647 +step:3608 train loss:3.746184 +step:3609 train loss:3.729152 +step:3610 train loss:3.698135 +step:3611 train loss:3.701616 +step:3612 train loss:3.770918 +step:3613 train loss:3.742759 +step:3614 train loss:3.691406 +step:3615 train loss:3.723437 +step:3616 train loss:3.735598 +step:3617 train loss:3.773522 +step:3618 train loss:3.726542 +step:3619 train loss:3.717353 +step:3620 train loss:3.739418 +step:3621 train loss:3.692401 +step:3622 train loss:3.796834 +step:3623 train loss:3.787547 +step:3624 train loss:3.754814 +step:3625 train loss:3.737932 +step:3626 train loss:3.738435 +step:3627 train loss:3.737624 +step:3628 train loss:3.722891 +step:3629 train loss:3.726077 +step:3630 train loss:3.812716 +step:3631 train loss:3.730069 +step:3632 train loss:3.763542 +step:3633 train loss:3.722249 +step:3634 train loss:3.723759 +step:3635 train loss:3.711255 +step:3636 train loss:3.781654 +step:3637 train loss:3.860916 +step:3638 train loss:3.770828 
+step:3639 train loss:3.761111 +step:3640 train loss:3.770643 +step:3641 train loss:3.805894 +step:3642 train loss:3.700077 +step:3643 train loss:3.874831 +step:3644 train loss:3.762288 +step:3645 train loss:3.732653 +step:3646 train loss:3.858740 +step:3647 train loss:3.747771 +step:3648 train loss:3.736979 +step:3649 train loss:3.686635 +step:3650 train loss:3.728742 +step:3651 train loss:3.723958 +step:3652 train loss:3.712031 +step:3653 train loss:3.639998 +step:3654 train loss:3.709026 +step:3655 train loss:3.698960 +step:3656 train loss:3.729455 +step:3657 train loss:3.749170 +step:3658 train loss:3.743512 +step:3659 train loss:3.724269 +step:3660 train loss:3.697003 +step:3661 train loss:3.728131 +step:3662 train loss:3.702523 +step:3663 train loss:3.739516 +step:3664 train loss:3.690385 +step:3665 train loss:3.735644 +step:3666 train loss:3.773064 +step:3667 train loss:3.861813 +step:3668 train loss:3.745723 +step:3669 train loss:3.701885 +step:3670 train loss:3.750780 +step:3671 train loss:3.707378 +step:3672 train loss:3.744658 +step:3673 train loss:3.728196 +step:3674 train loss:3.743836 +step:3675 train loss:3.757572 +step:3676 train loss:3.720613 +step:3677 train loss:3.682001 +step:3678 train loss:3.746166 +step:3679 train loss:3.641846 +step:3680 train loss:3.744355 +step:3681 train loss:3.778916 +step:3682 train loss:3.758405 +step:3683 train loss:3.704300 +step:3684 train loss:3.700212 +step:3685 train loss:3.729786 +step:3686 train loss:3.758655 +step:3687 train loss:3.709606 +step:3688 train loss:3.684766 +step:3689 train loss:3.721294 +step:3690 train loss:3.711339 +step:3691 train loss:3.689845 +step:3692 train loss:3.747630 +step:3693 train loss:3.882111 +step:3694 train loss:3.697570 +step:3695 train loss:3.758416 +step:3696 train loss:3.718017 +step:3697 train loss:3.710346 +step:3698 train loss:3.649569 +step:3699 train loss:3.675464 +step:3700 train loss:3.707501 +step:3701 train loss:3.728276 +step:3702 train loss:3.747020 +step:3703 train loss:3.707021 +step:3704 train loss:3.750281 +step:3705 train loss:3.730724 +step:3706 train loss:3.679259 +step:3707 train loss:3.733458 +step:3708 train loss:3.711767 +step:3709 train loss:3.631578 +step:3710 train loss:3.757220 +step:3711 train loss:3.707397 +step:3712 train loss:3.745645 +step:3713 train loss:3.702676 +step:3714 train loss:3.717571 +step:3715 train loss:3.839695 +step:3716 train loss:3.745474 +step:3717 train loss:3.718160 +step:3718 train loss:3.722461 +step:3719 train loss:3.718941 +step:3720 train loss:3.727396 +step:3721 train loss:3.782431 +step:3722 train loss:3.794986 +step:3723 train loss:3.684025 +step:3724 train loss:3.737909 +step:3725 train loss:3.714313 +step:3726 train loss:3.737453 +step:3727 train loss:3.805967 +step:3728 train loss:3.771956 +step:3729 train loss:3.673587 +step:3730 train loss:3.691724 +step:3731 train loss:3.712031 +step:3732 train loss:3.864607 +step:3733 train loss:3.725540 +step:3734 train loss:3.727570 +step:3735 train loss:3.666560 +step:3736 train loss:3.722983 +step:3737 train loss:3.773691 +step:3738 train loss:3.790643 +step:3739 train loss:3.711064 +step:3740 train loss:3.612808 +step:3741 train loss:3.822039 +step:3742 train loss:3.733869 +step:3743 train loss:3.708208 +step:3744 train loss:3.749977 +step:3745 train loss:3.727684 +step:3746 train loss:3.714099 +step:3747 train loss:3.712128 +step:3748 train loss:3.752584 +step:3749 train loss:3.737782 +step:3750 validation loss:3.652988 +step:3750 train loss:3.749843 +step:3751 train loss:3.837426 +step:3752 
train loss:3.768299 +step:3753 train loss:3.691175 +step:3754 train loss:3.740332 +step:3755 train loss:3.923809 +step:3756 train loss:3.699915 +step:3757 train loss:3.694849 +step:3758 train loss:3.720852 +step:3759 train loss:3.668824 +step:3760 train loss:3.665281 +step:3761 train loss:3.716927 +step:3762 train loss:3.707577 +step:3763 train loss:3.710130 +step:3764 train loss:3.702471 +step:3765 train loss:3.701282 +step:3766 train loss:3.666896 +step:3767 train loss:3.750769 +step:3768 train loss:3.693774 +step:3769 train loss:3.972685 +step:3770 train loss:3.747097 +step:3771 train loss:3.760910 +step:3772 train loss:3.714442 +step:3773 train loss:3.704961 +step:3774 train loss:3.714366 +step:3775 train loss:3.704657 +step:3776 train loss:3.708145 +step:3777 train loss:3.670717 +step:3778 train loss:3.683678 +step:3779 train loss:3.670725 +step:3780 train loss:3.750406 +step:3781 train loss:3.719329 +step:3782 train loss:3.636504 +step:3783 train loss:3.741287 +step:3784 train loss:3.750818 +step:3785 train loss:3.660452 +step:3786 train loss:3.770133 +step:3787 train loss:3.681740 +step:3788 train loss:3.692426 +step:3789 train loss:3.601031 +step:3790 train loss:3.722565 +step:3791 train loss:3.743351 +step:3792 train loss:3.714471 +step:3793 train loss:3.710029 +step:3794 train loss:3.738162 +step:3795 train loss:3.705297 +step:3796 train loss:3.724294 +step:3797 train loss:3.698019 +step:3798 train loss:3.705931 +step:3799 train loss:3.713881 +step:3800 train loss:3.625301 +step:3801 train loss:3.736767 +step:3802 train loss:3.666888 +step:3803 train loss:3.743537 +step:3804 train loss:3.758446 +step:3805 train loss:3.718003 +step:3806 train loss:3.736488 +step:3807 train loss:3.755755 +step:3808 train loss:3.707693 +step:3809 train loss:3.725331 +step:3810 train loss:3.725587 +step:3811 train loss:3.711986 +step:3812 train loss:3.714704 +step:3813 train loss:3.669854 +step:3814 train loss:3.711705 +step:3815 train loss:3.714590 +step:3816 train loss:3.731632 +step:3817 train loss:3.750070 +step:3818 train loss:3.723279 +step:3819 train loss:3.732484 +step:3820 train loss:3.733145 +step:3821 train loss:3.687857 +step:3822 train loss:3.780353 +step:3823 train loss:3.669079 +step:3824 train loss:3.681130 +step:3825 train loss:3.690057 +step:3826 train loss:3.780196 +step:3827 train loss:3.789227 +step:3828 train loss:3.682384 +step:3829 train loss:3.698729 +step:3830 train loss:3.758349 +step:3831 train loss:3.692227 +step:3832 train loss:3.751189 +step:3833 train loss:3.690411 +step:3834 train loss:3.655924 +step:3835 train loss:3.696257 +step:3836 train loss:3.671966 +step:3837 train loss:3.740222 +step:3838 train loss:3.694578 +step:3839 train loss:3.735210 +step:3840 train loss:3.747656 +step:3841 train loss:3.698082 +step:3842 train loss:3.724604 +step:3843 train loss:3.743796 +step:3844 train loss:3.712929 +step:3845 train loss:3.733205 +step:3846 train loss:3.775330 +step:3847 train loss:3.675779 +step:3848 train loss:3.680034 +step:3849 train loss:3.735528 +step:3850 train loss:3.723098 +step:3851 train loss:3.862199 +step:3852 train loss:3.838177 +step:3853 train loss:3.732700 +step:3854 train loss:3.707058 +step:3855 train loss:3.752358 +step:3856 train loss:3.674327 +step:3857 train loss:3.734539 +step:3858 train loss:3.651319 +step:3859 train loss:3.697881 +step:3860 train loss:3.771540 +step:3861 train loss:3.745711 +step:3862 train loss:3.675877 +step:3863 train loss:3.725398 +step:3864 train loss:3.695875 +step:3865 train loss:3.733348 +step:3866 train loss:3.751554 
+step:3867 train loss:3.746418 +step:3868 train loss:3.698261 +step:3869 train loss:3.697884 +step:3870 train loss:3.675465 +step:3871 train loss:3.674166 +step:3872 train loss:3.796914 +step:3873 train loss:3.721476 +step:3874 train loss:3.735600 +step:3875 train loss:3.843529 +step:3876 train loss:3.721298 +step:3877 train loss:3.742165 +step:3878 train loss:3.773173 +step:3879 train loss:3.759573 +step:3880 train loss:3.839030 +step:3881 train loss:3.661141 +step:3882 train loss:3.695435 +step:3883 train loss:3.705397 +step:3884 train loss:3.695102 +step:3885 train loss:3.713583 +step:3886 train loss:3.772824 +step:3887 train loss:3.756795 +step:3888 train loss:3.712175 +step:3889 train loss:3.683155 +step:3890 train loss:3.722202 +step:3891 train loss:3.739754 +step:3892 train loss:3.641003 +step:3893 train loss:3.752378 +step:3894 train loss:3.700647 +step:3895 train loss:3.720278 +step:3896 train loss:3.710367 +step:3897 train loss:3.677109 +step:3898 train loss:3.736969 +step:3899 train loss:3.778038 +step:3900 train loss:3.732286 +step:3901 train loss:3.752075 +step:3902 train loss:3.675762 +step:3903 train loss:3.690750 +step:3904 train loss:3.723662 +step:3905 train loss:3.660197 +step:3906 train loss:3.692645 +step:3907 train loss:3.727180 +step:3908 train loss:3.805665 +step:3909 train loss:3.694510 +step:3910 train loss:3.721195 +step:3911 train loss:3.734545 +step:3912 train loss:3.686206 +step:3913 train loss:3.703647 +step:3914 train loss:3.722891 +step:3915 train loss:3.689607 +step:3916 train loss:3.726327 +step:3917 train loss:3.769454 +step:3918 train loss:3.749193 +step:3919 train loss:3.726197 +step:3920 train loss:3.698097 +step:3921 train loss:3.741003 +step:3922 train loss:3.744737 +step:3923 train loss:3.736741 +step:3924 train loss:3.673649 +step:3925 train loss:3.872445 +step:3926 train loss:3.718629 +step:3927 train loss:3.701340 +step:3928 train loss:3.781530 +step:3929 train loss:3.856417 +step:3930 train loss:3.760310 +step:3931 train loss:3.693725 +step:3932 train loss:3.741817 +step:3933 train loss:3.755710 +step:3934 train loss:3.705091 +step:3935 train loss:3.679923 +step:3936 train loss:3.772897 +step:3937 train loss:3.729068 +step:3938 train loss:3.737356 +step:3939 train loss:3.762170 +step:3940 train loss:3.711182 +step:3941 train loss:3.792065 +step:3942 train loss:3.754152 +step:3943 train loss:3.734693 +step:3944 train loss:3.787294 +step:3945 train loss:3.694480 +step:3946 train loss:3.639304 +step:3947 train loss:3.766255 +step:3948 train loss:3.733356 +step:3949 train loss:3.897583 +step:3950 train loss:3.700290 +step:3951 train loss:3.645202 +step:3952 train loss:3.587539 +step:3953 train loss:3.666520 +step:3954 train loss:3.718565 +step:3955 train loss:3.744475 +step:3956 train loss:3.701494 +step:3957 train loss:3.754490 +step:3958 train loss:3.729254 +step:3959 train loss:3.765286 +step:3960 train loss:3.688909 +step:3961 train loss:3.715160 +step:3962 train loss:3.723575 +step:3963 train loss:3.696847 +step:3964 train loss:3.675210 +step:3965 train loss:3.736843 +step:3966 train loss:3.691983 +step:3967 train loss:3.732236 +step:3968 train loss:3.753403 +step:3969 train loss:3.661184 +step:3970 train loss:3.778188 +step:3971 train loss:3.688676 +step:3972 train loss:3.721070 +step:3973 train loss:3.677980 +step:3974 train loss:3.770298 +step:3975 train loss:3.728634 +step:3976 train loss:3.679592 +step:3977 train loss:3.735430 +step:3978 train loss:3.702342 +step:3979 train loss:3.686066 +step:3980 train loss:3.757464 +step:3981 train 
loss:3.691426 +step:3982 train loss:3.711180 +step:3983 train loss:3.692101 +step:3984 train loss:3.725266 +step:3985 train loss:3.701299 +step:3986 train loss:3.715150 +step:3987 train loss:3.724936 +step:3988 train loss:3.658835 +step:3989 train loss:3.732794 +step:3990 train loss:3.725393 +step:3991 train loss:3.737155 +step:3992 train loss:3.695586 +step:3993 train loss:3.730608 +step:3994 train loss:3.677361 +step:3995 train loss:3.732883 +step:3996 train loss:3.652251 +step:3997 train loss:3.725140 +step:3998 train loss:3.612464 +step:3999 train loss:3.770093 +step:4000 validation loss:3.636792 total_sharp:3.9234e-02 L1_sharp:7.9109e-02 L2_sharp:8.9600e-02 L3_sharp:2.0597e-02 L4_sharp:5.3482e-03 L5_sharp:7.3820e-03 L6_sharp:6.6505e-03 L7_sharp:8.6338e-03 L8_sharp:6.4961e-03 L9_sharp:4.1188e-03 L10_sharp:2.3021e-03 L11_sharp:2.6201e-03 L12_sharp:5.9609e-03 total_fnorm:1.0619e+00 total_l1_linf:9.4147e+03 total_spectral:1.0619e+00 L1_fnorm:2.1320e-01 L2_fnorm:2.1728e-01 L3_fnorm:2.1964e-01 L4_fnorm:2.3679e-01 L5_fnorm:2.3759e-01 L6_fnorm:2.4425e-01 L7_fnorm:2.4547e-01 L8_fnorm:2.4928e-01 L9_fnorm:2.5348e-01 L10_fnorm:2.5473e-01 L11_fnorm:2.5595e-01 L12_fnorm:2.4474e-01 L1_l1linf:3.3229e-01 L2_l1linf:3.0499e-01 L3_l1linf:2.8417e-01 L4_l1linf:2.7516e-01 L5_l1linf:3.0255e-01 L6_l1linf:2.7477e-01 L7_l1linf:2.6693e-01 L8_l1linf:2.6723e-01 L9_l1linf:2.6924e-01 L10_l1linf:2.7219e-01 L11_l1linf:2.7176e-01 L12_l1linf:2.6417e-01 L1_spectral:4.5871e-02 L2_spectral:4.9808e-02 L3_spectral:4.4933e-02 L4_spectral:4.0601e-02 L5_spectral:3.9887e-02 L6_spectral:3.0867e-02 L7_spectral:3.2348e-02 L8_spectral:3.2044e-02 L9_spectral:3.1592e-02 L10_spectral:3.1146e-02 L11_spectral:3.1765e-02 L12_spectral:4.7510e-02 ip_v_neg_g:2.9720e-02 cos_v_neg_g:2.7916e-02 v_norm:1.0619e+00 g_norm:1.0025e+00 hv_norm:6.0616e-01 cos_v_hv:6.8735e-02 hg_norm:4.1699e+00 cos_g_hg:4.0029e-01 v_par:9.8711e-04 v_perp:1.0619e+00 L1_cos_v_neg_g:6.9337e-02 L1_v_norm:2.1320e-01 L2_cos_v_neg_g:1.2655e-01 L2_v_norm:2.1728e-01 L3_cos_v_neg_g:5.9088e-02 L3_v_norm:2.1964e-01 L4_cos_v_neg_g:2.8526e-02 L4_v_norm:2.3679e-01 L5_cos_v_neg_g:2.9560e-02 L5_v_norm:2.3759e-01 L6_cos_v_neg_g:2.1322e-02 L6_v_norm:2.4425e-01 L7_cos_v_neg_g:2.2628e-02 L7_v_norm:2.4547e-01 L8_cos_v_neg_g:2.4512e-02 L8_v_norm:2.4928e-01 L9_cos_v_neg_g:1.9787e-02 L9_v_norm:2.5348e-01 L10_cos_v_neg_g:1.4271e-02 L10_v_norm:2.5473e-01 L11_cos_v_neg_g:1.3547e-02 L11_v_norm:2.5595e-01 L12_cos_v_neg_g:1.1850e-02 L12_v_norm:2.4474e-01 +step:4000 train loss:3.650260 +step:4001 train loss:3.723460 +step:4002 train loss:3.704585 +step:4003 train loss:3.737343 +step:4004 train loss:3.646409 +step:4005 train loss:3.739559 +step:4006 train loss:3.748386 +step:4007 train loss:3.670687 +step:4008 train loss:3.626994 +step:4009 train loss:3.710260 +step:4010 train loss:3.687706 +step:4011 train loss:3.692145 +step:4012 train loss:3.704473 +step:4013 train loss:3.680251 +step:4014 train loss:3.693836 +step:4015 train loss:3.687097 +step:4016 train loss:3.697057 +step:4017 train loss:3.660510 +step:4018 train loss:3.600163 +step:4019 train loss:3.654187 +step:4020 train loss:3.723212 +step:4021 train loss:3.668148 +step:4022 train loss:3.672026 +step:4023 train loss:3.689161 +step:4024 train loss:3.600121 +step:4025 train loss:3.722466 +step:4026 train loss:3.707931 +step:4027 train loss:3.722372 +step:4028 train loss:3.737305 +step:4029 train loss:3.768038 +step:4030 train loss:3.682524 +step:4031 train loss:3.725367 +step:4032 train loss:3.682595 +step:4033 train loss:3.713265 +step:4034 
train loss:3.733012 +step:4035 train loss:3.709801 +step:4036 train loss:3.702882 +step:4037 train loss:3.720849 +step:4038 train loss:3.642192 +step:4039 train loss:3.695542 +step:4040 train loss:3.675644 +step:4041 train loss:3.667894 +step:4042 train loss:3.691392 +step:4043 train loss:3.672562 +step:4044 train loss:3.710748 +step:4045 train loss:3.713837 +step:4046 train loss:3.670836 +step:4047 train loss:3.696844 +step:4048 train loss:3.709420 +step:4049 train loss:3.671402 +step:4050 train loss:3.776720 +step:4051 train loss:3.691635 +step:4052 train loss:3.707980 +step:4053 train loss:3.757198 +step:4054 train loss:3.732362 +step:4055 train loss:3.745229 +step:4056 train loss:3.742210 +step:4057 train loss:3.679503 +step:4058 train loss:3.662876 +step:4059 train loss:3.743716 +step:4060 train loss:3.684379 +step:4061 train loss:3.657392 +step:4062 train loss:3.764910 +step:4063 train loss:3.721723 +step:4064 train loss:3.687147 +step:4065 train loss:3.671969 +step:4066 train loss:3.700943 +step:4067 train loss:3.722043 +step:4068 train loss:3.690280 +step:4069 train loss:3.749108 +step:4070 train loss:3.666804 +step:4071 train loss:3.638816 +step:4072 train loss:3.711804 +step:4073 train loss:3.649220 +step:4074 train loss:3.702550 +step:4075 train loss:3.769127 +step:4076 train loss:3.620070 +step:4077 train loss:3.702446 +step:4078 train loss:3.805484 +step:4079 train loss:3.745756 +step:4080 train loss:3.690225 +step:4081 train loss:3.660125 +step:4082 train loss:3.711252 +step:4083 train loss:3.650405 +step:4084 train loss:3.668650 +step:4085 train loss:3.902097 +step:4086 train loss:3.671684 +step:4087 train loss:3.716623 +step:4088 train loss:3.701367 +step:4089 train loss:3.692792 +step:4090 train loss:3.709879 +step:4091 train loss:3.735970 +step:4092 train loss:3.656693 +step:4093 train loss:3.688686 +step:4094 train loss:3.706851 +step:4095 train loss:3.659455 +step:4096 train loss:3.694165 +step:4097 train loss:3.694690 +step:4098 train loss:3.668522 +step:4099 train loss:3.671747 +step:4100 train loss:3.724444 +step:4101 train loss:3.648103 +step:4102 train loss:3.679536 +step:4103 train loss:3.888878 +step:4104 train loss:3.699182 +step:4105 train loss:3.667217 +step:4106 train loss:3.741115 +step:4107 train loss:3.663066 +step:4108 train loss:3.669194 +step:4109 train loss:3.722014 +step:4110 train loss:3.731728 +step:4111 train loss:3.702583 +step:4112 train loss:3.722722 +step:4113 train loss:3.680832 +step:4114 train loss:3.630882 +step:4115 train loss:3.667622 +step:4116 train loss:3.652964 +step:4117 train loss:3.668957 +step:4118 train loss:3.724422 +step:4119 train loss:3.747375 +step:4120 train loss:3.670984 +step:4121 train loss:3.662441 +step:4122 train loss:3.725852 +step:4123 train loss:3.738576 +step:4124 train loss:3.716690 +step:4125 train loss:3.756501 +step:4126 train loss:3.689588 +step:4127 train loss:3.712151 +step:4128 train loss:3.696687 +step:4129 train loss:3.750784 +step:4130 train loss:3.677841 +step:4131 train loss:3.714051 +step:4132 train loss:3.725793 +step:4133 train loss:3.677498 +step:4134 train loss:3.733088 +step:4135 train loss:3.666242 +step:4136 train loss:3.686130 +step:4137 train loss:3.659393 +step:4138 train loss:3.664775 +step:4139 train loss:3.710901 +step:4140 train loss:3.673978 +step:4141 train loss:3.637539 +step:4142 train loss:3.679501 +step:4143 train loss:3.717693 +step:4144 train loss:3.669421 +step:4145 train loss:3.635349 +step:4146 train loss:3.702569 +step:4147 train loss:3.679724 +step:4148 train loss:3.672766 
+step:4149 train loss:3.751653 +step:4150 train loss:3.717527 +step:4151 train loss:3.698415 +step:4152 train loss:3.719847 +step:4153 train loss:3.728338 +step:4154 train loss:3.731937 +step:4155 train loss:3.759535 +step:4156 train loss:3.631956 +step:4157 train loss:3.651933 +step:4158 train loss:3.712905 +step:4159 train loss:3.613746 +step:4160 train loss:3.702733 +step:4161 train loss:3.705448 +step:4162 train loss:3.615117 +step:4163 train loss:3.697978 +step:4164 train loss:3.644365 +step:4165 train loss:3.643733 +step:4166 train loss:3.708715 +step:4167 train loss:3.705753 +step:4168 train loss:3.696693 +step:4169 train loss:3.723871 +step:4170 train loss:3.848200 +step:4171 train loss:3.704003 +step:4172 train loss:3.716125 +step:4173 train loss:3.712325 +step:4174 train loss:3.674185 +step:4175 train loss:3.768993 +step:4176 train loss:3.693737 +step:4177 train loss:3.714983 +step:4178 train loss:3.692804 +step:4179 train loss:3.644658 +step:4180 train loss:3.640512 +step:4181 train loss:3.692340 +step:4182 train loss:3.679693 +step:4183 train loss:3.610149 +step:4184 train loss:3.683315 +step:4185 train loss:3.748276 +step:4186 train loss:3.724635 +step:4187 train loss:3.733415 +step:4188 train loss:3.705454 +step:4189 train loss:3.667331 +step:4190 train loss:3.711950 +step:4191 train loss:3.659161 +step:4192 train loss:3.743284 +step:4193 train loss:3.653339 +step:4194 train loss:3.634089 +step:4195 train loss:3.632259 +step:4196 train loss:3.701884 +step:4197 train loss:3.716835 +step:4198 train loss:3.638127 +step:4199 train loss:3.721174 +step:4200 train loss:3.682535 +step:4201 train loss:3.664255 +step:4202 train loss:3.678870 +step:4203 train loss:3.690540 +step:4204 train loss:3.681214 +step:4205 train loss:3.696393 +step:4206 train loss:3.716807 +step:4207 train loss:3.716130 +step:4208 train loss:3.677537 +step:4209 train loss:3.742888 +step:4210 train loss:3.773739 +step:4211 train loss:3.652879 +step:4212 train loss:3.695690 +step:4213 train loss:3.648984 +step:4214 train loss:3.655311 +step:4215 train loss:3.671396 +step:4216 train loss:3.645031 +step:4217 train loss:3.667593 +step:4218 train loss:3.710445 +step:4219 train loss:3.709300 +step:4220 train loss:3.799561 +step:4221 train loss:3.712061 +step:4222 train loss:3.755275 +step:4223 train loss:3.686051 +step:4224 train loss:3.761619 +step:4225 train loss:3.685054 +step:4226 train loss:3.749585 +step:4227 train loss:3.725201 +step:4228 train loss:3.697799 +step:4229 train loss:3.692187 +step:4230 train loss:3.675180 +step:4231 train loss:3.662574 +step:4232 train loss:3.714168 +step:4233 train loss:3.619285 +step:4234 train loss:3.696373 +step:4235 train loss:3.775740 +step:4236 train loss:3.747167 +step:4237 train loss:3.724454 +step:4238 train loss:3.732003 +step:4239 train loss:3.782473 +step:4240 train loss:3.687219 +step:4241 train loss:3.614973 +step:4242 train loss:3.736392 +step:4243 train loss:3.732290 +step:4244 train loss:3.744034 +step:4245 train loss:3.798532 +step:4246 train loss:3.671487 +step:4247 train loss:3.728094 +step:4248 train loss:3.677521 +step:4249 train loss:3.688157 +step:4250 validation loss:3.609573 +step:4250 train loss:3.667120 +step:4251 train loss:3.760542 +step:4252 train loss:3.669996 +step:4253 train loss:3.661346 +step:4254 train loss:3.675187 +step:4255 train loss:3.653228 +step:4256 train loss:3.670526 +step:4257 train loss:3.726471 +step:4258 train loss:3.589326 +step:4259 train loss:3.653237 +step:4260 train loss:3.715552 +step:4261 train loss:3.702234 +step:4262 
train loss:3.842308 +step:4263 train loss:3.779072 +step:4264 train loss:3.721641 +step:4265 train loss:3.709821 +step:4266 train loss:3.709620 +step:4267 train loss:3.708019 +step:4268 train loss:3.653057 +step:4269 train loss:3.747465 +step:4270 train loss:3.727133 +step:4271 train loss:3.637942 +step:4272 train loss:3.694816 +step:4273 train loss:3.673971 +step:4274 train loss:3.656446 +step:4275 train loss:3.676483 +step:4276 train loss:3.642576 +step:4277 train loss:3.779101 +step:4278 train loss:3.626672 +step:4279 train loss:3.658547 +step:4280 train loss:3.739904 +step:4281 train loss:3.723950 +step:4282 train loss:3.786917 +step:4283 train loss:3.644639 +step:4284 train loss:3.671542 +step:4285 train loss:3.674096 +step:4286 train loss:3.741396 +step:4287 train loss:3.736362 +step:4288 train loss:3.720145 +step:4289 train loss:3.670983 +step:4290 train loss:3.680924 +step:4291 train loss:3.638228 +step:4292 train loss:3.683475 +step:4293 train loss:3.697586 +step:4294 train loss:3.680901 +step:4295 train loss:3.614759 +step:4296 train loss:3.685708 +step:4297 train loss:3.669313 +step:4298 train loss:3.681156 +step:4299 train loss:3.677080 +step:4300 train loss:3.794391 +step:4301 train loss:3.611712 +step:4302 train loss:3.748580 +step:4303 train loss:3.627898 +step:4304 train loss:3.636717 +step:4305 train loss:3.656539 +step:4306 train loss:3.731260 +step:4307 train loss:3.649585 +step:4308 train loss:3.646423 +step:4309 train loss:3.714289 +step:4310 train loss:3.654081 +step:4311 train loss:3.708056 +step:4312 train loss:3.701308 +step:4313 train loss:3.695974 +step:4314 train loss:3.641237 +step:4315 train loss:3.671648 +step:4316 train loss:3.619957 +step:4317 train loss:3.679465 +step:4318 train loss:3.714216 +step:4319 train loss:3.664192 +step:4320 train loss:3.727600 +step:4321 train loss:3.710140 +step:4322 train loss:3.662332 +step:4323 train loss:3.602822 +step:4324 train loss:3.695608 +step:4325 train loss:3.671083 +step:4326 train loss:3.664297 +step:4327 train loss:3.766919 +step:4328 train loss:3.682216 +step:4329 train loss:3.634054 +step:4330 train loss:3.682233 +step:4331 train loss:3.694050 +step:4332 train loss:3.722235 +step:4333 train loss:3.682580 +step:4334 train loss:3.701778 +step:4335 train loss:3.699579 +step:4336 train loss:3.709948 +step:4337 train loss:3.672339 +step:4338 train loss:3.796535 +step:4339 train loss:3.702207 +step:4340 train loss:3.706944 +step:4341 train loss:3.677388 +step:4342 train loss:3.695166 +step:4343 train loss:3.808143 +step:4344 train loss:3.701137 +step:4345 train loss:3.717171 +step:4346 train loss:3.730389 +step:4347 train loss:3.738551 +step:4348 train loss:3.650017 +step:4349 train loss:3.735925 +step:4350 train loss:3.683372 +step:4351 train loss:3.638256 +step:4352 train loss:3.716013 +step:4353 train loss:3.659084 +step:4354 train loss:3.713941 +step:4355 train loss:3.676493 +step:4356 train loss:3.696506 +step:4357 train loss:3.676670 +step:4358 train loss:3.771394 +step:4359 train loss:3.719564 +step:4360 train loss:3.633883 +step:4361 train loss:3.683319 +step:4362 train loss:3.704278 +step:4363 train loss:3.721975 +step:4364 train loss:3.684590 +step:4365 train loss:3.669941 +step:4366 train loss:3.712291 +step:4367 train loss:3.726557 +step:4368 train loss:3.709239 +step:4369 train loss:3.571130 +step:4370 train loss:3.706076 +step:4371 train loss:3.616663 +step:4372 train loss:3.762732 +step:4373 train loss:3.700934 +step:4374 train loss:3.668075 +step:4375 train loss:3.714191 +step:4376 train loss:3.725304 
+step:4377 train loss:3.657465 +step:4378 train loss:3.666880 +step:4379 train loss:3.750199 +step:4380 train loss:3.732266 +step:4381 train loss:3.631263 +step:4382 train loss:3.677958 +step:4383 train loss:3.706858 +step:4384 train loss:3.701514 +step:4385 train loss:3.625898 +step:4386 train loss:3.683166 +step:4387 train loss:3.652461 +step:4388 train loss:3.673333 +step:4389 train loss:3.702237 +step:4390 train loss:3.746634 +step:4391 train loss:3.669077 +step:4392 train loss:3.743944 +step:4393 train loss:3.703304 +step:4394 train loss:3.639915 +step:4395 train loss:3.699179 +step:4396 train loss:3.672410 +step:4397 train loss:3.713491 +step:4398 train loss:3.659877 +step:4399 train loss:3.655233 +step:4400 train loss:3.656147 +step:4401 train loss:3.718365 +step:4402 train loss:3.718434 +step:4403 train loss:3.670168 +step:4404 train loss:3.698188 +step:4405 train loss:3.621228 +step:4406 train loss:3.700823 +step:4407 train loss:3.633879 +step:4408 train loss:3.729640 +step:4409 train loss:3.686145 +step:4410 train loss:3.693445 +step:4411 train loss:3.651548 +step:4412 train loss:3.766027 +step:4413 train loss:3.666322 +step:4414 train loss:3.671621 +step:4415 train loss:3.655850 +step:4416 train loss:3.653446 +step:4417 train loss:3.643803 +step:4418 train loss:3.716606 +step:4419 train loss:3.685381 +step:4420 train loss:3.694156 +step:4421 train loss:3.720917 +step:4422 train loss:3.736055 +step:4423 train loss:3.697525 +step:4424 train loss:3.678751 +step:4425 train loss:3.644293 +step:4426 train loss:3.716149 +step:4427 train loss:3.676217 +step:4428 train loss:3.617495 +step:4429 train loss:3.675623 +step:4430 train loss:3.714992 +step:4431 train loss:3.710197 +step:4432 train loss:3.614854 +step:4433 train loss:3.667941 +step:4434 train loss:3.666774 +step:4435 train loss:3.696005 +step:4436 train loss:3.633041 +step:4437 train loss:3.711203 +step:4438 train loss:3.677841 +step:4439 train loss:3.684992 +step:4440 train loss:3.681204 +step:4441 train loss:3.683558 +step:4442 train loss:3.731685 +step:4443 train loss:3.668365 +step:4444 train loss:3.752994 +step:4445 train loss:3.714836 +step:4446 train loss:3.649041 +step:4447 train loss:3.695999 +step:4448 train loss:3.716778 +step:4449 train loss:3.657362 +step:4450 train loss:3.670479 +step:4451 train loss:3.728801 +step:4452 train loss:3.784231 +step:4453 train loss:3.713173 +step:4454 train loss:3.688754 +step:4455 train loss:3.738962 +step:4456 train loss:3.681942 +step:4457 train loss:3.682876 +step:4458 train loss:3.689979 +step:4459 train loss:3.724212 +step:4460 train loss:3.636059 +step:4461 train loss:3.607256 +step:4462 train loss:3.660682 +step:4463 train loss:3.686219 +step:4464 train loss:3.649481 +step:4465 train loss:3.686173 +step:4466 train loss:3.782627 +step:4467 train loss:3.661290 +step:4468 train loss:3.654084 +step:4469 train loss:3.649580 +step:4470 train loss:3.622461 +step:4471 train loss:3.685616 +step:4472 train loss:3.607172 +step:4473 train loss:3.697185 +step:4474 train loss:3.723066 +step:4475 train loss:3.683727 +step:4476 train loss:3.646268 +step:4477 train loss:3.631397 +step:4478 train loss:3.689813 +step:4479 train loss:3.792988 +step:4480 train loss:3.629599 +step:4481 train loss:3.698239 +step:4482 train loss:3.659078 +step:4483 train loss:3.656494 +step:4484 train loss:3.701497 +step:4485 train loss:3.662719 +step:4486 train loss:3.761940 +step:4487 train loss:3.660252 +step:4488 train loss:3.656752 +step:4489 train loss:3.612161 +step:4490 train loss:3.696403 +step:4491 train 
loss:3.644739 +step:4492 train loss:3.675651 +step:4493 train loss:3.663371 +step:4494 train loss:3.657439 +step:4495 train loss:3.724838 +step:4496 train loss:3.666147 +step:4497 train loss:3.748809 +step:4498 train loss:3.639514 +step:4499 train loss:3.695790 +step:4500 validation loss:3.596065 total_sharp:2.8964e-02 L1_sharp:3.4224e-02 L2_sharp:1.2297e-02 L3_sharp:1.1699e-02 L4_sharp:4.1707e-03 L5_sharp:5.2666e-03 L6_sharp:6.2409e-03 L7_sharp:7.5734e-03 L8_sharp:5.8445e-03 L9_sharp:4.6479e-03 L10_sharp:3.3718e-03 L11_sharp:3.6657e-03 L12_sharp:9.4764e-03 total_fnorm:1.0981e+00 total_l1_linf:9.7648e+03 total_spectral:1.0981e+00 L1_fnorm:2.3515e-01 L2_fnorm:2.2841e-01 L3_fnorm:2.4067e-01 L4_fnorm:2.5380e-01 L5_fnorm:2.5132e-01 L6_fnorm:2.5416e-01 L7_fnorm:2.5178e-01 L8_fnorm:2.5408e-01 L9_fnorm:2.5954e-01 L10_fnorm:2.6395e-01 L11_fnorm:2.6398e-01 L12_fnorm:2.5905e-01 L1_l1linf:3.1190e-01 L2_l1linf:3.6667e-01 L3_l1linf:3.6628e-01 L4_l1linf:3.4807e-01 L5_l1linf:3.0238e-01 L6_l1linf:2.8640e-01 L7_l1linf:2.7753e-01 L8_l1linf:2.7324e-01 L9_l1linf:2.8497e-01 L10_l1linf:2.8228e-01 L11_l1linf:2.8599e-01 L12_l1linf:2.8223e-01 L1_spectral:5.0989e-02 L2_spectral:5.2506e-02 L3_spectral:5.3432e-02 L4_spectral:4.3397e-02 L5_spectral:3.9957e-02 L6_spectral:3.2272e-02 L7_spectral:3.2447e-02 L8_spectral:3.2341e-02 L9_spectral:3.3934e-02 L10_spectral:3.5179e-02 L11_spectral:3.5197e-02 L12_spectral:4.9203e-02 ip_v_neg_g:1.9692e-02 cos_v_neg_g:1.8818e-02 v_norm:1.0981e+00 g_norm:9.5297e-01 hv_norm:4.1313e-01 cos_v_hv:7.6988e-02 hg_norm:1.1646e+01 cos_g_hg:1.9792e-01 v_par:6.0523e-04 v_perp:1.0981e+00 L1_cos_v_neg_g:3.6000e-02 L1_v_norm:2.3515e-01 L2_cos_v_neg_g:2.6414e-02 L2_v_norm:2.2841e-01 L3_cos_v_neg_g:3.2419e-02 L3_v_norm:2.4067e-01 L4_cos_v_neg_g:1.9347e-02 L4_v_norm:2.5380e-01 L5_cos_v_neg_g:2.0192e-02 L5_v_norm:2.5132e-01 L6_cos_v_neg_g:2.1081e-02 L6_v_norm:2.5416e-01 L7_cos_v_neg_g:2.4164e-02 L7_v_norm:2.5178e-01 L8_cos_v_neg_g:2.4834e-02 L8_v_norm:2.5408e-01 L9_cos_v_neg_g:2.5583e-02 L9_v_norm:2.5954e-01 L10_cos_v_neg_g:2.2504e-02 L10_v_norm:2.6395e-01 L11_cos_v_neg_g:2.3953e-02 L11_v_norm:2.6398e-01 L12_cos_v_neg_g:2.5762e-02 L12_v_norm:2.5905e-01 +step:4500 train loss:3.602937 +step:4501 train loss:3.662361 +step:4502 train loss:3.785221 +step:4503 train loss:3.688338 +step:4504 train loss:3.697896 +step:4505 train loss:3.684376 +step:4506 train loss:3.652096 +step:4507 train loss:3.727982 +step:4508 train loss:3.662389 +step:4509 train loss:3.659777 +step:4510 train loss:3.695548 +step:4511 train loss:3.647852 +step:4512 train loss:3.669409 +step:4513 train loss:3.726552 +step:4514 train loss:3.632867 +step:4515 train loss:3.744909 +step:4516 train loss:3.723066 +step:4517 train loss:3.679406 +step:4518 train loss:3.615366 +step:4519 train loss:3.651487 +step:4520 train loss:3.664143 +step:4521 train loss:3.605258 +step:4522 train loss:3.658610 +step:4523 train loss:3.707772 +step:4524 train loss:3.688635 +step:4525 train loss:3.612051 +step:4526 train loss:3.653073 +step:4527 train loss:3.643331 +step:4528 train loss:3.672534 +step:4529 train loss:3.668572 +step:4530 train loss:3.762494 +step:4531 train loss:3.653714 +step:4532 train loss:3.678663 +step:4533 train loss:3.648787 +step:4534 train loss:3.742126 +step:4535 train loss:3.640387 +step:4536 train loss:3.711657 +step:4537 train loss:3.695598 +step:4538 train loss:3.671951 +step:4539 train loss:3.693542 +step:4540 train loss:3.671663 +step:4541 train loss:3.636193 +step:4542 train loss:3.688056 +step:4543 train loss:3.775330 +step:4544 
train loss:3.718035 +step:4545 train loss:3.660218 +step:4546 train loss:3.754300 +step:4547 train loss:3.714535 +step:4548 train loss:3.717592 +step:4549 train loss:3.671488 +step:4550 train loss:3.641290 +step:4551 train loss:3.654608 +step:4552 train loss:3.656663 +step:4553 train loss:3.739024 +step:4554 train loss:3.633222 +step:4555 train loss:3.746754 +step:4556 train loss:3.681107 +step:4557 train loss:3.610407 +step:4558 train loss:3.694669 +step:4559 train loss:3.706402 +step:4560 train loss:3.644681 +step:4561 train loss:3.630383 +step:4562 train loss:3.674814 +step:4563 train loss:3.624327 +step:4564 train loss:3.650585 +step:4565 train loss:3.650135 +step:4566 train loss:3.626887 +step:4567 train loss:3.651591 +step:4568 train loss:3.650636 +step:4569 train loss:3.637090 +step:4570 train loss:3.688464 +step:4571 train loss:3.663023 +step:4572 train loss:3.657389 +step:4573 train loss:3.666846 +step:4574 train loss:3.812079 +step:4575 train loss:3.644588 +step:4576 train loss:3.635334 +step:4577 train loss:3.674425 +step:4578 train loss:3.715025 +step:4579 train loss:3.662858 +step:4580 train loss:3.724326 +step:4581 train loss:3.664948 +step:4582 train loss:3.654888 +step:4583 train loss:3.663047 +step:4584 train loss:3.635283 +step:4585 train loss:3.711439 +step:4586 train loss:3.708415 +step:4587 train loss:3.605092 +step:4588 train loss:3.645017 +step:4589 train loss:3.720534 +step:4590 train loss:3.693773 +step:4591 train loss:3.631137 +step:4592 train loss:3.715182 +step:4593 train loss:3.636925 +step:4594 train loss:3.664737 +step:4595 train loss:3.686880 +step:4596 train loss:3.625642 +step:4597 train loss:3.760556 +step:4598 train loss:3.680457 +step:4599 train loss:3.631660 +step:4600 train loss:3.639548 +step:4601 train loss:3.663567 +step:4602 train loss:3.615264 +step:4603 train loss:3.629509 +step:4604 train loss:3.734008 +step:4605 train loss:3.654138 +step:4606 train loss:3.679864 +step:4607 train loss:3.664041 +step:4608 train loss:3.701480 +step:4609 train loss:3.656377 +step:4610 train loss:3.700676 +step:4611 train loss:3.723984 +step:4612 train loss:3.724356 +step:4613 train loss:3.709401 +step:4614 train loss:3.696532 +step:4615 train loss:3.640063 +step:4616 train loss:3.624206 +step:4617 train loss:3.666567 +step:4618 train loss:3.681653 +step:4619 train loss:3.644104 +step:4620 train loss:3.658591 +step:4621 train loss:3.659561 +step:4622 train loss:3.595133 +step:4623 train loss:3.704902 +step:4624 train loss:3.688545 +step:4625 train loss:3.646241 +step:4626 train loss:3.689566 +step:4627 train loss:3.660687 +step:4628 train loss:3.646948 +step:4629 train loss:3.685714 +step:4630 train loss:3.741956 +step:4631 train loss:3.744750 +step:4632 train loss:3.638094 +step:4633 train loss:3.649884 +step:4634 train loss:3.724941 +step:4635 train loss:3.690046 +step:4636 train loss:3.705363 +step:4637 train loss:3.642421 +step:4638 train loss:3.647499 +step:4639 train loss:3.644228 +step:4640 train loss:3.653831 +step:4641 train loss:3.663790 +step:4642 train loss:3.695526 +step:4643 train loss:3.656592 +step:4644 train loss:3.679911 +step:4645 train loss:3.700681 +step:4646 train loss:3.649705 +step:4647 train loss:3.607095 +step:4648 train loss:3.716715 +step:4649 train loss:3.726900 +step:4650 train loss:3.672398 +step:4651 train loss:3.673449 +step:4652 train loss:3.663121 +step:4653 train loss:3.722019 +step:4654 train loss:3.719390 +step:4655 train loss:3.618721 +step:4656 train loss:3.653750 +step:4657 train loss:3.707639 +step:4658 train loss:3.664230 
+step:4659 train loss:3.675837 +step:4660 train loss:3.722841 +step:4661 train loss:3.635540 +step:4662 train loss:3.647554 +step:4663 train loss:3.653095 +step:4664 train loss:3.713635 +step:4665 train loss:3.708288 +step:4666 train loss:3.706394 +step:4667 train loss:3.697708 +step:4668 train loss:3.664301 +step:4669 train loss:3.672084 +step:4670 train loss:3.705967 +step:4671 train loss:3.737960 +step:4672 train loss:3.584750 +step:4673 train loss:3.621248 +step:4674 train loss:3.748161 +step:4675 train loss:3.652575 +step:4676 train loss:3.615766 +step:4677 train loss:3.617215 +step:4678 train loss:3.590386 +step:4679 train loss:3.693283 +step:4680 train loss:3.631421 +step:4681 train loss:3.681250 +step:4682 train loss:3.628717 +step:4683 train loss:3.603183 +step:4684 train loss:3.726886 +step:4685 train loss:3.650729 +step:4686 train loss:3.666829 +step:4687 train loss:3.699759 +step:4688 train loss:3.630693 +step:4689 train loss:3.704632 +step:4690 train loss:3.645339 +step:4691 train loss:3.681695 +step:4692 train loss:3.609148 +step:4693 train loss:3.647983 +step:4694 train loss:3.689013 +step:4695 train loss:3.706909 +step:4696 train loss:3.692469 +step:4697 train loss:3.604321 +step:4698 train loss:3.625779 +step:4699 train loss:3.672093 +step:4700 train loss:3.640657 +step:4701 train loss:3.651837 +step:4702 train loss:3.607466 +step:4703 train loss:3.688220 +step:4704 train loss:3.672856 +step:4705 train loss:3.619281 +step:4706 train loss:3.623615 +step:4707 train loss:3.610830 +step:4708 train loss:3.682230 +step:4709 train loss:3.626580 +step:4710 train loss:3.643106 +step:4711 train loss:3.699193 +step:4712 train loss:3.597431 +step:4713 train loss:3.701233 +step:4714 train loss:3.601215 +step:4715 train loss:3.694498 +step:4716 train loss:3.657003 +step:4717 train loss:3.592441 +step:4718 train loss:3.679916 +step:4719 train loss:3.607222 +step:4720 train loss:3.702551 +step:4721 train loss:3.660245 +step:4722 train loss:3.713605 +step:4723 train loss:3.611838 +step:4724 train loss:3.664453 +step:4725 train loss:3.595835 +step:4726 train loss:3.646024 +step:4727 train loss:3.650468 +step:4728 train loss:3.659029 +step:4729 train loss:3.684758 +step:4730 train loss:3.587990 +step:4731 train loss:3.647264 +step:4732 train loss:3.597660 +step:4733 train loss:3.539210 +step:4734 train loss:3.673870 +step:4735 train loss:3.629331 +step:4736 train loss:3.667821 +step:4737 train loss:3.551177 +step:4738 train loss:3.696079 +step:4739 train loss:3.571531 +step:4740 train loss:3.684567 +step:4741 train loss:3.652348 +step:4742 train loss:3.614806 +step:4743 train loss:3.610746 +step:4744 train loss:3.657610 +step:4745 train loss:3.674382 +step:4746 train loss:3.710572 +step:4747 train loss:3.673369 +step:4748 train loss:3.574240 +step:4749 train loss:3.639004 +step:4750 validation loss:3.575669 +step:4750 train loss:3.586798 +step:4751 train loss:3.683486 +step:4752 train loss:3.611646 +step:4753 train loss:3.722013 +step:4754 train loss:3.589219 +step:4755 train loss:3.632248 +step:4756 train loss:3.701335 +step:4757 train loss:3.631028 +step:4758 train loss:3.644351 +step:4759 train loss:3.647983 +step:4760 train loss:3.674438 +step:4761 train loss:3.597921 +step:4762 train loss:3.629534 +step:4763 train loss:3.650983 +step:4764 train loss:3.710354 +step:4765 train loss:3.604795 +step:4766 train loss:3.627975 +step:4767 train loss:3.578358 +step:4768 train loss:3.636503 +step:4769 train loss:3.660678 +step:4770 train loss:3.618897 +step:4771 train loss:3.632846 +step:4772 
train loss:3.609394 +step:4773 train loss:3.640410 +step:4774 train loss:3.582041 +step:4775 train loss:3.717897 +step:4776 train loss:3.584001 +step:4777 train loss:3.656860 +step:4778 train loss:3.596003 +step:4779 train loss:3.643946 +step:4780 train loss:3.577528 +step:4781 train loss:3.588434 +step:4782 train loss:3.690059 +step:4783 train loss:3.683867 +step:4784 train loss:3.640387 +step:4785 train loss:3.647994 +step:4786 train loss:3.751819 +step:4787 train loss:3.589577 +step:4788 train loss:3.609997 +step:4789 train loss:3.634340 +step:4790 train loss:3.681235 +step:4791 train loss:3.651606 +step:4792 train loss:3.691926 +step:4793 train loss:3.606690 +step:4794 train loss:3.684234 +step:4795 train loss:3.632902 +step:4796 train loss:3.622231 +step:4797 train loss:3.629427 +step:4798 train loss:3.639791 +step:4799 train loss:3.635200 +step:4800 train loss:3.668947 +step:4801 train loss:3.657442 +step:4802 train loss:3.694314 +step:4803 train loss:3.676873 +step:4804 train loss:3.641168 +step:4805 train loss:3.631539 +step:4806 train loss:3.611891 +step:4807 train loss:3.717623 +step:4808 train loss:3.588353 +step:4809 train loss:3.696685 +step:4810 train loss:3.632427 +step:4811 train loss:3.655149 +step:4812 train loss:3.626555 +step:4813 train loss:3.582881 +step:4814 train loss:3.576832 +step:4815 train loss:3.572803 +step:4816 train loss:3.637809 +step:4817 train loss:3.579197 +step:4818 train loss:3.640784 +step:4819 train loss:3.639011 +step:4820 train loss:3.884546 +step:4821 train loss:3.660590 +step:4822 train loss:3.673179 +step:4823 train loss:3.605304 +step:4824 train loss:3.613293 +step:4825 train loss:3.591940 +step:4826 train loss:3.680073 +step:4827 train loss:3.623306 +step:4828 train loss:3.568372 +step:4829 train loss:3.673477 +step:4830 train loss:3.613270 +step:4831 train loss:3.759483 +step:4832 train loss:3.630410 +step:4833 train loss:3.666265 +step:4834 train loss:3.568383 +step:4835 train loss:3.659304 +step:4836 train loss:3.641423 +step:4837 train loss:3.667845 +step:4838 train loss:3.608704 +step:4839 train loss:3.672846 +step:4840 train loss:3.580646 +step:4841 train loss:3.675375 +step:4842 train loss:3.591929 +step:4843 train loss:3.667825 +step:4844 train loss:3.667189 +step:4845 train loss:3.608858 +step:4846 train loss:3.620575 +step:4847 train loss:3.611596 +step:4848 train loss:3.631028 +step:4849 train loss:3.586790 +step:4850 train loss:3.593632 +step:4851 train loss:3.590476 +step:4852 train loss:3.665747 +step:4853 train loss:3.644296 +step:4854 train loss:3.619759 +step:4855 train loss:3.686480 +step:4856 train loss:3.656014 +step:4857 train loss:3.682216 +step:4858 train loss:3.751022 +step:4859 train loss:3.596444 +step:4860 train loss:3.672568 +step:4861 train loss:3.642404 +step:4862 train loss:3.676272 +step:4863 train loss:3.611745 +step:4864 train loss:3.620384 +step:4865 train loss:3.612749 +step:4866 train loss:3.661162 +step:4867 train loss:3.625284 +step:4868 train loss:3.642488 +step:4869 train loss:3.591965 +step:4870 train loss:3.625572 +step:4871 train loss:3.706216 +step:4872 train loss:3.653406 +step:4873 train loss:3.648886 +step:4874 train loss:3.624694 +step:4875 train loss:3.585964 +step:4876 train loss:3.602287 +step:4877 train loss:3.600020 +step:4878 train loss:3.642682 +step:4879 train loss:3.599667 +step:4880 train loss:3.629238 +step:4881 train loss:3.574526 +step:4882 train loss:3.774336 +step:4883 train loss:3.583881 +step:4884 train loss:3.615964 +step:4885 train loss:3.590948 +step:4886 train loss:3.667746 
+step:4887 train loss:3.616542 +step:4888 train loss:3.629170 +step:4889 train loss:3.619974 +step:4890 train loss:3.664704 +step:4891 train loss:3.598778 +step:4892 train loss:3.605556 +step:4893 train loss:3.648885 +step:4894 train loss:3.589647 +step:4895 train loss:3.616614 +step:4896 train loss:3.602392 +step:4897 train loss:3.669774 +step:4898 train loss:3.624377 +step:4899 train loss:3.607375 +step:4900 train loss:3.654122 +step:4901 train loss:3.604143 +step:4902 train loss:3.598592 +step:4903 train loss:3.618804 +step:4904 train loss:3.634731 +step:4905 train loss:3.631260 +step:4906 train loss:3.632166 +step:4907 train loss:3.703118 +step:4908 train loss:3.613990 +step:4909 train loss:3.615437 +step:4910 train loss:3.638913 +step:4911 train loss:3.686235 +step:4912 train loss:3.664534 +step:4913 train loss:3.640558 +step:4914 train loss:3.631387 +step:4915 train loss:3.612593 +step:4916 train loss:3.553722 +step:4917 train loss:3.579443 +step:4918 train loss:3.609136 +step:4919 train loss:3.600731 +step:4920 train loss:3.604044 +step:4921 train loss:3.763894 +step:4922 train loss:3.659284 +step:4923 train loss:3.675689 +step:4924 train loss:3.676600 +step:4925 train loss:3.606915 +step:4926 train loss:3.606877 +step:4927 train loss:3.631351 +step:4928 train loss:3.670711 +step:4929 train loss:3.627263 +step:4930 train loss:3.609401 +step:4931 train loss:3.601825 +step:4932 train loss:3.612724 +step:4933 train loss:3.604609 +step:4934 train loss:3.668716 +step:4935 train loss:3.657028 +step:4936 train loss:3.617685 +step:4937 train loss:3.728855 +step:4938 train loss:3.713762 +step:4939 train loss:3.581068 +step:4940 train loss:3.659568 +step:4941 train loss:3.562309 +step:4942 train loss:3.602602 +step:4943 train loss:3.609003 +step:4944 train loss:3.607954 +step:4945 train loss:3.658566 +step:4946 train loss:3.628707 +step:4947 train loss:3.617474 +step:4948 train loss:3.647043 +step:4949 train loss:3.557079 +step:4950 train loss:3.636985 +step:4951 train loss:3.692125 +step:4952 train loss:3.626141 +step:4953 train loss:3.659644 +step:4954 train loss:3.563952 +step:4955 train loss:3.637547 +step:4956 train loss:3.665140 +step:4957 train loss:3.663536 +step:4958 train loss:3.573966 +step:4959 train loss:3.693393 +step:4960 train loss:3.621543 +step:4961 train loss:3.640806 +step:4962 train loss:3.600454 +step:4963 train loss:3.648039 +step:4964 train loss:3.597159 +step:4965 train loss:3.750891 +step:4966 train loss:3.599064 +step:4967 train loss:3.706622 +step:4968 train loss:3.599643 +step:4969 train loss:3.640232 +step:4970 train loss:3.630816 +step:4971 train loss:3.580191 +step:4972 train loss:3.629642 +step:4973 train loss:3.633880 +step:4974 train loss:3.625371 +step:4975 train loss:3.705338 +step:4976 train loss:3.686700 +step:4977 train loss:3.632345 +step:4978 train loss:3.622544 +step:4979 train loss:3.617388 +step:4980 train loss:3.725296 +step:4981 train loss:3.565502 +step:4982 train loss:3.648088 +step:4983 train loss:3.568214 +step:4984 train loss:3.757723 +step:4985 train loss:3.657526 +step:4986 train loss:3.598450 +step:4987 train loss:3.615286 +step:4988 train loss:3.814947 +step:4989 train loss:3.620567 +step:4990 train loss:3.615949 +step:4991 train loss:3.627700 +step:4992 train loss:3.611700 +step:4993 train loss:3.592628 +step:4994 train loss:3.698514 +step:4995 train loss:3.626095 +step:4996 train loss:3.715725 +step:4997 train loss:3.612675 +step:4998 train loss:3.619118 +step:4999 train loss:3.602605 +step:5000 validation loss:3.567934 
total_sharp:2.0587e-02 L1_sharp:2.2692e-02 L2_sharp:2.5340e-02 L3_sharp:7.2533e-03 L4_sharp:3.0566e-03 L5_sharp:5.1778e-03 L6_sharp:5.1736e-03 L7_sharp:6.6556e-03 L8_sharp:4.5094e-03 L9_sharp:2.8892e-03 L10_sharp:2.3845e-03 L11_sharp:2.7745e-03 L12_sharp:5.8201e-03 total_fnorm:1.0903e+00 total_l1_linf:9.6897e+03 total_spectral:1.0903e+00 L1_fnorm:2.3114e-01 L2_fnorm:2.2772e-01 L3_fnorm:2.3788e-01 L4_fnorm:2.5370e-01 L5_fnorm:2.5346e-01 L6_fnorm:2.5538e-01 L7_fnorm:2.5277e-01 L8_fnorm:2.5512e-01 L9_fnorm:2.5703e-01 L10_fnorm:2.6022e-01 L11_fnorm:2.5994e-01 L12_fnorm:2.5084e-01 L1_l1linf:3.2913e-01 L2_l1linf:3.0801e-01 L3_l1linf:3.4938e-01 L4_l1linf:3.1458e-01 L5_l1linf:3.0509e-01 L6_l1linf:2.8029e-01 L7_l1linf:2.6945e-01 L8_l1linf:2.6516e-01 L9_l1linf:2.6970e-01 L10_l1linf:2.7852e-01 L11_l1linf:2.7763e-01 L12_l1linf:2.8145e-01 L1_spectral:4.5304e-02 L2_spectral:4.6871e-02 L3_spectral:4.5778e-02 L4_spectral:4.2931e-02 L5_spectral:3.9507e-02 L6_spectral:3.1994e-02 L7_spectral:3.1437e-02 L8_spectral:3.1708e-02 L9_spectral:3.1688e-02 L10_spectral:3.2443e-02 L11_spectral:3.3543e-02 L12_spectral:4.5449e-02 ip_v_neg_g:1.1587e-02 cos_v_neg_g:9.2193e-03 v_norm:1.0903e+00 g_norm:1.1527e+00 hv_norm:4.2219e-01 cos_v_hv:5.3169e-02 hg_norm:1.6692e+01 cos_g_hg:5.3715e-01 v_par:3.5456e-04 v_perp:1.0903e+00 L1_cos_v_neg_g:1.7778e-02 L1_v_norm:2.3114e-01 L2_cos_v_neg_g:2.3723e-02 L2_v_norm:2.2772e-01 L3_cos_v_neg_g:1.5057e-02 L3_v_norm:2.3788e-01 L4_cos_v_neg_g:1.0312e-02 L4_v_norm:2.5370e-01 L5_cos_v_neg_g:1.3829e-02 L5_v_norm:2.5346e-01 L6_cos_v_neg_g:8.3616e-03 L6_v_norm:2.5538e-01 L7_cos_v_neg_g:1.3546e-02 L7_v_norm:2.5277e-01 L8_cos_v_neg_g:1.3153e-02 L8_v_norm:2.5512e-01 L9_cos_v_neg_g:9.7826e-03 L9_v_norm:2.5703e-01 L10_cos_v_neg_g:8.4446e-03 L10_v_norm:2.6022e-01 L11_cos_v_neg_g:9.9919e-03 L11_v_norm:2.5994e-01 L12_cos_v_neg_g:6.0470e-03 L12_v_norm:2.5084e-01 +step:5000 train loss:3.713503 +step:5001 train loss:3.582064 +step:5002 train loss:3.634917 +step:5003 train loss:3.633145 +step:5004 train loss:3.623380 +step:5005 train loss:3.620633 +step:5006 train loss:3.660545 +step:5007 train loss:3.666177 +step:5008 train loss:3.603540 +step:5009 train loss:3.647192 +step:5010 train loss:3.596791 +step:5011 train loss:3.626427 +step:5012 train loss:3.600763 +step:5013 train loss:3.705138 +step:5014 train loss:3.617068 +step:5015 train loss:3.693230 +step:5016 train loss:3.619785 +step:5017 train loss:3.665602 +step:5018 train loss:3.584966 +step:5019 train loss:3.618638 +step:5020 train loss:3.614408 +step:5021 train loss:3.625709 +step:5022 train loss:3.661452 +step:5023 train loss:3.632339 +step:5024 train loss:3.684181 +step:5025 train loss:3.568128 +step:5026 train loss:3.693896 +step:5027 train loss:3.625815 +step:5028 train loss:3.690995 +step:5029 train loss:3.589422 +step:5030 train loss:3.626578 +step:5031 train loss:3.614686 +step:5032 train loss:3.638418 +step:5033 train loss:3.626529 +step:5034 train loss:3.621170 +step:5035 train loss:3.709203 +step:5036 train loss:3.653966 +step:5037 train loss:3.608284 +step:5038 train loss:3.656116 +step:5039 train loss:3.671002 +step:5040 train loss:3.632426 +step:5041 train loss:3.646897 +step:5042 train loss:3.553810 +step:5043 train loss:3.696169 +step:5044 train loss:3.612382 +step:5045 train loss:3.663932 +step:5046 train loss:3.584601 +step:5047 train loss:3.658706 +step:5048 train loss:3.577194 +step:5049 train loss:3.710028 +step:5050 train loss:3.597180 +step:5051 train loss:3.643250 +step:5052 train loss:3.540909 +step:5053 train loss:3.721164 
+step:5054 train loss:3.611473 +step:5055 train loss:3.633833 +step:5056 train loss:3.671382 +step:5057 train loss:3.597759 +step:5058 train loss:3.633748 +step:5059 train loss:3.594721 +step:5060 train loss:3.639830 +step:5061 train loss:3.635341 +step:5062 train loss:3.604002 +step:5063 train loss:3.598527 +step:5064 train loss:3.607054 +step:5065 train loss:3.590730 +step:5066 train loss:3.652192 +step:5067 train loss:3.636175 +step:5068 train loss:3.618095 +step:5069 train loss:3.590726 +step:5070 train loss:3.622438 +step:5071 train loss:3.690234 +step:5072 train loss:3.583540 +step:5073 train loss:3.589174 +step:5074 train loss:3.539772 +step:5075 train loss:3.606845 +step:5076 train loss:3.538174 +step:5077 train loss:3.599965 +step:5078 train loss:3.616335 +step:5079 train loss:3.646571 +step:5080 train loss:3.618964 +step:5081 train loss:3.633518 +step:5082 train loss:3.624122 +step:5083 train loss:3.684863 +step:5084 train loss:3.664876 +step:5085 train loss:3.628073 +step:5086 train loss:3.700294 +step:5087 train loss:3.683103 +step:5088 train loss:3.602828 +step:5089 train loss:3.668157 +step:5090 train loss:3.613057 +step:5091 train loss:3.618525 +step:5092 train loss:3.713310 +step:5093 train loss:3.597295 +step:5094 train loss:3.593999 +step:5095 train loss:3.644833 +step:5096 train loss:3.613744 +step:5097 train loss:3.624851 +step:5098 train loss:3.630034 +step:5099 train loss:3.587207 +step:5100 train loss:3.597167 +step:5101 train loss:3.789670 +step:5102 train loss:3.637909 +step:5103 train loss:3.645989 +step:5104 train loss:3.692766 +step:5105 train loss:3.635883 +step:5106 train loss:3.588458 +step:5107 train loss:3.610462 +step:5108 train loss:3.602339 +step:5109 train loss:3.680294 +step:5110 train loss:3.592412 +step:5111 train loss:3.684552 +step:5112 train loss:3.596850 +step:5113 train loss:3.573143 +step:5114 train loss:3.621965 +step:5115 train loss:3.581918 +step:5116 train loss:3.640671 +step:5117 train loss:3.584026 +step:5118 train loss:3.613212 +step:5119 train loss:3.595558 +step:5120 train loss:3.634409 +step:5121 train loss:3.584254 +step:5122 train loss:3.594405 +step:5123 train loss:3.590478 +step:5124 train loss:3.543972 +step:5125 train loss:3.655422 +step:5126 train loss:3.643685 +step:5127 train loss:3.644657 +step:5128 train loss:3.666128 +step:5129 train loss:3.586207 +step:5130 train loss:3.603425 +step:5131 train loss:3.539123 +step:5132 train loss:3.657254 +step:5133 train loss:3.624454 +step:5134 train loss:3.626786 +step:5135 train loss:3.579960 +step:5136 train loss:3.643056 +step:5137 train loss:3.644372 +step:5138 train loss:3.623464 +step:5139 train loss:3.660009 +step:5140 train loss:3.630952 +step:5141 train loss:3.664701 +step:5142 train loss:3.610074 +step:5143 train loss:3.640792 +step:5144 train loss:3.635714 +step:5145 train loss:3.581115 +step:5146 train loss:3.572719 +step:5147 train loss:3.647729 +step:5148 train loss:3.582586 +step:5149 train loss:3.648682 +step:5150 train loss:3.626835 +step:5151 train loss:3.592897 +step:5152 train loss:3.638009 +step:5153 train loss:3.612324 +step:5154 train loss:3.622752 +step:5155 train loss:3.632649 +step:5156 train loss:3.611948 +step:5157 train loss:3.607694 +step:5158 train loss:3.633476 +step:5159 train loss:3.666440 +step:5160 train loss:3.735214 +step:5161 train loss:3.663080 +step:5162 train loss:3.678653 +step:5163 train loss:3.595562 +step:5164 train loss:3.660747 +step:5165 train loss:3.674722 +step:5166 train loss:3.614123 +step:5167 train loss:3.710041 +step:5168 train 
loss:3.625234 +step:5169 train loss:3.655417 +step:5170 train loss:3.632915 +step:5171 train loss:3.680531 +step:5172 train loss:3.597454 +step:5173 train loss:3.660904 +step:5174 train loss:3.598724 +step:5175 train loss:3.626002 +step:5176 train loss:3.620599 +step:5177 train loss:3.616207 +step:5178 train loss:3.680627 +step:5179 train loss:3.593844 +step:5180 train loss:3.670353 +step:5181 train loss:3.614606 +step:5182 train loss:3.676146 +step:5183 train loss:3.605212 +step:5184 train loss:3.582551 +step:5185 train loss:3.612487 +step:5186 train loss:3.665264 +step:5187 train loss:3.659913 +step:5188 train loss:3.591459 +step:5189 train loss:3.635554 +step:5190 train loss:3.620303 +step:5191 train loss:3.599595 +step:5192 train loss:3.580575 +step:5193 train loss:3.665308 +step:5194 train loss:3.618209 +step:5195 train loss:3.591998 +step:5196 train loss:3.659202 +step:5197 train loss:3.705505 +step:5198 train loss:3.621026 +step:5199 train loss:3.607206 +step:5200 train loss:3.631725 +step:5201 train loss:3.624108 +step:5202 train loss:3.626410 +step:5203 train loss:3.629453 +step:5204 train loss:3.601491 +step:5205 train loss:3.648712 +step:5206 train loss:3.582820 +step:5207 train loss:3.588735 +step:5208 train loss:3.650328 +step:5209 train loss:3.667259 +step:5210 train loss:3.579471 +step:5211 train loss:3.620237 +step:5212 train loss:3.636222 +step:5213 train loss:3.608608 +step:5214 train loss:3.658693 +step:5215 train loss:3.768791 +step:5216 train loss:3.618459 +step:5217 train loss:3.597096 +step:5218 train loss:3.603155 +step:5219 train loss:3.667660 +step:5220 train loss:3.581376 +step:5221 train loss:3.587062 +step:5222 train loss:3.664386 +step:5223 train loss:3.659475 +step:5224 train loss:3.558073 +step:5225 train loss:3.704122 +step:5226 train loss:3.617341 +step:5227 train loss:3.689067 +step:5228 train loss:3.665689 +step:5229 train loss:3.603350 +step:5230 train loss:3.617050 +step:5231 train loss:3.564812 +step:5232 train loss:3.684016 +step:5233 train loss:3.646803 +step:5234 train loss:3.646506 +step:5235 train loss:3.600208 +step:5236 train loss:3.672034 +step:5237 train loss:3.727614 +step:5238 train loss:3.626359 +step:5239 train loss:3.692370 +step:5240 train loss:3.573446 +step:5241 train loss:3.632550 +step:5242 train loss:3.606481 +step:5243 train loss:3.611660 +step:5244 train loss:3.606503 +step:5245 train loss:3.651532 +step:5246 train loss:3.691113 +step:5247 train loss:3.620294 +step:5248 train loss:3.589898 +step:5249 train loss:3.650392 +step:5250 validation loss:3.550888 +step:5250 train loss:3.618116 +step:5251 train loss:3.681023 +step:5252 train loss:3.574209 +step:5253 train loss:3.727758 +step:5254 train loss:3.600576 +step:5255 train loss:3.668592 +step:5256 train loss:3.587731 +step:5257 train loss:3.640865 +step:5258 train loss:3.637587 +step:5259 train loss:3.624954 +step:5260 train loss:3.616266 +step:5261 train loss:3.607072 +step:5262 train loss:3.649115 +step:5263 train loss:3.635998 +step:5264 train loss:3.586476 +step:5265 train loss:3.667092 +step:5266 train loss:3.584111 +step:5267 train loss:3.594576 +step:5268 train loss:3.577086 +step:5269 train loss:3.580183 +step:5270 train loss:3.631849 +step:5271 train loss:3.557485 +step:5272 train loss:3.648270 +step:5273 train loss:3.554873 +step:5274 train loss:3.605289 +step:5275 train loss:3.622864 +step:5276 train loss:3.748947 +step:5277 train loss:3.646455 +step:5278 train loss:3.595721 +step:5279 train loss:3.641757 +step:5280 train loss:3.618801 +step:5281 train loss:3.613186 
+step:5282 train loss:3.582270 +step:5283 train loss:3.581949 +step:5284 train loss:3.588809 +step:5285 train loss:3.662077 +step:5286 train loss:3.566233 +step:5287 train loss:3.668382 +step:5288 train loss:3.644602 +step:5289 train loss:3.611603 +step:5290 train loss:3.665444 +step:5291 train loss:3.617660 +step:5292 train loss:3.636545 +step:5293 train loss:3.606763 +step:5294 train loss:3.594112 +step:5295 train loss:3.601200 +step:5296 train loss:3.591645 +step:5297 train loss:3.611759 +step:5298 train loss:3.559257 +step:5299 train loss:3.650495 +step:5300 train loss:3.601190 +step:5301 train loss:3.669635 +step:5302 train loss:3.677863 +step:5303 train loss:3.536395 +step:5304 train loss:3.572107 +step:5305 train loss:3.546236 +step:5306 train loss:3.582600 +step:5307 train loss:3.583670 +step:5308 train loss:3.679621 +step:5309 train loss:3.629293 +step:5310 train loss:3.614770 +step:5311 train loss:3.684686 +step:5312 train loss:3.570199 +step:5313 train loss:3.658733 +step:5314 train loss:3.649758 +step:5315 train loss:3.608499 +step:5316 train loss:3.642194 +step:5317 train loss:3.660229 +step:5318 train loss:3.613776 +step:5319 train loss:3.640279 +step:5320 train loss:3.592375 +step:5321 train loss:3.713565 +step:5322 train loss:3.623318 +step:5323 train loss:3.624350 +step:5324 train loss:3.568470 +step:5325 train loss:3.646647 +step:5326 train loss:3.643106 +step:5327 train loss:3.533561 +step:5328 train loss:3.668308 +step:5329 train loss:3.633496 +step:5330 train loss:3.634563 +step:5331 train loss:3.684242 +step:5332 train loss:3.605812 +step:5333 train loss:3.670517 +step:5334 train loss:3.646377 +step:5335 train loss:3.701218 +step:5336 train loss:3.737990 +step:5337 train loss:3.572617 +step:5338 train loss:3.578966 +step:5339 train loss:3.601814 +step:5340 train loss:3.623897 +step:5341 train loss:3.644195 +step:5342 train loss:3.542036 +step:5343 train loss:3.700747 +step:5344 train loss:3.581133 +step:5345 train loss:3.581699 +step:5346 train loss:3.591646 +step:5347 train loss:3.608467 +step:5348 train loss:3.653869 +step:5349 train loss:3.593655 +step:5350 train loss:3.632269 +step:5351 train loss:3.708253 +step:5352 train loss:3.747329 +step:5353 train loss:3.658105 +step:5354 train loss:3.627042 +step:5355 train loss:3.593455 +step:5356 train loss:3.616002 +step:5357 train loss:3.593608 +step:5358 train loss:3.616614 +step:5359 train loss:3.627656 +step:5360 train loss:3.598282 +step:5361 train loss:3.601333 +step:5362 train loss:3.589915 +step:5363 train loss:3.579062 +step:5364 train loss:3.581269 +step:5365 train loss:3.615723 +step:5366 train loss:3.647357 +step:5367 train loss:3.573106 +step:5368 train loss:3.643361 +step:5369 train loss:3.661094 +step:5370 train loss:3.559929 +step:5371 train loss:3.611773 +step:5372 train loss:3.632236 +step:5373 train loss:3.673939 +step:5374 train loss:3.556388 +step:5375 train loss:3.601470 +step:5376 train loss:3.669293 +step:5377 train loss:3.603876 +step:5378 train loss:3.580193 +step:5379 train loss:3.585135 +step:5380 train loss:3.618130 +step:5381 train loss:3.657955 +step:5382 train loss:3.562996 +step:5383 train loss:3.622501 +step:5384 train loss:3.640831 +step:5385 train loss:3.645385 +step:5386 train loss:3.624224 +step:5387 train loss:3.628279 +step:5388 train loss:3.643371 +step:5389 train loss:3.572073 +step:5390 train loss:3.602071 +step:5391 train loss:3.542386 +step:5392 train loss:3.606654 +step:5393 train loss:3.593143 +step:5394 train loss:3.592608 +step:5395 train loss:3.665780 +step:5396 train 
loss:3.628822 +step:5397 train loss:3.649430 +step:5398 train loss:3.648589 +step:5399 train loss:3.673710 +step:5400 train loss:3.682506 +step:5401 train loss:3.642539 +step:5402 train loss:3.746632 +step:5403 train loss:3.653390 +step:5404 train loss:3.626345 +step:5405 train loss:3.700534 +step:5406 train loss:3.657386 +step:5407 train loss:3.585271 +step:5408 train loss:3.732275 +step:5409 train loss:3.570927 +step:5410 train loss:3.638211 +step:5411 train loss:3.621946 +step:5412 train loss:3.592996 +step:5413 train loss:3.646438 +step:5414 train loss:3.622751 +step:5415 train loss:3.603512 +step:5416 train loss:3.598069 +step:5417 train loss:3.662647 +step:5418 train loss:3.680251 +step:5419 train loss:3.584659 +step:5420 train loss:3.645889 +step:5421 train loss:3.615250 +step:5422 train loss:3.656411 +step:5423 train loss:3.636956 +step:5424 train loss:3.539531 +step:5425 train loss:3.605433 +step:5426 train loss:3.693264 +step:5427 train loss:3.585136 +step:5428 train loss:3.624909 +step:5429 train loss:3.561050 +step:5430 train loss:3.592294 +step:5431 train loss:3.656392 +step:5432 train loss:3.640411 +step:5433 train loss:3.637983 +step:5434 train loss:3.589236 +step:5435 train loss:3.584900 +step:5436 train loss:3.586884 +step:5437 train loss:3.625400 +step:5438 train loss:3.603552 +step:5439 train loss:3.611354 +step:5440 train loss:3.651196 +step:5441 train loss:3.674010 +step:5442 train loss:3.594348 +step:5443 train loss:3.593138 +step:5444 train loss:3.541478 +step:5445 train loss:3.624168 +step:5446 train loss:3.596027 +step:5447 train loss:3.631501 +step:5448 train loss:3.688064 +step:5449 train loss:3.575871 +step:5450 train loss:3.610473 +step:5451 train loss:3.603994 +step:5452 train loss:3.617802 +step:5453 train loss:3.674570 +step:5454 train loss:3.600857 +step:5455 train loss:3.586098 +step:5456 train loss:3.727703 +step:5457 train loss:3.608677 +step:5458 train loss:3.642200 +step:5459 train loss:3.586776 +step:5460 train loss:3.604290 +step:5461 train loss:3.606719 +step:5462 train loss:3.607271 +step:5463 train loss:3.615978 +step:5464 train loss:3.616388 +step:5465 train loss:3.563606 +step:5466 train loss:3.635015 +step:5467 train loss:3.618500 +step:5468 train loss:3.623703 +step:5469 train loss:3.719467 +step:5470 train loss:3.612684 +step:5471 train loss:3.686360 +step:5472 train loss:3.634803 +step:5473 train loss:3.540855 +step:5474 train loss:3.873960 +step:5475 train loss:3.547173 +step:5476 train loss:3.626374 +step:5477 train loss:3.627233 +step:5478 train loss:3.625016 +step:5479 train loss:3.769285 +step:5480 train loss:3.612624 +step:5481 train loss:3.678347 +step:5482 train loss:3.591822 +step:5483 train loss:3.622758 +step:5484 train loss:3.665092 +step:5485 train loss:3.579302 +step:5486 train loss:3.626318 +step:5487 train loss:3.628420 +step:5488 train loss:3.541203 +step:5489 train loss:3.644125 +step:5490 train loss:3.590936 +step:5491 train loss:3.690536 +step:5492 train loss:3.622351 +step:5493 train loss:3.550760 +step:5494 train loss:3.603836 +step:5495 train loss:3.583416 +step:5496 train loss:3.580064 +step:5497 train loss:3.700039 +step:5498 train loss:3.566568 +step:5499 train loss:3.703706 +step:5500 validation loss:3.546178 total_sharp:1.0890e-02 L1_sharp:2.5259e-02 L2_sharp:4.5931e-03 L3_sharp:4.4784e-03 L4_sharp:1.9626e-03 L5_sharp:3.1430e-03 L6_sharp:3.1199e-03 L7_sharp:3.5873e-03 L8_sharp:2.5497e-03 L9_sharp:1.8736e-03 L10_sharp:1.4110e-03 L11_sharp:1.7609e-03 L12_sharp:3.3133e-03 total_fnorm:1.0940e+00 
total_l1_linf:9.7244e+03 total_spectral:1.0940e+00 L1_fnorm:2.3342e-01 L2_fnorm:2.3062e-01 L3_fnorm:2.4138e-01 L4_fnorm:2.5371e-01 L5_fnorm:2.5473e-01 L6_fnorm:2.5587e-01 L7_fnorm:2.5328e-01 L8_fnorm:2.5586e-01 L9_fnorm:2.5922e-01 L10_fnorm:2.6077e-01 L11_fnorm:2.6013e-01 L12_fnorm:2.5209e-01 L1_l1linf:3.3105e-01 L2_l1linf:3.2932e-01 L3_l1linf:3.2368e-01 L4_l1linf:3.0839e-01 L5_l1linf:2.9390e-01 L6_l1linf:2.8681e-01 L7_l1linf:2.6701e-01 L8_l1linf:2.6353e-01 L9_l1linf:2.7812e-01 L10_l1linf:2.6962e-01 L11_l1linf:2.8157e-01 L12_l1linf:2.6905e-01 L1_spectral:4.1446e-02 L2_spectral:4.6741e-02 L3_spectral:4.3514e-02 L4_spectral:4.2105e-02 L5_spectral:3.6866e-02 L6_spectral:3.0871e-02 L7_spectral:2.9387e-02 L8_spectral:2.9059e-02 L9_spectral:2.9219e-02 L10_spectral:3.0744e-02 L11_spectral:3.2357e-02 L12_spectral:4.3700e-02 ip_v_neg_g:3.8701e-03 cos_v_neg_g:1.8916e-03 v_norm:1.0940e+00 g_norm:1.8700e+00 hv_norm:5.7741e-01 cos_v_hv:2.0633e-02 hg_norm:2.7503e+02 cos_g_hg:4.5004e-01 v_par:4.1559e-04 v_perp:1.0940e+00 L1_cos_v_neg_g:-2.3510e-03 L1_v_norm:2.3342e-01 L2_cos_v_neg_g:2.5615e-03 L2_v_norm:2.3062e-01 L3_cos_v_neg_g:5.6190e-03 L3_v_norm:2.4138e-01 L4_cos_v_neg_g:4.7216e-03 L4_v_norm:2.5371e-01 L5_cos_v_neg_g:1.0140e-03 L5_v_norm:2.5473e-01 L6_cos_v_neg_g:2.4154e-03 L6_v_norm:2.5587e-01 L7_cos_v_neg_g:4.6323e-03 L7_v_norm:2.5328e-01 L8_cos_v_neg_g:5.1275e-03 L8_v_norm:2.5586e-01 L9_cos_v_neg_g:5.9046e-03 L9_v_norm:2.5922e-01 L10_cos_v_neg_g:4.6015e-03 L10_v_norm:2.6077e-01 L11_cos_v_neg_g:3.7354e-03 L11_v_norm:2.6013e-01 L12_cos_v_neg_g:6.5515e-03 L12_v_norm:2.5209e-01 +step:5500 train loss:3.621803 +step:5501 train loss:3.690758 +step:5502 train loss:3.639222 +step:5503 train loss:3.606109 +step:5504 train loss:3.651164 +step:5505 train loss:3.615814 +step:5506 train loss:3.654862 +step:5507 train loss:3.646117 +step:5508 train loss:3.664560 +step:5509 train loss:3.674934 +step:5510 train loss:3.651821 +step:5511 train loss:3.643090 +step:5512 train loss:3.766938 +step:5513 train loss:3.568799 +step:5514 train loss:3.626585 +step:5515 train loss:3.653865 +step:5516 train loss:3.676816 +step:5517 train loss:3.634606 +step:5518 train loss:3.660748 +step:5519 train loss:3.695518 +step:5520 train loss:3.601829 +step:5521 train loss:3.614652 +step:5522 train loss:3.582626 +step:5523 train loss:3.629725 +step:5524 train loss:3.677158 +step:5525 train loss:3.585732 +step:5526 train loss:3.599054 +step:5527 train loss:3.615875 +step:5528 train loss:3.725752 +step:5529 train loss:3.686670 +step:5530 train loss:3.655916 +step:5531 train loss:3.589584 +step:5532 train loss:3.615106 +step:5533 train loss:3.652378 +step:5534 train loss:3.565475 +step:5535 train loss:3.614977 +step:5536 train loss:3.551368 +step:5537 train loss:3.600274 +step:5538 train loss:3.593046 +step:5539 train loss:3.539110 +step:5540 train loss:3.757249 +step:5541 train loss:3.574108 +step:5542 train loss:3.622114 +step:5543 train loss:3.609592 +step:5544 train loss:3.605227 +step:5545 train loss:3.596077 +step:5546 train loss:3.635260 +step:5547 train loss:3.562527 +step:5548 train loss:3.604218 +step:5549 train loss:3.611229 +step:5550 train loss:3.632009 +step:5551 train loss:3.639631 +step:5552 train loss:3.591120 +step:5553 train loss:3.618910 +step:5554 train loss:3.591551 +step:5555 train loss:3.600404 +step:5556 train loss:3.614918 +step:5557 train loss:3.679482 +step:5558 train loss:3.602593 +step:5559 train loss:3.604358 +step:5560 train loss:3.600585 +step:5561 train loss:3.633022 +step:5562 train loss:3.588471 
+step:5563 train loss:3.570454 +step:5564 train loss:3.604772 +step:5565 train loss:3.669053 +step:5566 train loss:3.573826 +step:5567 train loss:3.690601 +step:5568 train loss:3.813700 +step:5569 train loss:3.603304 +step:5570 train loss:3.532340 +step:5571 train loss:3.623305 +step:5572 train loss:3.559963 +step:5573 train loss:3.549412 +step:5574 train loss:3.520890 +step:5575 train loss:3.616408 +step:5576 train loss:3.599014 +step:5577 train loss:3.606819 +step:5578 train loss:3.632948 +step:5579 train loss:3.589650 +step:5580 train loss:3.616933 +step:5581 train loss:3.636899 +step:5582 train loss:3.616401 +step:5583 train loss:3.628442 +step:5584 train loss:3.748404 +step:5585 train loss:3.653221 +step:5586 train loss:3.586344 +step:5587 train loss:3.619545 +step:5588 train loss:3.636804 +step:5589 train loss:3.632390 +step:5590 train loss:3.695858 +step:5591 train loss:3.559064 +step:5592 train loss:3.737995 +step:5593 train loss:3.617144 +step:5594 train loss:3.627450 +step:5595 train loss:3.615818 +step:5596 train loss:3.568525 +step:5597 train loss:3.583398 +step:5598 train loss:3.586424 +step:5599 train loss:3.594730 +step:5600 train loss:3.635879 +step:5601 train loss:3.661229 +step:5602 train loss:3.594664 +step:5603 train loss:3.630918 +step:5604 train loss:3.632031 +step:5605 train loss:3.601747 +step:5606 train loss:3.605504 +step:5607 train loss:3.635216 +step:5608 train loss:3.579213 +step:5609 train loss:3.631145 +step:5610 train loss:3.587152 +step:5611 train loss:3.625712 +step:5612 train loss:3.654825 +step:5613 train loss:3.616063 +step:5614 train loss:3.582206 +step:5615 train loss:3.682116 +step:5616 train loss:3.580200 +step:5617 train loss:3.667810 +step:5618 train loss:3.654306 +step:5619 train loss:3.609771 +step:5620 train loss:3.606716 +step:5621 train loss:3.682209 +step:5622 train loss:3.565764 +step:5623 train loss:3.600801 +step:5624 train loss:3.591877 +step:5625 train loss:3.626255 +step:5626 train loss:3.618681 +step:5627 train loss:3.592170 +step:5628 train loss:3.631446 +step:5629 train loss:3.609352 +step:5630 train loss:3.543615 +step:5631 train loss:3.583099 +step:5632 train loss:3.624631 +step:5633 train loss:3.621496 +step:5634 train loss:3.573265 +step:5635 train loss:3.614001 +step:5636 train loss:3.590392 +step:5637 train loss:3.730183 +step:5638 train loss:3.641415 +step:5639 train loss:3.617578 +step:5640 train loss:3.622302 +step:5641 train loss:3.664855 +step:5642 train loss:3.593398 +step:5643 train loss:3.612919 +step:5644 train loss:3.692476 +step:5645 train loss:3.648377 +step:5646 train loss:3.647829 +step:5647 train loss:3.640450 +step:5648 train loss:3.627235 +step:5649 train loss:3.544384 +step:5650 train loss:3.545789 +step:5651 train loss:3.623479 +step:5652 train loss:3.622865 +step:5653 train loss:3.585789 +step:5654 train loss:3.719825 +step:5655 train loss:3.584011 +step:5656 train loss:3.605359 +step:5657 train loss:3.671595 +step:5658 train loss:3.575994 +step:5659 train loss:3.609350 +step:5660 train loss:3.660259 +step:5661 train loss:3.599124 +step:5662 train loss:3.640924 +step:5663 train loss:3.529553 +step:5664 train loss:3.502921 +step:5665 train loss:3.621710 +step:5666 train loss:3.625525 +step:5667 train loss:3.658869 +step:5668 train loss:3.592265 +step:5669 train loss:3.605585 +step:5670 train loss:3.603748 +step:5671 train loss:3.592371 +step:5672 train loss:3.638008 +step:5673 train loss:3.608709 +step:5674 train loss:3.678812 +step:5675 train loss:3.593370 +step:5676 train loss:3.738983 +step:5677 train 
loss:3.640526 +step:5678 train loss:3.616661 +step:5679 train loss:3.611603 +step:5680 train loss:3.642852 +step:5681 train loss:3.607117 +step:5682 train loss:3.624284 +step:5683 train loss:3.580905 +step:5684 train loss:3.590885 +step:5685 train loss:3.636155 +step:5686 train loss:3.650218 +step:5687 train loss:3.594400 +step:5688 train loss:3.684738 +step:5689 train loss:3.593294 +step:5690 train loss:3.739377 +step:5691 train loss:3.570578 +step:5692 train loss:3.560387 +step:5693 train loss:3.566574 +step:5694 train loss:3.585212 +step:5695 train loss:3.600976 +step:5696 train loss:3.652032 +step:5697 train loss:3.576039 +step:5698 train loss:3.596174 +step:5699 train loss:3.609895 +step:5700 train loss:3.607198 +step:5701 train loss:3.599400 +step:5702 train loss:3.666732 +step:5703 train loss:3.569003 +step:5704 train loss:3.610928 +step:5705 train loss:3.619479 +step:5706 train loss:3.642918 +step:5707 train loss:3.560567 +step:5708 train loss:3.646020 +step:5709 train loss:3.650357 +step:5710 train loss:3.638593 +step:5711 train loss:3.663400 +step:5712 train loss:3.643165 +step:5713 train loss:3.569006 +step:5714 train loss:3.653274 +step:5715 train loss:3.611341 +step:5716 train loss:3.612782 +step:5717 train loss:3.644040 +step:5718 train loss:3.583478 +step:5719 train loss:3.655184 +step:5720 train loss:3.630116 +step:5721 train loss:3.560588 +step:5722 train loss:3.571167 +step:5723 train loss:3.652873 +step:5724 train loss:3.569446 +step:5725 train loss:3.642687 +step:5726 train loss:3.638076 +step:5727 train loss:3.594022 +step:5728 train loss:3.594710 +step:5729 train loss:3.596522 +step:5730 train loss:3.672319 +step:5731 train loss:3.540906 +step:5732 train loss:3.598990 +step:5733 train loss:3.589521 +step:5734 train loss:3.605253 +step:5735 train loss:3.597675 +step:5736 train loss:3.597815 +step:5737 train loss:3.622124 +step:5738 train loss:3.586324 +step:5739 train loss:3.596107 +step:5740 train loss:3.638302 +step:5741 train loss:3.610076 +step:5742 train loss:3.663689 +step:5743 train loss:3.630418 +step:5744 train loss:3.586634 +step:5745 train loss:3.594890 +step:5746 train loss:3.621990 +step:5747 train loss:3.605413 +step:5748 train loss:3.653883 +step:5749 train loss:3.610844 +step:5750 validation loss:3.534691 +step:5750 train loss:3.615678 +step:5751 train loss:3.630033 +step:5752 train loss:3.614794 +step:5753 train loss:3.585698 +step:5754 train loss:3.594120 +step:5755 train loss:3.612023 +step:5756 train loss:3.600351 +step:5757 train loss:3.665094 +step:5758 train loss:3.596502 +step:5759 train loss:3.560910 +step:5760 train loss:3.639001 +step:5761 train loss:3.634617 +step:5762 train loss:3.593720 +step:5763 train loss:3.619631 +step:5764 train loss:3.583837 +step:5765 train loss:3.699846 +step:5766 train loss:3.607909 +step:5767 train loss:3.644507 +step:5768 train loss:3.580188 +step:5769 train loss:3.703707 +step:5770 train loss:3.626353 +step:5771 train loss:3.652431 +step:5772 train loss:3.605319 +step:5773 train loss:3.584460 +step:5774 train loss:3.591123 +step:5775 train loss:3.659683 +step:5776 train loss:3.647546 +step:5777 train loss:3.566796 +step:5778 train loss:3.651206 +step:5779 train loss:3.614362 +step:5780 train loss:3.584788 +step:5781 train loss:3.649302 +step:5782 train loss:3.610815 +step:5783 train loss:3.568361 +step:5784 train loss:3.673459 +step:5785 train loss:3.663486 +step:5786 train loss:3.573293 +step:5787 train loss:3.619861 +step:5788 train loss:3.631066 +step:5789 train loss:3.576445 +step:5790 train loss:3.676790 
+step:5791 train loss:3.603623 +step:5792 train loss:3.871614 +step:5793 train loss:3.642831 +step:5794 train loss:3.665532 +step:5795 train loss:3.654752 +step:5796 train loss:3.639194 +step:5797 train loss:3.620944 +step:5798 train loss:3.623726 +step:5799 train loss:3.586533 +step:5800 train loss:3.748685 +step:5801 train loss:3.621367 +step:5802 train loss:3.607340 +step:5803 train loss:3.618711 +step:5804 train loss:3.638271 +step:5805 train loss:3.601451 +step:5806 train loss:3.642341 +step:5807 train loss:3.565240 +step:5808 train loss:3.597018 +step:5809 train loss:3.609144 +step:5810 train loss:3.578890 +step:5811 train loss:3.594197 +step:5812 train loss:3.577496 +step:5813 train loss:3.586233 +step:5814 train loss:3.580454 +step:5815 train loss:3.582266 +step:5816 train loss:3.644698 +step:5817 train loss:3.657531 +step:5818 train loss:3.630530 +step:5819 train loss:3.681946 +step:5820 train loss:3.623451 +step:5821 train loss:3.611236 +step:5822 train loss:3.629948 +step:5823 train loss:3.634246 +step:5824 train loss:3.585474 +step:5825 train loss:3.676487 +step:5826 train loss:3.593015 +step:5827 train loss:3.556482 +step:5828 train loss:3.542907 +step:5829 train loss:3.606697 +step:5830 train loss:3.584108 +step:5831 train loss:3.550974 +step:5832 train loss:3.668869 +step:5833 train loss:3.646357 +step:5834 train loss:3.628670 +step:5835 train loss:3.578989 +step:5836 train loss:3.542833 +step:5837 train loss:3.663409 +step:5838 train loss:3.645615 +step:5839 train loss:3.622076 +step:5840 train loss:3.702292 +step:5841 train loss:3.626918 +step:5842 train loss:3.641912 +step:5843 train loss:3.585908 +step:5844 train loss:3.655382 +step:5845 train loss:3.564390 +step:5846 train loss:3.609556 +step:5847 train loss:3.635712 +step:5848 train loss:3.705366 +step:5849 train loss:3.595786 +step:5850 train loss:3.625740 +step:5851 train loss:3.595563 +step:5852 train loss:3.680881 +step:5853 train loss:3.773223 +step:5854 train loss:3.563986 +step:5855 train loss:3.625715 +step:5856 train loss:3.595815 +step:5857 train loss:3.608891 +step:5858 train loss:3.579174 +step:5859 train loss:3.583601 +step:5860 train loss:3.687137 +step:5861 train loss:3.572134 +step:5862 train loss:3.683278 +step:5863 train loss:3.622812 +step:5864 train loss:3.614313 +step:5865 train loss:3.617622 +step:5866 train loss:3.606092 +step:5867 train loss:3.687499 +step:5868 train loss:3.613518 +step:5869 train loss:3.635723 +step:5870 train loss:3.610890 +step:5871 train loss:3.595599 +step:5872 train loss:3.624232 +step:5873 train loss:3.599603 +step:5874 train loss:3.681580 +step:5875 train loss:3.611744 +step:5876 train loss:3.590667 +step:5877 train loss:3.599307 +step:5878 train loss:3.600890 +step:5879 train loss:3.571237 +step:5880 train loss:3.767534 +step:5881 train loss:3.609349 +step:5882 train loss:3.581485 +step:5883 train loss:3.587606 +step:5884 train loss:3.603927 +step:5885 train loss:3.596406 +step:5886 train loss:3.620639 +step:5887 train loss:3.616420 +step:5888 train loss:3.598302 +step:5889 train loss:3.574311 +step:5890 train loss:3.622757 +step:5891 train loss:3.566586 +step:5892 train loss:3.648997 +step:5893 train loss:3.572984 +step:5894 train loss:3.566240 +step:5895 train loss:3.566361 +step:5896 train loss:3.580404 +step:5897 train loss:3.646922 +step:5898 train loss:3.865984 +step:5899 train loss:3.599825 +step:5900 train loss:3.647201 +step:5901 train loss:3.598868 +step:5902 train loss:3.618545 +step:5903 train loss:3.605300 +step:5904 train loss:3.632762 +step:5905 train 
loss:3.741068 +step:5906 train loss:3.681891 +step:5907 train loss:3.624958 +step:5908 train loss:3.603536 +step:5909 train loss:3.591773 +step:5910 train loss:3.580539 +step:5911 train loss:3.594896 +step:5912 train loss:3.628430 +step:5913 train loss:3.630367 +step:5914 train loss:3.611144 +step:5915 train loss:3.738617 +step:5916 train loss:3.617185 +step:5917 train loss:3.587713 +step:5918 train loss:3.585332 +step:5919 train loss:3.615620 +step:5920 train loss:3.613753 +step:5921 train loss:3.582685 +step:5922 train loss:3.639349 +step:5923 train loss:3.632859 +step:5924 train loss:3.587242 +step:5925 train loss:3.709395 +step:5926 train loss:3.598794 +step:5927 train loss:3.574437 +step:5928 train loss:3.606112 +step:5929 train loss:3.628571 +step:5930 train loss:3.578998 +step:5931 train loss:3.562384 +step:5932 train loss:3.600752 +step:5933 train loss:3.658188 +step:5934 train loss:3.570918 +step:5935 train loss:3.595902 +step:5936 train loss:3.585805 +step:5937 train loss:3.567252 +step:5938 train loss:3.582894 +step:5939 train loss:3.561160 +step:5940 train loss:3.643972 +step:5941 train loss:3.577876 +step:5942 train loss:3.593350 +step:5943 train loss:3.598359 +step:5944 train loss:3.652443 +step:5945 train loss:3.583987 +step:5946 train loss:3.563707 +step:5947 train loss:3.576710 +step:5948 train loss:3.614326 +step:5949 train loss:3.661198 +step:5950 train loss:3.620133 +step:5951 train loss:3.622555 +step:5952 train loss:3.546247 +step:5953 train loss:3.587965 +step:5954 train loss:3.596300 +step:5955 train loss:3.603913 +step:5956 train loss:3.580359 +step:5957 train loss:3.549837 +step:5958 train loss:3.620244 +step:5959 train loss:3.579712 +step:5960 train loss:3.556073 +step:5961 train loss:3.581158 +step:5962 train loss:3.612297 +step:5963 train loss:3.647931 +step:5964 train loss:3.604937 +step:5965 train loss:3.619630 +step:5966 train loss:3.615894 +step:5967 train loss:3.581839 +step:5968 train loss:3.654436 +step:5969 train loss:3.595947 +step:5970 train loss:3.613727 +step:5971 train loss:3.563615 +step:5972 train loss:3.592916 +step:5973 train loss:3.584625 +step:5974 train loss:3.606951 +step:5975 train loss:3.575051 +step:5976 train loss:3.616881 +step:5977 train loss:3.573790 +step:5978 train loss:3.560091 +step:5979 train loss:3.598924 +step:5980 train loss:3.667183 +step:5981 train loss:3.561856 +step:5982 train loss:3.572504 +step:5983 train loss:3.636537 +step:5984 train loss:3.583342 +step:5985 train loss:3.623671 +step:5986 train loss:3.601616 +step:5987 train loss:3.586197 +step:5988 train loss:3.592610 +step:5989 train loss:3.612443 +step:5990 train loss:3.542198 +step:5991 train loss:3.605740 +step:5992 train loss:3.637543 +step:5993 train loss:3.591756 +step:5994 train loss:3.611000 +step:5995 train loss:3.498622 +step:5996 train loss:3.667122 +step:5997 train loss:3.650417 +step:5998 train loss:3.528514 +step:5999 train loss:3.556260 +step:6000 validation loss:3.524150 total_sharp:2.1824e-02 L1_sharp:1.3977e-02 L2_sharp:7.7087e-03 L3_sharp:9.2334e-03 L4_sharp:3.8011e-03 L5_sharp:5.3668e-03 L6_sharp:6.1816e-03 L7_sharp:7.4890e-03 L8_sharp:5.7400e-03 L9_sharp:3.2918e-03 L10_sharp:2.5481e-03 L11_sharp:3.1452e-03 L12_sharp:1.1266e-02 total_fnorm:1.1120e+00 total_l1_linf:9.9110e+03 total_spectral:1.1120e+00 L1_fnorm:2.4346e-01 L2_fnorm:2.4075e-01 L3_fnorm:2.4974e-01 L4_fnorm:2.6063e-01 L5_fnorm:2.5995e-01 L6_fnorm:2.6194e-01 L7_fnorm:2.5962e-01 L8_fnorm:2.6215e-01 L9_fnorm:2.6405e-01 L10_fnorm:2.6602e-01 L11_fnorm:2.6518e-01 L12_fnorm:2.5770e-01 
L1_l1linf:2.9658e-01 L2_l1linf:3.0787e-01 L3_l1linf:3.3965e-01 L4_l1linf:3.2132e-01 L5_l1linf:3.2770e-01 L6_l1linf:2.8532e-01 L7_l1linf:2.8329e-01 L8_l1linf:2.7194e-01 L9_l1linf:2.7593e-01 L10_l1linf:2.8309e-01 L11_l1linf:2.8783e-01 L12_l1linf:2.8333e-01 L1_spectral:4.1702e-02 L2_spectral:4.8328e-02 L3_spectral:4.9495e-02 L4_spectral:4.2261e-02 L5_spectral:3.9932e-02 L6_spectral:3.4109e-02 L7_spectral:3.1028e-02 L8_spectral:3.1300e-02 L9_spectral:3.0864e-02 L10_spectral:3.0306e-02 L11_spectral:3.2138e-02 L12_spectral:4.2357e-02 ip_v_neg_g:1.4681e-02 cos_v_neg_g:1.4312e-02 v_norm:1.1120e+00 g_norm:9.2248e-01 hv_norm:3.7206e-01 cos_v_hv:6.5230e-02 hg_norm:3.5131e+00 cos_g_hg:4.2240e-01 v_par:4.1577e-04 v_perp:1.1120e+00 L1_cos_v_neg_g:1.9539e-02 L1_v_norm:2.4346e-01 L2_cos_v_neg_g:2.1877e-02 L2_v_norm:2.4075e-01 L3_cos_v_neg_g:2.9647e-02 L3_v_norm:2.4974e-01 L4_cos_v_neg_g:2.1282e-02 L4_v_norm:2.6063e-01 L5_cos_v_neg_g:2.1072e-02 L5_v_norm:2.5995e-01 L6_cos_v_neg_g:1.8860e-02 L6_v_norm:2.6194e-01 L7_cos_v_neg_g:2.1153e-02 L7_v_norm:2.5962e-01 L8_cos_v_neg_g:1.8460e-02 L8_v_norm:2.6215e-01 L9_cos_v_neg_g:1.4380e-02 L9_v_norm:2.6405e-01 L10_cos_v_neg_g:1.2514e-02 L10_v_norm:2.6602e-01 L11_cos_v_neg_g:1.1282e-02 L11_v_norm:2.6518e-01 L12_cos_v_neg_g:1.7270e-02 L12_v_norm:2.5770e-01 +step:6000 train loss:3.605691 +step:6001 train loss:3.568606 +step:6002 train loss:3.596859 +step:6003 train loss:3.620321 +step:6004 train loss:3.570713 +step:6005 train loss:3.640244 +step:6006 train loss:3.547738 +step:6007 train loss:3.568367 +step:6008 train loss:3.581192 +step:6009 train loss:3.621567 +step:6010 train loss:3.615238 +step:6011 train loss:3.603317 +step:6012 train loss:3.571019 +step:6013 train loss:3.633230 +step:6014 train loss:3.646743 +step:6015 train loss:3.643476 +step:6016 train loss:3.614445 +step:6017 train loss:3.620091 +step:6018 train loss:3.558493 +step:6019 train loss:3.595504 +step:6020 train loss:3.582490 +step:6021 train loss:3.510339 +step:6022 train loss:3.621429 +step:6023 train loss:3.556746 +step:6024 train loss:3.631900 +step:6025 train loss:3.599797 +step:6026 train loss:3.568584 +step:6027 train loss:3.614455 +step:6028 train loss:3.528840 +step:6029 train loss:3.641543 +step:6030 train loss:3.612802 +step:6031 train loss:3.586868 +step:6032 train loss:3.543166 +step:6033 train loss:3.601269 +step:6034 train loss:3.625901 +step:6035 train loss:3.543575 +step:6036 train loss:3.517045 +step:6037 train loss:3.631739 +step:6038 train loss:3.639812 +step:6039 train loss:3.622539 +step:6040 train loss:3.579356 +step:6041 train loss:3.562618 +step:6042 train loss:3.540241 +step:6043 train loss:3.597273 +step:6044 train loss:3.721444 +step:6045 train loss:3.566684 +step:6046 train loss:3.573326 +step:6047 train loss:3.609186 +step:6048 train loss:3.621346 +step:6049 train loss:3.596547 +step:6050 train loss:3.561893 +step:6051 train loss:3.612132 +step:6052 train loss:3.587884 +step:6053 train loss:3.704611 +step:6054 train loss:3.741693 +step:6055 train loss:3.557982 +step:6056 train loss:3.550445 +step:6057 train loss:3.584653 +step:6058 train loss:3.612078 +step:6059 train loss:3.614793 +step:6060 train loss:3.620585 +step:6061 train loss:3.637539 +step:6062 train loss:3.586199 +step:6063 train loss:3.604777 +step:6064 train loss:3.596923 +step:6065 train loss:3.597337 +step:6066 train loss:3.585389 +step:6067 train loss:3.624560 +step:6068 train loss:3.567111 +step:6069 train loss:3.521890 +step:6070 train loss:3.670739 +step:6071 train loss:3.616861 +step:6072 train 
loss:3.557770 +step:6073 train loss:3.594363 +step:6074 train loss:3.680433 +step:6075 train loss:3.602862 +step:6076 train loss:3.608502 +step:6077 train loss:3.613410 +step:6078 train loss:3.549527 +step:6079 train loss:3.579448 +step:6080 train loss:3.583510 +step:6081 train loss:3.622002 +step:6082 train loss:3.571037 +step:6083 train loss:3.580082 +step:6084 train loss:3.647296 +step:6085 train loss:3.642413 +step:6086 train loss:3.544742 +step:6087 train loss:3.589177 +step:6088 train loss:3.573944 +step:6089 train loss:3.633585 +step:6090 train loss:3.636118 +step:6091 train loss:3.583758 +step:6092 train loss:3.547803 +step:6093 train loss:3.606907 +step:6094 train loss:3.522086 +step:6095 train loss:3.687521 +step:6096 train loss:3.555620 +step:6097 train loss:3.634100 +step:6098 train loss:3.612477 +step:6099 train loss:3.668221 +step:6100 train loss:3.662265 +step:6101 train loss:3.595788 +step:6102 train loss:3.712740 +step:6103 train loss:3.598151 +step:6104 train loss:3.708959 +step:6105 train loss:3.646732 +step:6106 train loss:3.582477 +step:6107 train loss:3.645362 +step:6108 train loss:3.608119 +step:6109 train loss:3.680764 +step:6110 train loss:3.611505 +step:6111 train loss:3.644412 +step:6112 train loss:3.582295 +step:6113 train loss:3.608929 +step:6114 train loss:3.581318 +step:6115 train loss:3.645976 +step:6116 train loss:3.587007 +step:6117 train loss:3.638208 +step:6118 train loss:3.618255 +step:6119 train loss:3.628355 +step:6120 train loss:3.773933 +step:6121 train loss:3.606598 +step:6122 train loss:3.619311 +step:6123 train loss:3.598541 +step:6124 train loss:3.576018 +step:6125 train loss:3.565536 +step:6126 train loss:3.586913 +step:6127 train loss:3.573297 +step:6128 train loss:3.543305 +step:6129 train loss:3.780790 +step:6130 train loss:3.560454 +step:6131 train loss:3.537508 +step:6132 train loss:3.613958 +step:6133 train loss:3.578169 +step:6134 train loss:3.604367 +step:6135 train loss:3.688025 +step:6136 train loss:3.711345 +step:6137 train loss:3.570404 +step:6138 train loss:3.626813 +step:6139 train loss:3.605320 +step:6140 train loss:3.605278 +step:6141 train loss:3.560838 +step:6142 train loss:3.627450 +step:6143 train loss:3.592261 +step:6144 train loss:3.610569 +step:6145 train loss:3.873890 +step:6146 train loss:3.697536 +step:6147 train loss:3.782435 +step:6148 train loss:3.547496 +step:6149 train loss:3.675722 +step:6150 train loss:3.628760 +step:6151 train loss:3.579525 +step:6152 train loss:3.576795 +step:6153 train loss:3.644316 +step:6154 train loss:3.729298 +step:6155 train loss:3.593330 +step:6156 train loss:3.698562 +step:6157 train loss:3.620282 +step:6158 train loss:3.613358 +step:6159 train loss:3.578183 +step:6160 train loss:3.747578 +step:6161 train loss:3.597669 +step:6162 train loss:3.615223 +step:6163 train loss:3.647524 +step:6164 train loss:3.562185 +step:6165 train loss:3.623415 +step:6166 train loss:3.618852 +step:6167 train loss:3.638150 +step:6168 train loss:3.606305 +step:6169 train loss:3.606246 +step:6170 train loss:3.609785 +step:6171 train loss:3.579084 +step:6172 train loss:3.564072 +step:6173 train loss:3.615358 +step:6174 train loss:3.545227 +step:6175 train loss:3.557229 +step:6176 train loss:3.537493 +step:6177 train loss:3.633103 +step:6178 train loss:3.580352 +step:6179 train loss:3.587502 +step:6180 train loss:3.593258 +step:6181 train loss:3.629391 +step:6182 train loss:3.511161 +step:6183 train loss:3.521549 +step:6184 train loss:3.640650 +step:6185 train loss:3.594473 +step:6186 train loss:3.554001 
+step:6187 train loss:3.596784 +step:6188 train loss:3.563065 +step:6189 train loss:3.607120 +step:6190 train loss:3.561532 +step:6191 train loss:3.595237 +step:6192 train loss:3.560799 +step:6193 train loss:3.629289 +step:6194 train loss:3.622185 +step:6195 train loss:3.601364 +step:6196 train loss:3.615299 +step:6197 train loss:3.638690 +step:6198 train loss:3.552081 +step:6199 train loss:3.575848 +step:6200 train loss:3.618032 +step:6201 train loss:3.662032 +step:6202 train loss:3.663839 +step:6203 train loss:3.660053 +step:6204 train loss:3.643805 +step:6205 train loss:3.585265 +step:6206 train loss:3.569514 +step:6207 train loss:3.632264 +step:6208 train loss:3.653826 +step:6209 train loss:3.625748 +step:6210 train loss:3.655212 +step:6211 train loss:3.573832 +step:6212 train loss:3.565003 +step:6213 train loss:3.578436 +step:6214 train loss:3.553520 +step:6215 train loss:3.730304 +step:6216 train loss:3.597769 +step:6217 train loss:3.660724 +step:6218 train loss:3.633017 +step:6219 train loss:3.647331 +step:6220 train loss:3.603411 +step:6221 train loss:3.568932 +step:6222 train loss:3.807279 +step:6223 train loss:3.577296 +step:6224 train loss:3.607846 +step:6225 train loss:3.597485 +step:6226 train loss:3.597955 +step:6227 train loss:3.600401 +step:6228 train loss:3.594814 +step:6229 train loss:3.631457 +step:6230 train loss:3.588243 +step:6231 train loss:3.703690 +step:6232 train loss:3.539997 +step:6233 train loss:3.583145 +step:6234 train loss:3.591984 +step:6235 train loss:3.616885 +step:6236 train loss:3.554818 +step:6237 train loss:3.575813 +step:6238 train loss:3.595262 +step:6239 train loss:3.585818 +step:6240 train loss:3.606348 +step:6241 train loss:3.592081 +step:6242 train loss:3.588330 +step:6243 train loss:3.624764 +step:6244 train loss:3.786766 +step:6245 train loss:3.577255 +step:6246 train loss:3.563433 +step:6247 train loss:3.557027 +step:6248 train loss:3.561849 +step:6249 train loss:3.503074 +step:6250 validation loss:3.511805 +step:6250 train loss:3.536728 +step:6251 train loss:3.554380 +step:6252 train loss:3.601951 +step:6253 train loss:3.610654 +step:6254 train loss:3.600506 +step:6255 train loss:3.565275 +step:6256 train loss:3.618441 +step:6257 train loss:3.617635 +step:6258 train loss:3.593601 +step:6259 train loss:3.601057 +step:6260 train loss:3.626836 +step:6261 train loss:3.651604 +step:6262 train loss:3.545604 +step:6263 train loss:3.577727 +step:6264 train loss:3.588764 +step:6265 train loss:3.574857 +step:6266 train loss:3.780919 +step:6267 train loss:3.583422 +step:6268 train loss:3.672123 +step:6269 train loss:3.548215 +step:6270 train loss:3.556343 +step:6271 train loss:3.606542 +step:6272 train loss:3.597122 +step:6273 train loss:3.798907 +step:6274 train loss:3.574832 +step:6275 train loss:3.608214 +step:6276 train loss:3.581200 +step:6277 train loss:3.565233 +step:6278 train loss:3.549004 +step:6279 train loss:3.603847 +step:6280 train loss:3.608780 +step:6281 train loss:3.542025 +step:6282 train loss:3.557508 +step:6283 train loss:3.643640 +step:6284 train loss:3.612865 +step:6285 train loss:3.613229 +step:6286 train loss:3.560158 +step:6287 train loss:3.584792 +step:6288 train loss:3.683052 +step:6289 train loss:3.546280 +step:6290 train loss:3.553725 +step:6291 train loss:3.578794 +step:6292 train loss:3.601471 +step:6293 train loss:3.590937 +step:6294 train loss:3.578042 +step:6295 train loss:3.598289 +step:6296 train loss:3.562532 +step:6297 train loss:3.688109 +step:6298 train loss:3.633159 +step:6299 train loss:3.528581 +step:6300 
train loss:3.608215 +step:6301 train loss:3.635796 +step:6302 train loss:3.617938 +step:6303 train loss:3.585554 +step:6304 train loss:3.599674 +step:6305 train loss:3.574226 +step:6306 train loss:3.584891 +step:6307 train loss:3.595809 +step:6308 train loss:3.567476 +step:6309 train loss:3.562246 +step:6310 train loss:3.616030 +step:6311 train loss:3.572112 +step:6312 train loss:3.612501 +step:6313 train loss:3.541501 +step:6314 train loss:3.566714 +step:6315 train loss:3.623331 +step:6316 train loss:3.542146 +step:6317 train loss:3.538292 +step:6318 train loss:3.646937 +step:6319 train loss:3.582111 +step:6320 train loss:3.599625 +step:6321 train loss:3.582233 +step:6322 train loss:3.579900 +step:6323 train loss:3.514063 +step:6324 train loss:3.520577 +step:6325 train loss:3.620591 +step:6326 train loss:3.540339 +step:6327 train loss:3.612687 +step:6328 train loss:3.593257 +step:6329 train loss:3.512775 +step:6330 train loss:3.540864 +step:6331 train loss:3.558874 +step:6332 train loss:3.693600 +step:6333 train loss:3.570936 +step:6334 train loss:3.547625 +step:6335 train loss:3.517381 +step:6336 train loss:3.551896 +step:6337 train loss:3.572505 +step:6338 train loss:3.530953 +step:6339 train loss:3.572853 +step:6340 train loss:3.550545 +step:6341 train loss:3.569596 +step:6342 train loss:3.565210 +step:6343 train loss:3.665771 +step:6344 train loss:3.514583 +step:6345 train loss:3.530439 +step:6346 train loss:3.611133 +step:6347 train loss:3.482942 +step:6348 train loss:3.580455 +step:6349 train loss:3.555366 +step:6350 train loss:3.531419 +step:6351 train loss:3.529557 +step:6352 train loss:3.543734 +step:6353 train loss:3.566609 +step:6354 train loss:3.579138 +step:6355 train loss:3.587926 +step:6356 train loss:3.598791 +step:6357 train loss:3.454677 +step:6358 train loss:3.548397 +step:6359 train loss:3.600653 +step:6360 train loss:3.515048 +step:6361 train loss:3.517967 +step:6362 train loss:3.553531 +step:6363 train loss:3.540107 +step:6364 train loss:3.521708 +step:6365 train loss:3.594907 +step:6366 train loss:3.606542 +step:6367 train loss:3.530069 +step:6368 train loss:3.577425 +step:6369 train loss:3.545035 +step:6370 train loss:3.592957 +step:6371 train loss:3.510331 +step:6372 train loss:3.539603 +step:6373 train loss:3.568206 +step:6374 train loss:3.595592 +step:6375 train loss:3.554255 +step:6376 train loss:3.581681 +step:6377 train loss:3.575362 +step:6378 train loss:3.530000 +step:6379 train loss:3.568081 +step:6380 train loss:3.610175 +step:6381 train loss:3.577235 +step:6382 train loss:3.525807 +step:6383 train loss:3.601356 +step:6384 train loss:3.569101 +step:6385 train loss:3.547796 +step:6386 train loss:3.582320 +step:6387 train loss:3.559518 +step:6388 train loss:3.604276 +step:6389 train loss:3.609291 +step:6390 train loss:3.562199 +step:6391 train loss:3.544405 +step:6392 train loss:3.531184 +step:6393 train loss:3.588036 +step:6394 train loss:3.572805 +step:6395 train loss:3.757357 +step:6396 train loss:3.576814 +step:6397 train loss:3.520733 +step:6398 train loss:3.592017 +step:6399 train loss:3.531071 +step:6400 train loss:3.609921 +step:6401 train loss:3.641410 +step:6402 train loss:3.579329 +step:6403 train loss:3.564740 +step:6404 train loss:3.544498 +step:6405 train loss:3.570866 +step:6406 train loss:3.578043 +step:6407 train loss:3.636254 +step:6408 train loss:3.527244 +step:6409 train loss:3.515525 +step:6410 train loss:3.642934 +step:6411 train loss:3.574405 +step:6412 train loss:3.575524 +step:6413 train loss:3.578057 +step:6414 train loss:3.528551 
+step:6415 train loss:3.586014 +step:6416 train loss:3.557877 +step:6417 train loss:3.527732 +step:6418 train loss:3.523069 +step:6419 train loss:3.605427 +step:6420 train loss:3.531988 +step:6421 train loss:3.558559 +step:6422 train loss:3.544905 +step:6423 train loss:3.561171 +step:6424 train loss:3.578478 +step:6425 train loss:3.574082 +step:6426 train loss:3.614990 +step:6427 train loss:3.577396 +step:6428 train loss:3.617804 +step:6429 train loss:3.579623 +step:6430 train loss:3.558053 +step:6431 train loss:3.530873 +step:6432 train loss:3.565213 +step:6433 train loss:3.577056 +step:6434 train loss:3.462904 +step:6435 train loss:3.639080 +step:6436 train loss:3.573725 +step:6437 train loss:3.543092 +step:6438 train loss:3.565715 +step:6439 train loss:3.548036 +step:6440 train loss:3.562013 +step:6441 train loss:3.552593 +step:6442 train loss:3.495993 +step:6443 train loss:3.546184 +step:6444 train loss:3.690184 +step:6445 train loss:3.588422 +step:6446 train loss:3.595956 +step:6447 train loss:3.574579 +step:6448 train loss:3.522914 +step:6449 train loss:3.547557 +step:6450 train loss:3.528213 +step:6451 train loss:3.518056 +step:6452 train loss:3.518113 +step:6453 train loss:3.566696 +step:6454 train loss:3.585660 +step:6455 train loss:3.577391 +step:6456 train loss:3.593412 +step:6457 train loss:3.574171 +step:6458 train loss:3.548915 +step:6459 train loss:3.526826 +step:6460 train loss:3.536932 +step:6461 train loss:3.535437 +step:6462 train loss:3.529649 +step:6463 train loss:3.626252 +step:6464 train loss:3.532844 +step:6465 train loss:3.576482 +step:6466 train loss:3.589951 +step:6467 train loss:3.517363 +step:6468 train loss:3.594166 +step:6469 train loss:3.503216 +step:6470 train loss:3.625176 +step:6471 train loss:3.533714 +step:6472 train loss:3.688884 +step:6473 train loss:3.574454 +step:6474 train loss:3.605782 +step:6475 train loss:3.551124 +step:6476 train loss:3.620389 +step:6477 train loss:3.551146 +step:6478 train loss:3.680839 +step:6479 train loss:3.595611 +step:6480 train loss:3.535249 +step:6481 train loss:3.589016 +step:6482 train loss:3.533015 +step:6483 train loss:3.591933 +step:6484 train loss:3.546523 +step:6485 train loss:3.609026 +step:6486 train loss:3.539841 +step:6487 train loss:3.542029 +step:6488 train loss:3.535558 +step:6489 train loss:3.536244 +step:6490 train loss:3.562657 +step:6491 train loss:3.531042 +step:6492 train loss:3.636692 +step:6493 train loss:3.539196 +step:6494 train loss:3.543931 +step:6495 train loss:3.541288 +step:6496 train loss:3.576240 +step:6497 train loss:3.593846 +step:6498 train loss:3.701353 +step:6499 train loss:3.671831 +step:6500 validation loss:3.506255 total_sharp:1.9053e-02 L1_sharp:1.3279e-02 L2_sharp:1.8434e-02 L3_sharp:8.3300e-03 L4_sharp:2.6473e-03 L5_sharp:3.4880e-03 L6_sharp:4.1715e-03 L7_sharp:6.3464e-03 L8_sharp:4.3626e-03 L9_sharp:2.7392e-03 L10_sharp:1.9115e-03 L11_sharp:2.5942e-03 L12_sharp:1.8764e-02 total_fnorm:1.1208e+00 total_l1_linf:9.9959e+03 total_spectral:1.1208e+00 L1_fnorm:2.4767e-01 L2_fnorm:2.4376e-01 L3_fnorm:2.4894e-01 L4_fnorm:2.6175e-01 L5_fnorm:2.6032e-01 L6_fnorm:2.6163e-01 L7_fnorm:2.5978e-01 L8_fnorm:2.6269e-01 L9_fnorm:2.6563e-01 L10_fnorm:2.6888e-01 L11_fnorm:2.6891e-01 L12_fnorm:2.6061e-01 L1_l1linf:3.4239e-01 L2_l1linf:3.3152e-01 L3_l1linf:3.3363e-01 L4_l1linf:3.5338e-01 L5_l1linf:3.1915e-01 L6_l1linf:2.8042e-01 L7_l1linf:2.7972e-01 L8_l1linf:2.7493e-01 L9_l1linf:2.7771e-01 L10_l1linf:2.7708e-01 L11_l1linf:2.8601e-01 L12_l1linf:2.8512e-01 L1_spectral:4.6694e-02 L2_spectral:5.0042e-02 
L3_spectral:4.9471e-02 L4_spectral:4.5899e-02 L5_spectral:4.1480e-02 L6_spectral:3.4576e-02 L7_spectral:3.1901e-02 L8_spectral:3.0339e-02 L9_spectral:2.9212e-02 L10_spectral:3.1387e-02 L11_spectral:3.2710e-02 L12_spectral:5.7005e-02 ip_v_neg_g:1.3076e-02 cos_v_neg_g:1.2399e-02 v_norm:1.1208e+00 g_norm:9.4096e-01 hv_norm:3.7219e-01 cos_v_hv:5.7375e-02 hg_norm:3.7080e+00 cos_g_hg:4.7208e-01 v_par:3.9947e-04 v_perp:1.1208e+00 L1_cos_v_neg_g:1.5194e-02 L1_v_norm:2.4767e-01 L2_cos_v_neg_g:3.2748e-02 L2_v_norm:2.4376e-01 L3_cos_v_neg_g:2.1144e-02 L3_v_norm:2.4894e-01 L4_cos_v_neg_g:1.0700e-02 L4_v_norm:2.6175e-01 L5_cos_v_neg_g:1.2933e-02 L5_v_norm:2.6032e-01 L6_cos_v_neg_g:1.0343e-02 L6_v_norm:2.6163e-01 L7_cos_v_neg_g:1.4728e-02 L7_v_norm:2.5978e-01 L8_cos_v_neg_g:1.4262e-02 L8_v_norm:2.6269e-01 L9_cos_v_neg_g:1.1630e-02 L9_v_norm:2.6563e-01 L10_cos_v_neg_g:1.1767e-02 L10_v_norm:2.6888e-01 L11_cos_v_neg_g:1.4320e-02 L11_v_norm:2.6891e-01 L12_cos_v_neg_g:3.2300e-02 L12_v_norm:2.6061e-01 +step:6500 train loss:3.520956 +step:6501 train loss:3.535757 +step:6502 train loss:3.554983 +step:6503 train loss:3.612474 +step:6504 train loss:3.562999 +step:6505 train loss:3.570545 +step:6506 train loss:3.526496 +step:6507 train loss:3.598089 +step:6508 train loss:3.562183 +step:6509 train loss:3.548719 +step:6510 train loss:3.554112 +step:6511 train loss:3.571659 +step:6512 train loss:3.511840 +step:6513 train loss:3.581861 +step:6514 train loss:3.452597 +step:6515 train loss:3.545621 +step:6516 train loss:3.596973 +step:6517 train loss:3.507339 +step:6518 train loss:3.550292 +step:6519 train loss:3.540123 +step:6520 train loss:3.629719 +step:6521 train loss:3.606684 +step:6522 train loss:3.617570 +step:6523 train loss:3.510269 +step:6524 train loss:3.598986 +step:6525 train loss:3.583476 +step:6526 train loss:3.524300 +step:6527 train loss:3.573497 +step:6528 train loss:3.594254 +step:6529 train loss:3.620780 +step:6530 train loss:3.523666 +step:6531 train loss:3.606396 +step:6532 train loss:3.528672 +step:6533 train loss:3.569003 +step:6534 train loss:3.573261 +step:6535 train loss:3.554544 +step:6536 train loss:3.683388 +step:6537 train loss:3.507658 +step:6538 train loss:3.604779 +step:6539 train loss:3.527741 +step:6540 train loss:3.642435 +step:6541 train loss:3.620819 +step:6542 train loss:3.577827 +step:6543 train loss:3.529469 +step:6544 train loss:3.514866 +step:6545 train loss:3.503644 +step:6546 train loss:3.563614 +step:6547 train loss:3.617681 +step:6548 train loss:3.561680 +step:6549 train loss:3.575425 +step:6550 train loss:3.686814 +step:6551 train loss:3.567163 +step:6552 train loss:3.559865 +step:6553 train loss:3.595804 +step:6554 train loss:3.489556 +step:6555 train loss:3.572707 +step:6556 train loss:3.446019 +step:6557 train loss:3.792598 +step:6558 train loss:3.627174 +step:6559 train loss:3.540182 +step:6560 train loss:3.578685 +step:6561 train loss:3.553731 +step:6562 train loss:3.572142 +step:6563 train loss:3.463536 +step:6564 train loss:3.565007 +step:6565 train loss:3.472868 +step:6566 train loss:3.586471 +step:6567 train loss:3.555972 +step:6568 train loss:3.601904 +step:6569 train loss:3.549874 +step:6570 train loss:3.585642 +step:6571 train loss:3.514594 +step:6572 train loss:3.590066 +step:6573 train loss:3.600216 +step:6574 train loss:3.588156 +step:6575 train loss:3.532904 +step:6576 train loss:3.522586 +step:6577 train loss:3.594747 +step:6578 train loss:3.463657 +step:6579 train loss:3.564692 +step:6580 train loss:3.522934 +step:6581 train loss:3.531965 +step:6582 
train loss:3.514033 +step:6583 train loss:3.608374 +step:6584 train loss:3.541955 +step:6585 train loss:3.576684 +step:6586 train loss:3.583509 +step:6587 train loss:3.593912 +step:6588 train loss:3.560035 +step:6589 train loss:3.584300 +step:6590 train loss:3.528742 +step:6591 train loss:3.583199 +step:6592 train loss:3.521463 +step:6593 train loss:3.531913 +step:6594 train loss:3.557977 +step:6595 train loss:3.539494 +step:6596 train loss:3.536913 +step:6597 train loss:3.562259 +step:6598 train loss:3.605265 +step:6599 train loss:3.499509 +step:6600 train loss:3.553840 +step:6601 train loss:3.615667 +step:6602 train loss:3.532954 +step:6603 train loss:3.564135 +step:6604 train loss:3.571750 +step:6605 train loss:3.555444 +step:6606 train loss:3.612934 +step:6607 train loss:3.531494 +step:6608 train loss:3.546276 +step:6609 train loss:3.517737 +step:6610 train loss:3.626863 +step:6611 train loss:3.548992 +step:6612 train loss:3.595083 +step:6613 train loss:3.510960 +step:6614 train loss:3.538718 +step:6615 train loss:3.539508 +step:6616 train loss:3.519184 +step:6617 train loss:3.558930 +step:6618 train loss:3.545151 +step:6619 train loss:3.517478 +step:6620 train loss:3.623129 +step:6621 train loss:3.502636 +step:6622 train loss:3.573558 +step:6623 train loss:3.506529 +step:6624 train loss:3.577571 +step:6625 train loss:3.619895 +step:6626 train loss:3.600078 +step:6627 train loss:3.538551 +step:6628 train loss:3.597620 +step:6629 train loss:3.500147 +step:6630 train loss:3.536264 +step:6631 train loss:3.572083 +step:6632 train loss:3.608664 +step:6633 train loss:3.563563 +step:6634 train loss:3.621148 +step:6635 train loss:3.523582 +step:6636 train loss:3.565100 +step:6637 train loss:3.530069 +step:6638 train loss:3.530903 +step:6639 train loss:3.541678 +step:6640 train loss:3.527711 +step:6641 train loss:3.540091 +step:6642 train loss:3.544013 +step:6643 train loss:3.621934 +step:6644 train loss:3.625084 +step:6645 train loss:3.500208 +step:6646 train loss:3.589783 +step:6647 train loss:3.548133 +step:6648 train loss:3.649806 +step:6649 train loss:3.576388 +step:6650 train loss:3.530298 +step:6651 train loss:3.572561 +step:6652 train loss:3.588207 +step:6653 train loss:3.530432 +step:6654 train loss:3.527882 +step:6655 train loss:3.566006 +step:6656 train loss:3.538450 +step:6657 train loss:3.560313 +step:6658 train loss:3.544423 +step:6659 train loss:3.697490 +step:6660 train loss:3.595394 +step:6661 train loss:3.522012 +step:6662 train loss:3.553676 +step:6663 train loss:3.489214 +step:6664 train loss:3.569928 +step:6665 train loss:3.575629 +step:6666 train loss:3.592980 +step:6667 train loss:3.506538 +step:6668 train loss:3.637505 +step:6669 train loss:3.518009 +step:6670 train loss:3.527984 +step:6671 train loss:3.610122 +step:6672 train loss:3.562891 +step:6673 train loss:3.569795 +step:6674 train loss:3.545897 +step:6675 train loss:3.562659 +step:6676 train loss:3.577894 +step:6677 train loss:3.528305 +step:6678 train loss:3.602618 +step:6679 train loss:3.636971 +step:6680 train loss:3.634660 +step:6681 train loss:3.587693 +step:6682 train loss:3.532675 +step:6683 train loss:3.556740 +step:6684 train loss:3.568482 +step:6685 train loss:3.577543 +step:6686 train loss:3.513049 +step:6687 train loss:3.531927 +step:6688 train loss:3.577722 +step:6689 train loss:3.583742 +step:6690 train loss:3.559571 +step:6691 train loss:3.592333 +step:6692 train loss:3.600966 +step:6693 train loss:3.633480 +step:6694 train loss:3.587260 +step:6695 train loss:3.557682 +step:6696 train loss:3.497980 
+step:6697 train loss:3.710975 +step:6698 train loss:3.556474 +step:6699 train loss:3.553745 +step:6700 train loss:3.565470 +step:6701 train loss:3.621863 +step:6702 train loss:3.516459 +step:6703 train loss:3.561750 +step:6704 train loss:3.545558 +step:6705 train loss:3.554508 +step:6706 train loss:3.534192 +step:6707 train loss:3.605488 +step:6708 train loss:3.561024 +step:6709 train loss:3.587999 +step:6710 train loss:3.579184 +step:6711 train loss:3.531971 +step:6712 train loss:3.521646 +step:6713 train loss:3.542662 +step:6714 train loss:3.587106 +step:6715 train loss:3.530957 +step:6716 train loss:3.608302 +step:6717 train loss:3.551553 +step:6718 train loss:3.574223 +step:6719 train loss:3.611414 +step:6720 train loss:3.536819 +step:6721 train loss:3.554309 +step:6722 train loss:3.531863 +step:6723 train loss:3.657455 +step:6724 train loss:3.516638 +step:6725 train loss:3.575402 +step:6726 train loss:3.532889 +step:6727 train loss:3.596755 +step:6728 train loss:3.690648 +step:6729 train loss:3.554482 +step:6730 train loss:3.551410 +step:6731 train loss:3.589812 +step:6732 train loss:3.468800 +step:6733 train loss:3.601338 +step:6734 train loss:3.544595 +step:6735 train loss:3.560272 +step:6736 train loss:3.563627 +step:6737 train loss:3.561321 +step:6738 train loss:3.595179 +step:6739 train loss:3.544974 +step:6740 train loss:3.500496 +step:6741 train loss:3.614193 +step:6742 train loss:3.570970 +step:6743 train loss:3.575198 +step:6744 train loss:3.472905 +step:6745 train loss:3.620309 +step:6746 train loss:3.552299 +step:6747 train loss:3.544277 +step:6748 train loss:3.618716 +step:6749 train loss:3.596486 +step:6750 validation loss:3.499805 +step:6750 train loss:3.517309 +step:6751 train loss:3.552806 +step:6752 train loss:3.556609 +step:6753 train loss:3.588241 +step:6754 train loss:3.570851 +step:6755 train loss:3.578790 +step:6756 train loss:3.522316 +step:6757 train loss:3.491185 +step:6758 train loss:3.669461 +step:6759 train loss:3.557386 +step:6760 train loss:3.613458 +step:6761 train loss:3.545843 +step:6762 train loss:3.567664 +step:6763 train loss:3.468218 +step:6764 train loss:3.548697 +step:6765 train loss:3.548140 +step:6766 train loss:3.545753 +step:6767 train loss:3.499930 +step:6768 train loss:3.506929 +step:6769 train loss:3.469294 +step:6770 train loss:3.552251 +step:6771 train loss:3.554286 +step:6772 train loss:3.562573 +step:6773 train loss:3.545738 +step:6774 train loss:3.560010 +step:6775 train loss:3.598302 +step:6776 train loss:3.555776 +step:6777 train loss:3.632202 +step:6778 train loss:3.517699 +step:6779 train loss:3.575799 +step:6780 train loss:3.502586 +step:6781 train loss:3.569181 +step:6782 train loss:3.479449 +step:6783 train loss:3.516630 +step:6784 train loss:3.541585 +step:6785 train loss:3.527228 +step:6786 train loss:3.543092 +step:6787 train loss:3.619729 +step:6788 train loss:3.557142 +step:6789 train loss:3.565669 +step:6790 train loss:3.562184 +step:6791 train loss:3.576012 +step:6792 train loss:3.575640 +step:6793 train loss:3.571768 +step:6794 train loss:3.543530 +step:6795 train loss:3.539989 +step:6796 train loss:3.547418 +step:6797 train loss:3.644791 +step:6798 train loss:3.546467 +step:6799 train loss:3.538268 +step:6800 train loss:3.507324 +step:6801 train loss:3.638200 +step:6802 train loss:3.586245 +step:6803 train loss:3.575492 +step:6804 train loss:3.605463 +step:6805 train loss:3.562719 +step:6806 train loss:3.501605 +step:6807 train loss:3.556923 +step:6808 train loss:3.540450 +step:6809 train loss:3.568577 +step:6810 
train loss:3.693727 +step:6811 train loss:3.593075 +step:6812 train loss:3.565506 +step:6813 train loss:3.582276 +step:6814 train loss:3.588549 +step:6815 train loss:3.629972 +step:6816 train loss:3.551036 +step:6817 train loss:3.574308 +step:6818 train loss:3.552348 +step:6819 train loss:3.538432 +step:6820 train loss:3.564076 +step:6821 train loss:3.528349 +step:6822 train loss:3.634920 +step:6823 train loss:3.612335 +step:6824 train loss:3.589790 +step:6825 train loss:3.535165 +step:6826 train loss:3.581369 +step:6827 train loss:3.564732 +step:6828 train loss:3.582503 +step:6829 train loss:3.569339 +step:6830 train loss:3.537660 +step:6831 train loss:3.496999 +step:6832 train loss:3.482568 +step:6833 train loss:3.501854 +step:6834 train loss:3.585934 +step:6835 train loss:3.561370 +step:6836 train loss:3.477642 +step:6837 train loss:3.544827 +step:6838 train loss:3.601676 +step:6839 train loss:3.686006 +step:6840 train loss:3.559289 +step:6841 train loss:3.515483 +step:6842 train loss:3.566307 +step:6843 train loss:3.670577 +step:6844 train loss:3.550944 +step:6845 train loss:3.603638 +step:6846 train loss:3.667650 +step:6847 train loss:3.596445 +step:6848 train loss:3.587854 +step:6849 train loss:3.612835 +step:6850 train loss:3.588711 +step:6851 train loss:3.512639 +step:6852 train loss:3.508924 +step:6853 train loss:3.495712 +step:6854 train loss:3.573293 +step:6855 train loss:3.546309 +step:6856 train loss:3.528575 +step:6857 train loss:3.579863 +step:6858 train loss:3.613795 +step:6859 train loss:3.518157 +step:6860 train loss:3.628466 +step:6861 train loss:3.651448 +step:6862 train loss:3.565600 +step:6863 train loss:3.561301 +step:6864 train loss:3.506521 +step:6865 train loss:3.578048 +step:6866 train loss:3.503110 +step:6867 train loss:3.683026 +step:6868 train loss:3.556024 +step:6869 train loss:3.594901 +step:6870 train loss:3.629259 +step:6871 train loss:3.544770 +step:6872 train loss:3.544209 +step:6873 train loss:3.560787 +step:6874 train loss:3.521672 +step:6875 train loss:3.523667 +step:6876 train loss:3.554428 +step:6877 train loss:3.594451 +step:6878 train loss:3.506933 +step:6879 train loss:3.552155 +step:6880 train loss:3.560696 +step:6881 train loss:3.524130 +step:6882 train loss:3.586677 +step:6883 train loss:3.560658 +step:6884 train loss:3.805659 +step:6885 train loss:3.577198 +step:6886 train loss:3.556739 +step:6887 train loss:3.494042 +step:6888 train loss:3.600027 +step:6889 train loss:3.482469 +step:6890 train loss:3.588310 +step:6891 train loss:3.595721 +step:6892 train loss:3.696778 +step:6893 train loss:3.524987 +step:6894 train loss:3.589329 +step:6895 train loss:3.590705 +step:6896 train loss:3.565013 +step:6897 train loss:3.521167 +step:6898 train loss:3.516532 +step:6899 train loss:3.604204 +step:6900 train loss:3.576993 +step:6901 train loss:3.527856 +step:6902 train loss:3.463930 +step:6903 train loss:3.508105 +step:6904 train loss:3.621172 +step:6905 train loss:3.654541 +step:6906 train loss:3.571247 +step:6907 train loss:3.590929 +step:6908 train loss:3.630067 +step:6909 train loss:3.616550 +step:6910 train loss:3.496535 +step:6911 train loss:3.621025 +step:6912 train loss:3.518940 +step:6913 train loss:3.551751 +step:6914 train loss:3.512367 +step:6915 train loss:3.538388 +step:6916 train loss:3.517719 +step:6917 train loss:3.637031 +step:6918 train loss:3.586493 +step:6919 train loss:3.578691 +step:6920 train loss:3.563179 +step:6921 train loss:3.630011 +step:6922 train loss:3.619427 +step:6923 train loss:3.485331 +step:6924 train loss:3.562674 
+step:6925 train loss:3.544497 +step:6926 train loss:3.576928 +step:6927 train loss:3.629869 +step:6928 train loss:3.513219 +step:6929 train loss:3.530666 +step:6930 train loss:3.560554 +step:6931 train loss:3.563439 +step:6932 train loss:3.795211 +step:6933 train loss:3.626510 +step:6934 train loss:3.563665 +step:6935 train loss:3.548117 +step:6936 train loss:3.588558 +step:6937 train loss:3.534459 +step:6938 train loss:3.599118 +step:6939 train loss:3.529755 +step:6940 train loss:3.584630 +step:6941 train loss:3.500015 +step:6942 train loss:3.588300 +step:6943 train loss:3.482917 +step:6944 train loss:3.578389 +step:6945 train loss:3.519964 +step:6946 train loss:3.603529 +step:6947 train loss:3.531522 +step:6948 train loss:3.526114 +step:6949 train loss:3.600187 +step:6950 train loss:3.590809 +step:6951 train loss:3.594882 +step:6952 train loss:3.524328 +step:6953 train loss:3.569674 +step:6954 train loss:3.632200 +step:6955 train loss:3.546016 +step:6956 train loss:3.584574 +step:6957 train loss:3.573052 +step:6958 train loss:3.536644 +step:6959 train loss:3.572678 +step:6960 train loss:3.542754 +step:6961 train loss:3.546295 +step:6962 train loss:3.531459 +step:6963 train loss:3.500241 +step:6964 train loss:3.545974 +step:6965 train loss:3.534447 +step:6966 train loss:3.581817 +step:6967 train loss:3.517924 +step:6968 train loss:3.554935 +step:6969 train loss:3.575828 +step:6970 train loss:3.551886 +step:6971 train loss:3.616034 +step:6972 train loss:3.560615 +step:6973 train loss:3.520118 +step:6974 train loss:3.646897 +step:6975 train loss:3.552203 +step:6976 train loss:3.524473 +step:6977 train loss:3.561647 +step:6978 train loss:3.556026 +step:6979 train loss:3.564484 +step:6980 train loss:3.546124 +step:6981 train loss:3.598314 +step:6982 train loss:3.554197 +step:6983 train loss:3.543599 +step:6984 train loss:3.662168 +step:6985 train loss:3.506913 +step:6986 train loss:3.499372 +step:6987 train loss:3.550048 +step:6988 train loss:3.553709 +step:6989 train loss:3.704903 +step:6990 train loss:3.561950 +step:6991 train loss:3.520828 +step:6992 train loss:3.567915 +step:6993 train loss:3.638675 +step:6994 train loss:3.583911 +step:6995 train loss:3.532558 +step:6996 train loss:3.538548 +step:6997 train loss:3.616825 +step:6998 train loss:3.517003 +step:6999 train loss:3.565709 +step:7000 validation loss:3.490454 total_sharp:1.7907e-02 L1_sharp:1.9272e-02 L2_sharp:1.8476e-02 L3_sharp:6.3586e-03 L4_sharp:2.8842e-03 L5_sharp:3.6486e-03 L6_sharp:4.1549e-03 L7_sharp:5.1120e-03 L8_sharp:4.1050e-03 L9_sharp:2.7826e-03 L10_sharp:1.9344e-03 L11_sharp:2.2416e-03 L12_sharp:8.2842e-03 total_fnorm:1.1195e+00 total_l1_linf:9.9863e+03 total_spectral:1.1195e+00 L1_fnorm:2.5717e-01 L2_fnorm:2.4594e-01 L3_fnorm:2.5039e-01 L4_fnorm:2.6165e-01 L5_fnorm:2.6071e-01 L6_fnorm:2.6119e-01 L7_fnorm:2.5983e-01 L8_fnorm:2.6064e-01 L9_fnorm:2.6540e-01 L10_fnorm:2.6647e-01 L11_fnorm:2.6563e-01 L12_fnorm:2.5931e-01 L1_l1linf:3.2549e-01 L2_l1linf:3.1248e-01 L3_l1linf:3.1903e-01 L4_l1linf:3.2299e-01 L5_l1linf:3.1200e-01 L6_l1linf:2.7981e-01 L7_l1linf:2.7789e-01 L8_l1linf:2.7177e-01 L9_l1linf:2.7824e-01 L10_l1linf:2.8865e-01 L11_l1linf:2.8753e-01 L12_l1linf:2.8383e-01 L1_spectral:5.5337e-02 L2_spectral:4.7773e-02 L3_spectral:4.9498e-02 L4_spectral:4.4279e-02 L5_spectral:3.9952e-02 L6_spectral:3.3268e-02 L7_spectral:2.8888e-02 L8_spectral:2.9120e-02 L9_spectral:2.8200e-02 L10_spectral:2.9553e-02 L11_spectral:3.0967e-02 L12_spectral:4.5077e-02 ip_v_neg_g:9.7755e-03 cos_v_neg_g:8.9311e-03 v_norm:1.1195e+00 
g_norm:9.7771e-01 hv_norm:4.7173e-01 cos_v_hv:4.2496e-02 hg_norm:1.0343e+01 cos_g_hg:3.1354e-01 v_par:3.0341e-04 v_perp:1.1195e+00 L1_cos_v_neg_g:1.7897e-02 L1_v_norm:2.5717e-01 L2_cos_v_neg_g:2.3650e-02 L2_v_norm:2.4594e-01 L3_cos_v_neg_g:1.7222e-02 L3_v_norm:2.5039e-01 L4_cos_v_neg_g:8.5378e-03 L4_v_norm:2.6165e-01 L5_cos_v_neg_g:7.7548e-03 L5_v_norm:2.6071e-01 L6_cos_v_neg_g:9.4349e-03 L6_v_norm:2.6119e-01 L7_cos_v_neg_g:8.5794e-03 L7_v_norm:2.5983e-01 L8_cos_v_neg_g:9.1089e-03 L8_v_norm:2.6064e-01 L9_cos_v_neg_g:7.6750e-03 L9_v_norm:2.6540e-01 L10_cos_v_neg_g:8.4074e-03 L10_v_norm:2.6647e-01 L11_cos_v_neg_g:9.2912e-03 L11_v_norm:2.6563e-01 L12_cos_v_neg_g:7.9659e-03 L12_v_norm:2.5931e-01 +step:7000 train loss:3.646706 +step:7001 train loss:3.565008 +step:7002 train loss:3.536793 +step:7003 train loss:3.559684 +step:7004 train loss:3.555355 +step:7005 train loss:3.540669 +step:7006 train loss:3.543333 +step:7007 train loss:3.596407 +step:7008 train loss:3.534103 +step:7009 train loss:3.583844 +step:7010 train loss:3.511387 +step:7011 train loss:3.571692 +step:7012 train loss:3.542166 +step:7013 train loss:3.614985 +step:7014 train loss:3.521974 +step:7015 train loss:3.578424 +step:7016 train loss:3.570233 +step:7017 train loss:3.534327 +step:7018 train loss:3.617496 +step:7019 train loss:3.543021 +step:7020 train loss:3.587548 +step:7021 train loss:3.532768 +step:7022 train loss:3.547483 +step:7023 train loss:3.568352 +step:7024 train loss:3.529738 +step:7025 train loss:3.579611 +step:7026 train loss:3.536034 +step:7027 train loss:3.597973 +step:7028 train loss:3.520192 +step:7029 train loss:3.509147 +step:7030 train loss:3.515008 +step:7031 train loss:3.567713 +step:7032 train loss:3.575732 +step:7033 train loss:3.548351 +step:7034 train loss:3.569059 +step:7035 train loss:3.621244 +step:7036 train loss:3.542729 +step:7037 train loss:3.567991 +step:7038 train loss:3.526195 +step:7039 train loss:3.581670 +step:7040 train loss:3.499543 +step:7041 train loss:3.590594 +step:7042 train loss:3.527092 +step:7043 train loss:3.497789 +step:7044 train loss:3.543696 +step:7045 train loss:3.544967 +step:7046 train loss:3.533942 +step:7047 train loss:3.574265 +step:7048 train loss:3.522815 +step:7049 train loss:3.532820 +step:7050 train loss:3.555248 +step:7051 train loss:3.573041 +step:7052 train loss:3.577151 +step:7053 train loss:3.536098 +step:7054 train loss:3.518686 +step:7055 train loss:3.584757 +step:7056 train loss:3.589673 +step:7057 train loss:3.508800 +step:7058 train loss:3.627754 +step:7059 train loss:3.540838 +step:7060 train loss:3.547830 +step:7061 train loss:3.522511 +step:7062 train loss:3.543172 +step:7063 train loss:3.602311 +step:7064 train loss:3.524884 +step:7065 train loss:3.574708 +step:7066 train loss:3.536552 +step:7067 train loss:3.572660 +step:7068 train loss:3.551255 +step:7069 train loss:3.509047 +step:7070 train loss:3.537606 +step:7071 train loss:3.504767 +step:7072 train loss:3.509609 +step:7073 train loss:3.501953 +step:7074 train loss:3.496719 +step:7075 train loss:3.518736 +step:7076 train loss:3.526953 +step:7077 train loss:3.538487 +step:7078 train loss:3.582752 +step:7079 train loss:3.594084 +step:7080 train loss:3.539251 +step:7081 train loss:3.559316 +step:7082 train loss:3.527323 +step:7083 train loss:3.554770 +step:7084 train loss:3.548781 +step:7085 train loss:3.508534 +step:7086 train loss:3.548854 +step:7087 train loss:3.524444 +step:7088 train loss:3.645933 +step:7089 train loss:3.543201 +step:7090 train loss:3.503934 +step:7091 train loss:3.525440 
+step:7092 train loss:3.501640 +step:7093 train loss:3.596543 +step:7094 train loss:3.520270 +step:7095 train loss:3.531126 +step:7096 train loss:3.548958 +step:7097 train loss:3.539267 +step:7098 train loss:3.562474 +step:7099 train loss:3.518661 +step:7100 train loss:3.549580 +step:7101 train loss:3.619829 +step:7102 train loss:3.509543 +step:7103 train loss:3.534440 +step:7104 train loss:3.568060 +step:7105 train loss:3.540404 +step:7106 train loss:3.526687 +step:7107 train loss:3.564533 +step:7108 train loss:3.633389 +step:7109 train loss:3.563648 +step:7110 train loss:3.585219 +step:7111 train loss:3.567957 +step:7112 train loss:3.555167 +step:7113 train loss:3.557207 +step:7114 train loss:3.572645 +step:7115 train loss:3.609227 +step:7116 train loss:3.541700 +step:7117 train loss:3.578134 +step:7118 train loss:3.591563 +step:7119 train loss:3.549630 +step:7120 train loss:3.609313 +step:7121 train loss:3.524561 +step:7122 train loss:3.525409 +step:7123 train loss:3.467879 +step:7124 train loss:3.620673 +step:7125 train loss:3.474088 +step:7126 train loss:3.639065 +step:7127 train loss:3.600403 +step:7128 train loss:3.541743 +step:7129 train loss:3.548711 +step:7130 train loss:3.541787 +step:7131 train loss:3.480863 +step:7132 train loss:3.521337 +step:7133 train loss:3.568228 +step:7134 train loss:3.501160 +step:7135 train loss:3.555374 +step:7136 train loss:3.539262 +step:7137 train loss:3.514893 +step:7138 train loss:3.503287 +step:7139 train loss:3.508269 +step:7140 train loss:3.541428 +step:7141 train loss:3.541925 +step:7142 train loss:3.534850 +step:7143 train loss:3.575176 +step:7144 train loss:3.524276 +step:7145 train loss:3.538994 +step:7146 train loss:3.546989 +step:7147 train loss:3.567637 +step:7148 train loss:3.572967 +step:7149 train loss:3.579282 +step:7150 train loss:3.554918 +step:7151 train loss:3.518718 +step:7152 train loss:3.491488 +step:7153 train loss:3.527123 +step:7154 train loss:3.543948 +step:7155 train loss:3.563733 +step:7156 train loss:3.530987 +step:7157 train loss:3.550573 +step:7158 train loss:3.506178 +step:7159 train loss:3.560812 +step:7160 train loss:3.572824 +step:7161 train loss:3.520258 +step:7162 train loss:3.568416 +step:7163 train loss:3.504423 +step:7164 train loss:3.540792 +step:7165 train loss:3.545625 +step:7166 train loss:3.602389 +step:7167 train loss:3.579660 +step:7168 train loss:3.558343 +step:7169 train loss:3.534895 +step:7170 train loss:3.567948 +step:7171 train loss:3.515811 +step:7172 train loss:3.680055 +step:7173 train loss:3.519972 +step:7174 train loss:3.563384 +step:7175 train loss:3.539284 +step:7176 train loss:3.545373 +step:7177 train loss:3.562675 +step:7178 train loss:3.561244 +step:7179 train loss:3.542412 +step:7180 train loss:3.547058 +step:7181 train loss:3.575233 +step:7182 train loss:3.527450 +step:7183 train loss:3.599102 +step:7184 train loss:3.691277 +step:7185 train loss:3.604042 +step:7186 train loss:3.541900 +step:7187 train loss:3.552465 +step:7188 train loss:3.541219 +step:7189 train loss:3.540271 +step:7190 train loss:3.543670 +step:7191 train loss:3.533359 +step:7192 train loss:3.567554 +step:7193 train loss:3.485950 +step:7194 train loss:3.549565 +step:7195 train loss:3.524549 +step:7196 train loss:3.572267 +step:7197 train loss:3.549862 +step:7198 train loss:3.607721 +step:7199 train loss:3.565222 +step:7200 train loss:3.555198 +step:7201 train loss:3.566883 +step:7202 train loss:3.545431 +step:7203 train loss:3.559870 +step:7204 train loss:3.527162 +step:7205 train loss:3.486666 +step:7206 train 
loss:3.514730 +step:7207 train loss:3.692395 +step:7208 train loss:3.521477 +step:7209 train loss:3.607547 +step:7210 train loss:3.542511 +step:7211 train loss:3.570603 +step:7212 train loss:3.654675 +step:7213 train loss:3.502565 +step:7214 train loss:3.572644 +step:7215 train loss:3.541620 +step:7216 train loss:3.588475 +step:7217 train loss:3.551295 +step:7218 train loss:3.638246 +step:7219 train loss:3.545990 +step:7220 train loss:3.622489 +step:7221 train loss:3.504024 +step:7222 train loss:3.586960 +step:7223 train loss:3.503940 +step:7224 train loss:3.566321 +step:7225 train loss:3.543186 +step:7226 train loss:3.514972 +step:7227 train loss:3.534615 +step:7228 train loss:3.520830 +step:7229 train loss:3.525169 +step:7230 train loss:3.512115 +step:7231 train loss:3.641058 +step:7232 train loss:3.511080 +step:7233 train loss:3.579895 +step:7234 train loss:3.570198 +step:7235 train loss:3.543056 +step:7236 train loss:3.582076 +step:7237 train loss:3.531647 +step:7238 train loss:3.569906 +step:7239 train loss:3.526202 +step:7240 train loss:3.522477 +step:7241 train loss:3.535474 +step:7242 train loss:3.517422 +step:7243 train loss:3.560196 +step:7244 train loss:3.536322 +step:7245 train loss:3.536754 +step:7246 train loss:3.580729 +step:7247 train loss:3.536252 +step:7248 train loss:3.575246 +step:7249 train loss:3.521199 +step:7250 validation loss:3.478917 +step:7250 train loss:3.545871 +step:7251 train loss:3.591425 +step:7252 train loss:3.505824 +step:7253 train loss:3.594244 +step:7254 train loss:3.532623 +step:7255 train loss:3.501671 +step:7256 train loss:3.545230 +step:7257 train loss:3.587716 +step:7258 train loss:3.547176 +step:7259 train loss:3.530219 +step:7260 train loss:3.613237 +step:7261 train loss:3.572045 +step:7262 train loss:3.527608 +step:7263 train loss:3.566931 +step:7264 train loss:3.553006 +step:7265 train loss:3.457449 +step:7266 train loss:3.579987 +step:7267 train loss:3.498361 +step:7268 train loss:3.564224 +step:7269 train loss:3.568482 +step:7270 train loss:3.522921 +step:7271 train loss:3.538748 +step:7272 train loss:3.547266 +step:7273 train loss:3.542884 +step:7274 train loss:3.519022 +step:7275 train loss:3.592733 +step:7276 train loss:3.497128 +step:7277 train loss:3.544563 +step:7278 train loss:3.514792 +step:7279 train loss:3.498005 +step:7280 train loss:3.565529 +step:7281 train loss:3.587112 +step:7282 train loss:3.587919 +step:7283 train loss:3.476595 +step:7284 train loss:3.521642 +step:7285 train loss:3.547298 +step:7286 train loss:3.679050 +step:7287 train loss:3.588647 +step:7288 train loss:3.542714 +step:7289 train loss:3.546799 +step:7290 train loss:3.595327 +step:7291 train loss:3.558313 +step:7292 train loss:3.626247 +step:7293 train loss:3.522204 +step:7294 train loss:3.608170 +step:7295 train loss:3.497338 +step:7296 train loss:3.492358 +step:7297 train loss:3.541146 +step:7298 train loss:3.517482 +step:7299 train loss:3.557895 +step:7300 train loss:3.543586 +step:7301 train loss:3.493960 +step:7302 train loss:3.640918 +step:7303 train loss:3.533369 +step:7304 train loss:3.474424 +step:7305 train loss:3.550057 +step:7306 train loss:3.576710 +step:7307 train loss:3.585624 +step:7308 train loss:3.533556 +step:7309 train loss:3.497271 +step:7310 train loss:3.527122 +step:7311 train loss:3.515119 +step:7312 train loss:3.551021 +step:7313 train loss:3.594728 +step:7314 train loss:3.488283 +step:7315 train loss:3.479949 +step:7316 train loss:3.626630 +step:7317 train loss:3.563582 +step:7318 train loss:3.504690 +step:7319 train loss:3.531364 
+step:7320 train loss:3.561361 +step:7321 train loss:3.590907 +step:7322 train loss:3.471397 +step:7323 train loss:3.525633 +step:7324 train loss:3.551015 +step:7325 train loss:3.516810 +step:7326 train loss:3.546809 +step:7327 train loss:3.518829 +step:7328 train loss:3.639098 +step:7329 train loss:3.485217 +step:7330 train loss:3.537626 +step:7331 train loss:3.533568 +step:7332 train loss:3.577517 +step:7333 train loss:3.556580 +step:7334 train loss:3.526171 +step:7335 train loss:3.521650 +step:7336 train loss:3.775580 +step:7337 train loss:3.560609 +step:7338 train loss:3.555248 +step:7339 train loss:3.566806 +step:7340 train loss:3.555068 +step:7341 train loss:3.545629 +step:7342 train loss:3.536524 +step:7343 train loss:3.550559 +step:7344 train loss:3.627834 +step:7345 train loss:3.486415 +step:7346 train loss:3.520892 +step:7347 train loss:3.514427 +step:7348 train loss:3.520546 +step:7349 train loss:3.621744 +step:7350 train loss:3.603204 +step:7351 train loss:3.540071 +step:7352 train loss:3.569087 +step:7353 train loss:3.549478 +step:7354 train loss:3.499938 +step:7355 train loss:3.686430 +step:7356 train loss:3.657408 +step:7357 train loss:3.579305 +step:7358 train loss:3.557566 +step:7359 train loss:3.526792 +step:7360 train loss:3.535935 +step:7361 train loss:3.488506 +step:7362 train loss:3.535833 +step:7363 train loss:3.551174 +step:7364 train loss:3.583622 +step:7365 train loss:3.569028 +step:7366 train loss:3.534322 +step:7367 train loss:3.608166 +step:7368 train loss:3.589829 +step:7369 train loss:3.579979 +step:7370 train loss:3.545550 +step:7371 train loss:3.505249 +step:7372 train loss:3.563253 +step:7373 train loss:3.582379 +step:7374 train loss:3.674838 +step:7375 train loss:3.500762 +step:7376 train loss:3.522379 +step:7377 train loss:3.567090 +step:7378 train loss:3.522130 +step:7379 train loss:3.646055 +step:7380 train loss:3.608078 +step:7381 train loss:3.574176 +step:7382 train loss:3.536517 +step:7383 train loss:3.631276 +step:7384 train loss:3.574597 +step:7385 train loss:3.526320 +step:7386 train loss:3.536658 +step:7387 train loss:3.577765 +step:7388 train loss:3.609431 +step:7389 train loss:3.552825 +step:7390 train loss:3.491192 +step:7391 train loss:3.530650 +step:7392 train loss:3.588298 +step:7393 train loss:3.553431 +step:7394 train loss:3.594702 +step:7395 train loss:3.482763 +step:7396 train loss:3.579335 +step:7397 train loss:3.510878 +step:7398 train loss:3.524465 +step:7399 train loss:3.575254 +step:7400 train loss:3.575671 +step:7401 train loss:3.492834 +step:7402 train loss:3.613365 +step:7403 train loss:3.496032 +step:7404 train loss:3.564278 +step:7405 train loss:3.689564 +step:7406 train loss:3.517321 +step:7407 train loss:3.562191 +step:7408 train loss:3.556882 +step:7409 train loss:3.532502 +step:7410 train loss:3.702473 +step:7411 train loss:3.546482 +step:7412 train loss:3.552250 +step:7413 train loss:3.602403 +step:7414 train loss:3.510669 +step:7415 train loss:3.570560 +step:7416 train loss:3.455944 +step:7417 train loss:3.571800 +step:7418 train loss:3.556283 +step:7419 train loss:3.524475 +step:7420 train loss:3.514661 +step:7421 train loss:3.551117 +step:7422 train loss:3.509647 +step:7423 train loss:3.649137 +step:7424 train loss:3.711803 +step:7425 train loss:3.601295 +step:7426 train loss:3.566386 +step:7427 train loss:3.534482 +step:7428 train loss:3.554173 +step:7429 train loss:3.574918 +step:7430 train loss:3.500334 +step:7431 train loss:3.505003 +step:7432 train loss:3.514472 +step:7433 train loss:3.610270 +step:7434 train 
loss:3.526945 +step:7435 train loss:3.608803 +step:7436 train loss:3.651120 +step:7437 train loss:3.472212 +step:7438 train loss:3.533024 +step:7439 train loss:3.543953 +step:7440 train loss:3.518306 +step:7441 train loss:3.485167 +step:7442 train loss:3.713639 +step:7443 train loss:3.536843 +step:7444 train loss:3.579089 +step:7445 train loss:3.509101 +step:7446 train loss:3.531613 +step:7447 train loss:3.457378 +step:7448 train loss:3.513789 +step:7449 train loss:3.527980 +step:7450 train loss:3.561441 +step:7451 train loss:3.593603 +step:7452 train loss:3.522300 +step:7453 train loss:3.547076 +step:7454 train loss:3.529820 +step:7455 train loss:3.542291 +step:7456 train loss:3.514983 +step:7457 train loss:3.522724 +step:7458 train loss:3.562656 +step:7459 train loss:3.539058 +step:7460 train loss:3.548530 +step:7461 train loss:3.584702 +step:7462 train loss:3.520974 +step:7463 train loss:3.580955 +step:7464 train loss:3.504922 +step:7465 train loss:3.515529 +step:7466 train loss:3.517943 +step:7467 train loss:3.525677 +step:7468 train loss:3.576335 +step:7469 train loss:3.508780 +step:7470 train loss:3.539598 +step:7471 train loss:3.527509 +step:7472 train loss:3.563454 +step:7473 train loss:3.504269 +step:7474 train loss:3.489779 +step:7475 train loss:3.521180 +step:7476 train loss:3.559114 +step:7477 train loss:3.533247 +step:7478 train loss:3.534194 +step:7479 train loss:3.545431 +step:7480 train loss:3.824893 +step:7481 train loss:3.477597 +step:7482 train loss:3.542748 +step:7483 train loss:3.540493 +step:7484 train loss:3.560435 +step:7485 train loss:3.546946 +step:7486 train loss:3.571646 +step:7487 train loss:3.565036 +step:7488 train loss:3.584264 +step:7489 train loss:3.579156 +step:7490 train loss:3.526566 +step:7491 train loss:3.548136 +step:7492 train loss:3.656594 +step:7493 train loss:3.630136 +step:7494 train loss:3.657013 +step:7495 train loss:3.524985 +step:7496 train loss:3.512867 +step:7497 train loss:3.610611 +step:7498 train loss:3.544988 +step:7499 train loss:3.581901 +step:7500 validation loss:3.476305 total_sharp:1.5616e-02 L1_sharp:1.3085e-02 L2_sharp:9.5580e-03 L3_sharp:5.7094e-03 L4_sharp:2.1758e-03 L5_sharp:3.0163e-03 L6_sharp:4.3073e-03 L7_sharp:5.8006e-03 L8_sharp:3.9829e-03 L9_sharp:2.9758e-03 L10_sharp:1.8448e-03 L11_sharp:2.1102e-03 L12_sharp:4.4348e-03 total_fnorm:1.1164e+00 total_l1_linf:9.9436e+03 total_spectral:1.1164e+00 L1_fnorm:2.5305e-01 L2_fnorm:2.4706e-01 L3_fnorm:2.4981e-01 L4_fnorm:2.5939e-01 L5_fnorm:2.5929e-01 L6_fnorm:2.6070e-01 L7_fnorm:2.5903e-01 L8_fnorm:2.6084e-01 L9_fnorm:2.6541e-01 L10_fnorm:2.6601e-01 L11_fnorm:2.6474e-01 L12_fnorm:2.5469e-01 L1_l1linf:3.3295e-01 L2_l1linf:3.1831e-01 L3_l1linf:3.5882e-01 L4_l1linf:3.0403e-01 L5_l1linf:2.9824e-01 L6_l1linf:2.8864e-01 L7_l1linf:2.8362e-01 L8_l1linf:2.7832e-01 L9_l1linf:2.7405e-01 L10_l1linf:2.7475e-01 L11_l1linf:2.9733e-01 L12_l1linf:2.8676e-01 L1_spectral:4.4594e-02 L2_spectral:4.8758e-02 L3_spectral:4.8435e-02 L4_spectral:4.3223e-02 L5_spectral:3.7793e-02 L6_spectral:3.5651e-02 L7_spectral:3.0979e-02 L8_spectral:2.8504e-02 L9_spectral:2.8316e-02 L10_spectral:2.8654e-02 L11_spectral:2.9589e-02 L12_spectral:4.3076e-02 ip_v_neg_g:1.0245e-02 cos_v_neg_g:8.7304e-03 v_norm:1.1164e+00 g_norm:1.0512e+00 hv_norm:3.7346e-01 cos_v_hv:4.6682e-02 hg_norm:6.5723e+00 cos_g_hg:5.0155e-01 v_par:2.6329e-04 v_perp:1.1164e+00 L1_cos_v_neg_g:1.3085e-02 L1_v_norm:2.5305e-01 L2_cos_v_neg_g:1.5413e-02 L2_v_norm:2.4706e-01 L3_cos_v_neg_g:1.6556e-02 L3_v_norm:2.4981e-01 L4_cos_v_neg_g:9.4050e-03 
L4_v_norm:2.5939e-01 L5_cos_v_neg_g:7.3714e-03 L5_v_norm:2.5929e-01 L6_cos_v_neg_g:9.5202e-03 L6_v_norm:2.6070e-01 L7_cos_v_neg_g:1.1382e-02 L7_v_norm:2.5903e-01 L8_cos_v_neg_g:1.1974e-02 L8_v_norm:2.6084e-01 L9_cos_v_neg_g:1.2315e-02 L9_v_norm:2.6541e-01 L10_cos_v_neg_g:1.1276e-02 L10_v_norm:2.6601e-01 L11_cos_v_neg_g:9.2659e-03 L11_v_norm:2.6474e-01 L12_cos_v_neg_g:1.0429e-02 L12_v_norm:2.5469e-01 +step:7500 train loss:3.527094 +step:7501 train loss:3.517259 +step:7502 train loss:3.508367 +step:7503 train loss:3.484912 +step:7504 train loss:3.510066 +step:7505 train loss:3.499856 +step:7506 train loss:3.559210 +step:7507 train loss:3.478031 +step:7508 train loss:3.547191 +step:7509 train loss:3.518872 +step:7510 train loss:3.550965 +step:7511 train loss:3.559533 +step:7512 train loss:3.814474 +step:7513 train loss:3.511384 +step:7514 train loss:3.542019 +step:7515 train loss:3.505211 +step:7516 train loss:3.515356 +step:7517 train loss:3.551286 +step:7518 train loss:3.527885 +step:7519 train loss:3.536985 +step:7520 train loss:3.601810 +step:7521 train loss:3.490003 +step:7522 train loss:3.545605 +step:7523 train loss:3.577754 +step:7524 train loss:3.526294 +step:7525 train loss:3.529571 +step:7526 train loss:3.478118 +step:7527 train loss:3.483279 +step:7528 train loss:3.585616 +step:7529 train loss:3.558705 +step:7530 train loss:3.506876 +step:7531 train loss:3.580664 +step:7532 train loss:3.570038 +step:7533 train loss:3.496736 +step:7534 train loss:3.559370 +step:7535 train loss:3.563773 +step:7536 train loss:3.597962 +step:7537 train loss:3.613175 +step:7538 train loss:3.642035 +step:7539 train loss:3.541015 +step:7540 train loss:3.531506 +step:7541 train loss:3.581224 +step:7542 train loss:3.543353 +step:7543 train loss:3.499500 +step:7544 train loss:3.541474 +step:7545 train loss:3.529333 +step:7546 train loss:3.487211 +step:7547 train loss:3.532532 +step:7548 train loss:3.545231 +step:7549 train loss:3.527443 +step:7550 train loss:3.526965 +step:7551 train loss:3.626416 +step:7552 train loss:3.538445 +step:7553 train loss:3.576799 +step:7554 train loss:3.504992 +step:7555 train loss:3.592354 +step:7556 train loss:3.497046 +step:7557 train loss:3.590855 +step:7558 train loss:3.580115 +step:7559 train loss:3.536579 +step:7560 train loss:3.629947 +step:7561 train loss:3.602822 +step:7562 train loss:3.506985 +step:7563 train loss:3.502450 +step:7564 train loss:3.555017 +step:7565 train loss:3.574167 +step:7566 train loss:3.563892 +step:7567 train loss:3.581862 +step:7568 train loss:3.525362 +step:7569 train loss:3.584124 +step:7570 train loss:3.566544 +step:7571 train loss:3.651280 +step:7572 train loss:3.498085 +step:7573 train loss:3.566064 +step:7574 train loss:3.531339 +step:7575 train loss:3.523912 +step:7576 train loss:3.533957 +step:7577 train loss:3.547402 +step:7578 train loss:3.606141 +step:7579 train loss:3.538972 +step:7580 train loss:3.529752 +step:7581 train loss:3.515455 +step:7582 train loss:3.570777 +step:7583 train loss:3.512409 +step:7584 train loss:3.495266 +step:7585 train loss:3.462965 +step:7586 train loss:3.499074 +step:7587 train loss:3.559886 +step:7588 train loss:3.691484 +step:7589 train loss:3.509936 +step:7590 train loss:3.576427 +step:7591 train loss:3.580579 +step:7592 train loss:3.540060 +step:7593 train loss:3.562930 +step:7594 train loss:3.562673 +step:7595 train loss:3.530803 +step:7596 train loss:3.582829 +step:7597 train loss:3.488102 +step:7598 train loss:3.549593 +step:7599 train loss:3.541336 +step:7600 train loss:3.501424 +step:7601 train 
loss:3.615931 +step:7602 train loss:3.555709 +step:7603 train loss:3.514174 +step:7604 train loss:3.661460 +step:7605 train loss:3.550672 +step:7606 train loss:3.585232 +step:7607 train loss:3.533972 +step:7608 train loss:3.548024 +step:7609 train loss:3.580538 +step:7610 train loss:3.538741 +step:7611 train loss:3.514899 +step:7612 train loss:3.460121 +step:7613 train loss:3.505252 +step:7614 train loss:3.577649 +step:7615 train loss:3.536609 +step:7616 train loss:3.602723 +step:7617 train loss:3.501442 +step:7618 train loss:3.589429 +step:7619 train loss:3.534723 +step:7620 train loss:3.522972 +step:7621 train loss:3.466095 +step:7622 train loss:3.743283 +step:7623 train loss:3.756928 +step:7624 train loss:3.574127 +step:7625 train loss:3.610574 +step:7626 train loss:3.527160 +step:7627 train loss:3.598089 +step:7628 train loss:3.480563 +step:7629 train loss:3.539479 +step:7630 train loss:3.553765 +step:7631 train loss:3.535763 +step:7632 train loss:3.586072 +step:7633 train loss:3.649810 +step:7634 train loss:3.613831 +step:7635 train loss:3.514015 +step:7636 train loss:3.546798 +step:7637 train loss:3.491455 +step:7638 train loss:3.601609 +step:7639 train loss:3.530740 +step:7640 train loss:3.511333 +step:7641 train loss:3.541677 +step:7642 train loss:3.881509 +step:7643 train loss:3.632043 +step:7644 train loss:3.553762 +step:7645 train loss:3.544927 +step:7646 train loss:3.531290 +step:7647 train loss:3.522316 +step:7648 train loss:3.558600 +step:7649 train loss:3.518730 +step:7650 train loss:3.565973 +step:7651 train loss:3.587340 +step:7652 train loss:3.463814 +step:7653 train loss:3.658644 +step:7654 train loss:3.519492 +step:7655 train loss:3.538513 +step:7656 train loss:3.513957 +step:7657 train loss:3.529984 +step:7658 train loss:3.483181 +step:7659 train loss:3.547045 +step:7660 train loss:3.482392 +step:7661 train loss:3.497312 +step:7662 train loss:3.494769 +step:7663 train loss:3.545596 +step:7664 train loss:3.504063 +step:7665 train loss:3.477836 +step:7666 train loss:3.584386 +step:7667 train loss:3.499398 +step:7668 train loss:3.610086 +step:7669 train loss:3.541370 +step:7670 train loss:3.498637 +step:7671 train loss:3.551198 +step:7672 train loss:3.571386 +step:7673 train loss:3.535873 +step:7674 train loss:3.573785 +step:7675 train loss:3.629647 +step:7676 train loss:3.599199 +step:7677 train loss:3.622962 +step:7678 train loss:3.563249 +step:7679 train loss:3.587699 +step:7680 train loss:3.596217 +step:7681 train loss:3.563512 +step:7682 train loss:3.526428 +step:7683 train loss:3.532368 +step:7684 train loss:3.505722 +step:7685 train loss:3.482514 +step:7686 train loss:3.605699 +step:7687 train loss:3.520658 +step:7688 train loss:3.487591 +step:7689 train loss:3.534154 +step:7690 train loss:3.504002 +step:7691 train loss:3.528561 +step:7692 train loss:3.562914 +step:7693 train loss:3.566018 +step:7694 train loss:3.616011 +step:7695 train loss:3.543100 +step:7696 train loss:3.518607 +step:7697 train loss:3.508490 +step:7698 train loss:3.566943 +step:7699 train loss:3.562275 +step:7700 train loss:3.461585 +step:7701 train loss:3.579137 +step:7702 train loss:3.522041 +step:7703 train loss:3.524179 +step:7704 train loss:3.574223 +step:7705 train loss:3.535776 +step:7706 train loss:3.474917 +step:7707 train loss:3.589409 +step:7708 train loss:3.532162 +step:7709 train loss:3.550767 +step:7710 train loss:3.610051 +step:7711 train loss:3.572848 +step:7712 train loss:3.517962 +step:7713 train loss:3.598250 +step:7714 train loss:3.543218 +step:7715 train loss:3.494450 
+step:7716 train loss:3.534410 +step:7717 train loss:3.559639 +step:7718 train loss:3.562548 +step:7719 train loss:3.519013 +step:7720 train loss:3.534551 +step:7721 train loss:3.577520 +step:7722 train loss:3.504491 +step:7723 train loss:3.874252 +step:7724 train loss:3.541157 +step:7725 train loss:3.445417 +step:7726 train loss:3.528346 +step:7727 train loss:3.556188 +step:7728 train loss:3.513526 +step:7729 train loss:3.518211 +step:7730 train loss:3.541643 +step:7731 train loss:3.570518 +step:7732 train loss:3.593900 +step:7733 train loss:3.502334 +step:7734 train loss:3.528487 +step:7735 train loss:3.619215 +step:7736 train loss:3.563964 +step:7737 train loss:3.577998 +step:7738 train loss:3.483349 +step:7739 train loss:3.558352 +step:7740 train loss:3.507119 +step:7741 train loss:3.541738 +step:7742 train loss:3.538491 +step:7743 train loss:3.493302 +step:7744 train loss:3.616861 +step:7745 train loss:3.507048 +step:7746 train loss:3.483953 +step:7747 train loss:3.579562 +step:7748 train loss:3.556829 +step:7749 train loss:3.484329 +step:7750 validation loss:3.470377 +step:7750 train loss:3.644156 +step:7751 train loss:3.523453 +step:7752 train loss:3.515298 +step:7753 train loss:3.519369 +step:7754 train loss:3.493118 +step:7755 train loss:3.557191 +step:7756 train loss:3.583709 +step:7757 train loss:3.534897 +step:7758 train loss:3.505231 +step:7759 train loss:3.528626 +step:7760 train loss:3.557256 +step:7761 train loss:3.552978 +step:7762 train loss:3.537123 +step:7763 train loss:3.521819 +step:7764 train loss:3.525769 +step:7765 train loss:3.481253 +step:7766 train loss:3.549525 +step:7767 train loss:3.549330 +step:7768 train loss:3.506347 +step:7769 train loss:3.569501 +step:7770 train loss:3.585137 +step:7771 train loss:3.561718 +step:7772 train loss:3.533865 +step:7773 train loss:3.592346 +step:7774 train loss:3.490552 +step:7775 train loss:3.477406 +step:7776 train loss:3.579439 +step:7777 train loss:3.536709 +step:7778 train loss:3.494390 +step:7779 train loss:3.537598 +step:7780 train loss:3.532808 +step:7781 train loss:3.539009 +step:7782 train loss:3.525986 +step:7783 train loss:3.506883 +step:7784 train loss:3.504374 +step:7785 train loss:3.548272 +step:7786 train loss:3.502113 +step:7787 train loss:3.583467 +step:7788 train loss:3.534041 +step:7789 train loss:3.469765 +step:7790 train loss:3.528223 +step:7791 train loss:3.562173 +step:7792 train loss:3.519793 +step:7793 train loss:3.541874 +step:7794 train loss:3.530154 +step:7795 train loss:3.562080 +step:7796 train loss:3.526736 +step:7797 train loss:3.541735 +step:7798 train loss:3.536294 +step:7799 train loss:3.527874 +step:7800 train loss:3.481042 +step:7801 train loss:3.547851 +step:7802 train loss:3.527509 +step:7803 train loss:3.577493 +step:7804 train loss:3.538296 +step:7805 train loss:3.535842 +step:7806 train loss:3.553255 +step:7807 train loss:3.627965 +step:7808 train loss:3.487958 +step:7809 train loss:3.466261 +step:7810 train loss:3.554349 +step:7811 train loss:3.486609 +step:7812 train loss:3.510213 +step:7813 train loss:3.594557 +step:7814 train loss:3.672105 +step:7815 train loss:3.480504 +step:7816 train loss:3.565181 +step:7817 train loss:3.595610 +step:7818 train loss:3.493749 +step:7819 train loss:3.546705 +step:7820 train loss:3.589061 +step:7821 train loss:3.517647 +step:7822 train loss:3.477290 +step:7823 train loss:3.582265 +step:7824 train loss:3.538768 +step:7825 train loss:3.529137 +step:7826 train loss:3.526061 +step:7827 train loss:3.569942 +step:7828 train loss:3.561564 +step:7829 
train loss:3.512707 +step:7830 train loss:3.523150 +step:7831 train loss:3.528960 +step:7832 train loss:3.587486 +step:7833 train loss:3.570053 +step:7834 train loss:3.533173 +step:7835 train loss:3.553947 +step:7836 train loss:3.668209 +step:7837 train loss:3.550415 +step:7838 train loss:3.516883 +step:7839 train loss:3.482057 +step:7840 train loss:3.496021 +step:7841 train loss:3.589454 +step:7842 train loss:3.574431 +step:7843 train loss:3.629241 +step:7844 train loss:3.558593 +step:7845 train loss:3.537606 +step:7846 train loss:3.647253 +step:7847 train loss:3.535829 +step:7848 train loss:3.549567 +step:7849 train loss:3.561167 +step:7850 train loss:3.532946 +step:7851 train loss:3.557259 +step:7852 train loss:3.533564 +step:7853 train loss:3.504184 +step:7854 train loss:3.534120 +step:7855 train loss:3.534236 +step:7856 train loss:3.536391 +step:7857 train loss:3.523555 +step:7858 train loss:3.530605 +step:7859 train loss:3.540147 +step:7860 train loss:3.575292 +step:7861 train loss:3.560853 +step:7862 train loss:3.504637 +step:7863 train loss:3.609146 +step:7864 train loss:3.446606 +step:7865 train loss:3.525935 +step:7866 train loss:3.500895 +step:7867 train loss:3.544897 +step:7868 train loss:3.523452 +step:7869 train loss:3.526066 +step:7870 train loss:3.450572 +step:7871 train loss:3.512680 +step:7872 train loss:3.507108 +step:7873 train loss:3.583774 +step:7874 train loss:3.527675 +step:7875 train loss:3.534288 +step:7876 train loss:3.552353 +step:7877 train loss:3.506874 +step:7878 train loss:3.541826 +step:7879 train loss:3.881843 +step:7880 train loss:3.537301 +step:7881 train loss:3.559748 +step:7882 train loss:3.642246 +step:7883 train loss:3.454572 +step:7884 train loss:3.543572 +step:7885 train loss:3.528072 +step:7886 train loss:3.528389 +step:7887 train loss:3.521579 +step:7888 train loss:3.552879 +step:7889 train loss:3.603168 +step:7890 train loss:3.505275 +step:7891 train loss:3.557865 +step:7892 train loss:3.526026 +step:7893 train loss:3.502609 +step:7894 train loss:3.526110 +step:7895 train loss:3.506540 +step:7896 train loss:3.506413 +step:7897 train loss:3.529836 +step:7898 train loss:3.539663 +step:7899 train loss:3.524338 +step:7900 train loss:3.499760 +step:7901 train loss:3.486084 +step:7902 train loss:3.631843 +step:7903 train loss:3.482997 +step:7904 train loss:3.528425 +step:7905 train loss:3.600399 +step:7906 train loss:3.496281 +step:7907 train loss:3.524301 +step:7908 train loss:3.571928 +step:7909 train loss:3.624717 +step:7910 train loss:3.503722 +step:7911 train loss:3.526037 +step:7912 train loss:3.524925 +step:7913 train loss:3.502187 +step:7914 train loss:3.539455 +step:7915 train loss:3.640408 +step:7916 train loss:3.512897 +step:7917 train loss:3.571990 +step:7918 train loss:3.513437 +step:7919 train loss:3.503268 +step:7920 train loss:3.542783 +step:7921 train loss:3.548239 +step:7922 train loss:3.525898 +step:7923 train loss:3.572559 +step:7924 train loss:3.533584 +step:7925 train loss:3.555498 +step:7926 train loss:3.460612 +step:7927 train loss:3.735590 +step:7928 train loss:3.567103 +step:7929 train loss:3.529511 +step:7930 train loss:3.490133 +step:7931 train loss:3.514570 +step:7932 train loss:3.534741 +step:7933 train loss:3.550637 +step:7934 train loss:3.642813 +step:7935 train loss:3.565442 +step:7936 train loss:3.535555 +step:7937 train loss:3.485346 +step:7938 train loss:3.502820 +step:7939 train loss:3.546243 +step:7940 train loss:3.532945 +step:7941 train loss:3.561419 +step:7942 train loss:3.551111 +step:7943 train loss:3.562986 
+step:7944 train loss:3.485704 +step:7945 train loss:3.589679 +step:7946 train loss:3.533150 +step:7947 train loss:3.550006 +step:7948 train loss:3.504703 +step:7949 train loss:3.556419 +step:7950 train loss:3.610063 +step:7951 train loss:3.578460 +step:7952 train loss:3.721081 +step:7953 train loss:3.613406 +step:7954 train loss:3.519813 +step:7955 train loss:3.507596 +step:7956 train loss:3.508420 +step:7957 train loss:3.585389 +step:7958 train loss:3.596236 +step:7959 train loss:3.550731 +step:7960 train loss:3.614228 +step:7961 train loss:3.521865 +step:7962 train loss:3.492120 +step:7963 train loss:3.531212 +step:7964 train loss:3.527759 +step:7965 train loss:3.539379 +step:7966 train loss:3.508067 +step:7967 train loss:3.529327 +step:7968 train loss:3.544414 +step:7969 train loss:3.499524 +step:7970 train loss:3.468748 +step:7971 train loss:3.554039 +step:7972 train loss:3.530977 +step:7973 train loss:3.502359 +step:7974 train loss:3.541853 +step:7975 train loss:3.527538 +step:7976 train loss:3.547742 +step:7977 train loss:3.577646 +step:7978 train loss:3.598848 +step:7979 train loss:3.546162 +step:7980 train loss:3.453888 +step:7981 train loss:3.489356 +step:7982 train loss:3.539970 +step:7983 train loss:3.556838 +step:7984 train loss:3.596473 +step:7985 train loss:3.524012 +step:7986 train loss:3.545510 +step:7987 train loss:3.600270 +step:7988 train loss:3.575418 +step:7989 train loss:3.478324 +step:7990 train loss:3.493307 +step:7991 train loss:3.508967 +step:7992 train loss:3.532464 +step:7993 train loss:3.513674 +step:7994 train loss:3.563703 +step:7995 train loss:3.567084 +step:7996 train loss:3.535965 +step:7997 train loss:3.552687 +step:7998 train loss:3.577261 +step:7999 train loss:3.507541 +step:8000 validation loss:3.461669 total_sharp:9.8446e-03 L1_sharp:9.6657e-03 L2_sharp:4.0074e-03 L3_sharp:3.6665e-03 L4_sharp:1.9639e-03 L5_sharp:2.6269e-03 L6_sharp:2.5259e-03 L7_sharp:2.8255e-03 L8_sharp:2.3040e-03 L9_sharp:1.6530e-03 L10_sharp:1.3161e-03 L11_sharp:1.7392e-03 L12_sharp:7.9865e-03 total_fnorm:1.1135e+00 total_l1_linf:9.9262e+03 total_spectral:1.1135e+00 L1_fnorm:2.4763e-01 L2_fnorm:2.4429e-01 L3_fnorm:2.4960e-01 L4_fnorm:2.6116e-01 L5_fnorm:2.6087e-01 L6_fnorm:2.6116e-01 L7_fnorm:2.5840e-01 L8_fnorm:2.6024e-01 L9_fnorm:2.6424e-01 L10_fnorm:2.6663e-01 L11_fnorm:2.6557e-01 L12_fnorm:2.5874e-01 L1_l1linf:3.0627e-01 L2_l1linf:3.1070e-01 L3_l1linf:3.2483e-01 L4_l1linf:3.2564e-01 L5_l1linf:3.0215e-01 L6_l1linf:2.8719e-01 L7_l1linf:2.7761e-01 L8_l1linf:2.6257e-01 L9_l1linf:2.6967e-01 L10_l1linf:2.7671e-01 L11_l1linf:2.7889e-01 L12_l1linf:2.8281e-01 L1_spectral:4.2256e-02 L2_spectral:4.7815e-02 L3_spectral:4.3829e-02 L4_spectral:4.0571e-02 L5_spectral:3.6997e-02 L6_spectral:2.9540e-02 L7_spectral:2.7046e-02 L8_spectral:2.5926e-02 L9_spectral:2.6015e-02 L10_spectral:2.7020e-02 L11_spectral:2.8175e-02 L12_spectral:4.5448e-02 ip_v_neg_g:5.0409e-03 cos_v_neg_g:4.7129e-03 v_norm:1.1135e+00 g_norm:9.6054e-01 hv_norm:2.5329e-01 cos_v_hv:4.3279e-02 hg_norm:6.2157e+00 cos_g_hg:4.2957e-01 v_par:1.9703e-04 v_perp:1.1135e+00 L1_cos_v_neg_g:6.4389e-03 L1_v_norm:2.4763e-01 L2_cos_v_neg_g:7.8375e-03 L2_v_norm:2.4429e-01 L3_cos_v_neg_g:6.4381e-03 L3_v_norm:2.4960e-01 L4_cos_v_neg_g:3.9988e-03 L4_v_norm:2.6116e-01 L5_cos_v_neg_g:5.2037e-03 L5_v_norm:2.6087e-01 L6_cos_v_neg_g:4.3947e-03 L6_v_norm:2.6116e-01 L7_cos_v_neg_g:5.5237e-03 L7_v_norm:2.5840e-01 L8_cos_v_neg_g:4.4180e-03 L8_v_norm:2.6024e-01 L9_cos_v_neg_g:4.4181e-03 L9_v_norm:2.6424e-01 L10_cos_v_neg_g:4.5571e-03 L10_v_norm:2.6663e-01 
L11_cos_v_neg_g:5.3926e-03 L11_v_norm:2.6557e-01 L12_cos_v_neg_g:1.3952e-02 L12_v_norm:2.5874e-01 +step:8000 train loss:3.576781 +step:8001 train loss:3.538811 +step:8002 train loss:3.556261 +step:8003 train loss:3.572352 +step:8004 train loss:3.549696 +step:8005 train loss:3.469802 +step:8006 train loss:3.549749 +step:8007 train loss:3.517123 +step:8008 train loss:3.544600 +step:8009 train loss:3.616820 +step:8010 train loss:3.839864 +step:8011 train loss:3.511041 +step:8012 train loss:3.586213 +step:8013 train loss:3.541754 +step:8014 train loss:3.559317 +step:8015 train loss:3.550160 +step:8016 train loss:3.539286 +step:8017 train loss:3.561593 +step:8018 train loss:3.522852 +step:8019 train loss:3.489471 +step:8020 train loss:3.527330 +step:8021 train loss:3.601442 +step:8022 train loss:3.515846 +step:8023 train loss:3.549589 +step:8024 train loss:3.433820 +step:8025 train loss:3.530929 +step:8026 train loss:3.540884 +step:8027 train loss:3.546589 +step:8028 train loss:3.603562 +step:8029 train loss:3.530897 +step:8030 train loss:3.493348 +step:8031 train loss:3.547991 +step:8032 train loss:3.531823 +step:8033 train loss:3.483928 +step:8034 train loss:3.534469 +step:8035 train loss:3.507605 +step:8036 train loss:3.501375 +step:8037 train loss:3.469984 +step:8038 train loss:3.481775 +step:8039 train loss:3.576961 +step:8040 train loss:3.511015 +step:8041 train loss:3.508937 +step:8042 train loss:3.545621 +step:8043 train loss:3.488105 +step:8044 train loss:3.499729 +step:8045 train loss:3.564802 +step:8046 train loss:3.494894 +step:8047 train loss:3.497215 +step:8048 train loss:3.528560 +step:8049 train loss:3.578647 +step:8050 train loss:3.517437 +step:8051 train loss:3.492845 +step:8052 train loss:3.554389 +step:8053 train loss:3.503597 +step:8054 train loss:3.543458 +step:8055 train loss:3.569464 +step:8056 train loss:3.537633 +step:8057 train loss:3.614976 +step:8058 train loss:3.518155 +step:8059 train loss:3.580793 +step:8060 train loss:3.547997 +step:8061 train loss:3.437788 +step:8062 train loss:3.577106 +step:8063 train loss:3.543853 +step:8064 train loss:3.498907 +step:8065 train loss:3.563382 +step:8066 train loss:3.523555 +step:8067 train loss:3.584796 +step:8068 train loss:3.512886 +step:8069 train loss:3.536867 +step:8070 train loss:3.500866 +step:8071 train loss:3.510498 +step:8072 train loss:3.550746 +step:8073 train loss:3.502300 +step:8074 train loss:3.513944 +step:8075 train loss:3.507861 +step:8076 train loss:3.548349 +step:8077 train loss:3.556253 +step:8078 train loss:3.496576 +step:8079 train loss:3.520838 +step:8080 train loss:3.505317 +step:8081 train loss:3.523554 +step:8082 train loss:3.538503 +step:8083 train loss:3.452679 +step:8084 train loss:3.581988 +step:8085 train loss:3.457562 +step:8086 train loss:3.583566 +step:8087 train loss:3.475910 +step:8088 train loss:3.527429 +step:8089 train loss:3.555716 +step:8090 train loss:3.583388 +step:8091 train loss:3.524060 +step:8092 train loss:3.506267 +step:8093 train loss:3.510394 +step:8094 train loss:3.513940 +step:8095 train loss:3.541017 +step:8096 train loss:3.542419 +step:8097 train loss:3.468913 +step:8098 train loss:3.478947 +step:8099 train loss:3.479357 +step:8100 train loss:3.527815 +step:8101 train loss:3.597949 +step:8102 train loss:3.540805 +step:8103 train loss:3.489671 +step:8104 train loss:3.542940 +step:8105 train loss:3.540727 +step:8106 train loss:3.500436 +step:8107 train loss:3.482892 +step:8108 train loss:3.498755 +step:8109 train loss:3.494696 +step:8110 train loss:3.556920 +step:8111 
train loss:3.482002 +step:8112 train loss:3.499962 +step:8113 train loss:3.492382 +step:8114 train loss:3.434965 +step:8115 train loss:3.490361 +step:8116 train loss:3.522858 +step:8117 train loss:3.491916 +step:8118 train loss:3.486840 +step:8119 train loss:3.529100 +step:8120 train loss:3.478581 +step:8121 train loss:3.533625 +step:8122 train loss:3.515489 +step:8123 train loss:3.522547 +step:8124 train loss:3.482929 +step:8125 train loss:3.470460 +step:8126 train loss:3.459238 +step:8127 train loss:3.555447 +step:8128 train loss:3.562392 +step:8129 train loss:3.480190 +step:8130 train loss:3.511410 +step:8131 train loss:3.479788 +step:8132 train loss:3.548700 +step:8133 train loss:3.470490 +step:8134 train loss:3.508686 +step:8135 train loss:3.498844 +step:8136 train loss:3.510368 +step:8137 train loss:3.571540 +step:8138 train loss:3.483987 +step:8139 train loss:3.554174 +step:8140 train loss:3.483369 +step:8141 train loss:3.508827 +step:8142 train loss:3.485317 +step:8143 train loss:3.541183 +step:8144 train loss:3.517808 +step:8145 train loss:3.483933 +step:8146 train loss:3.490633 +step:8147 train loss:3.512648 +step:8148 train loss:3.604045 +step:8149 train loss:3.519573 +step:8150 train loss:3.501203 +step:8151 train loss:3.492658 +step:8152 train loss:3.589956 +step:8153 train loss:3.467399 +step:8154 train loss:3.482790 +step:8155 train loss:3.505405 +step:8156 train loss:3.495079 +step:8157 train loss:3.509097 +step:8158 train loss:3.521451 +step:8159 train loss:3.535824 +step:8160 train loss:3.490404 +step:8161 train loss:3.531672 +step:8162 train loss:3.464534 +step:8163 train loss:3.522295 +step:8164 train loss:3.510675 +step:8165 train loss:3.560380 +step:8166 train loss:3.565659 +step:8167 train loss:3.472271 +step:8168 train loss:3.452117 +step:8169 train loss:3.500939 +step:8170 train loss:3.448322 +step:8171 train loss:3.510329 +step:8172 train loss:3.508366 +step:8173 train loss:3.508120 +step:8174 train loss:3.518898 +step:8175 train loss:3.478372 +step:8176 train loss:3.477212 +step:8177 train loss:3.519280 +step:8178 train loss:3.609141 +step:8179 train loss:3.512637 +step:8180 train loss:3.540645 +step:8181 train loss:3.536688 +step:8182 train loss:3.498897 +step:8183 train loss:3.484362 +step:8184 train loss:3.478052 +step:8185 train loss:3.516824 +step:8186 train loss:3.520809 +step:8187 train loss:3.529129 +step:8188 train loss:3.458058 +step:8189 train loss:3.607320 +step:8190 train loss:3.538762 +step:8191 train loss:3.542113 +step:8192 train loss:3.652158 +step:8193 train loss:3.521782 +step:8194 train loss:3.457682 +step:8195 train loss:3.553610 +step:8196 train loss:3.471569 +step:8197 train loss:3.504507 +step:8198 train loss:3.508384 +step:8199 train loss:3.510530 +step:8200 train loss:3.486800 +step:8201 train loss:3.602701 +step:8202 train loss:3.520647 +step:8203 train loss:3.539596 +step:8204 train loss:3.448971 +step:8205 train loss:3.457020 +step:8206 train loss:3.579918 +step:8207 train loss:3.504283 +step:8208 train loss:3.522686 +step:8209 train loss:3.567988 +step:8210 train loss:3.551378 +step:8211 train loss:3.484542 +step:8212 train loss:3.543252 +step:8213 train loss:3.553871 +step:8214 train loss:3.591370 +step:8215 train loss:3.563234 +step:8216 train loss:3.547246 +step:8217 train loss:3.525942 +step:8218 train loss:3.533037 +step:8219 train loss:3.665596 +step:8220 train loss:3.497712 +step:8221 train loss:3.516825 +step:8222 train loss:3.471095 +step:8223 train loss:3.490128 +step:8224 train loss:3.499398 +step:8225 train loss:3.551295 
+step:8226 train loss:3.477261 +step:8227 train loss:3.549602 +step:8228 train loss:3.434863 +step:8229 train loss:3.478657 +step:8230 train loss:3.492367 +step:8231 train loss:3.519273 +step:8232 train loss:3.515949 +step:8233 train loss:3.561501 +step:8234 train loss:3.558550 +step:8235 train loss:3.529933 +step:8236 train loss:3.514600 +step:8237 train loss:3.466015 +step:8238 train loss:3.718357 +step:8239 train loss:3.550624 +step:8240 train loss:3.497669 +step:8241 train loss:3.468721 +step:8242 train loss:3.507885 +step:8243 train loss:3.496145 +step:8244 train loss:3.511568 +step:8245 train loss:3.493559 +step:8246 train loss:3.561997 +step:8247 train loss:3.590680 +step:8248 train loss:3.511854 +step:8249 train loss:3.505295 +step:8250 validation loss:3.451179 +step:8250 train loss:3.493251 +step:8251 train loss:3.586154 +step:8252 train loss:3.528109 +step:8253 train loss:3.495335 +step:8254 train loss:3.463273 +step:8255 train loss:3.501623 +step:8256 train loss:3.478055 +step:8257 train loss:3.586826 +step:8258 train loss:3.508387 +step:8259 train loss:3.495506 +step:8260 train loss:3.490694 +step:8261 train loss:3.491794 +step:8262 train loss:3.506529 +step:8263 train loss:3.522358 +step:8264 train loss:3.486209 +step:8265 train loss:3.477093 +step:8266 train loss:3.487167 +step:8267 train loss:3.420010 +step:8268 train loss:3.539529 +step:8269 train loss:3.474528 +step:8270 train loss:3.526732 +step:8271 train loss:3.553328 +step:8272 train loss:3.578327 +step:8273 train loss:3.455941 +step:8274 train loss:3.519586 +step:8275 train loss:3.481961 +step:8276 train loss:3.514841 +step:8277 train loss:3.585169 +step:8278 train loss:3.598552 +step:8279 train loss:3.509003 +step:8280 train loss:3.499415 +step:8281 train loss:3.465967 +step:8282 train loss:3.524922 +step:8283 train loss:3.511792 +step:8284 train loss:3.500410 +step:8285 train loss:3.489408 +step:8286 train loss:3.602291 +step:8287 train loss:3.533267 +step:8288 train loss:3.507371 +step:8289 train loss:3.521119 +step:8290 train loss:3.460316 +step:8291 train loss:3.500414 +step:8292 train loss:3.528322 +step:8293 train loss:3.504123 +step:8294 train loss:3.475044 +step:8295 train loss:3.511606 +step:8296 train loss:3.579289 +step:8297 train loss:3.666057 +step:8298 train loss:3.483933 +step:8299 train loss:3.516946 +step:8300 train loss:3.525770 +step:8301 train loss:3.499130 +step:8302 train loss:3.559256 +step:8303 train loss:3.689392 +step:8304 train loss:3.501811 +step:8305 train loss:3.543091 +step:8306 train loss:3.520125 +step:8307 train loss:3.535898 +step:8308 train loss:3.533869 +step:8309 train loss:3.555391 +step:8310 train loss:3.471727 +step:8311 train loss:3.564893 +step:8312 train loss:3.556217 +step:8313 train loss:3.619117 +step:8314 train loss:3.491128 +step:8315 train loss:3.439111 +step:8316 train loss:3.496690 +step:8317 train loss:3.519956 +step:8318 train loss:3.508814 +step:8319 train loss:3.545512 +step:8320 train loss:3.567308 +step:8321 train loss:3.474973 +step:8322 train loss:3.491140 +step:8323 train loss:3.528632 +step:8324 train loss:3.501677 +step:8325 train loss:3.558442 +step:8326 train loss:3.526184 +step:8327 train loss:3.516104 +step:8328 train loss:3.587607 +step:8329 train loss:3.496453 +step:8330 train loss:3.534486 +step:8331 train loss:3.462671 +step:8332 train loss:3.560297 +step:8333 train loss:3.578434 +step:8334 train loss:3.444803 +step:8335 train loss:3.509107 +step:8336 train loss:3.603146 +step:8337 train loss:3.535266 +step:8338 train loss:3.501736 +step:8339 
train loss:3.481425 +step:8340 train loss:3.569526 +step:8341 train loss:3.470980 +step:8342 train loss:3.543336 +step:8343 train loss:3.456109 +step:8344 train loss:3.501110 +step:8345 train loss:3.536690 +step:8346 train loss:3.615340 +step:8347 train loss:3.509471 +step:8348 train loss:3.538944 +step:8349 train loss:3.508970 +step:8350 train loss:3.529004 +step:8351 train loss:3.470598 +step:8352 train loss:3.555082 +step:8353 train loss:3.510322 +step:8354 train loss:3.490458 +step:8355 train loss:3.495932 +step:8356 train loss:3.486109 +step:8357 train loss:3.504530 +step:8358 train loss:3.478020 +step:8359 train loss:3.473244 +step:8360 train loss:3.519468 +step:8361 train loss:3.534754 +step:8362 train loss:3.551833 +step:8363 train loss:3.553362 +step:8364 train loss:3.514352 +step:8365 train loss:3.662009 +step:8366 train loss:3.502296 +step:8367 train loss:3.478171 +step:8368 train loss:3.448137 +step:8369 train loss:3.478603 +step:8370 train loss:3.561771 +step:8371 train loss:3.531975 +step:8372 train loss:3.508124 +step:8373 train loss:3.522079 +step:8374 train loss:3.453937 +step:8375 train loss:3.518883 +step:8376 train loss:3.552050 +step:8377 train loss:3.382145 +step:8378 train loss:3.596038 +step:8379 train loss:3.459677 +step:8380 train loss:3.467839 +step:8381 train loss:3.474647 +step:8382 train loss:3.507082 +step:8383 train loss:3.460449 +step:8384 train loss:3.502131 +step:8385 train loss:3.516185 +step:8386 train loss:3.496677 +step:8387 train loss:3.659524 +step:8388 train loss:3.571150 +step:8389 train loss:3.549509 +step:8390 train loss:3.548433 +step:8391 train loss:3.481720 +step:8392 train loss:3.494990 +step:8393 train loss:3.451907 +step:8394 train loss:3.537795 +step:8395 train loss:3.546612 +step:8396 train loss:3.569874 +step:8397 train loss:3.502888 +step:8398 train loss:3.522858 +step:8399 train loss:3.487000 +step:8400 train loss:3.493069 +step:8401 train loss:3.506773 +step:8402 train loss:3.485398 +step:8403 train loss:3.501554 +step:8404 train loss:3.508466 +step:8405 train loss:3.458688 +step:8406 train loss:3.503051 +step:8407 train loss:3.542373 +step:8408 train loss:3.516957 +step:8409 train loss:3.437740 +step:8410 train loss:3.501627 +step:8411 train loss:3.525845 +step:8412 train loss:3.583049 +step:8413 train loss:3.560195 +step:8414 train loss:3.557960 +step:8415 train loss:3.477561 +step:8416 train loss:3.525069 +step:8417 train loss:3.442831 +step:8418 train loss:3.543238 +step:8419 train loss:3.499408 +step:8420 train loss:3.577202 +step:8421 train loss:3.492506 +step:8422 train loss:3.509797 +step:8423 train loss:3.524965 +step:8424 train loss:3.531176 +step:8425 train loss:3.587210 +step:8426 train loss:3.558836 +step:8427 train loss:3.478691 +step:8428 train loss:3.488994 +step:8429 train loss:3.550906 +step:8430 train loss:3.493782 +step:8431 train loss:3.495893 +step:8432 train loss:3.496658 +step:8433 train loss:3.471799 +step:8434 train loss:3.509094 +step:8435 train loss:3.428456 +step:8436 train loss:3.511369 +step:8437 train loss:3.553956 +step:8438 train loss:3.531815 +step:8439 train loss:3.473832 +step:8440 train loss:3.440671 +step:8441 train loss:3.499401 +step:8442 train loss:3.522143 +step:8443 train loss:3.478268 +step:8444 train loss:3.509371 +step:8445 train loss:3.463469 +step:8446 train loss:3.510428 +step:8447 train loss:3.524940 +step:8448 train loss:3.505863 +step:8449 train loss:3.497062 +step:8450 train loss:3.489318 +step:8451 train loss:3.520821 +step:8452 train loss:3.491400 +step:8453 train loss:3.477075 
+step:8454 train loss:3.525027 +step:8455 train loss:3.597824 +step:8456 train loss:3.574390 +step:8457 train loss:3.630386 +step:8458 train loss:3.517967 +step:8459 train loss:3.522414 +step:8460 train loss:3.449652 +step:8461 train loss:3.605313 +step:8462 train loss:3.477788 +step:8463 train loss:3.517452 +step:8464 train loss:3.530124 +step:8465 train loss:3.537883 +step:8466 train loss:3.511949 +step:8467 train loss:3.513522 +step:8468 train loss:3.769001 +step:8469 train loss:3.476680 +step:8470 train loss:3.471058 +step:8471 train loss:3.515593 +step:8472 train loss:3.534820 +step:8473 train loss:3.492749 +step:8474 train loss:3.616385 +step:8475 train loss:3.575242 +step:8476 train loss:3.522309 +step:8477 train loss:3.514379 +step:8478 train loss:3.494140 +step:8479 train loss:3.496842 +step:8480 train loss:3.581455 +step:8481 train loss:3.495924 +step:8482 train loss:3.489715 +step:8483 train loss:3.641299 +step:8484 train loss:3.522131 +step:8485 train loss:3.567235 +step:8486 train loss:3.479128 +step:8487 train loss:3.531437 +step:8488 train loss:3.477117 +step:8489 train loss:3.553679 +step:8490 train loss:3.541511 +step:8491 train loss:3.558954 +step:8492 train loss:3.515734 +step:8493 train loss:3.583095 +step:8494 train loss:3.449429 +step:8495 train loss:3.548269 +step:8496 train loss:3.492188 +step:8497 train loss:3.526212 +step:8498 train loss:3.540558 +step:8499 train loss:3.516171 +step:8500 validation loss:3.450153 total_sharp:1.2411e-02 L1_sharp:1.0298e-02 L2_sharp:6.5562e-03 L3_sharp:4.5930e-03 L4_sharp:1.8290e-03 L5_sharp:3.2446e-03 L6_sharp:3.2431e-03 L7_sharp:4.2037e-03 L8_sharp:3.8608e-03 L9_sharp:2.8968e-03 L10_sharp:1.6888e-03 L11_sharp:1.8439e-03 L12_sharp:7.6467e-03 total_fnorm:1.0835e+00 total_l1_linf:9.6000e+03 total_spectral:1.0835e+00 L1_fnorm:2.3354e-01 L2_fnorm:2.3232e-01 L3_fnorm:2.4212e-01 L4_fnorm:2.5301e-01 L5_fnorm:2.5312e-01 L6_fnorm:2.5342e-01 L7_fnorm:2.5091e-01 L8_fnorm:2.5120e-01 L9_fnorm:2.5392e-01 L10_fnorm:2.5135e-01 L11_fnorm:2.5139e-01 L12_fnorm:2.4468e-01 L1_l1linf:3.1177e-01 L2_l1linf:3.2277e-01 L3_l1linf:3.0498e-01 L4_l1linf:2.9017e-01 L5_l1linf:3.0957e-01 L6_l1linf:2.6736e-01 L7_l1linf:2.8358e-01 L8_l1linf:2.7148e-01 L9_l1linf:2.7504e-01 L10_l1linf:2.6597e-01 L11_l1linf:2.8663e-01 L12_l1linf:2.9117e-01 L1_spectral:4.1826e-02 L2_spectral:4.6937e-02 L3_spectral:4.1687e-02 L4_spectral:4.1867e-02 L5_spectral:3.8733e-02 L6_spectral:3.3712e-02 L7_spectral:3.1761e-02 L8_spectral:2.8444e-02 L9_spectral:2.7935e-02 L10_spectral:2.6787e-02 L11_spectral:3.0181e-02 L12_spectral:4.2770e-02 ip_v_neg_g:6.3415e-03 cos_v_neg_g:6.0664e-03 v_norm:1.0835e+00 g_norm:9.6478e-01 hv_norm:2.8812e-01 cos_v_hv:4.6672e-02 hg_norm:4.4953e+00 cos_g_hg:4.6410e-01 v_par:1.9226e-04 v_perp:1.0835e+00 L1_cos_v_neg_g:8.4826e-03 L1_v_norm:2.3354e-01 L2_cos_v_neg_g:9.4150e-03 L2_v_norm:2.3232e-01 L3_cos_v_neg_g:7.5166e-03 L3_v_norm:2.4212e-01 L4_cos_v_neg_g:8.0682e-03 L4_v_norm:2.5301e-01 L5_cos_v_neg_g:8.0217e-03 L5_v_norm:2.5312e-01 L6_cos_v_neg_g:9.8400e-03 L6_v_norm:2.5342e-01 L7_cos_v_neg_g:8.2717e-03 L7_v_norm:2.5091e-01 L8_cos_v_neg_g:9.0592e-03 L8_v_norm:2.5120e-01 L9_cos_v_neg_g:9.2370e-03 L9_v_norm:2.5392e-01 L10_cos_v_neg_g:7.6379e-03 L10_v_norm:2.5135e-01 L11_cos_v_neg_g:5.9473e-03 L11_v_norm:2.5139e-01 L12_cos_v_neg_g:1.8872e-03 L12_v_norm:2.4468e-01 +step:8500 train loss:3.512813 +step:8501 train loss:3.734779 +step:8502 train loss:3.747585 +step:8503 train loss:3.502397 +step:8504 train loss:3.502826 +step:8505 train loss:3.477094 +step:8506 train 
loss:3.549903 +step:8507 train loss:3.488050 +step:8508 train loss:3.521318 +step:8509 train loss:3.461141 +step:8510 train loss:3.481112 +step:8511 train loss:3.442735 +step:8512 train loss:3.539717 +step:8513 train loss:3.544060 +step:8514 train loss:3.490109 +step:8515 train loss:3.587666 +step:8516 train loss:3.501869 +step:8517 train loss:3.524712 +step:8518 train loss:3.411547 +step:8519 train loss:3.508868 +step:8520 train loss:3.473472 +step:8521 train loss:3.513403 +step:8522 train loss:3.408727 +step:8523 train loss:3.506333 +step:8524 train loss:3.493135 +step:8525 train loss:3.559025 +step:8526 train loss:3.543427 +step:8527 train loss:3.484591 +step:8528 train loss:3.567413 +step:8529 train loss:3.522756 +step:8530 train loss:3.559052 +step:8531 train loss:3.546100 +step:8532 train loss:3.584955 +step:8533 train loss:3.534446 +step:8534 train loss:3.538704 +step:8535 train loss:3.508416 +step:8536 train loss:3.596447 +step:8537 train loss:3.510538 +step:8538 train loss:3.581288 +step:8539 train loss:3.503507 +step:8540 train loss:3.528745 +step:8541 train loss:3.469839 +step:8542 train loss:3.535559 +step:8543 train loss:3.453769 +step:8544 train loss:3.447326 +step:8545 train loss:3.499866 +step:8546 train loss:3.451912 +step:8547 train loss:3.504921 +step:8548 train loss:3.475998 +step:8549 train loss:3.519258 +step:8550 train loss:3.471119 +step:8551 train loss:3.522356 +step:8552 train loss:3.525757 +step:8553 train loss:3.525036 +step:8554 train loss:3.500082 +step:8555 train loss:3.511680 +step:8556 train loss:3.590672 +step:8557 train loss:3.488339 +step:8558 train loss:3.525694 +step:8559 train loss:3.517727 +step:8560 train loss:3.502534 +step:8561 train loss:3.455530 +step:8562 train loss:3.486640 +step:8563 train loss:3.482510 +step:8564 train loss:3.552444 +step:8565 train loss:3.529990 +step:8566 train loss:3.546465 +step:8567 train loss:3.493264 +step:8568 train loss:3.508916 +step:8569 train loss:3.518994 +step:8570 train loss:3.463548 +step:8571 train loss:3.502955 +step:8572 train loss:3.518720 +step:8573 train loss:3.596713 +step:8574 train loss:3.526816 +step:8575 train loss:3.525954 +step:8576 train loss:3.559225 +step:8577 train loss:3.640348 +step:8578 train loss:3.550971 +step:8579 train loss:3.537583 +step:8580 train loss:3.470994 +step:8581 train loss:3.513173 +step:8582 train loss:3.516789 +step:8583 train loss:3.514249 +step:8584 train loss:3.506921 +step:8585 train loss:3.586436 +step:8586 train loss:3.503475 +step:8587 train loss:3.511538 +step:8588 train loss:3.559784 +step:8589 train loss:3.504986 +step:8590 train loss:3.499706 +step:8591 train loss:3.499846 +step:8592 train loss:3.458616 +step:8593 train loss:3.540252 +step:8594 train loss:3.561114 +step:8595 train loss:3.485113 +step:8596 train loss:3.527852 +step:8597 train loss:3.490124 +step:8598 train loss:3.542358 +step:8599 train loss:3.515463 +step:8600 train loss:3.519670 +step:8601 train loss:3.506502 +step:8602 train loss:3.481032 +step:8603 train loss:3.538607 +step:8604 train loss:3.486772 +step:8605 train loss:3.497159 +step:8606 train loss:3.510905 +step:8607 train loss:3.519311 +step:8608 train loss:3.563907 +step:8609 train loss:3.458560 +step:8610 train loss:3.531311 +step:8611 train loss:3.462523 +step:8612 train loss:3.542540 +step:8613 train loss:3.474804 +step:8614 train loss:3.537297 +step:8615 train loss:3.580241 +step:8616 train loss:3.460234 +step:8617 train loss:3.528268 +step:8618 train loss:3.504571 +step:8619 train loss:3.456751 +step:8620 train loss:3.498457 
+step:8621 train loss:3.529439 +step:8622 train loss:3.488101 +step:8623 train loss:3.500639 +step:8624 train loss:3.582427 +step:8625 train loss:3.499183 +step:8626 train loss:3.508033 +step:8627 train loss:3.506639 +step:8628 train loss:3.538902 +step:8629 train loss:3.444569 +step:8630 train loss:3.548473 +step:8631 train loss:3.492512 +step:8632 train loss:3.550421 +step:8633 train loss:3.492977 +step:8634 train loss:3.726815 +step:8635 train loss:3.520491 +step:8636 train loss:3.566879 +step:8637 train loss:3.493366 +step:8638 train loss:3.490973 +step:8639 train loss:3.547312 +step:8640 train loss:3.461806 +step:8641 train loss:3.562391 +step:8642 train loss:3.512060 +step:8643 train loss:3.622080 +step:8644 train loss:3.464749 +step:8645 train loss:3.534154 +step:8646 train loss:3.497399 +step:8647 train loss:3.521514 +step:8648 train loss:3.472753 +step:8649 train loss:3.554941 +step:8650 train loss:3.509595 +step:8651 train loss:3.523602 +step:8652 train loss:3.488831 +step:8653 train loss:3.522403 +step:8654 train loss:3.565569 +step:8655 train loss:3.495332 +step:8656 train loss:3.536529 +step:8657 train loss:3.536969 +step:8658 train loss:3.512579 +step:8659 train loss:3.502542 +step:8660 train loss:3.446982 +step:8661 train loss:3.510016 +step:8662 train loss:3.449143 +step:8663 train loss:3.521083 +step:8664 train loss:3.441671 +step:8665 train loss:3.460291 +step:8666 train loss:3.537759 +step:8667 train loss:3.429298 +step:8668 train loss:3.538226 +step:8669 train loss:3.574389 +step:8670 train loss:3.477097 +step:8671 train loss:3.473979 +step:8672 train loss:3.690497 +step:8673 train loss:3.457459 +step:8674 train loss:3.525056 +step:8675 train loss:3.567772 +step:8676 train loss:3.510307 +step:8677 train loss:3.533513 +step:8678 train loss:3.486710 +step:8679 train loss:3.538876 +step:8680 train loss:3.520514 +step:8681 train loss:3.520506 +step:8682 train loss:3.477264 +step:8683 train loss:3.490389 +step:8684 train loss:3.569041 +step:8685 train loss:3.510502 +step:8686 train loss:3.503411 +step:8687 train loss:3.455942 +step:8688 train loss:3.473029 +step:8689 train loss:3.543881 +step:8690 train loss:3.480986 +step:8691 train loss:3.559679 +step:8692 train loss:3.445895 +step:8693 train loss:3.535033 +step:8694 train loss:3.540611 +step:8695 train loss:3.520526 +step:8696 train loss:3.548539 +step:8697 train loss:3.502419 +step:8698 train loss:3.542489 +step:8699 train loss:3.492555 +step:8700 train loss:3.521013 +step:8701 train loss:3.480632 +step:8702 train loss:3.462006 +step:8703 train loss:3.481749 +step:8704 train loss:3.432736 +step:8705 train loss:3.517560 +step:8706 train loss:3.535298 +step:8707 train loss:3.532378 +step:8708 train loss:3.474925 +step:8709 train loss:3.546628 +step:8710 train loss:3.467773 +step:8711 train loss:3.526603 +step:8712 train loss:3.428324 +step:8713 train loss:3.507019 +step:8714 train loss:3.616075 +step:8715 train loss:3.469047 +step:8716 train loss:3.528323 +step:8717 train loss:3.496630 +step:8718 train loss:3.534653 +step:8719 train loss:3.505696 +step:8720 train loss:3.618157 +step:8721 train loss:3.510539 +step:8722 train loss:3.601529 +step:8723 train loss:3.469744 +step:8724 train loss:3.481031 +step:8725 train loss:3.510189 +step:8726 train loss:3.465503 +step:8727 train loss:3.540144 +step:8728 train loss:3.501966 +step:8729 train loss:3.502873 +step:8730 train loss:3.482520 +step:8731 train loss:3.484188 +step:8732 train loss:3.587130 +step:8733 train loss:3.507787 +step:8734 train loss:3.545964 +step:8735 train 
loss:3.617797 +step:8736 train loss:3.473229 +step:8737 train loss:3.497329 +step:8738 train loss:3.481147 +step:8739 train loss:3.541734 +step:8740 train loss:3.462596 +step:8741 train loss:3.516145 +step:8742 train loss:3.473887 +step:8743 train loss:3.508797 +step:8744 train loss:3.533621 +step:8745 train loss:3.572214 +step:8746 train loss:3.471841 +step:8747 train loss:3.580125 +step:8748 train loss:3.481786 +step:8749 train loss:3.521004 +step:8750 validation loss:3.441154 +step:8750 train loss:3.534000 +step:8751 train loss:3.569583 +step:8752 train loss:3.431602 +step:8753 train loss:3.475669 +step:8754 train loss:3.530996 +step:8755 train loss:3.508143 +step:8756 train loss:3.556726 +step:8757 train loss:3.469294 +step:8758 train loss:3.622060 +step:8759 train loss:3.473631 +step:8760 train loss:3.503666 +step:8761 train loss:3.579527 +step:8762 train loss:3.480464 +step:8763 train loss:3.451333 +step:8764 train loss:3.522090 +step:8765 train loss:3.592164 +step:8766 train loss:3.524680 +step:8767 train loss:3.479674 +step:8768 train loss:3.522521 +step:8769 train loss:3.492884 +step:8770 train loss:3.540035 +step:8771 train loss:3.512342 +step:8772 train loss:3.529501 +step:8773 train loss:3.492900 +step:8774 train loss:3.523903 +step:8775 train loss:3.520613 +step:8776 train loss:3.467689 +step:8777 train loss:3.503768 +step:8778 train loss:3.513251 +step:8779 train loss:3.537543 +step:8780 train loss:3.502110 +step:8781 train loss:3.501697 +step:8782 train loss:3.527841 +step:8783 train loss:3.507514 +step:8784 train loss:3.530747 +step:8785 train loss:3.515012 +step:8786 train loss:3.589184 +step:8787 train loss:3.536712 +step:8788 train loss:3.436305 +step:8789 train loss:3.537301 +step:8790 train loss:3.464128 +step:8791 train loss:3.515726 +step:8792 train loss:3.453581 +step:8793 train loss:3.541546 +step:8794 train loss:3.468258 +step:8795 train loss:3.536098 +step:8796 train loss:3.684223 +step:8797 train loss:3.430562 +step:8798 train loss:3.591662 +step:8799 train loss:3.506260 +step:8800 train loss:3.497294 +step:8801 train loss:3.520790 +step:8802 train loss:3.574245 +step:8803 train loss:3.534223 +step:8804 train loss:3.509983 +step:8805 train loss:3.531737 +step:8806 train loss:3.499802 +step:8807 train loss:3.493522 +step:8808 train loss:3.447280 +step:8809 train loss:3.572462 +step:8810 train loss:3.478005 +step:8811 train loss:3.464511 +step:8812 train loss:3.510253 +step:8813 train loss:3.417959 +step:8814 train loss:3.607577 +step:8815 train loss:3.453177 +step:8816 train loss:3.567125 +step:8817 train loss:3.508594 +step:8818 train loss:3.438377 +step:8819 train loss:3.558797 +step:8820 train loss:3.486091 +step:8821 train loss:3.511533 +step:8822 train loss:3.492223 +step:8823 train loss:3.508044 +step:8824 train loss:3.568166 +step:8825 train loss:3.542182 +step:8826 train loss:3.517450 +step:8827 train loss:3.475344 +step:8828 train loss:3.514712 +step:8829 train loss:3.499868 +step:8830 train loss:3.472376 +step:8831 train loss:3.549516 +step:8832 train loss:3.488484 +step:8833 train loss:3.518532 +step:8834 train loss:3.487860 +step:8835 train loss:3.423020 +step:8836 train loss:3.547496 +step:8837 train loss:3.454139 +step:8838 train loss:3.497903 +step:8839 train loss:3.483643 +step:8840 train loss:3.485414 +step:8841 train loss:3.500338 +step:8842 train loss:3.513009 +step:8843 train loss:3.524049 +step:8844 train loss:3.490844 +step:8845 train loss:3.510238 +step:8846 train loss:3.475316 +step:8847 train loss:3.513407 +step:8848 train loss:3.561944 
+step:8849 train loss:3.540454 +step:8850 train loss:3.532452 +step:8851 train loss:3.417285 +step:8852 train loss:3.521076 +step:8853 train loss:3.503931 +step:8854 train loss:3.472718 +step:8855 train loss:3.548100 +step:8856 train loss:3.536258 +step:8857 train loss:3.603972 +step:8858 train loss:3.472482 +step:8859 train loss:3.538304 +step:8860 train loss:3.499228 +step:8861 train loss:3.478526 +step:8862 train loss:3.481619 +step:8863 train loss:3.462068 +step:8864 train loss:3.531851 +step:8865 train loss:3.525447 +step:8866 train loss:3.407684 +step:8867 train loss:3.510090 +step:8868 train loss:3.538284 +step:8869 train loss:3.621200 +step:8870 train loss:3.502326 +step:8871 train loss:3.522614 +step:8872 train loss:3.508962 +step:8873 train loss:3.509296 +step:8874 train loss:3.560709 +step:8875 train loss:3.493598 +step:8876 train loss:3.530109 +step:8877 train loss:3.514649 +step:8878 train loss:3.562713 +step:8879 train loss:3.523313 +step:8880 train loss:3.472150 +step:8881 train loss:3.436539 +step:8882 train loss:3.507133 +step:8883 train loss:3.493012 +step:8884 train loss:3.584717 +step:8885 train loss:3.517401 +step:8886 train loss:3.520463 +step:8887 train loss:3.547277 +step:8888 train loss:3.506592 +step:8889 train loss:3.511178 +step:8890 train loss:3.502537 +step:8891 train loss:3.474487 +step:8892 train loss:3.558089 +step:8893 train loss:3.497069 +step:8894 train loss:3.512374 +step:8895 train loss:3.544117 +step:8896 train loss:3.461206 +step:8897 train loss:3.550921 +step:8898 train loss:3.485101 +step:8899 train loss:3.510241 +step:8900 train loss:3.474966 +step:8901 train loss:3.489290 +step:8902 train loss:3.530970 +step:8903 train loss:3.469124 +step:8904 train loss:3.521405 +step:8905 train loss:3.493669 +step:8906 train loss:3.482289 +step:8907 train loss:3.500923 +step:8908 train loss:3.560499 +step:8909 train loss:3.507267 +step:8910 train loss:3.466781 +step:8911 train loss:3.569309 +step:8912 train loss:3.464582 +step:8913 train loss:3.476047 +step:8914 train loss:3.570808 +step:8915 train loss:3.512545 +step:8916 train loss:3.541260 +step:8917 train loss:3.498944 +step:8918 train loss:3.505121 +step:8919 train loss:3.491453 +step:8920 train loss:3.515666 +step:8921 train loss:3.513010 +step:8922 train loss:3.493472 +step:8923 train loss:3.685389 +step:8924 train loss:3.577866 +step:8925 train loss:3.505296 +step:8926 train loss:3.521478 +step:8927 train loss:3.547433 +step:8928 train loss:3.501764 +step:8929 train loss:3.494522 +step:8930 train loss:3.550734 +step:8931 train loss:3.461939 +step:8932 train loss:3.562939 +step:8933 train loss:3.473043 +step:8934 train loss:3.508216 +step:8935 train loss:3.521918 +step:8936 train loss:3.557911 +step:8937 train loss:3.560025 +step:8938 train loss:3.497961 +step:8939 train loss:3.560899 +step:8940 train loss:3.517247 +step:8941 train loss:3.458551 +step:8942 train loss:3.534755 +step:8943 train loss:3.467327 +step:8944 train loss:3.516793 +step:8945 train loss:3.536085 +step:8946 train loss:3.381050 +step:8947 train loss:3.574624 +step:8948 train loss:3.422420 +step:8949 train loss:3.422668 +step:8950 train loss:3.467424 +step:8951 train loss:3.505880 +step:8952 train loss:3.526166 +step:8953 train loss:3.480642 +step:8954 train loss:3.586285 +step:8955 train loss:3.498746 +step:8956 train loss:3.525164 +step:8957 train loss:3.516007 +step:8958 train loss:3.495257 +step:8959 train loss:3.484190 +step:8960 train loss:3.451348 +step:8961 train loss:3.474558 +step:8962 train loss:3.527976 +step:8963 train 
loss:3.506394 +step:8964 train loss:3.487637 +step:8965 train loss:3.531777 +step:8966 train loss:3.490035 +step:8967 train loss:3.466908 +step:8968 train loss:3.452183 +step:8969 train loss:3.441496 +step:8970 train loss:3.521492 +step:8971 train loss:3.471683 +step:8972 train loss:3.671148 +step:8973 train loss:3.557991 +step:8974 train loss:3.514341 +step:8975 train loss:3.518133 +step:8976 train loss:3.481573 +step:8977 train loss:3.569709 +step:8978 train loss:3.551278 +step:8979 train loss:3.469753 +step:8980 train loss:3.566021 +step:8981 train loss:3.515156 +step:8982 train loss:3.489450 +step:8983 train loss:3.435992 +step:8984 train loss:3.558644 +step:8985 train loss:3.475371 +step:8986 train loss:3.511248 +step:8987 train loss:3.484183 +step:8988 train loss:3.534572 +step:8989 train loss:3.443148 +step:8990 train loss:3.586389 +step:8991 train loss:3.440907 +step:8992 train loss:3.498059 +step:8993 train loss:3.588811 +step:8994 train loss:3.490938 +step:8995 train loss:3.517667 +step:8996 train loss:3.486944 +step:8997 train loss:3.434468 +step:8998 train loss:3.438172 +step:8999 train loss:3.463346 +step:9000 validation loss:3.437703 total_sharp:1.2141e-02 L1_sharp:1.0107e-02 L2_sharp:6.0368e-03 L3_sharp:4.9723e-03 L4_sharp:1.8589e-03 L5_sharp:3.2711e-03 L6_sharp:2.9106e-03 L7_sharp:3.5613e-03 L8_sharp:2.9731e-03 L9_sharp:2.1256e-03 L10_sharp:1.6231e-03 L11_sharp:1.8556e-03 L12_sharp:8.9832e-03 total_fnorm:1.1115e+00 total_l1_linf:9.8967e+03 total_spectral:1.1115e+00 L1_fnorm:2.3800e-01 L2_fnorm:2.3757e-01 L3_fnorm:2.4839e-01 L4_fnorm:2.6101e-01 L5_fnorm:2.6147e-01 L6_fnorm:2.6125e-01 L7_fnorm:2.5893e-01 L8_fnorm:2.6080e-01 L9_fnorm:2.6519e-01 L10_fnorm:2.6623e-01 L11_fnorm:2.6666e-01 L12_fnorm:2.6078e-01 L1_l1linf:3.2480e-01 L2_l1linf:2.9389e-01 L3_l1linf:3.1434e-01 L4_l1linf:3.4076e-01 L5_l1linf:3.5299e-01 L6_l1linf:3.0176e-01 L7_l1linf:2.9573e-01 L8_l1linf:2.7324e-01 L9_l1linf:2.7380e-01 L10_l1linf:2.7768e-01 L11_l1linf:2.8226e-01 L12_l1linf:2.8663e-01 L1_spectral:4.2666e-02 L2_spectral:4.7874e-02 L3_spectral:4.8755e-02 L4_spectral:4.3112e-02 L5_spectral:4.0676e-02 L6_spectral:3.4404e-02 L7_spectral:3.1470e-02 L8_spectral:2.6272e-02 L9_spectral:2.5356e-02 L10_spectral:2.6775e-02 L11_spectral:3.0242e-02 L12_spectral:4.7265e-02 ip_v_neg_g:6.5045e-03 cos_v_neg_g:5.6272e-03 v_norm:1.1115e+00 g_norm:1.0399e+00 hv_norm:3.2809e-01 cos_v_hv:4.1132e-02 hg_norm:7.1412e+00 cos_g_hg:5.2693e-01 v_par:2.0917e-04 v_perp:1.1115e+00 L1_cos_v_neg_g:8.5417e-03 L1_v_norm:2.3800e-01 L2_cos_v_neg_g:8.7688e-03 L2_v_norm:2.3757e-01 L3_cos_v_neg_g:9.5652e-03 L3_v_norm:2.4839e-01 L4_cos_v_neg_g:5.8406e-03 L4_v_norm:2.6101e-01 L5_cos_v_neg_g:5.5295e-03 L5_v_norm:2.6147e-01 L6_cos_v_neg_g:7.5000e-03 L6_v_norm:2.6125e-01 L7_cos_v_neg_g:6.4333e-03 L7_v_norm:2.5893e-01 L8_cos_v_neg_g:7.2292e-03 L8_v_norm:2.6080e-01 L9_cos_v_neg_g:8.5163e-03 L9_v_norm:2.6519e-01 L10_cos_v_neg_g:7.2008e-03 L10_v_norm:2.6623e-01 L11_cos_v_neg_g:5.0402e-03 L11_v_norm:2.6666e-01 L12_cos_v_neg_g:8.0430e-03 L12_v_norm:2.6078e-01 +step:9000 train loss:3.549718 +step:9001 train loss:3.517560 +step:9002 train loss:3.524241 +step:9003 train loss:3.464290 +step:9004 train loss:3.462986 +step:9005 train loss:3.477314 +step:9006 train loss:3.478776 +step:9007 train loss:3.496620 +step:9008 train loss:3.456359 +step:9009 train loss:3.449275 +step:9010 train loss:3.485907 +step:9011 train loss:3.479403 +step:9012 train loss:3.595189 +step:9013 train loss:3.422806 +step:9014 train loss:3.489774 +step:9015 train loss:3.494931 +step:9016 
train loss:3.568774 +step:9017 train loss:3.512251 +step:9018 train loss:3.435112 +step:9019 train loss:3.517856 +step:9020 train loss:3.529934 +step:9021 train loss:3.485738 +step:9022 train loss:3.496355 +step:9023 train loss:3.494638 +step:9024 train loss:3.513389 +step:9025 train loss:3.497695 +step:9026 train loss:3.454898 +step:9027 train loss:3.504219 +step:9028 train loss:3.523120 +step:9029 train loss:3.542200 +step:9030 train loss:3.538177 +step:9031 train loss:3.501658 +step:9032 train loss:3.513892 +step:9033 train loss:3.498903 +step:9034 train loss:3.506046 +step:9035 train loss:3.512789 +step:9036 train loss:3.460508 +step:9037 train loss:3.455593 +step:9038 train loss:3.577964 +step:9039 train loss:3.481351 +step:9040 train loss:3.497835 +step:9041 train loss:3.545884 +step:9042 train loss:3.405288 +step:9043 train loss:3.499592 +step:9044 train loss:3.515589 +step:9045 train loss:3.461254 +step:9046 train loss:3.504971 +step:9047 train loss:3.499125 +step:9048 train loss:3.477103 +step:9049 train loss:3.513488 +step:9050 train loss:3.467712 +step:9051 train loss:3.508985 +step:9052 train loss:3.437470 +step:9053 train loss:3.559100 +step:9054 train loss:3.574923 +step:9055 train loss:3.496086 +step:9056 train loss:3.557140 +step:9057 train loss:3.413351 +step:9058 train loss:3.495909 +step:9059 train loss:3.572788 +step:9060 train loss:3.503881 +step:9061 train loss:3.529813 +step:9062 train loss:3.462534 +step:9063 train loss:3.590856 +step:9064 train loss:3.480587 +step:9065 train loss:3.490303 +step:9066 train loss:3.509551 +step:9067 train loss:3.474285 +step:9068 train loss:3.545451 +step:9069 train loss:3.504078 +step:9070 train loss:3.553903 +step:9071 train loss:3.487913 +step:9072 train loss:3.507600 +step:9073 train loss:3.468872 +step:9074 train loss:3.549857 +step:9075 train loss:3.495928 +step:9076 train loss:3.463238 +step:9077 train loss:3.543956 +step:9078 train loss:3.477644 +step:9079 train loss:3.521894 +step:9080 train loss:3.458408 +step:9081 train loss:3.494909 +step:9082 train loss:3.519956 +step:9083 train loss:3.550428 +step:9084 train loss:3.440029 +step:9085 train loss:3.511682 +step:9086 train loss:3.497522 +step:9087 train loss:3.441450 +step:9088 train loss:3.505321 +step:9089 train loss:3.520060 +step:9090 train loss:3.455873 +step:9091 train loss:3.553313 +step:9092 train loss:3.482652 +step:9093 train loss:3.478566 +step:9094 train loss:3.608519 +step:9095 train loss:3.470476 +step:9096 train loss:3.487486 +step:9097 train loss:3.470898 +step:9098 train loss:3.462955 +step:9099 train loss:3.592449 +step:9100 train loss:3.623361 +step:9101 train loss:3.540183 +step:9102 train loss:3.484284 +step:9103 train loss:3.490752 +step:9104 train loss:3.576822 +step:9105 train loss:3.440185 +step:9106 train loss:3.567646 +step:9107 train loss:3.501002 +step:9108 train loss:3.481798 +step:9109 train loss:3.508856 +step:9110 train loss:3.513307 +step:9111 train loss:3.490008 +step:9112 train loss:3.492088 +step:9113 train loss:3.534886 +step:9114 train loss:3.472616 +step:9115 train loss:3.500480 +step:9116 train loss:3.526853 +step:9117 train loss:3.534158 +step:9118 train loss:3.506793 +step:9119 train loss:3.430280 +step:9120 train loss:3.525548 +step:9121 train loss:3.556194 +step:9122 train loss:3.502951 +step:9123 train loss:3.520805 +step:9124 train loss:3.551431 +step:9125 train loss:3.503505 +step:9126 train loss:3.481267 +step:9127 train loss:3.513732 +step:9128 train loss:3.570229 +step:9129 train loss:3.522808 +step:9130 train loss:3.535231 
+step:9131 train loss:3.513674 +step:9132 train loss:3.522447 +step:9133 train loss:3.510226 +step:9134 train loss:3.481371 +step:9135 train loss:3.510311 +step:9136 train loss:3.509346 +step:9137 train loss:3.563329 +step:9138 train loss:3.481715 +step:9139 train loss:3.554930 +step:9140 train loss:3.480264 +step:9141 train loss:3.456604 +step:9142 train loss:3.634214 +step:9143 train loss:3.459981 +step:9144 train loss:3.554314 +step:9145 train loss:3.560491 +step:9146 train loss:3.475312 +step:9147 train loss:3.548916 +step:9148 train loss:3.569515 +step:9149 train loss:3.477989 +step:9150 train loss:3.498956 +step:9151 train loss:3.562248 +step:9152 train loss:3.518156 +step:9153 train loss:3.482666 +step:9154 train loss:3.497925 +step:9155 train loss:3.466242 +step:9156 train loss:3.466970 +step:9157 train loss:3.486355 +step:9158 train loss:3.466182 +step:9159 train loss:3.555400 +step:9160 train loss:3.437642 +step:9161 train loss:3.465483 +step:9162 train loss:3.554364 +step:9163 train loss:3.500476 +step:9164 train loss:3.470611 +step:9165 train loss:3.463785 +step:9166 train loss:3.522510 +step:9167 train loss:3.464746 +step:9168 train loss:3.509694 +step:9169 train loss:3.444704 +step:9170 train loss:3.466631 +step:9171 train loss:3.531639 +step:9172 train loss:3.455310 +step:9173 train loss:3.574318 +step:9174 train loss:3.502529 +step:9175 train loss:3.480515 +step:9176 train loss:3.463917 +step:9177 train loss:3.508943 +step:9178 train loss:3.456815 +step:9179 train loss:3.413342 +step:9180 train loss:3.509909 +step:9181 train loss:3.518841 +step:9182 train loss:3.488152 +step:9183 train loss:3.494847 +step:9184 train loss:3.491280 +step:9185 train loss:3.505659 +step:9186 train loss:3.466654 +step:9187 train loss:3.538774 +step:9188 train loss:3.575962 +step:9189 train loss:3.500143 +step:9190 train loss:3.505460 +step:9191 train loss:3.498183 +step:9192 train loss:3.509491 +step:9193 train loss:3.512587 +step:9194 train loss:3.445798 +step:9195 train loss:3.439125 +step:9196 train loss:3.489972 +step:9197 train loss:3.449133 +step:9198 train loss:3.525826 +step:9199 train loss:3.472352 +step:9200 train loss:3.498832 +step:9201 train loss:3.534409 +step:9202 train loss:3.521226 +step:9203 train loss:3.477292 +step:9204 train loss:3.674857 +step:9205 train loss:3.590169 +step:9206 train loss:3.500788 +step:9207 train loss:3.554200 +step:9208 train loss:3.529755 +step:9209 train loss:3.551224 +step:9210 train loss:3.445532 +step:9211 train loss:3.466886 +step:9212 train loss:3.470378 +step:9213 train loss:3.532990 +step:9214 train loss:3.473883 +step:9215 train loss:3.540677 +step:9216 train loss:3.503337 +step:9217 train loss:3.445256 +step:9218 train loss:3.532512 +step:9219 train loss:3.493031 +step:9220 train loss:3.538770 +step:9221 train loss:3.589330 +step:9222 train loss:3.534897 +step:9223 train loss:3.705281 +step:9224 train loss:3.540576 +step:9225 train loss:3.474205 +step:9226 train loss:3.489975 +step:9227 train loss:3.508147 +step:9228 train loss:3.510253 +step:9229 train loss:3.467893 +step:9230 train loss:3.533182 +step:9231 train loss:3.415123 +step:9232 train loss:3.472617 +step:9233 train loss:3.496127 +step:9234 train loss:3.554649 +step:9235 train loss:3.553159 +step:9236 train loss:3.462166 +step:9237 train loss:3.523084 +step:9238 train loss:3.495384 +step:9239 train loss:3.489146 +step:9240 train loss:3.456489 +step:9241 train loss:3.490440 +step:9242 train loss:3.497457 +step:9243 train loss:3.496531 +step:9244 train loss:3.472145 +step:9245 train 
loss:3.476441 +step:9246 train loss:3.475858 +step:9247 train loss:3.487032 +step:9248 train loss:3.496663 +step:9249 train loss:3.495007 +step:9250 validation loss:3.431598 +step:9250 train loss:3.535017 +step:9251 train loss:3.476799 +step:9252 train loss:3.543864 +step:9253 train loss:3.538282 +step:9254 train loss:3.464407 +step:9255 train loss:3.584322 +step:9256 train loss:3.465040 +step:9257 train loss:3.406677 +step:9258 train loss:3.486778 +step:9259 train loss:3.489131 +step:9260 train loss:3.597142 +step:9261 train loss:3.468469 +step:9262 train loss:3.538662 +step:9263 train loss:3.441126 +step:9264 train loss:3.590960 +step:9265 train loss:3.617202 +step:9266 train loss:3.545395 +step:9267 train loss:3.491011 +step:9268 train loss:3.485505 +step:9269 train loss:3.511853 +step:9270 train loss:3.435828 +step:9271 train loss:3.547503 +step:9272 train loss:3.488088 +step:9273 train loss:3.507376 +step:9274 train loss:3.510910 +step:9275 train loss:3.506646 +step:9276 train loss:3.535030 +step:9277 train loss:3.507815 +step:9278 train loss:3.521779 +step:9279 train loss:3.513021 +step:9280 train loss:3.514393 +step:9281 train loss:3.487002 +step:9282 train loss:3.605486 +step:9283 train loss:3.495818 +step:9284 train loss:3.458530 +step:9285 train loss:3.477178 +step:9286 train loss:3.531023 +step:9287 train loss:3.500242 +step:9288 train loss:3.506637 +step:9289 train loss:3.479235 +step:9290 train loss:3.508878 +step:9291 train loss:3.483005 +step:9292 train loss:3.525093 +step:9293 train loss:3.585474 +step:9294 train loss:3.502816 +step:9295 train loss:3.485713 +step:9296 train loss:3.439931 +step:9297 train loss:3.509515 +step:9298 train loss:3.449889 +step:9299 train loss:3.435021 +step:9300 train loss:3.536161 +step:9301 train loss:3.562922 +step:9302 train loss:3.501756 +step:9303 train loss:3.552701 +step:9304 train loss:3.471358 +step:9305 train loss:3.464853 +step:9306 train loss:3.471199 +step:9307 train loss:3.467639 +step:9308 train loss:3.438737 +step:9309 train loss:3.428476 +step:9310 train loss:3.488101 +step:9311 train loss:3.545763 +step:9312 train loss:3.497721 +step:9313 train loss:3.443654 +step:9314 train loss:3.472563 +step:9315 train loss:3.504575 +step:9316 train loss:3.492526 +step:9317 train loss:3.466671 +step:9318 train loss:3.554920 +step:9319 train loss:3.460905 +step:9320 train loss:3.487982 +step:9321 train loss:3.497917 +step:9322 train loss:3.508423 +step:9323 train loss:3.580365 +step:9324 train loss:3.525888 +step:9325 train loss:3.465075 +step:9326 train loss:3.540169 +step:9327 train loss:3.536316 +step:9328 train loss:3.535065 +step:9329 train loss:3.426660 +step:9330 train loss:3.592909 +step:9331 train loss:3.525799 +step:9332 train loss:3.546035 +step:9333 train loss:3.563319 +step:9334 train loss:3.499195 +step:9335 train loss:3.594491 +step:9336 train loss:3.554943 +step:9337 train loss:3.509931 +step:9338 train loss:3.561801 +step:9339 train loss:3.539252 +step:9340 train loss:3.499126 +step:9341 train loss:3.584367 +step:9342 train loss:3.487463 +step:9343 train loss:3.479559 +step:9344 train loss:3.483126 +step:9345 train loss:3.626798 +step:9346 train loss:3.457590 +step:9347 train loss:3.477162 +step:9348 train loss:3.502120 +step:9349 train loss:3.445810 +step:9350 train loss:3.522358 +step:9351 train loss:3.499305 +step:9352 train loss:3.486985 +step:9353 train loss:3.514522 +step:9354 train loss:3.483045 +step:9355 train loss:3.478013 +step:9356 train loss:3.525072 +step:9357 train loss:3.477862 +step:9358 train loss:3.510393 
+step:9359 train loss:3.452235 +step:9360 train loss:3.467438 +step:9361 train loss:3.467659 +step:9362 train loss:3.456082 +step:9363 train loss:3.520660 +step:9364 train loss:3.497550 +step:9365 train loss:3.501675 +step:9366 train loss:3.497553 +step:9367 train loss:3.511099 +step:9368 train loss:3.485438 +step:9369 train loss:3.483871 +step:9370 train loss:3.494016 +step:9371 train loss:3.511086 +step:9372 train loss:3.478204 +step:9373 train loss:3.461879 +step:9374 train loss:3.497140 +step:9375 train loss:3.510875 +step:9376 train loss:3.451279 +step:9377 train loss:3.524963 +step:9378 train loss:3.526228 +step:9379 train loss:3.552125 +step:9380 train loss:3.485982 +step:9381 train loss:3.494296 +step:9382 train loss:3.469704 +step:9383 train loss:3.464141 +step:9384 train loss:3.433354 +step:9385 train loss:3.508154 +step:9386 train loss:3.536059 +step:9387 train loss:3.511171 +step:9388 train loss:3.449124 +step:9389 train loss:3.468019 +step:9390 train loss:3.511005 +step:9391 train loss:3.516663 +step:9392 train loss:3.478749 +step:9393 train loss:3.471205 +step:9394 train loss:3.498892 +step:9395 train loss:3.492145 +step:9396 train loss:3.641905 +step:9397 train loss:3.529549 +step:9398 train loss:3.552449 +step:9399 train loss:3.500094 +step:9400 train loss:3.504279 +step:9401 train loss:3.497035 +step:9402 train loss:3.498152 +step:9403 train loss:3.429942 +step:9404 train loss:3.505392 +step:9405 train loss:3.466186 +step:9406 train loss:3.521129 +step:9407 train loss:3.463293 +step:9408 train loss:3.400850 +step:9409 train loss:3.465444 +step:9410 train loss:3.546794 +step:9411 train loss:3.506634 +step:9412 train loss:3.536736 +step:9413 train loss:3.553010 +step:9414 train loss:3.491208 +step:9415 train loss:3.481058 +step:9416 train loss:3.497317 +step:9417 train loss:3.451539 +step:9418 train loss:3.480535 +step:9419 train loss:3.450000 +step:9420 train loss:3.468296 +step:9421 train loss:3.515005 +step:9422 train loss:3.467551 +step:9423 train loss:3.531760 +step:9424 train loss:3.468409 +step:9425 train loss:3.513072 +step:9426 train loss:3.515425 +step:9427 train loss:3.488065 +step:9428 train loss:3.594591 +step:9429 train loss:3.485917 +step:9430 train loss:3.440986 +step:9431 train loss:3.531394 +step:9432 train loss:3.496750 +step:9433 train loss:3.535004 +step:9434 train loss:3.488314 +step:9435 train loss:3.514447 +step:9436 train loss:3.484437 +step:9437 train loss:3.497108 +step:9438 train loss:3.491878 +step:9439 train loss:3.489645 +step:9440 train loss:3.480469 +step:9441 train loss:3.493948 +step:9442 train loss:3.432975 +step:9443 train loss:3.485151 +step:9444 train loss:3.551826 +step:9445 train loss:3.482790 +step:9446 train loss:3.461923 +step:9447 train loss:3.528612 +step:9448 train loss:3.464382 +step:9449 train loss:3.486144 +step:9450 train loss:3.527194 +step:9451 train loss:3.443907 +step:9452 train loss:3.494327 +step:9453 train loss:3.474935 +step:9454 train loss:3.538337 +step:9455 train loss:3.519522 +step:9456 train loss:3.456665 +step:9457 train loss:3.490128 +step:9458 train loss:3.480603 +step:9459 train loss:3.472262 +step:9460 train loss:3.515881 +step:9461 train loss:3.539396 +step:9462 train loss:3.491625 +step:9463 train loss:3.519365 +step:9464 train loss:3.473945 +step:9465 train loss:3.563602 +step:9466 train loss:3.513234 +step:9467 train loss:3.535474 +step:9468 train loss:3.483072 +step:9469 train loss:3.470332 +step:9470 train loss:3.469812 +step:9471 train loss:3.507150 +step:9472 train loss:3.533351 +step:9473 train 
loss:3.522562 +step:9474 train loss:3.464705 +step:9475 train loss:3.458724 +step:9476 train loss:3.677029 +step:9477 train loss:3.550730 +step:9478 train loss:3.529809 +step:9479 train loss:3.624910 +step:9480 train loss:3.475248 +step:9481 train loss:3.506752 +step:9482 train loss:3.532642 +step:9483 train loss:3.488488 +step:9484 train loss:3.517696 +step:9485 train loss:3.439806 +step:9486 train loss:3.476471 +step:9487 train loss:3.507299 +step:9488 train loss:3.464917 +step:9489 train loss:3.509982 +step:9490 train loss:3.474499 +step:9491 train loss:3.517590 +step:9492 train loss:3.534371 +step:9493 train loss:3.510777 +step:9494 train loss:3.519882 +step:9495 train loss:3.473214 +step:9496 train loss:3.532932 +step:9497 train loss:3.546547 +step:9498 train loss:3.497505 +step:9499 train loss:3.541301 +step:9500 validation loss:3.430226 total_sharp:1.2474e-02 L1_sharp:1.7537e-02 L2_sharp:8.2212e-03 L3_sharp:4.1747e-03 L4_sharp:1.7082e-03 L5_sharp:3.4678e-03 L6_sharp:3.0533e-03 L7_sharp:3.8032e-03 L8_sharp:2.6795e-03 L9_sharp:1.9166e-03 L10_sharp:1.4192e-03 L11_sharp:1.9618e-03 L12_sharp:6.1329e-03 total_fnorm:1.1093e+00 total_l1_linf:9.8742e+03 total_spectral:1.1093e+00 L1_fnorm:2.4889e-01 L2_fnorm:2.4022e-01 L3_fnorm:2.4637e-01 L4_fnorm:2.5925e-01 L5_fnorm:2.6138e-01 L6_fnorm:2.6046e-01 L7_fnorm:2.5744e-01 L8_fnorm:2.5820e-01 L9_fnorm:2.6306e-01 L10_fnorm:2.6532e-01 L11_fnorm:2.6496e-01 L12_fnorm:2.5835e-01 L1_l1linf:3.1032e-01 L2_l1linf:3.0192e-01 L3_l1linf:2.8930e-01 L4_l1linf:3.3874e-01 L5_l1linf:3.3109e-01 L6_l1linf:2.7901e-01 L7_l1linf:2.8155e-01 L8_l1linf:2.7590e-01 L9_l1linf:2.7128e-01 L10_l1linf:2.7503e-01 L11_l1linf:3.0205e-01 L12_l1linf:2.8048e-01 L1_spectral:4.4232e-02 L2_spectral:4.8617e-02 L3_spectral:4.4104e-02 L4_spectral:4.3766e-02 L5_spectral:4.0844e-02 L6_spectral:3.3290e-02 L7_spectral:3.1533e-02 L8_spectral:2.5769e-02 L9_spectral:2.4896e-02 L10_spectral:2.6010e-02 L11_spectral:3.0742e-02 L12_spectral:4.2296e-02 ip_v_neg_g:6.6301e-03 cos_v_neg_g:5.8934e-03 v_norm:1.1093e+00 g_norm:1.0142e+00 hv_norm:3.8774e-01 cos_v_hv:3.5685e-02 hg_norm:1.0014e+01 cos_g_hg:4.1071e-01 v_par:1.9623e-04 v_perp:1.1093e+00 L1_cos_v_neg_g:1.1410e-02 L1_v_norm:2.4889e-01 L2_cos_v_neg_g:7.0242e-03 L2_v_norm:2.4022e-01 L3_cos_v_neg_g:4.1461e-03 L3_v_norm:2.4637e-01 L4_cos_v_neg_g:3.0561e-03 L4_v_norm:2.5925e-01 L5_cos_v_neg_g:7.2260e-03 L5_v_norm:2.6138e-01 L6_cos_v_neg_g:5.7297e-03 L6_v_norm:2.6046e-01 L7_cos_v_neg_g:7.9225e-03 L7_v_norm:2.5744e-01 L8_cos_v_neg_g:8.0343e-03 L8_v_norm:2.5820e-01 L9_cos_v_neg_g:8.7756e-03 L9_v_norm:2.6306e-01 L10_cos_v_neg_g:8.1758e-03 L10_v_norm:2.6532e-01 L11_cos_v_neg_g:8.4875e-03 L11_v_norm:2.6496e-01 L12_cos_v_neg_g:1.0974e-02 L12_v_norm:2.5835e-01 +step:9500 train loss:3.534498 +step:9501 train loss:3.513215 +step:9502 train loss:3.483624 +step:9503 train loss:3.501909 +step:9504 train loss:3.457519 +step:9505 train loss:3.480570 +step:9506 train loss:3.494635 +step:9507 train loss:3.482542 +step:9508 train loss:3.674636 +step:9509 train loss:3.491689 +step:9510 train loss:3.479923 +step:9511 train loss:3.506558 +step:9512 train loss:3.537038 +step:9513 train loss:3.529028 +step:9514 train loss:3.494023 +step:9515 train loss:3.396544 +step:9516 train loss:3.496772 +step:9517 train loss:3.532633 +step:9518 train loss:3.506119 +step:9519 train loss:3.519645 +step:9520 train loss:3.405395 +step:9521 train loss:3.400309 +step:9522 train loss:3.519306 +step:9523 train loss:3.515453 +step:9524 train loss:3.516663 +step:9525 train loss:3.561196 +step:9526 
train loss:3.575574 +step:9527 train loss:3.533199 +step:9528 train loss:3.468694 +step:9529 train loss:3.509875 +step:9530 train loss:3.554662 +step:9531 train loss:3.462450 +step:9532 train loss:3.512598 +step:9533 train loss:3.483942 +step:9534 train loss:3.564982 +step:9535 train loss:3.486204 +step:9536 train loss:3.466827 +step:9537 train loss:3.412317 +step:9538 train loss:3.431889 +step:9539 train loss:3.502315 +step:9540 train loss:3.418264 +step:9541 train loss:3.480221 +step:9542 train loss:3.605789 +step:9543 train loss:3.502476 +step:9544 train loss:3.544810 +step:9545 train loss:3.478533 +step:9546 train loss:3.502743 +step:9547 train loss:3.546381 +step:9548 train loss:3.489016 +step:9549 train loss:3.457821 +step:9550 train loss:3.488529 +step:9551 train loss:3.483191 +step:9552 train loss:3.503997 +step:9553 train loss:3.501374 +step:9554 train loss:3.545794 +step:9555 train loss:3.554585 +step:9556 train loss:3.457819 +step:9557 train loss:3.482656 +step:9558 train loss:3.544651 +step:9559 train loss:3.552734 +step:9560 train loss:3.461113 +step:9561 train loss:3.491101 +step:9562 train loss:3.529736 +step:9563 train loss:3.475866 +step:9564 train loss:3.510294 +step:9565 train loss:3.488919 +step:9566 train loss:3.460090 +step:9567 train loss:3.527284 +step:9568 train loss:3.497540 +step:9569 train loss:3.540928 +step:9570 train loss:3.434521 +step:9571 train loss:3.508223 +step:9572 train loss:3.451034 +step:9573 train loss:3.483232 +step:9574 train loss:3.460667 +step:9575 train loss:3.533083 +step:9576 train loss:3.421569 +step:9577 train loss:3.469848 +step:9578 train loss:3.477114 +step:9579 train loss:3.472610 +step:9580 train loss:3.539886 +step:9581 train loss:3.530068 +step:9582 train loss:3.503377 +step:9583 train loss:3.526677 +step:9584 train loss:3.463359 +step:9585 train loss:3.483624 +step:9586 train loss:3.535532 +step:9587 train loss:3.506931 +step:9588 train loss:3.493130 +step:9589 train loss:3.546940 +step:9590 train loss:3.516406 +step:9591 train loss:3.477422 +step:9592 train loss:3.499619 +step:9593 train loss:3.501881 +step:9594 train loss:3.518348 +step:9595 train loss:3.491313 +step:9596 train loss:3.578859 +step:9597 train loss:3.487156 +step:9598 train loss:3.452260 +step:9599 train loss:3.454118 +step:9600 train loss:3.541790 +step:9601 train loss:3.457173 +step:9602 train loss:3.539458 +step:9603 train loss:3.534339 +step:9604 train loss:3.416203 +step:9605 train loss:3.504803 +step:9606 train loss:3.558628 +step:9607 train loss:3.477308 +step:9608 train loss:3.482634 +step:9609 train loss:3.495136 +step:9610 train loss:3.534666 +step:9611 train loss:3.470275 +step:9612 train loss:3.475956 +step:9613 train loss:3.515658 +step:9614 train loss:3.487001 +step:9615 train loss:3.672527 +step:9616 train loss:3.486830 +step:9617 train loss:3.474442 +step:9618 train loss:3.433374 +step:9619 train loss:3.492216 +step:9620 train loss:3.553742 +step:9621 train loss:3.473120 +step:9622 train loss:3.485751 +step:9623 train loss:3.527489 +step:9624 train loss:3.510083 +step:9625 train loss:3.525461 +step:9626 train loss:3.498234 +step:9627 train loss:3.577534 +step:9628 train loss:3.541129 +step:9629 train loss:3.455972 +step:9630 train loss:3.513995 +step:9631 train loss:3.501178 +step:9632 train loss:3.469407 +step:9633 train loss:3.510695 +step:9634 train loss:3.578513 +step:9635 train loss:3.481639 +step:9636 train loss:3.430147 +step:9637 train loss:3.563117 +step:9638 train loss:3.446524 +step:9639 train loss:3.414323 +step:9640 train loss:3.537396 
+step:9641 train loss:3.511236 +step:9642 train loss:3.487024 +step:9643 train loss:3.489436 +step:9644 train loss:3.548302 +step:9645 train loss:3.472439 +step:9646 train loss:3.509045 +step:9647 train loss:3.520965 +step:9648 train loss:3.469496 +step:9649 train loss:3.442545 +step:9650 train loss:3.459645 +step:9651 train loss:3.549707 +step:9652 train loss:3.530031 +step:9653 train loss:3.473210 +step:9654 train loss:3.453850 +step:9655 train loss:3.451224 +step:9656 train loss:3.444468 +step:9657 train loss:3.473704 +step:9658 train loss:3.529456 +step:9659 train loss:3.637636 +step:9660 train loss:3.419951 +step:9661 train loss:3.439782 +step:9662 train loss:3.460953 +step:9663 train loss:3.498231 +step:9664 train loss:3.552460 +step:9665 train loss:3.395114 +step:9666 train loss:3.438357 +step:9667 train loss:3.574587 +step:9668 train loss:3.552530 +step:9669 train loss:3.572726 +step:9670 train loss:3.553252 +step:9671 train loss:3.549749 +step:9672 train loss:3.465677 +step:9673 train loss:3.486534 +step:9674 train loss:3.497935 +step:9675 train loss:3.497963 +step:9676 train loss:3.455042 +step:9677 train loss:3.461667 +step:9678 train loss:3.497600 +step:9679 train loss:3.488523 +step:9680 train loss:3.486011 +step:9681 train loss:3.473403 +step:9682 train loss:3.539808 +step:9683 train loss:3.515481 +step:9684 train loss:3.431234 +step:9685 train loss:3.516983 +step:9686 train loss:3.549218 +step:9687 train loss:3.455976 +step:9688 train loss:3.543061 +step:9689 train loss:3.641261 +step:9690 train loss:3.485666 +step:9691 train loss:3.474324 +step:9692 train loss:3.434482 +step:9693 train loss:3.433654 +step:9694 train loss:3.454750 +step:9695 train loss:3.564548 +step:9696 train loss:3.596152 +step:9697 train loss:3.501778 +step:9698 train loss:3.540759 +step:9699 train loss:3.498601 +step:9700 train loss:3.499574 +step:9701 train loss:3.552193 +step:9702 train loss:3.465027 +step:9703 train loss:3.488965 +step:9704 train loss:3.572140 +step:9705 train loss:3.469327 +step:9706 train loss:3.461774 +step:9707 train loss:3.513967 +step:9708 train loss:3.462432 +step:9709 train loss:3.481263 +step:9710 train loss:3.501891 +step:9711 train loss:3.474540 +step:9712 train loss:3.487083 +step:9713 train loss:3.539095 +step:9714 train loss:3.489782 +step:9715 train loss:3.510058 +step:9716 train loss:3.535212 +step:9717 train loss:3.455309 +step:9718 train loss:3.461509 +step:9719 train loss:3.545995 +step:9720 train loss:3.477176 +step:9721 train loss:3.465368 +step:9722 train loss:3.531166 +step:9723 train loss:3.474321 +step:9724 train loss:3.506669 +step:9725 train loss:3.556678 +step:9726 train loss:3.499865 +step:9727 train loss:3.476394 +step:9728 train loss:3.513942 +step:9729 train loss:3.543315 +step:9730 train loss:3.612462 +step:9731 train loss:3.533692 +step:9732 train loss:3.494504 +step:9733 train loss:3.536899 +step:9734 train loss:3.459642 +step:9735 train loss:3.562712 +step:9736 train loss:3.464645 +step:9737 train loss:3.522814 +step:9738 train loss:3.488809 +step:9739 train loss:3.561408 +step:9740 train loss:3.526229 +step:9741 train loss:3.465980 +step:9742 train loss:3.558357 +step:9743 train loss:3.433659 +step:9744 train loss:3.493955 +step:9745 train loss:3.453318 +step:9746 train loss:3.489522 +step:9747 train loss:3.479693 +step:9748 train loss:3.383781 +step:9749 train loss:3.478934 +step:9750 validation loss:3.425560 +step:9750 train loss:3.458586 +step:9751 train loss:3.602080 +step:9752 train loss:3.482901 +step:9753 train loss:3.446491 +step:9754 
train loss:3.473678 +step:9755 train loss:3.470102 +step:9756 train loss:3.473115 +step:9757 train loss:3.439302 +step:9758 train loss:3.432557 +step:9759 train loss:3.478383 +step:9760 train loss:3.424334 +step:9761 train loss:3.460962 +step:9762 train loss:3.460315 +step:9763 train loss:3.481120 +step:9764 train loss:3.467094 +step:9765 train loss:3.430813 +step:9766 train loss:3.518230 +step:9767 train loss:3.474969 +step:9768 train loss:3.487724 +step:9769 train loss:3.440253 +step:9770 train loss:3.440711 +step:9771 train loss:3.490308 +step:9772 train loss:3.502478 +step:9773 train loss:3.478824 +step:9774 train loss:3.450715 +step:9775 train loss:3.538070 +step:9776 train loss:3.534882 +step:9777 train loss:3.428328 +step:9778 train loss:3.432644 +step:9779 train loss:3.438852 +step:9780 train loss:3.437921 +step:9781 train loss:3.456259 +step:9782 train loss:3.534193 +step:9783 train loss:3.441307 +step:9784 train loss:3.472330 +step:9785 train loss:3.462389 +step:9786 train loss:3.495533 +step:9787 train loss:3.522403 +step:9788 train loss:3.451456 +step:9789 train loss:3.459333 +step:9790 train loss:3.419708 +step:9791 train loss:3.467191 +step:9792 train loss:3.484156 +step:9793 train loss:3.499226 +step:9794 train loss:3.478022 +step:9795 train loss:3.482700 +step:9796 train loss:3.466113 +step:9797 train loss:3.463773 +step:9798 train loss:3.477793 +step:9799 train loss:3.480243 +step:9800 train loss:3.550508 +step:9801 train loss:3.476485 +step:9802 train loss:3.532098 +step:9803 train loss:3.391950 +step:9804 train loss:3.487284 +step:9805 train loss:3.491675 +step:9806 train loss:3.466866 +step:9807 train loss:3.437934 +step:9808 train loss:3.351996 +step:9809 train loss:3.538322 +step:9810 train loss:3.494747 +step:9811 train loss:3.482161 +step:9812 train loss:3.454285 +step:9813 train loss:3.534923 +step:9814 train loss:3.524148 +step:9815 train loss:3.429134 +step:9816 train loss:3.430079 +step:9817 train loss:3.462266 +step:9818 train loss:3.488420 +step:9819 train loss:3.458259 +step:9820 train loss:3.527558 +step:9821 train loss:3.502914 +step:9822 train loss:3.481098 +step:9823 train loss:3.538545 +step:9824 train loss:3.448695 +step:9825 train loss:3.529931 +step:9826 train loss:3.524517 +step:9827 train loss:3.530406 +step:9828 train loss:3.447494 +step:9829 train loss:3.454666 +step:9830 train loss:3.441082 +step:9831 train loss:3.505130 +step:9832 train loss:3.515475 +step:9833 train loss:3.428617 +step:9834 train loss:3.478921 +step:9835 train loss:3.448472 +step:9836 train loss:3.508599 +step:9837 train loss:3.483381 +step:9838 train loss:3.520050 +step:9839 train loss:3.493640 +step:9840 train loss:3.459253 +step:9841 train loss:3.467098 +step:9842 train loss:3.529937 +step:9843 train loss:3.522661 +step:9844 train loss:3.472168 +step:9845 train loss:3.502833 +step:9846 train loss:3.437031 +step:9847 train loss:3.567586 +step:9848 train loss:3.491852 +step:9849 train loss:3.517482 +step:9850 train loss:3.434468 +step:9851 train loss:3.491002 +step:9852 train loss:3.451234 +step:9853 train loss:3.475688 +step:9854 train loss:3.484552 +step:9855 train loss:3.438283 +step:9856 train loss:3.438644 +step:9857 train loss:3.431341 +step:9858 train loss:3.493357 +step:9859 train loss:3.416472 +step:9860 train loss:3.651079 +step:9861 train loss:3.477993 +step:9862 train loss:3.442941 +step:9863 train loss:3.426023 +step:9864 train loss:3.548814 +step:9865 train loss:3.424589 +step:9866 train loss:3.465592 +step:9867 train loss:3.464976 +step:9868 train loss:3.523968 
+step:9869 train loss:3.486272 +step:9870 train loss:3.458255 +step:9871 train loss:3.500025 +step:9872 train loss:3.443865 +step:9873 train loss:3.492845 +step:9874 train loss:3.460244 +step:9875 train loss:3.463797 +step:9876 train loss:3.428531 +step:9877 train loss:3.476610 +step:9878 train loss:3.511098 +step:9879 train loss:3.510094 +step:9880 train loss:3.441474 +step:9881 train loss:3.492611 +step:9882 train loss:3.454025 +step:9883 train loss:3.462468 +step:9884 train loss:3.456945 +step:9885 train loss:3.520356 +step:9886 train loss:3.486976 +step:9887 train loss:3.486381 +step:9888 train loss:3.507628 +step:9889 train loss:3.544155 +step:9890 train loss:3.457513 +step:9891 train loss:3.461923 +step:9892 train loss:3.436389 +step:9893 train loss:3.555254 +step:9894 train loss:3.465138 +step:9895 train loss:3.400774 +step:9896 train loss:3.555254 +step:9897 train loss:3.430122 +step:9898 train loss:3.496259 +step:9899 train loss:3.477929 +step:9900 train loss:3.522100 +step:9901 train loss:3.444829 +step:9902 train loss:3.488415 +step:9903 train loss:3.459796 +step:9904 train loss:3.509387 +step:9905 train loss:3.416194 +step:9906 train loss:3.452403 +step:9907 train loss:3.461200 +step:9908 train loss:3.460264 +step:9909 train loss:3.476106 +step:9910 train loss:3.500326 +step:9911 train loss:3.581667 +step:9912 train loss:3.462238 +step:9913 train loss:3.464004 +step:9914 train loss:3.473862 +step:9915 train loss:3.470705 +step:9916 train loss:3.424443 +step:9917 train loss:3.460536 +step:9918 train loss:3.456312 +step:9919 train loss:3.619714 +step:9920 train loss:3.412700 +step:9921 train loss:3.498991 +step:9922 train loss:3.457062 +step:9923 train loss:3.514978 +step:9924 train loss:3.430399 +step:9925 train loss:3.485980 +step:9926 train loss:3.466438 +step:9927 train loss:3.506511 +step:9928 train loss:3.435565 +step:9929 train loss:3.472814 +step:9930 train loss:3.564102 +step:9931 train loss:3.527457 +step:9932 train loss:3.415039 +step:9933 train loss:3.509694 +step:9934 train loss:3.444278 +step:9935 train loss:3.544855 +step:9936 train loss:3.450679 +step:9937 train loss:3.478066 +step:9938 train loss:3.464071 +step:9939 train loss:3.529483 +step:9940 train loss:3.562886 +step:9941 train loss:3.442959 +step:9942 train loss:3.483949 +step:9943 train loss:3.622441 +step:9944 train loss:3.482335 +step:9945 train loss:3.511759 +step:9946 train loss:3.478165 +step:9947 train loss:3.426939 +step:9948 train loss:3.472017 +step:9949 train loss:3.365645 +step:9950 train loss:3.515934 +step:9951 train loss:3.438324 +step:9952 train loss:3.510730 +step:9953 train loss:3.468975 +step:9954 train loss:3.527932 +step:9955 train loss:3.502925 +step:9956 train loss:3.504629 +step:9957 train loss:3.480257 +step:9958 train loss:3.535444 +step:9959 train loss:3.434073 +step:9960 train loss:3.467363 +step:9961 train loss:3.477932 +step:9962 train loss:3.522981 +step:9963 train loss:3.412061 +step:9964 train loss:3.467298 +step:9965 train loss:3.470495 +step:9966 train loss:3.527941 +step:9967 train loss:3.451927 +step:9968 train loss:3.509511 +step:9969 train loss:3.421471 +step:9970 train loss:3.465630 +step:9971 train loss:3.518457 +step:9972 train loss:3.529544 +step:9973 train loss:3.504642 +step:9974 train loss:3.491565 +step:9975 train loss:3.462194 +step:9976 train loss:3.418220 +step:9977 train loss:3.472716 +step:9978 train loss:3.467662 +step:9979 train loss:3.478038 +step:9980 train loss:3.529536 +step:9981 train loss:3.439567 +step:9982 train loss:3.504030 +step:9983 train 
loss:3.421058 +step:9984 train loss:3.484489 +step:9985 train loss:3.428849 +step:9986 train loss:3.479528 +step:9987 train loss:3.529316 +step:9988 train loss:3.539865 +step:9989 train loss:3.434137 +step:9990 train loss:3.572621 +step:9991 train loss:3.418816 +step:9992 train loss:3.496312 +step:9993 train loss:3.484800 +step:9994 train loss:3.602973 +step:9995 train loss:3.539095 +step:9996 train loss:3.455213 +step:9997 train loss:3.498327 +step:9998 train loss:3.548207 +step:9999 train loss:3.515256 +step:10000 validation loss:3.421028 total_sharp:2.0233e-02 L1_sharp:2.1470e-02 L2_sharp:3.4876e-02 L3_sharp:6.4868e-03 L4_sharp:1.9748e-03 L5_sharp:3.2222e-03 L6_sharp:3.9969e-03 L7_sharp:4.7019e-03 L8_sharp:4.3145e-03 L9_sharp:3.0895e-03 L10_sharp:1.9752e-03 L11_sharp:2.3666e-03 L12_sharp:6.0016e-03 total_fnorm:1.1202e+00 total_l1_linf:9.9945e+03 total_spectral:1.1202e+00 L1_fnorm:2.5053e-01 L2_fnorm:2.5005e-01 L3_fnorm:2.5328e-01 L4_fnorm:2.6281e-01 L5_fnorm:2.6358e-01 L6_fnorm:2.6458e-01 L7_fnorm:2.6065e-01 L8_fnorm:2.6220e-01 L9_fnorm:2.6659e-01 L10_fnorm:2.6692e-01 L11_fnorm:2.6770e-01 L12_fnorm:2.5739e-01 L1_l1linf:3.4630e-01 L2_l1linf:3.4252e-01 L3_l1linf:3.5802e-01 L4_l1linf:3.6659e-01 L5_l1linf:3.2272e-01 L6_l1linf:2.9753e-01 L7_l1linf:2.8673e-01 L8_l1linf:2.7561e-01 L9_l1linf:2.8837e-01 L10_l1linf:2.9326e-01 L11_l1linf:3.1352e-01 L12_l1linf:3.0829e-01 L1_spectral:4.8543e-02 L2_spectral:4.8716e-02 L3_spectral:5.1392e-02 L4_spectral:4.8405e-02 L5_spectral:4.7626e-02 L6_spectral:3.9451e-02 L7_spectral:3.4539e-02 L8_spectral:2.8119e-02 L9_spectral:2.7500e-02 L10_spectral:2.8095e-02 L11_spectral:3.6776e-02 L12_spectral:4.0792e-02 ip_v_neg_g:1.4116e-02 cos_v_neg_g:1.0077e-02 v_norm:1.1202e+00 g_norm:1.2505e+00 hv_norm:5.6640e-01 cos_v_hv:4.0018e-02 hg_norm:1.5005e+01 cos_g_hg:5.7679e-01 v_par:3.6121e-04 v_perp:1.1202e+00 L1_cos_v_neg_g:2.1277e-02 L1_v_norm:2.5053e-01 L2_cos_v_neg_g:2.6238e-02 L2_v_norm:2.5005e-01 L3_cos_v_neg_g:1.5212e-02 L3_v_norm:2.5328e-01 L4_cos_v_neg_g:7.4562e-03 L4_v_norm:2.6281e-01 L5_cos_v_neg_g:1.1010e-02 L5_v_norm:2.6358e-01 L6_cos_v_neg_g:1.2310e-02 L6_v_norm:2.6458e-01 L7_cos_v_neg_g:1.2488e-02 L7_v_norm:2.6065e-01 L8_cos_v_neg_g:1.4509e-02 L8_v_norm:2.6220e-01 L9_cos_v_neg_g:1.0535e-02 L9_v_norm:2.6659e-01 L10_cos_v_neg_g:8.6059e-03 L10_v_norm:2.6692e-01 L11_cos_v_neg_g:6.3396e-03 L11_v_norm:2.6770e-01 L12_cos_v_neg_g:8.1682e-03 L12_v_norm:2.5739e-01 diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/config.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..2ca9db2fd2b0fa5def389286d020c6cd8d62788f --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/adam_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 1, 
+ "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "adam", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "b42c6566-8fad-4a05-a731-a4909d0f6197", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..7897ed78781ba264ec4a1df925d64dc870e4dee8 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.878334403038025, + "total_l1_linf_norm": 16278.5625, + "total_spectral_norm": 1.8783342838287354, + "layer_1_update_fnorm": 0.3311390280723572, + "layer_1_max_l1_linf_norm": 0.4104083180427551, + "layer_1_max_spectral_norm": 0.06299792975187302, + "layer_2_update_fnorm": 0.3185162842273712, + "layer_2_max_l1_linf_norm": 0.5175017714500427, + "layer_2_max_spectral_norm": 0.07014401257038116, + "layer_3_update_fnorm": 0.3331528306007385, + "layer_3_max_l1_linf_norm": 0.47265493869781494, + "layer_3_max_spectral_norm": 0.06965389102697372, + "layer_4_update_fnorm": 0.37769871950149536, + "layer_4_max_l1_linf_norm": 0.4552053213119507, + "layer_4_max_spectral_norm": 0.07318747788667679, + "layer_5_update_fnorm": 0.367770791053772, + "layer_5_max_l1_linf_norm": 0.44768083095550537, + "layer_5_max_spectral_norm": 0.0677788257598877, + "layer_6_update_fnorm": 0.3953912854194641, + "layer_6_max_l1_linf_norm": 0.4638969898223877, + "layer_6_max_spectral_norm": 0.06294617801904678, + "layer_7_update_fnorm": 0.4082126319408417, + "layer_7_max_l1_linf_norm": 0.45096683502197266, + "layer_7_max_spectral_norm": 0.06487727910280228, + "layer_8_update_fnorm": 0.4232954680919647, + "layer_8_max_l1_linf_norm": 0.4492161273956299, + "layer_8_max_spectral_norm": 0.06213856115937233, + "layer_9_update_fnorm": 0.43148118257522583, + "layer_9_max_l1_linf_norm": 0.4663977026939392, + "layer_9_max_spectral_norm": 0.06512871384620667, + "layer_10_update_fnorm": 0.4364188611507416, + "layer_10_max_l1_linf_norm": 0.47628143429756165, + "layer_10_max_spectral_norm": 0.0669553205370903, + "layer_11_update_fnorm": 0.43396326899528503, + "layer_11_max_l1_linf_norm": 0.4742245674133301, + "layer_11_max_spectral_norm": 0.06872671097517014, + "layer_12_update_fnorm": 0.4046643078327179, + "layer_12_max_l1_linf_norm": 0.4840271472930908, + "layer_12_max_spectral_norm": 0.09433094412088394, + "total_sharpness": 0.03338683024048805, + "ip_v_neg_g": 0.06691545248031616, + "cos_v_neg_g": 0.03922653570771217, + "v_norm": 1.878334403038025, + "g_norm": 0.9081833958625793, + "hv_norm": 0.8523383736610413, + "cos_v_hv": 0.07357598096132278, + "hg_norm": 4.745843410491943, + "cos_g_hg": 0.5768895149230957, + "v_parallel_norm": 0.0027009707409888506, + "v_perp_norm": 1.8783323764801025, + "layer_1_v_norm": 0.3311390280723572, + "layer_1_cos_v_neg_g": 0.10757655650377274, + "layer_2_v_norm": 0.3185162842273712, + "layer_2_cos_v_neg_g": 0.14456282556056976, + "layer_3_v_norm": 0.3331528306007385, + "layer_3_cos_v_neg_g": 0.07855276763439178, + "layer_4_v_norm": 0.37769871950149536, + "layer_4_cos_v_neg_g": 
0.06272385269403458, + "layer_5_v_norm": 0.367770791053772, + "layer_5_cos_v_neg_g": 0.06592407077550888, + "layer_6_v_norm": 0.3953912556171417, + "layer_6_cos_v_neg_g": 0.05107644945383072, + "layer_7_v_norm": 0.4082126319408417, + "layer_7_cos_v_neg_g": 0.04542878270149231, + "layer_8_v_norm": 0.4232954680919647, + "layer_8_cos_v_neg_g": 0.033912986516952515, + "layer_9_v_norm": 0.43148118257522583, + "layer_9_cos_v_neg_g": 0.03093462437391281, + "layer_10_v_norm": 0.4364188611507416, + "layer_10_cos_v_neg_g": 0.03222111240029335, + "layer_11_v_norm": 0.43396326899528503, + "layer_11_cos_v_neg_g": 0.03048265166580677, + "layer_12_v_norm": 0.4046643078327179, + "layer_12_cos_v_neg_g": 0.053732503205537796, + "layer_1_sharpness": 0.11408098042011261, + "layer_2_sharpness": 0.05198827385902405, + "layer_3_sharpness": 0.012959091924130917, + "layer_4_sharpness": 0.006452509202063084, + "layer_5_sharpness": 0.006211471278220415, + "layer_6_sharpness": 0.0058324383571743965, + "layer_7_sharpness": 0.006602529902011156, + "layer_8_sharpness": 0.0039528775960206985, + "layer_9_sharpness": 0.00298269372433424, + "layer_10_sharpness": 0.0024370995815843344, + "layer_11_sharpness": 0.002516939537599683, + "layer_12_sharpness": 0.009670095518231392 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_10000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..964040121e8d84935bc507e272a5a6d54f268459 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.256441116333008, + "total_l1_linf_norm": 20115.0078125, + "total_spectral_norm": 2.256441116333008, + "layer_1_update_fnorm": 0.50361168384552, + "layer_1_max_l1_linf_norm": 0.7020329236984253, + "layer_1_max_spectral_norm": 0.09371642023324966, + "layer_2_update_fnorm": 0.5124355554580688, + "layer_2_max_l1_linf_norm": 0.6259580254554749, + "layer_2_max_spectral_norm": 0.0839458703994751, + "layer_3_update_fnorm": 0.527656078338623, + "layer_3_max_l1_linf_norm": 0.6020212769508362, + "layer_3_max_spectral_norm": 0.08122504502534866, + "layer_4_update_fnorm": 0.5314855575561523, + "layer_4_max_l1_linf_norm": 0.6400128602981567, + "layer_4_max_spectral_norm": 0.08986089378595352, + "layer_5_update_fnorm": 0.5297581553459167, + "layer_5_max_l1_linf_norm": 0.6453620195388794, + "layer_5_max_spectral_norm": 0.0931713730096817, + "layer_6_update_fnorm": 0.532495379447937, + "layer_6_max_l1_linf_norm": 0.5897854566574097, + "layer_6_max_spectral_norm": 0.07927070558071136, + "layer_7_update_fnorm": 0.5262057185173035, + "layer_7_max_l1_linf_norm": 0.6621009707450867, + "layer_7_max_spectral_norm": 0.07397270947694778, + "layer_8_update_fnorm": 0.5349504351615906, + "layer_8_max_l1_linf_norm": 0.5651981830596924, + "layer_8_max_spectral_norm": 0.0612650141119957, + "layer_9_update_fnorm": 0.5368508100509644, + "layer_9_max_l1_linf_norm": 0.5937102437019348, + "layer_9_max_spectral_norm": 0.04943709075450897, + "layer_10_update_fnorm": 0.5369714498519897, + "layer_10_max_l1_linf_norm": 0.6220614910125732, + "layer_10_max_spectral_norm": 0.0701528936624527, + "layer_11_update_fnorm": 0.5366345643997192, + "layer_11_max_l1_linf_norm": 0.5841791033744812, + "layer_11_max_spectral_norm": 0.08307306468486786, + "layer_12_update_fnorm": 0.5279362201690674, + 
"layer_12_max_l1_linf_norm": 0.6531914472579956, + "layer_12_max_spectral_norm": 0.09020088613033295, + "total_sharpness": 0.003967445343732834, + "ip_v_neg_g": 0.01011667586863041, + "cos_v_neg_g": 0.003741679945960641, + "v_norm": 2.256441116333008, + "g_norm": 1.198249340057373, + "hv_norm": 0.42902129888534546, + "cos_v_hv": 0.020866811275482178, + "hg_norm": 20.344884872436523, + "cos_g_hg": 0.6505393981933594, + "v_parallel_norm": 0.0002893104974646121, + "v_perp_norm": 2.256441116333008, + "layer_1_v_norm": 0.50361168384552, + "layer_1_cos_v_neg_g": 0.005443139933049679, + "layer_2_v_norm": 0.5124355554580688, + "layer_2_cos_v_neg_g": 0.002869053278118372, + "layer_3_v_norm": 0.527656078338623, + "layer_3_cos_v_neg_g": 0.0035858741030097008, + "layer_4_v_norm": 0.5314855575561523, + "layer_4_cos_v_neg_g": 0.004206061363220215, + "layer_5_v_norm": 0.5297581553459167, + "layer_5_cos_v_neg_g": 0.003892833599820733, + "layer_6_v_norm": 0.532495379447937, + "layer_6_cos_v_neg_g": 0.0048077781684696674, + "layer_7_v_norm": 0.5262057185173035, + "layer_7_cos_v_neg_g": 0.004426421597599983, + "layer_8_v_norm": 0.5349504351615906, + "layer_8_cos_v_neg_g": 0.004567173309624195, + "layer_9_v_norm": 0.5368508100509644, + "layer_9_cos_v_neg_g": 0.004229416139423847, + "layer_10_v_norm": 0.5369714498519897, + "layer_10_cos_v_neg_g": 0.005434914957731962, + "layer_11_v_norm": 0.5366345643997192, + "layer_11_cos_v_neg_g": 0.007272150833159685, + "layer_12_v_norm": 0.5279362201690674, + "layer_12_cos_v_neg_g": 0.00868746917694807, + "layer_1_sharpness": 0.003518645418807864, + "layer_2_sharpness": 0.0006824371521361172, + "layer_3_sharpness": 0.0007284111343324184, + "layer_4_sharpness": 0.0004882189678028226, + "layer_5_sharpness": 0.0006995096919126809, + "layer_6_sharpness": 0.0007298151031136513, + "layer_7_sharpness": 0.001124664326198399, + "layer_8_sharpness": 0.000950430054217577, + "layer_9_sharpness": 0.0010261802235618234, + "layer_10_sharpness": 0.0008425448322668672, + "layer_11_sharpness": 0.0008522093412466347, + "layer_12_sharpness": 0.0018651711288839579 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..03d2bb433897130050ccdc43d604ea99cee8071f --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.042729616165161, + "total_l1_linf_norm": 17953.8515625, + "total_spectral_norm": 2.042729377746582, + "layer_1_update_fnorm": 0.3716132938861847, + "layer_1_max_l1_linf_norm": 0.4831186532974243, + "layer_1_max_spectral_norm": 0.0722932368516922, + "layer_2_update_fnorm": 0.39591142535209656, + "layer_2_max_l1_linf_norm": 0.5670522451400757, + "layer_2_max_spectral_norm": 0.0758233293890953, + "layer_3_update_fnorm": 0.4323331415653229, + "layer_3_max_l1_linf_norm": 0.5075644850730896, + "layer_3_max_spectral_norm": 0.0857958272099495, + "layer_4_update_fnorm": 0.4607854187488556, + "layer_4_max_l1_linf_norm": 0.5440258979797363, + "layer_4_max_spectral_norm": 0.07797014713287354, + "layer_5_update_fnorm": 0.45600005984306335, + "layer_5_max_l1_linf_norm": 0.5328421592712402, + "layer_5_max_spectral_norm": 0.07223114371299744, + "layer_6_update_fnorm": 0.46130210161209106, + "layer_6_max_l1_linf_norm": 0.50828617811203, + 
"layer_6_max_spectral_norm": 0.0617964081466198, + "layer_7_update_fnorm": 0.45861974358558655, + "layer_7_max_l1_linf_norm": 0.4921337366104126, + "layer_7_max_spectral_norm": 0.05933582782745361, + "layer_8_update_fnorm": 0.4752819538116455, + "layer_8_max_l1_linf_norm": 0.5099600553512573, + "layer_8_max_spectral_norm": 0.06357308477163315, + "layer_9_update_fnorm": 0.4776802062988281, + "layer_9_max_l1_linf_norm": 0.5002601742744446, + "layer_9_max_spectral_norm": 0.06234614551067352, + "layer_10_update_fnorm": 0.48240217566490173, + "layer_10_max_l1_linf_norm": 0.5114092826843262, + "layer_10_max_spectral_norm": 0.06460624188184738, + "layer_11_update_fnorm": 0.47838982939720154, + "layer_11_max_l1_linf_norm": 0.5019173622131348, + "layer_11_max_spectral_norm": 0.06413723528385162, + "layer_12_update_fnorm": 0.4546346962451935, + "layer_12_max_l1_linf_norm": 0.48733797669410706, + "layer_12_max_spectral_norm": 0.08971022814512253, + "total_sharpness": 0.011777958832681179, + "ip_v_neg_g": 0.025364341214299202, + "cos_v_neg_g": 0.01677859015762806, + "v_norm": 2.042729616165161, + "g_norm": 0.7400434613227844, + "hv_norm": 0.34805384278297424, + "cos_v_hv": 0.06912490725517273, + "hg_norm": 2.3366074562072754, + "cos_g_hg": 0.3479495644569397, + "v_parallel_norm": 0.001102366833947599, + "v_perp_norm": 2.042729377746582, + "layer_1_v_norm": 0.3716132938861847, + "layer_1_cos_v_neg_g": 0.03383911773562431, + "layer_2_v_norm": 0.39591142535209656, + "layer_2_cos_v_neg_g": 0.04122646898031235, + "layer_3_v_norm": 0.43233317136764526, + "layer_3_cos_v_neg_g": 0.033531635999679565, + "layer_4_v_norm": 0.4607854187488556, + "layer_4_cos_v_neg_g": 0.026576582342386246, + "layer_5_v_norm": 0.45600005984306335, + "layer_5_cos_v_neg_g": 0.028886673972010612, + "layer_6_v_norm": 0.46130210161209106, + "layer_6_cos_v_neg_g": 0.023967942222952843, + "layer_7_v_norm": 0.45861974358558655, + "layer_7_cos_v_neg_g": 0.0231489110738039, + "layer_8_v_norm": 0.4752819538116455, + "layer_8_cos_v_neg_g": 0.019607555121183395, + "layer_9_v_norm": 0.4776802062988281, + "layer_9_cos_v_neg_g": 0.021958857774734497, + "layer_10_v_norm": 0.48240217566490173, + "layer_10_cos_v_neg_g": 0.020973071455955505, + "layer_11_v_norm": 0.47838979959487915, + "layer_11_cos_v_neg_g": 0.01867268793284893, + "layer_12_v_norm": 0.4546346962451935, + "layer_12_cos_v_neg_g": 0.02203242853283882, + "layer_1_sharpness": 0.020433463156223297, + "layer_2_sharpness": 0.005308460909873247, + "layer_3_sharpness": 0.004492986015975475, + "layer_4_sharpness": 0.002544393064454198, + "layer_5_sharpness": 0.002304966328665614, + "layer_6_sharpness": 0.002227657474577427, + "layer_7_sharpness": 0.0029677164275199175, + "layer_8_sharpness": 0.0021335931960493326, + "layer_9_sharpness": 0.002142618875950575, + "layer_10_sharpness": 0.0017267613438889384, + "layer_11_sharpness": 0.0017641762970015407, + "layer_12_sharpness": 0.0038391316775232553 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..b5188d20f6e1b46ac6dc7bb18a28d87390adecca --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.1097168922424316, + "total_l1_linf_norm": 18622.109375, + "total_spectral_norm": 
2.1097168922424316, + "layer_1_update_fnorm": 0.41498151421546936, + "layer_1_max_l1_linf_norm": 0.5308437943458557, + "layer_1_max_spectral_norm": 0.0788259506225586, + "layer_2_update_fnorm": 0.4395008981227875, + "layer_2_max_l1_linf_norm": 0.49377527832984924, + "layer_2_max_spectral_norm": 0.08052821457386017, + "layer_3_update_fnorm": 0.46398693323135376, + "layer_3_max_l1_linf_norm": 0.512062668800354, + "layer_3_max_spectral_norm": 0.08474162966012955, + "layer_4_update_fnorm": 0.4858446419239044, + "layer_4_max_l1_linf_norm": 0.5453816652297974, + "layer_4_max_spectral_norm": 0.07430075854063034, + "layer_5_update_fnorm": 0.4795481860637665, + "layer_5_max_l1_linf_norm": 0.5345184803009033, + "layer_5_max_spectral_norm": 0.06828960031270981, + "layer_6_update_fnorm": 0.48519811034202576, + "layer_6_max_l1_linf_norm": 0.5256967544555664, + "layer_6_max_spectral_norm": 0.056303173303604126, + "layer_7_update_fnorm": 0.48077890276908875, + "layer_7_max_l1_linf_norm": 0.5021024942398071, + "layer_7_max_spectral_norm": 0.054006729274988174, + "layer_8_update_fnorm": 0.49211958050727844, + "layer_8_max_l1_linf_norm": 0.5073568224906921, + "layer_8_max_spectral_norm": 0.058894939720630646, + "layer_9_update_fnorm": 0.49039578437805176, + "layer_9_max_l1_linf_norm": 0.5084162354469299, + "layer_9_max_spectral_norm": 0.05785704776644707, + "layer_10_update_fnorm": 0.4950588047504425, + "layer_10_max_l1_linf_norm": 0.5122032165527344, + "layer_10_max_spectral_norm": 0.05962524563074112, + "layer_11_update_fnorm": 0.49370473623275757, + "layer_11_max_l1_linf_norm": 0.528322160243988, + "layer_11_max_spectral_norm": 0.06043616682291031, + "layer_12_update_fnorm": 0.4721125066280365, + "layer_12_max_l1_linf_norm": 0.5117517709732056, + "layer_12_max_spectral_norm": 0.08439015597105026, + "total_sharpness": 0.00802728720009327, + "ip_v_neg_g": 0.018552716821432114, + "cos_v_neg_g": 0.011053129099309444, + "v_norm": 2.1097168922424316, + "g_norm": 0.7956060767173767, + "hv_norm": 0.3387686014175415, + "cos_v_hv": 0.04999077692627907, + "hg_norm": 2.129153251647949, + "cos_g_hg": 0.48187363147735596, + "v_parallel_norm": 0.00092377356486395, + "v_perp_norm": 2.1097166538238525, + "layer_1_v_norm": 0.41498151421546936, + "layer_1_cos_v_neg_g": 0.020219992846250534, + "layer_2_v_norm": 0.4395008981227875, + "layer_2_cos_v_neg_g": 0.02296103723347187, + "layer_3_v_norm": 0.46398690342903137, + "layer_3_cos_v_neg_g": 0.024254372343420982, + "layer_4_v_norm": 0.4858446419239044, + "layer_4_cos_v_neg_g": 0.019603684544563293, + "layer_5_v_norm": 0.4795481860637665, + "layer_5_cos_v_neg_g": 0.022361284121870995, + "layer_6_v_norm": 0.48519814014434814, + "layer_6_cos_v_neg_g": 0.014083437621593475, + "layer_7_v_norm": 0.48077890276908875, + "layer_7_cos_v_neg_g": 0.013658121228218079, + "layer_8_v_norm": 0.49211958050727844, + "layer_8_cos_v_neg_g": 0.013233964331448078, + "layer_9_v_norm": 0.49039578437805176, + "layer_9_cos_v_neg_g": 0.012540462426841259, + "layer_10_v_norm": 0.4950588047504425, + "layer_10_cos_v_neg_g": 0.013262507505714893, + "layer_11_v_norm": 0.49370473623275757, + "layer_11_cos_v_neg_g": 0.012244371697306633, + "layer_12_v_norm": 0.4721125066280365, + "layer_12_cos_v_neg_g": 0.015213515609502792, + "layer_1_sharpness": 0.012652159668505192, + "layer_2_sharpness": 0.0017196739790961146, + "layer_3_sharpness": 0.002404899336397648, + "layer_4_sharpness": 0.0015574101125821471, + "layer_5_sharpness": 0.0022629816085100174, + "layer_6_sharpness": 0.0015063456958159804, + 
"layer_7_sharpness": 0.0022111281286925077, + "layer_8_sharpness": 0.0017709886888042092, + "layer_9_sharpness": 0.0015085184713825583, + "layer_10_sharpness": 0.0013284756569191813, + "layer_11_sharpness": 0.0013041599886491895, + "layer_12_sharpness": 0.003198056947439909 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..4035be6d3b098fe28acec5f5dde4109525ec64f0 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.144362211227417, + "total_l1_linf_norm": 18991.34765625, + "total_spectral_norm": 2.144362688064575, + "layer_1_update_fnorm": 0.43293729424476624, + "layer_1_max_l1_linf_norm": 0.6011788845062256, + "layer_1_max_spectral_norm": 0.08097925782203674, + "layer_2_update_fnorm": 0.46479010581970215, + "layer_2_max_l1_linf_norm": 0.5683387517929077, + "layer_2_max_spectral_norm": 0.08136863261461258, + "layer_3_update_fnorm": 0.47894755005836487, + "layer_3_max_l1_linf_norm": 0.5760027766227722, + "layer_3_max_spectral_norm": 0.08217774331569672, + "layer_4_update_fnorm": 0.49741876125335693, + "layer_4_max_l1_linf_norm": 0.5749567151069641, + "layer_4_max_spectral_norm": 0.07055924087762833, + "layer_5_update_fnorm": 0.49538326263427734, + "layer_5_max_l1_linf_norm": 0.5646100640296936, + "layer_5_max_spectral_norm": 0.06912297010421753, + "layer_6_update_fnorm": 0.5017687082290649, + "layer_6_max_l1_linf_norm": 0.5317459106445312, + "layer_6_max_spectral_norm": 0.05967474728822708, + "layer_7_update_fnorm": 0.4970124065876007, + "layer_7_max_l1_linf_norm": 0.5282890796661377, + "layer_7_max_spectral_norm": 0.0536416620016098, + "layer_8_update_fnorm": 0.5053529739379883, + "layer_8_max_l1_linf_norm": 0.5200639963150024, + "layer_8_max_spectral_norm": 0.06030752882361412, + "layer_9_update_fnorm": 0.5053465962409973, + "layer_9_max_l1_linf_norm": 0.5265570878982544, + "layer_9_max_spectral_norm": 0.059515636414289474, + "layer_10_update_fnorm": 0.505069375038147, + "layer_10_max_l1_linf_norm": 0.5568762421607971, + "layer_10_max_spectral_norm": 0.06156053766608238, + "layer_11_update_fnorm": 0.5012977123260498, + "layer_11_max_l1_linf_norm": 0.5762889385223389, + "layer_11_max_spectral_norm": 0.06377136707305908, + "layer_12_update_fnorm": 0.48625391721725464, + "layer_12_max_l1_linf_norm": 0.52992844581604, + "layer_12_max_spectral_norm": 0.0854828953742981, + "total_sharpness": 0.01020487304776907, + "ip_v_neg_g": 0.02629251964390278, + "cos_v_neg_g": 0.016234140843153, + "v_norm": 2.144362211227417, + "g_norm": 0.7552743554115295, + "hv_norm": 0.3859029710292816, + "cos_v_hv": 0.056705817580223083, + "hg_norm": 2.1178529262542725, + "cos_g_hg": 0.4701683521270752, + "v_parallel_norm": 0.001074407366104424, + "v_perp_norm": 2.144361972808838, + "layer_1_v_norm": 0.43293729424476624, + "layer_1_cos_v_neg_g": 0.0255013145506382, + "layer_2_v_norm": 0.46479010581970215, + "layer_2_cos_v_neg_g": 0.022203873842954636, + "layer_3_v_norm": 0.47894757986068726, + "layer_3_cos_v_neg_g": 0.01789899915456772, + "layer_4_v_norm": 0.49741876125335693, + "layer_4_cos_v_neg_g": 0.01622958853840828, + "layer_5_v_norm": 0.49538326263427734, + "layer_5_cos_v_neg_g": 0.02470511384308338, + "layer_6_v_norm": 0.5017687678337097, + 
"layer_6_cos_v_neg_g": 0.01891271397471428, + "layer_7_v_norm": 0.4970124065876007, + "layer_7_cos_v_neg_g": 0.023099184036254883, + "layer_8_v_norm": 0.5053529739379883, + "layer_8_cos_v_neg_g": 0.02577674575150013, + "layer_9_v_norm": 0.5053465962409973, + "layer_9_cos_v_neg_g": 0.024510061368346214, + "layer_10_v_norm": 0.505069375038147, + "layer_10_cos_v_neg_g": 0.025845855474472046, + "layer_11_v_norm": 0.5012977123260498, + "layer_11_cos_v_neg_g": 0.025462433695793152, + "layer_12_v_norm": 0.48625391721725464, + "layer_12_cos_v_neg_g": 0.026765523478388786, + "layer_1_sharpness": 0.010570787824690342, + "layer_2_sharpness": 0.0009962953627109528, + "layer_3_sharpness": 0.001672073733061552, + "layer_4_sharpness": 0.0013280212879180908, + "layer_5_sharpness": 0.002393269445747137, + "layer_6_sharpness": 0.0015649086562916636, + "layer_7_sharpness": 0.002700114855542779, + "layer_8_sharpness": 0.002758991438895464, + "layer_9_sharpness": 0.0024563493207097054, + "layer_10_sharpness": 0.0019616910722106695, + "layer_11_sharpness": 0.0019146120175719261, + "layer_12_sharpness": 0.004751145839691162 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..65e0afb3f538283646795af72c59bee757a0c9cd --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.2313737869262695, + "total_l1_linf_norm": 19866.609375, + "total_spectral_norm": 2.2313740253448486, + "layer_1_update_fnorm": 0.490829199552536, + "layer_1_max_l1_linf_norm": 0.5976056456565857, + "layer_1_max_spectral_norm": 0.08588702976703644, + "layer_2_update_fnorm": 0.4980906546115875, + "layer_2_max_l1_linf_norm": 0.5690314173698425, + "layer_2_max_spectral_norm": 0.08484316617250443, + "layer_3_update_fnorm": 0.5124061107635498, + "layer_3_max_l1_linf_norm": 0.5846426486968994, + "layer_3_max_spectral_norm": 0.08414109796285629, + "layer_4_update_fnorm": 0.5234634876251221, + "layer_4_max_l1_linf_norm": 0.5607491135597229, + "layer_4_max_spectral_norm": 0.06865590065717697, + "layer_5_update_fnorm": 0.5205563902854919, + "layer_5_max_l1_linf_norm": 0.5842353105545044, + "layer_5_max_spectral_norm": 0.06771387904882431, + "layer_6_update_fnorm": 0.5273948907852173, + "layer_6_max_l1_linf_norm": 0.5476982593536377, + "layer_6_max_spectral_norm": 0.05805610120296478, + "layer_7_update_fnorm": 0.5222862958908081, + "layer_7_max_l1_linf_norm": 0.5504276752471924, + "layer_7_max_spectral_norm": 0.0534692257642746, + "layer_8_update_fnorm": 0.5300546288490295, + "layer_8_max_l1_linf_norm": 0.5616282224655151, + "layer_8_max_spectral_norm": 0.05725334957242012, + "layer_9_update_fnorm": 0.5330920815467834, + "layer_9_max_l1_linf_norm": 0.5722399950027466, + "layer_9_max_spectral_norm": 0.06279747933149338, + "layer_10_update_fnorm": 0.5369389057159424, + "layer_10_max_l1_linf_norm": 0.5675411820411682, + "layer_10_max_spectral_norm": 0.06510481238365173, + "layer_11_update_fnorm": 0.5340197086334229, + "layer_11_max_l1_linf_norm": 0.5695350170135498, + "layer_11_max_spectral_norm": 0.07077264040708542, + "layer_12_update_fnorm": 0.5210990905761719, + "layer_12_max_l1_linf_norm": 0.5988690257072449, + "layer_12_max_spectral_norm": 0.09197523444890976, + "total_sharpness": 0.0072046616114676, 
+ "ip_v_neg_g": 0.022536665201187134, + "cos_v_neg_g": 0.013974564149975777, + "v_norm": 2.2313737869262695, + "g_norm": 0.7227349281311035, + "hv_norm": 0.33381929993629456, + "cos_v_hv": 0.0481586679816246, + "hg_norm": 1.398723840713501, + "cos_g_hg": 0.4678879380226135, + "v_parallel_norm": 0.0010780912125483155, + "v_perp_norm": 2.2313735485076904, + "layer_1_v_norm": 0.490829199552536, + "layer_1_cos_v_neg_g": 0.028381213545799255, + "layer_2_v_norm": 0.4980906546115875, + "layer_2_cos_v_neg_g": 0.027371743693947792, + "layer_3_v_norm": 0.5124061107635498, + "layer_3_cos_v_neg_g": 0.01887165755033493, + "layer_4_v_norm": 0.5234634876251221, + "layer_4_cos_v_neg_g": 0.015449076890945435, + "layer_5_v_norm": 0.5205563902854919, + "layer_5_cos_v_neg_g": 0.017556477338075638, + "layer_6_v_norm": 0.5273948907852173, + "layer_6_cos_v_neg_g": 0.014227515086531639, + "layer_7_v_norm": 0.5222862958908081, + "layer_7_cos_v_neg_g": 0.016143400222063065, + "layer_8_v_norm": 0.5300546288490295, + "layer_8_cos_v_neg_g": 0.01641249470412731, + "layer_9_v_norm": 0.5330920815467834, + "layer_9_cos_v_neg_g": 0.014797940850257874, + "layer_10_v_norm": 0.5369389057159424, + "layer_10_cos_v_neg_g": 0.017534034326672554, + "layer_11_v_norm": 0.5340196490287781, + "layer_11_cos_v_neg_g": 0.01785861887037754, + "layer_12_v_norm": 0.5210990905761719, + "layer_12_cos_v_neg_g": 0.02518605627119541, + "layer_1_sharpness": 0.010206840001046658, + "layer_2_sharpness": 0.001822596532292664, + "layer_3_sharpness": 0.0014182829763740301, + "layer_4_sharpness": 0.0011888343142345548, + "layer_5_sharpness": 0.0011511493939906359, + "layer_6_sharpness": 0.0013677041279152036, + "layer_7_sharpness": 0.0017227762145921588, + "layer_8_sharpness": 0.0015570102259516716, + "layer_9_sharpness": 0.0014080952387303114, + "layer_10_sharpness": 0.001220343867316842, + "layer_11_sharpness": 0.001309670158661902, + "layer_12_sharpness": 0.004083497915416956 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..777d71c0b9bb35ebec27c3bd716d2d4970535dd3 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.1859076023101807, + "total_l1_linf_norm": 19423.958984375, + "total_spectral_norm": 2.1859073638916016, + "layer_1_update_fnorm": 0.46507710218429565, + "layer_1_max_l1_linf_norm": 0.577402651309967, + "layer_1_max_spectral_norm": 0.08304259926080704, + "layer_2_update_fnorm": 0.4854670464992523, + "layer_2_max_l1_linf_norm": 0.5539255738258362, + "layer_2_max_spectral_norm": 0.07950052618980408, + "layer_3_update_fnorm": 0.5009487867355347, + "layer_3_max_l1_linf_norm": 0.5687323808670044, + "layer_3_max_spectral_norm": 0.07785377651453018, + "layer_4_update_fnorm": 0.5128715634346008, + "layer_4_max_l1_linf_norm": 0.5768412947654724, + "layer_4_max_spectral_norm": 0.06208255514502525, + "layer_5_update_fnorm": 0.5063354969024658, + "layer_5_max_l1_linf_norm": 0.5806112289428711, + "layer_5_max_spectral_norm": 0.06734760105609894, + "layer_6_update_fnorm": 0.5160243511199951, + "layer_6_max_l1_linf_norm": 0.5390170216560364, + "layer_6_max_spectral_norm": 0.05958103388547897, + "layer_7_update_fnorm": 0.5105457305908203, + "layer_7_max_l1_linf_norm": 0.5335986614227295, + 
"layer_7_max_spectral_norm": 0.04774110019207001, + "layer_8_update_fnorm": 0.5154950022697449, + "layer_8_max_l1_linf_norm": 0.5300865173339844, + "layer_8_max_spectral_norm": 0.05103229731321335, + "layer_9_update_fnorm": 0.5136969685554504, + "layer_9_max_l1_linf_norm": 0.5357406139373779, + "layer_9_max_spectral_norm": 0.05102882534265518, + "layer_10_update_fnorm": 0.5107022523880005, + "layer_10_max_l1_linf_norm": 0.5248463153839111, + "layer_10_max_spectral_norm": 0.053571488708257675, + "layer_11_update_fnorm": 0.5107354521751404, + "layer_11_max_l1_linf_norm": 0.5563504695892334, + "layer_11_max_spectral_norm": 0.06367503851652145, + "layer_12_update_fnorm": 0.495372474193573, + "layer_12_max_l1_linf_norm": 0.5476990938186646, + "layer_12_max_spectral_norm": 0.07835310697555542, + "total_sharpness": 0.0071809967048466206, + "ip_v_neg_g": 0.015822719782590866, + "cos_v_neg_g": 0.008901444263756275, + "v_norm": 2.1859076023101807, + "g_norm": 0.8131840825080872, + "hv_norm": 0.41620340943336487, + "cos_v_hv": 0.03771471977233887, + "hg_norm": 2.9165332317352295, + "cos_g_hg": 0.5583151578903198, + "v_parallel_norm": 0.0006467204075306654, + "v_perp_norm": 2.1859076023101807, + "layer_1_v_norm": 0.46507710218429565, + "layer_1_cos_v_neg_g": 0.014886626973748207, + "layer_2_v_norm": 0.4854670464992523, + "layer_2_cos_v_neg_g": 0.022556915879249573, + "layer_3_v_norm": 0.5009487867355347, + "layer_3_cos_v_neg_g": 0.021096039563417435, + "layer_4_v_norm": 0.5128715634346008, + "layer_4_cos_v_neg_g": 0.010941126383841038, + "layer_5_v_norm": 0.5063354969024658, + "layer_5_cos_v_neg_g": 0.007916743867099285, + "layer_6_v_norm": 0.5160243511199951, + "layer_6_cos_v_neg_g": 0.00751790264621377, + "layer_7_v_norm": 0.5105457305908203, + "layer_7_cos_v_neg_g": 0.010258015245199203, + "layer_8_v_norm": 0.5154950022697449, + "layer_8_cos_v_neg_g": 0.00925404392182827, + "layer_9_v_norm": 0.5136969685554504, + "layer_9_cos_v_neg_g": 0.010217811912298203, + "layer_10_v_norm": 0.5107022523880005, + "layer_10_cos_v_neg_g": 0.011939994059503078, + "layer_11_v_norm": 0.5107354521751404, + "layer_11_cos_v_neg_g": 0.012728369794785976, + "layer_12_v_norm": 0.495372474193573, + "layer_12_cos_v_neg_g": 0.016126632690429688, + "layer_1_sharpness": 0.011107653379440308, + "layer_2_sharpness": 0.004035003017634153, + "layer_3_sharpness": 0.002626478672027588, + "layer_4_sharpness": 0.0011720851762220263, + "layer_5_sharpness": 0.0012098479783162475, + "layer_6_sharpness": 0.0012202857760712504, + "layer_7_sharpness": 0.001800894271582365, + "layer_8_sharpness": 0.001729277428239584, + "layer_9_sharpness": 0.0013288852060213685, + "layer_10_sharpness": 0.0009747957228682935, + "layer_11_sharpness": 0.0012277450878173113, + "layer_12_sharpness": 0.002622866304591298 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..8c4d922ca892ab3926d5d84b127986f9a8b333dd --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.194784164428711, + "total_l1_linf_norm": 19512.609375, + "total_spectral_norm": 2.194783926010132, + "layer_1_update_fnorm": 0.46964165568351746, + "layer_1_max_l1_linf_norm": 0.6016402244567871, + "layer_1_max_spectral_norm": 0.08238338679075241, 
+ "layer_2_update_fnorm": 0.4935939908027649, + "layer_2_max_l1_linf_norm": 0.5342075824737549, + "layer_2_max_spectral_norm": 0.08052550256252289, + "layer_3_update_fnorm": 0.4981958568096161, + "layer_3_max_l1_linf_norm": 0.5499794483184814, + "layer_3_max_spectral_norm": 0.07832643389701843, + "layer_4_update_fnorm": 0.5081595778465271, + "layer_4_max_l1_linf_norm": 0.5498306155204773, + "layer_4_max_spectral_norm": 0.0631972998380661, + "layer_5_update_fnorm": 0.5024797320365906, + "layer_5_max_l1_linf_norm": 0.5634773969650269, + "layer_5_max_spectral_norm": 0.06692103296518326, + "layer_6_update_fnorm": 0.5131524205207825, + "layer_6_max_l1_linf_norm": 0.5649717450141907, + "layer_6_max_spectral_norm": 0.06037157028913498, + "layer_7_update_fnorm": 0.5141947865486145, + "layer_7_max_l1_linf_norm": 0.55930495262146, + "layer_7_max_spectral_norm": 0.05081889033317566, + "layer_8_update_fnorm": 0.5216469168663025, + "layer_8_max_l1_linf_norm": 0.5336675643920898, + "layer_8_max_spectral_norm": 0.05149427801370621, + "layer_9_update_fnorm": 0.5199463367462158, + "layer_9_max_l1_linf_norm": 0.5325222015380859, + "layer_9_max_spectral_norm": 0.05233196169137955, + "layer_10_update_fnorm": 0.5160955190658569, + "layer_10_max_l1_linf_norm": 0.5917448997497559, + "layer_10_max_spectral_norm": 0.05913407728075981, + "layer_11_update_fnorm": 0.5171604156494141, + "layer_11_max_l1_linf_norm": 0.610249936580658, + "layer_11_max_spectral_norm": 0.06661330163478851, + "layer_12_update_fnorm": 0.5000795125961304, + "layer_12_max_l1_linf_norm": 0.5898138880729675, + "layer_12_max_spectral_norm": 0.07877683639526367, + "total_sharpness": 0.006118997931480408, + "ip_v_neg_g": 0.017805270850658417, + "cos_v_neg_g": 0.010862731374800205, + "v_norm": 2.194784164428711, + "g_norm": 0.7468230724334717, + "hv_norm": 0.42411693930625916, + "cos_v_hv": 0.031665511429309845, + "hg_norm": 2.5921385288238525, + "cos_g_hg": 0.4613349437713623, + "v_parallel_norm": 0.0008035546052269638, + "v_perp_norm": 2.194784164428711, + "layer_1_v_norm": 0.46964165568351746, + "layer_1_cos_v_neg_g": 0.031123729422688484, + "layer_2_v_norm": 0.4935939908027649, + "layer_2_cos_v_neg_g": 0.02362220175564289, + "layer_3_v_norm": 0.4981958270072937, + "layer_3_cos_v_neg_g": 0.016570676118135452, + "layer_4_v_norm": 0.5081595778465271, + "layer_4_cos_v_neg_g": 0.011101217940449715, + "layer_5_v_norm": 0.5024797320365906, + "layer_5_cos_v_neg_g": 0.010434411466121674, + "layer_6_v_norm": 0.5131524205207825, + "layer_6_cos_v_neg_g": 0.010955868288874626, + "layer_7_v_norm": 0.5141947865486145, + "layer_7_cos_v_neg_g": 0.012954478152096272, + "layer_8_v_norm": 0.5216468572616577, + "layer_8_cos_v_neg_g": 0.011428119614720345, + "layer_9_v_norm": 0.5199463367462158, + "layer_9_cos_v_neg_g": 0.010320063680410385, + "layer_10_v_norm": 0.5160955190658569, + "layer_10_cos_v_neg_g": 0.010284669697284698, + "layer_11_v_norm": 0.5171604156494141, + "layer_11_cos_v_neg_g": 0.010964795015752316, + "layer_12_v_norm": 0.5000795125961304, + "layer_12_cos_v_neg_g": 0.011717678047716618, + "layer_1_sharpness": 0.013054030016064644, + "layer_2_sharpness": 0.003461963962763548, + "layer_3_sharpness": 0.001671642530709505, + "layer_4_sharpness": 0.0007121730595827103, + "layer_5_sharpness": 0.0008966330788098276, + "layer_6_sharpness": 0.0011824064422398806, + "layer_7_sharpness": 0.0016234293580055237, + "layer_8_sharpness": 0.0015059600118547678, + "layer_9_sharpness": 0.0012671988224610686, + "layer_10_sharpness": 0.0009629781707189977, + 
"layer_11_sharpness": 0.000924356747418642, + "layer_12_sharpness": 0.0023936727084219456 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..aeddd1de5add947897f8ba23efb6e33891c35137 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.238088607788086, + "total_l1_linf_norm": 19927.390625, + "total_spectral_norm": 2.238088846206665, + "layer_1_update_fnorm": 0.5037688612937927, + "layer_1_max_l1_linf_norm": 0.6569094657897949, + "layer_1_max_spectral_norm": 0.1029672920703888, + "layer_2_update_fnorm": 0.5049216747283936, + "layer_2_max_l1_linf_norm": 0.5488865375518799, + "layer_2_max_spectral_norm": 0.08394569903612137, + "layer_3_update_fnorm": 0.5163975358009338, + "layer_3_max_l1_linf_norm": 0.5965222120285034, + "layer_3_max_spectral_norm": 0.08036834746599197, + "layer_4_update_fnorm": 0.5224692225456238, + "layer_4_max_l1_linf_norm": 0.620502233505249, + "layer_4_max_spectral_norm": 0.06697116047143936, + "layer_5_update_fnorm": 0.5191789865493774, + "layer_5_max_l1_linf_norm": 0.5728378295898438, + "layer_5_max_spectral_norm": 0.07531990855932236, + "layer_6_update_fnorm": 0.5253576636314392, + "layer_6_max_l1_linf_norm": 0.5884624719619751, + "layer_6_max_spectral_norm": 0.06926024705171585, + "layer_7_update_fnorm": 0.5218843817710876, + "layer_7_max_l1_linf_norm": 0.5482345223426819, + "layer_7_max_spectral_norm": 0.05758785083889961, + "layer_8_update_fnorm": 0.5287210941314697, + "layer_8_max_l1_linf_norm": 0.5572471618652344, + "layer_8_max_spectral_norm": 0.05044957622885704, + "layer_9_update_fnorm": 0.5283271074295044, + "layer_9_max_l1_linf_norm": 0.5726759433746338, + "layer_9_max_spectral_norm": 0.057717062532901764, + "layer_10_update_fnorm": 0.5259105563163757, + "layer_10_max_l1_linf_norm": 0.5635204911231995, + "layer_10_max_spectral_norm": 0.0660165399312973, + "layer_11_update_fnorm": 0.5268053412437439, + "layer_11_max_l1_linf_norm": 0.5960335731506348, + "layer_11_max_spectral_norm": 0.07850530743598938, + "layer_12_update_fnorm": 0.5168518424034119, + "layer_12_max_l1_linf_norm": 0.6027899980545044, + "layer_12_max_spectral_norm": 0.0735345259308815, + "total_sharpness": 0.00861254334449768, + "ip_v_neg_g": 0.024753712117671967, + "cos_v_neg_g": 0.013803220354020596, + "v_norm": 2.238088607788086, + "g_norm": 0.8012769222259521, + "hv_norm": 0.602280855178833, + "cos_v_hv": 0.03200439736247063, + "hg_norm": 9.95428466796875, + "cos_g_hg": 0.3529309034347534, + "v_parallel_norm": 0.0009955550776794553, + "v_perp_norm": 2.238088369369507, + "layer_1_v_norm": 0.5037688612937927, + "layer_1_cos_v_neg_g": 0.034486059099435806, + "layer_2_v_norm": 0.5049216747283936, + "layer_2_cos_v_neg_g": 0.02119341306388378, + "layer_3_v_norm": 0.5163975358009338, + "layer_3_cos_v_neg_g": 0.012072053737938404, + "layer_4_v_norm": 0.5224692225456238, + "layer_4_cos_v_neg_g": 0.00998219009488821, + "layer_5_v_norm": 0.5191789865493774, + "layer_5_cos_v_neg_g": 0.013918937183916569, + "layer_6_v_norm": 0.5253576636314392, + "layer_6_cos_v_neg_g": 0.014983863569796085, + "layer_7_v_norm": 0.5218843817710876, + "layer_7_cos_v_neg_g": 0.016398146748542786, + "layer_8_v_norm": 0.5287210941314697, + "layer_8_cos_v_neg_g": 
0.01586228422820568, + "layer_9_v_norm": 0.5283271074295044, + "layer_9_cos_v_neg_g": 0.01783531904220581, + "layer_10_v_norm": 0.5259105563163757, + "layer_10_cos_v_neg_g": 0.019295871257781982, + "layer_11_v_norm": 0.5268052816390991, + "layer_11_cos_v_neg_g": 0.02041281759738922, + "layer_12_v_norm": 0.5168518424034119, + "layer_12_cos_v_neg_g": 0.02121965028345585, + "layer_1_sharpness": 0.013643158599734306, + "layer_2_sharpness": 0.002506362274289131, + "layer_3_sharpness": 0.0014189862413331866, + "layer_4_sharpness": 0.0009443638846278191, + "layer_5_sharpness": 0.0010050847195088863, + "layer_6_sharpness": 0.001293708453886211, + "layer_7_sharpness": 0.0020550217013806105, + "layer_8_sharpness": 0.001767258276231587, + "layer_9_sharpness": 0.001954329200088978, + "layer_10_sharpness": 0.0014316040324047208, + "layer_11_sharpness": 0.001454862765967846, + "layer_12_sharpness": 0.002791089005768299 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..2b9685cbaeac640a650f5f644ab1f4944ab16c17 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3424036502838135, + "total_l1_linf_norm": 11586.359375, + "total_spectral_norm": 1.3424038887023926, + "layer_1_update_fnorm": 0.23374392092227936, + "layer_1_max_l1_linf_norm": 0.3850448727607727, + "layer_1_max_spectral_norm": 0.05567670986056328, + "layer_2_update_fnorm": 0.219018816947937, + "layer_2_max_l1_linf_norm": 0.3081614077091217, + "layer_2_max_spectral_norm": 0.051223646849393845, + "layer_3_update_fnorm": 0.22774377465248108, + "layer_3_max_l1_linf_norm": 0.3671313524246216, + "layer_3_max_spectral_norm": 0.05227451026439667, + "layer_4_update_fnorm": 0.2518695890903473, + "layer_4_max_l1_linf_norm": 0.37363895773887634, + "layer_4_max_spectral_norm": 0.06281903386116028, + "layer_5_update_fnorm": 0.2629379630088806, + "layer_5_max_l1_linf_norm": 0.4039331078529358, + "layer_5_max_spectral_norm": 0.061731405556201935, + "layer_6_update_fnorm": 0.2793961763381958, + "layer_6_max_l1_linf_norm": 0.41108402609825134, + "layer_6_max_spectral_norm": 0.060462452471256256, + "layer_7_update_fnorm": 0.2872203290462494, + "layer_7_max_l1_linf_norm": 0.37826189398765564, + "layer_7_max_spectral_norm": 0.059850551187992096, + "layer_8_update_fnorm": 0.2942703664302826, + "layer_8_max_l1_linf_norm": 0.3747805953025818, + "layer_8_max_spectral_norm": 0.06317312270402908, + "layer_9_update_fnorm": 0.29403969645500183, + "layer_9_max_l1_linf_norm": 0.3795725107192993, + "layer_9_max_spectral_norm": 0.06520339846611023, + "layer_10_update_fnorm": 0.2988283932209015, + "layer_10_max_l1_linf_norm": 0.376981645822525, + "layer_10_max_spectral_norm": 0.06887194514274597, + "layer_11_update_fnorm": 0.287278413772583, + "layer_11_max_l1_linf_norm": 0.37943729758262634, + "layer_11_max_spectral_norm": 0.07121593505144119, + "layer_12_update_fnorm": 0.2653281092643738, + "layer_12_max_l1_linf_norm": 0.3824160099029541, + "layer_12_max_spectral_norm": 0.0969904363155365, + "total_sharpness": 0.0713232234120369, + "ip_v_neg_g": 0.10161006450653076, + "cos_v_neg_g": 0.06567557156085968, + "v_norm": 1.3424036502838135, + "g_norm": 1.152523398399353, + "hv_norm": 0.9778636693954468, + "cos_v_hv": 
0.09791197627782822, + "hg_norm": 7.400205612182617, + "cos_g_hg": 0.6080043315887451, + "v_parallel_norm": 0.003945634234696627, + "v_perp_norm": 1.3423978090286255, + "layer_1_v_norm": 0.23374392092227936, + "layer_1_cos_v_neg_g": 0.1322774589061737, + "layer_2_v_norm": 0.219018816947937, + "layer_2_cos_v_neg_g": 0.18640105426311493, + "layer_3_v_norm": 0.22774377465248108, + "layer_3_cos_v_neg_g": 0.17245014011859894, + "layer_4_v_norm": 0.2518695890903473, + "layer_4_cos_v_neg_g": 0.16082461178302765, + "layer_5_v_norm": 0.2629379630088806, + "layer_5_cos_v_neg_g": 0.1655886173248291, + "layer_6_v_norm": 0.2793961763381958, + "layer_6_cos_v_neg_g": 0.13395951688289642, + "layer_7_v_norm": 0.2872203290462494, + "layer_7_cos_v_neg_g": 0.13233543932437897, + "layer_8_v_norm": 0.2942703664302826, + "layer_8_cos_v_neg_g": 0.11525501310825348, + "layer_9_v_norm": 0.29403969645500183, + "layer_9_cos_v_neg_g": 0.1104290708899498, + "layer_10_v_norm": 0.2988283932209015, + "layer_10_cos_v_neg_g": 0.09925577044487, + "layer_11_v_norm": 0.2872783839702606, + "layer_11_cos_v_neg_g": 0.1163019984960556, + "layer_12_v_norm": 0.2653281092643738, + "layer_12_cos_v_neg_g": 0.15859155356884003, + "layer_1_sharpness": 0.20012007653713226, + "layer_2_sharpness": 0.0557454377412796, + "layer_3_sharpness": 0.029735565185546875, + "layer_4_sharpness": 0.020663978531956673, + "layer_5_sharpness": 0.017011182382702827, + "layer_6_sharpness": 0.010149484500288963, + "layer_7_sharpness": 0.008831596933305264, + "layer_8_sharpness": 0.005972858518362045, + "layer_9_sharpness": 0.006235347129404545, + "layer_10_sharpness": 0.005254075862467289, + "layer_11_sharpness": 0.006667455192655325, + "layer_12_sharpness": 0.0326019823551178 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..ab8f28487cebcd4ce5bd1680209682831c7b3f78 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.2089338302612305, + "total_l1_linf_norm": 19630.05078125, + "total_spectral_norm": 2.2089338302612305, + "layer_1_update_fnorm": 0.47918277978897095, + "layer_1_max_l1_linf_norm": 0.5603561401367188, + "layer_1_max_spectral_norm": 0.07743410021066666, + "layer_2_update_fnorm": 0.4967811405658722, + "layer_2_max_l1_linf_norm": 0.5592313408851624, + "layer_2_max_spectral_norm": 0.08096524327993393, + "layer_3_update_fnorm": 0.5063077807426453, + "layer_3_max_l1_linf_norm": 0.567754864692688, + "layer_3_max_spectral_norm": 0.07590479403734207, + "layer_4_update_fnorm": 0.5162073373794556, + "layer_4_max_l1_linf_norm": 0.5628415942192078, + "layer_4_max_spectral_norm": 0.060059912502765656, + "layer_5_update_fnorm": 0.5152472853660583, + "layer_5_max_l1_linf_norm": 0.6039984226226807, + "layer_5_max_spectral_norm": 0.07021006941795349, + "layer_6_update_fnorm": 0.5219915509223938, + "layer_6_max_l1_linf_norm": 0.5842199325561523, + "layer_6_max_spectral_norm": 0.061781492084264755, + "layer_7_update_fnorm": 0.518640398979187, + "layer_7_max_l1_linf_norm": 0.5525172352790833, + "layer_7_max_spectral_norm": 0.05287260189652443, + "layer_8_update_fnorm": 0.5241948366165161, + "layer_8_max_l1_linf_norm": 0.5365408658981323, + "layer_8_max_spectral_norm": 0.04594375938177109, + 
"layer_9_update_fnorm": 0.5211505889892578, + "layer_9_max_l1_linf_norm": 0.5364865660667419, + "layer_9_max_spectral_norm": 0.0447566881775856, + "layer_10_update_fnorm": 0.5187749266624451, + "layer_10_max_l1_linf_norm": 0.5417795777320862, + "layer_10_max_spectral_norm": 0.057017192244529724, + "layer_11_update_fnorm": 0.5159738063812256, + "layer_11_max_l1_linf_norm": 0.5561716556549072, + "layer_11_max_spectral_norm": 0.06732652336359024, + "layer_12_update_fnorm": 0.5024656057357788, + "layer_12_max_l1_linf_norm": 0.5635955333709717, + "layer_12_max_spectral_norm": 0.07263876497745514, + "total_sharpness": 0.0041991532780230045, + "ip_v_neg_g": 0.012713184580206871, + "cos_v_neg_g": 0.00642114644870162, + "v_norm": 2.2089338302612305, + "g_norm": 0.8963117003440857, + "hv_norm": 0.3090144395828247, + "cos_v_hv": 0.030016887933015823, + "hg_norm": 7.764153003692627, + "cos_g_hg": 0.5362406373023987, + "v_parallel_norm": 0.0005754043813794851, + "v_perp_norm": 2.2089338302612305, + "layer_1_v_norm": 0.47918277978897095, + "layer_1_cos_v_neg_g": 0.011371655389666557, + "layer_2_v_norm": 0.4967811405658722, + "layer_2_cos_v_neg_g": 0.006467642728239298, + "layer_3_v_norm": 0.5063077807426453, + "layer_3_cos_v_neg_g": 0.01004535797983408, + "layer_4_v_norm": 0.5162073373794556, + "layer_4_cos_v_neg_g": 0.007115228567272425, + "layer_5_v_norm": 0.5152472853660583, + "layer_5_cos_v_neg_g": 0.00909019261598587, + "layer_6_v_norm": 0.5219915509223938, + "layer_6_cos_v_neg_g": 0.007567768916487694, + "layer_7_v_norm": 0.518640398979187, + "layer_7_cos_v_neg_g": 0.0077079301699995995, + "layer_8_v_norm": 0.5241948366165161, + "layer_8_cos_v_neg_g": 0.007276064250618219, + "layer_9_v_norm": 0.5211505889892578, + "layer_9_cos_v_neg_g": 0.007325246464461088, + "layer_10_v_norm": 0.5187749266624451, + "layer_10_cos_v_neg_g": 0.007291771471500397, + "layer_11_v_norm": 0.5159738063812256, + "layer_11_cos_v_neg_g": 0.008977538906037807, + "layer_12_v_norm": 0.5024656057357788, + "layer_12_cos_v_neg_g": 0.012613643892109394, + "layer_1_sharpness": 0.004468594212085009, + "layer_2_sharpness": 0.0006390550406649709, + "layer_3_sharpness": 0.0010841947514563799, + "layer_4_sharpness": 0.0007341416203416884, + "layer_5_sharpness": 0.0008266160148195922, + "layer_6_sharpness": 0.000915790384169668, + "layer_7_sharpness": 0.0014463169500231743, + "layer_8_sharpness": 0.0011312278220430017, + "layer_9_sharpness": 0.0009214018355123699, + "layer_10_sharpness": 0.0007751106168143451, + "layer_11_sharpness": 0.0009120486793108284, + "layer_12_sharpness": 0.0023077630903571844 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..ed4cfb2472474ba3324772b3a49713eb1a61ba20 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.2173967361450195, + "total_l1_linf_norm": 19722.583984375, + "total_spectral_norm": 2.2173967361450195, + "layer_1_update_fnorm": 0.477468878030777, + "layer_1_max_l1_linf_norm": 0.5890733003616333, + "layer_1_max_spectral_norm": 0.08495590835809708, + "layer_2_update_fnorm": 0.49459365010261536, + "layer_2_max_l1_linf_norm": 0.5840120911598206, + "layer_2_max_spectral_norm": 0.07892094552516937, + "layer_3_update_fnorm": 
0.5071389079093933, + "layer_3_max_l1_linf_norm": 0.6001710891723633, + "layer_3_max_spectral_norm": 0.07392341643571854, + "layer_4_update_fnorm": 0.52083820104599, + "layer_4_max_l1_linf_norm": 0.5892820358276367, + "layer_4_max_spectral_norm": 0.06820694357156754, + "layer_5_update_fnorm": 0.517204225063324, + "layer_5_max_l1_linf_norm": 0.6813424825668335, + "layer_5_max_spectral_norm": 0.07415629178285599, + "layer_6_update_fnorm": 0.5257001519203186, + "layer_6_max_l1_linf_norm": 0.5494900941848755, + "layer_6_max_spectral_norm": 0.06087765470147133, + "layer_7_update_fnorm": 0.5200725197792053, + "layer_7_max_l1_linf_norm": 0.5353781580924988, + "layer_7_max_spectral_norm": 0.054263144731521606, + "layer_8_update_fnorm": 0.5273181200027466, + "layer_8_max_l1_linf_norm": 0.5603402256965637, + "layer_8_max_spectral_norm": 0.04784901440143585, + "layer_9_update_fnorm": 0.5247995257377625, + "layer_9_max_l1_linf_norm": 0.536701500415802, + "layer_9_max_spectral_norm": 0.04506867751479149, + "layer_10_update_fnorm": 0.5248727798461914, + "layer_10_max_l1_linf_norm": 0.5540667176246643, + "layer_10_max_spectral_norm": 0.059640783816576004, + "layer_11_update_fnorm": 0.5240080952644348, + "layer_11_max_l1_linf_norm": 0.606562614440918, + "layer_11_max_spectral_norm": 0.07141951471567154, + "layer_12_update_fnorm": 0.5106891989707947, + "layer_12_max_l1_linf_norm": 0.6140848994255066, + "layer_12_max_spectral_norm": 0.08845976740121841, + "total_sharpness": 0.0034424313344061375, + "ip_v_neg_g": 0.011446135118603706, + "cos_v_neg_g": 0.004001126624643803, + "v_norm": 2.2173967361450195, + "g_norm": 1.2901290655136108, + "hv_norm": 0.3440956771373749, + "cos_v_hv": 0.02218347042798996, + "hg_norm": 14.258776664733887, + "cos_g_hg": 0.5676335096359253, + "v_parallel_norm": 0.0003364796284586191, + "v_perp_norm": 2.2173967361450195, + "layer_1_v_norm": 0.477468878030777, + "layer_1_cos_v_neg_g": 0.007874689064919949, + "layer_2_v_norm": 0.49459365010261536, + "layer_2_cos_v_neg_g": 0.00683789374306798, + "layer_3_v_norm": 0.5071389079093933, + "layer_3_cos_v_neg_g": 0.005812404677271843, + "layer_4_v_norm": 0.52083820104599, + "layer_4_cos_v_neg_g": 0.003172011114656925, + "layer_5_v_norm": 0.517204225063324, + "layer_5_cos_v_neg_g": 0.004414665978401899, + "layer_6_v_norm": 0.5257001519203186, + "layer_6_cos_v_neg_g": 0.003600770141929388, + "layer_7_v_norm": 0.5200725197792053, + "layer_7_cos_v_neg_g": 0.0055503989569842815, + "layer_8_v_norm": 0.5273181200027466, + "layer_8_cos_v_neg_g": 0.006249686237424612, + "layer_9_v_norm": 0.5247995257377625, + "layer_9_cos_v_neg_g": 0.005589100066572428, + "layer_10_v_norm": 0.5248727798461914, + "layer_10_cos_v_neg_g": 0.004869843367487192, + "layer_11_v_norm": 0.5240081548690796, + "layer_11_cos_v_neg_g": 0.005165714770555496, + "layer_12_v_norm": 0.5106891989707947, + "layer_12_cos_v_neg_g": 0.0066936626099050045, + "layer_1_sharpness": 0.004448274616152048, + "layer_2_sharpness": 0.00035964607377536595, + "layer_3_sharpness": 0.0007248598267324269, + "layer_4_sharpness": 0.0005818239878863096, + "layer_5_sharpness": 0.0007466793176718056, + "layer_6_sharpness": 0.0005784430541098118, + "layer_7_sharpness": 0.0009245582041330636, + "layer_8_sharpness": 0.0009359654504805803, + "layer_9_sharpness": 0.0007980727823451161, + "layer_10_sharpness": 0.0007142919348552823, + "layer_11_sharpness": 0.000746975070796907, + "layer_12_sharpness": 0.002636920427903533 +} \ No newline at end of file diff --git 
a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..ccbabe7cd53d596eebb030178586c59a4ea755ec --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.225661516189575, + "total_l1_linf_norm": 19807.0390625, + "total_spectral_norm": 2.2256617546081543, + "layer_1_update_fnorm": 0.48412632942199707, + "layer_1_max_l1_linf_norm": 0.5810456275939941, + "layer_1_max_spectral_norm": 0.08009403198957443, + "layer_2_update_fnorm": 0.502390444278717, + "layer_2_max_l1_linf_norm": 0.5612978935241699, + "layer_2_max_spectral_norm": 0.07947409898042679, + "layer_3_update_fnorm": 0.5140717625617981, + "layer_3_max_l1_linf_norm": 0.5784225463867188, + "layer_3_max_spectral_norm": 0.07244661450386047, + "layer_4_update_fnorm": 0.5232914090156555, + "layer_4_max_l1_linf_norm": 0.5748046636581421, + "layer_4_max_spectral_norm": 0.060285232961177826, + "layer_5_update_fnorm": 0.5225146412849426, + "layer_5_max_l1_linf_norm": 0.5697191953659058, + "layer_5_max_spectral_norm": 0.07369234412908554, + "layer_6_update_fnorm": 0.5267227292060852, + "layer_6_max_l1_linf_norm": 0.6031221151351929, + "layer_6_max_spectral_norm": 0.06497590243816376, + "layer_7_update_fnorm": 0.52239590883255, + "layer_7_max_l1_linf_norm": 0.5518701076507568, + "layer_7_max_spectral_norm": 0.056445926427841187, + "layer_8_update_fnorm": 0.5294041633605957, + "layer_8_max_l1_linf_norm": 0.5404763221740723, + "layer_8_max_spectral_norm": 0.048171523958444595, + "layer_9_update_fnorm": 0.5270324945449829, + "layer_9_max_l1_linf_norm": 0.5397505164146423, + "layer_9_max_spectral_norm": 0.0492909774184227, + "layer_10_update_fnorm": 0.5234782695770264, + "layer_10_max_l1_linf_norm": 0.5544552206993103, + "layer_10_max_spectral_norm": 0.061763696372509, + "layer_11_update_fnorm": 0.519398033618927, + "layer_11_max_l1_linf_norm": 0.5914751291275024, + "layer_11_max_spectral_norm": 0.07141809910535812, + "layer_12_update_fnorm": 0.5142908096313477, + "layer_12_max_l1_linf_norm": 0.6026360988616943, + "layer_12_max_spectral_norm": 0.08053223043680191, + "total_sharpness": 0.004407839383929968, + "ip_v_neg_g": 0.008700319565832615, + "cos_v_neg_g": 0.004871234763413668, + "v_norm": 2.225661516189575, + "g_norm": 0.8024851679801941, + "hv_norm": 0.3277050256729126, + "cos_v_hv": 0.02993655391037464, + "hg_norm": 4.450244903564453, + "cos_g_hg": 0.5049397945404053, + "v_parallel_norm": 0.0004185448633506894, + "v_perp_norm": 2.225661516189575, + "layer_1_v_norm": 0.48412632942199707, + "layer_1_cos_v_neg_g": 0.011117515154182911, + "layer_2_v_norm": 0.502390444278717, + "layer_2_cos_v_neg_g": 0.007114661391824484, + "layer_3_v_norm": 0.5140717625617981, + "layer_3_cos_v_neg_g": 0.004517165012657642, + "layer_4_v_norm": 0.5232914090156555, + "layer_4_cos_v_neg_g": 0.0039268420077860355, + "layer_5_v_norm": 0.5225146412849426, + "layer_5_cos_v_neg_g": 0.0047057559713721275, + "layer_6_v_norm": 0.5267226696014404, + "layer_6_cos_v_neg_g": 0.004590801429003477, + "layer_7_v_norm": 0.52239590883255, + "layer_7_cos_v_neg_g": 0.005099566653370857, + "layer_8_v_norm": 0.5294041633605957, + "layer_8_cos_v_neg_g": 0.004144481848925352, + "layer_9_v_norm": 0.5270324945449829, + "layer_9_cos_v_neg_g": 0.0047072237357497215, + "layer_10_v_norm": 
0.5234782695770264, + "layer_10_cos_v_neg_g": 0.005795628298074007, + "layer_11_v_norm": 0.519398033618927, + "layer_11_cos_v_neg_g": 0.007669758517295122, + "layer_12_v_norm": 0.5142908096313477, + "layer_12_cos_v_neg_g": 0.014349153265357018, + "layer_1_sharpness": 0.004465428646653891, + "layer_2_sharpness": 0.000680778524838388, + "layer_3_sharpness": 0.0009673258755356073, + "layer_4_sharpness": 0.0005722320056520402, + "layer_5_sharpness": 0.0008207574719563127, + "layer_6_sharpness": 0.0009073377004824579, + "layer_7_sharpness": 0.0012460027355700731, + "layer_8_sharpness": 0.0010471006389707327, + "layer_9_sharpness": 0.0012694144388660789, + "layer_10_sharpness": 0.001016299589537084, + "layer_11_sharpness": 0.0011629179352894425, + "layer_12_sharpness": 0.003349153557792306 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..ff12bfa3a03409168678cff64294345871cf5578 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.2510242462158203, + "total_l1_linf_norm": 20071.24609375, + "total_spectral_norm": 2.2510244846343994, + "layer_1_update_fnorm": 0.5028026103973389, + "layer_1_max_l1_linf_norm": 0.6020835638046265, + "layer_1_max_spectral_norm": 0.08662617951631546, + "layer_2_update_fnorm": 0.5153867602348328, + "layer_2_max_l1_linf_norm": 0.6077135801315308, + "layer_2_max_spectral_norm": 0.08195731043815613, + "layer_3_update_fnorm": 0.5197140574455261, + "layer_3_max_l1_linf_norm": 0.6444247364997864, + "layer_3_max_spectral_norm": 0.07441839575767517, + "layer_4_update_fnorm": 0.5279815196990967, + "layer_4_max_l1_linf_norm": 0.631205677986145, + "layer_4_max_spectral_norm": 0.0747964158654213, + "layer_5_update_fnorm": 0.5235231518745422, + "layer_5_max_l1_linf_norm": 0.6143225431442261, + "layer_5_max_spectral_norm": 0.07974136620759964, + "layer_6_update_fnorm": 0.5310568809509277, + "layer_6_max_l1_linf_norm": 0.5807823538780212, + "layer_6_max_spectral_norm": 0.08081327378749847, + "layer_7_update_fnorm": 0.5238601565361023, + "layer_7_max_l1_linf_norm": 0.619320273399353, + "layer_7_max_spectral_norm": 0.0700434148311615, + "layer_8_update_fnorm": 0.5319070816040039, + "layer_8_max_l1_linf_norm": 0.5554230213165283, + "layer_8_max_spectral_norm": 0.05504293739795685, + "layer_9_update_fnorm": 0.5299547910690308, + "layer_9_max_l1_linf_norm": 0.5348256826400757, + "layer_9_max_spectral_norm": 0.043941475450992584, + "layer_10_update_fnorm": 0.530841052532196, + "layer_10_max_l1_linf_norm": 0.5495635271072388, + "layer_10_max_spectral_norm": 0.055662572383880615, + "layer_11_update_fnorm": 0.5320121049880981, + "layer_11_max_l1_linf_norm": 0.6026872992515564, + "layer_11_max_spectral_norm": 0.06798732280731201, + "layer_12_update_fnorm": 0.5226157307624817, + "layer_12_max_l1_linf_norm": 0.549673318862915, + "layer_12_max_spectral_norm": 0.08082661032676697, + "total_sharpness": 0.0046465746127069, + "ip_v_neg_g": 0.0105523020029068, + "cos_v_neg_g": 0.005718282889574766, + "v_norm": 2.2510242462158203, + "g_norm": 0.8197877407073975, + "hv_norm": 0.4058978855609894, + "cos_v_hv": 0.02576892450451851, + "hg_norm": 4.054766654968262, + "cos_g_hg": 0.5511825680732727, + "v_parallel_norm": 0.0004689672205131501, + 
"v_perp_norm": 2.2510242462158203, + "layer_1_v_norm": 0.5028026103973389, + "layer_1_cos_v_neg_g": 0.013842018321156502, + "layer_2_v_norm": 0.5153867602348328, + "layer_2_cos_v_neg_g": 0.011555350385606289, + "layer_3_v_norm": 0.5197140574455261, + "layer_3_cos_v_neg_g": 0.0078438026830554, + "layer_4_v_norm": 0.5279815196990967, + "layer_4_cos_v_neg_g": 0.005867771804332733, + "layer_5_v_norm": 0.5235231518745422, + "layer_5_cos_v_neg_g": 0.004686852917075157, + "layer_6_v_norm": 0.5310568809509277, + "layer_6_cos_v_neg_g": 0.005112781655043364, + "layer_7_v_norm": 0.5238601565361023, + "layer_7_cos_v_neg_g": 0.005694283172488213, + "layer_8_v_norm": 0.5319070816040039, + "layer_8_cos_v_neg_g": 0.0061365761794149876, + "layer_9_v_norm": 0.5299547910690308, + "layer_9_cos_v_neg_g": 0.00584131246432662, + "layer_10_v_norm": 0.530841052532196, + "layer_10_cos_v_neg_g": 0.005581770557910204, + "layer_11_v_norm": 0.5320121049880981, + "layer_11_cos_v_neg_g": 0.007211407646536827, + "layer_12_v_norm": 0.5226157307624817, + "layer_12_cos_v_neg_g": 0.011671998538076878, + "layer_1_sharpness": 0.0053472742438316345, + "layer_2_sharpness": 0.0010066257091239095, + "layer_3_sharpness": 0.0009656524052843451, + "layer_4_sharpness": 0.0007776750135235488, + "layer_5_sharpness": 0.0006894728867337108, + "layer_6_sharpness": 0.0010394733399152756, + "layer_7_sharpness": 0.001298581250011921, + "layer_8_sharpness": 0.0011581252329051495, + "layer_9_sharpness": 0.0010532834567129612, + "layer_10_sharpness": 0.0007990242447704077, + "layer_11_sharpness": 0.0008204703917726874, + "layer_12_sharpness": 0.002141867531463504 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..d07af6b02668563efb3019cc4ce8c5450773263c --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.2519729137420654, + "total_l1_linf_norm": 20080.296875, + "total_spectral_norm": 2.2519726753234863, + "layer_1_update_fnorm": 0.5133788585662842, + "layer_1_max_l1_linf_norm": 0.5765475034713745, + "layer_1_max_spectral_norm": 0.08402503281831741, + "layer_2_update_fnorm": 0.5167635679244995, + "layer_2_max_l1_linf_norm": 0.6120648980140686, + "layer_2_max_spectral_norm": 0.08019645512104034, + "layer_3_update_fnorm": 0.5173183083534241, + "layer_3_max_l1_linf_norm": 0.5683653354644775, + "layer_3_max_spectral_norm": 0.07154551148414612, + "layer_4_update_fnorm": 0.530167281627655, + "layer_4_max_l1_linf_norm": 0.6145498752593994, + "layer_4_max_spectral_norm": 0.0746813416481018, + "layer_5_update_fnorm": 0.5270127654075623, + "layer_5_max_l1_linf_norm": 0.6123385429382324, + "layer_5_max_spectral_norm": 0.08305077999830246, + "layer_6_update_fnorm": 0.5296021103858948, + "layer_6_max_l1_linf_norm": 0.5560040473937988, + "layer_6_max_spectral_norm": 0.0689815953373909, + "layer_7_update_fnorm": 0.5245100855827332, + "layer_7_max_l1_linf_norm": 0.583220899105072, + "layer_7_max_spectral_norm": 0.06337553262710571, + "layer_8_update_fnorm": 0.5339004993438721, + "layer_8_max_l1_linf_norm": 0.564834713935852, + "layer_8_max_spectral_norm": 0.05456208065152168, + "layer_9_update_fnorm": 0.5320221781730652, + "layer_9_max_l1_linf_norm": 0.5491639375686646, + "layer_9_max_spectral_norm": 
0.0462917685508728, + "layer_10_update_fnorm": 0.531211256980896, + "layer_10_max_l1_linf_norm": 0.6237660646438599, + "layer_10_max_spectral_norm": 0.06026092544198036, + "layer_11_update_fnorm": 0.5278268456459045, + "layer_11_max_l1_linf_norm": 0.5681891441345215, + "layer_11_max_spectral_norm": 0.07129151374101639, + "layer_12_update_fnorm": 0.5208470821380615, + "layer_12_max_l1_linf_norm": 0.5812827348709106, + "layer_12_max_spectral_norm": 0.0758187398314476, + "total_sharpness": 0.003794541582465172, + "ip_v_neg_g": 0.008742826990783215, + "cos_v_neg_g": 0.004531423561275005, + "v_norm": 2.2519729137420654, + "g_norm": 0.8567498922348022, + "hv_norm": 0.33885857462882996, + "cos_v_hv": 0.02521761693060398, + "hg_norm": 4.619610786437988, + "cos_g_hg": 0.5380264520645142, + "v_parallel_norm": 0.00032939284574240446, + "v_perp_norm": 2.2519729137420654, + "layer_1_v_norm": 0.5133788585662842, + "layer_1_cos_v_neg_g": 0.010869724676012993, + "layer_2_v_norm": 0.5167635679244995, + "layer_2_cos_v_neg_g": 0.007810976821929216, + "layer_3_v_norm": 0.5173183083534241, + "layer_3_cos_v_neg_g": 0.006368684582412243, + "layer_4_v_norm": 0.530167281627655, + "layer_4_cos_v_neg_g": 0.0042037880048155785, + "layer_5_v_norm": 0.5270127654075623, + "layer_5_cos_v_neg_g": 0.004468122962862253, + "layer_6_v_norm": 0.5296021103858948, + "layer_6_cos_v_neg_g": 0.0038576906081289053, + "layer_7_v_norm": 0.5245100855827332, + "layer_7_cos_v_neg_g": 0.004290730692446232, + "layer_8_v_norm": 0.5339004993438721, + "layer_8_cos_v_neg_g": 0.004470992833375931, + "layer_9_v_norm": 0.5320221781730652, + "layer_9_cos_v_neg_g": 0.004225764889270067, + "layer_10_v_norm": 0.531211256980896, + "layer_10_cos_v_neg_g": 0.0058845821768045425, + "layer_11_v_norm": 0.5278268456459045, + "layer_11_cos_v_neg_g": 0.006541721988469362, + "layer_12_v_norm": 0.5208470821380615, + "layer_12_cos_v_neg_g": 0.008459114469587803, + "layer_1_sharpness": 0.0035818133037537336, + "layer_2_sharpness": 0.0009508089278824627, + "layer_3_sharpness": 0.0010290151694789529, + "layer_4_sharpness": 0.0007087320555001497, + "layer_5_sharpness": 0.00105485157109797, + "layer_6_sharpness": 0.0007035795715637505, + "layer_7_sharpness": 0.0010959119535982609, + "layer_8_sharpness": 0.0009594110888428986, + "layer_9_sharpness": 0.0009223298984579742, + "layer_10_sharpness": 0.0007285278406925499, + "layer_11_sharpness": 0.0007818281301297247, + "layer_12_sharpness": 0.0017623597523197532 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..bf023640ce5979db68ea600ce9e2d84221df83ba --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.240729331970215, + "total_l1_linf_norm": 19950.08203125, + "total_spectral_norm": 2.240729570388794, + "layer_1_update_fnorm": 0.49571576714515686, + "layer_1_max_l1_linf_norm": 0.6304517984390259, + "layer_1_max_spectral_norm": 0.08719436824321747, + "layer_2_update_fnorm": 0.509324312210083, + "layer_2_max_l1_linf_norm": 0.5693233013153076, + "layer_2_max_spectral_norm": 0.0800657793879509, + "layer_3_update_fnorm": 0.5122302174568176, + "layer_3_max_l1_linf_norm": 0.5753648281097412, + "layer_3_max_spectral_norm": 0.07261708378791809, + "layer_4_update_fnorm": 
0.5246667265892029, + "layer_4_max_l1_linf_norm": 0.6242188811302185, + "layer_4_max_spectral_norm": 0.06957866996526718, + "layer_5_update_fnorm": 0.5228288173675537, + "layer_5_max_l1_linf_norm": 0.6251419186592102, + "layer_5_max_spectral_norm": 0.07692839950323105, + "layer_6_update_fnorm": 0.5264508724212646, + "layer_6_max_l1_linf_norm": 0.5865170955657959, + "layer_6_max_spectral_norm": 0.06660491228103638, + "layer_7_update_fnorm": 0.5189507603645325, + "layer_7_max_l1_linf_norm": 0.5781061053276062, + "layer_7_max_spectral_norm": 0.05962735414505005, + "layer_8_update_fnorm": 0.5286971926689148, + "layer_8_max_l1_linf_norm": 0.543889045715332, + "layer_8_max_spectral_norm": 0.05344291031360626, + "layer_9_update_fnorm": 0.5322630405426025, + "layer_9_max_l1_linf_norm": 0.5522478818893433, + "layer_9_max_spectral_norm": 0.04923107475042343, + "layer_10_update_fnorm": 0.5320789217948914, + "layer_10_max_l1_linf_norm": 0.5729959607124329, + "layer_10_max_spectral_norm": 0.06356032937765121, + "layer_11_update_fnorm": 0.530661404132843, + "layer_11_max_l1_linf_norm": 0.5983149409294128, + "layer_11_max_spectral_norm": 0.07045949995517731, + "layer_12_update_fnorm": 0.521763801574707, + "layer_12_max_l1_linf_norm": 0.6132709980010986, + "layer_12_max_spectral_norm": 0.07204511761665344, + "total_sharpness": 0.004006450995802879, + "ip_v_neg_g": 0.011144916526973248, + "cos_v_neg_g": 0.0051546539179980755, + "v_norm": 2.240729331970215, + "g_norm": 0.964912474155426, + "hv_norm": 0.37855613231658936, + "cos_v_hv": 0.02371477335691452, + "hg_norm": 7.5117506980896, + "cos_g_hg": 0.6033095121383667, + "v_parallel_norm": 0.00037791492650285363, + "v_perp_norm": 2.240729331970215, + "layer_1_v_norm": 0.49571576714515686, + "layer_1_cos_v_neg_g": 0.009788800962269306, + "layer_2_v_norm": 0.509324312210083, + "layer_2_cos_v_neg_g": 0.00514116371050477, + "layer_3_v_norm": 0.5122302174568176, + "layer_3_cos_v_neg_g": 0.0038974087219685316, + "layer_4_v_norm": 0.5246667265892029, + "layer_4_cos_v_neg_g": 0.0033673651050776243, + "layer_5_v_norm": 0.5228288173675537, + "layer_5_cos_v_neg_g": 0.004592543933540583, + "layer_6_v_norm": 0.5264508724212646, + "layer_6_cos_v_neg_g": 0.0051876395009458065, + "layer_7_v_norm": 0.5189507603645325, + "layer_7_cos_v_neg_g": 0.005635402165353298, + "layer_8_v_norm": 0.5286971926689148, + "layer_8_cos_v_neg_g": 0.0061406963504850864, + "layer_9_v_norm": 0.5322630405426025, + "layer_9_cos_v_neg_g": 0.006688747089356184, + "layer_10_v_norm": 0.5320789217948914, + "layer_10_cos_v_neg_g": 0.008354930207133293, + "layer_11_v_norm": 0.530661404132843, + "layer_11_cos_v_neg_g": 0.008896059356629848, + "layer_12_v_norm": 0.521763801574707, + "layer_12_cos_v_neg_g": 0.013273519463837147, + "layer_1_sharpness": 0.004364529624581337, + "layer_2_sharpness": 0.0009384853183291852, + "layer_3_sharpness": 0.000726891856174916, + "layer_4_sharpness": 0.0004899187479168177, + "layer_5_sharpness": 0.000615457072854042, + "layer_6_sharpness": 0.0007073228480294347, + "layer_7_sharpness": 0.0011230639647692442, + "layer_8_sharpness": 0.0010197254596278071, + "layer_9_sharpness": 0.00106843002140522, + "layer_10_sharpness": 0.0009020311408676207, + "layer_11_sharpness": 0.0008144588791765273, + "layer_12_sharpness": 0.0015369585016742349 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_8000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_8000.json new 
file mode 100644 index 0000000000000000000000000000000000000000..f2125e717c0922a66f97bb53d05f379cbaf3c2d3 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.2587192058563232, + "total_l1_linf_norm": 20166.26953125, + "total_spectral_norm": 2.258718729019165, + "layer_1_update_fnorm": 0.520748496055603, + "layer_1_max_l1_linf_norm": 0.6272806525230408, + "layer_1_max_spectral_norm": 0.08771487325429916, + "layer_2_update_fnorm": 0.5331931710243225, + "layer_2_max_l1_linf_norm": 0.6066648960113525, + "layer_2_max_spectral_norm": 0.08748822659254074, + "layer_3_update_fnorm": 0.5253615975379944, + "layer_3_max_l1_linf_norm": 0.5730903148651123, + "layer_3_max_spectral_norm": 0.07314964383840561, + "layer_4_update_fnorm": 0.5284672379493713, + "layer_4_max_l1_linf_norm": 0.6319426894187927, + "layer_4_max_spectral_norm": 0.06947160512208939, + "layer_5_update_fnorm": 0.5265092849731445, + "layer_5_max_l1_linf_norm": 0.5956939458847046, + "layer_5_max_spectral_norm": 0.0742354765534401, + "layer_6_update_fnorm": 0.5318737030029297, + "layer_6_max_l1_linf_norm": 0.5676554441452026, + "layer_6_max_spectral_norm": 0.06805135309696198, + "layer_7_update_fnorm": 0.5267806649208069, + "layer_7_max_l1_linf_norm": 0.5723435282707214, + "layer_7_max_spectral_norm": 0.06311728060245514, + "layer_8_update_fnorm": 0.5358293056488037, + "layer_8_max_l1_linf_norm": 0.5602874755859375, + "layer_8_max_spectral_norm": 0.05248498544096947, + "layer_9_update_fnorm": 0.5344420671463013, + "layer_9_max_l1_linf_norm": 0.5406072735786438, + "layer_9_max_spectral_norm": 0.04583948850631714, + "layer_10_update_fnorm": 0.5332685112953186, + "layer_10_max_l1_linf_norm": 0.561110258102417, + "layer_10_max_spectral_norm": 0.05595193803310394, + "layer_11_update_fnorm": 0.5319786071777344, + "layer_11_max_l1_linf_norm": 0.57371985912323, + "layer_11_max_spectral_norm": 0.06747953593730927, + "layer_12_update_fnorm": 0.526354193687439, + "layer_12_max_l1_linf_norm": 0.6029219031333923, + "layer_12_max_spectral_norm": 0.07954072952270508, + "total_sharpness": 0.004343604668974876, + "ip_v_neg_g": 0.012822851538658142, + "cos_v_neg_g": 0.006326150149106979, + "v_norm": 2.2587192058563232, + "g_norm": 0.8973934054374695, + "hv_norm": 0.48598557710647583, + "cos_v_hv": 0.020187806338071823, + "hg_norm": 6.701879024505615, + "cos_g_hg": 0.5413090586662292, + "v_parallel_norm": 0.0005166797782294452, + "v_perp_norm": 2.258718967437744, + "layer_1_v_norm": 0.520748496055603, + "layer_1_cos_v_neg_g": 0.02084486559033394, + "layer_2_v_norm": 0.5331931710243225, + "layer_2_cos_v_neg_g": 0.02034848742187023, + "layer_3_v_norm": 0.5253615975379944, + "layer_3_cos_v_neg_g": 0.009195979684591293, + "layer_4_v_norm": 0.5284672379493713, + "layer_4_cos_v_neg_g": 0.004342501517385244, + "layer_5_v_norm": 0.5265092849731445, + "layer_5_cos_v_neg_g": 0.005585966166108847, + "layer_6_v_norm": 0.5318737030029297, + "layer_6_cos_v_neg_g": 0.002625854453071952, + "layer_7_v_norm": 0.5267806649208069, + "layer_7_cos_v_neg_g": 0.003680276684463024, + "layer_8_v_norm": 0.5358293056488037, + "layer_8_cos_v_neg_g": 0.004005536902695894, + "layer_9_v_norm": 0.5344420671463013, + "layer_9_cos_v_neg_g": 0.0038298312574625015, + "layer_10_v_norm": 0.5332685112953186, + "layer_10_cos_v_neg_g": 0.005091129336506128, + "layer_11_v_norm": 0.5319786071777344, + "layer_11_cos_v_neg_g": 0.006199860479682684, + "layer_12_v_norm": 0.526354193687439, + 
"layer_12_cos_v_neg_g": 0.011563125997781754, + "layer_1_sharpness": 0.009042122401297092, + "layer_2_sharpness": 0.003675403306260705, + "layer_3_sharpness": 0.0015489034121856093, + "layer_4_sharpness": 0.0005080997361801565, + "layer_5_sharpness": 0.000740233575925231, + "layer_6_sharpness": 0.0006066793575882912, + "layer_7_sharpness": 0.0010783616453409195, + "layer_8_sharpness": 0.0008498922688886523, + "layer_9_sharpness": 0.0007581972167827189, + "layer_10_sharpness": 0.0005780741921626031, + "layer_11_sharpness": 0.0006242678500711918, + "layer_12_sharpness": 0.001975080231204629 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..d166d03e9bbcdc30e48138b75622757167419e15 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.210829973220825, + "total_l1_linf_norm": 19637.2578125, + "total_spectral_norm": 2.2108302116394043, + "layer_1_update_fnorm": 0.4823213815689087, + "layer_1_max_l1_linf_norm": 0.6137989163398743, + "layer_1_max_spectral_norm": 0.08602933585643768, + "layer_2_update_fnorm": 0.5093998312950134, + "layer_2_max_l1_linf_norm": 0.6122022271156311, + "layer_2_max_spectral_norm": 0.0795191153883934, + "layer_3_update_fnorm": 0.5119498372077942, + "layer_3_max_l1_linf_norm": 0.5572488307952881, + "layer_3_max_spectral_norm": 0.07295006513595581, + "layer_4_update_fnorm": 0.5176073908805847, + "layer_4_max_l1_linf_norm": 0.6262236833572388, + "layer_4_max_spectral_norm": 0.07367013394832611, + "layer_5_update_fnorm": 0.5139520168304443, + "layer_5_max_l1_linf_norm": 0.6369047164916992, + "layer_5_max_spectral_norm": 0.0787818655371666, + "layer_6_update_fnorm": 0.5215743780136108, + "layer_6_max_l1_linf_norm": 0.5627914667129517, + "layer_6_max_spectral_norm": 0.06983684003353119, + "layer_7_update_fnorm": 0.5152009725570679, + "layer_7_max_l1_linf_norm": 0.5614944696426392, + "layer_7_max_spectral_norm": 0.06520149111747742, + "layer_8_update_fnorm": 0.5245999693870544, + "layer_8_max_l1_linf_norm": 0.5910801887512207, + "layer_8_max_spectral_norm": 0.05769095569849014, + "layer_9_update_fnorm": 0.5215643644332886, + "layer_9_max_l1_linf_norm": 0.559273362159729, + "layer_9_max_spectral_norm": 0.04700716212391853, + "layer_10_update_fnorm": 0.5180925130844116, + "layer_10_max_l1_linf_norm": 0.5770614743232727, + "layer_10_max_spectral_norm": 0.05911358445882797, + "layer_11_update_fnorm": 0.5120787024497986, + "layer_11_max_l1_linf_norm": 0.6419675350189209, + "layer_11_max_spectral_norm": 0.07174181193113327, + "layer_12_update_fnorm": 0.5093337893486023, + "layer_12_max_l1_linf_norm": 0.6456868648529053, + "layer_12_max_spectral_norm": 0.10034490376710892, + "total_sharpness": 0.0037528015673160553, + "ip_v_neg_g": 0.008029738441109657, + "cos_v_neg_g": 0.004016411490738392, + "v_norm": 2.210829973220825, + "g_norm": 0.9042901992797852, + "hv_norm": 0.34447774291038513, + "cos_v_hv": 0.02408517152070999, + "hg_norm": 5.707664489746094, + "cos_g_hg": 0.5849370956420898, + "v_parallel_norm": 0.0003192113945260644, + "v_perp_norm": 2.210829973220825, + "layer_1_v_norm": 0.4823213815689087, + "layer_1_cos_v_neg_g": 0.007017313502728939, + "layer_2_v_norm": 0.5093998312950134, + "layer_2_cos_v_neg_g": 
0.005463203880935907, + "layer_3_v_norm": 0.5119498372077942, + "layer_3_cos_v_neg_g": 0.0029749651439487934, + "layer_4_v_norm": 0.5176073908805847, + "layer_4_cos_v_neg_g": 0.002914082957431674, + "layer_5_v_norm": 0.5139520168304443, + "layer_5_cos_v_neg_g": 0.003953223582357168, + "layer_6_v_norm": 0.5215743780136108, + "layer_6_cos_v_neg_g": 0.00319975265301764, + "layer_7_v_norm": 0.5152009725570679, + "layer_7_cos_v_neg_g": 0.004497184418141842, + "layer_8_v_norm": 0.5245999693870544, + "layer_8_cos_v_neg_g": 0.005682841874659061, + "layer_9_v_norm": 0.5215643644332886, + "layer_9_cos_v_neg_g": 0.0053324103355407715, + "layer_10_v_norm": 0.5180925130844116, + "layer_10_cos_v_neg_g": 0.005433524493128061, + "layer_11_v_norm": 0.5120787620544434, + "layer_11_cos_v_neg_g": 0.0063320002518594265, + "layer_12_v_norm": 0.5093337893486023, + "layer_12_cos_v_neg_g": 0.01240500994026661, + "layer_1_sharpness": 0.0032302977051585913, + "layer_2_sharpness": 0.0006740433163940907, + "layer_3_sharpness": 0.0006914041587151587, + "layer_4_sharpness": 0.0005614531110040843, + "layer_5_sharpness": 0.0005774049204774201, + "layer_6_sharpness": 0.0007455507293343544, + "layer_7_sharpness": 0.0011054533533751965, + "layer_8_sharpness": 0.0009179917396977544, + "layer_9_sharpness": 0.0008463806589134037, + "layer_10_sharpness": 0.000700374657753855, + "layer_11_sharpness": 0.0008633250836282969, + "layer_12_sharpness": 0.005522625520825386 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..498b5cb6caa8e61393aff7afa6b18f4752e5df23 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.2524209022521973, + "total_l1_linf_norm": 20091.97265625, + "total_spectral_norm": 2.252420663833618, + "layer_1_update_fnorm": 0.5096139907836914, + "layer_1_max_l1_linf_norm": 0.6409592628479004, + "layer_1_max_spectral_norm": 0.08233631402254105, + "layer_2_update_fnorm": 0.5210292339324951, + "layer_2_max_l1_linf_norm": 0.6389988660812378, + "layer_2_max_spectral_norm": 0.08082740008831024, + "layer_3_update_fnorm": 0.5229647755622864, + "layer_3_max_l1_linf_norm": 0.6239070296287537, + "layer_3_max_spectral_norm": 0.07245524227619171, + "layer_4_update_fnorm": 0.529891312122345, + "layer_4_max_l1_linf_norm": 0.6420949101448059, + "layer_4_max_spectral_norm": 0.0736444741487503, + "layer_5_update_fnorm": 0.5252982378005981, + "layer_5_max_l1_linf_norm": 0.6244509816169739, + "layer_5_max_spectral_norm": 0.08400332182645798, + "layer_6_update_fnorm": 0.5290130972862244, + "layer_6_max_l1_linf_norm": 0.5820355415344238, + "layer_6_max_spectral_norm": 0.07523633539676666, + "layer_7_update_fnorm": 0.5249430537223816, + "layer_7_max_l1_linf_norm": 0.5574089288711548, + "layer_7_max_spectral_norm": 0.06789730489253998, + "layer_8_update_fnorm": 0.5361278653144836, + "layer_8_max_l1_linf_norm": 0.5687041282653809, + "layer_8_max_spectral_norm": 0.0601080097258091, + "layer_9_update_fnorm": 0.5325944423675537, + "layer_9_max_l1_linf_norm": 0.5616532564163208, + "layer_9_max_spectral_norm": 0.048703134059906006, + "layer_10_update_fnorm": 0.5327255129814148, + "layer_10_max_l1_linf_norm": 0.554384708404541, + "layer_10_max_spectral_norm": 0.06238086521625519, + 
"layer_11_update_fnorm": 0.5321220755577087, + "layer_11_max_l1_linf_norm": 0.5777284502983093, + "layer_11_max_spectral_norm": 0.07211107760667801, + "layer_12_update_fnorm": 0.5222422480583191, + "layer_12_max_l1_linf_norm": 0.5856319665908813, + "layer_12_max_spectral_norm": 0.07545129209756851, + "total_sharpness": 0.0039840578101575375, + "ip_v_neg_g": 0.012525610625743866, + "cos_v_neg_g": 0.005595730151981115, + "v_norm": 2.2524209022521973, + "g_norm": 0.9937853813171387, + "hv_norm": 0.4234640300273895, + "cos_v_hv": 0.021191351115703583, + "hg_norm": 9.91030216217041, + "cos_g_hg": 0.6229751110076904, + "v_parallel_norm": 0.00036085068131797016, + "v_perp_norm": 2.2524209022521973, + "layer_1_v_norm": 0.5096139907836914, + "layer_1_cos_v_neg_g": 0.01227456983178854, + "layer_2_v_norm": 0.5210292339324951, + "layer_2_cos_v_neg_g": 0.012099085375666618, + "layer_3_v_norm": 0.5229647755622864, + "layer_3_cos_v_neg_g": 0.007863393053412437, + "layer_4_v_norm": 0.529891312122345, + "layer_4_cos_v_neg_g": 0.004934300202876329, + "layer_5_v_norm": 0.5252982378005981, + "layer_5_cos_v_neg_g": 0.005120756570249796, + "layer_6_v_norm": 0.5290130972862244, + "layer_6_cos_v_neg_g": 0.005266312975436449, + "layer_7_v_norm": 0.5249430537223816, + "layer_7_cos_v_neg_g": 0.005775612313300371, + "layer_8_v_norm": 0.5361278653144836, + "layer_8_cos_v_neg_g": 0.005542650353163481, + "layer_9_v_norm": 0.5325944423675537, + "layer_9_cos_v_neg_g": 0.006603861227631569, + "layer_10_v_norm": 0.5327255129814148, + "layer_10_cos_v_neg_g": 0.007390504702925682, + "layer_11_v_norm": 0.532122015953064, + "layer_11_cos_v_neg_g": 0.007652612868696451, + "layer_12_v_norm": 0.5222422480583191, + "layer_12_cos_v_neg_g": 0.010117914527654648, + "layer_1_sharpness": 0.005675750318914652, + "layer_2_sharpness": 0.0015800894470885396, + "layer_3_sharpness": 0.0009286047425121069, + "layer_4_sharpness": 0.000487437064293772, + "layer_5_sharpness": 0.0005868286825716496, + "layer_6_sharpness": 0.0007388092926703393, + "layer_7_sharpness": 0.0011631121160462499, + "layer_8_sharpness": 0.0010395736899226904, + "layer_9_sharpness": 0.0010605831630527973, + "layer_10_sharpness": 0.00076981948222965, + "layer_11_sharpness": 0.0007591932662762702, + "layer_12_sharpness": 0.001867355196736753 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..add4c0530670e10246393a1bb0478bc4222fa202 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.243312120437622, + "total_l1_linf_norm": 19986.189453125, + "total_spectral_norm": 2.243312120437622, + "layer_1_update_fnorm": 0.5092134475708008, + "layer_1_max_l1_linf_norm": 0.6321834921836853, + "layer_1_max_spectral_norm": 0.08726383000612259, + "layer_2_update_fnorm": 0.515273928642273, + "layer_2_max_l1_linf_norm": 0.6038751602172852, + "layer_2_max_spectral_norm": 0.0807572603225708, + "layer_3_update_fnorm": 0.5196501612663269, + "layer_3_max_l1_linf_norm": 0.6100327372550964, + "layer_3_max_spectral_norm": 0.07265628129243851, + "layer_4_update_fnorm": 0.5282049775123596, + "layer_4_max_l1_linf_norm": 0.603539228439331, + "layer_4_max_spectral_norm": 0.07688289135694504, + "layer_5_update_fnorm": 0.5261965990066528, + 
"layer_5_max_l1_linf_norm": 0.6879973411560059, + "layer_5_max_spectral_norm": 0.08264435827732086, + "layer_6_update_fnorm": 0.5289614796638489, + "layer_6_max_l1_linf_norm": 0.6389575004577637, + "layer_6_max_spectral_norm": 0.07167766988277435, + "layer_7_update_fnorm": 0.521599292755127, + "layer_7_max_l1_linf_norm": 0.5857230424880981, + "layer_7_max_spectral_norm": 0.0675874724984169, + "layer_8_update_fnorm": 0.5299295783042908, + "layer_8_max_l1_linf_norm": 0.5605111122131348, + "layer_8_max_spectral_norm": 0.05692127346992493, + "layer_9_update_fnorm": 0.5319985747337341, + "layer_9_max_l1_linf_norm": 0.5445425510406494, + "layer_9_max_spectral_norm": 0.0509662963449955, + "layer_10_update_fnorm": 0.5314062833786011, + "layer_10_max_l1_linf_norm": 0.557087242603302, + "layer_10_max_spectral_norm": 0.06285016983747482, + "layer_11_update_fnorm": 0.5297803282737732, + "layer_11_max_l1_linf_norm": 0.6092842817306519, + "layer_11_max_spectral_norm": 0.07134000211954117, + "layer_12_update_fnorm": 0.5179179906845093, + "layer_12_max_l1_linf_norm": 0.5879102945327759, + "layer_12_max_spectral_norm": 0.07360192388296127, + "total_sharpness": 0.0035511190071702003, + "ip_v_neg_g": 0.006051326636224985, + "cos_v_neg_g": 0.0028280706610530615, + "v_norm": 2.243312120437622, + "g_norm": 0.9538292288780212, + "hv_norm": 0.3950551748275757, + "cos_v_hv": 0.02016494981944561, + "hg_norm": 7.2568488121032715, + "cos_g_hg": 0.6178531050682068, + "v_parallel_norm": 0.0002167643397115171, + "v_perp_norm": 2.243312120437622, + "layer_1_v_norm": 0.5092134475708008, + "layer_1_cos_v_neg_g": 0.005176073405891657, + "layer_2_v_norm": 0.515273928642273, + "layer_2_cos_v_neg_g": 0.0016245645238086581, + "layer_3_v_norm": 0.5196501612663269, + "layer_3_cos_v_neg_g": 0.0021594383288174868, + "layer_4_v_norm": 0.5282049775123596, + "layer_4_cos_v_neg_g": 0.0016102533554658294, + "layer_5_v_norm": 0.5261965990066528, + "layer_5_cos_v_neg_g": 0.0024225397501140833, + "layer_6_v_norm": 0.5289614796638489, + "layer_6_cos_v_neg_g": 0.0024227979592978954, + "layer_7_v_norm": 0.521599292755127, + "layer_7_cos_v_neg_g": 0.0031694811768829823, + "layer_8_v_norm": 0.5299296379089355, + "layer_8_cos_v_neg_g": 0.0031614266335964203, + "layer_9_v_norm": 0.5319985747337341, + "layer_9_cos_v_neg_g": 0.003513368545100093, + "layer_10_v_norm": 0.5314062833786011, + "layer_10_cos_v_neg_g": 0.005137632600963116, + "layer_11_v_norm": 0.5297802686691284, + "layer_11_cos_v_neg_g": 0.006737341172993183, + "layer_12_v_norm": 0.5179179906845093, + "layer_12_cos_v_neg_g": 0.008417296223342419, + "layer_1_sharpness": 0.00347009114921093, + "layer_2_sharpness": 0.0006052026874385774, + "layer_3_sharpness": 0.0006650117575190961, + "layer_4_sharpness": 0.0005260705365799367, + "layer_5_sharpness": 0.0006766234873794019, + "layer_6_sharpness": 0.0007291825604625046, + "layer_7_sharpness": 0.0011224769987165928, + "layer_8_sharpness": 0.0008357304031960666, + "layer_9_sharpness": 0.0009874313836917281, + "layer_10_sharpness": 0.0007922871736809611, + "layer_11_sharpness": 0.0008289190009236336, + "layer_12_sharpness": 0.0014697087462991476 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/training_log.txt b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..5bfc3f64c20a7cf8ddbbb995f5a0d9c872e2fe5e --- /dev/null +++ 
b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.001_mlr_0.01_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" 
+ return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in 
range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. 
+ # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack("<B", length)) + file.write(b) + # Note: update_direction_v is identical across ranks in DDP (same grads -> same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3.
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
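# Illustrative sketch, separate from the script recorded in this log: the loop below builds the HVP
# loss with create_graph=True so that a Hessian-vector product can be taken by a second backward
# pass, and total sharpness along the update v is then v^T H v / v^T v. The same double-backward
# trick on a hypothetical toy model:
import torch
import torch.nn as nn

toy = nn.Linear(4, 1)
xb, yb = torch.randn(8, 4), torch.randn(8, 1)
toy_params = list(toy.parameters())
toy_loss = ((toy(xb) - yb) ** 2).mean()
g = torch.autograd.grad(toy_loss, toy_params, create_graph=True)  # gradient, still differentiable
v_probe = [torch.randn_like(p) for p in toy_params]               # direction to probe (e.g. an optimizer update)
g_dot_v = sum((gi * vi).sum() for gi, vi in zip(g, v_probe))      # <g, v>
Hv = torch.autograd.grad(g_dot_v, toy_params)                     # Hessian-vector product H v
v_sq = sum((vi * vi).sum() for vi in v_probe)
sharpness_along_v = (sum((hi * vi).sum() for hi, vi in zip(Hv, v_probe)) / v_sq).item()  # v^T H v / v^T v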
+ loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = <v, -g>; cos_v_neg_g = <v, -g> / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm =
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
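# Illustrative note, separate from the script recorded in this log: the string returned below is
# what gets appended to the "step:N validation loss:..." lines further down in this file. For
# example, results["total_sharpness"], results["layer_1_sharpness"] and results["cos_v_neg_g"]
# show up there as "total_sharp:7.1323e-02 L1_sharp:2.0012e-01 ... cos_v_neg_g:6.5676e-02"
# (values quoted from the step-500 entry later in this log).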
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate for the AdamW optimizer") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="final learning rate as a fraction of the base learning rate") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how many steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
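# Illustrative note, separate from the script recorded in this log: RANK / LOCAL_RANK / WORLD_SIZE
# are the environment variables torchrun injects, so a multi-GPU run of this script would be
# launched along the lines of (script name and data path are placeholders):
#   torchrun --standalone --nproc_per_node=8 train_gpt2.py \
#       --input_bin "path/to/train_*.bin" --optimizer muon --analyze_sharpness
# whereas a plain `python train_gpt2.py ...` invocation falls through to the non-DDP branch below.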
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
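# Illustrative sketch, separate from the script recorded in this log: the dim()-based split used
# just below, shown on a hypothetical toy module. 2D weight matrices land in the decayed group,
# while biases and LayerNorm parameters (all 1D) are left undecayed:
import torch.nn as nn

toy_mod = nn.Sequential(nn.Linear(4, 4), nn.LayerNorm(4))
decay_names = [n for n, p in toy_mod.named_parameters() if p.dim() >= 2]   # ['0.weight']
nodecay_names = [n for n, p in toy_mod.named_parameters() if p.dim() < 2]  # ['0.bias', '1.weight', '1.bias']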
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, 
+ device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
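# Illustrative sketch, separate from the script recorded in this log: the capture below measures
# the update the optimizer actually applied as v = p_before - p_after, instead of reconstructing
# it from optimizer state. The same idea on a hypothetical toy model:
import torch
import torch.nn as nn

toy_net = nn.Linear(4, 1)
opt = torch.optim.AdamW(toy_net.parameters(), lr=1e-3)
((toy_net(torch.randn(8, 4)) - torch.randn(8, 1)) ** 2).mean().backward()
p_before = [p.detach().clone() for p in toy_net.parameters()]
opt.step()
actual_update = [b - p.detach() for b, p in zip(p_before, toy_net.parameters())]  # what the step applied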
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026603 +step:0 train loss:11.019232 +step:1 train loss:10.942616 +step:2 train loss:10.782427 +step:3 train loss:10.583086 +step:4 train loss:10.399472 +step:5 train loss:10.228755 +step:6 train loss:10.091090 +step:7 train loss:9.950638 +step:8 train loss:9.884434 +step:9 train loss:9.795784 +step:10 train loss:9.739383 +step:11 train loss:9.696905 +step:12 train loss:9.633005 +step:13 train loss:9.589773 +step:14 train loss:9.563635 +step:15 train loss:9.546618 +step:16 train loss:9.518008 +step:17 train loss:9.471753 +step:18 train loss:9.458405 +step:19 train loss:9.393511 +step:20 train loss:9.347491 +step:21 train loss:9.317150 +step:22 train loss:9.211340 +step:23 train loss:9.201447 +step:24 train loss:9.129150 +step:25 train loss:9.114797 +step:26 train loss:9.056517 +step:27 train loss:8.988647 +step:28 train loss:8.985079 +step:29 train loss:8.936971 +step:30 train loss:8.900311 +step:31 train loss:8.819027 +step:32 train loss:8.771678 +step:33 train loss:8.731232 +step:34 train loss:8.730371 +step:35 train loss:8.638797 +step:36 train loss:8.616808 +step:37 train loss:8.546884 +step:38 train loss:8.540925 
+step:39 train loss:8.481695 +step:40 train loss:8.449862 +step:41 train loss:8.366921 +step:42 train loss:8.365979 +step:43 train loss:8.269278 +step:44 train loss:8.223131 +step:45 train loss:8.201583 +step:46 train loss:8.161366 +step:47 train loss:8.146229 +step:48 train loss:8.053371 +step:49 train loss:8.017366 +step:50 train loss:7.931625 +step:51 train loss:7.926973 +step:52 train loss:7.885573 +step:53 train loss:7.852207 +step:54 train loss:7.808225 +step:55 train loss:7.748191 +step:56 train loss:7.692964 +step:57 train loss:7.688380 +step:58 train loss:7.611541 +step:59 train loss:7.605891 +step:60 train loss:7.558922 +step:61 train loss:7.516357 +step:62 train loss:7.475450 +step:63 train loss:7.506017 +step:64 train loss:7.387560 +step:65 train loss:7.399515 +step:66 train loss:7.371301 +step:67 train loss:7.375675 +step:68 train loss:7.317906 +step:69 train loss:7.283607 +step:70 train loss:7.236607 +step:71 train loss:7.211371 +step:72 train loss:7.231528 +step:73 train loss:7.169023 +step:74 train loss:7.180657 +step:75 train loss:7.125306 +step:76 train loss:7.203342 +step:77 train loss:7.131277 +step:78 train loss:6.894454 +step:79 train loss:7.051960 +step:80 train loss:7.022473 +step:81 train loss:7.093371 +step:82 train loss:7.048091 +step:83 train loss:7.012802 +step:84 train loss:6.969572 +step:85 train loss:6.939219 +step:86 train loss:6.929449 +step:87 train loss:6.899911 +step:88 train loss:6.896404 +step:89 train loss:6.858235 +step:90 train loss:6.896688 +step:91 train loss:6.902740 +step:92 train loss:6.913408 +step:93 train loss:6.838134 +step:94 train loss:6.806695 +step:95 train loss:6.762544 +step:96 train loss:6.853469 +step:97 train loss:6.809159 +step:98 train loss:6.780069 +step:99 train loss:6.765405 +step:100 train loss:6.791609 +step:101 train loss:6.693794 +step:102 train loss:6.698115 +step:103 train loss:6.681605 +step:104 train loss:6.709790 +step:105 train loss:6.776550 +step:106 train loss:6.716881 +step:107 train loss:6.664454 +step:108 train loss:6.692876 +step:109 train loss:6.722269 +step:110 train loss:6.642407 +step:111 train loss:6.658998 +step:112 train loss:6.651484 +step:113 train loss:6.608820 +step:114 train loss:6.676901 +step:115 train loss:6.616295 +step:116 train loss:6.607091 +step:117 train loss:6.538417 +step:118 train loss:6.600571 +step:119 train loss:6.549538 +step:120 train loss:6.565205 +step:121 train loss:6.486564 +step:122 train loss:6.587561 +step:123 train loss:6.509475 +step:124 train loss:6.488377 +step:125 train loss:6.468022 +step:126 train loss:6.567856 +step:127 train loss:6.478644 +step:128 train loss:6.526831 +step:129 train loss:6.514103 +step:130 train loss:6.556863 +step:131 train loss:6.488813 +step:132 train loss:6.417627 +step:133 train loss:6.489456 +step:134 train loss:6.464585 +step:135 train loss:6.374958 +step:136 train loss:6.418256 +step:137 train loss:6.421104 +step:138 train loss:6.365248 +step:139 train loss:6.439220 +step:140 train loss:6.355800 +step:141 train loss:6.468637 +step:142 train loss:6.401491 +step:143 train loss:6.419168 +step:144 train loss:6.399396 +step:145 train loss:6.333836 +step:146 train loss:6.341161 +step:147 train loss:6.395544 +step:148 train loss:6.402611 +step:149 train loss:6.365182 +step:150 train loss:6.369217 +step:151 train loss:6.285602 +step:152 train loss:6.330672 +step:153 train loss:6.313179 +step:154 train loss:6.395391 +step:155 train loss:6.368476 +step:156 train loss:6.394903 +step:157 train loss:6.314986 +step:158 train loss:6.293200 +step:159 train 
loss:6.329183 +step:160 train loss:6.310170 +step:161 train loss:6.306157 +step:162 train loss:6.276693 +step:163 train loss:6.294699 +step:164 train loss:6.311138 +step:165 train loss:6.317010 +step:166 train loss:6.278766 +step:167 train loss:6.274918 +step:168 train loss:6.242461 +step:169 train loss:6.196538 +step:170 train loss:6.163063 +step:171 train loss:6.299709 +step:172 train loss:6.220771 +step:173 train loss:6.271635 +step:174 train loss:6.274592 +step:175 train loss:6.239777 +step:176 train loss:6.195959 +step:177 train loss:6.239385 +step:178 train loss:6.239037 +step:179 train loss:6.193248 +step:180 train loss:6.181302 +step:181 train loss:6.216632 +step:182 train loss:6.152444 +step:183 train loss:6.243023 +step:184 train loss:6.207280 +step:185 train loss:6.130028 +step:186 train loss:6.276250 +step:187 train loss:6.228187 +step:188 train loss:6.039773 +step:189 train loss:6.211576 +step:190 train loss:6.201039 +step:191 train loss:6.129063 +step:192 train loss:6.033589 +step:193 train loss:6.211723 +step:194 train loss:6.230363 +step:195 train loss:6.210980 +step:196 train loss:6.183399 +step:197 train loss:6.171804 +step:198 train loss:6.118417 +step:199 train loss:6.194041 +step:200 train loss:6.237796 +step:201 train loss:6.162416 +step:202 train loss:6.168061 +step:203 train loss:6.131603 +step:204 train loss:6.166222 +step:205 train loss:6.022746 +step:206 train loss:6.148769 +step:207 train loss:6.129050 +step:208 train loss:6.065398 +step:209 train loss:6.059012 +step:210 train loss:6.071687 +step:211 train loss:6.139216 +step:212 train loss:6.091245 +step:213 train loss:6.121162 +step:214 train loss:6.109003 +step:215 train loss:6.122485 +step:216 train loss:6.063898 +step:217 train loss:6.067778 +step:218 train loss:6.051144 +step:219 train loss:6.027633 +step:220 train loss:6.065489 +step:221 train loss:6.033971 +step:222 train loss:6.076179 +step:223 train loss:6.090588 +step:224 train loss:6.069769 +step:225 train loss:6.014883 +step:226 train loss:6.016953 +step:227 train loss:6.078589 +step:228 train loss:6.037284 +step:229 train loss:6.108697 +step:230 train loss:5.981017 +step:231 train loss:6.037788 +step:232 train loss:6.026192 +step:233 train loss:5.990802 +step:234 train loss:5.993708 +step:235 train loss:6.078829 +step:236 train loss:6.027205 +step:237 train loss:6.061459 +step:238 train loss:6.072956 +step:239 train loss:5.999525 +step:240 train loss:6.048025 +step:241 train loss:6.093894 +step:242 train loss:6.074627 +step:243 train loss:5.984277 +step:244 train loss:6.004655 +step:245 train loss:5.978832 +step:246 train loss:5.981725 +step:247 train loss:5.976408 +step:248 train loss:5.936916 +step:249 train loss:6.001660 +step:250 validation loss:5.983541 +step:250 train loss:5.965970 +step:251 train loss:6.001202 +step:252 train loss:5.956785 +step:253 train loss:5.961700 +step:254 train loss:5.914624 +step:255 train loss:5.972639 +step:256 train loss:5.995929 +step:257 train loss:6.023942 +step:258 train loss:5.916603 +step:259 train loss:5.947838 +step:260 train loss:5.909543 +step:261 train loss:5.914861 +step:262 train loss:5.981457 +step:263 train loss:5.942153 +step:264 train loss:5.908056 +step:265 train loss:5.931380 +step:266 train loss:5.893807 +step:267 train loss:5.928214 +step:268 train loss:5.882816 +step:269 train loss:5.906119 +step:270 train loss:5.929539 +step:271 train loss:5.914568 +step:272 train loss:5.858881 +step:273 train loss:5.940864 +step:274 train loss:5.843174 +step:275 train loss:5.883784 +step:276 train 
loss:5.858455 +step:277 train loss:5.869569 +step:278 train loss:5.861194 +step:279 train loss:5.821577 +step:280 train loss:5.906348 +step:281 train loss:5.976380 +step:282 train loss:5.882625 +step:283 train loss:5.868645 +step:284 train loss:5.838252 +step:285 train loss:5.895273 +step:286 train loss:5.862424 +step:287 train loss:5.835281 +step:288 train loss:5.811556 +step:289 train loss:5.845983 +step:290 train loss:5.889210 +step:291 train loss:5.818592 +step:292 train loss:5.875627 +step:293 train loss:5.812388 +step:294 train loss:5.928971 +step:295 train loss:5.825303 +step:296 train loss:5.914806 +step:297 train loss:5.934234 +step:298 train loss:5.826550 +step:299 train loss:5.893482 +step:300 train loss:5.812663 +step:301 train loss:5.844321 +step:302 train loss:5.818120 +step:303 train loss:5.825863 +step:304 train loss:5.854352 +step:305 train loss:5.778775 +step:306 train loss:5.801647 +step:307 train loss:5.813727 +step:308 train loss:5.729284 +step:309 train loss:5.863827 +step:310 train loss:5.833827 +step:311 train loss:5.819846 +step:312 train loss:5.805346 +step:313 train loss:5.819777 +step:314 train loss:5.794730 +step:315 train loss:5.756599 +step:316 train loss:5.745508 +step:317 train loss:5.713602 +step:318 train loss:5.709477 +step:319 train loss:5.783803 +step:320 train loss:5.702909 +step:321 train loss:5.786463 +step:322 train loss:5.788582 +step:323 train loss:5.838195 +step:324 train loss:5.788903 +step:325 train loss:5.817963 +step:326 train loss:5.821913 +step:327 train loss:5.798476 +step:328 train loss:5.775424 +step:329 train loss:5.798678 +step:330 train loss:5.716466 +step:331 train loss:5.751769 +step:332 train loss:5.725815 +step:333 train loss:5.669469 +step:334 train loss:5.775162 +step:335 train loss:5.823353 +step:336 train loss:5.939941 +step:337 train loss:5.827361 +step:338 train loss:5.774822 +step:339 train loss:5.708829 +step:340 train loss:5.720035 +step:341 train loss:5.704551 +step:342 train loss:5.764584 +step:343 train loss:5.734762 +step:344 train loss:5.683832 +step:345 train loss:5.652060 +step:346 train loss:5.697766 +step:347 train loss:5.646593 +step:348 train loss:5.649526 +step:349 train loss:5.588568 +step:350 train loss:5.621046 +step:351 train loss:5.697524 +step:352 train loss:5.650828 +step:353 train loss:5.672055 +step:354 train loss:5.627189 +step:355 train loss:5.666566 +step:356 train loss:5.640759 +step:357 train loss:5.739323 +step:358 train loss:5.776588 +step:359 train loss:5.640416 +step:360 train loss:5.761962 +step:361 train loss:5.734289 +step:362 train loss:5.711515 +step:363 train loss:5.673974 +step:364 train loss:5.781667 +step:365 train loss:5.734980 +step:366 train loss:5.686048 +step:367 train loss:5.708390 +step:368 train loss:5.677900 +step:369 train loss:5.668210 +step:370 train loss:5.710337 +step:371 train loss:5.643576 +step:372 train loss:5.717278 +step:373 train loss:5.677326 +step:374 train loss:5.646870 +step:375 train loss:5.668738 +step:376 train loss:5.650800 +step:377 train loss:5.533599 +step:378 train loss:5.611633 +step:379 train loss:5.663158 +step:380 train loss:5.584920 +step:381 train loss:5.645189 +step:382 train loss:5.647073 +step:383 train loss:5.645027 +step:384 train loss:5.604319 +step:385 train loss:5.595204 +step:386 train loss:5.626922 +step:387 train loss:5.624140 +step:388 train loss:5.588321 +step:389 train loss:5.604335 +step:390 train loss:5.587869 +step:391 train loss:5.589452 +step:392 train loss:5.580668 +step:393 train loss:5.576458 +step:394 train loss:5.615067 
+step:395 train loss:5.549617 +step:396 train loss:5.510185 +step:397 train loss:5.622136 +step:398 train loss:5.588804 +step:399 train loss:5.602264 +step:400 train loss:5.577309 +step:401 train loss:5.617623 +step:402 train loss:5.586416 +step:403 train loss:5.584993 +step:404 train loss:5.555142 +step:405 train loss:5.560172 +step:406 train loss:5.589059 +step:407 train loss:5.569323 +step:408 train loss:5.632094 +step:409 train loss:5.550806 +step:410 train loss:5.518881 +step:411 train loss:5.503585 +step:412 train loss:5.593049 +step:413 train loss:5.481271 +step:414 train loss:5.566042 +step:415 train loss:5.534013 +step:416 train loss:5.546066 +step:417 train loss:5.589427 +step:418 train loss:5.537756 +step:419 train loss:5.531836 +step:420 train loss:5.543929 +step:421 train loss:5.509687 +step:422 train loss:5.506676 +step:423 train loss:5.513398 +step:424 train loss:5.478779 +step:425 train loss:5.540858 +step:426 train loss:5.531409 +step:427 train loss:5.470004 +step:428 train loss:5.582135 +step:429 train loss:5.455479 +step:430 train loss:5.511566 +step:431 train loss:5.545654 +step:432 train loss:5.558114 +step:433 train loss:5.535814 +step:434 train loss:5.487062 +step:435 train loss:5.538239 +step:436 train loss:5.559980 +step:437 train loss:5.506724 +step:438 train loss:5.463706 +step:439 train loss:5.462619 +step:440 train loss:5.505166 +step:441 train loss:5.453521 +step:442 train loss:5.446952 +step:443 train loss:5.458915 +step:444 train loss:5.495698 +step:445 train loss:5.494793 +step:446 train loss:5.434431 +step:447 train loss:5.457693 +step:448 train loss:5.523375 +step:449 train loss:5.473036 +step:450 train loss:5.470638 +step:451 train loss:5.460392 +step:452 train loss:5.506820 +step:453 train loss:5.446801 +step:454 train loss:5.408682 +step:455 train loss:5.505723 +step:456 train loss:5.460756 +step:457 train loss:5.433356 +step:458 train loss:5.463297 +step:459 train loss:5.427719 +step:460 train loss:5.519590 +step:461 train loss:5.471549 +step:462 train loss:5.366562 +step:463 train loss:5.438012 +step:464 train loss:5.489910 +step:465 train loss:5.451324 +step:466 train loss:5.467031 +step:467 train loss:5.412643 +step:468 train loss:5.456365 +step:469 train loss:5.426187 +step:470 train loss:5.376760 +step:471 train loss:5.474899 +step:472 train loss:5.360138 +step:473 train loss:5.435989 +step:474 train loss:5.419928 +step:475 train loss:5.436826 +step:476 train loss:5.398214 +step:477 train loss:5.346197 +step:478 train loss:5.361343 +step:479 train loss:5.375525 +step:480 train loss:5.405341 +step:481 train loss:5.399084 +step:482 train loss:5.328087 +step:483 train loss:5.409347 +step:484 train loss:5.381371 +step:485 train loss:5.383719 +step:486 train loss:5.438647 +step:487 train loss:5.395288 +step:488 train loss:5.382222 +step:489 train loss:5.384395 +step:490 train loss:5.374448 +step:491 train loss:5.385225 +step:492 train loss:5.382841 +step:493 train loss:5.382947 +step:494 train loss:5.385039 +step:495 train loss:5.327860 +step:496 train loss:5.427475 +step:497 train loss:5.309858 +step:498 train loss:5.417261 +step:499 train loss:5.386298 +step:500 validation loss:5.367658 total_sharp:7.1323e-02 L1_sharp:2.0012e-01 L2_sharp:5.5745e-02 L3_sharp:2.9736e-02 L4_sharp:2.0664e-02 L5_sharp:1.7011e-02 L6_sharp:1.0149e-02 L7_sharp:8.8316e-03 L8_sharp:5.9729e-03 L9_sharp:6.2353e-03 L10_sharp:5.2541e-03 L11_sharp:6.6675e-03 L12_sharp:3.2602e-02 total_fnorm:1.3424e+00 total_l1_linf:1.1586e+04 total_spectral:1.3424e+00 L1_fnorm:2.3374e-01 
L2_fnorm:2.1902e-01 L3_fnorm:2.2774e-01 L4_fnorm:2.5187e-01 L5_fnorm:2.6294e-01 L6_fnorm:2.7940e-01 L7_fnorm:2.8722e-01 L8_fnorm:2.9427e-01 L9_fnorm:2.9404e-01 L10_fnorm:2.9883e-01 L11_fnorm:2.8728e-01 L12_fnorm:2.6533e-01 L1_l1linf:3.8504e-01 L2_l1linf:3.0816e-01 L3_l1linf:3.6713e-01 L4_l1linf:3.7364e-01 L5_l1linf:4.0393e-01 L6_l1linf:4.1108e-01 L7_l1linf:3.7826e-01 L8_l1linf:3.7478e-01 L9_l1linf:3.7957e-01 L10_l1linf:3.7698e-01 L11_l1linf:3.7944e-01 L12_l1linf:3.8242e-01 L1_spectral:5.5677e-02 L2_spectral:5.1224e-02 L3_spectral:5.2275e-02 L4_spectral:6.2819e-02 L5_spectral:6.1731e-02 L6_spectral:6.0462e-02 L7_spectral:5.9851e-02 L8_spectral:6.3173e-02 L9_spectral:6.5203e-02 L10_spectral:6.8872e-02 L11_spectral:7.1216e-02 L12_spectral:9.6990e-02 ip_v_neg_g:1.0161e-01 cos_v_neg_g:6.5676e-02 v_norm:1.3424e+00 g_norm:1.1525e+00 hv_norm:9.7786e-01 cos_v_hv:9.7912e-02 hg_norm:7.4002e+00 cos_g_hg:6.0800e-01 v_par:3.9456e-03 v_perp:1.3424e+00 L1_cos_v_neg_g:1.3228e-01 L1_v_norm:2.3374e-01 L2_cos_v_neg_g:1.8640e-01 L2_v_norm:2.1902e-01 L3_cos_v_neg_g:1.7245e-01 L3_v_norm:2.2774e-01 L4_cos_v_neg_g:1.6082e-01 L4_v_norm:2.5187e-01 L5_cos_v_neg_g:1.6559e-01 L5_v_norm:2.6294e-01 L6_cos_v_neg_g:1.3396e-01 L6_v_norm:2.7940e-01 L7_cos_v_neg_g:1.3234e-01 L7_v_norm:2.8722e-01 L8_cos_v_neg_g:1.1526e-01 L8_v_norm:2.9427e-01 L9_cos_v_neg_g:1.1043e-01 L9_v_norm:2.9404e-01 L10_cos_v_neg_g:9.9256e-02 L10_v_norm:2.9883e-01 L11_cos_v_neg_g:1.1630e-01 L11_v_norm:2.8728e-01 L12_cos_v_neg_g:1.5859e-01 L12_v_norm:2.6533e-01 +step:500 train loss:5.388126 +step:501 train loss:5.341118 +step:502 train loss:5.369151 +step:503 train loss:5.295244 +step:504 train loss:5.374474 +step:505 train loss:5.304893 +step:506 train loss:5.300834 +step:507 train loss:5.303626 +step:508 train loss:5.330715 +step:509 train loss:5.336096 +step:510 train loss:5.269704 +step:511 train loss:5.268487 +step:512 train loss:5.284906 +step:513 train loss:5.343064 +step:514 train loss:5.383230 +step:515 train loss:5.361148 +step:516 train loss:5.410685 +step:517 train loss:5.329393 +step:518 train loss:5.313177 +step:519 train loss:5.370253 +step:520 train loss:5.300094 +step:521 train loss:5.280766 +step:522 train loss:5.305678 +step:523 train loss:5.301394 +step:524 train loss:5.241051 +step:525 train loss:5.247715 +step:526 train loss:5.266701 +step:527 train loss:5.262883 +step:528 train loss:5.257493 +step:529 train loss:5.289766 +step:530 train loss:5.231885 +step:531 train loss:5.274893 +step:532 train loss:5.243541 +step:533 train loss:5.212547 +step:534 train loss:5.278985 +step:535 train loss:5.267248 +step:536 train loss:5.331089 +step:537 train loss:5.209481 +step:538 train loss:5.166301 +step:539 train loss:5.277643 +step:540 train loss:5.306761 +step:541 train loss:5.217556 +step:542 train loss:5.281518 +step:543 train loss:5.299538 +step:544 train loss:5.297370 +step:545 train loss:5.285771 +step:546 train loss:5.233524 +step:547 train loss:5.255207 +step:548 train loss:5.219403 +step:549 train loss:5.290920 +step:550 train loss:5.256796 +step:551 train loss:5.256483 +step:552 train loss:5.346293 +step:553 train loss:5.316559 +step:554 train loss:5.263702 +step:555 train loss:5.302691 +step:556 train loss:5.250716 +step:557 train loss:5.215439 +step:558 train loss:5.176292 +step:559 train loss:5.230263 +step:560 train loss:5.286937 +step:561 train loss:5.151797 +step:562 train loss:5.137316 +step:563 train loss:5.224112 +step:564 train loss:5.175284 +step:565 train loss:5.180747 +step:566 train loss:5.182957 +step:567 train 
loss:5.176805 +step:568 train loss:5.205356 +step:569 train loss:5.177093 +step:570 train loss:5.102351 +step:571 train loss:5.131169 +step:572 train loss:5.127777 +step:573 train loss:5.132625 +step:574 train loss:5.160085 +step:575 train loss:5.130651 +step:576 train loss:5.127025 +step:577 train loss:5.151257 +step:578 train loss:5.120818 +step:579 train loss:5.153710 +step:580 train loss:5.097095 +step:581 train loss:5.147346 +step:582 train loss:5.122936 +step:583 train loss:5.166479 +step:584 train loss:5.142343 +step:585 train loss:5.143061 +step:586 train loss:5.152085 +step:587 train loss:5.222693 +step:588 train loss:5.149305 +step:589 train loss:5.224098 +step:590 train loss:5.259857 +step:591 train loss:5.159864 +step:592 train loss:5.140232 +step:593 train loss:5.178186 +step:594 train loss:5.146167 +step:595 train loss:5.184594 +step:596 train loss:5.148245 +step:597 train loss:5.176937 +step:598 train loss:5.133830 +step:599 train loss:5.136576 +step:600 train loss:5.113331 +step:601 train loss:5.100775 +step:602 train loss:5.087723 +step:603 train loss:5.156770 +step:604 train loss:5.125407 +step:605 train loss:5.187057 +step:606 train loss:5.161387 +step:607 train loss:5.143054 +step:608 train loss:5.160729 +step:609 train loss:5.139053 +step:610 train loss:5.129431 +step:611 train loss:5.155337 +step:612 train loss:5.181449 +step:613 train loss:5.071929 +step:614 train loss:5.120261 +step:615 train loss:5.165057 +step:616 train loss:5.076044 +step:617 train loss:5.101759 +step:618 train loss:5.064208 +step:619 train loss:5.107121 +step:620 train loss:5.116503 +step:621 train loss:5.034946 +step:622 train loss:5.107168 +step:623 train loss:5.094034 +step:624 train loss:5.087853 +step:625 train loss:5.105477 +step:626 train loss:5.114974 +step:627 train loss:5.089273 +step:628 train loss:5.085693 +step:629 train loss:5.016931 +step:630 train loss:5.040465 +step:631 train loss:5.018232 +step:632 train loss:5.016571 +step:633 train loss:5.042358 +step:634 train loss:5.031201 +step:635 train loss:4.969465 +step:636 train loss:5.058880 +step:637 train loss:4.976296 +step:638 train loss:4.905775 +step:639 train loss:5.016845 +step:640 train loss:4.974317 +step:641 train loss:5.002539 +step:642 train loss:5.032177 +step:643 train loss:4.938676 +step:644 train loss:5.022024 +step:645 train loss:4.985456 +step:646 train loss:4.981142 +step:647 train loss:4.999969 +step:648 train loss:5.094873 +step:649 train loss:4.994780 +step:650 train loss:5.069936 +step:651 train loss:4.951795 +step:652 train loss:5.001254 +step:653 train loss:4.981144 +step:654 train loss:5.005004 +step:655 train loss:5.031107 +step:656 train loss:4.962982 +step:657 train loss:5.036316 +step:658 train loss:4.958277 +step:659 train loss:5.037590 +step:660 train loss:4.992128 +step:661 train loss:5.045341 +step:662 train loss:5.031449 +step:663 train loss:5.010024 +step:664 train loss:4.911731 +step:665 train loss:4.942031 +step:666 train loss:4.936616 +step:667 train loss:5.008402 +step:668 train loss:4.961175 +step:669 train loss:4.946179 +step:670 train loss:4.975310 +step:671 train loss:4.943322 +step:672 train loss:4.925303 +step:673 train loss:5.026279 +step:674 train loss:5.008418 +step:675 train loss:4.925988 +step:676 train loss:5.014119 +step:677 train loss:4.936091 +step:678 train loss:4.931515 +step:679 train loss:4.969527 +step:680 train loss:4.927240 +step:681 train loss:4.965125 +step:682 train loss:4.873097 +step:683 train loss:4.937588 +step:684 train loss:4.979534 +step:685 train loss:4.917103 
+step:686 train loss:5.039254 +step:687 train loss:4.957445 +step:688 train loss:4.886743 +step:689 train loss:4.913227 +step:690 train loss:4.887601 +step:691 train loss:4.898384 +step:692 train loss:4.913939 +step:693 train loss:4.908916 +step:694 train loss:4.901415 +step:695 train loss:4.869474 +step:696 train loss:4.815392 +step:697 train loss:4.939925 +step:698 train loss:4.857830 +step:699 train loss:4.860142 +step:700 train loss:4.934398 +step:701 train loss:4.841547 +step:702 train loss:4.932269 +step:703 train loss:4.867049 +step:704 train loss:4.825385 +step:705 train loss:4.896793 +step:706 train loss:4.751212 +step:707 train loss:4.817201 +step:708 train loss:4.918110 +step:709 train loss:4.873869 +step:710 train loss:4.856677 +step:711 train loss:4.924114 +step:712 train loss:4.862844 +step:713 train loss:4.834025 +step:714 train loss:4.915024 +step:715 train loss:4.792505 +step:716 train loss:4.952904 +step:717 train loss:4.841691 +step:718 train loss:4.943352 +step:719 train loss:4.866952 +step:720 train loss:4.838523 +step:721 train loss:4.854830 +step:722 train loss:4.862588 +step:723 train loss:4.922859 +step:724 train loss:4.890971 +step:725 train loss:4.838162 +step:726 train loss:4.826081 +step:727 train loss:4.877826 +step:728 train loss:4.855861 +step:729 train loss:4.773526 +step:730 train loss:4.872375 +step:731 train loss:4.911592 +step:732 train loss:4.873146 +step:733 train loss:4.843977 +step:734 train loss:4.841826 +step:735 train loss:4.917928 +step:736 train loss:4.836643 +step:737 train loss:4.822512 +step:738 train loss:4.847161 +step:739 train loss:4.792847 +step:740 train loss:4.812396 +step:741 train loss:4.885140 +step:742 train loss:4.796892 +step:743 train loss:4.793029 +step:744 train loss:4.822882 +step:745 train loss:4.748758 +step:746 train loss:4.757399 +step:747 train loss:4.782125 +step:748 train loss:4.746444 +step:749 train loss:4.783085 +step:750 validation loss:4.746082 +step:750 train loss:4.727802 +step:751 train loss:4.750292 +step:752 train loss:4.686689 +step:753 train loss:4.743066 +step:754 train loss:4.735353 +step:755 train loss:4.791496 +step:756 train loss:4.761528 +step:757 train loss:4.846444 +step:758 train loss:4.731751 +step:759 train loss:4.764718 +step:760 train loss:4.738573 +step:761 train loss:4.793150 +step:762 train loss:4.748270 +step:763 train loss:4.743125 +step:764 train loss:4.716230 +step:765 train loss:4.716260 +step:766 train loss:4.790905 +step:767 train loss:4.915739 +step:768 train loss:4.744514 +step:769 train loss:4.756855 +step:770 train loss:4.775110 +step:771 train loss:4.831724 +step:772 train loss:4.755801 +step:773 train loss:4.692131 +step:774 train loss:4.732905 +step:775 train loss:4.702470 +step:776 train loss:4.712538 +step:777 train loss:4.675979 +step:778 train loss:4.685117 +step:779 train loss:4.705300 +step:780 train loss:4.821001 +step:781 train loss:4.713408 +step:782 train loss:4.743839 +step:783 train loss:4.723648 +step:784 train loss:4.720235 +step:785 train loss:4.699756 +step:786 train loss:4.719942 +step:787 train loss:4.676816 +step:788 train loss:4.740375 +step:789 train loss:4.722950 +step:790 train loss:4.673082 +step:791 train loss:4.761155 +step:792 train loss:4.762844 +step:793 train loss:4.747936 +step:794 train loss:4.735888 +step:795 train loss:4.685594 +step:796 train loss:4.941442 +step:797 train loss:4.703630 +step:798 train loss:4.691782 +step:799 train loss:4.707944 +step:800 train loss:4.798411 +step:801 train loss:4.701159 +step:802 train loss:4.812791 +step:803 
train loss:4.702465 +step:804 train loss:4.656200 +step:805 train loss:4.708577 +step:806 train loss:4.616436 +step:807 train loss:4.673266 +step:808 train loss:4.658599 +step:809 train loss:4.626536 +step:810 train loss:4.595627 +step:811 train loss:4.683981 +step:812 train loss:4.641069 +step:813 train loss:4.650601 +step:814 train loss:4.707368 +step:815 train loss:4.666096 +step:816 train loss:4.589568 +step:817 train loss:4.622888 +step:818 train loss:4.593726 +step:819 train loss:4.588784 +step:820 train loss:4.594590 +step:821 train loss:4.535951 +step:822 train loss:4.537107 +step:823 train loss:4.616045 +step:824 train loss:4.525327 +step:825 train loss:4.506564 +step:826 train loss:4.573804 +step:827 train loss:4.485135 +step:828 train loss:4.555463 +step:829 train loss:4.553508 +step:830 train loss:4.562293 +step:831 train loss:4.601468 +step:832 train loss:4.658878 +step:833 train loss:4.615973 +step:834 train loss:4.634733 +step:835 train loss:4.595879 +step:836 train loss:4.593113 +step:837 train loss:4.600224 +step:838 train loss:4.604218 +step:839 train loss:4.594864 +step:840 train loss:4.641682 +step:841 train loss:4.611241 +step:842 train loss:4.599186 +step:843 train loss:4.587766 +step:844 train loss:4.554773 +step:845 train loss:4.537174 +step:846 train loss:4.617697 +step:847 train loss:4.574233 +step:848 train loss:4.527987 +step:849 train loss:4.564893 +step:850 train loss:4.568233 +step:851 train loss:4.531256 +step:852 train loss:4.636189 +step:853 train loss:4.583444 +step:854 train loss:4.644522 +step:855 train loss:4.596235 +step:856 train loss:4.556572 +step:857 train loss:4.576708 +step:858 train loss:4.606778 +step:859 train loss:4.533556 +step:860 train loss:4.544347 +step:861 train loss:4.587386 +step:862 train loss:4.530326 +step:863 train loss:4.556722 +step:864 train loss:4.539156 +step:865 train loss:4.541828 +step:866 train loss:4.561961 +step:867 train loss:4.653776 +step:868 train loss:4.522571 +step:869 train loss:4.536331 +step:870 train loss:4.485581 +step:871 train loss:4.464221 +step:872 train loss:4.516165 +step:873 train loss:4.491912 +step:874 train loss:4.508747 +step:875 train loss:4.419739 +step:876 train loss:4.518114 +step:877 train loss:4.439378 +step:878 train loss:4.539062 +step:879 train loss:4.441952 +step:880 train loss:4.540678 +step:881 train loss:4.478010 +step:882 train loss:4.434624 +step:883 train loss:4.474185 +step:884 train loss:4.490646 +step:885 train loss:4.433338 +step:886 train loss:4.432732 +step:887 train loss:4.454625 +step:888 train loss:4.559027 +step:889 train loss:4.499196 +step:890 train loss:4.447845 +step:891 train loss:4.408197 +step:892 train loss:4.421242 +step:893 train loss:4.505023 +step:894 train loss:4.496177 +step:895 train loss:4.461708 +step:896 train loss:4.558771 +step:897 train loss:4.480299 +step:898 train loss:4.495825 +step:899 train loss:4.518709 +step:900 train loss:4.563415 +step:901 train loss:4.547568 +step:902 train loss:4.629013 +step:903 train loss:4.651821 +step:904 train loss:4.663391 +step:905 train loss:4.536257 +step:906 train loss:4.537147 +step:907 train loss:4.552966 +step:908 train loss:4.552938 +step:909 train loss:4.498468 +step:910 train loss:4.530904 +step:911 train loss:4.642200 +step:912 train loss:4.476624 +step:913 train loss:4.537503 +step:914 train loss:4.505239 +step:915 train loss:4.504911 +step:916 train loss:4.567116 +step:917 train loss:4.499296 +step:918 train loss:4.576893 +step:919 train loss:4.641218 +step:920 train loss:4.400486 +step:921 train 
loss:4.498368 +step:922 train loss:4.471678 +step:923 train loss:4.384511 +step:924 train loss:4.433669 +step:925 train loss:4.384418 +step:926 train loss:4.476352 +step:927 train loss:4.379986 +step:928 train loss:4.456367 +step:929 train loss:4.425646 +step:930 train loss:4.421349 +step:931 train loss:4.460877 +step:932 train loss:4.402476 +step:933 train loss:4.434560 +step:934 train loss:4.468972 +step:935 train loss:4.446474 +step:936 train loss:4.424710 +step:937 train loss:4.421831 +step:938 train loss:4.417095 +step:939 train loss:4.314508 +step:940 train loss:4.425952 +step:941 train loss:4.375964 +step:942 train loss:4.357123 +step:943 train loss:4.466434 +step:944 train loss:4.410132 +step:945 train loss:4.410251 +step:946 train loss:4.431570 +step:947 train loss:4.569702 +step:948 train loss:4.377968 +step:949 train loss:4.419130 +step:950 train loss:4.355427 +step:951 train loss:4.386272 +step:952 train loss:4.444411 +step:953 train loss:4.372270 +step:954 train loss:4.397563 +step:955 train loss:4.339519 +step:956 train loss:4.358545 +step:957 train loss:4.362098 +step:958 train loss:4.436855 +step:959 train loss:4.369605 +step:960 train loss:4.469437 +step:961 train loss:4.415861 +step:962 train loss:4.373556 +step:963 train loss:4.358113 +step:964 train loss:4.393321 +step:965 train loss:4.315829 +step:966 train loss:4.328275 +step:967 train loss:4.396633 +step:968 train loss:4.394825 +step:969 train loss:4.347095 +step:970 train loss:4.406701 +step:971 train loss:4.374869 +step:972 train loss:4.294794 +step:973 train loss:4.396783 +step:974 train loss:4.333803 +step:975 train loss:4.441024 +step:976 train loss:4.397141 +step:977 train loss:4.387083 +step:978 train loss:4.382052 +step:979 train loss:4.362011 +step:980 train loss:4.365680 +step:981 train loss:4.339360 +step:982 train loss:4.346111 +step:983 train loss:4.359958 +step:984 train loss:4.379265 +step:985 train loss:4.347332 +step:986 train loss:4.369313 +step:987 train loss:4.395844 +step:988 train loss:4.373613 +step:989 train loss:4.341769 +step:990 train loss:4.329345 +step:991 train loss:4.259411 +step:992 train loss:4.322695 +step:993 train loss:4.343838 +step:994 train loss:4.283021 +step:995 train loss:4.302560 +step:996 train loss:4.345922 +step:997 train loss:4.301479 +step:998 train loss:4.295096 +step:999 train loss:4.344876 +step:1000 validation loss:4.284408 total_sharp:3.3387e-02 L1_sharp:1.1408e-01 L2_sharp:5.1988e-02 L3_sharp:1.2959e-02 L4_sharp:6.4525e-03 L5_sharp:6.2115e-03 L6_sharp:5.8324e-03 L7_sharp:6.6025e-03 L8_sharp:3.9529e-03 L9_sharp:2.9827e-03 L10_sharp:2.4371e-03 L11_sharp:2.5169e-03 L12_sharp:9.6701e-03 total_fnorm:1.8783e+00 total_l1_linf:1.6279e+04 total_spectral:1.8783e+00 L1_fnorm:3.3114e-01 L2_fnorm:3.1852e-01 L3_fnorm:3.3315e-01 L4_fnorm:3.7770e-01 L5_fnorm:3.6777e-01 L6_fnorm:3.9539e-01 L7_fnorm:4.0821e-01 L8_fnorm:4.2330e-01 L9_fnorm:4.3148e-01 L10_fnorm:4.3642e-01 L11_fnorm:4.3396e-01 L12_fnorm:4.0466e-01 L1_l1linf:4.1041e-01 L2_l1linf:5.1750e-01 L3_l1linf:4.7265e-01 L4_l1linf:4.5521e-01 L5_l1linf:4.4768e-01 L6_l1linf:4.6390e-01 L7_l1linf:4.5097e-01 L8_l1linf:4.4922e-01 L9_l1linf:4.6640e-01 L10_l1linf:4.7628e-01 L11_l1linf:4.7422e-01 L12_l1linf:4.8403e-01 L1_spectral:6.2998e-02 L2_spectral:7.0144e-02 L3_spectral:6.9654e-02 L4_spectral:7.3187e-02 L5_spectral:6.7779e-02 L6_spectral:6.2946e-02 L7_spectral:6.4877e-02 L8_spectral:6.2139e-02 L9_spectral:6.5129e-02 L10_spectral:6.6955e-02 L11_spectral:6.8727e-02 L12_spectral:9.4331e-02 ip_v_neg_g:6.6915e-02 cos_v_neg_g:3.9227e-02 
v_norm:1.8783e+00 g_norm:9.0818e-01 hv_norm:8.5234e-01 cos_v_hv:7.3576e-02 hg_norm:4.7458e+00 cos_g_hg:5.7689e-01 v_par:2.7010e-03 v_perp:1.8783e+00 L1_cos_v_neg_g:1.0758e-01 L1_v_norm:3.3114e-01 L2_cos_v_neg_g:1.4456e-01 L2_v_norm:3.1852e-01 L3_cos_v_neg_g:7.8553e-02 L3_v_norm:3.3315e-01 L4_cos_v_neg_g:6.2724e-02 L4_v_norm:3.7770e-01 L5_cos_v_neg_g:6.5924e-02 L5_v_norm:3.6777e-01 L6_cos_v_neg_g:5.1076e-02 L6_v_norm:3.9539e-01 L7_cos_v_neg_g:4.5429e-02 L7_v_norm:4.0821e-01 L8_cos_v_neg_g:3.3913e-02 L8_v_norm:4.2330e-01 L9_cos_v_neg_g:3.0935e-02 L9_v_norm:4.3148e-01 L10_cos_v_neg_g:3.2221e-02 L10_v_norm:4.3642e-01 L11_cos_v_neg_g:3.0483e-02 L11_v_norm:4.3396e-01 L12_cos_v_neg_g:5.3733e-02 L12_v_norm:4.0466e-01 +step:1000 train loss:4.356574 +step:1001 train loss:4.380035 +step:1002 train loss:4.386061 +step:1003 train loss:4.355639 +step:1004 train loss:4.327443 +step:1005 train loss:4.336444 +step:1006 train loss:4.430711 +step:1007 train loss:4.377875 +step:1008 train loss:4.348868 +step:1009 train loss:4.407998 +step:1010 train loss:4.378403 +step:1011 train loss:4.394454 +step:1012 train loss:4.337492 +step:1013 train loss:4.308563 +step:1014 train loss:4.306478 +step:1015 train loss:4.339354 +step:1016 train loss:4.342902 +step:1017 train loss:4.289630 +step:1018 train loss:4.348338 +step:1019 train loss:4.290956 +step:1020 train loss:4.293922 +step:1021 train loss:4.393479 +step:1022 train loss:4.296069 +step:1023 train loss:4.300302 +step:1024 train loss:4.382221 +step:1025 train loss:4.343071 +step:1026 train loss:4.288501 +step:1027 train loss:4.323833 +step:1028 train loss:4.326089 +step:1029 train loss:4.264292 +step:1030 train loss:4.349647 +step:1031 train loss:4.337218 +step:1032 train loss:4.294517 +step:1033 train loss:4.260964 +step:1034 train loss:4.323034 +step:1035 train loss:4.323765 +step:1036 train loss:4.236579 +step:1037 train loss:4.298712 +step:1038 train loss:4.320441 +step:1039 train loss:4.451554 +step:1040 train loss:4.288511 +step:1041 train loss:4.274094 +step:1042 train loss:4.295233 +step:1043 train loss:4.298949 +step:1044 train loss:4.287858 +step:1045 train loss:4.299773 +step:1046 train loss:4.239259 +step:1047 train loss:4.277328 +step:1048 train loss:4.268298 +step:1049 train loss:4.329835 +step:1050 train loss:4.284529 +step:1051 train loss:4.255983 +step:1052 train loss:4.362950 +step:1053 train loss:4.266162 +step:1054 train loss:4.259207 +step:1055 train loss:4.314661 +step:1056 train loss:4.268618 +step:1057 train loss:4.163633 +step:1058 train loss:4.266613 +step:1059 train loss:4.250748 +step:1060 train loss:4.241547 +step:1061 train loss:4.292077 +step:1062 train loss:4.254718 +step:1063 train loss:4.260082 +step:1064 train loss:4.253979 +step:1065 train loss:4.259152 +step:1066 train loss:4.232702 +step:1067 train loss:4.263709 +step:1068 train loss:4.218368 +step:1069 train loss:4.239271 +step:1070 train loss:4.251333 +step:1071 train loss:4.264054 +step:1072 train loss:4.288733 +step:1073 train loss:4.208012 +step:1074 train loss:4.218551 +step:1075 train loss:4.227989 +step:1076 train loss:4.296167 +step:1077 train loss:4.221904 +step:1078 train loss:4.266297 +step:1079 train loss:4.314847 +step:1080 train loss:4.203679 +step:1081 train loss:4.271637 +step:1082 train loss:4.270438 +step:1083 train loss:4.239313 +step:1084 train loss:4.221877 +step:1085 train loss:4.284899 +step:1086 train loss:4.267283 +step:1087 train loss:4.252684 +step:1088 train loss:4.249829 +step:1089 train loss:4.255458 +step:1090 train loss:4.196720 +step:1091 
train loss:4.184384 +step:1092 train loss:4.291770 +step:1093 train loss:4.176670 +step:1094 train loss:4.237289 +step:1095 train loss:4.276742 +step:1096 train loss:4.212016 +step:1097 train loss:4.208536 +step:1098 train loss:4.179280 +step:1099 train loss:4.231394 +step:1100 train loss:4.277754 +step:1101 train loss:4.266376 +step:1102 train loss:4.279742 +step:1103 train loss:4.205469 +step:1104 train loss:4.235377 +step:1105 train loss:4.283397 +step:1106 train loss:4.222861 +step:1107 train loss:4.342979 +step:1108 train loss:4.287671 +step:1109 train loss:4.258645 +step:1110 train loss:4.224022 +step:1111 train loss:4.280654 +step:1112 train loss:4.195804 +step:1113 train loss:4.180161 +step:1114 train loss:4.156440 +step:1115 train loss:4.192700 +step:1116 train loss:4.258127 +step:1117 train loss:4.278623 +step:1118 train loss:4.300424 +step:1119 train loss:4.228373 +step:1120 train loss:4.261179 +step:1121 train loss:4.284738 +step:1122 train loss:4.245595 +step:1123 train loss:4.359759 +step:1124 train loss:4.233963 +step:1125 train loss:4.238713 +step:1126 train loss:4.196345 +step:1127 train loss:4.211066 +step:1128 train loss:4.209181 +step:1129 train loss:4.270130 +step:1130 train loss:4.186134 +step:1131 train loss:4.278589 +step:1132 train loss:4.218003 +step:1133 train loss:4.227454 +step:1134 train loss:4.195596 +step:1135 train loss:4.240534 +step:1136 train loss:4.253211 +step:1137 train loss:4.174526 +step:1138 train loss:4.247039 +step:1139 train loss:4.193104 +step:1140 train loss:4.269823 +step:1141 train loss:4.227429 +step:1142 train loss:4.163408 +step:1143 train loss:4.242033 +step:1144 train loss:4.268693 +step:1145 train loss:4.220007 +step:1146 train loss:4.167960 +step:1147 train loss:4.180934 +step:1148 train loss:4.205588 +step:1149 train loss:4.251718 +step:1150 train loss:4.259386 +step:1151 train loss:4.266068 +step:1152 train loss:4.164200 +step:1153 train loss:4.163823 +step:1154 train loss:4.150605 +step:1155 train loss:4.255847 +step:1156 train loss:4.159156 +step:1157 train loss:4.185505 +step:1158 train loss:4.237909 +step:1159 train loss:4.238207 +step:1160 train loss:4.164464 +step:1161 train loss:4.256926 +step:1162 train loss:4.198887 +step:1163 train loss:4.175189 +step:1164 train loss:4.084679 +step:1165 train loss:4.217555 +step:1166 train loss:4.143806 +step:1167 train loss:4.151987 +step:1168 train loss:4.209634 +step:1169 train loss:4.168656 +step:1170 train loss:4.169202 +step:1171 train loss:4.190400 +step:1172 train loss:4.157032 +step:1173 train loss:4.187387 +step:1174 train loss:4.128002 +step:1175 train loss:4.156995 +step:1176 train loss:4.274314 +step:1177 train loss:4.131784 +step:1178 train loss:4.188543 +step:1179 train loss:4.144351 +step:1180 train loss:4.184830 +step:1181 train loss:4.176426 +step:1182 train loss:4.228164 +step:1183 train loss:4.203935 +step:1184 train loss:4.148422 +step:1185 train loss:4.178843 +step:1186 train loss:4.168552 +step:1187 train loss:4.151745 +step:1188 train loss:4.187426 +step:1189 train loss:4.112654 +step:1190 train loss:4.170050 +step:1191 train loss:4.227701 +step:1192 train loss:4.175807 +step:1193 train loss:4.172516 +step:1194 train loss:4.282980 +step:1195 train loss:4.263299 +step:1196 train loss:4.155265 +step:1197 train loss:4.176027 +step:1198 train loss:4.156399 +step:1199 train loss:4.156021 +step:1200 train loss:4.212646 +step:1201 train loss:4.181571 +step:1202 train loss:4.122661 +step:1203 train loss:4.116152 +step:1204 train loss:4.151553 +step:1205 train loss:4.151929 
+step:1206 train loss:4.103355 +step:1207 train loss:4.195764 +step:1208 train loss:4.169426 +step:1209 train loss:4.091413 +step:1210 train loss:4.184808 +step:1211 train loss:4.133191 +step:1212 train loss:4.159201 +step:1213 train loss:4.089214 +step:1214 train loss:4.175227 +step:1215 train loss:4.142411 +step:1216 train loss:4.145482 +step:1217 train loss:4.113526 +step:1218 train loss:4.207926 +step:1219 train loss:4.186148 +step:1220 train loss:4.220694 +step:1221 train loss:4.228354 +step:1222 train loss:4.263749 +step:1223 train loss:4.240249 +step:1224 train loss:4.226609 +step:1225 train loss:4.291071 +step:1226 train loss:4.213557 +step:1227 train loss:4.233500 +step:1228 train loss:4.266034 +step:1229 train loss:4.259627 +step:1230 train loss:4.200109 +step:1231 train loss:4.242490 +step:1232 train loss:4.186112 +step:1233 train loss:4.173857 +step:1234 train loss:4.249470 +step:1235 train loss:4.222678 +step:1236 train loss:4.124119 +step:1237 train loss:4.219196 +step:1238 train loss:4.164805 +step:1239 train loss:4.203193 +step:1240 train loss:4.105354 +step:1241 train loss:4.138419 +step:1242 train loss:4.163219 +step:1243 train loss:4.110222 +step:1244 train loss:4.222269 +step:1245 train loss:4.232899 +step:1246 train loss:4.163612 +step:1247 train loss:4.137220 +step:1248 train loss:4.164268 +step:1249 train loss:4.095190 +step:1250 validation loss:4.092664 +step:1250 train loss:4.110017 +step:1251 train loss:4.175420 +step:1252 train loss:4.122274 +step:1253 train loss:4.077509 +step:1254 train loss:4.105115 +step:1255 train loss:4.096032 +step:1256 train loss:4.145018 +step:1257 train loss:4.125608 +step:1258 train loss:4.176950 +step:1259 train loss:4.158206 +step:1260 train loss:4.059495 +step:1261 train loss:4.294506 +step:1262 train loss:4.145986 +step:1263 train loss:4.106649 +step:1264 train loss:4.117370 +step:1265 train loss:4.169598 +step:1266 train loss:4.106258 +step:1267 train loss:4.116227 +step:1268 train loss:4.127580 +step:1269 train loss:4.122246 +step:1270 train loss:4.044760 +step:1271 train loss:4.053681 +step:1272 train loss:4.084609 +step:1273 train loss:4.137424 +step:1274 train loss:4.102515 +step:1275 train loss:4.129236 +step:1276 train loss:4.126243 +step:1277 train loss:4.135868 +step:1278 train loss:4.078300 +step:1279 train loss:4.084859 +step:1280 train loss:4.102805 +step:1281 train loss:4.155146 +step:1282 train loss:4.085697 +step:1283 train loss:4.166602 +step:1284 train loss:4.102610 +step:1285 train loss:4.149323 +step:1286 train loss:4.048449 +step:1287 train loss:4.089565 +step:1288 train loss:4.117790 +step:1289 train loss:4.180055 +step:1290 train loss:4.142099 +step:1291 train loss:4.103439 +step:1292 train loss:4.085329 +step:1293 train loss:4.072211 +step:1294 train loss:4.124586 +step:1295 train loss:4.106453 +step:1296 train loss:4.152155 +step:1297 train loss:4.107013 +step:1298 train loss:4.127168 +step:1299 train loss:4.160317 +step:1300 train loss:4.085634 +step:1301 train loss:4.128137 +step:1302 train loss:4.091744 +step:1303 train loss:4.127392 +step:1304 train loss:4.153607 +step:1305 train loss:4.127415 +step:1306 train loss:4.122719 +step:1307 train loss:4.111536 +step:1308 train loss:4.065578 +step:1309 train loss:4.080227 +step:1310 train loss:4.068978 +step:1311 train loss:4.073643 +step:1312 train loss:4.147436 +step:1313 train loss:4.060257 +step:1314 train loss:4.062663 +step:1315 train loss:4.107972 +step:1316 train loss:4.078563 +step:1317 train loss:3.974167 +step:1318 train loss:4.132743 +step:1319 
train loss:4.162464 +step:1320 train loss:4.078296 +step:1321 train loss:4.057535 +step:1322 train loss:4.164155 +step:1323 train loss:4.108741 +step:1324 train loss:4.217116 +step:1325 train loss:4.113461 +step:1326 train loss:4.172696 +step:1327 train loss:4.160066 +step:1328 train loss:4.066270 +step:1329 train loss:4.107250 +step:1330 train loss:4.140550 +step:1331 train loss:3.994732 +step:1332 train loss:4.161631 +step:1333 train loss:4.123638 +step:1334 train loss:4.127443 +step:1335 train loss:4.147608 +step:1336 train loss:4.156015 +step:1337 train loss:4.116712 +step:1338 train loss:4.098400 +step:1339 train loss:4.170439 +step:1340 train loss:4.137070 +step:1341 train loss:4.119685 +step:1342 train loss:4.090249 +step:1343 train loss:4.081353 +step:1344 train loss:4.144538 +step:1345 train loss:4.098165 +step:1346 train loss:4.180656 +step:1347 train loss:4.103007 +step:1348 train loss:4.073742 +step:1349 train loss:4.014803 +step:1350 train loss:4.053491 +step:1351 train loss:4.124089 +step:1352 train loss:4.090563 +step:1353 train loss:4.065874 +step:1354 train loss:4.068268 +step:1355 train loss:4.136264 +step:1356 train loss:4.046576 +step:1357 train loss:4.073349 +step:1358 train loss:4.067128 +step:1359 train loss:4.062441 +step:1360 train loss:4.098832 +step:1361 train loss:4.212741 +step:1362 train loss:4.125795 +step:1363 train loss:4.019165 +step:1364 train loss:4.041503 +step:1365 train loss:4.030409 +step:1366 train loss:4.071037 +step:1367 train loss:4.000773 +step:1368 train loss:4.035529 +step:1369 train loss:4.068297 +step:1370 train loss:4.087132 +step:1371 train loss:4.050176 +step:1372 train loss:4.079248 +step:1373 train loss:4.116378 +step:1374 train loss:4.125894 +step:1375 train loss:4.091599 +step:1376 train loss:4.142027 +step:1377 train loss:4.121690 +step:1378 train loss:4.112535 +step:1379 train loss:4.083246 +step:1380 train loss:4.155717 +step:1381 train loss:4.111387 +step:1382 train loss:4.090634 +step:1383 train loss:4.064915 +step:1384 train loss:4.149447 +step:1385 train loss:4.042906 +step:1386 train loss:4.108458 +step:1387 train loss:4.114738 +step:1388 train loss:4.069228 +step:1389 train loss:4.047202 +step:1390 train loss:4.078059 +step:1391 train loss:4.108668 +step:1392 train loss:4.084694 +step:1393 train loss:4.133619 +step:1394 train loss:4.059406 +step:1395 train loss:4.096357 +step:1396 train loss:4.079248 +step:1397 train loss:4.093544 +step:1398 train loss:4.100355 +step:1399 train loss:4.069788 +step:1400 train loss:4.044866 +step:1401 train loss:4.039870 +step:1402 train loss:4.042055 +step:1403 train loss:4.003600 +step:1404 train loss:4.063883 +step:1405 train loss:4.027306 +step:1406 train loss:4.052475 +step:1407 train loss:4.044659 +step:1408 train loss:4.028371 +step:1409 train loss:4.016143 +step:1410 train loss:4.034516 +step:1411 train loss:4.068383 +step:1412 train loss:4.122778 +step:1413 train loss:4.042597 +step:1414 train loss:4.074255 +step:1415 train loss:4.031524 +step:1416 train loss:4.086430 +step:1417 train loss:4.055256 +step:1418 train loss:3.995090 +step:1419 train loss:4.005294 +step:1420 train loss:4.029300 +step:1421 train loss:4.068580 +step:1422 train loss:4.045305 +step:1423 train loss:4.141706 +step:1424 train loss:4.045158 +step:1425 train loss:4.009336 +step:1426 train loss:4.032890 +step:1427 train loss:4.027248 +step:1428 train loss:4.012847 +step:1429 train loss:4.038386 +step:1430 train loss:4.046029 +step:1431 train loss:4.066927 +step:1432 train loss:4.049448 +step:1433 train loss:4.026687 
+step:1434 train loss:3.998195 +step:1435 train loss:3.991062 +step:1436 train loss:4.072213 +step:1437 train loss:3.994313 +step:1438 train loss:4.000993 +step:1439 train loss:3.987803 +step:1440 train loss:4.026699 +step:1441 train loss:4.110559 +step:1442 train loss:4.072375 +step:1443 train loss:4.001343 +step:1444 train loss:4.007351 +step:1445 train loss:4.006303 +step:1446 train loss:4.034540 +step:1447 train loss:4.052223 +step:1448 train loss:4.013761 +step:1449 train loss:4.044753 +step:1450 train loss:4.055470 +step:1451 train loss:3.979302 +step:1452 train loss:4.036404 +step:1453 train loss:4.031360 +step:1454 train loss:4.030024 +step:1455 train loss:3.960446 +step:1456 train loss:4.037791 +step:1457 train loss:3.971575 +step:1458 train loss:4.114534 +step:1459 train loss:4.034425 +step:1460 train loss:4.008965 +step:1461 train loss:4.068233 +step:1462 train loss:4.074958 +step:1463 train loss:4.027668 +step:1464 train loss:4.019407 +step:1465 train loss:4.007909 +step:1466 train loss:3.971169 +step:1467 train loss:4.109632 +step:1468 train loss:3.998055 +step:1469 train loss:4.069006 +step:1470 train loss:3.999937 +step:1471 train loss:3.996915 +step:1472 train loss:3.998575 +step:1473 train loss:3.998979 +step:1474 train loss:3.948677 +step:1475 train loss:4.009560 +step:1476 train loss:4.092859 +step:1477 train loss:4.039615 +step:1478 train loss:3.970805 +step:1479 train loss:4.003689 +step:1480 train loss:3.999501 +step:1481 train loss:3.968144 +step:1482 train loss:4.032893 +step:1483 train loss:4.019297 +step:1484 train loss:4.050166 +step:1485 train loss:4.062410 +step:1486 train loss:3.998840 +step:1487 train loss:3.987136 +step:1488 train loss:3.990430 +step:1489 train loss:3.981391 +step:1490 train loss:4.036785 +step:1491 train loss:4.030381 +step:1492 train loss:4.033043 +step:1493 train loss:3.975607 +step:1494 train loss:4.011445 +step:1495 train loss:3.996683 +step:1496 train loss:3.963438 +step:1497 train loss:4.037717 +step:1498 train loss:3.939754 +step:1499 train loss:3.981586 +step:1500 validation loss:3.956860 total_sharp:1.1778e-02 L1_sharp:2.0433e-02 L2_sharp:5.3085e-03 L3_sharp:4.4930e-03 L4_sharp:2.5444e-03 L5_sharp:2.3050e-03 L6_sharp:2.2277e-03 L7_sharp:2.9677e-03 L8_sharp:2.1336e-03 L9_sharp:2.1426e-03 L10_sharp:1.7268e-03 L11_sharp:1.7642e-03 L12_sharp:3.8391e-03 total_fnorm:2.0427e+00 total_l1_linf:1.7954e+04 total_spectral:2.0427e+00 L1_fnorm:3.7161e-01 L2_fnorm:3.9591e-01 L3_fnorm:4.3233e-01 L4_fnorm:4.6079e-01 L5_fnorm:4.5600e-01 L6_fnorm:4.6130e-01 L7_fnorm:4.5862e-01 L8_fnorm:4.7528e-01 L9_fnorm:4.7768e-01 L10_fnorm:4.8240e-01 L11_fnorm:4.7839e-01 L12_fnorm:4.5463e-01 L1_l1linf:4.8312e-01 L2_l1linf:5.6705e-01 L3_l1linf:5.0756e-01 L4_l1linf:5.4403e-01 L5_l1linf:5.3284e-01 L6_l1linf:5.0829e-01 L7_l1linf:4.9213e-01 L8_l1linf:5.0996e-01 L9_l1linf:5.0026e-01 L10_l1linf:5.1141e-01 L11_l1linf:5.0192e-01 L12_l1linf:4.8734e-01 L1_spectral:7.2293e-02 L2_spectral:7.5823e-02 L3_spectral:8.5796e-02 L4_spectral:7.7970e-02 L5_spectral:7.2231e-02 L6_spectral:6.1796e-02 L7_spectral:5.9336e-02 L8_spectral:6.3573e-02 L9_spectral:6.2346e-02 L10_spectral:6.4606e-02 L11_spectral:6.4137e-02 L12_spectral:8.9710e-02 ip_v_neg_g:2.5364e-02 cos_v_neg_g:1.6779e-02 v_norm:2.0427e+00 g_norm:7.4004e-01 hv_norm:3.4805e-01 cos_v_hv:6.9125e-02 hg_norm:2.3366e+00 cos_g_hg:3.4795e-01 v_par:1.1024e-03 v_perp:2.0427e+00 L1_cos_v_neg_g:3.3839e-02 L1_v_norm:3.7161e-01 L2_cos_v_neg_g:4.1226e-02 L2_v_norm:3.9591e-01 L3_cos_v_neg_g:3.3532e-02 L3_v_norm:4.3233e-01 
L4_cos_v_neg_g:2.6577e-02 L4_v_norm:4.6079e-01 L5_cos_v_neg_g:2.8887e-02 L5_v_norm:4.5600e-01 L6_cos_v_neg_g:2.3968e-02 L6_v_norm:4.6130e-01 L7_cos_v_neg_g:2.3149e-02 L7_v_norm:4.5862e-01 L8_cos_v_neg_g:1.9608e-02 L8_v_norm:4.7528e-01 L9_cos_v_neg_g:2.1959e-02 L9_v_norm:4.7768e-01 L10_cos_v_neg_g:2.0973e-02 L10_v_norm:4.8240e-01 L11_cos_v_neg_g:1.8673e-02 L11_v_norm:4.7839e-01 L12_cos_v_neg_g:2.2032e-02 L12_v_norm:4.5463e-01 +step:1500 train loss:3.978863 +step:1501 train loss:4.003369 +step:1502 train loss:3.943238 +step:1503 train loss:3.994913 +step:1504 train loss:3.959919 +step:1505 train loss:3.932994 +step:1506 train loss:3.923989 +step:1507 train loss:3.944733 +step:1508 train loss:3.960124 +step:1509 train loss:4.015626 +step:1510 train loss:3.965072 +step:1511 train loss:3.986250 +step:1512 train loss:3.959895 +step:1513 train loss:4.027995 +step:1514 train loss:3.982025 +step:1515 train loss:4.044991 +step:1516 train loss:3.974060 +step:1517 train loss:3.976086 +step:1518 train loss:4.060629 +step:1519 train loss:4.019916 +step:1520 train loss:4.063318 +step:1521 train loss:3.963074 +step:1522 train loss:4.021841 +step:1523 train loss:4.019444 +step:1524 train loss:3.942811 +step:1525 train loss:4.025097 +step:1526 train loss:3.939744 +step:1527 train loss:3.992680 +step:1528 train loss:4.041890 +step:1529 train loss:3.998919 +step:1530 train loss:4.045378 +step:1531 train loss:3.963464 +step:1532 train loss:4.036099 +step:1533 train loss:4.008952 +step:1534 train loss:3.951525 +step:1535 train loss:4.006749 +step:1536 train loss:4.030252 +step:1537 train loss:3.979754 +step:1538 train loss:3.985847 +step:1539 train loss:3.987123 +step:1540 train loss:4.012762 +step:1541 train loss:3.968991 +step:1542 train loss:4.054035 +step:1543 train loss:4.081205 +step:1544 train loss:3.952828 +step:1545 train loss:3.937374 +step:1546 train loss:3.980507 +step:1547 train loss:3.966875 +step:1548 train loss:4.004058 +step:1549 train loss:3.929523 +step:1550 train loss:4.044459 +step:1551 train loss:3.976939 +step:1552 train loss:4.001122 +step:1553 train loss:4.012371 +step:1554 train loss:4.019059 +step:1555 train loss:3.977532 +step:1556 train loss:3.957791 +step:1557 train loss:3.967007 +step:1558 train loss:3.989318 +step:1559 train loss:3.954709 +step:1560 train loss:4.034554 +step:1561 train loss:4.008654 +step:1562 train loss:3.902821 +step:1563 train loss:3.881700 +step:1564 train loss:4.013287 +step:1565 train loss:3.988666 +step:1566 train loss:4.002533 +step:1567 train loss:4.002392 +step:1568 train loss:3.955853 +step:1569 train loss:3.951578 +step:1570 train loss:3.972634 +step:1571 train loss:3.948736 +step:1572 train loss:3.953938 +step:1573 train loss:3.993458 +step:1574 train loss:3.949596 +step:1575 train loss:3.973365 +step:1576 train loss:3.929620 +step:1577 train loss:3.955451 +step:1578 train loss:3.942230 +step:1579 train loss:4.017724 +step:1580 train loss:3.973317 +step:1581 train loss:4.010491 +step:1582 train loss:4.010718 +step:1583 train loss:3.983693 +step:1584 train loss:3.904462 +step:1585 train loss:3.989047 +step:1586 train loss:3.955540 +step:1587 train loss:3.977901 +step:1588 train loss:3.972264 +step:1589 train loss:4.014969 +step:1590 train loss:3.920590 +step:1591 train loss:3.971433 +step:1592 train loss:3.922088 +step:1593 train loss:3.959049 +step:1594 train loss:3.959611 +step:1595 train loss:3.950963 +step:1596 train loss:3.957058 +step:1597 train loss:3.895121 +step:1598 train loss:3.990467 +step:1599 train loss:3.999494 +step:1600 train 
loss:3.880167 +step:1601 train loss:3.957933 +step:1602 train loss:4.018201 +step:1603 train loss:4.013348 +step:1604 train loss:3.937124 +step:1605 train loss:3.981746 +step:1606 train loss:4.033237 +step:1607 train loss:3.915518 +step:1608 train loss:3.950824 +step:1609 train loss:3.967886 +step:1610 train loss:4.025097 +step:1611 train loss:3.958659 +step:1612 train loss:3.877903 +step:1613 train loss:3.948884 +step:1614 train loss:4.055164 +step:1615 train loss:3.986203 +step:1616 train loss:4.007048 +step:1617 train loss:3.980250 +step:1618 train loss:3.982962 +step:1619 train loss:4.152542 +step:1620 train loss:3.939629 +step:1621 train loss:4.001009 +step:1622 train loss:3.921668 +step:1623 train loss:3.985293 +step:1624 train loss:3.951111 +step:1625 train loss:4.024397 +step:1626 train loss:3.917357 +step:1627 train loss:3.928457 +step:1628 train loss:3.951031 +step:1629 train loss:3.979749 +step:1630 train loss:3.989457 +step:1631 train loss:3.939293 +step:1632 train loss:3.914616 +step:1633 train loss:3.928229 +step:1634 train loss:3.983871 +step:1635 train loss:3.925777 +step:1636 train loss:3.909888 +step:1637 train loss:3.980611 +step:1638 train loss:4.086427 +step:1639 train loss:3.891795 +step:1640 train loss:3.970799 +step:1641 train loss:3.938048 +step:1642 train loss:4.029313 +step:1643 train loss:3.928127 +step:1644 train loss:3.939342 +step:1645 train loss:3.917752 +step:1646 train loss:3.997813 +step:1647 train loss:3.890546 +step:1648 train loss:3.950392 +step:1649 train loss:3.915963 +step:1650 train loss:3.928589 +step:1651 train loss:3.950681 +step:1652 train loss:3.966917 +step:1653 train loss:3.971521 +step:1654 train loss:3.964329 +step:1655 train loss:3.940040 +step:1656 train loss:3.933495 +step:1657 train loss:3.941968 +step:1658 train loss:3.911465 +step:1659 train loss:3.983993 +step:1660 train loss:3.882893 +step:1661 train loss:3.994249 +step:1662 train loss:3.930970 +step:1663 train loss:3.923913 +step:1664 train loss:4.019043 +step:1665 train loss:3.940185 +step:1666 train loss:3.952458 +step:1667 train loss:3.969072 +step:1668 train loss:3.944183 +step:1669 train loss:3.915482 +step:1670 train loss:3.968290 +step:1671 train loss:3.959552 +step:1672 train loss:3.955203 +step:1673 train loss:3.913636 +step:1674 train loss:3.912574 +step:1675 train loss:3.955639 +step:1676 train loss:4.220374 +step:1677 train loss:3.990833 +step:1678 train loss:3.920609 +step:1679 train loss:4.027617 +step:1680 train loss:3.950772 +step:1681 train loss:4.000846 +step:1682 train loss:3.957352 +step:1683 train loss:3.950243 +step:1684 train loss:3.907333 +step:1685 train loss:3.961755 +step:1686 train loss:3.941859 +step:1687 train loss:3.953837 +step:1688 train loss:3.932620 +step:1689 train loss:3.920594 +step:1690 train loss:3.943799 +step:1691 train loss:3.932214 +step:1692 train loss:3.948111 +step:1693 train loss:3.918285 +step:1694 train loss:3.871232 +step:1695 train loss:3.894471 +step:1696 train loss:3.904489 +step:1697 train loss:3.947347 +step:1698 train loss:3.944418 +step:1699 train loss:3.899450 +step:1700 train loss:3.980583 +step:1701 train loss:3.919611 +step:1702 train loss:3.908473 +step:1703 train loss:3.927419 +step:1704 train loss:3.936152 +step:1705 train loss:3.947415 +step:1706 train loss:3.959472 +step:1707 train loss:3.959824 +step:1708 train loss:3.877128 +step:1709 train loss:3.978091 +step:1710 train loss:3.898644 +step:1711 train loss:3.907168 +step:1712 train loss:3.931256 +step:1713 train loss:3.892555 +step:1714 train loss:4.258387 
+step:1715 train loss:3.910206 +step:1716 train loss:3.894171 +step:1717 train loss:3.897689 +step:1718 train loss:3.970960 +step:1719 train loss:3.881804 +step:1720 train loss:3.965181 +step:1721 train loss:3.902787 +step:1722 train loss:3.878138 +step:1723 train loss:3.976695 +step:1724 train loss:3.922513 +step:1725 train loss:3.920209 +step:1726 train loss:3.918196 +step:1727 train loss:3.954407 +step:1728 train loss:3.967816 +step:1729 train loss:3.891481 +step:1730 train loss:3.968132 +step:1731 train loss:3.898701 +step:1732 train loss:3.906366 +step:1733 train loss:3.897231 +step:1734 train loss:3.942505 +step:1735 train loss:4.004162 +step:1736 train loss:3.911522 +step:1737 train loss:3.941478 +step:1738 train loss:3.905255 +step:1739 train loss:3.969600 +step:1740 train loss:3.958285 +step:1741 train loss:4.016979 +step:1742 train loss:4.007996 +step:1743 train loss:3.903830 +step:1744 train loss:3.917459 +step:1745 train loss:3.905527 +step:1746 train loss:3.887358 +step:1747 train loss:3.924188 +step:1748 train loss:3.863576 +step:1749 train loss:3.903249 +step:1750 validation loss:3.880581 +step:1750 train loss:3.938075 +step:1751 train loss:3.954799 +step:1752 train loss:3.918764 +step:1753 train loss:3.944084 +step:1754 train loss:3.937341 +step:1755 train loss:3.931122 +step:1756 train loss:3.952094 +step:1757 train loss:3.954912 +step:1758 train loss:3.872689 +step:1759 train loss:3.962590 +step:1760 train loss:3.912967 +step:1761 train loss:3.892068 +step:1762 train loss:3.890247 +step:1763 train loss:3.891475 +step:1764 train loss:4.184983 +step:1765 train loss:3.896327 +step:1766 train loss:3.990156 +step:1767 train loss:3.896888 +step:1768 train loss:3.875959 +step:1769 train loss:3.899530 +step:1770 train loss:3.909508 +step:1771 train loss:3.887849 +step:1772 train loss:4.006398 +step:1773 train loss:3.920614 +step:1774 train loss:3.931576 +step:1775 train loss:4.043125 +step:1776 train loss:3.930841 +step:1777 train loss:3.922828 +step:1778 train loss:3.976564 +step:1779 train loss:3.904230 +step:1780 train loss:3.958269 +step:1781 train loss:3.962221 +step:1782 train loss:3.990535 +step:1783 train loss:3.914958 +step:1784 train loss:4.004176 +step:1785 train loss:3.911712 +step:1786 train loss:3.905199 +step:1787 train loss:3.902870 +step:1788 train loss:3.926942 +step:1789 train loss:3.878593 +step:1790 train loss:3.894237 +step:1791 train loss:3.971875 +step:1792 train loss:3.969137 +step:1793 train loss:3.887749 +step:1794 train loss:3.931606 +step:1795 train loss:3.880963 +step:1796 train loss:3.862942 +step:1797 train loss:3.928840 +step:1798 train loss:3.869483 +step:1799 train loss:3.921617 +step:1800 train loss:3.950260 +step:1801 train loss:3.943340 +step:1802 train loss:3.948415 +step:1803 train loss:3.936654 +step:1804 train loss:3.936792 +step:1805 train loss:3.929167 +step:1806 train loss:3.937481 +step:1807 train loss:3.864005 +step:1808 train loss:3.929693 +step:1809 train loss:3.913436 +step:1810 train loss:3.904692 +step:1811 train loss:3.920808 +step:1812 train loss:3.901142 +step:1813 train loss:3.916106 +step:1814 train loss:3.976383 +step:1815 train loss:3.918145 +step:1816 train loss:3.872046 +step:1817 train loss:3.865978 +step:1818 train loss:3.923838 +step:1819 train loss:3.895934 +step:1820 train loss:3.928143 +step:1821 train loss:3.894272 +step:1822 train loss:3.872523 +step:1823 train loss:3.866510 +step:1824 train loss:3.941693 +step:1825 train loss:3.856520 +step:1826 train loss:3.899923 +step:1827 train loss:3.867296 +step:1828 
train loss:3.914792 +step:1829 train loss:3.883247 +step:1830 train loss:4.081040 +step:1831 train loss:3.837752 +step:1832 train loss:3.885886 +step:1833 train loss:3.931848 +step:1834 train loss:3.880617 +step:1835 train loss:3.892083 +step:1836 train loss:3.930528 +step:1837 train loss:3.850876 +step:1838 train loss:3.946772 +step:1839 train loss:3.929020 +step:1840 train loss:3.896605 +step:1841 train loss:3.926180 +step:1842 train loss:3.896437 +step:1843 train loss:3.845439 +step:1844 train loss:3.912418 +step:1845 train loss:3.877286 +step:1846 train loss:3.931610 +step:1847 train loss:3.985699 +step:1848 train loss:3.781585 +step:1849 train loss:3.879289 +step:1850 train loss:3.857764 +step:1851 train loss:3.891735 +step:1852 train loss:3.877996 +step:1853 train loss:3.932402 +step:1854 train loss:3.897253 +step:1855 train loss:3.883824 +step:1856 train loss:3.885804 +step:1857 train loss:3.893053 +step:1858 train loss:3.939454 +step:1859 train loss:3.895770 +step:1860 train loss:3.897943 +step:1861 train loss:3.899061 +step:1862 train loss:3.950929 +step:1863 train loss:4.002032 +step:1864 train loss:3.918352 +step:1865 train loss:3.933691 +step:1866 train loss:3.999728 +step:1867 train loss:4.060192 +step:1868 train loss:4.044544 +step:1869 train loss:3.938743 +step:1870 train loss:3.963741 +step:1871 train loss:3.887448 +step:1872 train loss:3.952609 +step:1873 train loss:4.019452 +step:1874 train loss:3.874119 +step:1875 train loss:3.947976 +step:1876 train loss:3.916333 +step:1877 train loss:3.979770 +step:1878 train loss:3.928163 +step:1879 train loss:3.983757 +step:1880 train loss:4.062617 +step:1881 train loss:3.964896 +step:1882 train loss:3.967380 +step:1883 train loss:3.991686 +step:1884 train loss:3.999316 +step:1885 train loss:3.950117 +step:1886 train loss:3.877745 +step:1887 train loss:3.895859 +step:1888 train loss:3.895216 +step:1889 train loss:3.913418 +step:1890 train loss:3.907002 +step:1891 train loss:3.840742 +step:1892 train loss:3.931271 +step:1893 train loss:3.854949 +step:1894 train loss:3.869999 +step:1895 train loss:3.906595 +step:1896 train loss:3.951953 +step:1897 train loss:3.848293 +step:1898 train loss:3.893116 +step:1899 train loss:3.912148 +step:1900 train loss:3.855542 +step:1901 train loss:3.933332 +step:1902 train loss:3.923215 +step:1903 train loss:3.863330 +step:1904 train loss:3.855610 +step:1905 train loss:3.851089 +step:1906 train loss:3.906313 +step:1907 train loss:3.853482 +step:1908 train loss:3.865769 +step:1909 train loss:3.960009 +step:1910 train loss:3.854273 +step:1911 train loss:3.855565 +step:1912 train loss:3.907137 +step:1913 train loss:3.848376 +step:1914 train loss:3.882824 +step:1915 train loss:3.844974 +step:1916 train loss:3.893838 +step:1917 train loss:3.877448 +step:1918 train loss:3.789056 +step:1919 train loss:3.943004 +step:1920 train loss:4.046022 +step:1921 train loss:3.826415 +step:1922 train loss:3.809232 +step:1923 train loss:3.905391 +step:1924 train loss:3.948083 +step:1925 train loss:3.889427 +step:1926 train loss:3.832268 +step:1927 train loss:3.913144 +step:1928 train loss:3.823805 +step:1929 train loss:3.852403 +step:1930 train loss:3.922107 +step:1931 train loss:3.833725 +step:1932 train loss:3.882758 +step:1933 train loss:3.881755 +step:1934 train loss:3.953528 +step:1935 train loss:3.902558 +step:1936 train loss:3.876052 +step:1937 train loss:3.812708 +step:1938 train loss:4.180814 +step:1939 train loss:3.937787 +step:1940 train loss:3.919605 +step:1941 train loss:3.926999 +step:1942 train loss:3.907524 
+step:1943 train loss:3.906387 +step:1944 train loss:3.863581 +step:1945 train loss:3.863315 +step:1946 train loss:3.885868 +step:1947 train loss:3.904583 +step:1948 train loss:3.813802 +step:1949 train loss:3.922522 +step:1950 train loss:3.860390 +step:1951 train loss:3.880350 +step:1952 train loss:3.908071 +step:1953 train loss:3.840293 +step:1954 train loss:3.877662 +step:1955 train loss:3.830755 +step:1956 train loss:3.911282 +step:1957 train loss:3.937873 +step:1958 train loss:3.953894 +step:1959 train loss:3.825041 +step:1960 train loss:3.868965 +step:1961 train loss:3.899288 +step:1962 train loss:3.884825 +step:1963 train loss:3.864886 +step:1964 train loss:3.900610 +step:1965 train loss:3.934342 +step:1966 train loss:3.845127 +step:1967 train loss:3.899912 +step:1968 train loss:3.838015 +step:1969 train loss:3.857113 +step:1970 train loss:3.919765 +step:1971 train loss:3.826288 +step:1972 train loss:3.932901 +step:1973 train loss:3.826588 +step:1974 train loss:3.876784 +step:1975 train loss:3.831842 +step:1976 train loss:3.855928 +step:1977 train loss:3.897257 +step:1978 train loss:3.836675 +step:1979 train loss:3.820053 +step:1980 train loss:3.856181 +step:1981 train loss:3.837919 +step:1982 train loss:3.919419 +step:1983 train loss:3.863255 +step:1984 train loss:3.903150 +step:1985 train loss:3.889263 +step:1986 train loss:3.880218 +step:1987 train loss:3.836241 +step:1988 train loss:3.868617 +step:1989 train loss:4.007867 +step:1990 train loss:3.843870 +step:1991 train loss:3.834193 +step:1992 train loss:3.843007 +step:1993 train loss:3.875678 +step:1994 train loss:3.867967 +step:1995 train loss:3.819389 +step:1996 train loss:3.872065 +step:1997 train loss:3.877454 +step:1998 train loss:3.830143 +step:1999 train loss:3.940495 +step:2000 validation loss:3.810988 total_sharp:8.0273e-03 L1_sharp:1.2652e-02 L2_sharp:1.7197e-03 L3_sharp:2.4049e-03 L4_sharp:1.5574e-03 L5_sharp:2.2630e-03 L6_sharp:1.5063e-03 L7_sharp:2.2111e-03 L8_sharp:1.7710e-03 L9_sharp:1.5085e-03 L10_sharp:1.3285e-03 L11_sharp:1.3042e-03 L12_sharp:3.1981e-03 total_fnorm:2.1097e+00 total_l1_linf:1.8622e+04 total_spectral:2.1097e+00 L1_fnorm:4.1498e-01 L2_fnorm:4.3950e-01 L3_fnorm:4.6399e-01 L4_fnorm:4.8584e-01 L5_fnorm:4.7955e-01 L6_fnorm:4.8520e-01 L7_fnorm:4.8078e-01 L8_fnorm:4.9212e-01 L9_fnorm:4.9040e-01 L10_fnorm:4.9506e-01 L11_fnorm:4.9370e-01 L12_fnorm:4.7211e-01 L1_l1linf:5.3084e-01 L2_l1linf:4.9378e-01 L3_l1linf:5.1206e-01 L4_l1linf:5.4538e-01 L5_l1linf:5.3452e-01 L6_l1linf:5.2570e-01 L7_l1linf:5.0210e-01 L8_l1linf:5.0736e-01 L9_l1linf:5.0842e-01 L10_l1linf:5.1220e-01 L11_l1linf:5.2832e-01 L12_l1linf:5.1175e-01 L1_spectral:7.8826e-02 L2_spectral:8.0528e-02 L3_spectral:8.4742e-02 L4_spectral:7.4301e-02 L5_spectral:6.8290e-02 L6_spectral:5.6303e-02 L7_spectral:5.4007e-02 L8_spectral:5.8895e-02 L9_spectral:5.7857e-02 L10_spectral:5.9625e-02 L11_spectral:6.0436e-02 L12_spectral:8.4390e-02 ip_v_neg_g:1.8553e-02 cos_v_neg_g:1.1053e-02 v_norm:2.1097e+00 g_norm:7.9561e-01 hv_norm:3.3877e-01 cos_v_hv:4.9991e-02 hg_norm:2.1292e+00 cos_g_hg:4.8187e-01 v_par:9.2377e-04 v_perp:2.1097e+00 L1_cos_v_neg_g:2.0220e-02 L1_v_norm:4.1498e-01 L2_cos_v_neg_g:2.2961e-02 L2_v_norm:4.3950e-01 L3_cos_v_neg_g:2.4254e-02 L3_v_norm:4.6399e-01 L4_cos_v_neg_g:1.9604e-02 L4_v_norm:4.8584e-01 L5_cos_v_neg_g:2.2361e-02 L5_v_norm:4.7955e-01 L6_cos_v_neg_g:1.4083e-02 L6_v_norm:4.8520e-01 L7_cos_v_neg_g:1.3658e-02 L7_v_norm:4.8078e-01 L8_cos_v_neg_g:1.3234e-02 L8_v_norm:4.9212e-01 L9_cos_v_neg_g:1.2540e-02 L9_v_norm:4.9040e-01 
L10_cos_v_neg_g:1.3263e-02 L10_v_norm:4.9506e-01 L11_cos_v_neg_g:1.2244e-02 L11_v_norm:4.9370e-01 L12_cos_v_neg_g:1.5214e-02 L12_v_norm:4.7211e-01 +step:2000 train loss:3.904930 +step:2001 train loss:3.832392 +step:2002 train loss:3.934200 +step:2003 train loss:3.978591 +step:2004 train loss:3.848078 +step:2005 train loss:3.947723 +step:2006 train loss:3.833050 +step:2007 train loss:3.906783 +step:2008 train loss:3.851872 +step:2009 train loss:3.850514 +step:2010 train loss:3.980302 +step:2011 train loss:3.832582 +step:2012 train loss:3.860122 +step:2013 train loss:3.869481 +step:2014 train loss:3.775214 +step:2015 train loss:3.892037 +step:2016 train loss:3.875509 +step:2017 train loss:3.878340 +step:2018 train loss:3.847801 +step:2019 train loss:3.876768 +step:2020 train loss:3.889139 +step:2021 train loss:3.850072 +step:2022 train loss:3.889558 +step:2023 train loss:3.864919 +step:2024 train loss:3.915929 +step:2025 train loss:3.858163 +step:2026 train loss:3.838920 +step:2027 train loss:3.866096 +step:2028 train loss:3.799516 +step:2029 train loss:3.828951 +step:2030 train loss:3.830303 +step:2031 train loss:3.793236 +step:2032 train loss:3.847420 +step:2033 train loss:3.845797 +step:2034 train loss:3.849554 +step:2035 train loss:3.885334 +step:2036 train loss:3.876699 +step:2037 train loss:3.859315 +step:2038 train loss:3.851710 +step:2039 train loss:3.845908 +step:2040 train loss:3.871419 +step:2041 train loss:3.872228 +step:2042 train loss:3.805796 +step:2043 train loss:3.956437 +step:2044 train loss:3.827230 +step:2045 train loss:3.848179 +step:2046 train loss:3.855847 +step:2047 train loss:3.830324 +step:2048 train loss:3.873325 +step:2049 train loss:3.831414 +step:2050 train loss:3.854378 +step:2051 train loss:3.817825 +step:2052 train loss:3.865427 +step:2053 train loss:3.867409 +step:2054 train loss:3.843987 +step:2055 train loss:3.847207 +step:2056 train loss:3.885990 +step:2057 train loss:3.891988 +step:2058 train loss:3.855660 +step:2059 train loss:3.937276 +step:2060 train loss:3.881951 +step:2061 train loss:3.835313 +step:2062 train loss:3.861052 +step:2063 train loss:3.764720 +step:2064 train loss:3.881459 +step:2065 train loss:3.889491 +step:2066 train loss:3.753955 +step:2067 train loss:3.801466 +step:2068 train loss:3.907082 +step:2069 train loss:3.842205 +step:2070 train loss:3.841783 +step:2071 train loss:3.884881 +step:2072 train loss:3.815680 +step:2073 train loss:3.861998 +step:2074 train loss:3.842645 +step:2075 train loss:3.928507 +step:2076 train loss:3.869738 +step:2077 train loss:3.885404 +step:2078 train loss:3.839778 +step:2079 train loss:3.989701 +step:2080 train loss:3.806877 +step:2081 train loss:3.916714 +step:2082 train loss:3.851960 +step:2083 train loss:3.836055 +step:2084 train loss:3.815908 +step:2085 train loss:3.859405 +step:2086 train loss:3.869645 +step:2087 train loss:3.912232 +step:2088 train loss:3.776199 +step:2089 train loss:3.804242 +step:2090 train loss:3.838218 +step:2091 train loss:3.857459 +step:2092 train loss:3.838207 +step:2093 train loss:3.827087 +step:2094 train loss:3.863394 +step:2095 train loss:3.809124 +step:2096 train loss:3.795534 +step:2097 train loss:3.829914 +step:2098 train loss:3.831742 +step:2099 train loss:3.813019 +step:2100 train loss:3.883137 +step:2101 train loss:3.874296 +step:2102 train loss:3.839742 +step:2103 train loss:3.860116 +step:2104 train loss:3.841647 +step:2105 train loss:3.847291 +step:2106 train loss:3.843030 +step:2107 train loss:3.909226 +step:2108 train loss:3.824374 +step:2109 train 
loss:3.785111 +step:2110 train loss:3.879383 +step:2111 train loss:3.826707 +step:2112 train loss:3.888205 +step:2113 train loss:3.822830 +step:2114 train loss:3.827888 +step:2115 train loss:3.879128 +step:2116 train loss:3.819478 +step:2117 train loss:3.836467 +step:2118 train loss:3.833456 +step:2119 train loss:3.767000 +step:2120 train loss:3.855032 +step:2121 train loss:3.839102 +step:2122 train loss:3.848446 +step:2123 train loss:3.905467 +step:2124 train loss:3.910011 +step:2125 train loss:3.812555 +step:2126 train loss:3.816268 +step:2127 train loss:3.808215 +step:2128 train loss:3.802841 +step:2129 train loss:3.827032 +step:2130 train loss:3.832748 +step:2131 train loss:3.853696 +step:2132 train loss:3.785334 +step:2133 train loss:3.893554 +step:2134 train loss:3.846275 +step:2135 train loss:3.801093 +step:2136 train loss:3.894246 +step:2137 train loss:3.858287 +step:2138 train loss:3.815635 +step:2139 train loss:3.817953 +step:2140 train loss:3.820291 +step:2141 train loss:3.870153 +step:2142 train loss:3.840707 +step:2143 train loss:3.760840 +step:2144 train loss:3.867336 +step:2145 train loss:3.836728 +step:2146 train loss:3.872153 +step:2147 train loss:3.976843 +step:2148 train loss:3.776402 +step:2149 train loss:3.790571 +step:2150 train loss:3.815981 +step:2151 train loss:3.850805 +step:2152 train loss:3.844834 +step:2153 train loss:3.884670 +step:2154 train loss:3.802168 +step:2155 train loss:3.883936 +step:2156 train loss:3.809132 +step:2157 train loss:3.878226 +step:2158 train loss:3.918619 +step:2159 train loss:3.844057 +step:2160 train loss:3.915842 +step:2161 train loss:3.817572 +step:2162 train loss:3.825035 +step:2163 train loss:3.802336 +step:2164 train loss:3.826130 +step:2165 train loss:3.801426 +step:2166 train loss:3.920391 +step:2167 train loss:3.826991 +step:2168 train loss:3.840235 +step:2169 train loss:3.789785 +step:2170 train loss:3.937520 +step:2171 train loss:3.893031 +step:2172 train loss:3.832078 +step:2173 train loss:3.822170 +step:2174 train loss:3.885490 +step:2175 train loss:3.815819 +step:2176 train loss:3.896193 +step:2177 train loss:3.866251 +step:2178 train loss:3.789950 +step:2179 train loss:3.856612 +step:2180 train loss:3.875186 +step:2181 train loss:3.802239 +step:2182 train loss:3.853079 +step:2183 train loss:3.849892 +step:2184 train loss:3.799173 +step:2185 train loss:3.778036 +step:2186 train loss:3.821069 +step:2187 train loss:3.831858 +step:2188 train loss:3.881220 +step:2189 train loss:3.774778 +step:2190 train loss:3.820977 +step:2191 train loss:3.874692 +step:2192 train loss:3.803696 +step:2193 train loss:3.766712 +step:2194 train loss:3.778223 +step:2195 train loss:3.799710 +step:2196 train loss:3.806008 +step:2197 train loss:3.789044 +step:2198 train loss:3.815792 +step:2199 train loss:3.884808 +step:2200 train loss:3.813633 +step:2201 train loss:3.818506 +step:2202 train loss:3.781539 +step:2203 train loss:3.799869 +step:2204 train loss:3.835166 +step:2205 train loss:3.817587 +step:2206 train loss:3.819990 +step:2207 train loss:3.815374 +step:2208 train loss:3.790432 +step:2209 train loss:4.071483 +step:2210 train loss:3.840672 +step:2211 train loss:3.836893 +step:2212 train loss:3.811438 +step:2213 train loss:3.898014 +step:2214 train loss:3.885561 +step:2215 train loss:3.811211 +step:2216 train loss:3.776356 +step:2217 train loss:3.804810 +step:2218 train loss:3.806272 +step:2219 train loss:3.841609 +step:2220 train loss:3.781158 +step:2221 train loss:3.813367 +step:2222 train loss:3.828303 +step:2223 train loss:3.868372 
+step:2224 train loss:3.839912 +step:2225 train loss:3.782582 +step:2226 train loss:3.847830 +step:2227 train loss:3.849898 +step:2228 train loss:3.849359 +step:2229 train loss:3.790733 +step:2230 train loss:3.913089 +step:2231 train loss:3.829837 +step:2232 train loss:3.832104 +step:2233 train loss:3.871350 +step:2234 train loss:3.768508 +step:2235 train loss:3.856851 +step:2236 train loss:3.799009 +step:2237 train loss:3.933311 +step:2238 train loss:3.734466 +step:2239 train loss:3.810202 +step:2240 train loss:3.826668 +step:2241 train loss:3.739989 +step:2242 train loss:3.884108 +step:2243 train loss:3.918861 +step:2244 train loss:3.795669 +step:2245 train loss:3.798265 +step:2246 train loss:3.763998 +step:2247 train loss:3.768125 +step:2248 train loss:3.824018 +step:2249 train loss:3.803651 +step:2250 validation loss:3.757036 +step:2250 train loss:3.818340 +step:2251 train loss:3.779055 +step:2252 train loss:3.780653 +step:2253 train loss:3.808671 +step:2254 train loss:3.814822 +step:2255 train loss:3.772367 +step:2256 train loss:3.823917 +step:2257 train loss:3.809116 +step:2258 train loss:3.801928 +step:2259 train loss:3.818935 +step:2260 train loss:3.771361 +step:2261 train loss:3.855271 +step:2262 train loss:3.878479 +step:2263 train loss:3.833645 +step:2264 train loss:3.943004 +step:2265 train loss:3.791706 +step:2266 train loss:3.832846 +step:2267 train loss:3.791131 +step:2268 train loss:3.795413 +step:2269 train loss:3.799494 +step:2270 train loss:3.786320 +step:2271 train loss:3.801895 +step:2272 train loss:3.839214 +step:2273 train loss:3.758519 +step:2274 train loss:3.789990 +step:2275 train loss:3.744979 +step:2276 train loss:3.819789 +step:2277 train loss:3.830889 +step:2278 train loss:3.810318 +step:2279 train loss:3.796497 +step:2280 train loss:3.704201 +step:2281 train loss:3.850363 +step:2282 train loss:3.782862 +step:2283 train loss:3.766422 +step:2284 train loss:3.784178 +step:2285 train loss:3.836786 +step:2286 train loss:3.797289 +step:2287 train loss:3.832842 +step:2288 train loss:3.805682 +step:2289 train loss:3.801892 +step:2290 train loss:3.807619 +step:2291 train loss:3.794290 +step:2292 train loss:3.836883 +step:2293 train loss:3.815986 +step:2294 train loss:3.813096 +step:2295 train loss:3.867350 +step:2296 train loss:3.802324 +step:2297 train loss:3.782984 +step:2298 train loss:3.840891 +step:2299 train loss:3.817799 +step:2300 train loss:3.726777 +step:2301 train loss:3.825403 +step:2302 train loss:3.840436 +step:2303 train loss:3.807620 +step:2304 train loss:3.797768 +step:2305 train loss:3.837504 +step:2306 train loss:3.832214 +step:2307 train loss:3.814402 +step:2308 train loss:3.834304 +step:2309 train loss:3.785275 +step:2310 train loss:3.772961 +step:2311 train loss:3.758141 +step:2312 train loss:3.824467 +step:2313 train loss:3.741425 +step:2314 train loss:3.813749 +step:2315 train loss:3.831303 +step:2316 train loss:3.867620 +step:2317 train loss:3.733412 +step:2318 train loss:3.779046 +step:2319 train loss:3.833129 +step:2320 train loss:3.799045 +step:2321 train loss:3.770926 +step:2322 train loss:3.788013 +step:2323 train loss:3.784035 +step:2324 train loss:3.814564 +step:2325 train loss:3.753145 +step:2326 train loss:3.783943 +step:2327 train loss:3.898013 +step:2328 train loss:3.847528 +step:2329 train loss:3.800390 +step:2330 train loss:3.759910 +step:2331 train loss:3.799627 +step:2332 train loss:3.723934 +step:2333 train loss:3.785243 +step:2334 train loss:3.764709 +step:2335 train loss:3.751889 +step:2336 train loss:4.006884 +step:2337 
train loss:3.782674 +step:2338 train loss:3.827255 +step:2339 train loss:3.822568 +step:2340 train loss:3.840501 +step:2341 train loss:3.827311 +step:2342 train loss:3.780127 +step:2343 train loss:3.797851 +step:2344 train loss:3.841125 +step:2345 train loss:3.794196 +step:2346 train loss:3.823645 +step:2347 train loss:3.748562 +step:2348 train loss:3.804195 +step:2349 train loss:3.757109 +step:2350 train loss:3.812446 +step:2351 train loss:3.821425 +step:2352 train loss:3.824971 +step:2353 train loss:3.787043 +step:2354 train loss:3.837127 +step:2355 train loss:3.823497 +step:2356 train loss:3.858728 +step:2357 train loss:3.763036 +step:2358 train loss:3.776958 +step:2359 train loss:3.801106 +step:2360 train loss:3.825446 +step:2361 train loss:3.857204 +step:2362 train loss:3.689954 +step:2363 train loss:3.881069 +step:2364 train loss:3.832693 +step:2365 train loss:3.797021 +step:2366 train loss:3.750650 +step:2367 train loss:3.813718 +step:2368 train loss:3.804606 +step:2369 train loss:3.795985 +step:2370 train loss:3.808026 +step:2371 train loss:3.865885 +step:2372 train loss:3.719432 +step:2373 train loss:3.864639 +step:2374 train loss:3.846164 +step:2375 train loss:3.829691 +step:2376 train loss:3.815440 +step:2377 train loss:3.763898 +step:2378 train loss:3.811602 +step:2379 train loss:3.792571 +step:2380 train loss:3.853333 +step:2381 train loss:3.945664 +step:2382 train loss:3.735918 +step:2383 train loss:3.780010 +step:2384 train loss:3.811719 +step:2385 train loss:3.712697 +step:2386 train loss:3.865452 +step:2387 train loss:3.748984 +step:2388 train loss:3.800524 +step:2389 train loss:3.820430 +step:2390 train loss:3.770526 +step:2391 train loss:3.796894 +step:2392 train loss:3.820992 +step:2393 train loss:3.777918 +step:2394 train loss:3.800979 +step:2395 train loss:3.790612 +step:2396 train loss:3.794286 +step:2397 train loss:3.771648 +step:2398 train loss:3.824710 +step:2399 train loss:3.791456 +step:2400 train loss:3.767025 +step:2401 train loss:3.810158 +step:2402 train loss:3.761723 +step:2403 train loss:3.813562 +step:2404 train loss:3.769466 +step:2405 train loss:3.773078 +step:2406 train loss:3.804214 +step:2407 train loss:3.745215 +step:2408 train loss:3.785064 +step:2409 train loss:3.777413 +step:2410 train loss:3.775188 +step:2411 train loss:3.846520 +step:2412 train loss:3.836149 +step:2413 train loss:3.879143 +step:2414 train loss:3.764893 +step:2415 train loss:3.754714 +step:2416 train loss:3.772085 +step:2417 train loss:3.809428 +step:2418 train loss:3.828097 +step:2419 train loss:3.756316 +step:2420 train loss:3.776831 +step:2421 train loss:3.806029 +step:2422 train loss:3.861841 +step:2423 train loss:3.811243 +step:2424 train loss:3.766697 +step:2425 train loss:3.829195 +step:2426 train loss:3.766302 +step:2427 train loss:3.789373 +step:2428 train loss:3.869694 +step:2429 train loss:3.821202 +step:2430 train loss:3.910024 +step:2431 train loss:3.823037 +step:2432 train loss:3.790067 +step:2433 train loss:3.765448 +step:2434 train loss:3.752758 +step:2435 train loss:3.808934 +step:2436 train loss:3.769130 +step:2437 train loss:3.800061 +step:2438 train loss:3.843029 +step:2439 train loss:3.827087 +step:2440 train loss:3.766990 +step:2441 train loss:3.804575 +step:2442 train loss:3.796642 +step:2443 train loss:3.757230 +step:2444 train loss:3.792554 +step:2445 train loss:3.789195 +step:2446 train loss:3.758402 +step:2447 train loss:3.740965 +step:2448 train loss:3.795262 +step:2449 train loss:3.823605 +step:2450 train loss:3.780285 +step:2451 train loss:3.707604 
+step:2452 train loss:3.806085 +step:2453 train loss:3.775674 +step:2454 train loss:3.771446 +step:2455 train loss:3.823244 +step:2456 train loss:3.779636 +step:2457 train loss:3.838080 +step:2458 train loss:3.814434 +step:2459 train loss:3.787482 +step:2460 train loss:3.795118 +step:2461 train loss:3.827697 +step:2462 train loss:3.796282 +step:2463 train loss:3.775423 +step:2464 train loss:3.787675 +step:2465 train loss:3.868954 +step:2466 train loss:3.953197 +step:2467 train loss:3.857562 +step:2468 train loss:3.746749 +step:2469 train loss:3.824258 +step:2470 train loss:3.867321 +step:2471 train loss:3.870140 +step:2472 train loss:3.850645 +step:2473 train loss:3.791638 +step:2474 train loss:3.751580 +step:2475 train loss:3.801764 +step:2476 train loss:3.881691 +step:2477 train loss:3.798250 +step:2478 train loss:3.750330 +step:2479 train loss:3.794919 +step:2480 train loss:3.783802 +step:2481 train loss:3.981891 +step:2482 train loss:3.787648 +step:2483 train loss:3.819719 +step:2484 train loss:3.765274 +step:2485 train loss:3.751592 +step:2486 train loss:3.788775 +step:2487 train loss:3.823290 +step:2488 train loss:3.740277 +step:2489 train loss:3.842769 +step:2490 train loss:3.765625 +step:2491 train loss:3.777474 +step:2492 train loss:3.821429 +step:2493 train loss:3.855359 +step:2494 train loss:3.778170 +step:2495 train loss:3.810268 +step:2496 train loss:3.785347 +step:2497 train loss:3.801326 +step:2498 train loss:3.805615 +step:2499 train loss:3.800409 +step:2500 validation loss:3.722103 total_sharp:1.0205e-02 L1_sharp:1.0571e-02 L2_sharp:9.9630e-04 L3_sharp:1.6721e-03 L4_sharp:1.3280e-03 L5_sharp:2.3933e-03 L6_sharp:1.5649e-03 L7_sharp:2.7001e-03 L8_sharp:2.7590e-03 L9_sharp:2.4563e-03 L10_sharp:1.9617e-03 L11_sharp:1.9146e-03 L12_sharp:4.7511e-03 total_fnorm:2.1444e+00 total_l1_linf:1.8991e+04 total_spectral:2.1444e+00 L1_fnorm:4.3294e-01 L2_fnorm:4.6479e-01 L3_fnorm:4.7895e-01 L4_fnorm:4.9742e-01 L5_fnorm:4.9538e-01 L6_fnorm:5.0177e-01 L7_fnorm:4.9701e-01 L8_fnorm:5.0535e-01 L9_fnorm:5.0535e-01 L10_fnorm:5.0507e-01 L11_fnorm:5.0130e-01 L12_fnorm:4.8625e-01 L1_l1linf:6.0118e-01 L2_l1linf:5.6834e-01 L3_l1linf:5.7600e-01 L4_l1linf:5.7496e-01 L5_l1linf:5.6461e-01 L6_l1linf:5.3175e-01 L7_l1linf:5.2829e-01 L8_l1linf:5.2006e-01 L9_l1linf:5.2656e-01 L10_l1linf:5.5688e-01 L11_l1linf:5.7629e-01 L12_l1linf:5.2993e-01 L1_spectral:8.0979e-02 L2_spectral:8.1369e-02 L3_spectral:8.2178e-02 L4_spectral:7.0559e-02 L5_spectral:6.9123e-02 L6_spectral:5.9675e-02 L7_spectral:5.3642e-02 L8_spectral:6.0308e-02 L9_spectral:5.9516e-02 L10_spectral:6.1561e-02 L11_spectral:6.3771e-02 L12_spectral:8.5483e-02 ip_v_neg_g:2.6293e-02 cos_v_neg_g:1.6234e-02 v_norm:2.1444e+00 g_norm:7.5527e-01 hv_norm:3.8590e-01 cos_v_hv:5.6706e-02 hg_norm:2.1179e+00 cos_g_hg:4.7017e-01 v_par:1.0744e-03 v_perp:2.1444e+00 L1_cos_v_neg_g:2.5501e-02 L1_v_norm:4.3294e-01 L2_cos_v_neg_g:2.2204e-02 L2_v_norm:4.6479e-01 L3_cos_v_neg_g:1.7899e-02 L3_v_norm:4.7895e-01 L4_cos_v_neg_g:1.6230e-02 L4_v_norm:4.9742e-01 L5_cos_v_neg_g:2.4705e-02 L5_v_norm:4.9538e-01 L6_cos_v_neg_g:1.8913e-02 L6_v_norm:5.0177e-01 L7_cos_v_neg_g:2.3099e-02 L7_v_norm:4.9701e-01 L8_cos_v_neg_g:2.5777e-02 L8_v_norm:5.0535e-01 L9_cos_v_neg_g:2.4510e-02 L9_v_norm:5.0535e-01 L10_cos_v_neg_g:2.5846e-02 L10_v_norm:5.0507e-01 L11_cos_v_neg_g:2.5462e-02 L11_v_norm:5.0130e-01 L12_cos_v_neg_g:2.6766e-02 L12_v_norm:4.8625e-01 +step:2500 train loss:3.747832 +step:2501 train loss:3.816286 +step:2502 train loss:3.809052 +step:2503 train loss:3.730758 +step:2504 train 
loss:3.766894 +step:2505 train loss:3.789188 +step:2506 train loss:3.751108 +step:2507 train loss:3.777878 +step:2508 train loss:3.730169 +step:2509 train loss:3.748503 +step:2510 train loss:3.743234 +step:2511 train loss:3.784373 +step:2512 train loss:3.830721 +step:2513 train loss:3.780396 +step:2514 train loss:3.764509 +step:2515 train loss:3.900014 +step:2516 train loss:3.792598 +step:2517 train loss:3.854043 +step:2518 train loss:3.824537 +step:2519 train loss:3.798559 +step:2520 train loss:3.807132 +step:2521 train loss:3.786364 +step:2522 train loss:3.817095 +step:2523 train loss:3.735103 +step:2524 train loss:3.792681 +step:2525 train loss:3.781116 +step:2526 train loss:3.832606 +step:2527 train loss:3.821953 +step:2528 train loss:3.807580 +step:2529 train loss:3.823690 +step:2530 train loss:3.803788 +step:2531 train loss:3.741440 +step:2532 train loss:3.838880 +step:2533 train loss:3.729239 +step:2534 train loss:3.826365 +step:2535 train loss:3.779637 +step:2536 train loss:3.703782 +step:2537 train loss:3.817959 +step:2538 train loss:3.795330 +step:2539 train loss:3.815233 +step:2540 train loss:3.750257 +step:2541 train loss:3.774982 +step:2542 train loss:3.784946 +step:2543 train loss:3.777447 +step:2544 train loss:3.761850 +step:2545 train loss:3.750210 +step:2546 train loss:3.716647 +step:2547 train loss:3.761228 +step:2548 train loss:3.784241 +step:2549 train loss:3.784518 +step:2550 train loss:3.918205 +step:2551 train loss:3.992924 +step:2552 train loss:3.727141 +step:2553 train loss:3.760037 +step:2554 train loss:3.906357 +step:2555 train loss:3.794800 +step:2556 train loss:3.719919 +step:2557 train loss:3.811115 +step:2558 train loss:3.809043 +step:2559 train loss:3.759477 +step:2560 train loss:3.744126 +step:2561 train loss:3.839999 +step:2562 train loss:3.793704 +step:2563 train loss:3.730125 +step:2564 train loss:3.800943 +step:2565 train loss:3.780018 +step:2566 train loss:3.759772 +step:2567 train loss:3.739132 +step:2568 train loss:3.793290 +step:2569 train loss:3.800877 +step:2570 train loss:3.749643 +step:2571 train loss:3.834772 +step:2572 train loss:3.794461 +step:2573 train loss:3.727981 +step:2574 train loss:3.777611 +step:2575 train loss:3.823020 +step:2576 train loss:3.773046 +step:2577 train loss:3.737444 +step:2578 train loss:3.777536 +step:2579 train loss:3.755497 +step:2580 train loss:3.724779 +step:2581 train loss:3.738476 +step:2582 train loss:3.744896 +step:2583 train loss:3.772193 +step:2584 train loss:3.785592 +step:2585 train loss:3.747289 +step:2586 train loss:3.770836 +step:2587 train loss:3.704034 +step:2588 train loss:3.736645 +step:2589 train loss:3.813342 +step:2590 train loss:3.737577 +step:2591 train loss:3.795933 +step:2592 train loss:3.846882 +step:2593 train loss:3.803161 +step:2594 train loss:3.764508 +step:2595 train loss:3.771314 +step:2596 train loss:3.810285 +step:2597 train loss:3.692621 +step:2598 train loss:3.849298 +step:2599 train loss:3.799222 +step:2600 train loss:3.828951 +step:2601 train loss:3.765252 +step:2602 train loss:3.794433 +step:2603 train loss:3.789685 +step:2604 train loss:3.715559 +step:2605 train loss:3.843746 +step:2606 train loss:3.788952 +step:2607 train loss:3.747066 +step:2608 train loss:3.721219 +step:2609 train loss:3.749093 +step:2610 train loss:3.772247 +step:2611 train loss:3.810408 +step:2612 train loss:3.771377 +step:2613 train loss:3.748009 +step:2614 train loss:3.735609 +step:2615 train loss:3.732024 +step:2616 train loss:3.809102 +step:2617 train loss:3.767212 +step:2618 train loss:3.735145 
+step:2619 train loss:3.759037 +step:2620 train loss:3.747640 +step:2621 train loss:3.761986 +step:2622 train loss:3.836597 +step:2623 train loss:3.704329 +step:2624 train loss:3.722928 +step:2625 train loss:3.795311 +step:2626 train loss:3.788391 +step:2627 train loss:3.766900 +step:2628 train loss:3.815526 +step:2629 train loss:3.766960 +step:2630 train loss:3.759662 +step:2631 train loss:3.788524 +step:2632 train loss:3.757691 +step:2633 train loss:3.739331 +step:2634 train loss:3.786205 +step:2635 train loss:3.769322 +step:2636 train loss:3.818584 +step:2637 train loss:3.770303 +step:2638 train loss:3.753082 +step:2639 train loss:3.806560 +step:2640 train loss:3.722179 +step:2641 train loss:3.782495 +step:2642 train loss:3.703635 +step:2643 train loss:3.707456 +step:2644 train loss:3.794155 +step:2645 train loss:3.735127 +step:2646 train loss:3.765434 +step:2647 train loss:3.784749 +step:2648 train loss:3.820576 +step:2649 train loss:3.733835 +step:2650 train loss:3.723986 +step:2651 train loss:3.764929 +step:2652 train loss:3.735452 +step:2653 train loss:3.803631 +step:2654 train loss:3.760255 +step:2655 train loss:3.749644 +step:2656 train loss:3.771904 +step:2657 train loss:3.795775 +step:2658 train loss:3.803652 +step:2659 train loss:3.782462 +step:2660 train loss:3.768142 +step:2661 train loss:3.814562 +step:2662 train loss:3.792313 +step:2663 train loss:3.766288 +step:2664 train loss:3.775709 +step:2665 train loss:3.727909 +step:2666 train loss:3.755227 +step:2667 train loss:3.765161 +step:2668 train loss:3.743403 +step:2669 train loss:3.759555 +step:2670 train loss:3.779611 +step:2671 train loss:3.751031 +step:2672 train loss:3.770191 +step:2673 train loss:3.707946 +step:2674 train loss:3.800574 +step:2675 train loss:3.775120 +step:2676 train loss:3.792915 +step:2677 train loss:3.775228 +step:2678 train loss:3.759140 +step:2679 train loss:3.740903 +step:2680 train loss:3.724278 +step:2681 train loss:3.696870 +step:2682 train loss:3.782549 +step:2683 train loss:3.758676 +step:2684 train loss:3.787626 +step:2685 train loss:3.701147 +step:2686 train loss:3.721190 +step:2687 train loss:3.806630 +step:2688 train loss:3.817658 +step:2689 train loss:3.723236 +step:2690 train loss:3.804196 +step:2691 train loss:3.774493 +step:2692 train loss:3.797840 +step:2693 train loss:3.851009 +step:2694 train loss:3.747348 +step:2695 train loss:3.767790 +step:2696 train loss:3.769599 +step:2697 train loss:3.761515 +step:2698 train loss:3.772987 +step:2699 train loss:3.786831 +step:2700 train loss:3.759146 +step:2701 train loss:3.827246 +step:2702 train loss:3.762429 +step:2703 train loss:3.722749 +step:2704 train loss:3.791844 +step:2705 train loss:3.786817 +step:2706 train loss:3.726119 +step:2707 train loss:3.684269 +step:2708 train loss:3.781811 +step:2709 train loss:3.762218 +step:2710 train loss:3.766083 +step:2711 train loss:3.735646 +step:2712 train loss:3.796730 +step:2713 train loss:3.800330 +step:2714 train loss:3.747372 +step:2715 train loss:3.738145 +step:2716 train loss:3.804980 +step:2717 train loss:3.769655 +step:2718 train loss:3.760767 +step:2719 train loss:3.765740 +step:2720 train loss:3.728322 +step:2721 train loss:3.812920 +step:2722 train loss:3.737420 +step:2723 train loss:3.725189 +step:2724 train loss:3.746374 +step:2725 train loss:3.747795 +step:2726 train loss:3.721233 +step:2727 train loss:3.778191 +step:2728 train loss:3.718041 +step:2729 train loss:3.846601 +step:2730 train loss:3.789906 +step:2731 train loss:3.829174 +step:2732 train loss:3.738160 +step:2733 train 
loss:3.735466 +step:2734 train loss:3.785071 +step:2735 train loss:3.782898 +step:2736 train loss:3.705451 +step:2737 train loss:3.762629 +step:2738 train loss:3.817987 +step:2739 train loss:3.736782 +step:2740 train loss:3.738757 +step:2741 train loss:3.724351 +step:2742 train loss:3.650672 +step:2743 train loss:3.764816 +step:2744 train loss:3.786009 +step:2745 train loss:3.737779 +step:2746 train loss:3.755861 +step:2747 train loss:3.740732 +step:2748 train loss:3.699388 +step:2749 train loss:3.767479 +step:2750 validation loss:3.692114 +step:2750 train loss:3.773658 +step:2751 train loss:3.796711 +step:2752 train loss:3.780293 +step:2753 train loss:3.772807 +step:2754 train loss:3.710654 +step:2755 train loss:3.779850 +step:2756 train loss:3.751411 +step:2757 train loss:3.738550 +step:2758 train loss:3.767183 +step:2759 train loss:3.780422 +step:2760 train loss:3.691142 +step:2761 train loss:3.702775 +step:2762 train loss:3.720830 +step:2763 train loss:3.740181 +step:2764 train loss:3.683591 +step:2765 train loss:3.730599 +step:2766 train loss:3.823752 +step:2767 train loss:3.695577 +step:2768 train loss:3.757631 +step:2769 train loss:3.729429 +step:2770 train loss:3.751040 +step:2771 train loss:3.776202 +step:2772 train loss:3.741904 +step:2773 train loss:3.742736 +step:2774 train loss:3.733488 +step:2775 train loss:3.747524 +step:2776 train loss:3.700523 +step:2777 train loss:3.735125 +step:2778 train loss:3.742467 +step:2779 train loss:3.769703 +step:2780 train loss:3.739229 +step:2781 train loss:3.725710 +step:2782 train loss:3.714170 +step:2783 train loss:3.746069 +step:2784 train loss:3.755178 +step:2785 train loss:3.824451 +step:2786 train loss:3.787367 +step:2787 train loss:3.748798 +step:2788 train loss:3.746172 +step:2789 train loss:3.740527 +step:2790 train loss:3.681592 +step:2791 train loss:3.778296 +step:2792 train loss:3.767796 +step:2793 train loss:3.733959 +step:2794 train loss:3.742217 +step:2795 train loss:3.759179 +step:2796 train loss:3.749255 +step:2797 train loss:3.798486 +step:2798 train loss:3.788322 +step:2799 train loss:3.696511 +step:2800 train loss:3.740368 +step:2801 train loss:3.776088 +step:2802 train loss:3.803969 +step:2803 train loss:3.775167 +step:2804 train loss:3.707679 +step:2805 train loss:3.748811 +step:2806 train loss:3.737414 +step:2807 train loss:3.769574 +step:2808 train loss:3.709472 +step:2809 train loss:3.780445 +step:2810 train loss:3.767714 +step:2811 train loss:3.758436 +step:2812 train loss:3.806379 +step:2813 train loss:3.774797 +step:2814 train loss:3.765255 +step:2815 train loss:3.775017 +step:2816 train loss:3.776818 +step:2817 train loss:3.712929 +step:2818 train loss:3.819941 +step:2819 train loss:3.747408 +step:2820 train loss:3.743219 +step:2821 train loss:3.719339 +step:2822 train loss:3.765068 +step:2823 train loss:3.712271 +step:2824 train loss:3.610491 +step:2825 train loss:3.761773 +step:2826 train loss:3.757282 +step:2827 train loss:3.781603 +step:2828 train loss:3.776013 +step:2829 train loss:3.762133 +step:2830 train loss:3.791787 +step:2831 train loss:3.734487 +step:2832 train loss:3.701559 +step:2833 train loss:3.762176 +step:2834 train loss:3.712142 +step:2835 train loss:3.747546 +step:2836 train loss:3.750964 +step:2837 train loss:3.751521 +step:2838 train loss:3.691065 +step:2839 train loss:3.789044 +step:2840 train loss:3.751214 +step:2841 train loss:3.831347 +step:2842 train loss:3.773166 +step:2843 train loss:3.767262 +step:2844 train loss:3.794838 +step:2845 train loss:3.749347 +step:2846 train loss:3.700158 
+step:2847 train loss:3.791022 +step:2848 train loss:3.748088 +step:2849 train loss:3.738496 +step:2850 train loss:3.793687 +step:2851 train loss:3.748398 +step:2852 train loss:3.833340 +step:2853 train loss:3.744279 +step:2854 train loss:3.693275 +step:2855 train loss:3.770914 +step:2856 train loss:3.690437 +step:2857 train loss:3.796547 +step:2858 train loss:3.753219 +step:2859 train loss:3.764132 +step:2860 train loss:3.731642 +step:2861 train loss:3.711121 +step:2862 train loss:3.741795 +step:2863 train loss:3.724346 +step:2864 train loss:3.730810 +step:2865 train loss:3.805498 +step:2866 train loss:3.821367 +step:2867 train loss:3.758158 +step:2868 train loss:3.753762 +step:2869 train loss:3.715139 +step:2870 train loss:3.801045 +step:2871 train loss:3.800188 +step:2872 train loss:3.761840 +step:2873 train loss:3.767217 +step:2874 train loss:3.747375 +step:2875 train loss:3.697155 +step:2876 train loss:3.747250 +step:2877 train loss:3.726385 +step:2878 train loss:3.743263 +step:2879 train loss:3.712511 +step:2880 train loss:3.729271 +step:2881 train loss:3.719417 +step:2882 train loss:3.654126 +step:2883 train loss:3.738761 +step:2884 train loss:3.809783 +step:2885 train loss:3.701921 +step:2886 train loss:3.754376 +step:2887 train loss:3.775889 +step:2888 train loss:3.749904 +step:2889 train loss:3.732437 +step:2890 train loss:3.706219 +step:2891 train loss:3.746484 +step:2892 train loss:3.750108 +step:2893 train loss:3.733517 +step:2894 train loss:3.706527 +step:2895 train loss:3.755264 +step:2896 train loss:3.800477 +step:2897 train loss:3.783330 +step:2898 train loss:3.915495 +step:2899 train loss:3.670042 +step:2900 train loss:3.747395 +step:2901 train loss:3.693602 +step:2902 train loss:3.695416 +step:2903 train loss:3.711295 +step:2904 train loss:3.737253 +step:2905 train loss:3.798839 +step:2906 train loss:3.769663 +step:2907 train loss:3.942196 +step:2908 train loss:3.693989 +step:2909 train loss:3.773507 +step:2910 train loss:3.742371 +step:2911 train loss:3.770969 +step:2912 train loss:3.728905 +step:2913 train loss:3.763285 +step:2914 train loss:3.792378 +step:2915 train loss:3.788540 +step:2916 train loss:3.741681 +step:2917 train loss:3.778331 +step:2918 train loss:3.767903 +step:2919 train loss:3.714627 +step:2920 train loss:3.766786 +step:2921 train loss:3.717970 +step:2922 train loss:3.744495 +step:2923 train loss:3.811151 +step:2924 train loss:3.744419 +step:2925 train loss:3.698766 +step:2926 train loss:3.788807 +step:2927 train loss:3.696589 +step:2928 train loss:3.668935 +step:2929 train loss:3.684249 +step:2930 train loss:3.699798 +step:2931 train loss:3.855325 +step:2932 train loss:3.776907 +step:2933 train loss:3.740493 +step:2934 train loss:3.731216 +step:2935 train loss:3.754111 +step:2936 train loss:3.705661 +step:2937 train loss:3.726273 +step:2938 train loss:3.741874 +step:2939 train loss:3.816777 +step:2940 train loss:3.714232 +step:2941 train loss:3.750924 +step:2942 train loss:3.709728 +step:2943 train loss:3.988774 +step:2944 train loss:3.818338 +step:2945 train loss:3.778777 +step:2946 train loss:3.787333 +step:2947 train loss:3.745018 +step:2948 train loss:3.704422 +step:2949 train loss:3.794156 +step:2950 train loss:3.746622 +step:2951 train loss:3.649096 +step:2952 train loss:3.718438 +step:2953 train loss:3.627583 +step:2954 train loss:3.721328 +step:2955 train loss:3.794280 +step:2956 train loss:3.740958 +step:2957 train loss:3.741178 +step:2958 train loss:3.693254 +step:2959 train loss:3.717754 +step:2960 train loss:3.811885 +step:2961 train 
loss:3.671682 +step:2962 train loss:3.750060 +step:2963 train loss:3.742723 +step:2964 train loss:3.720331 +step:2965 train loss:3.746958 +step:2966 train loss:3.725310 +step:2967 train loss:3.723279 +step:2968 train loss:3.697408 +step:2969 train loss:3.708289 +step:2970 train loss:3.776465 +step:2971 train loss:3.703472 +step:2972 train loss:3.690229 +step:2973 train loss:3.687508 +step:2974 train loss:3.727225 +step:2975 train loss:3.688219 +step:2976 train loss:3.729788 +step:2977 train loss:3.719728 +step:2978 train loss:3.800043 +step:2979 train loss:3.783548 +step:2980 train loss:3.791057 +step:2981 train loss:3.746642 +step:2982 train loss:3.735638 +step:2983 train loss:3.688030 +step:2984 train loss:3.662654 +step:2985 train loss:3.775809 +step:2986 train loss:3.669989 +step:2987 train loss:3.798831 +step:2988 train loss:3.722628 +step:2989 train loss:3.754585 +step:2990 train loss:3.702816 +step:2991 train loss:3.772226 +step:2992 train loss:3.765285 +step:2993 train loss:3.732801 +step:2994 train loss:3.721505 +step:2995 train loss:3.790635 +step:2996 train loss:3.714339 +step:2997 train loss:3.623969 +step:2998 train loss:3.739061 +step:2999 train loss:3.782699 +step:3000 validation loss:3.663772 total_sharp:7.2047e-03 L1_sharp:1.0207e-02 L2_sharp:1.8226e-03 L3_sharp:1.4183e-03 L4_sharp:1.1888e-03 L5_sharp:1.1511e-03 L6_sharp:1.3677e-03 L7_sharp:1.7228e-03 L8_sharp:1.5570e-03 L9_sharp:1.4081e-03 L10_sharp:1.2203e-03 L11_sharp:1.3097e-03 L12_sharp:4.0835e-03 total_fnorm:2.2314e+00 total_l1_linf:1.9867e+04 total_spectral:2.2314e+00 L1_fnorm:4.9083e-01 L2_fnorm:4.9809e-01 L3_fnorm:5.1241e-01 L4_fnorm:5.2346e-01 L5_fnorm:5.2056e-01 L6_fnorm:5.2739e-01 L7_fnorm:5.2229e-01 L8_fnorm:5.3005e-01 L9_fnorm:5.3309e-01 L10_fnorm:5.3694e-01 L11_fnorm:5.3402e-01 L12_fnorm:5.2110e-01 L1_l1linf:5.9761e-01 L2_l1linf:5.6903e-01 L3_l1linf:5.8464e-01 L4_l1linf:5.6075e-01 L5_l1linf:5.8424e-01 L6_l1linf:5.4770e-01 L7_l1linf:5.5043e-01 L8_l1linf:5.6163e-01 L9_l1linf:5.7224e-01 L10_l1linf:5.6754e-01 L11_l1linf:5.6954e-01 L12_l1linf:5.9887e-01 L1_spectral:8.5887e-02 L2_spectral:8.4843e-02 L3_spectral:8.4141e-02 L4_spectral:6.8656e-02 L5_spectral:6.7714e-02 L6_spectral:5.8056e-02 L7_spectral:5.3469e-02 L8_spectral:5.7253e-02 L9_spectral:6.2797e-02 L10_spectral:6.5105e-02 L11_spectral:7.0773e-02 L12_spectral:9.1975e-02 ip_v_neg_g:2.2537e-02 cos_v_neg_g:1.3975e-02 v_norm:2.2314e+00 g_norm:7.2273e-01 hv_norm:3.3382e-01 cos_v_hv:4.8159e-02 hg_norm:1.3987e+00 cos_g_hg:4.6789e-01 v_par:1.0781e-03 v_perp:2.2314e+00 L1_cos_v_neg_g:2.8381e-02 L1_v_norm:4.9083e-01 L2_cos_v_neg_g:2.7372e-02 L2_v_norm:4.9809e-01 L3_cos_v_neg_g:1.8872e-02 L3_v_norm:5.1241e-01 L4_cos_v_neg_g:1.5449e-02 L4_v_norm:5.2346e-01 L5_cos_v_neg_g:1.7556e-02 L5_v_norm:5.2056e-01 L6_cos_v_neg_g:1.4228e-02 L6_v_norm:5.2739e-01 L7_cos_v_neg_g:1.6143e-02 L7_v_norm:5.2229e-01 L8_cos_v_neg_g:1.6412e-02 L8_v_norm:5.3005e-01 L9_cos_v_neg_g:1.4798e-02 L9_v_norm:5.3309e-01 L10_cos_v_neg_g:1.7534e-02 L10_v_norm:5.3694e-01 L11_cos_v_neg_g:1.7859e-02 L11_v_norm:5.3402e-01 L12_cos_v_neg_g:2.5186e-02 L12_v_norm:5.2110e-01 +step:3000 train loss:3.677081 +step:3001 train loss:3.729270 +step:3002 train loss:3.726622 +step:3003 train loss:3.724943 +step:3004 train loss:3.753335 +step:3005 train loss:3.653972 +step:3006 train loss:3.701442 +step:3007 train loss:3.734225 +step:3008 train loss:3.780245 +step:3009 train loss:3.738021 +step:3010 train loss:3.755284 +step:3011 train loss:3.742934 +step:3012 train loss:3.719102 +step:3013 train loss:3.762126 +step:3014 
train loss:3.717152 +step:3015 train loss:3.715254 +step:3016 train loss:3.733890 +step:3017 train loss:3.757052 +step:3018 train loss:3.687188 +step:3019 train loss:3.722637 +step:3020 train loss:3.746941 +step:3021 train loss:3.706284 +step:3022 train loss:3.799798 +step:3023 train loss:3.745474 +step:3024 train loss:3.730536 +step:3025 train loss:3.741515 +step:3026 train loss:3.716608 +step:3027 train loss:3.694389 +step:3028 train loss:3.741430 +step:3029 train loss:3.731737 +step:3030 train loss:3.703504 +step:3031 train loss:3.686498 +step:3032 train loss:3.676805 +step:3033 train loss:3.701382 +step:3034 train loss:3.749918 +step:3035 train loss:3.726431 +step:3036 train loss:3.688854 +step:3037 train loss:3.649229 +step:3038 train loss:3.764376 +step:3039 train loss:3.648177 +step:3040 train loss:3.633376 +step:3041 train loss:3.758364 +step:3042 train loss:3.694344 +step:3043 train loss:3.754976 +step:3044 train loss:3.653137 +step:3045 train loss:3.695339 +step:3046 train loss:3.669766 +step:3047 train loss:3.697526 +step:3048 train loss:3.666525 +step:3049 train loss:3.744026 +step:3050 train loss:3.633114 +step:3051 train loss:3.648974 +step:3052 train loss:3.669933 +step:3053 train loss:3.741215 +step:3054 train loss:3.814660 +step:3055 train loss:3.653308 +step:3056 train loss:3.684833 +step:3057 train loss:3.722327 +step:3058 train loss:3.668718 +step:3059 train loss:3.694578 +step:3060 train loss:3.692880 +step:3061 train loss:3.679398 +step:3062 train loss:3.731416 +step:3063 train loss:3.713778 +step:3064 train loss:3.741261 +step:3065 train loss:3.756065 +step:3066 train loss:3.653780 +step:3067 train loss:3.707986 +step:3068 train loss:3.755239 +step:3069 train loss:3.771428 +step:3070 train loss:3.701549 +step:3071 train loss:3.722223 +step:3072 train loss:3.721230 +step:3073 train loss:3.761728 +step:3074 train loss:3.693936 +step:3075 train loss:3.728867 +step:3076 train loss:3.662600 +step:3077 train loss:3.660077 +step:3078 train loss:3.692584 +step:3079 train loss:3.736223 +step:3080 train loss:3.730326 +step:3081 train loss:3.780294 +step:3082 train loss:3.751959 +step:3083 train loss:3.680069 +step:3084 train loss:3.762231 +step:3085 train loss:3.687865 +step:3086 train loss:3.749281 +step:3087 train loss:3.714839 +step:3088 train loss:3.799411 +step:3089 train loss:3.674031 +step:3090 train loss:3.745427 +step:3091 train loss:3.669214 +step:3092 train loss:3.687175 +step:3093 train loss:3.713789 +step:3094 train loss:3.699858 +step:3095 train loss:3.779182 +step:3096 train loss:3.711462 +step:3097 train loss:3.720970 +step:3098 train loss:3.699710 +step:3099 train loss:3.706884 +step:3100 train loss:3.735763 +step:3101 train loss:3.817268 +step:3102 train loss:3.742535 +step:3103 train loss:3.667617 +step:3104 train loss:3.748948 +step:3105 train loss:3.722394 +step:3106 train loss:3.719759 +step:3107 train loss:3.703742 +step:3108 train loss:3.674045 +step:3109 train loss:3.730796 +step:3110 train loss:3.659418 +step:3111 train loss:3.695155 +step:3112 train loss:3.634732 +step:3113 train loss:3.754446 +step:3114 train loss:3.667442 +step:3115 train loss:3.710225 +step:3116 train loss:3.592028 +step:3117 train loss:3.610871 +step:3118 train loss:3.709371 +step:3119 train loss:3.718159 +step:3120 train loss:3.718948 +step:3121 train loss:3.664545 +step:3122 train loss:3.745574 +step:3123 train loss:3.663054 +step:3124 train loss:3.726181 +step:3125 train loss:3.734465 +step:3126 train loss:3.842992 +step:3127 train loss:3.689451 +step:3128 train loss:3.716934 
+step:3129 train loss:3.699029 +step:3130 train loss:3.676325 +step:3131 train loss:3.751665 +step:3132 train loss:3.738974 +step:3133 train loss:3.712924 +step:3134 train loss:3.602924 +step:3135 train loss:3.698903 +step:3136 train loss:3.668823 +step:3137 train loss:3.804053 +step:3138 train loss:3.703937 +step:3139 train loss:3.683501 +step:3140 train loss:3.704453 +step:3141 train loss:3.707632 +step:3142 train loss:3.644433 +step:3143 train loss:3.727744 +step:3144 train loss:3.676162 +step:3145 train loss:3.663631 +step:3146 train loss:3.677222 +step:3147 train loss:3.787092 +step:3148 train loss:3.690424 +step:3149 train loss:3.745311 +step:3150 train loss:3.732250 +step:3151 train loss:3.700800 +step:3152 train loss:3.700220 +step:3153 train loss:3.656760 +step:3154 train loss:3.740111 +step:3155 train loss:3.680698 +step:3156 train loss:3.735273 +step:3157 train loss:3.736636 +step:3158 train loss:3.709549 +step:3159 train loss:3.645951 +step:3160 train loss:3.696431 +step:3161 train loss:3.669489 +step:3162 train loss:3.723192 +step:3163 train loss:3.706539 +step:3164 train loss:3.683365 +step:3165 train loss:3.700496 +step:3166 train loss:3.738830 +step:3167 train loss:3.701239 +step:3168 train loss:3.779232 +step:3169 train loss:3.696717 +step:3170 train loss:3.678646 +step:3171 train loss:3.670795 +step:3172 train loss:3.674430 +step:3173 train loss:3.617200 +step:3174 train loss:3.729481 +step:3175 train loss:3.698187 +step:3176 train loss:3.714200 +step:3177 train loss:3.677050 +step:3178 train loss:3.658963 +step:3179 train loss:3.731099 +step:3180 train loss:3.664548 +step:3181 train loss:3.744491 +step:3182 train loss:3.752936 +step:3183 train loss:3.688488 +step:3184 train loss:3.689840 +step:3185 train loss:3.747565 +step:3186 train loss:3.707705 +step:3187 train loss:3.725124 +step:3188 train loss:3.768069 +step:3189 train loss:3.712833 +step:3190 train loss:3.668622 +step:3191 train loss:3.670482 +step:3192 train loss:3.638240 +step:3193 train loss:3.716048 +step:3194 train loss:3.680645 +step:3195 train loss:3.665617 +step:3196 train loss:3.713220 +step:3197 train loss:3.679660 +step:3198 train loss:3.703189 +step:3199 train loss:3.692149 +step:3200 train loss:3.702361 +step:3201 train loss:3.661259 +step:3202 train loss:3.726903 +step:3203 train loss:3.789511 +step:3204 train loss:3.752506 +step:3205 train loss:3.597116 +step:3206 train loss:3.879224 +step:3207 train loss:3.638172 +step:3208 train loss:3.702692 +step:3209 train loss:3.692184 +step:3210 train loss:3.675788 +step:3211 train loss:3.703480 +step:3212 train loss:3.714713 +step:3213 train loss:3.652633 +step:3214 train loss:3.760257 +step:3215 train loss:3.763385 +step:3216 train loss:3.634996 +step:3217 train loss:3.712340 +step:3218 train loss:3.752459 +step:3219 train loss:3.670494 +step:3220 train loss:3.741668 +step:3221 train loss:3.652869 +step:3222 train loss:3.696635 +step:3223 train loss:3.713223 +step:3224 train loss:3.722582 +step:3225 train loss:3.646930 +step:3226 train loss:3.682399 +step:3227 train loss:3.711703 +step:3228 train loss:3.705612 +step:3229 train loss:3.740505 +step:3230 train loss:3.752216 +step:3231 train loss:3.693264 +step:3232 train loss:3.700343 +step:3233 train loss:3.672781 +step:3234 train loss:3.662474 +step:3235 train loss:3.664612 +step:3236 train loss:3.684159 +step:3237 train loss:3.685388 +step:3238 train loss:3.706060 +step:3239 train loss:3.611232 +step:3240 train loss:3.737561 +step:3241 train loss:3.720340 +step:3242 train loss:3.796224 +step:3243 train 
loss:3.731123 +step:3244 train loss:3.739740 +step:3245 train loss:3.642196 +step:3246 train loss:3.765062 +step:3247 train loss:3.707728 +step:3248 train loss:3.731297 +step:3249 train loss:3.673214 +step:3250 validation loss:3.637133 +step:3250 train loss:3.671477 +step:3251 train loss:3.782408 +step:3252 train loss:3.711617 +step:3253 train loss:3.709754 +step:3254 train loss:3.779645 +step:3255 train loss:3.720207 +step:3256 train loss:3.712003 +step:3257 train loss:3.693148 +step:3258 train loss:3.623427 +step:3259 train loss:3.604354 +step:3260 train loss:3.717054 +step:3261 train loss:3.698445 +step:3262 train loss:3.686777 +step:3263 train loss:3.675021 +step:3264 train loss:3.783617 +step:3265 train loss:3.695313 +step:3266 train loss:3.724626 +step:3267 train loss:3.684434 +step:3268 train loss:3.685556 +step:3269 train loss:3.698482 +step:3270 train loss:3.727051 +step:3271 train loss:3.689142 +step:3272 train loss:3.669487 +step:3273 train loss:3.677426 +step:3274 train loss:3.813926 +step:3275 train loss:3.684395 +step:3276 train loss:3.751338 +step:3277 train loss:3.692334 +step:3278 train loss:3.670579 +step:3279 train loss:3.694504 +step:3280 train loss:3.719628 +step:3281 train loss:3.645506 +step:3282 train loss:3.718488 +step:3283 train loss:3.688642 +step:3284 train loss:3.651574 +step:3285 train loss:3.670452 +step:3286 train loss:3.698034 +step:3287 train loss:3.632982 +step:3288 train loss:3.716615 +step:3289 train loss:3.658835 +step:3290 train loss:3.697128 +step:3291 train loss:3.653883 +step:3292 train loss:3.676908 +step:3293 train loss:3.717936 +step:3294 train loss:3.734649 +step:3295 train loss:3.643414 +step:3296 train loss:3.702166 +step:3297 train loss:3.656676 +step:3298 train loss:3.662270 +step:3299 train loss:3.791146 +step:3300 train loss:3.639254 +step:3301 train loss:3.714306 +step:3302 train loss:3.691845 +step:3303 train loss:3.721833 +step:3304 train loss:3.679578 +step:3305 train loss:3.775653 +step:3306 train loss:3.700477 +step:3307 train loss:3.719415 +step:3308 train loss:3.677658 +step:3309 train loss:3.733267 +step:3310 train loss:3.651613 +step:3311 train loss:3.700243 +step:3312 train loss:3.669623 +step:3313 train loss:3.702268 +step:3314 train loss:3.703636 +step:3315 train loss:3.778314 +step:3316 train loss:3.630898 +step:3317 train loss:3.719639 +step:3318 train loss:3.735455 +step:3319 train loss:3.659130 +step:3320 train loss:3.816001 +step:3321 train loss:3.718874 +step:3322 train loss:3.720243 +step:3323 train loss:3.825238 +step:3324 train loss:3.740847 +step:3325 train loss:3.713039 +step:3326 train loss:3.706178 +step:3327 train loss:3.720654 +step:3328 train loss:3.697389 +step:3329 train loss:3.698141 +step:3330 train loss:3.688118 +step:3331 train loss:3.733074 +step:3332 train loss:3.754686 +step:3333 train loss:3.719639 +step:3334 train loss:3.652440 +step:3335 train loss:3.666421 +step:3336 train loss:3.701104 +step:3337 train loss:3.699518 +step:3338 train loss:3.685452 +step:3339 train loss:3.681563 +step:3340 train loss:3.719180 +step:3341 train loss:3.670541 +step:3342 train loss:3.713588 +step:3343 train loss:3.654839 +step:3344 train loss:3.710243 +step:3345 train loss:3.665052 +step:3346 train loss:3.676153 +step:3347 train loss:3.683372 +step:3348 train loss:3.699697 +step:3349 train loss:3.689365 +step:3350 train loss:3.717858 +step:3351 train loss:3.770304 +step:3352 train loss:3.709648 +step:3353 train loss:3.809733 +step:3354 train loss:3.651701 +step:3355 train loss:3.758320 +step:3356 train loss:3.708279 
+step:3357 train loss:3.717190 +step:3358 train loss:3.658002 +step:3359 train loss:3.689689 +step:3360 train loss:3.685617 +step:3361 train loss:3.688622 +step:3362 train loss:3.675663 +step:3363 train loss:3.676590 +step:3364 train loss:3.658509 +step:3365 train loss:3.695291 +step:3366 train loss:3.725737 +step:3367 train loss:3.680904 +step:3368 train loss:3.776853 +step:3369 train loss:3.688175 +step:3370 train loss:3.763333 +step:3371 train loss:3.739788 +step:3372 train loss:3.708151 +step:3373 train loss:3.715409 +step:3374 train loss:3.765022 +step:3375 train loss:3.692241 +step:3376 train loss:3.706882 +step:3377 train loss:3.686231 +step:3378 train loss:3.660918 +step:3379 train loss:3.741691 +step:3380 train loss:3.721370 +step:3381 train loss:3.705844 +step:3382 train loss:3.722036 +step:3383 train loss:3.729815 +step:3384 train loss:3.661281 +step:3385 train loss:3.708954 +step:3386 train loss:3.686535 +step:3387 train loss:3.761486 +step:3388 train loss:3.665658 +step:3389 train loss:3.866644 +step:3390 train loss:3.617757 +step:3391 train loss:3.705123 +step:3392 train loss:3.691114 +step:3393 train loss:3.713621 +step:3394 train loss:3.665556 +step:3395 train loss:3.735782 +step:3396 train loss:3.647549 +step:3397 train loss:3.722920 +step:3398 train loss:3.688896 +step:3399 train loss:3.704682 +step:3400 train loss:3.653217 +step:3401 train loss:3.689814 +step:3402 train loss:3.846477 +step:3403 train loss:3.738579 +step:3404 train loss:3.856497 +step:3405 train loss:3.707516 +step:3406 train loss:3.683031 +step:3407 train loss:3.681481 +step:3408 train loss:3.661048 +step:3409 train loss:3.625360 +step:3410 train loss:3.659472 +step:3411 train loss:3.726672 +step:3412 train loss:3.649771 +step:3413 train loss:3.647354 +step:3414 train loss:3.683096 +step:3415 train loss:3.658307 +step:3416 train loss:3.660322 +step:3417 train loss:3.742864 +step:3418 train loss:3.740485 +step:3419 train loss:3.697877 +step:3420 train loss:3.675620 +step:3421 train loss:3.708329 +step:3422 train loss:3.726809 +step:3423 train loss:3.743252 +step:3424 train loss:3.620857 +step:3425 train loss:3.647128 +step:3426 train loss:3.643678 +step:3427 train loss:3.703490 +step:3428 train loss:3.625913 +step:3429 train loss:3.690800 +step:3430 train loss:3.658950 +step:3431 train loss:3.711773 +step:3432 train loss:3.694193 +step:3433 train loss:3.660619 +step:3434 train loss:3.748085 +step:3435 train loss:3.687603 +step:3436 train loss:3.773804 +step:3437 train loss:3.606885 +step:3438 train loss:3.710016 +step:3439 train loss:3.683945 +step:3440 train loss:3.777595 +step:3441 train loss:3.669465 +step:3442 train loss:3.739374 +step:3443 train loss:3.671307 +step:3444 train loss:3.692119 +step:3445 train loss:3.734315 +step:3446 train loss:3.642871 +step:3447 train loss:3.715709 +step:3448 train loss:3.670265 +step:3449 train loss:3.702564 +step:3450 train loss:3.614882 +step:3451 train loss:3.730056 +step:3452 train loss:3.682536 +step:3453 train loss:3.738260 +step:3454 train loss:3.761445 +step:3455 train loss:3.822317 +step:3456 train loss:3.767229 +step:3457 train loss:3.766412 +step:3458 train loss:3.680757 +step:3459 train loss:3.699465 +step:3460 train loss:3.639282 +step:3461 train loss:3.698039 +step:3462 train loss:3.701880 +step:3463 train loss:3.673240 +step:3464 train loss:3.725351 +step:3465 train loss:3.653545 +step:3466 train loss:3.722378 +step:3467 train loss:3.678535 +step:3468 train loss:3.690171 +step:3469 train loss:3.701757 +step:3470 train loss:3.683294 +step:3471 train 
loss:3.722912 +step:3472 train loss:3.607835 +step:3473 train loss:3.732297 +step:3474 train loss:3.626267 +step:3475 train loss:3.708598 +step:3476 train loss:3.677837 +step:3477 train loss:3.698536 +step:3478 train loss:3.672718 +step:3479 train loss:3.702946 +step:3480 train loss:3.720085 +step:3481 train loss:3.698439 +step:3482 train loss:3.684329 +step:3483 train loss:3.824355 +step:3484 train loss:3.667704 +step:3485 train loss:3.655450 +step:3486 train loss:3.706913 +step:3487 train loss:3.750620 +step:3488 train loss:3.652559 +step:3489 train loss:3.708272 +step:3490 train loss:3.673771 +step:3491 train loss:3.710741 +step:3492 train loss:3.747110 +step:3493 train loss:3.719255 +step:3494 train loss:3.709457 +step:3495 train loss:3.686698 +step:3496 train loss:3.654701 +step:3497 train loss:3.765689 +step:3498 train loss:3.712384 +step:3499 train loss:3.645852 +step:3500 validation loss:3.611931 total_sharp:7.1810e-03 L1_sharp:1.1108e-02 L2_sharp:4.0350e-03 L3_sharp:2.6265e-03 L4_sharp:1.1721e-03 L5_sharp:1.2098e-03 L6_sharp:1.2203e-03 L7_sharp:1.8009e-03 L8_sharp:1.7293e-03 L9_sharp:1.3289e-03 L10_sharp:9.7480e-04 L11_sharp:1.2277e-03 L12_sharp:2.6229e-03 total_fnorm:2.1859e+00 total_l1_linf:1.9424e+04 total_spectral:2.1859e+00 L1_fnorm:4.6508e-01 L2_fnorm:4.8547e-01 L3_fnorm:5.0095e-01 L4_fnorm:5.1287e-01 L5_fnorm:5.0634e-01 L6_fnorm:5.1602e-01 L7_fnorm:5.1055e-01 L8_fnorm:5.1550e-01 L9_fnorm:5.1370e-01 L10_fnorm:5.1070e-01 L11_fnorm:5.1074e-01 L12_fnorm:4.9537e-01 L1_l1linf:5.7740e-01 L2_l1linf:5.5393e-01 L3_l1linf:5.6873e-01 L4_l1linf:5.7684e-01 L5_l1linf:5.8061e-01 L6_l1linf:5.3902e-01 L7_l1linf:5.3360e-01 L8_l1linf:5.3009e-01 L9_l1linf:5.3574e-01 L10_l1linf:5.2485e-01 L11_l1linf:5.5635e-01 L12_l1linf:5.4770e-01 L1_spectral:8.3043e-02 L2_spectral:7.9501e-02 L3_spectral:7.7854e-02 L4_spectral:6.2083e-02 L5_spectral:6.7348e-02 L6_spectral:5.9581e-02 L7_spectral:4.7741e-02 L8_spectral:5.1032e-02 L9_spectral:5.1029e-02 L10_spectral:5.3571e-02 L11_spectral:6.3675e-02 L12_spectral:7.8353e-02 ip_v_neg_g:1.5823e-02 cos_v_neg_g:8.9014e-03 v_norm:2.1859e+00 g_norm:8.1318e-01 hv_norm:4.1620e-01 cos_v_hv:3.7715e-02 hg_norm:2.9165e+00 cos_g_hg:5.5832e-01 v_par:6.4672e-04 v_perp:2.1859e+00 L1_cos_v_neg_g:1.4887e-02 L1_v_norm:4.6508e-01 L2_cos_v_neg_g:2.2557e-02 L2_v_norm:4.8547e-01 L3_cos_v_neg_g:2.1096e-02 L3_v_norm:5.0095e-01 L4_cos_v_neg_g:1.0941e-02 L4_v_norm:5.1287e-01 L5_cos_v_neg_g:7.9167e-03 L5_v_norm:5.0634e-01 L6_cos_v_neg_g:7.5179e-03 L6_v_norm:5.1602e-01 L7_cos_v_neg_g:1.0258e-02 L7_v_norm:5.1055e-01 L8_cos_v_neg_g:9.2540e-03 L8_v_norm:5.1550e-01 L9_cos_v_neg_g:1.0218e-02 L9_v_norm:5.1370e-01 L10_cos_v_neg_g:1.1940e-02 L10_v_norm:5.1070e-01 L11_cos_v_neg_g:1.2728e-02 L11_v_norm:5.1074e-01 L12_cos_v_neg_g:1.6127e-02 L12_v_norm:4.9537e-01 +step:3500 train loss:3.664076 +step:3501 train loss:3.794007 +step:3502 train loss:3.773582 +step:3503 train loss:3.723831 +step:3504 train loss:3.675227 +step:3505 train loss:3.688359 +step:3506 train loss:3.589064 +step:3507 train loss:3.706800 +step:3508 train loss:3.649282 +step:3509 train loss:3.720680 +step:3510 train loss:3.651746 +step:3511 train loss:3.686727 +step:3512 train loss:3.828269 +step:3513 train loss:3.645918 +step:3514 train loss:3.662483 +step:3515 train loss:3.912876 +step:3516 train loss:3.709108 +step:3517 train loss:3.666644 +step:3518 train loss:3.670365 +step:3519 train loss:3.662965 +step:3520 train loss:3.695718 +step:3521 train loss:3.683356 +step:3522 train loss:3.595491 +step:3523 train loss:3.697782 +step:3524 
train loss:3.682189 +step:3525 train loss:3.670673 +step:3526 train loss:3.691282 +step:3527 train loss:3.640633 +step:3528 train loss:3.691704 +step:3529 train loss:3.670396 +step:3530 train loss:3.666524 +step:3531 train loss:3.657367 +step:3532 train loss:3.846309 +step:3533 train loss:3.665720 +step:3534 train loss:3.684168 +step:3535 train loss:3.656827 +step:3536 train loss:3.655485 +step:3537 train loss:3.668410 +step:3538 train loss:3.695727 +step:3539 train loss:3.644892 +step:3540 train loss:3.711126 +step:3541 train loss:3.683441 +step:3542 train loss:3.686019 +step:3543 train loss:3.612983 +step:3544 train loss:3.634527 +step:3545 train loss:3.635862 +step:3546 train loss:3.699774 +step:3547 train loss:3.706723 +step:3548 train loss:3.681180 +step:3549 train loss:3.677854 +step:3550 train loss:3.668937 +step:3551 train loss:3.695043 +step:3552 train loss:3.593761 +step:3553 train loss:3.713555 +step:3554 train loss:3.711218 +step:3555 train loss:3.692659 +step:3556 train loss:3.717719 +step:3557 train loss:3.701268 +step:3558 train loss:3.672581 +step:3559 train loss:3.625031 +step:3560 train loss:3.713419 +step:3561 train loss:3.708647 +step:3562 train loss:3.882693 +step:3563 train loss:3.743210 +step:3564 train loss:3.707467 +step:3565 train loss:3.704922 +step:3566 train loss:3.679353 +step:3567 train loss:3.620007 +step:3568 train loss:3.642295 +step:3569 train loss:3.730725 +step:3570 train loss:3.751851 +step:3571 train loss:3.726125 +step:3572 train loss:3.719150 +step:3573 train loss:3.676468 +step:3574 train loss:3.675437 +step:3575 train loss:3.667936 +step:3576 train loss:3.645650 +step:3577 train loss:3.657221 +step:3578 train loss:3.741118 +step:3579 train loss:3.651626 +step:3580 train loss:3.730171 +step:3581 train loss:3.670202 +step:3582 train loss:3.726833 +step:3583 train loss:3.664597 +step:3584 train loss:3.639441 +step:3585 train loss:3.687935 +step:3586 train loss:3.638309 +step:3587 train loss:3.729465 +step:3588 train loss:3.862310 +step:3589 train loss:3.691141 +step:3590 train loss:3.678160 +step:3591 train loss:3.686904 +step:3592 train loss:3.646922 +step:3593 train loss:3.618320 +step:3594 train loss:3.675122 +step:3595 train loss:3.648441 +step:3596 train loss:3.732210 +step:3597 train loss:3.702194 +step:3598 train loss:3.653679 +step:3599 train loss:3.704920 +step:3600 train loss:3.645199 +step:3601 train loss:3.660981 +step:3602 train loss:3.649675 +step:3603 train loss:3.668828 +step:3604 train loss:3.690802 +step:3605 train loss:3.800162 +step:3606 train loss:3.703208 +step:3607 train loss:3.680295 +step:3608 train loss:3.699013 +step:3609 train loss:3.682036 +step:3610 train loss:3.650663 +step:3611 train loss:3.651167 +step:3612 train loss:3.719620 +step:3613 train loss:3.688461 +step:3614 train loss:3.634264 +step:3615 train loss:3.674175 +step:3616 train loss:3.674750 +step:3617 train loss:3.722312 +step:3618 train loss:3.708536 +step:3619 train loss:3.693336 +step:3620 train loss:3.719484 +step:3621 train loss:3.672667 +step:3622 train loss:3.768204 +step:3623 train loss:3.759371 +step:3624 train loss:3.720581 +step:3625 train loss:3.699251 +step:3626 train loss:3.699868 +step:3627 train loss:3.698149 +step:3628 train loss:3.679782 +step:3629 train loss:3.681725 +step:3630 train loss:3.774156 +step:3631 train loss:3.696359 +step:3632 train loss:3.721829 +step:3633 train loss:3.675855 +step:3634 train loss:3.676408 +step:3635 train loss:3.666652 +step:3636 train loss:3.738253 +step:3637 train loss:3.814346 +step:3638 train loss:3.727382 
+step:3639 train loss:3.712731 +step:3640 train loss:3.724083 +step:3641 train loss:3.757269 +step:3642 train loss:3.652242 +step:3643 train loss:3.830081 +step:3644 train loss:3.714739 +step:3645 train loss:3.687192 +step:3646 train loss:3.809826 +step:3647 train loss:3.698502 +step:3648 train loss:3.687456 +step:3649 train loss:3.637814 +step:3650 train loss:3.678817 +step:3651 train loss:3.675745 +step:3652 train loss:3.662766 +step:3653 train loss:3.596099 +step:3654 train loss:3.660922 +step:3655 train loss:3.650200 +step:3656 train loss:3.681675 +step:3657 train loss:3.700747 +step:3658 train loss:3.696370 +step:3659 train loss:3.675778 +step:3660 train loss:3.653789 +step:3661 train loss:3.676194 +step:3662 train loss:3.651195 +step:3663 train loss:3.686230 +step:3664 train loss:3.639425 +step:3665 train loss:3.684629 +step:3666 train loss:3.723248 +step:3667 train loss:3.812578 +step:3668 train loss:3.692777 +step:3669 train loss:3.651577 +step:3670 train loss:3.698108 +step:3671 train loss:3.659124 +step:3672 train loss:3.692935 +step:3673 train loss:3.676406 +step:3674 train loss:3.690315 +step:3675 train loss:3.705110 +step:3676 train loss:3.667727 +step:3677 train loss:3.628224 +step:3678 train loss:3.688322 +step:3679 train loss:3.590703 +step:3680 train loss:3.692113 +step:3681 train loss:3.723660 +step:3682 train loss:3.705321 +step:3683 train loss:3.650460 +step:3684 train loss:3.645801 +step:3685 train loss:3.676494 +step:3686 train loss:3.705392 +step:3687 train loss:3.656977 +step:3688 train loss:3.632281 +step:3689 train loss:3.669255 +step:3690 train loss:3.659342 +step:3691 train loss:3.640130 +step:3692 train loss:3.698848 +step:3693 train loss:3.833140 +step:3694 train loss:3.648585 +step:3695 train loss:3.706924 +step:3696 train loss:3.670767 +step:3697 train loss:3.659567 +step:3698 train loss:3.599031 +step:3699 train loss:3.627620 +step:3700 train loss:3.656626 +step:3701 train loss:3.681808 +step:3702 train loss:3.702403 +step:3703 train loss:3.658567 +step:3704 train loss:3.700753 +step:3705 train loss:3.682624 +step:3706 train loss:3.631888 +step:3707 train loss:3.687253 +step:3708 train loss:3.662724 +step:3709 train loss:3.587430 +step:3710 train loss:3.707357 +step:3711 train loss:3.656392 +step:3712 train loss:3.695527 +step:3713 train loss:3.647285 +step:3714 train loss:3.664208 +step:3715 train loss:3.784555 +step:3716 train loss:3.688229 +step:3717 train loss:3.667711 +step:3718 train loss:3.668926 +step:3719 train loss:3.668029 +step:3720 train loss:3.672925 +step:3721 train loss:3.734843 +step:3722 train loss:3.746801 +step:3723 train loss:3.633357 +step:3724 train loss:3.692108 +step:3725 train loss:3.668288 +step:3726 train loss:3.691351 +step:3727 train loss:3.761013 +step:3728 train loss:3.728903 +step:3729 train loss:3.632676 +step:3730 train loss:3.645807 +step:3731 train loss:3.667786 +step:3732 train loss:3.819904 +step:3733 train loss:3.678296 +step:3734 train loss:3.679371 +step:3735 train loss:3.619574 +step:3736 train loss:3.676563 +step:3737 train loss:3.724690 +step:3738 train loss:3.746760 +step:3739 train loss:3.663583 +step:3740 train loss:3.569931 +step:3741 train loss:3.775582 +step:3742 train loss:3.687749 +step:3743 train loss:3.660949 +step:3744 train loss:3.703207 +step:3745 train loss:3.683701 +step:3746 train loss:3.658997 +step:3747 train loss:3.672204 +step:3748 train loss:3.715661 +step:3749 train loss:3.704943 +step:3750 validation loss:3.611281 +step:3750 train loss:3.707619 +step:3751 train loss:3.796118 +step:3752 
train loss:3.729513 +step:3753 train loss:3.650732 +step:3754 train loss:3.697279 +step:3755 train loss:3.877830 +step:3756 train loss:3.651231 +step:3757 train loss:3.647403 +step:3758 train loss:3.678355 +step:3759 train loss:3.624212 +step:3760 train loss:3.619981 +step:3761 train loss:3.671515 +step:3762 train loss:3.663609 +step:3763 train loss:3.665940 +step:3764 train loss:3.659784 +step:3765 train loss:3.654304 +step:3766 train loss:3.623323 +step:3767 train loss:3.709439 +step:3768 train loss:3.647886 +step:3769 train loss:3.925668 +step:3770 train loss:3.704383 +step:3771 train loss:3.713973 +step:3772 train loss:3.670059 +step:3773 train loss:3.660389 +step:3774 train loss:3.668266 +step:3775 train loss:3.662025 +step:3776 train loss:3.660668 +step:3777 train loss:3.621445 +step:3778 train loss:3.642794 +step:3779 train loss:3.630479 +step:3780 train loss:3.705424 +step:3781 train loss:3.669763 +step:3782 train loss:3.588936 +step:3783 train loss:3.695381 +step:3784 train loss:3.705185 +step:3785 train loss:3.613526 +step:3786 train loss:3.723155 +step:3787 train loss:3.634750 +step:3788 train loss:3.646514 +step:3789 train loss:3.568043 +step:3790 train loss:3.673021 +step:3791 train loss:3.694345 +step:3792 train loss:3.668100 +step:3793 train loss:3.664411 +step:3794 train loss:3.689583 +step:3795 train loss:3.660144 +step:3796 train loss:3.676648 +step:3797 train loss:3.649329 +step:3798 train loss:3.660897 +step:3799 train loss:3.667912 +step:3800 train loss:3.574960 +step:3801 train loss:3.689290 +step:3802 train loss:3.617901 +step:3803 train loss:3.700752 +step:3804 train loss:3.713966 +step:3805 train loss:3.672404 +step:3806 train loss:3.687735 +step:3807 train loss:3.710197 +step:3808 train loss:3.664707 +step:3809 train loss:3.678196 +step:3810 train loss:3.681583 +step:3811 train loss:3.666682 +step:3812 train loss:3.667624 +step:3813 train loss:3.623353 +step:3814 train loss:3.667459 +step:3815 train loss:3.669919 +step:3816 train loss:3.688322 +step:3817 train loss:3.704458 +step:3818 train loss:3.678458 +step:3819 train loss:3.688523 +step:3820 train loss:3.684701 +step:3821 train loss:3.644480 +step:3822 train loss:3.734271 +step:3823 train loss:3.620953 +step:3824 train loss:3.637094 +step:3825 train loss:3.643747 +step:3826 train loss:3.704146 +step:3827 train loss:3.731903 +step:3828 train loss:3.623852 +step:3829 train loss:3.641691 +step:3830 train loss:3.701060 +step:3831 train loss:3.636416 +step:3832 train loss:3.694231 +step:3833 train loss:3.638299 +step:3834 train loss:3.601883 +step:3835 train loss:3.649056 +step:3836 train loss:3.622092 +step:3837 train loss:3.689339 +step:3838 train loss:3.643703 +step:3839 train loss:3.686546 +step:3840 train loss:3.699137 +step:3841 train loss:3.650980 +step:3842 train loss:3.681941 +step:3843 train loss:3.694095 +step:3844 train loss:3.666590 +step:3845 train loss:3.688281 +step:3846 train loss:3.728226 +step:3847 train loss:3.626080 +step:3848 train loss:3.633498 +step:3849 train loss:3.649653 +step:3850 train loss:3.669492 +step:3851 train loss:3.806042 +step:3852 train loss:3.786183 +step:3853 train loss:3.680372 +step:3854 train loss:3.649416 +step:3855 train loss:3.697919 +step:3856 train loss:3.621383 +step:3857 train loss:3.683990 +step:3858 train loss:3.598179 +step:3859 train loss:3.645994 +step:3860 train loss:3.713529 +step:3861 train loss:3.691119 +step:3862 train loss:3.626665 +step:3863 train loss:3.675198 +step:3864 train loss:3.648427 +step:3865 train loss:3.685866 +step:3866 train loss:3.706256 
+step:3867 train loss:3.700780 +step:3868 train loss:3.651952 +step:3869 train loss:3.652334 +step:3870 train loss:3.626156 +step:3871 train loss:3.624780 +step:3872 train loss:3.751466 +step:3873 train loss:3.673298 +step:3874 train loss:3.686926 +step:3875 train loss:3.798208 +step:3876 train loss:3.673325 +step:3877 train loss:3.698094 +step:3878 train loss:3.725118 +step:3879 train loss:3.711047 +step:3880 train loss:3.795544 +step:3881 train loss:3.612297 +step:3882 train loss:3.646935 +step:3883 train loss:3.660610 +step:3884 train loss:3.649839 +step:3885 train loss:3.665654 +step:3886 train loss:3.727120 +step:3887 train loss:3.707678 +step:3888 train loss:3.668516 +step:3889 train loss:3.636212 +step:3890 train loss:3.674057 +step:3891 train loss:3.688956 +step:3892 train loss:3.597449 +step:3893 train loss:3.705130 +step:3894 train loss:3.651746 +step:3895 train loss:3.672942 +step:3896 train loss:3.665148 +step:3897 train loss:3.630514 +step:3898 train loss:3.689243 +step:3899 train loss:3.729293 +step:3900 train loss:3.682418 +step:3901 train loss:3.702090 +step:3902 train loss:3.625411 +step:3903 train loss:3.642489 +step:3904 train loss:3.673664 +step:3905 train loss:3.610441 +step:3906 train loss:3.651304 +step:3907 train loss:3.686699 +step:3908 train loss:3.763932 +step:3909 train loss:3.650612 +step:3910 train loss:3.680296 +step:3911 train loss:3.699342 +step:3912 train loss:3.648029 +step:3913 train loss:3.662548 +step:3914 train loss:3.684657 +step:3915 train loss:3.652664 +step:3916 train loss:3.684886 +step:3917 train loss:3.729475 +step:3918 train loss:3.705487 +step:3919 train loss:3.688914 +step:3920 train loss:3.655489 +step:3921 train loss:3.697534 +step:3922 train loss:3.701681 +step:3923 train loss:3.693049 +step:3924 train loss:3.629527 +step:3925 train loss:3.831147 +step:3926 train loss:3.671504 +step:3927 train loss:3.655252 +step:3928 train loss:3.735001 +step:3929 train loss:3.802061 +step:3930 train loss:3.696617 +step:3931 train loss:3.631784 +step:3932 train loss:3.679891 +step:3933 train loss:3.700314 +step:3934 train loss:3.649265 +step:3935 train loss:3.624496 +step:3936 train loss:3.715963 +step:3937 train loss:3.675574 +step:3938 train loss:3.683885 +step:3939 train loss:3.708584 +step:3940 train loss:3.656977 +step:3941 train loss:3.740516 +step:3942 train loss:3.701779 +step:3943 train loss:3.685374 +step:3944 train loss:3.733135 +step:3945 train loss:3.645950 +step:3946 train loss:3.591885 +step:3947 train loss:3.717932 +step:3948 train loss:3.687669 +step:3949 train loss:3.850117 +step:3950 train loss:3.653657 +step:3951 train loss:3.582395 +step:3952 train loss:3.543405 +step:3953 train loss:3.618294 +step:3954 train loss:3.669425 +step:3955 train loss:3.696690 +step:3956 train loss:3.652915 +step:3957 train loss:3.705925 +step:3958 train loss:3.683668 +step:3959 train loss:3.719689 +step:3960 train loss:3.644056 +step:3961 train loss:3.667637 +step:3962 train loss:3.674562 +step:3963 train loss:3.649866 +step:3964 train loss:3.627242 +step:3965 train loss:3.682884 +step:3966 train loss:3.649276 +step:3967 train loss:3.682374 +step:3968 train loss:3.701878 +step:3969 train loss:3.611988 +step:3970 train loss:3.721891 +step:3971 train loss:3.640636 +step:3972 train loss:3.672587 +step:3973 train loss:3.633453 +step:3974 train loss:3.724029 +step:3975 train loss:3.678476 +step:3976 train loss:3.632361 +step:3977 train loss:3.688929 +step:3978 train loss:3.653798 +step:3979 train loss:3.638929 +step:3980 train loss:3.710963 +step:3981 train 
loss:3.640300 +step:3982 train loss:3.662842 +step:3983 train loss:3.646124 +step:3984 train loss:3.680997 +step:3985 train loss:3.655880 +step:3986 train loss:3.669018 +step:3987 train loss:3.678484 +step:3988 train loss:3.618523 +step:3989 train loss:3.687385 +step:3990 train loss:3.684498 +step:3991 train loss:3.696297 +step:3992 train loss:3.653814 +step:3993 train loss:3.687058 +step:3994 train loss:3.636736 +step:3995 train loss:3.692463 +step:3996 train loss:3.610077 +step:3997 train loss:3.683966 +step:3998 train loss:3.567482 +step:3999 train loss:3.725606 +step:4000 validation loss:3.586736 total_sharp:6.1190e-03 L1_sharp:1.3054e-02 L2_sharp:3.4620e-03 L3_sharp:1.6716e-03 L4_sharp:7.1217e-04 L5_sharp:8.9663e-04 L6_sharp:1.1824e-03 L7_sharp:1.6234e-03 L8_sharp:1.5060e-03 L9_sharp:1.2672e-03 L10_sharp:9.6298e-04 L11_sharp:9.2436e-04 L12_sharp:2.3937e-03 total_fnorm:2.1948e+00 total_l1_linf:1.9513e+04 total_spectral:2.1948e+00 L1_fnorm:4.6964e-01 L2_fnorm:4.9359e-01 L3_fnorm:4.9820e-01 L4_fnorm:5.0816e-01 L5_fnorm:5.0248e-01 L6_fnorm:5.1315e-01 L7_fnorm:5.1419e-01 L8_fnorm:5.2165e-01 L9_fnorm:5.1995e-01 L10_fnorm:5.1610e-01 L11_fnorm:5.1716e-01 L12_fnorm:5.0008e-01 L1_l1linf:6.0164e-01 L2_l1linf:5.3421e-01 L3_l1linf:5.4998e-01 L4_l1linf:5.4983e-01 L5_l1linf:5.6348e-01 L6_l1linf:5.6497e-01 L7_l1linf:5.5930e-01 L8_l1linf:5.3367e-01 L9_l1linf:5.3252e-01 L10_l1linf:5.9174e-01 L11_l1linf:6.1025e-01 L12_l1linf:5.8981e-01 L1_spectral:8.2383e-02 L2_spectral:8.0526e-02 L3_spectral:7.8326e-02 L4_spectral:6.3197e-02 L5_spectral:6.6921e-02 L6_spectral:6.0372e-02 L7_spectral:5.0819e-02 L8_spectral:5.1494e-02 L9_spectral:5.2332e-02 L10_spectral:5.9134e-02 L11_spectral:6.6613e-02 L12_spectral:7.8777e-02 ip_v_neg_g:1.7805e-02 cos_v_neg_g:1.0863e-02 v_norm:2.1948e+00 g_norm:7.4682e-01 hv_norm:4.2412e-01 cos_v_hv:3.1666e-02 hg_norm:2.5921e+00 cos_g_hg:4.6133e-01 v_par:8.0355e-04 v_perp:2.1948e+00 L1_cos_v_neg_g:3.1124e-02 L1_v_norm:4.6964e-01 L2_cos_v_neg_g:2.3622e-02 L2_v_norm:4.9359e-01 L3_cos_v_neg_g:1.6571e-02 L3_v_norm:4.9820e-01 L4_cos_v_neg_g:1.1101e-02 L4_v_norm:5.0816e-01 L5_cos_v_neg_g:1.0434e-02 L5_v_norm:5.0248e-01 L6_cos_v_neg_g:1.0956e-02 L6_v_norm:5.1315e-01 L7_cos_v_neg_g:1.2954e-02 L7_v_norm:5.1419e-01 L8_cos_v_neg_g:1.1428e-02 L8_v_norm:5.2165e-01 L9_cos_v_neg_g:1.0320e-02 L9_v_norm:5.1995e-01 L10_cos_v_neg_g:1.0285e-02 L10_v_norm:5.1610e-01 L11_cos_v_neg_g:1.0965e-02 L11_v_norm:5.1716e-01 L12_cos_v_neg_g:1.1718e-02 L12_v_norm:5.0008e-01 +step:4000 train loss:3.604395 +step:4001 train loss:3.680363 +step:4002 train loss:3.662251 +step:4003 train loss:3.694352 +step:4004 train loss:3.601949 +step:4005 train loss:3.697453 +step:4006 train loss:3.706535 +step:4007 train loss:3.625966 +step:4008 train loss:3.581638 +step:4009 train loss:3.667303 +step:4010 train loss:3.641201 +step:4011 train loss:3.657169 +step:4012 train loss:3.660869 +step:4013 train loss:3.640253 +step:4014 train loss:3.652779 +step:4015 train loss:3.643666 +step:4016 train loss:3.653889 +step:4017 train loss:3.620091 +step:4018 train loss:3.561686 +step:4019 train loss:3.613781 +step:4020 train loss:3.682065 +step:4021 train loss:3.627449 +step:4022 train loss:3.629842 +step:4023 train loss:3.643582 +step:4024 train loss:3.559749 +step:4025 train loss:3.679180 +step:4026 train loss:3.668585 +step:4027 train loss:3.675456 +step:4028 train loss:3.691325 +step:4029 train loss:3.722617 +step:4030 train loss:3.639003 +step:4031 train loss:3.680884 +step:4032 train loss:3.635130 +step:4033 train loss:3.670979 +step:4034 
train loss:3.685791 +step:4035 train loss:3.664456 +step:4036 train loss:3.658651 +step:4037 train loss:3.676542 +step:4038 train loss:3.595883 +step:4039 train loss:3.652550 +step:4040 train loss:3.629002 +step:4041 train loss:3.624773 +step:4042 train loss:3.644143 +step:4043 train loss:3.629741 +step:4044 train loss:3.665349 +step:4045 train loss:3.667046 +step:4046 train loss:3.624463 +step:4047 train loss:3.652717 +step:4048 train loss:3.664888 +step:4049 train loss:3.625406 +step:4050 train loss:3.729373 +step:4051 train loss:3.641629 +step:4052 train loss:3.663516 +step:4053 train loss:3.712151 +step:4054 train loss:3.682904 +step:4055 train loss:3.699356 +step:4056 train loss:3.696279 +step:4057 train loss:3.634308 +step:4058 train loss:3.615470 +step:4059 train loss:3.695671 +step:4060 train loss:3.639001 +step:4061 train loss:3.611149 +step:4062 train loss:3.721910 +step:4063 train loss:3.673770 +step:4064 train loss:3.640768 +step:4065 train loss:3.627080 +step:4066 train loss:3.653713 +step:4067 train loss:3.679790 +step:4068 train loss:3.648575 +step:4069 train loss:3.705681 +step:4070 train loss:3.622246 +step:4071 train loss:3.594867 +step:4072 train loss:3.671019 +step:4073 train loss:3.604586 +step:4074 train loss:3.657210 +step:4075 train loss:3.726846 +step:4076 train loss:3.579185 +step:4077 train loss:3.657661 +step:4078 train loss:3.761270 +step:4079 train loss:3.701792 +step:4080 train loss:3.648665 +step:4081 train loss:3.614282 +step:4082 train loss:3.668928 +step:4083 train loss:3.605457 +step:4084 train loss:3.622928 +step:4085 train loss:3.864174 +step:4086 train loss:3.628767 +step:4087 train loss:3.672941 +step:4088 train loss:3.658311 +step:4089 train loss:3.647913 +step:4090 train loss:3.663197 +step:4091 train loss:3.689394 +step:4092 train loss:3.610889 +step:4093 train loss:3.643298 +step:4094 train loss:3.658931 +step:4095 train loss:3.615638 +step:4096 train loss:3.647544 +step:4097 train loss:3.651408 +step:4098 train loss:3.624119 +step:4099 train loss:3.626601 +step:4100 train loss:3.679194 +step:4101 train loss:3.601470 +step:4102 train loss:3.635864 +step:4103 train loss:3.845625 +step:4104 train loss:3.656439 +step:4105 train loss:3.624280 +step:4106 train loss:3.695714 +step:4107 train loss:3.616917 +step:4108 train loss:3.624094 +step:4109 train loss:3.675015 +step:4110 train loss:3.685227 +step:4111 train loss:3.659566 +step:4112 train loss:3.678145 +step:4113 train loss:3.635552 +step:4114 train loss:3.585772 +step:4115 train loss:3.621081 +step:4116 train loss:3.604262 +step:4117 train loss:3.626351 +step:4118 train loss:3.678627 +step:4119 train loss:3.701457 +step:4120 train loss:3.626104 +step:4121 train loss:3.614586 +step:4122 train loss:3.684590 +step:4123 train loss:3.692688 +step:4124 train loss:3.674199 +step:4125 train loss:3.714023 +step:4126 train loss:3.648448 +step:4127 train loss:3.666306 +step:4128 train loss:3.654057 +step:4129 train loss:3.702186 +step:4130 train loss:3.632546 +step:4131 train loss:3.669308 +step:4132 train loss:3.685190 +step:4133 train loss:3.634948 +step:4134 train loss:3.688535 +step:4135 train loss:3.622537 +step:4136 train loss:3.644111 +step:4137 train loss:3.615017 +step:4138 train loss:3.624120 +step:4139 train loss:3.669496 +step:4140 train loss:3.630282 +step:4141 train loss:3.592555 +step:4142 train loss:3.634774 +step:4143 train loss:3.673619 +step:4144 train loss:3.625168 +step:4145 train loss:3.592836 +step:4146 train loss:3.660302 +step:4147 train loss:3.634837 +step:4148 train loss:3.629855 
+step:4149 train loss:3.707454 +step:4150 train loss:3.672316 +step:4151 train loss:3.654112 +step:4152 train loss:3.676460 +step:4153 train loss:3.683453 +step:4154 train loss:3.687907 +step:4155 train loss:3.714089 +step:4156 train loss:3.587105 +step:4157 train loss:3.610185 +step:4158 train loss:3.665814 +step:4159 train loss:3.568309 +step:4160 train loss:3.658555 +step:4161 train loss:3.659522 +step:4162 train loss:3.568359 +step:4163 train loss:3.651477 +step:4164 train loss:3.597178 +step:4165 train loss:3.597933 +step:4166 train loss:3.665755 +step:4167 train loss:3.662730 +step:4168 train loss:3.653025 +step:4169 train loss:3.684395 +step:4170 train loss:3.806010 +step:4171 train loss:3.658754 +step:4172 train loss:3.676194 +step:4173 train loss:3.669577 +step:4174 train loss:3.634309 +step:4175 train loss:3.723958 +step:4176 train loss:3.646546 +step:4177 train loss:3.670054 +step:4178 train loss:3.646857 +step:4179 train loss:3.601381 +step:4180 train loss:3.598328 +step:4181 train loss:3.648345 +step:4182 train loss:3.634992 +step:4183 train loss:3.567461 +step:4184 train loss:3.639346 +step:4185 train loss:3.705947 +step:4186 train loss:3.682536 +step:4187 train loss:3.691566 +step:4188 train loss:3.664240 +step:4189 train loss:3.624978 +step:4190 train loss:3.665403 +step:4191 train loss:3.614584 +step:4192 train loss:3.701719 +step:4193 train loss:3.610258 +step:4194 train loss:3.595325 +step:4195 train loss:3.592768 +step:4196 train loss:3.659802 +step:4197 train loss:3.673957 +step:4198 train loss:3.596087 +step:4199 train loss:3.679924 +step:4200 train loss:3.640102 +step:4201 train loss:3.620592 +step:4202 train loss:3.637043 +step:4203 train loss:3.647837 +step:4204 train loss:3.640585 +step:4205 train loss:3.655760 +step:4206 train loss:3.672212 +step:4207 train loss:3.673202 +step:4208 train loss:3.635973 +step:4209 train loss:3.702555 +step:4210 train loss:3.729885 +step:4211 train loss:3.609760 +step:4212 train loss:3.651109 +step:4213 train loss:3.605063 +step:4214 train loss:3.610367 +step:4215 train loss:3.630510 +step:4216 train loss:3.601090 +step:4217 train loss:3.623526 +step:4218 train loss:3.665679 +step:4219 train loss:3.683802 +step:4220 train loss:3.754995 +step:4221 train loss:3.643684 +step:4222 train loss:3.701656 +step:4223 train loss:3.621571 +step:4224 train loss:3.694484 +step:4225 train loss:3.620903 +step:4226 train loss:3.674246 +step:4227 train loss:3.652051 +step:4228 train loss:3.625499 +step:4229 train loss:3.635269 +step:4230 train loss:3.617843 +step:4231 train loss:3.605990 +step:4232 train loss:3.657120 +step:4233 train loss:3.564715 +step:4234 train loss:3.645975 +step:4235 train loss:3.722635 +step:4236 train loss:3.695226 +step:4237 train loss:3.674277 +step:4238 train loss:3.683232 +step:4239 train loss:3.736592 +step:4240 train loss:3.640253 +step:4241 train loss:3.566466 +step:4242 train loss:3.688320 +step:4243 train loss:3.685640 +step:4244 train loss:3.698541 +step:4245 train loss:3.755688 +step:4246 train loss:3.627262 +step:4247 train loss:3.683671 +step:4248 train loss:3.634344 +step:4249 train loss:3.641293 +step:4250 validation loss:3.567694 +step:4250 train loss:3.624983 +step:4251 train loss:3.720345 +step:4252 train loss:3.629863 +step:4253 train loss:3.621910 +step:4254 train loss:3.631827 +step:4255 train loss:3.612291 +step:4256 train loss:3.629153 +step:4257 train loss:3.684333 +step:4258 train loss:3.546954 +step:4259 train loss:3.608427 +step:4260 train loss:3.674974 +step:4261 train loss:3.661574 +step:4262 
train loss:3.796840 +step:4263 train loss:3.734841 +step:4264 train loss:3.677744 +step:4265 train loss:3.669055 +step:4266 train loss:3.663711 +step:4267 train loss:3.664088 +step:4268 train loss:3.611010 +step:4269 train loss:3.703009 +step:4270 train loss:3.684996 +step:4271 train loss:3.597735 +step:4272 train loss:3.651598 +step:4273 train loss:3.629032 +step:4274 train loss:3.613065 +step:4275 train loss:3.637084 +step:4276 train loss:3.600540 +step:4277 train loss:3.738524 +step:4278 train loss:3.586502 +step:4279 train loss:3.614144 +step:4280 train loss:3.698696 +step:4281 train loss:3.681931 +step:4282 train loss:3.748553 +step:4283 train loss:3.604260 +step:4284 train loss:3.629345 +step:4285 train loss:3.632663 +step:4286 train loss:3.698594 +step:4287 train loss:3.695260 +step:4288 train loss:3.679291 +step:4289 train loss:3.631683 +step:4290 train loss:3.640585 +step:4291 train loss:3.595402 +step:4292 train loss:3.641006 +step:4293 train loss:3.654436 +step:4294 train loss:3.638022 +step:4295 train loss:3.572899 +step:4296 train loss:3.645458 +step:4297 train loss:3.627360 +step:4298 train loss:3.637513 +step:4299 train loss:3.632973 +step:4300 train loss:3.752824 +step:4301 train loss:3.570334 +step:4302 train loss:3.708285 +step:4303 train loss:3.588454 +step:4304 train loss:3.595595 +step:4305 train loss:3.614956 +step:4306 train loss:3.688808 +step:4307 train loss:3.601922 +step:4308 train loss:3.603536 +step:4309 train loss:3.670142 +step:4310 train loss:3.611090 +step:4311 train loss:3.664551 +step:4312 train loss:3.659990 +step:4313 train loss:3.650974 +step:4314 train loss:3.597375 +step:4315 train loss:3.629012 +step:4316 train loss:3.576691 +step:4317 train loss:3.631138 +step:4318 train loss:3.672665 +step:4319 train loss:3.620783 +step:4320 train loss:3.682214 +step:4321 train loss:3.667832 +step:4322 train loss:3.620480 +step:4323 train loss:3.559361 +step:4324 train loss:3.650263 +step:4325 train loss:3.628755 +step:4326 train loss:3.621412 +step:4327 train loss:3.724998 +step:4328 train loss:3.638404 +step:4329 train loss:3.591136 +step:4330 train loss:3.636772 +step:4331 train loss:3.650664 +step:4332 train loss:3.678706 +step:4333 train loss:3.640517 +step:4334 train loss:3.658376 +step:4335 train loss:3.656554 +step:4336 train loss:3.670034 +step:4337 train loss:3.632626 +step:4338 train loss:3.752461 +step:4339 train loss:3.663828 +step:4340 train loss:3.667459 +step:4341 train loss:3.634310 +step:4342 train loss:3.651195 +step:4343 train loss:3.769866 +step:4344 train loss:3.660640 +step:4345 train loss:3.674865 +step:4346 train loss:3.690355 +step:4347 train loss:3.696769 +step:4348 train loss:3.608566 +step:4349 train loss:3.692913 +step:4350 train loss:3.634097 +step:4351 train loss:3.591099 +step:4352 train loss:3.664871 +step:4353 train loss:3.608500 +step:4354 train loss:3.664588 +step:4355 train loss:3.624955 +step:4356 train loss:3.648166 +step:4357 train loss:3.629045 +step:4358 train loss:3.725290 +step:4359 train loss:3.673642 +step:4360 train loss:3.590112 +step:4361 train loss:3.638973 +step:4362 train loss:3.656972 +step:4363 train loss:3.675098 +step:4364 train loss:3.640852 +step:4365 train loss:3.625422 +step:4366 train loss:3.670060 +step:4367 train loss:3.683450 +step:4368 train loss:3.664032 +step:4369 train loss:3.534194 +step:4370 train loss:3.662496 +step:4371 train loss:3.571974 +step:4372 train loss:3.720086 +step:4373 train loss:3.656272 +step:4374 train loss:3.626095 +step:4375 train loss:3.670738 +step:4376 train loss:3.681020 
+step:4377 train loss:3.612876 +step:4378 train loss:3.626844 +step:4379 train loss:3.708087 +step:4380 train loss:3.686839 +step:4381 train loss:3.591269 +step:4382 train loss:3.634106 +step:4383 train loss:3.662444 +step:4384 train loss:3.659497 +step:4385 train loss:3.584945 +step:4386 train loss:3.641369 +step:4387 train loss:3.611998 +step:4388 train loss:3.634558 +step:4389 train loss:3.660518 +step:4390 train loss:3.703448 +step:4391 train loss:3.628766 +step:4392 train loss:3.699396 +step:4393 train loss:3.660088 +step:4394 train loss:3.598405 +step:4395 train loss:3.655540 +step:4396 train loss:3.629021 +step:4397 train loss:3.673170 +step:4398 train loss:3.618175 +step:4399 train loss:3.611037 +step:4400 train loss:3.613968 +step:4401 train loss:3.678273 +step:4402 train loss:3.675033 +step:4403 train loss:3.625774 +step:4404 train loss:3.656501 +step:4405 train loss:3.575837 +step:4406 train loss:3.657749 +step:4407 train loss:3.592691 +step:4408 train loss:3.683339 +step:4409 train loss:3.644312 +step:4410 train loss:3.650731 +step:4411 train loss:3.605611 +step:4412 train loss:3.722555 +step:4413 train loss:3.623182 +step:4414 train loss:3.628962 +step:4415 train loss:3.613267 +step:4416 train loss:3.607227 +step:4417 train loss:3.599008 +step:4418 train loss:3.675913 +step:4419 train loss:3.642617 +step:4420 train loss:3.652934 +step:4421 train loss:3.676743 +step:4422 train loss:3.692238 +step:4423 train loss:3.654053 +step:4424 train loss:3.637544 +step:4425 train loss:3.602300 +step:4426 train loss:3.674560 +step:4427 train loss:3.633806 +step:4428 train loss:3.576872 +step:4429 train loss:3.634485 +step:4430 train loss:3.675196 +step:4431 train loss:3.666723 +step:4432 train loss:3.572938 +step:4433 train loss:3.627493 +step:4434 train loss:3.623780 +step:4435 train loss:3.653894 +step:4436 train loss:3.590224 +step:4437 train loss:3.668894 +step:4438 train loss:3.634810 +step:4439 train loss:3.643110 +step:4440 train loss:3.640351 +step:4441 train loss:3.638959 +step:4442 train loss:3.690629 +step:4443 train loss:3.627327 +step:4444 train loss:3.710812 +step:4445 train loss:3.675062 +step:4446 train loss:3.607646 +step:4447 train loss:3.652173 +step:4448 train loss:3.673158 +step:4449 train loss:3.613765 +step:4450 train loss:3.626886 +step:4451 train loss:3.682518 +step:4452 train loss:3.744448 +step:4453 train loss:3.675172 +step:4454 train loss:3.648107 +step:4455 train loss:3.704729 +step:4456 train loss:3.642676 +step:4457 train loss:3.644156 +step:4458 train loss:3.650864 +step:4459 train loss:3.684840 +step:4460 train loss:3.594558 +step:4461 train loss:3.565519 +step:4462 train loss:3.620842 +step:4463 train loss:3.642503 +step:4464 train loss:3.609187 +step:4465 train loss:3.645481 +step:4466 train loss:3.736861 +step:4467 train loss:3.619103 +step:4468 train loss:3.613898 +step:4469 train loss:3.604919 +step:4470 train loss:3.580168 +step:4471 train loss:3.641548 +step:4472 train loss:3.567977 +step:4473 train loss:3.654197 +step:4474 train loss:3.678763 +step:4475 train loss:3.640466 +step:4476 train loss:3.605110 +step:4477 train loss:3.588131 +step:4478 train loss:3.646657 +step:4479 train loss:3.749759 +step:4480 train loss:3.584783 +step:4481 train loss:3.654737 +step:4482 train loss:3.615173 +step:4483 train loss:3.610600 +step:4484 train loss:3.661466 +step:4485 train loss:3.619987 +step:4486 train loss:3.720408 +step:4487 train loss:3.616717 +step:4488 train loss:3.613433 +step:4489 train loss:3.572635 +step:4490 train loss:3.651752 +step:4491 train 
loss:3.603453 +step:4492 train loss:3.636288 +step:4493 train loss:3.620851 +step:4494 train loss:3.615093 +step:4495 train loss:3.681859 +step:4496 train loss:3.621587 +step:4497 train loss:3.707417 +step:4498 train loss:3.596765 +step:4499 train loss:3.649981 +step:4500 validation loss:3.553601 total_sharp:8.6125e-03 L1_sharp:1.3643e-02 L2_sharp:2.5064e-03 L3_sharp:1.4190e-03 L4_sharp:9.4436e-04 L5_sharp:1.0051e-03 L6_sharp:1.2937e-03 L7_sharp:2.0550e-03 L8_sharp:1.7673e-03 L9_sharp:1.9543e-03 L10_sharp:1.4316e-03 L11_sharp:1.4549e-03 L12_sharp:2.7911e-03 total_fnorm:2.2381e+00 total_l1_linf:1.9927e+04 total_spectral:2.2381e+00 L1_fnorm:5.0377e-01 L2_fnorm:5.0492e-01 L3_fnorm:5.1640e-01 L4_fnorm:5.2247e-01 L5_fnorm:5.1918e-01 L6_fnorm:5.2536e-01 L7_fnorm:5.2188e-01 L8_fnorm:5.2872e-01 L9_fnorm:5.2833e-01 L10_fnorm:5.2591e-01 L11_fnorm:5.2681e-01 L12_fnorm:5.1685e-01 L1_l1linf:6.5691e-01 L2_l1linf:5.4889e-01 L3_l1linf:5.9652e-01 L4_l1linf:6.2050e-01 L5_l1linf:5.7284e-01 L6_l1linf:5.8846e-01 L7_l1linf:5.4823e-01 L8_l1linf:5.5725e-01 L9_l1linf:5.7268e-01 L10_l1linf:5.6352e-01 L11_l1linf:5.9603e-01 L12_l1linf:6.0279e-01 L1_spectral:1.0297e-01 L2_spectral:8.3946e-02 L3_spectral:8.0368e-02 L4_spectral:6.6971e-02 L5_spectral:7.5320e-02 L6_spectral:6.9260e-02 L7_spectral:5.7588e-02 L8_spectral:5.0450e-02 L9_spectral:5.7717e-02 L10_spectral:6.6017e-02 L11_spectral:7.8505e-02 L12_spectral:7.3535e-02 ip_v_neg_g:2.4754e-02 cos_v_neg_g:1.3803e-02 v_norm:2.2381e+00 g_norm:8.0128e-01 hv_norm:6.0228e-01 cos_v_hv:3.2004e-02 hg_norm:9.9543e+00 cos_g_hg:3.5293e-01 v_par:9.9556e-04 v_perp:2.2381e+00 L1_cos_v_neg_g:3.4486e-02 L1_v_norm:5.0377e-01 L2_cos_v_neg_g:2.1193e-02 L2_v_norm:5.0492e-01 L3_cos_v_neg_g:1.2072e-02 L3_v_norm:5.1640e-01 L4_cos_v_neg_g:9.9822e-03 L4_v_norm:5.2247e-01 L5_cos_v_neg_g:1.3919e-02 L5_v_norm:5.1918e-01 L6_cos_v_neg_g:1.4984e-02 L6_v_norm:5.2536e-01 L7_cos_v_neg_g:1.6398e-02 L7_v_norm:5.2188e-01 L8_cos_v_neg_g:1.5862e-02 L8_v_norm:5.2872e-01 L9_cos_v_neg_g:1.7835e-02 L9_v_norm:5.2833e-01 L10_cos_v_neg_g:1.9296e-02 L10_v_norm:5.2591e-01 L11_cos_v_neg_g:2.0413e-02 L11_v_norm:5.2681e-01 L12_cos_v_neg_g:2.1220e-02 L12_v_norm:5.1685e-01 +step:4500 train loss:3.561614 +step:4501 train loss:3.620587 +step:4502 train loss:3.744923 +step:4503 train loss:3.645137 +step:4504 train loss:3.657766 +step:4505 train loss:3.644357 +step:4506 train loss:3.611308 +step:4507 train loss:3.685472 +step:4508 train loss:3.621236 +step:4509 train loss:3.618903 +step:4510 train loss:3.655282 +step:4511 train loss:3.604034 +step:4512 train loss:3.629668 +step:4513 train loss:3.685769 +step:4514 train loss:3.592600 +step:4515 train loss:3.706613 +step:4516 train loss:3.680525 +step:4517 train loss:3.635720 +step:4518 train loss:3.574541 +step:4519 train loss:3.614769 +step:4520 train loss:3.623967 +step:4521 train loss:3.564561 +step:4522 train loss:3.618215 +step:4523 train loss:3.665622 +step:4524 train loss:3.647870 +step:4525 train loss:3.570162 +step:4526 train loss:3.612046 +step:4527 train loss:3.600802 +step:4528 train loss:3.629818 +step:4529 train loss:3.623507 +step:4530 train loss:3.720944 +step:4531 train loss:3.610293 +step:4532 train loss:3.632995 +step:4533 train loss:3.606390 +step:4534 train loss:3.699246 +step:4535 train loss:3.599560 +step:4536 train loss:3.670475 +step:4537 train loss:3.653813 +step:4538 train loss:3.630988 +step:4539 train loss:3.652369 +step:4540 train loss:3.628831 +step:4541 train loss:3.596178 +step:4542 train loss:3.645063 +step:4543 train loss:3.732964 +step:4544 
train loss:3.676275 +step:4545 train loss:3.615729 +step:4546 train loss:3.714420 +step:4547 train loss:3.671347 +step:4548 train loss:3.673883 +step:4549 train loss:3.632014 +step:4550 train loss:3.597164 +step:4551 train loss:3.612530 +step:4552 train loss:3.618064 +step:4553 train loss:3.702341 +step:4554 train loss:3.594331 +step:4555 train loss:3.709903 +step:4556 train loss:3.641921 +step:4557 train loss:3.570343 +step:4558 train loss:3.654597 +step:4559 train loss:3.668022 +step:4560 train loss:3.605912 +step:4561 train loss:3.591852 +step:4562 train loss:3.633927 +step:4563 train loss:3.584926 +step:4564 train loss:3.613811 +step:4565 train loss:3.610453 +step:4566 train loss:3.585441 +step:4567 train loss:3.613377 +step:4568 train loss:3.611471 +step:4569 train loss:3.596398 +step:4570 train loss:3.645271 +step:4571 train loss:3.623703 +step:4572 train loss:3.615947 +step:4573 train loss:3.626166 +step:4574 train loss:3.775239 +step:4575 train loss:3.607491 +step:4576 train loss:3.597236 +step:4577 train loss:3.634115 +step:4578 train loss:3.674348 +step:4579 train loss:3.624146 +step:4580 train loss:3.686074 +step:4581 train loss:3.623178 +step:4582 train loss:3.614935 +step:4583 train loss:3.622959 +step:4584 train loss:3.593850 +step:4585 train loss:3.674757 +step:4586 train loss:3.658800 +step:4587 train loss:3.563148 +step:4588 train loss:3.605976 +step:4589 train loss:3.678910 +step:4590 train loss:3.651404 +step:4591 train loss:3.589852 +step:4592 train loss:3.672631 +step:4593 train loss:3.594993 +step:4594 train loss:3.623543 +step:4595 train loss:3.647185 +step:4596 train loss:3.584749 +step:4597 train loss:3.721402 +step:4598 train loss:3.638350 +step:4599 train loss:3.594061 +step:4600 train loss:3.600820 +step:4601 train loss:3.624278 +step:4602 train loss:3.573454 +step:4603 train loss:3.588677 +step:4604 train loss:3.696397 +step:4605 train loss:3.613938 +step:4606 train loss:3.644678 +step:4607 train loss:3.622420 +step:4608 train loss:3.659941 +step:4609 train loss:3.615962 +step:4610 train loss:3.660798 +step:4611 train loss:3.686389 +step:4612 train loss:3.684084 +step:4613 train loss:3.664858 +step:4614 train loss:3.658458 +step:4615 train loss:3.599093 +step:4616 train loss:3.583587 +step:4617 train loss:3.625621 +step:4618 train loss:3.642322 +step:4619 train loss:3.600301 +step:4620 train loss:3.615390 +step:4621 train loss:3.617015 +step:4622 train loss:3.554494 +step:4623 train loss:3.663702 +step:4624 train loss:3.646432 +step:4625 train loss:3.604230 +step:4626 train loss:3.650257 +step:4627 train loss:3.619562 +step:4628 train loss:3.608081 +step:4629 train loss:3.646859 +step:4630 train loss:3.702645 +step:4631 train loss:3.703910 +step:4632 train loss:3.598373 +step:4633 train loss:3.610089 +step:4634 train loss:3.684395 +step:4635 train loss:3.647927 +step:4636 train loss:3.663983 +step:4637 train loss:3.600519 +step:4638 train loss:3.607733 +step:4639 train loss:3.600862 +step:4640 train loss:3.612954 +step:4641 train loss:3.621668 +step:4642 train loss:3.652930 +step:4643 train loss:3.613887 +step:4644 train loss:3.637711 +step:4645 train loss:3.656556 +step:4646 train loss:3.607845 +step:4647 train loss:3.567737 +step:4648 train loss:3.675198 +step:4649 train loss:3.685447 +step:4650 train loss:3.629724 +step:4651 train loss:3.634150 +step:4652 train loss:3.622509 +step:4653 train loss:3.681379 +step:4654 train loss:3.678603 +step:4655 train loss:3.579152 +step:4656 train loss:3.613600 +step:4657 train loss:3.667018 +step:4658 train loss:3.624142 
+step:4659 train loss:3.635667 +step:4660 train loss:3.680535 +step:4661 train loss:3.599072 +step:4662 train loss:3.616592 +step:4663 train loss:3.626949 +step:4664 train loss:3.675371 +step:4665 train loss:3.674119 +step:4666 train loss:3.669159 +step:4667 train loss:3.663268 +step:4668 train loss:3.631412 +step:4669 train loss:3.633944 +step:4670 train loss:3.665341 +step:4671 train loss:3.697723 +step:4672 train loss:3.551663 +step:4673 train loss:3.586150 +step:4674 train loss:3.716119 +step:4675 train loss:3.621063 +step:4676 train loss:3.580148 +step:4677 train loss:3.586799 +step:4678 train loss:3.557191 +step:4679 train loss:3.655164 +step:4680 train loss:3.595271 +step:4681 train loss:3.645678 +step:4682 train loss:3.593228 +step:4683 train loss:3.564014 +step:4684 train loss:3.686307 +step:4685 train loss:3.616789 +step:4686 train loss:3.626606 +step:4687 train loss:3.666473 +step:4688 train loss:3.592467 +step:4689 train loss:3.666919 +step:4690 train loss:3.611513 +step:4691 train loss:3.641347 +step:4692 train loss:3.571001 +step:4693 train loss:3.612087 +step:4694 train loss:3.651183 +step:4695 train loss:3.667220 +step:4696 train loss:3.655007 +step:4697 train loss:3.567346 +step:4698 train loss:3.584089 +step:4699 train loss:3.633510 +step:4700 train loss:3.605563 +step:4701 train loss:3.612548 +step:4702 train loss:3.567892 +step:4703 train loss:3.646747 +step:4704 train loss:3.632208 +step:4705 train loss:3.575794 +step:4706 train loss:3.583874 +step:4707 train loss:3.570354 +step:4708 train loss:3.640836 +step:4709 train loss:3.585540 +step:4710 train loss:3.598927 +step:4711 train loss:3.659129 +step:4712 train loss:3.557353 +step:4713 train loss:3.660121 +step:4714 train loss:3.559637 +step:4715 train loss:3.651011 +step:4716 train loss:3.616984 +step:4717 train loss:3.548399 +step:4718 train loss:3.638918 +step:4719 train loss:3.568054 +step:4720 train loss:3.662833 +step:4721 train loss:3.622449 +step:4722 train loss:3.673815 +step:4723 train loss:3.571194 +step:4724 train loss:3.622828 +step:4725 train loss:3.559389 +step:4726 train loss:3.603296 +step:4727 train loss:3.610016 +step:4728 train loss:3.614841 +step:4729 train loss:3.646691 +step:4730 train loss:3.547248 +step:4731 train loss:3.605039 +step:4732 train loss:3.560946 +step:4733 train loss:3.496120 +step:4734 train loss:3.633855 +step:4735 train loss:3.586181 +step:4736 train loss:3.628470 +step:4737 train loss:3.507142 +step:4738 train loss:3.658200 +step:4739 train loss:3.533302 +step:4740 train loss:3.644262 +step:4741 train loss:3.611783 +step:4742 train loss:3.573625 +step:4743 train loss:3.571972 +step:4744 train loss:3.613880 +step:4745 train loss:3.636226 +step:4746 train loss:3.670914 +step:4747 train loss:3.634186 +step:4748 train loss:3.537722 +step:4749 train loss:3.602714 +step:4750 validation loss:3.535613 +step:4750 train loss:3.546735 +step:4751 train loss:3.643729 +step:4752 train loss:3.575562 +step:4753 train loss:3.683947 +step:4754 train loss:3.552610 +step:4755 train loss:3.590881 +step:4756 train loss:3.668997 +step:4757 train loss:3.592833 +step:4758 train loss:3.611537 +step:4759 train loss:3.611633 +step:4760 train loss:3.638516 +step:4761 train loss:3.557605 +step:4762 train loss:3.588579 +step:4763 train loss:3.614074 +step:4764 train loss:3.669971 +step:4765 train loss:3.565461 +step:4766 train loss:3.585340 +step:4767 train loss:3.538524 +step:4768 train loss:3.595649 +step:4769 train loss:3.620496 +step:4770 train loss:3.578944 +step:4771 train loss:3.591740 +step:4772 
train loss:3.566440 +step:4773 train loss:3.601737 +step:4774 train loss:3.545918 +step:4775 train loss:3.675787 +step:4776 train loss:3.543139 +step:4777 train loss:3.614341 +step:4778 train loss:3.555197 +step:4779 train loss:3.603850 +step:4780 train loss:3.541866 +step:4781 train loss:3.546860 +step:4782 train loss:3.652841 +step:4783 train loss:3.644071 +step:4784 train loss:3.605648 +step:4785 train loss:3.604728 +step:4786 train loss:3.714733 +step:4787 train loss:3.552744 +step:4788 train loss:3.572736 +step:4789 train loss:3.595400 +step:4790 train loss:3.643703 +step:4791 train loss:3.611737 +step:4792 train loss:3.650742 +step:4793 train loss:3.569458 +step:4794 train loss:3.643885 +step:4795 train loss:3.595011 +step:4796 train loss:3.581722 +step:4797 train loss:3.593108 +step:4798 train loss:3.602920 +step:4799 train loss:3.597924 +step:4800 train loss:3.627444 +step:4801 train loss:3.621477 +step:4802 train loss:3.656893 +step:4803 train loss:3.639533 +step:4804 train loss:3.597178 +step:4805 train loss:3.592559 +step:4806 train loss:3.573683 +step:4807 train loss:3.678038 +step:4808 train loss:3.551648 +step:4809 train loss:3.653352 +step:4810 train loss:3.588866 +step:4811 train loss:3.610017 +step:4812 train loss:3.586460 +step:4813 train loss:3.540390 +step:4814 train loss:3.538700 +step:4815 train loss:3.530276 +step:4816 train loss:3.602670 +step:4817 train loss:3.537902 +step:4818 train loss:3.600157 +step:4819 train loss:3.597548 +step:4820 train loss:3.846478 +step:4821 train loss:3.622764 +step:4822 train loss:3.633551 +step:4823 train loss:3.565026 +step:4824 train loss:3.571986 +step:4825 train loss:3.552482 +step:4826 train loss:3.637850 +step:4827 train loss:3.585697 +step:4828 train loss:3.527359 +step:4829 train loss:3.631978 +step:4830 train loss:3.571811 +step:4831 train loss:3.721612 +step:4832 train loss:3.590744 +step:4833 train loss:3.630727 +step:4834 train loss:3.529807 +step:4835 train loss:3.621754 +step:4836 train loss:3.601332 +step:4837 train loss:3.629045 +step:4838 train loss:3.567430 +step:4839 train loss:3.631577 +step:4840 train loss:3.542085 +step:4841 train loss:3.636209 +step:4842 train loss:3.549664 +step:4843 train loss:3.628190 +step:4844 train loss:3.629625 +step:4845 train loss:3.567186 +step:4846 train loss:3.582938 +step:4847 train loss:3.568463 +step:4848 train loss:3.591878 +step:4849 train loss:3.545065 +step:4850 train loss:3.556910 +step:4851 train loss:3.551927 +step:4852 train loss:3.630148 +step:4853 train loss:3.603717 +step:4854 train loss:3.580356 +step:4855 train loss:3.646195 +step:4856 train loss:3.615261 +step:4857 train loss:3.624438 +step:4858 train loss:3.702088 +step:4859 train loss:3.545785 +step:4860 train loss:3.626899 +step:4861 train loss:3.598370 +step:4862 train loss:3.631461 +step:4863 train loss:3.568003 +step:4864 train loss:3.579904 +step:4865 train loss:3.572893 +step:4866 train loss:3.616902 +step:4867 train loss:3.584390 +step:4868 train loss:3.601860 +step:4869 train loss:3.556344 +step:4870 train loss:3.583982 +step:4871 train loss:3.667634 +step:4872 train loss:3.613414 +step:4873 train loss:3.611186 +step:4874 train loss:3.582561 +step:4875 train loss:3.548362 +step:4876 train loss:3.560967 +step:4877 train loss:3.564175 +step:4878 train loss:3.602267 +step:4879 train loss:3.565330 +step:4880 train loss:3.587897 +step:4881 train loss:3.535156 +step:4882 train loss:3.736284 +step:4883 train loss:3.547642 +step:4884 train loss:3.576578 +step:4885 train loss:3.550653 +step:4886 train loss:3.626972 
+step:4887 train loss:3.579141 +step:4888 train loss:3.590300 +step:4889 train loss:3.584919 +step:4890 train loss:3.621398 +step:4891 train loss:3.559402 +step:4892 train loss:3.566480 +step:4893 train loss:3.612648 +step:4894 train loss:3.547674 +step:4895 train loss:3.580727 +step:4896 train loss:3.562852 +step:4897 train loss:3.635743 +step:4898 train loss:3.586970 +step:4899 train loss:3.571288 +step:4900 train loss:3.613234 +step:4901 train loss:3.562983 +step:4902 train loss:3.558000 +step:4903 train loss:3.580824 +step:4904 train loss:3.591769 +step:4905 train loss:3.590396 +step:4906 train loss:3.591353 +step:4907 train loss:3.662996 +step:4908 train loss:3.569386 +step:4909 train loss:3.574863 +step:4910 train loss:3.596844 +step:4911 train loss:3.647838 +step:4912 train loss:3.626598 +step:4913 train loss:3.602754 +step:4914 train loss:3.591291 +step:4915 train loss:3.573036 +step:4916 train loss:3.520072 +step:4917 train loss:3.541455 +step:4918 train loss:3.570012 +step:4919 train loss:3.565433 +step:4920 train loss:3.572938 +step:4921 train loss:3.726777 +step:4922 train loss:3.621747 +step:4923 train loss:3.636360 +step:4924 train loss:3.637185 +step:4925 train loss:3.569804 +step:4926 train loss:3.563880 +step:4927 train loss:3.592032 +step:4928 train loss:3.634306 +step:4929 train loss:3.593252 +step:4930 train loss:3.577683 +step:4931 train loss:3.567035 +step:4932 train loss:3.576299 +step:4933 train loss:3.568652 +step:4934 train loss:3.632412 +step:4935 train loss:3.618145 +step:4936 train loss:3.581223 +step:4937 train loss:3.692689 +step:4938 train loss:3.679183 +step:4939 train loss:3.543173 +step:4940 train loss:3.621174 +step:4941 train loss:3.521755 +step:4942 train loss:3.563938 +step:4943 train loss:3.564971 +step:4944 train loss:3.570051 +step:4945 train loss:3.613769 +step:4946 train loss:3.589865 +step:4947 train loss:3.573524 +step:4948 train loss:3.607841 +step:4949 train loss:3.513942 +step:4950 train loss:3.598034 +step:4951 train loss:3.648360 +step:4952 train loss:3.588196 +step:4953 train loss:3.619746 +step:4954 train loss:3.527201 +step:4955 train loss:3.599336 +step:4956 train loss:3.628842 +step:4957 train loss:3.622344 +step:4958 train loss:3.536033 +step:4959 train loss:3.654505 +step:4960 train loss:3.579721 +step:4961 train loss:3.602036 +step:4962 train loss:3.560317 +step:4963 train loss:3.607753 +step:4964 train loss:3.557424 +step:4965 train loss:3.714133 +step:4966 train loss:3.560784 +step:4967 train loss:3.666919 +step:4968 train loss:3.558615 +step:4969 train loss:3.602602 +step:4970 train loss:3.591016 +step:4971 train loss:3.541346 +step:4972 train loss:3.587127 +step:4973 train loss:3.594848 +step:4974 train loss:3.586779 +step:4975 train loss:3.666321 +step:4976 train loss:3.647429 +step:4977 train loss:3.595601 +step:4978 train loss:3.580868 +step:4979 train loss:3.581432 +step:4980 train loss:3.689397 +step:4981 train loss:3.526292 +step:4982 train loss:3.605793 +step:4983 train loss:3.533951 +step:4984 train loss:3.720198 +step:4985 train loss:3.616924 +step:4986 train loss:3.558858 +step:4987 train loss:3.578374 +step:4988 train loss:3.780766 +step:4989 train loss:3.583213 +step:4990 train loss:3.576750 +step:4991 train loss:3.591557 +step:4992 train loss:3.575882 +step:4993 train loss:3.553327 +step:4994 train loss:3.664587 +step:4995 train loss:3.589748 +step:4996 train loss:3.677398 +step:4997 train loss:3.576848 +step:4998 train loss:3.580485 +step:4999 train loss:3.562462 +step:5000 validation loss:3.531006 
total_sharp:4.1992e-03 L1_sharp:4.4686e-03 L2_sharp:6.3906e-04 L3_sharp:1.0842e-03 L4_sharp:7.3414e-04 L5_sharp:8.2662e-04 L6_sharp:9.1579e-04 L7_sharp:1.4463e-03 L8_sharp:1.1312e-03 L9_sharp:9.2140e-04 L10_sharp:7.7511e-04 L11_sharp:9.1205e-04 L12_sharp:2.3078e-03 total_fnorm:2.2089e+00 total_l1_linf:1.9630e+04 total_spectral:2.2089e+00 L1_fnorm:4.7918e-01 L2_fnorm:4.9678e-01 L3_fnorm:5.0631e-01 L4_fnorm:5.1621e-01 L5_fnorm:5.1525e-01 L6_fnorm:5.2199e-01 L7_fnorm:5.1864e-01 L8_fnorm:5.2419e-01 L9_fnorm:5.2115e-01 L10_fnorm:5.1877e-01 L11_fnorm:5.1597e-01 L12_fnorm:5.0247e-01 L1_l1linf:5.6036e-01 L2_l1linf:5.5923e-01 L3_l1linf:5.6775e-01 L4_l1linf:5.6284e-01 L5_l1linf:6.0400e-01 L6_l1linf:5.8422e-01 L7_l1linf:5.5252e-01 L8_l1linf:5.3654e-01 L9_l1linf:5.3649e-01 L10_l1linf:5.4178e-01 L11_l1linf:5.5617e-01 L12_l1linf:5.6360e-01 L1_spectral:7.7434e-02 L2_spectral:8.0965e-02 L3_spectral:7.5905e-02 L4_spectral:6.0060e-02 L5_spectral:7.0210e-02 L6_spectral:6.1781e-02 L7_spectral:5.2873e-02 L8_spectral:4.5944e-02 L9_spectral:4.4757e-02 L10_spectral:5.7017e-02 L11_spectral:6.7327e-02 L12_spectral:7.2639e-02 ip_v_neg_g:1.2713e-02 cos_v_neg_g:6.4211e-03 v_norm:2.2089e+00 g_norm:8.9631e-01 hv_norm:3.0901e-01 cos_v_hv:3.0017e-02 hg_norm:7.7642e+00 cos_g_hg:5.3624e-01 v_par:5.7540e-04 v_perp:2.2089e+00 L1_cos_v_neg_g:1.1372e-02 L1_v_norm:4.7918e-01 L2_cos_v_neg_g:6.4676e-03 L2_v_norm:4.9678e-01 L3_cos_v_neg_g:1.0045e-02 L3_v_norm:5.0631e-01 L4_cos_v_neg_g:7.1152e-03 L4_v_norm:5.1621e-01 L5_cos_v_neg_g:9.0902e-03 L5_v_norm:5.1525e-01 L6_cos_v_neg_g:7.5678e-03 L6_v_norm:5.2199e-01 L7_cos_v_neg_g:7.7079e-03 L7_v_norm:5.1864e-01 L8_cos_v_neg_g:7.2761e-03 L8_v_norm:5.2419e-01 L9_cos_v_neg_g:7.3252e-03 L9_v_norm:5.2115e-01 L10_cos_v_neg_g:7.2918e-03 L10_v_norm:5.1877e-01 L11_cos_v_neg_g:8.9775e-03 L11_v_norm:5.1597e-01 L12_cos_v_neg_g:1.2614e-02 L12_v_norm:5.0247e-01 +step:5000 train loss:3.676868 +step:5001 train loss:3.543580 +step:5002 train loss:3.595689 +step:5003 train loss:3.592591 +step:5004 train loss:3.586619 +step:5005 train loss:3.584229 +step:5006 train loss:3.624848 +step:5007 train loss:3.625694 +step:5008 train loss:3.564236 +step:5009 train loss:3.606712 +step:5010 train loss:3.559136 +step:5011 train loss:3.586617 +step:5012 train loss:3.563036 +step:5013 train loss:3.665093 +step:5014 train loss:3.577535 +step:5015 train loss:3.655249 +step:5016 train loss:3.582782 +step:5017 train loss:3.628342 +step:5018 train loss:3.548856 +step:5019 train loss:3.580476 +step:5020 train loss:3.573276 +step:5021 train loss:3.588424 +step:5022 train loss:3.622529 +step:5023 train loss:3.595345 +step:5024 train loss:3.647090 +step:5025 train loss:3.536086 +step:5026 train loss:3.655281 +step:5027 train loss:3.590446 +step:5028 train loss:3.657090 +step:5029 train loss:3.552894 +step:5030 train loss:3.588203 +step:5031 train loss:3.576914 +step:5032 train loss:3.606377 +step:5033 train loss:3.588933 +step:5034 train loss:3.587706 +step:5035 train loss:3.670793 +step:5036 train loss:3.621099 +step:5037 train loss:3.570608 +step:5038 train loss:3.622244 +step:5039 train loss:3.632798 +step:5040 train loss:3.596404 +step:5041 train loss:3.612041 +step:5042 train loss:3.515446 +step:5043 train loss:3.655842 +step:5044 train loss:3.577251 +step:5045 train loss:3.623733 +step:5046 train loss:3.546754 +step:5047 train loss:3.621584 +step:5048 train loss:3.538097 +step:5049 train loss:3.672731 +step:5050 train loss:3.556831 +step:5051 train loss:3.604408 +step:5052 train loss:3.502709 +step:5053 train loss:3.685089 
+step:5054 train loss:3.571514 +step:5055 train loss:3.597766 +step:5056 train loss:3.633852 +step:5057 train loss:3.564158 +step:5058 train loss:3.594834 +step:5059 train loss:3.558111 +step:5060 train loss:3.601784 +step:5061 train loss:3.596295 +step:5062 train loss:3.567755 +step:5063 train loss:3.562402 +step:5064 train loss:3.569482 +step:5065 train loss:3.552548 +step:5066 train loss:3.615587 +step:5067 train loss:3.597931 +step:5068 train loss:3.583327 +step:5069 train loss:3.554106 +step:5070 train loss:3.585281 +step:5071 train loss:3.653121 +step:5072 train loss:3.545638 +step:5073 train loss:3.552800 +step:5074 train loss:3.499759 +step:5075 train loss:3.570027 +step:5076 train loss:3.500170 +step:5077 train loss:3.563433 +step:5078 train loss:3.586285 +step:5079 train loss:3.606206 +step:5080 train loss:3.583455 +step:5081 train loss:3.592601 +step:5082 train loss:3.588279 +step:5083 train loss:3.651446 +step:5084 train loss:3.630639 +step:5085 train loss:3.595177 +step:5086 train loss:3.667274 +step:5087 train loss:3.650902 +step:5088 train loss:3.570575 +step:5089 train loss:3.634789 +step:5090 train loss:3.579277 +step:5091 train loss:3.579895 +step:5092 train loss:3.680372 +step:5093 train loss:3.560719 +step:5094 train loss:3.560132 +step:5095 train loss:3.613697 +step:5096 train loss:3.578824 +step:5097 train loss:3.586684 +step:5098 train loss:3.597519 +step:5099 train loss:3.551556 +step:5100 train loss:3.563828 +step:5101 train loss:3.757918 +step:5102 train loss:3.600074 +step:5103 train loss:3.607533 +step:5104 train loss:3.658315 +step:5105 train loss:3.603139 +step:5106 train loss:3.553211 +step:5107 train loss:3.575410 +step:5108 train loss:3.564701 +step:5109 train loss:3.647670 +step:5110 train loss:3.553704 +step:5111 train loss:3.648627 +step:5112 train loss:3.557207 +step:5113 train loss:3.539212 +step:5114 train loss:3.583592 +step:5115 train loss:3.545595 +step:5116 train loss:3.603011 +step:5117 train loss:3.552790 +step:5118 train loss:3.574592 +step:5119 train loss:3.558064 +step:5120 train loss:3.600514 +step:5121 train loss:3.547518 +step:5122 train loss:3.558740 +step:5123 train loss:3.551653 +step:5124 train loss:3.506815 +step:5125 train loss:3.614495 +step:5126 train loss:3.607819 +step:5127 train loss:3.605585 +step:5128 train loss:3.619243 +step:5129 train loss:3.547120 +step:5130 train loss:3.559201 +step:5131 train loss:3.499456 +step:5132 train loss:3.616666 +step:5133 train loss:3.589422 +step:5134 train loss:3.587599 +step:5135 train loss:3.542075 +step:5136 train loss:3.604761 +step:5137 train loss:3.605663 +step:5138 train loss:3.585813 +step:5139 train loss:3.619415 +step:5140 train loss:3.594594 +step:5141 train loss:3.627374 +step:5142 train loss:3.575466 +step:5143 train loss:3.601039 +step:5144 train loss:3.602102 +step:5145 train loss:3.541957 +step:5146 train loss:3.534399 +step:5147 train loss:3.610617 +step:5148 train loss:3.566028 +step:5149 train loss:3.616257 +step:5150 train loss:3.595557 +step:5151 train loss:3.559979 +step:5152 train loss:3.605619 +step:5153 train loss:3.579474 +step:5154 train loss:3.589484 +step:5155 train loss:3.601452 +step:5156 train loss:3.578200 +step:5157 train loss:3.576756 +step:5158 train loss:3.597366 +step:5159 train loss:3.633077 +step:5160 train loss:3.700248 +step:5161 train loss:3.628900 +step:5162 train loss:3.648327 +step:5163 train loss:3.558883 +step:5164 train loss:3.643054 +step:5165 train loss:3.636073 +step:5166 train loss:3.580787 +step:5167 train loss:3.674142 +step:5168 train 
loss:3.601697 +step:5169 train loss:3.622965 +step:5170 train loss:3.602791 +step:5171 train loss:3.646937 +step:5172 train loss:3.563140 +step:5173 train loss:3.625605 +step:5174 train loss:3.562130 +step:5175 train loss:3.593792 +step:5176 train loss:3.581901 +step:5177 train loss:3.584778 +step:5178 train loss:3.644377 +step:5179 train loss:3.556389 +step:5180 train loss:3.635615 +step:5181 train loss:3.579764 +step:5182 train loss:3.638861 +step:5183 train loss:3.567070 +step:5184 train loss:3.546597 +step:5185 train loss:3.571902 +step:5186 train loss:3.628801 +step:5187 train loss:3.622489 +step:5188 train loss:3.554685 +step:5189 train loss:3.598491 +step:5190 train loss:3.582240 +step:5191 train loss:3.566849 +step:5192 train loss:3.546392 +step:5193 train loss:3.632936 +step:5194 train loss:3.580352 +step:5195 train loss:3.553514 +step:5196 train loss:3.623914 +step:5197 train loss:3.683769 +step:5198 train loss:3.583597 +step:5199 train loss:3.571639 +step:5200 train loss:3.596644 +step:5201 train loss:3.587360 +step:5202 train loss:3.592190 +step:5203 train loss:3.591765 +step:5204 train loss:3.564108 +step:5205 train loss:3.608565 +step:5206 train loss:3.545591 +step:5207 train loss:3.552001 +step:5208 train loss:3.612670 +step:5209 train loss:3.629697 +step:5210 train loss:3.543587 +step:5211 train loss:3.585439 +step:5212 train loss:3.601043 +step:5213 train loss:3.574090 +step:5214 train loss:3.621552 +step:5215 train loss:3.732497 +step:5216 train loss:3.583837 +step:5217 train loss:3.564185 +step:5218 train loss:3.566948 +step:5219 train loss:3.628584 +step:5220 train loss:3.546304 +step:5221 train loss:3.549654 +step:5222 train loss:3.629909 +step:5223 train loss:3.622076 +step:5224 train loss:3.521242 +step:5225 train loss:3.668299 +step:5226 train loss:3.580341 +step:5227 train loss:3.654490 +step:5228 train loss:3.623870 +step:5229 train loss:3.565492 +step:5230 train loss:3.578728 +step:5231 train loss:3.526555 +step:5232 train loss:3.648772 +step:5233 train loss:3.610061 +step:5234 train loss:3.611700 +step:5235 train loss:3.560745 +step:5236 train loss:3.635329 +step:5237 train loss:3.690534 +step:5238 train loss:3.592698 +step:5239 train loss:3.653187 +step:5240 train loss:3.535995 +step:5241 train loss:3.596950 +step:5242 train loss:3.567514 +step:5243 train loss:3.573103 +step:5244 train loss:3.570302 +step:5245 train loss:3.614643 +step:5246 train loss:3.656498 +step:5247 train loss:3.583968 +step:5248 train loss:3.555100 +step:5249 train loss:3.613982 +step:5250 validation loss:3.512014 +step:5250 train loss:3.583448 +step:5251 train loss:3.646241 +step:5252 train loss:3.537406 +step:5253 train loss:3.690349 +step:5254 train loss:3.564971 +step:5255 train loss:3.634365 +step:5256 train loss:3.550207 +step:5257 train loss:3.605046 +step:5258 train loss:3.603450 +step:5259 train loss:3.589372 +step:5260 train loss:3.581659 +step:5261 train loss:3.570993 +step:5262 train loss:3.615190 +step:5263 train loss:3.602020 +step:5264 train loss:3.554451 +step:5265 train loss:3.629612 +step:5266 train loss:3.548239 +step:5267 train loss:3.561734 +step:5268 train loss:3.542002 +step:5269 train loss:3.544231 +step:5270 train loss:3.596589 +step:5271 train loss:3.521567 +step:5272 train loss:3.613746 +step:5273 train loss:3.518664 +step:5274 train loss:3.571432 +step:5275 train loss:3.583458 +step:5276 train loss:3.713006 +step:5277 train loss:3.612274 +step:5278 train loss:3.555492 +step:5279 train loss:3.603506 +step:5280 train loss:3.578898 +step:5281 train loss:3.575429 
+step:5282 train loss:3.545234 +step:5283 train loss:3.548280 +step:5284 train loss:3.556619 +step:5285 train loss:3.622117 +step:5286 train loss:3.532461 +step:5287 train loss:3.632597 +step:5288 train loss:3.606176 +step:5289 train loss:3.575808 +step:5290 train loss:3.629638 +step:5291 train loss:3.582409 +step:5292 train loss:3.599974 +step:5293 train loss:3.570286 +step:5294 train loss:3.556670 +step:5295 train loss:3.561914 +step:5296 train loss:3.557547 +step:5297 train loss:3.576795 +step:5298 train loss:3.522725 +step:5299 train loss:3.615826 +step:5300 train loss:3.565272 +step:5301 train loss:3.634526 +step:5302 train loss:3.641553 +step:5303 train loss:3.501091 +step:5304 train loss:3.534578 +step:5305 train loss:3.512659 +step:5306 train loss:3.546723 +step:5307 train loss:3.552741 +step:5308 train loss:3.644877 +step:5309 train loss:3.592479 +step:5310 train loss:3.579358 +step:5311 train loss:3.650150 +step:5312 train loss:3.533934 +step:5313 train loss:3.621524 +step:5314 train loss:3.613472 +step:5315 train loss:3.574202 +step:5316 train loss:3.605820 +step:5317 train loss:3.624303 +step:5318 train loss:3.578211 +step:5319 train loss:3.606108 +step:5320 train loss:3.556064 +step:5321 train loss:3.678078 +step:5322 train loss:3.590149 +step:5323 train loss:3.591776 +step:5324 train loss:3.532258 +step:5325 train loss:3.614891 +step:5326 train loss:3.607261 +step:5327 train loss:3.495394 +step:5328 train loss:3.633963 +step:5329 train loss:3.596311 +step:5330 train loss:3.599459 +step:5331 train loss:3.644949 +step:5332 train loss:3.569822 +step:5333 train loss:3.633116 +step:5334 train loss:3.606238 +step:5335 train loss:3.666834 +step:5336 train loss:3.704771 +step:5337 train loss:3.538795 +step:5338 train loss:3.542641 +step:5339 train loss:3.569706 +step:5340 train loss:3.592442 +step:5341 train loss:3.605951 +step:5342 train loss:3.505780 +step:5343 train loss:3.664879 +step:5344 train loss:3.545598 +step:5345 train loss:3.549860 +step:5346 train loss:3.549772 +step:5347 train loss:3.573081 +step:5348 train loss:3.615514 +step:5349 train loss:3.555291 +step:5350 train loss:3.595226 +step:5351 train loss:3.668286 +step:5352 train loss:3.706826 +step:5353 train loss:3.618040 +step:5354 train loss:3.588202 +step:5355 train loss:3.554658 +step:5356 train loss:3.575516 +step:5357 train loss:3.558989 +step:5358 train loss:3.578305 +step:5359 train loss:3.588965 +step:5360 train loss:3.562587 +step:5361 train loss:3.567604 +step:5362 train loss:3.549486 +step:5363 train loss:3.545612 +step:5364 train loss:3.547132 +step:5365 train loss:3.580990 +step:5366 train loss:3.611972 +step:5367 train loss:3.538388 +step:5368 train loss:3.609863 +step:5369 train loss:3.623924 +step:5370 train loss:3.527961 +step:5371 train loss:3.576390 +step:5372 train loss:3.599229 +step:5373 train loss:3.637618 +step:5374 train loss:3.522134 +step:5375 train loss:3.567598 +step:5376 train loss:3.632155 +step:5377 train loss:3.570347 +step:5378 train loss:3.543070 +step:5379 train loss:3.548342 +step:5380 train loss:3.585742 +step:5381 train loss:3.622730 +step:5382 train loss:3.539293 +step:5383 train loss:3.591612 +step:5384 train loss:3.609405 +step:5385 train loss:3.612484 +step:5386 train loss:3.589503 +step:5387 train loss:3.598970 +step:5388 train loss:3.607707 +step:5389 train loss:3.541895 +step:5390 train loss:3.566203 +step:5391 train loss:3.507751 +step:5392 train loss:3.573225 +step:5393 train loss:3.558717 +step:5394 train loss:3.558872 +step:5395 train loss:3.634225 +step:5396 train 
loss:3.598325 +step:5397 train loss:3.614536 +step:5398 train loss:3.611756 +step:5399 train loss:3.644249 +step:5400 train loss:3.647830 +step:5401 train loss:3.608244 +step:5402 train loss:3.719003 +step:5403 train loss:3.619621 +step:5404 train loss:3.594740 +step:5405 train loss:3.666694 +step:5406 train loss:3.625831 +step:5407 train loss:3.552962 +step:5408 train loss:3.701429 +step:5409 train loss:3.537156 +step:5410 train loss:3.601321 +step:5411 train loss:3.586210 +step:5412 train loss:3.562242 +step:5413 train loss:3.611173 +step:5414 train loss:3.590258 +step:5415 train loss:3.568246 +step:5416 train loss:3.562203 +step:5417 train loss:3.630161 +step:5418 train loss:3.649276 +step:5419 train loss:3.550880 +step:5420 train loss:3.612066 +step:5421 train loss:3.579775 +step:5422 train loss:3.626346 +step:5423 train loss:3.599848 +step:5424 train loss:3.501233 +step:5425 train loss:3.571662 +step:5426 train loss:3.656251 +step:5427 train loss:3.551338 +step:5428 train loss:3.590392 +step:5429 train loss:3.521895 +step:5430 train loss:3.556314 +step:5431 train loss:3.618220 +step:5432 train loss:3.596612 +step:5433 train loss:3.601952 +step:5434 train loss:3.552938 +step:5435 train loss:3.551382 +step:5436 train loss:3.551509 +step:5437 train loss:3.591791 +step:5438 train loss:3.569890 +step:5439 train loss:3.574428 +step:5440 train loss:3.618095 +step:5441 train loss:3.640158 +step:5442 train loss:3.565150 +step:5443 train loss:3.559172 +step:5444 train loss:3.505794 +step:5445 train loss:3.595118 +step:5446 train loss:3.561116 +step:5447 train loss:3.598371 +step:5448 train loss:3.653073 +step:5449 train loss:3.541703 +step:5450 train loss:3.577785 +step:5451 train loss:3.569736 +step:5452 train loss:3.585599 +step:5453 train loss:3.640626 +step:5454 train loss:3.565428 +step:5455 train loss:3.560635 +step:5456 train loss:3.692084 +step:5457 train loss:3.577679 +step:5458 train loss:3.607283 +step:5459 train loss:3.552176 +step:5460 train loss:3.567616 +step:5461 train loss:3.571103 +step:5462 train loss:3.574731 +step:5463 train loss:3.579917 +step:5464 train loss:3.585209 +step:5465 train loss:3.531488 +step:5466 train loss:3.602354 +step:5467 train loss:3.583700 +step:5468 train loss:3.590410 +step:5469 train loss:3.686062 +step:5470 train loss:3.578181 +step:5471 train loss:3.653228 +step:5472 train loss:3.599145 +step:5473 train loss:3.504776 +step:5474 train loss:3.840385 +step:5475 train loss:3.512373 +step:5476 train loss:3.592131 +step:5477 train loss:3.593807 +step:5478 train loss:3.589663 +step:5479 train loss:3.733799 +step:5480 train loss:3.579100 +step:5481 train loss:3.642996 +step:5482 train loss:3.554752 +step:5483 train loss:3.588972 +step:5484 train loss:3.628731 +step:5485 train loss:3.543936 +step:5486 train loss:3.592974 +step:5487 train loss:3.593868 +step:5488 train loss:3.505959 +step:5489 train loss:3.608090 +step:5490 train loss:3.554234 +step:5491 train loss:3.659078 +step:5492 train loss:3.585574 +step:5493 train loss:3.515366 +step:5494 train loss:3.568790 +step:5495 train loss:3.547566 +step:5496 train loss:3.548164 +step:5497 train loss:3.665300 +step:5498 train loss:3.535559 +step:5499 train loss:3.668110 +step:5500 validation loss:3.509552 total_sharp:3.4424e-03 L1_sharp:4.4483e-03 L2_sharp:3.5965e-04 L3_sharp:7.2486e-04 L4_sharp:5.8182e-04 L5_sharp:7.4668e-04 L6_sharp:5.7844e-04 L7_sharp:9.2456e-04 L8_sharp:9.3597e-04 L9_sharp:7.9807e-04 L10_sharp:7.1429e-04 L11_sharp:7.4698e-04 L12_sharp:2.6369e-03 total_fnorm:2.2174e+00 
total_l1_linf:1.9723e+04 total_spectral:2.2174e+00 L1_fnorm:4.7747e-01 L2_fnorm:4.9459e-01 L3_fnorm:5.0714e-01 L4_fnorm:5.2084e-01 L5_fnorm:5.1720e-01 L6_fnorm:5.2570e-01 L7_fnorm:5.2007e-01 L8_fnorm:5.2732e-01 L9_fnorm:5.2480e-01 L10_fnorm:5.2487e-01 L11_fnorm:5.2401e-01 L12_fnorm:5.1069e-01 L1_l1linf:5.8907e-01 L2_l1linf:5.8401e-01 L3_l1linf:6.0017e-01 L4_l1linf:5.8928e-01 L5_l1linf:6.8134e-01 L6_l1linf:5.4949e-01 L7_l1linf:5.3538e-01 L8_l1linf:5.6034e-01 L9_l1linf:5.3670e-01 L10_l1linf:5.5407e-01 L11_l1linf:6.0656e-01 L12_l1linf:6.1408e-01 L1_spectral:8.4956e-02 L2_spectral:7.8921e-02 L3_spectral:7.3923e-02 L4_spectral:6.8207e-02 L5_spectral:7.4156e-02 L6_spectral:6.0878e-02 L7_spectral:5.4263e-02 L8_spectral:4.7849e-02 L9_spectral:4.5069e-02 L10_spectral:5.9641e-02 L11_spectral:7.1420e-02 L12_spectral:8.8460e-02 ip_v_neg_g:1.1446e-02 cos_v_neg_g:4.0011e-03 v_norm:2.2174e+00 g_norm:1.2901e+00 hv_norm:3.4410e-01 cos_v_hv:2.2183e-02 hg_norm:1.4259e+01 cos_g_hg:5.6763e-01 v_par:3.3648e-04 v_perp:2.2174e+00 L1_cos_v_neg_g:7.8747e-03 L1_v_norm:4.7747e-01 L2_cos_v_neg_g:6.8379e-03 L2_v_norm:4.9459e-01 L3_cos_v_neg_g:5.8124e-03 L3_v_norm:5.0714e-01 L4_cos_v_neg_g:3.1720e-03 L4_v_norm:5.2084e-01 L5_cos_v_neg_g:4.4147e-03 L5_v_norm:5.1720e-01 L6_cos_v_neg_g:3.6008e-03 L6_v_norm:5.2570e-01 L7_cos_v_neg_g:5.5504e-03 L7_v_norm:5.2007e-01 L8_cos_v_neg_g:6.2497e-03 L8_v_norm:5.2732e-01 L9_cos_v_neg_g:5.5891e-03 L9_v_norm:5.2480e-01 L10_cos_v_neg_g:4.8698e-03 L10_v_norm:5.2487e-01 L11_cos_v_neg_g:5.1657e-03 L11_v_norm:5.2401e-01 L12_cos_v_neg_g:6.6937e-03 L12_v_norm:5.1069e-01 +step:5500 train loss:3.582799 +step:5501 train loss:3.659074 +step:5502 train loss:3.608428 +step:5503 train loss:3.572829 +step:5504 train loss:3.617986 +step:5505 train loss:3.581236 +step:5506 train loss:3.622633 +step:5507 train loss:3.609120 +step:5508 train loss:3.634709 +step:5509 train loss:3.642655 +step:5510 train loss:3.614429 +step:5511 train loss:3.609273 +step:5512 train loss:3.735316 +step:5513 train loss:3.532084 +step:5514 train loss:3.595789 +step:5515 train loss:3.621560 +step:5516 train loss:3.641588 +step:5517 train loss:3.601079 +step:5518 train loss:3.625519 +step:5519 train loss:3.662188 +step:5520 train loss:3.567845 +step:5521 train loss:3.580544 +step:5522 train loss:3.551681 +step:5523 train loss:3.594496 +step:5524 train loss:3.639575 +step:5525 train loss:3.549210 +step:5526 train loss:3.562084 +step:5527 train loss:3.584649 +step:5528 train loss:3.692281 +step:5529 train loss:3.655601 +step:5530 train loss:3.620221 +step:5531 train loss:3.556146 +step:5532 train loss:3.583648 +step:5533 train loss:3.617409 +step:5534 train loss:3.529587 +step:5535 train loss:3.580401 +step:5536 train loss:3.521193 +step:5537 train loss:3.567837 +step:5538 train loss:3.559283 +step:5539 train loss:3.502845 +step:5540 train loss:3.727532 +step:5541 train loss:3.542153 +step:5542 train loss:3.591191 +step:5543 train loss:3.580924 +step:5544 train loss:3.571645 +step:5545 train loss:3.562271 +step:5546 train loss:3.597606 +step:5547 train loss:3.528518 +step:5548 train loss:3.569926 +step:5549 train loss:3.574515 +step:5550 train loss:3.596403 +step:5551 train loss:3.600920 +step:5552 train loss:3.556267 +step:5553 train loss:3.587792 +step:5554 train loss:3.557924 +step:5555 train loss:3.563786 +step:5556 train loss:3.580712 +step:5557 train loss:3.646760 +step:5558 train loss:3.566843 +step:5559 train loss:3.573563 +step:5560 train loss:3.565884 +step:5561 train loss:3.601796 +step:5562 train loss:3.553816 
+step:5563 train loss:3.536755 +step:5564 train loss:3.573935 +step:5565 train loss:3.637163 +step:5566 train loss:3.538282 +step:5567 train loss:3.658130 +step:5568 train loss:3.775234 +step:5569 train loss:3.567083 +step:5570 train loss:3.498678 +step:5571 train loss:3.586891 +step:5572 train loss:3.526119 +step:5573 train loss:3.518176 +step:5574 train loss:3.483387 +step:5575 train loss:3.582832 +step:5576 train loss:3.566735 +step:5577 train loss:3.570641 +step:5578 train loss:3.602036 +step:5579 train loss:3.555496 +step:5580 train loss:3.580991 +step:5581 train loss:3.602907 +step:5582 train loss:3.579579 +step:5583 train loss:3.591639 +step:5584 train loss:3.714338 +step:5585 train loss:3.622149 +step:5586 train loss:3.553682 +step:5587 train loss:3.586836 +step:5588 train loss:3.604053 +step:5589 train loss:3.600303 +step:5590 train loss:3.659736 +step:5591 train loss:3.527752 +step:5592 train loss:3.710654 +step:5593 train loss:3.580712 +step:5594 train loss:3.588145 +step:5595 train loss:3.579699 +step:5596 train loss:3.531738 +step:5597 train loss:3.547237 +step:5598 train loss:3.552464 +step:5599 train loss:3.561322 +step:5600 train loss:3.603272 +step:5601 train loss:3.626841 +step:5602 train loss:3.561537 +step:5603 train loss:3.600548 +step:5604 train loss:3.595485 +step:5605 train loss:3.569177 +step:5606 train loss:3.574348 +step:5607 train loss:3.601269 +step:5608 train loss:3.545822 +step:5609 train loss:3.597188 +step:5610 train loss:3.553975 +step:5611 train loss:3.591643 +step:5612 train loss:3.622954 +step:5613 train loss:3.582213 +step:5614 train loss:3.549255 +step:5615 train loss:3.648201 +step:5616 train loss:3.545224 +step:5617 train loss:3.633878 +step:5618 train loss:3.618455 +step:5619 train loss:3.574393 +step:5620 train loss:3.574563 +step:5621 train loss:3.646515 +step:5622 train loss:3.530933 +step:5623 train loss:3.567085 +step:5624 train loss:3.553852 +step:5625 train loss:3.593446 +step:5626 train loss:3.585745 +step:5627 train loss:3.557137 +step:5628 train loss:3.598830 +step:5629 train loss:3.578391 +step:5630 train loss:3.507817 +step:5631 train loss:3.552489 +step:5632 train loss:3.593613 +step:5633 train loss:3.586123 +step:5634 train loss:3.542354 +step:5635 train loss:3.579657 +step:5636 train loss:3.557842 +step:5637 train loss:3.696936 +step:5638 train loss:3.608288 +step:5639 train loss:3.585595 +step:5640 train loss:3.591001 +step:5641 train loss:3.626127 +step:5642 train loss:3.561450 +step:5643 train loss:3.579030 +step:5644 train loss:3.660621 +step:5645 train loss:3.615329 +step:5646 train loss:3.613422 +step:5647 train loss:3.605831 +step:5648 train loss:3.592222 +step:5649 train loss:3.506035 +step:5650 train loss:3.510832 +step:5651 train loss:3.585476 +step:5652 train loss:3.589074 +step:5653 train loss:3.553879 +step:5654 train loss:3.680824 +step:5655 train loss:3.546404 +step:5656 train loss:3.569260 +step:5657 train loss:3.635383 +step:5658 train loss:3.537498 +step:5659 train loss:3.576091 +step:5660 train loss:3.625633 +step:5661 train loss:3.564966 +step:5662 train loss:3.605208 +step:5663 train loss:3.493632 +step:5664 train loss:3.468190 +step:5665 train loss:3.588151 +step:5666 train loss:3.591909 +step:5667 train loss:3.628110 +step:5668 train loss:3.558703 +step:5669 train loss:3.573380 +step:5670 train loss:3.571052 +step:5671 train loss:3.556776 +step:5672 train loss:3.607412 +step:5673 train loss:3.574110 +step:5674 train loss:3.643243 +step:5675 train loss:3.561082 +step:5676 train loss:3.706943 +step:5677 train 
loss:3.605739 +step:5678 train loss:3.583955 +step:5679 train loss:3.577889 +step:5680 train loss:3.607199 +step:5681 train loss:3.576305 +step:5682 train loss:3.589386 +step:5683 train loss:3.546102 +step:5684 train loss:3.558876 +step:5685 train loss:3.598137 +step:5686 train loss:3.614037 +step:5687 train loss:3.558811 +step:5688 train loss:3.650974 +step:5689 train loss:3.555643 +step:5690 train loss:3.706016 +step:5691 train loss:3.536487 +step:5692 train loss:3.526462 +step:5693 train loss:3.533031 +step:5694 train loss:3.551899 +step:5695 train loss:3.567285 +step:5696 train loss:3.617610 +step:5697 train loss:3.542027 +step:5698 train loss:3.563602 +step:5699 train loss:3.576837 +step:5700 train loss:3.572510 +step:5701 train loss:3.570913 +step:5702 train loss:3.633152 +step:5703 train loss:3.534485 +step:5704 train loss:3.574001 +step:5705 train loss:3.585666 +step:5706 train loss:3.609445 +step:5707 train loss:3.527151 +step:5708 train loss:3.612284 +step:5709 train loss:3.615329 +step:5710 train loss:3.606746 +step:5711 train loss:3.628893 +step:5712 train loss:3.609942 +step:5713 train loss:3.534910 +step:5714 train loss:3.618439 +step:5715 train loss:3.577084 +step:5716 train loss:3.578444 +step:5717 train loss:3.605722 +step:5718 train loss:3.549407 +step:5719 train loss:3.621115 +step:5720 train loss:3.593841 +step:5721 train loss:3.524526 +step:5722 train loss:3.536596 +step:5723 train loss:3.615303 +step:5724 train loss:3.536673 +step:5725 train loss:3.605280 +step:5726 train loss:3.602177 +step:5727 train loss:3.558655 +step:5728 train loss:3.563616 +step:5729 train loss:3.563473 +step:5730 train loss:3.637922 +step:5731 train loss:3.503401 +step:5732 train loss:3.563929 +step:5733 train loss:3.557343 +step:5734 train loss:3.571124 +step:5735 train loss:3.563775 +step:5736 train loss:3.565157 +step:5737 train loss:3.586433 +step:5738 train loss:3.551723 +step:5739 train loss:3.563512 +step:5740 train loss:3.601986 +step:5741 train loss:3.577485 +step:5742 train loss:3.629287 +step:5743 train loss:3.594860 +step:5744 train loss:3.555752 +step:5745 train loss:3.560077 +step:5746 train loss:3.590395 +step:5747 train loss:3.572924 +step:5748 train loss:3.621416 +step:5749 train loss:3.576169 +step:5750 validation loss:3.499821 +step:5750 train loss:3.581563 +step:5751 train loss:3.594313 +step:5752 train loss:3.579801 +step:5753 train loss:3.553291 +step:5754 train loss:3.560213 +step:5755 train loss:3.577245 +step:5756 train loss:3.566385 +step:5757 train loss:3.628199 +step:5758 train loss:3.560424 +step:5759 train loss:3.525823 +step:5760 train loss:3.607191 +step:5761 train loss:3.601526 +step:5762 train loss:3.561028 +step:5763 train loss:3.586301 +step:5764 train loss:3.548273 +step:5765 train loss:3.667132 +step:5766 train loss:3.576500 +step:5767 train loss:3.612768 +step:5768 train loss:3.548787 +step:5769 train loss:3.669446 +step:5770 train loss:3.595006 +step:5771 train loss:3.620908 +step:5772 train loss:3.570189 +step:5773 train loss:3.551259 +step:5774 train loss:3.559977 +step:5775 train loss:3.627230 +step:5776 train loss:3.614508 +step:5777 train loss:3.533523 +step:5778 train loss:3.618664 +step:5779 train loss:3.579080 +step:5780 train loss:3.552967 +step:5781 train loss:3.612688 +step:5782 train loss:3.573537 +step:5783 train loss:3.534957 +step:5784 train loss:3.639376 +step:5785 train loss:3.626615 +step:5786 train loss:3.539398 +step:5787 train loss:3.587221 +step:5788 train loss:3.594593 +step:5789 train loss:3.538859 +step:5790 train loss:3.642519 
+step:5791 train loss:3.568048 +step:5792 train loss:3.839699 +step:5793 train loss:3.609699 +step:5794 train loss:3.630043 +step:5795 train loss:3.621585 +step:5796 train loss:3.606289 +step:5797 train loss:3.586521 +step:5798 train loss:3.586571 +step:5799 train loss:3.553586 +step:5800 train loss:3.712067 +step:5801 train loss:3.585860 +step:5802 train loss:3.575416 +step:5803 train loss:3.582794 +step:5804 train loss:3.604131 +step:5805 train loss:3.568943 +step:5806 train loss:3.605072 +step:5807 train loss:3.528513 +step:5808 train loss:3.561563 +step:5809 train loss:3.572954 +step:5810 train loss:3.542851 +step:5811 train loss:3.560395 +step:5812 train loss:3.540369 +step:5813 train loss:3.550609 +step:5814 train loss:3.545841 +step:5815 train loss:3.548026 +step:5816 train loss:3.611348 +step:5817 train loss:3.622615 +step:5818 train loss:3.596371 +step:5819 train loss:3.648035 +step:5820 train loss:3.587406 +step:5821 train loss:3.582190 +step:5822 train loss:3.596791 +step:5823 train loss:3.600291 +step:5824 train loss:3.552683 +step:5825 train loss:3.645975 +step:5826 train loss:3.560292 +step:5827 train loss:3.522315 +step:5828 train loss:3.510384 +step:5829 train loss:3.572998 +step:5830 train loss:3.550507 +step:5831 train loss:3.517960 +step:5832 train loss:3.633329 +step:5833 train loss:3.611037 +step:5834 train loss:3.597469 +step:5835 train loss:3.545966 +step:5836 train loss:3.508815 +step:5837 train loss:3.633120 +step:5838 train loss:3.611835 +step:5839 train loss:3.587616 +step:5840 train loss:3.668704 +step:5841 train loss:3.591889 +step:5842 train loss:3.607652 +step:5843 train loss:3.549426 +step:5844 train loss:3.621317 +step:5845 train loss:3.528932 +step:5846 train loss:3.577623 +step:5847 train loss:3.602575 +step:5848 train loss:3.670335 +step:5849 train loss:3.566531 +step:5850 train loss:3.595161 +step:5851 train loss:3.562840 +step:5852 train loss:3.651544 +step:5853 train loss:3.740578 +step:5854 train loss:3.532482 +step:5855 train loss:3.593128 +step:5856 train loss:3.559628 +step:5857 train loss:3.575811 +step:5858 train loss:3.546515 +step:5859 train loss:3.555361 +step:5860 train loss:3.654453 +step:5861 train loss:3.537904 +step:5862 train loss:3.651104 +step:5863 train loss:3.589479 +step:5864 train loss:3.578760 +step:5865 train loss:3.582037 +step:5866 train loss:3.572584 +step:5867 train loss:3.654813 +step:5868 train loss:3.579676 +step:5869 train loss:3.603805 +step:5870 train loss:3.581115 +step:5871 train loss:3.562165 +step:5872 train loss:3.591377 +step:5873 train loss:3.566350 +step:5874 train loss:3.650539 +step:5875 train loss:3.576905 +step:5876 train loss:3.558802 +step:5877 train loss:3.565700 +step:5878 train loss:3.564452 +step:5879 train loss:3.536896 +step:5880 train loss:3.735590 +step:5881 train loss:3.575909 +step:5882 train loss:3.549349 +step:5883 train loss:3.552077 +step:5884 train loss:3.569077 +step:5885 train loss:3.563613 +step:5886 train loss:3.583745 +step:5887 train loss:3.582693 +step:5888 train loss:3.564599 +step:5889 train loss:3.542008 +step:5890 train loss:3.587571 +step:5891 train loss:3.532877 +step:5892 train loss:3.615775 +step:5893 train loss:3.538167 +step:5894 train loss:3.530703 +step:5895 train loss:3.534594 +step:5896 train loss:3.545177 +step:5897 train loss:3.612392 +step:5898 train loss:3.833783 +step:5899 train loss:3.563488 +step:5900 train loss:3.617119 +step:5901 train loss:3.567926 +step:5902 train loss:3.579945 +step:5903 train loss:3.568512 +step:5904 train loss:3.597842 +step:5905 train 
loss:3.702927 +step:5906 train loss:3.647909 +step:5907 train loss:3.588558 +step:5908 train loss:3.566323 +step:5909 train loss:3.558671 +step:5910 train loss:3.544134 +step:5911 train loss:3.560749 +step:5912 train loss:3.565606 +step:5913 train loss:3.592747 +step:5914 train loss:3.576371 +step:5915 train loss:3.734293 +step:5916 train loss:3.607251 +step:5917 train loss:3.571488 +step:5918 train loss:3.564931 +step:5919 train loss:3.599683 +step:5920 train loss:3.592614 +step:5921 train loss:3.561695 +step:5922 train loss:3.615313 +step:5923 train loss:3.608442 +step:5924 train loss:3.561128 +step:5925 train loss:3.688002 +step:5926 train loss:3.570004 +step:5927 train loss:3.545933 +step:5928 train loss:3.580151 +step:5929 train loss:3.601743 +step:5930 train loss:3.549680 +step:5931 train loss:3.533832 +step:5932 train loss:3.572757 +step:5933 train loss:3.629908 +step:5934 train loss:3.542305 +step:5935 train loss:3.566557 +step:5936 train loss:3.555245 +step:5937 train loss:3.533552 +step:5938 train loss:3.553390 +step:5939 train loss:3.529359 +step:5940 train loss:3.613853 +step:5941 train loss:3.545012 +step:5942 train loss:3.561608 +step:5943 train loss:3.565308 +step:5944 train loss:3.620883 +step:5945 train loss:3.553330 +step:5946 train loss:3.533293 +step:5947 train loss:3.542671 +step:5948 train loss:3.583554 +step:5949 train loss:3.630704 +step:5950 train loss:3.587743 +step:5951 train loss:3.590428 +step:5952 train loss:3.514507 +step:5953 train loss:3.556013 +step:5954 train loss:3.566917 +step:5955 train loss:3.569561 +step:5956 train loss:3.549649 +step:5957 train loss:3.515695 +step:5958 train loss:3.590790 +step:5959 train loss:3.546333 +step:5960 train loss:3.524818 +step:5961 train loss:3.549417 +step:5962 train loss:3.580064 +step:5963 train loss:3.613869 +step:5964 train loss:3.570487 +step:5965 train loss:3.587977 +step:5966 train loss:3.584262 +step:5967 train loss:3.549778 +step:5968 train loss:3.625674 +step:5969 train loss:3.564190 +step:5970 train loss:3.581375 +step:5971 train loss:3.531654 +step:5972 train loss:3.560317 +step:5973 train loss:3.549300 +step:5974 train loss:3.574231 +step:5975 train loss:3.541414 +step:5976 train loss:3.586161 +step:5977 train loss:3.539908 +step:5978 train loss:3.528246 +step:5979 train loss:3.563108 +step:5980 train loss:3.635248 +step:5981 train loss:3.525665 +step:5982 train loss:3.537598 +step:5983 train loss:3.604825 +step:5984 train loss:3.549522 +step:5985 train loss:3.593035 +step:5986 train loss:3.567322 +step:5987 train loss:3.553304 +step:5988 train loss:3.559984 +step:5989 train loss:3.579963 +step:5990 train loss:3.510755 +step:5991 train loss:3.574500 +step:5992 train loss:3.606777 +step:5993 train loss:3.558588 +step:5994 train loss:3.577737 +step:5995 train loss:3.467557 +step:5996 train loss:3.635645 +step:5997 train loss:3.614905 +step:5998 train loss:3.492997 +step:5999 train loss:3.522430 +step:6000 validation loss:3.486367 total_sharp:4.4078e-03 L1_sharp:4.4654e-03 L2_sharp:6.8078e-04 L3_sharp:9.6733e-04 L4_sharp:5.7223e-04 L5_sharp:8.2076e-04 L6_sharp:9.0734e-04 L7_sharp:1.2460e-03 L8_sharp:1.0471e-03 L9_sharp:1.2694e-03 L10_sharp:1.0163e-03 L11_sharp:1.1629e-03 L12_sharp:3.3492e-03 total_fnorm:2.2257e+00 total_l1_linf:1.9807e+04 total_spectral:2.2257e+00 L1_fnorm:4.8413e-01 L2_fnorm:5.0239e-01 L3_fnorm:5.1407e-01 L4_fnorm:5.2329e-01 L5_fnorm:5.2251e-01 L6_fnorm:5.2672e-01 L7_fnorm:5.2240e-01 L8_fnorm:5.2940e-01 L9_fnorm:5.2703e-01 L10_fnorm:5.2348e-01 L11_fnorm:5.1940e-01 L12_fnorm:5.1429e-01 
L1_l1linf:5.8105e-01 L2_l1linf:5.6130e-01 L3_l1linf:5.7842e-01 L4_l1linf:5.7480e-01 L5_l1linf:5.6972e-01 L6_l1linf:6.0312e-01 L7_l1linf:5.5187e-01 L8_l1linf:5.4048e-01 L9_l1linf:5.3975e-01 L10_l1linf:5.5446e-01 L11_l1linf:5.9148e-01 L12_l1linf:6.0264e-01 L1_spectral:8.0094e-02 L2_spectral:7.9474e-02 L3_spectral:7.2447e-02 L4_spectral:6.0285e-02 L5_spectral:7.3692e-02 L6_spectral:6.4976e-02 L7_spectral:5.6446e-02 L8_spectral:4.8172e-02 L9_spectral:4.9291e-02 L10_spectral:6.1764e-02 L11_spectral:7.1418e-02 L12_spectral:8.0532e-02 ip_v_neg_g:8.7003e-03 cos_v_neg_g:4.8712e-03 v_norm:2.2257e+00 g_norm:8.0249e-01 hv_norm:3.2771e-01 cos_v_hv:2.9937e-02 hg_norm:4.4502e+00 cos_g_hg:5.0494e-01 v_par:4.1854e-04 v_perp:2.2257e+00 L1_cos_v_neg_g:1.1118e-02 L1_v_norm:4.8413e-01 L2_cos_v_neg_g:7.1147e-03 L2_v_norm:5.0239e-01 L3_cos_v_neg_g:4.5172e-03 L3_v_norm:5.1407e-01 L4_cos_v_neg_g:3.9268e-03 L4_v_norm:5.2329e-01 L5_cos_v_neg_g:4.7058e-03 L5_v_norm:5.2251e-01 L6_cos_v_neg_g:4.5908e-03 L6_v_norm:5.2672e-01 L7_cos_v_neg_g:5.0996e-03 L7_v_norm:5.2240e-01 L8_cos_v_neg_g:4.1445e-03 L8_v_norm:5.2940e-01 L9_cos_v_neg_g:4.7072e-03 L9_v_norm:5.2703e-01 L10_cos_v_neg_g:5.7956e-03 L10_v_norm:5.2348e-01 L11_cos_v_neg_g:7.6698e-03 L11_v_norm:5.1940e-01 L12_cos_v_neg_g:1.4349e-02 L12_v_norm:5.1429e-01 +step:6000 train loss:3.571886 +step:6001 train loss:3.537322 +step:6002 train loss:3.565734 +step:6003 train loss:3.584479 +step:6004 train loss:3.534295 +step:6005 train loss:3.605356 +step:6006 train loss:3.514072 +step:6007 train loss:3.534760 +step:6008 train loss:3.548797 +step:6009 train loss:3.588715 +step:6010 train loss:3.583303 +step:6011 train loss:3.572703 +step:6012 train loss:3.538431 +step:6013 train loss:3.594460 +step:6014 train loss:3.614356 +step:6015 train loss:3.611699 +step:6016 train loss:3.580122 +step:6017 train loss:3.589046 +step:6018 train loss:3.527440 +step:6019 train loss:3.563470 +step:6020 train loss:3.545646 +step:6021 train loss:3.475920 +step:6022 train loss:3.589273 +step:6023 train loss:3.524609 +step:6024 train loss:3.603150 +step:6025 train loss:3.566479 +step:6026 train loss:3.538870 +step:6027 train loss:3.580133 +step:6028 train loss:3.495547 +step:6029 train loss:3.608592 +step:6030 train loss:3.580885 +step:6031 train loss:3.551953 +step:6032 train loss:3.514810 +step:6033 train loss:3.569420 +step:6034 train loss:3.595077 +step:6035 train loss:3.512146 +step:6036 train loss:3.485840 +step:6037 train loss:3.601551 +step:6038 train loss:3.607573 +step:6039 train loss:3.588454 +step:6040 train loss:3.547498 +step:6041 train loss:3.527147 +step:6042 train loss:3.506010 +step:6043 train loss:3.565003 +step:6044 train loss:3.685190 +step:6045 train loss:3.528273 +step:6046 train loss:3.541738 +step:6047 train loss:3.575144 +step:6048 train loss:3.586699 +step:6049 train loss:3.562829 +step:6050 train loss:3.532823 +step:6051 train loss:3.580994 +step:6052 train loss:3.555259 +step:6053 train loss:3.675916 +step:6054 train loss:3.710811 +step:6055 train loss:3.527750 +step:6056 train loss:3.518662 +step:6057 train loss:3.552699 +step:6058 train loss:3.577345 +step:6059 train loss:3.581758 +step:6060 train loss:3.588470 +step:6061 train loss:3.601746 +step:6062 train loss:3.554740 +step:6063 train loss:3.573296 +step:6064 train loss:3.564994 +step:6065 train loss:3.567745 +step:6066 train loss:3.553813 +step:6067 train loss:3.594689 +step:6068 train loss:3.533606 +step:6069 train loss:3.489836 +step:6070 train loss:3.642300 +step:6071 train loss:3.586327 +step:6072 train 
loss:3.524614 +step:6073 train loss:3.565673 +step:6074 train loss:3.647071 +step:6075 train loss:3.570863 +step:6076 train loss:3.580246 +step:6077 train loss:3.581254 +step:6078 train loss:3.517082 +step:6079 train loss:3.547112 +step:6080 train loss:3.550285 +step:6081 train loss:3.588845 +step:6082 train loss:3.537322 +step:6083 train loss:3.551381 +step:6084 train loss:3.616046 +step:6085 train loss:3.610363 +step:6086 train loss:3.512171 +step:6087 train loss:3.556577 +step:6088 train loss:3.542462 +step:6089 train loss:3.600283 +step:6090 train loss:3.603040 +step:6091 train loss:3.549980 +step:6092 train loss:3.513479 +step:6093 train loss:3.574713 +step:6094 train loss:3.490247 +step:6095 train loss:3.654804 +step:6096 train loss:3.527035 +step:6097 train loss:3.603029 +step:6098 train loss:3.577022 +step:6099 train loss:3.634039 +step:6100 train loss:3.627424 +step:6101 train loss:3.561973 +step:6102 train loss:3.682284 +step:6103 train loss:3.561977 +step:6104 train loss:3.677664 +step:6105 train loss:3.612416 +step:6106 train loss:3.551610 +step:6107 train loss:3.612338 +step:6108 train loss:3.575962 +step:6109 train loss:3.647863 +step:6110 train loss:3.580190 +step:6111 train loss:3.611990 +step:6112 train loss:3.549922 +step:6113 train loss:3.579033 +step:6114 train loss:3.549029 +step:6115 train loss:3.610624 +step:6116 train loss:3.552985 +step:6117 train loss:3.606928 +step:6118 train loss:3.590140 +step:6119 train loss:3.597101 +step:6120 train loss:3.741115 +step:6121 train loss:3.577688 +step:6122 train loss:3.588080 +step:6123 train loss:3.563548 +step:6124 train loss:3.542502 +step:6125 train loss:3.533854 +step:6126 train loss:3.554076 +step:6127 train loss:3.540459 +step:6128 train loss:3.510973 +step:6129 train loss:3.742433 +step:6130 train loss:3.529716 +step:6131 train loss:3.507636 +step:6132 train loss:3.581754 +step:6133 train loss:3.546515 +step:6134 train loss:3.574659 +step:6135 train loss:3.656674 +step:6136 train loss:3.674648 +step:6137 train loss:3.539236 +step:6138 train loss:3.590898 +step:6139 train loss:3.572959 +step:6140 train loss:3.570707 +step:6141 train loss:3.528816 +step:6142 train loss:3.594496 +step:6143 train loss:3.559250 +step:6144 train loss:3.579298 +step:6145 train loss:3.831497 +step:6146 train loss:3.668499 +step:6147 train loss:3.753012 +step:6148 train loss:3.517359 +step:6149 train loss:3.642849 +step:6150 train loss:3.596549 +step:6151 train loss:3.548766 +step:6152 train loss:3.546476 +step:6153 train loss:3.611317 +step:6154 train loss:3.699645 +step:6155 train loss:3.563946 +step:6156 train loss:3.658984 +step:6157 train loss:3.589694 +step:6158 train loss:3.581384 +step:6159 train loss:3.544497 +step:6160 train loss:3.711023 +step:6161 train loss:3.567031 +step:6162 train loss:3.582207 +step:6163 train loss:3.610975 +step:6164 train loss:3.528053 +step:6165 train loss:3.593535 +step:6166 train loss:3.588205 +step:6167 train loss:3.608660 +step:6168 train loss:3.582102 +step:6169 train loss:3.573367 +step:6170 train loss:3.578453 +step:6171 train loss:3.547674 +step:6172 train loss:3.534515 +step:6173 train loss:3.585392 +step:6174 train loss:3.512138 +step:6175 train loss:3.524619 +step:6176 train loss:3.505933 +step:6177 train loss:3.603098 +step:6178 train loss:3.547852 +step:6179 train loss:3.555654 +step:6180 train loss:3.561016 +step:6181 train loss:3.595634 +step:6182 train loss:3.479312 +step:6183 train loss:3.489139 +step:6184 train loss:3.606893 +step:6185 train loss:3.561978 +step:6186 train loss:3.520866 
+step:6187 train loss:3.563925 +step:6188 train loss:3.531904 +step:6189 train loss:3.570431 +step:6190 train loss:3.530804 +step:6191 train loss:3.561316 +step:6192 train loss:3.528970 +step:6193 train loss:3.594513 +step:6194 train loss:3.588342 +step:6195 train loss:3.568660 +step:6196 train loss:3.582659 +step:6197 train loss:3.607981 +step:6198 train loss:3.519032 +step:6199 train loss:3.544269 +step:6200 train loss:3.583560 +step:6201 train loss:3.628541 +step:6202 train loss:3.628810 +step:6203 train loss:3.628084 +step:6204 train loss:3.612976 +step:6205 train loss:3.553993 +step:6206 train loss:3.539476 +step:6207 train loss:3.599308 +step:6208 train loss:3.625029 +step:6209 train loss:3.592697 +step:6210 train loss:3.624268 +step:6211 train loss:3.539689 +step:6212 train loss:3.531218 +step:6213 train loss:3.547099 +step:6214 train loss:3.521821 +step:6215 train loss:3.698941 +step:6216 train loss:3.569545 +step:6217 train loss:3.630448 +step:6218 train loss:3.600298 +step:6219 train loss:3.617299 +step:6220 train loss:3.573102 +step:6221 train loss:3.535954 +step:6222 train loss:3.774348 +step:6223 train loss:3.542514 +step:6224 train loss:3.574270 +step:6225 train loss:3.555501 +step:6226 train loss:3.560798 +step:6227 train loss:3.564149 +step:6228 train loss:3.558640 +step:6229 train loss:3.598213 +step:6230 train loss:3.555629 +step:6231 train loss:3.667195 +step:6232 train loss:3.506260 +step:6233 train loss:3.547783 +step:6234 train loss:3.555843 +step:6235 train loss:3.582101 +step:6236 train loss:3.522495 +step:6237 train loss:3.542542 +step:6238 train loss:3.564607 +step:6239 train loss:3.553787 +step:6240 train loss:3.574728 +step:6241 train loss:3.560645 +step:6242 train loss:3.558460 +step:6243 train loss:3.591460 +step:6244 train loss:3.750046 +step:6245 train loss:3.547043 +step:6246 train loss:3.533102 +step:6247 train loss:3.527506 +step:6248 train loss:3.531151 +step:6249 train loss:3.471271 +step:6250 validation loss:3.479312 +step:6250 train loss:3.505762 +step:6251 train loss:3.527029 +step:6252 train loss:3.569239 +step:6253 train loss:3.580607 +step:6254 train loss:3.568174 +step:6255 train loss:3.535572 +step:6256 train loss:3.586523 +step:6257 train loss:3.585380 +step:6258 train loss:3.564263 +step:6259 train loss:3.573018 +step:6260 train loss:3.599065 +step:6261 train loss:3.619791 +step:6262 train loss:3.515634 +step:6263 train loss:3.548758 +step:6264 train loss:3.556334 +step:6265 train loss:3.544511 +step:6266 train loss:3.750704 +step:6267 train loss:3.552148 +step:6268 train loss:3.639069 +step:6269 train loss:3.512929 +step:6270 train loss:3.524946 +step:6271 train loss:3.575064 +step:6272 train loss:3.566059 +step:6273 train loss:3.767898 +step:6274 train loss:3.543476 +step:6275 train loss:3.580405 +step:6276 train loss:3.551745 +step:6277 train loss:3.535958 +step:6278 train loss:3.519413 +step:6279 train loss:3.573607 +step:6280 train loss:3.580414 +step:6281 train loss:3.511257 +step:6282 train loss:3.526243 +step:6283 train loss:3.613654 +step:6284 train loss:3.580238 +step:6285 train loss:3.581888 +step:6286 train loss:3.525909 +step:6287 train loss:3.555556 +step:6288 train loss:3.653884 +step:6289 train loss:3.514562 +step:6290 train loss:3.517941 +step:6291 train loss:3.551724 +step:6292 train loss:3.567564 +step:6293 train loss:3.554262 +step:6294 train loss:3.545162 +step:6295 train loss:3.564446 +step:6296 train loss:3.528863 +step:6297 train loss:3.655309 +step:6298 train loss:3.600036 +step:6299 train loss:3.492271 +step:6300 
train loss:3.578715 +step:6301 train loss:3.602096 +step:6302 train loss:3.584273 +step:6303 train loss:3.552392 +step:6304 train loss:3.569451 +step:6305 train loss:3.541334 +step:6306 train loss:3.550838 +step:6307 train loss:3.560554 +step:6308 train loss:3.535141 +step:6309 train loss:3.533118 +step:6310 train loss:3.582557 +step:6311 train loss:3.539963 +step:6312 train loss:3.578973 +step:6313 train loss:3.510767 +step:6314 train loss:3.537223 +step:6315 train loss:3.590293 +step:6316 train loss:3.511761 +step:6317 train loss:3.504293 +step:6318 train loss:3.618336 +step:6319 train loss:3.549436 +step:6320 train loss:3.563452 +step:6321 train loss:3.549797 +step:6322 train loss:3.549920 +step:6323 train loss:3.481412 +step:6324 train loss:3.489811 +step:6325 train loss:3.588344 +step:6326 train loss:3.507354 +step:6327 train loss:3.582591 +step:6328 train loss:3.559169 +step:6329 train loss:3.482012 +step:6330 train loss:3.508314 +step:6331 train loss:3.524967 +step:6332 train loss:3.664178 +step:6333 train loss:3.538446 +step:6334 train loss:3.516428 +step:6335 train loss:3.485240 +step:6336 train loss:3.519324 +step:6337 train loss:3.542696 +step:6338 train loss:3.497320 +step:6339 train loss:3.542557 +step:6340 train loss:3.522712 +step:6341 train loss:3.536419 +step:6342 train loss:3.534377 +step:6343 train loss:3.636497 +step:6344 train loss:3.484497 +step:6345 train loss:3.499990 +step:6346 train loss:3.576485 +step:6347 train loss:3.452683 +step:6348 train loss:3.549220 +step:6349 train loss:3.523304 +step:6350 train loss:3.500526 +step:6351 train loss:3.498384 +step:6352 train loss:3.515335 +step:6353 train loss:3.534792 +step:6354 train loss:3.547858 +step:6355 train loss:3.557400 +step:6356 train loss:3.567215 +step:6357 train loss:3.424448 +step:6358 train loss:3.512403 +step:6359 train loss:3.570014 +step:6360 train loss:3.481142 +step:6361 train loss:3.481801 +step:6362 train loss:3.524675 +step:6363 train loss:3.504086 +step:6364 train loss:3.489177 +step:6365 train loss:3.562110 +step:6366 train loss:3.575265 +step:6367 train loss:3.500469 +step:6368 train loss:3.546182 +step:6369 train loss:3.510591 +step:6370 train loss:3.559927 +step:6371 train loss:3.477920 +step:6372 train loss:3.509392 +step:6373 train loss:3.533351 +step:6374 train loss:3.565447 +step:6375 train loss:3.520867 +step:6376 train loss:3.548201 +step:6377 train loss:3.545598 +step:6378 train loss:3.497391 +step:6379 train loss:3.536022 +step:6380 train loss:3.579516 +step:6381 train loss:3.543938 +step:6382 train loss:3.498323 +step:6383 train loss:3.564982 +step:6384 train loss:3.535480 +step:6385 train loss:3.514747 +step:6386 train loss:3.548464 +step:6387 train loss:3.527918 +step:6388 train loss:3.568314 +step:6389 train loss:3.579224 +step:6390 train loss:3.528011 +step:6391 train loss:3.512399 +step:6392 train loss:3.500315 +step:6393 train loss:3.554283 +step:6394 train loss:3.543965 +step:6395 train loss:3.725946 +step:6396 train loss:3.548251 +step:6397 train loss:3.489162 +step:6398 train loss:3.558900 +step:6399 train loss:3.501173 +step:6400 train loss:3.575076 +step:6401 train loss:3.611961 +step:6402 train loss:3.543767 +step:6403 train loss:3.534762 +step:6404 train loss:3.513226 +step:6405 train loss:3.540171 +step:6406 train loss:3.545122 +step:6407 train loss:3.604568 +step:6408 train loss:3.495332 +step:6409 train loss:3.477984 +step:6410 train loss:3.611290 +step:6411 train loss:3.543534 +step:6412 train loss:3.545043 +step:6413 train loss:3.549374 +step:6414 train loss:3.496497 
+step:6415 train loss:3.555898 +step:6416 train loss:3.527584 +step:6417 train loss:3.501662 +step:6418 train loss:3.489984 +step:6419 train loss:3.573291 +step:6420 train loss:3.500457 +step:6421 train loss:3.528058 +step:6422 train loss:3.515116 +step:6423 train loss:3.526749 +step:6424 train loss:3.548562 +step:6425 train loss:3.542098 +step:6426 train loss:3.584486 +step:6427 train loss:3.548301 +step:6428 train loss:3.584178 +step:6429 train loss:3.550401 +step:6430 train loss:3.523000 +step:6431 train loss:3.497989 +step:6432 train loss:3.533837 +step:6433 train loss:3.547875 +step:6434 train loss:3.431396 +step:6435 train loss:3.618455 +step:6436 train loss:3.557090 +step:6437 train loss:3.518216 +step:6438 train loss:3.547627 +step:6439 train loss:3.525286 +step:6440 train loss:3.537363 +step:6441 train loss:3.530355 +step:6442 train loss:3.471040 +step:6443 train loss:3.523474 +step:6444 train loss:3.664448 +step:6445 train loss:3.567058 +step:6446 train loss:3.570281 +step:6447 train loss:3.550333 +step:6448 train loss:3.497328 +step:6449 train loss:3.520189 +step:6450 train loss:3.501628 +step:6451 train loss:3.490495 +step:6452 train loss:3.493893 +step:6453 train loss:3.536261 +step:6454 train loss:3.558096 +step:6455 train loss:3.548233 +step:6456 train loss:3.566823 +step:6457 train loss:3.543486 +step:6458 train loss:3.515218 +step:6459 train loss:3.498264 +step:6460 train loss:3.505473 +step:6461 train loss:3.502419 +step:6462 train loss:3.499881 +step:6463 train loss:3.596532 +step:6464 train loss:3.501708 +step:6465 train loss:3.545956 +step:6466 train loss:3.560085 +step:6467 train loss:3.484054 +step:6468 train loss:3.562224 +step:6469 train loss:3.468952 +step:6470 train loss:3.594678 +step:6471 train loss:3.500460 +step:6472 train loss:3.659656 +step:6473 train loss:3.542577 +step:6474 train loss:3.575339 +step:6475 train loss:3.517544 +step:6476 train loss:3.588431 +step:6477 train loss:3.522583 +step:6478 train loss:3.649733 +step:6479 train loss:3.565152 +step:6480 train loss:3.501243 +step:6481 train loss:3.557680 +step:6482 train loss:3.499427 +step:6483 train loss:3.558083 +step:6484 train loss:3.515350 +step:6485 train loss:3.577250 +step:6486 train loss:3.508264 +step:6487 train loss:3.507869 +step:6488 train loss:3.504350 +step:6489 train loss:3.506063 +step:6490 train loss:3.530064 +step:6491 train loss:3.501311 +step:6492 train loss:3.604535 +step:6493 train loss:3.508033 +step:6494 train loss:3.512938 +step:6495 train loss:3.512680 +step:6496 train loss:3.543394 +step:6497 train loss:3.558936 +step:6498 train loss:3.671990 +step:6499 train loss:3.643478 +step:6500 validation loss:3.469603 total_sharp:4.6466e-03 L1_sharp:5.3473e-03 L2_sharp:1.0066e-03 L3_sharp:9.6565e-04 L4_sharp:7.7768e-04 L5_sharp:6.8947e-04 L6_sharp:1.0395e-03 L7_sharp:1.2986e-03 L8_sharp:1.1581e-03 L9_sharp:1.0533e-03 L10_sharp:7.9902e-04 L11_sharp:8.2047e-04 L12_sharp:2.1419e-03 total_fnorm:2.2510e+00 total_l1_linf:2.0071e+04 total_spectral:2.2510e+00 L1_fnorm:5.0280e-01 L2_fnorm:5.1539e-01 L3_fnorm:5.1971e-01 L4_fnorm:5.2798e-01 L5_fnorm:5.2352e-01 L6_fnorm:5.3106e-01 L7_fnorm:5.2386e-01 L8_fnorm:5.3191e-01 L9_fnorm:5.2995e-01 L10_fnorm:5.3084e-01 L11_fnorm:5.3201e-01 L12_fnorm:5.2262e-01 L1_l1linf:6.0208e-01 L2_l1linf:6.0771e-01 L3_l1linf:6.4442e-01 L4_l1linf:6.3121e-01 L5_l1linf:6.1432e-01 L6_l1linf:5.8078e-01 L7_l1linf:6.1932e-01 L8_l1linf:5.5542e-01 L9_l1linf:5.3483e-01 L10_l1linf:5.4956e-01 L11_l1linf:6.0269e-01 L12_l1linf:5.4967e-01 L1_spectral:8.6626e-02 L2_spectral:8.1957e-02 
L3_spectral:7.4418e-02 L4_spectral:7.4796e-02 L5_spectral:7.9741e-02 L6_spectral:8.0813e-02 L7_spectral:7.0043e-02 L8_spectral:5.5043e-02 L9_spectral:4.3941e-02 L10_spectral:5.5663e-02 L11_spectral:6.7987e-02 L12_spectral:8.0827e-02 ip_v_neg_g:1.0552e-02 cos_v_neg_g:5.7183e-03 v_norm:2.2510e+00 g_norm:8.1979e-01 hv_norm:4.0590e-01 cos_v_hv:2.5769e-02 hg_norm:4.0548e+00 cos_g_hg:5.5118e-01 v_par:4.6897e-04 v_perp:2.2510e+00 L1_cos_v_neg_g:1.3842e-02 L1_v_norm:5.0280e-01 L2_cos_v_neg_g:1.1555e-02 L2_v_norm:5.1539e-01 L3_cos_v_neg_g:7.8438e-03 L3_v_norm:5.1971e-01 L4_cos_v_neg_g:5.8678e-03 L4_v_norm:5.2798e-01 L5_cos_v_neg_g:4.6869e-03 L5_v_norm:5.2352e-01 L6_cos_v_neg_g:5.1128e-03 L6_v_norm:5.3106e-01 L7_cos_v_neg_g:5.6943e-03 L7_v_norm:5.2386e-01 L8_cos_v_neg_g:6.1366e-03 L8_v_norm:5.3191e-01 L9_cos_v_neg_g:5.8413e-03 L9_v_norm:5.2995e-01 L10_cos_v_neg_g:5.5818e-03 L10_v_norm:5.3084e-01 L11_cos_v_neg_g:7.2114e-03 L11_v_norm:5.3201e-01 L12_cos_v_neg_g:1.1672e-02 L12_v_norm:5.2262e-01 +step:6500 train loss:3.487374 +step:6501 train loss:3.508221 +step:6502 train loss:3.523289 +step:6503 train loss:3.583434 +step:6504 train loss:3.530830 +step:6505 train loss:3.538120 +step:6506 train loss:3.497844 +step:6507 train loss:3.566991 +step:6508 train loss:3.534446 +step:6509 train loss:3.515684 +step:6510 train loss:3.525628 +step:6511 train loss:3.540698 +step:6512 train loss:3.479773 +step:6513 train loss:3.551670 +step:6514 train loss:3.425189 +step:6515 train loss:3.514387 +step:6516 train loss:3.566687 +step:6517 train loss:3.477074 +step:6518 train loss:3.519668 +step:6519 train loss:3.506415 +step:6520 train loss:3.598629 +step:6521 train loss:3.576291 +step:6522 train loss:3.586314 +step:6523 train loss:3.480721 +step:6524 train loss:3.564900 +step:6525 train loss:3.552852 +step:6526 train loss:3.488809 +step:6527 train loss:3.540047 +step:6528 train loss:3.561608 +step:6529 train loss:3.588223 +step:6530 train loss:3.496858 +step:6531 train loss:3.573269 +step:6532 train loss:3.499697 +step:6533 train loss:3.537675 +step:6534 train loss:3.545958 +step:6535 train loss:3.520800 +step:6536 train loss:3.653211 +step:6537 train loss:3.488802 +step:6538 train loss:3.571571 +step:6539 train loss:3.499578 +step:6540 train loss:3.610624 +step:6541 train loss:3.592093 +step:6542 train loss:3.548252 +step:6543 train loss:3.502673 +step:6544 train loss:3.484781 +step:6545 train loss:3.473878 +step:6546 train loss:3.533592 +step:6547 train loss:3.589336 +step:6548 train loss:3.529918 +step:6549 train loss:3.547518 +step:6550 train loss:3.657378 +step:6551 train loss:3.538389 +step:6552 train loss:3.529118 +step:6553 train loss:3.569377 +step:6554 train loss:3.459707 +step:6555 train loss:3.545052 +step:6556 train loss:3.417860 +step:6557 train loss:3.765150 +step:6558 train loss:3.598824 +step:6559 train loss:3.514899 +step:6560 train loss:3.552212 +step:6561 train loss:3.523631 +step:6562 train loss:3.543187 +step:6563 train loss:3.434575 +step:6564 train loss:3.537986 +step:6565 train loss:3.444507 +step:6566 train loss:3.558372 +step:6567 train loss:3.523745 +step:6568 train loss:3.571687 +step:6569 train loss:3.518447 +step:6570 train loss:3.555187 +step:6571 train loss:3.484276 +step:6572 train loss:3.561428 +step:6573 train loss:3.576503 +step:6574 train loss:3.558267 +step:6575 train loss:3.502813 +step:6576 train loss:3.494225 +step:6577 train loss:3.566157 +step:6578 train loss:3.433252 +step:6579 train loss:3.535310 +step:6580 train loss:3.491818 +step:6581 train loss:3.499960 +step:6582 
train loss:3.481488 +step:6583 train loss:3.579710 +step:6584 train loss:3.510340 +step:6585 train loss:3.548040 +step:6586 train loss:3.553567 +step:6587 train loss:3.560267 +step:6588 train loss:3.529159 +step:6589 train loss:3.556604 +step:6590 train loss:3.498056 +step:6591 train loss:3.549354 +step:6592 train loss:3.494785 +step:6593 train loss:3.499857 +step:6594 train loss:3.528406 +step:6595 train loss:3.506680 +step:6596 train loss:3.508932 +step:6597 train loss:3.529030 +step:6598 train loss:3.574218 +step:6599 train loss:3.475117 +step:6600 train loss:3.523727 +step:6601 train loss:3.580653 +step:6602 train loss:3.506552 +step:6603 train loss:3.531723 +step:6604 train loss:3.544012 +step:6605 train loss:3.522381 +step:6606 train loss:3.586828 +step:6607 train loss:3.502514 +step:6608 train loss:3.516664 +step:6609 train loss:3.487245 +step:6610 train loss:3.598368 +step:6611 train loss:3.519456 +step:6612 train loss:3.564233 +step:6613 train loss:3.481705 +step:6614 train loss:3.509633 +step:6615 train loss:3.510415 +step:6616 train loss:3.488283 +step:6617 train loss:3.528567 +step:6618 train loss:3.515877 +step:6619 train loss:3.489865 +step:6620 train loss:3.590207 +step:6621 train loss:3.473798 +step:6622 train loss:3.543583 +step:6623 train loss:3.475362 +step:6624 train loss:3.546172 +step:6625 train loss:3.590621 +step:6626 train loss:3.557714 +step:6627 train loss:3.506381 +step:6628 train loss:3.562716 +step:6629 train loss:3.465916 +step:6630 train loss:3.501211 +step:6631 train loss:3.537443 +step:6632 train loss:3.574952 +step:6633 train loss:3.528953 +step:6634 train loss:3.589056 +step:6635 train loss:3.489177 +step:6636 train loss:3.530063 +step:6637 train loss:3.496069 +step:6638 train loss:3.498460 +step:6639 train loss:3.510617 +step:6640 train loss:3.496044 +step:6641 train loss:3.519635 +step:6642 train loss:3.511267 +step:6643 train loss:3.592692 +step:6644 train loss:3.594654 +step:6645 train loss:3.471842 +step:6646 train loss:3.561600 +step:6647 train loss:3.516729 +step:6648 train loss:3.620440 +step:6649 train loss:3.547880 +step:6650 train loss:3.500032 +step:6651 train loss:3.545197 +step:6652 train loss:3.557884 +step:6653 train loss:3.503726 +step:6654 train loss:3.499474 +step:6655 train loss:3.541250 +step:6656 train loss:3.510738 +step:6657 train loss:3.532612 +step:6658 train loss:3.518517 +step:6659 train loss:3.669312 +step:6660 train loss:3.571380 +step:6661 train loss:3.492551 +step:6662 train loss:3.524794 +step:6663 train loss:3.458359 +step:6664 train loss:3.537124 +step:6665 train loss:3.548621 +step:6666 train loss:3.587942 +step:6667 train loss:3.478765 +step:6668 train loss:3.607134 +step:6669 train loss:3.492342 +step:6670 train loss:3.500573 +step:6671 train loss:3.585795 +step:6672 train loss:3.536047 +step:6673 train loss:3.546528 +step:6674 train loss:3.517611 +step:6675 train loss:3.536772 +step:6676 train loss:3.544965 +step:6677 train loss:3.500157 +step:6678 train loss:3.572031 +step:6679 train loss:3.609448 +step:6680 train loss:3.609110 +step:6681 train loss:3.560618 +step:6682 train loss:3.502344 +step:6683 train loss:3.526322 +step:6684 train loss:3.537317 +step:6685 train loss:3.548383 +step:6686 train loss:3.483973 +step:6687 train loss:3.501182 +step:6688 train loss:3.546462 +step:6689 train loss:3.556414 +step:6690 train loss:3.530698 +step:6691 train loss:3.562706 +step:6692 train loss:3.570615 +step:6693 train loss:3.602567 +step:6694 train loss:3.555037 +step:6695 train loss:3.527683 +step:6696 train loss:3.467245 
+step:6697 train loss:3.679344 +step:6698 train loss:3.524827 +step:6699 train loss:3.523095 +step:6700 train loss:3.535170 +step:6701 train loss:3.590865 +step:6702 train loss:3.484859 +step:6703 train loss:3.530109 +step:6704 train loss:3.516019 +step:6705 train loss:3.525786 +step:6706 train loss:3.504233 +step:6707 train loss:3.576859 +step:6708 train loss:3.529568 +step:6709 train loss:3.558232 +step:6710 train loss:3.550345 +step:6711 train loss:3.501606 +step:6712 train loss:3.489973 +step:6713 train loss:3.514450 +step:6714 train loss:3.558584 +step:6715 train loss:3.500868 +step:6716 train loss:3.579706 +step:6717 train loss:3.523160 +step:6718 train loss:3.543937 +step:6719 train loss:3.579203 +step:6720 train loss:3.508178 +step:6721 train loss:3.526107 +step:6722 train loss:3.502063 +step:6723 train loss:3.627883 +step:6724 train loss:3.487202 +step:6725 train loss:3.545394 +step:6726 train loss:3.501612 +step:6727 train loss:3.566660 +step:6728 train loss:3.664770 +step:6729 train loss:3.525693 +step:6730 train loss:3.520638 +step:6731 train loss:3.562562 +step:6732 train loss:3.437067 +step:6733 train loss:3.573313 +step:6734 train loss:3.503056 +step:6735 train loss:3.527120 +step:6736 train loss:3.529154 +step:6737 train loss:3.526399 +step:6738 train loss:3.559329 +step:6739 train loss:3.512718 +step:6740 train loss:3.464336 +step:6741 train loss:3.575336 +step:6742 train loss:3.533334 +step:6743 train loss:3.536185 +step:6744 train loss:3.428791 +step:6745 train loss:3.587246 +step:6746 train loss:3.514278 +step:6747 train loss:3.509860 +step:6748 train loss:3.584523 +step:6749 train loss:3.565361 +step:6750 validation loss:3.461174 +step:6750 train loss:3.482247 +step:6751 train loss:3.520264 +step:6752 train loss:3.520224 +step:6753 train loss:3.557273 +step:6754 train loss:3.536033 +step:6755 train loss:3.548650 +step:6756 train loss:3.491548 +step:6757 train loss:3.460063 +step:6758 train loss:3.634338 +step:6759 train loss:3.526386 +step:6760 train loss:3.582822 +step:6761 train loss:3.515037 +step:6762 train loss:3.534977 +step:6763 train loss:3.435861 +step:6764 train loss:3.514297 +step:6765 train loss:3.517957 +step:6766 train loss:3.513973 +step:6767 train loss:3.468323 +step:6768 train loss:3.474080 +step:6769 train loss:3.437292 +step:6770 train loss:3.521810 +step:6771 train loss:3.524110 +step:6772 train loss:3.533190 +step:6773 train loss:3.512460 +step:6774 train loss:3.525045 +step:6775 train loss:3.567142 +step:6776 train loss:3.524515 +step:6777 train loss:3.603200 +step:6778 train loss:3.488501 +step:6779 train loss:3.540791 +step:6780 train loss:3.471795 +step:6781 train loss:3.536079 +step:6782 train loss:3.447821 +step:6783 train loss:3.482518 +step:6784 train loss:3.509459 +step:6785 train loss:3.493968 +step:6786 train loss:3.513505 +step:6787 train loss:3.586890 +step:6788 train loss:3.525743 +step:6789 train loss:3.533855 +step:6790 train loss:3.530787 +step:6791 train loss:3.544991 +step:6792 train loss:3.544377 +step:6793 train loss:3.542044 +step:6794 train loss:3.511641 +step:6795 train loss:3.512412 +step:6796 train loss:3.516366 +step:6797 train loss:3.612402 +step:6798 train loss:3.516749 +step:6799 train loss:3.506943 +step:6800 train loss:3.473598 +step:6801 train loss:3.608198 +step:6802 train loss:3.557899 +step:6803 train loss:3.547213 +step:6804 train loss:3.572961 +step:6805 train loss:3.535838 +step:6806 train loss:3.472761 +step:6807 train loss:3.528213 +step:6808 train loss:3.511128 +step:6809 train loss:3.539516 +step:6810 
train loss:3.662603 +step:6811 train loss:3.565713 +step:6812 train loss:3.534339 +step:6813 train loss:3.549862 +step:6814 train loss:3.557956 +step:6815 train loss:3.601823 +step:6816 train loss:3.517961 +step:6817 train loss:3.545962 +step:6818 train loss:3.523047 +step:6819 train loss:3.507065 +step:6820 train loss:3.533554 +step:6821 train loss:3.498723 +step:6822 train loss:3.605221 +step:6823 train loss:3.584754 +step:6824 train loss:3.560500 +step:6825 train loss:3.505826 +step:6826 train loss:3.550453 +step:6827 train loss:3.535441 +step:6828 train loss:3.551921 +step:6829 train loss:3.538881 +step:6830 train loss:3.507720 +step:6831 train loss:3.467916 +step:6832 train loss:3.454197 +step:6833 train loss:3.471736 +step:6834 train loss:3.556105 +step:6835 train loss:3.530998 +step:6836 train loss:3.449033 +step:6837 train loss:3.512323 +step:6838 train loss:3.572758 +step:6839 train loss:3.655247 +step:6840 train loss:3.528251 +step:6841 train loss:3.483559 +step:6842 train loss:3.537113 +step:6843 train loss:3.640566 +step:6844 train loss:3.521348 +step:6845 train loss:3.573261 +step:6846 train loss:3.637446 +step:6847 train loss:3.567713 +step:6848 train loss:3.555059 +step:6849 train loss:3.581235 +step:6850 train loss:3.558677 +step:6851 train loss:3.484803 +step:6852 train loss:3.477874 +step:6853 train loss:3.463674 +step:6854 train loss:3.544560 +step:6855 train loss:3.513370 +step:6856 train loss:3.496456 +step:6857 train loss:3.552574 +step:6858 train loss:3.579395 +step:6859 train loss:3.489285 +step:6860 train loss:3.599382 +step:6861 train loss:3.622719 +step:6862 train loss:3.533536 +step:6863 train loss:3.528885 +step:6864 train loss:3.477808 +step:6865 train loss:3.544849 +step:6866 train loss:3.474373 +step:6867 train loss:3.652383 +step:6868 train loss:3.529049 +step:6869 train loss:3.562925 +step:6870 train loss:3.597308 +step:6871 train loss:3.513509 +step:6872 train loss:3.511755 +step:6873 train loss:3.528360 +step:6874 train loss:3.487473 +step:6875 train loss:3.492612 +step:6876 train loss:3.521094 +step:6877 train loss:3.561529 +step:6878 train loss:3.476492 +step:6879 train loss:3.521398 +step:6880 train loss:3.531958 +step:6881 train loss:3.494329 +step:6882 train loss:3.559938 +step:6883 train loss:3.542772 +step:6884 train loss:3.774296 +step:6885 train loss:3.547650 +step:6886 train loss:3.525883 +step:6887 train loss:3.464608 +step:6888 train loss:3.569972 +step:6889 train loss:3.452154 +step:6890 train loss:3.562283 +step:6891 train loss:3.568791 +step:6892 train loss:3.668236 +step:6893 train loss:3.497795 +step:6894 train loss:3.560825 +step:6895 train loss:3.558430 +step:6896 train loss:3.535653 +step:6897 train loss:3.488126 +step:6898 train loss:3.490273 +step:6899 train loss:3.576199 +step:6900 train loss:3.549723 +step:6901 train loss:3.498403 +step:6902 train loss:3.433343 +step:6903 train loss:3.476180 +step:6904 train loss:3.591013 +step:6905 train loss:3.625226 +step:6906 train loss:3.543839 +step:6907 train loss:3.559186 +step:6908 train loss:3.595649 +step:6909 train loss:3.587147 +step:6910 train loss:3.464989 +step:6911 train loss:3.594119 +step:6912 train loss:3.485722 +step:6913 train loss:3.522481 +step:6914 train loss:3.483893 +step:6915 train loss:3.510081 +step:6916 train loss:3.486041 +step:6917 train loss:3.609719 +step:6918 train loss:3.555135 +step:6919 train loss:3.548347 +step:6920 train loss:3.533324 +step:6921 train loss:3.600751 +step:6922 train loss:3.590914 +step:6923 train loss:3.453634 +step:6924 train loss:3.535516 
+step:6925 train loss:3.512226 +step:6926 train loss:3.548710 +step:6927 train loss:3.601434 +step:6928 train loss:3.486168 +step:6929 train loss:3.498563 +step:6930 train loss:3.532109 +step:6931 train loss:3.530386 +step:6932 train loss:3.761536 +step:6933 train loss:3.595567 +step:6934 train loss:3.535296 +step:6935 train loss:3.518140 +step:6936 train loss:3.558442 +step:6937 train loss:3.503222 +step:6938 train loss:3.566405 +step:6939 train loss:3.503713 +step:6940 train loss:3.554115 +step:6941 train loss:3.470290 +step:6942 train loss:3.560310 +step:6943 train loss:3.451802 +step:6944 train loss:3.545775 +step:6945 train loss:3.484827 +step:6946 train loss:3.572994 +step:6947 train loss:3.500570 +step:6948 train loss:3.493601 +step:6949 train loss:3.569213 +step:6950 train loss:3.562324 +step:6951 train loss:3.562708 +step:6952 train loss:3.494067 +step:6953 train loss:3.538938 +step:6954 train loss:3.603307 +step:6955 train loss:3.514228 +step:6956 train loss:3.552001 +step:6957 train loss:3.543568 +step:6958 train loss:3.503170 +step:6959 train loss:3.541591 +step:6960 train loss:3.511835 +step:6961 train loss:3.516090 +step:6962 train loss:3.498376 +step:6963 train loss:3.471953 +step:6964 train loss:3.513079 +step:6965 train loss:3.505815 +step:6966 train loss:3.548001 +step:6967 train loss:3.486699 +step:6968 train loss:3.527307 +step:6969 train loss:3.544110 +step:6970 train loss:3.523077 +step:6971 train loss:3.587167 +step:6972 train loss:3.531169 +step:6973 train loss:3.489045 +step:6974 train loss:3.619098 +step:6975 train loss:3.520720 +step:6976 train loss:3.494332 +step:6977 train loss:3.530696 +step:6978 train loss:3.524199 +step:6979 train loss:3.533786 +step:6980 train loss:3.511663 +step:6981 train loss:3.572592 +step:6982 train loss:3.522204 +step:6983 train loss:3.515298 +step:6984 train loss:3.630943 +step:6985 train loss:3.478549 +step:6986 train loss:3.472334 +step:6987 train loss:3.519399 +step:6988 train loss:3.524652 +step:6989 train loss:3.670367 +step:6990 train loss:3.535125 +step:6991 train loss:3.489737 +step:6992 train loss:3.538411 +step:6993 train loss:3.606881 +step:6994 train loss:3.551939 +step:6995 train loss:3.500580 +step:6996 train loss:3.503972 +step:6997 train loss:3.586782 +step:6998 train loss:3.485790 +step:6999 train loss:3.535920 +step:7000 validation loss:3.456516 total_sharp:3.7945e-03 L1_sharp:3.5818e-03 L2_sharp:9.5081e-04 L3_sharp:1.0290e-03 L4_sharp:7.0873e-04 L5_sharp:1.0549e-03 L6_sharp:7.0358e-04 L7_sharp:1.0959e-03 L8_sharp:9.5941e-04 L9_sharp:9.2233e-04 L10_sharp:7.2853e-04 L11_sharp:7.8183e-04 L12_sharp:1.7624e-03 total_fnorm:2.2520e+00 total_l1_linf:2.0080e+04 total_spectral:2.2520e+00 L1_fnorm:5.1338e-01 L2_fnorm:5.1676e-01 L3_fnorm:5.1732e-01 L4_fnorm:5.3017e-01 L5_fnorm:5.2701e-01 L6_fnorm:5.2960e-01 L7_fnorm:5.2451e-01 L8_fnorm:5.3390e-01 L9_fnorm:5.3202e-01 L10_fnorm:5.3121e-01 L11_fnorm:5.2783e-01 L12_fnorm:5.2085e-01 L1_l1linf:5.7655e-01 L2_l1linf:6.1206e-01 L3_l1linf:5.6837e-01 L4_l1linf:6.1455e-01 L5_l1linf:6.1234e-01 L6_l1linf:5.5600e-01 L7_l1linf:5.8322e-01 L8_l1linf:5.6483e-01 L9_l1linf:5.4916e-01 L10_l1linf:6.2377e-01 L11_l1linf:5.6819e-01 L12_l1linf:5.8128e-01 L1_spectral:8.4025e-02 L2_spectral:8.0196e-02 L3_spectral:7.1546e-02 L4_spectral:7.4681e-02 L5_spectral:8.3051e-02 L6_spectral:6.8982e-02 L7_spectral:6.3376e-02 L8_spectral:5.4562e-02 L9_spectral:4.6292e-02 L10_spectral:6.0261e-02 L11_spectral:7.1292e-02 L12_spectral:7.5819e-02 ip_v_neg_g:8.7428e-03 cos_v_neg_g:4.5314e-03 v_norm:2.2520e+00 
g_norm:8.5675e-01 hv_norm:3.3886e-01 cos_v_hv:2.5218e-02 hg_norm:4.6196e+00 cos_g_hg:5.3803e-01 v_par:3.2939e-04 v_perp:2.2520e+00 L1_cos_v_neg_g:1.0870e-02 L1_v_norm:5.1338e-01 L2_cos_v_neg_g:7.8110e-03 L2_v_norm:5.1676e-01 L3_cos_v_neg_g:6.3687e-03 L3_v_norm:5.1732e-01 L4_cos_v_neg_g:4.2038e-03 L4_v_norm:5.3017e-01 L5_cos_v_neg_g:4.4681e-03 L5_v_norm:5.2701e-01 L6_cos_v_neg_g:3.8577e-03 L6_v_norm:5.2960e-01 L7_cos_v_neg_g:4.2907e-03 L7_v_norm:5.2451e-01 L8_cos_v_neg_g:4.4710e-03 L8_v_norm:5.3390e-01 L9_cos_v_neg_g:4.2258e-03 L9_v_norm:5.3202e-01 L10_cos_v_neg_g:5.8846e-03 L10_v_norm:5.3121e-01 L11_cos_v_neg_g:6.5417e-03 L11_v_norm:5.2783e-01 L12_cos_v_neg_g:8.4591e-03 L12_v_norm:5.2085e-01 +step:7000 train loss:3.611506 +step:7001 train loss:3.509137 +step:7002 train loss:3.504227 +step:7003 train loss:3.528227 +step:7004 train loss:3.522441 +step:7005 train loss:3.505049 +step:7006 train loss:3.514378 +step:7007 train loss:3.565242 +step:7008 train loss:3.504035 +step:7009 train loss:3.547122 +step:7010 train loss:3.479790 +step:7011 train loss:3.537353 +step:7012 train loss:3.509793 +step:7013 train loss:3.583957 +step:7014 train loss:3.490911 +step:7015 train loss:3.553827 +step:7016 train loss:3.538231 +step:7017 train loss:3.506978 +step:7018 train loss:3.586198 +step:7019 train loss:3.509091 +step:7020 train loss:3.558341 +step:7021 train loss:3.502873 +step:7022 train loss:3.517287 +step:7023 train loss:3.535457 +step:7024 train loss:3.498919 +step:7025 train loss:3.548524 +step:7026 train loss:3.504655 +step:7027 train loss:3.567194 +step:7028 train loss:3.492371 +step:7029 train loss:3.480582 +step:7030 train loss:3.486353 +step:7031 train loss:3.536242 +step:7032 train loss:3.545381 +step:7033 train loss:3.519486 +step:7034 train loss:3.538966 +step:7035 train loss:3.589993 +step:7036 train loss:3.513174 +step:7037 train loss:3.537532 +step:7038 train loss:3.498069 +step:7039 train loss:3.553826 +step:7040 train loss:3.470406 +step:7041 train loss:3.563073 +step:7042 train loss:3.494806 +step:7043 train loss:3.470425 +step:7044 train loss:3.512959 +step:7045 train loss:3.514703 +step:7046 train loss:3.507685 +step:7047 train loss:3.545751 +step:7048 train loss:3.494773 +step:7049 train loss:3.502278 +step:7050 train loss:3.526220 +step:7051 train loss:3.543654 +step:7052 train loss:3.545301 +step:7053 train loss:3.507211 +step:7054 train loss:3.487578 +step:7055 train loss:3.557871 +step:7056 train loss:3.554480 +step:7057 train loss:3.479838 +step:7058 train loss:3.599005 +step:7059 train loss:3.508420 +step:7060 train loss:3.517722 +step:7061 train loss:3.489485 +step:7062 train loss:3.514886 +step:7063 train loss:3.574446 +step:7064 train loss:3.497139 +step:7065 train loss:3.550211 +step:7066 train loss:3.508888 +step:7067 train loss:3.543917 +step:7068 train loss:3.519686 +step:7069 train loss:3.482958 +step:7070 train loss:3.508144 +step:7071 train loss:3.479204 +step:7072 train loss:3.480804 +step:7073 train loss:3.473897 +step:7074 train loss:3.471718 +step:7075 train loss:3.487771 +step:7076 train loss:3.499995 +step:7077 train loss:3.508264 +step:7078 train loss:3.554535 +step:7079 train loss:3.563694 +step:7080 train loss:3.513153 +step:7081 train loss:3.528354 +step:7082 train loss:3.496581 +step:7083 train loss:3.527025 +step:7084 train loss:3.517858 +step:7085 train loss:3.481970 +step:7086 train loss:3.517927 +step:7087 train loss:3.494254 +step:7088 train loss:3.618378 +step:7089 train loss:3.512905 +step:7090 train loss:3.476121 +step:7091 train loss:3.495059 
+step:7092 train loss:3.473721 +step:7093 train loss:3.566327 +step:7094 train loss:3.488302 +step:7095 train loss:3.504442 +step:7096 train loss:3.522304 +step:7097 train loss:3.508536 +step:7098 train loss:3.533592 +step:7099 train loss:3.490110 +step:7100 train loss:3.520624 +step:7101 train loss:3.590177 +step:7102 train loss:3.482616 +step:7103 train loss:3.504898 +step:7104 train loss:3.539105 +step:7105 train loss:3.515740 +step:7106 train loss:3.499444 +step:7107 train loss:3.535519 +step:7108 train loss:3.602748 +step:7109 train loss:3.533368 +step:7110 train loss:3.558366 +step:7111 train loss:3.537977 +step:7112 train loss:3.528758 +step:7113 train loss:3.523768 +step:7114 train loss:3.540792 +step:7115 train loss:3.581164 +step:7116 train loss:3.512800 +step:7117 train loss:3.548676 +step:7118 train loss:3.559377 +step:7119 train loss:3.520186 +step:7120 train loss:3.578066 +step:7121 train loss:3.491873 +step:7122 train loss:3.497419 +step:7123 train loss:3.434184 +step:7124 train loss:3.590565 +step:7125 train loss:3.441637 +step:7126 train loss:3.611633 +step:7127 train loss:3.570514 +step:7128 train loss:3.511583 +step:7129 train loss:3.519766 +step:7130 train loss:3.511182 +step:7131 train loss:3.451495 +step:7132 train loss:3.489211 +step:7133 train loss:3.538080 +step:7134 train loss:3.470175 +step:7135 train loss:3.525846 +step:7136 train loss:3.509053 +step:7137 train loss:3.490401 +step:7138 train loss:3.476288 +step:7139 train loss:3.482667 +step:7140 train loss:3.516117 +step:7141 train loss:3.511937 +step:7142 train loss:3.508165 +step:7143 train loss:3.546059 +step:7144 train loss:3.493805 +step:7145 train loss:3.510232 +step:7146 train loss:3.518980 +step:7147 train loss:3.538803 +step:7148 train loss:3.543131 +step:7149 train loss:3.550316 +step:7150 train loss:3.524973 +step:7151 train loss:3.490910 +step:7152 train loss:3.463544 +step:7153 train loss:3.496794 +step:7154 train loss:3.515918 +step:7155 train loss:3.534148 +step:7156 train loss:3.503012 +step:7157 train loss:3.522903 +step:7158 train loss:3.475872 +step:7159 train loss:3.530904 +step:7160 train loss:3.541289 +step:7161 train loss:3.492633 +step:7162 train loss:3.539907 +step:7163 train loss:3.473913 +step:7164 train loss:3.511366 +step:7165 train loss:3.516111 +step:7166 train loss:3.570266 +step:7167 train loss:3.546818 +step:7168 train loss:3.526451 +step:7169 train loss:3.504992 +step:7170 train loss:3.533384 +step:7171 train loss:3.481578 +step:7172 train loss:3.643714 +step:7173 train loss:3.493485 +step:7174 train loss:3.531765 +step:7175 train loss:3.508693 +step:7176 train loss:3.519272 +step:7177 train loss:3.532426 +step:7178 train loss:3.534716 +step:7179 train loss:3.517080 +step:7180 train loss:3.518995 +step:7181 train loss:3.548421 +step:7182 train loss:3.497923 +step:7183 train loss:3.569244 +step:7184 train loss:3.660666 +step:7185 train loss:3.574669 +step:7186 train loss:3.512608 +step:7187 train loss:3.521230 +step:7188 train loss:3.511528 +step:7189 train loss:3.512914 +step:7190 train loss:3.513617 +step:7191 train loss:3.506550 +step:7192 train loss:3.539011 +step:7193 train loss:3.458220 +step:7194 train loss:3.520036 +step:7195 train loss:3.495883 +step:7196 train loss:3.543115 +step:7197 train loss:3.523942 +step:7198 train loss:3.579955 +step:7199 train loss:3.536584 +step:7200 train loss:3.529907 +step:7201 train loss:3.537670 +step:7202 train loss:3.514637 +step:7203 train loss:3.532773 +step:7204 train loss:3.500069 +step:7205 train loss:3.458430 +step:7206 train 
loss:3.486480 +step:7207 train loss:3.663478 +step:7208 train loss:3.495606 +step:7209 train loss:3.579894 +step:7210 train loss:3.516798 +step:7211 train loss:3.543101 +step:7212 train loss:3.624811 +step:7213 train loss:3.473963 +step:7214 train loss:3.545187 +step:7215 train loss:3.510552 +step:7216 train loss:3.561383 +step:7217 train loss:3.521046 +step:7218 train loss:3.605626 +step:7219 train loss:3.518992 +step:7220 train loss:3.596107 +step:7221 train loss:3.475876 +step:7222 train loss:3.556972 +step:7223 train loss:3.474919 +step:7224 train loss:3.536157 +step:7225 train loss:3.517395 +step:7226 train loss:3.483308 +step:7227 train loss:3.502638 +step:7228 train loss:3.490614 +step:7229 train loss:3.494248 +step:7230 train loss:3.481894 +step:7231 train loss:3.613503 +step:7232 train loss:3.484809 +step:7233 train loss:3.550558 +step:7234 train loss:3.541495 +step:7235 train loss:3.513552 +step:7236 train loss:3.554243 +step:7237 train loss:3.502539 +step:7238 train loss:3.544110 +step:7239 train loss:3.496312 +step:7240 train loss:3.493201 +step:7241 train loss:3.504963 +step:7242 train loss:3.489123 +step:7243 train loss:3.532195 +step:7244 train loss:3.505768 +step:7245 train loss:3.511278 +step:7246 train loss:3.551699 +step:7247 train loss:3.508742 +step:7248 train loss:3.546330 +step:7249 train loss:3.496537 +step:7250 validation loss:3.445033 +step:7250 train loss:3.518193 +step:7251 train loss:3.566312 +step:7252 train loss:3.479017 +step:7253 train loss:3.568878 +step:7254 train loss:3.503930 +step:7255 train loss:3.476493 +step:7256 train loss:3.518419 +step:7257 train loss:3.557327 +step:7258 train loss:3.516419 +step:7259 train loss:3.498975 +step:7260 train loss:3.583128 +step:7261 train loss:3.541623 +step:7262 train loss:3.496952 +step:7263 train loss:3.538130 +step:7264 train loss:3.523395 +step:7265 train loss:3.427485 +step:7266 train loss:3.552132 +step:7267 train loss:3.470185 +step:7268 train loss:3.533589 +step:7269 train loss:3.539655 +step:7270 train loss:3.493795 +step:7271 train loss:3.510271 +step:7272 train loss:3.515144 +step:7273 train loss:3.513871 +step:7274 train loss:3.488096 +step:7275 train loss:3.561395 +step:7276 train loss:3.467342 +step:7277 train loss:3.516519 +step:7278 train loss:3.484424 +step:7279 train loss:3.466931 +step:7280 train loss:3.536065 +step:7281 train loss:3.561283 +step:7282 train loss:3.558607 +step:7283 train loss:3.447838 +step:7284 train loss:3.492837 +step:7285 train loss:3.517346 +step:7286 train loss:3.653162 +step:7287 train loss:3.558913 +step:7288 train loss:3.514616 +step:7289 train loss:3.518872 +step:7290 train loss:3.565397 +step:7291 train loss:3.527857 +step:7292 train loss:3.596295 +step:7293 train loss:3.495138 +step:7294 train loss:3.581474 +step:7295 train loss:3.469160 +step:7296 train loss:3.464934 +step:7297 train loss:3.511981 +step:7298 train loss:3.488662 +step:7299 train loss:3.528883 +step:7300 train loss:3.512100 +step:7301 train loss:3.465625 +step:7302 train loss:3.610254 +step:7303 train loss:3.501388 +step:7304 train loss:3.445138 +step:7305 train loss:3.521300 +step:7306 train loss:3.550142 +step:7307 train loss:3.556929 +step:7308 train loss:3.508246 +step:7309 train loss:3.469755 +step:7310 train loss:3.500360 +step:7311 train loss:3.486659 +step:7312 train loss:3.527578 +step:7313 train loss:3.564897 +step:7314 train loss:3.459468 +step:7315 train loss:3.453842 +step:7316 train loss:3.601350 +step:7317 train loss:3.537237 +step:7318 train loss:3.477689 +step:7319 train loss:3.504985 
+step:7320 train loss:3.534593 +step:7321 train loss:3.562587 +step:7322 train loss:3.444028 +step:7323 train loss:3.499105 +step:7324 train loss:3.524468 +step:7325 train loss:3.489715 +step:7326 train loss:3.517375 +step:7327 train loss:3.493008 +step:7328 train loss:3.612216 +step:7329 train loss:3.455246 +step:7330 train loss:3.512014 +step:7331 train loss:3.502549 +step:7332 train loss:3.548082 +step:7333 train loss:3.526411 +step:7334 train loss:3.495085 +step:7335 train loss:3.494886 +step:7336 train loss:3.744359 +step:7337 train loss:3.532494 +step:7338 train loss:3.526778 +step:7339 train loss:3.538492 +step:7340 train loss:3.526984 +step:7341 train loss:3.515555 +step:7342 train loss:3.504571 +step:7343 train loss:3.521488 +step:7344 train loss:3.596635 +step:7345 train loss:3.459580 +step:7346 train loss:3.492653 +step:7347 train loss:3.485877 +step:7348 train loss:3.489395 +step:7349 train loss:3.592307 +step:7350 train loss:3.574714 +step:7351 train loss:3.511817 +step:7352 train loss:3.537689 +step:7353 train loss:3.520842 +step:7354 train loss:3.470353 +step:7355 train loss:3.655448 +step:7356 train loss:3.625281 +step:7357 train loss:3.554558 +step:7358 train loss:3.528704 +step:7359 train loss:3.498986 +step:7360 train loss:3.506382 +step:7361 train loss:3.460448 +step:7362 train loss:3.509456 +step:7363 train loss:3.520788 +step:7364 train loss:3.556406 +step:7365 train loss:3.541185 +step:7366 train loss:3.503271 +step:7367 train loss:3.581257 +step:7368 train loss:3.558022 +step:7369 train loss:3.553777 +step:7370 train loss:3.518498 +step:7371 train loss:3.475771 +step:7372 train loss:3.534055 +step:7373 train loss:3.559034 +step:7374 train loss:3.650140 +step:7375 train loss:3.475397 +step:7376 train loss:3.494164 +step:7377 train loss:3.536733 +step:7378 train loss:3.494595 +step:7379 train loss:3.616479 +step:7380 train loss:3.578628 +step:7381 train loss:3.546607 +step:7382 train loss:3.512305 +step:7383 train loss:3.602905 +step:7384 train loss:3.546214 +step:7385 train loss:3.500779 +step:7386 train loss:3.507583 +step:7387 train loss:3.550058 +step:7388 train loss:3.580378 +step:7389 train loss:3.524191 +step:7390 train loss:3.460752 +step:7391 train loss:3.501166 +step:7392 train loss:3.558881 +step:7393 train loss:3.524811 +step:7394 train loss:3.566148 +step:7395 train loss:3.452247 +step:7396 train loss:3.549940 +step:7397 train loss:3.481689 +step:7398 train loss:3.496821 +step:7399 train loss:3.544667 +step:7400 train loss:3.547429 +step:7401 train loss:3.464873 +step:7402 train loss:3.582617 +step:7403 train loss:3.467171 +step:7404 train loss:3.533243 +step:7405 train loss:3.659067 +step:7406 train loss:3.486411 +step:7407 train loss:3.531594 +step:7408 train loss:3.531491 +step:7409 train loss:3.502780 +step:7410 train loss:3.674337 +step:7411 train loss:3.518956 +step:7412 train loss:3.521087 +step:7413 train loss:3.576298 +step:7414 train loss:3.482433 +step:7415 train loss:3.543277 +step:7416 train loss:3.425797 +step:7417 train loss:3.545560 +step:7418 train loss:3.527619 +step:7419 train loss:3.495404 +step:7420 train loss:3.488950 +step:7421 train loss:3.521064 +step:7422 train loss:3.480246 +step:7423 train loss:3.619206 +step:7424 train loss:3.680992 +step:7425 train loss:3.571800 +step:7426 train loss:3.536855 +step:7427 train loss:3.507304 +step:7428 train loss:3.522568 +step:7429 train loss:3.546140 +step:7430 train loss:3.473587 +step:7431 train loss:3.474408 +step:7432 train loss:3.483581 +step:7433 train loss:3.583853 +step:7434 train 
loss:3.497403 +step:7435 train loss:3.579513 +step:7436 train loss:3.624693 +step:7437 train loss:3.446309 +step:7438 train loss:3.505226 +step:7439 train loss:3.514847 +step:7440 train loss:3.488526 +step:7441 train loss:3.458348 +step:7442 train loss:3.685218 +step:7443 train loss:3.509060 +step:7444 train loss:3.551178 +step:7445 train loss:3.483179 +step:7446 train loss:3.504182 +step:7447 train loss:3.428570 +step:7448 train loss:3.487318 +step:7449 train loss:3.498872 +step:7450 train loss:3.537965 +step:7451 train loss:3.566514 +step:7452 train loss:3.494692 +step:7453 train loss:3.521183 +step:7454 train loss:3.507148 +step:7455 train loss:3.513149 +step:7456 train loss:3.488289 +step:7457 train loss:3.495553 +step:7458 train loss:3.536726 +step:7459 train loss:3.513853 +step:7460 train loss:3.522268 +step:7461 train loss:3.556235 +step:7462 train loss:3.491148 +step:7463 train loss:3.555046 +step:7464 train loss:3.478182 +step:7465 train loss:3.486819 +step:7466 train loss:3.489648 +step:7467 train loss:3.497588 +step:7468 train loss:3.549763 +step:7469 train loss:3.481051 +step:7470 train loss:3.513984 +step:7471 train loss:3.504002 +step:7472 train loss:3.536883 +step:7473 train loss:3.479426 +step:7474 train loss:3.463891 +step:7475 train loss:3.494810 +step:7476 train loss:3.531930 +step:7477 train loss:3.503174 +step:7478 train loss:3.501323 +step:7479 train loss:3.514285 +step:7480 train loss:3.799859 +step:7481 train loss:3.449138 +step:7482 train loss:3.520234 +step:7483 train loss:3.512700 +step:7484 train loss:3.536322 +step:7485 train loss:3.520362 +step:7486 train loss:3.544769 +step:7487 train loss:3.536600 +step:7488 train loss:3.557164 +step:7489 train loss:3.551086 +step:7490 train loss:3.496911 +step:7491 train loss:3.520878 +step:7492 train loss:3.625211 +step:7493 train loss:3.600056 +step:7494 train loss:3.625204 +step:7495 train loss:3.494184 +step:7496 train loss:3.485038 +step:7497 train loss:3.580455 +step:7498 train loss:3.515157 +step:7499 train loss:3.552314 +step:7500 validation loss:3.445188 total_sharp:4.0065e-03 L1_sharp:4.3645e-03 L2_sharp:9.3849e-04 L3_sharp:7.2689e-04 L4_sharp:4.8992e-04 L5_sharp:6.1546e-04 L6_sharp:7.0732e-04 L7_sharp:1.1231e-03 L8_sharp:1.0197e-03 L9_sharp:1.0684e-03 L10_sharp:9.0203e-04 L11_sharp:8.1446e-04 L12_sharp:1.5370e-03 total_fnorm:2.2407e+00 total_l1_linf:1.9950e+04 total_spectral:2.2407e+00 L1_fnorm:4.9572e-01 L2_fnorm:5.0932e-01 L3_fnorm:5.1223e-01 L4_fnorm:5.2467e-01 L5_fnorm:5.2283e-01 L6_fnorm:5.2645e-01 L7_fnorm:5.1895e-01 L8_fnorm:5.2870e-01 L9_fnorm:5.3226e-01 L10_fnorm:5.3208e-01 L11_fnorm:5.3066e-01 L12_fnorm:5.2176e-01 L1_l1linf:6.3045e-01 L2_l1linf:5.6932e-01 L3_l1linf:5.7536e-01 L4_l1linf:6.2422e-01 L5_l1linf:6.2514e-01 L6_l1linf:5.8652e-01 L7_l1linf:5.7811e-01 L8_l1linf:5.4389e-01 L9_l1linf:5.5225e-01 L10_l1linf:5.7300e-01 L11_l1linf:5.9831e-01 L12_l1linf:6.1327e-01 L1_spectral:8.7194e-02 L2_spectral:8.0066e-02 L3_spectral:7.2617e-02 L4_spectral:6.9579e-02 L5_spectral:7.6928e-02 L6_spectral:6.6605e-02 L7_spectral:5.9627e-02 L8_spectral:5.3443e-02 L9_spectral:4.9231e-02 L10_spectral:6.3560e-02 L11_spectral:7.0459e-02 L12_spectral:7.2045e-02 ip_v_neg_g:1.1145e-02 cos_v_neg_g:5.1547e-03 v_norm:2.2407e+00 g_norm:9.6491e-01 hv_norm:3.7856e-01 cos_v_hv:2.3715e-02 hg_norm:7.5118e+00 cos_g_hg:6.0331e-01 v_par:3.7791e-04 v_perp:2.2407e+00 L1_cos_v_neg_g:9.7888e-03 L1_v_norm:4.9572e-01 L2_cos_v_neg_g:5.1412e-03 L2_v_norm:5.0932e-01 L3_cos_v_neg_g:3.8974e-03 L3_v_norm:5.1223e-01 L4_cos_v_neg_g:3.3674e-03 
L4_v_norm:5.2467e-01 L5_cos_v_neg_g:4.5925e-03 L5_v_norm:5.2283e-01 L6_cos_v_neg_g:5.1876e-03 L6_v_norm:5.2645e-01 L7_cos_v_neg_g:5.6354e-03 L7_v_norm:5.1895e-01 L8_cos_v_neg_g:6.1407e-03 L8_v_norm:5.2870e-01 L9_cos_v_neg_g:6.6887e-03 L9_v_norm:5.3226e-01 L10_cos_v_neg_g:8.3549e-03 L10_v_norm:5.3208e-01 L11_cos_v_neg_g:8.8961e-03 L11_v_norm:5.3066e-01 L12_cos_v_neg_g:1.3274e-02 L12_v_norm:5.2176e-01 +step:7500 train loss:3.500322 +step:7501 train loss:3.488276 +step:7502 train loss:3.479852 +step:7503 train loss:3.458398 +step:7504 train loss:3.480062 +step:7505 train loss:3.470097 +step:7506 train loss:3.529781 +step:7507 train loss:3.446551 +step:7508 train loss:3.518147 +step:7509 train loss:3.489414 +step:7510 train loss:3.518314 +step:7511 train loss:3.523122 +step:7512 train loss:3.788259 +step:7513 train loss:3.478647 +step:7514 train loss:3.514258 +step:7515 train loss:3.472450 +step:7516 train loss:3.485737 +step:7517 train loss:3.522447 +step:7518 train loss:3.498465 +step:7519 train loss:3.510649 +step:7520 train loss:3.574188 +step:7521 train loss:3.463628 +step:7522 train loss:3.518968 +step:7523 train loss:3.552098 +step:7524 train loss:3.500093 +step:7525 train loss:3.503807 +step:7526 train loss:3.448636 +step:7527 train loss:3.458874 +step:7528 train loss:3.557334 +step:7529 train loss:3.533591 +step:7530 train loss:3.480794 +step:7531 train loss:3.553699 +step:7532 train loss:3.543969 +step:7533 train loss:3.470308 +step:7534 train loss:3.534887 +step:7535 train loss:3.537572 +step:7536 train loss:3.569801 +step:7537 train loss:3.589574 +step:7538 train loss:3.615622 +step:7539 train loss:3.515106 +step:7540 train loss:3.499892 +step:7541 train loss:3.555780 +step:7542 train loss:3.516260 +step:7543 train loss:3.472104 +step:7544 train loss:3.513882 +step:7545 train loss:3.501532 +step:7546 train loss:3.456395 +step:7547 train loss:3.502328 +step:7548 train loss:3.516186 +step:7549 train loss:3.499154 +step:7550 train loss:3.498594 +step:7551 train loss:3.597023 +step:7552 train loss:3.511229 +step:7553 train loss:3.550220 +step:7554 train loss:3.473955 +step:7555 train loss:3.564262 +step:7556 train loss:3.465896 +step:7557 train loss:3.564179 +step:7558 train loss:3.550599 +step:7559 train loss:3.510470 +step:7560 train loss:3.603170 +step:7561 train loss:3.573849 +step:7562 train loss:3.479301 +step:7563 train loss:3.473081 +step:7564 train loss:3.527279 +step:7565 train loss:3.549333 +step:7566 train loss:3.538346 +step:7567 train loss:3.554118 +step:7568 train loss:3.495332 +step:7569 train loss:3.558586 +step:7570 train loss:3.540693 +step:7571 train loss:3.621489 +step:7572 train loss:3.472816 +step:7573 train loss:3.537095 +step:7574 train loss:3.500347 +step:7575 train loss:3.495970 +step:7576 train loss:3.503072 +step:7577 train loss:3.518454 +step:7578 train loss:3.578017 +step:7579 train loss:3.513183 +step:7580 train loss:3.502009 +step:7581 train loss:3.488238 +step:7582 train loss:3.543097 +step:7583 train loss:3.481291 +step:7584 train loss:3.466541 +step:7585 train loss:3.434855 +step:7586 train loss:3.469437 +step:7587 train loss:3.533786 +step:7588 train loss:3.662493 +step:7589 train loss:3.482157 +step:7590 train loss:3.547206 +step:7591 train loss:3.552689 +step:7592 train loss:3.511600 +step:7593 train loss:3.535398 +step:7594 train loss:3.534347 +step:7595 train loss:3.503326 +step:7596 train loss:3.555493 +step:7597 train loss:3.460439 +step:7598 train loss:3.521913 +step:7599 train loss:3.512572 +step:7600 train loss:3.477015 +step:7601 train 
loss:3.587650 +step:7602 train loss:3.526736 +step:7603 train loss:3.487950 +step:7604 train loss:3.635157 +step:7605 train loss:3.520767 +step:7606 train loss:3.555949 +step:7607 train loss:3.507967 +step:7608 train loss:3.515968 +step:7609 train loss:3.553551 +step:7610 train loss:3.510751 +step:7611 train loss:3.488168 +step:7612 train loss:3.430976 +step:7613 train loss:3.479571 +step:7614 train loss:3.550274 +step:7615 train loss:3.509832 +step:7616 train loss:3.575750 +step:7617 train loss:3.476242 +step:7618 train loss:3.562170 +step:7619 train loss:3.504951 +step:7620 train loss:3.492511 +step:7621 train loss:3.440300 +step:7622 train loss:3.715551 +step:7623 train loss:3.728868 +step:7624 train loss:3.542328 +step:7625 train loss:3.584790 +step:7626 train loss:3.499856 +step:7627 train loss:3.572602 +step:7628 train loss:3.452069 +step:7629 train loss:3.512756 +step:7630 train loss:3.524664 +step:7631 train loss:3.506429 +step:7632 train loss:3.557701 +step:7633 train loss:3.622993 +step:7634 train loss:3.586172 +step:7635 train loss:3.490799 +step:7636 train loss:3.521456 +step:7637 train loss:3.463281 +step:7638 train loss:3.573575 +step:7639 train loss:3.501923 +step:7640 train loss:3.482491 +step:7641 train loss:3.515298 +step:7642 train loss:3.854081 +step:7643 train loss:3.602442 +step:7644 train loss:3.527041 +step:7645 train loss:3.514759 +step:7646 train loss:3.503775 +step:7647 train loss:3.496414 +step:7648 train loss:3.528491 +step:7649 train loss:3.489529 +step:7650 train loss:3.540515 +step:7651 train loss:3.560019 +step:7652 train loss:3.439503 +step:7653 train loss:3.635525 +step:7654 train loss:3.491189 +step:7655 train loss:3.511523 +step:7656 train loss:3.486965 +step:7657 train loss:3.502769 +step:7658 train loss:3.454466 +step:7659 train loss:3.517862 +step:7660 train loss:3.451527 +step:7661 train loss:3.466519 +step:7662 train loss:3.467684 +step:7663 train loss:3.516416 +step:7664 train loss:3.475752 +step:7665 train loss:3.451451 +step:7666 train loss:3.558753 +step:7667 train loss:3.474191 +step:7668 train loss:3.583607 +step:7669 train loss:3.515454 +step:7670 train loss:3.470193 +step:7671 train loss:3.524472 +step:7672 train loss:3.544716 +step:7673 train loss:3.509124 +step:7674 train loss:3.545869 +step:7675 train loss:3.602938 +step:7676 train loss:3.572655 +step:7677 train loss:3.598979 +step:7678 train loss:3.538364 +step:7679 train loss:3.560251 +step:7680 train loss:3.566715 +step:7681 train loss:3.541214 +step:7682 train loss:3.503739 +step:7683 train loss:3.508424 +step:7684 train loss:3.478915 +step:7685 train loss:3.458080 +step:7686 train loss:3.578566 +step:7687 train loss:3.492793 +step:7688 train loss:3.460063 +step:7689 train loss:3.506903 +step:7690 train loss:3.476426 +step:7691 train loss:3.502120 +step:7692 train loss:3.536237 +step:7693 train loss:3.537155 +step:7694 train loss:3.592508 +step:7695 train loss:3.516938 +step:7696 train loss:3.491698 +step:7697 train loss:3.479619 +step:7698 train loss:3.540625 +step:7699 train loss:3.536417 +step:7700 train loss:3.437894 +step:7701 train loss:3.553964 +step:7702 train loss:3.495087 +step:7703 train loss:3.496929 +step:7704 train loss:3.547623 +step:7705 train loss:3.507975 +step:7706 train loss:3.445482 +step:7707 train loss:3.565639 +step:7708 train loss:3.505862 +step:7709 train loss:3.524761 +step:7710 train loss:3.585692 +step:7711 train loss:3.545594 +step:7712 train loss:3.491980 +step:7713 train loss:3.572115 +step:7714 train loss:3.515155 +step:7715 train loss:3.469585 
+step:7716 train loss:3.506320 +step:7717 train loss:3.530614 +step:7718 train loss:3.536131 +step:7719 train loss:3.491980 +step:7720 train loss:3.507211 +step:7721 train loss:3.550476 +step:7722 train loss:3.475201 +step:7723 train loss:3.849886 +step:7724 train loss:3.514461 +step:7725 train loss:3.425463 +step:7726 train loss:3.497648 +step:7727 train loss:3.529139 +step:7728 train loss:3.489192 +step:7729 train loss:3.491411 +step:7730 train loss:3.514183 +step:7731 train loss:3.541674 +step:7732 train loss:3.562665 +step:7733 train loss:3.473251 +step:7734 train loss:3.501330 +step:7735 train loss:3.589728 +step:7736 train loss:3.536265 +step:7737 train loss:3.551254 +step:7738 train loss:3.452749 +step:7739 train loss:3.533290 +step:7740 train loss:3.477574 +step:7741 train loss:3.513302 +step:7742 train loss:3.514179 +step:7743 train loss:3.464601 +step:7744 train loss:3.588455 +step:7745 train loss:3.478599 +step:7746 train loss:3.457350 +step:7747 train loss:3.552516 +step:7748 train loss:3.531532 +step:7749 train loss:3.456421 +step:7750 validation loss:3.442021 +step:7750 train loss:3.614593 +step:7751 train loss:3.499018 +step:7752 train loss:3.489524 +step:7753 train loss:3.493157 +step:7754 train loss:3.465218 +step:7755 train loss:3.533024 +step:7756 train loss:3.558707 +step:7757 train loss:3.507577 +step:7758 train loss:3.477340 +step:7759 train loss:3.504890 +step:7760 train loss:3.531785 +step:7761 train loss:3.522836 +step:7762 train loss:3.508605 +step:7763 train loss:3.494729 +step:7764 train loss:3.499160 +step:7765 train loss:3.452490 +step:7766 train loss:3.521598 +step:7767 train loss:3.522277 +step:7768 train loss:3.478977 +step:7769 train loss:3.543913 +step:7770 train loss:3.561559 +step:7771 train loss:3.533178 +step:7772 train loss:3.504662 +step:7773 train loss:3.567415 +step:7774 train loss:3.462584 +step:7775 train loss:3.451229 +step:7776 train loss:3.553773 +step:7777 train loss:3.508517 +step:7778 train loss:3.466050 +step:7779 train loss:3.508375 +step:7780 train loss:3.504184 +step:7781 train loss:3.512646 +step:7782 train loss:3.498012 +step:7783 train loss:3.478946 +step:7784 train loss:3.477289 +step:7785 train loss:3.518803 +step:7786 train loss:3.475056 +step:7787 train loss:3.555489 +step:7788 train loss:3.505486 +step:7789 train loss:3.443110 +step:7790 train loss:3.500199 +step:7791 train loss:3.533885 +step:7792 train loss:3.495223 +step:7793 train loss:3.515709 +step:7794 train loss:3.502870 +step:7795 train loss:3.535132 +step:7796 train loss:3.497880 +step:7797 train loss:3.516953 +step:7798 train loss:3.510550 +step:7799 train loss:3.500507 +step:7800 train loss:3.456290 +step:7801 train loss:3.519494 +step:7802 train loss:3.499911 +step:7803 train loss:3.551229 +step:7804 train loss:3.513303 +step:7805 train loss:3.508274 +step:7806 train loss:3.527736 +step:7807 train loss:3.603655 +step:7808 train loss:3.469343 +step:7809 train loss:3.450189 +step:7810 train loss:3.535752 +step:7811 train loss:3.466838 +step:7812 train loss:3.488315 +step:7813 train loss:3.571713 +step:7814 train loss:3.651349 +step:7815 train loss:3.456875 +step:7816 train loss:3.543069 +step:7817 train loss:3.571984 +step:7818 train loss:3.468500 +step:7819 train loss:3.520772 +step:7820 train loss:3.561449 +step:7821 train loss:3.491525 +step:7822 train loss:3.453799 +step:7823 train loss:3.572955 +step:7824 train loss:3.513637 +step:7825 train loss:3.501307 +step:7826 train loss:3.496791 +step:7827 train loss:3.542778 +step:7828 train loss:3.530734 +step:7829 
train loss:3.485083 +step:7830 train loss:3.495267 +step:7831 train loss:3.499030 +step:7832 train loss:3.560646 +step:7833 train loss:3.540439 +step:7834 train loss:3.504709 +step:7835 train loss:3.526915 +step:7836 train loss:3.642411 +step:7837 train loss:3.523791 +step:7838 train loss:3.492151 +step:7839 train loss:3.454947 +step:7840 train loss:3.469169 +step:7841 train loss:3.562190 +step:7842 train loss:3.550075 +step:7843 train loss:3.604992 +step:7844 train loss:3.530142 +step:7845 train loss:3.508980 +step:7846 train loss:3.621442 +step:7847 train loss:3.509014 +step:7848 train loss:3.521019 +step:7849 train loss:3.532105 +step:7850 train loss:3.506031 +step:7851 train loss:3.530582 +step:7852 train loss:3.506022 +step:7853 train loss:3.475287 +step:7854 train loss:3.506811 +step:7855 train loss:3.507370 +step:7856 train loss:3.507948 +step:7857 train loss:3.495775 +step:7858 train loss:3.504236 +step:7859 train loss:3.511230 +step:7860 train loss:3.545991 +step:7861 train loss:3.533494 +step:7862 train loss:3.478901 +step:7863 train loss:3.582225 +step:7864 train loss:3.424235 +step:7865 train loss:3.502420 +step:7866 train loss:3.473705 +step:7867 train loss:3.521380 +step:7868 train loss:3.497914 +step:7869 train loss:3.500341 +step:7870 train loss:3.421650 +step:7871 train loss:3.491293 +step:7872 train loss:3.482090 +step:7873 train loss:3.561618 +step:7874 train loss:3.504841 +step:7875 train loss:3.507270 +step:7876 train loss:3.526825 +step:7877 train loss:3.482383 +step:7878 train loss:3.517895 +step:7879 train loss:3.855889 +step:7880 train loss:3.509554 +step:7881 train loss:3.533793 +step:7882 train loss:3.615470 +step:7883 train loss:3.428532 +step:7884 train loss:3.519308 +step:7885 train loss:3.503244 +step:7886 train loss:3.501839 +step:7887 train loss:3.496048 +step:7888 train loss:3.526575 +step:7889 train loss:3.577111 +step:7890 train loss:3.481831 +step:7891 train loss:3.530148 +step:7892 train loss:3.500458 +step:7893 train loss:3.475322 +step:7894 train loss:3.499023 +step:7895 train loss:3.478497 +step:7896 train loss:3.481285 +step:7897 train loss:3.502615 +step:7898 train loss:3.510817 +step:7899 train loss:3.499164 +step:7900 train loss:3.468780 +step:7901 train loss:3.461240 +step:7902 train loss:3.605726 +step:7903 train loss:3.456626 +step:7904 train loss:3.504822 +step:7905 train loss:3.572872 +step:7906 train loss:3.469992 +step:7907 train loss:3.494713 +step:7908 train loss:3.548860 +step:7909 train loss:3.597470 +step:7910 train loss:3.478088 +step:7911 train loss:3.500801 +step:7912 train loss:3.500710 +step:7913 train loss:3.477701 +step:7914 train loss:3.510146 +step:7915 train loss:3.614726 +step:7916 train loss:3.486099 +step:7917 train loss:3.545279 +step:7918 train loss:3.487027 +step:7919 train loss:3.474276 +step:7920 train loss:3.516217 +step:7921 train loss:3.519847 +step:7922 train loss:3.495030 +step:7923 train loss:3.545128 +step:7924 train loss:3.503420 +step:7925 train loss:3.526815 +step:7926 train loss:3.432294 +step:7927 train loss:3.711225 +step:7928 train loss:3.537441 +step:7929 train loss:3.500986 +step:7930 train loss:3.461658 +step:7931 train loss:3.483756 +step:7932 train loss:3.507065 +step:7933 train loss:3.519873 +step:7934 train loss:3.615140 +step:7935 train loss:3.536925 +step:7936 train loss:3.507908 +step:7937 train loss:3.460338 +step:7938 train loss:3.474191 +step:7939 train loss:3.521942 +step:7940 train loss:3.506524 +step:7941 train loss:3.534356 +step:7942 train loss:3.525525 +step:7943 train loss:3.536209 
+step:7944 train loss:3.452471 +step:7945 train loss:3.559855 +step:7946 train loss:3.508314 +step:7947 train loss:3.520052 +step:7948 train loss:3.478685 +step:7949 train loss:3.534410 +step:7950 train loss:3.585657 +step:7951 train loss:3.556230 +step:7952 train loss:3.697056 +step:7953 train loss:3.587854 +step:7954 train loss:3.494008 +step:7955 train loss:3.479826 +step:7956 train loss:3.486324 +step:7957 train loss:3.560902 +step:7958 train loss:3.571012 +step:7959 train loss:3.525395 +step:7960 train loss:3.589234 +step:7961 train loss:3.498293 +step:7962 train loss:3.466563 +step:7963 train loss:3.504904 +step:7964 train loss:3.503213 +step:7965 train loss:3.512485 +step:7966 train loss:3.483795 +step:7967 train loss:3.506485 +step:7968 train loss:3.516739 +step:7969 train loss:3.473367 +step:7970 train loss:3.443079 +step:7971 train loss:3.528121 +step:7972 train loss:3.503096 +step:7973 train loss:3.477374 +step:7974 train loss:3.515024 +step:7975 train loss:3.501759 +step:7976 train loss:3.522543 +step:7977 train loss:3.550768 +step:7978 train loss:3.577241 +step:7979 train loss:3.522136 +step:7980 train loss:3.426975 +step:7981 train loss:3.464614 +step:7982 train loss:3.514386 +step:7983 train loss:3.529635 +step:7984 train loss:3.570937 +step:7985 train loss:3.495996 +step:7986 train loss:3.519326 +step:7987 train loss:3.572426 +step:7988 train loss:3.544536 +step:7989 train loss:3.450327 +step:7990 train loss:3.467344 +step:7991 train loss:3.481065 +step:7992 train loss:3.502647 +step:7993 train loss:3.487221 +step:7994 train loss:3.538809 +step:7995 train loss:3.542197 +step:7996 train loss:3.510864 +step:7997 train loss:3.528834 +step:7998 train loss:3.552310 +step:7999 train loss:3.481085 +step:8000 validation loss:3.432750 total_sharp:4.3436e-03 L1_sharp:9.0421e-03 L2_sharp:3.6754e-03 L3_sharp:1.5489e-03 L4_sharp:5.0810e-04 L5_sharp:7.4023e-04 L6_sharp:6.0668e-04 L7_sharp:1.0784e-03 L8_sharp:8.4989e-04 L9_sharp:7.5820e-04 L10_sharp:5.7807e-04 L11_sharp:6.2427e-04 L12_sharp:1.9751e-03 total_fnorm:2.2587e+00 total_l1_linf:2.0166e+04 total_spectral:2.2587e+00 L1_fnorm:5.2075e-01 L2_fnorm:5.3319e-01 L3_fnorm:5.2536e-01 L4_fnorm:5.2847e-01 L5_fnorm:5.2651e-01 L6_fnorm:5.3187e-01 L7_fnorm:5.2678e-01 L8_fnorm:5.3583e-01 L9_fnorm:5.3444e-01 L10_fnorm:5.3327e-01 L11_fnorm:5.3198e-01 L12_fnorm:5.2635e-01 L1_l1linf:6.2728e-01 L2_l1linf:6.0666e-01 L3_l1linf:5.7309e-01 L4_l1linf:6.3194e-01 L5_l1linf:5.9569e-01 L6_l1linf:5.6766e-01 L7_l1linf:5.7234e-01 L8_l1linf:5.6029e-01 L9_l1linf:5.4061e-01 L10_l1linf:5.6111e-01 L11_l1linf:5.7372e-01 L12_l1linf:6.0292e-01 L1_spectral:8.7715e-02 L2_spectral:8.7488e-02 L3_spectral:7.3150e-02 L4_spectral:6.9472e-02 L5_spectral:7.4235e-02 L6_spectral:6.8051e-02 L7_spectral:6.3117e-02 L8_spectral:5.2485e-02 L9_spectral:4.5839e-02 L10_spectral:5.5952e-02 L11_spectral:6.7480e-02 L12_spectral:7.9541e-02 ip_v_neg_g:1.2823e-02 cos_v_neg_g:6.3262e-03 v_norm:2.2587e+00 g_norm:8.9739e-01 hv_norm:4.8599e-01 cos_v_hv:2.0188e-02 hg_norm:6.7019e+00 cos_g_hg:5.4131e-01 v_par:5.1668e-04 v_perp:2.2587e+00 L1_cos_v_neg_g:2.0845e-02 L1_v_norm:5.2075e-01 L2_cos_v_neg_g:2.0348e-02 L2_v_norm:5.3319e-01 L3_cos_v_neg_g:9.1960e-03 L3_v_norm:5.2536e-01 L4_cos_v_neg_g:4.3425e-03 L4_v_norm:5.2847e-01 L5_cos_v_neg_g:5.5860e-03 L5_v_norm:5.2651e-01 L6_cos_v_neg_g:2.6259e-03 L6_v_norm:5.3187e-01 L7_cos_v_neg_g:3.6803e-03 L7_v_norm:5.2678e-01 L8_cos_v_neg_g:4.0055e-03 L8_v_norm:5.3583e-01 L9_cos_v_neg_g:3.8298e-03 L9_v_norm:5.3444e-01 L10_cos_v_neg_g:5.0911e-03 L10_v_norm:5.3327e-01 
L11_cos_v_neg_g:6.1999e-03 L11_v_norm:5.3198e-01 L12_cos_v_neg_g:1.1563e-02 L12_v_norm:5.2635e-01 +step:8000 train loss:3.550985 +step:8001 train loss:3.510467 +step:8002 train loss:3.529384 +step:8003 train loss:3.547074 +step:8004 train loss:3.523759 +step:8005 train loss:3.444659 +step:8006 train loss:3.522197 +step:8007 train loss:3.488922 +step:8008 train loss:3.517065 +step:8009 train loss:3.591927 +step:8010 train loss:3.821645 +step:8011 train loss:3.483671 +step:8012 train loss:3.560187 +step:8013 train loss:3.513752 +step:8014 train loss:3.528780 +step:8015 train loss:3.522402 +step:8016 train loss:3.512281 +step:8017 train loss:3.533778 +step:8018 train loss:3.494912 +step:8019 train loss:3.461853 +step:8020 train loss:3.501654 +step:8021 train loss:3.573912 +step:8022 train loss:3.489183 +step:8023 train loss:3.523534 +step:8024 train loss:3.395059 +step:8025 train loss:3.498900 +step:8026 train loss:3.508084 +step:8027 train loss:3.514814 +step:8028 train loss:3.574758 +step:8029 train loss:3.502354 +step:8030 train loss:3.459975 +step:8031 train loss:3.519552 +step:8032 train loss:3.504719 +step:8033 train loss:3.454581 +step:8034 train loss:3.494606 +step:8035 train loss:3.481102 +step:8036 train loss:3.472085 +step:8037 train loss:3.443677 +step:8038 train loss:3.456703 +step:8039 train loss:3.551816 +step:8040 train loss:3.486170 +step:8041 train loss:3.479918 +step:8042 train loss:3.517395 +step:8043 train loss:3.460833 +step:8044 train loss:3.470405 +step:8045 train loss:3.543306 +step:8046 train loss:3.468503 +step:8047 train loss:3.472023 +step:8048 train loss:3.504241 +step:8049 train loss:3.551075 +step:8050 train loss:3.490655 +step:8051 train loss:3.468005 +step:8052 train loss:3.526396 +step:8053 train loss:3.479009 +step:8054 train loss:3.515618 +step:8055 train loss:3.544825 +step:8056 train loss:3.511299 +step:8057 train loss:3.586842 +step:8058 train loss:3.491299 +step:8059 train loss:3.552334 +step:8060 train loss:3.520629 +step:8061 train loss:3.412855 +step:8062 train loss:3.548942 +step:8063 train loss:3.513306 +step:8064 train loss:3.469801 +step:8065 train loss:3.537292 +step:8066 train loss:3.493591 +step:8067 train loss:3.559511 +step:8068 train loss:3.483125 +step:8069 train loss:3.509212 +step:8070 train loss:3.474955 +step:8071 train loss:3.481172 +step:8072 train loss:3.522904 +step:8073 train loss:3.476051 +step:8074 train loss:3.487704 +step:8075 train loss:3.469217 +step:8076 train loss:3.519368 +step:8077 train loss:3.529212 +step:8078 train loss:3.469774 +step:8079 train loss:3.492524 +step:8080 train loss:3.478687 +step:8081 train loss:3.495165 +step:8082 train loss:3.512211 +step:8083 train loss:3.421539 +step:8084 train loss:3.550201 +step:8085 train loss:3.427109 +step:8086 train loss:3.551898 +step:8087 train loss:3.449026 +step:8088 train loss:3.496940 +step:8089 train loss:3.530294 +step:8090 train loss:3.553611 +step:8091 train loss:3.497190 +step:8092 train loss:3.477113 +step:8093 train loss:3.484825 +step:8094 train loss:3.489724 +step:8095 train loss:3.511582 +step:8096 train loss:3.514758 +step:8097 train loss:3.441667 +step:8098 train loss:3.452735 +step:8099 train loss:3.448139 +step:8100 train loss:3.501153 +step:8101 train loss:3.576580 +step:8102 train loss:3.516223 +step:8103 train loss:3.464929 +step:8104 train loss:3.515346 +step:8105 train loss:3.510731 +step:8106 train loss:3.474337 +step:8107 train loss:3.455106 +step:8108 train loss:3.473612 +step:8109 train loss:3.469687 +step:8110 train loss:3.530339 +step:8111 
train loss:3.455500 +step:8112 train loss:3.473208 +step:8113 train loss:3.464443 +step:8114 train loss:3.408124 +step:8115 train loss:3.461776 +step:8116 train loss:3.494481 +step:8117 train loss:3.470127 +step:8118 train loss:3.459233 +step:8119 train loss:3.501862 +step:8120 train loss:3.449047 +step:8121 train loss:3.506536 +step:8122 train loss:3.490328 +step:8123 train loss:3.496474 +step:8124 train loss:3.458068 +step:8125 train loss:3.439832 +step:8126 train loss:3.433930 +step:8127 train loss:3.527131 +step:8128 train loss:3.533640 +step:8129 train loss:3.455706 +step:8130 train loss:3.481331 +step:8131 train loss:3.452602 +step:8132 train loss:3.521729 +step:8133 train loss:3.446448 +step:8134 train loss:3.481754 +step:8135 train loss:3.475921 +step:8136 train loss:3.482872 +step:8137 train loss:3.546774 +step:8138 train loss:3.457137 +step:8139 train loss:3.528957 +step:8140 train loss:3.457102 +step:8141 train loss:3.483355 +step:8142 train loss:3.462847 +step:8143 train loss:3.512603 +step:8144 train loss:3.490447 +step:8145 train loss:3.455150 +step:8146 train loss:3.463353 +step:8147 train loss:3.485940 +step:8148 train loss:3.579919 +step:8149 train loss:3.491927 +step:8150 train loss:3.470085 +step:8151 train loss:3.465135 +step:8152 train loss:3.560903 +step:8153 train loss:3.437719 +step:8154 train loss:3.456579 +step:8155 train loss:3.480773 +step:8156 train loss:3.463147 +step:8157 train loss:3.483049 +step:8158 train loss:3.495362 +step:8159 train loss:3.513069 +step:8160 train loss:3.461681 +step:8161 train loss:3.507390 +step:8162 train loss:3.437146 +step:8163 train loss:3.498563 +step:8164 train loss:3.484099 +step:8165 train loss:3.532795 +step:8166 train loss:3.539087 +step:8167 train loss:3.442381 +step:8168 train loss:3.423393 +step:8169 train loss:3.473605 +step:8170 train loss:3.423059 +step:8171 train loss:3.485818 +step:8172 train loss:3.481772 +step:8173 train loss:3.482932 +step:8174 train loss:3.491549 +step:8175 train loss:3.455505 +step:8176 train loss:3.447011 +step:8177 train loss:3.492149 +step:8178 train loss:3.582072 +step:8179 train loss:3.488649 +step:8180 train loss:3.511439 +step:8181 train loss:3.510169 +step:8182 train loss:3.470591 +step:8183 train loss:3.457409 +step:8184 train loss:3.450022 +step:8185 train loss:3.490459 +step:8186 train loss:3.494025 +step:8187 train loss:3.502636 +step:8188 train loss:3.428871 +step:8189 train loss:3.577844 +step:8190 train loss:3.511000 +step:8191 train loss:3.516818 +step:8192 train loss:3.628791 +step:8193 train loss:3.495538 +step:8194 train loss:3.429271 +step:8195 train loss:3.527941 +step:8196 train loss:3.442866 +step:8197 train loss:3.473932 +step:8198 train loss:3.481301 +step:8199 train loss:3.481508 +step:8200 train loss:3.461965 +step:8201 train loss:3.575663 +step:8202 train loss:3.500992 +step:8203 train loss:3.513694 +step:8204 train loss:3.423678 +step:8205 train loss:3.430737 +step:8206 train loss:3.556527 +step:8207 train loss:3.480211 +step:8208 train loss:3.498327 +step:8209 train loss:3.541893 +step:8210 train loss:3.527384 +step:8211 train loss:3.459479 +step:8212 train loss:3.519222 +step:8213 train loss:3.526709 +step:8214 train loss:3.566358 +step:8215 train loss:3.539776 +step:8216 train loss:3.520772 +step:8217 train loss:3.499438 +step:8218 train loss:3.507200 +step:8219 train loss:3.641019 +step:8220 train loss:3.469517 +step:8221 train loss:3.490165 +step:8222 train loss:3.443505 +step:8223 train loss:3.462720 +step:8224 train loss:3.473157 +step:8225 train loss:3.524462 
+step:8226 train loss:3.455290 +step:8227 train loss:3.522069 +step:8228 train loss:3.408587 +step:8229 train loss:3.452659 +step:8230 train loss:3.467296 +step:8231 train loss:3.491564 +step:8232 train loss:3.492859 +step:8233 train loss:3.536260 +step:8234 train loss:3.533172 +step:8235 train loss:3.502791 +step:8236 train loss:3.488356 +step:8237 train loss:3.439804 +step:8238 train loss:3.689920 +step:8239 train loss:3.526177 +step:8240 train loss:3.472795 +step:8241 train loss:3.443155 +step:8242 train loss:3.480495 +step:8243 train loss:3.472923 +step:8244 train loss:3.481793 +step:8245 train loss:3.469321 +step:8246 train loss:3.537759 +step:8247 train loss:3.564729 +step:8248 train loss:3.484277 +step:8249 train loss:3.477883 +step:8250 validation loss:3.425148 +step:8250 train loss:3.465274 +step:8251 train loss:3.561341 +step:8252 train loss:3.503108 +step:8253 train loss:3.467530 +step:8254 train loss:3.439622 +step:8255 train loss:3.469887 +step:8256 train loss:3.453540 +step:8257 train loss:3.558871 +step:8258 train loss:3.483523 +step:8259 train loss:3.466925 +step:8260 train loss:3.468030 +step:8261 train loss:3.466062 +step:8262 train loss:3.480201 +step:8263 train loss:3.491780 +step:8264 train loss:3.459732 +step:8265 train loss:3.451125 +step:8266 train loss:3.459561 +step:8267 train loss:3.392213 +step:8268 train loss:3.510930 +step:8269 train loss:3.447433 +step:8270 train loss:3.500485 +step:8271 train loss:3.525683 +step:8272 train loss:3.551665 +step:8273 train loss:3.430266 +step:8274 train loss:3.492517 +step:8275 train loss:3.452480 +step:8276 train loss:3.488271 +step:8277 train loss:3.559631 +step:8278 train loss:3.571060 +step:8279 train loss:3.483352 +step:8280 train loss:3.471780 +step:8281 train loss:3.437606 +step:8282 train loss:3.498551 +step:8283 train loss:3.487132 +step:8284 train loss:3.470002 +step:8285 train loss:3.462209 +step:8286 train loss:3.570766 +step:8287 train loss:3.507212 +step:8288 train loss:3.479320 +step:8289 train loss:3.494229 +step:8290 train loss:3.432654 +step:8291 train loss:3.473206 +step:8292 train loss:3.502350 +step:8293 train loss:3.476530 +step:8294 train loss:3.447382 +step:8295 train loss:3.484870 +step:8296 train loss:3.550404 +step:8297 train loss:3.636787 +step:8298 train loss:3.452251 +step:8299 train loss:3.491427 +step:8300 train loss:3.497897 +step:8301 train loss:3.471414 +step:8302 train loss:3.531316 +step:8303 train loss:3.665307 +step:8304 train loss:3.474673 +step:8305 train loss:3.518648 +step:8306 train loss:3.492786 +step:8307 train loss:3.508955 +step:8308 train loss:3.506986 +step:8309 train loss:3.528218 +step:8310 train loss:3.446605 +step:8311 train loss:3.537476 +step:8312 train loss:3.529161 +step:8313 train loss:3.593953 +step:8314 train loss:3.466883 +step:8315 train loss:3.413856 +step:8316 train loss:3.471059 +step:8317 train loss:3.494478 +step:8318 train loss:3.482338 +step:8319 train loss:3.518180 +step:8320 train loss:3.539754 +step:8321 train loss:3.447590 +step:8322 train loss:3.465914 +step:8323 train loss:3.501871 +step:8324 train loss:3.475874 +step:8325 train loss:3.532661 +step:8326 train loss:3.499204 +step:8327 train loss:3.487013 +step:8328 train loss:3.562716 +step:8329 train loss:3.470362 +step:8330 train loss:3.509576 +step:8331 train loss:3.437451 +step:8332 train loss:3.535203 +step:8333 train loss:3.553061 +step:8334 train loss:3.421002 +step:8335 train loss:3.482325 +step:8336 train loss:3.578293 +step:8337 train loss:3.507866 +step:8338 train loss:3.475699 +step:8339 
train loss:3.453681 +step:8340 train loss:3.545742 +step:8341 train loss:3.443141 +step:8342 train loss:3.517339 +step:8343 train loss:3.431001 +step:8344 train loss:3.477155 +step:8345 train loss:3.509198 +step:8346 train loss:3.594225 +step:8347 train loss:3.482657 +step:8348 train loss:3.509636 +step:8349 train loss:3.481324 +step:8350 train loss:3.502414 +step:8351 train loss:3.444420 +step:8352 train loss:3.527575 +step:8353 train loss:3.481710 +step:8354 train loss:3.465613 +step:8355 train loss:3.466092 +step:8356 train loss:3.459644 +step:8357 train loss:3.475559 +step:8358 train loss:3.452689 +step:8359 train loss:3.445940 +step:8360 train loss:3.494380 +step:8361 train loss:3.508594 +step:8362 train loss:3.526233 +step:8363 train loss:3.524748 +step:8364 train loss:3.487121 +step:8365 train loss:3.634686 +step:8366 train loss:3.477726 +step:8367 train loss:3.452590 +step:8368 train loss:3.422147 +step:8369 train loss:3.453387 +step:8370 train loss:3.535451 +step:8371 train loss:3.504001 +step:8372 train loss:3.485209 +step:8373 train loss:3.496572 +step:8374 train loss:3.429919 +step:8375 train loss:3.489887 +step:8376 train loss:3.528157 +step:8377 train loss:3.357887 +step:8378 train loss:3.571211 +step:8379 train loss:3.440181 +step:8380 train loss:3.443286 +step:8381 train loss:3.448691 +step:8382 train loss:3.483742 +step:8383 train loss:3.438225 +step:8384 train loss:3.478912 +step:8385 train loss:3.490885 +step:8386 train loss:3.472583 +step:8387 train loss:3.633518 +step:8388 train loss:3.545038 +step:8389 train loss:3.517947 +step:8390 train loss:3.522166 +step:8391 train loss:3.455191 +step:8392 train loss:3.467602 +step:8393 train loss:3.427223 +step:8394 train loss:3.510428 +step:8395 train loss:3.517614 +step:8396 train loss:3.544043 +step:8397 train loss:3.475731 +step:8398 train loss:3.492290 +step:8399 train loss:3.459782 +step:8400 train loss:3.464561 +step:8401 train loss:3.481631 +step:8402 train loss:3.458363 +step:8403 train loss:3.477113 +step:8404 train loss:3.477597 +step:8405 train loss:3.432891 +step:8406 train loss:3.476681 +step:8407 train loss:3.516411 +step:8408 train loss:3.489251 +step:8409 train loss:3.408948 +step:8410 train loss:3.476481 +step:8411 train loss:3.499899 +step:8412 train loss:3.559028 +step:8413 train loss:3.534607 +step:8414 train loss:3.531543 +step:8415 train loss:3.452302 +step:8416 train loss:3.501719 +step:8417 train loss:3.414719 +step:8418 train loss:3.521632 +step:8419 train loss:3.474288 +step:8420 train loss:3.553030 +step:8421 train loss:3.468731 +step:8422 train loss:3.486465 +step:8423 train loss:3.500715 +step:8424 train loss:3.505149 +step:8425 train loss:3.562596 +step:8426 train loss:3.531428 +step:8427 train loss:3.448998 +step:8428 train loss:3.467706 +step:8429 train loss:3.525819 +step:8430 train loss:3.466954 +step:8431 train loss:3.469826 +step:8432 train loss:3.472347 +step:8433 train loss:3.453172 +step:8434 train loss:3.483921 +step:8435 train loss:3.404300 +step:8436 train loss:3.485046 +step:8437 train loss:3.526857 +step:8438 train loss:3.505976 +step:8439 train loss:3.447489 +step:8440 train loss:3.418453 +step:8441 train loss:3.473446 +step:8442 train loss:3.496418 +step:8443 train loss:3.455785 +step:8444 train loss:3.481500 +step:8445 train loss:3.436371 +step:8446 train loss:3.487218 +step:8447 train loss:3.497864 +step:8448 train loss:3.479599 +step:8449 train loss:3.471414 +step:8450 train loss:3.462932 +step:8451 train loss:3.493983 +step:8452 train loss:3.467579 +step:8453 train loss:3.450687 
+step:8454 train loss:3.500404 +step:8455 train loss:3.572891 +step:8456 train loss:3.548479 +step:8457 train loss:3.603325 +step:8458 train loss:3.493385 +step:8459 train loss:3.498083 +step:8460 train loss:3.426697 +step:8461 train loss:3.581817 +step:8462 train loss:3.454571 +step:8463 train loss:3.490618 +step:8464 train loss:3.506173 +step:8465 train loss:3.513829 +step:8466 train loss:3.486179 +step:8467 train loss:3.489659 +step:8468 train loss:3.740816 +step:8469 train loss:3.450814 +step:8470 train loss:3.445861 +step:8471 train loss:3.490526 +step:8472 train loss:3.510503 +step:8473 train loss:3.465462 +step:8474 train loss:3.593226 +step:8475 train loss:3.548004 +step:8476 train loss:3.498047 +step:8477 train loss:3.488203 +step:8478 train loss:3.466997 +step:8479 train loss:3.468661 +step:8480 train loss:3.552355 +step:8481 train loss:3.469192 +step:8482 train loss:3.461984 +step:8483 train loss:3.611376 +step:8484 train loss:3.493710 +step:8485 train loss:3.537489 +step:8486 train loss:3.449629 +step:8487 train loss:3.503338 +step:8488 train loss:3.448824 +step:8489 train loss:3.525300 +step:8490 train loss:3.511333 +step:8491 train loss:3.532632 +step:8492 train loss:3.490728 +step:8493 train loss:3.557213 +step:8494 train loss:3.421571 +step:8495 train loss:3.520222 +step:8496 train loss:3.466274 +step:8497 train loss:3.500832 +step:8498 train loss:3.511810 +step:8499 train loss:3.493093 +step:8500 validation loss:3.422225 total_sharp:3.7528e-03 L1_sharp:3.2303e-03 L2_sharp:6.7404e-04 L3_sharp:6.9140e-04 L4_sharp:5.6145e-04 L5_sharp:5.7740e-04 L6_sharp:7.4555e-04 L7_sharp:1.1055e-03 L8_sharp:9.1799e-04 L9_sharp:8.4638e-04 L10_sharp:7.0037e-04 L11_sharp:8.6333e-04 L12_sharp:5.5226e-03 total_fnorm:2.2108e+00 total_l1_linf:1.9637e+04 total_spectral:2.2108e+00 L1_fnorm:4.8232e-01 L2_fnorm:5.0940e-01 L3_fnorm:5.1195e-01 L4_fnorm:5.1761e-01 L5_fnorm:5.1395e-01 L6_fnorm:5.2157e-01 L7_fnorm:5.1520e-01 L8_fnorm:5.2460e-01 L9_fnorm:5.2156e-01 L10_fnorm:5.1809e-01 L11_fnorm:5.1208e-01 L12_fnorm:5.0933e-01 L1_l1linf:6.1380e-01 L2_l1linf:6.1220e-01 L3_l1linf:5.5725e-01 L4_l1linf:6.2622e-01 L5_l1linf:6.3690e-01 L6_l1linf:5.6279e-01 L7_l1linf:5.6149e-01 L8_l1linf:5.9108e-01 L9_l1linf:5.5927e-01 L10_l1linf:5.7706e-01 L11_l1linf:6.4197e-01 L12_l1linf:6.4569e-01 L1_spectral:8.6029e-02 L2_spectral:7.9519e-02 L3_spectral:7.2950e-02 L4_spectral:7.3670e-02 L5_spectral:7.8782e-02 L6_spectral:6.9837e-02 L7_spectral:6.5201e-02 L8_spectral:5.7691e-02 L9_spectral:4.7007e-02 L10_spectral:5.9114e-02 L11_spectral:7.1742e-02 L12_spectral:1.0034e-01 ip_v_neg_g:8.0297e-03 cos_v_neg_g:4.0164e-03 v_norm:2.2108e+00 g_norm:9.0429e-01 hv_norm:3.4448e-01 cos_v_hv:2.4085e-02 hg_norm:5.7077e+00 cos_g_hg:5.8494e-01 v_par:3.1921e-04 v_perp:2.2108e+00 L1_cos_v_neg_g:7.0173e-03 L1_v_norm:4.8232e-01 L2_cos_v_neg_g:5.4632e-03 L2_v_norm:5.0940e-01 L3_cos_v_neg_g:2.9750e-03 L3_v_norm:5.1195e-01 L4_cos_v_neg_g:2.9141e-03 L4_v_norm:5.1761e-01 L5_cos_v_neg_g:3.9532e-03 L5_v_norm:5.1395e-01 L6_cos_v_neg_g:3.1998e-03 L6_v_norm:5.2157e-01 L7_cos_v_neg_g:4.4972e-03 L7_v_norm:5.1520e-01 L8_cos_v_neg_g:5.6828e-03 L8_v_norm:5.2460e-01 L9_cos_v_neg_g:5.3324e-03 L9_v_norm:5.2156e-01 L10_cos_v_neg_g:5.4335e-03 L10_v_norm:5.1809e-01 L11_cos_v_neg_g:6.3320e-03 L11_v_norm:5.1208e-01 L12_cos_v_neg_g:1.2405e-02 L12_v_norm:5.0933e-01 +step:8500 train loss:3.483504 +step:8501 train loss:3.709225 +step:8502 train loss:3.721457 +step:8503 train loss:3.477212 +step:8504 train loss:3.475315 +step:8505 train loss:3.454672 +step:8506 train 
loss:3.524714 +step:8507 train loss:3.462473 +step:8508 train loss:3.495366 +step:8509 train loss:3.434819 +step:8510 train loss:3.459535 +step:8511 train loss:3.415184 +step:8512 train loss:3.514702 +step:8513 train loss:3.516496 +step:8514 train loss:3.464741 +step:8515 train loss:3.560490 +step:8516 train loss:3.477916 +step:8517 train loss:3.498195 +step:8518 train loss:3.388490 +step:8519 train loss:3.482284 +step:8520 train loss:3.449861 +step:8521 train loss:3.487184 +step:8522 train loss:3.381981 +step:8523 train loss:3.477854 +step:8524 train loss:3.468393 +step:8525 train loss:3.535496 +step:8526 train loss:3.514584 +step:8527 train loss:3.458511 +step:8528 train loss:3.540291 +step:8529 train loss:3.498332 +step:8530 train loss:3.531625 +step:8531 train loss:3.520840 +step:8532 train loss:3.561173 +step:8533 train loss:3.512501 +step:8534 train loss:3.510248 +step:8535 train loss:3.483531 +step:8536 train loss:3.572468 +step:8537 train loss:3.487663 +step:8538 train loss:3.556049 +step:8539 train loss:3.480577 +step:8540 train loss:3.505210 +step:8541 train loss:3.444656 +step:8542 train loss:3.512487 +step:8543 train loss:3.425508 +step:8544 train loss:3.422903 +step:8545 train loss:3.474428 +step:8546 train loss:3.426305 +step:8547 train loss:3.477545 +step:8548 train loss:3.450884 +step:8549 train loss:3.493550 +step:8550 train loss:3.445042 +step:8551 train loss:3.497383 +step:8552 train loss:3.498368 +step:8553 train loss:3.501750 +step:8554 train loss:3.474516 +step:8555 train loss:3.492414 +step:8556 train loss:3.569259 +step:8557 train loss:3.465608 +step:8558 train loss:3.504039 +step:8559 train loss:3.495872 +step:8560 train loss:3.476530 +step:8561 train loss:3.431282 +step:8562 train loss:3.457770 +step:8563 train loss:3.458572 +step:8564 train loss:3.528228 +step:8565 train loss:3.501840 +step:8566 train loss:3.523874 +step:8567 train loss:3.468563 +step:8568 train loss:3.484786 +step:8569 train loss:3.492099 +step:8570 train loss:3.440142 +step:8571 train loss:3.480429 +step:8572 train loss:3.496478 +step:8573 train loss:3.573412 +step:8574 train loss:3.500684 +step:8575 train loss:3.501517 +step:8576 train loss:3.534757 +step:8577 train loss:3.614865 +step:8578 train loss:3.525123 +step:8579 train loss:3.510820 +step:8580 train loss:3.443709 +step:8581 train loss:3.488255 +step:8582 train loss:3.492270 +step:8583 train loss:3.489579 +step:8584 train loss:3.479898 +step:8585 train loss:3.559791 +step:8586 train loss:3.475577 +step:8587 train loss:3.487562 +step:8588 train loss:3.533265 +step:8589 train loss:3.480349 +step:8590 train loss:3.472144 +step:8591 train loss:3.477334 +step:8592 train loss:3.433310 +step:8593 train loss:3.513054 +step:8594 train loss:3.537279 +step:8595 train loss:3.457350 +step:8596 train loss:3.503575 +step:8597 train loss:3.464325 +step:8598 train loss:3.517235 +step:8599 train loss:3.488992 +step:8600 train loss:3.492428 +step:8601 train loss:3.480587 +step:8602 train loss:3.454288 +step:8603 train loss:3.513099 +step:8604 train loss:3.460929 +step:8605 train loss:3.472587 +step:8606 train loss:3.483935 +step:8607 train loss:3.492671 +step:8608 train loss:3.536599 +step:8609 train loss:3.431832 +step:8610 train loss:3.509379 +step:8611 train loss:3.436380 +step:8612 train loss:3.515361 +step:8613 train loss:3.449287 +step:8614 train loss:3.513927 +step:8615 train loss:3.554586 +step:8616 train loss:3.435917 +step:8617 train loss:3.503894 +step:8618 train loss:3.480414 +step:8619 train loss:3.431072 +step:8620 train loss:3.475962 
+step:8621 train loss:3.507116 +step:8622 train loss:3.465809 +step:8623 train loss:3.479682 +step:8624 train loss:3.555753 +step:8625 train loss:3.475274 +step:8626 train loss:3.485003 +step:8627 train loss:3.481313 +step:8628 train loss:3.514043 +step:8629 train loss:3.421926 +step:8630 train loss:3.522647 +step:8631 train loss:3.465022 +step:8632 train loss:3.519734 +step:8633 train loss:3.466139 +step:8634 train loss:3.701479 +step:8635 train loss:3.494353 +step:8636 train loss:3.539047 +step:8637 train loss:3.466533 +step:8638 train loss:3.466793 +step:8639 train loss:3.521446 +step:8640 train loss:3.437311 +step:8641 train loss:3.534792 +step:8642 train loss:3.484867 +step:8643 train loss:3.595641 +step:8644 train loss:3.436551 +step:8645 train loss:3.509017 +step:8646 train loss:3.469460 +step:8647 train loss:3.496685 +step:8648 train loss:3.446144 +step:8649 train loss:3.529098 +step:8650 train loss:3.482717 +step:8651 train loss:3.496203 +step:8652 train loss:3.461381 +step:8653 train loss:3.496246 +step:8654 train loss:3.538767 +step:8655 train loss:3.467946 +step:8656 train loss:3.510292 +step:8657 train loss:3.510517 +step:8658 train loss:3.485494 +step:8659 train loss:3.475718 +step:8660 train loss:3.422686 +step:8661 train loss:3.482643 +step:8662 train loss:3.424362 +step:8663 train loss:3.498296 +step:8664 train loss:3.413512 +step:8665 train loss:3.434676 +step:8666 train loss:3.512041 +step:8667 train loss:3.404587 +step:8668 train loss:3.512420 +step:8669 train loss:3.553670 +step:8670 train loss:3.451471 +step:8671 train loss:3.448502 +step:8672 train loss:3.668613 +step:8673 train loss:3.431614 +step:8674 train loss:3.503110 +step:8675 train loss:3.542308 +step:8676 train loss:3.487692 +step:8677 train loss:3.508625 +step:8678 train loss:3.459347 +step:8679 train loss:3.513520 +step:8680 train loss:3.493628 +step:8681 train loss:3.495818 +step:8682 train loss:3.451937 +step:8683 train loss:3.468149 +step:8684 train loss:3.541504 +step:8685 train loss:3.486202 +step:8686 train loss:3.475285 +step:8687 train loss:3.431444 +step:8688 train loss:3.448135 +step:8689 train loss:3.517160 +step:8690 train loss:3.456374 +step:8691 train loss:3.532894 +step:8692 train loss:3.423454 +step:8693 train loss:3.509089 +step:8694 train loss:3.513331 +step:8695 train loss:3.498158 +step:8696 train loss:3.521900 +step:8697 train loss:3.478072 +step:8698 train loss:3.515143 +step:8699 train loss:3.465294 +step:8700 train loss:3.491273 +step:8701 train loss:3.454036 +step:8702 train loss:3.440133 +step:8703 train loss:3.454889 +step:8704 train loss:3.412529 +step:8705 train loss:3.490792 +step:8706 train loss:3.509832 +step:8707 train loss:3.507799 +step:8708 train loss:3.452783 +step:8709 train loss:3.517276 +step:8710 train loss:3.441223 +step:8711 train loss:3.498897 +step:8712 train loss:3.405092 +step:8713 train loss:3.480571 +step:8714 train loss:3.592379 +step:8715 train loss:3.445045 +step:8716 train loss:3.501925 +step:8717 train loss:3.474585 +step:8718 train loss:3.508148 +step:8719 train loss:3.479174 +step:8720 train loss:3.591236 +step:8721 train loss:3.483943 +step:8722 train loss:3.576112 +step:8723 train loss:3.443335 +step:8724 train loss:3.454704 +step:8725 train loss:3.482466 +step:8726 train loss:3.441993 +step:8727 train loss:3.514184 +step:8728 train loss:3.474461 +step:8729 train loss:3.476938 +step:8730 train loss:3.454474 +step:8731 train loss:3.458154 +step:8732 train loss:3.561285 +step:8733 train loss:3.482544 +step:8734 train loss:3.520893 +step:8735 train 
loss:3.589295 +step:8736 train loss:3.448741 +step:8737 train loss:3.473793 +step:8738 train loss:3.454208 +step:8739 train loss:3.515775 +step:8740 train loss:3.436505 +step:8741 train loss:3.491251 +step:8742 train loss:3.446880 +step:8743 train loss:3.483093 +step:8744 train loss:3.507608 +step:8745 train loss:3.546451 +step:8746 train loss:3.445093 +step:8747 train loss:3.551088 +step:8748 train loss:3.461977 +step:8749 train loss:3.493079 +step:8750 validation loss:3.413835 +step:8750 train loss:3.506168 +step:8751 train loss:3.544219 +step:8752 train loss:3.405311 +step:8753 train loss:3.452693 +step:8754 train loss:3.505043 +step:8755 train loss:3.487004 +step:8756 train loss:3.532471 +step:8757 train loss:3.442276 +step:8758 train loss:3.602677 +step:8759 train loss:3.443211 +step:8760 train loss:3.479906 +step:8761 train loss:3.554260 +step:8762 train loss:3.454284 +step:8763 train loss:3.426944 +step:8764 train loss:3.497143 +step:8765 train loss:3.567535 +step:8766 train loss:3.497680 +step:8767 train loss:3.455823 +step:8768 train loss:3.496037 +step:8769 train loss:3.471038 +step:8770 train loss:3.514596 +step:8771 train loss:3.488675 +step:8772 train loss:3.505940 +step:8773 train loss:3.467338 +step:8774 train loss:3.500086 +step:8775 train loss:3.498507 +step:8776 train loss:3.443299 +step:8777 train loss:3.479030 +step:8778 train loss:3.489549 +step:8779 train loss:3.509657 +step:8780 train loss:3.475580 +step:8781 train loss:3.479117 +step:8782 train loss:3.500129 +step:8783 train loss:3.481773 +step:8784 train loss:3.506417 +step:8785 train loss:3.488726 +step:8786 train loss:3.564582 +step:8787 train loss:3.510315 +step:8788 train loss:3.409271 +step:8789 train loss:3.508891 +step:8790 train loss:3.438294 +step:8791 train loss:3.490173 +step:8792 train loss:3.429594 +step:8793 train loss:3.518074 +step:8794 train loss:3.442103 +step:8795 train loss:3.511218 +step:8796 train loss:3.658703 +step:8797 train loss:3.407743 +step:8798 train loss:3.561436 +step:8799 train loss:3.481200 +step:8800 train loss:3.470632 +step:8801 train loss:3.491997 +step:8802 train loss:3.551411 +step:8803 train loss:3.509098 +step:8804 train loss:3.487176 +step:8805 train loss:3.506147 +step:8806 train loss:3.477683 +step:8807 train loss:3.467471 +step:8808 train loss:3.423709 +step:8809 train loss:3.548919 +step:8810 train loss:3.452019 +step:8811 train loss:3.441099 +step:8812 train loss:3.483285 +step:8813 train loss:3.394175 +step:8814 train loss:3.583288 +step:8815 train loss:3.425580 +step:8816 train loss:3.544246 +step:8817 train loss:3.480990 +step:8818 train loss:3.413726 +step:8819 train loss:3.531692 +step:8820 train loss:3.459635 +step:8821 train loss:3.485860 +step:8822 train loss:3.466268 +step:8823 train loss:3.483432 +step:8824 train loss:3.540301 +step:8825 train loss:3.517669 +step:8826 train loss:3.488398 +step:8827 train loss:3.447878 +step:8828 train loss:3.491339 +step:8829 train loss:3.468653 +step:8830 train loss:3.446810 +step:8831 train loss:3.524056 +step:8832 train loss:3.462406 +step:8833 train loss:3.496536 +step:8834 train loss:3.461792 +step:8835 train loss:3.400402 +step:8836 train loss:3.525200 +step:8837 train loss:3.431932 +step:8838 train loss:3.474266 +step:8839 train loss:3.460343 +step:8840 train loss:3.462992 +step:8841 train loss:3.473328 +step:8842 train loss:3.488824 +step:8843 train loss:3.495585 +step:8844 train loss:3.465734 +step:8845 train loss:3.484838 +step:8846 train loss:3.452114 +step:8847 train loss:3.490190 +step:8848 train loss:3.537652 
+step:8849 train loss:3.516606 +step:8850 train loss:3.508483 +step:8851 train loss:3.393728 +step:8852 train loss:3.495381 +step:8853 train loss:3.481301 +step:8854 train loss:3.448072 +step:8855 train loss:3.519907 +step:8856 train loss:3.510314 +step:8857 train loss:3.575948 +step:8858 train loss:3.444817 +step:8859 train loss:3.515286 +step:8860 train loss:3.472483 +step:8861 train loss:3.454650 +step:8862 train loss:3.453711 +step:8863 train loss:3.435158 +step:8864 train loss:3.505931 +step:8865 train loss:3.498104 +step:8866 train loss:3.381283 +step:8867 train loss:3.482116 +step:8868 train loss:3.512798 +step:8869 train loss:3.595890 +step:8870 train loss:3.475089 +step:8871 train loss:3.497280 +step:8872 train loss:3.481252 +step:8873 train loss:3.482949 +step:8874 train loss:3.535710 +step:8875 train loss:3.467865 +step:8876 train loss:3.506378 +step:8877 train loss:3.488709 +step:8878 train loss:3.539179 +step:8879 train loss:3.497666 +step:8880 train loss:3.445616 +step:8881 train loss:3.411008 +step:8882 train loss:3.480163 +step:8883 train loss:3.466949 +step:8884 train loss:3.556841 +step:8885 train loss:3.491423 +step:8886 train loss:3.496260 +step:8887 train loss:3.519467 +step:8888 train loss:3.479786 +step:8889 train loss:3.486465 +step:8890 train loss:3.476484 +step:8891 train loss:3.449294 +step:8892 train loss:3.531305 +step:8893 train loss:3.471125 +step:8894 train loss:3.488125 +step:8895 train loss:3.517759 +step:8896 train loss:3.433002 +step:8897 train loss:3.526951 +step:8898 train loss:3.458758 +step:8899 train loss:3.483124 +step:8900 train loss:3.449692 +step:8901 train loss:3.466527 +step:8902 train loss:3.504327 +step:8903 train loss:3.444491 +step:8904 train loss:3.495667 +step:8905 train loss:3.469337 +step:8906 train loss:3.458786 +step:8907 train loss:3.473037 +step:8908 train loss:3.537034 +step:8909 train loss:3.481918 +step:8910 train loss:3.443419 +step:8911 train loss:3.542791 +step:8912 train loss:3.438667 +step:8913 train loss:3.450736 +step:8914 train loss:3.543993 +step:8915 train loss:3.487253 +step:8916 train loss:3.516220 +step:8917 train loss:3.472308 +step:8918 train loss:3.477433 +step:8919 train loss:3.464715 +step:8920 train loss:3.492406 +step:8921 train loss:3.486301 +step:8922 train loss:3.466872 +step:8923 train loss:3.653466 +step:8924 train loss:3.548742 +step:8925 train loss:3.479574 +step:8926 train loss:3.490732 +step:8927 train loss:3.520383 +step:8928 train loss:3.475441 +step:8929 train loss:3.467681 +step:8930 train loss:3.521759 +step:8931 train loss:3.433914 +step:8932 train loss:3.536721 +step:8933 train loss:3.444623 +step:8934 train loss:3.483062 +step:8935 train loss:3.496359 +step:8936 train loss:3.533731 +step:8937 train loss:3.532141 +step:8938 train loss:3.471138 +step:8939 train loss:3.535020 +step:8940 train loss:3.491306 +step:8941 train loss:3.434826 +step:8942 train loss:3.508714 +step:8943 train loss:3.440965 +step:8944 train loss:3.491634 +step:8945 train loss:3.512985 +step:8946 train loss:3.358181 +step:8947 train loss:3.547271 +step:8948 train loss:3.398186 +step:8949 train loss:3.398144 +step:8950 train loss:3.441738 +step:8951 train loss:3.480229 +step:8952 train loss:3.501155 +step:8953 train loss:3.454379 +step:8954 train loss:3.561430 +step:8955 train loss:3.474487 +step:8956 train loss:3.502131 +step:8957 train loss:3.491942 +step:8958 train loss:3.469548 +step:8959 train loss:3.460530 +step:8960 train loss:3.427134 +step:8961 train loss:3.452154 +step:8962 train loss:3.503480 +step:8963 train 
loss:3.480908 +step:8964 train loss:3.463574 +step:8965 train loss:3.506704 +step:8966 train loss:3.464201 +step:8967 train loss:3.442851 +step:8968 train loss:3.427444 +step:8969 train loss:3.417174 +step:8970 train loss:3.497000 +step:8971 train loss:3.445777 +step:8972 train loss:3.646702 +step:8973 train loss:3.532398 +step:8974 train loss:3.489523 +step:8975 train loss:3.491043 +step:8976 train loss:3.454089 +step:8977 train loss:3.541371 +step:8978 train loss:3.525189 +step:8979 train loss:3.441717 +step:8980 train loss:3.540347 +step:8981 train loss:3.490346 +step:8982 train loss:3.461712 +step:8983 train loss:3.408302 +step:8984 train loss:3.530785 +step:8985 train loss:3.447956 +step:8986 train loss:3.485761 +step:8987 train loss:3.459854 +step:8988 train loss:3.509081 +step:8989 train loss:3.419635 +step:8990 train loss:3.559294 +step:8991 train loss:3.411813 +step:8992 train loss:3.470941 +step:8993 train loss:3.560366 +step:8994 train loss:3.463367 +step:8995 train loss:3.490610 +step:8996 train loss:3.460147 +step:8997 train loss:3.409412 +step:8998 train loss:3.413414 +step:8999 train loss:3.435604 +step:9000 validation loss:3.412693 total_sharp:3.9841e-03 L1_sharp:5.6758e-03 L2_sharp:1.5801e-03 L3_sharp:9.2860e-04 L4_sharp:4.8744e-04 L5_sharp:5.8683e-04 L6_sharp:7.3881e-04 L7_sharp:1.1631e-03 L8_sharp:1.0396e-03 L9_sharp:1.0606e-03 L10_sharp:7.6982e-04 L11_sharp:7.5919e-04 L12_sharp:1.8674e-03 total_fnorm:2.2524e+00 total_l1_linf:2.0092e+04 total_spectral:2.2524e+00 L1_fnorm:5.0961e-01 L2_fnorm:5.2103e-01 L3_fnorm:5.2296e-01 L4_fnorm:5.2989e-01 L5_fnorm:5.2530e-01 L6_fnorm:5.2901e-01 L7_fnorm:5.2494e-01 L8_fnorm:5.3613e-01 L9_fnorm:5.3259e-01 L10_fnorm:5.3273e-01 L11_fnorm:5.3212e-01 L12_fnorm:5.2224e-01 L1_l1linf:6.4096e-01 L2_l1linf:6.3900e-01 L3_l1linf:6.2391e-01 L4_l1linf:6.4209e-01 L5_l1linf:6.2445e-01 L6_l1linf:5.8204e-01 L7_l1linf:5.5741e-01 L8_l1linf:5.6870e-01 L9_l1linf:5.6165e-01 L10_l1linf:5.5438e-01 L11_l1linf:5.7773e-01 L12_l1linf:5.8563e-01 L1_spectral:8.2336e-02 L2_spectral:8.0827e-02 L3_spectral:7.2455e-02 L4_spectral:7.3644e-02 L5_spectral:8.4003e-02 L6_spectral:7.5236e-02 L7_spectral:6.7897e-02 L8_spectral:6.0108e-02 L9_spectral:4.8703e-02 L10_spectral:6.2381e-02 L11_spectral:7.2111e-02 L12_spectral:7.5451e-02 ip_v_neg_g:1.2526e-02 cos_v_neg_g:5.5957e-03 v_norm:2.2524e+00 g_norm:9.9379e-01 hv_norm:4.2346e-01 cos_v_hv:2.1191e-02 hg_norm:9.9103e+00 cos_g_hg:6.2298e-01 v_par:3.6085e-04 v_perp:2.2524e+00 L1_cos_v_neg_g:1.2275e-02 L1_v_norm:5.0961e-01 L2_cos_v_neg_g:1.2099e-02 L2_v_norm:5.2103e-01 L3_cos_v_neg_g:7.8634e-03 L3_v_norm:5.2296e-01 L4_cos_v_neg_g:4.9343e-03 L4_v_norm:5.2989e-01 L5_cos_v_neg_g:5.1208e-03 L5_v_norm:5.2530e-01 L6_cos_v_neg_g:5.2663e-03 L6_v_norm:5.2901e-01 L7_cos_v_neg_g:5.7756e-03 L7_v_norm:5.2494e-01 L8_cos_v_neg_g:5.5427e-03 L8_v_norm:5.3613e-01 L9_cos_v_neg_g:6.6039e-03 L9_v_norm:5.3259e-01 L10_cos_v_neg_g:7.3905e-03 L10_v_norm:5.3273e-01 L11_cos_v_neg_g:7.6526e-03 L11_v_norm:5.3212e-01 L12_cos_v_neg_g:1.0118e-02 L12_v_norm:5.2224e-01 +step:9000 train loss:3.524856 +step:9001 train loss:3.490320 +step:9002 train loss:3.499932 +step:9003 train loss:3.436387 +step:9004 train loss:3.440630 +step:9005 train loss:3.452249 +step:9006 train loss:3.453458 +step:9007 train loss:3.471656 +step:9008 train loss:3.428288 +step:9009 train loss:3.424761 +step:9010 train loss:3.460221 +step:9011 train loss:3.455248 +step:9012 train loss:3.572121 +step:9013 train loss:3.398037 +step:9014 train loss:3.466596 +step:9015 train loss:3.470801 +step:9016 
train loss:3.546930 +step:9017 train loss:3.488096 +step:9018 train loss:3.408188 +step:9019 train loss:3.494730 +step:9020 train loss:3.503162 +step:9021 train loss:3.460935 +step:9022 train loss:3.473025 +step:9023 train loss:3.469694 +step:9024 train loss:3.489034 +step:9025 train loss:3.472340 +step:9026 train loss:3.432914 +step:9027 train loss:3.478342 +step:9028 train loss:3.498354 +step:9029 train loss:3.516699 +step:9030 train loss:3.513416 +step:9031 train loss:3.479246 +step:9032 train loss:3.488930 +step:9033 train loss:3.474842 +step:9034 train loss:3.483487 +step:9035 train loss:3.487982 +step:9036 train loss:3.436795 +step:9037 train loss:3.433784 +step:9038 train loss:3.555147 +step:9039 train loss:3.459244 +step:9040 train loss:3.473550 +step:9041 train loss:3.523045 +step:9042 train loss:3.380031 +step:9043 train loss:3.471877 +step:9044 train loss:3.489690 +step:9045 train loss:3.436646 +step:9046 train loss:3.480052 +step:9047 train loss:3.476141 +step:9048 train loss:3.453686 +step:9049 train loss:3.490261 +step:9050 train loss:3.441792 +step:9051 train loss:3.481573 +step:9052 train loss:3.410630 +step:9053 train loss:3.536304 +step:9054 train loss:3.548494 +step:9055 train loss:3.470238 +step:9056 train loss:3.532576 +step:9057 train loss:3.387656 +step:9058 train loss:3.471704 +step:9059 train loss:3.548200 +step:9060 train loss:3.476904 +step:9061 train loss:3.504824 +step:9062 train loss:3.436905 +step:9063 train loss:3.569133 +step:9064 train loss:3.456522 +step:9065 train loss:3.465942 +step:9066 train loss:3.485691 +step:9067 train loss:3.446511 +step:9068 train loss:3.522414 +step:9069 train loss:3.480115 +step:9070 train loss:3.527806 +step:9071 train loss:3.463195 +step:9072 train loss:3.483758 +step:9073 train loss:3.443829 +step:9074 train loss:3.526543 +step:9075 train loss:3.471425 +step:9076 train loss:3.439741 +step:9077 train loss:3.516762 +step:9078 train loss:3.452880 +step:9079 train loss:3.500488 +step:9080 train loss:3.435240 +step:9081 train loss:3.469937 +step:9082 train loss:3.496242 +step:9083 train loss:3.525487 +step:9084 train loss:3.417598 +step:9085 train loss:3.489458 +step:9086 train loss:3.472341 +step:9087 train loss:3.418629 +step:9088 train loss:3.481727 +step:9089 train loss:3.494570 +step:9090 train loss:3.428860 +step:9091 train loss:3.529171 +step:9092 train loss:3.458034 +step:9093 train loss:3.452587 +step:9094 train loss:3.583511 +step:9095 train loss:3.447021 +step:9096 train loss:3.461843 +step:9097 train loss:3.455063 +step:9098 train loss:3.442142 +step:9099 train loss:3.569534 +step:9100 train loss:3.600173 +step:9101 train loss:3.517586 +step:9102 train loss:3.461803 +step:9103 train loss:3.467188 +step:9104 train loss:3.553536 +step:9105 train loss:3.414689 +step:9106 train loss:3.544212 +step:9107 train loss:3.477498 +step:9108 train loss:3.455629 +step:9109 train loss:3.485075 +step:9110 train loss:3.486936 +step:9111 train loss:3.466533 +step:9112 train loss:3.468596 +step:9113 train loss:3.498844 +step:9114 train loss:3.451269 +step:9115 train loss:3.477008 +step:9116 train loss:3.501861 +step:9117 train loss:3.510902 +step:9118 train loss:3.481181 +step:9119 train loss:3.402623 +step:9120 train loss:3.501140 +step:9121 train loss:3.529713 +step:9122 train loss:3.474796 +step:9123 train loss:3.497946 +step:9124 train loss:3.524790 +step:9125 train loss:3.477856 +step:9126 train loss:3.456392 +step:9127 train loss:3.485111 +step:9128 train loss:3.541355 +step:9129 train loss:3.496214 +step:9130 train loss:3.509553 
+step:9131 train loss:3.488632 +step:9132 train loss:3.494634 +step:9133 train loss:3.486150 +step:9134 train loss:3.454880 +step:9135 train loss:3.486714 +step:9136 train loss:3.483962 +step:9137 train loss:3.537167 +step:9138 train loss:3.455361 +step:9139 train loss:3.531431 +step:9140 train loss:3.452791 +step:9141 train loss:3.435145 +step:9142 train loss:3.607342 +step:9143 train loss:3.435598 +step:9144 train loss:3.527821 +step:9145 train loss:3.536407 +step:9146 train loss:3.451825 +step:9147 train loss:3.526096 +step:9148 train loss:3.544338 +step:9149 train loss:3.453306 +step:9150 train loss:3.476931 +step:9151 train loss:3.536227 +step:9152 train loss:3.492346 +step:9153 train loss:3.460218 +step:9154 train loss:3.472781 +step:9155 train loss:3.437154 +step:9156 train loss:3.442233 +step:9157 train loss:3.460819 +step:9158 train loss:3.441055 +step:9159 train loss:3.534940 +step:9160 train loss:3.414958 +step:9161 train loss:3.443026 +step:9162 train loss:3.529249 +step:9163 train loss:3.472409 +step:9164 train loss:3.444277 +step:9165 train loss:3.440185 +step:9166 train loss:3.497511 +step:9167 train loss:3.440108 +step:9168 train loss:3.481814 +step:9169 train loss:3.420524 +step:9170 train loss:3.440534 +step:9171 train loss:3.508085 +step:9172 train loss:3.429416 +step:9173 train loss:3.548283 +step:9174 train loss:3.478859 +step:9175 train loss:3.458386 +step:9176 train loss:3.439434 +step:9177 train loss:3.487045 +step:9178 train loss:3.433490 +step:9179 train loss:3.391907 +step:9180 train loss:3.483667 +step:9181 train loss:3.494354 +step:9182 train loss:3.466153 +step:9183 train loss:3.472573 +step:9184 train loss:3.466789 +step:9185 train loss:3.482128 +step:9186 train loss:3.445131 +step:9187 train loss:3.516970 +step:9188 train loss:3.554499 +step:9189 train loss:3.478077 +step:9190 train loss:3.482185 +step:9191 train loss:3.471294 +step:9192 train loss:3.485927 +step:9193 train loss:3.488014 +step:9194 train loss:3.424384 +step:9195 train loss:3.416829 +step:9196 train loss:3.462372 +step:9197 train loss:3.426718 +step:9198 train loss:3.497547 +step:9199 train loss:3.448620 +step:9200 train loss:3.473519 +step:9201 train loss:3.507326 +step:9202 train loss:3.495135 +step:9203 train loss:3.452122 +step:9204 train loss:3.650776 +step:9205 train loss:3.565347 +step:9206 train loss:3.477060 +step:9207 train loss:3.529549 +step:9208 train loss:3.504917 +step:9209 train loss:3.526527 +step:9210 train loss:3.419663 +step:9211 train loss:3.443433 +step:9212 train loss:3.446116 +step:9213 train loss:3.507132 +step:9214 train loss:3.451111 +step:9215 train loss:3.516071 +step:9216 train loss:3.478042 +step:9217 train loss:3.419047 +step:9218 train loss:3.512223 +step:9219 train loss:3.468964 +step:9220 train loss:3.516310 +step:9221 train loss:3.567713 +step:9222 train loss:3.511009 +step:9223 train loss:3.678064 +step:9224 train loss:3.516980 +step:9225 train loss:3.450199 +step:9226 train loss:3.468151 +step:9227 train loss:3.483747 +step:9228 train loss:3.485891 +step:9229 train loss:3.441887 +step:9230 train loss:3.505617 +step:9231 train loss:3.390845 +step:9232 train loss:3.447766 +step:9233 train loss:3.470171 +step:9234 train loss:3.526415 +step:9235 train loss:3.529881 +step:9236 train loss:3.436509 +step:9237 train loss:3.498789 +step:9238 train loss:3.470672 +step:9239 train loss:3.464426 +step:9240 train loss:3.429736 +step:9241 train loss:3.463517 +step:9242 train loss:3.471072 +step:9243 train loss:3.470092 +step:9244 train loss:3.446022 +step:9245 train 
loss:3.451667 +step:9246 train loss:3.452401 +step:9247 train loss:3.461189 +step:9248 train loss:3.472116 +step:9249 train loss:3.469274 +step:9250 validation loss:3.406778 +step:9250 train loss:3.509841 +step:9251 train loss:3.450949 +step:9252 train loss:3.521596 +step:9253 train loss:3.514933 +step:9254 train loss:3.443366 +step:9255 train loss:3.559735 +step:9256 train loss:3.438548 +step:9257 train loss:3.384566 +step:9258 train loss:3.463615 +step:9259 train loss:3.466678 +step:9260 train loss:3.561316 +step:9261 train loss:3.440679 +step:9262 train loss:3.513480 +step:9263 train loss:3.414900 +step:9264 train loss:3.560747 +step:9265 train loss:3.589339 +step:9266 train loss:3.518629 +step:9267 train loss:3.468532 +step:9268 train loss:3.460908 +step:9269 train loss:3.484704 +step:9270 train loss:3.410075 +step:9271 train loss:3.520595 +step:9272 train loss:3.464865 +step:9273 train loss:3.480232 +step:9274 train loss:3.485373 +step:9275 train loss:3.481491 +step:9276 train loss:3.507434 +step:9277 train loss:3.482248 +step:9278 train loss:3.494023 +step:9279 train loss:3.488049 +step:9280 train loss:3.489580 +step:9281 train loss:3.462259 +step:9282 train loss:3.581870 +step:9283 train loss:3.467171 +step:9284 train loss:3.432108 +step:9285 train loss:3.451775 +step:9286 train loss:3.504782 +step:9287 train loss:3.471522 +step:9288 train loss:3.483684 +step:9289 train loss:3.452284 +step:9290 train loss:3.481616 +step:9291 train loss:3.458470 +step:9292 train loss:3.497946 +step:9293 train loss:3.555561 +step:9294 train loss:3.478515 +step:9295 train loss:3.461122 +step:9296 train loss:3.416476 +step:9297 train loss:3.482686 +step:9298 train loss:3.422513 +step:9299 train loss:3.410212 +step:9300 train loss:3.510650 +step:9301 train loss:3.539289 +step:9302 train loss:3.478696 +step:9303 train loss:3.525506 +step:9304 train loss:3.447507 +step:9305 train loss:3.438633 +step:9306 train loss:3.442487 +step:9307 train loss:3.442218 +step:9308 train loss:3.415211 +step:9309 train loss:3.405252 +step:9310 train loss:3.460169 +step:9311 train loss:3.521439 +step:9312 train loss:3.473133 +step:9313 train loss:3.420898 +step:9314 train loss:3.449471 +step:9315 train loss:3.478909 +step:9316 train loss:3.466624 +step:9317 train loss:3.442652 +step:9318 train loss:3.529546 +step:9319 train loss:3.439165 +step:9320 train loss:3.463232 +step:9321 train loss:3.473344 +step:9322 train loss:3.479661 +step:9323 train loss:3.557038 +step:9324 train loss:3.497471 +step:9325 train loss:3.438853 +step:9326 train loss:3.513796 +step:9327 train loss:3.509453 +step:9328 train loss:3.510240 +step:9329 train loss:3.399595 +step:9330 train loss:3.569656 +step:9331 train loss:3.499834 +step:9332 train loss:3.520519 +step:9333 train loss:3.538753 +step:9334 train loss:3.474678 +step:9335 train loss:3.571745 +step:9336 train loss:3.528837 +step:9337 train loss:3.482511 +step:9338 train loss:3.537116 +step:9339 train loss:3.515919 +step:9340 train loss:3.474484 +step:9341 train loss:3.563097 +step:9342 train loss:3.461957 +step:9343 train loss:3.454501 +step:9344 train loss:3.459658 +step:9345 train loss:3.596350 +step:9346 train loss:3.436569 +step:9347 train loss:3.452816 +step:9348 train loss:3.476627 +step:9349 train loss:3.420715 +step:9350 train loss:3.496596 +step:9351 train loss:3.474494 +step:9352 train loss:3.460179 +step:9353 train loss:3.492971 +step:9354 train loss:3.459816 +step:9355 train loss:3.453033 +step:9356 train loss:3.501237 +step:9357 train loss:3.452330 +step:9358 train loss:3.485008 
+step:9359 train loss:3.427462 +step:9360 train loss:3.442397 +step:9361 train loss:3.443275 +step:9362 train loss:3.431187 +step:9363 train loss:3.495390 +step:9364 train loss:3.474805 +step:9365 train loss:3.477102 +step:9366 train loss:3.473508 +step:9367 train loss:3.486704 +step:9368 train loss:3.462824 +step:9369 train loss:3.458573 +step:9370 train loss:3.467717 +step:9371 train loss:3.489052 +step:9372 train loss:3.455525 +step:9373 train loss:3.439238 +step:9374 train loss:3.473428 +step:9375 train loss:3.487785 +step:9376 train loss:3.428390 +step:9377 train loss:3.499815 +step:9378 train loss:3.503215 +step:9379 train loss:3.530624 +step:9380 train loss:3.459164 +step:9381 train loss:3.468338 +step:9382 train loss:3.443292 +step:9383 train loss:3.439330 +step:9384 train loss:3.409448 +step:9385 train loss:3.484580 +step:9386 train loss:3.511514 +step:9387 train loss:3.489281 +step:9388 train loss:3.425662 +step:9389 train loss:3.444961 +step:9390 train loss:3.484985 +step:9391 train loss:3.493996 +step:9392 train loss:3.455487 +step:9393 train loss:3.446636 +step:9394 train loss:3.474876 +step:9395 train loss:3.468208 +step:9396 train loss:3.617249 +step:9397 train loss:3.505194 +step:9398 train loss:3.526317 +step:9399 train loss:3.478188 +step:9400 train loss:3.479286 +step:9401 train loss:3.472799 +step:9402 train loss:3.474042 +step:9403 train loss:3.406962 +step:9404 train loss:3.483871 +step:9405 train loss:3.442338 +step:9406 train loss:3.496734 +step:9407 train loss:3.438422 +step:9408 train loss:3.376070 +step:9409 train loss:3.440171 +step:9410 train loss:3.522291 +step:9411 train loss:3.482987 +step:9412 train loss:3.511271 +step:9413 train loss:3.529099 +step:9414 train loss:3.465237 +step:9415 train loss:3.457922 +step:9416 train loss:3.474739 +step:9417 train loss:3.429318 +step:9418 train loss:3.455471 +step:9419 train loss:3.424419 +step:9420 train loss:3.441948 +step:9421 train loss:3.492756 +step:9422 train loss:3.443742 +step:9423 train loss:3.507943 +step:9424 train loss:3.444799 +step:9425 train loss:3.488349 +step:9426 train loss:3.491637 +step:9427 train loss:3.466642 +step:9428 train loss:3.570395 +step:9429 train loss:3.460987 +step:9430 train loss:3.418298 +step:9431 train loss:3.508126 +step:9432 train loss:3.470604 +step:9433 train loss:3.511785 +step:9434 train loss:3.465197 +step:9435 train loss:3.489069 +step:9436 train loss:3.459921 +step:9437 train loss:3.470945 +step:9438 train loss:3.463361 +step:9439 train loss:3.464150 +step:9440 train loss:3.456646 +step:9441 train loss:3.467081 +step:9442 train loss:3.409131 +step:9443 train loss:3.458473 +step:9444 train loss:3.528235 +step:9445 train loss:3.458808 +step:9446 train loss:3.440990 +step:9447 train loss:3.503702 +step:9448 train loss:3.438282 +step:9449 train loss:3.462813 +step:9450 train loss:3.505593 +step:9451 train loss:3.417430 +step:9452 train loss:3.470846 +step:9453 train loss:3.453113 +step:9454 train loss:3.513249 +step:9455 train loss:3.494846 +step:9456 train loss:3.432806 +step:9457 train loss:3.468522 +step:9458 train loss:3.453077 +step:9459 train loss:3.448238 +step:9460 train loss:3.486959 +step:9461 train loss:3.516164 +step:9462 train loss:3.466361 +step:9463 train loss:3.494870 +step:9464 train loss:3.448087 +step:9465 train loss:3.540408 +step:9466 train loss:3.490744 +step:9467 train loss:3.511328 +step:9468 train loss:3.457168 +step:9469 train loss:3.446196 +step:9470 train loss:3.444026 +step:9471 train loss:3.484751 +step:9472 train loss:3.508866 +step:9473 train 
loss:3.497083 +step:9474 train loss:3.442867 +step:9475 train loss:3.434222 +step:9476 train loss:3.653024 +step:9477 train loss:3.526914 +step:9478 train loss:3.502710 +step:9479 train loss:3.602782 +step:9480 train loss:3.451009 +step:9481 train loss:3.480957 +step:9482 train loss:3.507585 +step:9483 train loss:3.464715 +step:9484 train loss:3.495528 +step:9485 train loss:3.416442 +step:9486 train loss:3.451753 +step:9487 train loss:3.484138 +step:9488 train loss:3.439285 +step:9489 train loss:3.486474 +step:9490 train loss:3.452200 +step:9491 train loss:3.492564 +step:9492 train loss:3.512643 +step:9493 train loss:3.487307 +step:9494 train loss:3.495598 +step:9495 train loss:3.449573 +step:9496 train loss:3.508174 +step:9497 train loss:3.523043 +step:9498 train loss:3.471189 +step:9499 train loss:3.519265 +step:9500 validation loss:3.405315 total_sharp:3.5511e-03 L1_sharp:3.4701e-03 L2_sharp:6.0520e-04 L3_sharp:6.6501e-04 L4_sharp:5.2607e-04 L5_sharp:6.7662e-04 L6_sharp:7.2918e-04 L7_sharp:1.1225e-03 L8_sharp:8.3573e-04 L9_sharp:9.8743e-04 L10_sharp:7.9229e-04 L11_sharp:8.2892e-04 L12_sharp:1.4697e-03 total_fnorm:2.2433e+00 total_l1_linf:1.9986e+04 total_spectral:2.2433e+00 L1_fnorm:5.0921e-01 L2_fnorm:5.1527e-01 L3_fnorm:5.1965e-01 L4_fnorm:5.2820e-01 L5_fnorm:5.2620e-01 L6_fnorm:5.2896e-01 L7_fnorm:5.2160e-01 L8_fnorm:5.2993e-01 L9_fnorm:5.3200e-01 L10_fnorm:5.3141e-01 L11_fnorm:5.2978e-01 L12_fnorm:5.1792e-01 L1_l1linf:6.3218e-01 L2_l1linf:6.0388e-01 L3_l1linf:6.1003e-01 L4_l1linf:6.0354e-01 L5_l1linf:6.8800e-01 L6_l1linf:6.3896e-01 L7_l1linf:5.8572e-01 L8_l1linf:5.6051e-01 L9_l1linf:5.4454e-01 L10_l1linf:5.5709e-01 L11_l1linf:6.0928e-01 L12_l1linf:5.8791e-01 L1_spectral:8.7264e-02 L2_spectral:8.0757e-02 L3_spectral:7.2656e-02 L4_spectral:7.6883e-02 L5_spectral:8.2644e-02 L6_spectral:7.1678e-02 L7_spectral:6.7587e-02 L8_spectral:5.6921e-02 L9_spectral:5.0966e-02 L10_spectral:6.2850e-02 L11_spectral:7.1340e-02 L12_spectral:7.3602e-02 ip_v_neg_g:6.0513e-03 cos_v_neg_g:2.8281e-03 v_norm:2.2433e+00 g_norm:9.5383e-01 hv_norm:3.9506e-01 cos_v_hv:2.0165e-02 hg_norm:7.2568e+00 cos_g_hg:6.1785e-01 v_par:2.1676e-04 v_perp:2.2433e+00 L1_cos_v_neg_g:5.1761e-03 L1_v_norm:5.0921e-01 L2_cos_v_neg_g:1.6246e-03 L2_v_norm:5.1527e-01 L3_cos_v_neg_g:2.1594e-03 L3_v_norm:5.1965e-01 L4_cos_v_neg_g:1.6103e-03 L4_v_norm:5.2820e-01 L5_cos_v_neg_g:2.4225e-03 L5_v_norm:5.2620e-01 L6_cos_v_neg_g:2.4228e-03 L6_v_norm:5.2896e-01 L7_cos_v_neg_g:3.1695e-03 L7_v_norm:5.2160e-01 L8_cos_v_neg_g:3.1614e-03 L8_v_norm:5.2993e-01 L9_cos_v_neg_g:3.5134e-03 L9_v_norm:5.3200e-01 L10_cos_v_neg_g:5.1376e-03 L10_v_norm:5.3141e-01 L11_cos_v_neg_g:6.7373e-03 L11_v_norm:5.2978e-01 L12_cos_v_neg_g:8.4173e-03 L12_v_norm:5.1792e-01 +step:9500 train loss:3.510505 +step:9501 train loss:3.491178 +step:9502 train loss:3.460875 +step:9503 train loss:3.475574 +step:9504 train loss:3.431388 +step:9505 train loss:3.457008 +step:9506 train loss:3.471702 +step:9507 train loss:3.457497 +step:9508 train loss:3.652433 +step:9509 train loss:3.470247 +step:9510 train loss:3.456225 +step:9511 train loss:3.483361 +step:9512 train loss:3.514286 +step:9513 train loss:3.503912 +step:9514 train loss:3.469970 +step:9515 train loss:3.372132 +step:9516 train loss:3.473332 +step:9517 train loss:3.508426 +step:9518 train loss:3.483300 +step:9519 train loss:3.493922 +step:9520 train loss:3.380836 +step:9521 train loss:3.375858 +step:9522 train loss:3.494162 +step:9523 train loss:3.489229 +step:9524 train loss:3.489774 +step:9525 train loss:3.536531 +step:9526 
train loss:3.553304 +step:9527 train loss:3.508871 +step:9528 train loss:3.443104 +step:9529 train loss:3.487194 +step:9530 train loss:3.532125 +step:9531 train loss:3.437620 +step:9532 train loss:3.487343 +step:9533 train loss:3.457918 +step:9534 train loss:3.542963 +step:9535 train loss:3.463562 +step:9536 train loss:3.444407 +step:9537 train loss:3.390838 +step:9538 train loss:3.407213 +step:9539 train loss:3.479120 +step:9540 train loss:3.397027 +step:9541 train loss:3.456614 +step:9542 train loss:3.584971 +step:9543 train loss:3.480908 +step:9544 train loss:3.522178 +step:9545 train loss:3.457854 +step:9546 train loss:3.478803 +step:9547 train loss:3.522706 +step:9548 train loss:3.467592 +step:9549 train loss:3.434220 +step:9550 train loss:3.462896 +step:9551 train loss:3.459234 +step:9552 train loss:3.482512 +step:9553 train loss:3.475618 +step:9554 train loss:3.522498 +step:9555 train loss:3.531047 +step:9556 train loss:3.436162 +step:9557 train loss:3.459369 +step:9558 train loss:3.519819 +step:9559 train loss:3.527012 +step:9560 train loss:3.440045 +step:9561 train loss:3.463657 +step:9562 train loss:3.504134 +step:9563 train loss:3.451725 +step:9564 train loss:3.485838 +step:9565 train loss:3.466115 +step:9566 train loss:3.436045 +step:9567 train loss:3.503859 +step:9568 train loss:3.473418 +step:9569 train loss:3.516864 +step:9570 train loss:3.410293 +step:9571 train loss:3.484048 +step:9572 train loss:3.429498 +step:9573 train loss:3.457514 +step:9574 train loss:3.436994 +step:9575 train loss:3.508308 +step:9576 train loss:3.397096 +step:9577 train loss:3.448077 +step:9578 train loss:3.451702 +step:9579 train loss:3.450533 +step:9580 train loss:3.514428 +step:9581 train loss:3.504703 +step:9582 train loss:3.473960 +step:9583 train loss:3.502590 +step:9584 train loss:3.439482 +step:9585 train loss:3.461680 +step:9586 train loss:3.512676 +step:9587 train loss:3.482025 +step:9588 train loss:3.466114 +step:9589 train loss:3.524717 +step:9590 train loss:3.491295 +step:9591 train loss:3.454901 +step:9592 train loss:3.475432 +step:9593 train loss:3.476763 +step:9594 train loss:3.492662 +step:9595 train loss:3.468462 +step:9596 train loss:3.555964 +step:9597 train loss:3.464130 +step:9598 train loss:3.427731 +step:9599 train loss:3.432728 +step:9600 train loss:3.517367 +step:9601 train loss:3.434503 +step:9602 train loss:3.515585 +step:9603 train loss:3.510632 +step:9604 train loss:3.392447 +step:9605 train loss:3.479643 +step:9606 train loss:3.534214 +step:9607 train loss:3.453640 +step:9608 train loss:3.460659 +step:9609 train loss:3.470814 +step:9610 train loss:3.510929 +step:9611 train loss:3.444987 +step:9612 train loss:3.452125 +step:9613 train loss:3.493413 +step:9614 train loss:3.462786 +step:9615 train loss:3.650518 +step:9616 train loss:3.463954 +step:9617 train loss:3.448439 +step:9618 train loss:3.411845 +step:9619 train loss:3.470895 +step:9620 train loss:3.530492 +step:9621 train loss:3.450579 +step:9622 train loss:3.464705 +step:9623 train loss:3.504906 +step:9624 train loss:3.488306 +step:9625 train loss:3.504925 +step:9626 train loss:3.476020 +step:9627 train loss:3.554553 +step:9628 train loss:3.518358 +step:9629 train loss:3.434903 +step:9630 train loss:3.491275 +step:9631 train loss:3.475744 +step:9632 train loss:3.446342 +step:9633 train loss:3.489881 +step:9634 train loss:3.556152 +step:9635 train loss:3.458211 +step:9636 train loss:3.406181 +step:9637 train loss:3.537621 +step:9638 train loss:3.420337 +step:9639 train loss:3.388570 +step:9640 train loss:3.514462 
+step:9641 train loss:3.486704 +step:9642 train loss:3.463687 +step:9643 train loss:3.466179 +step:9644 train loss:3.522710 +step:9645 train loss:3.448381 +step:9646 train loss:3.486055 +step:9647 train loss:3.496741 +step:9648 train loss:3.446924 +step:9649 train loss:3.420262 +step:9650 train loss:3.437494 +step:9651 train loss:3.528239 +step:9652 train loss:3.509043 +step:9653 train loss:3.449262 +step:9654 train loss:3.431870 +step:9655 train loss:3.426835 +step:9656 train loss:3.422480 +step:9657 train loss:3.447873 +step:9658 train loss:3.505070 +step:9659 train loss:3.615181 +step:9660 train loss:3.398382 +step:9661 train loss:3.415125 +step:9662 train loss:3.437382 +step:9663 train loss:3.479516 +step:9664 train loss:3.528935 +step:9665 train loss:3.370872 +step:9666 train loss:3.413171 +step:9667 train loss:3.552317 +step:9668 train loss:3.527460 +step:9669 train loss:3.547737 +step:9670 train loss:3.529140 +step:9671 train loss:3.529359 +step:9672 train loss:3.442872 +step:9673 train loss:3.464598 +step:9674 train loss:3.475053 +step:9675 train loss:3.473956 +step:9676 train loss:3.428947 +step:9677 train loss:3.438306 +step:9678 train loss:3.472978 +step:9679 train loss:3.464403 +step:9680 train loss:3.461567 +step:9681 train loss:3.448832 +step:9682 train loss:3.515220 +step:9683 train loss:3.492382 +step:9684 train loss:3.408101 +step:9685 train loss:3.493626 +step:9686 train loss:3.526161 +step:9687 train loss:3.432824 +step:9688 train loss:3.519644 +step:9689 train loss:3.618126 +step:9690 train loss:3.460187 +step:9691 train loss:3.450159 +step:9692 train loss:3.413544 +step:9693 train loss:3.410962 +step:9694 train loss:3.432721 +step:9695 train loss:3.540182 +step:9696 train loss:3.572474 +step:9697 train loss:3.485018 +step:9698 train loss:3.518085 +step:9699 train loss:3.477738 +step:9700 train loss:3.476725 +step:9701 train loss:3.528774 +step:9702 train loss:3.443832 +step:9703 train loss:3.467037 +step:9704 train loss:3.548596 +step:9705 train loss:3.445139 +step:9706 train loss:3.441628 +step:9707 train loss:3.489551 +step:9708 train loss:3.438927 +step:9709 train loss:3.458359 +step:9710 train loss:3.480193 +step:9711 train loss:3.451332 +step:9712 train loss:3.461163 +step:9713 train loss:3.513691 +step:9714 train loss:3.467206 +step:9715 train loss:3.485655 +step:9716 train loss:3.509979 +step:9717 train loss:3.432651 +step:9718 train loss:3.439133 +step:9719 train loss:3.524509 +step:9720 train loss:3.453858 +step:9721 train loss:3.443136 +step:9722 train loss:3.508722 +step:9723 train loss:3.454180 +step:9724 train loss:3.482980 +step:9725 train loss:3.534362 +step:9726 train loss:3.475825 +step:9727 train loss:3.455351 +step:9728 train loss:3.493192 +step:9729 train loss:3.520545 +step:9730 train loss:3.591944 +step:9731 train loss:3.512936 +step:9732 train loss:3.473109 +step:9733 train loss:3.512386 +step:9734 train loss:3.436098 +step:9735 train loss:3.541929 +step:9736 train loss:3.442876 +step:9737 train loss:3.501755 +step:9738 train loss:3.466547 +step:9739 train loss:3.537389 +step:9740 train loss:3.503759 +step:9741 train loss:3.443132 +step:9742 train loss:3.537611 +step:9743 train loss:3.410881 +step:9744 train loss:3.470178 +step:9745 train loss:3.430571 +step:9746 train loss:3.467095 +step:9747 train loss:3.457177 +step:9748 train loss:3.358113 +step:9749 train loss:3.455585 +step:9750 validation loss:3.397558 +step:9750 train loss:3.435312 +step:9751 train loss:3.574280 +step:9752 train loss:3.460628 +step:9753 train loss:3.417567 +step:9754 
train loss:3.449465 +step:9755 train loss:3.446264 +step:9756 train loss:3.446869 +step:9757 train loss:3.418356 +step:9758 train loss:3.406501 +step:9759 train loss:3.454255 +step:9760 train loss:3.398291 +step:9761 train loss:3.439996 +step:9762 train loss:3.434984 +step:9763 train loss:3.456024 +step:9764 train loss:3.442039 +step:9765 train loss:3.405496 +step:9766 train loss:3.495448 +step:9767 train loss:3.450797 +step:9768 train loss:3.462435 +step:9769 train loss:3.419496 +step:9770 train loss:3.416313 +step:9771 train loss:3.466422 +step:9772 train loss:3.477253 +step:9773 train loss:3.453926 +step:9774 train loss:3.426958 +step:9775 train loss:3.515409 +step:9776 train loss:3.511214 +step:9777 train loss:3.401227 +step:9778 train loss:3.411404 +step:9779 train loss:3.413565 +step:9780 train loss:3.414195 +step:9781 train loss:3.434922 +step:9782 train loss:3.510446 +step:9783 train loss:3.420405 +step:9784 train loss:3.446430 +step:9785 train loss:3.438407 +step:9786 train loss:3.472891 +step:9787 train loss:3.499745 +step:9788 train loss:3.427825 +step:9789 train loss:3.436793 +step:9790 train loss:3.395766 +step:9791 train loss:3.444349 +step:9792 train loss:3.460938 +step:9793 train loss:3.478620 +step:9794 train loss:3.454149 +step:9795 train loss:3.458315 +step:9796 train loss:3.443887 +step:9797 train loss:3.438020 +step:9798 train loss:3.455101 +step:9799 train loss:3.455646 +step:9800 train loss:3.528544 +step:9801 train loss:3.452927 +step:9802 train loss:3.510710 +step:9803 train loss:3.368464 +step:9804 train loss:3.462353 +step:9805 train loss:3.469152 +step:9806 train loss:3.441354 +step:9807 train loss:3.411676 +step:9808 train loss:3.328803 +step:9809 train loss:3.514649 +step:9810 train loss:3.472017 +step:9811 train loss:3.455400 +step:9812 train loss:3.429080 +step:9813 train loss:3.509215 +step:9814 train loss:3.500268 +step:9815 train loss:3.404647 +step:9816 train loss:3.406527 +step:9817 train loss:3.437717 +step:9818 train loss:3.465238 +step:9819 train loss:3.436384 +step:9820 train loss:3.503487 +step:9821 train loss:3.481025 +step:9822 train loss:3.456856 +step:9823 train loss:3.516323 +step:9824 train loss:3.420632 +step:9825 train loss:3.508472 +step:9826 train loss:3.503521 +step:9827 train loss:3.507922 +step:9828 train loss:3.423203 +step:9829 train loss:3.431223 +step:9830 train loss:3.418555 +step:9831 train loss:3.479187 +step:9832 train loss:3.490945 +step:9833 train loss:3.405089 +step:9834 train loss:3.456253 +step:9835 train loss:3.422114 +step:9836 train loss:3.486162 +step:9837 train loss:3.459493 +step:9838 train loss:3.495965 +step:9839 train loss:3.470602 +step:9840 train loss:3.438596 +step:9841 train loss:3.445019 +step:9842 train loss:3.506105 +step:9843 train loss:3.501707 +step:9844 train loss:3.449007 +step:9845 train loss:3.478810 +step:9846 train loss:3.413642 +step:9847 train loss:3.543188 +step:9848 train loss:3.469349 +step:9849 train loss:3.493026 +step:9850 train loss:3.412552 +step:9851 train loss:3.466574 +step:9852 train loss:3.428873 +step:9853 train loss:3.451091 +step:9854 train loss:3.464610 +step:9855 train loss:3.413911 +step:9856 train loss:3.416477 +step:9857 train loss:3.406527 +step:9858 train loss:3.471556 +step:9859 train loss:3.389687 +step:9860 train loss:3.627221 +step:9861 train loss:3.455492 +step:9862 train loss:3.420661 +step:9863 train loss:3.400722 +step:9864 train loss:3.525633 +step:9865 train loss:3.403053 +step:9866 train loss:3.442571 +step:9867 train loss:3.440990 +step:9868 train loss:3.499288 
+step:9869 train loss:3.463317 +step:9870 train loss:3.432938 +step:9871 train loss:3.477372 +step:9872 train loss:3.422013 +step:9873 train loss:3.471976 +step:9874 train loss:3.436467 +step:9875 train loss:3.439489 +step:9876 train loss:3.404881 +step:9877 train loss:3.458123 +step:9878 train loss:3.484201 +step:9879 train loss:3.486416 +step:9880 train loss:3.417991 +step:9881 train loss:3.469485 +step:9882 train loss:3.429982 +step:9883 train loss:3.438993 +step:9884 train loss:3.432959 +step:9885 train loss:3.496309 +step:9886 train loss:3.461638 +step:9887 train loss:3.463382 +step:9888 train loss:3.485737 +step:9889 train loss:3.521336 +step:9890 train loss:3.431123 +step:9891 train loss:3.436002 +step:9892 train loss:3.409081 +step:9893 train loss:3.531481 +step:9894 train loss:3.439211 +step:9895 train loss:3.374972 +step:9896 train loss:3.531360 +step:9897 train loss:3.406434 +step:9898 train loss:3.475450 +step:9899 train loss:3.453678 +step:9900 train loss:3.499200 +step:9901 train loss:3.420315 +step:9902 train loss:3.466518 +step:9903 train loss:3.437052 +step:9904 train loss:3.486237 +step:9905 train loss:3.393441 +step:9906 train loss:3.431463 +step:9907 train loss:3.438052 +step:9908 train loss:3.435224 +step:9909 train loss:3.452249 +step:9910 train loss:3.477748 +step:9911 train loss:3.560789 +step:9912 train loss:3.438478 +step:9913 train loss:3.440201 +step:9914 train loss:3.450148 +step:9915 train loss:3.448089 +step:9916 train loss:3.399557 +step:9917 train loss:3.437436 +step:9918 train loss:3.431586 +step:9919 train loss:3.595064 +step:9920 train loss:3.382756 +step:9921 train loss:3.476297 +step:9922 train loss:3.434733 +step:9923 train loss:3.490804 +step:9924 train loss:3.404391 +step:9925 train loss:3.464763 +step:9926 train loss:3.441874 +step:9927 train loss:3.484676 +step:9928 train loss:3.412258 +step:9929 train loss:3.448653 +step:9930 train loss:3.541838 +step:9931 train loss:3.504713 +step:9932 train loss:3.390182 +step:9933 train loss:3.484267 +step:9934 train loss:3.409034 +step:9935 train loss:3.521597 +step:9936 train loss:3.426767 +step:9937 train loss:3.453547 +step:9938 train loss:3.437604 +step:9939 train loss:3.504377 +step:9940 train loss:3.542683 +step:9941 train loss:3.415764 +step:9942 train loss:3.462116 +step:9943 train loss:3.592752 +step:9944 train loss:3.458742 +step:9945 train loss:3.488884 +step:9946 train loss:3.453265 +step:9947 train loss:3.403994 +step:9948 train loss:3.447236 +step:9949 train loss:3.342184 +step:9950 train loss:3.495547 +step:9951 train loss:3.414854 +step:9952 train loss:3.485677 +step:9953 train loss:3.448764 +step:9954 train loss:3.503732 +step:9955 train loss:3.478225 +step:9956 train loss:3.478476 +step:9957 train loss:3.454165 +step:9958 train loss:3.510195 +step:9959 train loss:3.408526 +step:9960 train loss:3.441989 +step:9961 train loss:3.449972 +step:9962 train loss:3.498232 +step:9963 train loss:3.389527 +step:9964 train loss:3.443657 +step:9965 train loss:3.447547 +step:9966 train loss:3.503991 +step:9967 train loss:3.418989 +step:9968 train loss:3.482405 +step:9969 train loss:3.393808 +step:9970 train loss:3.438385 +step:9971 train loss:3.479515 +step:9972 train loss:3.501540 +step:9973 train loss:3.478775 +step:9974 train loss:3.466658 +step:9975 train loss:3.434231 +step:9976 train loss:3.392006 +step:9977 train loss:3.445440 +step:9978 train loss:3.440004 +step:9979 train loss:3.453182 +step:9980 train loss:3.507791 +step:9981 train loss:3.416349 +step:9982 train loss:3.477578 +step:9983 train 
loss:3.396760 +step:9984 train loss:3.459605 +step:9985 train loss:3.404636 +step:9986 train loss:3.455631 +step:9987 train loss:3.501293 +step:9988 train loss:3.515726 +step:9989 train loss:3.409683 +step:9990 train loss:3.549383 +step:9991 train loss:3.399642 +step:9992 train loss:3.474285 +step:9993 train loss:3.463421 +step:9994 train loss:3.578632 +step:9995 train loss:3.516895 +step:9996 train loss:3.434141 +step:9997 train loss:3.475165 +step:9998 train loss:3.526325 +step:9999 train loss:3.492654 +step:10000 validation loss:3.396091 total_sharp:3.9674e-03 L1_sharp:3.5186e-03 L2_sharp:6.8244e-04 L3_sharp:7.2841e-04 L4_sharp:4.8822e-04 L5_sharp:6.9951e-04 L6_sharp:7.2982e-04 L7_sharp:1.1247e-03 L8_sharp:9.5043e-04 L9_sharp:1.0262e-03 L10_sharp:8.4254e-04 L11_sharp:8.5221e-04 L12_sharp:1.8652e-03 total_fnorm:2.2564e+00 total_l1_linf:2.0115e+04 total_spectral:2.2564e+00 L1_fnorm:5.0361e-01 L2_fnorm:5.1244e-01 L3_fnorm:5.2766e-01 L4_fnorm:5.3149e-01 L5_fnorm:5.2976e-01 L6_fnorm:5.3250e-01 L7_fnorm:5.2621e-01 L8_fnorm:5.3495e-01 L9_fnorm:5.3685e-01 L10_fnorm:5.3697e-01 L11_fnorm:5.3663e-01 L12_fnorm:5.2794e-01 L1_l1linf:7.0203e-01 L2_l1linf:6.2596e-01 L3_l1linf:6.0202e-01 L4_l1linf:6.4001e-01 L5_l1linf:6.4536e-01 L6_l1linf:5.8979e-01 L7_l1linf:6.6210e-01 L8_l1linf:5.6520e-01 L9_l1linf:5.9371e-01 L10_l1linf:6.2206e-01 L11_l1linf:5.8418e-01 L12_l1linf:6.5319e-01 L1_spectral:9.3716e-02 L2_spectral:8.3946e-02 L3_spectral:8.1225e-02 L4_spectral:8.9861e-02 L5_spectral:9.3171e-02 L6_spectral:7.9271e-02 L7_spectral:7.3973e-02 L8_spectral:6.1265e-02 L9_spectral:4.9437e-02 L10_spectral:7.0153e-02 L11_spectral:8.3073e-02 L12_spectral:9.0201e-02 ip_v_neg_g:1.0117e-02 cos_v_neg_g:3.7417e-03 v_norm:2.2564e+00 g_norm:1.1982e+00 hv_norm:4.2902e-01 cos_v_hv:2.0867e-02 hg_norm:2.0345e+01 cos_g_hg:6.5054e-01 v_par:2.8931e-04 v_perp:2.2564e+00 L1_cos_v_neg_g:5.4431e-03 L1_v_norm:5.0361e-01 L2_cos_v_neg_g:2.8691e-03 L2_v_norm:5.1244e-01 L3_cos_v_neg_g:3.5859e-03 L3_v_norm:5.2766e-01 L4_cos_v_neg_g:4.2061e-03 L4_v_norm:5.3149e-01 L5_cos_v_neg_g:3.8928e-03 L5_v_norm:5.2976e-01 L6_cos_v_neg_g:4.8078e-03 L6_v_norm:5.3250e-01 L7_cos_v_neg_g:4.4264e-03 L7_v_norm:5.2621e-01 L8_cos_v_neg_g:4.5672e-03 L8_v_norm:5.3495e-01 L9_cos_v_neg_g:4.2294e-03 L9_v_norm:5.3685e-01 L10_cos_v_neg_g:5.4349e-03 L10_v_norm:5.3697e-01 L11_cos_v_neg_g:7.2722e-03 L11_v_norm:5.3663e-01 L12_cos_v_neg_g:8.6875e-03 L12_v_norm:5.2794e-01 diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/config.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..28535ba9b142ceece0ec830c23c45898ccee68eb --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/adam_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.002, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 1, 
+ "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "adam", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "46082d75-9da1-4c44-a62a-4dcb8c94a873", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..64333be8a3c3f2aeaa5e45e063737f79ae2f8838 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 3.803499937057495, + "total_l1_linf_norm": 32908.296875, + "total_spectral_norm": 3.803499937057495, + "layer_1_update_fnorm": 0.6690749526023865, + "layer_1_max_l1_linf_norm": 0.848091185092926, + "layer_1_max_spectral_norm": 0.13356834650039673, + "layer_2_update_fnorm": 0.7349246144294739, + "layer_2_max_l1_linf_norm": 0.845048725605011, + "layer_2_max_spectral_norm": 0.11631607264280319, + "layer_3_update_fnorm": 0.7530000805854797, + "layer_3_max_l1_linf_norm": 0.8724329471588135, + "layer_3_max_spectral_norm": 0.12499625235795975, + "layer_4_update_fnorm": 0.8098304271697998, + "layer_4_max_l1_linf_norm": 0.9062519073486328, + "layer_4_max_spectral_norm": 0.12999886274337769, + "layer_5_update_fnorm": 0.8315579295158386, + "layer_5_max_l1_linf_norm": 0.9004092812538147, + "layer_5_max_spectral_norm": 0.12602761387825012, + "layer_6_update_fnorm": 0.8460747599601746, + "layer_6_max_l1_linf_norm": 0.9140877723693848, + "layer_6_max_spectral_norm": 0.11974061280488968, + "layer_7_update_fnorm": 0.8364734649658203, + "layer_7_max_l1_linf_norm": 0.9150630831718445, + "layer_7_max_spectral_norm": 0.11048229783773422, + "layer_8_update_fnorm": 0.846396267414093, + "layer_8_max_l1_linf_norm": 0.9226115942001343, + "layer_8_max_spectral_norm": 0.10821396112442017, + "layer_9_update_fnorm": 0.8575325012207031, + "layer_9_max_l1_linf_norm": 0.9580366611480713, + "layer_9_max_spectral_norm": 0.1074291244149208, + "layer_10_update_fnorm": 0.8516858816146851, + "layer_10_max_l1_linf_norm": 0.9662332534790039, + "layer_10_max_spectral_norm": 0.11344899237155914, + "layer_11_update_fnorm": 0.852161705493927, + "layer_11_max_l1_linf_norm": 0.9517210721969604, + "layer_11_max_spectral_norm": 0.12142246216535568, + "layer_12_update_fnorm": 0.8240905404090881, + "layer_12_max_l1_linf_norm": 0.9504753351211548, + "layer_12_max_spectral_norm": 0.13378876447677612, + "total_sharpness": 0.0056312778033316135, + "ip_v_neg_g": 0.0463106669485569, + "cos_v_neg_g": 0.020724546164274216, + "v_norm": 3.803499937057495, + "g_norm": 0.5875064134597778, + "hv_norm": 0.3652019798755646, + "cos_v_hv": 0.05864854156970978, + "hg_norm": 0.9095128774642944, + "cos_g_hg": 0.4783177971839905, + "v_parallel_norm": 0.0039220466278493404, + "v_perp_norm": 3.803497791290283, + "layer_1_v_norm": 0.6690749526023865, + "layer_1_cos_v_neg_g": 0.04723447188735008, + "layer_2_v_norm": 0.7349246144294739, + "layer_2_cos_v_neg_g": 0.029570404440164566, + "layer_3_v_norm": 0.7530000805854797, + "layer_3_cos_v_neg_g": 0.03261527791619301, + "layer_4_v_norm": 0.8098304271697998, + "layer_4_cos_v_neg_g": 
0.03798221796751022, + "layer_5_v_norm": 0.8315579295158386, + "layer_5_cos_v_neg_g": 0.034480493515729904, + "layer_6_v_norm": 0.8460747599601746, + "layer_6_cos_v_neg_g": 0.031562790274620056, + "layer_7_v_norm": 0.8364734649658203, + "layer_7_cos_v_neg_g": 0.03295731171965599, + "layer_8_v_norm": 0.846396267414093, + "layer_8_cos_v_neg_g": 0.03480035439133644, + "layer_9_v_norm": 0.8575325012207031, + "layer_9_cos_v_neg_g": 0.03282953053712845, + "layer_10_v_norm": 0.8516858816146851, + "layer_10_cos_v_neg_g": 0.03612016141414642, + "layer_11_v_norm": 0.852161705493927, + "layer_11_cos_v_neg_g": 0.03557844087481499, + "layer_12_v_norm": 0.8240905404090881, + "layer_12_cos_v_neg_g": 0.04153024032711983, + "layer_1_sharpness": 0.012514264322817326, + "layer_2_sharpness": 0.0005245278589427471, + "layer_3_sharpness": 0.0010246681049466133, + "layer_4_sharpness": 0.0010213118512183428, + "layer_5_sharpness": 0.0007527876878157258, + "layer_6_sharpness": 0.000692143919877708, + "layer_7_sharpness": 0.0009672546875663102, + "layer_8_sharpness": 0.0014284339267760515, + "layer_9_sharpness": 0.0013077782932668924, + "layer_10_sharpness": 0.0012432202929630876, + "layer_11_sharpness": 0.001487173605710268, + "layer_12_sharpness": 0.0014911068137735128 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_10000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..cf0fc2f3edcff1cb9bd0828c78bbbd5742b1fc6a --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.520252227783203, + "total_l1_linf_norm": 40260.64453125, + "total_spectral_norm": 4.520251750946045, + "layer_1_update_fnorm": 1.0243769884109497, + "layer_1_max_l1_linf_norm": 1.2306506633758545, + "layer_1_max_spectral_norm": 0.15859219431877136, + "layer_2_update_fnorm": 1.045413613319397, + "layer_2_max_l1_linf_norm": 1.1049563884735107, + "layer_2_max_spectral_norm": 0.14639650285243988, + "layer_3_update_fnorm": 1.0461342334747314, + "layer_3_max_l1_linf_norm": 1.075145959854126, + "layer_3_max_spectral_norm": 0.16277989745140076, + "layer_4_update_fnorm": 1.0632699728012085, + "layer_4_max_l1_linf_norm": 1.0970009565353394, + "layer_4_max_spectral_norm": 0.10648676007986069, + "layer_5_update_fnorm": 1.0897274017333984, + "layer_5_max_l1_linf_norm": 1.1031429767608643, + "layer_5_max_spectral_norm": 0.11071732640266418, + "layer_6_update_fnorm": 1.0933114290237427, + "layer_6_max_l1_linf_norm": 1.102624535560608, + "layer_6_max_spectral_norm": 0.10728929191827774, + "layer_7_update_fnorm": 1.087112545967102, + "layer_7_max_l1_linf_norm": 1.0978373289108276, + "layer_7_max_spectral_norm": 0.09520428627729416, + "layer_8_update_fnorm": 1.0804545879364014, + "layer_8_max_l1_linf_norm": 1.0932347774505615, + "layer_8_max_spectral_norm": 0.10184831917285919, + "layer_9_update_fnorm": 1.0739895105361938, + "layer_9_max_l1_linf_norm": 1.1109117269515991, + "layer_9_max_spectral_norm": 0.13661330938339233, + "layer_10_update_fnorm": 1.0434882640838623, + "layer_10_max_l1_linf_norm": 1.2536821365356445, + "layer_10_max_spectral_norm": 0.1768481731414795, + "layer_11_update_fnorm": 1.055788278579712, + "layer_11_max_l1_linf_norm": 1.3037474155426025, + "layer_11_max_spectral_norm": 0.18097853660583496, + "layer_12_update_fnorm": 1.0485987663269043, 
+ "layer_12_max_l1_linf_norm": 1.2566277980804443, + "layer_12_max_spectral_norm": 0.18764567375183105, + "total_sharpness": 0.0008994365925900638, + "ip_v_neg_g": 0.010272607207298279, + "cos_v_neg_g": 0.002311495365574956, + "v_norm": 4.520252227783203, + "g_norm": 0.9831617474555969, + "hv_norm": 0.38447070121765137, + "cos_v_hv": 0.010574745014309883, + "hg_norm": 10.106234550476074, + "cos_g_hg": 0.5664945840835571, + "v_parallel_norm": 0.0004336408746894449, + "v_perp_norm": 4.520252227783203, + "layer_1_v_norm": 1.0243769884109497, + "layer_1_cos_v_neg_g": 0.0063844346441328526, + "layer_2_v_norm": 1.045413613319397, + "layer_2_cos_v_neg_g": 0.0022096417378634214, + "layer_3_v_norm": 1.0461342334747314, + "layer_3_cos_v_neg_g": 0.00261330371722579, + "layer_4_v_norm": 1.0632699728012085, + "layer_4_cos_v_neg_g": 0.002133071655407548, + "layer_5_v_norm": 1.0897274017333984, + "layer_5_cos_v_neg_g": 0.0015278930077329278, + "layer_6_v_norm": 1.0933114290237427, + "layer_6_cos_v_neg_g": 0.0019996261689811945, + "layer_7_v_norm": 1.087112545967102, + "layer_7_cos_v_neg_g": 0.0019729123450815678, + "layer_8_v_norm": 1.080454707145691, + "layer_8_cos_v_neg_g": 0.0021369887981563807, + "layer_9_v_norm": 1.0739895105361938, + "layer_9_cos_v_neg_g": 0.0028462507762014866, + "layer_10_v_norm": 1.0434882640838623, + "layer_10_cos_v_neg_g": 0.0039558433927595615, + "layer_11_v_norm": 1.055788278579712, + "layer_11_cos_v_neg_g": 0.0062935748137533665, + "layer_12_v_norm": 1.0485987663269043, + "layer_12_cos_v_neg_g": 0.008063005283474922, + "layer_1_sharpness": 0.0007474528392776847, + "layer_2_sharpness": 1.0477291652932763e-05, + "layer_3_sharpness": 0.0002000767271965742, + "layer_4_sharpness": 0.00017844159447122365, + "layer_5_sharpness": 0.00014669146912638098, + "layer_6_sharpness": 0.00013625036808662117, + "layer_7_sharpness": 0.00020261552708689123, + "layer_8_sharpness": 0.00029882616945542395, + "layer_9_sharpness": 0.0002973453083541244, + "layer_10_sharpness": 0.0004012287827208638, + "layer_11_sharpness": 0.0003372562350705266, + "layer_12_sharpness": 0.00047988799633458257 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..2ca8229597286fa204a62a41bcbcb37328d1c677 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.091350078582764, + "total_l1_linf_norm": 35875.31640625, + "total_spectral_norm": 4.09135103225708, + "layer_1_update_fnorm": 0.7873528003692627, + "layer_1_max_l1_linf_norm": 1.1901063919067383, + "layer_1_max_spectral_norm": 0.1628245860338211, + "layer_2_update_fnorm": 0.8475444912910461, + "layer_2_max_l1_linf_norm": 0.9253116250038147, + "layer_2_max_spectral_norm": 0.1407298892736435, + "layer_3_update_fnorm": 0.8672695755958557, + "layer_3_max_l1_linf_norm": 0.9870719909667969, + "layer_3_max_spectral_norm": 0.13871367275714874, + "layer_4_update_fnorm": 0.8898330926895142, + "layer_4_max_l1_linf_norm": 0.9494233131408691, + "layer_4_max_spectral_norm": 0.12772764265537262, + "layer_5_update_fnorm": 0.9213395714759827, + "layer_5_max_l1_linf_norm": 0.9796481132507324, + "layer_5_max_spectral_norm": 0.12449056655168533, + "layer_6_update_fnorm": 0.9436293840408325, + "layer_6_max_l1_linf_norm": 
1.0007610321044922, + "layer_6_max_spectral_norm": 0.11265559494495392, + "layer_7_update_fnorm": 0.9426191449165344, + "layer_7_max_l1_linf_norm": 0.9947046637535095, + "layer_7_max_spectral_norm": 0.10736387223005295, + "layer_8_update_fnorm": 0.9517059922218323, + "layer_8_max_l1_linf_norm": 1.053882122039795, + "layer_8_max_spectral_norm": 0.10906878858804703, + "layer_9_update_fnorm": 0.9513084292411804, + "layer_9_max_l1_linf_norm": 1.0695834159851074, + "layer_9_max_spectral_norm": 0.12304115295410156, + "layer_10_update_fnorm": 0.9462228417396545, + "layer_10_max_l1_linf_norm": 1.0862963199615479, + "layer_10_max_spectral_norm": 0.1274379938840866, + "layer_11_update_fnorm": 0.9464902877807617, + "layer_11_max_l1_linf_norm": 1.1628079414367676, + "layer_11_max_spectral_norm": 0.13763973116874695, + "layer_12_update_fnorm": 0.9312503933906555, + "layer_12_max_l1_linf_norm": 1.2283806800842285, + "layer_12_max_spectral_norm": 0.1702820062637329, + "total_sharpness": 0.004357380326837301, + "ip_v_neg_g": 0.03937329724431038, + "cos_v_neg_g": 0.015559452585875988, + "v_norm": 4.091350078582764, + "g_norm": 0.618501603603363, + "hv_norm": 0.45367905497550964, + "cos_v_hv": 0.03929555043578148, + "hg_norm": 2.304097890853882, + "cos_g_hg": 0.47976818680763245, + "v_parallel_norm": 0.0023606575559824705, + "v_perp_norm": 4.091349124908447, + "layer_1_v_norm": 0.7873528003692627, + "layer_1_cos_v_neg_g": 0.041363298892974854, + "layer_2_v_norm": 0.8475444912910461, + "layer_2_cos_v_neg_g": 0.02969052642583847, + "layer_3_v_norm": 0.8672695755958557, + "layer_3_cos_v_neg_g": 0.027415581047534943, + "layer_4_v_norm": 0.8898330926895142, + "layer_4_cos_v_neg_g": 0.022725315764546394, + "layer_5_v_norm": 0.9213395714759827, + "layer_5_cos_v_neg_g": 0.020879508927464485, + "layer_6_v_norm": 0.9436293840408325, + "layer_6_cos_v_neg_g": 0.021364986896514893, + "layer_7_v_norm": 0.9426191449165344, + "layer_7_cos_v_neg_g": 0.018841609358787537, + "layer_8_v_norm": 0.951706051826477, + "layer_8_cos_v_neg_g": 0.021552318707108498, + "layer_9_v_norm": 0.9513084292411804, + "layer_9_cos_v_neg_g": 0.020498307421803474, + "layer_10_v_norm": 0.9462228417396545, + "layer_10_cos_v_neg_g": 0.024110104888677597, + "layer_11_v_norm": 0.9464902877807617, + "layer_11_cos_v_neg_g": 0.021526645869016647, + "layer_12_v_norm": 0.9312503933906555, + "layer_12_cos_v_neg_g": 0.02659803256392479, + "layer_1_sharpness": 0.007639605086296797, + "layer_2_sharpness": 0.0007527509587816894, + "layer_3_sharpness": 0.002359185367822647, + "layer_4_sharpness": 0.000659335230011493, + "layer_5_sharpness": 0.0005070877959951758, + "layer_6_sharpness": 0.0005999967688694596, + "layer_7_sharpness": 0.0005261101177893579, + "layer_8_sharpness": 0.0010390820680186152, + "layer_9_sharpness": 0.0011264141649007797, + "layer_10_sharpness": 0.0011165662435814738, + "layer_11_sharpness": 0.001084280083887279, + "layer_12_sharpness": 0.001848030835390091 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..51fcb26e70e68090410090cdded5263ab6a1ec51 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.241456508636475, + "total_l1_linf_norm": 37410.03125, + "total_spectral_norm": 
4.241456985473633, + "layer_1_update_fnorm": 0.8945304751396179, + "layer_1_max_l1_linf_norm": 1.1460360288619995, + "layer_1_max_spectral_norm": 0.1523970514535904, + "layer_2_update_fnorm": 0.915676474571228, + "layer_2_max_l1_linf_norm": 0.9913955330848694, + "layer_2_max_spectral_norm": 0.1377606838941574, + "layer_3_update_fnorm": 0.9313573241233826, + "layer_3_max_l1_linf_norm": 1.0099384784698486, + "layer_3_max_spectral_norm": 0.130940243601799, + "layer_4_update_fnorm": 0.9517517685890198, + "layer_4_max_l1_linf_norm": 0.9932616949081421, + "layer_4_max_spectral_norm": 0.11819856613874435, + "layer_5_update_fnorm": 0.9791897535324097, + "layer_5_max_l1_linf_norm": 1.0213865041732788, + "layer_5_max_spectral_norm": 0.1125505194067955, + "layer_6_update_fnorm": 0.9945789575576782, + "layer_6_max_l1_linf_norm": 1.0360386371612549, + "layer_6_max_spectral_norm": 0.10837186127901077, + "layer_7_update_fnorm": 0.9869734644889832, + "layer_7_max_l1_linf_norm": 1.003665804862976, + "layer_7_max_spectral_norm": 0.0906943529844284, + "layer_8_update_fnorm": 0.9894973635673523, + "layer_8_max_l1_linf_norm": 1.0027360916137695, + "layer_8_max_spectral_norm": 0.08878748118877411, + "layer_9_update_fnorm": 0.9802846312522888, + "layer_9_max_l1_linf_norm": 1.058045744895935, + "layer_9_max_spectral_norm": 0.10434301942586899, + "layer_10_update_fnorm": 0.9693663716316223, + "layer_10_max_l1_linf_norm": 1.0305719375610352, + "layer_10_max_spectral_norm": 0.11879581212997437, + "layer_11_update_fnorm": 0.9807072281837463, + "layer_11_max_l1_linf_norm": 1.1344281435012817, + "layer_11_max_spectral_norm": 0.13398906588554382, + "layer_12_update_fnorm": 0.9700557589530945, + "layer_12_max_l1_linf_norm": 1.115533471107483, + "layer_12_max_spectral_norm": 0.13925957679748535, + "total_sharpness": 0.002912451047450304, + "ip_v_neg_g": 0.02832896076142788, + "cos_v_neg_g": 0.009475775063037872, + "v_norm": 4.241456508636475, + "g_norm": 0.7048567533493042, + "hv_norm": 0.49194467067718506, + "cos_v_hv": 0.02511061541736126, + "hg_norm": 3.066518545150757, + "cos_g_hg": 0.5278069376945496, + "v_parallel_norm": 0.0015122377080842853, + "v_perp_norm": 4.241456031799316, + "layer_1_v_norm": 0.8945304751396179, + "layer_1_cos_v_neg_g": 0.02360863797366619, + "layer_2_v_norm": 0.915676474571228, + "layer_2_cos_v_neg_g": 0.019351046532392502, + "layer_3_v_norm": 0.9313573241233826, + "layer_3_cos_v_neg_g": 0.017082054167985916, + "layer_4_v_norm": 0.9517517685890198, + "layer_4_cos_v_neg_g": 0.01404678262770176, + "layer_5_v_norm": 0.9791897535324097, + "layer_5_cos_v_neg_g": 0.012238970957696438, + "layer_6_v_norm": 0.9945789575576782, + "layer_6_cos_v_neg_g": 0.010723624378442764, + "layer_7_v_norm": 0.9869734644889832, + "layer_7_cos_v_neg_g": 0.01138627715408802, + "layer_8_v_norm": 0.9894973635673523, + "layer_8_cos_v_neg_g": 0.010357193648815155, + "layer_9_v_norm": 0.9802846312522888, + "layer_9_cos_v_neg_g": 0.010778557509183884, + "layer_10_v_norm": 0.9693663716316223, + "layer_10_cos_v_neg_g": 0.012594910338521004, + "layer_11_v_norm": 0.9807072877883911, + "layer_11_cos_v_neg_g": 0.013341335579752922, + "layer_12_v_norm": 0.9700557589530945, + "layer_12_cos_v_neg_g": 0.013769233599305153, + "layer_1_sharpness": 0.003826158121228218, + "layer_2_sharpness": 0.0004839216999243945, + "layer_3_sharpness": 0.0009234583703801036, + "layer_4_sharpness": 0.0004884761292487383, + "layer_5_sharpness": 0.00035351584665477276, + "layer_6_sharpness": 0.00038268588832579553, + "layer_7_sharpness": 
0.00047367767547257245, + "layer_8_sharpness": 0.0006841351860202849, + "layer_9_sharpness": 0.0007193175260908902, + "layer_10_sharpness": 0.0008530892664566636, + "layer_11_sharpness": 0.0007900933269411325, + "layer_12_sharpness": 0.0011893447954207659 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..625ecf57ef560716fd77b3ff2a93b18ccce58476 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.2795586585998535, + "total_l1_linf_norm": 37801.84765625, + "total_spectral_norm": 4.2795586585998535, + "layer_1_update_fnorm": 0.916025698184967, + "layer_1_max_l1_linf_norm": 1.263790249824524, + "layer_1_max_spectral_norm": 0.16710038483142853, + "layer_2_update_fnorm": 0.9270911812782288, + "layer_2_max_l1_linf_norm": 1.0248688459396362, + "layer_2_max_spectral_norm": 0.140603706240654, + "layer_3_update_fnorm": 0.9410408139228821, + "layer_3_max_l1_linf_norm": 1.0119210481643677, + "layer_3_max_spectral_norm": 0.1297188550233841, + "layer_4_update_fnorm": 0.9588687419891357, + "layer_4_max_l1_linf_norm": 1.0107641220092773, + "layer_4_max_spectral_norm": 0.11452394723892212, + "layer_5_update_fnorm": 0.993285059928894, + "layer_5_max_l1_linf_norm": 1.0813884735107422, + "layer_5_max_spectral_norm": 0.10332635790109634, + "layer_6_update_fnorm": 1.0111063718795776, + "layer_6_max_l1_linf_norm": 1.068283200263977, + "layer_6_max_spectral_norm": 0.10004684329032898, + "layer_7_update_fnorm": 1.0101419687271118, + "layer_7_max_l1_linf_norm": 1.0483434200286865, + "layer_7_max_spectral_norm": 0.09141841530799866, + "layer_8_update_fnorm": 1.0141611099243164, + "layer_8_max_l1_linf_norm": 1.0473496913909912, + "layer_8_max_spectral_norm": 0.09023290872573853, + "layer_9_update_fnorm": 1.011997938156128, + "layer_9_max_l1_linf_norm": 1.0555691719055176, + "layer_9_max_spectral_norm": 0.11535229533910751, + "layer_10_update_fnorm": 0.9973337650299072, + "layer_10_max_l1_linf_norm": 1.0974395275115967, + "layer_10_max_spectral_norm": 0.13308444619178772, + "layer_11_update_fnorm": 0.9939030408859253, + "layer_11_max_l1_linf_norm": 1.067878246307373, + "layer_11_max_spectral_norm": 0.1386287808418274, + "layer_12_update_fnorm": 0.9809508323669434, + "layer_12_max_l1_linf_norm": 1.041601538658142, + "layer_12_max_spectral_norm": 0.1423022449016571, + "total_sharpness": 0.003327825805172324, + "ip_v_neg_g": 0.03617347404360771, + "cos_v_neg_g": 0.0116701889783144, + "v_norm": 4.2795586585998535, + "g_norm": 0.7242914438247681, + "hv_norm": 0.5274482369422913, + "cos_v_hv": 0.027000991627573967, + "hg_norm": 3.3752119541168213, + "cos_g_hg": 0.6025654077529907, + "v_parallel_norm": 0.0018202707869932055, + "v_perp_norm": 4.279558181762695, + "layer_1_v_norm": 0.916025698184967, + "layer_1_cos_v_neg_g": 0.03339718282222748, + "layer_2_v_norm": 0.9270911812782288, + "layer_2_cos_v_neg_g": 0.01618303172290325, + "layer_3_v_norm": 0.9410408139228821, + "layer_3_cos_v_neg_g": 0.014250670559704304, + "layer_4_v_norm": 0.9588687419891357, + "layer_4_cos_v_neg_g": 0.012099970132112503, + "layer_5_v_norm": 0.993285059928894, + "layer_5_cos_v_neg_g": 0.010745197534561157, + "layer_6_v_norm": 1.0111063718795776, + "layer_6_cos_v_neg_g": 
0.013163025490939617, + "layer_7_v_norm": 1.0101419687271118, + "layer_7_cos_v_neg_g": 0.012622524052858353, + "layer_8_v_norm": 1.0141611099243164, + "layer_8_cos_v_neg_g": 0.014154501259326935, + "layer_9_v_norm": 1.011997938156128, + "layer_9_cos_v_neg_g": 0.014308387413620949, + "layer_10_v_norm": 0.9973337650299072, + "layer_10_cos_v_neg_g": 0.017467785626649857, + "layer_11_v_norm": 0.9939030408859253, + "layer_11_cos_v_neg_g": 0.016724182292819023, + "layer_12_v_norm": 0.9809508323669434, + "layer_12_cos_v_neg_g": 0.01910668984055519, + "layer_1_sharpness": 0.005151069723069668, + "layer_2_sharpness": 0.0004842474008910358, + "layer_3_sharpness": 0.0006639949860982597, + "layer_4_sharpness": 0.0003164175432175398, + "layer_5_sharpness": 0.0002544481249060482, + "layer_6_sharpness": 0.0003774506039917469, + "layer_7_sharpness": 0.00041848854743875563, + "layer_8_sharpness": 0.0006516107823699713, + "layer_9_sharpness": 0.0007659748080186546, + "layer_10_sharpness": 0.0009773328201845288, + "layer_11_sharpness": 0.0008198359282687306, + "layer_12_sharpness": 0.001385830226354301 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..488fcab20a8ecc23bc4fb70f677b821a3c133719 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.428201675415039, + "total_l1_linf_norm": 39265.77734375, + "total_spectral_norm": 4.428200721740723, + "layer_1_update_fnorm": 1.0097949504852295, + "layer_1_max_l1_linf_norm": 1.370924711227417, + "layer_1_max_spectral_norm": 0.16701245307922363, + "layer_2_update_fnorm": 0.9612780213356018, + "layer_2_max_l1_linf_norm": 1.0532033443450928, + "layer_2_max_spectral_norm": 0.15139393508434296, + "layer_3_update_fnorm": 0.9989029169082642, + "layer_3_max_l1_linf_norm": 1.0815589427947998, + "layer_3_max_spectral_norm": 0.13882438838481903, + "layer_4_update_fnorm": 1.0168331861495972, + "layer_4_max_l1_linf_norm": 1.078446388244629, + "layer_4_max_spectral_norm": 0.11707838624715805, + "layer_5_update_fnorm": 1.0330532789230347, + "layer_5_max_l1_linf_norm": 1.0925922393798828, + "layer_5_max_spectral_norm": 0.10747745633125305, + "layer_6_update_fnorm": 1.05057954788208, + "layer_6_max_l1_linf_norm": 1.106845736503601, + "layer_6_max_spectral_norm": 0.11514118313789368, + "layer_7_update_fnorm": 1.0581576824188232, + "layer_7_max_l1_linf_norm": 1.098930835723877, + "layer_7_max_spectral_norm": 0.10698620229959488, + "layer_8_update_fnorm": 1.0572882890701294, + "layer_8_max_l1_linf_norm": 1.0803022384643555, + "layer_8_max_spectral_norm": 0.09487292170524597, + "layer_9_update_fnorm": 1.0532971620559692, + "layer_9_max_l1_linf_norm": 1.1317521333694458, + "layer_9_max_spectral_norm": 0.11780813336372375, + "layer_10_update_fnorm": 1.0455623865127563, + "layer_10_max_l1_linf_norm": 1.179731011390686, + "layer_10_max_spectral_norm": 0.1506640762090683, + "layer_11_update_fnorm": 1.049973487854004, + "layer_11_max_l1_linf_norm": 1.2172658443450928, + "layer_11_max_spectral_norm": 0.1580866426229477, + "layer_12_update_fnorm": 1.0578184127807617, + "layer_12_max_l1_linf_norm": 1.2269601821899414, + "layer_12_max_spectral_norm": 0.16485495865345, + "total_sharpness": 0.002005055546760559, + "ip_v_neg_g": 
0.022255096584558487, + "cos_v_neg_g": 0.007702934090048075, + "v_norm": 4.428201675415039, + "g_norm": 0.6524479985237122, + "hv_norm": 0.3462410271167755, + "cos_v_hv": 0.025643378496170044, + "hg_norm": 2.2647249698638916, + "cos_g_hg": 0.5388676524162292, + "v_parallel_norm": 0.0015012432122603059, + "v_perp_norm": 4.428201198577881, + "layer_1_v_norm": 1.0097949504852295, + "layer_1_cos_v_neg_g": 0.02076818235218525, + "layer_2_v_norm": 0.9612780213356018, + "layer_2_cos_v_neg_g": 0.0068359836004674435, + "layer_3_v_norm": 0.9989029765129089, + "layer_3_cos_v_neg_g": 0.007019483484327793, + "layer_4_v_norm": 1.0168331861495972, + "layer_4_cos_v_neg_g": 0.006882141809910536, + "layer_5_v_norm": 1.0330532789230347, + "layer_5_cos_v_neg_g": 0.005957774352282286, + "layer_6_v_norm": 1.05057954788208, + "layer_6_cos_v_neg_g": 0.006817967165261507, + "layer_7_v_norm": 1.0581576824188232, + "layer_7_cos_v_neg_g": 0.009135276079177856, + "layer_8_v_norm": 1.0572882890701294, + "layer_8_cos_v_neg_g": 0.009196369908750057, + "layer_9_v_norm": 1.0532971620559692, + "layer_9_cos_v_neg_g": 0.008779406547546387, + "layer_10_v_norm": 1.0455623865127563, + "layer_10_cos_v_neg_g": 0.013099182397127151, + "layer_11_v_norm": 1.0499736070632935, + "layer_11_cos_v_neg_g": 0.01380329579114914, + "layer_12_v_norm": 1.0578184127807617, + "layer_12_cos_v_neg_g": 0.01463501900434494, + "layer_1_sharpness": 0.002432518173009157, + "layer_2_sharpness": 0.00011283066851319745, + "layer_3_sharpness": 0.000282126507954672, + "layer_4_sharpness": 0.00028609822038561106, + "layer_5_sharpness": 0.00019741791766136885, + "layer_6_sharpness": 0.00022764483583159745, + "layer_7_sharpness": 0.0004079466452822089, + "layer_8_sharpness": 0.00055368070024997, + "layer_9_sharpness": 0.0005113062215968966, + "layer_10_sharpness": 0.0006642265361733735, + "layer_11_sharpness": 0.0005913814529776573, + "layer_12_sharpness": 0.0008184811449609697 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..ef1aad5942245b2488b3086c44eb2e887a274392 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.358567237854004, + "total_l1_linf_norm": 38609.33203125, + "total_spectral_norm": 4.358567714691162, + "layer_1_update_fnorm": 0.9546825289726257, + "layer_1_max_l1_linf_norm": 1.1156105995178223, + "layer_1_max_spectral_norm": 0.14616361260414124, + "layer_2_update_fnorm": 0.9518256187438965, + "layer_2_max_l1_linf_norm": 1.0245850086212158, + "layer_2_max_spectral_norm": 0.15123958885669708, + "layer_3_update_fnorm": 0.9801844954490662, + "layer_3_max_l1_linf_norm": 1.0475473403930664, + "layer_3_max_spectral_norm": 0.13493984937667847, + "layer_4_update_fnorm": 1.0048710107803345, + "layer_4_max_l1_linf_norm": 1.0696425437927246, + "layer_4_max_spectral_norm": 0.11403924226760864, + "layer_5_update_fnorm": 1.034585952758789, + "layer_5_max_l1_linf_norm": 1.1073572635650635, + "layer_5_max_spectral_norm": 0.10159005224704742, + "layer_6_update_fnorm": 1.0424375534057617, + "layer_6_max_l1_linf_norm": 1.0924608707427979, + "layer_6_max_spectral_norm": 0.10012617707252502, + "layer_7_update_fnorm": 1.0312339067459106, + "layer_7_max_l1_linf_norm": 1.0444602966308594, + 
"layer_7_max_spectral_norm": 0.09035533666610718, + "layer_8_update_fnorm": 1.0339759588241577, + "layer_8_max_l1_linf_norm": 1.053628921508789, + "layer_8_max_spectral_norm": 0.08554495871067047, + "layer_9_update_fnorm": 1.0264025926589966, + "layer_9_max_l1_linf_norm": 1.0898497104644775, + "layer_9_max_spectral_norm": 0.10591332614421844, + "layer_10_update_fnorm": 1.0017591714859009, + "layer_10_max_l1_linf_norm": 1.1053954362869263, + "layer_10_max_spectral_norm": 0.13288991153240204, + "layer_11_update_fnorm": 1.011054515838623, + "layer_11_max_l1_linf_norm": 1.1773264408111572, + "layer_11_max_spectral_norm": 0.1507609635591507, + "layer_12_update_fnorm": 1.0032055377960205, + "layer_12_max_l1_linf_norm": 1.2004683017730713, + "layer_12_max_spectral_norm": 0.15774592757225037, + "total_sharpness": 0.001526195090264082, + "ip_v_neg_g": 0.015122472308576107, + "cos_v_neg_g": 0.004623944405466318, + "v_norm": 4.358567237854004, + "g_norm": 0.7503544092178345, + "hv_norm": 0.31902042031288147, + "cos_v_hv": 0.02085140161216259, + "hg_norm": 3.902074098587036, + "cos_g_hg": 0.5906962156295776, + "v_parallel_norm": 0.0007480968488380313, + "v_perp_norm": 4.358567237854004, + "layer_1_v_norm": 0.9546825289726257, + "layer_1_cos_v_neg_g": 0.014106490649282932, + "layer_2_v_norm": 0.9518256187438965, + "layer_2_cos_v_neg_g": 0.003664389718323946, + "layer_3_v_norm": 0.9801844954490662, + "layer_3_cos_v_neg_g": 0.0054444363340735435, + "layer_4_v_norm": 1.0048710107803345, + "layer_4_cos_v_neg_g": 0.005370818078517914, + "layer_5_v_norm": 1.034585952758789, + "layer_5_cos_v_neg_g": 0.005135578103363514, + "layer_6_v_norm": 1.0424375534057617, + "layer_6_cos_v_neg_g": 0.004923378583043814, + "layer_7_v_norm": 1.0312339067459106, + "layer_7_cos_v_neg_g": 0.004494332242757082, + "layer_8_v_norm": 1.0339759588241577, + "layer_8_cos_v_neg_g": 0.004911035764962435, + "layer_9_v_norm": 1.0264025926589966, + "layer_9_cos_v_neg_g": 0.005522696767002344, + "layer_10_v_norm": 1.0017591714859009, + "layer_10_cos_v_neg_g": 0.006610700394958258, + "layer_11_v_norm": 1.0110546350479126, + "layer_11_cos_v_neg_g": 0.007964576594531536, + "layer_12_v_norm": 1.0032055377960205, + "layer_12_cos_v_neg_g": 0.007038464304059744, + "layer_1_sharpness": 0.0016362066380679607, + "layer_2_sharpness": 7.5111398473382e-05, + "layer_3_sharpness": 0.0003203883534297347, + "layer_4_sharpness": 0.0002669098903425038, + "layer_5_sharpness": 0.0002160279400413856, + "layer_6_sharpness": 0.00018905724573414773, + "layer_7_sharpness": 0.0003103890048805624, + "layer_8_sharpness": 0.00036015507066622376, + "layer_9_sharpness": 0.0003902617027051747, + "layer_10_sharpness": 0.000558390689548105, + "layer_11_sharpness": 0.0005875364295206964, + "layer_12_sharpness": 0.00087671511573717 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..2daf2612ebecb153629d2c9bae1cd83fc1cee765 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.3434224128723145, + "total_l1_linf_norm": 38410.578125, + "total_spectral_norm": 4.343423366546631, + "layer_1_update_fnorm": 0.8985306024551392, + "layer_1_max_l1_linf_norm": 1.0683470964431763, + "layer_1_max_spectral_norm": 0.16172462701797485, + 
"layer_2_update_fnorm": 0.9574827551841736, + "layer_2_max_l1_linf_norm": 1.020288109779358, + "layer_2_max_spectral_norm": 0.14307914674282074, + "layer_3_update_fnorm": 0.9712438583374023, + "layer_3_max_l1_linf_norm": 1.0278167724609375, + "layer_3_max_spectral_norm": 0.1331639140844345, + "layer_4_update_fnorm": 0.9899227619171143, + "layer_4_max_l1_linf_norm": 1.0666297674179077, + "layer_4_max_spectral_norm": 0.11316782981157303, + "layer_5_update_fnorm": 1.0137016773223877, + "layer_5_max_l1_linf_norm": 1.055525302886963, + "layer_5_max_spectral_norm": 0.10494160652160645, + "layer_6_update_fnorm": 1.0244468450546265, + "layer_6_max_l1_linf_norm": 1.05991530418396, + "layer_6_max_spectral_norm": 0.09663376957178116, + "layer_7_update_fnorm": 1.0235012769699097, + "layer_7_max_l1_linf_norm": 1.0405211448669434, + "layer_7_max_spectral_norm": 0.08463499695062637, + "layer_8_update_fnorm": 1.0302788019180298, + "layer_8_max_l1_linf_norm": 1.0385565757751465, + "layer_8_max_spectral_norm": 0.0837026983499527, + "layer_9_update_fnorm": 1.0323998928070068, + "layer_9_max_l1_linf_norm": 1.0724449157714844, + "layer_9_max_spectral_norm": 0.10362584888935089, + "layer_10_update_fnorm": 1.0125012397766113, + "layer_10_max_l1_linf_norm": 1.0577125549316406, + "layer_10_max_spectral_norm": 0.1320553570985794, + "layer_11_update_fnorm": 1.0221385955810547, + "layer_11_max_l1_linf_norm": 1.1405322551727295, + "layer_11_max_spectral_norm": 0.13928861916065216, + "layer_12_update_fnorm": 1.0060821771621704, + "layer_12_max_l1_linf_norm": 1.0951464176177979, + "layer_12_max_spectral_norm": 0.14168280363082886, + "total_sharpness": 0.0013429740210995078, + "ip_v_neg_g": 0.012787933461368084, + "cos_v_neg_g": 0.004399455618113279, + "v_norm": 4.3434224128723145, + "g_norm": 0.6692206859588623, + "hv_norm": 0.316162109375, + "cos_v_hv": 0.018449725583195686, + "hg_norm": 3.174081563949585, + "cos_g_hg": 0.535081148147583, + "v_parallel_norm": 0.0008673111442476511, + "v_perp_norm": 4.3434224128723145, + "layer_1_v_norm": 0.8985306024551392, + "layer_1_cos_v_neg_g": 0.011252610012888908, + "layer_2_v_norm": 0.9574827551841736, + "layer_2_cos_v_neg_g": 0.002862694440409541, + "layer_3_v_norm": 0.9712438583374023, + "layer_3_cos_v_neg_g": 0.004430428612977266, + "layer_4_v_norm": 0.9899227619171143, + "layer_4_cos_v_neg_g": 0.0034930375404655933, + "layer_5_v_norm": 1.0137016773223877, + "layer_5_cos_v_neg_g": 0.0037756820674985647, + "layer_6_v_norm": 1.0244468450546265, + "layer_6_cos_v_neg_g": 0.004515340086072683, + "layer_7_v_norm": 1.0235012769699097, + "layer_7_cos_v_neg_g": 0.004454104695469141, + "layer_8_v_norm": 1.0302788019180298, + "layer_8_cos_v_neg_g": 0.004869450349360704, + "layer_9_v_norm": 1.0323998928070068, + "layer_9_cos_v_neg_g": 0.005662696901708841, + "layer_10_v_norm": 1.0125012397766113, + "layer_10_cos_v_neg_g": 0.008324877358973026, + "layer_11_v_norm": 1.0221385955810547, + "layer_11_cos_v_neg_g": 0.009215189144015312, + "layer_12_v_norm": 1.0060821771621704, + "layer_12_cos_v_neg_g": 0.00809141993522644, + "layer_1_sharpness": 0.0017893468029797077, + "layer_2_sharpness": 9.332864283351228e-05, + "layer_3_sharpness": 0.00029981217812746763, + "layer_4_sharpness": 0.00017551642667967826, + "layer_5_sharpness": 0.00022429556702263653, + "layer_6_sharpness": 0.00021614614524878561, + "layer_7_sharpness": 0.0002868963056243956, + "layer_8_sharpness": 0.0004523357783909887, + "layer_9_sharpness": 0.00043719972018152475, + "layer_10_sharpness": 0.0005945536540821195, + 
"layer_11_sharpness": 0.00044714007526636124, + "layer_12_sharpness": 0.0005918843089602888 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..3ebfd8fe4bbc86c526f2c10a0550d8ad0cf5bdef --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.444023609161377, + "total_l1_linf_norm": 39434.265625, + "total_spectral_norm": 4.444024085998535, + "layer_1_update_fnorm": 1.0032618045806885, + "layer_1_max_l1_linf_norm": 1.2605504989624023, + "layer_1_max_spectral_norm": 0.17349326610565186, + "layer_2_update_fnorm": 0.9814984798431396, + "layer_2_max_l1_linf_norm": 1.0682909488677979, + "layer_2_max_spectral_norm": 0.15331099927425385, + "layer_3_update_fnorm": 1.0038020610809326, + "layer_3_max_l1_linf_norm": 1.108710527420044, + "layer_3_max_spectral_norm": 0.14470848441123962, + "layer_4_update_fnorm": 1.0243322849273682, + "layer_4_max_l1_linf_norm": 1.0847294330596924, + "layer_4_max_spectral_norm": 0.11048127710819244, + "layer_5_update_fnorm": 1.0504140853881836, + "layer_5_max_l1_linf_norm": 1.1033778190612793, + "layer_5_max_spectral_norm": 0.10008834302425385, + "layer_6_update_fnorm": 1.0541352033615112, + "layer_6_max_l1_linf_norm": 1.0981603860855103, + "layer_6_max_spectral_norm": 0.09262991696596146, + "layer_7_update_fnorm": 1.0532804727554321, + "layer_7_max_l1_linf_norm": 1.0602822303771973, + "layer_7_max_spectral_norm": 0.09280422329902649, + "layer_8_update_fnorm": 1.0512566566467285, + "layer_8_max_l1_linf_norm": 1.0572197437286377, + "layer_8_max_spectral_norm": 0.08425609022378922, + "layer_9_update_fnorm": 1.0479737520217896, + "layer_9_max_l1_linf_norm": 1.1202170848846436, + "layer_9_max_spectral_norm": 0.11332190781831741, + "layer_10_update_fnorm": 1.0290215015411377, + "layer_10_max_l1_linf_norm": 1.1627991199493408, + "layer_10_max_spectral_norm": 0.14588724076747894, + "layer_11_update_fnorm": 1.0441292524337769, + "layer_11_max_l1_linf_norm": 1.2909729480743408, + "layer_11_max_spectral_norm": 0.1643752157688141, + "layer_12_update_fnorm": 1.04297935962677, + "layer_12_max_l1_linf_norm": 1.1615865230560303, + "layer_12_max_spectral_norm": 0.16148029267787933, + "total_sharpness": 0.0019321972504258156, + "ip_v_neg_g": 0.02407851815223694, + "cos_v_neg_g": 0.007550166454166174, + "v_norm": 4.444023609161377, + "g_norm": 0.7176238894462585, + "hv_norm": 0.4104653000831604, + "cos_v_hv": 0.020919503644108772, + "hg_norm": 3.968214988708496, + "cos_g_hg": 0.6047324538230896, + "v_parallel_norm": 0.0012780267279595137, + "v_perp_norm": 4.444023609161377, + "layer_1_v_norm": 1.0032618045806885, + "layer_1_cos_v_neg_g": 0.02049480378627777, + "layer_2_v_norm": 0.9814984798431396, + "layer_2_cos_v_neg_g": 0.00747003685683012, + "layer_3_v_norm": 1.003801941871643, + "layer_3_cos_v_neg_g": 0.00680944649502635, + "layer_4_v_norm": 1.0243322849273682, + "layer_4_cos_v_neg_g": 0.007612799294292927, + "layer_5_v_norm": 1.0504140853881836, + "layer_5_cos_v_neg_g": 0.0063764601945877075, + "layer_6_v_norm": 1.0541352033615112, + "layer_6_cos_v_neg_g": 0.00656856456771493, + "layer_7_v_norm": 1.0532804727554321, + "layer_7_cos_v_neg_g": 0.0071343956515192986, + "layer_8_v_norm": 1.0512566566467285, + "layer_8_cos_v_neg_g": 
0.008559339679777622, + "layer_9_v_norm": 1.0479737520217896, + "layer_9_cos_v_neg_g": 0.008739257231354713, + "layer_10_v_norm": 1.0290215015411377, + "layer_10_cos_v_neg_g": 0.013517580926418304, + "layer_11_v_norm": 1.0441292524337769, + "layer_11_cos_v_neg_g": 0.015476359985768795, + "layer_12_v_norm": 1.04297935962677, + "layer_12_cos_v_neg_g": 0.016722341999411583, + "layer_1_sharpness": 0.0018979752203449607, + "layer_2_sharpness": 0.0001391901314491406, + "layer_3_sharpness": 0.00034086519735865295, + "layer_4_sharpness": 0.00029026821721345186, + "layer_5_sharpness": 0.00022067561803851277, + "layer_6_sharpness": 0.00022474300931207836, + "layer_7_sharpness": 0.0003691477468237281, + "layer_8_sharpness": 0.0005333084263838828, + "layer_9_sharpness": 0.000506955839227885, + "layer_10_sharpness": 0.0007275463431142271, + "layer_11_sharpness": 0.0006607063696719706, + "layer_12_sharpness": 0.0008664521155878901 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..fcd66a6edfada215d96ae93fd0f969ab94c0935b --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.7388715744018555, + "total_l1_linf_norm": 23811.794921875, + "total_spectral_norm": 2.7388718128204346, + "layer_1_update_fnorm": 0.5076460242271423, + "layer_1_max_l1_linf_norm": 0.8827260732650757, + "layer_1_max_spectral_norm": 0.13259918987751007, + "layer_2_update_fnorm": 0.4940517842769623, + "layer_2_max_l1_linf_norm": 0.6197546720504761, + "layer_2_max_spectral_norm": 0.1092132180929184, + "layer_3_update_fnorm": 0.4956188201904297, + "layer_3_max_l1_linf_norm": 0.6332273483276367, + "layer_3_max_spectral_norm": 0.11172253638505936, + "layer_4_update_fnorm": 0.5356323719024658, + "layer_4_max_l1_linf_norm": 0.7817125916481018, + "layer_4_max_spectral_norm": 0.12059196084737778, + "layer_5_update_fnorm": 0.5496950149536133, + "layer_5_max_l1_linf_norm": 0.6998517513275146, + "layer_5_max_spectral_norm": 0.12170681357383728, + "layer_6_update_fnorm": 0.5907064080238342, + "layer_6_max_l1_linf_norm": 0.7034395933151245, + "layer_6_max_spectral_norm": 0.13157415390014648, + "layer_7_update_fnorm": 0.609838604927063, + "layer_7_max_l1_linf_norm": 0.7293071746826172, + "layer_7_max_spectral_norm": 0.12158612161874771, + "layer_8_update_fnorm": 0.6327699422836304, + "layer_8_max_l1_linf_norm": 0.6709545254707336, + "layer_8_max_spectral_norm": 0.11919543892145157, + "layer_9_update_fnorm": 0.6385546922683716, + "layer_9_max_l1_linf_norm": 0.6919118165969849, + "layer_9_max_spectral_norm": 0.11708789318799973, + "layer_10_update_fnorm": 0.6367579698562622, + "layer_10_max_l1_linf_norm": 0.7095649242401123, + "layer_10_max_spectral_norm": 0.12150195986032486, + "layer_11_update_fnorm": 0.6247446537017822, + "layer_11_max_l1_linf_norm": 0.7050716280937195, + "layer_11_max_spectral_norm": 0.12959083914756775, + "layer_12_update_fnorm": 0.5820555686950684, + "layer_12_max_l1_linf_norm": 0.7920606136322021, + "layer_12_max_spectral_norm": 0.19426655769348145, + "total_sharpness": 0.020502083003520966, + "ip_v_neg_g": 0.13075312972068787, + "cos_v_neg_g": 0.05168687552213669, + "v_norm": 2.7388715744018555, + "g_norm": 0.9236345291137695, + "hv_norm": 0.8256139755249023, + "cos_v_hv": 
0.06801310181617737, + "hg_norm": 4.133584022521973, + "cos_g_hg": 0.6508166193962097, + "v_parallel_norm": 0.007382506504654884, + "v_perp_norm": 2.738861560821533, + "layer_1_v_norm": 0.5076460242271423, + "layer_1_cos_v_neg_g": 0.11427904665470123, + "layer_2_v_norm": 0.4940517842769623, + "layer_2_cos_v_neg_g": 0.11332262307405472, + "layer_3_v_norm": 0.4956188201904297, + "layer_3_cos_v_neg_g": 0.13670365512371063, + "layer_4_v_norm": 0.5356323719024658, + "layer_4_cos_v_neg_g": 0.15858741104602814, + "layer_5_v_norm": 0.5496950149536133, + "layer_5_cos_v_neg_g": 0.12764103710651398, + "layer_6_v_norm": 0.590706467628479, + "layer_6_cos_v_neg_g": 0.10413463413715363, + "layer_7_v_norm": 0.609838604927063, + "layer_7_cos_v_neg_g": 0.11093313246965408, + "layer_8_v_norm": 0.6327699422836304, + "layer_8_cos_v_neg_g": 0.1085088849067688, + "layer_9_v_norm": 0.6385546922683716, + "layer_9_cos_v_neg_g": 0.0707172229886055, + "layer_10_v_norm": 0.6367579698562622, + "layer_10_cos_v_neg_g": 0.07583478093147278, + "layer_11_v_norm": 0.6247446537017822, + "layer_11_cos_v_neg_g": 0.08516642451286316, + "layer_12_v_norm": 0.5820555686950684, + "layer_12_cos_v_neg_g": 0.14761383831501007, + "layer_1_sharpness": 0.07746560126543045, + "layer_2_sharpness": 0.005236889235675335, + "layer_3_sharpness": 0.008206922560930252, + "layer_4_sharpness": 0.008175571449100971, + "layer_5_sharpness": 0.0043624876998364925, + "layer_6_sharpness": 0.002551947021856904, + "layer_7_sharpness": 0.0036903696600347757, + "layer_8_sharpness": 0.003952115308493376, + "layer_9_sharpness": 0.00180677673779428, + "layer_10_sharpness": 0.0018935628468170762, + "layer_11_sharpness": 0.002409485401585698, + "layer_12_sharpness": 0.014148195274174213 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..b6974c3033494e81b563db14d710c51a8c1eb079 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.365923881530762, + "total_l1_linf_norm": 38606.52734375, + "total_spectral_norm": 4.365922927856445, + "layer_1_update_fnorm": 0.8792237043380737, + "layer_1_max_l1_linf_norm": 1.0351557731628418, + "layer_1_max_spectral_norm": 0.12617507576942444, + "layer_2_update_fnorm": 0.9744774699211121, + "layer_2_max_l1_linf_norm": 1.0652801990509033, + "layer_2_max_spectral_norm": 0.15235619246959686, + "layer_3_update_fnorm": 0.9846502542495728, + "layer_3_max_l1_linf_norm": 1.072164535522461, + "layer_3_max_spectral_norm": 0.14204177260398865, + "layer_4_update_fnorm": 1.0078682899475098, + "layer_4_max_l1_linf_norm": 1.0546419620513916, + "layer_4_max_spectral_norm": 0.11293104290962219, + "layer_5_update_fnorm": 1.034877896308899, + "layer_5_max_l1_linf_norm": 1.093811273574829, + "layer_5_max_spectral_norm": 0.09634484350681305, + "layer_6_update_fnorm": 1.0381203889846802, + "layer_6_max_l1_linf_norm": 1.0685505867004395, + "layer_6_max_spectral_norm": 0.09124557673931122, + "layer_7_update_fnorm": 1.040982723236084, + "layer_7_max_l1_linf_norm": 1.0442075729370117, + "layer_7_max_spectral_norm": 0.07648012042045593, + "layer_8_update_fnorm": 1.0365244150161743, + "layer_8_max_l1_linf_norm": 1.0425219535827637, + "layer_8_max_spectral_norm": 0.08297950774431229, + 
"layer_9_update_fnorm": 1.0363575220108032, + "layer_9_max_l1_linf_norm": 1.0803030729293823, + "layer_9_max_spectral_norm": 0.10890088975429535, + "layer_10_update_fnorm": 1.0082447528839111, + "layer_10_max_l1_linf_norm": 1.09939706325531, + "layer_10_max_spectral_norm": 0.1332847774028778, + "layer_11_update_fnorm": 1.0174723863601685, + "layer_11_max_l1_linf_norm": 1.2176740169525146, + "layer_11_max_spectral_norm": 0.15367861092090607, + "layer_12_update_fnorm": 1.0194625854492188, + "layer_12_max_l1_linf_norm": 1.1417896747589111, + "layer_12_max_spectral_norm": 0.15887708961963654, + "total_sharpness": 0.0015127796214073896, + "ip_v_neg_g": 0.015954621136188507, + "cos_v_neg_g": 0.0041295490227639675, + "v_norm": 4.365923881530762, + "g_norm": 0.8849275708198547, + "hv_norm": 0.4986983835697174, + "cos_v_hv": 0.013243838213384151, + "hg_norm": 16.54123306274414, + "cos_g_hg": 0.5829445719718933, + "v_parallel_norm": 0.0007954141474328935, + "v_perp_norm": 4.365923881530762, + "layer_1_v_norm": 0.8792237043380737, + "layer_1_cos_v_neg_g": 0.012388859875500202, + "layer_2_v_norm": 0.9744774699211121, + "layer_2_cos_v_neg_g": 0.005356608424335718, + "layer_3_v_norm": 0.9846502542495728, + "layer_3_cos_v_neg_g": 0.0063660042360424995, + "layer_4_v_norm": 1.0078682899475098, + "layer_4_cos_v_neg_g": 0.004112796857953072, + "layer_5_v_norm": 1.034877896308899, + "layer_5_cos_v_neg_g": 0.0040816692635416985, + "layer_6_v_norm": 1.0381203889846802, + "layer_6_cos_v_neg_g": 0.004224722273647785, + "layer_7_v_norm": 1.040982723236084, + "layer_7_cos_v_neg_g": 0.003559007542207837, + "layer_8_v_norm": 1.0365242958068848, + "layer_8_cos_v_neg_g": 0.004121442325413227, + "layer_9_v_norm": 1.0363575220108032, + "layer_9_cos_v_neg_g": 0.004067466128617525, + "layer_10_v_norm": 1.0082447528839111, + "layer_10_cos_v_neg_g": 0.006195340305566788, + "layer_11_v_norm": 1.0174723863601685, + "layer_11_cos_v_neg_g": 0.007547613698989153, + "layer_12_v_norm": 1.0194625854492188, + "layer_12_cos_v_neg_g": 0.007274515461176634, + "layer_1_sharpness": 0.0016679773107171059, + "layer_2_sharpness": 0.00022580155928153545, + "layer_3_sharpness": 0.0007754885009489954, + "layer_4_sharpness": 0.00030604511266574264, + "layer_5_sharpness": 0.000192738909390755, + "layer_6_sharpness": 0.00027006404707208276, + "layer_7_sharpness": 0.00026646783226169646, + "layer_8_sharpness": 0.0003762864216696471, + "layer_9_sharpness": 0.00040738502866588533, + "layer_10_sharpness": 0.0005606217309832573, + "layer_11_sharpness": 0.00046200663200579584, + "layer_12_sharpness": 0.0007300431025214493 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..efa3cec6c9dad41289b153436eefc77d13b736eb --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.423046588897705, + "total_l1_linf_norm": 39249.7734375, + "total_spectral_norm": 4.423046588897705, + "layer_1_update_fnorm": 0.9405917525291443, + "layer_1_max_l1_linf_norm": 1.2421915531158447, + "layer_1_max_spectral_norm": 0.13495658338069916, + "layer_2_update_fnorm": 0.9820553660392761, + "layer_2_max_l1_linf_norm": 1.050572395324707, + "layer_2_max_spectral_norm": 0.14456050097942352, + "layer_3_update_fnorm": 
0.9952684044837952, + "layer_3_max_l1_linf_norm": 1.0253171920776367, + "layer_3_max_spectral_norm": 0.14306363463401794, + "layer_4_update_fnorm": 1.02198326587677, + "layer_4_max_l1_linf_norm": 1.05108642578125, + "layer_4_max_spectral_norm": 0.11124788969755173, + "layer_5_update_fnorm": 1.0506035089492798, + "layer_5_max_l1_linf_norm": 1.1069293022155762, + "layer_5_max_spectral_norm": 0.09326058626174927, + "layer_6_update_fnorm": 1.0603522062301636, + "layer_6_max_l1_linf_norm": 1.0911543369293213, + "layer_6_max_spectral_norm": 0.08851726353168488, + "layer_7_update_fnorm": 1.061082124710083, + "layer_7_max_l1_linf_norm": 1.0846729278564453, + "layer_7_max_spectral_norm": 0.08035240322351456, + "layer_8_update_fnorm": 1.0635977983474731, + "layer_8_max_l1_linf_norm": 1.0531413555145264, + "layer_8_max_spectral_norm": 0.08073287457227707, + "layer_9_update_fnorm": 1.0608100891113281, + "layer_9_max_l1_linf_norm": 1.1406595706939697, + "layer_9_max_spectral_norm": 0.11299193650484085, + "layer_10_update_fnorm": 1.038003921508789, + "layer_10_max_l1_linf_norm": 1.1072924137115479, + "layer_10_max_spectral_norm": 0.1357913315296173, + "layer_11_update_fnorm": 1.0379598140716553, + "layer_11_max_l1_linf_norm": 1.152256965637207, + "layer_11_max_spectral_norm": 0.15483029186725616, + "layer_12_update_fnorm": 1.0299488306045532, + "layer_12_max_l1_linf_norm": 1.2042121887207031, + "layer_12_max_spectral_norm": 0.1651785969734192, + "total_sharpness": 0.0009786429582163692, + "ip_v_neg_g": 0.01651063933968544, + "cos_v_neg_g": 0.003032635897397995, + "v_norm": 4.423046588897705, + "g_norm": 1.230898141860962, + "hv_norm": 0.3484225273132324, + "cos_v_hv": 0.012423373758792877, + "hg_norm": 11.0812349319458, + "cos_g_hg": 0.6933260560035706, + "v_parallel_norm": 0.0007777732680551708, + "v_perp_norm": 4.423046588897705, + "layer_1_v_norm": 0.9405917525291443, + "layer_1_cos_v_neg_g": 0.00765525596216321, + "layer_2_v_norm": 0.9820553660392761, + "layer_2_cos_v_neg_g": 0.003760574385523796, + "layer_3_v_norm": 0.9952684044837952, + "layer_3_cos_v_neg_g": 0.00407508946955204, + "layer_4_v_norm": 1.02198326587677, + "layer_4_cos_v_neg_g": 0.002870325231924653, + "layer_5_v_norm": 1.0506035089492798, + "layer_5_cos_v_neg_g": 0.002866902621462941, + "layer_6_v_norm": 1.0603522062301636, + "layer_6_cos_v_neg_g": 0.003278568387031555, + "layer_7_v_norm": 1.061082124710083, + "layer_7_cos_v_neg_g": 0.0034333388321101665, + "layer_8_v_norm": 1.0635977983474731, + "layer_8_cos_v_neg_g": 0.003902303520590067, + "layer_9_v_norm": 1.0608100891113281, + "layer_9_cos_v_neg_g": 0.00433970196172595, + "layer_10_v_norm": 1.038003921508789, + "layer_10_cos_v_neg_g": 0.004530409350991249, + "layer_11_v_norm": 1.0379598140716553, + "layer_11_cos_v_neg_g": 0.0034754988737404346, + "layer_12_v_norm": 1.0299488306045532, + "layer_12_cos_v_neg_g": 0.004323458764702082, + "layer_1_sharpness": 0.0010169028537347913, + "layer_2_sharpness": 0.00010559701331658289, + "layer_3_sharpness": 0.00033383708796463907, + "layer_4_sharpness": 0.00015995584544725716, + "layer_5_sharpness": 0.00011648819781839848, + "layer_6_sharpness": 0.00012237422924954444, + "layer_7_sharpness": 0.00019526183314155787, + "layer_8_sharpness": 0.0002707852690946311, + "layer_9_sharpness": 0.00033211809932254255, + "layer_10_sharpness": 0.0004376813303679228, + "layer_11_sharpness": 0.00038004486123099923, + "layer_12_sharpness": 0.0005298223113641143 +} \ No newline at end of file diff --git 
a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..3e257ac76be671c70adcbce1353b7d462f70fa96 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.447958946228027, + "total_l1_linf_norm": 39494.0078125, + "total_spectral_norm": 4.447958469390869, + "layer_1_update_fnorm": 0.9838617444038391, + "layer_1_max_l1_linf_norm": 1.1679012775421143, + "layer_1_max_spectral_norm": 0.14208737015724182, + "layer_2_update_fnorm": 0.9887482523918152, + "layer_2_max_l1_linf_norm": 1.0756903886795044, + "layer_2_max_spectral_norm": 0.14742611348628998, + "layer_3_update_fnorm": 1.0185688734054565, + "layer_3_max_l1_linf_norm": 1.0783603191375732, + "layer_3_max_spectral_norm": 0.1416526883840561, + "layer_4_update_fnorm": 1.0286564826965332, + "layer_4_max_l1_linf_norm": 1.0591150522232056, + "layer_4_max_spectral_norm": 0.11103227734565735, + "layer_5_update_fnorm": 1.062689185142517, + "layer_5_max_l1_linf_norm": 1.0887199640274048, + "layer_5_max_spectral_norm": 0.09275788068771362, + "layer_6_update_fnorm": 1.0656013488769531, + "layer_6_max_l1_linf_norm": 1.0801379680633545, + "layer_6_max_spectral_norm": 0.08670049905776978, + "layer_7_update_fnorm": 1.0660154819488525, + "layer_7_max_l1_linf_norm": 1.0666477680206299, + "layer_7_max_spectral_norm": 0.07822347432374954, + "layer_8_update_fnorm": 1.0626174211502075, + "layer_8_max_l1_linf_norm": 1.080653429031372, + "layer_8_max_spectral_norm": 0.08350672572851181, + "layer_9_update_fnorm": 1.0578205585479736, + "layer_9_max_l1_linf_norm": 1.1067314147949219, + "layer_9_max_spectral_norm": 0.10724985599517822, + "layer_10_update_fnorm": 1.0348130464553833, + "layer_10_max_l1_linf_norm": 1.1551647186279297, + "layer_10_max_spectral_norm": 0.13934873044490814, + "layer_11_update_fnorm": 1.0417120456695557, + "layer_11_max_l1_linf_norm": 1.1823415756225586, + "layer_11_max_spectral_norm": 0.15361808240413666, + "layer_12_update_fnorm": 1.039689540863037, + "layer_12_max_l1_linf_norm": 1.1963008642196655, + "layer_12_max_spectral_norm": 0.1616697609424591, + "total_sharpness": 0.0011012052418664098, + "ip_v_neg_g": 0.01211380772292614, + "cos_v_neg_g": 0.003999926149845123, + "v_norm": 4.447958946228027, + "g_norm": 0.680875837802887, + "hv_norm": 0.2945336699485779, + "cos_v_hv": 0.01663007028400898, + "hg_norm": 3.0424184799194336, + "cos_g_hg": 0.5728052258491516, + "v_parallel_norm": 0.0007539794314652681, + "v_perp_norm": 4.447958946228027, + "layer_1_v_norm": 0.9838617444038391, + "layer_1_cos_v_neg_g": 0.010632557794451714, + "layer_2_v_norm": 0.9887482523918152, + "layer_2_cos_v_neg_g": 0.004677485208958387, + "layer_3_v_norm": 1.0185688734054565, + "layer_3_cos_v_neg_g": 0.004524831660091877, + "layer_4_v_norm": 1.0286564826965332, + "layer_4_cos_v_neg_g": 0.002800124231725931, + "layer_5_v_norm": 1.062689185142517, + "layer_5_cos_v_neg_g": 0.003076176391914487, + "layer_6_v_norm": 1.0656013488769531, + "layer_6_cos_v_neg_g": 0.004056065808981657, + "layer_7_v_norm": 1.0660154819488525, + "layer_7_cos_v_neg_g": 0.004881435539573431, + "layer_8_v_norm": 1.0626174211502075, + "layer_8_cos_v_neg_g": 0.004404003266245127, + "layer_9_v_norm": 1.0578205585479736, + "layer_9_cos_v_neg_g": 0.004351663403213024, + "layer_10_v_norm": 
1.0348130464553833, + "layer_10_cos_v_neg_g": 0.005003741011023521, + "layer_11_v_norm": 1.0417120456695557, + "layer_11_cos_v_neg_g": 0.007305185776203871, + "layer_12_v_norm": 1.039689540863037, + "layer_12_cos_v_neg_g": 0.010918284766376019, + "layer_1_sharpness": 0.0009704309632070363, + "layer_2_sharpness": 0.0001294581888942048, + "layer_3_sharpness": 0.00025589304277673364, + "layer_4_sharpness": 0.00015273013559635729, + "layer_5_sharpness": 0.00015868015179876238, + "layer_6_sharpness": 0.00015472476661670953, + "layer_7_sharpness": 0.0002856723149307072, + "layer_8_sharpness": 0.00036079957499168813, + "layer_9_sharpness": 0.0003392203652765602, + "layer_10_sharpness": 0.0004504684475250542, + "layer_11_sharpness": 0.00041237252298742533, + "layer_12_sharpness": 0.0006214294699020684 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..b336813e9110325436c9ef047008b3980f794e2b --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.518218517303467, + "total_l1_linf_norm": 40212.0078125, + "total_spectral_norm": 4.518218994140625, + "layer_1_update_fnorm": 1.0385589599609375, + "layer_1_max_l1_linf_norm": 1.2501530647277832, + "layer_1_max_spectral_norm": 0.17174026370048523, + "layer_2_update_fnorm": 1.031291127204895, + "layer_2_max_l1_linf_norm": 1.1005818843841553, + "layer_2_max_spectral_norm": 0.15400667488574982, + "layer_3_update_fnorm": 1.0469022989273071, + "layer_3_max_l1_linf_norm": 1.1053340435028076, + "layer_3_max_spectral_norm": 0.15278860926628113, + "layer_4_update_fnorm": 1.060152530670166, + "layer_4_max_l1_linf_norm": 1.1216319799423218, + "layer_4_max_spectral_norm": 0.10798091441392899, + "layer_5_update_fnorm": 1.0813156366348267, + "layer_5_max_l1_linf_norm": 1.1318647861480713, + "layer_5_max_spectral_norm": 0.10198087990283966, + "layer_6_update_fnorm": 1.08652663230896, + "layer_6_max_l1_linf_norm": 1.1971895694732666, + "layer_6_max_spectral_norm": 0.10703945904970169, + "layer_7_update_fnorm": 1.0760546922683716, + "layer_7_max_l1_linf_norm": 1.0876867771148682, + "layer_7_max_spectral_norm": 0.08627086877822876, + "layer_8_update_fnorm": 1.0681129693984985, + "layer_8_max_l1_linf_norm": 1.0745947360992432, + "layer_8_max_spectral_norm": 0.08275274187326431, + "layer_9_update_fnorm": 1.0671051740646362, + "layer_9_max_l1_linf_norm": 1.1010000705718994, + "layer_9_max_spectral_norm": 0.11219269782304764, + "layer_10_update_fnorm": 1.0437389612197876, + "layer_10_max_l1_linf_norm": 1.1129231452941895, + "layer_10_max_spectral_norm": 0.14906010031700134, + "layer_11_update_fnorm": 1.051575779914856, + "layer_11_max_l1_linf_norm": 1.2410885095596313, + "layer_11_max_spectral_norm": 0.14648263156414032, + "layer_12_update_fnorm": 1.049197793006897, + "layer_12_max_l1_linf_norm": 1.167348861694336, + "layer_12_max_spectral_norm": 0.1576940417289734, + "total_sharpness": 0.0013717184774577618, + "ip_v_neg_g": 0.012597626075148582, + "cos_v_neg_g": 0.0038606703747063875, + "v_norm": 4.518218517303467, + "g_norm": 0.7222021222114563, + "hv_norm": 0.3870050311088562, + "cos_v_hv": 0.01601458340883255, + "hg_norm": 3.954493761062622, + "cos_g_hg": 0.6029852628707886, + "v_parallel_norm": 0.0008246822981163859, 
+ "v_perp_norm": 4.518218517303467, + "layer_1_v_norm": 1.0385589599609375, + "layer_1_cos_v_neg_g": 0.007647635415196419, + "layer_2_v_norm": 1.031291127204895, + "layer_2_cos_v_neg_g": 0.003352022496983409, + "layer_3_v_norm": 1.0469021797180176, + "layer_3_cos_v_neg_g": 0.004330967087298632, + "layer_4_v_norm": 1.060152530670166, + "layer_4_cos_v_neg_g": 0.0034580316860228777, + "layer_5_v_norm": 1.0813156366348267, + "layer_5_cos_v_neg_g": 0.00347472564317286, + "layer_6_v_norm": 1.08652663230896, + "layer_6_cos_v_neg_g": 0.003920519258826971, + "layer_7_v_norm": 1.0760546922683716, + "layer_7_cos_v_neg_g": 0.003828073386102915, + "layer_8_v_norm": 1.0681129693984985, + "layer_8_cos_v_neg_g": 0.004390842746943235, + "layer_9_v_norm": 1.0671051740646362, + "layer_9_cos_v_neg_g": 0.0054608979262411594, + "layer_10_v_norm": 1.0437389612197876, + "layer_10_cos_v_neg_g": 0.0067998068407177925, + "layer_11_v_norm": 1.051575779914856, + "layer_11_cos_v_neg_g": 0.008067429065704346, + "layer_12_v_norm": 1.049197793006897, + "layer_12_cos_v_neg_g": 0.008586459793150425, + "layer_1_sharpness": 0.0009897921700030565, + "layer_2_sharpness": 0.00012817872629966587, + "layer_3_sharpness": 0.00022367048950400203, + "layer_4_sharpness": 0.0002179959847126156, + "layer_5_sharpness": 0.0001940989022841677, + "layer_6_sharpness": 0.00018860484124161303, + "layer_7_sharpness": 0.0002639810845721513, + "layer_8_sharpness": 0.0003963762428611517, + "layer_9_sharpness": 0.0004502857045736164, + "layer_10_sharpness": 0.0006426559411920607, + "layer_11_sharpness": 0.0004176326037850231, + "layer_12_sharpness": 0.0005109222256578505 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..a617057300b68af52c63254e30e5af6054b75f5a --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.495510578155518, + "total_l1_linf_norm": 39993.9140625, + "total_spectral_norm": 4.495509624481201, + "layer_1_update_fnorm": 1.0236221551895142, + "layer_1_max_l1_linf_norm": 1.1537144184112549, + "layer_1_max_spectral_norm": 0.14932477474212646, + "layer_2_update_fnorm": 1.0189350843429565, + "layer_2_max_l1_linf_norm": 1.0784969329833984, + "layer_2_max_spectral_norm": 0.14961785078048706, + "layer_3_update_fnorm": 1.0338330268859863, + "layer_3_max_l1_linf_norm": 1.090018630027771, + "layer_3_max_spectral_norm": 0.1513473391532898, + "layer_4_update_fnorm": 1.0539664030075073, + "layer_4_max_l1_linf_norm": 1.0892910957336426, + "layer_4_max_spectral_norm": 0.1117003932595253, + "layer_5_update_fnorm": 1.0778217315673828, + "layer_5_max_l1_linf_norm": 1.1534652709960938, + "layer_5_max_spectral_norm": 0.10225886851549149, + "layer_6_update_fnorm": 1.0820121765136719, + "layer_6_max_l1_linf_norm": 1.1082630157470703, + "layer_6_max_spectral_norm": 0.10698609799146652, + "layer_7_update_fnorm": 1.0725069046020508, + "layer_7_max_l1_linf_norm": 1.062059998512268, + "layer_7_max_spectral_norm": 0.09175654500722885, + "layer_8_update_fnorm": 1.0714048147201538, + "layer_8_max_l1_linf_norm": 1.0899872779846191, + "layer_8_max_spectral_norm": 0.083412766456604, + "layer_9_update_fnorm": 1.062171459197998, + "layer_9_max_l1_linf_norm": 1.1593796014785767, + "layer_9_max_spectral_norm": 
0.11712302267551422, + "layer_10_update_fnorm": 1.0351850986480713, + "layer_10_max_l1_linf_norm": 1.1058709621429443, + "layer_10_max_spectral_norm": 0.14298366010189056, + "layer_11_update_fnorm": 1.0388221740722656, + "layer_11_max_l1_linf_norm": 1.2633024454116821, + "layer_11_max_spectral_norm": 0.15149812400341034, + "layer_12_update_fnorm": 1.0381230115890503, + "layer_12_max_l1_linf_norm": 1.2169990539550781, + "layer_12_max_spectral_norm": 0.1577487289905548, + "total_sharpness": 0.0012386217713356018, + "ip_v_neg_g": 0.013637278228998184, + "cos_v_neg_g": 0.004094555974006653, + "v_norm": 4.495510578155518, + "g_norm": 0.7408697009086609, + "hv_norm": 0.38994041085243225, + "cos_v_hv": 0.014279713854193687, + "hg_norm": 5.014493942260742, + "cos_g_hg": 0.5508518815040588, + "v_parallel_norm": 0.0007892317953519523, + "v_perp_norm": 4.495510578155518, + "layer_1_v_norm": 1.0236221551895142, + "layer_1_cos_v_neg_g": 0.011068408377468586, + "layer_2_v_norm": 1.0189350843429565, + "layer_2_cos_v_neg_g": 0.003519731806591153, + "layer_3_v_norm": 1.0338330268859863, + "layer_3_cos_v_neg_g": 0.004447948187589645, + "layer_4_v_norm": 1.0539664030075073, + "layer_4_cos_v_neg_g": 0.0043256222270429134, + "layer_5_v_norm": 1.0778217315673828, + "layer_5_cos_v_neg_g": 0.003144304733723402, + "layer_6_v_norm": 1.0820121765136719, + "layer_6_cos_v_neg_g": 0.002875651465728879, + "layer_7_v_norm": 1.0725069046020508, + "layer_7_cos_v_neg_g": 0.003647763514891267, + "layer_8_v_norm": 1.0714048147201538, + "layer_8_cos_v_neg_g": 0.004346078727394342, + "layer_9_v_norm": 1.062171459197998, + "layer_9_cos_v_neg_g": 0.005072819534689188, + "layer_10_v_norm": 1.0351850986480713, + "layer_10_cos_v_neg_g": 0.006272341124713421, + "layer_11_v_norm": 1.0388221740722656, + "layer_11_cos_v_neg_g": 0.00941563956439495, + "layer_12_v_norm": 1.0381230115890503, + "layer_12_cos_v_neg_g": 0.010746662504971027, + "layer_1_sharpness": 0.0008929343312047422, + "layer_2_sharpness": 7.748819916741922e-05, + "layer_3_sharpness": 0.0005309897824190557, + "layer_4_sharpness": 0.0002874884521588683, + "layer_5_sharpness": 0.00021791983454022557, + "layer_6_sharpness": 0.0002085715241264552, + "layer_7_sharpness": 0.0002757524198386818, + "layer_8_sharpness": 0.00035561196273192763, + "layer_9_sharpness": 0.0003493805124890059, + "layer_10_sharpness": 0.0004650316550396383, + "layer_11_sharpness": 0.0003721774846781045, + "layer_12_sharpness": 0.0004903952940367162 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..08a890b54fb5d91133fb453c87f18cce5e13dca5 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.520442008972168, + "total_l1_linf_norm": 40203.99609375, + "total_spectral_norm": 4.52044153213501, + "layer_1_update_fnorm": 1.0228737592697144, + "layer_1_max_l1_linf_norm": 1.1401029825210571, + "layer_1_max_spectral_norm": 0.14215701818466187, + "layer_2_update_fnorm": 1.0283302068710327, + "layer_2_max_l1_linf_norm": 1.0826973915100098, + "layer_2_max_spectral_norm": 0.14560580253601074, + "layer_3_update_fnorm": 1.0432655811309814, + "layer_3_max_l1_linf_norm": 1.0643789768218994, + "layer_3_max_spectral_norm": 0.14717763662338257, + 
"layer_4_update_fnorm": 1.061057209968567, + "layer_4_max_l1_linf_norm": 1.0747339725494385, + "layer_4_max_spectral_norm": 0.10728771239519119, + "layer_5_update_fnorm": 1.0803536176681519, + "layer_5_max_l1_linf_norm": 1.0873733758926392, + "layer_5_max_spectral_norm": 0.08848410844802856, + "layer_6_update_fnorm": 1.0837239027023315, + "layer_6_max_l1_linf_norm": 1.096104383468628, + "layer_6_max_spectral_norm": 0.08479317277669907, + "layer_7_update_fnorm": 1.0824068784713745, + "layer_7_max_l1_linf_norm": 1.0921189785003662, + "layer_7_max_spectral_norm": 0.07806438207626343, + "layer_8_update_fnorm": 1.0785555839538574, + "layer_8_max_l1_linf_norm": 1.1080753803253174, + "layer_8_max_spectral_norm": 0.08103813230991364, + "layer_9_update_fnorm": 1.0759243965148926, + "layer_9_max_l1_linf_norm": 1.1452550888061523, + "layer_9_max_spectral_norm": 0.11637832969427109, + "layer_10_update_fnorm": 1.0496312379837036, + "layer_10_max_l1_linf_norm": 1.1239652633666992, + "layer_10_max_spectral_norm": 0.1385904848575592, + "layer_11_update_fnorm": 1.0580432415008545, + "layer_11_max_l1_linf_norm": 1.2480647563934326, + "layer_11_max_spectral_norm": 0.1494443565607071, + "layer_12_update_fnorm": 1.0536936521530151, + "layer_12_max_l1_linf_norm": 1.220285415649414, + "layer_12_max_spectral_norm": 0.1507817953824997, + "total_sharpness": 0.0009068095241673291, + "ip_v_neg_g": 0.012217984534800053, + "cos_v_neg_g": 0.00327280187048018, + "v_norm": 4.520442008972168, + "g_norm": 0.8258457779884338, + "hv_norm": 0.30337825417518616, + "cos_v_hv": 0.013511777855455875, + "hg_norm": 6.810629367828369, + "cos_g_hg": 0.6026468276977539, + "v_parallel_norm": 0.0005384890246205032, + "v_perp_norm": 4.520442008972168, + "layer_1_v_norm": 1.0228737592697144, + "layer_1_cos_v_neg_g": 0.007737285457551479, + "layer_2_v_norm": 1.0283302068710327, + "layer_2_cos_v_neg_g": 0.003705346491187811, + "layer_3_v_norm": 1.0432655811309814, + "layer_3_cos_v_neg_g": 0.003789434442296624, + "layer_4_v_norm": 1.061057209968567, + "layer_4_cos_v_neg_g": 0.003137393854558468, + "layer_5_v_norm": 1.0803536176681519, + "layer_5_cos_v_neg_g": 0.0033754960168153048, + "layer_6_v_norm": 1.083724021911621, + "layer_6_cos_v_neg_g": 0.003090823534876108, + "layer_7_v_norm": 1.0824068784713745, + "layer_7_cos_v_neg_g": 0.0036874625366181135, + "layer_8_v_norm": 1.0785555839538574, + "layer_8_cos_v_neg_g": 0.0035563299898058176, + "layer_9_v_norm": 1.0759243965148926, + "layer_9_cos_v_neg_g": 0.0031751778442412615, + "layer_10_v_norm": 1.0496312379837036, + "layer_10_cos_v_neg_g": 0.005449589807540178, + "layer_11_v_norm": 1.0580432415008545, + "layer_11_cos_v_neg_g": 0.006972518749535084, + "layer_12_v_norm": 1.0536936521530151, + "layer_12_cos_v_neg_g": 0.006223422475159168, + "layer_1_sharpness": 0.0008398746722377837, + "layer_2_sharpness": 7.804187771398574e-05, + "layer_3_sharpness": 0.00017995121015701443, + "layer_4_sharpness": 0.00012473473907448351, + "layer_5_sharpness": 0.00014410933363251388, + "layer_6_sharpness": 0.00015608900866936892, + "layer_7_sharpness": 0.00024246901739388704, + "layer_8_sharpness": 0.00028985977405682206, + "layer_9_sharpness": 0.0003051424282602966, + "layer_10_sharpness": 0.00045217713341116905, + "layer_11_sharpness": 0.00036142425960861146, + "layer_12_sharpness": 0.0003278734511695802 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_8000.json 
b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..9ee5f8d7af61c6b84aa89dc8f8f643a40c9f2290 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.503103733062744, + "total_l1_linf_norm": 40081.078125, + "total_spectral_norm": 4.503103733062744, + "layer_1_update_fnorm": 1.0122956037521362, + "layer_1_max_l1_linf_norm": 1.2477953433990479, + "layer_1_max_spectral_norm": 0.15840046107769012, + "layer_2_update_fnorm": 1.0221821069717407, + "layer_2_max_l1_linf_norm": 1.074369192123413, + "layer_2_max_spectral_norm": 0.15101762115955353, + "layer_3_update_fnorm": 1.037589192390442, + "layer_3_max_l1_linf_norm": 1.0767779350280762, + "layer_3_max_spectral_norm": 0.1553177386522293, + "layer_4_update_fnorm": 1.0567048788070679, + "layer_4_max_l1_linf_norm": 1.1107350587844849, + "layer_4_max_spectral_norm": 0.11474060267210007, + "layer_5_update_fnorm": 1.0817142724990845, + "layer_5_max_l1_linf_norm": 1.121415615081787, + "layer_5_max_spectral_norm": 0.09554429352283478, + "layer_6_update_fnorm": 1.083589792251587, + "layer_6_max_l1_linf_norm": 1.0956990718841553, + "layer_6_max_spectral_norm": 0.09669248759746552, + "layer_7_update_fnorm": 1.0831079483032227, + "layer_7_max_l1_linf_norm": 1.084869623184204, + "layer_7_max_spectral_norm": 0.08557985723018646, + "layer_8_update_fnorm": 1.0762208700180054, + "layer_8_max_l1_linf_norm": 1.1281394958496094, + "layer_8_max_spectral_norm": 0.09599454700946808, + "layer_9_update_fnorm": 1.0750781297683716, + "layer_9_max_l1_linf_norm": 1.1409518718719482, + "layer_9_max_spectral_norm": 0.13656273484230042, + "layer_10_update_fnorm": 1.04755699634552, + "layer_10_max_l1_linf_norm": 1.2645115852355957, + "layer_10_max_spectral_norm": 0.16598618030548096, + "layer_11_update_fnorm": 1.059154748916626, + "layer_11_max_l1_linf_norm": 1.2783968448638916, + "layer_11_max_spectral_norm": 0.16453874111175537, + "layer_12_update_fnorm": 1.056707501411438, + "layer_12_max_l1_linf_norm": 1.2793984413146973, + "layer_12_max_spectral_norm": 0.16901715099811554, + "total_sharpness": 0.0013226198498159647, + "ip_v_neg_g": 0.012926398776471615, + "cos_v_neg_g": 0.003869615262374282, + "v_norm": 4.503103733062744, + "g_norm": 0.7418187260627747, + "hv_norm": 0.4150618016719818, + "cos_v_hv": 0.014349416829645634, + "hg_norm": 5.691033363342285, + "cos_g_hg": 0.553259551525116, + "v_parallel_norm": 0.0007153606275096536, + "v_perp_norm": 4.503103733062744, + "layer_1_v_norm": 1.0122956037521362, + "layer_1_cos_v_neg_g": 0.007764468900859356, + "layer_2_v_norm": 1.0221821069717407, + "layer_2_cos_v_neg_g": 0.0072194794192910194, + "layer_3_v_norm": 1.037589192390442, + "layer_3_cos_v_neg_g": 0.00453191576525569, + "layer_4_v_norm": 1.0567048788070679, + "layer_4_cos_v_neg_g": 0.0025728584732860327, + "layer_5_v_norm": 1.0817142724990845, + "layer_5_cos_v_neg_g": 0.0023008098360151052, + "layer_6_v_norm": 1.083589792251587, + "layer_6_cos_v_neg_g": 0.0032883239910006523, + "layer_7_v_norm": 1.0831079483032227, + "layer_7_cos_v_neg_g": 0.0033974202815443277, + "layer_8_v_norm": 1.076220989227295, + "layer_8_cos_v_neg_g": 0.004506825003772974, + "layer_9_v_norm": 1.0750781297683716, + "layer_9_cos_v_neg_g": 0.005186702124774456, + "layer_10_v_norm": 1.04755699634552, + "layer_10_cos_v_neg_g": 0.007169533055275679, + "layer_11_v_norm": 1.059154748916626, + 
"layer_11_cos_v_neg_g": 0.008502565324306488, + "layer_12_v_norm": 1.056707501411438, + "layer_12_cos_v_neg_g": 0.00855555571615696, + "layer_1_sharpness": 0.0009981711627915502, + "layer_2_sharpness": 0.0003228918940294534, + "layer_3_sharpness": 0.000499713874887675, + "layer_4_sharpness": 0.00022218852245714515, + "layer_5_sharpness": 0.0001617021334823221, + "layer_6_sharpness": 0.0001768340589478612, + "layer_7_sharpness": 0.00027054609381593764, + "layer_8_sharpness": 0.0003665576805360615, + "layer_9_sharpness": 0.00046419116551987827, + "layer_10_sharpness": 0.0005891151959076524, + "layer_11_sharpness": 0.0003891513915732503, + "layer_12_sharpness": 0.00047460891073569655 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..f3285769fb9bbe2eaa5a5671bb7d6ed3c9e080ba --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.3927998542785645, + "total_l1_linf_norm": 38888.44140625, + "total_spectral_norm": 4.392801284790039, + "layer_1_update_fnorm": 0.9630382061004639, + "layer_1_max_l1_linf_norm": 1.1332148313522339, + "layer_1_max_spectral_norm": 0.1478518843650818, + "layer_2_update_fnorm": 0.9870977997779846, + "layer_2_max_l1_linf_norm": 1.0473265647888184, + "layer_2_max_spectral_norm": 0.1477578729391098, + "layer_3_update_fnorm": 0.9944890737533569, + "layer_3_max_l1_linf_norm": 1.0621569156646729, + "layer_3_max_spectral_norm": 0.1484670341014862, + "layer_4_update_fnorm": 1.022128701210022, + "layer_4_max_l1_linf_norm": 1.071426510810852, + "layer_4_max_spectral_norm": 0.10490340739488602, + "layer_5_update_fnorm": 1.0489064455032349, + "layer_5_max_l1_linf_norm": 1.107987403869629, + "layer_5_max_spectral_norm": 0.09078755229711533, + "layer_6_update_fnorm": 1.0541932582855225, + "layer_6_max_l1_linf_norm": 1.073923110961914, + "layer_6_max_spectral_norm": 0.09096968173980713, + "layer_7_update_fnorm": 1.0517659187316895, + "layer_7_max_l1_linf_norm": 1.0551738739013672, + "layer_7_max_spectral_norm": 0.07561629265546799, + "layer_8_update_fnorm": 1.046135663986206, + "layer_8_max_l1_linf_norm": 1.061600923538208, + "layer_8_max_spectral_norm": 0.08996224403381348, + "layer_9_update_fnorm": 1.0400971174240112, + "layer_9_max_l1_linf_norm": 1.1272798776626587, + "layer_9_max_spectral_norm": 0.12149031460285187, + "layer_10_update_fnorm": 1.0106964111328125, + "layer_10_max_l1_linf_norm": 1.1343309879302979, + "layer_10_max_spectral_norm": 0.15031550824642181, + "layer_11_update_fnorm": 1.0090124607086182, + "layer_11_max_l1_linf_norm": 1.3044142723083496, + "layer_11_max_spectral_norm": 0.1598748117685318, + "layer_12_update_fnorm": 1.0073400735855103, + "layer_12_max_l1_linf_norm": 1.2594469785690308, + "layer_12_max_spectral_norm": 0.16772955656051636, + "total_sharpness": 0.0008491296321153641, + "ip_v_neg_g": 0.009985070675611496, + "cos_v_neg_g": 0.0031613954342901707, + "v_norm": 4.3927998542785645, + "g_norm": 0.7190033197402954, + "hv_norm": 0.2568165063858032, + "cos_v_hv": 0.014524209313094616, + "hg_norm": 4.1579508781433105, + "cos_g_hg": 0.5413174629211426, + "v_parallel_norm": 0.0005383372190408409, + "v_perp_norm": 4.3927998542785645, + "layer_1_v_norm": 0.9630382061004639, + "layer_1_cos_v_neg_g": 
0.009605814702808857, + "layer_2_v_norm": 0.9870977997779846, + "layer_2_cos_v_neg_g": 0.0035931626334786415, + "layer_3_v_norm": 0.9944890737533569, + "layer_3_cos_v_neg_g": 0.0035364243667572737, + "layer_4_v_norm": 1.022128701210022, + "layer_4_cos_v_neg_g": 0.003108015051111579, + "layer_5_v_norm": 1.0489064455032349, + "layer_5_cos_v_neg_g": 0.0034168646670877934, + "layer_6_v_norm": 1.054193377494812, + "layer_6_cos_v_neg_g": 0.0034889099188148975, + "layer_7_v_norm": 1.0517659187316895, + "layer_7_cos_v_neg_g": 0.0033238783944398165, + "layer_8_v_norm": 1.046135663986206, + "layer_8_cos_v_neg_g": 0.003528801491484046, + "layer_9_v_norm": 1.0400971174240112, + "layer_9_cos_v_neg_g": 0.003462006337940693, + "layer_10_v_norm": 1.0106964111328125, + "layer_10_cos_v_neg_g": 0.004867097828537226, + "layer_11_v_norm": 1.0090124607086182, + "layer_11_cos_v_neg_g": 0.006306036841124296, + "layer_12_v_norm": 1.0073400735855103, + "layer_12_cos_v_neg_g": 0.0057677337899804115, + "layer_1_sharpness": 0.0007636973168700933, + "layer_2_sharpness": 7.490185089409351e-05, + "layer_3_sharpness": 0.00019463052740320563, + "layer_4_sharpness": 0.0001500919897807762, + "layer_5_sharpness": 0.00017351323913317174, + "layer_6_sharpness": 0.0001625916629564017, + "layer_7_sharpness": 0.00020968640455976129, + "layer_8_sharpness": 0.00028863694751635194, + "layer_9_sharpness": 0.0002851125900633633, + "layer_10_sharpness": 0.00040492689004167914, + "layer_11_sharpness": 0.0003121467598248273, + "layer_12_sharpness": 0.00048389186849817634 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..3c88e517c5627ad839a2ae60a2217256293cecc5 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.485447883605957, + "total_l1_linf_norm": 39888.9609375, + "total_spectral_norm": 4.485446929931641, + "layer_1_update_fnorm": 0.9982377886772156, + "layer_1_max_l1_linf_norm": 1.2190637588500977, + "layer_1_max_spectral_norm": 0.14682000875473022, + "layer_2_update_fnorm": 1.0100799798965454, + "layer_2_max_l1_linf_norm": 1.0747745037078857, + "layer_2_max_spectral_norm": 0.1408315896987915, + "layer_3_update_fnorm": 1.0278352499008179, + "layer_3_max_l1_linf_norm": 1.0508328676223755, + "layer_3_max_spectral_norm": 0.15474824607372284, + "layer_4_update_fnorm": 1.050794243812561, + "layer_4_max_l1_linf_norm": 1.082509994506836, + "layer_4_max_spectral_norm": 0.10410834848880768, + "layer_5_update_fnorm": 1.0799285173416138, + "layer_5_max_l1_linf_norm": 1.1399967670440674, + "layer_5_max_spectral_norm": 0.09854678809642792, + "layer_6_update_fnorm": 1.0786609649658203, + "layer_6_max_l1_linf_norm": 1.0910512208938599, + "layer_6_max_spectral_norm": 0.09392222762107849, + "layer_7_update_fnorm": 1.0772751569747925, + "layer_7_max_l1_linf_norm": 1.077509880065918, + "layer_7_max_spectral_norm": 0.08137000352144241, + "layer_8_update_fnorm": 1.0720947980880737, + "layer_8_max_l1_linf_norm": 1.0881447792053223, + "layer_8_max_spectral_norm": 0.08826995640993118, + "layer_9_update_fnorm": 1.0640705823898315, + "layer_9_max_l1_linf_norm": 1.1956963539123535, + "layer_9_max_spectral_norm": 0.1255468726158142, + "layer_10_update_fnorm": 1.0361047983169556, + 
"layer_10_max_l1_linf_norm": 1.1690056324005127, + "layer_10_max_spectral_norm": 0.16280092298984528, + "layer_11_update_fnorm": 1.052357792854309, + "layer_11_max_l1_linf_norm": 1.2869484424591064, + "layer_11_max_spectral_norm": 0.16557741165161133, + "layer_12_update_fnorm": 1.0474438667297363, + "layer_12_max_l1_linf_norm": 1.289501428604126, + "layer_12_max_spectral_norm": 0.1743112951517105, + "total_sharpness": 0.0011834489414468408, + "ip_v_neg_g": 0.010699539445340633, + "cos_v_neg_g": 0.003124191192910075, + "v_norm": 4.485447883605957, + "g_norm": 0.7635221481323242, + "hv_norm": 0.5122349262237549, + "cos_v_hv": 0.010363015346229076, + "hg_norm": 7.82946252822876, + "cos_g_hg": 0.46626827120780945, + "v_parallel_norm": 0.0005542851868085563, + "v_perp_norm": 4.485447883605957, + "layer_1_v_norm": 0.9982377886772156, + "layer_1_cos_v_neg_g": 0.007238641381263733, + "layer_2_v_norm": 1.0100799798965454, + "layer_2_cos_v_neg_g": 0.006062676664441824, + "layer_3_v_norm": 1.0278352499008179, + "layer_3_cos_v_neg_g": 0.0042252447456121445, + "layer_4_v_norm": 1.050794243812561, + "layer_4_cos_v_neg_g": 0.003409488359466195, + "layer_5_v_norm": 1.0799285173416138, + "layer_5_cos_v_neg_g": 0.002646374749019742, + "layer_6_v_norm": 1.0786610841751099, + "layer_6_cos_v_neg_g": 0.0023719898890703917, + "layer_7_v_norm": 1.0772751569747925, + "layer_7_cos_v_neg_g": 0.0023897523060441017, + "layer_8_v_norm": 1.0720947980880737, + "layer_8_cos_v_neg_g": 0.0029712305404245853, + "layer_9_v_norm": 1.0640705823898315, + "layer_9_cos_v_neg_g": 0.0036155739799141884, + "layer_10_v_norm": 1.0361047983169556, + "layer_10_cos_v_neg_g": 0.005543599370867014, + "layer_11_v_norm": 1.0523576736450195, + "layer_11_cos_v_neg_g": 0.006047248374670744, + "layer_12_v_norm": 1.0474438667297363, + "layer_12_cos_v_neg_g": 0.007719431538134813, + "layer_1_sharpness": 0.0007111027953214943, + "layer_2_sharpness": 0.0004806460055988282, + "layer_3_sharpness": 0.000491149490699172, + "layer_4_sharpness": 0.00016962509835138917, + "layer_5_sharpness": 0.00013172088074497879, + "layer_6_sharpness": 0.00014738918980583549, + "layer_7_sharpness": 0.00024316941562574357, + "layer_8_sharpness": 0.000291804492007941, + "layer_9_sharpness": 0.00041932339081540704, + "layer_10_sharpness": 0.0006431269575841725, + "layer_11_sharpness": 0.0003803985600825399, + "layer_12_sharpness": 0.0006465642945840955 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..970a774cf5ccd8d7d46bc5f356837b1ad2ea7f4e --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.510036945343018, + "total_l1_linf_norm": 40156.73828125, + "total_spectral_norm": 4.510036468505859, + "layer_1_update_fnorm": 1.0371013879776, + "layer_1_max_l1_linf_norm": 1.2425410747528076, + "layer_1_max_spectral_norm": 0.15662533044815063, + "layer_2_update_fnorm": 1.0522352457046509, + "layer_2_max_l1_linf_norm": 1.1004464626312256, + "layer_2_max_spectral_norm": 0.1457330286502838, + "layer_3_update_fnorm": 1.0502904653549194, + "layer_3_max_l1_linf_norm": 1.081789255142212, + "layer_3_max_spectral_norm": 0.16524016857147217, + "layer_4_update_fnorm": 1.0623022317886353, + "layer_4_max_l1_linf_norm": 
1.119400978088379, + "layer_4_max_spectral_norm": 0.1092299371957779, + "layer_5_update_fnorm": 1.0844844579696655, + "layer_5_max_l1_linf_norm": 1.1182572841644287, + "layer_5_max_spectral_norm": 0.09758011251688004, + "layer_6_update_fnorm": 1.0911763906478882, + "layer_6_max_l1_linf_norm": 1.0953710079193115, + "layer_6_max_spectral_norm": 0.10364308208227158, + "layer_7_update_fnorm": 1.081095814704895, + "layer_7_max_l1_linf_norm": 1.0718955993652344, + "layer_7_max_spectral_norm": 0.08376577496528625, + "layer_8_update_fnorm": 1.0744918584823608, + "layer_8_max_l1_linf_norm": 1.1004164218902588, + "layer_8_max_spectral_norm": 0.0976468175649643, + "layer_9_update_fnorm": 1.0671554803848267, + "layer_9_max_l1_linf_norm": 1.164644718170166, + "layer_9_max_spectral_norm": 0.14669178426265717, + "layer_10_update_fnorm": 1.038078784942627, + "layer_10_max_l1_linf_norm": 1.1343798637390137, + "layer_10_max_spectral_norm": 0.16182675957679749, + "layer_11_update_fnorm": 1.0435839891433716, + "layer_11_max_l1_linf_norm": 1.2101266384124756, + "layer_11_max_spectral_norm": 0.16984057426452637, + "layer_12_update_fnorm": 1.03748619556427, + "layer_12_max_l1_linf_norm": 1.3504756689071655, + "layer_12_max_spectral_norm": 0.1590833067893982, + "total_sharpness": 0.0014000083319842815, + "ip_v_neg_g": 0.013378603383898735, + "cos_v_neg_g": 0.0038394476287066936, + "v_norm": 4.510036945343018, + "g_norm": 0.7726128697395325, + "hv_norm": 0.4440746307373047, + "cos_v_hv": 0.014218531548976898, + "hg_norm": 5.117405891418457, + "cos_g_hg": 0.6087834239006042, + "v_parallel_norm": 0.0005118888802826405, + "v_perp_norm": 4.510036945343018, + "layer_1_v_norm": 1.0371013879776, + "layer_1_cos_v_neg_g": 0.01122243795543909, + "layer_2_v_norm": 1.0522352457046509, + "layer_2_cos_v_neg_g": 0.011786929331719875, + "layer_3_v_norm": 1.0502904653549194, + "layer_3_cos_v_neg_g": 0.007115335203707218, + "layer_4_v_norm": 1.0623022317886353, + "layer_4_cos_v_neg_g": 0.002850552089512348, + "layer_5_v_norm": 1.0844844579696655, + "layer_5_cos_v_neg_g": 0.0028147343546152115, + "layer_6_v_norm": 1.0911763906478882, + "layer_6_cos_v_neg_g": 0.0034884666092693806, + "layer_7_v_norm": 1.081095814704895, + "layer_7_cos_v_neg_g": 0.0021674036979675293, + "layer_8_v_norm": 1.0744918584823608, + "layer_8_cos_v_neg_g": 0.0032383757643401623, + "layer_9_v_norm": 1.0671554803848267, + "layer_9_cos_v_neg_g": 0.0035743603948503733, + "layer_10_v_norm": 1.038078784942627, + "layer_10_cos_v_neg_g": 0.005776457488536835, + "layer_11_v_norm": 1.0435841083526611, + "layer_11_cos_v_neg_g": 0.007675998844206333, + "layer_12_v_norm": 1.03748619556427, + "layer_12_cos_v_neg_g": 0.005846488755196333, + "layer_1_sharpness": 0.0008172267698682845, + "layer_2_sharpness": 0.00032277984428219497, + "layer_3_sharpness": 0.0006735771894454956, + "layer_4_sharpness": 0.0002206055069109425, + "layer_5_sharpness": 0.0002316822938155383, + "layer_6_sharpness": 0.00022794965479988605, + "layer_7_sharpness": 0.0002809397119563073, + "layer_8_sharpness": 0.00040729172178544104, + "layer_9_sharpness": 0.0005138108972460032, + "layer_10_sharpness": 0.0005835423362441361, + "layer_11_sharpness": 0.0004554213082883507, + "layer_12_sharpness": 0.00036955170799046755 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/training_log.txt b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/training_log.txt new file mode 100644 index 
0000000000000000000000000000000000000000..773e54f327bd9052f114b48883bce0d86e0aeeb7 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert 
len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + 
write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. 
+ # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. 
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
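+    # A minimal, self-contained sketch (toy model, illustrative names only) of the
+    # double-backward Hessian-vector-product trick used below; the sharpness reported
+    # later is the Rayleigh quotient v^T H v / (v^T v) along the update direction v:
+    #
+    #   import torch
+    #   net = torch.nn.Linear(4, 1)
+    #   x, y = torch.randn(8, 4), torch.randn(8, 1)
+    #   loss = torch.nn.functional.mse_loss(net(x), y)
+    #   grads = torch.autograd.grad(loss, net.parameters(), create_graph=True)
+    #   v = [torch.randn_like(p) for p in net.parameters()]   # probe direction
+    #   g_dot_v = sum((g * vi).sum() for g, vi in zip(grads, v))
+    #   hv = torch.autograd.grad(g_dot_v, net.parameters())   # H @ v (Pearlmutter trick)
+    #   sharpness = (sum((h * vi).sum() for h, vi in zip(hv, v))
+    #                / sum((vi * vi).sum() for vi in v))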
+ loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = <v, -g>; cos_v_neg_g = <v, -g> / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm =
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate for the AdamW optimizer") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="final learning rate, as a fraction of the base learning rate") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how many steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, 
+ device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
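+            # Illustrative sketch (hypothetical names) of what the capture below amounts to:
+            #   before = [p.detach().clone() for p in model.parameters()]
+            #   optimizer.step()
+            #   update = [b - p.detach() for b, p in zip(before, model.parameters())]
+            # i.e. the analysis direction v is the realized parameter change theta_before - theta_after,
+            # which already folds in the learning rate, momentum / Adam state, and gradient clipping.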
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026603 +step:0 train loss:11.019228 +step:1 train loss:10.861330 +step:2 train loss:10.587100 +step:3 train loss:10.326361 +step:4 train loss:10.129658 +step:5 train loss:9.953171 +step:6 train loss:9.829903 +step:7 train loss:9.710363 +step:8 train loss:9.682979 +step:9 train loss:9.619624 +step:10 train loss:9.592817 +step:11 train loss:9.559793 +step:12 train loss:9.493972 +step:13 train loss:9.427437 +step:14 train loss:9.376854 +step:15 train loss:9.313620 +step:16 train loss:9.259697 +step:17 train loss:9.188359 +step:18 train loss:9.170364 +step:19 train loss:9.090624 +step:20 train loss:9.036908 +step:21 train loss:9.007249 +step:22 train loss:8.870847 +step:23 train loss:8.855865 +step:24 train loss:8.758526 +step:25 train loss:8.730013 +step:26 train loss:8.651573 +step:27 train loss:8.559449 +step:28 train loss:8.537726 +step:29 train loss:8.475349 +step:30 train loss:8.417757 +step:31 train loss:8.307889 +step:32 train loss:8.251253 +step:33 train loss:8.183208 +step:34 train loss:8.172077 +step:35 train loss:8.062297 +step:36 train loss:8.015575 +step:37 train loss:7.919177 +step:38 train loss:7.906404 +step:39 
train loss:7.820517 +step:40 train loss:7.779230 +step:41 train loss:7.693663 +step:42 train loss:7.689746 +step:43 train loss:7.590885 +step:44 train loss:7.533147 +step:45 train loss:7.531911 +step:46 train loss:7.474424 +step:47 train loss:7.476285 +step:48 train loss:7.377592 +step:49 train loss:7.352447 +step:50 train loss:7.275494 +step:51 train loss:7.267167 +step:52 train loss:7.262322 +step:53 train loss:7.228252 +step:54 train loss:7.203259 +step:55 train loss:7.137533 +step:56 train loss:7.100938 +step:57 train loss:7.119496 +step:58 train loss:7.036008 +step:59 train loss:7.058528 +step:60 train loss:7.030593 +step:61 train loss:6.993155 +step:62 train loss:6.974191 +step:63 train loss:7.021764 +step:64 train loss:6.917957 +step:65 train loss:6.937332 +step:66 train loss:6.949371 +step:67 train loss:6.961531 +step:68 train loss:6.921583 +step:69 train loss:6.886842 +step:70 train loss:6.848574 +step:71 train loss:6.819071 +step:72 train loss:6.859267 +step:73 train loss:6.786294 +step:74 train loss:6.816416 +step:75 train loss:6.758345 +step:76 train loss:6.838686 +step:77 train loss:6.771539 +step:78 train loss:6.526150 +step:79 train loss:6.711505 +step:80 train loss:6.673613 +step:81 train loss:6.758817 +step:82 train loss:6.711124 +step:83 train loss:6.674675 +step:84 train loss:6.660316 +step:85 train loss:6.619121 +step:86 train loss:6.619065 +step:87 train loss:6.594769 +step:88 train loss:6.585663 +step:89 train loss:6.549877 +step:90 train loss:6.593302 +step:91 train loss:6.598070 +step:92 train loss:6.603402 +step:93 train loss:6.554456 +step:94 train loss:6.516251 +step:95 train loss:6.469765 +step:96 train loss:6.569727 +step:97 train loss:6.505400 +step:98 train loss:6.499275 +step:99 train loss:6.467481 +step:100 train loss:6.490386 +step:101 train loss:6.418886 +step:102 train loss:6.436616 +step:103 train loss:6.436627 +step:104 train loss:6.458988 +step:105 train loss:6.524231 +step:106 train loss:6.469657 +step:107 train loss:6.416121 +step:108 train loss:6.440888 +step:109 train loss:6.475173 +step:110 train loss:6.406099 +step:111 train loss:6.420832 +step:112 train loss:6.416759 +step:113 train loss:6.372642 +step:114 train loss:6.436715 +step:115 train loss:6.385892 +step:116 train loss:6.372581 +step:117 train loss:6.312580 +step:118 train loss:6.375330 +step:119 train loss:6.342608 +step:120 train loss:6.353158 +step:121 train loss:6.278883 +step:122 train loss:6.376953 +step:123 train loss:6.304360 +step:124 train loss:6.280113 +step:125 train loss:6.269013 +step:126 train loss:6.376766 +step:127 train loss:6.277438 +step:128 train loss:6.335906 +step:129 train loss:6.310988 +step:130 train loss:6.340905 +step:131 train loss:6.290368 +step:132 train loss:6.229195 +step:133 train loss:6.279847 +step:134 train loss:6.267677 +step:135 train loss:6.176558 +step:136 train loss:6.236431 +step:137 train loss:6.249403 +step:138 train loss:6.220026 +step:139 train loss:6.274649 +step:140 train loss:6.206347 +step:141 train loss:6.290781 +step:142 train loss:6.242225 +step:143 train loss:6.244734 +step:144 train loss:6.223301 +step:145 train loss:6.148550 +step:146 train loss:6.164919 +step:147 train loss:6.214146 +step:148 train loss:6.224491 +step:149 train loss:6.182794 +step:150 train loss:6.181070 +step:151 train loss:6.104972 +step:152 train loss:6.155746 +step:153 train loss:6.139557 +step:154 train loss:6.203999 +step:155 train loss:6.200946 +step:156 train loss:6.218138 +step:157 train loss:6.134258 +step:158 train loss:6.115865 +step:159 train 
loss:6.144935 +step:160 train loss:6.127116 +step:161 train loss:6.122565 +step:162 train loss:6.094255 +step:163 train loss:6.114045 +step:164 train loss:6.117301 +step:165 train loss:6.135592 +step:166 train loss:6.086641 +step:167 train loss:6.085374 +step:168 train loss:6.065502 +step:169 train loss:6.027975 +step:170 train loss:5.990186 +step:171 train loss:6.121838 +step:172 train loss:6.047549 +step:173 train loss:6.100880 +step:174 train loss:6.097697 +step:175 train loss:6.069516 +step:176 train loss:6.024918 +step:177 train loss:6.065558 +step:178 train loss:6.064000 +step:179 train loss:6.019598 +step:180 train loss:5.994808 +step:181 train loss:6.041445 +step:182 train loss:5.976402 +step:183 train loss:6.055660 +step:184 train loss:6.021160 +step:185 train loss:5.963130 +step:186 train loss:6.088866 +step:187 train loss:6.032166 +step:188 train loss:5.869743 +step:189 train loss:6.026625 +step:190 train loss:6.034358 +step:191 train loss:5.999349 +step:192 train loss:5.905367 +step:193 train loss:6.062427 +step:194 train loss:6.077326 +step:195 train loss:6.064542 +step:196 train loss:6.031878 +step:197 train loss:6.029975 +step:198 train loss:5.979321 +step:199 train loss:6.047409 +step:200 train loss:6.087985 +step:201 train loss:6.016054 +step:202 train loss:6.014319 +step:203 train loss:5.984169 +step:204 train loss:5.997531 +step:205 train loss:5.870856 +step:206 train loss:5.986119 +step:207 train loss:5.972770 +step:208 train loss:5.908108 +step:209 train loss:5.891585 +step:210 train loss:5.903858 +step:211 train loss:5.957802 +step:212 train loss:5.921916 +step:213 train loss:5.935517 +step:214 train loss:5.919603 +step:215 train loss:5.933754 +step:216 train loss:5.882710 +step:217 train loss:5.900315 +step:218 train loss:5.876022 +step:219 train loss:5.881038 +step:220 train loss:5.917609 +step:221 train loss:5.896703 +step:222 train loss:5.935719 +step:223 train loss:5.969995 +step:224 train loss:5.941329 +step:225 train loss:5.868382 +step:226 train loss:5.887243 +step:227 train loss:5.934790 +step:228 train loss:5.898477 +step:229 train loss:5.954842 +step:230 train loss:5.832457 +step:231 train loss:5.886694 +step:232 train loss:5.873001 +step:233 train loss:5.852224 +step:234 train loss:5.852778 +step:235 train loss:5.934103 +step:236 train loss:5.881671 +step:237 train loss:5.917928 +step:238 train loss:5.899329 +step:239 train loss:5.815433 +step:240 train loss:5.877905 +step:241 train loss:5.919575 +step:242 train loss:5.901635 +step:243 train loss:5.814071 +step:244 train loss:5.831482 +step:245 train loss:5.810296 +step:246 train loss:5.814867 +step:247 train loss:5.811561 +step:248 train loss:5.768804 +step:249 train loss:5.817649 +step:250 validation loss:5.810755 +step:250 train loss:5.790125 +step:251 train loss:5.833077 +step:252 train loss:5.793602 +step:253 train loss:5.793323 +step:254 train loss:5.761998 +step:255 train loss:5.800498 +step:256 train loss:5.796034 +step:257 train loss:5.838646 +step:258 train loss:5.747231 +step:259 train loss:5.759666 +step:260 train loss:5.733988 +step:261 train loss:5.741736 +step:262 train loss:5.822738 +step:263 train loss:5.776392 +step:264 train loss:5.738123 +step:265 train loss:5.749291 +step:266 train loss:5.725163 +step:267 train loss:5.755210 +step:268 train loss:5.710494 +step:269 train loss:5.741302 +step:270 train loss:5.757668 +step:271 train loss:5.773403 +step:272 train loss:5.731441 +step:273 train loss:5.805814 +step:274 train loss:5.710169 +step:275 train loss:5.756882 +step:276 train 
loss:5.725377 +step:277 train loss:5.732564 +step:278 train loss:5.718197 +step:279 train loss:5.703788 +step:280 train loss:5.786093 +step:281 train loss:5.860424 +step:282 train loss:5.728027 +step:283 train loss:5.741398 +step:284 train loss:5.700935 +step:285 train loss:5.765494 +step:286 train loss:5.729885 +step:287 train loss:5.708064 +step:288 train loss:5.684672 +step:289 train loss:5.693542 +step:290 train loss:5.750977 +step:291 train loss:5.675463 +step:292 train loss:5.734602 +step:293 train loss:5.655217 +step:294 train loss:5.780127 +step:295 train loss:5.687173 +step:296 train loss:5.732541 +step:297 train loss:5.772328 +step:298 train loss:5.667428 +step:299 train loss:5.724454 +step:300 train loss:5.647436 +step:301 train loss:5.676558 +step:302 train loss:5.647529 +step:303 train loss:5.651811 +step:304 train loss:5.678961 +step:305 train loss:5.612137 +step:306 train loss:5.636944 +step:307 train loss:5.652990 +step:308 train loss:5.578105 +step:309 train loss:5.720108 +step:310 train loss:5.687404 +step:311 train loss:5.672039 +step:312 train loss:5.654953 +step:313 train loss:5.685105 +step:314 train loss:5.680318 +step:315 train loss:5.628385 +step:316 train loss:5.634178 +step:317 train loss:5.600155 +step:318 train loss:5.591046 +step:319 train loss:5.670577 +step:320 train loss:5.575229 +step:321 train loss:5.632418 +step:322 train loss:5.625679 +step:323 train loss:5.679698 +step:324 train loss:5.618536 +step:325 train loss:5.643080 +step:326 train loss:5.641556 +step:327 train loss:5.638394 +step:328 train loss:5.610439 +step:329 train loss:5.631624 +step:330 train loss:5.571867 +step:331 train loss:5.604932 +step:332 train loss:5.594631 +step:333 train loss:5.539988 +step:334 train loss:5.646944 +step:335 train loss:5.691231 +step:336 train loss:5.803287 +step:337 train loss:5.699787 +step:338 train loss:5.614584 +step:339 train loss:5.567646 +step:340 train loss:5.573284 +step:341 train loss:5.553538 +step:342 train loss:5.618171 +step:343 train loss:5.594095 +step:344 train loss:5.551575 +step:345 train loss:5.530881 +step:346 train loss:5.578556 +step:347 train loss:5.538233 +step:348 train loss:5.557038 +step:349 train loss:5.491550 +step:350 train loss:5.542922 +step:351 train loss:5.585795 +step:352 train loss:5.553412 +step:353 train loss:5.574202 +step:354 train loss:5.516861 +step:355 train loss:5.554150 +step:356 train loss:5.516304 +step:357 train loss:5.584364 +step:358 train loss:5.609398 +step:359 train loss:5.449830 +step:360 train loss:5.574351 +step:361 train loss:5.574326 +step:362 train loss:5.594142 +step:363 train loss:5.527474 +step:364 train loss:5.649735 +step:365 train loss:5.577234 +step:366 train loss:5.550532 +step:367 train loss:5.591434 +step:368 train loss:5.560874 +step:369 train loss:5.550860 +step:370 train loss:5.594034 +step:371 train loss:5.514977 +step:372 train loss:5.588101 +step:373 train loss:5.528204 +step:374 train loss:5.509901 +step:375 train loss:5.536131 +step:376 train loss:5.534967 +step:377 train loss:5.451831 +step:378 train loss:5.527595 +step:379 train loss:5.583170 +step:380 train loss:5.490435 +step:381 train loss:5.538197 +step:382 train loss:5.547368 +step:383 train loss:5.505332 +step:384 train loss:5.479965 +step:385 train loss:5.467783 +step:386 train loss:5.499455 +step:387 train loss:5.499063 +step:388 train loss:5.469858 +step:389 train loss:5.469575 +step:390 train loss:5.445452 +step:391 train loss:5.457922 +step:392 train loss:5.439508 +step:393 train loss:5.425576 +step:394 train loss:5.473428 
+step:395 train loss:5.411146 +step:396 train loss:5.376257 +step:397 train loss:5.449185 +step:398 train loss:5.445551 +step:399 train loss:5.463078 +step:400 train loss:5.412097 +step:401 train loss:5.483312 +step:402 train loss:5.442691 +step:403 train loss:5.435839 +step:404 train loss:5.412139 +step:405 train loss:5.415473 +step:406 train loss:5.461629 +step:407 train loss:5.429432 +step:408 train loss:5.506083 +step:409 train loss:5.413658 +step:410 train loss:5.389379 +step:411 train loss:5.365496 +step:412 train loss:5.466796 +step:413 train loss:5.375190 +step:414 train loss:5.448098 +step:415 train loss:5.400525 +step:416 train loss:5.398045 +step:417 train loss:5.431848 +step:418 train loss:5.383291 +step:419 train loss:5.383182 +step:420 train loss:5.358929 +step:421 train loss:5.348909 +step:422 train loss:5.355558 +step:423 train loss:5.374565 +step:424 train loss:5.337897 +step:425 train loss:5.416158 +step:426 train loss:5.425659 +step:427 train loss:5.370039 +step:428 train loss:5.400058 +step:429 train loss:5.310357 +step:430 train loss:5.362731 +step:431 train loss:5.409413 +step:432 train loss:5.424058 +step:433 train loss:5.400771 +step:434 train loss:5.364962 +step:435 train loss:5.418321 +step:436 train loss:5.467330 +step:437 train loss:5.400142 +step:438 train loss:5.360075 +step:439 train loss:5.342813 +step:440 train loss:5.393437 +step:441 train loss:5.339600 +step:442 train loss:5.343834 +step:443 train loss:5.381919 +step:444 train loss:5.420605 +step:445 train loss:5.454478 +step:446 train loss:5.368983 +step:447 train loss:5.375202 +step:448 train loss:5.432354 +step:449 train loss:5.379649 +step:450 train loss:5.368140 +step:451 train loss:5.355287 +step:452 train loss:5.432992 +step:453 train loss:5.370494 +step:454 train loss:5.317888 +step:455 train loss:5.393771 +step:456 train loss:5.352083 +step:457 train loss:5.328762 +step:458 train loss:5.348899 +step:459 train loss:5.318800 +step:460 train loss:5.404638 +step:461 train loss:5.344795 +step:462 train loss:5.236034 +step:463 train loss:5.285214 +step:464 train loss:5.334065 +step:465 train loss:5.288128 +step:466 train loss:5.307633 +step:467 train loss:5.265182 +step:468 train loss:5.326711 +step:469 train loss:5.286676 +step:470 train loss:5.265024 +step:471 train loss:5.358934 +step:472 train loss:5.250060 +step:473 train loss:5.311206 +step:474 train loss:5.287158 +step:475 train loss:5.303382 +step:476 train loss:5.268580 +step:477 train loss:5.209614 +step:478 train loss:5.222921 +step:479 train loss:5.201045 +step:480 train loss:5.232271 +step:481 train loss:5.256711 +step:482 train loss:5.190509 +step:483 train loss:5.257506 +step:484 train loss:5.217645 +step:485 train loss:5.201994 +step:486 train loss:5.258691 +step:487 train loss:5.240458 +step:488 train loss:5.231688 +step:489 train loss:5.226741 +step:490 train loss:5.199256 +step:491 train loss:5.225206 +step:492 train loss:5.240093 +step:493 train loss:5.234263 +step:494 train loss:5.244333 +step:495 train loss:5.208176 +step:496 train loss:5.315846 +step:497 train loss:5.168325 +step:498 train loss:5.288449 +step:499 train loss:5.252139 +step:500 validation loss:5.230519 total_sharp:2.0502e-02 L1_sharp:7.7466e-02 L2_sharp:5.2369e-03 L3_sharp:8.2069e-03 L4_sharp:8.1756e-03 L5_sharp:4.3625e-03 L6_sharp:2.5519e-03 L7_sharp:3.6904e-03 L8_sharp:3.9521e-03 L9_sharp:1.8068e-03 L10_sharp:1.8936e-03 L11_sharp:2.4095e-03 L12_sharp:1.4148e-02 total_fnorm:2.7389e+00 total_l1_linf:2.3812e+04 total_spectral:2.7389e+00 L1_fnorm:5.0765e-01 
L2_fnorm:4.9405e-01 L3_fnorm:4.9562e-01 L4_fnorm:5.3563e-01 L5_fnorm:5.4970e-01 L6_fnorm:5.9071e-01 L7_fnorm:6.0984e-01 L8_fnorm:6.3277e-01 L9_fnorm:6.3855e-01 L10_fnorm:6.3676e-01 L11_fnorm:6.2474e-01 L12_fnorm:5.8206e-01 L1_l1linf:8.8273e-01 L2_l1linf:6.1975e-01 L3_l1linf:6.3323e-01 L4_l1linf:7.8171e-01 L5_l1linf:6.9985e-01 L6_l1linf:7.0344e-01 L7_l1linf:7.2931e-01 L8_l1linf:6.7095e-01 L9_l1linf:6.9191e-01 L10_l1linf:7.0956e-01 L11_l1linf:7.0507e-01 L12_l1linf:7.9206e-01 L1_spectral:1.3260e-01 L2_spectral:1.0921e-01 L3_spectral:1.1172e-01 L4_spectral:1.2059e-01 L5_spectral:1.2171e-01 L6_spectral:1.3157e-01 L7_spectral:1.2159e-01 L8_spectral:1.1920e-01 L9_spectral:1.1709e-01 L10_spectral:1.2150e-01 L11_spectral:1.2959e-01 L12_spectral:1.9427e-01 ip_v_neg_g:1.3075e-01 cos_v_neg_g:5.1687e-02 v_norm:2.7389e+00 g_norm:9.2363e-01 hv_norm:8.2561e-01 cos_v_hv:6.8013e-02 hg_norm:4.1336e+00 cos_g_hg:6.5082e-01 v_par:7.3825e-03 v_perp:2.7389e+00 L1_cos_v_neg_g:1.1428e-01 L1_v_norm:5.0765e-01 L2_cos_v_neg_g:1.1332e-01 L2_v_norm:4.9405e-01 L3_cos_v_neg_g:1.3670e-01 L3_v_norm:4.9562e-01 L4_cos_v_neg_g:1.5859e-01 L4_v_norm:5.3563e-01 L5_cos_v_neg_g:1.2764e-01 L5_v_norm:5.4970e-01 L6_cos_v_neg_g:1.0413e-01 L6_v_norm:5.9071e-01 L7_cos_v_neg_g:1.1093e-01 L7_v_norm:6.0984e-01 L8_cos_v_neg_g:1.0851e-01 L8_v_norm:6.3277e-01 L9_cos_v_neg_g:7.0717e-02 L9_v_norm:6.3855e-01 L10_cos_v_neg_g:7.5835e-02 L10_v_norm:6.3676e-01 L11_cos_v_neg_g:8.5166e-02 L11_v_norm:6.2474e-01 L12_cos_v_neg_g:1.4761e-01 L12_v_norm:5.8206e-01 +step:500 train loss:5.250592 +step:501 train loss:5.222590 +step:502 train loss:5.266776 +step:503 train loss:5.181409 +step:504 train loss:5.275558 +step:505 train loss:5.202726 +step:506 train loss:5.196493 +step:507 train loss:5.213369 +step:508 train loss:5.239515 +step:509 train loss:5.233685 +step:510 train loss:5.158216 +step:511 train loss:5.147589 +step:512 train loss:5.148785 +step:513 train loss:5.169245 +step:514 train loss:5.233867 +step:515 train loss:5.172553 +step:516 train loss:5.236125 +step:517 train loss:5.149700 +step:518 train loss:5.156024 +step:519 train loss:5.231196 +step:520 train loss:5.190290 +step:521 train loss:5.166346 +step:522 train loss:5.182561 +step:523 train loss:5.195178 +step:524 train loss:5.146509 +step:525 train loss:5.138925 +step:526 train loss:5.154562 +step:527 train loss:5.127354 +step:528 train loss:5.125896 +step:529 train loss:5.145382 +step:530 train loss:5.093542 +step:531 train loss:5.131530 +step:532 train loss:5.089110 +step:533 train loss:5.079103 +step:534 train loss:5.143740 +step:535 train loss:5.135989 +step:536 train loss:5.202721 +step:537 train loss:5.081851 +step:538 train loss:5.053304 +step:539 train loss:5.132488 +step:540 train loss:5.156939 +step:541 train loss:5.076462 +step:542 train loss:5.104811 +step:543 train loss:5.112069 +step:544 train loss:5.098517 +step:545 train loss:5.088251 +step:546 train loss:5.049500 +step:547 train loss:5.066050 +step:548 train loss:5.013576 +step:549 train loss:5.087170 +step:550 train loss:5.037954 +step:551 train loss:5.055431 +step:552 train loss:5.162861 +step:553 train loss:5.105296 +step:554 train loss:5.039038 +step:555 train loss:5.105832 +step:556 train loss:5.045416 +step:557 train loss:5.035886 +step:558 train loss:4.988213 +step:559 train loss:5.050320 +step:560 train loss:5.106804 +step:561 train loss:4.959889 +step:562 train loss:4.946361 +step:563 train loss:5.036283 +step:564 train loss:5.021594 +step:565 train loss:5.014057 +step:566 train loss:5.036786 +step:567 train 
loss:5.018776 +step:568 train loss:5.068439 +step:569 train loss:5.062913 +step:570 train loss:4.972636 +step:571 train loss:5.018340 +step:572 train loss:5.009947 +step:573 train loss:5.024123 +step:574 train loss:5.066474 +step:575 train loss:5.033575 +step:576 train loss:5.037955 +step:577 train loss:5.070732 +step:578 train loss:5.048657 +step:579 train loss:5.085503 +step:580 train loss:5.008010 +step:581 train loss:5.056994 +step:582 train loss:5.011744 +step:583 train loss:5.052783 +step:584 train loss:5.026164 +step:585 train loss:4.993561 +step:586 train loss:5.013734 +step:587 train loss:5.070577 +step:588 train loss:4.968279 +step:589 train loss:5.023757 +step:590 train loss:5.035383 +step:591 train loss:4.950120 +step:592 train loss:4.945540 +step:593 train loss:4.994516 +step:594 train loss:4.987573 +step:595 train loss:5.111268 +step:596 train loss:5.071175 +step:597 train loss:5.085841 +step:598 train loss:5.042379 +step:599 train loss:5.040344 +step:600 train loss:5.006787 +step:601 train loss:4.990574 +step:602 train loss:4.988104 +step:603 train loss:5.051205 +step:604 train loss:5.047314 +step:605 train loss:5.067859 +step:606 train loss:5.021136 +step:607 train loss:5.001752 +step:608 train loss:4.994034 +step:609 train loss:4.955187 +step:610 train loss:4.951557 +step:611 train loss:4.962448 +step:612 train loss:4.990892 +step:613 train loss:4.902808 +step:614 train loss:4.951574 +step:615 train loss:5.003495 +step:616 train loss:4.921867 +step:617 train loss:4.944760 +step:618 train loss:4.898752 +step:619 train loss:4.927574 +step:620 train loss:4.958549 +step:621 train loss:4.865811 +step:622 train loss:4.948164 +step:623 train loss:4.933179 +step:624 train loss:4.909743 +step:625 train loss:4.911787 +step:626 train loss:4.904246 +step:627 train loss:4.898985 +step:628 train loss:4.891431 +step:629 train loss:4.834744 +step:630 train loss:4.878701 +step:631 train loss:4.863291 +step:632 train loss:4.866696 +step:633 train loss:4.895019 +step:634 train loss:4.868087 +step:635 train loss:4.831556 +step:636 train loss:4.928608 +step:637 train loss:4.819298 +step:638 train loss:4.763968 +step:639 train loss:4.901714 +step:640 train loss:4.846785 +step:641 train loss:4.867277 +step:642 train loss:4.899291 +step:643 train loss:4.793333 +step:644 train loss:4.877830 +step:645 train loss:4.831160 +step:646 train loss:4.825480 +step:647 train loss:4.841915 +step:648 train loss:4.926069 +step:649 train loss:4.834291 +step:650 train loss:4.885047 +step:651 train loss:4.790918 +step:652 train loss:4.805493 +step:653 train loss:4.790421 +step:654 train loss:4.799523 +step:655 train loss:4.832942 +step:656 train loss:4.772868 +step:657 train loss:4.835265 +step:658 train loss:4.795734 +step:659 train loss:4.886477 +step:660 train loss:4.862041 +step:661 train loss:4.906751 +step:662 train loss:4.911050 +step:663 train loss:4.927748 +step:664 train loss:4.818526 +step:665 train loss:4.837053 +step:666 train loss:4.842747 +step:667 train loss:4.946206 +step:668 train loss:4.951428 +step:669 train loss:4.981647 +step:670 train loss:5.039487 +step:671 train loss:4.999128 +step:672 train loss:4.967495 +step:673 train loss:5.063915 +step:674 train loss:5.084729 +step:675 train loss:4.978192 +step:676 train loss:5.056114 +step:677 train loss:5.009242 +step:678 train loss:5.054073 +step:679 train loss:5.072432 +step:680 train loss:5.027941 +step:681 train loss:5.011661 +step:682 train loss:4.912652 +step:683 train loss:4.961611 +step:684 train loss:4.979241 +step:685 train loss:4.900068 
+step:686 train loss:5.002920 +step:687 train loss:4.927929 +step:688 train loss:4.852573 +step:689 train loss:4.897622 +step:690 train loss:4.874316 +step:691 train loss:4.885255 +step:692 train loss:4.896122 +step:693 train loss:4.901367 +step:694 train loss:4.868074 +step:695 train loss:4.812234 +step:696 train loss:4.749589 +step:697 train loss:4.876592 +step:698 train loss:4.784833 +step:699 train loss:4.784266 +step:700 train loss:4.842758 +step:701 train loss:4.746770 +step:702 train loss:4.807847 +step:703 train loss:4.748448 +step:704 train loss:4.698036 +step:705 train loss:4.761792 +step:706 train loss:4.629310 +step:707 train loss:4.697603 +step:708 train loss:4.800153 +step:709 train loss:4.761391 +step:710 train loss:4.729675 +step:711 train loss:4.766219 +step:712 train loss:4.724990 +step:713 train loss:4.683022 +step:714 train loss:4.769864 +step:715 train loss:4.706134 +step:716 train loss:4.856500 +step:717 train loss:4.755497 +step:718 train loss:4.825045 +step:719 train loss:4.792103 +step:720 train loss:4.745738 +step:721 train loss:4.793672 +step:722 train loss:4.775611 +step:723 train loss:4.825406 +step:724 train loss:4.782945 +step:725 train loss:4.736733 +step:726 train loss:4.723875 +step:727 train loss:4.751718 +step:728 train loss:4.728346 +step:729 train loss:4.689254 +step:730 train loss:4.769284 +step:731 train loss:4.795246 +step:732 train loss:4.745673 +step:733 train loss:4.715448 +step:734 train loss:4.711861 +step:735 train loss:4.793588 +step:736 train loss:4.710531 +step:737 train loss:4.699498 +step:738 train loss:4.723826 +step:739 train loss:4.659808 +step:740 train loss:4.680777 +step:741 train loss:4.754626 +step:742 train loss:4.645171 +step:743 train loss:4.631730 +step:744 train loss:4.670207 +step:745 train loss:4.585304 +step:746 train loss:4.622705 +step:747 train loss:4.641953 +step:748 train loss:4.642287 +step:749 train loss:4.687901 +step:750 validation loss:4.638661 +step:750 train loss:4.624871 +step:751 train loss:4.648630 +step:752 train loss:4.590408 +step:753 train loss:4.640465 +step:754 train loss:4.629329 +step:755 train loss:4.703440 +step:756 train loss:4.694551 +step:757 train loss:4.756802 +step:758 train loss:4.646589 +step:759 train loss:4.656994 +step:760 train loss:4.624955 +step:761 train loss:4.664695 +step:762 train loss:4.616491 +step:763 train loss:4.605821 +step:764 train loss:4.586308 +step:765 train loss:4.579275 +step:766 train loss:4.651067 +step:767 train loss:4.767448 +step:768 train loss:4.593493 +step:769 train loss:4.620635 +step:770 train loss:4.661006 +step:771 train loss:4.771613 +step:772 train loss:4.663501 +step:773 train loss:4.613056 +step:774 train loss:4.689849 +step:775 train loss:4.727802 +step:776 train loss:4.763929 +step:777 train loss:4.713812 +step:778 train loss:4.714727 +step:779 train loss:4.751099 +step:780 train loss:4.804365 +step:781 train loss:4.802362 +step:782 train loss:4.903420 +step:783 train loss:4.861446 +step:784 train loss:4.803802 +step:785 train loss:4.801210 +step:786 train loss:4.950607 +step:787 train loss:4.765506 +step:788 train loss:4.810287 +step:789 train loss:4.799745 +step:790 train loss:4.740905 +step:791 train loss:4.827881 +step:792 train loss:4.819288 +step:793 train loss:4.770048 +step:794 train loss:4.726625 +step:795 train loss:4.674864 +step:796 train loss:4.936438 +step:797 train loss:4.684178 +step:798 train loss:4.662197 +step:799 train loss:4.663381 +step:800 train loss:4.725871 +step:801 train loss:4.644974 +step:802 train loss:4.770296 +step:803 
train loss:4.645537 +step:804 train loss:4.590328 +step:805 train loss:4.640430 +step:806 train loss:4.553115 +step:807 train loss:4.599732 +step:808 train loss:4.589490 +step:809 train loss:4.549895 +step:810 train loss:4.526771 +step:811 train loss:4.613055 +step:812 train loss:4.565486 +step:813 train loss:4.576552 +step:814 train loss:4.640642 +step:815 train loss:4.608007 +step:816 train loss:4.524868 +step:817 train loss:4.561476 +step:818 train loss:4.533061 +step:819 train loss:4.522392 +step:820 train loss:4.517893 +step:821 train loss:4.469458 +step:822 train loss:4.463058 +step:823 train loss:4.541051 +step:824 train loss:4.448009 +step:825 train loss:4.425377 +step:826 train loss:4.495355 +step:827 train loss:4.399318 +step:828 train loss:4.474580 +step:829 train loss:4.481857 +step:830 train loss:4.491875 +step:831 train loss:4.532399 +step:832 train loss:4.584322 +step:833 train loss:4.531021 +step:834 train loss:4.517519 +step:835 train loss:4.494492 +step:836 train loss:4.479465 +step:837 train loss:4.470005 +step:838 train loss:4.470666 +step:839 train loss:4.480838 +step:840 train loss:4.533361 +step:841 train loss:4.518507 +step:842 train loss:4.500607 +step:843 train loss:4.500385 +step:844 train loss:4.458448 +step:845 train loss:4.433938 +step:846 train loss:4.531973 +step:847 train loss:4.501424 +step:848 train loss:4.456655 +step:849 train loss:4.496637 +step:850 train loss:4.497059 +step:851 train loss:4.453824 +step:852 train loss:4.530969 +step:853 train loss:4.421032 +step:854 train loss:4.457817 +step:855 train loss:4.458828 +step:856 train loss:4.423376 +step:857 train loss:4.464141 +step:858 train loss:4.498938 +step:859 train loss:4.412148 +step:860 train loss:4.437059 +step:861 train loss:4.475611 +step:862 train loss:4.418092 +step:863 train loss:4.433698 +step:864 train loss:4.426881 +step:865 train loss:4.440550 +step:866 train loss:4.455935 +step:867 train loss:4.564518 +step:868 train loss:4.424644 +step:869 train loss:4.448249 +step:870 train loss:4.386130 +step:871 train loss:4.370397 +step:872 train loss:4.442861 +step:873 train loss:4.415026 +step:874 train loss:4.427337 +step:875 train loss:4.339863 +step:876 train loss:4.430387 +step:877 train loss:4.367102 +step:878 train loss:4.470190 +step:879 train loss:4.370391 +step:880 train loss:4.490637 +step:881 train loss:4.415884 +step:882 train loss:4.378657 +step:883 train loss:4.420841 +step:884 train loss:4.444286 +step:885 train loss:4.384192 +step:886 train loss:4.390682 +step:887 train loss:4.415733 +step:888 train loss:4.513607 +step:889 train loss:4.445450 +step:890 train loss:4.389442 +step:891 train loss:4.343553 +step:892 train loss:4.331251 +step:893 train loss:4.403903 +step:894 train loss:4.379870 +step:895 train loss:4.362323 +step:896 train loss:4.443579 +step:897 train loss:4.389198 +step:898 train loss:4.437652 +step:899 train loss:4.446354 +step:900 train loss:4.475081 +step:901 train loss:4.396070 +step:902 train loss:4.429156 +step:903 train loss:4.529366 +step:904 train loss:4.523465 +step:905 train loss:4.408916 +step:906 train loss:4.432376 +step:907 train loss:4.477365 +step:908 train loss:4.464559 +step:909 train loss:4.402447 +step:910 train loss:4.439576 +step:911 train loss:4.553710 +step:912 train loss:4.353760 +step:913 train loss:4.412823 +step:914 train loss:4.366932 +step:915 train loss:4.390799 +step:916 train loss:4.449664 +step:917 train loss:4.400250 +step:918 train loss:4.476900 +step:919 train loss:4.544336 +step:920 train loss:4.293740 +step:921 train 
loss:4.414799 +step:922 train loss:4.382856 +step:923 train loss:4.298038 +step:924 train loss:4.346598 +step:925 train loss:4.304332 +step:926 train loss:4.399347 +step:927 train loss:4.302711 +step:928 train loss:4.381364 +step:929 train loss:4.352571 +step:930 train loss:4.349249 +step:931 train loss:4.384118 +step:932 train loss:4.326326 +step:933 train loss:4.363492 +step:934 train loss:4.399364 +step:935 train loss:4.388223 +step:936 train loss:4.370494 +step:937 train loss:4.376101 +step:938 train loss:4.363299 +step:939 train loss:4.269707 +step:940 train loss:4.373449 +step:941 train loss:4.318324 +step:942 train loss:4.299682 +step:943 train loss:4.401563 +step:944 train loss:4.354052 +step:945 train loss:4.355032 +step:946 train loss:4.376178 +step:947 train loss:4.521973 +step:948 train loss:4.324731 +step:949 train loss:4.372287 +step:950 train loss:4.298701 +step:951 train loss:4.333260 +step:952 train loss:4.390503 +step:953 train loss:4.325754 +step:954 train loss:4.359919 +step:955 train loss:4.295618 +step:956 train loss:4.313765 +step:957 train loss:4.319384 +step:958 train loss:4.395310 +step:959 train loss:4.325091 +step:960 train loss:4.418928 +step:961 train loss:4.366197 +step:962 train loss:4.318832 +step:963 train loss:4.299544 +step:964 train loss:4.333950 +step:965 train loss:4.258055 +step:966 train loss:4.270225 +step:967 train loss:4.327374 +step:968 train loss:4.326823 +step:969 train loss:4.283700 +step:970 train loss:4.336998 +step:971 train loss:4.329465 +step:972 train loss:4.255241 +step:973 train loss:4.368496 +step:974 train loss:4.300104 +step:975 train loss:4.377356 +step:976 train loss:4.340952 +step:977 train loss:4.345865 +step:978 train loss:4.347281 +step:979 train loss:4.333683 +step:980 train loss:4.325422 +step:981 train loss:4.297543 +step:982 train loss:4.310395 +step:983 train loss:4.312705 +step:984 train loss:4.339013 +step:985 train loss:4.309285 +step:986 train loss:4.325251 +step:987 train loss:4.356156 +step:988 train loss:4.332683 +step:989 train loss:4.306216 +step:990 train loss:4.296553 +step:991 train loss:4.221723 +step:992 train loss:4.289304 +step:993 train loss:4.314501 +step:994 train loss:4.247907 +step:995 train loss:4.267259 +step:996 train loss:4.299682 +step:997 train loss:4.252841 +step:998 train loss:4.251196 +step:999 train loss:4.296463 +step:1000 validation loss:4.228085 total_sharp:5.6313e-03 L1_sharp:1.2514e-02 L2_sharp:5.2453e-04 L3_sharp:1.0247e-03 L4_sharp:1.0213e-03 L5_sharp:7.5279e-04 L6_sharp:6.9214e-04 L7_sharp:9.6725e-04 L8_sharp:1.4284e-03 L9_sharp:1.3078e-03 L10_sharp:1.2432e-03 L11_sharp:1.4872e-03 L12_sharp:1.4911e-03 total_fnorm:3.8035e+00 total_l1_linf:3.2908e+04 total_spectral:3.8035e+00 L1_fnorm:6.6907e-01 L2_fnorm:7.3492e-01 L3_fnorm:7.5300e-01 L4_fnorm:8.0983e-01 L5_fnorm:8.3156e-01 L6_fnorm:8.4607e-01 L7_fnorm:8.3647e-01 L8_fnorm:8.4640e-01 L9_fnorm:8.5753e-01 L10_fnorm:8.5169e-01 L11_fnorm:8.5216e-01 L12_fnorm:8.2409e-01 L1_l1linf:8.4809e-01 L2_l1linf:8.4505e-01 L3_l1linf:8.7243e-01 L4_l1linf:9.0625e-01 L5_l1linf:9.0041e-01 L6_l1linf:9.1409e-01 L7_l1linf:9.1506e-01 L8_l1linf:9.2261e-01 L9_l1linf:9.5804e-01 L10_l1linf:9.6623e-01 L11_l1linf:9.5172e-01 L12_l1linf:9.5048e-01 L1_spectral:1.3357e-01 L2_spectral:1.1632e-01 L3_spectral:1.2500e-01 L4_spectral:1.3000e-01 L5_spectral:1.2603e-01 L6_spectral:1.1974e-01 L7_spectral:1.1048e-01 L8_spectral:1.0821e-01 L9_spectral:1.0743e-01 L10_spectral:1.1345e-01 L11_spectral:1.2142e-01 L12_spectral:1.3379e-01 ip_v_neg_g:4.6311e-02 cos_v_neg_g:2.0725e-02 
v_norm:3.8035e+00 g_norm:5.8751e-01 hv_norm:3.6520e-01 cos_v_hv:5.8649e-02 hg_norm:9.0951e-01 cos_g_hg:4.7832e-01 v_par:3.9220e-03 v_perp:3.8035e+00 L1_cos_v_neg_g:4.7234e-02 L1_v_norm:6.6907e-01 L2_cos_v_neg_g:2.9570e-02 L2_v_norm:7.3492e-01 L3_cos_v_neg_g:3.2615e-02 L3_v_norm:7.5300e-01 L4_cos_v_neg_g:3.7982e-02 L4_v_norm:8.0983e-01 L5_cos_v_neg_g:3.4480e-02 L5_v_norm:8.3156e-01 L6_cos_v_neg_g:3.1563e-02 L6_v_norm:8.4607e-01 L7_cos_v_neg_g:3.2957e-02 L7_v_norm:8.3647e-01 L8_cos_v_neg_g:3.4800e-02 L8_v_norm:8.4640e-01 L9_cos_v_neg_g:3.2830e-02 L9_v_norm:8.5753e-01 L10_cos_v_neg_g:3.6120e-02 L10_v_norm:8.5169e-01 L11_cos_v_neg_g:3.5578e-02 L11_v_norm:8.5216e-01 L12_cos_v_neg_g:4.1530e-02 L12_v_norm:8.2409e-01 +step:1000 train loss:4.303055 +step:1001 train loss:4.300827 +step:1002 train loss:4.292967 +step:1003 train loss:4.263546 +step:1004 train loss:4.235258 +step:1005 train loss:4.247956 +step:1006 train loss:4.333200 +step:1007 train loss:4.275429 +step:1008 train loss:4.267232 +step:1009 train loss:4.330419 +step:1010 train loss:4.313993 +step:1011 train loss:4.337312 +step:1012 train loss:4.283372 +step:1013 train loss:4.260380 +step:1014 train loss:4.269970 +step:1015 train loss:4.304065 +step:1016 train loss:4.316472 +step:1017 train loss:4.253136 +step:1018 train loss:4.311543 +step:1019 train loss:4.265319 +step:1020 train loss:4.266319 +step:1021 train loss:4.390732 +step:1022 train loss:4.295921 +step:1023 train loss:4.301380 +step:1024 train loss:4.409507 +step:1025 train loss:4.390591 +step:1026 train loss:4.355535 +step:1027 train loss:4.393800 +step:1028 train loss:4.390946 +step:1029 train loss:4.317231 +step:1030 train loss:4.385470 +step:1031 train loss:4.376593 +step:1032 train loss:4.325315 +step:1033 train loss:4.277170 +step:1034 train loss:4.337929 +step:1035 train loss:4.334862 +step:1036 train loss:4.244547 +step:1037 train loss:4.308522 +step:1038 train loss:4.330695 +step:1039 train loss:4.466495 +step:1040 train loss:4.290063 +step:1041 train loss:4.271075 +step:1042 train loss:4.283833 +step:1043 train loss:4.284430 +step:1044 train loss:4.267204 +step:1045 train loss:4.275303 +step:1046 train loss:4.213363 +step:1047 train loss:4.244666 +step:1048 train loss:4.235295 +step:1049 train loss:4.290689 +step:1050 train loss:4.251291 +step:1051 train loss:4.220470 +step:1052 train loss:4.326098 +step:1053 train loss:4.220893 +step:1054 train loss:4.211638 +step:1055 train loss:4.273646 +step:1056 train loss:4.221276 +step:1057 train loss:4.121346 +step:1058 train loss:4.221781 +step:1059 train loss:4.209231 +step:1060 train loss:4.204422 +step:1061 train loss:4.255407 +step:1062 train loss:4.211286 +step:1063 train loss:4.217683 +step:1064 train loss:4.209276 +step:1065 train loss:4.220805 +step:1066 train loss:4.204988 +step:1067 train loss:4.238743 +step:1068 train loss:4.195113 +step:1069 train loss:4.216879 +step:1070 train loss:4.227516 +step:1071 train loss:4.246626 +step:1072 train loss:4.265778 +step:1073 train loss:4.178378 +step:1074 train loss:4.192444 +step:1075 train loss:4.192610 +step:1076 train loss:4.267300 +step:1077 train loss:4.191450 +step:1078 train loss:4.245431 +step:1079 train loss:4.281298 +step:1080 train loss:4.156406 +step:1081 train loss:4.228666 +step:1082 train loss:4.220148 +step:1083 train loss:4.181803 +step:1084 train loss:4.162189 +step:1085 train loss:4.221616 +step:1086 train loss:4.207345 +step:1087 train loss:4.196108 +step:1088 train loss:4.197986 +step:1089 train loss:4.213158 +step:1090 train loss:4.169047 +step:1091 
train loss:4.169036 +step:1092 train loss:4.274781 +step:1093 train loss:4.156860 +step:1094 train loss:4.217372 +step:1095 train loss:4.257688 +step:1096 train loss:4.190861 +step:1097 train loss:4.198517 +step:1098 train loss:4.167333 +step:1099 train loss:4.213205 +step:1100 train loss:4.256576 +step:1101 train loss:4.246642 +step:1102 train loss:4.259307 +step:1103 train loss:4.177164 +step:1104 train loss:4.210115 +step:1105 train loss:4.269630 +step:1106 train loss:4.202386 +step:1107 train loss:4.325224 +step:1108 train loss:4.265812 +step:1109 train loss:4.231896 +step:1110 train loss:4.176094 +step:1111 train loss:4.228548 +step:1112 train loss:4.141421 +step:1113 train loss:4.125980 +step:1114 train loss:4.112062 +step:1115 train loss:4.152977 +step:1116 train loss:4.213021 +step:1117 train loss:4.239270 +step:1118 train loss:4.261183 +step:1119 train loss:4.197955 +step:1120 train loss:4.220012 +step:1121 train loss:4.223688 +step:1122 train loss:4.205862 +step:1123 train loss:4.310431 +step:1124 train loss:4.203959 +step:1125 train loss:4.243602 +step:1126 train loss:4.193763 +step:1127 train loss:4.208293 +step:1128 train loss:4.213876 +step:1129 train loss:4.281497 +step:1130 train loss:4.182494 +step:1131 train loss:4.280774 +step:1132 train loss:4.214976 +step:1133 train loss:4.228445 +step:1134 train loss:4.194661 +step:1135 train loss:4.236519 +step:1136 train loss:4.249445 +step:1137 train loss:4.176914 +step:1138 train loss:4.239948 +step:1139 train loss:4.189195 +step:1140 train loss:4.271624 +step:1141 train loss:4.218641 +step:1142 train loss:4.152087 +step:1143 train loss:4.223586 +step:1144 train loss:4.248116 +step:1145 train loss:4.195629 +step:1146 train loss:4.144883 +step:1147 train loss:4.157146 +step:1148 train loss:4.185913 +step:1149 train loss:4.235751 +step:1150 train loss:4.241821 +step:1151 train loss:4.252017 +step:1152 train loss:4.153516 +step:1153 train loss:4.156836 +step:1154 train loss:4.131453 +step:1155 train loss:4.236217 +step:1156 train loss:4.133147 +step:1157 train loss:4.164007 +step:1158 train loss:4.217914 +step:1159 train loss:4.217080 +step:1160 train loss:4.138975 +step:1161 train loss:4.225263 +step:1162 train loss:4.168691 +step:1163 train loss:4.151427 +step:1164 train loss:4.059369 +step:1165 train loss:4.190725 +step:1166 train loss:4.119358 +step:1167 train loss:4.123801 +step:1168 train loss:4.180085 +step:1169 train loss:4.138822 +step:1170 train loss:4.142077 +step:1171 train loss:4.166504 +step:1172 train loss:4.133616 +step:1173 train loss:4.172596 +step:1174 train loss:4.104512 +step:1175 train loss:4.138593 +step:1176 train loss:4.253240 +step:1177 train loss:4.102728 +step:1178 train loss:4.166950 +step:1179 train loss:4.131858 +step:1180 train loss:4.160571 +step:1181 train loss:4.146217 +step:1182 train loss:4.208947 +step:1183 train loss:4.190112 +step:1184 train loss:4.127316 +step:1185 train loss:4.151789 +step:1186 train loss:4.142769 +step:1187 train loss:4.109839 +step:1188 train loss:4.142951 +step:1189 train loss:4.082008 +step:1190 train loss:4.145555 +step:1191 train loss:4.204688 +step:1192 train loss:4.154451 +step:1193 train loss:4.156472 +step:1194 train loss:4.264580 +step:1195 train loss:4.242311 +step:1196 train loss:4.129603 +step:1197 train loss:4.149005 +step:1198 train loss:4.134275 +step:1199 train loss:4.129267 +step:1200 train loss:4.185993 +step:1201 train loss:4.156833 +step:1202 train loss:4.100651 +step:1203 train loss:4.090085 +step:1204 train loss:4.125800 +step:1205 train loss:4.139100 
+step:1206 train loss:4.085705 +step:1207 train loss:4.188605 +step:1208 train loss:4.154917 +step:1209 train loss:4.072700 +step:1210 train loss:4.167105 +step:1211 train loss:4.117985 +step:1212 train loss:4.147224 +step:1213 train loss:4.078473 +step:1214 train loss:4.156664 +step:1215 train loss:4.121728 +step:1216 train loss:4.145341 +step:1217 train loss:4.082124 +step:1218 train loss:4.147369 +step:1219 train loss:4.094682 +step:1220 train loss:4.137196 +step:1221 train loss:4.161003 +step:1222 train loss:4.221521 +step:1223 train loss:4.205450 +step:1224 train loss:4.169680 +step:1225 train loss:4.216955 +step:1226 train loss:4.165437 +step:1227 train loss:4.167517 +step:1228 train loss:4.178829 +step:1229 train loss:4.150732 +step:1230 train loss:4.139310 +step:1231 train loss:4.196273 +step:1232 train loss:4.146266 +step:1233 train loss:4.128576 +step:1234 train loss:4.210926 +step:1235 train loss:4.180667 +step:1236 train loss:4.082146 +step:1237 train loss:4.183983 +step:1238 train loss:4.127344 +step:1239 train loss:4.163779 +step:1240 train loss:4.073267 +step:1241 train loss:4.105514 +step:1242 train loss:4.136306 +step:1243 train loss:4.079381 +step:1244 train loss:4.203960 +step:1245 train loss:4.214002 +step:1246 train loss:4.143027 +step:1247 train loss:4.126929 +step:1248 train loss:4.145381 +step:1249 train loss:4.078106 +step:1250 validation loss:4.072997 +step:1250 train loss:4.087970 +step:1251 train loss:4.160653 +step:1252 train loss:4.108568 +step:1253 train loss:4.065307 +step:1254 train loss:4.095092 +step:1255 train loss:4.085644 +step:1256 train loss:4.131183 +step:1257 train loss:4.108525 +step:1258 train loss:4.161311 +step:1259 train loss:4.144964 +step:1260 train loss:4.045548 +step:1261 train loss:4.282382 +step:1262 train loss:4.132309 +step:1263 train loss:4.088471 +step:1264 train loss:4.097289 +step:1265 train loss:4.147419 +step:1266 train loss:4.092256 +step:1267 train loss:4.104258 +step:1268 train loss:4.115944 +step:1269 train loss:4.099133 +step:1270 train loss:4.028186 +step:1271 train loss:4.034108 +step:1272 train loss:4.064774 +step:1273 train loss:4.119473 +step:1274 train loss:4.085783 +step:1275 train loss:4.110957 +step:1276 train loss:4.112354 +step:1277 train loss:4.119085 +step:1278 train loss:4.056901 +step:1279 train loss:4.066033 +step:1280 train loss:4.083673 +step:1281 train loss:4.133864 +step:1282 train loss:4.065580 +step:1283 train loss:4.142339 +step:1284 train loss:4.080964 +step:1285 train loss:4.130983 +step:1286 train loss:4.030844 +step:1287 train loss:4.075704 +step:1288 train loss:4.101437 +step:1289 train loss:4.163780 +step:1290 train loss:4.112787 +step:1291 train loss:4.076325 +step:1292 train loss:4.059813 +step:1293 train loss:4.052644 +step:1294 train loss:4.102055 +step:1295 train loss:4.086814 +step:1296 train loss:4.126258 +step:1297 train loss:4.086643 +step:1298 train loss:4.099013 +step:1299 train loss:4.136640 +step:1300 train loss:4.058032 +step:1301 train loss:4.105234 +step:1302 train loss:4.065244 +step:1303 train loss:4.105197 +step:1304 train loss:4.130280 +step:1305 train loss:4.103947 +step:1306 train loss:4.099710 +step:1307 train loss:4.080400 +step:1308 train loss:4.038679 +step:1309 train loss:4.051661 +step:1310 train loss:4.043890 +step:1311 train loss:4.053385 +step:1312 train loss:4.130423 +step:1313 train loss:4.048529 +step:1314 train loss:4.056653 +step:1315 train loss:4.094474 +step:1316 train loss:4.066898 +step:1317 train loss:3.960341 +step:1318 train loss:4.114763 +step:1319 
train loss:4.147829 +step:1320 train loss:4.061593 +step:1321 train loss:4.041246 +step:1322 train loss:4.150081 +step:1323 train loss:4.106588 +step:1324 train loss:4.212413 +step:1325 train loss:4.098971 +step:1326 train loss:4.128699 +step:1327 train loss:4.149373 +step:1328 train loss:4.045259 +step:1329 train loss:4.064191 +step:1330 train loss:4.083879 +step:1331 train loss:3.996977 +step:1332 train loss:4.126027 +step:1333 train loss:4.089931 +step:1334 train loss:4.096751 +step:1335 train loss:4.122832 +step:1336 train loss:4.139860 +step:1337 train loss:4.103713 +step:1338 train loss:4.085603 +step:1339 train loss:4.158015 +step:1340 train loss:4.133232 +step:1341 train loss:4.122088 +step:1342 train loss:4.107533 +step:1343 train loss:4.069688 +step:1344 train loss:4.138102 +step:1345 train loss:4.093970 +step:1346 train loss:4.176664 +step:1347 train loss:4.105192 +step:1348 train loss:4.065912 +step:1349 train loss:4.008152 +step:1350 train loss:4.036630 +step:1351 train loss:4.105294 +step:1352 train loss:4.072743 +step:1353 train loss:4.050027 +step:1354 train loss:4.050938 +step:1355 train loss:4.123844 +step:1356 train loss:4.031782 +step:1357 train loss:4.058815 +step:1358 train loss:4.053846 +step:1359 train loss:4.053391 +step:1360 train loss:4.084881 +step:1361 train loss:4.199586 +step:1362 train loss:4.116439 +step:1363 train loss:4.004922 +step:1364 train loss:4.027545 +step:1365 train loss:4.017713 +step:1366 train loss:4.063917 +step:1367 train loss:3.986526 +step:1368 train loss:4.020143 +step:1369 train loss:4.057301 +step:1370 train loss:4.067307 +step:1371 train loss:4.035634 +step:1372 train loss:4.060539 +step:1373 train loss:4.096463 +step:1374 train loss:4.096671 +step:1375 train loss:4.056191 +step:1376 train loss:4.079218 +step:1377 train loss:4.089172 +step:1378 train loss:4.077916 +step:1379 train loss:4.068787 +step:1380 train loss:4.134916 +step:1381 train loss:4.082542 +step:1382 train loss:4.089004 +step:1383 train loss:4.060832 +step:1384 train loss:4.151256 +step:1385 train loss:4.043023 +step:1386 train loss:4.117877 +step:1387 train loss:4.117922 +step:1388 train loss:4.073581 +step:1389 train loss:4.055830 +step:1390 train loss:4.081676 +step:1391 train loss:4.108249 +step:1392 train loss:4.080841 +step:1393 train loss:4.140596 +step:1394 train loss:4.067925 +step:1395 train loss:4.094065 +step:1396 train loss:4.078378 +step:1397 train loss:4.096352 +step:1398 train loss:4.098527 +step:1399 train loss:4.065004 +step:1400 train loss:4.040931 +step:1401 train loss:4.034581 +step:1402 train loss:4.041706 +step:1403 train loss:3.995722 +step:1404 train loss:4.055741 +step:1405 train loss:4.020137 +step:1406 train loss:4.047409 +step:1407 train loss:4.038527 +step:1408 train loss:4.020059 +step:1409 train loss:4.006322 +step:1410 train loss:4.026548 +step:1411 train loss:4.056628 +step:1412 train loss:4.111977 +step:1413 train loss:4.032257 +step:1414 train loss:4.061517 +step:1415 train loss:4.023651 +step:1416 train loss:4.073882 +step:1417 train loss:4.040349 +step:1418 train loss:3.979389 +step:1419 train loss:3.988755 +step:1420 train loss:4.019699 +step:1421 train loss:4.059617 +step:1422 train loss:4.045553 +step:1423 train loss:4.147474 +step:1424 train loss:4.051429 +step:1425 train loss:4.014719 +step:1426 train loss:4.035447 +step:1427 train loss:4.035052 +step:1428 train loss:4.022927 +step:1429 train loss:4.061563 +step:1430 train loss:4.049325 +step:1431 train loss:4.073820 +step:1432 train loss:4.055140 +step:1433 train loss:4.037423 
+step:1434 train loss:4.004152 +step:1435 train loss:3.994441 +step:1436 train loss:4.089041 +step:1437 train loss:4.000073 +step:1438 train loss:3.995553 +step:1439 train loss:3.980984 +step:1440 train loss:4.016055 +step:1441 train loss:4.097799 +step:1442 train loss:4.054744 +step:1443 train loss:3.982151 +step:1444 train loss:3.991131 +step:1445 train loss:3.994780 +step:1446 train loss:4.026922 +step:1447 train loss:4.033761 +step:1448 train loss:4.001386 +step:1449 train loss:4.032039 +step:1450 train loss:4.042782 +step:1451 train loss:3.967480 +step:1452 train loss:4.020994 +step:1453 train loss:4.018829 +step:1454 train loss:4.008386 +step:1455 train loss:3.946966 +step:1456 train loss:4.021232 +step:1457 train loss:3.956353 +step:1458 train loss:4.093218 +step:1459 train loss:4.021437 +step:1460 train loss:3.994342 +step:1461 train loss:4.052132 +step:1462 train loss:4.058827 +step:1463 train loss:4.025589 +step:1464 train loss:4.014018 +step:1465 train loss:3.998583 +step:1466 train loss:3.960622 +step:1467 train loss:4.098285 +step:1468 train loss:3.996973 +step:1469 train loss:4.060807 +step:1470 train loss:4.001757 +step:1471 train loss:3.991416 +step:1472 train loss:3.995834 +step:1473 train loss:3.998472 +step:1474 train loss:3.944477 +step:1475 train loss:4.007472 +step:1476 train loss:4.087626 +step:1477 train loss:4.029638 +step:1478 train loss:3.962697 +step:1479 train loss:3.994024 +step:1480 train loss:3.987252 +step:1481 train loss:3.963156 +step:1482 train loss:4.026147 +step:1483 train loss:4.014530 +step:1484 train loss:4.041184 +step:1485 train loss:4.059980 +step:1486 train loss:3.989171 +step:1487 train loss:3.980589 +step:1488 train loss:3.982047 +step:1489 train loss:3.975615 +step:1490 train loss:4.028368 +step:1491 train loss:4.025628 +step:1492 train loss:4.022773 +step:1493 train loss:3.969403 +step:1494 train loss:4.004013 +step:1495 train loss:3.986199 +step:1496 train loss:3.947889 +step:1497 train loss:4.024657 +step:1498 train loss:3.929235 +step:1499 train loss:3.975580 +step:1500 validation loss:3.948700 total_sharp:4.3574e-03 L1_sharp:7.6396e-03 L2_sharp:7.5275e-04 L3_sharp:2.3592e-03 L4_sharp:6.5934e-04 L5_sharp:5.0709e-04 L6_sharp:6.0000e-04 L7_sharp:5.2611e-04 L8_sharp:1.0391e-03 L9_sharp:1.1264e-03 L10_sharp:1.1166e-03 L11_sharp:1.0843e-03 L12_sharp:1.8480e-03 total_fnorm:4.0914e+00 total_l1_linf:3.5875e+04 total_spectral:4.0914e+00 L1_fnorm:7.8735e-01 L2_fnorm:8.4754e-01 L3_fnorm:8.6727e-01 L4_fnorm:8.8983e-01 L5_fnorm:9.2134e-01 L6_fnorm:9.4363e-01 L7_fnorm:9.4262e-01 L8_fnorm:9.5171e-01 L9_fnorm:9.5131e-01 L10_fnorm:9.4622e-01 L11_fnorm:9.4649e-01 L12_fnorm:9.3125e-01 L1_l1linf:1.1901e+00 L2_l1linf:9.2531e-01 L3_l1linf:9.8707e-01 L4_l1linf:9.4942e-01 L5_l1linf:9.7965e-01 L6_l1linf:1.0008e+00 L7_l1linf:9.9470e-01 L8_l1linf:1.0539e+00 L9_l1linf:1.0696e+00 L10_l1linf:1.0863e+00 L11_l1linf:1.1628e+00 L12_l1linf:1.2284e+00 L1_spectral:1.6282e-01 L2_spectral:1.4073e-01 L3_spectral:1.3871e-01 L4_spectral:1.2773e-01 L5_spectral:1.2449e-01 L6_spectral:1.1266e-01 L7_spectral:1.0736e-01 L8_spectral:1.0907e-01 L9_spectral:1.2304e-01 L10_spectral:1.2744e-01 L11_spectral:1.3764e-01 L12_spectral:1.7028e-01 ip_v_neg_g:3.9373e-02 cos_v_neg_g:1.5559e-02 v_norm:4.0914e+00 g_norm:6.1850e-01 hv_norm:4.5368e-01 cos_v_hv:3.9296e-02 hg_norm:2.3041e+00 cos_g_hg:4.7977e-01 v_par:2.3607e-03 v_perp:4.0913e+00 L1_cos_v_neg_g:4.1363e-02 L1_v_norm:7.8735e-01 L2_cos_v_neg_g:2.9691e-02 L2_v_norm:8.4754e-01 L3_cos_v_neg_g:2.7416e-02 L3_v_norm:8.6727e-01 
L4_cos_v_neg_g:2.2725e-02 L4_v_norm:8.8983e-01 L5_cos_v_neg_g:2.0880e-02 L5_v_norm:9.2134e-01 L6_cos_v_neg_g:2.1365e-02 L6_v_norm:9.4363e-01 L7_cos_v_neg_g:1.8842e-02 L7_v_norm:9.4262e-01 L8_cos_v_neg_g:2.1552e-02 L8_v_norm:9.5171e-01 L9_cos_v_neg_g:2.0498e-02 L9_v_norm:9.5131e-01 L10_cos_v_neg_g:2.4110e-02 L10_v_norm:9.4622e-01 L11_cos_v_neg_g:2.1527e-02 L11_v_norm:9.4649e-01 L12_cos_v_neg_g:2.6598e-02 L12_v_norm:9.3125e-01 +step:1500 train loss:3.970822 +step:1501 train loss:3.999271 +step:1502 train loss:3.931043 +step:1503 train loss:3.988705 +step:1504 train loss:3.958304 +step:1505 train loss:3.931824 +step:1506 train loss:3.924188 +step:1507 train loss:3.941096 +step:1508 train loss:3.956999 +step:1509 train loss:4.003639 +step:1510 train loss:3.950958 +step:1511 train loss:3.970656 +step:1512 train loss:3.947948 +step:1513 train loss:4.017534 +step:1514 train loss:3.968687 +step:1515 train loss:4.024064 +step:1516 train loss:3.953650 +step:1517 train loss:3.969058 +step:1518 train loss:4.049450 +step:1519 train loss:4.005270 +step:1520 train loss:4.050306 +step:1521 train loss:3.947820 +step:1522 train loss:4.004308 +step:1523 train loss:4.007145 +step:1524 train loss:3.934829 +step:1525 train loss:4.016633 +step:1526 train loss:3.932493 +step:1527 train loss:3.984220 +step:1528 train loss:4.032103 +step:1529 train loss:3.986742 +step:1530 train loss:4.030452 +step:1531 train loss:3.947320 +step:1532 train loss:4.026116 +step:1533 train loss:3.993343 +step:1534 train loss:3.941904 +step:1535 train loss:3.993637 +step:1536 train loss:4.020842 +step:1537 train loss:3.971480 +step:1538 train loss:3.981967 +step:1539 train loss:3.982482 +step:1540 train loss:4.003170 +step:1541 train loss:3.961604 +step:1542 train loss:4.052346 +step:1543 train loss:4.074595 +step:1544 train loss:3.943798 +step:1545 train loss:3.925147 +step:1546 train loss:3.964548 +step:1547 train loss:3.957235 +step:1548 train loss:3.996089 +step:1549 train loss:3.917453 +step:1550 train loss:4.035519 +step:1551 train loss:3.966122 +step:1552 train loss:3.999379 +step:1553 train loss:4.005191 +step:1554 train loss:4.014771 +step:1555 train loss:3.966689 +step:1556 train loss:3.952626 +step:1557 train loss:3.959676 +step:1558 train loss:3.983892 +step:1559 train loss:3.945682 +step:1560 train loss:4.024850 +step:1561 train loss:4.000823 +step:1562 train loss:3.895778 +step:1563 train loss:3.876122 +step:1564 train loss:4.010577 +step:1565 train loss:3.980182 +step:1566 train loss:4.000176 +step:1567 train loss:3.998090 +step:1568 train loss:3.952147 +step:1569 train loss:3.947612 +step:1570 train loss:3.968035 +step:1571 train loss:3.947032 +step:1572 train loss:3.949663 +step:1573 train loss:3.987312 +step:1574 train loss:3.947371 +step:1575 train loss:3.966059 +step:1576 train loss:3.923366 +step:1577 train loss:3.946824 +step:1578 train loss:3.934258 +step:1579 train loss:4.006813 +step:1580 train loss:3.963431 +step:1581 train loss:4.001879 +step:1582 train loss:4.001736 +step:1583 train loss:3.984745 +step:1584 train loss:3.900278 +step:1585 train loss:3.988886 +step:1586 train loss:3.952183 +step:1587 train loss:3.965246 +step:1588 train loss:3.951099 +step:1589 train loss:3.997516 +step:1590 train loss:3.899687 +step:1591 train loss:3.955842 +step:1592 train loss:3.907349 +step:1593 train loss:3.941965 +step:1594 train loss:3.942723 +step:1595 train loss:3.936489 +step:1596 train loss:3.943668 +step:1597 train loss:3.881113 +step:1598 train loss:3.974212 +step:1599 train loss:3.989918 +step:1600 train 
loss:3.865311 +step:1601 train loss:3.941984 +step:1602 train loss:4.002569 +step:1603 train loss:3.998586 +step:1604 train loss:3.924862 +step:1605 train loss:3.974885 +step:1606 train loss:4.026604 +step:1607 train loss:3.909118 +step:1608 train loss:3.942415 +step:1609 train loss:3.953599 +step:1610 train loss:4.010827 +step:1611 train loss:3.940593 +step:1612 train loss:3.865824 +step:1613 train loss:3.941131 +step:1614 train loss:4.053793 +step:1615 train loss:3.981039 +step:1616 train loss:3.996276 +step:1617 train loss:3.971865 +step:1618 train loss:3.976815 +step:1619 train loss:4.156998 +step:1620 train loss:3.941918 +step:1621 train loss:3.996837 +step:1622 train loss:3.917223 +step:1623 train loss:3.975788 +step:1624 train loss:3.947703 +step:1625 train loss:4.029521 +step:1626 train loss:3.926500 +step:1627 train loss:3.926924 +step:1628 train loss:3.944420 +step:1629 train loss:3.976244 +step:1630 train loss:3.987130 +step:1631 train loss:3.936538 +step:1632 train loss:3.906556 +step:1633 train loss:3.920890 +step:1634 train loss:3.975338 +step:1635 train loss:3.920043 +step:1636 train loss:3.900497 +step:1637 train loss:3.976091 +step:1638 train loss:4.078951 +step:1639 train loss:3.886412 +step:1640 train loss:3.962834 +step:1641 train loss:3.927081 +step:1642 train loss:4.023362 +step:1643 train loss:3.921515 +step:1644 train loss:3.932849 +step:1645 train loss:3.914711 +step:1646 train loss:3.991715 +step:1647 train loss:3.886668 +step:1648 train loss:3.951926 +step:1649 train loss:3.911063 +step:1650 train loss:3.925480 +step:1651 train loss:3.944368 +step:1652 train loss:3.960911 +step:1653 train loss:3.960631 +step:1654 train loss:3.958238 +step:1655 train loss:3.931984 +step:1656 train loss:3.924603 +step:1657 train loss:3.934221 +step:1658 train loss:3.906294 +step:1659 train loss:3.986360 +step:1660 train loss:3.887077 +step:1661 train loss:3.996337 +step:1662 train loss:3.933752 +step:1663 train loss:3.922476 +step:1664 train loss:4.012711 +step:1665 train loss:3.938275 +step:1666 train loss:3.945043 +step:1667 train loss:3.961866 +step:1668 train loss:3.937057 +step:1669 train loss:3.896342 +step:1670 train loss:3.952654 +step:1671 train loss:3.950981 +step:1672 train loss:3.943015 +step:1673 train loss:3.899236 +step:1674 train loss:3.897077 +step:1675 train loss:3.939410 +step:1676 train loss:4.203393 +step:1677 train loss:3.981356 +step:1678 train loss:3.929561 +step:1679 train loss:4.053317 +step:1680 train loss:3.980684 +step:1681 train loss:4.025634 +step:1682 train loss:3.971525 +step:1683 train loss:3.973139 +step:1684 train loss:3.936209 +step:1685 train loss:3.971614 +step:1686 train loss:3.948928 +step:1687 train loss:3.965155 +step:1688 train loss:3.938037 +step:1689 train loss:3.928181 +step:1690 train loss:3.950409 +step:1691 train loss:3.937251 +step:1692 train loss:3.951126 +step:1693 train loss:3.924412 +step:1694 train loss:3.876003 +step:1695 train loss:3.896133 +step:1696 train loss:3.901878 +step:1697 train loss:3.947924 +step:1698 train loss:3.948753 +step:1699 train loss:3.894953 +step:1700 train loss:3.979642 +step:1701 train loss:3.914948 +step:1702 train loss:3.906878 +step:1703 train loss:3.923193 +step:1704 train loss:3.932133 +step:1705 train loss:3.945134 +step:1706 train loss:3.951843 +step:1707 train loss:3.953017 +step:1708 train loss:3.869040 +step:1709 train loss:3.974613 +step:1710 train loss:3.891281 +step:1711 train loss:3.896742 +step:1712 train loss:3.921582 +step:1713 train loss:3.883865 +step:1714 train loss:4.253746 
+step:1715 train loss:3.901837 +step:1716 train loss:3.887792 +step:1717 train loss:3.890813 +step:1718 train loss:3.966021 +step:1719 train loss:3.871307 +step:1720 train loss:3.958274 +step:1721 train loss:3.896273 +step:1722 train loss:3.871809 +step:1723 train loss:3.969568 +step:1724 train loss:3.917794 +step:1725 train loss:3.914508 +step:1726 train loss:3.914526 +step:1727 train loss:3.948539 +step:1728 train loss:3.955389 +step:1729 train loss:3.888301 +step:1730 train loss:3.957244 +step:1731 train loss:3.886906 +step:1732 train loss:3.899927 +step:1733 train loss:3.894047 +step:1734 train loss:3.942380 +step:1735 train loss:4.003283 +step:1736 train loss:3.911656 +step:1737 train loss:3.938487 +step:1738 train loss:3.900745 +step:1739 train loss:3.967949 +step:1740 train loss:3.953394 +step:1741 train loss:4.012713 +step:1742 train loss:3.999016 +step:1743 train loss:3.893133 +step:1744 train loss:3.903479 +step:1745 train loss:3.901072 +step:1746 train loss:3.879133 +step:1747 train loss:3.919209 +step:1748 train loss:3.857623 +step:1749 train loss:3.898774 +step:1750 validation loss:3.872357 +step:1750 train loss:3.928997 +step:1751 train loss:3.944047 +step:1752 train loss:3.912056 +step:1753 train loss:3.939727 +step:1754 train loss:3.934551 +step:1755 train loss:3.929845 +step:1756 train loss:3.950553 +step:1757 train loss:3.956934 +step:1758 train loss:3.871449 +step:1759 train loss:3.958821 +step:1760 train loss:3.910233 +step:1761 train loss:3.894225 +step:1762 train loss:3.892375 +step:1763 train loss:3.892356 +step:1764 train loss:4.185548 +step:1765 train loss:3.893678 +step:1766 train loss:3.985634 +step:1767 train loss:3.893991 +step:1768 train loss:3.874442 +step:1769 train loss:3.894267 +step:1770 train loss:3.907394 +step:1771 train loss:3.883209 +step:1772 train loss:3.995988 +step:1773 train loss:3.918462 +step:1774 train loss:3.919930 +step:1775 train loss:4.036514 +step:1776 train loss:3.925574 +step:1777 train loss:3.914891 +step:1778 train loss:3.976159 +step:1779 train loss:3.905088 +step:1780 train loss:3.951385 +step:1781 train loss:3.957567 +step:1782 train loss:3.983066 +step:1783 train loss:3.905998 +step:1784 train loss:4.004389 +step:1785 train loss:3.906306 +step:1786 train loss:3.903246 +step:1787 train loss:3.900799 +step:1788 train loss:3.921196 +step:1789 train loss:3.873693 +step:1790 train loss:3.887174 +step:1791 train loss:3.975492 +step:1792 train loss:3.969094 +step:1793 train loss:3.889719 +step:1794 train loss:3.928356 +step:1795 train loss:3.879587 +step:1796 train loss:3.865029 +step:1797 train loss:3.924424 +step:1798 train loss:3.867195 +step:1799 train loss:3.920041 +step:1800 train loss:3.948428 +step:1801 train loss:3.936398 +step:1802 train loss:3.943171 +step:1803 train loss:3.929406 +step:1804 train loss:3.932001 +step:1805 train loss:3.922881 +step:1806 train loss:3.931878 +step:1807 train loss:3.858885 +step:1808 train loss:3.924075 +step:1809 train loss:3.906063 +step:1810 train loss:3.899584 +step:1811 train loss:3.918903 +step:1812 train loss:3.903842 +step:1813 train loss:3.914727 +step:1814 train loss:3.981038 +step:1815 train loss:3.921939 +step:1816 train loss:3.873088 +step:1817 train loss:3.865653 +step:1818 train loss:3.918806 +step:1819 train loss:3.891289 +step:1820 train loss:3.924341 +step:1821 train loss:3.887166 +step:1822 train loss:3.865963 +step:1823 train loss:3.866974 +step:1824 train loss:3.938795 +step:1825 train loss:3.848550 +step:1826 train loss:3.894652 +step:1827 train loss:3.860482 +step:1828 
train loss:3.915154 +step:1829 train loss:3.879919 +step:1830 train loss:4.076944 +step:1831 train loss:3.828934 +step:1832 train loss:3.887017 +step:1833 train loss:3.930102 +step:1834 train loss:3.880145 +step:1835 train loss:3.892895 +step:1836 train loss:3.925869 +step:1837 train loss:3.852893 +step:1838 train loss:3.943512 +step:1839 train loss:3.930440 +step:1840 train loss:3.896917 +step:1841 train loss:3.929236 +step:1842 train loss:3.903078 +step:1843 train loss:3.850632 +step:1844 train loss:3.917756 +step:1845 train loss:3.881414 +step:1846 train loss:3.934734 +step:1847 train loss:3.982599 +step:1848 train loss:3.788919 +step:1849 train loss:3.879696 +step:1850 train loss:3.860438 +step:1851 train loss:3.894106 +step:1852 train loss:3.882116 +step:1853 train loss:3.938235 +step:1854 train loss:3.900624 +step:1855 train loss:3.887359 +step:1856 train loss:3.888604 +step:1857 train loss:3.891574 +step:1858 train loss:3.936904 +step:1859 train loss:3.885493 +step:1860 train loss:3.861050 +step:1861 train loss:3.870415 +step:1862 train loss:3.913280 +step:1863 train loss:3.951710 +step:1864 train loss:3.847616 +step:1865 train loss:3.869388 +step:1866 train loss:3.873306 +step:1867 train loss:3.906432 +step:1868 train loss:3.949313 +step:1869 train loss:3.871539 +step:1870 train loss:3.898377 +step:1871 train loss:3.837379 +step:1872 train loss:3.910558 +step:1873 train loss:3.970967 +step:1874 train loss:3.837101 +step:1875 train loss:3.913269 +step:1876 train loss:3.876826 +step:1877 train loss:3.919118 +step:1878 train loss:3.838671 +step:1879 train loss:3.899739 +step:1880 train loss:3.982008 +step:1881 train loss:3.905668 +step:1882 train loss:3.924414 +step:1883 train loss:3.949873 +step:1884 train loss:3.960726 +step:1885 train loss:3.912946 +step:1886 train loss:3.845703 +step:1887 train loss:3.860590 +step:1888 train loss:3.863465 +step:1889 train loss:3.879372 +step:1890 train loss:3.876941 +step:1891 train loss:3.820287 +step:1892 train loss:3.910424 +step:1893 train loss:3.832582 +step:1894 train loss:3.849020 +step:1895 train loss:3.887784 +step:1896 train loss:3.934329 +step:1897 train loss:3.830639 +step:1898 train loss:3.876943 +step:1899 train loss:3.895164 +step:1900 train loss:3.848315 +step:1901 train loss:3.930875 +step:1902 train loss:3.916417 +step:1903 train loss:3.857312 +step:1904 train loss:3.847800 +step:1905 train loss:3.845970 +step:1906 train loss:3.902987 +step:1907 train loss:3.846002 +step:1908 train loss:3.860020 +step:1909 train loss:3.954628 +step:1910 train loss:3.847505 +step:1911 train loss:3.849688 +step:1912 train loss:3.905494 +step:1913 train loss:3.847721 +step:1914 train loss:3.894149 +step:1915 train loss:3.862360 +step:1916 train loss:3.914603 +step:1917 train loss:3.891137 +step:1918 train loss:3.799548 +step:1919 train loss:3.954171 +step:1920 train loss:4.055507 +step:1921 train loss:3.835395 +step:1922 train loss:3.817936 +step:1923 train loss:3.909927 +step:1924 train loss:3.948987 +step:1925 train loss:3.891706 +step:1926 train loss:3.832960 +step:1927 train loss:3.911445 +step:1928 train loss:3.826197 +step:1929 train loss:3.853879 +step:1930 train loss:3.919109 +step:1931 train loss:3.831186 +step:1932 train loss:3.879712 +step:1933 train loss:3.876643 +step:1934 train loss:3.949711 +step:1935 train loss:3.898232 +step:1936 train loss:3.869429 +step:1937 train loss:3.812434 +step:1938 train loss:4.174872 +step:1939 train loss:3.921881 +step:1940 train loss:3.907617 +step:1941 train loss:3.907873 +step:1942 train loss:3.895104 
+step:1943 train loss:3.892373 +step:1944 train loss:3.851360 +step:1945 train loss:3.851869 +step:1946 train loss:3.879573 +step:1947 train loss:3.896354 +step:1948 train loss:3.806708 +step:1949 train loss:3.912992 +step:1950 train loss:3.851095 +step:1951 train loss:3.873692 +step:1952 train loss:3.899248 +step:1953 train loss:3.832771 +step:1954 train loss:3.873837 +step:1955 train loss:3.824750 +step:1956 train loss:3.906784 +step:1957 train loss:3.928913 +step:1958 train loss:3.946102 +step:1959 train loss:3.814561 +step:1960 train loss:3.867081 +step:1961 train loss:3.893802 +step:1962 train loss:3.885108 +step:1963 train loss:3.863881 +step:1964 train loss:3.896925 +step:1965 train loss:3.934542 +step:1966 train loss:3.847653 +step:1967 train loss:3.901475 +step:1968 train loss:3.838581 +step:1969 train loss:3.854833 +step:1970 train loss:3.912905 +step:1971 train loss:3.816238 +step:1972 train loss:3.925217 +step:1973 train loss:3.823299 +step:1974 train loss:3.868494 +step:1975 train loss:3.825788 +step:1976 train loss:3.855152 +step:1977 train loss:3.896711 +step:1978 train loss:3.837231 +step:1979 train loss:3.817511 +step:1980 train loss:3.859097 +step:1981 train loss:3.836419 +step:1982 train loss:3.917011 +step:1983 train loss:3.860111 +step:1984 train loss:3.902081 +step:1985 train loss:3.884956 +step:1986 train loss:3.877840 +step:1987 train loss:3.828464 +step:1988 train loss:3.855196 +step:1989 train loss:3.998022 +step:1990 train loss:3.833325 +step:1991 train loss:3.825995 +step:1992 train loss:3.833989 +step:1993 train loss:3.868889 +step:1994 train loss:3.864573 +step:1995 train loss:3.811549 +step:1996 train loss:3.867373 +step:1997 train loss:3.873944 +step:1998 train loss:3.823254 +step:1999 train loss:3.938001 +step:2000 validation loss:3.804729 total_sharp:2.9125e-03 L1_sharp:3.8262e-03 L2_sharp:4.8392e-04 L3_sharp:9.2346e-04 L4_sharp:4.8848e-04 L5_sharp:3.5352e-04 L6_sharp:3.8269e-04 L7_sharp:4.7368e-04 L8_sharp:6.8414e-04 L9_sharp:7.1932e-04 L10_sharp:8.5309e-04 L11_sharp:7.9009e-04 L12_sharp:1.1893e-03 total_fnorm:4.2415e+00 total_l1_linf:3.7410e+04 total_spectral:4.2415e+00 L1_fnorm:8.9453e-01 L2_fnorm:9.1568e-01 L3_fnorm:9.3136e-01 L4_fnorm:9.5175e-01 L5_fnorm:9.7919e-01 L6_fnorm:9.9458e-01 L7_fnorm:9.8697e-01 L8_fnorm:9.8950e-01 L9_fnorm:9.8028e-01 L10_fnorm:9.6937e-01 L11_fnorm:9.8071e-01 L12_fnorm:9.7006e-01 L1_l1linf:1.1460e+00 L2_l1linf:9.9140e-01 L3_l1linf:1.0099e+00 L4_l1linf:9.9326e-01 L5_l1linf:1.0214e+00 L6_l1linf:1.0360e+00 L7_l1linf:1.0037e+00 L8_l1linf:1.0027e+00 L9_l1linf:1.0580e+00 L10_l1linf:1.0306e+00 L11_l1linf:1.1344e+00 L12_l1linf:1.1155e+00 L1_spectral:1.5240e-01 L2_spectral:1.3776e-01 L3_spectral:1.3094e-01 L4_spectral:1.1820e-01 L5_spectral:1.1255e-01 L6_spectral:1.0837e-01 L7_spectral:9.0694e-02 L8_spectral:8.8787e-02 L9_spectral:1.0434e-01 L10_spectral:1.1880e-01 L11_spectral:1.3399e-01 L12_spectral:1.3926e-01 ip_v_neg_g:2.8329e-02 cos_v_neg_g:9.4758e-03 v_norm:4.2415e+00 g_norm:7.0486e-01 hv_norm:4.9194e-01 cos_v_hv:2.5111e-02 hg_norm:3.0665e+00 cos_g_hg:5.2781e-01 v_par:1.5122e-03 v_perp:4.2415e+00 L1_cos_v_neg_g:2.3609e-02 L1_v_norm:8.9453e-01 L2_cos_v_neg_g:1.9351e-02 L2_v_norm:9.1568e-01 L3_cos_v_neg_g:1.7082e-02 L3_v_norm:9.3136e-01 L4_cos_v_neg_g:1.4047e-02 L4_v_norm:9.5175e-01 L5_cos_v_neg_g:1.2239e-02 L5_v_norm:9.7919e-01 L6_cos_v_neg_g:1.0724e-02 L6_v_norm:9.9458e-01 L7_cos_v_neg_g:1.1386e-02 L7_v_norm:9.8697e-01 L8_cos_v_neg_g:1.0357e-02 L8_v_norm:9.8950e-01 L9_cos_v_neg_g:1.0779e-02 L9_v_norm:9.8028e-01 
L10_cos_v_neg_g:1.2595e-02 L10_v_norm:9.6937e-01 L11_cos_v_neg_g:1.3341e-02 L11_v_norm:9.8071e-01 L12_cos_v_neg_g:1.3769e-02 L12_v_norm:9.7006e-01 +step:2000 train loss:3.899175 +step:2001 train loss:3.828405 +step:2002 train loss:3.933874 +step:2003 train loss:3.974023 +step:2004 train loss:3.844779 +step:2005 train loss:3.944212 +step:2006 train loss:3.825977 +step:2007 train loss:3.904001 +step:2008 train loss:3.852866 +step:2009 train loss:3.850798 +step:2010 train loss:3.981979 +step:2011 train loss:3.831126 +step:2012 train loss:3.857998 +step:2013 train loss:3.868021 +step:2014 train loss:3.765912 +step:2015 train loss:3.886925 +step:2016 train loss:3.868413 +step:2017 train loss:3.867324 +step:2018 train loss:3.835297 +step:2019 train loss:3.862992 +step:2020 train loss:3.875052 +step:2021 train loss:3.836656 +step:2022 train loss:3.884258 +step:2023 train loss:3.857708 +step:2024 train loss:3.910336 +step:2025 train loss:3.849461 +step:2026 train loss:3.828279 +step:2027 train loss:3.858797 +step:2028 train loss:3.790178 +step:2029 train loss:3.822148 +step:2030 train loss:3.821919 +step:2031 train loss:3.788330 +step:2032 train loss:3.841971 +step:2033 train loss:3.837343 +step:2034 train loss:3.835383 +step:2035 train loss:3.873599 +step:2036 train loss:3.864812 +step:2037 train loss:3.849913 +step:2038 train loss:3.842486 +step:2039 train loss:3.835409 +step:2040 train loss:3.865537 +step:2041 train loss:3.866617 +step:2042 train loss:3.795775 +step:2043 train loss:3.950545 +step:2044 train loss:3.821747 +step:2045 train loss:3.842416 +step:2046 train loss:3.849704 +step:2047 train loss:3.826231 +step:2048 train loss:3.867954 +step:2049 train loss:3.824433 +step:2050 train loss:3.845798 +step:2051 train loss:3.807697 +step:2052 train loss:3.862719 +step:2053 train loss:3.862361 +step:2054 train loss:3.842562 +step:2055 train loss:3.841839 +step:2056 train loss:3.887964 +step:2057 train loss:3.888619 +step:2058 train loss:3.853554 +step:2059 train loss:3.935442 +step:2060 train loss:3.880643 +step:2061 train loss:3.832004 +step:2062 train loss:3.859138 +step:2063 train loss:3.764953 +step:2064 train loss:3.884235 +step:2065 train loss:3.891824 +step:2066 train loss:3.750763 +step:2067 train loss:3.796483 +step:2068 train loss:3.902792 +step:2069 train loss:3.834331 +step:2070 train loss:3.837000 +step:2071 train loss:3.876179 +step:2072 train loss:3.809500 +step:2073 train loss:3.859196 +step:2074 train loss:3.834633 +step:2075 train loss:3.924381 +step:2076 train loss:3.865006 +step:2077 train loss:3.882604 +step:2078 train loss:3.835317 +step:2079 train loss:3.987264 +step:2080 train loss:3.806605 +step:2081 train loss:3.912897 +step:2082 train loss:3.841471 +step:2083 train loss:3.830187 +step:2084 train loss:3.809333 +step:2085 train loss:3.855381 +step:2086 train loss:3.861596 +step:2087 train loss:3.906907 +step:2088 train loss:3.774157 +step:2089 train loss:3.800539 +step:2090 train loss:3.836340 +step:2091 train loss:3.855995 +step:2092 train loss:3.834246 +step:2093 train loss:3.822461 +step:2094 train loss:3.859346 +step:2095 train loss:3.807183 +step:2096 train loss:3.794532 +step:2097 train loss:3.830356 +step:2098 train loss:3.830587 +step:2099 train loss:3.811232 +step:2100 train loss:3.880918 +step:2101 train loss:3.876497 +step:2102 train loss:3.835472 +step:2103 train loss:3.851589 +step:2104 train loss:3.829930 +step:2105 train loss:3.835876 +step:2106 train loss:3.837711 +step:2107 train loss:3.899954 +step:2108 train loss:3.821439 +step:2109 train 
loss:3.786940 +step:2110 train loss:3.884896 +step:2111 train loss:3.856900 +step:2112 train loss:3.905467 +step:2113 train loss:3.848337 +step:2114 train loss:3.896334 +step:2115 train loss:3.914705 +step:2116 train loss:3.879524 +step:2117 train loss:3.882761 +step:2118 train loss:3.860826 +step:2119 train loss:3.797532 +step:2120 train loss:3.885688 +step:2121 train loss:3.867493 +step:2122 train loss:3.873109 +step:2123 train loss:3.925492 +step:2124 train loss:3.933216 +step:2125 train loss:3.844365 +step:2126 train loss:3.840663 +step:2127 train loss:3.829349 +step:2128 train loss:3.818827 +step:2129 train loss:3.842073 +step:2130 train loss:3.841922 +step:2131 train loss:3.865670 +step:2132 train loss:3.795309 +step:2133 train loss:3.902908 +step:2134 train loss:3.852215 +step:2135 train loss:3.807292 +step:2136 train loss:3.895774 +step:2137 train loss:3.860968 +step:2138 train loss:3.816786 +step:2139 train loss:3.817741 +step:2140 train loss:3.821259 +step:2141 train loss:3.868598 +step:2142 train loss:3.837106 +step:2143 train loss:3.757124 +step:2144 train loss:3.865563 +step:2145 train loss:3.831056 +step:2146 train loss:3.870856 +step:2147 train loss:3.975553 +step:2148 train loss:3.776051 +step:2149 train loss:3.787983 +step:2150 train loss:3.812334 +step:2151 train loss:3.847893 +step:2152 train loss:3.842834 +step:2153 train loss:3.879918 +step:2154 train loss:3.799638 +step:2155 train loss:3.879050 +step:2156 train loss:3.804194 +step:2157 train loss:3.875674 +step:2158 train loss:3.915544 +step:2159 train loss:3.844616 +step:2160 train loss:3.913031 +step:2161 train loss:3.810601 +step:2162 train loss:3.819417 +step:2163 train loss:3.795803 +step:2164 train loss:3.818694 +step:2165 train loss:3.796250 +step:2166 train loss:3.911586 +step:2167 train loss:3.821890 +step:2168 train loss:3.830449 +step:2169 train loss:3.784335 +step:2170 train loss:3.931372 +step:2171 train loss:3.888285 +step:2172 train loss:3.824393 +step:2173 train loss:3.817571 +step:2174 train loss:3.880463 +step:2175 train loss:3.809604 +step:2176 train loss:3.892356 +step:2177 train loss:3.859952 +step:2178 train loss:3.787313 +step:2179 train loss:3.858303 +step:2180 train loss:3.875311 +step:2181 train loss:3.806037 +step:2182 train loss:3.856973 +step:2183 train loss:3.853655 +step:2184 train loss:3.801334 +step:2185 train loss:3.782995 +step:2186 train loss:3.820601 +step:2187 train loss:3.832848 +step:2188 train loss:3.883779 +step:2189 train loss:3.771153 +step:2190 train loss:3.820193 +step:2191 train loss:3.872918 +step:2192 train loss:3.802225 +step:2193 train loss:3.773871 +step:2194 train loss:3.779989 +step:2195 train loss:3.800060 +step:2196 train loss:3.803194 +step:2197 train loss:3.788499 +step:2198 train loss:3.813889 +step:2199 train loss:3.879934 +step:2200 train loss:3.813493 +step:2201 train loss:3.819611 +step:2202 train loss:3.782084 +step:2203 train loss:3.800321 +step:2204 train loss:3.834420 +step:2205 train loss:3.813775 +step:2206 train loss:3.812930 +step:2207 train loss:3.815269 +step:2208 train loss:3.791359 +step:2209 train loss:4.073507 +step:2210 train loss:3.841714 +step:2211 train loss:3.842299 +step:2212 train loss:3.814255 +step:2213 train loss:3.892551 +step:2214 train loss:3.886619 +step:2215 train loss:3.812662 +step:2216 train loss:3.774966 +step:2217 train loss:3.807144 +step:2218 train loss:3.805892 +step:2219 train loss:3.840523 +step:2220 train loss:3.781400 +step:2221 train loss:3.813763 +step:2222 train loss:3.826990 +step:2223 train loss:3.867696 
+step:2224 train loss:3.840178 +step:2225 train loss:3.787391 +step:2226 train loss:3.848411 +step:2227 train loss:3.851641 +step:2228 train loss:3.846055 +step:2229 train loss:3.791507 +step:2230 train loss:3.913451 +step:2231 train loss:3.828489 +step:2232 train loss:3.827797 +step:2233 train loss:3.869311 +step:2234 train loss:3.767555 +step:2235 train loss:3.853075 +step:2236 train loss:3.792588 +step:2237 train loss:3.932405 +step:2238 train loss:3.725745 +step:2239 train loss:3.810630 +step:2240 train loss:3.823990 +step:2241 train loss:3.739272 +step:2242 train loss:3.880931 +step:2243 train loss:3.916039 +step:2244 train loss:3.792238 +step:2245 train loss:3.796441 +step:2246 train loss:3.761894 +step:2247 train loss:3.769820 +step:2248 train loss:3.823129 +step:2249 train loss:3.801898 +step:2250 validation loss:3.757480 +step:2250 train loss:3.816693 +step:2251 train loss:3.778095 +step:2252 train loss:3.778997 +step:2253 train loss:3.811427 +step:2254 train loss:3.817663 +step:2255 train loss:3.776878 +step:2256 train loss:3.827031 +step:2257 train loss:3.817913 +step:2258 train loss:3.805244 +step:2259 train loss:3.822691 +step:2260 train loss:3.773505 +step:2261 train loss:3.853673 +step:2262 train loss:3.873605 +step:2263 train loss:3.828333 +step:2264 train loss:3.943972 +step:2265 train loss:3.790382 +step:2266 train loss:3.839906 +step:2267 train loss:3.796928 +step:2268 train loss:3.800213 +step:2269 train loss:3.804442 +step:2270 train loss:3.791833 +step:2271 train loss:3.805967 +step:2272 train loss:3.840890 +step:2273 train loss:3.763537 +step:2274 train loss:3.793570 +step:2275 train loss:3.744474 +step:2276 train loss:3.821222 +step:2277 train loss:3.833085 +step:2278 train loss:3.810108 +step:2279 train loss:3.799715 +step:2280 train loss:3.710954 +step:2281 train loss:3.856399 +step:2282 train loss:3.783733 +step:2283 train loss:3.768906 +step:2284 train loss:3.782712 +step:2285 train loss:3.838337 +step:2286 train loss:3.794868 +step:2287 train loss:3.831122 +step:2288 train loss:3.802897 +step:2289 train loss:3.804070 +step:2290 train loss:3.808048 +step:2291 train loss:3.800181 +step:2292 train loss:3.847008 +step:2293 train loss:3.833958 +step:2294 train loss:3.826532 +step:2295 train loss:3.887943 +step:2296 train loss:3.822164 +step:2297 train loss:3.796574 +step:2298 train loss:3.855046 +step:2299 train loss:3.821449 +step:2300 train loss:3.737502 +step:2301 train loss:3.832773 +step:2302 train loss:3.847358 +step:2303 train loss:3.817216 +step:2304 train loss:3.802631 +step:2305 train loss:3.842213 +step:2306 train loss:3.830530 +step:2307 train loss:3.814764 +step:2308 train loss:3.829872 +step:2309 train loss:3.786473 +step:2310 train loss:3.773564 +step:2311 train loss:3.759057 +step:2312 train loss:3.824231 +step:2313 train loss:3.738267 +step:2314 train loss:3.811175 +step:2315 train loss:3.828638 +step:2316 train loss:3.865470 +step:2317 train loss:3.731061 +step:2318 train loss:3.777700 +step:2319 train loss:3.833714 +step:2320 train loss:3.798772 +step:2321 train loss:3.772706 +step:2322 train loss:3.785917 +step:2323 train loss:3.783187 +step:2324 train loss:3.811239 +step:2325 train loss:3.750101 +step:2326 train loss:3.779925 +step:2327 train loss:3.892006 +step:2328 train loss:3.840219 +step:2329 train loss:3.799135 +step:2330 train loss:3.758716 +step:2331 train loss:3.800032 +step:2332 train loss:3.722726 +step:2333 train loss:3.787075 +step:2334 train loss:3.768457 +step:2335 train loss:3.749848 +step:2336 train loss:4.007720 +step:2337 
train loss:3.782037 +step:2338 train loss:3.822404 +step:2339 train loss:3.821704 +step:2340 train loss:3.843787 +step:2341 train loss:3.824833 +step:2342 train loss:3.781265 +step:2343 train loss:3.798252 +step:2344 train loss:3.842656 +step:2345 train loss:3.793913 +step:2346 train loss:3.824996 +step:2347 train loss:3.751915 +step:2348 train loss:3.808092 +step:2349 train loss:3.758530 +step:2350 train loss:3.813144 +step:2351 train loss:3.819587 +step:2352 train loss:3.827565 +step:2353 train loss:3.785954 +step:2354 train loss:3.831436 +step:2355 train loss:3.822736 +step:2356 train loss:3.860005 +step:2357 train loss:3.763105 +step:2358 train loss:3.778674 +step:2359 train loss:3.807733 +step:2360 train loss:3.825258 +step:2361 train loss:3.860931 +step:2362 train loss:3.693591 +step:2363 train loss:3.885257 +step:2364 train loss:3.830378 +step:2365 train loss:3.803349 +step:2366 train loss:3.754120 +step:2367 train loss:3.821011 +step:2368 train loss:3.804777 +step:2369 train loss:3.800382 +step:2370 train loss:3.809170 +step:2371 train loss:3.862701 +step:2372 train loss:3.725775 +step:2373 train loss:3.861511 +step:2374 train loss:3.845622 +step:2375 train loss:3.831816 +step:2376 train loss:3.818456 +step:2377 train loss:3.764134 +step:2378 train loss:3.811888 +step:2379 train loss:3.796242 +step:2380 train loss:3.851921 +step:2381 train loss:3.948525 +step:2382 train loss:3.733624 +step:2383 train loss:3.780828 +step:2384 train loss:3.814719 +step:2385 train loss:3.716659 +step:2386 train loss:3.869756 +step:2387 train loss:3.748080 +step:2388 train loss:3.797128 +step:2389 train loss:3.822660 +step:2390 train loss:3.773614 +step:2391 train loss:3.798812 +step:2392 train loss:3.824287 +step:2393 train loss:3.778412 +step:2394 train loss:3.805846 +step:2395 train loss:3.795704 +step:2396 train loss:3.798182 +step:2397 train loss:3.773160 +step:2398 train loss:3.827901 +step:2399 train loss:3.792979 +step:2400 train loss:3.767419 +step:2401 train loss:3.808841 +step:2402 train loss:3.761989 +step:2403 train loss:3.813688 +step:2404 train loss:3.772954 +step:2405 train loss:3.775898 +step:2406 train loss:3.802144 +step:2407 train loss:3.747843 +step:2408 train loss:3.789012 +step:2409 train loss:3.784477 +step:2410 train loss:3.777006 +step:2411 train loss:3.853705 +step:2412 train loss:3.840811 +step:2413 train loss:3.878778 +step:2414 train loss:3.771628 +step:2415 train loss:3.757166 +step:2416 train loss:3.771927 +step:2417 train loss:3.809183 +step:2418 train loss:3.828653 +step:2419 train loss:3.757582 +step:2420 train loss:3.776193 +step:2421 train loss:3.807126 +step:2422 train loss:3.860415 +step:2423 train loss:3.795106 +step:2424 train loss:3.762197 +step:2425 train loss:3.824166 +step:2426 train loss:3.761919 +step:2427 train loss:3.786387 +step:2428 train loss:3.865772 +step:2429 train loss:3.818403 +step:2430 train loss:3.910501 +step:2431 train loss:3.824705 +step:2432 train loss:3.792732 +step:2433 train loss:3.765258 +step:2434 train loss:3.753109 +step:2435 train loss:3.811563 +step:2436 train loss:3.770450 +step:2437 train loss:3.803365 +step:2438 train loss:3.845518 +step:2439 train loss:3.828531 +step:2440 train loss:3.770769 +step:2441 train loss:3.805703 +step:2442 train loss:3.796885 +step:2443 train loss:3.758234 +step:2444 train loss:3.797241 +step:2445 train loss:3.795810 +step:2446 train loss:3.762778 +step:2447 train loss:3.743575 +step:2448 train loss:3.797449 +step:2449 train loss:3.823239 +step:2450 train loss:3.781944 +step:2451 train loss:3.710661 
+step:2452 train loss:3.806098 +step:2453 train loss:3.776790 +step:2454 train loss:3.771825 +step:2455 train loss:3.824208 +step:2456 train loss:3.783850 +step:2457 train loss:3.840956 +step:2458 train loss:3.817278 +step:2459 train loss:3.790174 +step:2460 train loss:3.797140 +step:2461 train loss:3.831077 +step:2462 train loss:3.800457 +step:2463 train loss:3.779518 +step:2464 train loss:3.794086 +step:2465 train loss:3.870496 +step:2466 train loss:3.956219 +step:2467 train loss:3.857885 +step:2468 train loss:3.753113 +step:2469 train loss:3.831600 +step:2470 train loss:3.871375 +step:2471 train loss:3.873121 +step:2472 train loss:3.856750 +step:2473 train loss:3.799026 +step:2474 train loss:3.765124 +step:2475 train loss:3.810842 +step:2476 train loss:3.888130 +step:2477 train loss:3.805245 +step:2478 train loss:3.761427 +step:2479 train loss:3.800537 +step:2480 train loss:3.789719 +step:2481 train loss:3.984416 +step:2482 train loss:3.791708 +step:2483 train loss:3.820478 +step:2484 train loss:3.775592 +step:2485 train loss:3.765917 +step:2486 train loss:3.796419 +step:2487 train loss:3.831350 +step:2488 train loss:3.739721 +step:2489 train loss:3.849919 +step:2490 train loss:3.770017 +step:2491 train loss:3.783702 +step:2492 train loss:3.826614 +step:2493 train loss:3.860209 +step:2494 train loss:3.779914 +step:2495 train loss:3.815056 +step:2496 train loss:3.788249 +step:2497 train loss:3.804283 +step:2498 train loss:3.809784 +step:2499 train loss:3.807594 +step:2500 validation loss:3.728230 total_sharp:3.3278e-03 L1_sharp:5.1511e-03 L2_sharp:4.8425e-04 L3_sharp:6.6399e-04 L4_sharp:3.1642e-04 L5_sharp:2.5445e-04 L6_sharp:3.7745e-04 L7_sharp:4.1849e-04 L8_sharp:6.5161e-04 L9_sharp:7.6597e-04 L10_sharp:9.7733e-04 L11_sharp:8.1984e-04 L12_sharp:1.3858e-03 total_fnorm:4.2796e+00 total_l1_linf:3.7802e+04 total_spectral:4.2796e+00 L1_fnorm:9.1603e-01 L2_fnorm:9.2709e-01 L3_fnorm:9.4104e-01 L4_fnorm:9.5887e-01 L5_fnorm:9.9329e-01 L6_fnorm:1.0111e+00 L7_fnorm:1.0101e+00 L8_fnorm:1.0142e+00 L9_fnorm:1.0120e+00 L10_fnorm:9.9733e-01 L11_fnorm:9.9390e-01 L12_fnorm:9.8095e-01 L1_l1linf:1.2638e+00 L2_l1linf:1.0249e+00 L3_l1linf:1.0119e+00 L4_l1linf:1.0108e+00 L5_l1linf:1.0814e+00 L6_l1linf:1.0683e+00 L7_l1linf:1.0483e+00 L8_l1linf:1.0473e+00 L9_l1linf:1.0556e+00 L10_l1linf:1.0974e+00 L11_l1linf:1.0679e+00 L12_l1linf:1.0416e+00 L1_spectral:1.6710e-01 L2_spectral:1.4060e-01 L3_spectral:1.2972e-01 L4_spectral:1.1452e-01 L5_spectral:1.0333e-01 L6_spectral:1.0005e-01 L7_spectral:9.1418e-02 L8_spectral:9.0233e-02 L9_spectral:1.1535e-01 L10_spectral:1.3308e-01 L11_spectral:1.3863e-01 L12_spectral:1.4230e-01 ip_v_neg_g:3.6173e-02 cos_v_neg_g:1.1670e-02 v_norm:4.2796e+00 g_norm:7.2429e-01 hv_norm:5.2745e-01 cos_v_hv:2.7001e-02 hg_norm:3.3752e+00 cos_g_hg:6.0257e-01 v_par:1.8203e-03 v_perp:4.2796e+00 L1_cos_v_neg_g:3.3397e-02 L1_v_norm:9.1603e-01 L2_cos_v_neg_g:1.6183e-02 L2_v_norm:9.2709e-01 L3_cos_v_neg_g:1.4251e-02 L3_v_norm:9.4104e-01 L4_cos_v_neg_g:1.2100e-02 L4_v_norm:9.5887e-01 L5_cos_v_neg_g:1.0745e-02 L5_v_norm:9.9329e-01 L6_cos_v_neg_g:1.3163e-02 L6_v_norm:1.0111e+00 L7_cos_v_neg_g:1.2623e-02 L7_v_norm:1.0101e+00 L8_cos_v_neg_g:1.4155e-02 L8_v_norm:1.0142e+00 L9_cos_v_neg_g:1.4308e-02 L9_v_norm:1.0120e+00 L10_cos_v_neg_g:1.7468e-02 L10_v_norm:9.9733e-01 L11_cos_v_neg_g:1.6724e-02 L11_v_norm:9.9390e-01 L12_cos_v_neg_g:1.9107e-02 L12_v_norm:9.8095e-01 +step:2500 train loss:3.752997 +step:2501 train loss:3.821754 +step:2502 train loss:3.812271 +step:2503 train loss:3.733789 +step:2504 train 
loss:3.769186 +step:2505 train loss:3.792696 +step:2506 train loss:3.752617 +step:2507 train loss:3.782660 +step:2508 train loss:3.734086 +step:2509 train loss:3.747845 +step:2510 train loss:3.743472 +step:2511 train loss:3.785880 +step:2512 train loss:3.834957 +step:2513 train loss:3.784047 +step:2514 train loss:3.767408 +step:2515 train loss:3.915098 +step:2516 train loss:3.793973 +step:2517 train loss:3.862104 +step:2518 train loss:3.824300 +step:2519 train loss:3.809696 +step:2520 train loss:3.819660 +step:2521 train loss:3.812518 +step:2522 train loss:3.867429 +step:2523 train loss:3.789953 +step:2524 train loss:3.856749 +step:2525 train loss:3.830423 +step:2526 train loss:3.873835 +step:2527 train loss:3.856708 +step:2528 train loss:3.835878 +step:2529 train loss:3.847373 +step:2530 train loss:3.824347 +step:2531 train loss:3.760081 +step:2532 train loss:3.857992 +step:2533 train loss:3.749444 +step:2534 train loss:3.841332 +step:2535 train loss:3.791620 +step:2536 train loss:3.713223 +step:2537 train loss:3.828146 +step:2538 train loss:3.806135 +step:2539 train loss:3.823786 +step:2540 train loss:3.757066 +step:2541 train loss:3.784659 +step:2542 train loss:3.792983 +step:2543 train loss:3.783210 +step:2544 train loss:3.768301 +step:2545 train loss:3.754854 +step:2546 train loss:3.718903 +step:2547 train loss:3.765212 +step:2548 train loss:3.790129 +step:2549 train loss:3.792097 +step:2550 train loss:3.923994 +step:2551 train loss:4.001444 +step:2552 train loss:3.729872 +step:2553 train loss:3.762807 +step:2554 train loss:3.911828 +step:2555 train loss:3.795464 +step:2556 train loss:3.719676 +step:2557 train loss:3.816434 +step:2558 train loss:3.810361 +step:2559 train loss:3.761616 +step:2560 train loss:3.748197 +step:2561 train loss:3.839130 +step:2562 train loss:3.793939 +step:2563 train loss:3.727735 +step:2564 train loss:3.798909 +step:2565 train loss:3.777906 +step:2566 train loss:3.759569 +step:2567 train loss:3.740378 +step:2568 train loss:3.791816 +step:2569 train loss:3.800415 +step:2570 train loss:3.749488 +step:2571 train loss:3.833574 +step:2572 train loss:3.792161 +step:2573 train loss:3.726540 +step:2574 train loss:3.781378 +step:2575 train loss:3.823889 +step:2576 train loss:3.776401 +step:2577 train loss:3.739650 +step:2578 train loss:3.783245 +step:2579 train loss:3.758617 +step:2580 train loss:3.731029 +step:2581 train loss:3.741549 +step:2582 train loss:3.751389 +step:2583 train loss:3.776497 +step:2584 train loss:3.791253 +step:2585 train loss:3.753642 +step:2586 train loss:3.776930 +step:2587 train loss:3.709135 +step:2588 train loss:3.740424 +step:2589 train loss:3.817654 +step:2590 train loss:3.742470 +step:2591 train loss:3.798957 +step:2592 train loss:3.849005 +step:2593 train loss:3.808335 +step:2594 train loss:3.765355 +step:2595 train loss:3.774654 +step:2596 train loss:3.813298 +step:2597 train loss:3.697356 +step:2598 train loss:3.851303 +step:2599 train loss:3.804381 +step:2600 train loss:3.829710 +step:2601 train loss:3.772136 +step:2602 train loss:3.798128 +step:2603 train loss:3.791226 +step:2604 train loss:3.717309 +step:2605 train loss:3.846956 +step:2606 train loss:3.793684 +step:2607 train loss:3.752318 +step:2608 train loss:3.726573 +step:2609 train loss:3.749205 +step:2610 train loss:3.773903 +step:2611 train loss:3.817687 +step:2612 train loss:3.776406 +step:2613 train loss:3.752401 +step:2614 train loss:3.740983 +step:2615 train loss:3.737261 +step:2616 train loss:3.810820 +step:2617 train loss:3.774922 +step:2618 train loss:3.739596 
+step:2619 train loss:3.757210 +step:2620 train loss:3.748882 +step:2621 train loss:3.759068 +step:2622 train loss:3.837069 +step:2623 train loss:3.706815 +step:2624 train loss:3.724370 +step:2625 train loss:3.796150 +step:2626 train loss:3.787426 +step:2627 train loss:3.767970 +step:2628 train loss:3.819819 +step:2629 train loss:3.768438 +step:2630 train loss:3.764006 +step:2631 train loss:3.794382 +step:2632 train loss:3.761136 +step:2633 train loss:3.745008 +step:2634 train loss:3.792110 +step:2635 train loss:3.773355 +step:2636 train loss:3.821956 +step:2637 train loss:3.775467 +step:2638 train loss:3.759920 +step:2639 train loss:3.810798 +step:2640 train loss:3.729479 +step:2641 train loss:3.788546 +step:2642 train loss:3.710270 +step:2643 train loss:3.710825 +step:2644 train loss:3.799219 +step:2645 train loss:3.740369 +step:2646 train loss:3.767795 +step:2647 train loss:3.789353 +step:2648 train loss:3.824183 +step:2649 train loss:3.736095 +step:2650 train loss:3.723593 +step:2651 train loss:3.765560 +step:2652 train loss:3.741731 +step:2653 train loss:3.805992 +step:2654 train loss:3.763841 +step:2655 train loss:3.749326 +step:2656 train loss:3.773986 +step:2657 train loss:3.799743 +step:2658 train loss:3.806568 +step:2659 train loss:3.785503 +step:2660 train loss:3.771404 +step:2661 train loss:3.816857 +step:2662 train loss:3.792747 +step:2663 train loss:3.770194 +step:2664 train loss:3.781577 +step:2665 train loss:3.735404 +step:2666 train loss:3.761455 +step:2667 train loss:3.767162 +step:2668 train loss:3.746919 +step:2669 train loss:3.755804 +step:2670 train loss:3.781553 +step:2671 train loss:3.755013 +step:2672 train loss:3.778839 +step:2673 train loss:3.712510 +step:2674 train loss:3.806865 +step:2675 train loss:3.776027 +step:2676 train loss:3.796513 +step:2677 train loss:3.779740 +step:2678 train loss:3.768516 +step:2679 train loss:3.749176 +step:2680 train loss:3.732340 +step:2681 train loss:3.704893 +step:2682 train loss:3.793252 +step:2683 train loss:3.765037 +step:2684 train loss:3.793593 +step:2685 train loss:3.707200 +step:2686 train loss:3.723417 +step:2687 train loss:3.801250 +step:2688 train loss:3.818925 +step:2689 train loss:3.717268 +step:2690 train loss:3.804834 +step:2691 train loss:3.774344 +step:2692 train loss:3.797154 +step:2693 train loss:3.850466 +step:2694 train loss:3.750273 +step:2695 train loss:3.769170 +step:2696 train loss:3.773884 +step:2697 train loss:3.766959 +step:2698 train loss:3.777841 +step:2699 train loss:3.793867 +step:2700 train loss:3.761384 +step:2701 train loss:3.832669 +step:2702 train loss:3.768810 +step:2703 train loss:3.727129 +step:2704 train loss:3.793465 +step:2705 train loss:3.794025 +step:2706 train loss:3.734675 +step:2707 train loss:3.697938 +step:2708 train loss:3.788571 +step:2709 train loss:3.775497 +step:2710 train loss:3.779324 +step:2711 train loss:3.745042 +step:2712 train loss:3.807951 +step:2713 train loss:3.805498 +step:2714 train loss:3.746808 +step:2715 train loss:3.744847 +step:2716 train loss:3.807426 +step:2717 train loss:3.772623 +step:2718 train loss:3.770318 +step:2719 train loss:3.763908 +step:2720 train loss:3.731753 +step:2721 train loss:3.812338 +step:2722 train loss:3.742148 +step:2723 train loss:3.732098 +step:2724 train loss:3.753726 +step:2725 train loss:3.751177 +step:2726 train loss:3.724207 +step:2727 train loss:3.782536 +step:2728 train loss:3.718943 +step:2729 train loss:3.849861 +step:2730 train loss:3.793835 +step:2731 train loss:3.831924 +step:2732 train loss:3.744968 +step:2733 train 
loss:3.738940 +step:2734 train loss:3.788329 +step:2735 train loss:3.788446 +step:2736 train loss:3.708769 +step:2737 train loss:3.763677 +step:2738 train loss:3.824050 +step:2739 train loss:3.741088 +step:2740 train loss:3.742398 +step:2741 train loss:3.731195 +step:2742 train loss:3.657265 +step:2743 train loss:3.769013 +step:2744 train loss:3.798332 +step:2745 train loss:3.744211 +step:2746 train loss:3.758453 +step:2747 train loss:3.741835 +step:2748 train loss:3.701450 +step:2749 train loss:3.768193 +step:2750 validation loss:3.691197 +step:2750 train loss:3.773186 +step:2751 train loss:3.798369 +step:2752 train loss:3.787191 +step:2753 train loss:3.775368 +step:2754 train loss:3.714432 +step:2755 train loss:3.783488 +step:2756 train loss:3.754246 +step:2757 train loss:3.743689 +step:2758 train loss:3.776514 +step:2759 train loss:3.795617 +step:2760 train loss:3.703200 +step:2761 train loss:3.726564 +step:2762 train loss:3.738334 +step:2763 train loss:3.762484 +step:2764 train loss:3.723547 +step:2765 train loss:3.796199 +step:2766 train loss:3.869758 +step:2767 train loss:3.733461 +step:2768 train loss:3.788894 +step:2769 train loss:3.761238 +step:2770 train loss:3.776369 +step:2771 train loss:3.797994 +step:2772 train loss:3.760176 +step:2773 train loss:3.758984 +step:2774 train loss:3.751647 +step:2775 train loss:3.765526 +step:2776 train loss:3.713549 +step:2777 train loss:3.749769 +step:2778 train loss:3.756944 +step:2779 train loss:3.783849 +step:2780 train loss:3.751439 +step:2781 train loss:3.737003 +step:2782 train loss:3.723468 +step:2783 train loss:3.754022 +step:2784 train loss:3.759584 +step:2785 train loss:3.832597 +step:2786 train loss:3.796312 +step:2787 train loss:3.754689 +step:2788 train loss:3.753258 +step:2789 train loss:3.746740 +step:2790 train loss:3.685417 +step:2791 train loss:3.784801 +step:2792 train loss:3.775487 +step:2793 train loss:3.740225 +step:2794 train loss:3.747137 +step:2795 train loss:3.763881 +step:2796 train loss:3.753195 +step:2797 train loss:3.802605 +step:2798 train loss:3.787755 +step:2799 train loss:3.695783 +step:2800 train loss:3.741662 +step:2801 train loss:3.776289 +step:2802 train loss:3.804452 +step:2803 train loss:3.774647 +step:2804 train loss:3.709908 +step:2805 train loss:3.749604 +step:2806 train loss:3.741495 +step:2807 train loss:3.770933 +step:2808 train loss:3.710869 +step:2809 train loss:3.782601 +step:2810 train loss:3.768417 +step:2811 train loss:3.759260 +step:2812 train loss:3.809854 +step:2813 train loss:3.776477 +step:2814 train loss:3.765960 +step:2815 train loss:3.777215 +step:2816 train loss:3.778856 +step:2817 train loss:3.714954 +step:2818 train loss:3.822811 +step:2819 train loss:3.749934 +step:2820 train loss:3.747009 +step:2821 train loss:3.723006 +step:2822 train loss:3.767254 +step:2823 train loss:3.717340 +step:2824 train loss:3.619388 +step:2825 train loss:3.763949 +step:2826 train loss:3.760155 +step:2827 train loss:3.788130 +step:2828 train loss:3.788105 +step:2829 train loss:3.768103 +step:2830 train loss:3.800912 +step:2831 train loss:3.742067 +step:2832 train loss:3.710504 +step:2833 train loss:3.770369 +step:2834 train loss:3.724546 +step:2835 train loss:3.755430 +step:2836 train loss:3.760258 +step:2837 train loss:3.757505 +step:2838 train loss:3.697552 +step:2839 train loss:3.798636 +step:2840 train loss:3.760197 +step:2841 train loss:3.834626 +step:2842 train loss:3.779899 +step:2843 train loss:3.772292 +step:2844 train loss:3.811111 +step:2845 train loss:3.757094 +step:2846 train loss:3.706233 
+step:2847 train loss:3.798732 +step:2848 train loss:3.759917 +step:2849 train loss:3.748488 +step:2850 train loss:3.803766 +step:2851 train loss:3.755353 +step:2852 train loss:3.837361 +step:2853 train loss:3.750661 +step:2854 train loss:3.700233 +step:2855 train loss:3.773007 +step:2856 train loss:3.691720 +step:2857 train loss:3.797748 +step:2858 train loss:3.752372 +step:2859 train loss:3.760872 +step:2860 train loss:3.736597 +step:2861 train loss:3.721918 +step:2862 train loss:3.749475 +step:2863 train loss:3.729757 +step:2864 train loss:3.736794 +step:2865 train loss:3.811865 +step:2866 train loss:3.823756 +step:2867 train loss:3.766587 +step:2868 train loss:3.762970 +step:2869 train loss:3.725498 +step:2870 train loss:3.813488 +step:2871 train loss:3.808363 +step:2872 train loss:3.769295 +step:2873 train loss:3.774326 +step:2874 train loss:3.752560 +step:2875 train loss:3.706423 +step:2876 train loss:3.752033 +step:2877 train loss:3.734576 +step:2878 train loss:3.750329 +step:2879 train loss:3.713938 +step:2880 train loss:3.732128 +step:2881 train loss:3.727686 +step:2882 train loss:3.660528 +step:2883 train loss:3.742471 +step:2884 train loss:3.816883 +step:2885 train loss:3.704737 +step:2886 train loss:3.758286 +step:2887 train loss:3.780658 +step:2888 train loss:3.754347 +step:2889 train loss:3.734509 +step:2890 train loss:3.709141 +step:2891 train loss:3.752201 +step:2892 train loss:3.756588 +step:2893 train loss:3.736341 +step:2894 train loss:3.711153 +step:2895 train loss:3.759312 +step:2896 train loss:3.805278 +step:2897 train loss:3.786821 +step:2898 train loss:3.920753 +step:2899 train loss:3.679224 +step:2900 train loss:3.756091 +step:2901 train loss:3.702004 +step:2902 train loss:3.704818 +step:2903 train loss:3.717920 +step:2904 train loss:3.744771 +step:2905 train loss:3.804185 +step:2906 train loss:3.775239 +step:2907 train loss:3.949301 +step:2908 train loss:3.698893 +step:2909 train loss:3.775362 +step:2910 train loss:3.748904 +step:2911 train loss:3.776661 +step:2912 train loss:3.732714 +step:2913 train loss:3.767469 +step:2914 train loss:3.797557 +step:2915 train loss:3.789650 +step:2916 train loss:3.745308 +step:2917 train loss:3.783636 +step:2918 train loss:3.772396 +step:2919 train loss:3.715927 +step:2920 train loss:3.768401 +step:2921 train loss:3.724654 +step:2922 train loss:3.745795 +step:2923 train loss:3.817586 +step:2924 train loss:3.750874 +step:2925 train loss:3.705371 +step:2926 train loss:3.795578 +step:2927 train loss:3.700202 +step:2928 train loss:3.670428 +step:2929 train loss:3.687821 +step:2930 train loss:3.705981 +step:2931 train loss:3.862301 +step:2932 train loss:3.782402 +step:2933 train loss:3.742743 +step:2934 train loss:3.738018 +step:2935 train loss:3.762047 +step:2936 train loss:3.708948 +step:2937 train loss:3.732600 +step:2938 train loss:3.746894 +step:2939 train loss:3.820888 +step:2940 train loss:3.718052 +step:2941 train loss:3.755629 +step:2942 train loss:3.715119 +step:2943 train loss:3.994633 +step:2944 train loss:3.822630 +step:2945 train loss:3.781519 +step:2946 train loss:3.795018 +step:2947 train loss:3.750884 +step:2948 train loss:3.710036 +step:2949 train loss:3.797498 +step:2950 train loss:3.753990 +step:2951 train loss:3.653602 +step:2952 train loss:3.723125 +step:2953 train loss:3.634730 +step:2954 train loss:3.727562 +step:2955 train loss:3.802736 +step:2956 train loss:3.743139 +step:2957 train loss:3.745619 +step:2958 train loss:3.697738 +step:2959 train loss:3.722360 +step:2960 train loss:3.815761 +step:2961 train 
loss:3.680359 +step:2962 train loss:3.754432 +step:2963 train loss:3.748840 +step:2964 train loss:3.726785 +step:2965 train loss:3.756068 +step:2966 train loss:3.729369 +step:2967 train loss:3.725695 +step:2968 train loss:3.702082 +step:2969 train loss:3.711329 +step:2970 train loss:3.780901 +step:2971 train loss:3.707884 +step:2972 train loss:3.693325 +step:2973 train loss:3.690720 +step:2974 train loss:3.731543 +step:2975 train loss:3.693759 +step:2976 train loss:3.735854 +step:2977 train loss:3.726667 +step:2978 train loss:3.806363 +step:2979 train loss:3.790069 +step:2980 train loss:3.799280 +step:2981 train loss:3.752366 +step:2982 train loss:3.746073 +step:2983 train loss:3.692672 +step:2984 train loss:3.670705 +step:2985 train loss:3.781786 +step:2986 train loss:3.675281 +step:2987 train loss:3.806570 +step:2988 train loss:3.729596 +step:2989 train loss:3.760916 +step:2990 train loss:3.708835 +step:2991 train loss:3.779530 +step:2992 train loss:3.772256 +step:2993 train loss:3.741822 +step:2994 train loss:3.733938 +step:2995 train loss:3.799782 +step:2996 train loss:3.720032 +step:2997 train loss:3.631546 +step:2998 train loss:3.747021 +step:2999 train loss:3.787105 +step:3000 validation loss:3.668540 total_sharp:2.0051e-03 L1_sharp:2.4325e-03 L2_sharp:1.1283e-04 L3_sharp:2.8213e-04 L4_sharp:2.8610e-04 L5_sharp:1.9742e-04 L6_sharp:2.2764e-04 L7_sharp:4.0795e-04 L8_sharp:5.5368e-04 L9_sharp:5.1131e-04 L10_sharp:6.6423e-04 L11_sharp:5.9138e-04 L12_sharp:8.1848e-04 total_fnorm:4.4282e+00 total_l1_linf:3.9266e+04 total_spectral:4.4282e+00 L1_fnorm:1.0098e+00 L2_fnorm:9.6128e-01 L3_fnorm:9.9890e-01 L4_fnorm:1.0168e+00 L5_fnorm:1.0331e+00 L6_fnorm:1.0506e+00 L7_fnorm:1.0582e+00 L8_fnorm:1.0573e+00 L9_fnorm:1.0533e+00 L10_fnorm:1.0456e+00 L11_fnorm:1.0500e+00 L12_fnorm:1.0578e+00 L1_l1linf:1.3709e+00 L2_l1linf:1.0532e+00 L3_l1linf:1.0816e+00 L4_l1linf:1.0784e+00 L5_l1linf:1.0926e+00 L6_l1linf:1.1068e+00 L7_l1linf:1.0989e+00 L8_l1linf:1.0803e+00 L9_l1linf:1.1318e+00 L10_l1linf:1.1797e+00 L11_l1linf:1.2173e+00 L12_l1linf:1.2270e+00 L1_spectral:1.6701e-01 L2_spectral:1.5139e-01 L3_spectral:1.3882e-01 L4_spectral:1.1708e-01 L5_spectral:1.0748e-01 L6_spectral:1.1514e-01 L7_spectral:1.0699e-01 L8_spectral:9.4873e-02 L9_spectral:1.1781e-01 L10_spectral:1.5066e-01 L11_spectral:1.5809e-01 L12_spectral:1.6485e-01 ip_v_neg_g:2.2255e-02 cos_v_neg_g:7.7029e-03 v_norm:4.4282e+00 g_norm:6.5245e-01 hv_norm:3.4624e-01 cos_v_hv:2.5643e-02 hg_norm:2.2647e+00 cos_g_hg:5.3887e-01 v_par:1.5012e-03 v_perp:4.4282e+00 L1_cos_v_neg_g:2.0768e-02 L1_v_norm:1.0098e+00 L2_cos_v_neg_g:6.8360e-03 L2_v_norm:9.6128e-01 L3_cos_v_neg_g:7.0195e-03 L3_v_norm:9.9890e-01 L4_cos_v_neg_g:6.8821e-03 L4_v_norm:1.0168e+00 L5_cos_v_neg_g:5.9578e-03 L5_v_norm:1.0331e+00 L6_cos_v_neg_g:6.8180e-03 L6_v_norm:1.0506e+00 L7_cos_v_neg_g:9.1353e-03 L7_v_norm:1.0582e+00 L8_cos_v_neg_g:9.1964e-03 L8_v_norm:1.0573e+00 L9_cos_v_neg_g:8.7794e-03 L9_v_norm:1.0533e+00 L10_cos_v_neg_g:1.3099e-02 L10_v_norm:1.0456e+00 L11_cos_v_neg_g:1.3803e-02 L11_v_norm:1.0500e+00 L12_cos_v_neg_g:1.4635e-02 L12_v_norm:1.0578e+00 +step:3000 train loss:3.684688 +step:3001 train loss:3.736225 +step:3002 train loss:3.732785 +step:3003 train loss:3.728978 +step:3004 train loss:3.760609 +step:3005 train loss:3.652242 +step:3006 train loss:3.704745 +step:3007 train loss:3.737636 +step:3008 train loss:3.781231 +step:3009 train loss:3.738771 +step:3010 train loss:3.755431 +step:3011 train loss:3.743210 +step:3012 train loss:3.721557 +step:3013 train loss:3.764241 +step:3014 
train loss:3.718314 +step:3015 train loss:3.718340 +step:3016 train loss:3.738783 +step:3017 train loss:3.762557 +step:3018 train loss:3.692924 +step:3019 train loss:3.732679 +step:3020 train loss:3.752506 +step:3021 train loss:3.712363 +step:3022 train loss:3.804122 +step:3023 train loss:3.753959 +step:3024 train loss:3.735239 +step:3025 train loss:3.747505 +step:3026 train loss:3.719530 +step:3027 train loss:3.698536 +step:3028 train loss:3.745735 +step:3029 train loss:3.736582 +step:3030 train loss:3.709498 +step:3031 train loss:3.692462 +step:3032 train loss:3.684498 +step:3033 train loss:3.706924 +step:3034 train loss:3.756865 +step:3035 train loss:3.735022 +step:3036 train loss:3.696100 +step:3037 train loss:3.653850 +step:3038 train loss:3.768792 +step:3039 train loss:3.653589 +step:3040 train loss:3.636635 +step:3041 train loss:3.764705 +step:3042 train loss:3.697778 +step:3043 train loss:3.760864 +step:3044 train loss:3.656368 +step:3045 train loss:3.702244 +step:3046 train loss:3.677583 +step:3047 train loss:3.703098 +step:3048 train loss:3.677311 +step:3049 train loss:3.752636 +step:3050 train loss:3.641849 +step:3051 train loss:3.661516 +step:3052 train loss:3.683370 +step:3053 train loss:3.748107 +step:3054 train loss:3.822854 +step:3055 train loss:3.660724 +step:3056 train loss:3.691761 +step:3057 train loss:3.726821 +step:3058 train loss:3.676503 +step:3059 train loss:3.703698 +step:3060 train loss:3.702553 +step:3061 train loss:3.681645 +step:3062 train loss:3.738733 +step:3063 train loss:3.721337 +step:3064 train loss:3.748125 +step:3065 train loss:3.767713 +step:3066 train loss:3.663734 +step:3067 train loss:3.714716 +step:3068 train loss:3.763179 +step:3069 train loss:3.778750 +step:3070 train loss:3.707417 +step:3071 train loss:3.724552 +step:3072 train loss:3.722604 +step:3073 train loss:3.761214 +step:3074 train loss:3.695276 +step:3075 train loss:3.731094 +step:3076 train loss:3.666614 +step:3077 train loss:3.661222 +step:3078 train loss:3.695082 +step:3079 train loss:3.741855 +step:3080 train loss:3.732183 +step:3081 train loss:3.778097 +step:3082 train loss:3.754675 +step:3083 train loss:3.681276 +step:3084 train loss:3.768018 +step:3085 train loss:3.692836 +step:3086 train loss:3.754312 +step:3087 train loss:3.719570 +step:3088 train loss:3.798187 +step:3089 train loss:3.679066 +step:3090 train loss:3.749014 +step:3091 train loss:3.674490 +step:3092 train loss:3.693177 +step:3093 train loss:3.723276 +step:3094 train loss:3.707629 +step:3095 train loss:3.788403 +step:3096 train loss:3.719966 +step:3097 train loss:3.730385 +step:3098 train loss:3.705523 +step:3099 train loss:3.714908 +step:3100 train loss:3.744414 +step:3101 train loss:3.824995 +step:3102 train loss:3.752689 +step:3103 train loss:3.675946 +step:3104 train loss:3.758823 +step:3105 train loss:3.731665 +step:3106 train loss:3.725990 +step:3107 train loss:3.710931 +step:3108 train loss:3.680152 +step:3109 train loss:3.739164 +step:3110 train loss:3.665053 +step:3111 train loss:3.702322 +step:3112 train loss:3.638715 +step:3113 train loss:3.762357 +step:3114 train loss:3.673902 +step:3115 train loss:3.716278 +step:3116 train loss:3.598073 +step:3117 train loss:3.613400 +step:3118 train loss:3.719160 +step:3119 train loss:3.722899 +step:3120 train loss:3.723996 +step:3121 train loss:3.667291 +step:3122 train loss:3.751617 +step:3123 train loss:3.667334 +step:3124 train loss:3.729746 +step:3125 train loss:3.740661 +step:3126 train loss:3.847919 +step:3127 train loss:3.695866 +step:3128 train loss:3.722428 
+step:3129 train loss:3.706514 +step:3130 train loss:3.682477 +step:3131 train loss:3.761336 +step:3132 train loss:3.744859 +step:3133 train loss:3.718500 +step:3134 train loss:3.606030 +step:3135 train loss:3.705128 +step:3136 train loss:3.673053 +step:3137 train loss:3.809188 +step:3138 train loss:3.708908 +step:3139 train loss:3.689874 +step:3140 train loss:3.711730 +step:3141 train loss:3.713550 +step:3142 train loss:3.649421 +step:3143 train loss:3.735080 +step:3144 train loss:3.682734 +step:3145 train loss:3.667441 +step:3146 train loss:3.682724 +step:3147 train loss:3.790213 +step:3148 train loss:3.697186 +step:3149 train loss:3.748144 +step:3150 train loss:3.737797 +step:3151 train loss:3.703782 +step:3152 train loss:3.703407 +step:3153 train loss:3.659255 +step:3154 train loss:3.745219 +step:3155 train loss:3.687130 +step:3156 train loss:3.740318 +step:3157 train loss:3.744151 +step:3158 train loss:3.718119 +step:3159 train loss:3.652384 +step:3160 train loss:3.703038 +step:3161 train loss:3.673642 +step:3162 train loss:3.732596 +step:3163 train loss:3.717350 +step:3164 train loss:3.696943 +step:3165 train loss:3.715030 +step:3166 train loss:3.752641 +step:3167 train loss:3.713510 +step:3168 train loss:3.789955 +step:3169 train loss:3.707860 +step:3170 train loss:3.689786 +step:3171 train loss:3.677597 +step:3172 train loss:3.678571 +step:3173 train loss:3.627023 +step:3174 train loss:3.743692 +step:3175 train loss:3.710544 +step:3176 train loss:3.719851 +step:3177 train loss:3.690359 +step:3178 train loss:3.670381 +step:3179 train loss:3.741480 +step:3180 train loss:3.672525 +step:3181 train loss:3.751715 +step:3182 train loss:3.758191 +step:3183 train loss:3.696457 +step:3184 train loss:3.695303 +step:3185 train loss:3.756693 +step:3186 train loss:3.715261 +step:3187 train loss:3.731363 +step:3188 train loss:3.777007 +step:3189 train loss:3.720759 +step:3190 train loss:3.675142 +step:3191 train loss:3.676208 +step:3192 train loss:3.644395 +step:3193 train loss:3.719133 +step:3194 train loss:3.686900 +step:3195 train loss:3.669477 +step:3196 train loss:3.722896 +step:3197 train loss:3.681633 +step:3198 train loss:3.716953 +step:3199 train loss:3.695876 +step:3200 train loss:3.706873 +step:3201 train loss:3.663904 +step:3202 train loss:3.731375 +step:3203 train loss:3.792490 +step:3204 train loss:3.757786 +step:3205 train loss:3.603153 +step:3206 train loss:3.887117 +step:3207 train loss:3.640469 +step:3208 train loss:3.709274 +step:3209 train loss:3.701214 +step:3210 train loss:3.682167 +step:3211 train loss:3.709635 +step:3212 train loss:3.728603 +step:3213 train loss:3.661230 +step:3214 train loss:3.767266 +step:3215 train loss:3.770527 +step:3216 train loss:3.640593 +step:3217 train loss:3.722648 +step:3218 train loss:3.756859 +step:3219 train loss:3.676754 +step:3220 train loss:3.745056 +step:3221 train loss:3.658887 +step:3222 train loss:3.702148 +step:3223 train loss:3.719385 +step:3224 train loss:3.725562 +step:3225 train loss:3.654983 +step:3226 train loss:3.687263 +step:3227 train loss:3.713873 +step:3228 train loss:3.711929 +step:3229 train loss:3.743977 +step:3230 train loss:3.757891 +step:3231 train loss:3.695838 +step:3232 train loss:3.706062 +step:3233 train loss:3.679047 +step:3234 train loss:3.667103 +step:3235 train loss:3.672280 +step:3236 train loss:3.689978 +step:3237 train loss:3.691661 +step:3238 train loss:3.706566 +step:3239 train loss:3.612242 +step:3240 train loss:3.724658 +step:3241 train loss:3.719600 +step:3242 train loss:3.773257 +step:3243 train 
loss:3.715214 +step:3244 train loss:3.732705 +step:3245 train loss:3.636358 +step:3246 train loss:3.762748 +step:3247 train loss:3.711455 +step:3248 train loss:3.732054 +step:3249 train loss:3.672867 +step:3250 validation loss:3.638323 +step:3250 train loss:3.673402 +step:3251 train loss:3.784997 +step:3252 train loss:3.714414 +step:3253 train loss:3.711972 +step:3254 train loss:3.782088 +step:3255 train loss:3.722230 +step:3256 train loss:3.715183 +step:3257 train loss:3.697954 +step:3258 train loss:3.629118 +step:3259 train loss:3.608680 +step:3260 train loss:3.724487 +step:3261 train loss:3.705757 +step:3262 train loss:3.691922 +step:3263 train loss:3.680639 +step:3264 train loss:3.789008 +step:3265 train loss:3.699167 +step:3266 train loss:3.726923 +step:3267 train loss:3.690559 +step:3268 train loss:3.693978 +step:3269 train loss:3.706757 +step:3270 train loss:3.733748 +step:3271 train loss:3.695617 +step:3272 train loss:3.675302 +step:3273 train loss:3.690671 +step:3274 train loss:3.820112 +step:3275 train loss:3.694405 +step:3276 train loss:3.762608 +step:3277 train loss:3.699638 +step:3278 train loss:3.675062 +step:3279 train loss:3.702983 +step:3280 train loss:3.727779 +step:3281 train loss:3.651127 +step:3282 train loss:3.722839 +step:3283 train loss:3.694369 +step:3284 train loss:3.659581 +step:3285 train loss:3.675389 +step:3286 train loss:3.704250 +step:3287 train loss:3.640461 +step:3288 train loss:3.724492 +step:3289 train loss:3.666769 +step:3290 train loss:3.699893 +step:3291 train loss:3.656453 +step:3292 train loss:3.683909 +step:3293 train loss:3.723604 +step:3294 train loss:3.740298 +step:3295 train loss:3.649415 +step:3296 train loss:3.709444 +step:3297 train loss:3.664386 +step:3298 train loss:3.667410 +step:3299 train loss:3.793209 +step:3300 train loss:3.637066 +step:3301 train loss:3.716314 +step:3302 train loss:3.691488 +step:3303 train loss:3.724131 +step:3304 train loss:3.706917 +step:3305 train loss:3.793697 +step:3306 train loss:3.725975 +step:3307 train loss:3.735120 +step:3308 train loss:3.690619 +step:3309 train loss:3.746777 +step:3310 train loss:3.664619 +step:3311 train loss:3.714338 +step:3312 train loss:3.681714 +step:3313 train loss:3.711634 +step:3314 train loss:3.711380 +step:3315 train loss:3.785342 +step:3316 train loss:3.641778 +step:3317 train loss:3.729637 +step:3318 train loss:3.740234 +step:3319 train loss:3.664852 +step:3320 train loss:3.824583 +step:3321 train loss:3.725608 +step:3322 train loss:3.725793 +step:3323 train loss:3.827698 +step:3324 train loss:3.745958 +step:3325 train loss:3.716845 +step:3326 train loss:3.708735 +step:3327 train loss:3.723326 +step:3328 train loss:3.701497 +step:3329 train loss:3.700891 +step:3330 train loss:3.688601 +step:3331 train loss:3.735185 +step:3332 train loss:3.754723 +step:3333 train loss:3.725239 +step:3334 train loss:3.656662 +step:3335 train loss:3.671350 +step:3336 train loss:3.702983 +step:3337 train loss:3.701301 +step:3338 train loss:3.690427 +step:3339 train loss:3.684770 +step:3340 train loss:3.724399 +step:3341 train loss:3.673928 +step:3342 train loss:3.719555 +step:3343 train loss:3.657837 +step:3344 train loss:3.715901 +step:3345 train loss:3.665720 +step:3346 train loss:3.680920 +step:3347 train loss:3.687611 +step:3348 train loss:3.701500 +step:3349 train loss:3.690808 +step:3350 train loss:3.717237 +step:3351 train loss:3.773059 +step:3352 train loss:3.718419 +step:3353 train loss:3.811791 +step:3354 train loss:3.659084 +step:3355 train loss:3.764079 +step:3356 train loss:3.712549 
+step:3357 train loss:3.721970 +step:3358 train loss:3.664366 +step:3359 train loss:3.692554 +step:3360 train loss:3.689082 +step:3361 train loss:3.687484 +step:3362 train loss:3.680485 +step:3363 train loss:3.682981 +step:3364 train loss:3.659365 +step:3365 train loss:3.700947 +step:3366 train loss:3.729312 +step:3367 train loss:3.686465 +step:3368 train loss:3.782384 +step:3369 train loss:3.695537 +step:3370 train loss:3.767345 +step:3371 train loss:3.744901 +step:3372 train loss:3.714012 +step:3373 train loss:3.720561 +step:3374 train loss:3.772146 +step:3375 train loss:3.699845 +step:3376 train loss:3.713723 +step:3377 train loss:3.694242 +step:3378 train loss:3.670662 +step:3379 train loss:3.750409 +step:3380 train loss:3.726838 +step:3381 train loss:3.711390 +step:3382 train loss:3.728604 +step:3383 train loss:3.739252 +step:3384 train loss:3.666379 +step:3385 train loss:3.714484 +step:3386 train loss:3.696602 +step:3387 train loss:3.766893 +step:3388 train loss:3.670992 +step:3389 train loss:3.875991 +step:3390 train loss:3.635257 +step:3391 train loss:3.721266 +step:3392 train loss:3.699835 +step:3393 train loss:3.727369 +step:3394 train loss:3.676104 +step:3395 train loss:3.746477 +step:3396 train loss:3.656720 +step:3397 train loss:3.734443 +step:3398 train loss:3.698784 +step:3399 train loss:3.712828 +step:3400 train loss:3.662187 +step:3401 train loss:3.698009 +step:3402 train loss:3.851699 +step:3403 train loss:3.745639 +step:3404 train loss:3.865412 +step:3405 train loss:3.718676 +step:3406 train loss:3.687538 +step:3407 train loss:3.692098 +step:3408 train loss:3.669863 +step:3409 train loss:3.634048 +step:3410 train loss:3.666387 +step:3411 train loss:3.733218 +step:3412 train loss:3.657480 +step:3413 train loss:3.654394 +step:3414 train loss:3.689292 +step:3415 train loss:3.665052 +step:3416 train loss:3.670457 +step:3417 train loss:3.747336 +step:3418 train loss:3.748902 +step:3419 train loss:3.703788 +step:3420 train loss:3.681973 +step:3421 train loss:3.715717 +step:3422 train loss:3.729952 +step:3423 train loss:3.746828 +step:3424 train loss:3.627285 +step:3425 train loss:3.653016 +step:3426 train loss:3.645607 +step:3427 train loss:3.707747 +step:3428 train loss:3.629944 +step:3429 train loss:3.695726 +step:3430 train loss:3.664172 +step:3431 train loss:3.717809 +step:3432 train loss:3.699837 +step:3433 train loss:3.666999 +step:3434 train loss:3.749846 +step:3435 train loss:3.687953 +step:3436 train loss:3.781532 +step:3437 train loss:3.609272 +step:3438 train loss:3.717945 +step:3439 train loss:3.692027 +step:3440 train loss:3.782495 +step:3441 train loss:3.677486 +step:3442 train loss:3.747237 +step:3443 train loss:3.678356 +step:3444 train loss:3.699731 +step:3445 train loss:3.740642 +step:3446 train loss:3.647338 +step:3447 train loss:3.724333 +step:3448 train loss:3.676808 +step:3449 train loss:3.706767 +step:3450 train loss:3.624529 +step:3451 train loss:3.739991 +step:3452 train loss:3.686112 +step:3453 train loss:3.747797 +step:3454 train loss:3.767072 +step:3455 train loss:3.833379 +step:3456 train loss:3.777444 +step:3457 train loss:3.776739 +step:3458 train loss:3.689913 +step:3459 train loss:3.701929 +step:3460 train loss:3.652769 +step:3461 train loss:3.707408 +step:3462 train loss:3.709002 +step:3463 train loss:3.680771 +step:3464 train loss:3.735329 +step:3465 train loss:3.659486 +step:3466 train loss:3.729708 +step:3467 train loss:3.688765 +step:3468 train loss:3.701217 +step:3469 train loss:3.710925 +step:3470 train loss:3.690566 +step:3471 train 
loss:3.728427 +step:3472 train loss:3.615251 +step:3473 train loss:3.741561 +step:3474 train loss:3.633168 +step:3475 train loss:3.716216 +step:3476 train loss:3.686958 +step:3477 train loss:3.706132 +step:3478 train loss:3.678052 +step:3479 train loss:3.709215 +step:3480 train loss:3.728441 +step:3481 train loss:3.706347 +step:3482 train loss:3.691314 +step:3483 train loss:3.834442 +step:3484 train loss:3.676059 +step:3485 train loss:3.662976 +step:3486 train loss:3.716161 +step:3487 train loss:3.758963 +step:3488 train loss:3.661696 +step:3489 train loss:3.716781 +step:3490 train loss:3.680812 +step:3491 train loss:3.721099 +step:3492 train loss:3.754314 +step:3493 train loss:3.731060 +step:3494 train loss:3.724335 +step:3495 train loss:3.698909 +step:3496 train loss:3.665807 +step:3497 train loss:3.774007 +step:3498 train loss:3.721286 +step:3499 train loss:3.652880 +step:3500 validation loss:3.619022 total_sharp:1.5262e-03 L1_sharp:1.6362e-03 L2_sharp:7.5111e-05 L3_sharp:3.2039e-04 L4_sharp:2.6691e-04 L5_sharp:2.1603e-04 L6_sharp:1.8906e-04 L7_sharp:3.1039e-04 L8_sharp:3.6016e-04 L9_sharp:3.9026e-04 L10_sharp:5.5839e-04 L11_sharp:5.8754e-04 L12_sharp:8.7672e-04 total_fnorm:4.3586e+00 total_l1_linf:3.8609e+04 total_spectral:4.3586e+00 L1_fnorm:9.5468e-01 L2_fnorm:9.5183e-01 L3_fnorm:9.8018e-01 L4_fnorm:1.0049e+00 L5_fnorm:1.0346e+00 L6_fnorm:1.0424e+00 L7_fnorm:1.0312e+00 L8_fnorm:1.0340e+00 L9_fnorm:1.0264e+00 L10_fnorm:1.0018e+00 L11_fnorm:1.0111e+00 L12_fnorm:1.0032e+00 L1_l1linf:1.1156e+00 L2_l1linf:1.0246e+00 L3_l1linf:1.0475e+00 L4_l1linf:1.0696e+00 L5_l1linf:1.1074e+00 L6_l1linf:1.0925e+00 L7_l1linf:1.0445e+00 L8_l1linf:1.0536e+00 L9_l1linf:1.0898e+00 L10_l1linf:1.1054e+00 L11_l1linf:1.1773e+00 L12_l1linf:1.2005e+00 L1_spectral:1.4616e-01 L2_spectral:1.5124e-01 L3_spectral:1.3494e-01 L4_spectral:1.1404e-01 L5_spectral:1.0159e-01 L6_spectral:1.0013e-01 L7_spectral:9.0355e-02 L8_spectral:8.5545e-02 L9_spectral:1.0591e-01 L10_spectral:1.3289e-01 L11_spectral:1.5076e-01 L12_spectral:1.5775e-01 ip_v_neg_g:1.5122e-02 cos_v_neg_g:4.6239e-03 v_norm:4.3586e+00 g_norm:7.5035e-01 hv_norm:3.1902e-01 cos_v_hv:2.0851e-02 hg_norm:3.9021e+00 cos_g_hg:5.9070e-01 v_par:7.4810e-04 v_perp:4.3586e+00 L1_cos_v_neg_g:1.4106e-02 L1_v_norm:9.5468e-01 L2_cos_v_neg_g:3.6644e-03 L2_v_norm:9.5183e-01 L3_cos_v_neg_g:5.4444e-03 L3_v_norm:9.8018e-01 L4_cos_v_neg_g:5.3708e-03 L4_v_norm:1.0049e+00 L5_cos_v_neg_g:5.1356e-03 L5_v_norm:1.0346e+00 L6_cos_v_neg_g:4.9234e-03 L6_v_norm:1.0424e+00 L7_cos_v_neg_g:4.4943e-03 L7_v_norm:1.0312e+00 L8_cos_v_neg_g:4.9110e-03 L8_v_norm:1.0340e+00 L9_cos_v_neg_g:5.5227e-03 L9_v_norm:1.0264e+00 L10_cos_v_neg_g:6.6107e-03 L10_v_norm:1.0018e+00 L11_cos_v_neg_g:7.9646e-03 L11_v_norm:1.0111e+00 L12_cos_v_neg_g:7.0385e-03 L12_v_norm:1.0032e+00 +step:3500 train loss:3.672051 +step:3501 train loss:3.800007 +step:3502 train loss:3.780433 +step:3503 train loss:3.729891 +step:3504 train loss:3.681911 +step:3505 train loss:3.696249 +step:3506 train loss:3.593600 +step:3507 train loss:3.711874 +step:3508 train loss:3.655675 +step:3509 train loss:3.725298 +step:3510 train loss:3.656536 +step:3511 train loss:3.695416 +step:3512 train loss:3.831451 +step:3513 train loss:3.656080 +step:3514 train loss:3.672264 +step:3515 train loss:3.923875 +step:3516 train loss:3.716844 +step:3517 train loss:3.675705 +step:3518 train loss:3.679726 +step:3519 train loss:3.671597 +step:3520 train loss:3.703950 +step:3521 train loss:3.695817 +step:3522 train loss:3.604840 +step:3523 train loss:3.704588 +step:3524 
train loss:3.688505 +step:3525 train loss:3.678440 +step:3526 train loss:3.703204 +step:3527 train loss:3.650324 +step:3528 train loss:3.699471 +step:3529 train loss:3.679677 +step:3530 train loss:3.673329 +step:3531 train loss:3.665339 +step:3532 train loss:3.856032 +step:3533 train loss:3.672906 +step:3534 train loss:3.692637 +step:3535 train loss:3.665357 +step:3536 train loss:3.663806 +step:3537 train loss:3.676544 +step:3538 train loss:3.701824 +step:3539 train loss:3.653577 +step:3540 train loss:3.717116 +step:3541 train loss:3.687681 +step:3542 train loss:3.695953 +step:3543 train loss:3.619678 +step:3544 train loss:3.639006 +step:3545 train loss:3.641891 +step:3546 train loss:3.709731 +step:3547 train loss:3.716694 +step:3548 train loss:3.687743 +step:3549 train loss:3.685371 +step:3550 train loss:3.675466 +step:3551 train loss:3.702986 +step:3552 train loss:3.608584 +step:3553 train loss:3.720122 +step:3554 train loss:3.716663 +step:3555 train loss:3.698453 +step:3556 train loss:3.725474 +step:3557 train loss:3.710725 +step:3558 train loss:3.682725 +step:3559 train loss:3.633560 +step:3560 train loss:3.722389 +step:3561 train loss:3.717164 +step:3562 train loss:3.891045 +step:3563 train loss:3.750542 +step:3564 train loss:3.711638 +step:3565 train loss:3.707995 +step:3566 train loss:3.680865 +step:3567 train loss:3.620417 +step:3568 train loss:3.648307 +step:3569 train loss:3.731471 +step:3570 train loss:3.758325 +step:3571 train loss:3.733340 +step:3572 train loss:3.722731 +step:3573 train loss:3.682393 +step:3574 train loss:3.683478 +step:3575 train loss:3.672035 +step:3576 train loss:3.653303 +step:3577 train loss:3.663526 +step:3578 train loss:3.749566 +step:3579 train loss:3.658961 +step:3580 train loss:3.735875 +step:3581 train loss:3.677616 +step:3582 train loss:3.734988 +step:3583 train loss:3.671463 +step:3584 train loss:3.647473 +step:3585 train loss:3.695029 +step:3586 train loss:3.644769 +step:3587 train loss:3.738337 +step:3588 train loss:3.870874 +step:3589 train loss:3.699296 +step:3590 train loss:3.686943 +step:3591 train loss:3.697621 +step:3592 train loss:3.658190 +step:3593 train loss:3.627032 +step:3594 train loss:3.683861 +step:3595 train loss:3.656103 +step:3596 train loss:3.738154 +step:3597 train loss:3.706956 +step:3598 train loss:3.661882 +step:3599 train loss:3.711866 +step:3600 train loss:3.651399 +step:3601 train loss:3.670498 +step:3602 train loss:3.656863 +step:3603 train loss:3.674705 +step:3604 train loss:3.700376 +step:3605 train loss:3.803412 +step:3606 train loss:3.703987 +step:3607 train loss:3.689573 +step:3608 train loss:3.706019 +step:3609 train loss:3.689152 +step:3610 train loss:3.661494 +step:3611 train loss:3.660973 +step:3612 train loss:3.732719 +step:3613 train loss:3.697424 +step:3614 train loss:3.646132 +step:3615 train loss:3.683303 +step:3616 train loss:3.660002 +step:3617 train loss:3.730562 +step:3618 train loss:3.716456 +step:3619 train loss:3.698859 +step:3620 train loss:3.716898 +step:3621 train loss:3.662440 +step:3622 train loss:3.764740 +step:3623 train loss:3.759723 +step:3624 train loss:3.728698 +step:3625 train loss:3.705354 +step:3626 train loss:3.711299 +step:3627 train loss:3.701180 +step:3628 train loss:3.684590 +step:3629 train loss:3.686737 +step:3630 train loss:3.774060 +step:3631 train loss:3.696535 +step:3632 train loss:3.723751 +step:3633 train loss:3.682910 +step:3634 train loss:3.682395 +step:3635 train loss:3.669514 +step:3636 train loss:3.743211 +step:3637 train loss:3.818271 +step:3638 train loss:3.733244 
+step:3639 train loss:3.720130 +step:3640 train loss:3.728908 +step:3641 train loss:3.764320 +step:3642 train loss:3.658206 +step:3643 train loss:3.829346 +step:3644 train loss:3.717505 +step:3645 train loss:3.691574 +step:3646 train loss:3.812743 +step:3647 train loss:3.702645 +step:3648 train loss:3.692642 +step:3649 train loss:3.644266 +step:3650 train loss:3.683856 +step:3651 train loss:3.679756 +step:3652 train loss:3.666665 +step:3653 train loss:3.602872 +step:3654 train loss:3.665336 +step:3655 train loss:3.658171 +step:3656 train loss:3.687432 +step:3657 train loss:3.706866 +step:3658 train loss:3.697845 +step:3659 train loss:3.682644 +step:3660 train loss:3.657433 +step:3661 train loss:3.681232 +step:3662 train loss:3.653262 +step:3663 train loss:3.691350 +step:3664 train loss:3.647868 +step:3665 train loss:3.694756 +step:3666 train loss:3.727539 +step:3667 train loss:3.817293 +step:3668 train loss:3.701467 +step:3669 train loss:3.656938 +step:3670 train loss:3.705371 +step:3671 train loss:3.663069 +step:3672 train loss:3.700056 +step:3673 train loss:3.682298 +step:3674 train loss:3.698202 +step:3675 train loss:3.710794 +step:3676 train loss:3.672232 +step:3677 train loss:3.636890 +step:3678 train loss:3.693691 +step:3679 train loss:3.598941 +step:3680 train loss:3.701855 +step:3681 train loss:3.731151 +step:3682 train loss:3.713773 +step:3683 train loss:3.655541 +step:3684 train loss:3.656986 +step:3685 train loss:3.685049 +step:3686 train loss:3.714685 +step:3687 train loss:3.667180 +step:3688 train loss:3.644859 +step:3689 train loss:3.680295 +step:3690 train loss:3.668018 +step:3691 train loss:3.652918 +step:3692 train loss:3.710898 +step:3693 train loss:3.842391 +step:3694 train loss:3.661658 +step:3695 train loss:3.715115 +step:3696 train loss:3.679996 +step:3697 train loss:3.670435 +step:3698 train loss:3.608757 +step:3699 train loss:3.634889 +step:3700 train loss:3.665960 +step:3701 train loss:3.685241 +step:3702 train loss:3.706281 +step:3703 train loss:3.662107 +step:3704 train loss:3.705722 +step:3705 train loss:3.688161 +step:3706 train loss:3.640057 +step:3707 train loss:3.693623 +step:3708 train loss:3.670428 +step:3709 train loss:3.594606 +step:3710 train loss:3.715160 +step:3711 train loss:3.664707 +step:3712 train loss:3.704885 +step:3713 train loss:3.656394 +step:3714 train loss:3.670092 +step:3715 train loss:3.788695 +step:3716 train loss:3.696183 +step:3717 train loss:3.671891 +step:3718 train loss:3.675742 +step:3719 train loss:3.673175 +step:3720 train loss:3.682111 +step:3721 train loss:3.738564 +step:3722 train loss:3.752690 +step:3723 train loss:3.638592 +step:3724 train loss:3.697447 +step:3725 train loss:3.674883 +step:3726 train loss:3.696077 +step:3727 train loss:3.768730 +step:3728 train loss:3.735576 +step:3729 train loss:3.634326 +step:3730 train loss:3.651865 +step:3731 train loss:3.672194 +step:3732 train loss:3.826079 +step:3733 train loss:3.683019 +step:3734 train loss:3.684824 +step:3735 train loss:3.623857 +step:3736 train loss:3.683383 +step:3737 train loss:3.731854 +step:3738 train loss:3.754970 +step:3739 train loss:3.671389 +step:3740 train loss:3.575178 +step:3741 train loss:3.781195 +step:3742 train loss:3.697057 +step:3743 train loss:3.671079 +step:3744 train loss:3.689317 +step:3745 train loss:3.702186 +step:3746 train loss:3.671410 +step:3747 train loss:3.684102 +step:3748 train loss:3.716848 +step:3749 train loss:3.703551 +step:3750 validation loss:3.616493 +step:3750 train loss:3.709194 +step:3751 train loss:3.798323 +step:3752 
train loss:3.730880 +step:3753 train loss:3.653542 +step:3754 train loss:3.700192 +step:3755 train loss:3.881994 +step:3756 train loss:3.656516 +step:3757 train loss:3.653248 +step:3758 train loss:3.683853 +step:3759 train loss:3.626934 +step:3760 train loss:3.623350 +step:3761 train loss:3.677915 +step:3762 train loss:3.667723 +step:3763 train loss:3.670281 +step:3764 train loss:3.661380 +step:3765 train loss:3.659715 +step:3766 train loss:3.625804 +step:3767 train loss:3.711287 +step:3768 train loss:3.649611 +step:3769 train loss:3.924126 +step:3770 train loss:3.706775 +step:3771 train loss:3.718205 +step:3772 train loss:3.676563 +step:3773 train loss:3.666020 +step:3774 train loss:3.672669 +step:3775 train loss:3.668661 +step:3776 train loss:3.666111 +step:3777 train loss:3.626119 +step:3778 train loss:3.644919 +step:3779 train loss:3.630358 +step:3780 train loss:3.711904 +step:3781 train loss:3.674792 +step:3782 train loss:3.593607 +step:3783 train loss:3.701109 +step:3784 train loss:3.710853 +step:3785 train loss:3.621089 +step:3786 train loss:3.728493 +step:3787 train loss:3.637448 +step:3788 train loss:3.651754 +step:3789 train loss:3.563203 +step:3790 train loss:3.678483 +step:3791 train loss:3.698186 +step:3792 train loss:3.672116 +step:3793 train loss:3.670143 +step:3794 train loss:3.696429 +step:3795 train loss:3.664758 +step:3796 train loss:3.684103 +step:3797 train loss:3.658984 +step:3798 train loss:3.666768 +step:3799 train loss:3.678463 +step:3800 train loss:3.585103 +step:3801 train loss:3.699377 +step:3802 train loss:3.624549 +step:3803 train loss:3.706579 +step:3804 train loss:3.719424 +step:3805 train loss:3.679042 +step:3806 train loss:3.695035 +step:3807 train loss:3.715413 +step:3808 train loss:3.674023 +step:3809 train loss:3.685714 +step:3810 train loss:3.688463 +step:3811 train loss:3.672576 +step:3812 train loss:3.673648 +step:3813 train loss:3.630441 +step:3814 train loss:3.671257 +step:3815 train loss:3.673043 +step:3816 train loss:3.691542 +step:3817 train loss:3.712131 +step:3818 train loss:3.682244 +step:3819 train loss:3.696477 +step:3820 train loss:3.692636 +step:3821 train loss:3.649181 +step:3822 train loss:3.739284 +step:3823 train loss:3.626484 +step:3824 train loss:3.644937 +step:3825 train loss:3.650058 +step:3826 train loss:3.726163 +step:3827 train loss:3.742244 +step:3828 train loss:3.631375 +step:3829 train loss:3.652335 +step:3830 train loss:3.715353 +step:3831 train loss:3.647538 +step:3832 train loss:3.706204 +step:3833 train loss:3.647490 +step:3834 train loss:3.609862 +step:3835 train loss:3.655909 +step:3836 train loss:3.629041 +step:3837 train loss:3.696492 +step:3838 train loss:3.652345 +step:3839 train loss:3.693282 +step:3840 train loss:3.707197 +step:3841 train loss:3.654230 +step:3842 train loss:3.689357 +step:3843 train loss:3.701585 +step:3844 train loss:3.674303 +step:3845 train loss:3.695695 +step:3846 train loss:3.737623 +step:3847 train loss:3.633202 +step:3848 train loss:3.638877 +step:3849 train loss:3.671199 +step:3850 train loss:3.678054 +step:3851 train loss:3.817197 +step:3852 train loss:3.794422 +step:3853 train loss:3.694508 +step:3854 train loss:3.666985 +step:3855 train loss:3.711346 +step:3856 train loss:3.634788 +step:3857 train loss:3.692379 +step:3858 train loss:3.607606 +step:3859 train loss:3.661432 +step:3860 train loss:3.729322 +step:3861 train loss:3.703133 +step:3862 train loss:3.638327 +step:3863 train loss:3.684099 +step:3864 train loss:3.658211 +step:3865 train loss:3.690904 +step:3866 train loss:3.708568 
+step:3867 train loss:3.707672 +step:3868 train loss:3.654834 +step:3869 train loss:3.660665 +step:3870 train loss:3.632914 +step:3871 train loss:3.646225 +step:3872 train loss:3.762782 +step:3873 train loss:3.688355 +step:3874 train loss:3.701232 +step:3875 train loss:3.814955 +step:3876 train loss:3.684625 +step:3877 train loss:3.708659 +step:3878 train loss:3.737457 +step:3879 train loss:3.723884 +step:3880 train loss:3.804247 +step:3881 train loss:3.623794 +step:3882 train loss:3.659905 +step:3883 train loss:3.671664 +step:3884 train loss:3.659670 +step:3885 train loss:3.674555 +step:3886 train loss:3.733719 +step:3887 train loss:3.720364 +step:3888 train loss:3.677617 +step:3889 train loss:3.647021 +step:3890 train loss:3.685284 +step:3891 train loss:3.699868 +step:3892 train loss:3.606132 +step:3893 train loss:3.712384 +step:3894 train loss:3.659921 +step:3895 train loss:3.681465 +step:3896 train loss:3.674065 +step:3897 train loss:3.638218 +step:3898 train loss:3.699534 +step:3899 train loss:3.736388 +step:3900 train loss:3.691251 +step:3901 train loss:3.709502 +step:3902 train loss:3.631629 +step:3903 train loss:3.650104 +step:3904 train loss:3.682394 +step:3905 train loss:3.618037 +step:3906 train loss:3.652334 +step:3907 train loss:3.688967 +step:3908 train loss:3.764331 +step:3909 train loss:3.658041 +step:3910 train loss:3.683422 +step:3911 train loss:3.700576 +step:3912 train loss:3.645953 +step:3913 train loss:3.663723 +step:3914 train loss:3.684656 +step:3915 train loss:3.654317 +step:3916 train loss:3.689131 +step:3917 train loss:3.734410 +step:3918 train loss:3.709278 +step:3919 train loss:3.682375 +step:3920 train loss:3.661356 +step:3921 train loss:3.703532 +step:3922 train loss:3.705817 +step:3923 train loss:3.693795 +step:3924 train loss:3.631351 +step:3925 train loss:3.834604 +step:3926 train loss:3.675291 +step:3927 train loss:3.653636 +step:3928 train loss:3.734787 +step:3929 train loss:3.796382 +step:3930 train loss:3.694757 +step:3931 train loss:3.634644 +step:3932 train loss:3.682528 +step:3933 train loss:3.702663 +step:3934 train loss:3.650539 +step:3935 train loss:3.627220 +step:3936 train loss:3.718449 +step:3937 train loss:3.678572 +step:3938 train loss:3.689037 +step:3939 train loss:3.715847 +step:3940 train loss:3.662167 +step:3941 train loss:3.747686 +step:3942 train loss:3.708173 +step:3943 train loss:3.692319 +step:3944 train loss:3.742824 +step:3945 train loss:3.652222 +step:3946 train loss:3.596364 +step:3947 train loss:3.724964 +step:3948 train loss:3.697286 +step:3949 train loss:3.857048 +step:3950 train loss:3.660838 +step:3951 train loss:3.598954 +step:3952 train loss:3.546775 +step:3953 train loss:3.626689 +step:3954 train loss:3.679369 +step:3955 train loss:3.705643 +step:3956 train loss:3.665651 +step:3957 train loss:3.712232 +step:3958 train loss:3.691711 +step:3959 train loss:3.728439 +step:3960 train loss:3.655017 +step:3961 train loss:3.677486 +step:3962 train loss:3.682669 +step:3963 train loss:3.658979 +step:3964 train loss:3.637472 +step:3965 train loss:3.698831 +step:3966 train loss:3.650339 +step:3967 train loss:3.693461 +step:3968 train loss:3.712789 +step:3969 train loss:3.621711 +step:3970 train loss:3.731170 +step:3971 train loss:3.652071 +step:3972 train loss:3.686576 +step:3973 train loss:3.640941 +step:3974 train loss:3.738923 +step:3975 train loss:3.687322 +step:3976 train loss:3.644991 +step:3977 train loss:3.698634 +step:3978 train loss:3.662541 +step:3979 train loss:3.649400 +step:3980 train loss:3.718873 +step:3981 train 
loss:3.651201 +step:3982 train loss:3.673160 +step:3983 train loss:3.653209 +step:3984 train loss:3.688027 +step:3985 train loss:3.665101 +step:3986 train loss:3.676557 +step:3987 train loss:3.686170 +step:3988 train loss:3.625325 +step:3989 train loss:3.697933 +step:3990 train loss:3.693226 +step:3991 train loss:3.707170 +step:3992 train loss:3.661638 +step:3993 train loss:3.694199 +step:3994 train loss:3.646515 +step:3995 train loss:3.698371 +step:3996 train loss:3.617924 +step:3997 train loss:3.692981 +step:3998 train loss:3.577945 +step:3999 train loss:3.731743 +step:4000 validation loss:3.589269 total_sharp:1.3430e-03 L1_sharp:1.7893e-03 L2_sharp:9.3329e-05 L3_sharp:2.9981e-04 L4_sharp:1.7552e-04 L5_sharp:2.2430e-04 L6_sharp:2.1615e-04 L7_sharp:2.8690e-04 L8_sharp:4.5234e-04 L9_sharp:4.3720e-04 L10_sharp:5.9455e-04 L11_sharp:4.4714e-04 L12_sharp:5.9188e-04 total_fnorm:4.3434e+00 total_l1_linf:3.8411e+04 total_spectral:4.3434e+00 L1_fnorm:8.9853e-01 L2_fnorm:9.5748e-01 L3_fnorm:9.7124e-01 L4_fnorm:9.8992e-01 L5_fnorm:1.0137e+00 L6_fnorm:1.0244e+00 L7_fnorm:1.0235e+00 L8_fnorm:1.0303e+00 L9_fnorm:1.0324e+00 L10_fnorm:1.0125e+00 L11_fnorm:1.0221e+00 L12_fnorm:1.0061e+00 L1_l1linf:1.0683e+00 L2_l1linf:1.0203e+00 L3_l1linf:1.0278e+00 L4_l1linf:1.0666e+00 L5_l1linf:1.0555e+00 L6_l1linf:1.0599e+00 L7_l1linf:1.0405e+00 L8_l1linf:1.0386e+00 L9_l1linf:1.0724e+00 L10_l1linf:1.0577e+00 L11_l1linf:1.1405e+00 L12_l1linf:1.0951e+00 L1_spectral:1.6172e-01 L2_spectral:1.4308e-01 L3_spectral:1.3316e-01 L4_spectral:1.1317e-01 L5_spectral:1.0494e-01 L6_spectral:9.6634e-02 L7_spectral:8.4635e-02 L8_spectral:8.3703e-02 L9_spectral:1.0363e-01 L10_spectral:1.3206e-01 L11_spectral:1.3929e-01 L12_spectral:1.4168e-01 ip_v_neg_g:1.2788e-02 cos_v_neg_g:4.3995e-03 v_norm:4.3434e+00 g_norm:6.6922e-01 hv_norm:3.1616e-01 cos_v_hv:1.8450e-02 hg_norm:3.1741e+00 cos_g_hg:5.3508e-01 v_par:8.6731e-04 v_perp:4.3434e+00 L1_cos_v_neg_g:1.1253e-02 L1_v_norm:8.9853e-01 L2_cos_v_neg_g:2.8627e-03 L2_v_norm:9.5748e-01 L3_cos_v_neg_g:4.4304e-03 L3_v_norm:9.7124e-01 L4_cos_v_neg_g:3.4930e-03 L4_v_norm:9.8992e-01 L5_cos_v_neg_g:3.7757e-03 L5_v_norm:1.0137e+00 L6_cos_v_neg_g:4.5153e-03 L6_v_norm:1.0244e+00 L7_cos_v_neg_g:4.4541e-03 L7_v_norm:1.0235e+00 L8_cos_v_neg_g:4.8695e-03 L8_v_norm:1.0303e+00 L9_cos_v_neg_g:5.6627e-03 L9_v_norm:1.0324e+00 L10_cos_v_neg_g:8.3249e-03 L10_v_norm:1.0125e+00 L11_cos_v_neg_g:9.2152e-03 L11_v_norm:1.0221e+00 L12_cos_v_neg_g:8.0914e-03 L12_v_norm:1.0061e+00 +step:4000 train loss:3.610244 +step:4001 train loss:3.686248 +step:4002 train loss:3.665486 +step:4003 train loss:3.702528 +step:4004 train loss:3.607628 +step:4005 train loss:3.703391 +step:4006 train loss:3.714213 +step:4007 train loss:3.632793 +step:4008 train loss:3.592972 +step:4009 train loss:3.678185 +step:4010 train loss:3.649840 +step:4011 train loss:3.660123 +step:4012 train loss:3.671771 +step:4013 train loss:3.646168 +step:4014 train loss:3.662739 +step:4015 train loss:3.652327 +step:4016 train loss:3.660968 +step:4017 train loss:3.626906 +step:4018 train loss:3.568017 +step:4019 train loss:3.620743 +step:4020 train loss:3.688789 +step:4021 train loss:3.636717 +step:4022 train loss:3.638890 +step:4023 train loss:3.648871 +step:4024 train loss:3.568860 +step:4025 train loss:3.687174 +step:4026 train loss:3.676133 +step:4027 train loss:3.683556 +step:4028 train loss:3.699229 +step:4029 train loss:3.731103 +step:4030 train loss:3.647170 +step:4031 train loss:3.686775 +step:4032 train loss:3.646423 +step:4033 train loss:3.679284 +step:4034 
train loss:3.695743 +step:4035 train loss:3.672220 +step:4036 train loss:3.668653 +step:4037 train loss:3.688660 +step:4038 train loss:3.603342 +step:4039 train loss:3.662004 +step:4040 train loss:3.638399 +step:4041 train loss:3.632019 +step:4042 train loss:3.655657 +step:4043 train loss:3.636992 +step:4044 train loss:3.671056 +step:4045 train loss:3.676240 +step:4046 train loss:3.632323 +step:4047 train loss:3.663111 +step:4048 train loss:3.672161 +step:4049 train loss:3.635120 +step:4050 train loss:3.737753 +step:4051 train loss:3.648859 +step:4052 train loss:3.670838 +step:4053 train loss:3.720049 +step:4054 train loss:3.691283 +step:4055 train loss:3.706340 +step:4056 train loss:3.703279 +step:4057 train loss:3.641043 +step:4058 train loss:3.626563 +step:4059 train loss:3.703696 +step:4060 train loss:3.646416 +step:4061 train loss:3.617749 +step:4062 train loss:3.731018 +step:4063 train loss:3.681926 +step:4064 train loss:3.650721 +step:4065 train loss:3.634625 +step:4066 train loss:3.662945 +step:4067 train loss:3.688329 +step:4068 train loss:3.655811 +step:4069 train loss:3.711942 +step:4070 train loss:3.628720 +step:4071 train loss:3.600692 +step:4072 train loss:3.673872 +step:4073 train loss:3.611488 +step:4074 train loss:3.666423 +step:4075 train loss:3.732344 +step:4076 train loss:3.588625 +step:4077 train loss:3.666290 +step:4078 train loss:3.770757 +step:4079 train loss:3.714764 +step:4080 train loss:3.655926 +step:4081 train loss:3.623447 +step:4082 train loss:3.676719 +step:4083 train loss:3.614023 +step:4084 train loss:3.629261 +step:4085 train loss:3.867490 +step:4086 train loss:3.634434 +step:4087 train loss:3.680432 +step:4088 train loss:3.665234 +step:4089 train loss:3.656703 +step:4090 train loss:3.676728 +step:4091 train loss:3.699256 +step:4092 train loss:3.618520 +step:4093 train loss:3.649112 +step:4094 train loss:3.669782 +step:4095 train loss:3.622780 +step:4096 train loss:3.655597 +step:4097 train loss:3.659461 +step:4098 train loss:3.634656 +step:4099 train loss:3.636868 +step:4100 train loss:3.686517 +step:4101 train loss:3.610228 +step:4102 train loss:3.648073 +step:4103 train loss:3.855393 +step:4104 train loss:3.663557 +step:4105 train loss:3.630980 +step:4106 train loss:3.701612 +step:4107 train loss:3.620859 +step:4108 train loss:3.630042 +step:4109 train loss:3.681467 +step:4110 train loss:3.692717 +step:4111 train loss:3.663909 +step:4112 train loss:3.684945 +step:4113 train loss:3.642958 +step:4114 train loss:3.589990 +step:4115 train loss:3.628239 +step:4116 train loss:3.611471 +step:4117 train loss:3.630637 +step:4118 train loss:3.689090 +step:4119 train loss:3.709761 +step:4120 train loss:3.632522 +step:4121 train loss:3.624248 +step:4122 train loss:3.694246 +step:4123 train loss:3.708325 +step:4124 train loss:3.682700 +step:4125 train loss:3.721930 +step:4126 train loss:3.657536 +step:4127 train loss:3.676804 +step:4128 train loss:3.664904 +step:4129 train loss:3.710785 +step:4130 train loss:3.641890 +step:4131 train loss:3.678714 +step:4132 train loss:3.691411 +step:4133 train loss:3.643280 +step:4134 train loss:3.697119 +step:4135 train loss:3.631662 +step:4136 train loss:3.652706 +step:4137 train loss:3.623143 +step:4138 train loss:3.633626 +step:4139 train loss:3.677252 +step:4140 train loss:3.636310 +step:4141 train loss:3.599562 +step:4142 train loss:3.644853 +step:4143 train loss:3.681849 +step:4144 train loss:3.633135 +step:4145 train loss:3.599905 +step:4146 train loss:3.666721 +step:4147 train loss:3.642121 +step:4148 train loss:3.635516 
+step:4149 train loss:3.713636 +step:4150 train loss:3.682580 +step:4151 train loss:3.663361 +step:4152 train loss:3.682234 +step:4153 train loss:3.691101 +step:4154 train loss:3.698739 +step:4155 train loss:3.721577 +step:4156 train loss:3.598892 +step:4157 train loss:3.621979 +step:4158 train loss:3.674106 +step:4159 train loss:3.576563 +step:4160 train loss:3.669806 +step:4161 train loss:3.668581 +step:4162 train loss:3.577958 +step:4163 train loss:3.659352 +step:4164 train loss:3.607136 +step:4165 train loss:3.607332 +step:4166 train loss:3.676158 +step:4167 train loss:3.668466 +step:4168 train loss:3.661967 +step:4169 train loss:3.691582 +step:4170 train loss:3.814410 +step:4171 train loss:3.665197 +step:4172 train loss:3.683256 +step:4173 train loss:3.678135 +step:4174 train loss:3.641319 +step:4175 train loss:3.731349 +step:4176 train loss:3.654701 +step:4177 train loss:3.678427 +step:4178 train loss:3.653957 +step:4179 train loss:3.608816 +step:4180 train loss:3.610443 +step:4181 train loss:3.656996 +step:4182 train loss:3.641754 +step:4183 train loss:3.581048 +step:4184 train loss:3.653358 +step:4185 train loss:3.718284 +step:4186 train loss:3.695641 +step:4187 train loss:3.702811 +step:4188 train loss:3.676730 +step:4189 train loss:3.636712 +step:4190 train loss:3.678783 +step:4191 train loss:3.626849 +step:4192 train loss:3.714225 +step:4193 train loss:3.620768 +step:4194 train loss:3.605581 +step:4195 train loss:3.603963 +step:4196 train loss:3.670580 +step:4197 train loss:3.686488 +step:4198 train loss:3.608366 +step:4199 train loss:3.687452 +step:4200 train loss:3.650696 +step:4201 train loss:3.632860 +step:4202 train loss:3.645749 +step:4203 train loss:3.654839 +step:4204 train loss:3.650067 +step:4205 train loss:3.665069 +step:4206 train loss:3.679668 +step:4207 train loss:3.682264 +step:4208 train loss:3.645585 +step:4209 train loss:3.710846 +step:4210 train loss:3.738376 +step:4211 train loss:3.620053 +step:4212 train loss:3.663198 +step:4213 train loss:3.613770 +step:4214 train loss:3.623909 +step:4215 train loss:3.639712 +step:4216 train loss:3.611076 +step:4217 train loss:3.634277 +step:4218 train loss:3.672680 +step:4219 train loss:3.684138 +step:4220 train loss:3.756339 +step:4221 train loss:3.647888 +step:4222 train loss:3.707308 +step:4223 train loss:3.626230 +step:4224 train loss:3.698647 +step:4225 train loss:3.623620 +step:4226 train loss:3.680851 +step:4227 train loss:3.656892 +step:4228 train loss:3.631010 +step:4229 train loss:3.641302 +step:4230 train loss:3.622075 +step:4231 train loss:3.612192 +step:4232 train loss:3.663954 +step:4233 train loss:3.570257 +step:4234 train loss:3.652869 +step:4235 train loss:3.729438 +step:4236 train loss:3.698385 +step:4237 train loss:3.679219 +step:4238 train loss:3.688558 +step:4239 train loss:3.740225 +step:4240 train loss:3.648497 +step:4241 train loss:3.573372 +step:4242 train loss:3.693213 +step:4243 train loss:3.694073 +step:4244 train loss:3.708576 +step:4245 train loss:3.765514 +step:4246 train loss:3.635029 +step:4247 train loss:3.695266 +step:4248 train loss:3.643689 +step:4249 train loss:3.652077 +step:4250 validation loss:3.574407 +step:4250 train loss:3.630343 +step:4251 train loss:3.724249 +step:4252 train loss:3.634104 +step:4253 train loss:3.632241 +step:4254 train loss:3.635903 +step:4255 train loss:3.619985 +step:4256 train loss:3.639302 +step:4257 train loss:3.693661 +step:4258 train loss:3.553479 +step:4259 train loss:3.620550 +step:4260 train loss:3.684190 +step:4261 train loss:3.670346 +step:4262 
train loss:3.809385 +step:4263 train loss:3.750737 +step:4264 train loss:3.693094 +step:4265 train loss:3.685828 +step:4266 train loss:3.674881 +step:4267 train loss:3.677858 +step:4268 train loss:3.621950 +step:4269 train loss:3.715053 +step:4270 train loss:3.694993 +step:4271 train loss:3.610575 +step:4272 train loss:3.663022 +step:4273 train loss:3.637268 +step:4274 train loss:3.625955 +step:4275 train loss:3.647511 +step:4276 train loss:3.608842 +step:4277 train loss:3.748551 +step:4278 train loss:3.596753 +step:4279 train loss:3.623816 +step:4280 train loss:3.707402 +step:4281 train loss:3.691500 +step:4282 train loss:3.755959 +step:4283 train loss:3.613233 +step:4284 train loss:3.638767 +step:4285 train loss:3.643171 +step:4286 train loss:3.708362 +step:4287 train loss:3.704295 +step:4288 train loss:3.687874 +step:4289 train loss:3.639798 +step:4290 train loss:3.647265 +step:4291 train loss:3.605994 +step:4292 train loss:3.651914 +step:4293 train loss:3.663567 +step:4294 train loss:3.647828 +step:4295 train loss:3.583106 +step:4296 train loss:3.654990 +step:4297 train loss:3.634085 +step:4298 train loss:3.646712 +step:4299 train loss:3.645715 +step:4300 train loss:3.766103 +step:4301 train loss:3.575929 +step:4302 train loss:3.719759 +step:4303 train loss:3.600533 +step:4304 train loss:3.607862 +step:4305 train loss:3.624972 +step:4306 train loss:3.699293 +step:4307 train loss:3.614509 +step:4308 train loss:3.614343 +step:4309 train loss:3.680867 +step:4310 train loss:3.620410 +step:4311 train loss:3.675070 +step:4312 train loss:3.667594 +step:4313 train loss:3.657897 +step:4314 train loss:3.608588 +step:4315 train loss:3.639368 +step:4316 train loss:3.585103 +step:4317 train loss:3.641572 +step:4318 train loss:3.682122 +step:4319 train loss:3.629490 +step:4320 train loss:3.693280 +step:4321 train loss:3.674543 +step:4322 train loss:3.627957 +step:4323 train loss:3.570324 +step:4324 train loss:3.661179 +step:4325 train loss:3.637326 +step:4326 train loss:3.635644 +step:4327 train loss:3.736380 +step:4328 train loss:3.649401 +step:4329 train loss:3.605259 +step:4330 train loss:3.651309 +step:4331 train loss:3.660551 +step:4332 train loss:3.694973 +step:4333 train loss:3.649843 +step:4334 train loss:3.666793 +step:4335 train loss:3.666605 +step:4336 train loss:3.678391 +step:4337 train loss:3.639956 +step:4338 train loss:3.762044 +step:4339 train loss:3.671554 +step:4340 train loss:3.679470 +step:4341 train loss:3.645743 +step:4342 train loss:3.660113 +step:4343 train loss:3.777481 +step:4344 train loss:3.669051 +step:4345 train loss:3.684739 +step:4346 train loss:3.697597 +step:4347 train loss:3.704981 +step:4348 train loss:3.617487 +step:4349 train loss:3.701044 +step:4350 train loss:3.641572 +step:4351 train loss:3.598092 +step:4352 train loss:3.674896 +step:4353 train loss:3.622077 +step:4354 train loss:3.678673 +step:4355 train loss:3.633092 +step:4356 train loss:3.661875 +step:4357 train loss:3.640885 +step:4358 train loss:3.733510 +step:4359 train loss:3.680643 +step:4360 train loss:3.600967 +step:4361 train loss:3.648364 +step:4362 train loss:3.666181 +step:4363 train loss:3.686144 +step:4364 train loss:3.650151 +step:4365 train loss:3.633274 +step:4366 train loss:3.678411 +step:4367 train loss:3.693448 +step:4368 train loss:3.671516 +step:4369 train loss:3.538267 +step:4370 train loss:3.671041 +step:4371 train loss:3.577550 +step:4372 train loss:3.729748 +step:4373 train loss:3.665161 +step:4374 train loss:3.636558 +step:4375 train loss:3.680026 +step:4376 train loss:3.690203 
+step:4377 train loss:3.625886 +step:4378 train loss:3.635835 +step:4379 train loss:3.716155 +step:4380 train loss:3.698394 +step:4381 train loss:3.599990 +step:4382 train loss:3.642998 +step:4383 train loss:3.672776 +step:4384 train loss:3.667090 +step:4385 train loss:3.593500 +step:4386 train loss:3.650914 +step:4387 train loss:3.620243 +step:4388 train loss:3.641953 +step:4389 train loss:3.666658 +step:4390 train loss:3.715552 +step:4391 train loss:3.640827 +step:4392 train loss:3.721564 +step:4393 train loss:3.674410 +step:4394 train loss:3.616151 +step:4395 train loss:3.668114 +step:4396 train loss:3.644195 +step:4397 train loss:3.683792 +step:4398 train loss:3.628778 +step:4399 train loss:3.623545 +step:4400 train loss:3.628403 +step:4401 train loss:3.688901 +step:4402 train loss:3.684728 +step:4403 train loss:3.637753 +step:4404 train loss:3.666765 +step:4405 train loss:3.588774 +step:4406 train loss:3.664985 +step:4407 train loss:3.602582 +step:4408 train loss:3.694589 +step:4409 train loss:3.652967 +step:4410 train loss:3.661391 +step:4411 train loss:3.617445 +step:4412 train loss:3.734349 +step:4413 train loss:3.631608 +step:4414 train loss:3.638837 +step:4415 train loss:3.621262 +step:4416 train loss:3.616042 +step:4417 train loss:3.608010 +step:4418 train loss:3.682775 +step:4419 train loss:3.652281 +step:4420 train loss:3.661423 +step:4421 train loss:3.687073 +step:4422 train loss:3.704185 +step:4423 train loss:3.662608 +step:4424 train loss:3.646405 +step:4425 train loss:3.610040 +step:4426 train loss:3.685115 +step:4427 train loss:3.644495 +step:4428 train loss:3.583225 +step:4429 train loss:3.643264 +step:4430 train loss:3.682506 +step:4431 train loss:3.676847 +step:4432 train loss:3.582942 +step:4433 train loss:3.635045 +step:4434 train loss:3.632233 +step:4435 train loss:3.665557 +step:4436 train loss:3.598404 +step:4437 train loss:3.678320 +step:4438 train loss:3.644982 +step:4439 train loss:3.650398 +step:4440 train loss:3.649404 +step:4441 train loss:3.648290 +step:4442 train loss:3.699970 +step:4443 train loss:3.637935 +step:4444 train loss:3.721335 +step:4445 train loss:3.682002 +step:4446 train loss:3.617220 +step:4447 train loss:3.666654 +step:4448 train loss:3.683373 +step:4449 train loss:3.621036 +step:4450 train loss:3.638092 +step:4451 train loss:3.692264 +step:4452 train loss:3.751190 +step:4453 train loss:3.685606 +step:4454 train loss:3.658640 +step:4455 train loss:3.705744 +step:4456 train loss:3.652327 +step:4457 train loss:3.647941 +step:4458 train loss:3.657997 +step:4459 train loss:3.693201 +step:4460 train loss:3.601966 +step:4461 train loss:3.575808 +step:4462 train loss:3.630707 +step:4463 train loss:3.651769 +step:4464 train loss:3.618930 +step:4465 train loss:3.655865 +step:4466 train loss:3.747462 +step:4467 train loss:3.629539 +step:4468 train loss:3.621753 +step:4469 train loss:3.614623 +step:4470 train loss:3.589518 +step:4471 train loss:3.652992 +step:4472 train loss:3.577566 +step:4473 train loss:3.667214 +step:4474 train loss:3.688903 +step:4475 train loss:3.651423 +step:4476 train loss:3.614582 +step:4477 train loss:3.600922 +step:4478 train loss:3.658166 +step:4479 train loss:3.760049 +step:4480 train loss:3.596012 +step:4481 train loss:3.667480 +step:4482 train loss:3.625576 +step:4483 train loss:3.622300 +step:4484 train loss:3.672411 +step:4485 train loss:3.628112 +step:4486 train loss:3.729530 +step:4487 train loss:3.625832 +step:4488 train loss:3.624264 +step:4489 train loss:3.581275 +step:4490 train loss:3.662276 +step:4491 train 
loss:3.616465 +step:4492 train loss:3.645600 +step:4493 train loss:3.630529 +step:4494 train loss:3.626153 +step:4495 train loss:3.692101 +step:4496 train loss:3.631647 +step:4497 train loss:3.715408 +step:4498 train loss:3.605366 +step:4499 train loss:3.657702 +step:4500 validation loss:3.562686 total_sharp:1.9322e-03 L1_sharp:1.8980e-03 L2_sharp:1.3919e-04 L3_sharp:3.4087e-04 L4_sharp:2.9027e-04 L5_sharp:2.2068e-04 L6_sharp:2.2474e-04 L7_sharp:3.6915e-04 L8_sharp:5.3331e-04 L9_sharp:5.0696e-04 L10_sharp:7.2755e-04 L11_sharp:6.6071e-04 L12_sharp:8.6645e-04 total_fnorm:4.4440e+00 total_l1_linf:3.9434e+04 total_spectral:4.4440e+00 L1_fnorm:1.0033e+00 L2_fnorm:9.8150e-01 L3_fnorm:1.0038e+00 L4_fnorm:1.0243e+00 L5_fnorm:1.0504e+00 L6_fnorm:1.0541e+00 L7_fnorm:1.0533e+00 L8_fnorm:1.0513e+00 L9_fnorm:1.0480e+00 L10_fnorm:1.0290e+00 L11_fnorm:1.0441e+00 L12_fnorm:1.0430e+00 L1_l1linf:1.2606e+00 L2_l1linf:1.0683e+00 L3_l1linf:1.1087e+00 L4_l1linf:1.0847e+00 L5_l1linf:1.1034e+00 L6_l1linf:1.0982e+00 L7_l1linf:1.0603e+00 L8_l1linf:1.0572e+00 L9_l1linf:1.1202e+00 L10_l1linf:1.1628e+00 L11_l1linf:1.2910e+00 L12_l1linf:1.1616e+00 L1_spectral:1.7349e-01 L2_spectral:1.5331e-01 L3_spectral:1.4471e-01 L4_spectral:1.1048e-01 L5_spectral:1.0009e-01 L6_spectral:9.2630e-02 L7_spectral:9.2804e-02 L8_spectral:8.4256e-02 L9_spectral:1.1332e-01 L10_spectral:1.4589e-01 L11_spectral:1.6438e-01 L12_spectral:1.6148e-01 ip_v_neg_g:2.4079e-02 cos_v_neg_g:7.5502e-03 v_norm:4.4440e+00 g_norm:7.1762e-01 hv_norm:4.1047e-01 cos_v_hv:2.0920e-02 hg_norm:3.9682e+00 cos_g_hg:6.0473e-01 v_par:1.2780e-03 v_perp:4.4440e+00 L1_cos_v_neg_g:2.0495e-02 L1_v_norm:1.0033e+00 L2_cos_v_neg_g:7.4700e-03 L2_v_norm:9.8150e-01 L3_cos_v_neg_g:6.8094e-03 L3_v_norm:1.0038e+00 L4_cos_v_neg_g:7.6128e-03 L4_v_norm:1.0243e+00 L5_cos_v_neg_g:6.3765e-03 L5_v_norm:1.0504e+00 L6_cos_v_neg_g:6.5686e-03 L6_v_norm:1.0541e+00 L7_cos_v_neg_g:7.1344e-03 L7_v_norm:1.0533e+00 L8_cos_v_neg_g:8.5593e-03 L8_v_norm:1.0513e+00 L9_cos_v_neg_g:8.7393e-03 L9_v_norm:1.0480e+00 L10_cos_v_neg_g:1.3518e-02 L10_v_norm:1.0290e+00 L11_cos_v_neg_g:1.5476e-02 L11_v_norm:1.0441e+00 L12_cos_v_neg_g:1.6722e-02 L12_v_norm:1.0430e+00 +step:4500 train loss:3.569633 +step:4501 train loss:3.627710 +step:4502 train loss:3.752301 +step:4503 train loss:3.653815 +step:4504 train loss:3.664243 +step:4505 train loss:3.646166 +step:4506 train loss:3.619684 +step:4507 train loss:3.693342 +step:4508 train loss:3.630901 +step:4509 train loss:3.628300 +step:4510 train loss:3.663459 +step:4511 train loss:3.615138 +step:4512 train loss:3.640371 +step:4513 train loss:3.695427 +step:4514 train loss:3.602645 +step:4515 train loss:3.720303 +step:4516 train loss:3.692606 +step:4517 train loss:3.647913 +step:4518 train loss:3.586300 +step:4519 train loss:3.621390 +step:4520 train loss:3.633608 +step:4521 train loss:3.574112 +step:4522 train loss:3.628849 +step:4523 train loss:3.674803 +step:4524 train loss:3.658302 +step:4525 train loss:3.581498 +step:4526 train loss:3.622001 +step:4527 train loss:3.610100 +step:4528 train loss:3.640260 +step:4529 train loss:3.635217 +step:4530 train loss:3.730464 +step:4531 train loss:3.620874 +step:4532 train loss:3.642976 +step:4533 train loss:3.617217 +step:4534 train loss:3.708831 +step:4535 train loss:3.609712 +step:4536 train loss:3.679747 +step:4537 train loss:3.662543 +step:4538 train loss:3.639720 +step:4539 train loss:3.662304 +step:4540 train loss:3.642648 +step:4541 train loss:3.606641 +step:4542 train loss:3.655721 +step:4543 train loss:3.739138 +step:4544 
train loss:3.685378 +step:4545 train loss:3.623760 +step:4546 train loss:3.723011 +step:4547 train loss:3.682949 +step:4548 train loss:3.683573 +step:4549 train loss:3.642078 +step:4550 train loss:3.606524 +step:4551 train loss:3.624488 +step:4552 train loss:3.628914 +step:4553 train loss:3.713279 +step:4554 train loss:3.606963 +step:4555 train loss:3.722979 +step:4556 train loss:3.655062 +step:4557 train loss:3.585233 +step:4558 train loss:3.668014 +step:4559 train loss:3.678881 +step:4560 train loss:3.618522 +step:4561 train loss:3.603014 +step:4562 train loss:3.642950 +step:4563 train loss:3.596278 +step:4564 train loss:3.623918 +step:4565 train loss:3.622937 +step:4566 train loss:3.594522 +step:4567 train loss:3.625125 +step:4568 train loss:3.622118 +step:4569 train loss:3.603268 +step:4570 train loss:3.659825 +step:4571 train loss:3.637543 +step:4572 train loss:3.628855 +step:4573 train loss:3.636687 +step:4574 train loss:3.785421 +step:4575 train loss:3.616916 +step:4576 train loss:3.604239 +step:4577 train loss:3.644767 +step:4578 train loss:3.685384 +step:4579 train loss:3.635051 +step:4580 train loss:3.695228 +step:4581 train loss:3.632910 +step:4582 train loss:3.626178 +step:4583 train loss:3.633348 +step:4584 train loss:3.604498 +step:4585 train loss:3.686872 +step:4586 train loss:3.672233 +step:4587 train loss:3.573575 +step:4588 train loss:3.613827 +step:4589 train loss:3.690314 +step:4590 train loss:3.659847 +step:4591 train loss:3.600502 +step:4592 train loss:3.686847 +step:4593 train loss:3.604764 +step:4594 train loss:3.635481 +step:4595 train loss:3.659658 +step:4596 train loss:3.596185 +step:4597 train loss:3.733997 +step:4598 train loss:3.650960 +step:4599 train loss:3.604677 +step:4600 train loss:3.611692 +step:4601 train loss:3.636251 +step:4602 train loss:3.587037 +step:4603 train loss:3.599270 +step:4604 train loss:3.706767 +step:4605 train loss:3.625544 +step:4606 train loss:3.651518 +step:4607 train loss:3.631926 +step:4608 train loss:3.668086 +step:4609 train loss:3.626612 +step:4610 train loss:3.670616 +step:4611 train loss:3.696946 +step:4612 train loss:3.696580 +step:4613 train loss:3.674537 +step:4614 train loss:3.668595 +step:4615 train loss:3.609791 +step:4616 train loss:3.593184 +step:4617 train loss:3.637407 +step:4618 train loss:3.652726 +step:4619 train loss:3.612513 +step:4620 train loss:3.627952 +step:4621 train loss:3.631821 +step:4622 train loss:3.566306 +step:4623 train loss:3.674830 +step:4624 train loss:3.659118 +step:4625 train loss:3.616511 +step:4626 train loss:3.660736 +step:4627 train loss:3.629071 +step:4628 train loss:3.615454 +step:4629 train loss:3.656422 +step:4630 train loss:3.711077 +step:4631 train loss:3.711772 +step:4632 train loss:3.608069 +step:4633 train loss:3.619441 +step:4634 train loss:3.695238 +step:4635 train loss:3.659054 +step:4636 train loss:3.672125 +step:4637 train loss:3.612247 +step:4638 train loss:3.618237 +step:4639 train loss:3.614311 +step:4640 train loss:3.625473 +step:4641 train loss:3.631965 +step:4642 train loss:3.666381 +step:4643 train loss:3.622327 +step:4644 train loss:3.652101 +step:4645 train loss:3.672128 +step:4646 train loss:3.622132 +step:4647 train loss:3.580775 +step:4648 train loss:3.687583 +step:4649 train loss:3.695770 +step:4650 train loss:3.641603 +step:4651 train loss:3.646657 +step:4652 train loss:3.635446 +step:4653 train loss:3.690763 +step:4654 train loss:3.687597 +step:4655 train loss:3.590470 +step:4656 train loss:3.626161 +step:4657 train loss:3.675817 +step:4658 train loss:3.632037 
+step:4659 train loss:3.646315 +step:4660 train loss:3.689266 +step:4661 train loss:3.606508 +step:4662 train loss:3.625599 +step:4663 train loss:3.637455 +step:4664 train loss:3.681917 +step:4665 train loss:3.679856 +step:4666 train loss:3.676440 +step:4667 train loss:3.668680 +step:4668 train loss:3.633249 +step:4669 train loss:3.642915 +step:4670 train loss:3.676794 +step:4671 train loss:3.682559 +step:4672 train loss:3.556908 +step:4673 train loss:3.589717 +step:4674 train loss:3.720778 +step:4675 train loss:3.624669 +step:4676 train loss:3.584408 +step:4677 train loss:3.589042 +step:4678 train loss:3.560852 +step:4679 train loss:3.659943 +step:4680 train loss:3.598645 +step:4681 train loss:3.647708 +step:4682 train loss:3.598095 +step:4683 train loss:3.569716 +step:4684 train loss:3.687889 +step:4685 train loss:3.623768 +step:4686 train loss:3.633234 +step:4687 train loss:3.672427 +step:4688 train loss:3.600909 +step:4689 train loss:3.676705 +step:4690 train loss:3.618813 +step:4691 train loss:3.654676 +step:4692 train loss:3.583051 +step:4693 train loss:3.620916 +step:4694 train loss:3.660582 +step:4695 train loss:3.680089 +step:4696 train loss:3.665252 +step:4697 train loss:3.579237 +step:4698 train loss:3.595549 +step:4699 train loss:3.642671 +step:4700 train loss:3.614189 +step:4701 train loss:3.623480 +step:4702 train loss:3.581433 +step:4703 train loss:3.656867 +step:4704 train loss:3.644309 +step:4705 train loss:3.587127 +step:4706 train loss:3.595047 +step:4707 train loss:3.582142 +step:4708 train loss:3.651001 +step:4709 train loss:3.595336 +step:4710 train loss:3.610029 +step:4711 train loss:3.668813 +step:4712 train loss:3.568273 +step:4713 train loss:3.671018 +step:4714 train loss:3.571182 +step:4715 train loss:3.663224 +step:4716 train loss:3.630962 +step:4717 train loss:3.559392 +step:4718 train loss:3.652483 +step:4719 train loss:3.576849 +step:4720 train loss:3.676029 +step:4721 train loss:3.629215 +step:4722 train loss:3.685120 +step:4723 train loss:3.582555 +step:4724 train loss:3.634803 +step:4725 train loss:3.569037 +step:4726 train loss:3.612855 +step:4727 train loss:3.622947 +step:4728 train loss:3.627522 +step:4729 train loss:3.658596 +step:4730 train loss:3.558429 +step:4731 train loss:3.615911 +step:4732 train loss:3.569552 +step:4733 train loss:3.506991 +step:4734 train loss:3.646734 +step:4735 train loss:3.595899 +step:4736 train loss:3.637376 +step:4737 train loss:3.518245 +step:4738 train loss:3.666608 +step:4739 train loss:3.545334 +step:4740 train loss:3.654343 +step:4741 train loss:3.622483 +step:4742 train loss:3.581916 +step:4743 train loss:3.581180 +step:4744 train loss:3.623367 +step:4745 train loss:3.645996 +step:4746 train loss:3.680876 +step:4747 train loss:3.646196 +step:4748 train loss:3.545158 +step:4749 train loss:3.611717 +step:4750 validation loss:3.545226 +step:4750 train loss:3.559762 +step:4751 train loss:3.651886 +step:4752 train loss:3.585521 +step:4753 train loss:3.692634 +step:4754 train loss:3.562332 +step:4755 train loss:3.601671 +step:4756 train loss:3.678403 +step:4757 train loss:3.599690 +step:4758 train loss:3.620582 +step:4759 train loss:3.621165 +step:4760 train loss:3.650687 +step:4761 train loss:3.569026 +step:4762 train loss:3.600119 +step:4763 train loss:3.628091 +step:4764 train loss:3.684630 +step:4765 train loss:3.575488 +step:4766 train loss:3.597565 +step:4767 train loss:3.550187 +step:4768 train loss:3.607885 +step:4769 train loss:3.632693 +step:4770 train loss:3.591405 +step:4771 train loss:3.603923 +step:4772 
train loss:3.578691 +step:4773 train loss:3.613627 +step:4774 train loss:3.557280 +step:4775 train loss:3.686949 +step:4776 train loss:3.555800 +step:4777 train loss:3.628958 +step:4778 train loss:3.568134 +step:4779 train loss:3.613651 +step:4780 train loss:3.553245 +step:4781 train loss:3.560167 +step:4782 train loss:3.667942 +step:4783 train loss:3.652811 +step:4784 train loss:3.617714 +step:4785 train loss:3.615654 +step:4786 train loss:3.725864 +step:4787 train loss:3.558403 +step:4788 train loss:3.581880 +step:4789 train loss:3.606100 +step:4790 train loss:3.657033 +step:4791 train loss:3.619189 +step:4792 train loss:3.662785 +step:4793 train loss:3.579667 +step:4794 train loss:3.653650 +step:4795 train loss:3.602597 +step:4796 train loss:3.594120 +step:4797 train loss:3.601801 +step:4798 train loss:3.607879 +step:4799 train loss:3.607993 +step:4800 train loss:3.636848 +step:4801 train loss:3.629824 +step:4802 train loss:3.665730 +step:4803 train loss:3.650880 +step:4804 train loss:3.608300 +step:4805 train loss:3.603849 +step:4806 train loss:3.587020 +step:4807 train loss:3.689280 +step:4808 train loss:3.564073 +step:4809 train loss:3.662750 +step:4810 train loss:3.602326 +step:4811 train loss:3.622932 +step:4812 train loss:3.599185 +step:4813 train loss:3.553779 +step:4814 train loss:3.547283 +step:4815 train loss:3.542373 +step:4816 train loss:3.611749 +step:4817 train loss:3.545662 +step:4818 train loss:3.610054 +step:4819 train loss:3.604722 +step:4820 train loss:3.857495 +step:4821 train loss:3.633798 +step:4822 train loss:3.644506 +step:4823 train loss:3.576866 +step:4824 train loss:3.582693 +step:4825 train loss:3.564061 +step:4826 train loss:3.648553 +step:4827 train loss:3.595271 +step:4828 train loss:3.538217 +step:4829 train loss:3.644581 +step:4830 train loss:3.582841 +step:4831 train loss:3.733652 +step:4832 train loss:3.604310 +step:4833 train loss:3.643311 +step:4834 train loss:3.543557 +step:4835 train loss:3.637636 +step:4836 train loss:3.612894 +step:4837 train loss:3.642211 +step:4838 train loss:3.582469 +step:4839 train loss:3.642878 +step:4840 train loss:3.552707 +step:4841 train loss:3.647711 +step:4842 train loss:3.570984 +step:4843 train loss:3.641042 +step:4844 train loss:3.644956 +step:4845 train loss:3.582307 +step:4846 train loss:3.596796 +step:4847 train loss:3.581661 +step:4848 train loss:3.606808 +step:4849 train loss:3.556179 +step:4850 train loss:3.568868 +step:4851 train loss:3.560611 +step:4852 train loss:3.642427 +step:4853 train loss:3.615027 +step:4854 train loss:3.596085 +step:4855 train loss:3.657646 +step:4856 train loss:3.625273 +step:4857 train loss:3.633107 +step:4858 train loss:3.711636 +step:4859 train loss:3.556918 +step:4860 train loss:3.637204 +step:4861 train loss:3.609040 +step:4862 train loss:3.642394 +step:4863 train loss:3.575672 +step:4864 train loss:3.588529 +step:4865 train loss:3.583498 +step:4866 train loss:3.627625 +step:4867 train loss:3.594770 +step:4868 train loss:3.613008 +step:4869 train loss:3.567991 +step:4870 train loss:3.592321 +step:4871 train loss:3.681698 +step:4872 train loss:3.622263 +step:4873 train loss:3.622690 +step:4874 train loss:3.593427 +step:4875 train loss:3.563207 +step:4876 train loss:3.570248 +step:4877 train loss:3.579240 +step:4878 train loss:3.611805 +step:4879 train loss:3.576827 +step:4880 train loss:3.601141 +step:4881 train loss:3.547755 +step:4882 train loss:3.747609 +step:4883 train loss:3.559474 +step:4884 train loss:3.589083 +step:4885 train loss:3.567240 +step:4886 train loss:3.638945 
+step:4887 train loss:3.589611 +step:4888 train loss:3.599896 +step:4889 train loss:3.593642 +step:4890 train loss:3.633330 +step:4891 train loss:3.574199 +step:4892 train loss:3.575378 +step:4893 train loss:3.623115 +step:4894 train loss:3.557281 +step:4895 train loss:3.591254 +step:4896 train loss:3.571127 +step:4897 train loss:3.648255 +step:4898 train loss:3.596063 +step:4899 train loss:3.583549 +step:4900 train loss:3.624213 +step:4901 train loss:3.580038 +step:4902 train loss:3.567948 +step:4903 train loss:3.591841 +step:4904 train loss:3.605484 +step:4905 train loss:3.602673 +step:4906 train loss:3.600515 +step:4907 train loss:3.677525 +step:4908 train loss:3.579540 +step:4909 train loss:3.588362 +step:4910 train loss:3.609890 +step:4911 train loss:3.659721 +step:4912 train loss:3.636320 +step:4913 train loss:3.613327 +step:4914 train loss:3.604561 +step:4915 train loss:3.586668 +step:4916 train loss:3.527910 +step:4917 train loss:3.554946 +step:4918 train loss:3.585533 +step:4919 train loss:3.578301 +step:4920 train loss:3.633086 +step:4921 train loss:3.748662 +step:4922 train loss:3.636726 +step:4923 train loss:3.651591 +step:4924 train loss:3.655370 +step:4925 train loss:3.594101 +step:4926 train loss:3.583806 +step:4927 train loss:3.615078 +step:4928 train loss:3.647584 +step:4929 train loss:3.608898 +step:4930 train loss:3.585133 +step:4931 train loss:3.582561 +step:4932 train loss:3.588343 +step:4933 train loss:3.580072 +step:4934 train loss:3.645248 +step:4935 train loss:3.630742 +step:4936 train loss:3.594147 +step:4937 train loss:3.704254 +step:4938 train loss:3.694671 +step:4939 train loss:3.553828 +step:4940 train loss:3.635306 +step:4941 train loss:3.532529 +step:4942 train loss:3.579890 +step:4943 train loss:3.577555 +step:4944 train loss:3.581009 +step:4945 train loss:3.624587 +step:4946 train loss:3.603807 +step:4947 train loss:3.582875 +step:4948 train loss:3.621633 +step:4949 train loss:3.526226 +step:4950 train loss:3.610358 +step:4951 train loss:3.658751 +step:4952 train loss:3.598703 +step:4953 train loss:3.631499 +step:4954 train loss:3.537136 +step:4955 train loss:3.612631 +step:4956 train loss:3.638831 +step:4957 train loss:3.631693 +step:4958 train loss:3.546257 +step:4959 train loss:3.665250 +step:4960 train loss:3.591529 +step:4961 train loss:3.614362 +step:4962 train loss:3.573102 +step:4963 train loss:3.619297 +step:4964 train loss:3.570364 +step:4965 train loss:3.725200 +step:4966 train loss:3.573912 +step:4967 train loss:3.677557 +step:4968 train loss:3.568646 +step:4969 train loss:3.614039 +step:4970 train loss:3.602440 +step:4971 train loss:3.556591 +step:4972 train loss:3.597600 +step:4973 train loss:3.606024 +step:4974 train loss:3.592535 +step:4975 train loss:3.678362 +step:4976 train loss:3.658527 +step:4977 train loss:3.604900 +step:4978 train loss:3.591048 +step:4979 train loss:3.588899 +step:4980 train loss:3.698547 +step:4981 train loss:3.534687 +step:4982 train loss:3.617469 +step:4983 train loss:3.543157 +step:4984 train loss:3.728565 +step:4985 train loss:3.628368 +step:4986 train loss:3.570201 +step:4987 train loss:3.590959 +step:4988 train loss:3.787220 +step:4989 train loss:3.596863 +step:4990 train loss:3.586669 +step:4991 train loss:3.603262 +step:4992 train loss:3.585190 +step:4993 train loss:3.563743 +step:4994 train loss:3.675963 +step:4995 train loss:3.599920 +step:4996 train loss:3.687049 +step:4997 train loss:3.587005 +step:4998 train loss:3.587922 +step:4999 train loss:3.573547 +step:5000 validation loss:3.534165 
total_sharp:1.5128e-03 L1_sharp:1.6680e-03 L2_sharp:2.2580e-04 L3_sharp:7.7549e-04 L4_sharp:3.0605e-04 L5_sharp:1.9274e-04 L6_sharp:2.7006e-04 L7_sharp:2.6647e-04 L8_sharp:3.7629e-04 L9_sharp:4.0739e-04 L10_sharp:5.6062e-04 L11_sharp:4.6201e-04 L12_sharp:7.3004e-04 total_fnorm:4.3659e+00 total_l1_linf:3.8607e+04 total_spectral:4.3659e+00 L1_fnorm:8.7922e-01 L2_fnorm:9.7448e-01 L3_fnorm:9.8465e-01 L4_fnorm:1.0079e+00 L5_fnorm:1.0349e+00 L6_fnorm:1.0381e+00 L7_fnorm:1.0410e+00 L8_fnorm:1.0365e+00 L9_fnorm:1.0364e+00 L10_fnorm:1.0082e+00 L11_fnorm:1.0175e+00 L12_fnorm:1.0195e+00 L1_l1linf:1.0352e+00 L2_l1linf:1.0653e+00 L3_l1linf:1.0722e+00 L4_l1linf:1.0546e+00 L5_l1linf:1.0938e+00 L6_l1linf:1.0686e+00 L7_l1linf:1.0442e+00 L8_l1linf:1.0425e+00 L9_l1linf:1.0803e+00 L10_l1linf:1.0994e+00 L11_l1linf:1.2177e+00 L12_l1linf:1.1418e+00 L1_spectral:1.2618e-01 L2_spectral:1.5236e-01 L3_spectral:1.4204e-01 L4_spectral:1.1293e-01 L5_spectral:9.6345e-02 L6_spectral:9.1246e-02 L7_spectral:7.6480e-02 L8_spectral:8.2980e-02 L9_spectral:1.0890e-01 L10_spectral:1.3328e-01 L11_spectral:1.5368e-01 L12_spectral:1.5888e-01 ip_v_neg_g:1.5955e-02 cos_v_neg_g:4.1295e-03 v_norm:4.3659e+00 g_norm:8.8493e-01 hv_norm:4.9870e-01 cos_v_hv:1.3244e-02 hg_norm:1.6541e+01 cos_g_hg:5.8294e-01 v_par:7.9541e-04 v_perp:4.3659e+00 L1_cos_v_neg_g:1.2389e-02 L1_v_norm:8.7922e-01 L2_cos_v_neg_g:5.3566e-03 L2_v_norm:9.7448e-01 L3_cos_v_neg_g:6.3660e-03 L3_v_norm:9.8465e-01 L4_cos_v_neg_g:4.1128e-03 L4_v_norm:1.0079e+00 L5_cos_v_neg_g:4.0817e-03 L5_v_norm:1.0349e+00 L6_cos_v_neg_g:4.2247e-03 L6_v_norm:1.0381e+00 L7_cos_v_neg_g:3.5590e-03 L7_v_norm:1.0410e+00 L8_cos_v_neg_g:4.1214e-03 L8_v_norm:1.0365e+00 L9_cos_v_neg_g:4.0675e-03 L9_v_norm:1.0364e+00 L10_cos_v_neg_g:6.1953e-03 L10_v_norm:1.0082e+00 L11_cos_v_neg_g:7.5476e-03 L11_v_norm:1.0175e+00 L12_cos_v_neg_g:7.2745e-03 L12_v_norm:1.0195e+00 +step:5000 train loss:3.686461 +step:5001 train loss:3.548930 +step:5002 train loss:3.607126 +step:5003 train loss:3.602619 +step:5004 train loss:3.594106 +step:5005 train loss:3.593176 +step:5006 train loss:3.634575 +step:5007 train loss:3.637197 +step:5008 train loss:3.574628 +step:5009 train loss:3.618628 +step:5010 train loss:3.570879 +step:5011 train loss:3.598705 +step:5012 train loss:3.571302 +step:5013 train loss:3.677016 +step:5014 train loss:3.589180 +step:5015 train loss:3.665204 +step:5016 train loss:3.592509 +step:5017 train loss:3.640514 +step:5018 train loss:3.557287 +step:5019 train loss:3.594059 +step:5020 train loss:3.584448 +step:5021 train loss:3.602299 +step:5022 train loss:3.633190 +step:5023 train loss:3.608490 +step:5024 train loss:3.657053 +step:5025 train loss:3.546675 +step:5026 train loss:3.669895 +step:5027 train loss:3.602433 +step:5028 train loss:3.670364 +step:5029 train loss:3.564845 +step:5030 train loss:3.604029 +step:5031 train loss:3.592978 +step:5032 train loss:3.619910 +step:5033 train loss:3.601998 +step:5034 train loss:3.601597 +step:5035 train loss:3.683924 +step:5036 train loss:3.632338 +step:5037 train loss:3.583453 +step:5038 train loss:3.639974 +step:5039 train loss:3.645634 +step:5040 train loss:3.609329 +step:5041 train loss:3.630366 +step:5042 train loss:3.531171 +step:5043 train loss:3.671270 +step:5044 train loss:3.588257 +step:5045 train loss:3.638409 +step:5046 train loss:3.558857 +step:5047 train loss:3.636320 +step:5048 train loss:3.554333 +step:5049 train loss:3.691338 +step:5050 train loss:3.573390 +step:5051 train loss:3.619914 +step:5052 train loss:3.519137 +step:5053 train loss:3.703978 
+step:5054 train loss:3.587797 +step:5055 train loss:3.611649 +step:5056 train loss:3.644646 +step:5057 train loss:3.577246 +step:5058 train loss:3.605488 +step:5059 train loss:3.568587 +step:5060 train loss:3.615399 +step:5061 train loss:3.610279 +step:5062 train loss:3.581479 +step:5063 train loss:3.574536 +step:5064 train loss:3.582408 +step:5065 train loss:3.568094 +step:5066 train loss:3.627806 +step:5067 train loss:3.608070 +step:5068 train loss:3.596209 +step:5069 train loss:3.566673 +step:5070 train loss:3.595305 +step:5071 train loss:3.665110 +step:5072 train loss:3.558927 +step:5073 train loss:3.564300 +step:5074 train loss:3.513442 +step:5075 train loss:3.583267 +step:5076 train loss:3.512971 +step:5077 train loss:3.577142 +step:5078 train loss:3.591702 +step:5079 train loss:3.619427 +step:5080 train loss:3.596292 +step:5081 train loss:3.608407 +step:5082 train loss:3.598645 +step:5083 train loss:3.664154 +step:5084 train loss:3.631685 +step:5085 train loss:3.597040 +step:5086 train loss:3.670242 +step:5087 train loss:3.654839 +step:5088 train loss:3.572313 +step:5089 train loss:3.638804 +step:5090 train loss:3.587178 +step:5091 train loss:3.588143 +step:5092 train loss:3.686836 +step:5093 train loss:3.569152 +step:5094 train loss:3.569537 +step:5095 train loss:3.621775 +step:5096 train loss:3.595129 +step:5097 train loss:3.603611 +step:5098 train loss:3.602139 +step:5099 train loss:3.564415 +step:5100 train loss:3.574099 +step:5101 train loss:3.765210 +step:5102 train loss:3.613035 +step:5103 train loss:3.619927 +step:5104 train loss:3.667238 +step:5105 train loss:3.607999 +step:5106 train loss:3.562191 +step:5107 train loss:3.580783 +step:5108 train loss:3.574461 +step:5109 train loss:3.658754 +step:5110 train loss:3.564671 +step:5111 train loss:3.662184 +step:5112 train loss:3.568460 +step:5113 train loss:3.551322 +step:5114 train loss:3.594909 +step:5115 train loss:3.557835 +step:5116 train loss:3.614503 +step:5117 train loss:3.559232 +step:5118 train loss:3.586812 +step:5119 train loss:3.568929 +step:5120 train loss:3.612017 +step:5121 train loss:3.558256 +step:5122 train loss:3.570789 +step:5123 train loss:3.562333 +step:5124 train loss:3.518184 +step:5125 train loss:3.627174 +step:5126 train loss:3.616541 +step:5127 train loss:3.618537 +step:5128 train loss:3.627936 +step:5129 train loss:3.559482 +step:5130 train loss:3.569667 +step:5131 train loss:3.510506 +step:5132 train loss:3.628627 +step:5133 train loss:3.596968 +step:5134 train loss:3.599876 +step:5135 train loss:3.553841 +step:5136 train loss:3.617575 +step:5137 train loss:3.615294 +step:5138 train loss:3.597522 +step:5139 train loss:3.630262 +step:5140 train loss:3.606148 +step:5141 train loss:3.635889 +step:5142 train loss:3.586361 +step:5143 train loss:3.612240 +step:5144 train loss:3.613340 +step:5145 train loss:3.554980 +step:5146 train loss:3.544796 +step:5147 train loss:3.621549 +step:5148 train loss:3.558450 +step:5149 train loss:3.623731 +step:5150 train loss:3.602647 +step:5151 train loss:3.569074 +step:5152 train loss:3.614224 +step:5153 train loss:3.584201 +step:5154 train loss:3.600030 +step:5155 train loss:3.607938 +step:5156 train loss:3.585482 +step:5157 train loss:3.585502 +step:5158 train loss:3.608129 +step:5159 train loss:3.643411 +step:5160 train loss:3.709611 +step:5161 train loss:3.636082 +step:5162 train loss:3.656848 +step:5163 train loss:3.568966 +step:5164 train loss:3.646577 +step:5165 train loss:3.647446 +step:5166 train loss:3.588727 +step:5167 train loss:3.684784 +step:5168 train 
loss:3.609391 +step:5169 train loss:3.630774 +step:5170 train loss:3.611271 +step:5171 train loss:3.653742 +step:5172 train loss:3.572586 +step:5173 train loss:3.639051 +step:5174 train loss:3.569885 +step:5175 train loss:3.603766 +step:5176 train loss:3.595386 +step:5177 train loss:3.595492 +step:5178 train loss:3.661459 +step:5179 train loss:3.571371 +step:5180 train loss:3.650036 +step:5181 train loss:3.591690 +step:5182 train loss:3.651955 +step:5183 train loss:3.580358 +step:5184 train loss:3.560714 +step:5185 train loss:3.584794 +step:5186 train loss:3.641706 +step:5187 train loss:3.634202 +step:5188 train loss:3.567575 +step:5189 train loss:3.612311 +step:5190 train loss:3.594608 +step:5191 train loss:3.579947 +step:5192 train loss:3.561023 +step:5193 train loss:3.645104 +step:5194 train loss:3.595061 +step:5195 train loss:3.566952 +step:5196 train loss:3.637164 +step:5197 train loss:3.693361 +step:5198 train loss:3.598199 +step:5199 train loss:3.584724 +step:5200 train loss:3.609099 +step:5201 train loss:3.599112 +step:5202 train loss:3.605505 +step:5203 train loss:3.603555 +step:5204 train loss:3.577155 +step:5205 train loss:3.621928 +step:5206 train loss:3.558810 +step:5207 train loss:3.559860 +step:5208 train loss:3.623645 +step:5209 train loss:3.640742 +step:5210 train loss:3.560900 +step:5211 train loss:3.604084 +step:5212 train loss:3.614552 +step:5213 train loss:3.589630 +step:5214 train loss:3.635937 +step:5215 train loss:3.746631 +step:5216 train loss:3.598749 +step:5217 train loss:3.575497 +step:5218 train loss:3.582089 +step:5219 train loss:3.644698 +step:5220 train loss:3.560334 +step:5221 train loss:3.560337 +step:5222 train loss:3.646662 +step:5223 train loss:3.637344 +step:5224 train loss:3.533905 +step:5225 train loss:3.679570 +step:5226 train loss:3.595571 +step:5227 train loss:3.667330 +step:5228 train loss:3.638300 +step:5229 train loss:3.581146 +step:5230 train loss:3.588654 +step:5231 train loss:3.538019 +step:5232 train loss:3.661056 +step:5233 train loss:3.619645 +step:5234 train loss:3.627413 +step:5235 train loss:3.571001 +step:5236 train loss:3.649205 +step:5237 train loss:3.703841 +step:5238 train loss:3.601437 +step:5239 train loss:3.665979 +step:5240 train loss:3.549548 +step:5241 train loss:3.609787 +step:5242 train loss:3.580205 +step:5243 train loss:3.585119 +step:5244 train loss:3.583068 +step:5245 train loss:3.629942 +step:5246 train loss:3.671526 +step:5247 train loss:3.600684 +step:5248 train loss:3.569642 +step:5249 train loss:3.627768 +step:5250 validation loss:3.522884 +step:5250 train loss:3.596133 +step:5251 train loss:3.660146 +step:5252 train loss:3.549053 +step:5253 train loss:3.699671 +step:5254 train loss:3.573592 +step:5255 train loss:3.647759 +step:5256 train loss:3.560781 +step:5257 train loss:3.618322 +step:5258 train loss:3.611925 +step:5259 train loss:3.596637 +step:5260 train loss:3.594541 +step:5261 train loss:3.583366 +step:5262 train loss:3.625497 +step:5263 train loss:3.610535 +step:5264 train loss:3.562329 +step:5265 train loss:3.641531 +step:5266 train loss:3.556580 +step:5267 train loss:3.570839 +step:5268 train loss:3.550965 +step:5269 train loss:3.557956 +step:5270 train loss:3.607514 +step:5271 train loss:3.530589 +step:5272 train loss:3.625334 +step:5273 train loss:3.531488 +step:5274 train loss:3.584589 +step:5275 train loss:3.597524 +step:5276 train loss:3.720391 +step:5277 train loss:3.625002 +step:5278 train loss:3.570087 +step:5279 train loss:3.617894 +step:5280 train loss:3.592135 +step:5281 train loss:3.587780 
+step:5282 train loss:3.556016 +step:5283 train loss:3.558387 +step:5284 train loss:3.569394 +step:5285 train loss:3.633927 +step:5286 train loss:3.545285 +step:5287 train loss:3.643718 +step:5288 train loss:3.617872 +step:5289 train loss:3.589224 +step:5290 train loss:3.639629 +step:5291 train loss:3.596307 +step:5292 train loss:3.613341 +step:5293 train loss:3.582902 +step:5294 train loss:3.569756 +step:5295 train loss:3.576401 +step:5296 train loss:3.568806 +step:5297 train loss:3.590332 +step:5298 train loss:3.538440 +step:5299 train loss:3.627919 +step:5300 train loss:3.581647 +step:5301 train loss:3.648455 +step:5302 train loss:3.651372 +step:5303 train loss:3.517817 +step:5304 train loss:3.547216 +step:5305 train loss:3.523024 +step:5306 train loss:3.557696 +step:5307 train loss:3.565686 +step:5308 train loss:3.660395 +step:5309 train loss:3.608584 +step:5310 train loss:3.592016 +step:5311 train loss:3.665226 +step:5312 train loss:3.546203 +step:5313 train loss:3.632868 +step:5314 train loss:3.626119 +step:5315 train loss:3.585835 +step:5316 train loss:3.618756 +step:5317 train loss:3.636011 +step:5318 train loss:3.589079 +step:5319 train loss:3.616512 +step:5320 train loss:3.569290 +step:5321 train loss:3.687275 +step:5322 train loss:3.601452 +step:5323 train loss:3.601662 +step:5324 train loss:3.544304 +step:5325 train loss:3.628455 +step:5326 train loss:3.618043 +step:5327 train loss:3.508034 +step:5328 train loss:3.647232 +step:5329 train loss:3.610288 +step:5330 train loss:3.612864 +step:5331 train loss:3.657562 +step:5332 train loss:3.582914 +step:5333 train loss:3.645764 +step:5334 train loss:3.619292 +step:5335 train loss:3.678789 +step:5336 train loss:3.716388 +step:5337 train loss:3.550290 +step:5338 train loss:3.556629 +step:5339 train loss:3.577624 +step:5340 train loss:3.601743 +step:5341 train loss:3.617435 +step:5342 train loss:3.520205 +step:5343 train loss:3.678129 +step:5344 train loss:3.558665 +step:5345 train loss:3.560712 +step:5346 train loss:3.562399 +step:5347 train loss:3.587024 +step:5348 train loss:3.627878 +step:5349 train loss:3.564306 +step:5350 train loss:3.610121 +step:5351 train loss:3.681610 +step:5352 train loss:3.726454 +step:5353 train loss:3.635413 +step:5354 train loss:3.604165 +step:5355 train loss:3.570735 +step:5356 train loss:3.590793 +step:5357 train loss:3.571400 +step:5358 train loss:3.593450 +step:5359 train loss:3.604466 +step:5360 train loss:3.578353 +step:5361 train loss:3.578941 +step:5362 train loss:3.565275 +step:5363 train loss:3.557637 +step:5364 train loss:3.559142 +step:5365 train loss:3.591900 +step:5366 train loss:3.624865 +step:5367 train loss:3.550071 +step:5368 train loss:3.622108 +step:5369 train loss:3.637484 +step:5370 train loss:3.539557 +step:5371 train loss:3.587869 +step:5372 train loss:3.610467 +step:5373 train loss:3.649422 +step:5374 train loss:3.533002 +step:5375 train loss:3.578714 +step:5376 train loss:3.645467 +step:5377 train loss:3.582178 +step:5378 train loss:3.556706 +step:5379 train loss:3.558896 +step:5380 train loss:3.600750 +step:5381 train loss:3.635587 +step:5382 train loss:3.552470 +step:5383 train loss:3.607294 +step:5384 train loss:3.621182 +step:5385 train loss:3.626332 +step:5386 train loss:3.599365 +step:5387 train loss:3.608712 +step:5388 train loss:3.619074 +step:5389 train loss:3.552957 +step:5390 train loss:3.579212 +step:5391 train loss:3.519218 +step:5392 train loss:3.585191 +step:5393 train loss:3.572759 +step:5394 train loss:3.572456 +step:5395 train loss:3.643225 +step:5396 train 
loss:3.609122 +step:5397 train loss:3.629071 +step:5398 train loss:3.623271 +step:5399 train loss:3.656099 +step:5400 train loss:3.663141 +step:5401 train loss:3.620064 +step:5402 train loss:3.733095 +step:5403 train loss:3.632746 +step:5404 train loss:3.609723 +step:5405 train loss:3.677155 +step:5406 train loss:3.636204 +step:5407 train loss:3.564910 +step:5408 train loss:3.713413 +step:5409 train loss:3.549818 +step:5410 train loss:3.615770 +step:5411 train loss:3.596476 +step:5412 train loss:3.575408 +step:5413 train loss:3.623754 +step:5414 train loss:3.602729 +step:5415 train loss:3.581946 +step:5416 train loss:3.575310 +step:5417 train loss:3.644379 +step:5418 train loss:3.661718 +step:5419 train loss:3.562827 +step:5420 train loss:3.621440 +step:5421 train loss:3.590972 +step:5422 train loss:3.638934 +step:5423 train loss:3.612669 +step:5424 train loss:3.515135 +step:5425 train loss:3.581869 +step:5426 train loss:3.668648 +step:5427 train loss:3.564853 +step:5428 train loss:3.599841 +step:5429 train loss:3.545588 +step:5430 train loss:3.574797 +step:5431 train loss:3.634759 +step:5432 train loss:3.611327 +step:5433 train loss:3.616049 +step:5434 train loss:3.563893 +step:5435 train loss:3.561736 +step:5436 train loss:3.563504 +step:5437 train loss:3.604963 +step:5438 train loss:3.581361 +step:5439 train loss:3.589052 +step:5440 train loss:3.629013 +step:5441 train loss:3.652512 +step:5442 train loss:3.581898 +step:5443 train loss:3.575574 +step:5444 train loss:3.522230 +step:5445 train loss:3.607005 +step:5446 train loss:3.576770 +step:5447 train loss:3.612036 +step:5448 train loss:3.667520 +step:5449 train loss:3.556425 +step:5450 train loss:3.592397 +step:5451 train loss:3.582842 +step:5452 train loss:3.600009 +step:5453 train loss:3.654274 +step:5454 train loss:3.579040 +step:5455 train loss:3.570336 +step:5456 train loss:3.708540 +step:5457 train loss:3.590176 +step:5458 train loss:3.623313 +step:5459 train loss:3.567067 +step:5460 train loss:3.587160 +step:5461 train loss:3.588472 +step:5462 train loss:3.589496 +step:5463 train loss:3.598067 +step:5464 train loss:3.597268 +step:5465 train loss:3.542874 +step:5466 train loss:3.619049 +step:5467 train loss:3.598003 +step:5468 train loss:3.606724 +step:5469 train loss:3.699459 +step:5470 train loss:3.592959 +step:5471 train loss:3.665588 +step:5472 train loss:3.612662 +step:5473 train loss:3.519764 +step:5474 train loss:3.850546 +step:5475 train loss:3.528624 +step:5476 train loss:3.605185 +step:5477 train loss:3.606288 +step:5478 train loss:3.602345 +step:5479 train loss:3.745518 +step:5480 train loss:3.590419 +step:5481 train loss:3.654782 +step:5482 train loss:3.569689 +step:5483 train loss:3.601673 +step:5484 train loss:3.641701 +step:5485 train loss:3.558549 +step:5486 train loss:3.607842 +step:5487 train loss:3.605534 +step:5488 train loss:3.520896 +step:5489 train loss:3.621902 +step:5490 train loss:3.570494 +step:5491 train loss:3.671550 +step:5492 train loss:3.596735 +step:5493 train loss:3.529216 +step:5494 train loss:3.584304 +step:5495 train loss:3.560079 +step:5496 train loss:3.561482 +step:5497 train loss:3.675183 +step:5498 train loss:3.546280 +step:5499 train loss:3.683799 +step:5500 validation loss:3.519027 total_sharp:9.7864e-04 L1_sharp:1.0169e-03 L2_sharp:1.0560e-04 L3_sharp:3.3384e-04 L4_sharp:1.5996e-04 L5_sharp:1.1649e-04 L6_sharp:1.2237e-04 L7_sharp:1.9526e-04 L8_sharp:2.7079e-04 L9_sharp:3.3212e-04 L10_sharp:4.3768e-04 L11_sharp:3.8004e-04 L12_sharp:5.2982e-04 total_fnorm:4.4230e+00 
total_l1_linf:3.9250e+04 total_spectral:4.4230e+00 L1_fnorm:9.4059e-01 L2_fnorm:9.8206e-01 L3_fnorm:9.9527e-01 L4_fnorm:1.0220e+00 L5_fnorm:1.0506e+00 L6_fnorm:1.0604e+00 L7_fnorm:1.0611e+00 L8_fnorm:1.0636e+00 L9_fnorm:1.0608e+00 L10_fnorm:1.0380e+00 L11_fnorm:1.0380e+00 L12_fnorm:1.0299e+00 L1_l1linf:1.2422e+00 L2_l1linf:1.0506e+00 L3_l1linf:1.0253e+00 L4_l1linf:1.0511e+00 L5_l1linf:1.1069e+00 L6_l1linf:1.0912e+00 L7_l1linf:1.0847e+00 L8_l1linf:1.0531e+00 L9_l1linf:1.1407e+00 L10_l1linf:1.1073e+00 L11_l1linf:1.1523e+00 L12_l1linf:1.2042e+00 L1_spectral:1.3496e-01 L2_spectral:1.4456e-01 L3_spectral:1.4306e-01 L4_spectral:1.1125e-01 L5_spectral:9.3261e-02 L6_spectral:8.8517e-02 L7_spectral:8.0352e-02 L8_spectral:8.0733e-02 L9_spectral:1.1299e-01 L10_spectral:1.3579e-01 L11_spectral:1.5483e-01 L12_spectral:1.6518e-01 ip_v_neg_g:1.6511e-02 cos_v_neg_g:3.0326e-03 v_norm:4.4230e+00 g_norm:1.2309e+00 hv_norm:3.4842e-01 cos_v_hv:1.2423e-02 hg_norm:1.1081e+01 cos_g_hg:6.9333e-01 v_par:7.7777e-04 v_perp:4.4230e+00 L1_cos_v_neg_g:7.6553e-03 L1_v_norm:9.4059e-01 L2_cos_v_neg_g:3.7606e-03 L2_v_norm:9.8206e-01 L3_cos_v_neg_g:4.0751e-03 L3_v_norm:9.9527e-01 L4_cos_v_neg_g:2.8703e-03 L4_v_norm:1.0220e+00 L5_cos_v_neg_g:2.8669e-03 L5_v_norm:1.0506e+00 L6_cos_v_neg_g:3.2786e-03 L6_v_norm:1.0604e+00 L7_cos_v_neg_g:3.4333e-03 L7_v_norm:1.0611e+00 L8_cos_v_neg_g:3.9023e-03 L8_v_norm:1.0636e+00 L9_cos_v_neg_g:4.3397e-03 L9_v_norm:1.0608e+00 L10_cos_v_neg_g:4.5304e-03 L10_v_norm:1.0380e+00 L11_cos_v_neg_g:3.4755e-03 L11_v_norm:1.0380e+00 L12_cos_v_neg_g:4.3235e-03 L12_v_norm:1.0299e+00 +step:5500 train loss:3.597536 +step:5501 train loss:3.672327 +step:5502 train loss:3.623081 +step:5503 train loss:3.586186 +step:5504 train loss:3.633848 +step:5505 train loss:3.594276 +step:5506 train loss:3.636876 +step:5507 train loss:3.624360 +step:5508 train loss:3.646612 +step:5509 train loss:3.655036 +step:5510 train loss:3.628396 +step:5511 train loss:3.622592 +step:5512 train loss:3.743859 +step:5513 train loss:3.546037 +step:5514 train loss:3.605109 +step:5515 train loss:3.630484 +step:5516 train loss:3.654910 +step:5517 train loss:3.612593 +step:5518 train loss:3.639941 +step:5519 train loss:3.672018 +step:5520 train loss:3.581385 +step:5521 train loss:3.594826 +step:5522 train loss:3.564032 +step:5523 train loss:3.607817 +step:5524 train loss:3.655089 +step:5525 train loss:3.561117 +step:5526 train loss:3.575370 +step:5527 train loss:3.597336 +step:5528 train loss:3.699361 +step:5529 train loss:3.665690 +step:5530 train loss:3.631843 +step:5531 train loss:3.566034 +step:5532 train loss:3.595765 +step:5533 train loss:3.627990 +step:5534 train loss:3.541234 +step:5535 train loss:3.595153 +step:5536 train loss:3.535008 +step:5537 train loss:3.582219 +step:5538 train loss:3.571589 +step:5539 train loss:3.515609 +step:5540 train loss:3.738044 +step:5541 train loss:3.550310 +step:5542 train loss:3.602936 +step:5543 train loss:3.591701 +step:5544 train loss:3.584701 +step:5545 train loss:3.573744 +step:5546 train loss:3.609354 +step:5547 train loss:3.541104 +step:5548 train loss:3.583473 +step:5549 train loss:3.587626 +step:5550 train loss:3.612644 +step:5551 train loss:3.615320 +step:5552 train loss:3.571259 +step:5553 train loss:3.601789 +step:5554 train loss:3.570865 +step:5555 train loss:3.576274 +step:5556 train loss:3.594106 +step:5557 train loss:3.657752 +step:5558 train loss:3.579822 +step:5559 train loss:3.587645 +step:5560 train loss:3.576184 +step:5561 train loss:3.613548 +step:5562 train loss:3.565801 
+step:5563 train loss:3.548896 +step:5564 train loss:3.583973 +step:5565 train loss:3.650334 +step:5566 train loss:3.553989 +step:5567 train loss:3.669621 +step:5568 train loss:3.788789 +step:5569 train loss:3.579621 +step:5570 train loss:3.511878 +step:5571 train loss:3.601116 +step:5572 train loss:3.539326 +step:5573 train loss:3.530185 +step:5574 train loss:3.495239 +step:5575 train loss:3.596982 +step:5576 train loss:3.580629 +step:5577 train loss:3.585655 +step:5578 train loss:3.615366 +step:5579 train loss:3.571553 +step:5580 train loss:3.592802 +step:5581 train loss:3.615828 +step:5582 train loss:3.594762 +step:5583 train loss:3.603950 +step:5584 train loss:3.726518 +step:5585 train loss:3.632020 +step:5586 train loss:3.568653 +step:5587 train loss:3.599048 +step:5588 train loss:3.614734 +step:5589 train loss:3.613390 +step:5590 train loss:3.672656 +step:5591 train loss:3.538893 +step:5592 train loss:3.728341 +step:5593 train loss:3.599539 +step:5594 train loss:3.600556 +step:5595 train loss:3.594930 +step:5596 train loss:3.547923 +step:5597 train loss:3.565104 +step:5598 train loss:3.567000 +step:5599 train loss:3.572639 +step:5600 train loss:3.615129 +step:5601 train loss:3.641963 +step:5602 train loss:3.575636 +step:5603 train loss:3.614514 +step:5604 train loss:3.611422 +step:5605 train loss:3.585457 +step:5606 train loss:3.591318 +step:5607 train loss:3.617574 +step:5608 train loss:3.564639 +step:5609 train loss:3.611526 +step:5610 train loss:3.568518 +step:5611 train loss:3.607148 +step:5612 train loss:3.638117 +step:5613 train loss:3.597640 +step:5614 train loss:3.561928 +step:5615 train loss:3.666476 +step:5616 train loss:3.556684 +step:5617 train loss:3.648487 +step:5618 train loss:3.630199 +step:5619 train loss:3.587675 +step:5620 train loss:3.588534 +step:5621 train loss:3.661691 +step:5622 train loss:3.545828 +step:5623 train loss:3.583033 +step:5624 train loss:3.569131 +step:5625 train loss:3.605859 +step:5626 train loss:3.596327 +step:5627 train loss:3.569951 +step:5628 train loss:3.612709 +step:5629 train loss:3.592805 +step:5630 train loss:3.524210 +step:5631 train loss:3.563421 +step:5632 train loss:3.606549 +step:5633 train loss:3.598738 +step:5634 train loss:3.554818 +step:5635 train loss:3.590261 +step:5636 train loss:3.572537 +step:5637 train loss:3.710411 +step:5638 train loss:3.620884 +step:5639 train loss:3.601291 +step:5640 train loss:3.604528 +step:5641 train loss:3.642286 +step:5642 train loss:3.572608 +step:5643 train loss:3.592070 +step:5644 train loss:3.670900 +step:5645 train loss:3.630833 +step:5646 train loss:3.627423 +step:5647 train loss:3.617484 +step:5648 train loss:3.603267 +step:5649 train loss:3.520168 +step:5650 train loss:3.523661 +step:5651 train loss:3.600226 +step:5652 train loss:3.599947 +step:5653 train loss:3.567728 +step:5654 train loss:3.693869 +step:5655 train loss:3.557706 +step:5656 train loss:3.585766 +step:5657 train loss:3.648013 +step:5658 train loss:3.552797 +step:5659 train loss:3.588527 +step:5660 train loss:3.638315 +step:5661 train loss:3.578358 +step:5662 train loss:3.618803 +step:5663 train loss:3.506930 +step:5664 train loss:3.480693 +step:5665 train loss:3.602722 +step:5666 train loss:3.605243 +step:5667 train loss:3.639164 +step:5668 train loss:3.573120 +step:5669 train loss:3.586062 +step:5670 train loss:3.583156 +step:5671 train loss:3.573796 +step:5672 train loss:3.618444 +step:5673 train loss:3.587811 +step:5674 train loss:3.658921 +step:5675 train loss:3.573756 +step:5676 train loss:3.721920 +step:5677 train 
loss:3.614989 +step:5678 train loss:3.599180 +step:5679 train loss:3.590345 +step:5680 train loss:3.620425 +step:5681 train loss:3.589478 +step:5682 train loss:3.601469 +step:5683 train loss:3.560588 +step:5684 train loss:3.571684 +step:5685 train loss:3.613899 +step:5686 train loss:3.629067 +step:5687 train loss:3.582751 +step:5688 train loss:3.667884 +step:5689 train loss:3.573197 +step:5690 train loss:3.722059 +step:5691 train loss:3.550680 +step:5692 train loss:3.544539 +step:5693 train loss:3.548388 +step:5694 train loss:3.567948 +step:5695 train loss:3.585956 +step:5696 train loss:3.633953 +step:5697 train loss:3.561217 +step:5698 train loss:3.579412 +step:5699 train loss:3.590552 +step:5700 train loss:3.587513 +step:5701 train loss:3.585090 +step:5702 train loss:3.647982 +step:5703 train loss:3.548228 +step:5704 train loss:3.592901 +step:5705 train loss:3.602396 +step:5706 train loss:3.623651 +step:5707 train loss:3.543118 +step:5708 train loss:3.627732 +step:5709 train loss:3.630503 +step:5710 train loss:3.621721 +step:5711 train loss:3.643452 +step:5712 train loss:3.623622 +step:5713 train loss:3.548167 +step:5714 train loss:3.634391 +step:5715 train loss:3.591475 +step:5716 train loss:3.593331 +step:5717 train loss:3.621736 +step:5718 train loss:3.563114 +step:5719 train loss:3.635907 +step:5720 train loss:3.607581 +step:5721 train loss:3.537239 +step:5722 train loss:3.548365 +step:5723 train loss:3.630095 +step:5724 train loss:3.549222 +step:5725 train loss:3.622183 +step:5726 train loss:3.615911 +step:5727 train loss:3.572067 +step:5728 train loss:3.579250 +step:5729 train loss:3.575440 +step:5730 train loss:3.652764 +step:5731 train loss:3.518548 +step:5732 train loss:3.581010 +step:5733 train loss:3.574039 +step:5734 train loss:3.587549 +step:5735 train loss:3.577780 +step:5736 train loss:3.580571 +step:5737 train loss:3.598436 +step:5738 train loss:3.564373 +step:5739 train loss:3.575913 +step:5740 train loss:3.616187 +step:5741 train loss:3.592843 +step:5742 train loss:3.645459 +step:5743 train loss:3.608345 +step:5744 train loss:3.569224 +step:5745 train loss:3.570864 +step:5746 train loss:3.604319 +step:5747 train loss:3.586926 +step:5748 train loss:3.635305 +step:5749 train loss:3.590048 +step:5750 validation loss:3.515535 +step:5750 train loss:3.597316 +step:5751 train loss:3.611003 +step:5752 train loss:3.595683 +step:5753 train loss:3.567463 +step:5754 train loss:3.575286 +step:5755 train loss:3.589562 +step:5756 train loss:3.580919 +step:5757 train loss:3.641897 +step:5758 train loss:3.575712 +step:5759 train loss:3.539393 +step:5760 train loss:3.622667 +step:5761 train loss:3.617267 +step:5762 train loss:3.575283 +step:5763 train loss:3.601436 +step:5764 train loss:3.561844 +step:5765 train loss:3.682509 +step:5766 train loss:3.589563 +step:5767 train loss:3.626184 +step:5768 train loss:3.562937 +step:5769 train loss:3.682477 +step:5770 train loss:3.605877 +step:5771 train loss:3.633732 +step:5772 train loss:3.585730 +step:5773 train loss:3.563510 +step:5774 train loss:3.572997 +step:5775 train loss:3.641856 +step:5776 train loss:3.627580 +step:5777 train loss:3.548465 +step:5778 train loss:3.633439 +step:5779 train loss:3.596055 +step:5780 train loss:3.568168 +step:5781 train loss:3.634708 +step:5782 train loss:3.592974 +step:5783 train loss:3.568537 +step:5784 train loss:3.659779 +step:5785 train loss:3.649461 +step:5786 train loss:3.558030 +step:5787 train loss:3.606104 +step:5788 train loss:3.612083 +step:5789 train loss:3.556426 +step:5790 train loss:3.659788 
+step:5791 train loss:3.586317 +step:5792 train loss:3.858540 +step:5793 train loss:3.626552 +step:5794 train loss:3.647423 +step:5795 train loss:3.637590 +step:5796 train loss:3.622538 +step:5797 train loss:3.604504 +step:5798 train loss:3.602376 +step:5799 train loss:3.569879 +step:5800 train loss:3.732620 +step:5801 train loss:3.603334 +step:5802 train loss:3.593508 +step:5803 train loss:3.601319 +step:5804 train loss:3.618138 +step:5805 train loss:3.584759 +step:5806 train loss:3.623589 +step:5807 train loss:3.544178 +step:5808 train loss:3.576790 +step:5809 train loss:3.589583 +step:5810 train loss:3.560465 +step:5811 train loss:3.576372 +step:5812 train loss:3.555618 +step:5813 train loss:3.568052 +step:5814 train loss:3.560228 +step:5815 train loss:3.562097 +step:5816 train loss:3.623356 +step:5817 train loss:3.637958 +step:5818 train loss:3.611119 +step:5819 train loss:3.661409 +step:5820 train loss:3.600725 +step:5821 train loss:3.594935 +step:5822 train loss:3.611668 +step:5823 train loss:3.616110 +step:5824 train loss:3.564536 +step:5825 train loss:3.659526 +step:5826 train loss:3.573481 +step:5827 train loss:3.537891 +step:5828 train loss:3.522999 +step:5829 train loss:3.589510 +step:5830 train loss:3.563956 +step:5831 train loss:3.533153 +step:5832 train loss:3.647899 +step:5833 train loss:3.627309 +step:5834 train loss:3.612999 +step:5835 train loss:3.560935 +step:5836 train loss:3.526011 +step:5837 train loss:3.647002 +step:5838 train loss:3.624979 +step:5839 train loss:3.600263 +step:5840 train loss:3.682172 +step:5841 train loss:3.606735 +step:5842 train loss:3.620046 +step:5843 train loss:3.565690 +step:5844 train loss:3.633693 +step:5845 train loss:3.543260 +step:5846 train loss:3.589566 +step:5847 train loss:3.618190 +step:5848 train loss:3.682150 +step:5849 train loss:3.579386 +step:5850 train loss:3.610300 +step:5851 train loss:3.574796 +step:5852 train loss:3.665434 +step:5853 train loss:3.755630 +step:5854 train loss:3.544137 +step:5855 train loss:3.606787 +step:5856 train loss:3.577660 +step:5857 train loss:3.589950 +step:5858 train loss:3.562392 +step:5859 train loss:3.570091 +step:5860 train loss:3.667294 +step:5861 train loss:3.555191 +step:5862 train loss:3.666612 +step:5863 train loss:3.606675 +step:5864 train loss:3.594167 +step:5865 train loss:3.596297 +step:5866 train loss:3.586969 +step:5867 train loss:3.672075 +step:5868 train loss:3.591470 +step:5869 train loss:3.620112 +step:5870 train loss:3.595890 +step:5871 train loss:3.575969 +step:5872 train loss:3.603999 +step:5873 train loss:3.582686 +step:5874 train loss:3.665793 +step:5875 train loss:3.591504 +step:5876 train loss:3.571315 +step:5877 train loss:3.579123 +step:5878 train loss:3.578975 +step:5879 train loss:3.552574 +step:5880 train loss:3.751114 +step:5881 train loss:3.590989 +step:5882 train loss:3.563046 +step:5883 train loss:3.571359 +step:5884 train loss:3.585088 +step:5885 train loss:3.581501 +step:5886 train loss:3.600699 +step:5887 train loss:3.598651 +step:5888 train loss:3.579577 +step:5889 train loss:3.560803 +step:5890 train loss:3.604529 +step:5891 train loss:3.549584 +step:5892 train loss:3.633984 +step:5893 train loss:3.556176 +step:5894 train loss:3.545415 +step:5895 train loss:3.551729 +step:5896 train loss:3.561617 +step:5897 train loss:3.629231 +step:5898 train loss:3.846863 +step:5899 train loss:3.578666 +step:5900 train loss:3.626714 +step:5901 train loss:3.579880 +step:5902 train loss:3.593223 +step:5903 train loss:3.583615 +step:5904 train loss:3.613155 +step:5905 train 
loss:3.720272 +step:5906 train loss:3.660172 +step:5907 train loss:3.603422 +step:5908 train loss:3.579916 +step:5909 train loss:3.572920 +step:5910 train loss:3.559174 +step:5911 train loss:3.576048 +step:5912 train loss:3.599025 +step:5913 train loss:3.609385 +step:5914 train loss:3.590790 +step:5915 train loss:3.720175 +step:5916 train loss:3.603941 +step:5917 train loss:3.571463 +step:5918 train loss:3.568673 +step:5919 train loss:3.596154 +step:5920 train loss:3.593213 +step:5921 train loss:3.563188 +step:5922 train loss:3.619839 +step:5923 train loss:3.611729 +step:5924 train loss:3.568223 +step:5925 train loss:3.693539 +step:5926 train loss:3.576391 +step:5927 train loss:3.552251 +step:5928 train loss:3.585580 +step:5929 train loss:3.610166 +step:5930 train loss:3.558474 +step:5931 train loss:3.545766 +step:5932 train loss:3.582431 +step:5933 train loss:3.640203 +step:5934 train loss:3.553649 +step:5935 train loss:3.581124 +step:5936 train loss:3.569395 +step:5937 train loss:3.548597 +step:5938 train loss:3.570024 +step:5939 train loss:3.545886 +step:5940 train loss:3.630810 +step:5941 train loss:3.561228 +step:5942 train loss:3.576502 +step:5943 train loss:3.579214 +step:5944 train loss:3.636008 +step:5945 train loss:3.566281 +step:5946 train loss:3.543882 +step:5947 train loss:3.563078 +step:5948 train loss:3.597728 +step:5949 train loss:3.644184 +step:5950 train loss:3.601914 +step:5951 train loss:3.603872 +step:5952 train loss:3.527127 +step:5953 train loss:3.571988 +step:5954 train loss:3.584100 +step:5955 train loss:3.586084 +step:5956 train loss:3.562772 +step:5957 train loss:3.530043 +step:5958 train loss:3.603481 +step:5959 train loss:3.562548 +step:5960 train loss:3.538095 +step:5961 train loss:3.564257 +step:5962 train loss:3.593877 +step:5963 train loss:3.626503 +step:5964 train loss:3.585626 +step:5965 train loss:3.602912 +step:5966 train loss:3.597125 +step:5967 train loss:3.564197 +step:5968 train loss:3.638021 +step:5969 train loss:3.579825 +step:5970 train loss:3.597756 +step:5971 train loss:3.548027 +step:5972 train loss:3.575965 +step:5973 train loss:3.563257 +step:5974 train loss:3.590451 +step:5975 train loss:3.555481 +step:5976 train loss:3.600121 +step:5977 train loss:3.557385 +step:5978 train loss:3.542906 +step:5979 train loss:3.578374 +step:5980 train loss:3.648846 +step:5981 train loss:3.541885 +step:5982 train loss:3.557407 +step:5983 train loss:3.627506 +step:5984 train loss:3.568195 +step:5985 train loss:3.613046 +step:5986 train loss:3.584991 +step:5987 train loss:3.570132 +step:5988 train loss:3.577635 +step:5989 train loss:3.595000 +step:5990 train loss:3.525030 +step:5991 train loss:3.587450 +step:5992 train loss:3.621353 +step:5993 train loss:3.571649 +step:5994 train loss:3.594020 +step:5995 train loss:3.485507 +step:5996 train loss:3.650030 +step:5997 train loss:3.628614 +step:5998 train loss:3.508403 +step:5999 train loss:3.535917 +step:6000 validation loss:3.502927 total_sharp:1.1012e-03 L1_sharp:9.7043e-04 L2_sharp:1.2946e-04 L3_sharp:2.5589e-04 L4_sharp:1.5273e-04 L5_sharp:1.5868e-04 L6_sharp:1.5472e-04 L7_sharp:2.8567e-04 L8_sharp:3.6080e-04 L9_sharp:3.3922e-04 L10_sharp:4.5047e-04 L11_sharp:4.1237e-04 L12_sharp:6.2143e-04 total_fnorm:4.4480e+00 total_l1_linf:3.9494e+04 total_spectral:4.4480e+00 L1_fnorm:9.8386e-01 L2_fnorm:9.8875e-01 L3_fnorm:1.0186e+00 L4_fnorm:1.0287e+00 L5_fnorm:1.0627e+00 L6_fnorm:1.0656e+00 L7_fnorm:1.0660e+00 L8_fnorm:1.0626e+00 L9_fnorm:1.0578e+00 L10_fnorm:1.0348e+00 L11_fnorm:1.0417e+00 L12_fnorm:1.0397e+00 
L1_l1linf:1.1679e+00 L2_l1linf:1.0757e+00 L3_l1linf:1.0784e+00 L4_l1linf:1.0591e+00 L5_l1linf:1.0887e+00 L6_l1linf:1.0801e+00 L7_l1linf:1.0666e+00 L8_l1linf:1.0807e+00 L9_l1linf:1.1067e+00 L10_l1linf:1.1552e+00 L11_l1linf:1.1823e+00 L12_l1linf:1.1963e+00 L1_spectral:1.4209e-01 L2_spectral:1.4743e-01 L3_spectral:1.4165e-01 L4_spectral:1.1103e-01 L5_spectral:9.2758e-02 L6_spectral:8.6700e-02 L7_spectral:7.8223e-02 L8_spectral:8.3507e-02 L9_spectral:1.0725e-01 L10_spectral:1.3935e-01 L11_spectral:1.5362e-01 L12_spectral:1.6167e-01 ip_v_neg_g:1.2114e-02 cos_v_neg_g:3.9999e-03 v_norm:4.4480e+00 g_norm:6.8088e-01 hv_norm:2.9453e-01 cos_v_hv:1.6630e-02 hg_norm:3.0424e+00 cos_g_hg:5.7281e-01 v_par:7.5398e-04 v_perp:4.4480e+00 L1_cos_v_neg_g:1.0633e-02 L1_v_norm:9.8386e-01 L2_cos_v_neg_g:4.6775e-03 L2_v_norm:9.8875e-01 L3_cos_v_neg_g:4.5248e-03 L3_v_norm:1.0186e+00 L4_cos_v_neg_g:2.8001e-03 L4_v_norm:1.0287e+00 L5_cos_v_neg_g:3.0762e-03 L5_v_norm:1.0627e+00 L6_cos_v_neg_g:4.0561e-03 L6_v_norm:1.0656e+00 L7_cos_v_neg_g:4.8814e-03 L7_v_norm:1.0660e+00 L8_cos_v_neg_g:4.4040e-03 L8_v_norm:1.0626e+00 L9_cos_v_neg_g:4.3517e-03 L9_v_norm:1.0578e+00 L10_cos_v_neg_g:5.0037e-03 L10_v_norm:1.0348e+00 L11_cos_v_neg_g:7.3052e-03 L11_v_norm:1.0417e+00 L12_cos_v_neg_g:1.0918e-02 L12_v_norm:1.0397e+00 +step:6000 train loss:3.585345 +step:6001 train loss:3.548057 +step:6002 train loss:3.583396 +step:6003 train loss:3.603142 +step:6004 train loss:3.554029 +step:6005 train loss:3.621819 +step:6006 train loss:3.531694 +step:6007 train loss:3.551702 +step:6008 train loss:3.565414 +step:6009 train loss:3.603294 +step:6010 train loss:3.598615 +step:6011 train loss:3.590038 +step:6012 train loss:3.552570 +step:6013 train loss:3.612060 +step:6014 train loss:3.630905 +step:6015 train loss:3.625838 +step:6016 train loss:3.594523 +step:6017 train loss:3.602723 +step:6018 train loss:3.541493 +step:6019 train loss:3.578562 +step:6020 train loss:3.562951 +step:6021 train loss:3.490626 +step:6022 train loss:3.602284 +step:6023 train loss:3.539097 +step:6024 train loss:3.613033 +step:6025 train loss:3.580081 +step:6026 train loss:3.552160 +step:6027 train loss:3.593909 +step:6028 train loss:3.508265 +step:6029 train loss:3.627377 +step:6030 train loss:3.594668 +step:6031 train loss:3.564946 +step:6032 train loss:3.529236 +step:6033 train loss:3.584453 +step:6034 train loss:3.611827 +step:6035 train loss:3.528594 +step:6036 train loss:3.503732 +step:6037 train loss:3.618904 +step:6038 train loss:3.624459 +step:6039 train loss:3.607680 +step:6040 train loss:3.563859 +step:6041 train loss:3.541325 +step:6042 train loss:3.521945 +step:6043 train loss:3.581009 +step:6044 train loss:3.699247 +step:6045 train loss:3.542003 +step:6046 train loss:3.554793 +step:6047 train loss:3.589535 +step:6048 train loss:3.601870 +step:6049 train loss:3.576162 +step:6050 train loss:3.545871 +step:6051 train loss:3.595329 +step:6052 train loss:3.567490 +step:6053 train loss:3.687211 +step:6054 train loss:3.726551 +step:6055 train loss:3.540705 +step:6056 train loss:3.534909 +step:6057 train loss:3.568911 +step:6058 train loss:3.596991 +step:6059 train loss:3.597765 +step:6060 train loss:3.603896 +step:6061 train loss:3.619570 +step:6062 train loss:3.571561 +step:6063 train loss:3.584857 +step:6064 train loss:3.580860 +step:6065 train loss:3.582935 +step:6066 train loss:3.568528 +step:6067 train loss:3.610014 +step:6068 train loss:3.548181 +step:6069 train loss:3.508336 +step:6070 train loss:3.655107 +step:6071 train loss:3.599568 +step:6072 train 
loss:3.542487 +step:6073 train loss:3.578450 +step:6074 train loss:3.663619 +step:6075 train loss:3.583128 +step:6076 train loss:3.592341 +step:6077 train loss:3.594675 +step:6078 train loss:3.530674 +step:6079 train loss:3.560143 +step:6080 train loss:3.565527 +step:6081 train loss:3.603738 +step:6082 train loss:3.553570 +step:6083 train loss:3.564671 +step:6084 train loss:3.630324 +step:6085 train loss:3.625672 +step:6086 train loss:3.526922 +step:6087 train loss:3.573629 +step:6088 train loss:3.557902 +step:6089 train loss:3.612504 +step:6090 train loss:3.618662 +step:6091 train loss:3.566760 +step:6092 train loss:3.527399 +step:6093 train loss:3.590255 +step:6094 train loss:3.504838 +step:6095 train loss:3.671320 +step:6096 train loss:3.542005 +step:6097 train loss:3.616058 +step:6098 train loss:3.590709 +step:6099 train loss:3.658521 +step:6100 train loss:3.642657 +step:6101 train loss:3.576840 +step:6102 train loss:3.695347 +step:6103 train loss:3.578896 +step:6104 train loss:3.694159 +step:6105 train loss:3.629513 +step:6106 train loss:3.566076 +step:6107 train loss:3.631790 +step:6108 train loss:3.590665 +step:6109 train loss:3.662462 +step:6110 train loss:3.596086 +step:6111 train loss:3.627929 +step:6112 train loss:3.565119 +step:6113 train loss:3.592067 +step:6114 train loss:3.563472 +step:6115 train loss:3.622656 +step:6116 train loss:3.567692 +step:6117 train loss:3.620724 +step:6118 train loss:3.604137 +step:6119 train loss:3.609268 +step:6120 train loss:3.756081 +step:6121 train loss:3.592262 +step:6122 train loss:3.601153 +step:6123 train loss:3.576858 +step:6124 train loss:3.558448 +step:6125 train loss:3.549170 +step:6126 train loss:3.565962 +step:6127 train loss:3.555971 +step:6128 train loss:3.523661 +step:6129 train loss:3.762136 +step:6130 train loss:3.544257 +step:6131 train loss:3.519753 +step:6132 train loss:3.596144 +step:6133 train loss:3.561850 +step:6134 train loss:3.589834 +step:6135 train loss:3.673159 +step:6136 train loss:3.692957 +step:6137 train loss:3.552324 +step:6138 train loss:3.608238 +step:6139 train loss:3.586999 +step:6140 train loss:3.585680 +step:6141 train loss:3.545154 +step:6142 train loss:3.610482 +step:6143 train loss:3.574698 +step:6144 train loss:3.591679 +step:6145 train loss:3.846412 +step:6146 train loss:3.677548 +step:6147 train loss:3.758423 +step:6148 train loss:3.531721 +step:6149 train loss:3.658598 +step:6150 train loss:3.610642 +step:6151 train loss:3.561832 +step:6152 train loss:3.559765 +step:6153 train loss:3.626269 +step:6154 train loss:3.712096 +step:6155 train loss:3.575885 +step:6156 train loss:3.683590 +step:6157 train loss:3.606281 +step:6158 train loss:3.598949 +step:6159 train loss:3.563417 +step:6160 train loss:3.730062 +step:6161 train loss:3.581227 +step:6162 train loss:3.598298 +step:6163 train loss:3.627992 +step:6164 train loss:3.543623 +step:6165 train loss:3.610025 +step:6166 train loss:3.603049 +step:6167 train loss:3.623628 +step:6168 train loss:3.593200 +step:6169 train loss:3.589811 +step:6170 train loss:3.593360 +step:6171 train loss:3.559439 +step:6172 train loss:3.548959 +step:6173 train loss:3.599298 +step:6174 train loss:3.528164 +step:6175 train loss:3.539128 +step:6176 train loss:3.523008 +step:6177 train loss:3.617653 +step:6178 train loss:3.563129 +step:6179 train loss:3.569575 +step:6180 train loss:3.578413 +step:6181 train loss:3.611419 +step:6182 train loss:3.494392 +step:6183 train loss:3.504906 +step:6184 train loss:3.621259 +step:6185 train loss:3.578018 +step:6186 train loss:3.536000 
+step:6187 train loss:3.576641 +step:6188 train loss:3.547338 +step:6189 train loss:3.584582 +step:6190 train loss:3.545716 +step:6191 train loss:3.577087 +step:6192 train loss:3.545117 +step:6193 train loss:3.613083 +step:6194 train loss:3.602886 +step:6195 train loss:3.584229 +step:6196 train loss:3.597462 +step:6197 train loss:3.624372 +step:6198 train loss:3.540174 +step:6199 train loss:3.560567 +step:6200 train loss:3.601219 +step:6201 train loss:3.644120 +step:6202 train loss:3.645960 +step:6203 train loss:3.641906 +step:6204 train loss:3.627829 +step:6205 train loss:3.566273 +step:6206 train loss:3.553147 +step:6207 train loss:3.612211 +step:6208 train loss:3.639009 +step:6209 train loss:3.606273 +step:6210 train loss:3.636461 +step:6211 train loss:3.555102 +step:6212 train loss:3.546158 +step:6213 train loss:3.560693 +step:6214 train loss:3.536739 +step:6215 train loss:3.709998 +step:6216 train loss:3.585051 +step:6217 train loss:3.641976 +step:6218 train loss:3.615580 +step:6219 train loss:3.630743 +step:6220 train loss:3.587366 +step:6221 train loss:3.552043 +step:6222 train loss:3.785419 +step:6223 train loss:3.562426 +step:6224 train loss:3.600740 +step:6225 train loss:3.577451 +step:6226 train loss:3.581425 +step:6227 train loss:3.584143 +step:6228 train loss:3.577284 +step:6229 train loss:3.617449 +step:6230 train loss:3.571291 +step:6231 train loss:3.683213 +step:6232 train loss:3.524880 +step:6233 train loss:3.564864 +step:6234 train loss:3.572062 +step:6235 train loss:3.599294 +step:6236 train loss:3.535162 +step:6237 train loss:3.557164 +step:6238 train loss:3.584610 +step:6239 train loss:3.569870 +step:6240 train loss:3.587734 +step:6241 train loss:3.573085 +step:6242 train loss:3.573296 +step:6243 train loss:3.608949 +step:6244 train loss:3.763567 +step:6245 train loss:3.560937 +step:6246 train loss:3.547592 +step:6247 train loss:3.542878 +step:6248 train loss:3.547410 +step:6249 train loss:3.482394 +step:6250 validation loss:3.493228 +step:6250 train loss:3.518719 +step:6251 train loss:3.541686 +step:6252 train loss:3.580348 +step:6253 train loss:3.594074 +step:6254 train loss:3.581518 +step:6255 train loss:3.547225 +step:6256 train loss:3.601384 +step:6257 train loss:3.598738 +step:6258 train loss:3.579952 +step:6259 train loss:3.588091 +step:6260 train loss:3.615889 +step:6261 train loss:3.633733 +step:6262 train loss:3.529803 +step:6263 train loss:3.563012 +step:6264 train loss:3.573103 +step:6265 train loss:3.560183 +step:6266 train loss:3.767249 +step:6267 train loss:3.567245 +step:6268 train loss:3.655061 +step:6269 train loss:3.526140 +step:6270 train loss:3.542665 +step:6271 train loss:3.590110 +step:6272 train loss:3.580877 +step:6273 train loss:3.781490 +step:6274 train loss:3.559189 +step:6275 train loss:3.595277 +step:6276 train loss:3.564140 +step:6277 train loss:3.550884 +step:6278 train loss:3.533199 +step:6279 train loss:3.588034 +step:6280 train loss:3.593285 +step:6281 train loss:3.525188 +step:6282 train loss:3.540040 +step:6283 train loss:3.630782 +step:6284 train loss:3.595227 +step:6285 train loss:3.598210 +step:6286 train loss:3.543252 +step:6287 train loss:3.572201 +step:6288 train loss:3.669137 +step:6289 train loss:3.533618 +step:6290 train loss:3.525588 +step:6291 train loss:3.563728 +step:6292 train loss:3.582786 +step:6293 train loss:3.566703 +step:6294 train loss:3.554772 +step:6295 train loss:3.576055 +step:6296 train loss:3.542605 +step:6297 train loss:3.667217 +step:6298 train loss:3.611391 +step:6299 train loss:3.508174 +step:6300 
train loss:3.594693 +step:6301 train loss:3.616840 +step:6302 train loss:3.599333 +step:6303 train loss:3.568816 +step:6304 train loss:3.584655 +step:6305 train loss:3.555717 +step:6306 train loss:3.564759 +step:6307 train loss:3.575935 +step:6308 train loss:3.550629 +step:6309 train loss:3.550498 +step:6310 train loss:3.598212 +step:6311 train loss:3.553713 +step:6312 train loss:3.592330 +step:6313 train loss:3.527302 +step:6314 train loss:3.554746 +step:6315 train loss:3.605075 +step:6316 train loss:3.529589 +step:6317 train loss:3.520294 +step:6318 train loss:3.636481 +step:6319 train loss:3.562947 +step:6320 train loss:3.580062 +step:6321 train loss:3.564105 +step:6322 train loss:3.565919 +step:6323 train loss:3.495675 +step:6324 train loss:3.505768 +step:6325 train loss:3.603721 +step:6326 train loss:3.521999 +step:6327 train loss:3.598113 +step:6328 train loss:3.575501 +step:6329 train loss:3.496525 +step:6330 train loss:3.522882 +step:6331 train loss:3.542446 +step:6332 train loss:3.676905 +step:6333 train loss:3.552695 +step:6334 train loss:3.531390 +step:6335 train loss:3.501741 +step:6336 train loss:3.533524 +step:6337 train loss:3.557743 +step:6338 train loss:3.510851 +step:6339 train loss:3.556338 +step:6340 train loss:3.538631 +step:6341 train loss:3.551137 +step:6342 train loss:3.548061 +step:6343 train loss:3.648365 +step:6344 train loss:3.498775 +step:6345 train loss:3.510955 +step:6346 train loss:3.592826 +step:6347 train loss:3.468579 +step:6348 train loss:3.561358 +step:6349 train loss:3.538673 +step:6350 train loss:3.514065 +step:6351 train loss:3.512267 +step:6352 train loss:3.529040 +step:6353 train loss:3.546927 +step:6354 train loss:3.561949 +step:6355 train loss:3.569589 +step:6356 train loss:3.581179 +step:6357 train loss:3.439552 +step:6358 train loss:3.526923 +step:6359 train loss:3.586157 +step:6360 train loss:3.496803 +step:6361 train loss:3.497951 +step:6362 train loss:3.540784 +step:6363 train loss:3.521255 +step:6364 train loss:3.505270 +step:6365 train loss:3.577856 +step:6366 train loss:3.590906 +step:6367 train loss:3.516690 +step:6368 train loss:3.560858 +step:6369 train loss:3.527093 +step:6370 train loss:3.575762 +step:6371 train loss:3.492496 +step:6372 train loss:3.525560 +step:6373 train loss:3.548140 +step:6374 train loss:3.581362 +step:6375 train loss:3.537534 +step:6376 train loss:3.563515 +step:6377 train loss:3.561640 +step:6378 train loss:3.511014 +step:6379 train loss:3.549983 +step:6380 train loss:3.595526 +step:6381 train loss:3.558889 +step:6382 train loss:3.512851 +step:6383 train loss:3.580525 +step:6384 train loss:3.553018 +step:6385 train loss:3.529987 +step:6386 train loss:3.563312 +step:6387 train loss:3.544366 +step:6388 train loss:3.584309 +step:6389 train loss:3.591099 +step:6390 train loss:3.542822 +step:6391 train loss:3.529678 +step:6392 train loss:3.514728 +step:6393 train loss:3.570072 +step:6394 train loss:3.559875 +step:6395 train loss:3.741773 +step:6396 train loss:3.563840 +step:6397 train loss:3.503154 +step:6398 train loss:3.578241 +step:6399 train loss:3.516349 +step:6400 train loss:3.590952 +step:6401 train loss:3.627684 +step:6402 train loss:3.558023 +step:6403 train loss:3.550979 +step:6404 train loss:3.527054 +step:6405 train loss:3.554693 +step:6406 train loss:3.559945 +step:6407 train loss:3.617533 +step:6408 train loss:3.510636 +step:6409 train loss:3.494400 +step:6410 train loss:3.630306 +step:6411 train loss:3.557563 +step:6412 train loss:3.560367 +step:6413 train loss:3.564997 +step:6414 train loss:3.512188 
+step:6415 train loss:3.571347 +step:6416 train loss:3.543817 +step:6417 train loss:3.513773 +step:6418 train loss:3.503965 +step:6419 train loss:3.588356 +step:6420 train loss:3.518519 +step:6421 train loss:3.542227 +step:6422 train loss:3.533897 +step:6423 train loss:3.540411 +step:6424 train loss:3.563895 +step:6425 train loss:3.558729 +step:6426 train loss:3.601315 +step:6427 train loss:3.563116 +step:6428 train loss:3.599032 +step:6429 train loss:3.565800 +step:6430 train loss:3.542063 +step:6431 train loss:3.514428 +step:6432 train loss:3.549431 +step:6433 train loss:3.562923 +step:6434 train loss:3.451756 +step:6435 train loss:3.628438 +step:6436 train loss:3.561099 +step:6437 train loss:3.524138 +step:6438 train loss:3.554740 +step:6439 train loss:3.531396 +step:6440 train loss:3.544151 +step:6441 train loss:3.537979 +step:6442 train loss:3.479584 +step:6443 train loss:3.535453 +step:6444 train loss:3.673037 +step:6445 train loss:3.573648 +step:6446 train loss:3.579430 +step:6447 train loss:3.560424 +step:6448 train loss:3.507366 +step:6449 train loss:3.529302 +step:6450 train loss:3.513440 +step:6451 train loss:3.503270 +step:6452 train loss:3.504959 +step:6453 train loss:3.549005 +step:6454 train loss:3.569478 +step:6455 train loss:3.565157 +step:6456 train loss:3.581543 +step:6457 train loss:3.559528 +step:6458 train loss:3.531535 +step:6459 train loss:3.513003 +step:6460 train loss:3.517718 +step:6461 train loss:3.517854 +step:6462 train loss:3.514075 +step:6463 train loss:3.609255 +step:6464 train loss:3.516622 +step:6465 train loss:3.561418 +step:6466 train loss:3.572978 +step:6467 train loss:3.498965 +step:6468 train loss:3.578875 +step:6469 train loss:3.485111 +step:6470 train loss:3.610193 +step:6471 train loss:3.515850 +step:6472 train loss:3.673088 +step:6473 train loss:3.561368 +step:6474 train loss:3.588180 +step:6475 train loss:3.534641 +step:6476 train loss:3.602996 +step:6477 train loss:3.534971 +step:6478 train loss:3.665677 +step:6479 train loss:3.583776 +step:6480 train loss:3.516644 +step:6481 train loss:3.572021 +step:6482 train loss:3.515756 +step:6483 train loss:3.574350 +step:6484 train loss:3.532817 +step:6485 train loss:3.591033 +step:6486 train loss:3.525189 +step:6487 train loss:3.522576 +step:6488 train loss:3.519771 +step:6489 train loss:3.521036 +step:6490 train loss:3.548489 +step:6491 train loss:3.516507 +step:6492 train loss:3.618098 +step:6493 train loss:3.523790 +step:6494 train loss:3.526402 +step:6495 train loss:3.527913 +step:6496 train loss:3.558792 +step:6497 train loss:3.578263 +step:6498 train loss:3.684626 +step:6499 train loss:3.660872 +step:6500 validation loss:3.486420 total_sharp:1.3717e-03 L1_sharp:9.8979e-04 L2_sharp:1.2818e-04 L3_sharp:2.2367e-04 L4_sharp:2.1800e-04 L5_sharp:1.9410e-04 L6_sharp:1.8860e-04 L7_sharp:2.6398e-04 L8_sharp:3.9638e-04 L9_sharp:4.5029e-04 L10_sharp:6.4266e-04 L11_sharp:4.1763e-04 L12_sharp:5.1092e-04 total_fnorm:4.5182e+00 total_l1_linf:4.0212e+04 total_spectral:4.5182e+00 L1_fnorm:1.0386e+00 L2_fnorm:1.0313e+00 L3_fnorm:1.0469e+00 L4_fnorm:1.0602e+00 L5_fnorm:1.0813e+00 L6_fnorm:1.0865e+00 L7_fnorm:1.0761e+00 L8_fnorm:1.0681e+00 L9_fnorm:1.0671e+00 L10_fnorm:1.0437e+00 L11_fnorm:1.0516e+00 L12_fnorm:1.0492e+00 L1_l1linf:1.2502e+00 L2_l1linf:1.1006e+00 L3_l1linf:1.1053e+00 L4_l1linf:1.1216e+00 L5_l1linf:1.1319e+00 L6_l1linf:1.1972e+00 L7_l1linf:1.0877e+00 L8_l1linf:1.0746e+00 L9_l1linf:1.1010e+00 L10_l1linf:1.1129e+00 L11_l1linf:1.2411e+00 L12_l1linf:1.1673e+00 L1_spectral:1.7174e-01 L2_spectral:1.5401e-01 
L3_spectral:1.5279e-01 L4_spectral:1.0798e-01 L5_spectral:1.0198e-01 L6_spectral:1.0704e-01 L7_spectral:8.6271e-02 L8_spectral:8.2753e-02 L9_spectral:1.1219e-01 L10_spectral:1.4906e-01 L11_spectral:1.4648e-01 L12_spectral:1.5769e-01 ip_v_neg_g:1.2598e-02 cos_v_neg_g:3.8607e-03 v_norm:4.5182e+00 g_norm:7.2220e-01 hv_norm:3.8701e-01 cos_v_hv:1.6015e-02 hg_norm:3.9545e+00 cos_g_hg:6.0299e-01 v_par:8.2468e-04 v_perp:4.5182e+00 L1_cos_v_neg_g:7.6476e-03 L1_v_norm:1.0386e+00 L2_cos_v_neg_g:3.3520e-03 L2_v_norm:1.0313e+00 L3_cos_v_neg_g:4.3310e-03 L3_v_norm:1.0469e+00 L4_cos_v_neg_g:3.4580e-03 L4_v_norm:1.0602e+00 L5_cos_v_neg_g:3.4747e-03 L5_v_norm:1.0813e+00 L6_cos_v_neg_g:3.9205e-03 L6_v_norm:1.0865e+00 L7_cos_v_neg_g:3.8281e-03 L7_v_norm:1.0761e+00 L8_cos_v_neg_g:4.3908e-03 L8_v_norm:1.0681e+00 L9_cos_v_neg_g:5.4609e-03 L9_v_norm:1.0671e+00 L10_cos_v_neg_g:6.7998e-03 L10_v_norm:1.0437e+00 L11_cos_v_neg_g:8.0674e-03 L11_v_norm:1.0516e+00 L12_cos_v_neg_g:8.5865e-03 L12_v_norm:1.0492e+00 +step:6500 train loss:3.503580 +step:6501 train loss:3.520432 +step:6502 train loss:3.540558 +step:6503 train loss:3.597883 +step:6504 train loss:3.547872 +step:6505 train loss:3.553489 +step:6506 train loss:3.514304 +step:6507 train loss:3.582725 +step:6508 train loss:3.550361 +step:6509 train loss:3.530614 +step:6510 train loss:3.539707 +step:6511 train loss:3.554004 +step:6512 train loss:3.496421 +step:6513 train loss:3.565159 +step:6514 train loss:3.439749 +step:6515 train loss:3.528891 +step:6516 train loss:3.579996 +step:6517 train loss:3.495056 +step:6518 train loss:3.534544 +step:6519 train loss:3.524293 +step:6520 train loss:3.614553 +step:6521 train loss:3.590126 +step:6522 train loss:3.600716 +step:6523 train loss:3.495777 +step:6524 train loss:3.579538 +step:6525 train loss:3.568550 +step:6526 train loss:3.500683 +step:6527 train loss:3.556838 +step:6528 train loss:3.576645 +step:6529 train loss:3.602564 +step:6530 train loss:3.508850 +step:6531 train loss:3.587927 +step:6532 train loss:3.515893 +step:6533 train loss:3.552243 +step:6534 train loss:3.561908 +step:6535 train loss:3.535473 +step:6536 train loss:3.670542 +step:6537 train loss:3.488278 +step:6538 train loss:3.590401 +step:6539 train loss:3.513432 +step:6540 train loss:3.626302 +step:6541 train loss:3.606307 +step:6542 train loss:3.564178 +step:6543 train loss:3.518682 +step:6544 train loss:3.497411 +step:6545 train loss:3.490001 +step:6546 train loss:3.552457 +step:6547 train loss:3.604241 +step:6548 train loss:3.546113 +step:6549 train loss:3.564727 +step:6550 train loss:3.674388 +step:6551 train loss:3.554491 +step:6552 train loss:3.547050 +step:6553 train loss:3.584675 +step:6554 train loss:3.475424 +step:6555 train loss:3.559931 +step:6556 train loss:3.432697 +step:6557 train loss:3.779595 +step:6558 train loss:3.615377 +step:6559 train loss:3.528592 +step:6560 train loss:3.561659 +step:6561 train loss:3.538088 +step:6562 train loss:3.556628 +step:6563 train loss:3.447480 +step:6564 train loss:3.553362 +step:6565 train loss:3.459900 +step:6566 train loss:3.570823 +step:6567 train loss:3.540922 +step:6568 train loss:3.588781 +step:6569 train loss:3.532591 +step:6570 train loss:3.569729 +step:6571 train loss:3.498265 +step:6572 train loss:3.574547 +step:6573 train loss:3.588826 +step:6574 train loss:3.573268 +step:6575 train loss:3.516288 +step:6576 train loss:3.507146 +step:6577 train loss:3.580487 +step:6578 train loss:3.445702 +step:6579 train loss:3.548571 +step:6580 train loss:3.505285 +step:6581 train loss:3.516403 +step:6582 
train loss:3.498687 +step:6583 train loss:3.596586 +step:6584 train loss:3.526429 +step:6585 train loss:3.564675 +step:6586 train loss:3.568854 +step:6587 train loss:3.579323 +step:6588 train loss:3.545357 +step:6589 train loss:3.572643 +step:6590 train loss:3.514724 +step:6591 train loss:3.567090 +step:6592 train loss:3.510048 +step:6593 train loss:3.517460 +step:6594 train loss:3.546103 +step:6595 train loss:3.521639 +step:6596 train loss:3.524475 +step:6597 train loss:3.547424 +step:6598 train loss:3.593315 +step:6599 train loss:3.482848 +step:6600 train loss:3.537180 +step:6601 train loss:3.596053 +step:6602 train loss:3.522196 +step:6603 train loss:3.544851 +step:6604 train loss:3.557991 +step:6605 train loss:3.539795 +step:6606 train loss:3.599598 +step:6607 train loss:3.516478 +step:6608 train loss:3.531603 +step:6609 train loss:3.500728 +step:6610 train loss:3.613807 +step:6611 train loss:3.537411 +step:6612 train loss:3.578859 +step:6613 train loss:3.495896 +step:6614 train loss:3.525565 +step:6615 train loss:3.523375 +step:6616 train loss:3.506646 +step:6617 train loss:3.542952 +step:6618 train loss:3.532801 +step:6619 train loss:3.505673 +step:6620 train loss:3.607907 +step:6621 train loss:3.483327 +step:6622 train loss:3.558638 +step:6623 train loss:3.489656 +step:6624 train loss:3.565010 +step:6625 train loss:3.604209 +step:6626 train loss:3.570589 +step:6627 train loss:3.518860 +step:6628 train loss:3.579075 +step:6629 train loss:3.481315 +step:6630 train loss:3.517265 +step:6631 train loss:3.550835 +step:6632 train loss:3.590870 +step:6633 train loss:3.544720 +step:6634 train loss:3.604284 +step:6635 train loss:3.504627 +step:6636 train loss:3.544713 +step:6637 train loss:3.511655 +step:6638 train loss:3.511570 +step:6639 train loss:3.525287 +step:6640 train loss:3.512283 +step:6641 train loss:3.525077 +step:6642 train loss:3.526436 +step:6643 train loss:3.607487 +step:6644 train loss:3.610398 +step:6645 train loss:3.484084 +step:6646 train loss:3.575632 +step:6647 train loss:3.534181 +step:6648 train loss:3.635126 +step:6649 train loss:3.563782 +step:6650 train loss:3.513241 +step:6651 train loss:3.559322 +step:6652 train loss:3.574488 +step:6653 train loss:3.518985 +step:6654 train loss:3.515367 +step:6655 train loss:3.556982 +step:6656 train loss:3.525564 +step:6657 train loss:3.549136 +step:6658 train loss:3.533237 +step:6659 train loss:3.682269 +step:6660 train loss:3.584717 +step:6661 train loss:3.508811 +step:6662 train loss:3.542892 +step:6663 train loss:3.477832 +step:6664 train loss:3.554914 +step:6665 train loss:3.565006 +step:6666 train loss:3.584545 +step:6667 train loss:3.495753 +step:6668 train loss:3.621485 +step:6669 train loss:3.505173 +step:6670 train loss:3.513844 +step:6671 train loss:3.597147 +step:6672 train loss:3.549309 +step:6673 train loss:3.559948 +step:6674 train loss:3.529285 +step:6675 train loss:3.550120 +step:6676 train loss:3.557920 +step:6677 train loss:3.513736 +step:6678 train loss:3.586654 +step:6679 train loss:3.620754 +step:6680 train loss:3.617570 +step:6681 train loss:3.575830 +step:6682 train loss:3.514751 +step:6683 train loss:3.537953 +step:6684 train loss:3.551923 +step:6685 train loss:3.565402 +step:6686 train loss:3.497483 +step:6687 train loss:3.515417 +step:6688 train loss:3.561587 +step:6689 train loss:3.569109 +step:6690 train loss:3.541727 +step:6691 train loss:3.578008 +step:6692 train loss:3.583421 +step:6693 train loss:3.616301 +step:6694 train loss:3.570761 +step:6695 train loss:3.542838 +step:6696 train loss:3.482169 
+step:6697 train loss:3.695866 +step:6698 train loss:3.541010 +step:6699 train loss:3.539981 +step:6700 train loss:3.549531 +step:6701 train loss:3.611245 +step:6702 train loss:3.497358 +step:6703 train loss:3.544646 +step:6704 train loss:3.530715 +step:6705 train loss:3.544308 +step:6706 train loss:3.520901 +step:6707 train loss:3.594436 +step:6708 train loss:3.547340 +step:6709 train loss:3.576857 +step:6710 train loss:3.564860 +step:6711 train loss:3.520204 +step:6712 train loss:3.505897 +step:6713 train loss:3.530692 +step:6714 train loss:3.573300 +step:6715 train loss:3.515846 +step:6716 train loss:3.593423 +step:6717 train loss:3.538516 +step:6718 train loss:3.560257 +step:6719 train loss:3.595484 +step:6720 train loss:3.524937 +step:6721 train loss:3.540993 +step:6722 train loss:3.517664 +step:6723 train loss:3.644807 +step:6724 train loss:3.504331 +step:6725 train loss:3.560871 +step:6726 train loss:3.518532 +step:6727 train loss:3.580387 +step:6728 train loss:3.676614 +step:6729 train loss:3.539084 +step:6730 train loss:3.533562 +step:6731 train loss:3.576665 +step:6732 train loss:3.451296 +step:6733 train loss:3.590798 +step:6734 train loss:3.516521 +step:6735 train loss:3.543688 +step:6736 train loss:3.542443 +step:6737 train loss:3.542469 +step:6738 train loss:3.573535 +step:6739 train loss:3.529131 +step:6740 train loss:3.479168 +step:6741 train loss:3.590689 +step:6742 train loss:3.551475 +step:6743 train loss:3.554780 +step:6744 train loss:3.445649 +step:6745 train loss:3.606810 +step:6746 train loss:3.528968 +step:6747 train loss:3.525079 +step:6748 train loss:3.597806 +step:6749 train loss:3.578207 +step:6750 validation loss:3.477136 +step:6750 train loss:3.500894 +step:6751 train loss:3.535537 +step:6752 train loss:3.535790 +step:6753 train loss:3.573136 +step:6754 train loss:3.553674 +step:6755 train loss:3.566998 +step:6756 train loss:3.506503 +step:6757 train loss:3.476623 +step:6758 train loss:3.653167 +step:6759 train loss:3.546410 +step:6760 train loss:3.599972 +step:6761 train loss:3.532583 +step:6762 train loss:3.550458 +step:6763 train loss:3.451762 +step:6764 train loss:3.531966 +step:6765 train loss:3.535429 +step:6766 train loss:3.528557 +step:6767 train loss:3.483123 +step:6768 train loss:3.489728 +step:6769 train loss:3.453072 +step:6770 train loss:3.538145 +step:6771 train loss:3.537889 +step:6772 train loss:3.549488 +step:6773 train loss:3.526954 +step:6774 train loss:3.541205 +step:6775 train loss:3.586230 +step:6776 train loss:3.539489 +step:6777 train loss:3.619053 +step:6778 train loss:3.502948 +step:6779 train loss:3.557758 +step:6780 train loss:3.488188 +step:6781 train loss:3.550903 +step:6782 train loss:3.466541 +step:6783 train loss:3.496522 +step:6784 train loss:3.527601 +step:6785 train loss:3.511085 +step:6786 train loss:3.529422 +step:6787 train loss:3.603477 +step:6788 train loss:3.540331 +step:6789 train loss:3.549477 +step:6790 train loss:3.547048 +step:6791 train loss:3.560509 +step:6792 train loss:3.558930 +step:6793 train loss:3.557789 +step:6794 train loss:3.527375 +step:6795 train loss:3.526371 +step:6796 train loss:3.533191 +step:6797 train loss:3.629877 +step:6798 train loss:3.531737 +step:6799 train loss:3.525942 +step:6800 train loss:3.490755 +step:6801 train loss:3.623549 +step:6802 train loss:3.572246 +step:6803 train loss:3.563491 +step:6804 train loss:3.590040 +step:6805 train loss:3.551890 +step:6806 train loss:3.483473 +step:6807 train loss:3.542081 +step:6808 train loss:3.527051 +step:6809 train loss:3.553133 +step:6810 
train loss:3.679596 +step:6811 train loss:3.581118 +step:6812 train loss:3.550364 +step:6813 train loss:3.566411 +step:6814 train loss:3.573473 +step:6815 train loss:3.619179 +step:6816 train loss:3.533386 +step:6817 train loss:3.561344 +step:6818 train loss:3.536712 +step:6819 train loss:3.522057 +step:6820 train loss:3.548308 +step:6821 train loss:3.515023 +step:6822 train loss:3.620334 +step:6823 train loss:3.599345 +step:6824 train loss:3.574176 +step:6825 train loss:3.521672 +step:6826 train loss:3.566823 +step:6827 train loss:3.553228 +step:6828 train loss:3.568889 +step:6829 train loss:3.556467 +step:6830 train loss:3.521555 +step:6831 train loss:3.483601 +step:6832 train loss:3.471280 +step:6833 train loss:3.485986 +step:6834 train loss:3.573330 +step:6835 train loss:3.546258 +step:6836 train loss:3.464806 +step:6837 train loss:3.530099 +step:6838 train loss:3.590748 +step:6839 train loss:3.672238 +step:6840 train loss:3.547180 +step:6841 train loss:3.501905 +step:6842 train loss:3.552677 +step:6843 train loss:3.656896 +step:6844 train loss:3.537862 +step:6845 train loss:3.590405 +step:6846 train loss:3.651132 +step:6847 train loss:3.584863 +step:6848 train loss:3.570532 +step:6849 train loss:3.603463 +step:6850 train loss:3.571619 +step:6851 train loss:3.500177 +step:6852 train loss:3.492463 +step:6853 train loss:3.483722 +step:6854 train loss:3.560328 +step:6855 train loss:3.531597 +step:6856 train loss:3.514670 +step:6857 train loss:3.568506 +step:6858 train loss:3.595731 +step:6859 train loss:3.504164 +step:6860 train loss:3.616392 +step:6861 train loss:3.639019 +step:6862 train loss:3.549053 +step:6863 train loss:3.543191 +step:6864 train loss:3.492726 +step:6865 train loss:3.562144 +step:6866 train loss:3.490419 +step:6867 train loss:3.670119 +step:6868 train loss:3.544245 +step:6869 train loss:3.577183 +step:6870 train loss:3.613496 +step:6871 train loss:3.526687 +step:6872 train loss:3.528645 +step:6873 train loss:3.544902 +step:6874 train loss:3.500980 +step:6875 train loss:3.509043 +step:6876 train loss:3.537536 +step:6877 train loss:3.580163 +step:6878 train loss:3.490021 +step:6879 train loss:3.539894 +step:6880 train loss:3.547779 +step:6881 train loss:3.511436 +step:6882 train loss:3.574810 +step:6883 train loss:3.544260 +step:6884 train loss:3.792017 +step:6885 train loss:3.561770 +step:6886 train loss:3.541030 +step:6887 train loss:3.478872 +step:6888 train loss:3.582586 +step:6889 train loss:3.463998 +step:6890 train loss:3.576711 +step:6891 train loss:3.583725 +step:6892 train loss:3.681652 +step:6893 train loss:3.512240 +step:6894 train loss:3.573241 +step:6895 train loss:3.574365 +step:6896 train loss:3.551272 +step:6897 train loss:3.504478 +step:6898 train loss:3.505932 +step:6899 train loss:3.589818 +step:6900 train loss:3.564428 +step:6901 train loss:3.514776 +step:6902 train loss:3.450532 +step:6903 train loss:3.494987 +step:6904 train loss:3.605171 +step:6905 train loss:3.639791 +step:6906 train loss:3.558940 +step:6907 train loss:3.574833 +step:6908 train loss:3.611305 +step:6909 train loss:3.602402 +step:6910 train loss:3.481889 +step:6911 train loss:3.609932 +step:6912 train loss:3.501012 +step:6913 train loss:3.541264 +step:6914 train loss:3.501487 +step:6915 train loss:3.530959 +step:6916 train loss:3.503167 +step:6917 train loss:3.627425 +step:6918 train loss:3.572522 +step:6919 train loss:3.566107 +step:6920 train loss:3.550810 +step:6921 train loss:3.614989 +step:6922 train loss:3.608324 +step:6923 train loss:3.471862 +step:6924 train loss:3.552619 
+step:6925 train loss:3.524827 +step:6926 train loss:3.565151 +step:6927 train loss:3.616359 +step:6928 train loss:3.502451 +step:6929 train loss:3.516515 +step:6930 train loss:3.547679 +step:6931 train loss:3.546926 +step:6932 train loss:3.778695 +step:6933 train loss:3.612011 +step:6934 train loss:3.549270 +step:6935 train loss:3.536297 +step:6936 train loss:3.575027 +step:6937 train loss:3.521445 +step:6938 train loss:3.583078 +step:6939 train loss:3.519403 +step:6940 train loss:3.572471 +step:6941 train loss:3.488735 +step:6942 train loss:3.574153 +step:6943 train loss:3.467452 +step:6944 train loss:3.557017 +step:6945 train loss:3.498855 +step:6946 train loss:3.588487 +step:6947 train loss:3.515194 +step:6948 train loss:3.510058 +step:6949 train loss:3.584164 +step:6950 train loss:3.576335 +step:6951 train loss:3.579298 +step:6952 train loss:3.510002 +step:6953 train loss:3.555123 +step:6954 train loss:3.620809 +step:6955 train loss:3.533420 +step:6956 train loss:3.568329 +step:6957 train loss:3.559483 +step:6958 train loss:3.518913 +step:6959 train loss:3.559699 +step:6960 train loss:3.527187 +step:6961 train loss:3.533135 +step:6962 train loss:3.515175 +step:6963 train loss:3.487979 +step:6964 train loss:3.528517 +step:6965 train loss:3.524741 +step:6966 train loss:3.565532 +step:6967 train loss:3.502811 +step:6968 train loss:3.547037 +step:6969 train loss:3.561075 +step:6970 train loss:3.538081 +step:6971 train loss:3.602408 +step:6972 train loss:3.550581 +step:6973 train loss:3.507791 +step:6974 train loss:3.636255 +step:6975 train loss:3.538315 +step:6976 train loss:3.514339 +step:6977 train loss:3.547697 +step:6978 train loss:3.538743 +step:6979 train loss:3.552146 +step:6980 train loss:3.526556 +step:6981 train loss:3.592099 +step:6982 train loss:3.541113 +step:6983 train loss:3.531009 +step:6984 train loss:3.647051 +step:6985 train loss:3.495979 +step:6986 train loss:3.486778 +step:6987 train loss:3.535090 +step:6988 train loss:3.541202 +step:6989 train loss:3.684602 +step:6990 train loss:3.550447 +step:6991 train loss:3.506228 +step:6992 train loss:3.555269 +step:6993 train loss:3.623708 +step:6994 train loss:3.566481 +step:6995 train loss:3.518669 +step:6996 train loss:3.519363 +step:6997 train loss:3.601331 +step:6998 train loss:3.503304 +step:6999 train loss:3.551795 +step:7000 validation loss:3.471414 total_sharp:1.2386e-03 L1_sharp:8.9293e-04 L2_sharp:7.7488e-05 L3_sharp:5.3099e-04 L4_sharp:2.8749e-04 L5_sharp:2.1792e-04 L6_sharp:2.0857e-04 L7_sharp:2.7575e-04 L8_sharp:3.5561e-04 L9_sharp:3.4938e-04 L10_sharp:4.6503e-04 L11_sharp:3.7218e-04 L12_sharp:4.9040e-04 total_fnorm:4.4955e+00 total_l1_linf:3.9994e+04 total_spectral:4.4955e+00 L1_fnorm:1.0236e+00 L2_fnorm:1.0189e+00 L3_fnorm:1.0338e+00 L4_fnorm:1.0540e+00 L5_fnorm:1.0778e+00 L6_fnorm:1.0820e+00 L7_fnorm:1.0725e+00 L8_fnorm:1.0714e+00 L9_fnorm:1.0622e+00 L10_fnorm:1.0352e+00 L11_fnorm:1.0388e+00 L12_fnorm:1.0381e+00 L1_l1linf:1.1537e+00 L2_l1linf:1.0785e+00 L3_l1linf:1.0900e+00 L4_l1linf:1.0893e+00 L5_l1linf:1.1535e+00 L6_l1linf:1.1083e+00 L7_l1linf:1.0621e+00 L8_l1linf:1.0900e+00 L9_l1linf:1.1594e+00 L10_l1linf:1.1059e+00 L11_l1linf:1.2633e+00 L12_l1linf:1.2170e+00 L1_spectral:1.4932e-01 L2_spectral:1.4962e-01 L3_spectral:1.5135e-01 L4_spectral:1.1170e-01 L5_spectral:1.0226e-01 L6_spectral:1.0699e-01 L7_spectral:9.1757e-02 L8_spectral:8.3413e-02 L9_spectral:1.1712e-01 L10_spectral:1.4298e-01 L11_spectral:1.5150e-01 L12_spectral:1.5775e-01 ip_v_neg_g:1.3637e-02 cos_v_neg_g:4.0946e-03 v_norm:4.4955e+00 
g_norm:7.4087e-01 hv_norm:3.8994e-01 cos_v_hv:1.4280e-02 hg_norm:5.0145e+00 cos_g_hg:5.5085e-01 v_par:7.8923e-04 v_perp:4.4955e+00 L1_cos_v_neg_g:1.1068e-02 L1_v_norm:1.0236e+00 L2_cos_v_neg_g:3.5197e-03 L2_v_norm:1.0189e+00 L3_cos_v_neg_g:4.4479e-03 L3_v_norm:1.0338e+00 L4_cos_v_neg_g:4.3256e-03 L4_v_norm:1.0540e+00 L5_cos_v_neg_g:3.1443e-03 L5_v_norm:1.0778e+00 L6_cos_v_neg_g:2.8757e-03 L6_v_norm:1.0820e+00 L7_cos_v_neg_g:3.6478e-03 L7_v_norm:1.0725e+00 L8_cos_v_neg_g:4.3461e-03 L8_v_norm:1.0714e+00 L9_cos_v_neg_g:5.0728e-03 L9_v_norm:1.0622e+00 L10_cos_v_neg_g:6.2723e-03 L10_v_norm:1.0352e+00 L11_cos_v_neg_g:9.4156e-03 L11_v_norm:1.0388e+00 L12_cos_v_neg_g:1.0747e-02 L12_v_norm:1.0381e+00 +step:7000 train loss:3.628748 +step:7001 train loss:3.537038 +step:7002 train loss:3.523316 +step:7003 train loss:3.544158 +step:7004 train loss:3.541295 +step:7005 train loss:3.525926 +step:7006 train loss:3.534375 +step:7007 train loss:3.583215 +step:7008 train loss:3.522208 +step:7009 train loss:3.570440 +step:7010 train loss:3.497668 +step:7011 train loss:3.558135 +step:7012 train loss:3.526948 +step:7013 train loss:3.601804 +step:7014 train loss:3.507783 +step:7015 train loss:3.567132 +step:7016 train loss:3.555162 +step:7017 train loss:3.521877 +step:7018 train loss:3.599571 +step:7019 train loss:3.528201 +step:7020 train loss:3.575749 +step:7021 train loss:3.515607 +step:7022 train loss:3.533964 +step:7023 train loss:3.551434 +step:7024 train loss:3.514913 +step:7025 train loss:3.564702 +step:7026 train loss:3.519171 +step:7027 train loss:3.583814 +step:7028 train loss:3.507030 +step:7029 train loss:3.496063 +step:7030 train loss:3.506340 +step:7031 train loss:3.553427 +step:7032 train loss:3.561732 +step:7033 train loss:3.534916 +step:7034 train loss:3.558304 +step:7035 train loss:3.608358 +step:7036 train loss:3.530112 +step:7037 train loss:3.555039 +step:7038 train loss:3.516454 +step:7039 train loss:3.568597 +step:7040 train loss:3.488175 +step:7041 train loss:3.580693 +step:7042 train loss:3.513989 +step:7043 train loss:3.484639 +step:7044 train loss:3.530993 +step:7045 train loss:3.532064 +step:7046 train loss:3.524634 +step:7047 train loss:3.562280 +step:7048 train loss:3.511004 +step:7049 train loss:3.518508 +step:7050 train loss:3.541636 +step:7051 train loss:3.560606 +step:7052 train loss:3.562321 +step:7053 train loss:3.524204 +step:7054 train loss:3.504671 +step:7055 train loss:3.575138 +step:7056 train loss:3.572750 +step:7057 train loss:3.496493 +step:7058 train loss:3.617722 +step:7059 train loss:3.523852 +step:7060 train loss:3.532338 +step:7061 train loss:3.506480 +step:7062 train loss:3.530554 +step:7063 train loss:3.589040 +step:7064 train loss:3.512166 +step:7065 train loss:3.561586 +step:7066 train loss:3.521082 +step:7067 train loss:3.559374 +step:7068 train loss:3.531693 +step:7069 train loss:3.496344 +step:7070 train loss:3.520226 +step:7071 train loss:3.492319 +step:7072 train loss:3.495343 +step:7073 train loss:3.488206 +step:7074 train loss:3.482824 +step:7075 train loss:3.504200 +step:7076 train loss:3.514933 +step:7077 train loss:3.523611 +step:7078 train loss:3.568951 +step:7079 train loss:3.580374 +step:7080 train loss:3.527088 +step:7081 train loss:3.544936 +step:7082 train loss:3.513256 +step:7083 train loss:3.544237 +step:7084 train loss:3.534824 +step:7085 train loss:3.498822 +step:7086 train loss:3.533737 +step:7087 train loss:3.513056 +step:7088 train loss:3.634755 +step:7089 train loss:3.534191 +step:7090 train loss:3.494464 +step:7091 train loss:3.508599 
+step:7092 train loss:3.489641 +step:7093 train loss:3.583014 +step:7094 train loss:3.504702 +step:7095 train loss:3.521918 +step:7096 train loss:3.538384 +step:7097 train loss:3.524781 +step:7098 train loss:3.549039 +step:7099 train loss:3.503953 +step:7100 train loss:3.538051 +step:7101 train loss:3.607249 +step:7102 train loss:3.496765 +step:7103 train loss:3.520323 +step:7104 train loss:3.553993 +step:7105 train loss:3.532102 +step:7106 train loss:3.516607 +step:7107 train loss:3.551279 +step:7108 train loss:3.619357 +step:7109 train loss:3.552975 +step:7110 train loss:3.575618 +step:7111 train loss:3.554752 +step:7112 train loss:3.544103 +step:7113 train loss:3.541086 +step:7114 train loss:3.566368 +step:7115 train loss:3.596642 +step:7116 train loss:3.528658 +step:7117 train loss:3.576614 +step:7118 train loss:3.578829 +step:7119 train loss:3.540098 +step:7120 train loss:3.597923 +step:7121 train loss:3.511642 +step:7122 train loss:3.512248 +step:7123 train loss:3.456406 +step:7124 train loss:3.609709 +step:7125 train loss:3.462921 +step:7126 train loss:3.631907 +step:7127 train loss:3.590015 +step:7128 train loss:3.536551 +step:7129 train loss:3.538230 +step:7130 train loss:3.530720 +step:7131 train loss:3.470554 +step:7132 train loss:3.508711 +step:7133 train loss:3.558096 +step:7134 train loss:3.485433 +step:7135 train loss:3.543542 +step:7136 train loss:3.527278 +step:7137 train loss:3.506829 +step:7138 train loss:3.493025 +step:7139 train loss:3.497717 +step:7140 train loss:3.532230 +step:7141 train loss:3.529695 +step:7142 train loss:3.524276 +step:7143 train loss:3.562629 +step:7144 train loss:3.510253 +step:7145 train loss:3.526355 +step:7146 train loss:3.534554 +step:7147 train loss:3.557333 +step:7148 train loss:3.559654 +step:7149 train loss:3.568343 +step:7150 train loss:3.543876 +step:7151 train loss:3.505700 +step:7152 train loss:3.481339 +step:7153 train loss:3.513473 +step:7154 train loss:3.535256 +step:7155 train loss:3.552975 +step:7156 train loss:3.523498 +step:7157 train loss:3.541984 +step:7158 train loss:3.496083 +step:7159 train loss:3.550939 +step:7160 train loss:3.560365 +step:7161 train loss:3.508729 +step:7162 train loss:3.557749 +step:7163 train loss:3.493510 +step:7164 train loss:3.529088 +step:7165 train loss:3.533797 +step:7166 train loss:3.590551 +step:7167 train loss:3.569094 +step:7168 train loss:3.543556 +step:7169 train loss:3.522688 +step:7170 train loss:3.551113 +step:7171 train loss:3.499836 +step:7172 train loss:3.663692 +step:7173 train loss:3.510048 +step:7174 train loss:3.550260 +step:7175 train loss:3.524158 +step:7176 train loss:3.535428 +step:7177 train loss:3.548209 +step:7178 train loss:3.550241 +step:7179 train loss:3.532655 +step:7180 train loss:3.535841 +step:7181 train loss:3.563418 +step:7182 train loss:3.513491 +step:7183 train loss:3.588992 +step:7184 train loss:3.678372 +step:7185 train loss:3.591112 +step:7186 train loss:3.526690 +step:7187 train loss:3.539578 +step:7188 train loss:3.529761 +step:7189 train loss:3.526968 +step:7190 train loss:3.530861 +step:7191 train loss:3.522866 +step:7192 train loss:3.552908 +step:7193 train loss:3.474512 +step:7194 train loss:3.536652 +step:7195 train loss:3.512681 +step:7196 train loss:3.562827 +step:7197 train loss:3.537976 +step:7198 train loss:3.597757 +step:7199 train loss:3.555217 +step:7200 train loss:3.545273 +step:7201 train loss:3.556637 +step:7202 train loss:3.534333 +step:7203 train loss:3.549398 +step:7204 train loss:3.517642 +step:7205 train loss:3.475940 +step:7206 train 
loss:3.501086 +step:7207 train loss:3.681228 +step:7208 train loss:3.513245 +step:7209 train loss:3.593698 +step:7210 train loss:3.530084 +step:7211 train loss:3.559777 +step:7212 train loss:3.646203 +step:7213 train loss:3.490904 +step:7214 train loss:3.560022 +step:7215 train loss:3.528610 +step:7216 train loss:3.579741 +step:7217 train loss:3.537625 +step:7218 train loss:3.627492 +step:7219 train loss:3.533184 +step:7220 train loss:3.613858 +step:7221 train loss:3.492943 +step:7222 train loss:3.572891 +step:7223 train loss:3.491523 +step:7224 train loss:3.554024 +step:7225 train loss:3.533312 +step:7226 train loss:3.500736 +step:7227 train loss:3.521374 +step:7228 train loss:3.509240 +step:7229 train loss:3.512667 +step:7230 train loss:3.498623 +step:7231 train loss:3.630112 +step:7232 train loss:3.499022 +step:7233 train loss:3.569709 +step:7234 train loss:3.557299 +step:7235 train loss:3.529731 +step:7236 train loss:3.569151 +step:7237 train loss:3.521168 +step:7238 train loss:3.558494 +step:7239 train loss:3.514630 +step:7240 train loss:3.513175 +step:7241 train loss:3.522624 +step:7242 train loss:3.507513 +step:7243 train loss:3.550295 +step:7244 train loss:3.523487 +step:7245 train loss:3.530068 +step:7246 train loss:3.569347 +step:7247 train loss:3.525113 +step:7248 train loss:3.567097 +step:7249 train loss:3.512066 +step:7250 validation loss:3.463158 +step:7250 train loss:3.537982 +step:7251 train loss:3.582326 +step:7252 train loss:3.494112 +step:7253 train loss:3.584666 +step:7254 train loss:3.522640 +step:7255 train loss:3.492480 +step:7256 train loss:3.535029 +step:7257 train loss:3.576809 +step:7258 train loss:3.532355 +step:7259 train loss:3.518118 +step:7260 train loss:3.601599 +step:7261 train loss:3.557784 +step:7262 train loss:3.513886 +step:7263 train loss:3.557504 +step:7264 train loss:3.540141 +step:7265 train loss:3.444526 +step:7266 train loss:3.570022 +step:7267 train loss:3.486256 +step:7268 train loss:3.550753 +step:7269 train loss:3.555854 +step:7270 train loss:3.512880 +step:7271 train loss:3.528415 +step:7272 train loss:3.531661 +step:7273 train loss:3.531310 +step:7274 train loss:3.506970 +step:7275 train loss:3.580532 +step:7276 train loss:3.486640 +step:7277 train loss:3.532707 +step:7278 train loss:3.502428 +step:7279 train loss:3.486999 +step:7280 train loss:3.552213 +step:7281 train loss:3.577495 +step:7282 train loss:3.576737 +step:7283 train loss:3.467545 +step:7284 train loss:3.509137 +step:7285 train loss:3.535681 +step:7286 train loss:3.668465 +step:7287 train loss:3.577320 +step:7288 train loss:3.530710 +step:7289 train loss:3.536530 +step:7290 train loss:3.582328 +step:7291 train loss:3.545038 +step:7292 train loss:3.614124 +step:7293 train loss:3.512002 +step:7294 train loss:3.594625 +step:7295 train loss:3.483947 +step:7296 train loss:3.483432 +step:7297 train loss:3.526285 +step:7298 train loss:3.507605 +step:7299 train loss:3.545016 +step:7300 train loss:3.531174 +step:7301 train loss:3.484132 +step:7302 train loss:3.627772 +step:7303 train loss:3.520409 +step:7304 train loss:3.462938 +step:7305 train loss:3.539583 +step:7306 train loss:3.571872 +step:7307 train loss:3.574557 +step:7308 train loss:3.524333 +step:7309 train loss:3.490804 +step:7310 train loss:3.519699 +step:7311 train loss:3.506073 +step:7312 train loss:3.546752 +step:7313 train loss:3.581551 +step:7314 train loss:3.477734 +step:7315 train loss:3.473359 +step:7316 train loss:3.615669 +step:7317 train loss:3.554644 +step:7318 train loss:3.494983 +step:7319 train loss:3.520290 
+step:7320 train loss:3.552398 +step:7321 train loss:3.581432 +step:7322 train loss:3.460813 +step:7323 train loss:3.515676 +step:7324 train loss:3.539716 +step:7325 train loss:3.505532 +step:7326 train loss:3.532139 +step:7327 train loss:3.509717 +step:7328 train loss:3.628918 +step:7329 train loss:3.473830 +step:7330 train loss:3.527563 +step:7331 train loss:3.520588 +step:7332 train loss:3.565145 +step:7333 train loss:3.541954 +step:7334 train loss:3.513247 +step:7335 train loss:3.509682 +step:7336 train loss:3.764268 +step:7337 train loss:3.549238 +step:7338 train loss:3.546804 +step:7339 train loss:3.557714 +step:7340 train loss:3.543815 +step:7341 train loss:3.536493 +step:7342 train loss:3.524954 +step:7343 train loss:3.541659 +step:7344 train loss:3.616450 +step:7345 train loss:3.475786 +step:7346 train loss:3.511975 +step:7347 train loss:3.504102 +step:7348 train loss:3.509781 +step:7349 train loss:3.611069 +step:7350 train loss:3.593746 +step:7351 train loss:3.527951 +step:7352 train loss:3.554914 +step:7353 train loss:3.537481 +step:7354 train loss:3.487782 +step:7355 train loss:3.669078 +step:7356 train loss:3.643219 +step:7357 train loss:3.566577 +step:7358 train loss:3.544076 +step:7359 train loss:3.515333 +step:7360 train loss:3.522194 +step:7361 train loss:3.478834 +step:7362 train loss:3.525451 +step:7363 train loss:3.539227 +step:7364 train loss:3.574922 +step:7365 train loss:3.557879 +step:7366 train loss:3.520510 +step:7367 train loss:3.597847 +step:7368 train loss:3.575568 +step:7369 train loss:3.569877 +step:7370 train loss:3.534047 +step:7371 train loss:3.494344 +step:7372 train loss:3.551428 +step:7373 train loss:3.571113 +step:7374 train loss:3.666305 +step:7375 train loss:3.495511 +step:7376 train loss:3.513064 +step:7377 train loss:3.558064 +step:7378 train loss:3.509737 +step:7379 train loss:3.634535 +step:7380 train loss:3.596469 +step:7381 train loss:3.562125 +step:7382 train loss:3.526489 +step:7383 train loss:3.618229 +step:7384 train loss:3.561916 +step:7385 train loss:3.516473 +step:7386 train loss:3.520450 +step:7387 train loss:3.566595 +step:7388 train loss:3.597005 +step:7389 train loss:3.539915 +step:7390 train loss:3.481472 +step:7391 train loss:3.518319 +step:7392 train loss:3.575491 +step:7393 train loss:3.543192 +step:7394 train loss:3.583303 +step:7395 train loss:3.471969 +step:7396 train loss:3.569030 +step:7397 train loss:3.501017 +step:7398 train loss:3.516520 +step:7399 train loss:3.563541 +step:7400 train loss:3.565761 +step:7401 train loss:3.484143 +step:7402 train loss:3.601278 +step:7403 train loss:3.485253 +step:7404 train loss:3.552539 +step:7405 train loss:3.679534 +step:7406 train loss:3.504061 +step:7407 train loss:3.551104 +step:7408 train loss:3.548764 +step:7409 train loss:3.522778 +step:7410 train loss:3.691377 +step:7411 train loss:3.538517 +step:7412 train loss:3.537737 +step:7413 train loss:3.594681 +step:7414 train loss:3.502434 +step:7415 train loss:3.563675 +step:7416 train loss:3.443437 +step:7417 train loss:3.565161 +step:7418 train loss:3.548002 +step:7419 train loss:3.514745 +step:7420 train loss:3.507909 +step:7421 train loss:3.540062 +step:7422 train loss:3.498069 +step:7423 train loss:3.637940 +step:7424 train loss:3.700577 +step:7425 train loss:3.590062 +step:7426 train loss:3.556457 +step:7427 train loss:3.528861 +step:7428 train loss:3.546634 +step:7429 train loss:3.563059 +step:7430 train loss:3.490303 +step:7431 train loss:3.495579 +step:7432 train loss:3.503185 +step:7433 train loss:3.603614 +step:7434 train 
loss:3.515780 +step:7435 train loss:3.603019 +step:7436 train loss:3.643217 +step:7437 train loss:3.468851 +step:7438 train loss:3.527668 +step:7439 train loss:3.536197 +step:7440 train loss:3.507729 +step:7441 train loss:3.477566 +step:7442 train loss:3.703686 +step:7443 train loss:3.527548 +step:7444 train loss:3.571455 +step:7445 train loss:3.499574 +step:7446 train loss:3.523892 +step:7447 train loss:3.449134 +step:7448 train loss:3.503421 +step:7449 train loss:3.517057 +step:7450 train loss:3.551810 +step:7451 train loss:3.582373 +step:7452 train loss:3.512592 +step:7453 train loss:3.536821 +step:7454 train loss:3.522794 +step:7455 train loss:3.530360 +step:7456 train loss:3.505166 +step:7457 train loss:3.512773 +step:7458 train loss:3.549227 +step:7459 train loss:3.529328 +step:7460 train loss:3.537054 +step:7461 train loss:3.573496 +step:7462 train loss:3.509335 +step:7463 train loss:3.573618 +step:7464 train loss:3.493469 +step:7465 train loss:3.503061 +step:7466 train loss:3.503638 +step:7467 train loss:3.514255 +step:7468 train loss:3.568823 +step:7469 train loss:3.497225 +step:7470 train loss:3.527128 +step:7471 train loss:3.518815 +step:7472 train loss:3.552109 +step:7473 train loss:3.491553 +step:7474 train loss:3.478863 +step:7475 train loss:3.509444 +step:7476 train loss:3.545972 +step:7477 train loss:3.521284 +step:7478 train loss:3.518234 +step:7479 train loss:3.531343 +step:7480 train loss:3.809367 +step:7481 train loss:3.462892 +step:7482 train loss:3.536838 +step:7483 train loss:3.526211 +step:7484 train loss:3.550644 +step:7485 train loss:3.535957 +step:7486 train loss:3.560445 +step:7487 train loss:3.553417 +step:7488 train loss:3.575381 +step:7489 train loss:3.570250 +step:7490 train loss:3.514796 +step:7491 train loss:3.537559 +step:7492 train loss:3.649344 +step:7493 train loss:3.617725 +step:7494 train loss:3.647065 +step:7495 train loss:3.513056 +step:7496 train loss:3.501288 +step:7497 train loss:3.600959 +step:7498 train loss:3.534539 +step:7499 train loss:3.571650 +step:7500 validation loss:3.461684 total_sharp:9.0681e-04 L1_sharp:8.3987e-04 L2_sharp:7.8042e-05 L3_sharp:1.7995e-04 L4_sharp:1.2473e-04 L5_sharp:1.4411e-04 L6_sharp:1.5609e-04 L7_sharp:2.4247e-04 L8_sharp:2.8986e-04 L9_sharp:3.0514e-04 L10_sharp:4.5218e-04 L11_sharp:3.6142e-04 L12_sharp:3.2787e-04 total_fnorm:4.5204e+00 total_l1_linf:4.0204e+04 total_spectral:4.5204e+00 L1_fnorm:1.0229e+00 L2_fnorm:1.0283e+00 L3_fnorm:1.0433e+00 L4_fnorm:1.0611e+00 L5_fnorm:1.0804e+00 L6_fnorm:1.0837e+00 L7_fnorm:1.0824e+00 L8_fnorm:1.0786e+00 L9_fnorm:1.0759e+00 L10_fnorm:1.0496e+00 L11_fnorm:1.0580e+00 L12_fnorm:1.0537e+00 L1_l1linf:1.1401e+00 L2_l1linf:1.0827e+00 L3_l1linf:1.0644e+00 L4_l1linf:1.0747e+00 L5_l1linf:1.0874e+00 L6_l1linf:1.0961e+00 L7_l1linf:1.0921e+00 L8_l1linf:1.1081e+00 L9_l1linf:1.1453e+00 L10_l1linf:1.1240e+00 L11_l1linf:1.2481e+00 L12_l1linf:1.2203e+00 L1_spectral:1.4216e-01 L2_spectral:1.4561e-01 L3_spectral:1.4718e-01 L4_spectral:1.0729e-01 L5_spectral:8.8484e-02 L6_spectral:8.4793e-02 L7_spectral:7.8064e-02 L8_spectral:8.1038e-02 L9_spectral:1.1638e-01 L10_spectral:1.3859e-01 L11_spectral:1.4944e-01 L12_spectral:1.5078e-01 ip_v_neg_g:1.2218e-02 cos_v_neg_g:3.2728e-03 v_norm:4.5204e+00 g_norm:8.2585e-01 hv_norm:3.0338e-01 cos_v_hv:1.3512e-02 hg_norm:6.8106e+00 cos_g_hg:6.0265e-01 v_par:5.3849e-04 v_perp:4.5204e+00 L1_cos_v_neg_g:7.7373e-03 L1_v_norm:1.0229e+00 L2_cos_v_neg_g:3.7053e-03 L2_v_norm:1.0283e+00 L3_cos_v_neg_g:3.7894e-03 L3_v_norm:1.0433e+00 L4_cos_v_neg_g:3.1374e-03 
L4_v_norm:1.0611e+00 L5_cos_v_neg_g:3.3755e-03 L5_v_norm:1.0804e+00 L6_cos_v_neg_g:3.0908e-03 L6_v_norm:1.0837e+00 L7_cos_v_neg_g:3.6875e-03 L7_v_norm:1.0824e+00 L8_cos_v_neg_g:3.5563e-03 L8_v_norm:1.0786e+00 L9_cos_v_neg_g:3.1752e-03 L9_v_norm:1.0759e+00 L10_cos_v_neg_g:5.4496e-03 L10_v_norm:1.0496e+00 L11_cos_v_neg_g:6.9725e-03 L11_v_norm:1.0580e+00 L12_cos_v_neg_g:6.2234e-03 L12_v_norm:1.0537e+00 +step:7500 train loss:3.518735 +step:7501 train loss:3.506003 +step:7502 train loss:3.497462 +step:7503 train loss:3.477440 +step:7504 train loss:3.499899 +step:7505 train loss:3.488288 +step:7506 train loss:3.550911 +step:7507 train loss:3.467309 +step:7508 train loss:3.538107 +step:7509 train loss:3.508896 +step:7510 train loss:3.539779 +step:7511 train loss:3.545948 +step:7512 train loss:3.805174 +step:7513 train loss:3.495753 +step:7514 train loss:3.530225 +step:7515 train loss:3.492888 +step:7516 train loss:3.504852 +step:7517 train loss:3.538872 +step:7518 train loss:3.514817 +step:7519 train loss:3.528126 +step:7520 train loss:3.591715 +step:7521 train loss:3.479545 +step:7522 train loss:3.537137 +step:7523 train loss:3.567904 +step:7524 train loss:3.515456 +step:7525 train loss:3.518474 +step:7526 train loss:3.465572 +step:7527 train loss:3.474367 +step:7528 train loss:3.571195 +step:7529 train loss:3.549409 +step:7530 train loss:3.499111 +step:7531 train loss:3.574255 +step:7532 train loss:3.561384 +step:7533 train loss:3.488858 +step:7534 train loss:3.551413 +step:7535 train loss:3.555446 +step:7536 train loss:3.587635 +step:7537 train loss:3.603109 +step:7538 train loss:3.632055 +step:7539 train loss:3.533250 +step:7540 train loss:3.516795 +step:7541 train loss:3.571461 +step:7542 train loss:3.534325 +step:7543 train loss:3.491262 +step:7544 train loss:3.534137 +step:7545 train loss:3.519012 +step:7546 train loss:3.476798 +step:7547 train loss:3.523342 +step:7548 train loss:3.536524 +step:7549 train loss:3.516317 +step:7550 train loss:3.515478 +step:7551 train loss:3.615927 +step:7552 train loss:3.528443 +step:7553 train loss:3.566518 +step:7554 train loss:3.492274 +step:7555 train loss:3.582719 +step:7556 train loss:3.484912 +step:7557 train loss:3.580395 +step:7558 train loss:3.571398 +step:7559 train loss:3.525550 +step:7560 train loss:3.621347 +step:7561 train loss:3.593480 +step:7562 train loss:3.497090 +step:7563 train loss:3.493209 +step:7564 train loss:3.546819 +step:7565 train loss:3.566682 +step:7566 train loss:3.557461 +step:7567 train loss:3.572463 +step:7568 train loss:3.515815 +step:7569 train loss:3.578000 +step:7570 train loss:3.558418 +step:7571 train loss:3.641017 +step:7572 train loss:3.491452 +step:7573 train loss:3.558764 +step:7574 train loss:3.520941 +step:7575 train loss:3.513984 +step:7576 train loss:3.522096 +step:7577 train loss:3.537624 +step:7578 train loss:3.595346 +step:7579 train loss:3.531892 +step:7580 train loss:3.522243 +step:7581 train loss:3.507111 +step:7582 train loss:3.562508 +step:7583 train loss:3.498973 +step:7584 train loss:3.483716 +step:7585 train loss:3.451641 +step:7586 train loss:3.487635 +step:7587 train loss:3.549948 +step:7588 train loss:3.677623 +step:7589 train loss:3.498816 +step:7590 train loss:3.565481 +step:7591 train loss:3.568519 +step:7592 train loss:3.530229 +step:7593 train loss:3.554545 +step:7594 train loss:3.552669 +step:7595 train loss:3.520901 +step:7596 train loss:3.574929 +step:7597 train loss:3.478301 +step:7598 train loss:3.540617 +step:7599 train loss:3.531828 +step:7600 train loss:3.500927 +step:7601 train 
loss:3.608894 +step:7602 train loss:3.549374 +step:7603 train loss:3.509576 +step:7604 train loss:3.654541 +step:7605 train loss:3.543517 +step:7606 train loss:3.574945 +step:7607 train loss:3.532264 +step:7608 train loss:3.544271 +step:7609 train loss:3.571907 +step:7610 train loss:3.535871 +step:7611 train loss:3.509556 +step:7612 train loss:3.465777 +step:7613 train loss:3.512409 +step:7614 train loss:3.572354 +step:7615 train loss:3.536429 +step:7616 train loss:3.604470 +step:7617 train loss:3.503690 +step:7618 train loss:3.589018 +step:7619 train loss:3.533097 +step:7620 train loss:3.515941 +step:7621 train loss:3.463833 +step:7622 train loss:3.738369 +step:7623 train loss:3.752599 +step:7624 train loss:3.570404 +step:7625 train loss:3.603282 +step:7626 train loss:3.521696 +step:7627 train loss:3.593780 +step:7628 train loss:3.471934 +step:7629 train loss:3.533423 +step:7630 train loss:3.547640 +step:7631 train loss:3.526260 +step:7632 train loss:3.578863 +step:7633 train loss:3.643622 +step:7634 train loss:3.605462 +step:7635 train loss:3.511229 +step:7636 train loss:3.541752 +step:7637 train loss:3.482998 +step:7638 train loss:3.594216 +step:7639 train loss:3.522966 +step:7640 train loss:3.501571 +step:7641 train loss:3.531779 +step:7642 train loss:3.873327 +step:7643 train loss:3.622135 +step:7644 train loss:3.548844 +step:7645 train loss:3.536062 +step:7646 train loss:3.523636 +step:7647 train loss:3.515203 +step:7648 train loss:3.550890 +step:7649 train loss:3.508646 +step:7650 train loss:3.558785 +step:7651 train loss:3.577673 +step:7652 train loss:3.455918 +step:7653 train loss:3.655478 +step:7654 train loss:3.510677 +step:7655 train loss:3.529056 +step:7656 train loss:3.504124 +step:7657 train loss:3.516547 +step:7658 train loss:3.472288 +step:7659 train loss:3.536935 +step:7660 train loss:3.468649 +step:7661 train loss:3.487044 +step:7662 train loss:3.487013 +step:7663 train loss:3.534824 +step:7664 train loss:3.493892 +step:7665 train loss:3.470681 +step:7666 train loss:3.578073 +step:7667 train loss:3.491963 +step:7668 train loss:3.599652 +step:7669 train loss:3.532429 +step:7670 train loss:3.490045 +step:7671 train loss:3.544071 +step:7672 train loss:3.563653 +step:7673 train loss:3.527370 +step:7674 train loss:3.565905 +step:7675 train loss:3.618323 +step:7676 train loss:3.590049 +step:7677 train loss:3.614794 +step:7678 train loss:3.556910 +step:7679 train loss:3.577646 +step:7680 train loss:3.583883 +step:7681 train loss:3.551838 +step:7682 train loss:3.519501 +step:7683 train loss:3.523703 +step:7684 train loss:3.496663 +step:7685 train loss:3.475494 +step:7686 train loss:3.597096 +step:7687 train loss:3.510205 +step:7688 train loss:3.478917 +step:7689 train loss:3.525550 +step:7690 train loss:3.492844 +step:7691 train loss:3.521409 +step:7692 train loss:3.554057 +step:7693 train loss:3.555063 +step:7694 train loss:3.606738 +step:7695 train loss:3.538908 +step:7696 train loss:3.512523 +step:7697 train loss:3.498906 +step:7698 train loss:3.559801 +step:7699 train loss:3.554016 +step:7700 train loss:3.455308 +step:7701 train loss:3.572416 +step:7702 train loss:3.512963 +step:7703 train loss:3.513840 +step:7704 train loss:3.568551 +step:7705 train loss:3.525566 +step:7706 train loss:3.465479 +step:7707 train loss:3.581365 +step:7708 train loss:3.523257 +step:7709 train loss:3.541364 +step:7710 train loss:3.602586 +step:7711 train loss:3.563198 +step:7712 train loss:3.509522 +step:7713 train loss:3.589677 +step:7714 train loss:3.532370 +step:7715 train loss:3.485707 
+step:7716 train loss:3.525352 +step:7717 train loss:3.547065 +step:7718 train loss:3.554330 +step:7719 train loss:3.510201 +step:7720 train loss:3.524486 +step:7721 train loss:3.567613 +step:7722 train loss:3.494890 +step:7723 train loss:3.866163 +step:7724 train loss:3.532617 +step:7725 train loss:3.441158 +step:7726 train loss:3.516671 +step:7727 train loss:3.546824 +step:7728 train loss:3.503200 +step:7729 train loss:3.508129 +step:7730 train loss:3.529959 +step:7731 train loss:3.559468 +step:7732 train loss:3.580264 +step:7733 train loss:3.493155 +step:7734 train loss:3.519245 +step:7735 train loss:3.612558 +step:7736 train loss:3.553588 +step:7737 train loss:3.568931 +step:7738 train loss:3.481421 +step:7739 train loss:3.549445 +step:7740 train loss:3.496783 +step:7741 train loss:3.534736 +step:7742 train loss:3.533472 +step:7743 train loss:3.484047 +step:7744 train loss:3.609937 +step:7745 train loss:3.499838 +step:7746 train loss:3.479332 +step:7747 train loss:3.575073 +step:7748 train loss:3.552706 +step:7749 train loss:3.475899 +step:7750 validation loss:3.464245 +step:7750 train loss:3.636024 +step:7751 train loss:3.516878 +step:7752 train loss:3.508540 +step:7753 train loss:3.512266 +step:7754 train loss:3.484433 +step:7755 train loss:3.550802 +step:7756 train loss:3.579059 +step:7757 train loss:3.526353 +step:7758 train loss:3.498431 +step:7759 train loss:3.523633 +step:7760 train loss:3.552530 +step:7761 train loss:3.542100 +step:7762 train loss:3.529557 +step:7763 train loss:3.514728 +step:7764 train loss:3.519801 +step:7765 train loss:3.473443 +step:7766 train loss:3.541695 +step:7767 train loss:3.539233 +step:7768 train loss:3.495494 +step:7769 train loss:3.562693 +step:7770 train loss:3.576864 +step:7771 train loss:3.551517 +step:7772 train loss:3.524342 +step:7773 train loss:3.585855 +step:7774 train loss:3.480542 +step:7775 train loss:3.468114 +step:7776 train loss:3.573808 +step:7777 train loss:3.526947 +step:7778 train loss:3.484993 +step:7779 train loss:3.528294 +step:7780 train loss:3.521655 +step:7781 train loss:3.532540 +step:7782 train loss:3.515359 +step:7783 train loss:3.498496 +step:7784 train loss:3.496501 +step:7785 train loss:3.540051 +step:7786 train loss:3.494998 +step:7787 train loss:3.574393 +step:7788 train loss:3.524687 +step:7789 train loss:3.462941 +step:7790 train loss:3.530647 +step:7791 train loss:3.554123 +step:7792 train loss:3.512087 +step:7793 train loss:3.536697 +step:7794 train loss:3.541336 +step:7795 train loss:3.558461 +step:7796 train loss:3.519542 +step:7797 train loss:3.539994 +step:7798 train loss:3.532700 +step:7799 train loss:3.521149 +step:7800 train loss:3.478634 +step:7801 train loss:3.541381 +step:7802 train loss:3.522506 +step:7803 train loss:3.573068 +step:7804 train loss:3.534147 +step:7805 train loss:3.527602 +step:7806 train loss:3.549794 +step:7807 train loss:3.623230 +step:7808 train loss:3.483954 +step:7809 train loss:3.463485 +step:7810 train loss:3.548968 +step:7811 train loss:3.482110 +step:7812 train loss:3.503500 +step:7813 train loss:3.591271 +step:7814 train loss:3.665532 +step:7815 train loss:3.474165 +step:7816 train loss:3.560771 +step:7817 train loss:3.587251 +step:7818 train loss:3.486764 +step:7819 train loss:3.535909 +step:7820 train loss:3.577754 +step:7821 train loss:3.510182 +step:7822 train loss:3.468666 +step:7823 train loss:3.568812 +step:7824 train loss:3.529685 +step:7825 train loss:3.520781 +step:7826 train loss:3.514657 +step:7827 train loss:3.565381 +step:7828 train loss:3.546628 +step:7829 
train loss:3.500834 +step:7830 train loss:3.511995 +step:7831 train loss:3.519059 +step:7832 train loss:3.578982 +step:7833 train loss:3.558929 +step:7834 train loss:3.523527 +step:7835 train loss:3.548018 +step:7836 train loss:3.659843 +step:7837 train loss:3.545158 +step:7838 train loss:3.511597 +step:7839 train loss:3.473276 +step:7840 train loss:3.485703 +step:7841 train loss:3.584135 +step:7842 train loss:3.568535 +step:7843 train loss:3.625727 +step:7844 train loss:3.551416 +step:7845 train loss:3.529536 +step:7846 train loss:3.642056 +step:7847 train loss:3.529339 +step:7848 train loss:3.541628 +step:7849 train loss:3.553853 +step:7850 train loss:3.526611 +step:7851 train loss:3.550247 +step:7852 train loss:3.526217 +step:7853 train loss:3.495634 +step:7854 train loss:3.526222 +step:7855 train loss:3.523804 +step:7856 train loss:3.526553 +step:7857 train loss:3.515702 +step:7858 train loss:3.523900 +step:7859 train loss:3.532006 +step:7860 train loss:3.566936 +step:7861 train loss:3.553628 +step:7862 train loss:3.495943 +step:7863 train loss:3.600911 +step:7864 train loss:3.441603 +step:7865 train loss:3.520437 +step:7866 train loss:3.494028 +step:7867 train loss:3.540581 +step:7868 train loss:3.517180 +step:7869 train loss:3.521352 +step:7870 train loss:3.440395 +step:7871 train loss:3.508741 +step:7872 train loss:3.501477 +step:7873 train loss:3.577754 +step:7874 train loss:3.522820 +step:7875 train loss:3.524536 +step:7876 train loss:3.546575 +step:7877 train loss:3.497536 +step:7878 train loss:3.534568 +step:7879 train loss:3.872780 +step:7880 train loss:3.527546 +step:7881 train loss:3.554291 +step:7882 train loss:3.632099 +step:7883 train loss:3.447180 +step:7884 train loss:3.537309 +step:7885 train loss:3.521903 +step:7886 train loss:3.521979 +step:7887 train loss:3.513959 +step:7888 train loss:3.545544 +step:7889 train loss:3.595197 +step:7890 train loss:3.498893 +step:7891 train loss:3.548783 +step:7892 train loss:3.518845 +step:7893 train loss:3.496441 +step:7894 train loss:3.515058 +step:7895 train loss:3.498137 +step:7896 train loss:3.498048 +step:7897 train loss:3.522586 +step:7898 train loss:3.531421 +step:7899 train loss:3.521498 +step:7900 train loss:3.487579 +step:7901 train loss:3.480504 +step:7902 train loss:3.627392 +step:7903 train loss:3.474780 +step:7904 train loss:3.524703 +step:7905 train loss:3.592284 +step:7906 train loss:3.491579 +step:7907 train loss:3.516213 +step:7908 train loss:3.567611 +step:7909 train loss:3.616295 +step:7910 train loss:3.497553 +step:7911 train loss:3.517789 +step:7912 train loss:3.520557 +step:7913 train loss:3.495113 +step:7914 train loss:3.532599 +step:7915 train loss:3.635215 +step:7916 train loss:3.504699 +step:7917 train loss:3.563282 +step:7918 train loss:3.510856 +step:7919 train loss:3.495475 +step:7920 train loss:3.535185 +step:7921 train loss:3.540904 +step:7922 train loss:3.516287 +step:7923 train loss:3.565650 +step:7924 train loss:3.526369 +step:7925 train loss:3.547181 +step:7926 train loss:3.452033 +step:7927 train loss:3.729022 +step:7928 train loss:3.556370 +step:7929 train loss:3.521584 +step:7930 train loss:3.480958 +step:7931 train loss:3.505177 +step:7932 train loss:3.528584 +step:7933 train loss:3.538812 +step:7934 train loss:3.633780 +step:7935 train loss:3.557425 +step:7936 train loss:3.529206 +step:7937 train loss:3.474427 +step:7938 train loss:3.492029 +step:7939 train loss:3.540726 +step:7940 train loss:3.523622 +step:7941 train loss:3.552230 +step:7942 train loss:3.541869 +step:7943 train loss:3.555715 
+step:7944 train loss:3.471436 +step:7945 train loss:3.577247 +step:7946 train loss:3.529377 +step:7947 train loss:3.538182 +step:7948 train loss:3.496815 +step:7949 train loss:3.550204 +step:7950 train loss:3.605165 +step:7951 train loss:3.575478 +step:7952 train loss:3.717657 +step:7953 train loss:3.607777 +step:7954 train loss:3.513969 +step:7955 train loss:3.500864 +step:7956 train loss:3.506221 +step:7957 train loss:3.580035 +step:7958 train loss:3.589996 +step:7959 train loss:3.547880 +step:7960 train loss:3.608167 +step:7961 train loss:3.518994 +step:7962 train loss:3.488608 +step:7963 train loss:3.527340 +step:7964 train loss:3.524691 +step:7965 train loss:3.532671 +step:7966 train loss:3.503454 +step:7967 train loss:3.526093 +step:7968 train loss:3.536009 +step:7969 train loss:3.494822 +step:7970 train loss:3.461707 +step:7971 train loss:3.547571 +step:7972 train loss:3.523880 +step:7973 train loss:3.495643 +step:7974 train loss:3.537077 +step:7975 train loss:3.521931 +step:7976 train loss:3.540970 +step:7977 train loss:3.571917 +step:7978 train loss:3.595337 +step:7979 train loss:3.541157 +step:7980 train loss:3.446086 +step:7981 train loss:3.485234 +step:7982 train loss:3.532940 +step:7983 train loss:3.548844 +step:7984 train loss:3.590027 +step:7985 train loss:3.512199 +step:7986 train loss:3.540525 +step:7987 train loss:3.591864 +step:7988 train loss:3.563340 +step:7989 train loss:3.469069 +step:7990 train loss:3.486171 +step:7991 train loss:3.500660 +step:7992 train loss:3.523495 +step:7993 train loss:3.506293 +step:7994 train loss:3.558467 +step:7995 train loss:3.558904 +step:7996 train loss:3.528765 +step:7997 train loss:3.545894 +step:7998 train loss:3.571651 +step:7999 train loss:3.500687 +step:8000 validation loss:3.450357 total_sharp:1.3226e-03 L1_sharp:9.9817e-04 L2_sharp:3.2289e-04 L3_sharp:4.9971e-04 L4_sharp:2.2219e-04 L5_sharp:1.6170e-04 L6_sharp:1.7683e-04 L7_sharp:2.7055e-04 L8_sharp:3.6656e-04 L9_sharp:4.6419e-04 L10_sharp:5.8912e-04 L11_sharp:3.8915e-04 L12_sharp:4.7461e-04 total_fnorm:4.5031e+00 total_l1_linf:4.0081e+04 total_spectral:4.5031e+00 L1_fnorm:1.0123e+00 L2_fnorm:1.0222e+00 L3_fnorm:1.0376e+00 L4_fnorm:1.0567e+00 L5_fnorm:1.0817e+00 L6_fnorm:1.0836e+00 L7_fnorm:1.0831e+00 L8_fnorm:1.0762e+00 L9_fnorm:1.0751e+00 L10_fnorm:1.0476e+00 L11_fnorm:1.0592e+00 L12_fnorm:1.0567e+00 L1_l1linf:1.2478e+00 L2_l1linf:1.0744e+00 L3_l1linf:1.0768e+00 L4_l1linf:1.1107e+00 L5_l1linf:1.1214e+00 L6_l1linf:1.0957e+00 L7_l1linf:1.0849e+00 L8_l1linf:1.1281e+00 L9_l1linf:1.1410e+00 L10_l1linf:1.2645e+00 L11_l1linf:1.2784e+00 L12_l1linf:1.2794e+00 L1_spectral:1.5840e-01 L2_spectral:1.5102e-01 L3_spectral:1.5532e-01 L4_spectral:1.1474e-01 L5_spectral:9.5544e-02 L6_spectral:9.6692e-02 L7_spectral:8.5580e-02 L8_spectral:9.5995e-02 L9_spectral:1.3656e-01 L10_spectral:1.6599e-01 L11_spectral:1.6454e-01 L12_spectral:1.6902e-01 ip_v_neg_g:1.2926e-02 cos_v_neg_g:3.8696e-03 v_norm:4.5031e+00 g_norm:7.4182e-01 hv_norm:4.1506e-01 cos_v_hv:1.4349e-02 hg_norm:5.6910e+00 cos_g_hg:5.5326e-01 v_par:7.1536e-04 v_perp:4.5031e+00 L1_cos_v_neg_g:7.7645e-03 L1_v_norm:1.0123e+00 L2_cos_v_neg_g:7.2195e-03 L2_v_norm:1.0222e+00 L3_cos_v_neg_g:4.5319e-03 L3_v_norm:1.0376e+00 L4_cos_v_neg_g:2.5729e-03 L4_v_norm:1.0567e+00 L5_cos_v_neg_g:2.3008e-03 L5_v_norm:1.0817e+00 L6_cos_v_neg_g:3.2883e-03 L6_v_norm:1.0836e+00 L7_cos_v_neg_g:3.3974e-03 L7_v_norm:1.0831e+00 L8_cos_v_neg_g:4.5068e-03 L8_v_norm:1.0762e+00 L9_cos_v_neg_g:5.1867e-03 L9_v_norm:1.0751e+00 L10_cos_v_neg_g:7.1695e-03 L10_v_norm:1.0476e+00 
L11_cos_v_neg_g:8.5026e-03 L11_v_norm:1.0592e+00 L12_cos_v_neg_g:8.5556e-03 L12_v_norm:1.0567e+00 +step:8000 train loss:3.569827 +step:8001 train loss:3.528080 +step:8002 train loss:3.550469 +step:8003 train loss:3.568713 +step:8004 train loss:3.542052 +step:8005 train loss:3.465672 +step:8006 train loss:3.542168 +step:8007 train loss:3.510492 +step:8008 train loss:3.535897 +step:8009 train loss:3.614060 +step:8010 train loss:3.849408 +step:8011 train loss:3.503077 +step:8012 train loss:3.582242 +step:8013 train loss:3.532639 +step:8014 train loss:3.546021 +step:8015 train loss:3.542293 +step:8016 train loss:3.531294 +step:8017 train loss:3.551481 +step:8018 train loss:3.513631 +step:8019 train loss:3.483348 +step:8020 train loss:3.520338 +step:8021 train loss:3.591569 +step:8022 train loss:3.510107 +step:8023 train loss:3.540508 +step:8024 train loss:3.429585 +step:8025 train loss:3.516471 +step:8026 train loss:3.529120 +step:8027 train loss:3.533962 +step:8028 train loss:3.595143 +step:8029 train loss:3.520103 +step:8030 train loss:3.481607 +step:8031 train loss:3.540568 +step:8032 train loss:3.524289 +step:8033 train loss:3.474275 +step:8034 train loss:3.517695 +step:8035 train loss:3.504593 +step:8036 train loss:3.492890 +step:8037 train loss:3.462818 +step:8038 train loss:3.478159 +step:8039 train loss:3.574153 +step:8040 train loss:3.505781 +step:8041 train loss:3.499783 +step:8042 train loss:3.538715 +step:8043 train loss:3.482002 +step:8044 train loss:3.494119 +step:8045 train loss:3.561786 +step:8046 train loss:3.487850 +step:8047 train loss:3.494337 +step:8048 train loss:3.522697 +step:8049 train loss:3.570420 +step:8050 train loss:3.513605 +step:8051 train loss:3.486345 +step:8052 train loss:3.548232 +step:8053 train loss:3.500490 +step:8054 train loss:3.532172 +step:8055 train loss:3.564491 +step:8056 train loss:3.532248 +step:8057 train loss:3.607329 +step:8058 train loss:3.511264 +step:8059 train loss:3.571173 +step:8060 train loss:3.541348 +step:8061 train loss:3.431954 +step:8062 train loss:3.569081 +step:8063 train loss:3.536709 +step:8064 train loss:3.491659 +step:8065 train loss:3.555282 +step:8066 train loss:3.514942 +step:8067 train loss:3.581161 +step:8068 train loss:3.503500 +step:8069 train loss:3.529265 +step:8070 train loss:3.493280 +step:8071 train loss:3.500965 +step:8072 train loss:3.540754 +step:8073 train loss:3.496696 +step:8074 train loss:3.504148 +step:8075 train loss:3.491618 +step:8076 train loss:3.540144 +step:8077 train loss:3.546557 +step:8078 train loss:3.488974 +step:8079 train loss:3.510886 +step:8080 train loss:3.498857 +step:8081 train loss:3.513176 +step:8082 train loss:3.531636 +step:8083 train loss:3.431519 +step:8084 train loss:3.571721 +step:8085 train loss:3.444854 +step:8086 train loss:3.568110 +step:8087 train loss:3.467351 +step:8088 train loss:3.512827 +step:8089 train loss:3.546393 +step:8090 train loss:3.569639 +step:8091 train loss:3.515824 +step:8092 train loss:3.497071 +step:8093 train loss:3.502016 +step:8094 train loss:3.512694 +step:8095 train loss:3.529489 +step:8096 train loss:3.533729 +step:8097 train loss:3.458232 +step:8098 train loss:3.470116 +step:8099 train loss:3.469583 +step:8100 train loss:3.520181 +step:8101 train loss:3.597593 +step:8102 train loss:3.535340 +step:8103 train loss:3.484921 +step:8104 train loss:3.536018 +step:8105 train loss:3.534278 +step:8106 train loss:3.495234 +step:8107 train loss:3.475584 +step:8108 train loss:3.495038 +step:8109 train loss:3.487429 +step:8110 train loss:3.552871 +step:8111 
train loss:3.474599 +step:8112 train loss:3.495286 +step:8113 train loss:3.484122 +step:8114 train loss:3.429980 +step:8115 train loss:3.486583 +step:8116 train loss:3.515567 +step:8117 train loss:3.491003 +step:8118 train loss:3.480537 +step:8119 train loss:3.524894 +step:8120 train loss:3.470860 +step:8121 train loss:3.528243 +step:8122 train loss:3.510577 +step:8123 train loss:3.513928 +step:8124 train loss:3.480020 +step:8125 train loss:3.461531 +step:8126 train loss:3.455216 +step:8127 train loss:3.547928 +step:8128 train loss:3.554206 +step:8129 train loss:3.472802 +step:8130 train loss:3.502990 +step:8131 train loss:3.473122 +step:8132 train loss:3.541770 +step:8133 train loss:3.464674 +step:8134 train loss:3.502314 +step:8135 train loss:3.493109 +step:8136 train loss:3.502208 +step:8137 train loss:3.567110 +step:8138 train loss:3.476893 +step:8139 train loss:3.548042 +step:8140 train loss:3.479263 +step:8141 train loss:3.501641 +step:8142 train loss:3.481421 +step:8143 train loss:3.531615 +step:8144 train loss:3.509809 +step:8145 train loss:3.477475 +step:8146 train loss:3.487005 +step:8147 train loss:3.506657 +step:8148 train loss:3.600685 +step:8149 train loss:3.514696 +step:8150 train loss:3.491573 +step:8151 train loss:3.488353 +step:8152 train loss:3.581448 +step:8153 train loss:3.460704 +step:8154 train loss:3.475690 +step:8155 train loss:3.501909 +step:8156 train loss:3.483224 +step:8157 train loss:3.504658 +step:8158 train loss:3.514197 +step:8159 train loss:3.531594 +step:8160 train loss:3.484313 +step:8161 train loss:3.524766 +step:8162 train loss:3.456584 +step:8163 train loss:3.516923 +step:8164 train loss:3.502979 +step:8165 train loss:3.553432 +step:8166 train loss:3.556109 +step:8167 train loss:3.465102 +step:8168 train loss:3.442774 +step:8169 train loss:3.491339 +step:8170 train loss:3.440995 +step:8171 train loss:3.504438 +step:8172 train loss:3.501854 +step:8173 train loss:3.501791 +step:8174 train loss:3.509981 +step:8175 train loss:3.473839 +step:8176 train loss:3.468291 +step:8177 train loss:3.512264 +step:8178 train loss:3.601506 +step:8179 train loss:3.508134 +step:8180 train loss:3.528545 +step:8181 train loss:3.532789 +step:8182 train loss:3.491426 +step:8183 train loss:3.478745 +step:8184 train loss:3.471871 +step:8185 train loss:3.509922 +step:8186 train loss:3.510207 +step:8187 train loss:3.524874 +step:8188 train loss:3.448064 +step:8189 train loss:3.597240 +step:8190 train loss:3.531879 +step:8191 train loss:3.533505 +step:8192 train loss:3.646792 +step:8193 train loss:3.512690 +step:8194 train loss:3.448995 +step:8195 train loss:3.547608 +step:8196 train loss:3.465547 +step:8197 train loss:3.492269 +step:8198 train loss:3.501317 +step:8199 train loss:3.501444 +step:8200 train loss:3.482034 +step:8201 train loss:3.593975 +step:8202 train loss:3.511279 +step:8203 train loss:3.533749 +step:8204 train loss:3.443949 +step:8205 train loss:3.450231 +step:8206 train loss:3.575856 +step:8207 train loss:3.500057 +step:8208 train loss:3.516461 +step:8209 train loss:3.561119 +step:8210 train loss:3.546887 +step:8211 train loss:3.476992 +step:8212 train loss:3.535461 +step:8213 train loss:3.546491 +step:8214 train loss:3.584197 +step:8215 train loss:3.562513 +step:8216 train loss:3.540447 +step:8217 train loss:3.519342 +step:8218 train loss:3.528125 +step:8219 train loss:3.662808 +step:8220 train loss:3.490538 +step:8221 train loss:3.512306 +step:8222 train loss:3.465079 +step:8223 train loss:3.483233 +step:8224 train loss:3.493266 +step:8225 train loss:3.543391 
+step:8226 train loss:3.472775 +step:8227 train loss:3.540733 +step:8228 train loss:3.426193 +step:8229 train loss:3.470594 +step:8230 train loss:3.488681 +step:8231 train loss:3.509689 +step:8232 train loss:3.512078 +step:8233 train loss:3.554337 +step:8234 train loss:3.552561 +step:8235 train loss:3.523680 +step:8236 train loss:3.509360 +step:8237 train loss:3.457871 +step:8238 train loss:3.709270 +step:8239 train loss:3.547235 +step:8240 train loss:3.490907 +step:8241 train loss:3.462168 +step:8242 train loss:3.499836 +step:8243 train loss:3.490319 +step:8244 train loss:3.502704 +step:8245 train loss:3.488810 +step:8246 train loss:3.556161 +step:8247 train loss:3.587635 +step:8248 train loss:3.504298 +step:8249 train loss:3.498235 +step:8250 validation loss:3.442937 +step:8250 train loss:3.486120 +step:8251 train loss:3.579766 +step:8252 train loss:3.520731 +step:8253 train loss:3.486716 +step:8254 train loss:3.456539 +step:8255 train loss:3.489448 +step:8256 train loss:3.470945 +step:8257 train loss:3.581028 +step:8258 train loss:3.499895 +step:8259 train loss:3.487729 +step:8260 train loss:3.488204 +step:8261 train loss:3.485846 +step:8262 train loss:3.502080 +step:8263 train loss:3.511996 +step:8264 train loss:3.479302 +step:8265 train loss:3.468320 +step:8266 train loss:3.479958 +step:8267 train loss:3.409859 +step:8268 train loss:3.532938 +step:8269 train loss:3.465311 +step:8270 train loss:3.519699 +step:8271 train loss:3.544892 +step:8272 train loss:3.571388 +step:8273 train loss:3.449474 +step:8274 train loss:3.511515 +step:8275 train loss:3.471970 +step:8276 train loss:3.509358 +step:8277 train loss:3.576971 +step:8278 train loss:3.593171 +step:8279 train loss:3.503315 +step:8280 train loss:3.491372 +step:8281 train loss:3.457356 +step:8282 train loss:3.519820 +step:8283 train loss:3.506214 +step:8284 train loss:3.491067 +step:8285 train loss:3.483572 +step:8286 train loss:3.593067 +step:8287 train loss:3.532171 +step:8288 train loss:3.502159 +step:8289 train loss:3.515384 +step:8290 train loss:3.454347 +step:8291 train loss:3.501124 +step:8292 train loss:3.523870 +step:8293 train loss:3.499863 +step:8294 train loss:3.468729 +step:8295 train loss:3.507577 +step:8296 train loss:3.570901 +step:8297 train loss:3.656049 +step:8298 train loss:3.474203 +step:8299 train loss:3.510810 +step:8300 train loss:3.518085 +step:8301 train loss:3.490765 +step:8302 train loss:3.550919 +step:8303 train loss:3.682013 +step:8304 train loss:3.493920 +step:8305 train loss:3.538441 +step:8306 train loss:3.513730 +step:8307 train loss:3.531602 +step:8308 train loss:3.526661 +step:8309 train loss:3.549093 +step:8310 train loss:3.466721 +step:8311 train loss:3.558018 +step:8312 train loss:3.549958 +step:8313 train loss:3.613557 +step:8314 train loss:3.485449 +step:8315 train loss:3.433494 +step:8316 train loss:3.490055 +step:8317 train loss:3.512113 +step:8318 train loss:3.502155 +step:8319 train loss:3.539258 +step:8320 train loss:3.560502 +step:8321 train loss:3.467643 +step:8322 train loss:3.483535 +step:8323 train loss:3.522201 +step:8324 train loss:3.498457 +step:8325 train loss:3.553854 +step:8326 train loss:3.519234 +step:8327 train loss:3.508893 +step:8328 train loss:3.580912 +step:8329 train loss:3.489148 +step:8330 train loss:3.530118 +step:8331 train loss:3.456314 +step:8332 train loss:3.555642 +step:8333 train loss:3.569967 +step:8334 train loss:3.438443 +step:8335 train loss:3.501318 +step:8336 train loss:3.595987 +step:8337 train loss:3.526012 +step:8338 train loss:3.495483 +step:8339 
train loss:3.471807 +step:8340 train loss:3.564739 +step:8341 train loss:3.464204 +step:8342 train loss:3.536355 +step:8343 train loss:3.449949 +step:8344 train loss:3.496976 +step:8345 train loss:3.530544 +step:8346 train loss:3.610808 +step:8347 train loss:3.501639 +step:8348 train loss:3.531641 +step:8349 train loss:3.502217 +step:8350 train loss:3.522549 +step:8351 train loss:3.465679 +step:8352 train loss:3.547582 +step:8353 train loss:3.502748 +step:8354 train loss:3.484832 +step:8355 train loss:3.485525 +step:8356 train loss:3.480866 +step:8357 train loss:3.494539 +step:8358 train loss:3.469308 +step:8359 train loss:3.466386 +step:8360 train loss:3.512130 +step:8361 train loss:3.527490 +step:8362 train loss:3.546410 +step:8363 train loss:3.543888 +step:8364 train loss:3.505898 +step:8365 train loss:3.654933 +step:8366 train loss:3.497577 +step:8367 train loss:3.473141 +step:8368 train loss:3.443798 +step:8369 train loss:3.474347 +step:8370 train loss:3.553982 +step:8371 train loss:3.526190 +step:8372 train loss:3.505443 +step:8373 train loss:3.516648 +step:8374 train loss:3.447213 +step:8375 train loss:3.509846 +step:8376 train loss:3.548766 +step:8377 train loss:3.375530 +step:8378 train loss:3.591513 +step:8379 train loss:3.456529 +step:8380 train loss:3.465647 +step:8381 train loss:3.469833 +step:8382 train loss:3.497804 +step:8383 train loss:3.457353 +step:8384 train loss:3.498915 +step:8385 train loss:3.510181 +step:8386 train loss:3.491982 +step:8387 train loss:3.654730 +step:8388 train loss:3.565126 +step:8389 train loss:3.543166 +step:8390 train loss:3.547292 +step:8391 train loss:3.474611 +step:8392 train loss:3.489620 +step:8393 train loss:3.444332 +step:8394 train loss:3.532591 +step:8395 train loss:3.537547 +step:8396 train loss:3.563242 +step:8397 train loss:3.494720 +step:8398 train loss:3.515049 +step:8399 train loss:3.480397 +step:8400 train loss:3.485609 +step:8401 train loss:3.494530 +step:8402 train loss:3.480931 +step:8403 train loss:3.494422 +step:8404 train loss:3.498430 +step:8405 train loss:3.452678 +step:8406 train loss:3.495656 +step:8407 train loss:3.536982 +step:8408 train loss:3.506013 +step:8409 train loss:3.429072 +step:8410 train loss:3.493100 +step:8411 train loss:3.519310 +step:8412 train loss:3.577883 +step:8413 train loss:3.554214 +step:8414 train loss:3.549336 +step:8415 train loss:3.472314 +step:8416 train loss:3.518920 +step:8417 train loss:3.436088 +step:8418 train loss:3.540414 +step:8419 train loss:3.491161 +step:8420 train loss:3.570284 +step:8421 train loss:3.487113 +step:8422 train loss:3.503946 +step:8423 train loss:3.520213 +step:8424 train loss:3.524387 +step:8425 train loss:3.581402 +step:8426 train loss:3.551572 +step:8427 train loss:3.466365 +step:8428 train loss:3.484570 +step:8429 train loss:3.545954 +step:8430 train loss:3.487818 +step:8431 train loss:3.490602 +step:8432 train loss:3.493875 +step:8433 train loss:3.474977 +step:8434 train loss:3.504650 +step:8435 train loss:3.423014 +step:8436 train loss:3.507659 +step:8437 train loss:3.546135 +step:8438 train loss:3.526788 +step:8439 train loss:3.468647 +step:8440 train loss:3.438013 +step:8441 train loss:3.492377 +step:8442 train loss:3.517376 +step:8443 train loss:3.475939 +step:8444 train loss:3.504851 +step:8445 train loss:3.455548 +step:8446 train loss:3.508251 +step:8447 train loss:3.518384 +step:8448 train loss:3.505157 +step:8449 train loss:3.491990 +step:8450 train loss:3.483244 +step:8451 train loss:3.514527 +step:8452 train loss:3.487377 +step:8453 train loss:3.468894 
+step:8454 train loss:3.519581 +step:8455 train loss:3.593347 +step:8456 train loss:3.568801 +step:8457 train loss:3.623446 +step:8458 train loss:3.513244 +step:8459 train loss:3.516421 +step:8460 train loss:3.446719 +step:8461 train loss:3.601320 +step:8462 train loss:3.474596 +step:8463 train loss:3.511844 +step:8464 train loss:3.525282 +step:8465 train loss:3.530694 +step:8466 train loss:3.507677 +step:8467 train loss:3.507807 +step:8468 train loss:3.757596 +step:8469 train loss:3.472468 +step:8470 train loss:3.463868 +step:8471 train loss:3.508581 +step:8472 train loss:3.530384 +step:8473 train loss:3.486607 +step:8474 train loss:3.612142 +step:8475 train loss:3.566220 +step:8476 train loss:3.515174 +step:8477 train loss:3.506278 +step:8478 train loss:3.488616 +step:8479 train loss:3.487104 +step:8480 train loss:3.583168 +step:8481 train loss:3.487779 +step:8482 train loss:3.481743 +step:8483 train loss:3.631995 +step:8484 train loss:3.515872 +step:8485 train loss:3.559153 +step:8486 train loss:3.466569 +step:8487 train loss:3.522447 +step:8488 train loss:3.467470 +step:8489 train loss:3.546752 +step:8490 train loss:3.533109 +step:8491 train loss:3.554174 +step:8492 train loss:3.510709 +step:8493 train loss:3.579108 +step:8494 train loss:3.442503 +step:8495 train loss:3.539367 +step:8496 train loss:3.486191 +step:8497 train loss:3.518855 +step:8498 train loss:3.533280 +step:8499 train loss:3.511097 +step:8500 validation loss:3.442289 total_sharp:8.4913e-04 L1_sharp:7.6370e-04 L2_sharp:7.4902e-05 L3_sharp:1.9463e-04 L4_sharp:1.5009e-04 L5_sharp:1.7351e-04 L6_sharp:1.6259e-04 L7_sharp:2.0969e-04 L8_sharp:2.8864e-04 L9_sharp:2.8511e-04 L10_sharp:4.0493e-04 L11_sharp:3.1215e-04 L12_sharp:4.8389e-04 total_fnorm:4.3928e+00 total_l1_linf:3.8888e+04 total_spectral:4.3928e+00 L1_fnorm:9.6304e-01 L2_fnorm:9.8710e-01 L3_fnorm:9.9449e-01 L4_fnorm:1.0221e+00 L5_fnorm:1.0489e+00 L6_fnorm:1.0542e+00 L7_fnorm:1.0518e+00 L8_fnorm:1.0461e+00 L9_fnorm:1.0401e+00 L10_fnorm:1.0107e+00 L11_fnorm:1.0090e+00 L12_fnorm:1.0073e+00 L1_l1linf:1.1332e+00 L2_l1linf:1.0473e+00 L3_l1linf:1.0622e+00 L4_l1linf:1.0714e+00 L5_l1linf:1.1080e+00 L6_l1linf:1.0739e+00 L7_l1linf:1.0552e+00 L8_l1linf:1.0616e+00 L9_l1linf:1.1273e+00 L10_l1linf:1.1343e+00 L11_l1linf:1.3044e+00 L12_l1linf:1.2594e+00 L1_spectral:1.4785e-01 L2_spectral:1.4776e-01 L3_spectral:1.4847e-01 L4_spectral:1.0490e-01 L5_spectral:9.0788e-02 L6_spectral:9.0970e-02 L7_spectral:7.5616e-02 L8_spectral:8.9962e-02 L9_spectral:1.2149e-01 L10_spectral:1.5032e-01 L11_spectral:1.5987e-01 L12_spectral:1.6773e-01 ip_v_neg_g:9.9851e-03 cos_v_neg_g:3.1614e-03 v_norm:4.3928e+00 g_norm:7.1900e-01 hv_norm:2.5682e-01 cos_v_hv:1.4524e-02 hg_norm:4.1580e+00 cos_g_hg:5.4132e-01 v_par:5.3834e-04 v_perp:4.3928e+00 L1_cos_v_neg_g:9.6058e-03 L1_v_norm:9.6304e-01 L2_cos_v_neg_g:3.5932e-03 L2_v_norm:9.8710e-01 L3_cos_v_neg_g:3.5364e-03 L3_v_norm:9.9449e-01 L4_cos_v_neg_g:3.1080e-03 L4_v_norm:1.0221e+00 L5_cos_v_neg_g:3.4169e-03 L5_v_norm:1.0489e+00 L6_cos_v_neg_g:3.4889e-03 L6_v_norm:1.0542e+00 L7_cos_v_neg_g:3.3239e-03 L7_v_norm:1.0518e+00 L8_cos_v_neg_g:3.5288e-03 L8_v_norm:1.0461e+00 L9_cos_v_neg_g:3.4620e-03 L9_v_norm:1.0401e+00 L10_cos_v_neg_g:4.8671e-03 L10_v_norm:1.0107e+00 L11_cos_v_neg_g:6.3060e-03 L11_v_norm:1.0090e+00 L12_cos_v_neg_g:5.7677e-03 L12_v_norm:1.0073e+00 +step:8500 train loss:3.505041 +step:8501 train loss:3.727009 +step:8502 train loss:3.741685 +step:8503 train loss:3.499733 +step:8504 train loss:3.495005 +step:8505 train loss:3.471975 +step:8506 train 
loss:3.544207 +step:8507 train loss:3.483870 +step:8508 train loss:3.515950 +step:8509 train loss:3.455608 +step:8510 train loss:3.477820 +step:8511 train loss:3.434969 +step:8512 train loss:3.534605 +step:8513 train loss:3.537696 +step:8514 train loss:3.486412 +step:8515 train loss:3.578762 +step:8516 train loss:3.498617 +step:8517 train loss:3.519510 +step:8518 train loss:3.410023 +step:8519 train loss:3.502138 +step:8520 train loss:3.468293 +step:8521 train loss:3.507402 +step:8522 train loss:3.402943 +step:8523 train loss:3.499836 +step:8524 train loss:3.488452 +step:8525 train loss:3.555264 +step:8526 train loss:3.534353 +step:8527 train loss:3.478770 +step:8528 train loss:3.560583 +step:8529 train loss:3.519456 +step:8530 train loss:3.552852 +step:8531 train loss:3.541074 +step:8532 train loss:3.581564 +step:8533 train loss:3.532667 +step:8534 train loss:3.530048 +step:8535 train loss:3.505929 +step:8536 train loss:3.594435 +step:8537 train loss:3.507777 +step:8538 train loss:3.577483 +step:8539 train loss:3.497799 +step:8540 train loss:3.525036 +step:8541 train loss:3.465556 +step:8542 train loss:3.531456 +step:8543 train loss:3.446247 +step:8544 train loss:3.444287 +step:8545 train loss:3.493674 +step:8546 train loss:3.446339 +step:8547 train loss:3.501137 +step:8548 train loss:3.471365 +step:8549 train loss:3.513948 +step:8550 train loss:3.468016 +step:8551 train loss:3.517848 +step:8552 train loss:3.518683 +step:8553 train loss:3.523214 +step:8554 train loss:3.493330 +step:8555 train loss:3.508270 +step:8556 train loss:3.586391 +step:8557 train loss:3.484462 +step:8558 train loss:3.522330 +step:8559 train loss:3.511774 +step:8560 train loss:3.493626 +step:8561 train loss:3.451671 +step:8562 train loss:3.477467 +step:8563 train loss:3.475896 +step:8564 train loss:3.548347 +step:8565 train loss:3.522119 +step:8566 train loss:3.541502 +step:8567 train loss:3.487610 +step:8568 train loss:3.505972 +step:8569 train loss:3.512181 +step:8570 train loss:3.457922 +step:8571 train loss:3.498388 +step:8572 train loss:3.515666 +step:8573 train loss:3.591161 +step:8574 train loss:3.523283 +step:8575 train loss:3.520497 +step:8576 train loss:3.554518 +step:8577 train loss:3.635239 +step:8578 train loss:3.546979 +step:8579 train loss:3.530482 +step:8580 train loss:3.463479 +step:8581 train loss:3.507593 +step:8582 train loss:3.512487 +step:8583 train loss:3.509770 +step:8584 train loss:3.499422 +step:8585 train loss:3.575336 +step:8586 train loss:3.498460 +step:8587 train loss:3.509366 +step:8588 train loss:3.552577 +step:8589 train loss:3.501620 +step:8590 train loss:3.493966 +step:8591 train loss:3.495877 +step:8592 train loss:3.456286 +step:8593 train loss:3.532207 +step:8594 train loss:3.558394 +step:8595 train loss:3.477816 +step:8596 train loss:3.523720 +step:8597 train loss:3.483673 +step:8598 train loss:3.538463 +step:8599 train loss:3.502097 +step:8600 train loss:3.512430 +step:8601 train loss:3.502084 +step:8602 train loss:3.476506 +step:8603 train loss:3.534594 +step:8604 train loss:3.479160 +step:8605 train loss:3.492930 +step:8606 train loss:3.504263 +step:8607 train loss:3.513147 +step:8608 train loss:3.557990 +step:8609 train loss:3.455644 +step:8610 train loss:3.529740 +step:8611 train loss:3.457229 +step:8612 train loss:3.536706 +step:8613 train loss:3.470096 +step:8614 train loss:3.534747 +step:8615 train loss:3.576783 +step:8616 train loss:3.457745 +step:8617 train loss:3.525821 +step:8618 train loss:3.500618 +step:8619 train loss:3.455399 +step:8620 train loss:3.496056 
+step:8621 train loss:3.527780 +step:8622 train loss:3.485708 +step:8623 train loss:3.497728 +step:8624 train loss:3.573766 +step:8625 train loss:3.493634 +step:8626 train loss:3.504004 +step:8627 train loss:3.499421 +step:8628 train loss:3.532651 +step:8629 train loss:3.444425 +step:8630 train loss:3.542029 +step:8631 train loss:3.487637 +step:8632 train loss:3.541463 +step:8633 train loss:3.489004 +step:8634 train loss:3.719556 +step:8635 train loss:3.516876 +step:8636 train loss:3.563727 +step:8637 train loss:3.489311 +step:8638 train loss:3.493179 +step:8639 train loss:3.545515 +step:8640 train loss:3.458566 +step:8641 train loss:3.555665 +step:8642 train loss:3.508972 +step:8643 train loss:3.622371 +step:8644 train loss:3.460999 +step:8645 train loss:3.531591 +step:8646 train loss:3.493527 +step:8647 train loss:3.519817 +step:8648 train loss:3.466840 +step:8649 train loss:3.551717 +step:8650 train loss:3.502540 +step:8651 train loss:3.516505 +step:8652 train loss:3.487082 +step:8653 train loss:3.516794 +step:8654 train loss:3.560373 +step:8655 train loss:3.488574 +step:8656 train loss:3.530937 +step:8657 train loss:3.532579 +step:8658 train loss:3.507518 +step:8659 train loss:3.497380 +step:8660 train loss:3.441884 +step:8661 train loss:3.504401 +step:8662 train loss:3.443897 +step:8663 train loss:3.518991 +step:8664 train loss:3.434590 +step:8665 train loss:3.456511 +step:8666 train loss:3.531523 +step:8667 train loss:3.426397 +step:8668 train loss:3.532374 +step:8669 train loss:3.573684 +step:8670 train loss:3.472185 +step:8671 train loss:3.467519 +step:8672 train loss:3.686630 +step:8673 train loss:3.453653 +step:8674 train loss:3.522162 +step:8675 train loss:3.560263 +step:8676 train loss:3.506825 +step:8677 train loss:3.528522 +step:8678 train loss:3.478765 +step:8679 train loss:3.534692 +step:8680 train loss:3.514945 +step:8681 train loss:3.515225 +step:8682 train loss:3.469543 +step:8683 train loss:3.487392 +step:8684 train loss:3.562580 +step:8685 train loss:3.507075 +step:8686 train loss:3.496527 +step:8687 train loss:3.451762 +step:8688 train loss:3.470085 +step:8689 train loss:3.541200 +step:8690 train loss:3.477391 +step:8691 train loss:3.554617 +step:8692 train loss:3.442800 +step:8693 train loss:3.530327 +step:8694 train loss:3.532975 +step:8695 train loss:3.519737 +step:8696 train loss:3.542918 +step:8697 train loss:3.497427 +step:8698 train loss:3.536187 +step:8699 train loss:3.487329 +step:8700 train loss:3.511095 +step:8701 train loss:3.472504 +step:8702 train loss:3.459780 +step:8703 train loss:3.475185 +step:8704 train loss:3.431213 +step:8705 train loss:3.508521 +step:8706 train loss:3.531810 +step:8707 train loss:3.530701 +step:8708 train loss:3.474375 +step:8709 train loss:3.535876 +step:8710 train loss:3.465044 +step:8711 train loss:3.517830 +step:8712 train loss:3.426466 +step:8713 train loss:3.502071 +step:8714 train loss:3.609066 +step:8715 train loss:3.466192 +step:8716 train loss:3.521148 +step:8717 train loss:3.493364 +step:8718 train loss:3.529857 +step:8719 train loss:3.500826 +step:8720 train loss:3.613158 +step:8721 train loss:3.505644 +step:8722 train loss:3.600137 +step:8723 train loss:3.463076 +step:8724 train loss:3.475680 +step:8725 train loss:3.504394 +step:8726 train loss:3.459629 +step:8727 train loss:3.536071 +step:8728 train loss:3.494594 +step:8729 train loss:3.497777 +step:8730 train loss:3.475099 +step:8731 train loss:3.479151 +step:8732 train loss:3.582944 +step:8733 train loss:3.503914 +step:8734 train loss:3.542557 +step:8735 train 
loss:3.613201 +step:8736 train loss:3.469173 +step:8737 train loss:3.496143 +step:8738 train loss:3.474909 +step:8739 train loss:3.535753 +step:8740 train loss:3.458232 +step:8741 train loss:3.510342 +step:8742 train loss:3.467902 +step:8743 train loss:3.503857 +step:8744 train loss:3.527302 +step:8745 train loss:3.566325 +step:8746 train loss:3.466285 +step:8747 train loss:3.570769 +step:8748 train loss:3.478514 +step:8749 train loss:3.515406 +step:8750 validation loss:3.433959 +step:8750 train loss:3.526809 +step:8751 train loss:3.565800 +step:8752 train loss:3.424917 +step:8753 train loss:3.474449 +step:8754 train loss:3.526049 +step:8755 train loss:3.506857 +step:8756 train loss:3.551004 +step:8757 train loss:3.464096 +step:8758 train loss:3.621188 +step:8759 train loss:3.466823 +step:8760 train loss:3.499090 +step:8761 train loss:3.575683 +step:8762 train loss:3.475221 +step:8763 train loss:3.445679 +step:8764 train loss:3.519174 +step:8765 train loss:3.589962 +step:8766 train loss:3.518011 +step:8767 train loss:3.476054 +step:8768 train loss:3.516860 +step:8769 train loss:3.489971 +step:8770 train loss:3.535655 +step:8771 train loss:3.507036 +step:8772 train loss:3.526019 +step:8773 train loss:3.486690 +step:8774 train loss:3.520558 +step:8775 train loss:3.517594 +step:8776 train loss:3.462888 +step:8777 train loss:3.500646 +step:8778 train loss:3.509999 +step:8779 train loss:3.530824 +step:8780 train loss:3.498281 +step:8781 train loss:3.500171 +step:8782 train loss:3.522437 +step:8783 train loss:3.503893 +step:8784 train loss:3.526749 +step:8785 train loss:3.511219 +step:8786 train loss:3.587771 +step:8787 train loss:3.533695 +step:8788 train loss:3.431835 +step:8789 train loss:3.530590 +step:8790 train loss:3.459791 +step:8791 train loss:3.511083 +step:8792 train loss:3.449081 +step:8793 train loss:3.538759 +step:8794 train loss:3.463671 +step:8795 train loss:3.532576 +step:8796 train loss:3.680492 +step:8797 train loss:3.428874 +step:8798 train loss:3.586234 +step:8799 train loss:3.505143 +step:8800 train loss:3.496614 +step:8801 train loss:3.515646 +step:8802 train loss:3.570526 +step:8803 train loss:3.528992 +step:8804 train loss:3.509957 +step:8805 train loss:3.527001 +step:8806 train loss:3.500386 +step:8807 train loss:3.488062 +step:8808 train loss:3.443794 +step:8809 train loss:3.571383 +step:8810 train loss:3.474460 +step:8811 train loss:3.461238 +step:8812 train loss:3.505475 +step:8813 train loss:3.414345 +step:8814 train loss:3.606498 +step:8815 train loss:3.449100 +step:8816 train loss:3.566588 +step:8817 train loss:3.501122 +step:8818 train loss:3.435323 +step:8819 train loss:3.553771 +step:8820 train loss:3.482565 +step:8821 train loss:3.506767 +step:8822 train loss:3.485813 +step:8823 train loss:3.505167 +step:8824 train loss:3.563115 +step:8825 train loss:3.539193 +step:8826 train loss:3.509198 +step:8827 train loss:3.469362 +step:8828 train loss:3.511820 +step:8829 train loss:3.487789 +step:8830 train loss:3.467972 +step:8831 train loss:3.544295 +step:8832 train loss:3.482335 +step:8833 train loss:3.515914 +step:8834 train loss:3.480230 +step:8835 train loss:3.420524 +step:8836 train loss:3.545232 +step:8837 train loss:3.453815 +step:8838 train loss:3.496140 +step:8839 train loss:3.480234 +step:8840 train loss:3.483519 +step:8841 train loss:3.497514 +step:8842 train loss:3.505566 +step:8843 train loss:3.518152 +step:8844 train loss:3.484281 +step:8845 train loss:3.506981 +step:8846 train loss:3.474109 +step:8847 train loss:3.510927 +step:8848 train loss:3.556334 
+step:8849 train loss:3.538025 +step:8850 train loss:3.529770 +step:8851 train loss:3.413445 +step:8852 train loss:3.515738 +step:8853 train loss:3.497386 +step:8854 train loss:3.467640 +step:8855 train loss:3.542953 +step:8856 train loss:3.528533 +step:8857 train loss:3.596592 +step:8858 train loss:3.465435 +step:8859 train loss:3.534223 +step:8860 train loss:3.493054 +step:8861 train loss:3.475412 +step:8862 train loss:3.472933 +step:8863 train loss:3.457583 +step:8864 train loss:3.527131 +step:8865 train loss:3.519044 +step:8866 train loss:3.401728 +step:8867 train loss:3.504141 +step:8868 train loss:3.531757 +step:8869 train loss:3.615783 +step:8870 train loss:3.495203 +step:8871 train loss:3.517886 +step:8872 train loss:3.501984 +step:8873 train loss:3.500836 +step:8874 train loss:3.555248 +step:8875 train loss:3.489799 +step:8876 train loss:3.529692 +step:8877 train loss:3.509857 +step:8878 train loss:3.560015 +step:8879 train loss:3.519612 +step:8880 train loss:3.466525 +step:8881 train loss:3.431575 +step:8882 train loss:3.501659 +step:8883 train loss:3.489931 +step:8884 train loss:3.577239 +step:8885 train loss:3.512907 +step:8886 train loss:3.518328 +step:8887 train loss:3.545127 +step:8888 train loss:3.500821 +step:8889 train loss:3.508161 +step:8890 train loss:3.497914 +step:8891 train loss:3.469472 +step:8892 train loss:3.552100 +step:8893 train loss:3.492485 +step:8894 train loss:3.512044 +step:8895 train loss:3.538664 +step:8896 train loss:3.454040 +step:8897 train loss:3.547676 +step:8898 train loss:3.479288 +step:8899 train loss:3.504105 +step:8900 train loss:3.470697 +step:8901 train loss:3.487699 +step:8902 train loss:3.523803 +step:8903 train loss:3.466063 +step:8904 train loss:3.515703 +step:8905 train loss:3.490726 +step:8906 train loss:3.481921 +step:8907 train loss:3.494246 +step:8908 train loss:3.558084 +step:8909 train loss:3.504411 +step:8910 train loss:3.463704 +step:8911 train loss:3.563323 +step:8912 train loss:3.459692 +step:8913 train loss:3.471190 +step:8914 train loss:3.564427 +step:8915 train loss:3.506714 +step:8916 train loss:3.536369 +step:8917 train loss:3.492998 +step:8918 train loss:3.497441 +step:8919 train loss:3.486377 +step:8920 train loss:3.512850 +step:8921 train loss:3.505395 +step:8922 train loss:3.487839 +step:8923 train loss:3.673462 +step:8924 train loss:3.569149 +step:8925 train loss:3.500886 +step:8926 train loss:3.514854 +step:8927 train loss:3.542550 +step:8928 train loss:3.497683 +step:8929 train loss:3.489915 +step:8930 train loss:3.545433 +step:8931 train loss:3.455966 +step:8932 train loss:3.557581 +step:8933 train loss:3.465764 +step:8934 train loss:3.506214 +step:8935 train loss:3.519349 +step:8936 train loss:3.553056 +step:8937 train loss:3.548823 +step:8938 train loss:3.491673 +step:8939 train loss:3.557426 +step:8940 train loss:3.509168 +step:8941 train loss:3.456753 +step:8942 train loss:3.527994 +step:8943 train loss:3.464059 +step:8944 train loss:3.513846 +step:8945 train loss:3.530918 +step:8946 train loss:3.380421 +step:8947 train loss:3.567990 +step:8948 train loss:3.413573 +step:8949 train loss:3.419360 +step:8950 train loss:3.462630 +step:8951 train loss:3.503611 +step:8952 train loss:3.521161 +step:8953 train loss:3.474055 +step:8954 train loss:3.580056 +step:8955 train loss:3.496729 +step:8956 train loss:3.521729 +step:8957 train loss:3.512131 +step:8958 train loss:3.489258 +step:8959 train loss:3.479630 +step:8960 train loss:3.445991 +step:8961 train loss:3.472003 +step:8962 train loss:3.522127 +step:8963 train 
loss:3.500051 +step:8964 train loss:3.484094 +step:8965 train loss:3.527235 +step:8966 train loss:3.488468 +step:8967 train loss:3.463884 +step:8968 train loss:3.449424 +step:8969 train loss:3.440027 +step:8970 train loss:3.517581 +step:8971 train loss:3.467296 +step:8972 train loss:3.668120 +step:8973 train loss:3.552992 +step:8974 train loss:3.512181 +step:8975 train loss:3.512654 +step:8976 train loss:3.474172 +step:8977 train loss:3.564377 +step:8978 train loss:3.547260 +step:8979 train loss:3.464526 +step:8980 train loss:3.560337 +step:8981 train loss:3.509904 +step:8982 train loss:3.486114 +step:8983 train loss:3.429115 +step:8984 train loss:3.552537 +step:8985 train loss:3.470998 +step:8986 train loss:3.506301 +step:8987 train loss:3.481850 +step:8988 train loss:3.528474 +step:8989 train loss:3.441744 +step:8990 train loss:3.581741 +step:8991 train loss:3.433981 +step:8992 train loss:3.489759 +step:8993 train loss:3.580146 +step:8994 train loss:3.488805 +step:8995 train loss:3.513183 +step:8996 train loss:3.481136 +step:8997 train loss:3.431047 +step:8998 train loss:3.434792 +step:8999 train loss:3.457389 +step:9000 validation loss:3.430531 total_sharp:1.1834e-03 L1_sharp:7.1110e-04 L2_sharp:4.8065e-04 L3_sharp:4.9115e-04 L4_sharp:1.6963e-04 L5_sharp:1.3172e-04 L6_sharp:1.4739e-04 L7_sharp:2.4317e-04 L8_sharp:2.9180e-04 L9_sharp:4.1932e-04 L10_sharp:6.4313e-04 L11_sharp:3.8040e-04 L12_sharp:6.4656e-04 total_fnorm:4.4854e+00 total_l1_linf:3.9889e+04 total_spectral:4.4854e+00 L1_fnorm:9.9824e-01 L2_fnorm:1.0101e+00 L3_fnorm:1.0278e+00 L4_fnorm:1.0508e+00 L5_fnorm:1.0799e+00 L6_fnorm:1.0787e+00 L7_fnorm:1.0773e+00 L8_fnorm:1.0721e+00 L9_fnorm:1.0641e+00 L10_fnorm:1.0361e+00 L11_fnorm:1.0524e+00 L12_fnorm:1.0474e+00 L1_l1linf:1.2191e+00 L2_l1linf:1.0748e+00 L3_l1linf:1.0508e+00 L4_l1linf:1.0825e+00 L5_l1linf:1.1400e+00 L6_l1linf:1.0911e+00 L7_l1linf:1.0775e+00 L8_l1linf:1.0881e+00 L9_l1linf:1.1957e+00 L10_l1linf:1.1690e+00 L11_l1linf:1.2869e+00 L12_l1linf:1.2895e+00 L1_spectral:1.4682e-01 L2_spectral:1.4083e-01 L3_spectral:1.5475e-01 L4_spectral:1.0411e-01 L5_spectral:9.8547e-02 L6_spectral:9.3922e-02 L7_spectral:8.1370e-02 L8_spectral:8.8270e-02 L9_spectral:1.2555e-01 L10_spectral:1.6280e-01 L11_spectral:1.6558e-01 L12_spectral:1.7431e-01 ip_v_neg_g:1.0700e-02 cos_v_neg_g:3.1242e-03 v_norm:4.4854e+00 g_norm:7.6352e-01 hv_norm:5.1223e-01 cos_v_hv:1.0363e-02 hg_norm:7.8295e+00 cos_g_hg:4.6627e-01 v_par:5.5429e-04 v_perp:4.4854e+00 L1_cos_v_neg_g:7.2386e-03 L1_v_norm:9.9824e-01 L2_cos_v_neg_g:6.0627e-03 L2_v_norm:1.0101e+00 L3_cos_v_neg_g:4.2252e-03 L3_v_norm:1.0278e+00 L4_cos_v_neg_g:3.4095e-03 L4_v_norm:1.0508e+00 L5_cos_v_neg_g:2.6464e-03 L5_v_norm:1.0799e+00 L6_cos_v_neg_g:2.3720e-03 L6_v_norm:1.0787e+00 L7_cos_v_neg_g:2.3898e-03 L7_v_norm:1.0773e+00 L8_cos_v_neg_g:2.9712e-03 L8_v_norm:1.0721e+00 L9_cos_v_neg_g:3.6156e-03 L9_v_norm:1.0641e+00 L10_cos_v_neg_g:5.5436e-03 L10_v_norm:1.0361e+00 L11_cos_v_neg_g:6.0472e-03 L11_v_norm:1.0524e+00 L12_cos_v_neg_g:7.7194e-03 L12_v_norm:1.0474e+00 +step:9000 train loss:3.545103 +step:9001 train loss:3.511950 +step:9002 train loss:3.520670 +step:9003 train loss:3.458785 +step:9004 train loss:3.458961 +step:9005 train loss:3.472263 +step:9006 train loss:3.473680 +step:9007 train loss:3.493519 +step:9008 train loss:3.446756 +step:9009 train loss:3.445673 +step:9010 train loss:3.481355 +step:9011 train loss:3.477226 +step:9012 train loss:3.591143 +step:9013 train loss:3.418473 +step:9014 train loss:3.490192 +step:9015 train loss:3.490485 +step:9016 
train loss:3.568015 +step:9017 train loss:3.507103 +step:9018 train loss:3.431520 +step:9019 train loss:3.514569 +step:9020 train loss:3.524199 +step:9021 train loss:3.484350 +step:9022 train loss:3.494587 +step:9023 train loss:3.490025 +step:9024 train loss:3.509257 +step:9025 train loss:3.494063 +step:9026 train loss:3.454438 +step:9027 train loss:3.498610 +step:9028 train loss:3.520923 +step:9029 train loss:3.540109 +step:9030 train loss:3.536401 +step:9031 train loss:3.501051 +step:9032 train loss:3.512081 +step:9033 train loss:3.494507 +step:9034 train loss:3.506843 +step:9035 train loss:3.509238 +step:9036 train loss:3.460056 +step:9037 train loss:3.454495 +step:9038 train loss:3.577708 +step:9039 train loss:3.479745 +step:9040 train loss:3.495807 +step:9041 train loss:3.544854 +step:9042 train loss:3.401262 +step:9043 train loss:3.495847 +step:9044 train loss:3.513024 +step:9045 train loss:3.458120 +step:9046 train loss:3.499758 +step:9047 train loss:3.495119 +step:9048 train loss:3.475445 +step:9049 train loss:3.510231 +step:9050 train loss:3.462081 +step:9051 train loss:3.504315 +step:9052 train loss:3.432726 +step:9053 train loss:3.556141 +step:9054 train loss:3.567938 +step:9055 train loss:3.492919 +step:9056 train loss:3.556528 +step:9057 train loss:3.408712 +step:9058 train loss:3.491896 +step:9059 train loss:3.570355 +step:9060 train loss:3.501071 +step:9061 train loss:3.527490 +step:9062 train loss:3.458225 +step:9063 train loss:3.591807 +step:9064 train loss:3.475443 +step:9065 train loss:3.488261 +step:9066 train loss:3.505962 +step:9067 train loss:3.468369 +step:9068 train loss:3.541496 +step:9069 train loss:3.499614 +step:9070 train loss:3.549581 +step:9071 train loss:3.484064 +step:9072 train loss:3.504796 +step:9073 train loss:3.465145 +step:9074 train loss:3.546378 +step:9075 train loss:3.493140 +step:9076 train loss:3.461190 +step:9077 train loss:3.535843 +step:9078 train loss:3.473541 +step:9079 train loss:3.517766 +step:9080 train loss:3.451205 +step:9081 train loss:3.491677 +step:9082 train loss:3.518530 +step:9083 train loss:3.545606 +step:9084 train loss:3.436773 +step:9085 train loss:3.509517 +step:9086 train loss:3.491033 +step:9087 train loss:3.440239 +step:9088 train loss:3.500328 +step:9089 train loss:3.516196 +step:9090 train loss:3.450983 +step:9091 train loss:3.550981 +step:9092 train loss:3.477803 +step:9093 train loss:3.474758 +step:9094 train loss:3.603192 +step:9095 train loss:3.470567 +step:9096 train loss:3.483989 +step:9097 train loss:3.470524 +step:9098 train loss:3.465178 +step:9099 train loss:3.588957 +step:9100 train loss:3.620685 +step:9101 train loss:3.538502 +step:9102 train loss:3.481512 +step:9103 train loss:3.486990 +step:9104 train loss:3.572810 +step:9105 train loss:3.434278 +step:9106 train loss:3.560228 +step:9107 train loss:3.497239 +step:9108 train loss:3.477242 +step:9109 train loss:3.505340 +step:9110 train loss:3.506926 +step:9111 train loss:3.484890 +step:9112 train loss:3.491213 +step:9113 train loss:3.517223 +step:9114 train loss:3.468339 +step:9115 train loss:3.498425 +step:9116 train loss:3.519646 +step:9117 train loss:3.529844 +step:9118 train loss:3.500189 +step:9119 train loss:3.423324 +step:9120 train loss:3.520001 +step:9121 train loss:3.552281 +step:9122 train loss:3.497087 +step:9123 train loss:3.517483 +step:9124 train loss:3.547205 +step:9125 train loss:3.502021 +step:9126 train loss:3.473390 +step:9127 train loss:3.508041 +step:9128 train loss:3.563232 +step:9129 train loss:3.518049 +step:9130 train loss:3.530144 
+step:9131 train loss:3.509700 +step:9132 train loss:3.517296 +step:9133 train loss:3.506047 +step:9134 train loss:3.479203 +step:9135 train loss:3.505947 +step:9136 train loss:3.504383 +step:9137 train loss:3.559218 +step:9138 train loss:3.476677 +step:9139 train loss:3.552776 +step:9140 train loss:3.473497 +step:9141 train loss:3.452133 +step:9142 train loss:3.632840 +step:9143 train loss:3.457608 +step:9144 train loss:3.550334 +step:9145 train loss:3.556453 +step:9146 train loss:3.472610 +step:9147 train loss:3.545745 +step:9148 train loss:3.562760 +step:9149 train loss:3.474653 +step:9150 train loss:3.495695 +step:9151 train loss:3.558384 +step:9152 train loss:3.513658 +step:9153 train loss:3.480036 +step:9154 train loss:3.494360 +step:9155 train loss:3.459861 +step:9156 train loss:3.464572 +step:9157 train loss:3.481000 +step:9158 train loss:3.460398 +step:9159 train loss:3.553849 +step:9160 train loss:3.434505 +step:9161 train loss:3.463049 +step:9162 train loss:3.550125 +step:9163 train loss:3.495648 +step:9164 train loss:3.463846 +step:9165 train loss:3.462639 +step:9166 train loss:3.519437 +step:9167 train loss:3.461123 +step:9168 train loss:3.503805 +step:9169 train loss:3.441223 +step:9170 train loss:3.461853 +step:9171 train loss:3.527112 +step:9172 train loss:3.450794 +step:9173 train loss:3.570310 +step:9174 train loss:3.499707 +step:9175 train loss:3.479986 +step:9176 train loss:3.460673 +step:9177 train loss:3.507750 +step:9178 train loss:3.454201 +step:9179 train loss:3.410476 +step:9180 train loss:3.504792 +step:9181 train loss:3.515898 +step:9182 train loss:3.485612 +step:9183 train loss:3.493487 +step:9184 train loss:3.486741 +step:9185 train loss:3.501351 +step:9186 train loss:3.466041 +step:9187 train loss:3.536953 +step:9188 train loss:3.575510 +step:9189 train loss:3.499317 +step:9190 train loss:3.503927 +step:9191 train loss:3.493525 +step:9192 train loss:3.506705 +step:9193 train loss:3.510688 +step:9194 train loss:3.443021 +step:9195 train loss:3.437652 +step:9196 train loss:3.485361 +step:9197 train loss:3.444798 +step:9198 train loss:3.521348 +step:9199 train loss:3.469213 +step:9200 train loss:3.496266 +step:9201 train loss:3.531211 +step:9202 train loss:3.518311 +step:9203 train loss:3.471401 +step:9204 train loss:3.669314 +step:9205 train loss:3.582962 +step:9206 train loss:3.499292 +step:9207 train loss:3.548392 +step:9208 train loss:3.525431 +step:9209 train loss:3.548733 +step:9210 train loss:3.439724 +step:9211 train loss:3.465050 +step:9212 train loss:3.467061 +step:9213 train loss:3.527615 +step:9214 train loss:3.470096 +step:9215 train loss:3.536536 +step:9216 train loss:3.500210 +step:9217 train loss:3.439455 +step:9218 train loss:3.530507 +step:9219 train loss:3.490012 +step:9220 train loss:3.535882 +step:9221 train loss:3.590471 +step:9222 train loss:3.532510 +step:9223 train loss:3.701565 +step:9224 train loss:3.537689 +step:9225 train loss:3.470314 +step:9226 train loss:3.488141 +step:9227 train loss:3.505472 +step:9228 train loss:3.507366 +step:9229 train loss:3.464661 +step:9230 train loss:3.527366 +step:9231 train loss:3.413010 +step:9232 train loss:3.472724 +step:9233 train loss:3.492026 +step:9234 train loss:3.550283 +step:9235 train loss:3.553882 +step:9236 train loss:3.458172 +step:9237 train loss:3.519883 +step:9238 train loss:3.492284 +step:9239 train loss:3.484197 +step:9240 train loss:3.453419 +step:9241 train loss:3.484362 +step:9242 train loss:3.496026 +step:9243 train loss:3.492392 +step:9244 train loss:3.466433 +step:9245 train 
loss:3.473576 +step:9246 train loss:3.472439 +step:9247 train loss:3.483870 +step:9248 train loss:3.493716 +step:9249 train loss:3.490283 +step:9250 validation loss:3.429057 +step:9250 train loss:3.532397 +step:9251 train loss:3.474360 +step:9252 train loss:3.542862 +step:9253 train loss:3.535604 +step:9254 train loss:3.465072 +step:9255 train loss:3.581740 +step:9256 train loss:3.460600 +step:9257 train loss:3.406569 +step:9258 train loss:3.484376 +step:9259 train loss:3.487536 +step:9260 train loss:3.581518 +step:9261 train loss:3.461157 +step:9262 train loss:3.532971 +step:9263 train loss:3.435727 +step:9264 train loss:3.582383 +step:9265 train loss:3.611357 +step:9266 train loss:3.539254 +step:9267 train loss:3.488216 +step:9268 train loss:3.481330 +step:9269 train loss:3.508499 +step:9270 train loss:3.430565 +step:9271 train loss:3.541421 +step:9272 train loss:3.483780 +step:9273 train loss:3.500428 +step:9274 train loss:3.504469 +step:9275 train loss:3.501299 +step:9276 train loss:3.529085 +step:9277 train loss:3.504029 +step:9278 train loss:3.517084 +step:9279 train loss:3.509573 +step:9280 train loss:3.511450 +step:9281 train loss:3.482688 +step:9282 train loss:3.601882 +step:9283 train loss:3.491689 +step:9284 train loss:3.452798 +step:9285 train loss:3.475312 +step:9286 train loss:3.525749 +step:9287 train loss:3.495178 +step:9288 train loss:3.505262 +step:9289 train loss:3.476212 +step:9290 train loss:3.503513 +step:9291 train loss:3.480130 +step:9292 train loss:3.529939 +step:9293 train loss:3.578934 +step:9294 train loss:3.498733 +step:9295 train loss:3.483178 +step:9296 train loss:3.438881 +step:9297 train loss:3.504777 +step:9298 train loss:3.445782 +step:9299 train loss:3.433096 +step:9300 train loss:3.534445 +step:9301 train loss:3.561192 +step:9302 train loss:3.500218 +step:9303 train loss:3.547546 +step:9304 train loss:3.469060 +step:9305 train loss:3.462503 +step:9306 train loss:3.463232 +step:9307 train loss:3.465439 +step:9308 train loss:3.436935 +step:9309 train loss:3.427739 +step:9310 train loss:3.483228 +step:9311 train loss:3.544977 +step:9312 train loss:3.498083 +step:9313 train loss:3.439093 +step:9314 train loss:3.471727 +step:9315 train loss:3.500509 +step:9316 train loss:3.486683 +step:9317 train loss:3.462832 +step:9318 train loss:3.551600 +step:9319 train loss:3.459316 +step:9320 train loss:3.482846 +step:9321 train loss:3.494874 +step:9322 train loss:3.506085 +step:9323 train loss:3.577099 +step:9324 train loss:3.522495 +step:9325 train loss:3.463431 +step:9326 train loss:3.538056 +step:9327 train loss:3.534633 +step:9328 train loss:3.533308 +step:9329 train loss:3.427073 +step:9330 train loss:3.592746 +step:9331 train loss:3.523139 +step:9332 train loss:3.545404 +step:9333 train loss:3.562233 +step:9334 train loss:3.497436 +step:9335 train loss:3.592829 +step:9336 train loss:3.549587 +step:9337 train loss:3.503884 +step:9338 train loss:3.557850 +step:9339 train loss:3.539946 +step:9340 train loss:3.495967 +step:9341 train loss:3.586424 +step:9342 train loss:3.480397 +step:9343 train loss:3.475044 +step:9344 train loss:3.479133 +step:9345 train loss:3.622199 +step:9346 train loss:3.457372 +step:9347 train loss:3.475450 +step:9348 train loss:3.497725 +step:9349 train loss:3.444163 +step:9350 train loss:3.519176 +step:9351 train loss:3.494925 +step:9352 train loss:3.483398 +step:9353 train loss:3.516033 +step:9354 train loss:3.482410 +step:9355 train loss:3.477861 +step:9356 train loss:3.522658 +step:9357 train loss:3.475999 +step:9358 train loss:3.508373 
+step:9359 train loss:3.447783 +step:9360 train loss:3.466953 +step:9361 train loss:3.465320 +step:9362 train loss:3.453451 +step:9363 train loss:3.517406 +step:9364 train loss:3.496442 +step:9365 train loss:3.501537 +step:9366 train loss:3.496415 +step:9367 train loss:3.507313 +step:9368 train loss:3.483572 +step:9369 train loss:3.478746 +step:9370 train loss:3.489793 +step:9371 train loss:3.508576 +step:9372 train loss:3.475247 +step:9373 train loss:3.458889 +step:9374 train loss:3.496549 +step:9375 train loss:3.506516 +step:9376 train loss:3.446292 +step:9377 train loss:3.518940 +step:9378 train loss:3.522317 +step:9379 train loss:3.549916 +step:9380 train loss:3.478479 +step:9381 train loss:3.488884 +step:9382 train loss:3.463795 +step:9383 train loss:3.460443 +step:9384 train loss:3.427598 +step:9385 train loss:3.504298 +step:9386 train loss:3.530488 +step:9387 train loss:3.509687 +step:9388 train loss:3.446016 +step:9389 train loss:3.464140 +step:9390 train loss:3.507764 +step:9391 train loss:3.514539 +step:9392 train loss:3.476802 +step:9393 train loss:3.467874 +step:9394 train loss:3.498848 +step:9395 train loss:3.490170 +step:9396 train loss:3.637691 +step:9397 train loss:3.528826 +step:9398 train loss:3.547883 +step:9399 train loss:3.499450 +step:9400 train loss:3.501157 +step:9401 train loss:3.495037 +step:9402 train loss:3.497311 +step:9403 train loss:3.428795 +step:9404 train loss:3.504370 +step:9405 train loss:3.464593 +step:9406 train loss:3.517493 +step:9407 train loss:3.461669 +step:9408 train loss:3.398253 +step:9409 train loss:3.462113 +step:9410 train loss:3.542505 +step:9411 train loss:3.504037 +step:9412 train loss:3.534325 +step:9413 train loss:3.549815 +step:9414 train loss:3.489309 +step:9415 train loss:3.482845 +step:9416 train loss:3.498122 +step:9417 train loss:3.449111 +step:9418 train loss:3.478955 +step:9419 train loss:3.445954 +step:9420 train loss:3.463867 +step:9421 train loss:3.513371 +step:9422 train loss:3.463813 +step:9423 train loss:3.527503 +step:9424 train loss:3.468328 +step:9425 train loss:3.508182 +step:9426 train loss:3.513656 +step:9427 train loss:3.486519 +step:9428 train loss:3.590878 +step:9429 train loss:3.481633 +step:9430 train loss:3.439812 +step:9431 train loss:3.531208 +step:9432 train loss:3.491724 +step:9433 train loss:3.532951 +step:9434 train loss:3.485398 +step:9435 train loss:3.510570 +step:9436 train loss:3.482102 +step:9437 train loss:3.491312 +step:9438 train loss:3.487406 +step:9439 train loss:3.487297 +step:9440 train loss:3.478056 +step:9441 train loss:3.487702 +step:9442 train loss:3.428874 +step:9443 train loss:3.482149 +step:9444 train loss:3.550494 +step:9445 train loss:3.480890 +step:9446 train loss:3.457224 +step:9447 train loss:3.524765 +step:9448 train loss:3.458276 +step:9449 train loss:3.481985 +step:9450 train loss:3.522874 +step:9451 train loss:3.439342 +step:9452 train loss:3.491807 +step:9453 train loss:3.472623 +step:9454 train loss:3.535512 +step:9455 train loss:3.517449 +step:9456 train loss:3.443573 +step:9457 train loss:3.486732 +step:9458 train loss:3.473853 +step:9459 train loss:3.469347 +step:9460 train loss:3.508819 +step:9461 train loss:3.537652 +step:9462 train loss:3.484488 +step:9463 train loss:3.515836 +step:9464 train loss:3.469487 +step:9465 train loss:3.560060 +step:9466 train loss:3.511020 +step:9467 train loss:3.535431 +step:9468 train loss:3.479379 +step:9469 train loss:3.466816 +step:9470 train loss:3.466074 +step:9471 train loss:3.506505 +step:9472 train loss:3.530615 +step:9473 train 
loss:3.520314 +step:9474 train loss:3.465086 +step:9475 train loss:3.456032 +step:9476 train loss:3.674992 +step:9477 train loss:3.548340 +step:9478 train loss:3.524777 +step:9479 train loss:3.623437 +step:9480 train loss:3.471395 +step:9481 train loss:3.504177 +step:9482 train loss:3.530805 +step:9483 train loss:3.488001 +step:9484 train loss:3.513938 +step:9485 train loss:3.439344 +step:9486 train loss:3.473546 +step:9487 train loss:3.506469 +step:9488 train loss:3.462222 +step:9489 train loss:3.507523 +step:9490 train loss:3.474858 +step:9491 train loss:3.515238 +step:9492 train loss:3.533153 +step:9493 train loss:3.510393 +step:9494 train loss:3.517325 +step:9495 train loss:3.468722 +step:9496 train loss:3.529444 +step:9497 train loss:3.544797 +step:9498 train loss:3.491736 +step:9499 train loss:3.543617 +step:9500 validation loss:3.428670 total_sharp:1.4000e-03 L1_sharp:8.1723e-04 L2_sharp:3.2278e-04 L3_sharp:6.7358e-04 L4_sharp:2.2061e-04 L5_sharp:2.3168e-04 L6_sharp:2.2795e-04 L7_sharp:2.8094e-04 L8_sharp:4.0729e-04 L9_sharp:5.1381e-04 L10_sharp:5.8354e-04 L11_sharp:4.5542e-04 L12_sharp:3.6955e-04 total_fnorm:4.5100e+00 total_l1_linf:4.0157e+04 total_spectral:4.5100e+00 L1_fnorm:1.0371e+00 L2_fnorm:1.0522e+00 L3_fnorm:1.0503e+00 L4_fnorm:1.0623e+00 L5_fnorm:1.0845e+00 L6_fnorm:1.0912e+00 L7_fnorm:1.0811e+00 L8_fnorm:1.0745e+00 L9_fnorm:1.0672e+00 L10_fnorm:1.0381e+00 L11_fnorm:1.0436e+00 L12_fnorm:1.0375e+00 L1_l1linf:1.2425e+00 L2_l1linf:1.1004e+00 L3_l1linf:1.0818e+00 L4_l1linf:1.1194e+00 L5_l1linf:1.1183e+00 L6_l1linf:1.0954e+00 L7_l1linf:1.0719e+00 L8_l1linf:1.1004e+00 L9_l1linf:1.1646e+00 L10_l1linf:1.1344e+00 L11_l1linf:1.2101e+00 L12_l1linf:1.3505e+00 L1_spectral:1.5663e-01 L2_spectral:1.4573e-01 L3_spectral:1.6524e-01 L4_spectral:1.0923e-01 L5_spectral:9.7580e-02 L6_spectral:1.0364e-01 L7_spectral:8.3766e-02 L8_spectral:9.7647e-02 L9_spectral:1.4669e-01 L10_spectral:1.6183e-01 L11_spectral:1.6984e-01 L12_spectral:1.5908e-01 ip_v_neg_g:1.3379e-02 cos_v_neg_g:3.8394e-03 v_norm:4.5100e+00 g_norm:7.7261e-01 hv_norm:4.4407e-01 cos_v_hv:1.4219e-02 hg_norm:5.1174e+00 cos_g_hg:6.0878e-01 v_par:5.1189e-04 v_perp:4.5100e+00 L1_cos_v_neg_g:1.1222e-02 L1_v_norm:1.0371e+00 L2_cos_v_neg_g:1.1787e-02 L2_v_norm:1.0522e+00 L3_cos_v_neg_g:7.1153e-03 L3_v_norm:1.0503e+00 L4_cos_v_neg_g:2.8506e-03 L4_v_norm:1.0623e+00 L5_cos_v_neg_g:2.8147e-03 L5_v_norm:1.0845e+00 L6_cos_v_neg_g:3.4885e-03 L6_v_norm:1.0912e+00 L7_cos_v_neg_g:2.1674e-03 L7_v_norm:1.0811e+00 L8_cos_v_neg_g:3.2384e-03 L8_v_norm:1.0745e+00 L9_cos_v_neg_g:3.5744e-03 L9_v_norm:1.0672e+00 L10_cos_v_neg_g:5.7765e-03 L10_v_norm:1.0381e+00 L11_cos_v_neg_g:7.6760e-03 L11_v_norm:1.0436e+00 L12_cos_v_neg_g:5.8465e-03 L12_v_norm:1.0375e+00 +step:9500 train loss:3.532079 +step:9501 train loss:3.511296 +step:9502 train loss:3.484926 +step:9503 train loss:3.500692 +step:9504 train loss:3.452817 +step:9505 train loss:3.480042 +step:9506 train loss:3.494618 +step:9507 train loss:3.479217 +step:9508 train loss:3.672349 +step:9509 train loss:3.491551 +step:9510 train loss:3.477692 +step:9511 train loss:3.502698 +step:9512 train loss:3.536714 +step:9513 train loss:3.526940 +step:9514 train loss:3.492006 +step:9515 train loss:3.392802 +step:9516 train loss:3.494401 +step:9517 train loss:3.532283 +step:9518 train loss:3.505209 +step:9519 train loss:3.516986 +step:9520 train loss:3.401448 +step:9521 train loss:3.396786 +step:9522 train loss:3.516530 +step:9523 train loss:3.513091 +step:9524 train loss:3.513220 +step:9525 train loss:3.559631 +step:9526 
train loss:3.573960 +step:9527 train loss:3.530001 +step:9528 train loss:3.463858 +step:9529 train loss:3.506973 +step:9530 train loss:3.554652 +step:9531 train loss:3.459867 +step:9532 train loss:3.509344 +step:9533 train loss:3.480869 +step:9534 train loss:3.562580 +step:9535 train loss:3.483899 +step:9536 train loss:3.462127 +step:9537 train loss:3.409977 +step:9538 train loss:3.426897 +step:9539 train loss:3.500138 +step:9540 train loss:3.417609 +step:9541 train loss:3.475659 +step:9542 train loss:3.603879 +step:9543 train loss:3.503064 +step:9544 train loss:3.543589 +step:9545 train loss:3.476563 +step:9546 train loss:3.503026 +step:9547 train loss:3.545214 +step:9548 train loss:3.487923 +step:9549 train loss:3.456488 +step:9550 train loss:3.488128 +step:9551 train loss:3.480076 +step:9552 train loss:3.504165 +step:9553 train loss:3.498050 +step:9554 train loss:3.544784 +step:9555 train loss:3.548721 +step:9556 train loss:3.459059 +step:9557 train loss:3.477127 +step:9558 train loss:3.542991 +step:9559 train loss:3.548821 +step:9560 train loss:3.460295 +step:9561 train loss:3.488175 +step:9562 train loss:3.526712 +step:9563 train loss:3.474231 +step:9564 train loss:3.508505 +step:9565 train loss:3.486844 +step:9566 train loss:3.458064 +step:9567 train loss:3.525734 +step:9568 train loss:3.495448 +step:9569 train loss:3.537892 +step:9570 train loss:3.431316 +step:9571 train loss:3.506442 +step:9572 train loss:3.449903 +step:9573 train loss:3.478837 +step:9574 train loss:3.458917 +step:9575 train loss:3.528581 +step:9576 train loss:3.420015 +step:9577 train loss:3.467857 +step:9578 train loss:3.473566 +step:9579 train loss:3.473084 +step:9580 train loss:3.535554 +step:9581 train loss:3.529582 +step:9582 train loss:3.498495 +step:9583 train loss:3.522712 +step:9584 train loss:3.461478 +step:9585 train loss:3.480907 +step:9586 train loss:3.534900 +step:9587 train loss:3.503392 +step:9588 train loss:3.488843 +step:9589 train loss:3.546480 +step:9590 train loss:3.511620 +step:9591 train loss:3.478625 +step:9592 train loss:3.497440 +step:9593 train loss:3.499660 +step:9594 train loss:3.512045 +step:9595 train loss:3.489960 +step:9596 train loss:3.574505 +step:9597 train loss:3.482500 +step:9598 train loss:3.446627 +step:9599 train loss:3.450603 +step:9600 train loss:3.536825 +step:9601 train loss:3.454996 +step:9602 train loss:3.537338 +step:9603 train loss:3.532192 +step:9604 train loss:3.412910 +step:9605 train loss:3.499881 +step:9606 train loss:3.557199 +step:9607 train loss:3.475598 +step:9608 train loss:3.482506 +step:9609 train loss:3.489983 +step:9610 train loss:3.532785 +step:9611 train loss:3.466583 +step:9612 train loss:3.474841 +step:9613 train loss:3.511715 +step:9614 train loss:3.485121 +step:9615 train loss:3.672622 +step:9616 train loss:3.487154 +step:9617 train loss:3.471885 +step:9618 train loss:3.429879 +step:9619 train loss:3.494843 +step:9620 train loss:3.550821 +step:9621 train loss:3.474042 +step:9622 train loss:3.483744 +step:9623 train loss:3.525143 +step:9624 train loss:3.509488 +step:9625 train loss:3.525701 +step:9626 train loss:3.497559 +step:9627 train loss:3.576354 +step:9628 train loss:3.542024 +step:9629 train loss:3.457026 +step:9630 train loss:3.513081 +step:9631 train loss:3.497269 +step:9632 train loss:3.467472 +step:9633 train loss:3.510139 +step:9634 train loss:3.578757 +step:9635 train loss:3.478830 +step:9636 train loss:3.427314 +step:9637 train loss:3.559025 +step:9638 train loss:3.440331 +step:9639 train loss:3.411367 +step:9640 train loss:3.535166 
+step:9641 train loss:3.506025 +step:9642 train loss:3.486318 +step:9643 train loss:3.488903 +step:9644 train loss:3.545215 +step:9645 train loss:3.471294 +step:9646 train loss:3.507195 +step:9647 train loss:3.516892 +step:9648 train loss:3.468019 +step:9649 train loss:3.441059 +step:9650 train loss:3.459193 +step:9651 train loss:3.547916 +step:9652 train loss:3.528961 +step:9653 train loss:3.471006 +step:9654 train loss:3.454210 +step:9655 train loss:3.449238 +step:9656 train loss:3.444025 +step:9657 train loss:3.470379 +step:9658 train loss:3.527661 +step:9659 train loss:3.635865 +step:9660 train loss:3.419673 +step:9661 train loss:3.438956 +step:9662 train loss:3.459901 +step:9663 train loss:3.500361 +step:9664 train loss:3.551341 +step:9665 train loss:3.393369 +step:9666 train loss:3.437113 +step:9667 train loss:3.573342 +step:9668 train loss:3.551211 +step:9669 train loss:3.571437 +step:9670 train loss:3.551384 +step:9671 train loss:3.549684 +step:9672 train loss:3.465747 +step:9673 train loss:3.488746 +step:9674 train loss:3.499323 +step:9675 train loss:3.496254 +step:9676 train loss:3.453828 +step:9677 train loss:3.460105 +step:9678 train loss:3.495921 +step:9679 train loss:3.486046 +step:9680 train loss:3.484822 +step:9681 train loss:3.471904 +step:9682 train loss:3.538092 +step:9683 train loss:3.510715 +step:9684 train loss:3.428558 +step:9685 train loss:3.514636 +step:9686 train loss:3.548278 +step:9687 train loss:3.454628 +step:9688 train loss:3.542150 +step:9689 train loss:3.641476 +step:9690 train loss:3.482833 +step:9691 train loss:3.472936 +step:9692 train loss:3.438658 +step:9693 train loss:3.433969 +step:9694 train loss:3.456306 +step:9695 train loss:3.565480 +step:9696 train loss:3.597393 +step:9697 train loss:3.504704 +step:9698 train loss:3.542911 +step:9699 train loss:3.500010 +step:9700 train loss:3.498171 +step:9701 train loss:3.558303 +step:9702 train loss:3.465290 +step:9703 train loss:3.491826 +step:9704 train loss:3.571118 +step:9705 train loss:3.471346 +step:9706 train loss:3.463975 +step:9707 train loss:3.514292 +step:9708 train loss:3.460305 +step:9709 train loss:3.481770 +step:9710 train loss:3.499254 +step:9711 train loss:3.472687 +step:9712 train loss:3.483970 +step:9713 train loss:3.537375 +step:9714 train loss:3.492767 +step:9715 train loss:3.507774 +step:9716 train loss:3.533952 +step:9717 train loss:3.454163 +step:9718 train loss:3.462452 +step:9719 train loss:3.547078 +step:9720 train loss:3.480399 +step:9721 train loss:3.468582 +step:9722 train loss:3.532413 +step:9723 train loss:3.477071 +step:9724 train loss:3.507059 +step:9725 train loss:3.556878 +step:9726 train loss:3.499030 +step:9727 train loss:3.478874 +step:9728 train loss:3.511642 +step:9729 train loss:3.539506 +step:9730 train loss:3.613832 +step:9731 train loss:3.530278 +step:9732 train loss:3.493001 +step:9733 train loss:3.532623 +step:9734 train loss:3.458072 +step:9735 train loss:3.564962 +step:9736 train loss:3.465050 +step:9737 train loss:3.521634 +step:9738 train loss:3.487858 +step:9739 train loss:3.561146 +step:9740 train loss:3.525851 +step:9741 train loss:3.467873 +step:9742 train loss:3.559431 +step:9743 train loss:3.433356 +step:9744 train loss:3.491269 +step:9745 train loss:3.451873 +step:9746 train loss:3.488833 +step:9747 train loss:3.479776 +step:9748 train loss:3.377574 +step:9749 train loss:3.479449 +step:9750 validation loss:3.421128 +step:9750 train loss:3.455857 +step:9751 train loss:3.597942 +step:9752 train loss:3.486339 +step:9753 train loss:3.439960 +step:9754 
train loss:3.473950 +step:9755 train loss:3.471315 +step:9756 train loss:3.471106 +step:9757 train loss:3.441947 +step:9758 train loss:3.431301 +step:9759 train loss:3.476882 +step:9760 train loss:3.420527 +step:9761 train loss:3.463051 +step:9762 train loss:3.458893 +step:9763 train loss:3.478277 +step:9764 train loss:3.465006 +step:9765 train loss:3.426036 +step:9766 train loss:3.516713 +step:9767 train loss:3.474905 +step:9768 train loss:3.487307 +step:9769 train loss:3.438818 +step:9770 train loss:3.440709 +step:9771 train loss:3.487533 +step:9772 train loss:3.499687 +step:9773 train loss:3.477291 +step:9774 train loss:3.448253 +step:9775 train loss:3.537220 +step:9776 train loss:3.534252 +step:9777 train loss:3.426326 +step:9778 train loss:3.431822 +step:9779 train loss:3.435881 +step:9780 train loss:3.436390 +step:9781 train loss:3.454234 +step:9782 train loss:3.532538 +step:9783 train loss:3.441705 +step:9784 train loss:3.469421 +step:9785 train loss:3.463351 +step:9786 train loss:3.497499 +step:9787 train loss:3.522727 +step:9788 train loss:3.448258 +step:9789 train loss:3.459440 +step:9790 train loss:3.418975 +step:9791 train loss:3.465630 +step:9792 train loss:3.484445 +step:9793 train loss:3.498756 +step:9794 train loss:3.475985 +step:9795 train loss:3.481131 +step:9796 train loss:3.465574 +step:9797 train loss:3.461132 +step:9798 train loss:3.476241 +step:9799 train loss:3.478905 +step:9800 train loss:3.551785 +step:9801 train loss:3.475095 +step:9802 train loss:3.531993 +step:9803 train loss:3.391490 +step:9804 train loss:3.484817 +step:9805 train loss:3.492463 +step:9806 train loss:3.466028 +step:9807 train loss:3.434832 +step:9808 train loss:3.359895 +step:9809 train loss:3.548757 +step:9810 train loss:3.502909 +step:9811 train loss:3.492190 +step:9812 train loss:3.471639 +step:9813 train loss:3.542211 +step:9814 train loss:3.539260 +step:9815 train loss:3.446545 +step:9816 train loss:3.439916 +step:9817 train loss:3.471214 +step:9818 train loss:3.495745 +step:9819 train loss:3.465908 +step:9820 train loss:3.535224 +step:9821 train loss:3.511987 +step:9822 train loss:3.485322 +step:9823 train loss:3.543872 +step:9824 train loss:3.451877 +step:9825 train loss:3.533983 +step:9826 train loss:3.526532 +step:9827 train loss:3.532728 +step:9828 train loss:3.451190 +step:9829 train loss:3.456819 +step:9830 train loss:3.443524 +step:9831 train loss:3.502572 +step:9832 train loss:3.514704 +step:9833 train loss:3.427759 +step:9834 train loss:3.479643 +step:9835 train loss:3.445134 +step:9836 train loss:3.510921 +step:9837 train loss:3.478624 +step:9838 train loss:3.518716 +step:9839 train loss:3.491543 +step:9840 train loss:3.460302 +step:9841 train loss:3.468435 +step:9842 train loss:3.527387 +step:9843 train loss:3.522533 +step:9844 train loss:3.471725 +step:9845 train loss:3.503604 +step:9846 train loss:3.436723 +step:9847 train loss:3.567115 +step:9848 train loss:3.491360 +step:9849 train loss:3.514295 +step:9850 train loss:3.432376 +step:9851 train loss:3.489405 +step:9852 train loss:3.449397 +step:9853 train loss:3.473734 +step:9854 train loss:3.482908 +step:9855 train loss:3.434748 +step:9856 train loss:3.438581 +step:9857 train loss:3.429758 +step:9858 train loss:3.490047 +step:9859 train loss:3.411757 +step:9860 train loss:3.648201 +step:9861 train loss:3.478099 +step:9862 train loss:3.441098 +step:9863 train loss:3.423347 +step:9864 train loss:3.546884 +step:9865 train loss:3.423145 +step:9866 train loss:3.466041 +step:9867 train loss:3.463205 +step:9868 train loss:3.522995 
+step:9869 train loss:3.484533 +step:9870 train loss:3.455662 +step:9871 train loss:3.498087 +step:9872 train loss:3.439851 +step:9873 train loss:3.490524 +step:9874 train loss:3.456151 +step:9875 train loss:3.459286 +step:9876 train loss:3.425081 +step:9877 train loss:3.475721 +step:9878 train loss:3.504311 +step:9879 train loss:3.506169 +step:9880 train loss:3.439624 +step:9881 train loss:3.492192 +step:9882 train loss:3.449923 +step:9883 train loss:3.460111 +step:9884 train loss:3.455131 +step:9885 train loss:3.518347 +step:9886 train loss:3.485478 +step:9887 train loss:3.485840 +step:9888 train loss:3.508938 +step:9889 train loss:3.543289 +step:9890 train loss:3.455939 +step:9891 train loss:3.460712 +step:9892 train loss:3.432253 +step:9893 train loss:3.554599 +step:9894 train loss:3.461791 +step:9895 train loss:3.400693 +step:9896 train loss:3.554960 +step:9897 train loss:3.429216 +step:9898 train loss:3.497936 +step:9899 train loss:3.475445 +step:9900 train loss:3.520841 +step:9901 train loss:3.445043 +step:9902 train loss:3.485784 +step:9903 train loss:3.458511 +step:9904 train loss:3.512358 +step:9905 train loss:3.413543 +step:9906 train loss:3.452425 +step:9907 train loss:3.461235 +step:9908 train loss:3.456831 +step:9909 train loss:3.477446 +step:9910 train loss:3.499732 +step:9911 train loss:3.583235 +step:9912 train loss:3.460014 +step:9913 train loss:3.462370 +step:9914 train loss:3.473029 +step:9915 train loss:3.473867 +step:9916 train loss:3.420532 +step:9917 train loss:3.461180 +step:9918 train loss:3.453264 +step:9919 train loss:3.618527 +step:9920 train loss:3.402798 +step:9921 train loss:3.497758 +step:9922 train loss:3.456323 +step:9923 train loss:3.512401 +step:9924 train loss:3.427261 +step:9925 train loss:3.486303 +step:9926 train loss:3.465128 +step:9927 train loss:3.507802 +step:9928 train loss:3.434764 +step:9929 train loss:3.474581 +step:9930 train loss:3.563349 +step:9931 train loss:3.525637 +step:9932 train loss:3.414164 +step:9933 train loss:3.507776 +step:9934 train loss:3.429060 +step:9935 train loss:3.544405 +step:9936 train loss:3.448497 +step:9937 train loss:3.475344 +step:9938 train loss:3.461250 +step:9939 train loss:3.527636 +step:9940 train loss:3.563908 +step:9941 train loss:3.441291 +step:9942 train loss:3.482462 +step:9943 train loss:3.617746 +step:9944 train loss:3.479811 +step:9945 train loss:3.502109 +step:9946 train loss:3.475082 +step:9947 train loss:3.424198 +step:9948 train loss:3.467327 +step:9949 train loss:3.361432 +step:9950 train loss:3.513579 +step:9951 train loss:3.431949 +step:9952 train loss:3.503829 +step:9953 train loss:3.468041 +step:9954 train loss:3.524591 +step:9955 train loss:3.502456 +step:9956 train loss:3.503524 +step:9957 train loss:3.478553 +step:9958 train loss:3.534129 +step:9959 train loss:3.431039 +step:9960 train loss:3.463787 +step:9961 train loss:3.472704 +step:9962 train loss:3.520496 +step:9963 train loss:3.412425 +step:9964 train loss:3.466490 +step:9965 train loss:3.470333 +step:9966 train loss:3.525794 +step:9967 train loss:3.437085 +step:9968 train loss:3.505241 +step:9969 train loss:3.417526 +step:9970 train loss:3.458811 +step:9971 train loss:3.500609 +step:9972 train loss:3.525239 +step:9973 train loss:3.500959 +step:9974 train loss:3.490071 +step:9975 train loss:3.460411 +step:9976 train loss:3.416267 +step:9977 train loss:3.467984 +step:9978 train loss:3.464103 +step:9979 train loss:3.476984 +step:9980 train loss:3.531023 +step:9981 train loss:3.440486 +step:9982 train loss:3.502161 +step:9983 train 
loss:3.417481 +step:9984 train loss:3.483027 +step:9985 train loss:3.427603 +step:9986 train loss:3.481179 +step:9987 train loss:3.534195 +step:9988 train loss:3.538998 +step:9989 train loss:3.430225 +step:9990 train loss:3.571781 +step:9991 train loss:3.417388 +step:9992 train loss:3.497171 +step:9993 train loss:3.483553 +step:9994 train loss:3.599847 +step:9995 train loss:3.537281 +step:9996 train loss:3.456843 +step:9997 train loss:3.497076 +step:9998 train loss:3.548887 +step:9999 train loss:3.513532 +step:10000 validation loss:3.419244 total_sharp:8.9944e-04 L1_sharp:7.4745e-04 L2_sharp:1.0477e-05 L3_sharp:2.0008e-04 L4_sharp:1.7844e-04 L5_sharp:1.4669e-04 L6_sharp:1.3625e-04 L7_sharp:2.0262e-04 L8_sharp:2.9883e-04 L9_sharp:2.9735e-04 L10_sharp:4.0123e-04 L11_sharp:3.3726e-04 L12_sharp:4.7989e-04 total_fnorm:4.5203e+00 total_l1_linf:4.0261e+04 total_spectral:4.5203e+00 L1_fnorm:1.0244e+00 L2_fnorm:1.0454e+00 L3_fnorm:1.0461e+00 L4_fnorm:1.0633e+00 L5_fnorm:1.0897e+00 L6_fnorm:1.0933e+00 L7_fnorm:1.0871e+00 L8_fnorm:1.0805e+00 L9_fnorm:1.0740e+00 L10_fnorm:1.0435e+00 L11_fnorm:1.0558e+00 L12_fnorm:1.0486e+00 L1_l1linf:1.2307e+00 L2_l1linf:1.1050e+00 L3_l1linf:1.0751e+00 L4_l1linf:1.0970e+00 L5_l1linf:1.1031e+00 L6_l1linf:1.1026e+00 L7_l1linf:1.0978e+00 L8_l1linf:1.0932e+00 L9_l1linf:1.1109e+00 L10_l1linf:1.2537e+00 L11_l1linf:1.3037e+00 L12_l1linf:1.2566e+00 L1_spectral:1.5859e-01 L2_spectral:1.4640e-01 L3_spectral:1.6278e-01 L4_spectral:1.0649e-01 L5_spectral:1.1072e-01 L6_spectral:1.0729e-01 L7_spectral:9.5204e-02 L8_spectral:1.0185e-01 L9_spectral:1.3661e-01 L10_spectral:1.7685e-01 L11_spectral:1.8098e-01 L12_spectral:1.8765e-01 ip_v_neg_g:1.0273e-02 cos_v_neg_g:2.3115e-03 v_norm:4.5203e+00 g_norm:9.8316e-01 hv_norm:3.8447e-01 cos_v_hv:1.0575e-02 hg_norm:1.0106e+01 cos_g_hg:5.6649e-01 v_par:4.3364e-04 v_perp:4.5203e+00 L1_cos_v_neg_g:6.3844e-03 L1_v_norm:1.0244e+00 L2_cos_v_neg_g:2.2096e-03 L2_v_norm:1.0454e+00 L3_cos_v_neg_g:2.6133e-03 L3_v_norm:1.0461e+00 L4_cos_v_neg_g:2.1331e-03 L4_v_norm:1.0633e+00 L5_cos_v_neg_g:1.5279e-03 L5_v_norm:1.0897e+00 L6_cos_v_neg_g:1.9996e-03 L6_v_norm:1.0933e+00 L7_cos_v_neg_g:1.9729e-03 L7_v_norm:1.0871e+00 L8_cos_v_neg_g:2.1370e-03 L8_v_norm:1.0805e+00 L9_cos_v_neg_g:2.8463e-03 L9_v_norm:1.0740e+00 L10_cos_v_neg_g:3.9558e-03 L10_v_norm:1.0435e+00 L11_cos_v_neg_g:6.2936e-03 L11_v_norm:1.0558e+00 L12_cos_v_neg_g:8.0630e-03 L12_v_norm:1.0486e+00 diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/config.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..907d9014f71c3a121c1c41a1092e632980991c9f --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/adam_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.002, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 1, 
+ "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "adam", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 43, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "c59ffe06-5783-41a2-a9e1-68e967e0f6b3", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_1000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..60b92df4da8916a308edf32bcf3fa8968e52d4ed --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 3.609609603881836, + "total_l1_linf_norm": 30672.56640625, + "total_spectral_norm": 3.609610080718994, + "layer_1_update_fnorm": 0.5754238963127136, + "layer_1_max_l1_linf_norm": 0.7812140583992004, + "layer_1_max_spectral_norm": 0.11731904745101929, + "layer_2_update_fnorm": 0.6238077282905579, + "layer_2_max_l1_linf_norm": 0.776371955871582, + "layer_2_max_spectral_norm": 0.10288693010807037, + "layer_3_update_fnorm": 0.6486547589302063, + "layer_3_max_l1_linf_norm": 0.7876986861228943, + "layer_3_max_spectral_norm": 0.10425020009279251, + "layer_4_update_fnorm": 0.7125692963600159, + "layer_4_max_l1_linf_norm": 0.8216893672943115, + "layer_4_max_spectral_norm": 0.11807137727737427, + "layer_5_update_fnorm": 0.7447097301483154, + "layer_5_max_l1_linf_norm": 0.8667577505111694, + "layer_5_max_spectral_norm": 0.12326911091804504, + "layer_6_update_fnorm": 0.7524949312210083, + "layer_6_max_l1_linf_norm": 0.8519479036331177, + "layer_6_max_spectral_norm": 0.11520105600357056, + "layer_7_update_fnorm": 0.7751098871231079, + "layer_7_max_l1_linf_norm": 0.8949735164642334, + "layer_7_max_spectral_norm": 0.11024332791566849, + "layer_8_update_fnorm": 0.7825725674629211, + "layer_8_max_l1_linf_norm": 0.8619214296340942, + "layer_8_max_spectral_norm": 0.1058034598827362, + "layer_9_update_fnorm": 0.7995342016220093, + "layer_9_max_l1_linf_norm": 0.920696496963501, + "layer_9_max_spectral_norm": 0.098765529692173, + "layer_10_update_fnorm": 0.8042981028556824, + "layer_10_max_l1_linf_norm": 0.879180908203125, + "layer_10_max_spectral_norm": 0.1032271459698677, + "layer_11_update_fnorm": 0.8000339865684509, + "layer_11_max_l1_linf_norm": 0.9083056449890137, + "layer_11_max_spectral_norm": 0.10452089458703995, + "layer_12_update_fnorm": 0.7689381837844849, + "layer_12_max_l1_linf_norm": 0.9798452854156494, + "layer_12_max_spectral_norm": 0.12750272452831268, + "total_sharpness": 0.0043458049185574055, + "ip_v_neg_g": 0.026877038180828094, + "cos_v_neg_g": 0.012905565090477467, + "v_norm": 3.609609603881836, + "g_norm": 0.5769579410552979, + "hv_norm": 0.35797205567359924, + "cos_v_hv": 0.043820902705192566, + "hg_norm": 0.9437618851661682, + "cos_g_hg": 0.47502630949020386, + "v_parallel_norm": 0.002626679139211774, + "v_perp_norm": 3.6096086502075195, + "layer_1_v_norm": 0.5754238963127136, + "layer_1_cos_v_neg_g": 0.0353890061378479, + "layer_2_v_norm": 0.6238077282905579, + "layer_2_cos_v_neg_g": 0.02910541370511055, + "layer_3_v_norm": 0.6486547589302063, + "layer_3_cos_v_neg_g": 0.030957819893956184, + "layer_4_v_norm": 0.7125692963600159, + "layer_4_cos_v_neg_g": 
0.023750104010105133, + "layer_5_v_norm": 0.7447097301483154, + "layer_5_cos_v_neg_g": 0.022415563464164734, + "layer_6_v_norm": 0.7524949312210083, + "layer_6_cos_v_neg_g": 0.02515229396522045, + "layer_7_v_norm": 0.7751098871231079, + "layer_7_cos_v_neg_g": 0.017207905650138855, + "layer_8_v_norm": 0.7825725674629211, + "layer_8_cos_v_neg_g": 0.01728091947734356, + "layer_9_v_norm": 0.7995342016220093, + "layer_9_cos_v_neg_g": 0.014921746216714382, + "layer_10_v_norm": 0.8042981028556824, + "layer_10_cos_v_neg_g": 0.02147691510617733, + "layer_11_v_norm": 0.8000339865684509, + "layer_11_cos_v_neg_g": 0.02063983492553234, + "layer_12_v_norm": 0.7689381837844849, + "layer_12_cos_v_neg_g": 0.02047002501785755, + "layer_1_sharpness": 0.013052677735686302, + "layer_2_sharpness": 0.0012267789570614696, + "layer_3_sharpness": 0.001583175384439528, + "layer_4_sharpness": 0.0006477257120423019, + "layer_5_sharpness": 0.0005117007531225681, + "layer_6_sharpness": 0.0006521895993500948, + "layer_7_sharpness": 0.0006937258294783533, + "layer_8_sharpness": 0.0008736076415516436, + "layer_9_sharpness": 0.0010192524641752243, + "layer_10_sharpness": 0.0011864678235724568, + "layer_11_sharpness": 0.0013122936943545938, + "layer_12_sharpness": 0.0014441460371017456 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_1500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..d2ac87b47c9ff85d49613b256e96d126fb078b41 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.107179164886475, + "total_l1_linf_norm": 36021.88671875, + "total_spectral_norm": 4.107178688049316, + "layer_1_update_fnorm": 0.7659117579460144, + "layer_1_max_l1_linf_norm": 1.1250886917114258, + "layer_1_max_spectral_norm": 0.15558750927448273, + "layer_2_update_fnorm": 0.8232071995735168, + "layer_2_max_l1_linf_norm": 0.9544382095336914, + "layer_2_max_spectral_norm": 0.13300281763076782, + "layer_3_update_fnorm": 0.8584813475608826, + "layer_3_max_l1_linf_norm": 0.9796375036239624, + "layer_3_max_spectral_norm": 0.12859177589416504, + "layer_4_update_fnorm": 0.9061647653579712, + "layer_4_max_l1_linf_norm": 0.9623483419418335, + "layer_4_max_spectral_norm": 0.12571445107460022, + "layer_5_update_fnorm": 0.9569198489189148, + "layer_5_max_l1_linf_norm": 0.9972022771835327, + "layer_5_max_spectral_norm": 0.13340236246585846, + "layer_6_update_fnorm": 0.9789425134658813, + "layer_6_max_l1_linf_norm": 0.9929001331329346, + "layer_6_max_spectral_norm": 0.14891986548900604, + "layer_7_update_fnorm": 0.973647952079773, + "layer_7_max_l1_linf_norm": 1.0075948238372803, + "layer_7_max_spectral_norm": 0.14123335480690002, + "layer_8_update_fnorm": 0.9577118158340454, + "layer_8_max_l1_linf_norm": 0.9952052235603333, + "layer_8_max_spectral_norm": 0.11238493025302887, + "layer_9_update_fnorm": 0.9544127583503723, + "layer_9_max_l1_linf_norm": 1.0542032718658447, + "layer_9_max_spectral_norm": 0.12542647123336792, + "layer_10_update_fnorm": 0.9413183927536011, + "layer_10_max_l1_linf_norm": 1.0090515613555908, + "layer_10_max_spectral_norm": 0.12634633481502533, + "layer_11_update_fnorm": 0.9442594051361084, + "layer_11_max_l1_linf_norm": 1.084203839302063, + "layer_11_max_spectral_norm": 0.14662668108940125, + "layer_12_update_fnorm": 
0.9294596910476685, + "layer_12_max_l1_linf_norm": 1.0302406549453735, + "layer_12_max_spectral_norm": 0.18026718497276306, + "total_sharpness": 0.0037273424677550793, + "ip_v_neg_g": 0.034328311681747437, + "cos_v_neg_g": 0.013707942329347134, + "v_norm": 4.107179164886475, + "g_norm": 0.6097285151481628, + "hv_norm": 0.4010503888130188, + "cos_v_hv": 0.0381719172000885, + "hg_norm": 2.0816173553466797, + "cos_g_hg": 0.4758315980434418, + "v_parallel_norm": 0.0026181957218796015, + "v_perp_norm": 4.107178211212158, + "layer_1_v_norm": 0.7659117579460144, + "layer_1_cos_v_neg_g": 0.025476625189185143, + "layer_2_v_norm": 0.8232071995735168, + "layer_2_cos_v_neg_g": 0.015880361199378967, + "layer_3_v_norm": 0.8584813475608826, + "layer_3_cos_v_neg_g": 0.01962953805923462, + "layer_4_v_norm": 0.9061647653579712, + "layer_4_cos_v_neg_g": 0.020669469609856606, + "layer_5_v_norm": 0.9569198489189148, + "layer_5_cos_v_neg_g": 0.01891379989683628, + "layer_6_v_norm": 0.9789425134658813, + "layer_6_cos_v_neg_g": 0.018689105287194252, + "layer_7_v_norm": 0.973647952079773, + "layer_7_cos_v_neg_g": 0.018056875094771385, + "layer_8_v_norm": 0.9577118158340454, + "layer_8_cos_v_neg_g": 0.02241550013422966, + "layer_9_v_norm": 0.9544127583503723, + "layer_9_cos_v_neg_g": 0.022752586752176285, + "layer_10_v_norm": 0.9413183927536011, + "layer_10_cos_v_neg_g": 0.02384016662836075, + "layer_11_v_norm": 0.9442593455314636, + "layer_11_cos_v_neg_g": 0.024148495867848396, + "layer_12_v_norm": 0.9294596910476685, + "layer_12_cos_v_neg_g": 0.03414958342909813, + "layer_1_sharpness": 0.004835214931517839, + "layer_2_sharpness": 0.0002377027558395639, + "layer_3_sharpness": 0.0009835968958213925, + "layer_4_sharpness": 0.0006629009731113911, + "layer_5_sharpness": 0.0005479652318172157, + "layer_6_sharpness": 0.0005283659556880593, + "layer_7_sharpness": 0.0005600283620879054, + "layer_8_sharpness": 0.0010341200977563858, + "layer_9_sharpness": 0.0012342686532065272, + "layer_10_sharpness": 0.0011098275426775217, + "layer_11_sharpness": 0.001146739232353866, + "layer_12_sharpness": 0.0023877639323472977 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_2000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..3c0d3f18015fb57d5a6f4127f847cf98d75433a4 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.576204299926758, + "total_l1_linf_norm": 40645.4375, + "total_spectral_norm": 4.576204776763916, + "layer_1_update_fnorm": 1.0123820304870605, + "layer_1_max_l1_linf_norm": 1.4960980415344238, + "layer_1_max_spectral_norm": 0.2479071319103241, + "layer_2_update_fnorm": 1.12661612033844, + "layer_2_max_l1_linf_norm": 1.525779366493225, + "layer_2_max_spectral_norm": 0.26224711537361145, + "layer_3_update_fnorm": 1.0719764232635498, + "layer_3_max_l1_linf_norm": 1.1864783763885498, + "layer_3_max_spectral_norm": 0.2479187697172165, + "layer_4_update_fnorm": 1.1609917879104614, + "layer_4_max_l1_linf_norm": 1.1482396125793457, + "layer_4_max_spectral_norm": 0.21040399372577667, + "layer_5_update_fnorm": 1.1820523738861084, + "layer_5_max_l1_linf_norm": 1.1735599040985107, + "layer_5_max_spectral_norm": 0.23942703008651733, + "layer_6_update_fnorm": 1.1783909797668457, + "layer_6_max_l1_linf_norm": 
1.1178057193756104, + "layer_6_max_spectral_norm": 0.24585649371147156, + "layer_7_update_fnorm": 1.133188009262085, + "layer_7_max_l1_linf_norm": 1.1166207790374756, + "layer_7_max_spectral_norm": 0.19891758263111115, + "layer_8_update_fnorm": 1.0814803838729858, + "layer_8_max_l1_linf_norm": 1.176501750946045, + "layer_8_max_spectral_norm": 0.13759355247020721, + "layer_9_update_fnorm": 1.0420855283737183, + "layer_9_max_l1_linf_norm": 1.3703885078430176, + "layer_9_max_spectral_norm": 0.15335185825824738, + "layer_10_update_fnorm": 1.0062295198440552, + "layer_10_max_l1_linf_norm": 1.160526990890503, + "layer_10_max_spectral_norm": 0.14322274923324585, + "layer_11_update_fnorm": 1.0078730583190918, + "layer_11_max_l1_linf_norm": 1.2735464572906494, + "layer_11_max_spectral_norm": 0.1705692708492279, + "layer_12_update_fnorm": 1.0050045251846313, + "layer_12_max_l1_linf_norm": 1.1167643070220947, + "layer_12_max_spectral_norm": 0.15658141672611237, + "total_sharpness": 0.005737450905144215, + "ip_v_neg_g": 0.07772979140281677, + "cos_v_neg_g": 0.02055760659277439, + "v_norm": 4.576204299926758, + "g_norm": 0.8262463212013245, + "hv_norm": 0.8954346776008606, + "cos_v_hv": 0.029321787878870964, + "hg_norm": 5.350620746612549, + "cos_g_hg": 0.6225531697273254, + "v_parallel_norm": 0.0032345526851713657, + "v_perp_norm": 4.576203346252441, + "layer_1_v_norm": 1.0123820304870605, + "layer_1_cos_v_neg_g": 0.040800634771585464, + "layer_2_v_norm": 1.12661612033844, + "layer_2_cos_v_neg_g": 0.05308108776807785, + "layer_3_v_norm": 1.0719764232635498, + "layer_3_cos_v_neg_g": 0.033481065183877945, + "layer_4_v_norm": 1.1609917879104614, + "layer_4_cos_v_neg_g": 0.03162740170955658, + "layer_5_v_norm": 1.1820523738861084, + "layer_5_cos_v_neg_g": 0.024722743779420853, + "layer_6_v_norm": 1.1783909797668457, + "layer_6_cos_v_neg_g": 0.022831782698631287, + "layer_7_v_norm": 1.133188009262085, + "layer_7_cos_v_neg_g": 0.021131323650479317, + "layer_8_v_norm": 1.0814803838729858, + "layer_8_cos_v_neg_g": 0.021899964660406113, + "layer_9_v_norm": 1.0420855283737183, + "layer_9_cos_v_neg_g": 0.02009989507496357, + "layer_10_v_norm": 1.0062295198440552, + "layer_10_cos_v_neg_g": 0.02147865667939186, + "layer_11_v_norm": 1.0078730583190918, + "layer_11_cos_v_neg_g": 0.02115672454237938, + "layer_12_v_norm": 1.0050045251846313, + "layer_12_cos_v_neg_g": 0.018374904990196228, + "layer_1_sharpness": 0.006412668153643608, + "layer_2_sharpness": 0.00536722457036376, + "layer_3_sharpness": 0.005003959871828556, + "layer_4_sharpness": 0.0018187493551522493, + "layer_5_sharpness": 0.0008533347863703966, + "layer_6_sharpness": 0.0005837384378537536, + "layer_7_sharpness": 0.0008250193786807358, + "layer_8_sharpness": 0.0010414168937131763, + "layer_9_sharpness": 0.001113430829718709, + "layer_10_sharpness": 0.0010030108969658613, + "layer_11_sharpness": 0.0011132225627079606, + "layer_12_sharpness": 0.0013866961235180497 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_2500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..c4d32b102e8d0f9f8c397b1892e559c19c816add --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.301000595092773, + "total_l1_linf_norm": 38007.1015625, + "total_spectral_norm": 
4.301000118255615, + "layer_1_update_fnorm": 0.9458709359169006, + "layer_1_max_l1_linf_norm": 1.2784528732299805, + "layer_1_max_spectral_norm": 0.15588277578353882, + "layer_2_update_fnorm": 0.9415019154548645, + "layer_2_max_l1_linf_norm": 1.0784828662872314, + "layer_2_max_spectral_norm": 0.14027351140975952, + "layer_3_update_fnorm": 0.9356870055198669, + "layer_3_max_l1_linf_norm": 1.068177342414856, + "layer_3_max_spectral_norm": 0.12766854465007782, + "layer_4_update_fnorm": 0.9813969731330872, + "layer_4_max_l1_linf_norm": 1.0606544017791748, + "layer_4_max_spectral_norm": 0.1141616553068161, + "layer_5_update_fnorm": 1.0143054723739624, + "layer_5_max_l1_linf_norm": 1.092444896697998, + "layer_5_max_spectral_norm": 0.10201331228017807, + "layer_6_update_fnorm": 1.0260426998138428, + "layer_6_max_l1_linf_norm": 1.0734766721725464, + "layer_6_max_spectral_norm": 0.10438322275876999, + "layer_7_update_fnorm": 1.0171256065368652, + "layer_7_max_l1_linf_norm": 1.0631742477416992, + "layer_7_max_spectral_norm": 0.09364940971136093, + "layer_8_update_fnorm": 1.0110161304473877, + "layer_8_max_l1_linf_norm": 1.0344247817993164, + "layer_8_max_spectral_norm": 0.09165295213460922, + "layer_9_update_fnorm": 1.0100407600402832, + "layer_9_max_l1_linf_norm": 1.0597858428955078, + "layer_9_max_spectral_norm": 0.11548596620559692, + "layer_10_update_fnorm": 0.9937353134155273, + "layer_10_max_l1_linf_norm": 1.0596997737884521, + "layer_10_max_spectral_norm": 0.13765142858028412, + "layer_11_update_fnorm": 0.987762451171875, + "layer_11_max_l1_linf_norm": 1.1729989051818848, + "layer_11_max_spectral_norm": 0.1455170065164566, + "layer_12_update_fnorm": 0.9771077036857605, + "layer_12_max_l1_linf_norm": 1.167922019958496, + "layer_12_max_spectral_norm": 0.1600906401872635, + "total_sharpness": 0.0026513435877859592, + "ip_v_neg_g": 0.02533276565372944, + "cos_v_neg_g": 0.008935975842177868, + "v_norm": 4.301000595092773, + "g_norm": 0.6591300368309021, + "hv_norm": 0.4308145344257355, + "cos_v_hv": 0.02646946907043457, + "hg_norm": 2.4228341579437256, + "cos_g_hg": 0.5687701106071472, + "v_parallel_norm": 0.0017117890529334545, + "v_perp_norm": 4.301000118255615, + "layer_1_v_norm": 0.9458709359169006, + "layer_1_cos_v_neg_g": 0.019278697669506073, + "layer_2_v_norm": 0.9415019154548645, + "layer_2_cos_v_neg_g": 0.008370157331228256, + "layer_3_v_norm": 0.9356870055198669, + "layer_3_cos_v_neg_g": 0.011158718727529049, + "layer_4_v_norm": 0.9813969731330872, + "layer_4_cos_v_neg_g": 0.010971774347126484, + "layer_5_v_norm": 1.0143054723739624, + "layer_5_cos_v_neg_g": 0.009168978780508041, + "layer_6_v_norm": 1.0260426998138428, + "layer_6_cos_v_neg_g": 0.009508042596280575, + "layer_7_v_norm": 1.0171256065368652, + "layer_7_cos_v_neg_g": 0.010579369030892849, + "layer_8_v_norm": 1.0110161304473877, + "layer_8_cos_v_neg_g": 0.010506905615329742, + "layer_9_v_norm": 1.0100407600402832, + "layer_9_cos_v_neg_g": 0.010624644346535206, + "layer_10_v_norm": 0.9937353134155273, + "layer_10_cos_v_neg_g": 0.014865508303046227, + "layer_11_v_norm": 0.9877623915672302, + "layer_11_cos_v_neg_g": 0.01654536835849285, + "layer_12_v_norm": 0.9771077036857605, + "layer_12_cos_v_neg_g": 0.019475799053907394, + "layer_1_sharpness": 0.00412621907889843, + "layer_2_sharpness": 0.00017717300215736032, + "layer_3_sharpness": 0.0005157762789167464, + "layer_4_sharpness": 0.00041111125028692186, + "layer_5_sharpness": 0.0002818304346874356, + "layer_6_sharpness": 0.0002622923057060689, + "layer_7_sharpness": 
0.0003550105029717088, + "layer_8_sharpness": 0.0006049637449905276, + "layer_9_sharpness": 0.0006099162856116891, + "layer_10_sharpness": 0.0008460492244921625, + "layer_11_sharpness": 0.000883571628946811, + "layer_12_sharpness": 0.001503055333159864 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_3000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..0332ce081728b36b92df2b5cd78e00610acf6777 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.406560897827148, + "total_l1_linf_norm": 39069.1640625, + "total_spectral_norm": 4.406561374664307, + "layer_1_update_fnorm": 1.0253831148147583, + "layer_1_max_l1_linf_norm": 1.169176459312439, + "layer_1_max_spectral_norm": 0.1793913096189499, + "layer_2_update_fnorm": 0.9666053056716919, + "layer_2_max_l1_linf_norm": 1.0717545747756958, + "layer_2_max_spectral_norm": 0.1451643854379654, + "layer_3_update_fnorm": 0.9837064146995544, + "layer_3_max_l1_linf_norm": 1.0793219804763794, + "layer_3_max_spectral_norm": 0.13333021104335785, + "layer_4_update_fnorm": 1.015711784362793, + "layer_4_max_l1_linf_norm": 1.118853211402893, + "layer_4_max_spectral_norm": 0.11663637310266495, + "layer_5_update_fnorm": 1.040750503540039, + "layer_5_max_l1_linf_norm": 1.0828840732574463, + "layer_5_max_spectral_norm": 0.104094959795475, + "layer_6_update_fnorm": 1.0460885763168335, + "layer_6_max_l1_linf_norm": 1.0838944911956787, + "layer_6_max_spectral_norm": 0.09528062492609024, + "layer_7_update_fnorm": 1.0460811853408813, + "layer_7_max_l1_linf_norm": 1.071800947189331, + "layer_7_max_spectral_norm": 0.09131184220314026, + "layer_8_update_fnorm": 1.0404249429702759, + "layer_8_max_l1_linf_norm": 1.0961260795593262, + "layer_8_max_spectral_norm": 0.08986568450927734, + "layer_9_update_fnorm": 1.0341920852661133, + "layer_9_max_l1_linf_norm": 1.062120795249939, + "layer_9_max_spectral_norm": 0.11297908425331116, + "layer_10_update_fnorm": 1.0239300727844238, + "layer_10_max_l1_linf_norm": 1.1261062622070312, + "layer_10_max_spectral_norm": 0.13562577962875366, + "layer_11_update_fnorm": 1.039965271949768, + "layer_11_max_l1_linf_norm": 1.2079198360443115, + "layer_11_max_spectral_norm": 0.15079376101493835, + "layer_12_update_fnorm": 1.0312265157699585, + "layer_12_max_l1_linf_norm": 1.1187849044799805, + "layer_12_max_spectral_norm": 0.1568770557641983, + "total_sharpness": 0.0020099207758903503, + "ip_v_neg_g": 0.02171325497329235, + "cos_v_neg_g": 0.00762272160500288, + "v_norm": 4.406560897827148, + "g_norm": 0.6464204788208008, + "hv_norm": 0.39597636461257935, + "cos_v_hv": 0.02236708626151085, + "hg_norm": 2.428823232650757, + "cos_g_hg": 0.5211487412452698, + "v_parallel_norm": 0.0013232121709734201, + "v_perp_norm": 4.40656042098999, + "layer_1_v_norm": 1.0253831148147583, + "layer_1_cos_v_neg_g": 0.016519738361239433, + "layer_2_v_norm": 0.9666053056716919, + "layer_2_cos_v_neg_g": 0.011087657883763313, + "layer_3_v_norm": 0.9837063550949097, + "layer_3_cos_v_neg_g": 0.011504275724291801, + "layer_4_v_norm": 1.015711784362793, + "layer_4_cos_v_neg_g": 0.00716882199048996, + "layer_5_v_norm": 1.040750503540039, + "layer_5_cos_v_neg_g": 0.006494901608675718, + "layer_6_v_norm": 1.0460885763168335, + "layer_6_cos_v_neg_g": 0.006589776836335659, + 
"layer_7_v_norm": 1.0460811853408813, + "layer_7_cos_v_neg_g": 0.006313779391348362, + "layer_8_v_norm": 1.0404249429702759, + "layer_8_cos_v_neg_g": 0.007485882844775915, + "layer_9_v_norm": 1.0341920852661133, + "layer_9_cos_v_neg_g": 0.010161804035305977, + "layer_10_v_norm": 1.0239300727844238, + "layer_10_cos_v_neg_g": 0.013622276484966278, + "layer_11_v_norm": 1.039965271949768, + "layer_11_cos_v_neg_g": 0.015381143428385258, + "layer_12_v_norm": 1.0312265157699585, + "layer_12_cos_v_neg_g": 0.01551813818514347, + "layer_1_sharpness": 0.00258623156696558, + "layer_2_sharpness": 0.00046429759822785854, + "layer_3_sharpness": 0.0010824193013831973, + "layer_4_sharpness": 0.00040681150858290493, + "layer_5_sharpness": 0.0002940153353847563, + "layer_6_sharpness": 0.0001851347042247653, + "layer_7_sharpness": 0.0002770695136860013, + "layer_8_sharpness": 0.0003823573642875999, + "layer_9_sharpness": 0.00047315258416347206, + "layer_10_sharpness": 0.0006302223191596568, + "layer_11_sharpness": 0.0006795426015742123, + "layer_12_sharpness": 0.0008793423767201602 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_3500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..8914caf496114596c01a6334c0334bb000a41269 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.358910083770752, + "total_l1_linf_norm": 38619.21875, + "total_spectral_norm": 4.358910083770752, + "layer_1_update_fnorm": 0.961272120475769, + "layer_1_max_l1_linf_norm": 1.1891107559204102, + "layer_1_max_spectral_norm": 0.14960011839866638, + "layer_2_update_fnorm": 0.9587472677230835, + "layer_2_max_l1_linf_norm": 1.0416951179504395, + "layer_2_max_spectral_norm": 0.1381898820400238, + "layer_3_update_fnorm": 0.9678000211715698, + "layer_3_max_l1_linf_norm": 1.0396935939788818, + "layer_3_max_spectral_norm": 0.13804131746292114, + "layer_4_update_fnorm": 1.0077160596847534, + "layer_4_max_l1_linf_norm": 1.0440833568572998, + "layer_4_max_spectral_norm": 0.11228201538324356, + "layer_5_update_fnorm": 1.0315191745758057, + "layer_5_max_l1_linf_norm": 1.0692987442016602, + "layer_5_max_spectral_norm": 0.0970110297203064, + "layer_6_update_fnorm": 1.042388916015625, + "layer_6_max_l1_linf_norm": 1.0536837577819824, + "layer_6_max_spectral_norm": 0.09868557006120682, + "layer_7_update_fnorm": 1.0416110754013062, + "layer_7_max_l1_linf_norm": 1.0541088581085205, + "layer_7_max_spectral_norm": 0.09391244500875473, + "layer_8_update_fnorm": 1.0355100631713867, + "layer_8_max_l1_linf_norm": 1.0556727647781372, + "layer_8_max_spectral_norm": 0.08864107728004456, + "layer_9_update_fnorm": 1.0270800590515137, + "layer_9_max_l1_linf_norm": 1.0669379234313965, + "layer_9_max_spectral_norm": 0.10918596386909485, + "layer_10_update_fnorm": 0.9993422031402588, + "layer_10_max_l1_linf_norm": 1.0734010934829712, + "layer_10_max_spectral_norm": 0.1255146563053131, + "layer_11_update_fnorm": 1.009167194366455, + "layer_11_max_l1_linf_norm": 1.1216115951538086, + "layer_11_max_spectral_norm": 0.14436489343643188, + "layer_12_update_fnorm": 0.9977074861526489, + "layer_12_max_l1_linf_norm": 1.2238574028015137, + "layer_12_max_spectral_norm": 0.1460159718990326, + "total_sharpness": 0.0016814139671623707, + "ip_v_neg_g": 0.014964347705245018, + 
"cos_v_neg_g": 0.004780945368111134, + "v_norm": 4.358910083770752, + "g_norm": 0.7180688381195068, + "hv_norm": 0.35761088132858276, + "cos_v_hv": 0.020494714379310608, + "hg_norm": 3.013154983520508, + "cos_g_hg": 0.6065897941589355, + "v_parallel_norm": 0.0007134565967135131, + "v_perp_norm": 4.358910083770752, + "layer_1_v_norm": 0.961272120475769, + "layer_1_cos_v_neg_g": 0.01202012412250042, + "layer_2_v_norm": 0.9587472677230835, + "layer_2_cos_v_neg_g": 0.005229651927947998, + "layer_3_v_norm": 0.9678000807762146, + "layer_3_cos_v_neg_g": 0.006226684432476759, + "layer_4_v_norm": 1.0077160596847534, + "layer_4_cos_v_neg_g": 0.004265145864337683, + "layer_5_v_norm": 1.0315191745758057, + "layer_5_cos_v_neg_g": 0.003581140423193574, + "layer_6_v_norm": 1.042388916015625, + "layer_6_cos_v_neg_g": 0.004852034151554108, + "layer_7_v_norm": 1.0416110754013062, + "layer_7_cos_v_neg_g": 0.004830070771276951, + "layer_8_v_norm": 1.0355100631713867, + "layer_8_cos_v_neg_g": 0.005163793917745352, + "layer_9_v_norm": 1.0270800590515137, + "layer_9_cos_v_neg_g": 0.005790662486106157, + "layer_10_v_norm": 0.9993422031402588, + "layer_10_cos_v_neg_g": 0.008432582952082157, + "layer_11_v_norm": 1.0091670751571655, + "layer_11_cos_v_neg_g": 0.010417250916361809, + "layer_12_v_norm": 0.9977074861526489, + "layer_12_cos_v_neg_g": 0.009611379355192184, + "layer_1_sharpness": 0.0017079926328733563, + "layer_2_sharpness": 0.0001851787237683311, + "layer_3_sharpness": 0.0008332410361617804, + "layer_4_sharpness": 0.00027736349147744477, + "layer_5_sharpness": 0.0002244670904474333, + "layer_6_sharpness": 0.0002827898715622723, + "layer_7_sharpness": 0.0003063015756197274, + "layer_8_sharpness": 0.00042457523522898555, + "layer_9_sharpness": 0.0004671381029766053, + "layer_10_sharpness": 0.0005662432522512972, + "layer_11_sharpness": 0.0005262824706733227, + "layer_12_sharpness": 0.0008210236555896699 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..b83bde1efad112609db95580afa9f5114df759a6 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.6158087253570557, + "total_l1_linf_norm": 22494.05078125, + "total_spectral_norm": 2.6158084869384766, + "layer_1_update_fnorm": 0.45312464237213135, + "layer_1_max_l1_linf_norm": 0.7589386701583862, + "layer_1_max_spectral_norm": 0.12094289809465408, + "layer_2_update_fnorm": 0.42241835594177246, + "layer_2_max_l1_linf_norm": 0.5523300170898438, + "layer_2_max_spectral_norm": 0.08671055734157562, + "layer_3_update_fnorm": 0.4375052750110626, + "layer_3_max_l1_linf_norm": 0.6489928364753723, + "layer_3_max_spectral_norm": 0.094327911734581, + "layer_4_update_fnorm": 0.47793033719062805, + "layer_4_max_l1_linf_norm": 0.6799646615982056, + "layer_4_max_spectral_norm": 0.12190935760736465, + "layer_5_update_fnorm": 0.49835386872291565, + "layer_5_max_l1_linf_norm": 0.7138664722442627, + "layer_5_max_spectral_norm": 0.11418677866458893, + "layer_6_update_fnorm": 0.5464693307876587, + "layer_6_max_l1_linf_norm": 0.8190258741378784, + "layer_6_max_spectral_norm": 0.12351862341165543, + "layer_7_update_fnorm": 0.5684593915939331, + "layer_7_max_l1_linf_norm": 0.7158530950546265, + "layer_7_max_spectral_norm": 
0.1141185536980629, + "layer_8_update_fnorm": 0.5947886109352112, + "layer_8_max_l1_linf_norm": 0.6719858646392822, + "layer_8_max_spectral_norm": 0.11305910348892212, + "layer_9_update_fnorm": 0.6094967722892761, + "layer_9_max_l1_linf_norm": 0.6746588349342346, + "layer_9_max_spectral_norm": 0.11189468204975128, + "layer_10_update_fnorm": 0.6037663817405701, + "layer_10_max_l1_linf_norm": 0.6916928887367249, + "layer_10_max_spectral_norm": 0.11107578873634338, + "layer_11_update_fnorm": 0.5838567018508911, + "layer_11_max_l1_linf_norm": 0.6775433421134949, + "layer_11_max_spectral_norm": 0.1187293604016304, + "layer_12_update_fnorm": 0.5129277110099792, + "layer_12_max_l1_linf_norm": 0.6827594637870789, + "layer_12_max_spectral_norm": 0.14242249727249146, + "total_sharpness": 0.018227344378829002, + "ip_v_neg_g": 0.08620208501815796, + "cos_v_neg_g": 0.040880415588617325, + "v_norm": 2.6158087253570557, + "g_norm": 0.8061140775680542, + "hv_norm": 0.6059096455574036, + "cos_v_hv": 0.07869035005569458, + "hg_norm": 2.05434513092041, + "cos_g_hg": 0.5667916536331177, + "v_parallel_norm": 0.0049042063765227795, + "v_perp_norm": 2.6158041954040527, + "layer_1_v_norm": 0.45312464237213135, + "layer_1_cos_v_neg_g": 0.09280724078416824, + "layer_2_v_norm": 0.42241835594177246, + "layer_2_cos_v_neg_g": 0.12498386204242706, + "layer_3_v_norm": 0.4375052750110626, + "layer_3_cos_v_neg_g": 0.1349673867225647, + "layer_4_v_norm": 0.47793033719062805, + "layer_4_cos_v_neg_g": 0.10910581797361374, + "layer_5_v_norm": 0.49835386872291565, + "layer_5_cos_v_neg_g": 0.10563334822654724, + "layer_6_v_norm": 0.5464693307876587, + "layer_6_cos_v_neg_g": 0.09644411504268646, + "layer_7_v_norm": 0.5684593915939331, + "layer_7_cos_v_neg_g": 0.07565263658761978, + "layer_8_v_norm": 0.5947886109352112, + "layer_8_cos_v_neg_g": 0.05841636285185814, + "layer_9_v_norm": 0.6094967722892761, + "layer_9_cos_v_neg_g": 0.054739490151405334, + "layer_10_v_norm": 0.6037663817405701, + "layer_10_cos_v_neg_g": 0.053352586925029755, + "layer_11_v_norm": 0.5838567018508911, + "layer_11_cos_v_neg_g": 0.07374712824821472, + "layer_12_v_norm": 0.5129277110099792, + "layer_12_cos_v_neg_g": 0.14517955482006073, + "layer_1_sharpness": 0.05888858810067177, + "layer_2_sharpness": 0.007492405362427235, + "layer_3_sharpness": 0.008034424856305122, + "layer_4_sharpness": 0.004934342578053474, + "layer_5_sharpness": 0.004959658719599247, + "layer_6_sharpness": 0.0036209644749760628, + "layer_7_sharpness": 0.0029907242860645056, + "layer_8_sharpness": 0.0018604299984872341, + "layer_9_sharpness": 0.0016453864518553019, + "layer_10_sharpness": 0.0014977233950048685, + "layer_11_sharpness": 0.0017926051514223218, + "layer_12_sharpness": 0.012585144490003586 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/training_log.txt b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..f66798e455f87fe1c09a88c9f5d6123f88b56633 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.002_mlr_0.01_seed_43/training_log.txt @@ -0,0 +1,5186 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. 
+ +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" 
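+ # shard layout recap: a 256-int32 header (magic 20240520, version 1, claimed token count), then the tokens themselves stored as uint16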
+ return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in 
range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. 
+ # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. 
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
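+ # The loss assembled below is differentiated twice: autograd.grad(..., create_graph=True) gives the
+ # gradient g, and a second autograd.grad of <g, v> gives the Hessian-vector product Hv. The reported
+ # "total_sharpness" is therefore the directional curvature (Rayleigh quotient) v^T H v / ||v||^2 along
+ # the captured update direction v, estimated on these HVP micro-batches rather than the full objective.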
+ loss_hvp_sum = None
+ shard_was_changed = False
+ n_hvp_micro = max(1, int(sharpness_hvp_microbatches))
+ for _ in range(n_hvp_micro):
+ x_hvp, y_hvp = train_loader.next_batch()
+ x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device)
+ _, loss_mb = model(x_hvp, y_hvp, return_logits=False)
+ loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb)
+ shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard)
+
+ # Restore train_loader state to avoid affecting subsequent training
+ train_loader.current_shard = saved_current_shard
+ train_loader.current_position = saved_current_position
+ if shard_was_changed:
+ train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard])
+
+ loss_hvp = loss_hvp_sum / n_hvp_micro
+ grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True)
+
+ # --- 6. Calculate TOTAL sharpness ---
+ print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...")
+ v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None)
+ hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True)
+
+ vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None)
+ v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v)
+
+ dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG)
+ dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG)
+
+ if v_norm_sq_total.item() > 1e-12:
+ analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item()
+ else:
+ analysis_results["total_sharpness"] = 0.0
+
+ # ---- Alignment metrics between update v and (negative) gradient g ----
+ # g is grads_hvp (first-order grads on the same batch)
+ # ip_v_neg_g = <v, -g> ; cos_v_neg_g = <v, -g> / (||v||*||g||+eps)
+ eps = 1e-12
+ ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None)
+ v_norm = torch.sqrt(v_norm_sq_total + eps)
+ g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None)
+ dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG)
+ dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG)
+ g_norm = torch.sqrt(g_norm_sq + eps)
+ analysis_results["ip_v_neg_g"] = ip_v_neg_g.item()
+ analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item()
+ analysis_results["v_norm"] = v_norm.item()
+ analysis_results["g_norm"] = g_norm.item()
+
+ # ---- Cosine between v and Hv (curvature pull along v) ----
+ hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None)
+ dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG)
+ hv_norm = torch.sqrt(hv_norm_sq + eps)
+ ip_v_hv = vhp_dot_v_total # already reduced AVG
+ analysis_results["hv_norm"] = hv_norm.item()
+ analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item()
+
+ # ---- Cosine between g and Hg (does gradient align with curvature) ----
+ hvp_g_result = torch.autograd.grad(
+ grads_hvp,
+ model.parameters(),
+ grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)],
+ retain_graph=True,
+ allow_unused=True,
+ )
+ ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None))
+ hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None)
+ dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG)
+ dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG)
+ hg_norm =
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts)
+
+# -----------------------------------------------------------------------------
+# int main
+
+def print0(*args, **kwargs):
+ # modified print that only prints from the master process
+ # if this is not a distributed run, it's just a print
+ if int(os.environ.get("RANK", 0)) == 0:
+ print(*args, **kwargs)
+
+if __name__ == "__main__":
+ import time
+ import argparse
+ import tiktoken
+ print0(f"Running pytorch {torch.version.__version__}")
+
+ # default settings will overfit a tiny batch of data
+ # and save model weights and debug state to disk on the first iteration
+ parser = argparse.ArgumentParser()
+ # file system input / output
+ parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on")
+ parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on")
+ parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints")
+ parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48")
+ # token layout for each step of the optimization
+ parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions")
+ parser.add_argument("--sequence_length", type=int, default=64, help="sequence length")
+ parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens")
+ # workload (number of steps)
+ parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run")
+ parser.add_argument("--inference_only", type=int, default=0, help="only run inference")
+ # optimization
+ parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate for the AdamW optimizer")
+ parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations")
+ parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="final learning rate as a fraction of the base learning rate (min_lr = lr * lr_decay_frac)")
+ parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay")
+ parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude")
+ # evaluation
+ parser.add_argument("--val_loss_every", type=int, default=0, help="every how many steps to evaluate val loss?")
+ parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?")
+ parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?")
+ # debugging
+ parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data")
+ # numerics
+ parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores")
+ # memory management
+ parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here")
+ parser.add_argument("--compile", type=int, default=0, help="torch.compile the model")
+ parser.add_argument("--flash", type=int, default=0, help="use flash attention")
+ parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16")
+ parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)")
+ # Muon optimizer specific arguments
+ parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon")
+ parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
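+ # note: with this script's default --weight_decay of 0.0 the two groups receive identical AdamW updates;
+ # the decay/no-decay split only changes behavior when a nonzero weight decay is requested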
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, 
+ device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026602 +step:0 train loss:11.019228 +step:1 train loss:10.861334 +step:2 train loss:10.587090 +step:3 train loss:10.326363 +step:4 train loss:10.129663 +step:5 train loss:9.953169 +step:6 train loss:9.829898 +step:7 train loss:9.710367 +step:8 train loss:9.682987 +step:9 train loss:9.619626 +step:10 train loss:9.592810 +step:11 train loss:9.559793 +step:12 train loss:9.493979 +step:13 train loss:9.427444 +step:14 train loss:9.376856 +step:15 train loss:9.313628 +step:16 train loss:9.259688 +step:17 train loss:9.188357 +step:18 train loss:9.170312 +step:19 train loss:9.090674 +step:20 train loss:9.036892 +step:21 train loss:9.007301 +step:22 train loss:8.870865 +step:23 train loss:8.855896 +step:24 train loss:8.758556 +step:25 train loss:8.730062 +step:26 train loss:8.651588 +step:27 train loss:8.559476 +step:28 train loss:8.537764 +step:29 train loss:8.475361 +step:30 train loss:8.417813 +step:31 train loss:8.307913 +step:32 train loss:8.251320 +step:33 train loss:8.183243 +step:34 train loss:8.172129 +step:35 train loss:8.062376 +step:36 train loss:8.015568 +step:37 train loss:7.919204 +step:38 train loss:7.906311 +step:39 
train loss:7.820454 +step:40 train loss:7.779104 +step:41 train loss:7.693413 +step:42 train loss:7.689801 +step:43 train loss:7.590974 +step:44 train loss:7.533019 +step:45 train loss:7.532155 +step:46 train loss:7.474454 +step:47 train loss:7.476265 +step:48 train loss:7.378042 +step:49 train loss:7.351797 +step:50 train loss:7.275095 +step:51 train loss:7.267156 +step:52 train loss:7.262174 +step:53 train loss:7.227867 +step:54 train loss:7.203585 +step:55 train loss:7.137971 +step:56 train loss:7.101724 +step:57 train loss:7.121361 +step:58 train loss:7.039593 +step:59 train loss:7.059198 +step:60 train loss:7.028847 +step:61 train loss:6.993829 +step:62 train loss:6.979819 +step:63 train loss:7.037760 +step:64 train loss:6.914641 +step:65 train loss:6.945359 +step:66 train loss:6.938402 +step:67 train loss:6.969873 +step:68 train loss:6.905214 +step:69 train loss:6.872995 +step:70 train loss:6.828890 +step:71 train loss:6.810189 +step:72 train loss:6.850285 +step:73 train loss:6.781825 +step:74 train loss:6.815260 +step:75 train loss:6.749987 +step:76 train loss:6.830794 +step:77 train loss:6.764759 +step:78 train loss:6.518277 +step:79 train loss:6.699255 +step:80 train loss:6.671568 +step:81 train loss:6.755263 +step:82 train loss:6.705748 +step:83 train loss:6.663425 +step:84 train loss:6.627273 +step:85 train loss:6.614153 +step:86 train loss:6.613850 +step:87 train loss:6.582413 +step:88 train loss:6.592120 +step:89 train loss:6.549853 +step:90 train loss:6.590276 +step:91 train loss:6.595625 +step:92 train loss:6.602292 +step:93 train loss:6.555798 +step:94 train loss:6.513842 +step:95 train loss:6.469264 +step:96 train loss:6.562003 +step:97 train loss:6.508786 +step:98 train loss:6.500789 +step:99 train loss:6.470276 +step:100 train loss:6.489302 +step:101 train loss:6.416803 +step:102 train loss:6.436895 +step:103 train loss:6.430447 +step:104 train loss:6.448421 +step:105 train loss:6.507773 +step:106 train loss:6.469044 +step:107 train loss:6.415146 +step:108 train loss:6.434362 +step:109 train loss:6.474219 +step:110 train loss:6.400624 +step:111 train loss:6.410734 +step:112 train loss:6.409776 +step:113 train loss:6.366043 +step:114 train loss:6.438738 +step:115 train loss:6.397064 +step:116 train loss:6.388556 +step:117 train loss:6.319709 +step:118 train loss:6.385408 +step:119 train loss:6.327734 +step:120 train loss:6.348185 +step:121 train loss:6.264746 +step:122 train loss:6.359825 +step:123 train loss:6.288512 +step:124 train loss:6.276616 +step:125 train loss:6.257864 +step:126 train loss:6.363197 +step:127 train loss:6.267816 +step:128 train loss:6.334073 +step:129 train loss:6.321679 +step:130 train loss:6.341717 +step:131 train loss:6.306532 +step:132 train loss:6.240559 +step:133 train loss:6.287705 +step:134 train loss:6.284002 +step:135 train loss:6.190021 +step:136 train loss:6.236642 +step:137 train loss:6.238346 +step:138 train loss:6.187047 +step:139 train loss:6.264328 +step:140 train loss:6.192943 +step:141 train loss:6.282338 +step:142 train loss:6.231460 +step:143 train loss:6.243808 +step:144 train loss:6.227068 +step:145 train loss:6.159646 +step:146 train loss:6.168701 +step:147 train loss:6.217227 +step:148 train loss:6.225862 +step:149 train loss:6.189323 +step:150 train loss:6.179210 +step:151 train loss:6.097577 +step:152 train loss:6.134802 +step:153 train loss:6.122291 +step:154 train loss:6.202641 +step:155 train loss:6.186561 +step:156 train loss:6.206014 +step:157 train loss:6.122856 +step:158 train loss:6.109367 +step:159 train 
loss:6.134073 +step:160 train loss:6.116752 +step:161 train loss:6.119832 +step:162 train loss:6.082980 +step:163 train loss:6.104673 +step:164 train loss:6.101727 +step:165 train loss:6.128450 +step:166 train loss:6.083237 +step:167 train loss:6.086498 +step:168 train loss:6.059664 +step:169 train loss:6.026316 +step:170 train loss:5.987393 +step:171 train loss:6.113015 +step:172 train loss:6.046046 +step:173 train loss:6.098043 +step:174 train loss:6.106144 +step:175 train loss:6.076651 +step:176 train loss:6.029769 +step:177 train loss:6.070611 +step:178 train loss:6.076651 +step:179 train loss:6.039926 +step:180 train loss:6.022821 +step:181 train loss:6.071486 +step:182 train loss:6.002830 +step:183 train loss:6.079101 +step:184 train loss:6.040669 +step:185 train loss:5.974323 +step:186 train loss:6.100650 +step:187 train loss:6.044444 +step:188 train loss:5.880291 +step:189 train loss:6.027306 +step:190 train loss:6.015599 +step:191 train loss:5.943070 +step:192 train loss:5.871243 +step:193 train loss:6.050908 +step:194 train loss:6.071911 +step:195 train loss:6.076711 +step:196 train loss:6.033528 +step:197 train loss:6.016012 +step:198 train loss:5.976697 +step:199 train loss:6.032345 +step:200 train loss:6.085409 +step:201 train loss:6.003817 +step:202 train loss:6.009490 +step:203 train loss:5.968420 +step:204 train loss:5.989583 +step:205 train loss:5.858923 +step:206 train loss:5.972823 +step:207 train loss:5.961306 +step:208 train loss:5.900175 +step:209 train loss:5.880847 +step:210 train loss:5.901819 +step:211 train loss:5.984916 +step:212 train loss:5.944043 +step:213 train loss:5.965181 +step:214 train loss:5.947909 +step:215 train loss:5.956558 +step:216 train loss:5.895508 +step:217 train loss:5.919503 +step:218 train loss:5.878599 +step:219 train loss:5.864004 +step:220 train loss:5.897089 +step:221 train loss:5.855354 +step:222 train loss:5.893506 +step:223 train loss:5.926926 +step:224 train loss:5.902428 +step:225 train loss:5.832891 +step:226 train loss:5.838772 +step:227 train loss:5.895303 +step:228 train loss:5.866217 +step:229 train loss:5.927695 +step:230 train loss:5.826239 +step:231 train loss:5.901433 +step:232 train loss:5.893041 +step:233 train loss:5.881979 +step:234 train loss:5.876184 +step:235 train loss:5.949662 +step:236 train loss:5.901855 +step:237 train loss:5.927462 +step:238 train loss:5.922318 +step:239 train loss:5.828104 +step:240 train loss:5.890548 +step:241 train loss:5.934671 +step:242 train loss:5.918320 +step:243 train loss:5.832091 +step:244 train loss:5.853826 +step:245 train loss:5.818643 +step:246 train loss:5.820739 +step:247 train loss:5.808061 +step:248 train loss:5.773696 +step:249 train loss:5.835539 +step:250 validation loss:5.822805 +step:250 train loss:5.802944 +step:251 train loss:5.844009 +step:252 train loss:5.790364 +step:253 train loss:5.789723 +step:254 train loss:5.756825 +step:255 train loss:5.793142 +step:256 train loss:5.782022 +step:257 train loss:5.832694 +step:258 train loss:5.734014 +step:259 train loss:5.756837 +step:260 train loss:5.737042 +step:261 train loss:5.786653 +step:262 train loss:5.830417 +step:263 train loss:5.788062 +step:264 train loss:5.762704 +step:265 train loss:5.771264 +step:266 train loss:5.743286 +step:267 train loss:5.765219 +step:268 train loss:5.732064 +step:269 train loss:5.757221 +step:270 train loss:5.774129 +step:271 train loss:5.771387 +step:272 train loss:5.720804 +step:273 train loss:5.784801 +step:274 train loss:5.707802 +step:275 train loss:5.767078 +step:276 train 
loss:5.734666 +step:277 train loss:5.726031 +step:278 train loss:5.707774 +step:279 train loss:5.681393 +step:280 train loss:5.760983 +step:281 train loss:5.824955 +step:282 train loss:5.713831 +step:283 train loss:5.747330 +step:284 train loss:5.696109 +step:285 train loss:5.753062 +step:286 train loss:5.715885 +step:287 train loss:5.695151 +step:288 train loss:5.673263 +step:289 train loss:5.685461 +step:290 train loss:5.740462 +step:291 train loss:5.664736 +step:292 train loss:5.723010 +step:293 train loss:5.649328 +step:294 train loss:5.768362 +step:295 train loss:5.680345 +step:296 train loss:5.749357 +step:297 train loss:5.771623 +step:298 train loss:5.667944 +step:299 train loss:5.727709 +step:300 train loss:5.660028 +step:301 train loss:5.686479 +step:302 train loss:5.663590 +step:303 train loss:5.679567 +step:304 train loss:5.706382 +step:305 train loss:5.641803 +step:306 train loss:5.654729 +step:307 train loss:5.667765 +step:308 train loss:5.583588 +step:309 train loss:5.719253 +step:310 train loss:5.679681 +step:311 train loss:5.666775 +step:312 train loss:5.654319 +step:313 train loss:5.669327 +step:314 train loss:5.646987 +step:315 train loss:5.604078 +step:316 train loss:5.599547 +step:317 train loss:5.565401 +step:318 train loss:5.561191 +step:319 train loss:5.639200 +step:320 train loss:5.558930 +step:321 train loss:5.615213 +step:322 train loss:5.611981 +step:323 train loss:5.676416 +step:324 train loss:5.636370 +step:325 train loss:5.663087 +step:326 train loss:5.671094 +step:327 train loss:5.671201 +step:328 train loss:5.636016 +step:329 train loss:5.646519 +step:330 train loss:5.592628 +step:331 train loss:5.618661 +step:332 train loss:5.598336 +step:333 train loss:5.540606 +step:334 train loss:5.633733 +step:335 train loss:5.686812 +step:336 train loss:5.836441 +step:337 train loss:5.726868 +step:338 train loss:5.661695 +step:339 train loss:5.602091 +step:340 train loss:5.603826 +step:341 train loss:5.592367 +step:342 train loss:5.649501 +step:343 train loss:5.617290 +step:344 train loss:5.563759 +step:345 train loss:5.540330 +step:346 train loss:5.581482 +step:347 train loss:5.520452 +step:348 train loss:5.524901 +step:349 train loss:5.464433 +step:350 train loss:5.533477 +step:351 train loss:5.587149 +step:352 train loss:5.540303 +step:353 train loss:5.565097 +step:354 train loss:5.510416 +step:355 train loss:5.558609 +step:356 train loss:5.525207 +step:357 train loss:5.607764 +step:358 train loss:5.649297 +step:359 train loss:5.478008 +step:360 train loss:5.617045 +step:361 train loss:5.580316 +step:362 train loss:5.560294 +step:363 train loss:5.517720 +step:364 train loss:5.638784 +step:365 train loss:5.570756 +step:366 train loss:5.525483 +step:367 train loss:5.547225 +step:368 train loss:5.512973 +step:369 train loss:5.494663 +step:370 train loss:5.541586 +step:371 train loss:5.472928 +step:372 train loss:5.546321 +step:373 train loss:5.484586 +step:374 train loss:5.480125 +step:375 train loss:5.507029 +step:376 train loss:5.508160 +step:377 train loss:5.397804 +step:378 train loss:5.469162 +step:379 train loss:5.512350 +step:380 train loss:5.452761 +step:381 train loss:5.524534 +step:382 train loss:5.540493 +step:383 train loss:5.507309 +step:384 train loss:5.504949 +step:385 train loss:5.491133 +step:386 train loss:5.534386 +step:387 train loss:5.518817 +step:388 train loss:5.485433 +step:389 train loss:5.499506 +step:390 train loss:5.476063 +step:391 train loss:5.483314 +step:392 train loss:5.460849 +step:393 train loss:5.453758 +step:394 train loss:5.497042 
+step:395 train loss:5.420899 +step:396 train loss:5.382095 +step:397 train loss:5.453084 +step:398 train loss:5.434918 +step:399 train loss:5.440712 +step:400 train loss:5.406772 +step:401 train loss:5.462076 +step:402 train loss:5.455675 +step:403 train loss:5.437868 +step:404 train loss:5.438688 +step:405 train loss:5.443373 +step:406 train loss:5.475406 +step:407 train loss:5.469287 +step:408 train loss:5.528482 +step:409 train loss:5.433267 +step:410 train loss:5.394803 +step:411 train loss:5.374175 +step:412 train loss:5.470325 +step:413 train loss:5.344597 +step:414 train loss:5.435500 +step:415 train loss:5.395582 +step:416 train loss:5.415849 +step:417 train loss:5.461890 +step:418 train loss:5.411244 +step:419 train loss:5.405806 +step:420 train loss:5.373088 +step:421 train loss:5.376712 +step:422 train loss:5.364219 +step:423 train loss:5.369666 +step:424 train loss:5.335857 +step:425 train loss:5.412464 +step:426 train loss:5.404067 +step:427 train loss:5.342807 +step:428 train loss:5.389554 +step:429 train loss:5.323731 +step:430 train loss:5.352901 +step:431 train loss:5.378530 +step:432 train loss:5.399217 +step:433 train loss:5.383537 +step:434 train loss:5.345076 +step:435 train loss:5.390457 +step:436 train loss:5.416584 +step:437 train loss:5.373221 +step:438 train loss:5.332677 +step:439 train loss:5.323174 +step:440 train loss:5.355566 +step:441 train loss:5.298264 +step:442 train loss:5.297202 +step:443 train loss:5.360301 +step:444 train loss:5.415011 +step:445 train loss:5.421640 +step:446 train loss:5.377172 +step:447 train loss:5.388768 +step:448 train loss:5.436863 +step:449 train loss:5.387277 +step:450 train loss:5.375743 +step:451 train loss:5.346837 +step:452 train loss:5.414769 +step:453 train loss:5.340866 +step:454 train loss:5.294965 +step:455 train loss:5.368341 +step:456 train loss:5.336903 +step:457 train loss:5.331816 +step:458 train loss:5.335149 +step:459 train loss:5.279406 +step:460 train loss:5.382467 +step:461 train loss:5.344930 +step:462 train loss:5.236481 +step:463 train loss:5.297889 +step:464 train loss:5.332536 +step:465 train loss:5.284811 +step:466 train loss:5.312377 +step:467 train loss:5.290277 +step:468 train loss:5.361179 +step:469 train loss:5.351273 +step:470 train loss:5.284577 +step:471 train loss:5.394029 +step:472 train loss:5.265159 +step:473 train loss:5.337935 +step:474 train loss:5.341104 +step:475 train loss:5.362998 +step:476 train loss:5.355041 +step:477 train loss:5.285661 +step:478 train loss:5.300182 +step:479 train loss:5.306218 +step:480 train loss:5.340128 +step:481 train loss:5.329628 +step:482 train loss:5.242691 +step:483 train loss:5.316965 +step:484 train loss:5.263752 +step:485 train loss:5.241934 +step:486 train loss:5.312285 +step:487 train loss:5.260091 +step:488 train loss:5.255183 +step:489 train loss:5.246960 +step:490 train loss:5.221869 +step:491 train loss:5.231447 +step:492 train loss:5.229911 +step:493 train loss:5.233644 +step:494 train loss:5.224679 +step:495 train loss:5.184752 +step:496 train loss:5.279656 +step:497 train loss:5.157531 +step:498 train loss:5.280296 +step:499 train loss:5.239131 +step:500 validation loss:5.224035 total_sharp:1.8227e-02 L1_sharp:5.8889e-02 L2_sharp:7.4924e-03 L3_sharp:8.0344e-03 L4_sharp:4.9343e-03 L5_sharp:4.9597e-03 L6_sharp:3.6210e-03 L7_sharp:2.9907e-03 L8_sharp:1.8604e-03 L9_sharp:1.6454e-03 L10_sharp:1.4977e-03 L11_sharp:1.7926e-03 L12_sharp:1.2585e-02 total_fnorm:2.6158e+00 total_l1_linf:2.2494e+04 total_spectral:2.6158e+00 L1_fnorm:4.5312e-01 
L2_fnorm:4.2242e-01 L3_fnorm:4.3751e-01 L4_fnorm:4.7793e-01 L5_fnorm:4.9835e-01 L6_fnorm:5.4647e-01 L7_fnorm:5.6846e-01 L8_fnorm:5.9479e-01 L9_fnorm:6.0950e-01 L10_fnorm:6.0377e-01 L11_fnorm:5.8386e-01 L12_fnorm:5.1293e-01 L1_l1linf:7.5894e-01 L2_l1linf:5.5233e-01 L3_l1linf:6.4899e-01 L4_l1linf:6.7996e-01 L5_l1linf:7.1387e-01 L6_l1linf:8.1903e-01 L7_l1linf:7.1585e-01 L8_l1linf:6.7199e-01 L9_l1linf:6.7466e-01 L10_l1linf:6.9169e-01 L11_l1linf:6.7754e-01 L12_l1linf:6.8276e-01 L1_spectral:1.2094e-01 L2_spectral:8.6711e-02 L3_spectral:9.4328e-02 L4_spectral:1.2191e-01 L5_spectral:1.1419e-01 L6_spectral:1.2352e-01 L7_spectral:1.1412e-01 L8_spectral:1.1306e-01 L9_spectral:1.1189e-01 L10_spectral:1.1108e-01 L11_spectral:1.1873e-01 L12_spectral:1.4242e-01 ip_v_neg_g:8.6202e-02 cos_v_neg_g:4.0880e-02 v_norm:2.6158e+00 g_norm:8.0611e-01 hv_norm:6.0591e-01 cos_v_hv:7.8690e-02 hg_norm:2.0543e+00 cos_g_hg:5.6679e-01 v_par:4.9042e-03 v_perp:2.6158e+00 L1_cos_v_neg_g:9.2807e-02 L1_v_norm:4.5312e-01 L2_cos_v_neg_g:1.2498e-01 L2_v_norm:4.2242e-01 L3_cos_v_neg_g:1.3497e-01 L3_v_norm:4.3751e-01 L4_cos_v_neg_g:1.0911e-01 L4_v_norm:4.7793e-01 L5_cos_v_neg_g:1.0563e-01 L5_v_norm:4.9835e-01 L6_cos_v_neg_g:9.6444e-02 L6_v_norm:5.4647e-01 L7_cos_v_neg_g:7.5653e-02 L7_v_norm:5.6846e-01 L8_cos_v_neg_g:5.8416e-02 L8_v_norm:5.9479e-01 L9_cos_v_neg_g:5.4739e-02 L9_v_norm:6.0950e-01 L10_cos_v_neg_g:5.3353e-02 L10_v_norm:6.0377e-01 L11_cos_v_neg_g:7.3747e-02 L11_v_norm:5.8386e-01 L12_cos_v_neg_g:1.4518e-01 L12_v_norm:5.1293e-01 +step:500 train loss:5.242585 +step:501 train loss:5.187428 +step:502 train loss:5.218791 +step:503 train loss:5.141446 +step:504 train loss:5.228648 +step:505 train loss:5.169244 +step:506 train loss:5.183515 +step:507 train loss:5.185343 +step:508 train loss:5.199342 +step:509 train loss:5.184560 +step:510 train loss:5.137048 +step:511 train loss:5.138598 +step:512 train loss:5.125102 +step:513 train loss:5.159715 +step:514 train loss:5.231775 +step:515 train loss:5.179624 +step:516 train loss:5.253427 +step:517 train loss:5.185127 +step:518 train loss:5.160130 +step:519 train loss:5.212135 +step:520 train loss:5.165556 +step:521 train loss:5.157100 +step:522 train loss:5.174859 +step:523 train loss:5.182429 +step:524 train loss:5.123531 +step:525 train loss:5.120163 +step:526 train loss:5.139890 +step:527 train loss:5.135146 +step:528 train loss:5.141730 +step:529 train loss:5.169064 +step:530 train loss:5.118938 +step:531 train loss:5.156121 +step:532 train loss:5.143711 +step:533 train loss:5.082948 +step:534 train loss:5.140788 +step:535 train loss:5.124449 +step:536 train loss:5.187367 +step:537 train loss:5.063275 +step:538 train loss:5.042886 +step:539 train loss:5.162212 +step:540 train loss:5.197460 +step:541 train loss:5.101675 +step:542 train loss:5.131375 +step:543 train loss:5.139188 +step:544 train loss:5.140534 +step:545 train loss:5.133434 +step:546 train loss:5.092584 +step:547 train loss:5.099874 +step:548 train loss:5.048573 +step:549 train loss:5.097275 +step:550 train loss:5.100549 +step:551 train loss:5.099096 +step:552 train loss:5.202295 +step:553 train loss:5.163388 +step:554 train loss:5.092463 +step:555 train loss:5.141170 +step:556 train loss:5.079859 +step:557 train loss:5.048079 +step:558 train loss:5.014316 +step:559 train loss:5.071635 +step:560 train loss:5.137318 +step:561 train loss:5.015601 +step:562 train loss:4.999516 +step:563 train loss:5.078148 +step:564 train loss:5.033756 +step:565 train loss:5.032679 +step:566 train loss:5.030598 +step:567 train 
loss:5.029969 +step:568 train loss:5.065273 +step:569 train loss:5.046209 +step:570 train loss:4.979680 +step:571 train loss:5.027566 +step:572 train loss:5.034556 +step:573 train loss:5.086663 +step:574 train loss:5.090310 +step:575 train loss:5.045615 +step:576 train loss:5.075460 +step:577 train loss:5.104287 +step:578 train loss:5.066062 +step:579 train loss:5.104768 +step:580 train loss:5.033417 +step:581 train loss:5.088058 +step:582 train loss:5.049227 +step:583 train loss:5.079774 +step:584 train loss:5.059856 +step:585 train loss:5.037434 +step:586 train loss:5.018502 +step:587 train loss:5.089393 +step:588 train loss:4.999592 +step:589 train loss:5.057570 +step:590 train loss:5.067278 +step:591 train loss:4.985244 +step:592 train loss:4.959514 +step:593 train loss:4.969905 +step:594 train loss:4.939344 +step:595 train loss:4.980233 +step:596 train loss:4.961502 +step:597 train loss:4.993128 +step:598 train loss:4.944948 +step:599 train loss:4.955125 +step:600 train loss:4.939443 +step:601 train loss:4.919368 +step:602 train loss:4.929282 +step:603 train loss:4.967485 +step:604 train loss:4.941346 +step:605 train loss:4.981312 +step:606 train loss:4.914930 +step:607 train loss:4.907508 +step:608 train loss:4.920763 +step:609 train loss:4.910651 +step:610 train loss:4.905593 +step:611 train loss:4.924890 +step:612 train loss:4.971077 +step:613 train loss:4.871771 +step:614 train loss:4.923681 +step:615 train loss:4.965339 +step:616 train loss:4.876996 +step:617 train loss:4.925774 +step:618 train loss:4.887746 +step:619 train loss:4.922859 +step:620 train loss:4.956735 +step:621 train loss:4.869077 +step:622 train loss:4.965989 +step:623 train loss:4.934071 +step:624 train loss:4.920979 +step:625 train loss:4.923947 +step:626 train loss:4.908155 +step:627 train loss:4.884778 +step:628 train loss:4.887268 +step:629 train loss:4.824408 +step:630 train loss:4.859291 +step:631 train loss:4.838068 +step:632 train loss:4.863630 +step:633 train loss:4.908444 +step:634 train loss:4.879547 +step:635 train loss:4.828602 +step:636 train loss:4.928958 +step:637 train loss:4.852669 +step:638 train loss:4.768771 +step:639 train loss:4.911212 +step:640 train loss:4.884496 +step:641 train loss:4.885549 +step:642 train loss:4.919952 +step:643 train loss:4.839749 +step:644 train loss:4.936273 +step:645 train loss:4.904481 +step:646 train loss:4.939101 +step:647 train loss:4.944091 +step:648 train loss:5.040624 +step:649 train loss:4.925542 +step:650 train loss:4.959119 +step:651 train loss:4.847982 +step:652 train loss:4.865232 +step:653 train loss:4.858887 +step:654 train loss:4.909440 +step:655 train loss:4.951077 +step:656 train loss:4.950090 +step:657 train loss:4.971692 +step:658 train loss:4.940962 +step:659 train loss:5.024807 +step:660 train loss:4.970259 +step:661 train loss:4.987409 +step:662 train loss:5.003671 +step:663 train loss:5.040091 +step:664 train loss:4.923893 +step:665 train loss:4.941404 +step:666 train loss:4.921578 +step:667 train loss:4.973383 +step:668 train loss:4.957071 +step:669 train loss:4.911977 +step:670 train loss:4.931093 +step:671 train loss:4.884408 +step:672 train loss:4.838040 +step:673 train loss:4.925529 +step:674 train loss:4.909714 +step:675 train loss:4.801062 +step:676 train loss:4.887281 +step:677 train loss:4.820160 +step:678 train loss:4.798829 +step:679 train loss:4.830649 +step:680 train loss:4.803977 +step:681 train loss:4.842614 +step:682 train loss:4.738416 +step:683 train loss:4.802120 +step:684 train loss:4.833602 +step:685 train loss:4.760774 
+step:686 train loss:4.854991 +step:687 train loss:4.792612 +step:688 train loss:4.722050 +step:689 train loss:4.760504 +step:690 train loss:4.740230 +step:691 train loss:4.764221 +step:692 train loss:4.760402 +step:693 train loss:4.773868 +step:694 train loss:4.782024 +step:695 train loss:4.758782 +step:696 train loss:4.716095 +step:697 train loss:4.844019 +step:698 train loss:4.743939 +step:699 train loss:4.767102 +step:700 train loss:4.821494 +step:701 train loss:4.732083 +step:702 train loss:4.786998 +step:703 train loss:4.734516 +step:704 train loss:4.705386 +step:705 train loss:4.787844 +step:706 train loss:4.672370 +step:707 train loss:4.743443 +step:708 train loss:4.864571 +step:709 train loss:4.814775 +step:710 train loss:4.757720 +step:711 train loss:4.799678 +step:712 train loss:4.754910 +step:713 train loss:4.716021 +step:714 train loss:4.791530 +step:715 train loss:4.662367 +step:716 train loss:4.821039 +step:717 train loss:4.694240 +step:718 train loss:4.765208 +step:719 train loss:4.711768 +step:720 train loss:4.670599 +step:721 train loss:4.698512 +step:722 train loss:4.689645 +step:723 train loss:4.727988 +step:724 train loss:4.720286 +step:725 train loss:4.658212 +step:726 train loss:4.666441 +step:727 train loss:4.703219 +step:728 train loss:4.710922 +step:729 train loss:4.668752 +step:730 train loss:4.731556 +step:731 train loss:4.760630 +step:732 train loss:4.713717 +step:733 train loss:4.691881 +step:734 train loss:4.693109 +step:735 train loss:4.758283 +step:736 train loss:4.678651 +step:737 train loss:4.677241 +step:738 train loss:4.707508 +step:739 train loss:4.636896 +step:740 train loss:4.673272 +step:741 train loss:4.737282 +step:742 train loss:4.634255 +step:743 train loss:4.624383 +step:744 train loss:4.649248 +step:745 train loss:4.566076 +step:746 train loss:4.603915 +step:747 train loss:4.619473 +step:748 train loss:4.596507 +step:749 train loss:4.622642 +step:750 validation loss:4.568025 +step:750 train loss:4.557231 +step:751 train loss:4.622184 +step:752 train loss:4.586461 +step:753 train loss:4.618543 +step:754 train loss:4.620917 +step:755 train loss:4.700489 +step:756 train loss:4.639968 +step:757 train loss:4.726008 +step:758 train loss:4.606112 +step:759 train loss:4.617486 +step:760 train loss:4.582935 +step:761 train loss:4.611442 +step:762 train loss:4.589478 +step:763 train loss:4.614604 +step:764 train loss:4.686306 +step:765 train loss:4.637092 +step:766 train loss:4.709322 +step:767 train loss:4.862551 +step:768 train loss:4.706589 +step:769 train loss:4.694159 +step:770 train loss:4.717651 +step:771 train loss:4.795242 +step:772 train loss:4.773699 +step:773 train loss:4.690799 +step:774 train loss:4.821903 +step:775 train loss:4.781037 +step:776 train loss:4.817561 +step:777 train loss:4.769806 +step:778 train loss:4.713812 +step:779 train loss:4.673267 +step:780 train loss:4.742261 +step:781 train loss:4.673335 +step:782 train loss:4.709270 +step:783 train loss:4.666292 +step:784 train loss:4.649357 +step:785 train loss:4.618404 +step:786 train loss:4.629672 +step:787 train loss:4.569987 +step:788 train loss:4.638482 +step:789 train loss:4.617796 +step:790 train loss:4.593389 +step:791 train loss:4.674579 +step:792 train loss:4.683212 +step:793 train loss:4.628058 +step:794 train loss:4.597256 +step:795 train loss:4.561970 +step:796 train loss:4.830649 +step:797 train loss:4.573360 +step:798 train loss:4.558365 +step:799 train loss:4.561873 +step:800 train loss:4.630677 +step:801 train loss:4.540092 +step:802 train loss:4.672965 +step:803 
train loss:4.554747 +step:804 train loss:4.503791 +step:805 train loss:4.560516 +step:806 train loss:4.479798 +step:807 train loss:4.519822 +step:808 train loss:4.521027 +step:809 train loss:4.495903 +step:810 train loss:4.471022 +step:811 train loss:4.558084 +step:812 train loss:4.512831 +step:813 train loss:4.524584 +step:814 train loss:4.588440 +step:815 train loss:4.547709 +step:816 train loss:4.471516 +step:817 train loss:4.508641 +step:818 train loss:4.480648 +step:819 train loss:4.480791 +step:820 train loss:4.487310 +step:821 train loss:4.454045 +step:822 train loss:4.454445 +step:823 train loss:4.531852 +step:824 train loss:4.427239 +step:825 train loss:4.408553 +step:826 train loss:4.473715 +step:827 train loss:4.378366 +step:828 train loss:4.458633 +step:829 train loss:4.457912 +step:830 train loss:4.458243 +step:831 train loss:4.501358 +step:832 train loss:4.551762 +step:833 train loss:4.506350 +step:834 train loss:4.496985 +step:835 train loss:4.459706 +step:836 train loss:4.454860 +step:837 train loss:4.431870 +step:838 train loss:4.421132 +step:839 train loss:4.428844 +step:840 train loss:4.467559 +step:841 train loss:4.450779 +step:842 train loss:4.442502 +step:843 train loss:4.442492 +step:844 train loss:4.404753 +step:845 train loss:4.393959 +step:846 train loss:4.518895 +step:847 train loss:4.487284 +step:848 train loss:4.437365 +step:849 train loss:4.481483 +step:850 train loss:4.494378 +step:851 train loss:4.455312 +step:852 train loss:4.535812 +step:853 train loss:4.420095 +step:854 train loss:4.469963 +step:855 train loss:4.467546 +step:856 train loss:4.411795 +step:857 train loss:4.448950 +step:858 train loss:4.481761 +step:859 train loss:4.390062 +step:860 train loss:4.412072 +step:861 train loss:4.448303 +step:862 train loss:4.390124 +step:863 train loss:4.404963 +step:864 train loss:4.396839 +step:865 train loss:4.413738 +step:866 train loss:4.440550 +step:867 train loss:4.547535 +step:868 train loss:4.414265 +step:869 train loss:4.437670 +step:870 train loss:4.372505 +step:871 train loss:4.358742 +step:872 train loss:4.426641 +step:873 train loss:4.419097 +step:874 train loss:4.448349 +step:875 train loss:4.341856 +step:876 train loss:4.437404 +step:877 train loss:4.374887 +step:878 train loss:4.470862 +step:879 train loss:4.370276 +step:880 train loss:4.477380 +step:881 train loss:4.417384 +step:882 train loss:4.383171 +step:883 train loss:4.426041 +step:884 train loss:4.435187 +step:885 train loss:4.376631 +step:886 train loss:4.380503 +step:887 train loss:4.400637 +step:888 train loss:4.507289 +step:889 train loss:4.425628 +step:890 train loss:4.364027 +step:891 train loss:4.319422 +step:892 train loss:4.313925 +step:893 train loss:4.391288 +step:894 train loss:4.371815 +step:895 train loss:4.342728 +step:896 train loss:4.426637 +step:897 train loss:4.364156 +step:898 train loss:4.394381 +step:899 train loss:4.405375 +step:900 train loss:4.438599 +step:901 train loss:4.356228 +step:902 train loss:4.381343 +step:903 train loss:4.476303 +step:904 train loss:4.482365 +step:905 train loss:4.366052 +step:906 train loss:4.380434 +step:907 train loss:4.401351 +step:908 train loss:4.413828 +step:909 train loss:4.355079 +step:910 train loss:4.397254 +step:911 train loss:4.514993 +step:912 train loss:4.317142 +step:913 train loss:4.375566 +step:914 train loss:4.323890 +step:915 train loss:4.353590 +step:916 train loss:4.409093 +step:917 train loss:4.364257 +step:918 train loss:4.466015 +step:919 train loss:4.532442 +step:920 train loss:4.291513 +step:921 train 
loss:4.402668 +step:922 train loss:4.386165 +step:923 train loss:4.301213 +step:924 train loss:4.347900 +step:925 train loss:4.294440 +step:926 train loss:4.391343 +step:927 train loss:4.293160 +step:928 train loss:4.372465 +step:929 train loss:4.342726 +step:930 train loss:4.340386 +step:931 train loss:4.377232 +step:932 train loss:4.317512 +step:933 train loss:4.363873 +step:934 train loss:4.398873 +step:935 train loss:4.377124 +step:936 train loss:4.347459 +step:937 train loss:4.344778 +step:938 train loss:4.338372 +step:939 train loss:4.237659 +step:940 train loss:4.335074 +step:941 train loss:4.278279 +step:942 train loss:4.260363 +step:943 train loss:4.357243 +step:944 train loss:4.307899 +step:945 train loss:4.312711 +step:946 train loss:4.340777 +step:947 train loss:4.493374 +step:948 train loss:4.292525 +step:949 train loss:4.342392 +step:950 train loss:4.272925 +step:951 train loss:4.306165 +step:952 train loss:4.360407 +step:953 train loss:4.292609 +step:954 train loss:4.318137 +step:955 train loss:4.256959 +step:956 train loss:4.292132 +step:957 train loss:4.299086 +step:958 train loss:4.364920 +step:959 train loss:4.305413 +step:960 train loss:4.405747 +step:961 train loss:4.402382 +step:962 train loss:4.350256 +step:963 train loss:4.346810 +step:964 train loss:4.379504 +step:965 train loss:4.299775 +step:966 train loss:4.342834 +step:967 train loss:4.399998 +step:968 train loss:4.398282 +step:969 train loss:4.337981 +step:970 train loss:4.398211 +step:971 train loss:4.452868 +step:972 train loss:4.373154 +step:973 train loss:4.451053 +step:974 train loss:4.400138 +step:975 train loss:4.450944 +step:976 train loss:4.396324 +step:977 train loss:4.373370 +step:978 train loss:4.370364 +step:979 train loss:4.352307 +step:980 train loss:4.356079 +step:981 train loss:4.334001 +step:982 train loss:4.337236 +step:983 train loss:4.344623 +step:984 train loss:4.387676 +step:985 train loss:4.352829 +step:986 train loss:4.366054 +step:987 train loss:4.389420 +step:988 train loss:4.362140 +step:989 train loss:4.320190 +step:990 train loss:4.309105 +step:991 train loss:4.230170 +step:992 train loss:4.290076 +step:993 train loss:4.311615 +step:994 train loss:4.243879 +step:995 train loss:4.258838 +step:996 train loss:4.297861 +step:997 train loss:4.253076 +step:998 train loss:4.250609 +step:999 train loss:4.294210 +step:1000 validation loss:4.221220 total_sharp:4.3458e-03 L1_sharp:1.3053e-02 L2_sharp:1.2268e-03 L3_sharp:1.5832e-03 L4_sharp:6.4773e-04 L5_sharp:5.1170e-04 L6_sharp:6.5219e-04 L7_sharp:6.9373e-04 L8_sharp:8.7361e-04 L9_sharp:1.0193e-03 L10_sharp:1.1865e-03 L11_sharp:1.3123e-03 L12_sharp:1.4441e-03 total_fnorm:3.6096e+00 total_l1_linf:3.0673e+04 total_spectral:3.6096e+00 L1_fnorm:5.7542e-01 L2_fnorm:6.2381e-01 L3_fnorm:6.4865e-01 L4_fnorm:7.1257e-01 L5_fnorm:7.4471e-01 L6_fnorm:7.5249e-01 L7_fnorm:7.7511e-01 L8_fnorm:7.8257e-01 L9_fnorm:7.9953e-01 L10_fnorm:8.0430e-01 L11_fnorm:8.0003e-01 L12_fnorm:7.6894e-01 L1_l1linf:7.8121e-01 L2_l1linf:7.7637e-01 L3_l1linf:7.8770e-01 L4_l1linf:8.2169e-01 L5_l1linf:8.6676e-01 L6_l1linf:8.5195e-01 L7_l1linf:8.9497e-01 L8_l1linf:8.6192e-01 L9_l1linf:9.2070e-01 L10_l1linf:8.7918e-01 L11_l1linf:9.0831e-01 L12_l1linf:9.7985e-01 L1_spectral:1.1732e-01 L2_spectral:1.0289e-01 L3_spectral:1.0425e-01 L4_spectral:1.1807e-01 L5_spectral:1.2327e-01 L6_spectral:1.1520e-01 L7_spectral:1.1024e-01 L8_spectral:1.0580e-01 L9_spectral:9.8766e-02 L10_spectral:1.0323e-01 L11_spectral:1.0452e-01 L12_spectral:1.2750e-01 ip_v_neg_g:2.6877e-02 cos_v_neg_g:1.2906e-02 
v_norm:3.6096e+00 g_norm:5.7696e-01 hv_norm:3.5797e-01 cos_v_hv:4.3821e-02 hg_norm:9.4376e-01 cos_g_hg:4.7503e-01 v_par:2.6267e-03 v_perp:3.6096e+00 L1_cos_v_neg_g:3.5389e-02 L1_v_norm:5.7542e-01 L2_cos_v_neg_g:2.9105e-02 L2_v_norm:6.2381e-01 L3_cos_v_neg_g:3.0958e-02 L3_v_norm:6.4865e-01 L4_cos_v_neg_g:2.3750e-02 L4_v_norm:7.1257e-01 L5_cos_v_neg_g:2.2416e-02 L5_v_norm:7.4471e-01 L6_cos_v_neg_g:2.5152e-02 L6_v_norm:7.5249e-01 L7_cos_v_neg_g:1.7208e-02 L7_v_norm:7.7511e-01 L8_cos_v_neg_g:1.7281e-02 L8_v_norm:7.8257e-01 L9_cos_v_neg_g:1.4922e-02 L9_v_norm:7.9953e-01 L10_cos_v_neg_g:2.1477e-02 L10_v_norm:8.0430e-01 L11_cos_v_neg_g:2.0640e-02 L11_v_norm:8.0003e-01 L12_cos_v_neg_g:2.0470e-02 L12_v_norm:7.6894e-01 +step:1000 train loss:4.296743 +step:1001 train loss:4.290882 +step:1002 train loss:4.282695 +step:1003 train loss:4.253931 +step:1004 train loss:4.229200 +step:1005 train loss:4.240061 +step:1006 train loss:4.323472 +step:1007 train loss:4.265841 +step:1008 train loss:4.251184 +step:1009 train loss:4.311390 +step:1010 train loss:4.275293 +step:1011 train loss:4.305305 +step:1012 train loss:4.258136 +step:1013 train loss:4.234599 +step:1014 train loss:4.231376 +step:1015 train loss:4.256000 +step:1016 train loss:4.271567 +step:1017 train loss:4.224742 +step:1018 train loss:4.279928 +step:1019 train loss:4.223944 +step:1020 train loss:4.228868 +step:1021 train loss:4.339618 +step:1022 train loss:4.242443 +step:1023 train loss:4.251896 +step:1024 train loss:4.341828 +step:1025 train loss:4.305725 +step:1026 train loss:4.250421 +step:1027 train loss:4.294611 +step:1028 train loss:4.287108 +step:1029 train loss:4.229624 +step:1030 train loss:4.317929 +step:1031 train loss:4.295367 +step:1032 train loss:4.256119 +step:1033 train loss:4.219327 +step:1034 train loss:4.278859 +step:1035 train loss:4.280694 +step:1036 train loss:4.191331 +step:1037 train loss:4.247104 +step:1038 train loss:4.269886 +step:1039 train loss:4.411717 +step:1040 train loss:4.254392 +step:1041 train loss:4.232199 +step:1042 train loss:4.249966 +step:1043 train loss:4.254185 +step:1044 train loss:4.242733 +step:1045 train loss:4.248981 +step:1046 train loss:4.187850 +step:1047 train loss:4.220734 +step:1048 train loss:4.213388 +step:1049 train loss:4.270900 +step:1050 train loss:4.231236 +step:1051 train loss:4.201644 +step:1052 train loss:4.309626 +step:1053 train loss:4.214366 +step:1054 train loss:4.209375 +step:1055 train loss:4.272987 +step:1056 train loss:4.215520 +step:1057 train loss:4.110881 +step:1058 train loss:4.215219 +step:1059 train loss:4.199584 +step:1060 train loss:4.194818 +step:1061 train loss:4.243959 +step:1062 train loss:4.205684 +step:1063 train loss:4.209912 +step:1064 train loss:4.203938 +step:1065 train loss:4.213617 +step:1066 train loss:4.187904 +step:1067 train loss:4.224225 +step:1068 train loss:4.180234 +step:1069 train loss:4.198932 +step:1070 train loss:4.212815 +step:1071 train loss:4.222455 +step:1072 train loss:4.247212 +step:1073 train loss:4.165476 +step:1074 train loss:4.179725 +step:1075 train loss:4.180346 +step:1076 train loss:4.249862 +step:1077 train loss:4.181534 +step:1078 train loss:4.237054 +step:1079 train loss:4.279408 +step:1080 train loss:4.151640 +step:1081 train loss:4.223293 +step:1082 train loss:4.221951 +step:1083 train loss:4.184698 +step:1084 train loss:4.159262 +step:1085 train loss:4.217206 +step:1086 train loss:4.211067 +step:1087 train loss:4.200523 +step:1088 train loss:4.195112 +step:1089 train loss:4.200096 +step:1090 train loss:4.143923 +step:1091 
train loss:4.136203 +step:1092 train loss:4.239306 +step:1093 train loss:4.125669 +step:1094 train loss:4.182341 +step:1095 train loss:4.227648 +step:1096 train loss:4.167384 +step:1097 train loss:4.171568 +step:1098 train loss:4.144333 +step:1099 train loss:4.191865 +step:1100 train loss:4.239329 +step:1101 train loss:4.234588 +step:1102 train loss:4.242477 +step:1103 train loss:4.164351 +step:1104 train loss:4.196115 +step:1105 train loss:4.240665 +step:1106 train loss:4.175712 +step:1107 train loss:4.297274 +step:1108 train loss:4.236680 +step:1109 train loss:4.210623 +step:1110 train loss:4.168655 +step:1111 train loss:4.220088 +step:1112 train loss:4.132771 +step:1113 train loss:4.115221 +step:1114 train loss:4.105133 +step:1115 train loss:4.144058 +step:1116 train loss:4.207310 +step:1117 train loss:4.234735 +step:1118 train loss:4.259874 +step:1119 train loss:4.192431 +step:1120 train loss:4.213498 +step:1121 train loss:4.196165 +step:1122 train loss:4.184854 +step:1123 train loss:4.284474 +step:1124 train loss:4.171519 +step:1125 train loss:4.188540 +step:1126 train loss:4.169502 +step:1127 train loss:4.189179 +step:1128 train loss:4.186226 +step:1129 train loss:4.260438 +step:1130 train loss:4.190481 +step:1131 train loss:4.269643 +step:1132 train loss:4.214664 +step:1133 train loss:4.219889 +step:1134 train loss:4.190840 +step:1135 train loss:4.239398 +step:1136 train loss:4.251701 +step:1137 train loss:4.172477 +step:1138 train loss:4.231946 +step:1139 train loss:4.177784 +step:1140 train loss:4.264011 +step:1141 train loss:4.210589 +step:1142 train loss:4.142694 +step:1143 train loss:4.215616 +step:1144 train loss:4.241512 +step:1145 train loss:4.188226 +step:1146 train loss:4.130977 +step:1147 train loss:4.144015 +step:1148 train loss:4.172036 +step:1149 train loss:4.219532 +step:1150 train loss:4.223551 +step:1151 train loss:4.231180 +step:1152 train loss:4.133566 +step:1153 train loss:4.132200 +step:1154 train loss:4.109691 +step:1155 train loss:4.213765 +step:1156 train loss:4.113699 +step:1157 train loss:4.141097 +step:1158 train loss:4.197563 +step:1159 train loss:4.199372 +step:1160 train loss:4.121395 +step:1161 train loss:4.212699 +step:1162 train loss:4.156320 +step:1163 train loss:4.137643 +step:1164 train loss:4.043598 +step:1165 train loss:4.181807 +step:1166 train loss:4.107203 +step:1167 train loss:4.109860 +step:1168 train loss:4.171478 +step:1169 train loss:4.129112 +step:1170 train loss:4.137962 +step:1171 train loss:4.162358 +step:1172 train loss:4.124735 +step:1173 train loss:4.159878 +step:1174 train loss:4.096033 +step:1175 train loss:4.132509 +step:1176 train loss:4.242465 +step:1177 train loss:4.089096 +step:1178 train loss:4.151097 +step:1179 train loss:4.107887 +step:1180 train loss:4.147222 +step:1181 train loss:4.123732 +step:1182 train loss:4.181614 +step:1183 train loss:4.158078 +step:1184 train loss:4.101707 +step:1185 train loss:4.132942 +step:1186 train loss:4.120011 +step:1187 train loss:4.089579 +step:1188 train loss:4.124970 +step:1189 train loss:4.057611 +step:1190 train loss:4.122990 +step:1191 train loss:4.205224 +step:1192 train loss:4.150321 +step:1193 train loss:4.156495 +step:1194 train loss:4.259253 +step:1195 train loss:4.230287 +step:1196 train loss:4.122146 +step:1197 train loss:4.151416 +step:1198 train loss:4.134230 +step:1199 train loss:4.131151 +step:1200 train loss:4.191544 +step:1201 train loss:4.164481 +step:1202 train loss:4.101092 +step:1203 train loss:4.090864 +step:1204 train loss:4.127024 +step:1205 train loss:4.131633 
+step:1206 train loss:4.078866 +step:1207 train loss:4.168126 +step:1208 train loss:4.138330 +step:1209 train loss:4.061190 +step:1210 train loss:4.156146 +step:1211 train loss:4.103528 +step:1212 train loss:4.125283 +step:1213 train loss:4.059604 +step:1214 train loss:4.140924 +step:1215 train loss:4.110415 +step:1216 train loss:4.125233 +step:1217 train loss:4.091343 +step:1218 train loss:4.157053 +step:1219 train loss:4.149188 +step:1220 train loss:4.189507 +step:1221 train loss:4.203939 +step:1222 train loss:4.254200 +step:1223 train loss:4.239741 +step:1224 train loss:4.226961 +step:1225 train loss:4.242096 +step:1226 train loss:4.175897 +step:1227 train loss:4.186475 +step:1228 train loss:4.181655 +step:1229 train loss:4.139335 +step:1230 train loss:4.126236 +step:1231 train loss:4.176497 +step:1232 train loss:4.133967 +step:1233 train loss:4.123829 +step:1234 train loss:4.201437 +step:1235 train loss:4.168926 +step:1236 train loss:4.069199 +step:1237 train loss:4.173476 +step:1238 train loss:4.115890 +step:1239 train loss:4.154707 +step:1240 train loss:4.063583 +step:1241 train loss:4.094868 +step:1242 train loss:4.117215 +step:1243 train loss:4.063225 +step:1244 train loss:4.185617 +step:1245 train loss:4.200881 +step:1246 train loss:4.126493 +step:1247 train loss:4.104189 +step:1248 train loss:4.125809 +step:1249 train loss:4.059118 +step:1250 validation loss:4.055932 +step:1250 train loss:4.074809 +step:1251 train loss:4.139595 +step:1252 train loss:4.089256 +step:1253 train loss:4.042369 +step:1254 train loss:4.073997 +step:1255 train loss:4.065042 +step:1256 train loss:4.117619 +step:1257 train loss:4.095465 +step:1258 train loss:4.142865 +step:1259 train loss:4.127454 +step:1260 train loss:4.038042 +step:1261 train loss:4.276247 +step:1262 train loss:4.120131 +step:1263 train loss:4.078515 +step:1264 train loss:4.090003 +step:1265 train loss:4.141596 +step:1266 train loss:4.097690 +step:1267 train loss:4.111869 +step:1268 train loss:4.115359 +step:1269 train loss:4.104434 +step:1270 train loss:4.029104 +step:1271 train loss:4.034081 +step:1272 train loss:4.062760 +step:1273 train loss:4.114838 +step:1274 train loss:4.079432 +step:1275 train loss:4.107143 +step:1276 train loss:4.106953 +step:1277 train loss:4.115773 +step:1278 train loss:4.052254 +step:1279 train loss:4.061605 +step:1280 train loss:4.078623 +step:1281 train loss:4.127810 +step:1282 train loss:4.056675 +step:1283 train loss:4.133444 +step:1284 train loss:4.072513 +step:1285 train loss:4.120416 +step:1286 train loss:4.018587 +step:1287 train loss:4.058178 +step:1288 train loss:4.084876 +step:1289 train loss:4.144689 +step:1290 train loss:4.097286 +step:1291 train loss:4.061336 +step:1292 train loss:4.046333 +step:1293 train loss:4.036723 +step:1294 train loss:4.085366 +step:1295 train loss:4.068839 +step:1296 train loss:4.114873 +step:1297 train loss:4.075148 +step:1298 train loss:4.091515 +step:1299 train loss:4.125954 +step:1300 train loss:4.048861 +step:1301 train loss:4.095930 +step:1302 train loss:4.062871 +step:1303 train loss:4.103551 +step:1304 train loss:4.130291 +step:1305 train loss:4.104761 +step:1306 train loss:4.104277 +step:1307 train loss:4.101912 +step:1308 train loss:4.069976 +step:1309 train loss:4.081526 +step:1310 train loss:4.053390 +step:1311 train loss:4.059865 +step:1312 train loss:4.131361 +step:1313 train loss:4.073833 +step:1314 train loss:4.086084 +step:1315 train loss:4.126611 +step:1316 train loss:4.085888 +step:1317 train loss:3.985119 +step:1318 train loss:4.128012 +step:1319 
train loss:4.160400 +step:1320 train loss:4.073397 +step:1321 train loss:4.051198 +step:1322 train loss:4.152178 +step:1323 train loss:4.094491 +step:1324 train loss:4.197922 +step:1325 train loss:4.085142 +step:1326 train loss:4.137290 +step:1327 train loss:4.145627 +step:1328 train loss:4.041971 +step:1329 train loss:4.066425 +step:1330 train loss:4.089721 +step:1331 train loss:4.006480 +step:1332 train loss:4.139506 +step:1333 train loss:4.095349 +step:1334 train loss:4.095445 +step:1335 train loss:4.127979 +step:1336 train loss:4.130620 +step:1337 train loss:4.105870 +step:1338 train loss:4.073759 +step:1339 train loss:4.150012 +step:1340 train loss:4.114120 +step:1341 train loss:4.098321 +step:1342 train loss:4.071053 +step:1343 train loss:4.052742 +step:1344 train loss:4.121891 +step:1345 train loss:4.076397 +step:1346 train loss:4.156778 +step:1347 train loss:4.078102 +step:1348 train loss:4.048847 +step:1349 train loss:3.991162 +step:1350 train loss:4.023055 +step:1351 train loss:4.095883 +step:1352 train loss:4.065565 +step:1353 train loss:4.045991 +step:1354 train loss:4.056229 +step:1355 train loss:4.117097 +step:1356 train loss:4.028847 +step:1357 train loss:4.059782 +step:1358 train loss:4.046166 +step:1359 train loss:4.041511 +step:1360 train loss:4.075228 +step:1361 train loss:4.191741 +step:1362 train loss:4.113678 +step:1363 train loss:3.997261 +step:1364 train loss:4.017947 +step:1365 train loss:4.007124 +step:1366 train loss:4.056505 +step:1367 train loss:3.978478 +step:1368 train loss:4.015529 +step:1369 train loss:4.046596 +step:1370 train loss:4.063932 +step:1371 train loss:4.027503 +step:1372 train loss:4.053311 +step:1373 train loss:4.088791 +step:1374 train loss:4.094175 +step:1375 train loss:4.050827 +step:1376 train loss:4.082200 +step:1377 train loss:4.095509 +step:1378 train loss:4.083472 +step:1379 train loss:4.069528 +step:1380 train loss:4.131694 +step:1381 train loss:4.075032 +step:1382 train loss:4.093167 +step:1383 train loss:4.046361 +step:1384 train loss:4.127762 +step:1385 train loss:4.025991 +step:1386 train loss:4.092600 +step:1387 train loss:4.101846 +step:1388 train loss:4.061394 +step:1389 train loss:4.045950 +step:1390 train loss:4.080482 +step:1391 train loss:4.107874 +step:1392 train loss:4.082187 +step:1393 train loss:4.138904 +step:1394 train loss:4.060831 +step:1395 train loss:4.093823 +step:1396 train loss:4.067914 +step:1397 train loss:4.085468 +step:1398 train loss:4.092824 +step:1399 train loss:4.058361 +step:1400 train loss:4.033952 +step:1401 train loss:4.026297 +step:1402 train loss:4.030987 +step:1403 train loss:3.988363 +step:1404 train loss:4.048689 +step:1405 train loss:4.013572 +step:1406 train loss:4.038109 +step:1407 train loss:4.029206 +step:1408 train loss:4.012566 +step:1409 train loss:3.996439 +step:1410 train loss:4.017346 +step:1411 train loss:4.048458 +step:1412 train loss:4.101908 +step:1413 train loss:4.024642 +step:1414 train loss:4.052262 +step:1415 train loss:4.013200 +step:1416 train loss:4.065167 +step:1417 train loss:4.033838 +step:1418 train loss:3.975173 +step:1419 train loss:3.987359 +step:1420 train loss:4.016990 +step:1421 train loss:4.050906 +step:1422 train loss:4.025273 +step:1423 train loss:4.124190 +step:1424 train loss:4.024504 +step:1425 train loss:3.993907 +step:1426 train loss:4.016783 +step:1427 train loss:4.003397 +step:1428 train loss:3.984795 +step:1429 train loss:4.012580 +step:1430 train loss:4.017301 +step:1431 train loss:4.039665 +step:1432 train loss:4.027590 +step:1433 train loss:4.006556 
+step:1434 train loss:3.979451 +step:1435 train loss:3.973908 +step:1436 train loss:4.055249 +step:1437 train loss:3.980762 +step:1438 train loss:3.986741 +step:1439 train loss:4.005384 +step:1440 train loss:4.035444 +step:1441 train loss:4.113012 +step:1442 train loss:4.084065 +step:1443 train loss:4.009059 +step:1444 train loss:4.013649 +step:1445 train loss:4.007807 +step:1446 train loss:4.041343 +step:1447 train loss:4.046363 +step:1448 train loss:4.009343 +step:1449 train loss:4.041667 +step:1450 train loss:4.079999 +step:1451 train loss:3.993581 +step:1452 train loss:4.050604 +step:1453 train loss:4.035577 +step:1454 train loss:4.025516 +step:1455 train loss:3.959924 +step:1456 train loss:4.033864 +step:1457 train loss:3.967800 +step:1458 train loss:4.106832 +step:1459 train loss:4.028375 +step:1460 train loss:3.988823 +step:1461 train loss:4.049991 +step:1462 train loss:4.052882 +step:1463 train loss:4.015399 +step:1464 train loss:4.001924 +step:1465 train loss:3.986931 +step:1466 train loss:3.949188 +step:1467 train loss:4.091331 +step:1468 train loss:3.972979 +step:1469 train loss:4.050521 +step:1470 train loss:3.986200 +step:1471 train loss:3.978831 +step:1472 train loss:3.984881 +step:1473 train loss:3.980899 +step:1474 train loss:3.929291 +step:1475 train loss:3.985923 +step:1476 train loss:4.068286 +step:1477 train loss:4.018010 +step:1478 train loss:3.948469 +step:1479 train loss:3.981985 +step:1480 train loss:3.976812 +step:1481 train loss:3.949242 +step:1482 train loss:4.011695 +step:1483 train loss:4.005333 +step:1484 train loss:4.031483 +step:1485 train loss:4.047098 +step:1486 train loss:3.981935 +step:1487 train loss:3.976263 +step:1488 train loss:3.986536 +step:1489 train loss:3.970829 +step:1490 train loss:4.024214 +step:1491 train loss:4.023578 +step:1492 train loss:4.014319 +step:1493 train loss:3.965573 +step:1494 train loss:4.000367 +step:1495 train loss:3.992076 +step:1496 train loss:3.950078 +step:1497 train loss:4.023155 +step:1498 train loss:3.928198 +step:1499 train loss:3.969710 +step:1500 validation loss:3.945783 total_sharp:3.7273e-03 L1_sharp:4.8352e-03 L2_sharp:2.3770e-04 L3_sharp:9.8360e-04 L4_sharp:6.6290e-04 L5_sharp:5.4797e-04 L6_sharp:5.2837e-04 L7_sharp:5.6003e-04 L8_sharp:1.0341e-03 L9_sharp:1.2343e-03 L10_sharp:1.1098e-03 L11_sharp:1.1467e-03 L12_sharp:2.3878e-03 total_fnorm:4.1072e+00 total_l1_linf:3.6022e+04 total_spectral:4.1072e+00 L1_fnorm:7.6591e-01 L2_fnorm:8.2321e-01 L3_fnorm:8.5848e-01 L4_fnorm:9.0616e-01 L5_fnorm:9.5692e-01 L6_fnorm:9.7894e-01 L7_fnorm:9.7365e-01 L8_fnorm:9.5771e-01 L9_fnorm:9.5441e-01 L10_fnorm:9.4132e-01 L11_fnorm:9.4426e-01 L12_fnorm:9.2946e-01 L1_l1linf:1.1251e+00 L2_l1linf:9.5444e-01 L3_l1linf:9.7964e-01 L4_l1linf:9.6235e-01 L5_l1linf:9.9720e-01 L6_l1linf:9.9290e-01 L7_l1linf:1.0076e+00 L8_l1linf:9.9521e-01 L9_l1linf:1.0542e+00 L10_l1linf:1.0091e+00 L11_l1linf:1.0842e+00 L12_l1linf:1.0302e+00 L1_spectral:1.5559e-01 L2_spectral:1.3300e-01 L3_spectral:1.2859e-01 L4_spectral:1.2571e-01 L5_spectral:1.3340e-01 L6_spectral:1.4892e-01 L7_spectral:1.4123e-01 L8_spectral:1.1238e-01 L9_spectral:1.2543e-01 L10_spectral:1.2635e-01 L11_spectral:1.4663e-01 L12_spectral:1.8027e-01 ip_v_neg_g:3.4328e-02 cos_v_neg_g:1.3708e-02 v_norm:4.1072e+00 g_norm:6.0973e-01 hv_norm:4.0105e-01 cos_v_hv:3.8172e-02 hg_norm:2.0816e+00 cos_g_hg:4.7583e-01 v_par:2.6182e-03 v_perp:4.1072e+00 L1_cos_v_neg_g:2.5477e-02 L1_v_norm:7.6591e-01 L2_cos_v_neg_g:1.5880e-02 L2_v_norm:8.2321e-01 L3_cos_v_neg_g:1.9630e-02 L3_v_norm:8.5848e-01 
L4_cos_v_neg_g:2.0669e-02 L4_v_norm:9.0616e-01 L5_cos_v_neg_g:1.8914e-02 L5_v_norm:9.5692e-01 L6_cos_v_neg_g:1.8689e-02 L6_v_norm:9.7894e-01 L7_cos_v_neg_g:1.8057e-02 L7_v_norm:9.7365e-01 L8_cos_v_neg_g:2.2416e-02 L8_v_norm:9.5771e-01 L9_cos_v_neg_g:2.2753e-02 L9_v_norm:9.5441e-01 L10_cos_v_neg_g:2.3840e-02 L10_v_norm:9.4132e-01 L11_cos_v_neg_g:2.4148e-02 L11_v_norm:9.4426e-01 L12_cos_v_neg_g:3.4150e-02 L12_v_norm:9.2946e-01 +step:1500 train loss:3.969064 +step:1501 train loss:3.991071 +step:1502 train loss:3.922880 +step:1503 train loss:3.980634 +step:1504 train loss:3.944144 +step:1505 train loss:3.917357 +step:1506 train loss:3.911161 +step:1507 train loss:3.929102 +step:1508 train loss:3.940465 +step:1509 train loss:3.988000 +step:1510 train loss:3.938260 +step:1511 train loss:3.960953 +step:1512 train loss:3.933017 +step:1513 train loss:4.002665 +step:1514 train loss:3.953237 +step:1515 train loss:4.014906 +step:1516 train loss:3.943852 +step:1517 train loss:3.949571 +step:1518 train loss:4.034745 +step:1519 train loss:3.991937 +step:1520 train loss:4.042819 +step:1521 train loss:3.941929 +step:1522 train loss:4.002874 +step:1523 train loss:4.003195 +step:1524 train loss:3.924093 +step:1525 train loss:4.006407 +step:1526 train loss:3.921835 +step:1527 train loss:3.975796 +step:1528 train loss:4.029402 +step:1529 train loss:3.991141 +step:1530 train loss:4.027951 +step:1531 train loss:3.945091 +step:1532 train loss:4.025534 +step:1533 train loss:3.987699 +step:1534 train loss:3.938530 +step:1535 train loss:3.991037 +step:1536 train loss:4.018832 +step:1537 train loss:3.966955 +step:1538 train loss:3.979286 +step:1539 train loss:3.972953 +step:1540 train loss:3.994086 +step:1541 train loss:3.950764 +step:1542 train loss:4.042000 +step:1543 train loss:4.069761 +step:1544 train loss:3.939247 +step:1545 train loss:3.923201 +step:1546 train loss:3.966468 +step:1547 train loss:3.952880 +step:1548 train loss:3.990047 +step:1549 train loss:3.915842 +step:1550 train loss:4.032406 +step:1551 train loss:3.961087 +step:1552 train loss:3.990852 +step:1553 train loss:4.001964 +step:1554 train loss:4.009772 +step:1555 train loss:3.961727 +step:1556 train loss:3.944178 +step:1557 train loss:3.957539 +step:1558 train loss:3.984865 +step:1559 train loss:3.943455 +step:1560 train loss:4.023108 +step:1561 train loss:3.995223 +step:1562 train loss:3.885667 +step:1563 train loss:3.874697 +step:1564 train loss:3.999212 +step:1565 train loss:3.977942 +step:1566 train loss:3.995847 +step:1567 train loss:3.996818 +step:1568 train loss:3.950580 +step:1569 train loss:3.948087 +step:1570 train loss:3.961342 +step:1571 train loss:3.938831 +step:1572 train loss:3.941979 +step:1573 train loss:3.983039 +step:1574 train loss:3.941828 +step:1575 train loss:3.963641 +step:1576 train loss:3.922030 +step:1577 train loss:3.947702 +step:1578 train loss:3.932001 +step:1579 train loss:4.006751 +step:1580 train loss:3.961887 +step:1581 train loss:3.997648 +step:1582 train loss:3.998859 +step:1583 train loss:3.973768 +step:1584 train loss:3.889231 +step:1585 train loss:3.978125 +step:1586 train loss:3.943484 +step:1587 train loss:3.955727 +step:1588 train loss:3.941939 +step:1589 train loss:3.988159 +step:1590 train loss:3.893195 +step:1591 train loss:3.953793 +step:1592 train loss:3.904443 +step:1593 train loss:3.942014 +step:1594 train loss:3.945720 +step:1595 train loss:3.937239 +step:1596 train loss:3.941333 +step:1597 train loss:3.876498 +step:1598 train loss:3.971394 +step:1599 train loss:3.983503 +step:1600 train 
loss:3.864830 +step:1601 train loss:3.945894 +step:1602 train loss:4.006639 +step:1603 train loss:4.002997 +step:1604 train loss:3.926232 +step:1605 train loss:3.971324 +step:1606 train loss:4.018968 +step:1607 train loss:3.903980 +step:1608 train loss:3.935119 +step:1609 train loss:3.948560 +step:1610 train loss:4.008242 +step:1611 train loss:3.937948 +step:1612 train loss:3.861623 +step:1613 train loss:3.931866 +step:1614 train loss:4.036924 +step:1615 train loss:3.973825 +step:1616 train loss:4.015390 +step:1617 train loss:4.060205 +step:1618 train loss:4.076147 +step:1619 train loss:4.224231 +step:1620 train loss:3.981582 +step:1621 train loss:4.042266 +step:1622 train loss:3.968876 +step:1623 train loss:4.034190 +step:1624 train loss:4.013830 +step:1625 train loss:4.084841 +step:1626 train loss:3.968988 +step:1627 train loss:3.961521 +step:1628 train loss:3.975499 +step:1629 train loss:4.002168 +step:1630 train loss:4.018894 +step:1631 train loss:3.960371 +step:1632 train loss:3.934913 +step:1633 train loss:3.946904 +step:1634 train loss:3.992989 +step:1635 train loss:3.939919 +step:1636 train loss:3.914406 +step:1637 train loss:3.992023 +step:1638 train loss:4.096369 +step:1639 train loss:3.894213 +step:1640 train loss:3.973767 +step:1641 train loss:3.934428 +step:1642 train loss:4.026938 +step:1643 train loss:3.926100 +step:1644 train loss:3.934859 +step:1645 train loss:3.911296 +step:1646 train loss:3.993056 +step:1647 train loss:3.884089 +step:1648 train loss:3.947165 +step:1649 train loss:3.910360 +step:1650 train loss:3.922554 +step:1651 train loss:3.940820 +step:1652 train loss:3.959430 +step:1653 train loss:3.962060 +step:1654 train loss:3.955154 +step:1655 train loss:3.927428 +step:1656 train loss:3.921580 +step:1657 train loss:3.925803 +step:1658 train loss:3.896274 +step:1659 train loss:3.970387 +step:1660 train loss:3.872800 +step:1661 train loss:3.984643 +step:1662 train loss:3.920677 +step:1663 train loss:3.915664 +step:1664 train loss:4.009673 +step:1665 train loss:3.932302 +step:1666 train loss:3.943178 +step:1667 train loss:3.960552 +step:1668 train loss:3.934354 +step:1669 train loss:3.891064 +step:1670 train loss:3.947175 +step:1671 train loss:3.944064 +step:1672 train loss:3.937390 +step:1673 train loss:3.898181 +step:1674 train loss:3.895517 +step:1675 train loss:3.936075 +step:1676 train loss:4.207079 +step:1677 train loss:3.976071 +step:1678 train loss:3.906632 +step:1679 train loss:4.027033 +step:1680 train loss:3.946117 +step:1681 train loss:3.996980 +step:1682 train loss:3.955569 +step:1683 train loss:3.941045 +step:1684 train loss:3.901456 +step:1685 train loss:3.947479 +step:1686 train loss:3.930091 +step:1687 train loss:3.943197 +step:1688 train loss:3.921509 +step:1689 train loss:3.914445 +step:1690 train loss:3.939487 +step:1691 train loss:3.928742 +step:1692 train loss:3.942843 +step:1693 train loss:3.913183 +step:1694 train loss:3.866291 +step:1695 train loss:3.888220 +step:1696 train loss:3.894386 +step:1697 train loss:3.940436 +step:1698 train loss:3.941450 +step:1699 train loss:3.894182 +step:1700 train loss:3.973535 +step:1701 train loss:3.909906 +step:1702 train loss:3.904278 +step:1703 train loss:3.922035 +step:1704 train loss:3.930286 +step:1705 train loss:3.942718 +step:1706 train loss:3.951411 +step:1707 train loss:3.950780 +step:1708 train loss:3.871315 +step:1709 train loss:3.976350 +step:1710 train loss:3.888557 +step:1711 train loss:3.896375 +step:1712 train loss:3.925456 +step:1713 train loss:3.889144 +step:1714 train loss:4.256312 
+step:1715 train loss:3.902487 +step:1716 train loss:3.887671 +step:1717 train loss:3.889679 +step:1718 train loss:3.966934 +step:1719 train loss:3.879906 +step:1720 train loss:3.960381 +step:1721 train loss:3.895760 +step:1722 train loss:3.869558 +step:1723 train loss:3.971038 +step:1724 train loss:3.921347 +step:1725 train loss:3.914211 +step:1726 train loss:3.913273 +step:1727 train loss:3.948895 +step:1728 train loss:3.955862 +step:1729 train loss:3.878047 +step:1730 train loss:3.955389 +step:1731 train loss:3.882834 +step:1732 train loss:3.894221 +step:1733 train loss:3.886282 +step:1734 train loss:3.941139 +step:1735 train loss:4.000753 +step:1736 train loss:3.906353 +step:1737 train loss:3.933599 +step:1738 train loss:3.895830 +step:1739 train loss:3.964768 +step:1740 train loss:3.951340 +step:1741 train loss:4.005868 +step:1742 train loss:3.993751 +step:1743 train loss:3.889051 +step:1744 train loss:3.901160 +step:1745 train loss:3.891951 +step:1746 train loss:3.874648 +step:1747 train loss:3.919976 +step:1748 train loss:3.856570 +step:1749 train loss:3.895185 +step:1750 validation loss:3.867073 +step:1750 train loss:3.926155 +step:1751 train loss:3.943424 +step:1752 train loss:3.912434 +step:1753 train loss:3.933040 +step:1754 train loss:3.929680 +step:1755 train loss:3.928795 +step:1756 train loss:3.949800 +step:1757 train loss:3.957221 +step:1758 train loss:3.873269 +step:1759 train loss:3.959111 +step:1760 train loss:3.909763 +step:1761 train loss:3.887281 +step:1762 train loss:3.884567 +step:1763 train loss:3.884494 +step:1764 train loss:4.177585 +step:1765 train loss:3.889483 +step:1766 train loss:3.983675 +step:1767 train loss:3.890135 +step:1768 train loss:3.872745 +step:1769 train loss:3.909276 +step:1770 train loss:3.907315 +step:1771 train loss:3.880555 +step:1772 train loss:3.986678 +step:1773 train loss:3.911777 +step:1774 train loss:3.918169 +step:1775 train loss:4.030872 +step:1776 train loss:3.919181 +step:1777 train loss:3.910366 +step:1778 train loss:3.963413 +step:1779 train loss:3.890362 +step:1780 train loss:3.949601 +step:1781 train loss:3.952717 +step:1782 train loss:3.987227 +step:1783 train loss:3.906963 +step:1784 train loss:3.998364 +step:1785 train loss:3.901195 +step:1786 train loss:3.897156 +step:1787 train loss:3.892298 +step:1788 train loss:3.917690 +step:1789 train loss:3.869288 +step:1790 train loss:3.885492 +step:1791 train loss:3.980720 +step:1792 train loss:3.965893 +step:1793 train loss:3.892004 +step:1794 train loss:3.930164 +step:1795 train loss:3.879883 +step:1796 train loss:3.863035 +step:1797 train loss:3.925258 +step:1798 train loss:3.864885 +step:1799 train loss:3.920126 +step:1800 train loss:3.946142 +step:1801 train loss:3.940254 +step:1802 train loss:3.944791 +step:1803 train loss:3.936224 +step:1804 train loss:3.931881 +step:1805 train loss:3.921847 +step:1806 train loss:3.932183 +step:1807 train loss:3.858994 +step:1808 train loss:3.922626 +step:1809 train loss:3.909053 +step:1810 train loss:3.902959 +step:1811 train loss:3.915476 +step:1812 train loss:3.897366 +step:1813 train loss:3.909032 +step:1814 train loss:3.981922 +step:1815 train loss:3.914328 +step:1816 train loss:3.869581 +step:1817 train loss:3.862633 +step:1818 train loss:3.914893 +step:1819 train loss:3.889425 +step:1820 train loss:3.920233 +step:1821 train loss:3.887369 +step:1822 train loss:3.865178 +step:1823 train loss:3.865659 +step:1824 train loss:3.940500 +step:1825 train loss:3.851824 +step:1826 train loss:3.893796 +step:1827 train loss:3.858696 +step:1828 
train loss:3.910215 +step:1829 train loss:3.873883 +step:1830 train loss:4.073813 +step:1831 train loss:3.826777 +step:1832 train loss:3.877941 +step:1833 train loss:3.923093 +step:1834 train loss:3.875302 +step:1835 train loss:3.888096 +step:1836 train loss:3.925339 +step:1837 train loss:3.849185 +step:1838 train loss:3.948104 +step:1839 train loss:3.930864 +step:1840 train loss:3.892767 +step:1841 train loss:3.920822 +step:1842 train loss:3.891668 +step:1843 train loss:3.841427 +step:1844 train loss:3.909337 +step:1845 train loss:3.874864 +step:1846 train loss:3.929937 +step:1847 train loss:3.977450 +step:1848 train loss:3.780432 +step:1849 train loss:3.874370 +step:1850 train loss:3.849274 +step:1851 train loss:3.889286 +step:1852 train loss:3.875330 +step:1853 train loss:3.931311 +step:1854 train loss:3.892225 +step:1855 train loss:3.881124 +step:1856 train loss:3.880752 +step:1857 train loss:3.888221 +step:1858 train loss:3.934318 +step:1859 train loss:3.882878 +step:1860 train loss:3.860502 +step:1861 train loss:3.872037 +step:1862 train loss:3.911739 +step:1863 train loss:3.946864 +step:1864 train loss:3.845241 +step:1865 train loss:3.869089 +step:1866 train loss:3.868757 +step:1867 train loss:3.904610 +step:1868 train loss:3.949266 +step:1869 train loss:3.871254 +step:1870 train loss:3.895460 +step:1871 train loss:3.835409 +step:1872 train loss:3.907670 +step:1873 train loss:3.971598 +step:1874 train loss:3.835281 +step:1875 train loss:3.909229 +step:1876 train loss:3.875180 +step:1877 train loss:3.917448 +step:1878 train loss:3.834759 +step:1879 train loss:3.897715 +step:1880 train loss:3.974304 +step:1881 train loss:3.901636 +step:1882 train loss:3.918698 +step:1883 train loss:3.944450 +step:1884 train loss:3.959282 +step:1885 train loss:3.910915 +step:1886 train loss:3.843003 +step:1887 train loss:3.863178 +step:1888 train loss:3.873456 +step:1889 train loss:3.914801 +step:1890 train loss:3.903754 +step:1891 train loss:3.832089 +step:1892 train loss:3.931956 +step:1893 train loss:3.846275 +step:1894 train loss:3.867394 +step:1895 train loss:3.907796 +step:1896 train loss:3.947397 +step:1897 train loss:3.843892 +step:1898 train loss:3.889589 +step:1899 train loss:3.901380 +step:1900 train loss:3.850050 +step:1901 train loss:3.930813 +step:1902 train loss:3.914462 +step:1903 train loss:3.856811 +step:1904 train loss:3.845201 +step:1905 train loss:3.844455 +step:1906 train loss:3.899453 +step:1907 train loss:3.842569 +step:1908 train loss:3.859134 +step:1909 train loss:3.953456 +step:1910 train loss:3.842806 +step:1911 train loss:3.848362 +step:1912 train loss:3.901369 +step:1913 train loss:3.839405 +step:1914 train loss:3.875480 +step:1915 train loss:3.834002 +step:1916 train loss:3.886875 +step:1917 train loss:3.869367 +step:1918 train loss:3.779732 +step:1919 train loss:3.937899 +step:1920 train loss:4.038589 +step:1921 train loss:3.821387 +step:1922 train loss:3.806255 +step:1923 train loss:3.902279 +step:1924 train loss:3.942125 +step:1925 train loss:3.886910 +step:1926 train loss:3.821551 +step:1927 train loss:3.904436 +step:1928 train loss:3.816976 +step:1929 train loss:3.844354 +step:1930 train loss:3.915295 +step:1931 train loss:3.826951 +step:1932 train loss:3.877039 +step:1933 train loss:3.872351 +step:1934 train loss:3.945181 +step:1935 train loss:3.893083 +step:1936 train loss:3.865658 +step:1937 train loss:3.804737 +step:1938 train loss:4.174925 +step:1939 train loss:3.929345 +step:1940 train loss:3.923155 +step:1941 train loss:3.915721 +step:1942 train loss:3.898836 
+step:1943 train loss:3.897877 +step:1944 train loss:3.857553 +step:1945 train loss:3.857137 +step:1946 train loss:3.880750 +step:1947 train loss:3.900231 +step:1948 train loss:3.809630 +step:1949 train loss:3.914328 +step:1950 train loss:3.850899 +step:1951 train loss:3.877063 +step:1952 train loss:3.897330 +step:1953 train loss:3.832632 +step:1954 train loss:3.869037 +step:1955 train loss:3.820569 +step:1956 train loss:3.905001 +step:1957 train loss:3.928040 +step:1958 train loss:3.944287 +step:1959 train loss:3.815894 +step:1960 train loss:3.860380 +step:1961 train loss:3.890006 +step:1962 train loss:3.877743 +step:1963 train loss:3.858804 +step:1964 train loss:3.893097 +step:1965 train loss:3.928213 +step:1966 train loss:3.842240 +step:1967 train loss:3.896519 +step:1968 train loss:3.832539 +step:1969 train loss:3.853176 +step:1970 train loss:3.906286 +step:1971 train loss:3.810389 +step:1972 train loss:3.922371 +step:1973 train loss:3.816002 +step:1974 train loss:3.863130 +step:1975 train loss:3.822042 +step:1976 train loss:3.849049 +step:1977 train loss:3.891393 +step:1978 train loss:3.834322 +step:1979 train loss:3.816353 +step:1980 train loss:3.851084 +step:1981 train loss:3.830316 +step:1982 train loss:3.911295 +step:1983 train loss:3.853114 +step:1984 train loss:3.893828 +step:1985 train loss:3.879487 +step:1986 train loss:3.873285 +step:1987 train loss:3.824087 +step:1988 train loss:3.851105 +step:1989 train loss:3.995487 +step:1990 train loss:3.830306 +step:1991 train loss:3.823883 +step:1992 train loss:3.841997 +step:1993 train loss:3.911528 +step:1994 train loss:3.895805 +step:1995 train loss:3.885743 +step:1996 train loss:3.908029 +step:1997 train loss:3.910366 +step:1998 train loss:3.859878 +step:1999 train loss:3.977120 +step:2000 validation loss:3.842303 total_sharp:5.7375e-03 L1_sharp:6.4127e-03 L2_sharp:5.3672e-03 L3_sharp:5.0040e-03 L4_sharp:1.8187e-03 L5_sharp:8.5333e-04 L6_sharp:5.8374e-04 L7_sharp:8.2502e-04 L8_sharp:1.0414e-03 L9_sharp:1.1134e-03 L10_sharp:1.0030e-03 L11_sharp:1.1132e-03 L12_sharp:1.3867e-03 total_fnorm:4.5762e+00 total_l1_linf:4.0645e+04 total_spectral:4.5762e+00 L1_fnorm:1.0124e+00 L2_fnorm:1.1266e+00 L3_fnorm:1.0720e+00 L4_fnorm:1.1610e+00 L5_fnorm:1.1821e+00 L6_fnorm:1.1784e+00 L7_fnorm:1.1332e+00 L8_fnorm:1.0815e+00 L9_fnorm:1.0421e+00 L10_fnorm:1.0062e+00 L11_fnorm:1.0079e+00 L12_fnorm:1.0050e+00 L1_l1linf:1.4961e+00 L2_l1linf:1.5258e+00 L3_l1linf:1.1865e+00 L4_l1linf:1.1482e+00 L5_l1linf:1.1736e+00 L6_l1linf:1.1178e+00 L7_l1linf:1.1166e+00 L8_l1linf:1.1765e+00 L9_l1linf:1.3704e+00 L10_l1linf:1.1605e+00 L11_l1linf:1.2735e+00 L12_l1linf:1.1168e+00 L1_spectral:2.4791e-01 L2_spectral:2.6225e-01 L3_spectral:2.4792e-01 L4_spectral:2.1040e-01 L5_spectral:2.3943e-01 L6_spectral:2.4586e-01 L7_spectral:1.9892e-01 L8_spectral:1.3759e-01 L9_spectral:1.5335e-01 L10_spectral:1.4322e-01 L11_spectral:1.7057e-01 L12_spectral:1.5658e-01 ip_v_neg_g:7.7730e-02 cos_v_neg_g:2.0558e-02 v_norm:4.5762e+00 g_norm:8.2625e-01 hv_norm:8.9543e-01 cos_v_hv:2.9322e-02 hg_norm:5.3506e+00 cos_g_hg:6.2255e-01 v_par:3.2346e-03 v_perp:4.5762e+00 L1_cos_v_neg_g:4.0801e-02 L1_v_norm:1.0124e+00 L2_cos_v_neg_g:5.3081e-02 L2_v_norm:1.1266e+00 L3_cos_v_neg_g:3.3481e-02 L3_v_norm:1.0720e+00 L4_cos_v_neg_g:3.1627e-02 L4_v_norm:1.1610e+00 L5_cos_v_neg_g:2.4723e-02 L5_v_norm:1.1821e+00 L6_cos_v_neg_g:2.2832e-02 L6_v_norm:1.1784e+00 L7_cos_v_neg_g:2.1131e-02 L7_v_norm:1.1332e+00 L8_cos_v_neg_g:2.1900e-02 L8_v_norm:1.0815e+00 L9_cos_v_neg_g:2.0100e-02 L9_v_norm:1.0421e+00 
L10_cos_v_neg_g:2.1479e-02 L10_v_norm:1.0062e+00 L11_cos_v_neg_g:2.1157e-02 L11_v_norm:1.0079e+00 L12_cos_v_neg_g:1.8375e-02 L12_v_norm:1.0050e+00 +step:2000 train loss:3.945957 +step:2001 train loss:3.869373 +step:2002 train loss:3.968444 +step:2003 train loss:4.008357 +step:2004 train loss:3.869128 +step:2005 train loss:3.965876 +step:2006 train loss:3.849975 +step:2007 train loss:3.918580 +step:2008 train loss:3.866044 +step:2009 train loss:3.859124 +step:2010 train loss:3.985636 +step:2011 train loss:3.838918 +step:2012 train loss:3.863776 +step:2013 train loss:3.873637 +step:2014 train loss:3.774077 +step:2015 train loss:3.891419 +step:2016 train loss:3.872458 +step:2017 train loss:3.870893 +step:2018 train loss:3.842650 +step:2019 train loss:3.868924 +step:2020 train loss:3.878095 +step:2021 train loss:3.839185 +step:2022 train loss:3.880768 +step:2023 train loss:3.857529 +step:2024 train loss:3.907546 +step:2025 train loss:3.849077 +step:2026 train loss:3.824928 +step:2027 train loss:3.857095 +step:2028 train loss:3.786545 +step:2029 train loss:3.820098 +step:2030 train loss:3.819731 +step:2031 train loss:3.782924 +step:2032 train loss:3.836550 +step:2033 train loss:3.831381 +step:2034 train loss:3.829502 +step:2035 train loss:3.867623 +step:2036 train loss:3.860281 +step:2037 train loss:3.843472 +step:2038 train loss:3.841371 +step:2039 train loss:3.833745 +step:2040 train loss:3.862991 +step:2041 train loss:3.866183 +step:2042 train loss:3.798521 +step:2043 train loss:3.950825 +step:2044 train loss:3.817295 +step:2045 train loss:3.835759 +step:2046 train loss:3.845743 +step:2047 train loss:3.825926 +step:2048 train loss:3.867414 +step:2049 train loss:3.824428 +step:2050 train loss:3.843108 +step:2051 train loss:3.809898 +step:2052 train loss:3.862671 +step:2053 train loss:3.860013 +step:2054 train loss:3.835119 +step:2055 train loss:3.835144 +step:2056 train loss:3.877617 +step:2057 train loss:3.884889 +step:2058 train loss:3.850432 +step:2059 train loss:3.932896 +step:2060 train loss:3.874695 +step:2061 train loss:3.830991 +step:2062 train loss:3.856441 +step:2063 train loss:3.761378 +step:2064 train loss:3.876184 +step:2065 train loss:3.883236 +step:2066 train loss:3.742831 +step:2067 train loss:3.788194 +step:2068 train loss:3.898355 +step:2069 train loss:3.831200 +step:2070 train loss:3.832894 +step:2071 train loss:3.874975 +step:2072 train loss:3.807717 +step:2073 train loss:3.858428 +step:2074 train loss:3.834697 +step:2075 train loss:3.926169 +step:2076 train loss:3.870040 +step:2077 train loss:3.885744 +step:2078 train loss:3.840994 +step:2079 train loss:3.987135 +step:2080 train loss:3.803924 +step:2081 train loss:3.913458 +step:2082 train loss:3.843888 +step:2083 train loss:3.832254 +step:2084 train loss:3.809452 +step:2085 train loss:3.852971 +step:2086 train loss:3.864134 +step:2087 train loss:3.906766 +step:2088 train loss:3.778744 +step:2089 train loss:3.808794 +step:2090 train loss:3.842393 +step:2091 train loss:3.858141 +step:2092 train loss:3.834948 +step:2093 train loss:3.822969 +step:2094 train loss:3.864580 +step:2095 train loss:3.806281 +step:2096 train loss:3.797259 +step:2097 train loss:3.830846 +step:2098 train loss:3.828814 +step:2099 train loss:3.810430 +step:2100 train loss:3.877164 +step:2101 train loss:3.870111 +step:2102 train loss:3.833520 +step:2103 train loss:3.851157 +step:2104 train loss:3.828091 +step:2105 train loss:3.834580 +step:2106 train loss:3.832440 +step:2107 train loss:3.898785 +step:2108 train loss:3.819519 +step:2109 train 
loss:3.781508 +step:2110 train loss:3.875808 +step:2111 train loss:3.821354 +step:2112 train loss:3.882833 +step:2113 train loss:3.819440 +step:2114 train loss:3.821610 +step:2115 train loss:3.874870 +step:2116 train loss:3.818927 +step:2117 train loss:3.829988 +step:2118 train loss:3.822219 +step:2119 train loss:3.764255 +step:2120 train loss:3.847350 +step:2121 train loss:3.839406 +step:2122 train loss:3.849681 +step:2123 train loss:3.898326 +step:2124 train loss:3.904444 +step:2125 train loss:3.807059 +step:2126 train loss:3.816505 +step:2127 train loss:3.808039 +step:2128 train loss:3.804163 +step:2129 train loss:3.829855 +step:2130 train loss:3.830731 +step:2131 train loss:3.850471 +step:2132 train loss:3.782079 +step:2133 train loss:3.889817 +step:2134 train loss:3.842095 +step:2135 train loss:3.798886 +step:2136 train loss:3.891041 +step:2137 train loss:3.853730 +step:2138 train loss:3.811762 +step:2139 train loss:3.812606 +step:2140 train loss:3.815725 +step:2141 train loss:3.863003 +step:2142 train loss:3.832434 +step:2143 train loss:3.755186 +step:2144 train loss:3.860903 +step:2145 train loss:3.828450 +step:2146 train loss:3.868514 +step:2147 train loss:3.971645 +step:2148 train loss:3.774173 +step:2149 train loss:3.788624 +step:2150 train loss:3.810489 +step:2151 train loss:3.847299 +step:2152 train loss:3.840158 +step:2153 train loss:3.881208 +step:2154 train loss:3.801203 +step:2155 train loss:3.882890 +step:2156 train loss:3.805074 +step:2157 train loss:3.877450 +step:2158 train loss:3.916676 +step:2159 train loss:3.842736 +step:2160 train loss:3.913814 +step:2161 train loss:3.814305 +step:2162 train loss:3.823721 +step:2163 train loss:3.796995 +step:2164 train loss:3.820118 +step:2165 train loss:3.799535 +step:2166 train loss:3.914054 +step:2167 train loss:3.824930 +step:2168 train loss:3.835878 +step:2169 train loss:3.787427 +step:2170 train loss:3.930624 +step:2171 train loss:3.889852 +step:2172 train loss:3.823710 +step:2173 train loss:3.817222 +step:2174 train loss:3.882210 +step:2175 train loss:3.808030 +step:2176 train loss:3.890054 +step:2177 train loss:3.857662 +step:2178 train loss:3.784865 +step:2179 train loss:3.855355 +step:2180 train loss:3.871929 +step:2181 train loss:3.796540 +step:2182 train loss:3.848413 +step:2183 train loss:3.842400 +step:2184 train loss:3.795019 +step:2185 train loss:3.778438 +step:2186 train loss:3.814664 +step:2187 train loss:3.827648 +step:2188 train loss:3.882385 +step:2189 train loss:3.770224 +step:2190 train loss:3.820904 +step:2191 train loss:3.879831 +step:2192 train loss:3.800976 +step:2193 train loss:3.771619 +step:2194 train loss:3.778282 +step:2195 train loss:3.800074 +step:2196 train loss:3.804843 +step:2197 train loss:3.786678 +step:2198 train loss:3.813931 +step:2199 train loss:3.879284 +step:2200 train loss:3.811103 +step:2201 train loss:3.819774 +step:2202 train loss:3.777337 +step:2203 train loss:3.799861 +step:2204 train loss:3.832715 +step:2205 train loss:3.812314 +step:2206 train loss:3.810627 +step:2207 train loss:3.811534 +step:2208 train loss:3.787497 +step:2209 train loss:4.070046 +step:2210 train loss:3.840256 +step:2211 train loss:3.838235 +step:2212 train loss:3.834654 +step:2213 train loss:3.906299 +step:2214 train loss:3.913016 +step:2215 train loss:3.840322 +step:2216 train loss:3.801390 +step:2217 train loss:3.826557 +step:2218 train loss:3.825957 +step:2219 train loss:3.890866 +step:2220 train loss:3.808149 +step:2221 train loss:3.847373 +step:2222 train loss:3.853458 +step:2223 train loss:3.889653 
+step:2224 train loss:3.859775 +step:2225 train loss:3.797159 +step:2226 train loss:3.862678 +step:2227 train loss:3.857278 +step:2228 train loss:3.854805 +step:2229 train loss:3.813366 +step:2230 train loss:3.924013 +step:2231 train loss:3.843076 +step:2232 train loss:3.840671 +step:2233 train loss:3.879791 +step:2234 train loss:3.779065 +step:2235 train loss:3.864180 +step:2236 train loss:3.801188 +step:2237 train loss:3.940587 +step:2238 train loss:3.736574 +step:2239 train loss:3.816014 +step:2240 train loss:3.830631 +step:2241 train loss:3.742720 +step:2242 train loss:3.886653 +step:2243 train loss:3.917697 +step:2244 train loss:3.795355 +step:2245 train loss:3.798092 +step:2246 train loss:3.762784 +step:2247 train loss:3.767768 +step:2248 train loss:3.826683 +step:2249 train loss:3.801260 +step:2250 validation loss:3.755086 +step:2250 train loss:3.821883 +step:2251 train loss:3.778413 +step:2252 train loss:3.780027 +step:2253 train loss:3.809408 +step:2254 train loss:3.814394 +step:2255 train loss:3.789724 +step:2256 train loss:3.826544 +step:2257 train loss:3.814239 +step:2258 train loss:3.808594 +step:2259 train loss:3.822307 +step:2260 train loss:3.775074 +step:2261 train loss:3.852762 +step:2262 train loss:3.876703 +step:2263 train loss:3.827904 +step:2264 train loss:3.942658 +step:2265 train loss:3.787883 +step:2266 train loss:3.831257 +step:2267 train loss:3.792646 +step:2268 train loss:3.794718 +step:2269 train loss:3.799067 +step:2270 train loss:3.787164 +step:2271 train loss:3.803351 +step:2272 train loss:3.837744 +step:2273 train loss:3.758550 +step:2274 train loss:3.792473 +step:2275 train loss:3.745202 +step:2276 train loss:3.820931 +step:2277 train loss:3.834137 +step:2278 train loss:3.810886 +step:2279 train loss:3.798991 +step:2280 train loss:3.708148 +step:2281 train loss:3.852761 +step:2282 train loss:3.782053 +step:2283 train loss:3.764817 +step:2284 train loss:3.779395 +step:2285 train loss:3.837147 +step:2286 train loss:3.793911 +step:2287 train loss:3.831724 +step:2288 train loss:3.802518 +step:2289 train loss:3.801769 +step:2290 train loss:3.809371 +step:2291 train loss:3.795877 +step:2292 train loss:3.839449 +step:2293 train loss:3.819055 +step:2294 train loss:3.813174 +step:2295 train loss:3.868972 +step:2296 train loss:3.804770 +step:2297 train loss:3.778116 +step:2298 train loss:3.835247 +step:2299 train loss:3.808601 +step:2300 train loss:3.724600 +step:2301 train loss:3.819943 +step:2302 train loss:3.835139 +step:2303 train loss:3.803516 +step:2304 train loss:3.790253 +step:2305 train loss:3.833536 +step:2306 train loss:3.825669 +step:2307 train loss:3.806776 +step:2308 train loss:3.825042 +step:2309 train loss:3.779653 +step:2310 train loss:3.768658 +step:2311 train loss:3.754688 +step:2312 train loss:3.822871 +step:2313 train loss:3.736463 +step:2314 train loss:3.811400 +step:2315 train loss:3.829808 +step:2316 train loss:3.864677 +step:2317 train loss:3.732567 +step:2318 train loss:3.781643 +step:2319 train loss:3.832206 +step:2320 train loss:3.798473 +step:2321 train loss:3.772144 +step:2322 train loss:3.787516 +step:2323 train loss:3.783786 +step:2324 train loss:3.813188 +step:2325 train loss:3.751282 +step:2326 train loss:3.778515 +step:2327 train loss:3.892897 +step:2328 train loss:3.843508 +step:2329 train loss:3.804114 +step:2330 train loss:3.761337 +step:2331 train loss:3.802447 +step:2332 train loss:3.725614 +step:2333 train loss:3.789483 +step:2334 train loss:3.771412 +step:2335 train loss:3.750740 +step:2336 train loss:4.004213 +step:2337 
train loss:3.780597 +step:2338 train loss:3.821168 +step:2339 train loss:3.820135 +step:2340 train loss:3.840518 +step:2341 train loss:3.826563 +step:2342 train loss:3.778815 +step:2343 train loss:3.798901 +step:2344 train loss:3.838051 +step:2345 train loss:3.793508 +step:2346 train loss:3.822723 +step:2347 train loss:3.747415 +step:2348 train loss:3.805742 +step:2349 train loss:3.753952 +step:2350 train loss:3.812298 +step:2351 train loss:3.816773 +step:2352 train loss:3.823383 +step:2353 train loss:3.782378 +step:2354 train loss:3.831213 +step:2355 train loss:3.819866 +step:2356 train loss:3.857043 +step:2357 train loss:3.759319 +step:2358 train loss:3.778663 +step:2359 train loss:3.798893 +step:2360 train loss:3.822858 +step:2361 train loss:3.858773 +step:2362 train loss:3.685245 +step:2363 train loss:3.882177 +step:2364 train loss:3.828664 +step:2365 train loss:3.797811 +step:2366 train loss:3.748218 +step:2367 train loss:3.814359 +step:2368 train loss:3.802007 +step:2369 train loss:3.793365 +step:2370 train loss:3.807176 +step:2371 train loss:3.862398 +step:2372 train loss:3.724428 +step:2373 train loss:3.870826 +step:2374 train loss:3.849127 +step:2375 train loss:3.831320 +step:2376 train loss:3.821002 +step:2377 train loss:3.764733 +step:2378 train loss:3.811376 +step:2379 train loss:3.795345 +step:2380 train loss:3.851613 +step:2381 train loss:3.949494 +step:2382 train loss:3.733565 +step:2383 train loss:3.780711 +step:2384 train loss:3.812124 +step:2385 train loss:3.715046 +step:2386 train loss:3.868936 +step:2387 train loss:3.747969 +step:2388 train loss:3.797807 +step:2389 train loss:3.818351 +step:2390 train loss:3.771775 +step:2391 train loss:3.796291 +step:2392 train loss:3.820369 +step:2393 train loss:3.775465 +step:2394 train loss:3.801406 +step:2395 train loss:3.789164 +step:2396 train loss:3.793256 +step:2397 train loss:3.770226 +step:2398 train loss:3.826423 +step:2399 train loss:3.786685 +step:2400 train loss:3.767272 +step:2401 train loss:3.808605 +step:2402 train loss:3.760797 +step:2403 train loss:3.815772 +step:2404 train loss:3.772374 +step:2405 train loss:3.774430 +step:2406 train loss:3.803017 +step:2407 train loss:3.744277 +step:2408 train loss:3.787351 +step:2409 train loss:3.776512 +step:2410 train loss:3.777553 +step:2411 train loss:3.854525 +step:2412 train loss:3.838686 +step:2413 train loss:3.876672 +step:2414 train loss:3.768220 +step:2415 train loss:3.756518 +step:2416 train loss:3.772122 +step:2417 train loss:3.809467 +step:2418 train loss:3.829888 +step:2419 train loss:3.757754 +step:2420 train loss:3.776385 +step:2421 train loss:3.804604 +step:2422 train loss:3.856495 +step:2423 train loss:3.792075 +step:2424 train loss:3.761922 +step:2425 train loss:3.823227 +step:2426 train loss:3.768106 +step:2427 train loss:3.792964 +step:2428 train loss:3.863452 +step:2429 train loss:3.818751 +step:2430 train loss:3.909961 +step:2431 train loss:3.825242 +step:2432 train loss:3.789761 +step:2433 train loss:3.765704 +step:2434 train loss:3.752889 +step:2435 train loss:3.811327 +step:2436 train loss:3.767916 +step:2437 train loss:3.797822 +step:2438 train loss:3.842589 +step:2439 train loss:3.823964 +step:2440 train loss:3.768096 +step:2441 train loss:3.803119 +step:2442 train loss:3.796847 +step:2443 train loss:3.757462 +step:2444 train loss:3.792144 +step:2445 train loss:3.791234 +step:2446 train loss:3.759615 +step:2447 train loss:3.741336 +step:2448 train loss:3.795766 +step:2449 train loss:3.821390 +step:2450 train loss:3.780846 +step:2451 train loss:3.708527 
+step:2452 train loss:3.803634 +step:2453 train loss:3.775120 +step:2454 train loss:3.770671 +step:2455 train loss:3.820452 +step:2456 train loss:3.775287 +step:2457 train loss:3.838422 +step:2458 train loss:3.813662 +step:2459 train loss:3.787907 +step:2460 train loss:3.798922 +step:2461 train loss:3.831867 +step:2462 train loss:3.801430 +step:2463 train loss:3.778819 +step:2464 train loss:3.792341 +step:2465 train loss:3.870654 +step:2466 train loss:3.954520 +step:2467 train loss:3.857227 +step:2468 train loss:3.750504 +step:2469 train loss:3.824320 +step:2470 train loss:3.870155 +step:2471 train loss:3.871806 +step:2472 train loss:3.849234 +step:2473 train loss:3.792159 +step:2474 train loss:3.764183 +step:2475 train loss:3.813097 +step:2476 train loss:3.888896 +step:2477 train loss:3.802332 +step:2478 train loss:3.756746 +step:2479 train loss:3.796129 +step:2480 train loss:3.787486 +step:2481 train loss:3.982044 +step:2482 train loss:3.787990 +step:2483 train loss:3.815249 +step:2484 train loss:3.768017 +step:2485 train loss:3.761609 +step:2486 train loss:3.789945 +step:2487 train loss:3.825500 +step:2488 train loss:3.736660 +step:2489 train loss:3.847097 +step:2490 train loss:3.766718 +step:2491 train loss:3.781220 +step:2492 train loss:3.825361 +step:2493 train loss:3.856152 +step:2494 train loss:3.781054 +step:2495 train loss:3.812837 +step:2496 train loss:3.787906 +step:2497 train loss:3.807935 +step:2498 train loss:3.810832 +step:2499 train loss:3.806058 +step:2500 validation loss:3.721012 total_sharp:2.6513e-03 L1_sharp:4.1262e-03 L2_sharp:1.7717e-04 L3_sharp:5.1578e-04 L4_sharp:4.1111e-04 L5_sharp:2.8183e-04 L6_sharp:2.6229e-04 L7_sharp:3.5501e-04 L8_sharp:6.0496e-04 L9_sharp:6.0992e-04 L10_sharp:8.4605e-04 L11_sharp:8.8357e-04 L12_sharp:1.5031e-03 total_fnorm:4.3010e+00 total_l1_linf:3.8007e+04 total_spectral:4.3010e+00 L1_fnorm:9.4587e-01 L2_fnorm:9.4150e-01 L3_fnorm:9.3569e-01 L4_fnorm:9.8140e-01 L5_fnorm:1.0143e+00 L6_fnorm:1.0260e+00 L7_fnorm:1.0171e+00 L8_fnorm:1.0110e+00 L9_fnorm:1.0100e+00 L10_fnorm:9.9374e-01 L11_fnorm:9.8776e-01 L12_fnorm:9.7711e-01 L1_l1linf:1.2785e+00 L2_l1linf:1.0785e+00 L3_l1linf:1.0682e+00 L4_l1linf:1.0607e+00 L5_l1linf:1.0924e+00 L6_l1linf:1.0735e+00 L7_l1linf:1.0632e+00 L8_l1linf:1.0344e+00 L9_l1linf:1.0598e+00 L10_l1linf:1.0597e+00 L11_l1linf:1.1730e+00 L12_l1linf:1.1679e+00 L1_spectral:1.5588e-01 L2_spectral:1.4027e-01 L3_spectral:1.2767e-01 L4_spectral:1.1416e-01 L5_spectral:1.0201e-01 L6_spectral:1.0438e-01 L7_spectral:9.3649e-02 L8_spectral:9.1653e-02 L9_spectral:1.1549e-01 L10_spectral:1.3765e-01 L11_spectral:1.4552e-01 L12_spectral:1.6009e-01 ip_v_neg_g:2.5333e-02 cos_v_neg_g:8.9360e-03 v_norm:4.3010e+00 g_norm:6.5913e-01 hv_norm:4.3081e-01 cos_v_hv:2.6469e-02 hg_norm:2.4228e+00 cos_g_hg:5.6877e-01 v_par:1.7118e-03 v_perp:4.3010e+00 L1_cos_v_neg_g:1.9279e-02 L1_v_norm:9.4587e-01 L2_cos_v_neg_g:8.3702e-03 L2_v_norm:9.4150e-01 L3_cos_v_neg_g:1.1159e-02 L3_v_norm:9.3569e-01 L4_cos_v_neg_g:1.0972e-02 L4_v_norm:9.8140e-01 L5_cos_v_neg_g:9.1690e-03 L5_v_norm:1.0143e+00 L6_cos_v_neg_g:9.5080e-03 L6_v_norm:1.0260e+00 L7_cos_v_neg_g:1.0579e-02 L7_v_norm:1.0171e+00 L8_cos_v_neg_g:1.0507e-02 L8_v_norm:1.0110e+00 L9_cos_v_neg_g:1.0625e-02 L9_v_norm:1.0100e+00 L10_cos_v_neg_g:1.4866e-02 L10_v_norm:9.9374e-01 L11_cos_v_neg_g:1.6545e-02 L11_v_norm:9.8776e-01 L12_cos_v_neg_g:1.9476e-02 L12_v_norm:9.7711e-01 +step:2500 train loss:3.750713 +step:2501 train loss:3.817953 +step:2502 train loss:3.804202 +step:2503 train loss:3.726210 +step:2504 train 
loss:3.765494 +step:2505 train loss:3.789061 +step:2506 train loss:3.751336 +step:2507 train loss:3.783172 +step:2508 train loss:3.736434 +step:2509 train loss:3.751237 +step:2510 train loss:3.743334 +step:2511 train loss:3.784568 +step:2512 train loss:3.834679 +step:2513 train loss:3.782455 +step:2514 train loss:3.768317 +step:2515 train loss:3.914719 +step:2516 train loss:3.791927 +step:2517 train loss:3.858254 +step:2518 train loss:3.823106 +step:2519 train loss:3.802551 +step:2520 train loss:3.821969 +step:2521 train loss:3.807856 +step:2522 train loss:3.827060 +step:2523 train loss:3.749003 +step:2524 train loss:3.807189 +step:2525 train loss:3.790379 +step:2526 train loss:3.841362 +step:2527 train loss:3.830028 +step:2528 train loss:3.806695 +step:2529 train loss:3.825923 +step:2530 train loss:3.805034 +step:2531 train loss:3.747949 +step:2532 train loss:3.845352 +step:2533 train loss:3.737275 +step:2534 train loss:3.834585 +step:2535 train loss:3.782958 +step:2536 train loss:3.704304 +step:2537 train loss:3.823388 +step:2538 train loss:3.799618 +step:2539 train loss:3.820095 +step:2540 train loss:3.752696 +step:2541 train loss:3.779616 +step:2542 train loss:3.790158 +step:2543 train loss:3.778161 +step:2544 train loss:3.765162 +step:2545 train loss:3.754810 +step:2546 train loss:3.717214 +step:2547 train loss:3.765813 +step:2548 train loss:3.785249 +step:2549 train loss:3.789771 +step:2550 train loss:3.929220 +step:2551 train loss:4.002908 +step:2552 train loss:3.729017 +step:2553 train loss:3.761946 +step:2554 train loss:3.908748 +step:2555 train loss:3.796757 +step:2556 train loss:3.719240 +step:2557 train loss:3.816827 +step:2558 train loss:3.806692 +step:2559 train loss:3.761275 +step:2560 train loss:3.753485 +step:2561 train loss:3.841994 +step:2562 train loss:3.797439 +step:2563 train loss:3.733119 +step:2564 train loss:3.803988 +step:2565 train loss:3.785968 +step:2566 train loss:3.768842 +step:2567 train loss:3.758105 +step:2568 train loss:3.806877 +step:2569 train loss:3.810829 +step:2570 train loss:3.763700 +step:2571 train loss:3.848710 +step:2572 train loss:3.804348 +step:2573 train loss:3.734543 +step:2574 train loss:3.784064 +step:2575 train loss:3.828020 +step:2576 train loss:3.776541 +step:2577 train loss:3.745252 +step:2578 train loss:3.781210 +step:2579 train loss:3.759116 +step:2580 train loss:3.729486 +step:2581 train loss:3.742234 +step:2582 train loss:3.751785 +step:2583 train loss:3.775715 +step:2584 train loss:3.792275 +step:2585 train loss:3.754043 +step:2586 train loss:3.775098 +step:2587 train loss:3.708926 +step:2588 train loss:3.740244 +step:2589 train loss:3.815166 +step:2590 train loss:3.737803 +step:2591 train loss:3.796209 +step:2592 train loss:3.847932 +step:2593 train loss:3.807571 +step:2594 train loss:3.765277 +step:2595 train loss:3.771263 +step:2596 train loss:3.809855 +step:2597 train loss:3.696550 +step:2598 train loss:3.848500 +step:2599 train loss:3.801002 +step:2600 train loss:3.829513 +step:2601 train loss:3.771765 +step:2602 train loss:3.794975 +step:2603 train loss:3.788092 +step:2604 train loss:3.714902 +step:2605 train loss:3.843954 +step:2606 train loss:3.792063 +step:2607 train loss:3.749638 +step:2608 train loss:3.726331 +step:2609 train loss:3.749001 +step:2610 train loss:3.771905 +step:2611 train loss:3.816181 +step:2612 train loss:3.776133 +step:2613 train loss:3.751443 +step:2614 train loss:3.744026 +step:2615 train loss:3.750792 +step:2616 train loss:3.822899 +step:2617 train loss:3.785520 +step:2618 train loss:3.745932 
+step:2619 train loss:3.761733 +step:2620 train loss:3.750391 +step:2621 train loss:3.763350 +step:2622 train loss:3.837170 +step:2623 train loss:3.707396 +step:2624 train loss:3.727022 +step:2625 train loss:3.795323 +step:2626 train loss:3.788367 +step:2627 train loss:3.769470 +step:2628 train loss:3.817270 +step:2629 train loss:3.766376 +step:2630 train loss:3.761703 +step:2631 train loss:3.789157 +step:2632 train loss:3.759291 +step:2633 train loss:3.743602 +step:2634 train loss:3.788074 +step:2635 train loss:3.770444 +step:2636 train loss:3.821447 +step:2637 train loss:3.775146 +step:2638 train loss:3.761420 +step:2639 train loss:3.811571 +step:2640 train loss:3.730068 +step:2641 train loss:3.785905 +step:2642 train loss:3.705786 +step:2643 train loss:3.707220 +step:2644 train loss:3.797281 +step:2645 train loss:3.736479 +step:2646 train loss:3.763596 +step:2647 train loss:3.787346 +step:2648 train loss:3.820951 +step:2649 train loss:3.733775 +step:2650 train loss:3.721897 +step:2651 train loss:3.765090 +step:2652 train loss:3.736287 +step:2653 train loss:3.803416 +step:2654 train loss:3.764736 +step:2655 train loss:3.749933 +step:2656 train loss:3.772797 +step:2657 train loss:3.799130 +step:2658 train loss:3.813761 +step:2659 train loss:3.786737 +step:2660 train loss:3.771712 +step:2661 train loss:3.818295 +step:2662 train loss:3.793682 +step:2663 train loss:3.767239 +step:2664 train loss:3.778119 +step:2665 train loss:3.727690 +step:2666 train loss:3.756767 +step:2667 train loss:3.764369 +step:2668 train loss:3.739463 +step:2669 train loss:3.760404 +step:2670 train loss:3.780617 +step:2671 train loss:3.764533 +step:2672 train loss:3.786599 +step:2673 train loss:3.725442 +step:2674 train loss:3.813177 +step:2675 train loss:3.786989 +step:2676 train loss:3.805512 +step:2677 train loss:3.785632 +step:2678 train loss:3.766508 +step:2679 train loss:3.749001 +step:2680 train loss:3.732171 +step:2681 train loss:3.702675 +step:2682 train loss:3.792198 +step:2683 train loss:3.762835 +step:2684 train loss:3.789918 +step:2685 train loss:3.706938 +step:2686 train loss:3.722108 +step:2687 train loss:3.804181 +step:2688 train loss:3.817055 +step:2689 train loss:3.718453 +step:2690 train loss:3.803768 +step:2691 train loss:3.774590 +step:2692 train loss:3.799249 +step:2693 train loss:3.852944 +step:2694 train loss:3.751779 +step:2695 train loss:3.767437 +step:2696 train loss:3.772756 +step:2697 train loss:3.766338 +step:2698 train loss:3.776148 +step:2699 train loss:3.793112 +step:2700 train loss:3.761817 +step:2701 train loss:3.833627 +step:2702 train loss:3.766410 +step:2703 train loss:3.734710 +step:2704 train loss:3.813677 +step:2705 train loss:3.810411 +step:2706 train loss:3.746574 +step:2707 train loss:3.703815 +step:2708 train loss:3.801563 +step:2709 train loss:3.775579 +step:2710 train loss:3.782499 +step:2711 train loss:3.748553 +step:2712 train loss:3.812265 +step:2713 train loss:3.813361 +step:2714 train loss:3.752174 +step:2715 train loss:3.750098 +step:2716 train loss:3.817725 +step:2717 train loss:3.781527 +step:2718 train loss:3.776910 +step:2719 train loss:3.772316 +step:2720 train loss:3.735457 +step:2721 train loss:3.817753 +step:2722 train loss:3.746865 +step:2723 train loss:3.734342 +step:2724 train loss:3.755080 +step:2725 train loss:3.753069 +step:2726 train loss:3.726310 +step:2727 train loss:3.783482 +step:2728 train loss:3.720086 +step:2729 train loss:3.851252 +step:2730 train loss:3.795347 +step:2731 train loss:3.833358 +step:2732 train loss:3.745858 +step:2733 train 
loss:3.742624 +step:2734 train loss:3.788370 +step:2735 train loss:3.787450 +step:2736 train loss:3.710478 +step:2737 train loss:3.766366 +step:2738 train loss:3.823407 +step:2739 train loss:3.743036 +step:2740 train loss:3.745640 +step:2741 train loss:3.732131 +step:2742 train loss:3.659555 +step:2743 train loss:3.768322 +step:2744 train loss:3.793958 +step:2745 train loss:3.743758 +step:2746 train loss:3.755025 +step:2747 train loss:3.742548 +step:2748 train loss:3.700714 +step:2749 train loss:3.768571 +step:2750 validation loss:3.691316 +step:2750 train loss:3.776128 +step:2751 train loss:3.799213 +step:2752 train loss:3.788941 +step:2753 train loss:3.778457 +step:2754 train loss:3.713531 +step:2755 train loss:3.783601 +step:2756 train loss:3.757996 +step:2757 train loss:3.741808 +step:2758 train loss:3.767639 +step:2759 train loss:3.779202 +step:2760 train loss:3.690870 +step:2761 train loss:3.705662 +step:2762 train loss:3.722368 +step:2763 train loss:3.746198 +step:2764 train loss:3.685685 +step:2765 train loss:3.737770 +step:2766 train loss:3.827835 +step:2767 train loss:3.699612 +step:2768 train loss:3.764466 +step:2769 train loss:3.736607 +step:2770 train loss:3.753515 +step:2771 train loss:3.777501 +step:2772 train loss:3.744162 +step:2773 train loss:3.744905 +step:2774 train loss:3.734938 +step:2775 train loss:3.750969 +step:2776 train loss:3.704965 +step:2777 train loss:3.738601 +step:2778 train loss:3.747943 +step:2779 train loss:3.778613 +step:2780 train loss:3.745276 +step:2781 train loss:3.733604 +step:2782 train loss:3.718290 +step:2783 train loss:3.751046 +step:2784 train loss:3.758502 +step:2785 train loss:3.829756 +step:2786 train loss:3.794857 +step:2787 train loss:3.753046 +step:2788 train loss:3.751227 +step:2789 train loss:3.741460 +step:2790 train loss:3.682293 +step:2791 train loss:3.781969 +step:2792 train loss:3.772194 +step:2793 train loss:3.738642 +step:2794 train loss:3.747566 +step:2795 train loss:3.762068 +step:2796 train loss:3.757529 +step:2797 train loss:3.807357 +step:2798 train loss:3.795846 +step:2799 train loss:3.697608 +step:2800 train loss:3.745569 +step:2801 train loss:3.781457 +step:2802 train loss:3.809711 +step:2803 train loss:3.780275 +step:2804 train loss:3.713264 +step:2805 train loss:3.753614 +step:2806 train loss:3.747337 +step:2807 train loss:3.777091 +step:2808 train loss:3.713188 +step:2809 train loss:3.785776 +step:2810 train loss:3.771983 +step:2811 train loss:3.763358 +step:2812 train loss:3.813961 +step:2813 train loss:3.783022 +step:2814 train loss:3.768094 +step:2815 train loss:3.781228 +step:2816 train loss:3.784584 +step:2817 train loss:3.718349 +step:2818 train loss:3.823628 +step:2819 train loss:3.752453 +step:2820 train loss:3.749012 +step:2821 train loss:3.724354 +step:2822 train loss:3.765911 +step:2823 train loss:3.717741 +step:2824 train loss:3.615043 +step:2825 train loss:3.767214 +step:2826 train loss:3.764325 +step:2827 train loss:3.790676 +step:2828 train loss:3.787955 +step:2829 train loss:3.770769 +step:2830 train loss:3.804071 +step:2831 train loss:3.741091 +step:2832 train loss:3.712291 +step:2833 train loss:3.770026 +step:2834 train loss:3.719796 +step:2835 train loss:3.755764 +step:2836 train loss:3.758950 +step:2837 train loss:3.757075 +step:2838 train loss:3.700613 +step:2839 train loss:3.796599 +step:2840 train loss:3.757818 +step:2841 train loss:3.835322 +step:2842 train loss:3.780920 +step:2843 train loss:3.774117 +step:2844 train loss:3.805554 +step:2845 train loss:3.754002 +step:2846 train loss:3.703693 
+step:2847 train loss:3.795419 +step:2848 train loss:3.757729 +step:2849 train loss:3.749555 +step:2850 train loss:3.805779 +step:2851 train loss:3.757048 +step:2852 train loss:3.837440 +step:2853 train loss:3.752161 +step:2854 train loss:3.698681 +step:2855 train loss:3.777149 +step:2856 train loss:3.693044 +step:2857 train loss:3.801970 +step:2858 train loss:3.754297 +step:2859 train loss:3.749495 +step:2860 train loss:3.734189 +step:2861 train loss:3.715370 +step:2862 train loss:3.748482 +step:2863 train loss:3.728485 +step:2864 train loss:3.735887 +step:2865 train loss:3.810805 +step:2866 train loss:3.822388 +step:2867 train loss:3.762447 +step:2868 train loss:3.760815 +step:2869 train loss:3.719886 +step:2870 train loss:3.805837 +step:2871 train loss:3.807483 +step:2872 train loss:3.768262 +step:2873 train loss:3.773999 +step:2874 train loss:3.752627 +step:2875 train loss:3.704038 +step:2876 train loss:3.747732 +step:2877 train loss:3.732875 +step:2878 train loss:3.745724 +step:2879 train loss:3.714155 +step:2880 train loss:3.731272 +step:2881 train loss:3.725705 +step:2882 train loss:3.655973 +step:2883 train loss:3.741274 +step:2884 train loss:3.813372 +step:2885 train loss:3.706712 +step:2886 train loss:3.756141 +step:2887 train loss:3.780620 +step:2888 train loss:3.754791 +step:2889 train loss:3.734799 +step:2890 train loss:3.711965 +step:2891 train loss:3.748954 +step:2892 train loss:3.755310 +step:2893 train loss:3.735011 +step:2894 train loss:3.712746 +step:2895 train loss:3.757475 +step:2896 train loss:3.805048 +step:2897 train loss:3.784780 +step:2898 train loss:3.919087 +step:2899 train loss:3.674346 +step:2900 train loss:3.750046 +step:2901 train loss:3.698709 +step:2902 train loss:3.702306 +step:2903 train loss:3.718848 +step:2904 train loss:3.739349 +step:2905 train loss:3.802416 +step:2906 train loss:3.773285 +step:2907 train loss:3.946122 +step:2908 train loss:3.696493 +step:2909 train loss:3.776077 +step:2910 train loss:3.749377 +step:2911 train loss:3.775359 +step:2912 train loss:3.732436 +step:2913 train loss:3.768718 +step:2914 train loss:3.800077 +step:2915 train loss:3.791312 +step:2916 train loss:3.745228 +step:2917 train loss:3.783557 +step:2918 train loss:3.774052 +step:2919 train loss:3.716140 +step:2920 train loss:3.768854 +step:2921 train loss:3.724036 +step:2922 train loss:3.744407 +step:2923 train loss:3.812346 +step:2924 train loss:3.748942 +step:2925 train loss:3.700394 +step:2926 train loss:3.792347 +step:2927 train loss:3.702240 +step:2928 train loss:3.672951 +step:2929 train loss:3.692237 +step:2930 train loss:3.710195 +step:2931 train loss:3.865113 +step:2932 train loss:3.780434 +step:2933 train loss:3.745152 +step:2934 train loss:3.737395 +step:2935 train loss:3.758789 +step:2936 train loss:3.709772 +step:2937 train loss:3.728719 +step:2938 train loss:3.747690 +step:2939 train loss:3.819646 +step:2940 train loss:3.717030 +step:2941 train loss:3.754785 +step:2942 train loss:3.713871 +step:2943 train loss:3.991449 +step:2944 train loss:3.821410 +step:2945 train loss:3.778471 +step:2946 train loss:3.792924 +step:2947 train loss:3.749595 +step:2948 train loss:3.711812 +step:2949 train loss:3.804931 +step:2950 train loss:3.753173 +step:2951 train loss:3.655959 +step:2952 train loss:3.722468 +step:2953 train loss:3.634763 +step:2954 train loss:3.727068 +step:2955 train loss:3.800919 +step:2956 train loss:3.744848 +step:2957 train loss:3.744182 +step:2958 train loss:3.697147 +step:2959 train loss:3.719378 +step:2960 train loss:3.812749 +step:2961 train 
loss:3.673979 +step:2962 train loss:3.752216 +step:2963 train loss:3.747607 +step:2964 train loss:3.724481 +step:2965 train loss:3.752598 +step:2966 train loss:3.725931 +step:2967 train loss:3.726080 +step:2968 train loss:3.699330 +step:2969 train loss:3.709269 +step:2970 train loss:3.781814 +step:2971 train loss:3.723827 +step:2972 train loss:3.706554 +step:2973 train loss:3.702425 +step:2974 train loss:3.738593 +step:2975 train loss:3.698748 +step:2976 train loss:3.738329 +step:2977 train loss:3.730274 +step:2978 train loss:3.808184 +step:2979 train loss:3.789520 +step:2980 train loss:3.810053 +step:2981 train loss:3.755819 +step:2982 train loss:3.749324 +step:2983 train loss:3.699134 +step:2984 train loss:3.679450 +step:2985 train loss:3.788097 +step:2986 train loss:3.680660 +step:2987 train loss:3.808876 +step:2988 train loss:3.733474 +step:2989 train loss:3.763000 +step:2990 train loss:3.712500 +step:2991 train loss:3.782394 +step:2992 train loss:3.771081 +step:2993 train loss:3.741897 +step:2994 train loss:3.733481 +step:2995 train loss:3.800092 +step:2996 train loss:3.723176 +step:2997 train loss:3.633688 +step:2998 train loss:3.748653 +step:2999 train loss:3.789082 +step:3000 validation loss:3.670604 total_sharp:2.0099e-03 L1_sharp:2.5862e-03 L2_sharp:4.6430e-04 L3_sharp:1.0824e-03 L4_sharp:4.0681e-04 L5_sharp:2.9402e-04 L6_sharp:1.8513e-04 L7_sharp:2.7707e-04 L8_sharp:3.8236e-04 L9_sharp:4.7315e-04 L10_sharp:6.3022e-04 L11_sharp:6.7954e-04 L12_sharp:8.7934e-04 total_fnorm:4.4066e+00 total_l1_linf:3.9069e+04 total_spectral:4.4066e+00 L1_fnorm:1.0254e+00 L2_fnorm:9.6661e-01 L3_fnorm:9.8371e-01 L4_fnorm:1.0157e+00 L5_fnorm:1.0408e+00 L6_fnorm:1.0461e+00 L7_fnorm:1.0461e+00 L8_fnorm:1.0404e+00 L9_fnorm:1.0342e+00 L10_fnorm:1.0239e+00 L11_fnorm:1.0400e+00 L12_fnorm:1.0312e+00 L1_l1linf:1.1692e+00 L2_l1linf:1.0718e+00 L3_l1linf:1.0793e+00 L4_l1linf:1.1189e+00 L5_l1linf:1.0829e+00 L6_l1linf:1.0839e+00 L7_l1linf:1.0718e+00 L8_l1linf:1.0961e+00 L9_l1linf:1.0621e+00 L10_l1linf:1.1261e+00 L11_l1linf:1.2079e+00 L12_l1linf:1.1188e+00 L1_spectral:1.7939e-01 L2_spectral:1.4516e-01 L3_spectral:1.3333e-01 L4_spectral:1.1664e-01 L5_spectral:1.0409e-01 L6_spectral:9.5281e-02 L7_spectral:9.1312e-02 L8_spectral:8.9866e-02 L9_spectral:1.1298e-01 L10_spectral:1.3563e-01 L11_spectral:1.5079e-01 L12_spectral:1.5688e-01 ip_v_neg_g:2.1713e-02 cos_v_neg_g:7.6227e-03 v_norm:4.4066e+00 g_norm:6.4642e-01 hv_norm:3.9598e-01 cos_v_hv:2.2367e-02 hg_norm:2.4288e+00 cos_g_hg:5.2115e-01 v_par:1.3232e-03 v_perp:4.4066e+00 L1_cos_v_neg_g:1.6520e-02 L1_v_norm:1.0254e+00 L2_cos_v_neg_g:1.1088e-02 L2_v_norm:9.6661e-01 L3_cos_v_neg_g:1.1504e-02 L3_v_norm:9.8371e-01 L4_cos_v_neg_g:7.1688e-03 L4_v_norm:1.0157e+00 L5_cos_v_neg_g:6.4949e-03 L5_v_norm:1.0408e+00 L6_cos_v_neg_g:6.5898e-03 L6_v_norm:1.0461e+00 L7_cos_v_neg_g:6.3138e-03 L7_v_norm:1.0461e+00 L8_cos_v_neg_g:7.4859e-03 L8_v_norm:1.0404e+00 L9_cos_v_neg_g:1.0162e-02 L9_v_norm:1.0342e+00 L10_cos_v_neg_g:1.3622e-02 L10_v_norm:1.0239e+00 L11_cos_v_neg_g:1.5381e-02 L11_v_norm:1.0400e+00 L12_cos_v_neg_g:1.5518e-02 L12_v_norm:1.0312e+00 +step:3000 train loss:3.686665 +step:3001 train loss:3.736012 +step:3002 train loss:3.733671 +step:3003 train loss:3.729143 +step:3004 train loss:3.759415 +step:3005 train loss:3.653827 +step:3006 train loss:3.705528 +step:3007 train loss:3.738514 +step:3008 train loss:3.784026 +step:3009 train loss:3.738201 +step:3010 train loss:3.757734 +step:3011 train loss:3.741802 +step:3012 train loss:3.722638 +step:3013 train loss:3.762100 +step:3014 
train loss:3.720541 +step:3015 train loss:3.717632 +step:3016 train loss:3.732974 +step:3017 train loss:3.762229 +step:3018 train loss:3.687182 +step:3019 train loss:3.730042 +step:3020 train loss:3.748934 +step:3021 train loss:3.709537 +step:3022 train loss:3.799797 +step:3023 train loss:3.750499 +step:3024 train loss:3.735973 +step:3025 train loss:3.745790 +step:3026 train loss:3.719735 +step:3027 train loss:3.703524 +step:3028 train loss:3.745908 +step:3029 train loss:3.737270 +step:3030 train loss:3.709314 +step:3031 train loss:3.691346 +step:3032 train loss:3.681476 +step:3033 train loss:3.705961 +step:3034 train loss:3.755576 +step:3035 train loss:3.732932 +step:3036 train loss:3.692868 +step:3037 train loss:3.656297 +step:3038 train loss:3.769303 +step:3039 train loss:3.650865 +step:3040 train loss:3.635808 +step:3041 train loss:3.769249 +step:3042 train loss:3.702225 +step:3043 train loss:3.761660 +step:3044 train loss:3.654535 +step:3045 train loss:3.700897 +step:3046 train loss:3.679376 +step:3047 train loss:3.701977 +step:3048 train loss:3.675939 +step:3049 train loss:3.749162 +step:3050 train loss:3.641703 +step:3051 train loss:3.657916 +step:3052 train loss:3.677460 +step:3053 train loss:3.749825 +step:3054 train loss:3.820454 +step:3055 train loss:3.664533 +step:3056 train loss:3.690544 +step:3057 train loss:3.728640 +step:3058 train loss:3.678352 +step:3059 train loss:3.704184 +step:3060 train loss:3.705681 +step:3061 train loss:3.686270 +step:3062 train loss:3.741140 +step:3063 train loss:3.721012 +step:3064 train loss:3.746541 +step:3065 train loss:3.764467 +step:3066 train loss:3.658051 +step:3067 train loss:3.711745 +step:3068 train loss:3.762585 +step:3069 train loss:3.777820 +step:3070 train loss:3.703043 +step:3071 train loss:3.720895 +step:3072 train loss:3.721189 +step:3073 train loss:3.759859 +step:3074 train loss:3.694051 +step:3075 train loss:3.730989 +step:3076 train loss:3.665262 +step:3077 train loss:3.663286 +step:3078 train loss:3.697315 +step:3079 train loss:3.741345 +step:3080 train loss:3.732685 +step:3081 train loss:3.779052 +step:3082 train loss:3.752772 +step:3083 train loss:3.682912 +step:3084 train loss:3.766163 +step:3085 train loss:3.690959 +step:3086 train loss:3.752884 +step:3087 train loss:3.718727 +step:3088 train loss:3.798750 +step:3089 train loss:3.676177 +step:3090 train loss:3.747827 +step:3091 train loss:3.673008 +step:3092 train loss:3.691930 +step:3093 train loss:3.717623 +step:3094 train loss:3.702937 +step:3095 train loss:3.787363 +step:3096 train loss:3.720161 +step:3097 train loss:3.737275 +step:3098 train loss:3.709743 +step:3099 train loss:3.721922 +step:3100 train loss:3.745531 +step:3101 train loss:3.830381 +step:3102 train loss:3.751724 +step:3103 train loss:3.676218 +step:3104 train loss:3.759631 +step:3105 train loss:3.733634 +step:3106 train loss:3.729958 +step:3107 train loss:3.712479 +step:3108 train loss:3.683311 +step:3109 train loss:3.739912 +step:3110 train loss:3.667238 +step:3111 train loss:3.704825 +step:3112 train loss:3.637554 +step:3113 train loss:3.759288 +step:3114 train loss:3.669990 +step:3115 train loss:3.715461 +step:3116 train loss:3.598598 +step:3117 train loss:3.612499 +step:3118 train loss:3.718041 +step:3119 train loss:3.721400 +step:3120 train loss:3.724289 +step:3121 train loss:3.664954 +step:3122 train loss:3.750110 +step:3123 train loss:3.665305 +step:3124 train loss:3.729172 +step:3125 train loss:3.739310 +step:3126 train loss:3.847344 +step:3127 train loss:3.696389 +step:3128 train loss:3.723413 
+step:3129 train loss:3.706214 +step:3130 train loss:3.682500 +step:3131 train loss:3.758888 +step:3132 train loss:3.744473 +step:3133 train loss:3.719223 +step:3134 train loss:3.605257 +step:3135 train loss:3.705733 +step:3136 train loss:3.676765 +step:3137 train loss:3.812921 +step:3138 train loss:3.716317 +step:3139 train loss:3.696085 +step:3140 train loss:3.716081 +step:3141 train loss:3.714750 +step:3142 train loss:3.651234 +step:3143 train loss:3.735110 +step:3144 train loss:3.683775 +step:3145 train loss:3.667752 +step:3146 train loss:3.683597 +step:3147 train loss:3.788627 +step:3148 train loss:3.697554 +step:3149 train loss:3.750372 +step:3150 train loss:3.738018 +step:3151 train loss:3.704602 +step:3152 train loss:3.703057 +step:3153 train loss:3.658428 +step:3154 train loss:3.744496 +step:3155 train loss:3.684088 +step:3156 train loss:3.738913 +step:3157 train loss:3.740484 +step:3158 train loss:3.714261 +step:3159 train loss:3.653165 +step:3160 train loss:3.700832 +step:3161 train loss:3.673619 +step:3162 train loss:3.727332 +step:3163 train loss:3.712358 +step:3164 train loss:3.686929 +step:3165 train loss:3.705883 +step:3166 train loss:3.743455 +step:3167 train loss:3.706535 +step:3168 train loss:3.791601 +step:3169 train loss:3.707732 +step:3170 train loss:3.687415 +step:3171 train loss:3.678660 +step:3172 train loss:3.685834 +step:3173 train loss:3.639386 +step:3174 train loss:3.746197 +step:3175 train loss:3.715410 +step:3176 train loss:3.725699 +step:3177 train loss:3.691584 +step:3178 train loss:3.672202 +step:3179 train loss:3.743585 +step:3180 train loss:3.677404 +step:3181 train loss:3.753131 +step:3182 train loss:3.759367 +step:3183 train loss:3.701653 +step:3184 train loss:3.696763 +step:3185 train loss:3.758246 +step:3186 train loss:3.717374 +step:3187 train loss:3.734622 +step:3188 train loss:3.776357 +step:3189 train loss:3.717717 +step:3190 train loss:3.675036 +step:3191 train loss:3.674221 +step:3192 train loss:3.644941 +step:3193 train loss:3.718919 +step:3194 train loss:3.684978 +step:3195 train loss:3.667833 +step:3196 train loss:3.723328 +step:3197 train loss:3.682895 +step:3198 train loss:3.717627 +step:3199 train loss:3.699455 +step:3200 train loss:3.707614 +step:3201 train loss:3.669414 +step:3202 train loss:3.733218 +step:3203 train loss:3.794298 +step:3204 train loss:3.760554 +step:3205 train loss:3.606731 +step:3206 train loss:3.897333 +step:3207 train loss:3.644246 +step:3208 train loss:3.712013 +step:3209 train loss:3.703496 +step:3210 train loss:3.686293 +step:3211 train loss:3.711913 +step:3212 train loss:3.730503 +step:3213 train loss:3.662989 +step:3214 train loss:3.769406 +step:3215 train loss:3.775855 +step:3216 train loss:3.640885 +step:3217 train loss:3.724809 +step:3218 train loss:3.761543 +step:3219 train loss:3.677491 +step:3220 train loss:3.746762 +step:3221 train loss:3.661408 +step:3222 train loss:3.702860 +step:3223 train loss:3.721505 +step:3224 train loss:3.729484 +step:3225 train loss:3.655366 +step:3226 train loss:3.688447 +step:3227 train loss:3.717115 +step:3228 train loss:3.713629 +step:3229 train loss:3.742694 +step:3230 train loss:3.761807 +step:3231 train loss:3.694707 +step:3232 train loss:3.708044 +step:3233 train loss:3.679424 +step:3234 train loss:3.667371 +step:3235 train loss:3.673968 +step:3236 train loss:3.691175 +step:3237 train loss:3.689782 +step:3238 train loss:3.708978 +step:3239 train loss:3.613265 +step:3240 train loss:3.725868 +step:3241 train loss:3.720972 +step:3242 train loss:3.777299 +step:3243 train 
loss:3.714665 +step:3244 train loss:3.734661 +step:3245 train loss:3.635143 +step:3246 train loss:3.765310 +step:3247 train loss:3.704380 +step:3248 train loss:3.728962 +step:3249 train loss:3.671274 +step:3250 validation loss:3.638602 +step:3250 train loss:3.668820 +step:3251 train loss:3.783324 +step:3252 train loss:3.712129 +step:3253 train loss:3.712118 +step:3254 train loss:3.788715 +step:3255 train loss:3.724972 +step:3256 train loss:3.718836 +step:3257 train loss:3.698975 +step:3258 train loss:3.627458 +step:3259 train loss:3.607705 +step:3260 train loss:3.719669 +step:3261 train loss:3.704091 +step:3262 train loss:3.692701 +step:3263 train loss:3.676297 +step:3264 train loss:3.785672 +step:3265 train loss:3.697972 +step:3266 train loss:3.728592 +step:3267 train loss:3.689055 +step:3268 train loss:3.692254 +step:3269 train loss:3.704797 +step:3270 train loss:3.735147 +step:3271 train loss:3.697007 +step:3272 train loss:3.677450 +step:3273 train loss:3.682623 +step:3274 train loss:3.819163 +step:3275 train loss:3.692616 +step:3276 train loss:3.763296 +step:3277 train loss:3.698678 +step:3278 train loss:3.677471 +step:3279 train loss:3.702669 +step:3280 train loss:3.727367 +step:3281 train loss:3.652984 +step:3282 train loss:3.724091 +step:3283 train loss:3.695179 +step:3284 train loss:3.659243 +step:3285 train loss:3.674103 +step:3286 train loss:3.704951 +step:3287 train loss:3.640574 +step:3288 train loss:3.722907 +step:3289 train loss:3.664325 +step:3290 train loss:3.701003 +step:3291 train loss:3.658265 +step:3292 train loss:3.682616 +step:3293 train loss:3.723946 +step:3294 train loss:3.740318 +step:3295 train loss:3.648960 +step:3296 train loss:3.707643 +step:3297 train loss:3.661215 +step:3298 train loss:3.667179 +step:3299 train loss:3.794008 +step:3300 train loss:3.639289 +step:3301 train loss:3.718836 +step:3302 train loss:3.689973 +step:3303 train loss:3.721179 +step:3304 train loss:3.682650 +step:3305 train loss:3.777697 +step:3306 train loss:3.702614 +step:3307 train loss:3.721348 +step:3308 train loss:3.679096 +step:3309 train loss:3.736520 +step:3310 train loss:3.656482 +step:3311 train loss:3.716712 +step:3312 train loss:3.681033 +step:3313 train loss:3.713823 +step:3314 train loss:3.710317 +step:3315 train loss:3.786428 +step:3316 train loss:3.640264 +step:3317 train loss:3.728215 +step:3318 train loss:3.741334 +step:3319 train loss:3.663631 +step:3320 train loss:3.825533 +step:3321 train loss:3.726694 +step:3322 train loss:3.726012 +step:3323 train loss:3.829800 +step:3324 train loss:3.745619 +step:3325 train loss:3.718597 +step:3326 train loss:3.709640 +step:3327 train loss:3.724094 +step:3328 train loss:3.702428 +step:3329 train loss:3.704710 +step:3330 train loss:3.691670 +step:3331 train loss:3.736849 +step:3332 train loss:3.757752 +step:3333 train loss:3.725169 +step:3334 train loss:3.654260 +step:3335 train loss:3.669621 +step:3336 train loss:3.703482 +step:3337 train loss:3.704351 +step:3338 train loss:3.690791 +step:3339 train loss:3.685415 +step:3340 train loss:3.723262 +step:3341 train loss:3.672130 +step:3342 train loss:3.719238 +step:3343 train loss:3.658763 +step:3344 train loss:3.716393 +step:3345 train loss:3.667171 +step:3346 train loss:3.681625 +step:3347 train loss:3.687794 +step:3348 train loss:3.713342 +step:3349 train loss:3.697022 +step:3350 train loss:3.726227 +step:3351 train loss:3.777488 +step:3352 train loss:3.722340 +step:3353 train loss:3.817795 +step:3354 train loss:3.659235 +step:3355 train loss:3.766074 +step:3356 train loss:3.717849 
+step:3357 train loss:3.724178 +step:3358 train loss:3.668247 +step:3359 train loss:3.696275 +step:3360 train loss:3.691638 +step:3361 train loss:3.694510 +step:3362 train loss:3.682396 +step:3363 train loss:3.680799 +step:3364 train loss:3.660886 +step:3365 train loss:3.703243 +step:3366 train loss:3.729439 +step:3367 train loss:3.685093 +step:3368 train loss:3.783003 +step:3369 train loss:3.698252 +step:3370 train loss:3.788312 +step:3371 train loss:3.750867 +step:3372 train loss:3.718785 +step:3373 train loss:3.730608 +step:3374 train loss:3.777669 +step:3375 train loss:3.706855 +step:3376 train loss:3.723805 +step:3377 train loss:3.698184 +step:3378 train loss:3.674946 +step:3379 train loss:3.750593 +step:3380 train loss:3.730579 +step:3381 train loss:3.713884 +step:3382 train loss:3.730258 +step:3383 train loss:3.738165 +step:3384 train loss:3.668735 +step:3385 train loss:3.718348 +step:3386 train loss:3.696331 +step:3387 train loss:3.767219 +step:3388 train loss:3.672457 +step:3389 train loss:3.877811 +step:3390 train loss:3.623564 +step:3391 train loss:3.707633 +step:3392 train loss:3.694806 +step:3393 train loss:3.717565 +step:3394 train loss:3.673119 +step:3395 train loss:3.742815 +step:3396 train loss:3.652875 +step:3397 train loss:3.731786 +step:3398 train loss:3.695064 +step:3399 train loss:3.710550 +step:3400 train loss:3.659810 +step:3401 train loss:3.696584 +step:3402 train loss:3.850920 +step:3403 train loss:3.741788 +step:3404 train loss:3.861739 +step:3405 train loss:3.711526 +step:3406 train loss:3.686580 +step:3407 train loss:3.688155 +step:3408 train loss:3.665588 +step:3409 train loss:3.633764 +step:3410 train loss:3.663397 +step:3411 train loss:3.732578 +step:3412 train loss:3.656801 +step:3413 train loss:3.656182 +step:3414 train loss:3.689368 +step:3415 train loss:3.663639 +step:3416 train loss:3.669513 +step:3417 train loss:3.746285 +step:3418 train loss:3.748773 +step:3419 train loss:3.702991 +step:3420 train loss:3.680341 +step:3421 train loss:3.713310 +step:3422 train loss:3.730066 +step:3423 train loss:3.751269 +step:3424 train loss:3.629521 +step:3425 train loss:3.652692 +step:3426 train loss:3.647151 +step:3427 train loss:3.709802 +step:3428 train loss:3.632001 +step:3429 train loss:3.695742 +step:3430 train loss:3.665700 +step:3431 train loss:3.716972 +step:3432 train loss:3.701633 +step:3433 train loss:3.667646 +step:3434 train loss:3.751418 +step:3435 train loss:3.688106 +step:3436 train loss:3.778677 +step:3437 train loss:3.608843 +step:3438 train loss:3.718785 +step:3439 train loss:3.690986 +step:3440 train loss:3.786057 +step:3441 train loss:3.680544 +step:3442 train loss:3.747176 +step:3443 train loss:3.680006 +step:3444 train loss:3.701395 +step:3445 train loss:3.743415 +step:3446 train loss:3.651921 +step:3447 train loss:3.724791 +step:3448 train loss:3.678711 +step:3449 train loss:3.709002 +step:3450 train loss:3.636727 +step:3451 train loss:3.737662 +step:3452 train loss:3.685171 +step:3453 train loss:3.743236 +step:3454 train loss:3.767831 +step:3455 train loss:3.829043 +step:3456 train loss:3.771520 +step:3457 train loss:3.768196 +step:3458 train loss:3.688670 +step:3459 train loss:3.702267 +step:3460 train loss:3.647549 +step:3461 train loss:3.710130 +step:3462 train loss:3.714097 +step:3463 train loss:3.678100 +step:3464 train loss:3.732932 +step:3465 train loss:3.661797 +step:3466 train loss:3.732458 +step:3467 train loss:3.683522 +step:3468 train loss:3.699310 +step:3469 train loss:3.707441 +step:3470 train loss:3.690963 +step:3471 train 
loss:3.729248 +step:3472 train loss:3.613093 +step:3473 train loss:3.740590 +step:3474 train loss:3.634361 +step:3475 train loss:3.716331 +step:3476 train loss:3.685648 +step:3477 train loss:3.705827 +step:3478 train loss:3.678002 +step:3479 train loss:3.708156 +step:3480 train loss:3.727059 +step:3481 train loss:3.704016 +step:3482 train loss:3.692150 +step:3483 train loss:3.832428 +step:3484 train loss:3.673948 +step:3485 train loss:3.660243 +step:3486 train loss:3.713923 +step:3487 train loss:3.753065 +step:3488 train loss:3.660675 +step:3489 train loss:3.712278 +step:3490 train loss:3.678378 +step:3491 train loss:3.719453 +step:3492 train loss:3.752836 +step:3493 train loss:3.724705 +step:3494 train loss:3.717276 +step:3495 train loss:3.693336 +step:3496 train loss:3.661633 +step:3497 train loss:3.773008 +step:3498 train loss:3.718413 +step:3499 train loss:3.652189 +step:3500 validation loss:3.618078 total_sharp:1.6814e-03 L1_sharp:1.7080e-03 L2_sharp:1.8518e-04 L3_sharp:8.3324e-04 L4_sharp:2.7736e-04 L5_sharp:2.2447e-04 L6_sharp:2.8279e-04 L7_sharp:3.0630e-04 L8_sharp:4.2458e-04 L9_sharp:4.6714e-04 L10_sharp:5.6624e-04 L11_sharp:5.2628e-04 L12_sharp:8.2102e-04 total_fnorm:4.3589e+00 total_l1_linf:3.8619e+04 total_spectral:4.3589e+00 L1_fnorm:9.6127e-01 L2_fnorm:9.5875e-01 L3_fnorm:9.6780e-01 L4_fnorm:1.0077e+00 L5_fnorm:1.0315e+00 L6_fnorm:1.0424e+00 L7_fnorm:1.0416e+00 L8_fnorm:1.0355e+00 L9_fnorm:1.0271e+00 L10_fnorm:9.9934e-01 L11_fnorm:1.0092e+00 L12_fnorm:9.9771e-01 L1_l1linf:1.1891e+00 L2_l1linf:1.0417e+00 L3_l1linf:1.0397e+00 L4_l1linf:1.0441e+00 L5_l1linf:1.0693e+00 L6_l1linf:1.0537e+00 L7_l1linf:1.0541e+00 L8_l1linf:1.0557e+00 L9_l1linf:1.0669e+00 L10_l1linf:1.0734e+00 L11_l1linf:1.1216e+00 L12_l1linf:1.2239e+00 L1_spectral:1.4960e-01 L2_spectral:1.3819e-01 L3_spectral:1.3804e-01 L4_spectral:1.1228e-01 L5_spectral:9.7011e-02 L6_spectral:9.8686e-02 L7_spectral:9.3912e-02 L8_spectral:8.8641e-02 L9_spectral:1.0919e-01 L10_spectral:1.2551e-01 L11_spectral:1.4436e-01 L12_spectral:1.4602e-01 ip_v_neg_g:1.4964e-02 cos_v_neg_g:4.7809e-03 v_norm:4.3589e+00 g_norm:7.1807e-01 hv_norm:3.5761e-01 cos_v_hv:2.0495e-02 hg_norm:3.0132e+00 cos_g_hg:6.0659e-01 v_par:7.1346e-04 v_perp:4.3589e+00 L1_cos_v_neg_g:1.2020e-02 L1_v_norm:9.6127e-01 L2_cos_v_neg_g:5.2297e-03 L2_v_norm:9.5875e-01 L3_cos_v_neg_g:6.2267e-03 L3_v_norm:9.6780e-01 L4_cos_v_neg_g:4.2651e-03 L4_v_norm:1.0077e+00 L5_cos_v_neg_g:3.5811e-03 L5_v_norm:1.0315e+00 L6_cos_v_neg_g:4.8520e-03 L6_v_norm:1.0424e+00 L7_cos_v_neg_g:4.8301e-03 L7_v_norm:1.0416e+00 L8_cos_v_neg_g:5.1638e-03 L8_v_norm:1.0355e+00 L9_cos_v_neg_g:5.7907e-03 L9_v_norm:1.0271e+00 L10_cos_v_neg_g:8.4326e-03 L10_v_norm:9.9934e-01 L11_cos_v_neg_g:1.0417e-02 L11_v_norm:1.0092e+00 L12_cos_v_neg_g:9.6114e-03 L12_v_norm:9.9771e-01 +step:3500 train loss:3.670972 +step:3501 train loss:3.799506 +step:3502 train loss:3.779302 +step:3503 train loss:3.728630 +step:3504 train loss:3.680678 +step:3505 train loss:3.696182 +step:3506 train loss:3.593415 +step:3507 train loss:3.713414 +step:3508 train loss:3.656472 +step:3509 train loss:3.726294 +step:3510 train loss:3.658232 +step:3511 train loss:3.695189 +step:3512 train loss:3.832556 +step:3513 train loss:3.657180 +step:3514 train loss:3.674356 +step:3515 train loss:3.928537 +step:3516 train loss:3.717384 +step:3517 train loss:3.675951 +step:3518 train loss:3.678809 +step:3519 train loss:3.671290 +step:3520 train loss:3.703829 +step:3521 train loss:3.690491 +step:3522 train loss:3.601631 +step:3523 train loss:3.702756 +step:3524 
train loss:3.686747 +step:3525 train loss:3.677829 +step:3526 train loss:3.702812 +step:3527 train loss:3.651665 +step:3528 train loss:3.700622 +step:3529 train loss:3.678876 +step:3530 train loss:3.672287 +step:3531 train loss:3.665014 +step:3532 train loss:3.855449 +step:3533 train loss:3.671684 +step:3534 train loss:3.690124 +step:3535 train loss:3.664844 +step:3536 train loss:3.661097 +step:3537 train loss:3.676548 +step:3538 train loss:3.702338 +step:3539 train loss:3.652608 +step:3540 train loss:3.718216 +step:3541 train loss:3.688346 +step:3542 train loss:3.700530 +step:3543 train loss:3.620664 +step:3544 train loss:3.645554 +step:3545 train loss:3.645547 +step:3546 train loss:3.708419 +step:3547 train loss:3.718914 +step:3548 train loss:3.690449 +step:3549 train loss:3.684248 +step:3550 train loss:3.676396 +step:3551 train loss:3.702714 +step:3552 train loss:3.610188 +step:3553 train loss:3.720446 +step:3554 train loss:3.718133 +step:3555 train loss:3.702296 +step:3556 train loss:3.730067 +step:3557 train loss:3.713187 +step:3558 train loss:3.683996 +step:3559 train loss:3.634131 +step:3560 train loss:3.724790 +step:3561 train loss:3.717905 +step:3562 train loss:3.893445 +step:3563 train loss:3.752567 +step:3564 train loss:3.715462 +step:3565 train loss:3.713201 +step:3566 train loss:3.682152 +step:3567 train loss:3.623922 +step:3568 train loss:3.649257 +step:3569 train loss:3.738528 +step:3570 train loss:3.756479 +step:3571 train loss:3.733223 +step:3572 train loss:3.723928 +step:3573 train loss:3.682753 +step:3574 train loss:3.680108 +step:3575 train loss:3.673126 +step:3576 train loss:3.654773 +step:3577 train loss:3.664819 +step:3578 train loss:3.747336 +step:3579 train loss:3.660082 +step:3580 train loss:3.738393 +step:3581 train loss:3.678214 +step:3582 train loss:3.733562 +step:3583 train loss:3.668632 +step:3584 train loss:3.646191 +step:3585 train loss:3.693738 +step:3586 train loss:3.644941 +step:3587 train loss:3.737418 +step:3588 train loss:3.869337 +step:3589 train loss:3.704689 +step:3590 train loss:3.685373 +step:3591 train loss:3.698859 +step:3592 train loss:3.659714 +step:3593 train loss:3.628122 +step:3594 train loss:3.684924 +step:3595 train loss:3.657704 +step:3596 train loss:3.739698 +step:3597 train loss:3.709394 +step:3598 train loss:3.660603 +step:3599 train loss:3.715101 +step:3600 train loss:3.651484 +step:3601 train loss:3.670322 +step:3602 train loss:3.655118 +step:3603 train loss:3.675438 +step:3604 train loss:3.698994 +step:3605 train loss:3.805022 +step:3606 train loss:3.705085 +step:3607 train loss:3.688686 +step:3608 train loss:3.704455 +step:3609 train loss:3.685562 +step:3610 train loss:3.653886 +step:3611 train loss:3.657646 +step:3612 train loss:3.728463 +step:3613 train loss:3.697576 +step:3614 train loss:3.642419 +step:3615 train loss:3.686684 +step:3616 train loss:3.675299 +step:3617 train loss:3.719537 +step:3618 train loss:3.686968 +step:3619 train loss:3.674775 +step:3620 train loss:3.700032 +step:3621 train loss:3.649415 +step:3622 train loss:3.755167 +step:3623 train loss:3.759696 +step:3624 train loss:3.718325 +step:3625 train loss:3.692927 +step:3626 train loss:3.701413 +step:3627 train loss:3.698923 +step:3628 train loss:3.679787 +step:3629 train loss:3.686509 +step:3630 train loss:3.769602 +step:3631 train loss:3.698550 +step:3632 train loss:3.726216 +step:3633 train loss:3.683184 +step:3634 train loss:3.684365 +step:3635 train loss:3.675057 +step:3636 train loss:3.745232 +step:3637 train loss:3.821240 +step:3638 train loss:3.733528 
+step:3639 train loss:3.720897 +step:3640 train loss:3.725535 +step:3641 train loss:3.766239 +step:3642 train loss:3.658580 +step:3643 train loss:3.832419 +step:3644 train loss:3.720298 +step:3645 train loss:3.694599 +step:3646 train loss:3.815509 +step:3647 train loss:3.704545 +step:3648 train loss:3.696105 +step:3649 train loss:3.644455 +step:3650 train loss:3.685926 +step:3651 train loss:3.682261 +step:3652 train loss:3.668227 +step:3653 train loss:3.607232 +step:3654 train loss:3.664432 +step:3655 train loss:3.660032 +step:3656 train loss:3.689131 +step:3657 train loss:3.708287 +step:3658 train loss:3.700179 +step:3659 train loss:3.685006 +step:3660 train loss:3.660102 +step:3661 train loss:3.684218 +step:3662 train loss:3.655768 +step:3663 train loss:3.692838 +step:3664 train loss:3.647246 +step:3665 train loss:3.695886 +step:3666 train loss:3.725365 +step:3667 train loss:3.818455 +step:3668 train loss:3.701231 +step:3669 train loss:3.655637 +step:3670 train loss:3.704659 +step:3671 train loss:3.663640 +step:3672 train loss:3.697824 +step:3673 train loss:3.682100 +step:3674 train loss:3.698345 +step:3675 train loss:3.709752 +step:3676 train loss:3.676606 +step:3677 train loss:3.640828 +step:3678 train loss:3.698063 +step:3679 train loss:3.601025 +step:3680 train loss:3.702954 +step:3681 train loss:3.734051 +step:3682 train loss:3.714467 +step:3683 train loss:3.660700 +step:3684 train loss:3.658176 +step:3685 train loss:3.689673 +step:3686 train loss:3.713044 +step:3687 train loss:3.670178 +step:3688 train loss:3.643125 +step:3689 train loss:3.679979 +step:3690 train loss:3.666984 +step:3691 train loss:3.646499 +step:3692 train loss:3.706995 +step:3693 train loss:3.838330 +step:3694 train loss:3.658117 +step:3695 train loss:3.713753 +step:3696 train loss:3.679259 +step:3697 train loss:3.671682 +step:3698 train loss:3.618811 +step:3699 train loss:3.640615 +step:3700 train loss:3.668912 +step:3701 train loss:3.691525 +step:3702 train loss:3.710978 +step:3703 train loss:3.667359 +step:3704 train loss:3.713924 +step:3705 train loss:3.693422 +step:3706 train loss:3.641684 +step:3707 train loss:3.697251 +step:3708 train loss:3.673525 +step:3709 train loss:3.595001 +step:3710 train loss:3.718448 +step:3711 train loss:3.665866 +step:3712 train loss:3.704892 +step:3713 train loss:3.655761 +step:3714 train loss:3.674861 +step:3715 train loss:3.794379 +step:3716 train loss:3.698107 +step:3717 train loss:3.673290 +step:3718 train loss:3.676251 +step:3719 train loss:3.672830 +step:3720 train loss:3.684567 +step:3721 train loss:3.740394 +step:3722 train loss:3.753761 +step:3723 train loss:3.638338 +step:3724 train loss:3.696749 +step:3725 train loss:3.675186 +step:3726 train loss:3.696950 +step:3727 train loss:3.767664 +step:3728 train loss:3.734318 +step:3729 train loss:3.629492 +step:3730 train loss:3.648398 +step:3731 train loss:3.671560 +step:3732 train loss:3.825975 +step:3733 train loss:3.684551 +step:3734 train loss:3.683028 +step:3735 train loss:3.624509 +step:3736 train loss:3.682616 +step:3737 train loss:3.729140 +step:3738 train loss:3.753747 +step:3739 train loss:3.669024 +step:3740 train loss:3.574145 +step:3741 train loss:3.778610 +step:3742 train loss:3.690724 +step:3743 train loss:3.668981 +step:3744 train loss:3.682181 +step:3745 train loss:3.687078 +step:3746 train loss:3.666516 +step:3747 train loss:3.667471 +step:3748 train loss:3.711959 +step:3749 train loss:3.695625 +step:3750 validation loss:3.605776 +step:3750 train loss:3.705962 +step:3751 train loss:3.793325 +step:3752 
train loss:3.726905 +step:3753 train loss:3.646927 +step:3754 train loss:3.698733 +step:3755 train loss:3.879214 +step:3756 train loss:3.655335 +step:3757 train loss:3.648366 +step:3758 train loss:3.681746 +step:3759 train loss:3.625151 +step:3760 train loss:3.624011 diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/config.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..e1f62d8c63b77da9c21a11d87977275dc994881c --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/adam_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.005, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "adam", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "0a2f0a9a-a861-4fa7-b390-0525967a4a05", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..49b1dc652f92a1209a57fa11276d2522841d140c --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 9.244590759277344, + "total_l1_linf_norm": 79547.984375, + "total_spectral_norm": 9.244590759277344, + "layer_1_update_fnorm": 1.6946073770523071, + "layer_1_max_l1_linf_norm": 2.8630738258361816, + "layer_1_max_spectral_norm": 0.49186617136001587, + "layer_2_update_fnorm": 1.8524781465530396, + "layer_2_max_l1_linf_norm": 2.168936252593994, + "layer_2_max_spectral_norm": 0.27565625309944153, + "layer_3_update_fnorm": 1.696325421333313, + "layer_3_max_l1_linf_norm": 2.310838222503662, + "layer_3_max_spectral_norm": 0.259132981300354, + "layer_4_update_fnorm": 1.7497620582580566, + "layer_4_max_l1_linf_norm": 2.162848472595215, + "layer_4_max_spectral_norm": 0.2663313150405884, + "layer_5_update_fnorm": 1.852487564086914, + "layer_5_max_l1_linf_norm": 2.165701389312744, + "layer_5_max_spectral_norm": 0.33806800842285156, + "layer_6_update_fnorm": 1.993700385093689, + "layer_6_max_l1_linf_norm": 2.2288002967834473, + "layer_6_max_spectral_norm": 0.2970026433467865, + "layer_7_update_fnorm": 2.0648109912872314, + "layer_7_max_l1_linf_norm": 2.418283462524414, + "layer_7_max_spectral_norm": 0.32695645093917847, + "layer_8_update_fnorm": 2.0371222496032715, + "layer_8_max_l1_linf_norm": 
2.496583938598633, + "layer_8_max_spectral_norm": 0.37371447682380676, + "layer_9_update_fnorm": 2.0654513835906982, + "layer_9_max_l1_linf_norm": 2.7145509719848633, + "layer_9_max_spectral_norm": 0.4750639498233795, + "layer_10_update_fnorm": 2.063450813293457, + "layer_10_max_l1_linf_norm": 2.751772403717041, + "layer_10_max_spectral_norm": 0.4886780083179474, + "layer_11_update_fnorm": 2.1124401092529297, + "layer_11_max_l1_linf_norm": 2.7502379417419434, + "layer_11_max_spectral_norm": 0.4934580624103546, + "layer_12_update_fnorm": 2.14141845703125, + "layer_12_max_l1_linf_norm": 3.1740074157714844, + "layer_12_max_spectral_norm": 0.5417348146438599, + "total_sharpness": 0.0018483555177226663, + "ip_v_neg_g": 0.10688885301351547, + "cos_v_neg_g": 0.022234106436371803, + "v_norm": 9.244590759277344, + "g_norm": 0.5200259685516357, + "hv_norm": 0.49738311767578125, + "cos_v_hv": 0.03435438126325607, + "hg_norm": 2.8909056186676025, + "cos_g_hg": 0.5282166600227356, + "v_parallel_norm": 0.010650459676980972, + "v_perp_norm": 9.244584083557129, + "layer_1_v_norm": 1.6946073770523071, + "layer_1_cos_v_neg_g": 0.11556180566549301, + "layer_2_v_norm": 1.8524781465530396, + "layer_2_cos_v_neg_g": 0.027179498225450516, + "layer_3_v_norm": 1.6963255405426025, + "layer_3_cos_v_neg_g": 0.03421466797590256, + "layer_4_v_norm": 1.7497620582580566, + "layer_4_cos_v_neg_g": 0.04141884297132492, + "layer_5_v_norm": 1.852487564086914, + "layer_5_cos_v_neg_g": 0.034326691180467606, + "layer_6_v_norm": 1.993700385093689, + "layer_6_cos_v_neg_g": 0.033290017396211624, + "layer_7_v_norm": 2.0648109912872314, + "layer_7_cos_v_neg_g": 0.03424510359764099, + "layer_8_v_norm": 2.0371224880218506, + "layer_8_cos_v_neg_g": 0.03896338865160942, + "layer_9_v_norm": 2.0654513835906982, + "layer_9_cos_v_neg_g": 0.0474499948322773, + "layer_10_v_norm": 2.063450813293457, + "layer_10_cos_v_neg_g": 0.052391428500413895, + "layer_11_v_norm": 2.1124401092529297, + "layer_11_cos_v_neg_g": 0.048252735286951065, + "layer_12_v_norm": 2.14141845703125, + "layer_12_cos_v_neg_g": 0.05912478640675545, + "layer_1_sharpness": 0.0038880323991179466, + "layer_2_sharpness": 0.00010387395013822243, + "layer_3_sharpness": 0.0003502231265883893, + "layer_4_sharpness": 0.0003091608814429492, + "layer_5_sharpness": 0.0002722347853705287, + "layer_6_sharpness": 0.00018571989494375885, + "layer_7_sharpness": 0.00014292153355199844, + "layer_8_sharpness": 0.00030018811230547726, + "layer_9_sharpness": 0.0004297727136872709, + "layer_10_sharpness": 0.0005546864704228938, + "layer_11_sharpness": 0.0006224234239198267, + "layer_12_sharpness": 0.0012637207983061671 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_10000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..b4d9c0ce65432d6ae7d53849fa428124c1c981ea --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 11.052172660827637, + "total_l1_linf_norm": 97956.484375, + "total_spectral_norm": 11.052173614501953, + "layer_1_update_fnorm": 2.6105237007141113, + "layer_1_max_l1_linf_norm": 2.6371259689331055, + "layer_1_max_spectral_norm": 0.34355610609054565, + "layer_2_update_fnorm": 2.562406063079834, + "layer_2_max_l1_linf_norm": 2.789918899536133, + "layer_2_max_spectral_norm": 
0.32540765404701233, + "layer_3_update_fnorm": 2.5693249702453613, + "layer_3_max_l1_linf_norm": 2.8682727813720703, + "layer_3_max_spectral_norm": 0.3009251356124878, + "layer_4_update_fnorm": 2.5638175010681152, + "layer_4_max_l1_linf_norm": 2.8274662494659424, + "layer_4_max_spectral_norm": 0.3185838460922241, + "layer_5_update_fnorm": 2.521930456161499, + "layer_5_max_l1_linf_norm": 2.509770393371582, + "layer_5_max_spectral_norm": 0.2750397324562073, + "layer_6_update_fnorm": 2.6258368492126465, + "layer_6_max_l1_linf_norm": 2.6292269229888916, + "layer_6_max_spectral_norm": 0.25713810324668884, + "layer_7_update_fnorm": 2.647520065307617, + "layer_7_max_l1_linf_norm": 2.751368999481201, + "layer_7_max_spectral_norm": 0.3146626353263855, + "layer_8_update_fnorm": 2.613723039627075, + "layer_8_max_l1_linf_norm": 3.0496163368225098, + "layer_8_max_spectral_norm": 0.37311774492263794, + "layer_9_update_fnorm": 2.61061954498291, + "layer_9_max_l1_linf_norm": 2.9711506366729736, + "layer_9_max_spectral_norm": 0.41798490285873413, + "layer_10_update_fnorm": 2.599371910095215, + "layer_10_max_l1_linf_norm": 3.0833487510681152, + "layer_10_max_spectral_norm": 0.4381798505783081, + "layer_11_update_fnorm": 2.6229381561279297, + "layer_11_max_l1_linf_norm": 3.0358338356018066, + "layer_11_max_spectral_norm": 0.43016722798347473, + "layer_12_update_fnorm": 2.531911611557007, + "layer_12_max_l1_linf_norm": 3.0038747787475586, + "layer_12_max_spectral_norm": 0.43363285064697266, + "total_sharpness": 0.00018701925000641495, + "ip_v_neg_g": 0.013558454811573029, + "cos_v_neg_g": 0.0027866631280630827, + "v_norm": 11.052172660827637, + "g_norm": 0.4402284324169159, + "hv_norm": 0.19568948447704315, + "cos_v_hv": 0.010562495328485966, + "hg_norm": 3.11403751373291, + "cos_g_hg": 0.4315791428089142, + "v_parallel_norm": 0.0022069315891712904, + "v_perp_norm": 11.052172660827637, + "layer_1_v_norm": 2.6105237007141113, + "layer_1_cos_v_neg_g": 0.004444130230695009, + "layer_2_v_norm": 2.562406063079834, + "layer_2_cos_v_neg_g": 0.001739384839311242, + "layer_3_v_norm": 2.5693249702453613, + "layer_3_cos_v_neg_g": 0.0025792643427848816, + "layer_4_v_norm": 2.5638175010681152, + "layer_4_cos_v_neg_g": 0.002272270852699876, + "layer_5_v_norm": 2.521930456161499, + "layer_5_cos_v_neg_g": 0.002221554983407259, + "layer_6_v_norm": 2.6258368492126465, + "layer_6_cos_v_neg_g": 0.0027449203189462423, + "layer_7_v_norm": 2.647520065307617, + "layer_7_cos_v_neg_g": 0.002008689334616065, + "layer_8_v_norm": 2.613723039627075, + "layer_8_cos_v_neg_g": 0.004596192389726639, + "layer_9_v_norm": 2.61061954498291, + "layer_9_cos_v_neg_g": 0.00829701405018568, + "layer_10_v_norm": 2.599371910095215, + "layer_10_cos_v_neg_g": 0.01152307353913784, + "layer_11_v_norm": 2.6229381561279297, + "layer_11_cos_v_neg_g": 0.012932103127241135, + "layer_12_v_norm": 2.531911611557007, + "layer_12_cos_v_neg_g": 0.014469890855252743, + "layer_1_sharpness": 7.203739369288087e-05, + "layer_2_sharpness": 1.5766559954499826e-05, + "layer_3_sharpness": 3.193589873262681e-05, + "layer_4_sharpness": 2.8453463528421707e-05, + "layer_5_sharpness": 2.204861812060699e-05, + "layer_6_sharpness": 1.6528718333574943e-05, + "layer_7_sharpness": 3.008226485690102e-05, + "layer_8_sharpness": 4.498724592849612e-05, + "layer_9_sharpness": 6.551869591930881e-05, + "layer_10_sharpness": 7.857521995902061e-05, + "layer_11_sharpness": 5.320131094777025e-05, + "layer_12_sharpness": 0.00019073949079029262 +} \ No newline at end of file diff --git 
a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..a2b46f6deb4836e37d2dac903d4baddb206ee178 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 9.915380477905273, + "total_l1_linf_norm": 86437.8671875, + "total_spectral_norm": 9.915380477905273, + "layer_1_update_fnorm": 2.0438995361328125, + "layer_1_max_l1_linf_norm": 2.5398316383361816, + "layer_1_max_spectral_norm": 0.39368802309036255, + "layer_2_update_fnorm": 2.1880242824554443, + "layer_2_max_l1_linf_norm": 2.3677244186401367, + "layer_2_max_spectral_norm": 0.27002987265586853, + "layer_3_update_fnorm": 2.0477893352508545, + "layer_3_max_l1_linf_norm": 2.3326096534729004, + "layer_3_max_spectral_norm": 0.27257052063941956, + "layer_4_update_fnorm": 2.0600953102111816, + "layer_4_max_l1_linf_norm": 2.3216493129730225, + "layer_4_max_spectral_norm": 0.32536667585372925, + "layer_5_update_fnorm": 2.105133056640625, + "layer_5_max_l1_linf_norm": 2.2259433269500732, + "layer_5_max_spectral_norm": 0.3466506600379944, + "layer_6_update_fnorm": 2.2136919498443604, + "layer_6_max_l1_linf_norm": 2.328183650970459, + "layer_6_max_spectral_norm": 0.2690274119377136, + "layer_7_update_fnorm": 2.283515453338623, + "layer_7_max_l1_linf_norm": 2.4635486602783203, + "layer_7_max_spectral_norm": 0.28285133838653564, + "layer_8_update_fnorm": 2.240684986114502, + "layer_8_max_l1_linf_norm": 2.504565715789795, + "layer_8_max_spectral_norm": 0.31892818212509155, + "layer_9_update_fnorm": 2.232908248901367, + "layer_9_max_l1_linf_norm": 2.6165223121643066, + "layer_9_max_spectral_norm": 0.36975130438804626, + "layer_10_update_fnorm": 2.223939895629883, + "layer_10_max_l1_linf_norm": 2.4824514389038086, + "layer_10_max_spectral_norm": 0.38124755024909973, + "layer_11_update_fnorm": 2.2380213737487793, + "layer_11_max_l1_linf_norm": 2.7223458290100098, + "layer_11_max_spectral_norm": 0.37617990374565125, + "layer_12_update_fnorm": 2.2196433544158936, + "layer_12_max_l1_linf_norm": 2.5844247341156006, + "layer_12_max_spectral_norm": 0.40000802278518677, + "total_sharpness": 0.000912644958589226, + "ip_v_neg_g": 0.0494292788207531, + "cos_v_neg_g": 0.009358260780572891, + "v_norm": 9.915380477905273, + "g_norm": 0.5326964259147644, + "hv_norm": 0.5350024700164795, + "cos_v_hv": 0.016914356499910355, + "hg_norm": 5.925738334655762, + "cos_g_hg": 0.5814914703369141, + "v_parallel_norm": 0.00605872692540288, + "v_perp_norm": 9.91537857055664, + "layer_1_v_norm": 2.0438995361328125, + "layer_1_cos_v_neg_g": 0.049937546253204346, + "layer_2_v_norm": 2.1880242824554443, + "layer_2_cos_v_neg_g": 0.01841110549867153, + "layer_3_v_norm": 2.0477893352508545, + "layer_3_cos_v_neg_g": 0.027329303324222565, + "layer_4_v_norm": 2.0600953102111816, + "layer_4_cos_v_neg_g": 0.021034162491559982, + "layer_5_v_norm": 2.105133056640625, + "layer_5_cos_v_neg_g": 0.01777224987745285, + "layer_6_v_norm": 2.2136919498443604, + "layer_6_cos_v_neg_g": 0.012578587979078293, + "layer_7_v_norm": 2.283515453338623, + "layer_7_cos_v_neg_g": 0.016024969518184662, + "layer_8_v_norm": 2.240684986114502, + "layer_8_cos_v_neg_g": 0.01921374350786209, + "layer_9_v_norm": 2.232908248901367, + "layer_9_cos_v_neg_g": 0.02161727473139763, + "layer_10_v_norm": 2.223939895629883, + 
"layer_10_cos_v_neg_g": 0.021012702956795692, + "layer_11_v_norm": 2.2380211353302, + "layer_11_cos_v_neg_g": 0.02015974558889866, + "layer_12_v_norm": 2.2196433544158936, + "layer_12_cos_v_neg_g": 0.023818347603082657, + "layer_1_sharpness": 0.0012473235838115215, + "layer_2_sharpness": 7.326009654207155e-05, + "layer_3_sharpness": 0.00030767981661483645, + "layer_4_sharpness": 0.00022074930893722922, + "layer_5_sharpness": 0.00013390764070209116, + "layer_6_sharpness": 6.127274536993355e-05, + "layer_7_sharpness": 7.863587961765006e-05, + "layer_8_sharpness": 0.00015604918007738888, + "layer_9_sharpness": 0.00024123446200974286, + "layer_10_sharpness": 0.0002430282620480284, + "layer_11_sharpness": 0.00022360206639859825, + "layer_12_sharpness": 0.0005124674062244594 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..ff5a5f8ef1d14f01862b7c3fd44453233fdeebd2 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.263422966003418, + "total_l1_linf_norm": 89780.9453125, + "total_spectral_norm": 10.263420104980469, + "layer_1_update_fnorm": 2.286278247833252, + "layer_1_max_l1_linf_norm": 2.718672752380371, + "layer_1_max_spectral_norm": 0.31521356105804443, + "layer_2_update_fnorm": 2.246884822845459, + "layer_2_max_l1_linf_norm": 2.518848180770874, + "layer_2_max_spectral_norm": 0.2520323693752289, + "layer_3_update_fnorm": 2.1303677558898926, + "layer_3_max_l1_linf_norm": 2.4384407997131348, + "layer_3_max_spectral_norm": 0.2323179692029953, + "layer_4_update_fnorm": 2.172267436981201, + "layer_4_max_l1_linf_norm": 2.442047119140625, + "layer_4_max_spectral_norm": 0.3273738920688629, + "layer_5_update_fnorm": 2.22578763961792, + "layer_5_max_l1_linf_norm": 2.286651611328125, + "layer_5_max_spectral_norm": 0.32535186409950256, + "layer_6_update_fnorm": 2.3674376010894775, + "layer_6_max_l1_linf_norm": 2.3969993591308594, + "layer_6_max_spectral_norm": 0.2805888056755066, + "layer_7_update_fnorm": 2.3968136310577393, + "layer_7_max_l1_linf_norm": 2.473806858062744, + "layer_7_max_spectral_norm": 0.2787766754627228, + "layer_8_update_fnorm": 2.348999261856079, + "layer_8_max_l1_linf_norm": 2.5833351612091064, + "layer_8_max_spectral_norm": 0.32995644211769104, + "layer_9_update_fnorm": 2.351954221725464, + "layer_9_max_l1_linf_norm": 2.7337231636047363, + "layer_9_max_spectral_norm": 0.40985816717147827, + "layer_10_update_fnorm": 2.3330838680267334, + "layer_10_max_l1_linf_norm": 2.7761402130126953, + "layer_10_max_spectral_norm": 0.3758886456489563, + "layer_11_update_fnorm": 2.359994649887085, + "layer_11_max_l1_linf_norm": 2.8012564182281494, + "layer_11_max_spectral_norm": 0.38416677713394165, + "layer_12_update_fnorm": 2.2994983196258545, + "layer_12_max_l1_linf_norm": 2.941366195678711, + "layer_12_max_spectral_norm": 0.3921424448490143, + "total_sharpness": 0.0006116385338827968, + "ip_v_neg_g": 0.040696293115615845, + "cos_v_neg_g": 0.007238621823489666, + "v_norm": 10.263422966003418, + "g_norm": 0.5477806925773621, + "hv_norm": 0.37627166509628296, + "cos_v_hv": 0.016683436930179596, + "hg_norm": 5.001200199127197, + "cos_g_hg": 0.5308927297592163, + "v_parallel_norm": 0.0029963268898427486, + "v_perp_norm": 
10.263422012329102, + "layer_1_v_norm": 2.286278247833252, + "layer_1_cos_v_neg_g": 0.02661876380443573, + "layer_2_v_norm": 2.246884822845459, + "layer_2_cos_v_neg_g": 0.008068752475082874, + "layer_3_v_norm": 2.1303679943084717, + "layer_3_cos_v_neg_g": 0.01131304632872343, + "layer_4_v_norm": 2.172267436981201, + "layer_4_cos_v_neg_g": 0.013804829679429531, + "layer_5_v_norm": 2.22578763961792, + "layer_5_cos_v_neg_g": 0.012961353175342083, + "layer_6_v_norm": 2.3674376010894775, + "layer_6_cos_v_neg_g": 0.010703140869736671, + "layer_7_v_norm": 2.3968136310577393, + "layer_7_cos_v_neg_g": 0.009174156002700329, + "layer_8_v_norm": 2.348999261856079, + "layer_8_cos_v_neg_g": 0.013434456661343575, + "layer_9_v_norm": 2.351954221725464, + "layer_9_cos_v_neg_g": 0.01810322515666485, + "layer_10_v_norm": 2.3330838680267334, + "layer_10_cos_v_neg_g": 0.02011301927268505, + "layer_11_v_norm": 2.359994411468506, + "layer_11_cos_v_neg_g": 0.018566302955150604, + "layer_12_v_norm": 2.2994983196258545, + "layer_12_cos_v_neg_g": 0.021296009421348572, + "layer_1_sharpness": 0.00048153699026443064, + "layer_2_sharpness": 4.0483686461811885e-05, + "layer_3_sharpness": 9.38211233005859e-05, + "layer_4_sharpness": 0.0001386212679790333, + "layer_5_sharpness": 0.0001650949998293072, + "layer_6_sharpness": 5.6371365644736215e-05, + "layer_7_sharpness": 6.2794606492389e-05, + "layer_8_sharpness": 0.00012219155905768275, + "layer_9_sharpness": 0.0002310273121111095, + "layer_10_sharpness": 0.00023030333977658302, + "layer_11_sharpness": 0.0001911867002490908, + "layer_12_sharpness": 0.0004624182765837759 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..3a736cfa386d2c796cd68d8a49a14558e148cb92 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.383089065551758, + "total_l1_linf_norm": 91098.078125, + "total_spectral_norm": 10.383090019226074, + "layer_1_update_fnorm": 2.351362466812134, + "layer_1_max_l1_linf_norm": 2.5544931888580322, + "layer_1_max_spectral_norm": 0.29653024673461914, + "layer_2_update_fnorm": 2.375035047531128, + "layer_2_max_l1_linf_norm": 2.5332045555114746, + "layer_2_max_spectral_norm": 0.2420983761548996, + "layer_3_update_fnorm": 2.2701213359832764, + "layer_3_max_l1_linf_norm": 2.4809436798095703, + "layer_3_max_spectral_norm": 0.24573932588100433, + "layer_4_update_fnorm": 2.2803235054016113, + "layer_4_max_l1_linf_norm": 2.5305745601654053, + "layer_4_max_spectral_norm": 0.3103942573070526, + "layer_5_update_fnorm": 2.2745449542999268, + "layer_5_max_l1_linf_norm": 2.333176612854004, + "layer_5_max_spectral_norm": 0.3163028955459595, + "layer_6_update_fnorm": 2.404024124145508, + "layer_6_max_l1_linf_norm": 2.4572668075561523, + "layer_6_max_spectral_norm": 0.2915378212928772, + "layer_7_update_fnorm": 2.43497633934021, + "layer_7_max_l1_linf_norm": 2.489731788635254, + "layer_7_max_spectral_norm": 0.27441728115081787, + "layer_8_update_fnorm": 2.3773598670959473, + "layer_8_max_l1_linf_norm": 2.4633383750915527, + "layer_8_max_spectral_norm": 0.292713463306427, + "layer_9_update_fnorm": 2.3835809230804443, + "layer_9_max_l1_linf_norm": 2.5368642807006836, + "layer_9_max_spectral_norm": 0.35168755054473877, + 
"layer_10_update_fnorm": 2.3707454204559326, + "layer_10_max_l1_linf_norm": 2.6307482719421387, + "layer_10_max_spectral_norm": 0.37819087505340576, + "layer_11_update_fnorm": 2.387603998184204, + "layer_11_max_l1_linf_norm": 2.735109806060791, + "layer_11_max_spectral_norm": 0.3672589957714081, + "layer_12_update_fnorm": 2.2964282035827637, + "layer_12_max_l1_linf_norm": 2.854572057723999, + "layer_12_max_spectral_norm": 0.3870275616645813, + "total_sharpness": 0.0004737069830298424, + "ip_v_neg_g": 0.028315741568803787, + "cos_v_neg_g": 0.005742334295064211, + "v_norm": 10.383089065551758, + "g_norm": 0.47491171956062317, + "hv_norm": 0.32750388979911804, + "cos_v_hv": 0.015018271282315254, + "hg_norm": 3.302027702331543, + "cos_g_hg": 0.5066918134689331, + "v_parallel_norm": 0.003577508730813861, + "v_perp_norm": 10.383089065551758, + "layer_1_v_norm": 2.351362466812134, + "layer_1_cos_v_neg_g": 0.023652248084545135, + "layer_2_v_norm": 2.375035047531128, + "layer_2_cos_v_neg_g": 0.005153636448085308, + "layer_3_v_norm": 2.2701210975646973, + "layer_3_cos_v_neg_g": 0.00626166258007288, + "layer_4_v_norm": 2.2803235054016113, + "layer_4_cos_v_neg_g": 0.008931688964366913, + "layer_5_v_norm": 2.2745449542999268, + "layer_5_cos_v_neg_g": 0.007681644521653652, + "layer_6_v_norm": 2.404024124145508, + "layer_6_cos_v_neg_g": 0.00726710120216012, + "layer_7_v_norm": 2.43497633934021, + "layer_7_cos_v_neg_g": 0.009155619889497757, + "layer_8_v_norm": 2.3773598670959473, + "layer_8_cos_v_neg_g": 0.011638582684099674, + "layer_9_v_norm": 2.3835809230804443, + "layer_9_cos_v_neg_g": 0.012576007284224033, + "layer_10_v_norm": 2.3707454204559326, + "layer_10_cos_v_neg_g": 0.015553365461528301, + "layer_11_v_norm": 2.387603998184204, + "layer_11_cos_v_neg_g": 0.01631925068795681, + "layer_12_v_norm": 2.2964282035827637, + "layer_12_cos_v_neg_g": 0.017730966210365295, + "layer_1_sharpness": 0.0002595282276161015, + "layer_2_sharpness": 1.9191214960301295e-05, + "layer_3_sharpness": 0.00011152659863000736, + "layer_4_sharpness": 7.876769814174622e-05, + "layer_5_sharpness": 6.036480044713244e-05, + "layer_6_sharpness": 3.325983925606124e-05, + "layer_7_sharpness": 6.238817150006071e-05, + "layer_8_sharpness": 8.666580833960325e-05, + "layer_9_sharpness": 0.00014606631884817034, + "layer_10_sharpness": 0.0001743217435432598, + "layer_11_sharpness": 0.00016375580162275583, + "layer_12_sharpness": 0.00036752488813363016 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..3c6bfdaa611131841057f7df3eb9615989432308 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.673310279846191, + "total_l1_linf_norm": 94002.421875, + "total_spectral_norm": 10.673311233520508, + "layer_1_update_fnorm": 2.4690022468566895, + "layer_1_max_l1_linf_norm": 2.5236976146698, + "layer_1_max_spectral_norm": 0.2917553782463074, + "layer_2_update_fnorm": 2.427267074584961, + "layer_2_max_l1_linf_norm": 2.6924147605895996, + "layer_2_max_spectral_norm": 0.25733357667922974, + "layer_3_update_fnorm": 2.3397841453552246, + "layer_3_max_l1_linf_norm": 2.62960147857666, + "layer_3_max_spectral_norm": 0.2604425847530365, + "layer_4_update_fnorm": 2.3644957542419434, + 
"layer_4_max_l1_linf_norm": 2.632394790649414, + "layer_4_max_spectral_norm": 0.3325355052947998, + "layer_5_update_fnorm": 2.3413584232330322, + "layer_5_max_l1_linf_norm": 2.366008996963501, + "layer_5_max_spectral_norm": 0.3100759983062744, + "layer_6_update_fnorm": 2.4785656929016113, + "layer_6_max_l1_linf_norm": 2.521883964538574, + "layer_6_max_spectral_norm": 0.2868313193321228, + "layer_7_update_fnorm": 2.5186901092529297, + "layer_7_max_l1_linf_norm": 2.7315783500671387, + "layer_7_max_spectral_norm": 0.2929537296295166, + "layer_8_update_fnorm": 2.492508888244629, + "layer_8_max_l1_linf_norm": 2.715674877166748, + "layer_8_max_spectral_norm": 0.3058781325817108, + "layer_9_update_fnorm": 2.4905645847320557, + "layer_9_max_l1_linf_norm": 3.029852867126465, + "layer_9_max_spectral_norm": 0.3746432363986969, + "layer_10_update_fnorm": 2.4902873039245605, + "layer_10_max_l1_linf_norm": 2.798569679260254, + "layer_10_max_spectral_norm": 0.3862682580947876, + "layer_11_update_fnorm": 2.5497422218322754, + "layer_11_max_l1_linf_norm": 2.880347490310669, + "layer_11_max_spectral_norm": 0.41240501403808594, + "layer_12_update_fnorm": 2.5042433738708496, + "layer_12_max_l1_linf_norm": 3.0798189640045166, + "layer_12_max_spectral_norm": 0.505025327205658, + "total_sharpness": 0.00038203701842576265, + "ip_v_neg_g": 0.027050191536545753, + "cos_v_neg_g": 0.005512669216841459, + "v_norm": 10.673310279846191, + "g_norm": 0.45973682403564453, + "hv_norm": 0.3484618365764618, + "cos_v_hv": 0.011701710522174835, + "hg_norm": 3.1497905254364014, + "cos_g_hg": 0.5219779014587402, + "v_parallel_norm": 0.005324503872543573, + "v_perp_norm": 10.673309326171875, + "layer_1_v_norm": 2.4690022468566895, + "layer_1_cos_v_neg_g": 0.015746312215924263, + "layer_2_v_norm": 2.427267074584961, + "layer_2_cos_v_neg_g": 0.003906283061951399, + "layer_3_v_norm": 2.3397841453552246, + "layer_3_cos_v_neg_g": 0.0055727120488882065, + "layer_4_v_norm": 2.3644957542419434, + "layer_4_cos_v_neg_g": 0.007414733525365591, + "layer_5_v_norm": 2.3413584232330322, + "layer_5_cos_v_neg_g": 0.006561627145856619, + "layer_6_v_norm": 2.4785656929016113, + "layer_6_cos_v_neg_g": 0.00609566317871213, + "layer_7_v_norm": 2.5186901092529297, + "layer_7_cos_v_neg_g": 0.007694188505411148, + "layer_8_v_norm": 2.492508888244629, + "layer_8_cos_v_neg_g": 0.010645559057593346, + "layer_9_v_norm": 2.4905645847320557, + "layer_9_cos_v_neg_g": 0.013145196251571178, + "layer_10_v_norm": 2.4902873039245605, + "layer_10_cos_v_neg_g": 0.014269820414483547, + "layer_11_v_norm": 2.5497422218322754, + "layer_11_cos_v_neg_g": 0.01616852544248104, + "layer_12_v_norm": 2.5042433738708496, + "layer_12_cos_v_neg_g": 0.028164926916360855, + "layer_1_sharpness": 0.00015623593935742974, + "layer_2_sharpness": 9.915265763993375e-06, + "layer_3_sharpness": 1.6438525562989525e-05, + "layer_4_sharpness": 3.717118306667544e-05, + "layer_5_sharpness": 4.3020576413255185e-05, + "layer_6_sharpness": 2.6060029995278455e-05, + "layer_7_sharpness": 4.046618050779216e-05, + "layer_8_sharpness": 8.314189472002909e-05, + "layer_9_sharpness": 0.00014916557120159268, + "layer_10_sharpness": 0.00013113375462125987, + "layer_11_sharpness": 0.00012391181371640414, + "layer_12_sharpness": 0.0006217760383151472 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_3500.json new file mode 100644 index 
0000000000000000000000000000000000000000..35390275134cd19a4610cd4cbc2225ba7854a5a3 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.563586235046387, + "total_l1_linf_norm": 92933.046875, + "total_spectral_norm": 10.56358528137207, + "layer_1_update_fnorm": 2.452115535736084, + "layer_1_max_l1_linf_norm": 2.5434937477111816, + "layer_1_max_spectral_norm": 0.2711496949195862, + "layer_2_update_fnorm": 2.4078972339630127, + "layer_2_max_l1_linf_norm": 2.5308592319488525, + "layer_2_max_spectral_norm": 0.24620835483074188, + "layer_3_update_fnorm": 2.3362112045288086, + "layer_3_max_l1_linf_norm": 2.4991040229797363, + "layer_3_max_spectral_norm": 0.2564293444156647, + "layer_4_update_fnorm": 2.3406732082366943, + "layer_4_max_l1_linf_norm": 2.5501456260681152, + "layer_4_max_spectral_norm": 0.3204785883426666, + "layer_5_update_fnorm": 2.3105990886688232, + "layer_5_max_l1_linf_norm": 2.3891000747680664, + "layer_5_max_spectral_norm": 0.29505372047424316, + "layer_6_update_fnorm": 2.4493842124938965, + "layer_6_max_l1_linf_norm": 2.485567569732666, + "layer_6_max_spectral_norm": 0.26345938444137573, + "layer_7_update_fnorm": 2.487562894821167, + "layer_7_max_l1_linf_norm": 2.6363015174865723, + "layer_7_max_spectral_norm": 0.2721329927444458, + "layer_8_update_fnorm": 2.4369494915008545, + "layer_8_max_l1_linf_norm": 2.6553361415863037, + "layer_8_max_spectral_norm": 0.3007034957408905, + "layer_9_update_fnorm": 2.439055919647217, + "layer_9_max_l1_linf_norm": 2.89217472076416, + "layer_9_max_spectral_norm": 0.33670467138290405, + "layer_10_update_fnorm": 2.4283297061920166, + "layer_10_max_l1_linf_norm": 2.884068012237549, + "layer_10_max_spectral_norm": 0.37113678455352783, + "layer_11_update_fnorm": 2.465278387069702, + "layer_11_max_l1_linf_norm": 2.670604705810547, + "layer_11_max_spectral_norm": 0.3677293658256531, + "layer_12_update_fnorm": 2.371413230895996, + "layer_12_max_l1_linf_norm": 2.8225367069244385, + "layer_12_max_spectral_norm": 0.4210892617702484, + "total_sharpness": 0.0002710173139348626, + "ip_v_neg_g": 0.015359017997980118, + "cos_v_neg_g": 0.002848345087841153, + "v_norm": 10.563586235046387, + "g_norm": 0.5104573369026184, + "hv_norm": 0.2837684750556946, + "cos_v_hv": 0.010088909417390823, + "hg_norm": 3.9000535011291504, + "cos_g_hg": 0.5401759743690491, + "v_parallel_norm": 0.002462095580995083, + "v_perp_norm": 10.56358528137207, + "layer_1_v_norm": 2.452115535736084, + "layer_1_cos_v_neg_g": 0.007780927699059248, + "layer_2_v_norm": 2.4078972339630127, + "layer_2_cos_v_neg_g": 0.002583047840744257, + "layer_3_v_norm": 2.3362114429473877, + "layer_3_cos_v_neg_g": 0.00260580750182271, + "layer_4_v_norm": 2.3406732082366943, + "layer_4_cos_v_neg_g": 0.004484222736209631, + "layer_5_v_norm": 2.3105990886688232, + "layer_5_cos_v_neg_g": 0.003735010977834463, + "layer_6_v_norm": 2.4493842124938965, + "layer_6_cos_v_neg_g": 0.0036194375716149807, + "layer_7_v_norm": 2.487562894821167, + "layer_7_cos_v_neg_g": 0.004057140555232763, + "layer_8_v_norm": 2.4369494915008545, + "layer_8_cos_v_neg_g": 0.005777789279818535, + "layer_9_v_norm": 2.439055919647217, + "layer_9_cos_v_neg_g": 0.006710890680551529, + "layer_10_v_norm": 2.4283297061920166, + "layer_10_cos_v_neg_g": 0.00754818320274353, + "layer_11_v_norm": 2.465278387069702, + "layer_11_cos_v_neg_g": 0.009369853883981705, + "layer_12_v_norm": 2.371413230895996, + "layer_12_cos_v_neg_g": 
0.015104876831173897, + "layer_1_sharpness": 9.213466546498239e-05, + "layer_2_sharpness": 1.3381215467234142e-05, + "layer_3_sharpness": 2.4505141482222825e-05, + "layer_4_sharpness": 3.804594962275587e-05, + "layer_5_sharpness": 3.998633837909438e-05, + "layer_6_sharpness": 2.413857691863086e-05, + "layer_7_sharpness": 3.0258217520895414e-05, + "layer_8_sharpness": 6.107889203121886e-05, + "layer_9_sharpness": 9.50642570387572e-05, + "layer_10_sharpness": 0.00012116967263864353, + "layer_11_sharpness": 0.00010713984374888241, + "layer_12_sharpness": 0.00037823698949068785 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..8d9a6cfc8bd2075f3fc0467955a28d1b039b6edc --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.695096015930176, + "total_l1_linf_norm": 94311.1953125, + "total_spectral_norm": 10.695096969604492, + "layer_1_update_fnorm": 2.456200122833252, + "layer_1_max_l1_linf_norm": 2.5331859588623047, + "layer_1_max_spectral_norm": 0.27314189076423645, + "layer_2_update_fnorm": 2.4586269855499268, + "layer_2_max_l1_linf_norm": 2.5560102462768555, + "layer_2_max_spectral_norm": 0.25504130125045776, + "layer_3_update_fnorm": 2.3979058265686035, + "layer_3_max_l1_linf_norm": 2.5837979316711426, + "layer_3_max_spectral_norm": 0.26357606053352356, + "layer_4_update_fnorm": 2.3997488021850586, + "layer_4_max_l1_linf_norm": 2.598128318786621, + "layer_4_max_spectral_norm": 0.3330211639404297, + "layer_5_update_fnorm": 2.367027521133423, + "layer_5_max_l1_linf_norm": 2.420358657836914, + "layer_5_max_spectral_norm": 0.30297181010246277, + "layer_6_update_fnorm": 2.496079206466675, + "layer_6_max_l1_linf_norm": 2.471982479095459, + "layer_6_max_spectral_norm": 0.2702246606349945, + "layer_7_update_fnorm": 2.5309531688690186, + "layer_7_max_l1_linf_norm": 2.7051849365234375, + "layer_7_max_spectral_norm": 0.287362277507782, + "layer_8_update_fnorm": 2.4888312816619873, + "layer_8_max_l1_linf_norm": 2.7744123935699463, + "layer_8_max_spectral_norm": 0.3286466598510742, + "layer_9_update_fnorm": 2.4789645671844482, + "layer_9_max_l1_linf_norm": 2.8991994857788086, + "layer_9_max_spectral_norm": 0.368624746799469, + "layer_10_update_fnorm": 2.485370397567749, + "layer_10_max_l1_linf_norm": 3.2423255443573, + "layer_10_max_spectral_norm": 0.41189044713974, + "layer_11_update_fnorm": 2.5089240074157715, + "layer_11_max_l1_linf_norm": 2.9685192108154297, + "layer_11_max_spectral_norm": 0.4006081223487854, + "layer_12_update_fnorm": 2.4220144748687744, + "layer_12_max_l1_linf_norm": 2.987447738647461, + "layer_12_max_spectral_norm": 0.4270387291908264, + "total_sharpness": 0.0002861295943148434, + "ip_v_neg_g": 0.016635796055197716, + "cos_v_neg_g": 0.003911289386451244, + "v_norm": 10.695096015930176, + "g_norm": 0.397684782743454, + "hv_norm": 0.21616770327091217, + "cos_v_hv": 0.014156525023281574, + "hg_norm": 2.457103729248047, + "cos_g_hg": 0.44232794642448425, + "v_parallel_norm": 0.004254875238984823, + "v_perp_norm": 10.69509506225586, + "layer_1_v_norm": 2.456200122833252, + "layer_1_cos_v_neg_g": 0.010271025821566582, + "layer_2_v_norm": 2.4586269855499268, + "layer_2_cos_v_neg_g": 0.0024848717730492353, + "layer_3_v_norm": 
2.3979058265686035, + "layer_3_cos_v_neg_g": 0.0037125989329069853, + "layer_4_v_norm": 2.3997488021850586, + "layer_4_cos_v_neg_g": 0.005320669151842594, + "layer_5_v_norm": 2.367027521133423, + "layer_5_cos_v_neg_g": 0.005165169481188059, + "layer_6_v_norm": 2.496079206466675, + "layer_6_cos_v_neg_g": 0.004008992575109005, + "layer_7_v_norm": 2.5309531688690186, + "layer_7_cos_v_neg_g": 0.0059535205364227295, + "layer_8_v_norm": 2.4888312816619873, + "layer_8_cos_v_neg_g": 0.007414746563881636, + "layer_9_v_norm": 2.4789645671844482, + "layer_9_cos_v_neg_g": 0.008592002093791962, + "layer_10_v_norm": 2.485370397567749, + "layer_10_cos_v_neg_g": 0.010110047645866871, + "layer_11_v_norm": 2.5089240074157715, + "layer_11_cos_v_neg_g": 0.012315794825553894, + "layer_12_v_norm": 2.4220144748687744, + "layer_12_cos_v_neg_g": 0.015391080640256405, + "layer_1_sharpness": 8.608481584815308e-05, + "layer_2_sharpness": 1.0088499038829468e-05, + "layer_3_sharpness": 1.9532239093678072e-05, + "layer_4_sharpness": 3.926345016225241e-05, + "layer_5_sharpness": 3.9830367313697934e-05, + "layer_6_sharpness": 2.1246134565444663e-05, + "layer_7_sharpness": 3.8776321162004024e-05, + "layer_8_sharpness": 7.040281343506649e-05, + "layer_9_sharpness": 0.0001070146172423847, + "layer_10_sharpness": 0.00015040319703985006, + "layer_11_sharpness": 0.00011231201642658561, + "layer_12_sharpness": 0.00033166646608151495 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..e4de826a55a98b54846d94abc92353bdd1d3d29f --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.882524490356445, + "total_l1_linf_norm": 96128.390625, + "total_spectral_norm": 10.882523536682129, + "layer_1_update_fnorm": 2.580296516418457, + "layer_1_max_l1_linf_norm": 2.700620412826538, + "layer_1_max_spectral_norm": 0.3016272187232971, + "layer_2_update_fnorm": 2.5243263244628906, + "layer_2_max_l1_linf_norm": 2.6981096267700195, + "layer_2_max_spectral_norm": 0.2694537341594696, + "layer_3_update_fnorm": 2.46256422996521, + "layer_3_max_l1_linf_norm": 2.7382969856262207, + "layer_3_max_spectral_norm": 0.28356054425239563, + "layer_4_update_fnorm": 2.47914457321167, + "layer_4_max_l1_linf_norm": 2.6540637016296387, + "layer_4_max_spectral_norm": 0.3232242465019226, + "layer_5_update_fnorm": 2.4523651599884033, + "layer_5_max_l1_linf_norm": 2.4945764541625977, + "layer_5_max_spectral_norm": 0.310015469789505, + "layer_6_update_fnorm": 2.5481576919555664, + "layer_6_max_l1_linf_norm": 2.523074150085449, + "layer_6_max_spectral_norm": 0.2711200714111328, + "layer_7_update_fnorm": 2.5645699501037598, + "layer_7_max_l1_linf_norm": 2.6933183670043945, + "layer_7_max_spectral_norm": 0.297297865152359, + "layer_8_update_fnorm": 2.5228042602539062, + "layer_8_max_l1_linf_norm": 2.91436505317688, + "layer_8_max_spectral_norm": 0.3374768793582916, + "layer_9_update_fnorm": 2.520721435546875, + "layer_9_max_l1_linf_norm": 2.793165445327759, + "layer_9_max_spectral_norm": 0.38143253326416016, + "layer_10_update_fnorm": 2.5209405422210693, + "layer_10_max_l1_linf_norm": 2.8103930950164795, + "layer_10_max_spectral_norm": 0.4053286612033844, + "layer_11_update_fnorm": 2.5457820892333984, + 
"layer_11_max_l1_linf_norm": 2.954496383666992, + "layer_11_max_spectral_norm": 0.40674394369125366, + "layer_12_update_fnorm": 2.461291790008545, + "layer_12_max_l1_linf_norm": 2.8899080753326416, + "layer_12_max_spectral_norm": 0.43222564458847046, + "total_sharpness": 0.0002877218939829618, + "ip_v_neg_g": 0.019394434988498688, + "cos_v_neg_g": 0.004406442865729332, + "v_norm": 10.882524490356445, + "g_norm": 0.4044448435306549, + "hv_norm": 0.23844094574451447, + "cos_v_hv": 0.013131723739206791, + "hg_norm": 2.4999616146087646, + "cos_g_hg": 0.4714067876338959, + "v_parallel_norm": 0.0036143437027931213, + "v_perp_norm": 10.882523536682129, + "layer_1_v_norm": 2.580296516418457, + "layer_1_cos_v_neg_g": 0.01277222204953432, + "layer_2_v_norm": 2.5243263244628906, + "layer_2_cos_v_neg_g": 0.0032608548644930124, + "layer_3_v_norm": 2.46256422996521, + "layer_3_cos_v_neg_g": 0.004323846660554409, + "layer_4_v_norm": 2.47914457321167, + "layer_4_cos_v_neg_g": 0.005702052265405655, + "layer_5_v_norm": 2.4523651599884033, + "layer_5_cos_v_neg_g": 0.00477238604798913, + "layer_6_v_norm": 2.5481576919555664, + "layer_6_cos_v_neg_g": 0.005410631652921438, + "layer_7_v_norm": 2.5645699501037598, + "layer_7_cos_v_neg_g": 0.0066404789686203, + "layer_8_v_norm": 2.5228042602539062, + "layer_8_cos_v_neg_g": 0.008125768974423409, + "layer_9_v_norm": 2.520721435546875, + "layer_9_cos_v_neg_g": 0.011265105567872524, + "layer_10_v_norm": 2.5209405422210693, + "layer_10_cos_v_neg_g": 0.013747470453381538, + "layer_11_v_norm": 2.5457820892333984, + "layer_11_cos_v_neg_g": 0.015080825425684452, + "layer_12_v_norm": 2.461291790008545, + "layer_12_cos_v_neg_g": 0.015539453364908695, + "layer_1_sharpness": 0.00010545101395109668, + "layer_2_sharpness": 9.92404056887608e-06, + "layer_3_sharpness": 1.5065172192407772e-05, + "layer_4_sharpness": 3.1300187401939183e-05, + "layer_5_sharpness": 3.3491400245111436e-05, + "layer_6_sharpness": 2.5060382540686987e-05, + "layer_7_sharpness": 5.230049282545224e-05, + "layer_8_sharpness": 7.004935469012707e-05, + "layer_9_sharpness": 0.00012506918574217707, + "layer_10_sharpness": 0.0001385064679197967, + "layer_11_sharpness": 9.835037781158462e-05, + "layer_12_sharpness": 0.00029553897911682725 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..37ff696ac9b5f451242362cbabbee43f5c41d4a2 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 6.458477973937988, + "total_l1_linf_norm": 55302.921875, + "total_spectral_norm": 6.4584784507751465, + "layer_1_update_fnorm": 1.0805782079696655, + "layer_1_max_l1_linf_norm": 2.874324321746826, + "layer_1_max_spectral_norm": 0.3432009220123291, + "layer_2_update_fnorm": 1.132648229598999, + "layer_2_max_l1_linf_norm": 1.4457972049713135, + "layer_2_max_spectral_norm": 0.18307170271873474, + "layer_3_update_fnorm": 1.1789685487747192, + "layer_3_max_l1_linf_norm": 1.4546661376953125, + "layer_3_max_spectral_norm": 0.20787981152534485, + "layer_4_update_fnorm": 1.2307788133621216, + "layer_4_max_l1_linf_norm": 1.423206090927124, + "layer_4_max_spectral_norm": 0.22764837741851807, + "layer_5_update_fnorm": 1.2795464992523193, + "layer_5_max_l1_linf_norm": 1.5482652187347412, + 
"layer_5_max_spectral_norm": 0.27527403831481934, + "layer_6_update_fnorm": 1.400022268295288, + "layer_6_max_l1_linf_norm": 1.5407778024673462, + "layer_6_max_spectral_norm": 0.2988075911998749, + "layer_7_update_fnorm": 1.5037137269973755, + "layer_7_max_l1_linf_norm": 1.7126505374908447, + "layer_7_max_spectral_norm": 0.2902970612049103, + "layer_8_update_fnorm": 1.5197638273239136, + "layer_8_max_l1_linf_norm": 1.7723963260650635, + "layer_8_max_spectral_norm": 0.2848052680492401, + "layer_9_update_fnorm": 1.516340970993042, + "layer_9_max_l1_linf_norm": 1.709318995475769, + "layer_9_max_spectral_norm": 0.2677989900112152, + "layer_10_update_fnorm": 1.5266140699386597, + "layer_10_max_l1_linf_norm": 1.829153299331665, + "layer_10_max_spectral_norm": 0.2644772529602051, + "layer_11_update_fnorm": 1.43009614944458, + "layer_11_max_l1_linf_norm": 1.8213675022125244, + "layer_11_max_spectral_norm": 0.2616080343723297, + "layer_12_update_fnorm": 1.3344841003417969, + "layer_12_max_l1_linf_norm": 2.1655449867248535, + "layer_12_max_spectral_norm": 0.300443559885025, + "total_sharpness": 0.0035500105004757643, + "ip_v_neg_g": 0.08956904709339142, + "cos_v_neg_g": 0.024140138179063797, + "v_norm": 6.458477973937988, + "g_norm": 0.5744973421096802, + "hv_norm": 0.41424229741096497, + "cos_v_hv": 0.05534844473004341, + "hg_norm": 1.530552625656128, + "cos_g_hg": 0.46232160925865173, + "v_parallel_norm": 0.012041973881423473, + "v_perp_norm": 6.45846700668335, + "layer_1_v_norm": 1.0805782079696655, + "layer_1_cos_v_neg_g": 0.09234090894460678, + "layer_2_v_norm": 1.132648229598999, + "layer_2_cos_v_neg_g": 0.06453704833984375, + "layer_3_v_norm": 1.1789686679840088, + "layer_3_cos_v_neg_g": 0.06176647171378136, + "layer_4_v_norm": 1.2307788133621216, + "layer_4_cos_v_neg_g": 0.0641866996884346, + "layer_5_v_norm": 1.2795464992523193, + "layer_5_cos_v_neg_g": 0.044752538204193115, + "layer_6_v_norm": 1.4000223875045776, + "layer_6_cos_v_neg_g": 0.06471157819032669, + "layer_7_v_norm": 1.5037137269973755, + "layer_7_cos_v_neg_g": 0.058813177049160004, + "layer_8_v_norm": 1.5197639465332031, + "layer_8_cos_v_neg_g": 0.05668719857931137, + "layer_9_v_norm": 1.516340970993042, + "layer_9_cos_v_neg_g": 0.05501868948340416, + "layer_10_v_norm": 1.5266140699386597, + "layer_10_cos_v_neg_g": 0.05734039098024368, + "layer_11_v_norm": 1.43009614944458, + "layer_11_cos_v_neg_g": 0.063247911632061, + "layer_12_v_norm": 1.3344841003417969, + "layer_12_cos_v_neg_g": 0.05189661309123039, + "layer_1_sharpness": 0.013685910031199455, + "layer_2_sharpness": 0.0006183285149745643, + "layer_3_sharpness": 0.0005936508532613516, + "layer_4_sharpness": 0.0009017509291879833, + "layer_5_sharpness": 0.0006189326522871852, + "layer_6_sharpness": 0.0005418261280283332, + "layer_7_sharpness": 0.000736046175006777, + "layer_8_sharpness": 0.0004521456139627844, + "layer_9_sharpness": 0.000563790206797421, + "layer_10_sharpness": 0.0006712583708576858, + "layer_11_sharpness": 0.0007420234614983201, + "layer_12_sharpness": 0.0007457782048732042 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..f79dba60d5a28cfd119dda30b77d6e704a1369b0 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + 
"total_update_fnorm": 10.750276565551758, + "total_l1_linf_norm": 94821.59375, + "total_spectral_norm": 10.75027847290039, + "layer_1_update_fnorm": 2.499739170074463, + "layer_1_max_l1_linf_norm": 2.6143462657928467, + "layer_1_max_spectral_norm": 0.28817322850227356, + "layer_2_update_fnorm": 2.4950804710388184, + "layer_2_max_l1_linf_norm": 2.6044130325317383, + "layer_2_max_spectral_norm": 0.2580440938472748, + "layer_3_update_fnorm": 2.438260555267334, + "layer_3_max_l1_linf_norm": 2.5747742652893066, + "layer_3_max_spectral_norm": 0.26946091651916504, + "layer_4_update_fnorm": 2.431394100189209, + "layer_4_max_l1_linf_norm": 2.631894588470459, + "layer_4_max_spectral_norm": 0.33117440342903137, + "layer_5_update_fnorm": 2.417062520980835, + "layer_5_max_l1_linf_norm": 2.400522232055664, + "layer_5_max_spectral_norm": 0.3005564212799072, + "layer_6_update_fnorm": 2.526876926422119, + "layer_6_max_l1_linf_norm": 2.476646900177002, + "layer_6_max_spectral_norm": 0.26363492012023926, + "layer_7_update_fnorm": 2.562922716140747, + "layer_7_max_l1_linf_norm": 2.6119914054870605, + "layer_7_max_spectral_norm": 0.29159295558929443, + "layer_8_update_fnorm": 2.5219552516937256, + "layer_8_max_l1_linf_norm": 2.7311854362487793, + "layer_8_max_spectral_norm": 0.3170386850833893, + "layer_9_update_fnorm": 2.494863271713257, + "layer_9_max_l1_linf_norm": 2.6290838718414307, + "layer_9_max_spectral_norm": 0.3497203588485718, + "layer_10_update_fnorm": 2.4725825786590576, + "layer_10_max_l1_linf_norm": 2.697087049484253, + "layer_10_max_spectral_norm": 0.33573994040489197, + "layer_11_update_fnorm": 2.5070035457611084, + "layer_11_max_l1_linf_norm": 2.7367115020751953, + "layer_11_max_spectral_norm": 0.3474510610103607, + "layer_12_update_fnorm": 2.3998517990112305, + "layer_12_max_l1_linf_norm": 2.841585636138916, + "layer_12_max_spectral_norm": 0.37909358739852905, + "total_sharpness": 0.0001858558098319918, + "ip_v_neg_g": 0.014379970729351044, + "cos_v_neg_g": 0.0021341885440051556, + "v_norm": 10.750276565551758, + "g_norm": 0.6267662048339844, + "hv_norm": 0.18042884767055511, + "cos_v_hv": 0.011073623783886433, + "hg_norm": 8.2421875, + "cos_g_hg": 0.7270760536193848, + "v_parallel_norm": 0.0024877863470464945, + "v_perp_norm": 10.750275611877441, + "layer_1_v_norm": 2.499739170074463, + "layer_1_cos_v_neg_g": 0.008109292015433311, + "layer_2_v_norm": 2.4950804710388184, + "layer_2_cos_v_neg_g": 0.0022472667042165995, + "layer_3_v_norm": 2.438260555267334, + "layer_3_cos_v_neg_g": 0.0025254078209400177, + "layer_4_v_norm": 2.431394100189209, + "layer_4_cos_v_neg_g": 0.0026563196443021297, + "layer_5_v_norm": 2.417062520980835, + "layer_5_cos_v_neg_g": 0.0026859180070459843, + "layer_6_v_norm": 2.5268771648406982, + "layer_6_cos_v_neg_g": 0.004358337260782719, + "layer_7_v_norm": 2.562922716140747, + "layer_7_cos_v_neg_g": 0.003963588271290064, + "layer_8_v_norm": 2.5219552516937256, + "layer_8_cos_v_neg_g": 0.005262905266135931, + "layer_9_v_norm": 2.494863271713257, + "layer_9_cos_v_neg_g": 0.006631051190197468, + "layer_10_v_norm": 2.4725825786590576, + "layer_10_cos_v_neg_g": 0.007320498116314411, + "layer_11_v_norm": 2.5070035457611084, + "layer_11_cos_v_neg_g": 0.007516270037740469, + "layer_12_v_norm": 2.3998517990112305, + "layer_12_cos_v_neg_g": 0.007693364284932613, + "layer_1_sharpness": 6.133179704193026e-05, + "layer_2_sharpness": 8.28523207019316e-06, + "layer_3_sharpness": 1.1634346265054774e-05, + "layer_4_sharpness": 2.356616096221842e-05, + "layer_5_sharpness": 
3.0969658837420866e-05, + "layer_6_sharpness": 2.3490534658776596e-05, + "layer_7_sharpness": 4.021544009447098e-05, + "layer_8_sharpness": 7.022724457783625e-05, + "layer_9_sharpness": 8.354047895409167e-05, + "layer_10_sharpness": 7.610920147271827e-05, + "layer_11_sharpness": 6.130455585662276e-05, + "layer_12_sharpness": 0.00020911557658109814 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..9570ada53cd518542b6bd2f8f3adc79ca5dd11a9 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.855978012084961, + "total_l1_linf_norm": 95952.96875, + "total_spectral_norm": 10.855979919433594, + "layer_1_update_fnorm": 2.5398924350738525, + "layer_1_max_l1_linf_norm": 2.6295461654663086, + "layer_1_max_spectral_norm": 0.2958633303642273, + "layer_2_update_fnorm": 2.509390354156494, + "layer_2_max_l1_linf_norm": 2.643935203552246, + "layer_2_max_spectral_norm": 0.27534401416778564, + "layer_3_update_fnorm": 2.461306095123291, + "layer_3_max_l1_linf_norm": 2.55544376373291, + "layer_3_max_spectral_norm": 0.28097599744796753, + "layer_4_update_fnorm": 2.473623275756836, + "layer_4_max_l1_linf_norm": 2.6241657733917236, + "layer_4_max_spectral_norm": 0.3250953257083893, + "layer_5_update_fnorm": 2.450831890106201, + "layer_5_max_l1_linf_norm": 2.3935766220092773, + "layer_5_max_spectral_norm": 0.2903134822845459, + "layer_6_update_fnorm": 2.5462794303894043, + "layer_6_max_l1_linf_norm": 2.509443759918213, + "layer_6_max_spectral_norm": 0.25884130597114563, + "layer_7_update_fnorm": 2.5884268283843994, + "layer_7_max_l1_linf_norm": 2.6351733207702637, + "layer_7_max_spectral_norm": 0.30305546522140503, + "layer_8_update_fnorm": 2.5352137088775635, + "layer_8_max_l1_linf_norm": 2.6543402671813965, + "layer_8_max_spectral_norm": 0.3113987147808075, + "layer_9_update_fnorm": 2.540086269378662, + "layer_9_max_l1_linf_norm": 2.628190517425537, + "layer_9_max_spectral_norm": 0.37047600746154785, + "layer_10_update_fnorm": 2.533416748046875, + "layer_10_max_l1_linf_norm": 2.834587812423706, + "layer_10_max_spectral_norm": 0.37886884808540344, + "layer_11_update_fnorm": 2.5526418685913086, + "layer_11_max_l1_linf_norm": 2.869426727294922, + "layer_11_max_spectral_norm": 0.3896448016166687, + "layer_12_update_fnorm": 2.446298599243164, + "layer_12_max_l1_linf_norm": 2.832041025161743, + "layer_12_max_spectral_norm": 0.4232313334941864, + "total_sharpness": 0.00016816804418340325, + "ip_v_neg_g": 0.013785233721137047, + "cos_v_neg_g": 0.0017357267206534743, + "v_norm": 10.855978012084961, + "g_norm": 0.7315834164619446, + "hv_norm": 0.24060934782028198, + "cos_v_hv": 0.0075875213369727135, + "hg_norm": 3.2475059032440186, + "cos_g_hg": 0.6347116231918335, + "v_parallel_norm": 0.0016561612719669938, + "v_perp_norm": 10.855978012084961, + "layer_1_v_norm": 2.5398924350738525, + "layer_1_cos_v_neg_g": 0.005207525100558996, + "layer_2_v_norm": 2.509390354156494, + "layer_2_cos_v_neg_g": 0.0007525223190896213, + "layer_3_v_norm": 2.461305856704712, + "layer_3_cos_v_neg_g": 0.0021409473847597837, + "layer_4_v_norm": 2.473623275756836, + "layer_4_cos_v_neg_g": 0.0031921810004860163, + "layer_5_v_norm": 2.450831890106201, + "layer_5_cos_v_neg_g": 
0.004414671566337347, + "layer_6_v_norm": 2.5462794303894043, + "layer_6_cos_v_neg_g": 0.003401737893000245, + "layer_7_v_norm": 2.5884268283843994, + "layer_7_cos_v_neg_g": 0.0029536564834415913, + "layer_8_v_norm": 2.5352137088775635, + "layer_8_cos_v_neg_g": 0.004074486903846264, + "layer_9_v_norm": 2.540086269378662, + "layer_9_cos_v_neg_g": 0.0048967329785227776, + "layer_10_v_norm": 2.533416748046875, + "layer_10_cos_v_neg_g": 0.005356448236852884, + "layer_11_v_norm": 2.5526418685913086, + "layer_11_cos_v_neg_g": 0.004763415548950434, + "layer_12_v_norm": 2.446298599243164, + "layer_12_cos_v_neg_g": 0.006983412895351648, + "layer_1_sharpness": 5.140095890965313e-05, + "layer_2_sharpness": 1.7261341781704687e-05, + "layer_3_sharpness": 2.3612763470737264e-05, + "layer_4_sharpness": 3.223279054509476e-05, + "layer_5_sharpness": 3.243389437557198e-05, + "layer_6_sharpness": 1.486157543695299e-05, + "layer_7_sharpness": 1.7323134670732543e-05, + "layer_8_sharpness": 3.9430040487786755e-05, + "layer_9_sharpness": 5.355584289645776e-05, + "layer_10_sharpness": 6.192627915879712e-05, + "layer_11_sharpness": 5.988230259390548e-05, + "layer_12_sharpness": 0.0002526850439608097 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..82b0025c8df3aecd790988484a79a518fe816e25 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.942940711975098, + "total_l1_linf_norm": 96878.171875, + "total_spectral_norm": 10.942940711975098, + "layer_1_update_fnorm": 2.5975708961486816, + "layer_1_max_l1_linf_norm": 2.5744102001190186, + "layer_1_max_spectral_norm": 0.3017703890800476, + "layer_2_update_fnorm": 2.571763038635254, + "layer_2_max_l1_linf_norm": 2.74543833732605, + "layer_2_max_spectral_norm": 0.3005322217941284, + "layer_3_update_fnorm": 2.5470387935638428, + "layer_3_max_l1_linf_norm": 2.7482004165649414, + "layer_3_max_spectral_norm": 0.31347426772117615, + "layer_4_update_fnorm": 2.526845693588257, + "layer_4_max_l1_linf_norm": 2.629855155944824, + "layer_4_max_spectral_norm": 0.3321598470211029, + "layer_5_update_fnorm": 2.4797964096069336, + "layer_5_max_l1_linf_norm": 2.4676356315612793, + "layer_5_max_spectral_norm": 0.2832341194152832, + "layer_6_update_fnorm": 2.5744822025299072, + "layer_6_max_l1_linf_norm": 2.5213499069213867, + "layer_6_max_spectral_norm": 0.2636994421482086, + "layer_7_update_fnorm": 2.610769748687744, + "layer_7_max_l1_linf_norm": 2.7402238845825195, + "layer_7_max_spectral_norm": 0.289570689201355, + "layer_8_update_fnorm": 2.565021514892578, + "layer_8_max_l1_linf_norm": 2.774341106414795, + "layer_8_max_spectral_norm": 0.34540635347366333, + "layer_9_update_fnorm": 2.560445785522461, + "layer_9_max_l1_linf_norm": 2.822889804840088, + "layer_9_max_spectral_norm": 0.3620731830596924, + "layer_10_update_fnorm": 2.5507164001464844, + "layer_10_max_l1_linf_norm": 3.139810800552368, + "layer_10_max_spectral_norm": 0.40151065587997437, + "layer_11_update_fnorm": 2.5482919216156006, + "layer_11_max_l1_linf_norm": 3.071796417236328, + "layer_11_max_spectral_norm": 0.36602428555488586, + "layer_12_update_fnorm": 2.4635396003723145, + "layer_12_max_l1_linf_norm": 3.2170748710632324, + "layer_12_max_spectral_norm": 
0.3937053680419922, + "total_sharpness": 0.00018539035227149725, + "ip_v_neg_g": 0.011994539760053158, + "cos_v_neg_g": 0.002817879430949688, + "v_norm": 10.942940711975098, + "g_norm": 0.38897988200187683, + "hv_norm": 0.2021387368440628, + "cos_v_hv": 0.010036253370344639, + "hg_norm": 2.9063053131103516, + "cos_g_hg": 0.5056348443031311, + "v_parallel_norm": 0.003495769342407584, + "v_perp_norm": 10.942939758300781, + "layer_1_v_norm": 2.5975708961486816, + "layer_1_cos_v_neg_g": 0.00863350834697485, + "layer_2_v_norm": 2.571763038635254, + "layer_2_cos_v_neg_g": 0.002809928497299552, + "layer_3_v_norm": 2.5470387935638428, + "layer_3_cos_v_neg_g": 0.0022605913691222668, + "layer_4_v_norm": 2.526845693588257, + "layer_4_cos_v_neg_g": 0.002627684734761715, + "layer_5_v_norm": 2.4797964096069336, + "layer_5_cos_v_neg_g": 0.0030720659997314215, + "layer_6_v_norm": 2.5744822025299072, + "layer_6_cos_v_neg_g": 0.0029173986986279488, + "layer_7_v_norm": 2.610769748687744, + "layer_7_cos_v_neg_g": 0.004644070286303759, + "layer_8_v_norm": 2.565021514892578, + "layer_8_cos_v_neg_g": 0.0061775390058755875, + "layer_9_v_norm": 2.560445785522461, + "layer_9_cos_v_neg_g": 0.008121551014482975, + "layer_10_v_norm": 2.5507164001464844, + "layer_10_cos_v_neg_g": 0.008274870924651623, + "layer_11_v_norm": 2.5482919216156006, + "layer_11_cos_v_neg_g": 0.00923098810017109, + "layer_12_v_norm": 2.4635396003723145, + "layer_12_cos_v_neg_g": 0.013334577903151512, + "layer_1_sharpness": 6.339581159409136e-05, + "layer_2_sharpness": 1.7143434888566844e-05, + "layer_3_sharpness": 1.9293001969344914e-05, + "layer_4_sharpness": 2.023431625275407e-05, + "layer_5_sharpness": 1.9817751308437437e-05, + "layer_6_sharpness": 1.7785421732696705e-05, + "layer_7_sharpness": 2.600238258310128e-05, + "layer_8_sharpness": 5.452473851619288e-05, + "layer_9_sharpness": 7.488936535082757e-05, + "layer_10_sharpness": 8.875845378497615e-05, + "layer_11_sharpness": 6.931929965503514e-05, + "layer_12_sharpness": 0.00021186310914345086 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..3816dbf4e67d62e339c9d6d4f3f8e17aa25c4859 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 11.104918479919434, + "total_l1_linf_norm": 98446.203125, + "total_spectral_norm": 11.104917526245117, + "layer_1_update_fnorm": 2.625328779220581, + "layer_1_max_l1_linf_norm": 2.75814151763916, + "layer_1_max_spectral_norm": 0.32128116488456726, + "layer_2_update_fnorm": 2.582188367843628, + "layer_2_max_l1_linf_norm": 2.7896199226379395, + "layer_2_max_spectral_norm": 0.3064132332801819, + "layer_3_update_fnorm": 2.527261257171631, + "layer_3_max_l1_linf_norm": 2.7807834148406982, + "layer_3_max_spectral_norm": 0.3092650771141052, + "layer_4_update_fnorm": 2.550908327102661, + "layer_4_max_l1_linf_norm": 2.7162508964538574, + "layer_4_max_spectral_norm": 0.3258252739906311, + "layer_5_update_fnorm": 2.5602355003356934, + "layer_5_max_l1_linf_norm": 2.619558811187744, + "layer_5_max_spectral_norm": 0.31840193271636963, + "layer_6_update_fnorm": 2.654580593109131, + "layer_6_max_l1_linf_norm": 2.683487892150879, + "layer_6_max_spectral_norm": 0.3004801869392395, + "layer_7_update_fnorm": 
2.674135208129883, + "layer_7_max_l1_linf_norm": 2.7360105514526367, + "layer_7_max_spectral_norm": 0.3324230909347534, + "layer_8_update_fnorm": 2.6167287826538086, + "layer_8_max_l1_linf_norm": 2.8724753856658936, + "layer_8_max_spectral_norm": 0.39329299330711365, + "layer_9_update_fnorm": 2.5939319133758545, + "layer_9_max_l1_linf_norm": 3.006169080734253, + "layer_9_max_spectral_norm": 0.43765538930892944, + "layer_10_update_fnorm": 2.5939061641693115, + "layer_10_max_l1_linf_norm": 2.996715545654297, + "layer_10_max_spectral_norm": 0.4653666019439697, + "layer_11_update_fnorm": 2.625455379486084, + "layer_11_max_l1_linf_norm": 3.1938958168029785, + "layer_11_max_spectral_norm": 0.41948479413986206, + "layer_12_update_fnorm": 2.547276020050049, + "layer_12_max_l1_linf_norm": 3.132776975631714, + "layer_12_max_spectral_norm": 0.49012309312820435, + "total_sharpness": 0.0003509478992782533, + "ip_v_neg_g": 0.019815802574157715, + "cos_v_neg_g": 0.004175608046352863, + "v_norm": 11.104918479919434, + "g_norm": 0.4273429811000824, + "hv_norm": 0.4084119498729706, + "cos_v_hv": 0.009542443789541721, + "hg_norm": 3.233952760696411, + "cos_g_hg": 0.5462964773178101, + "v_parallel_norm": 0.00412431824952364, + "v_perp_norm": 11.104917526245117, + "layer_1_v_norm": 2.625328779220581, + "layer_1_cos_v_neg_g": 0.010311415418982506, + "layer_2_v_norm": 2.582188367843628, + "layer_2_cos_v_neg_g": 0.003657882334664464, + "layer_3_v_norm": 2.5272610187530518, + "layer_3_cos_v_neg_g": 0.005152534693479538, + "layer_4_v_norm": 2.550908327102661, + "layer_4_cos_v_neg_g": 0.005430997349321842, + "layer_5_v_norm": 2.5602355003356934, + "layer_5_cos_v_neg_g": 0.005528878886252642, + "layer_6_v_norm": 2.65458083152771, + "layer_6_cos_v_neg_g": 0.0055287969298660755, + "layer_7_v_norm": 2.674135208129883, + "layer_7_cos_v_neg_g": 0.0064584300853312016, + "layer_8_v_norm": 2.6167287826538086, + "layer_8_cos_v_neg_g": 0.008229422383010387, + "layer_9_v_norm": 2.5939319133758545, + "layer_9_cos_v_neg_g": 0.011081193573772907, + "layer_10_v_norm": 2.5939061641693115, + "layer_10_cos_v_neg_g": 0.014972448348999023, + "layer_11_v_norm": 2.625455141067505, + "layer_11_cos_v_neg_g": 0.01635836437344551, + "layer_12_v_norm": 2.547276020050049, + "layer_12_cos_v_neg_g": 0.022378189489245415, + "layer_1_sharpness": 7.919561903690919e-05, + "layer_2_sharpness": 9.353058885608334e-06, + "layer_3_sharpness": 1.372083443129668e-05, + "layer_4_sharpness": 2.546077303122729e-05, + "layer_5_sharpness": 4.1160310502164066e-05, + "layer_6_sharpness": 2.6557861929177307e-05, + "layer_7_sharpness": 4.1558298107702285e-05, + "layer_8_sharpness": 7.692356302868575e-05, + "layer_9_sharpness": 0.0001212620481965132, + "layer_10_sharpness": 0.0001748492504702881, + "layer_11_sharpness": 0.00010389724047854543, + "layer_12_sharpness": 0.0004781718016602099 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..4a0155010dbfc7592448752e31fd4cbcfb366cfc --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 11.079763412475586, + "total_l1_linf_norm": 98300.296875, + "total_spectral_norm": 11.07976245880127, + "layer_1_update_fnorm": 2.623718023300171, + "layer_1_max_l1_linf_norm": 
2.603382110595703, + "layer_1_max_spectral_norm": 0.3323379456996918, + "layer_2_update_fnorm": 2.6155450344085693, + "layer_2_max_l1_linf_norm": 2.677769184112549, + "layer_2_max_spectral_norm": 0.3007766008377075, + "layer_3_update_fnorm": 2.5798678398132324, + "layer_3_max_l1_linf_norm": 2.767493724822998, + "layer_3_max_spectral_norm": 0.2944738268852234, + "layer_4_update_fnorm": 2.5736124515533447, + "layer_4_max_l1_linf_norm": 2.662651777267456, + "layer_4_max_spectral_norm": 0.32574644684791565, + "layer_5_update_fnorm": 2.5242695808410645, + "layer_5_max_l1_linf_norm": 2.544348955154419, + "layer_5_max_spectral_norm": 0.28144127130508423, + "layer_6_update_fnorm": 2.617933750152588, + "layer_6_max_l1_linf_norm": 2.6620192527770996, + "layer_6_max_spectral_norm": 0.26257002353668213, + "layer_7_update_fnorm": 2.647184371948242, + "layer_7_max_l1_linf_norm": 2.765995502471924, + "layer_7_max_spectral_norm": 0.3041997253894806, + "layer_8_update_fnorm": 2.598259687423706, + "layer_8_max_l1_linf_norm": 2.853463649749756, + "layer_8_max_spectral_norm": 0.3394126892089844, + "layer_9_update_fnorm": 2.5953378677368164, + "layer_9_max_l1_linf_norm": 2.89996075630188, + "layer_9_max_spectral_norm": 0.38463857769966125, + "layer_10_update_fnorm": 2.5838844776153564, + "layer_10_max_l1_linf_norm": 3.1861090660095215, + "layer_10_max_spectral_norm": 0.39958053827285767, + "layer_11_update_fnorm": 2.5982539653778076, + "layer_11_max_l1_linf_norm": 3.247129440307617, + "layer_11_max_spectral_norm": 0.3904663622379303, + "layer_12_update_fnorm": 2.523104190826416, + "layer_12_max_l1_linf_norm": 3.3819997310638428, + "layer_12_max_spectral_norm": 0.44637343287467957, + "total_sharpness": 0.00021178365568630397, + "ip_v_neg_g": 0.013262521475553513, + "cos_v_neg_g": 0.003041207790374756, + "v_norm": 11.079763412475586, + "g_norm": 0.39359498023986816, + "hv_norm": 0.25234490633010864, + "cos_v_hv": 0.009298832155764103, + "hg_norm": 2.58496356010437, + "cos_g_hg": 0.46622762084007263, + "v_parallel_norm": 0.0028932460118085146, + "v_perp_norm": 11.07976245880127, + "layer_1_v_norm": 2.623718023300171, + "layer_1_cos_v_neg_g": 0.009782642126083374, + "layer_2_v_norm": 2.6155450344085693, + "layer_2_cos_v_neg_g": 0.005584602244198322, + "layer_3_v_norm": 2.5798678398132324, + "layer_3_cos_v_neg_g": 0.006697122007608414, + "layer_4_v_norm": 2.5736124515533447, + "layer_4_cos_v_neg_g": 0.005416195839643478, + "layer_5_v_norm": 2.5242695808410645, + "layer_5_cos_v_neg_g": 0.0036941489670425653, + "layer_6_v_norm": 2.617933750152588, + "layer_6_cos_v_neg_g": 0.0034669109154492617, + "layer_7_v_norm": 2.647184371948242, + "layer_7_cos_v_neg_g": 0.004729794338345528, + "layer_8_v_norm": 2.598259687423706, + "layer_8_cos_v_neg_g": 0.004294252954423428, + "layer_9_v_norm": 2.5953378677368164, + "layer_9_cos_v_neg_g": 0.005969519726932049, + "layer_10_v_norm": 2.5838844776153564, + "layer_10_cos_v_neg_g": 0.008658669888973236, + "layer_11_v_norm": 2.5982539653778076, + "layer_11_cos_v_neg_g": 0.008920108899474144, + "layer_12_v_norm": 2.523104190826416, + "layer_12_cos_v_neg_g": 0.013036426156759262, + "layer_1_sharpness": 5.386829070630483e-05, + "layer_2_sharpness": 9.149315701506566e-06, + "layer_3_sharpness": 1.1926753359148279e-05, + "layer_4_sharpness": 3.3066346077248454e-05, + "layer_5_sharpness": 3.3152708056150004e-05, + "layer_6_sharpness": 2.1362773622968234e-05, + "layer_7_sharpness": 3.78003969672136e-05, + "layer_8_sharpness": 5.612027962342836e-05, + "layer_9_sharpness": 
7.577821088489145e-05, + "layer_10_sharpness": 9.589978435542434e-05, + "layer_11_sharpness": 6.0081951232859865e-05, + "layer_12_sharpness": 0.0002949379268102348 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..992b39c518db5e2fffa19163c6f1229ce32e9c68 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 11.054990768432617, + "total_l1_linf_norm": 97900.9375, + "total_spectral_norm": 11.054991722106934, + "layer_1_update_fnorm": 2.622735023498535, + "layer_1_max_l1_linf_norm": 2.5830941200256348, + "layer_1_max_spectral_norm": 0.3280313014984131, + "layer_2_update_fnorm": 2.5985450744628906, + "layer_2_max_l1_linf_norm": 2.7971463203430176, + "layer_2_max_spectral_norm": 0.30582118034362793, + "layer_3_update_fnorm": 2.5684282779693604, + "layer_3_max_l1_linf_norm": 2.7050092220306396, + "layer_3_max_spectral_norm": 0.28462010622024536, + "layer_4_update_fnorm": 2.5545127391815186, + "layer_4_max_l1_linf_norm": 2.664623737335205, + "layer_4_max_spectral_norm": 0.33321622014045715, + "layer_5_update_fnorm": 2.5181071758270264, + "layer_5_max_l1_linf_norm": 2.4718055725097656, + "layer_5_max_spectral_norm": 0.28599312901496887, + "layer_6_update_fnorm": 2.6156792640686035, + "layer_6_max_l1_linf_norm": 2.5485053062438965, + "layer_6_max_spectral_norm": 0.2579037845134735, + "layer_7_update_fnorm": 2.638237237930298, + "layer_7_max_l1_linf_norm": 2.7526540756225586, + "layer_7_max_spectral_norm": 0.3011539578437805, + "layer_8_update_fnorm": 2.583244800567627, + "layer_8_max_l1_linf_norm": 2.644038200378418, + "layer_8_max_spectral_norm": 0.3321531116962433, + "layer_9_update_fnorm": 2.579205274581909, + "layer_9_max_l1_linf_norm": 2.9734597206115723, + "layer_9_max_spectral_norm": 0.38543203473091125, + "layer_10_update_fnorm": 2.571704626083374, + "layer_10_max_l1_linf_norm": 2.9207592010498047, + "layer_10_max_spectral_norm": 0.40140315890312195, + "layer_11_update_fnorm": 2.605686902999878, + "layer_11_max_l1_linf_norm": 2.896374225616455, + "layer_11_max_spectral_norm": 0.38551121950149536, + "layer_12_update_fnorm": 2.5206050872802734, + "layer_12_max_l1_linf_norm": 2.7335591316223145, + "layer_12_max_spectral_norm": 0.40982958674430847, + "total_sharpness": 0.00017702726472634822, + "ip_v_neg_g": 0.007595031522214413, + "cos_v_neg_g": 0.001655473606660962, + "v_norm": 11.054990768432617, + "g_norm": 0.4150008261203766, + "hv_norm": 0.22585377097129822, + "cos_v_hv": 0.00866505317389965, + "hg_norm": 2.857908010482788, + "cos_g_hg": 0.46033936738967896, + "v_parallel_norm": 0.0015544078778475523, + "v_perp_norm": 11.054990768432617, + "layer_1_v_norm": 2.622735023498535, + "layer_1_cos_v_neg_g": 0.004823528695851564, + "layer_2_v_norm": 2.5985450744628906, + "layer_2_cos_v_neg_g": 0.001250091358087957, + "layer_3_v_norm": 2.5684282779693604, + "layer_3_cos_v_neg_g": 0.0013319217832759023, + "layer_4_v_norm": 2.5545127391815186, + "layer_4_cos_v_neg_g": 0.00218719569966197, + "layer_5_v_norm": 2.5181071758270264, + "layer_5_cos_v_neg_g": 0.0018959629815071821, + "layer_6_v_norm": 2.6156792640686035, + "layer_6_cos_v_neg_g": 0.0015220448840409517, + "layer_7_v_norm": 2.638237237930298, + "layer_7_cos_v_neg_g": 
0.0023981770500540733, + "layer_8_v_norm": 2.583244800567627, + "layer_8_cos_v_neg_g": 0.0036546457558870316, + "layer_9_v_norm": 2.579205274581909, + "layer_9_cos_v_neg_g": 0.004625537898391485, + "layer_10_v_norm": 2.571704626083374, + "layer_10_cos_v_neg_g": 0.005636101588606834, + "layer_11_v_norm": 2.605686902999878, + "layer_11_cos_v_neg_g": 0.006194215267896652, + "layer_12_v_norm": 2.5206050872802734, + "layer_12_cos_v_neg_g": 0.0068238298408687115, + "layer_1_sharpness": 4.8609024815959856e-05, + "layer_2_sharpness": 6.941568244656082e-06, + "layer_3_sharpness": 1.4291431398305576e-05, + "layer_4_sharpness": 2.1889505660510622e-05, + "layer_5_sharpness": 3.511410977807827e-05, + "layer_6_sharpness": 1.8016104149864987e-05, + "layer_7_sharpness": 2.6973033527610824e-05, + "layer_8_sharpness": 4.8646415962139145e-05, + "layer_9_sharpness": 7.637896487722173e-05, + "layer_10_sharpness": 8.13481819932349e-05, + "layer_11_sharpness": 6.268976721912622e-05, + "layer_12_sharpness": 0.00020052006584592164 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_8000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..85229a8c350904ba6b34d9c72481fef2ab2c5975 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 11.056476593017578, + "total_l1_linf_norm": 98035.796875, + "total_spectral_norm": 11.056473731994629, + "layer_1_update_fnorm": 2.6386451721191406, + "layer_1_max_l1_linf_norm": 2.6139471530914307, + "layer_1_max_spectral_norm": 0.33912694454193115, + "layer_2_update_fnorm": 2.624215841293335, + "layer_2_max_l1_linf_norm": 2.707737922668457, + "layer_2_max_spectral_norm": 0.30269917845726013, + "layer_3_update_fnorm": 2.600086212158203, + "layer_3_max_l1_linf_norm": 2.780592441558838, + "layer_3_max_spectral_norm": 0.30753612518310547, + "layer_4_update_fnorm": 2.563688278198242, + "layer_4_max_l1_linf_norm": 2.774169921875, + "layer_4_max_spectral_norm": 0.33102327585220337, + "layer_5_update_fnorm": 2.5227324962615967, + "layer_5_max_l1_linf_norm": 2.5410451889038086, + "layer_5_max_spectral_norm": 0.2801970839500427, + "layer_6_update_fnorm": 2.6193466186523438, + "layer_6_max_l1_linf_norm": 2.637235403060913, + "layer_6_max_spectral_norm": 0.26033440232276917, + "layer_7_update_fnorm": 2.6452624797821045, + "layer_7_max_l1_linf_norm": 2.7876009941101074, + "layer_7_max_spectral_norm": 0.30347052216529846, + "layer_8_update_fnorm": 2.594113826751709, + "layer_8_max_l1_linf_norm": 2.8076672554016113, + "layer_8_max_spectral_norm": 0.3319632411003113, + "layer_9_update_fnorm": 2.5921077728271484, + "layer_9_max_l1_linf_norm": 2.709446430206299, + "layer_9_max_spectral_norm": 0.36048048734664917, + "layer_10_update_fnorm": 2.5737931728363037, + "layer_10_max_l1_linf_norm": 2.8949460983276367, + "layer_10_max_spectral_norm": 0.36249494552612305, + "layer_11_update_fnorm": 2.6023647785186768, + "layer_11_max_l1_linf_norm": 2.8331079483032227, + "layer_11_max_spectral_norm": 0.366548627614975, + "layer_12_update_fnorm": 2.519918918609619, + "layer_12_max_l1_linf_norm": 2.862178325653076, + "layer_12_max_spectral_norm": 0.4044412672519684, + "total_sharpness": 0.00018848731997422874, + "ip_v_neg_g": 0.009634803980588913, + "cos_v_neg_g": 0.0021944609470665455, + "v_norm": 11.056476593017578, 
+ "g_norm": 0.3970985412597656, + "hv_norm": 0.21123415231704712, + "cos_v_hv": 0.009865854866802692, + "hg_norm": 2.888420581817627, + "cos_g_hg": 0.4757208526134491, + "v_parallel_norm": 0.002379625104367733, + "v_perp_norm": 11.056475639343262, + "layer_1_v_norm": 2.6386451721191406, + "layer_1_cos_v_neg_g": 0.010518123395740986, + "layer_2_v_norm": 2.624215841293335, + "layer_2_cos_v_neg_g": 0.0053969803266227245, + "layer_3_v_norm": 2.600086212158203, + "layer_3_cos_v_neg_g": 0.007114307954907417, + "layer_4_v_norm": 2.563688278198242, + "layer_4_cos_v_neg_g": 0.0055526020005345345, + "layer_5_v_norm": 2.5227324962615967, + "layer_5_cos_v_neg_g": 0.002907274290919304, + "layer_6_v_norm": 2.6193466186523438, + "layer_6_cos_v_neg_g": 0.0023377612233161926, + "layer_7_v_norm": 2.6452624797821045, + "layer_7_cos_v_neg_g": 0.0017326523084193468, + "layer_8_v_norm": 2.594113826751709, + "layer_8_cos_v_neg_g": 0.002012654673308134, + "layer_9_v_norm": 2.5921077728271484, + "layer_9_cos_v_neg_g": 0.004187225829809904, + "layer_10_v_norm": 2.5737931728363037, + "layer_10_cos_v_neg_g": 0.005025635473430157, + "layer_11_v_norm": 2.602365016937256, + "layer_11_cos_v_neg_g": 0.004353395197540522, + "layer_12_v_norm": 2.519918918609619, + "layer_12_cos_v_neg_g": 0.008726059459149837, + "layer_1_sharpness": 8.995120151666924e-05, + "layer_2_sharpness": 3.17351587000303e-05, + "layer_3_sharpness": 5.3479670896194875e-05, + "layer_4_sharpness": 4.3386640754761174e-05, + "layer_5_sharpness": 3.794254007516429e-05, + "layer_6_sharpness": 2.141185905202292e-05, + "layer_7_sharpness": 3.2352407288271934e-05, + "layer_8_sharpness": 4.7608718887204304e-05, + "layer_9_sharpness": 5.448947922559455e-05, + "layer_10_sharpness": 6.543700146721676e-05, + "layer_11_sharpness": 4.593414632836357e-05, + "layer_12_sharpness": 0.00016796875570435077 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..19f123a0f03903202cfbdd617250aca4ffba0f60 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.874363899230957, + "total_l1_linf_norm": 96019.6953125, + "total_spectral_norm": 10.874364852905273, + "layer_1_update_fnorm": 2.5402491092681885, + "layer_1_max_l1_linf_norm": 2.6124236583709717, + "layer_1_max_spectral_norm": 0.33249160647392273, + "layer_2_update_fnorm": 2.512749195098877, + "layer_2_max_l1_linf_norm": 2.676143169403076, + "layer_2_max_spectral_norm": 0.3107631504535675, + "layer_3_update_fnorm": 2.496847152709961, + "layer_3_max_l1_linf_norm": 2.627467155456543, + "layer_3_max_spectral_norm": 0.2933353781700134, + "layer_4_update_fnorm": 2.500513792037964, + "layer_4_max_l1_linf_norm": 2.6337192058563232, + "layer_4_max_spectral_norm": 0.3141956925392151, + "layer_5_update_fnorm": 2.463531732559204, + "layer_5_max_l1_linf_norm": 2.521735668182373, + "layer_5_max_spectral_norm": 0.2711395025253296, + "layer_6_update_fnorm": 2.5805366039276123, + "layer_6_max_l1_linf_norm": 2.7023673057556152, + "layer_6_max_spectral_norm": 0.25932443141937256, + "layer_7_update_fnorm": 2.6132800579071045, + "layer_7_max_l1_linf_norm": 3.0066580772399902, + "layer_7_max_spectral_norm": 0.31246626377105713, + "layer_8_update_fnorm": 2.5608131885528564, + 
"layer_8_max_l1_linf_norm": 2.9224653244018555, + "layer_8_max_spectral_norm": 0.36248430609703064, + "layer_9_update_fnorm": 2.5621371269226074, + "layer_9_max_l1_linf_norm": 3.0809152126312256, + "layer_9_max_spectral_norm": 0.3947908580303192, + "layer_10_update_fnorm": 2.5467212200164795, + "layer_10_max_l1_linf_norm": 3.0163350105285645, + "layer_10_max_spectral_norm": 0.3968026638031006, + "layer_11_update_fnorm": 2.5393707752227783, + "layer_11_max_l1_linf_norm": 3.250309467315674, + "layer_11_max_spectral_norm": 0.3982574939727783, + "layer_12_update_fnorm": 2.4242706298828125, + "layer_12_max_l1_linf_norm": 3.125075340270996, + "layer_12_max_spectral_norm": 0.43676698207855225, + "total_sharpness": 0.00017635009135119617, + "ip_v_neg_g": 0.011608166620135307, + "cos_v_neg_g": 0.002767808036878705, + "v_norm": 10.874363899230957, + "g_norm": 0.3856770396232605, + "hv_norm": 0.19173215329647064, + "cos_v_hv": 0.010001949034631252, + "hg_norm": 2.373950481414795, + "cos_g_hg": 0.47870033979415894, + "v_parallel_norm": 0.0031499587930738926, + "v_perp_norm": 10.874363899230957, + "layer_1_v_norm": 2.5402491092681885, + "layer_1_cos_v_neg_g": 0.007131201680749655, + "layer_2_v_norm": 2.512749195098877, + "layer_2_cos_v_neg_g": 0.0021844336297363043, + "layer_3_v_norm": 2.49684739112854, + "layer_3_cos_v_neg_g": 0.002447239588946104, + "layer_4_v_norm": 2.500513792037964, + "layer_4_cos_v_neg_g": 0.0034823070745915174, + "layer_5_v_norm": 2.463531732559204, + "layer_5_cos_v_neg_g": 0.004465487785637379, + "layer_6_v_norm": 2.5805366039276123, + "layer_6_cos_v_neg_g": 0.005089603364467621, + "layer_7_v_norm": 2.6132800579071045, + "layer_7_cos_v_neg_g": 0.005107562057673931, + "layer_8_v_norm": 2.5608131885528564, + "layer_8_cos_v_neg_g": 0.006877457723021507, + "layer_9_v_norm": 2.5621371269226074, + "layer_9_cos_v_neg_g": 0.008280512876808643, + "layer_10_v_norm": 2.5467212200164795, + "layer_10_cos_v_neg_g": 0.008818901143968105, + "layer_11_v_norm": 2.539370536804199, + "layer_11_cos_v_neg_g": 0.008911192417144775, + "layer_12_v_norm": 2.4242706298828125, + "layer_12_cos_v_neg_g": 0.013130358420312405, + "layer_1_sharpness": 4.203668868285604e-05, + "layer_2_sharpness": 6.430959274439374e-06, + "layer_3_sharpness": 1.0539896720729303e-05, + "layer_4_sharpness": 1.7484726413385943e-05, + "layer_5_sharpness": 2.9183345759520307e-05, + "layer_6_sharpness": 1.817491283873096e-05, + "layer_7_sharpness": 3.2535011996515095e-05, + "layer_8_sharpness": 6.517599831568077e-05, + "layer_9_sharpness": 7.160755194490775e-05, + "layer_10_sharpness": 7.79553665779531e-05, + "layer_11_sharpness": 4.9426602345192805e-05, + "layer_12_sharpness": 0.00022157278726808727 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..3e6e2eb55e2faabccf659a84fae31d814f2e42ad --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 11.015202522277832, + "total_l1_linf_norm": 97582.515625, + "total_spectral_norm": 11.015201568603516, + "layer_1_update_fnorm": 2.589641809463501, + "layer_1_max_l1_linf_norm": 2.614640235900879, + "layer_1_max_spectral_norm": 0.34117403626441956, + "layer_2_update_fnorm": 2.5823135375976562, + "layer_2_max_l1_linf_norm": 
2.7176833152770996, + "layer_2_max_spectral_norm": 0.3164078891277313, + "layer_3_update_fnorm": 2.5558018684387207, + "layer_3_max_l1_linf_norm": 2.7021806240081787, + "layer_3_max_spectral_norm": 0.2929164171218872, + "layer_4_update_fnorm": 2.552467107772827, + "layer_4_max_l1_linf_norm": 2.7694103717803955, + "layer_4_max_spectral_norm": 0.31736403703689575, + "layer_5_update_fnorm": 2.500171422958374, + "layer_5_max_l1_linf_norm": 2.4899535179138184, + "layer_5_max_spectral_norm": 0.2758021652698517, + "layer_6_update_fnorm": 2.5995259284973145, + "layer_6_max_l1_linf_norm": 2.544395923614502, + "layer_6_max_spectral_norm": 0.26381707191467285, + "layer_7_update_fnorm": 2.6306607723236084, + "layer_7_max_l1_linf_norm": 2.7304112911224365, + "layer_7_max_spectral_norm": 0.3040209412574768, + "layer_8_update_fnorm": 2.5924177169799805, + "layer_8_max_l1_linf_norm": 2.860196352005005, + "layer_8_max_spectral_norm": 0.32894647121429443, + "layer_9_update_fnorm": 2.5930092334747314, + "layer_9_max_l1_linf_norm": 3.058706760406494, + "layer_9_max_spectral_norm": 0.37237799167633057, + "layer_10_update_fnorm": 2.5733256340026855, + "layer_10_max_l1_linf_norm": 2.8416266441345215, + "layer_10_max_spectral_norm": 0.3702548146247864, + "layer_11_update_fnorm": 2.6000142097473145, + "layer_11_max_l1_linf_norm": 3.040123462677002, + "layer_11_max_spectral_norm": 0.37816140055656433, + "layer_12_update_fnorm": 2.519141674041748, + "layer_12_max_l1_linf_norm": 3.1150591373443604, + "layer_12_max_spectral_norm": 0.4416665732860565, + "total_sharpness": 0.00015718495706096292, + "ip_v_neg_g": 0.010575932450592518, + "cos_v_neg_g": 0.002330903895199299, + "v_norm": 11.015202522277832, + "g_norm": 0.4119095206260681, + "hv_norm": 0.1995955854654312, + "cos_v_hv": 0.008674661628901958, + "hg_norm": 2.6015615463256836, + "cos_g_hg": 0.49979448318481445, + "v_parallel_norm": 0.003642657771706581, + "v_perp_norm": 11.015201568603516, + "layer_1_v_norm": 2.589641809463501, + "layer_1_cos_v_neg_g": 0.004061123821884394, + "layer_2_v_norm": 2.5823135375976562, + "layer_2_cos_v_neg_g": 0.0019707358442246914, + "layer_3_v_norm": 2.5558018684387207, + "layer_3_cos_v_neg_g": 0.001659086556173861, + "layer_4_v_norm": 2.552467107772827, + "layer_4_cos_v_neg_g": 0.002781204180791974, + "layer_5_v_norm": 2.500171422958374, + "layer_5_cos_v_neg_g": 0.0029473721515387297, + "layer_6_v_norm": 2.5995259284973145, + "layer_6_cos_v_neg_g": 0.0036472254432737827, + "layer_7_v_norm": 2.6306607723236084, + "layer_7_cos_v_neg_g": 0.004224306903779507, + "layer_8_v_norm": 2.5924179553985596, + "layer_8_cos_v_neg_g": 0.004562710411846638, + "layer_9_v_norm": 2.5930092334747314, + "layer_9_cos_v_neg_g": 0.006850798148661852, + "layer_10_v_norm": 2.5733256340026855, + "layer_10_cos_v_neg_g": 0.007921003736555576, + "layer_11_v_norm": 2.6000142097473145, + "layer_11_cos_v_neg_g": 0.007895184680819511, + "layer_12_v_norm": 2.519141674041748, + "layer_12_cos_v_neg_g": 0.013144457712769508, + "layer_1_sharpness": 3.71516180166509e-05, + "layer_2_sharpness": 9.154079634754453e-06, + "layer_3_sharpness": 8.564867130189668e-06, + "layer_4_sharpness": 1.6923084331210703e-05, + "layer_5_sharpness": 1.9268865798949264e-05, + "layer_6_sharpness": 1.393822913087206e-05, + "layer_7_sharpness": 2.6516525394981727e-05, + "layer_8_sharpness": 4.2671512346714735e-05, + "layer_9_sharpness": 6.0915575886610895e-05, + "layer_10_sharpness": 6.079416925786063e-05, + "layer_11_sharpness": 4.5403794501908123e-05, + "layer_12_sharpness": 
0.0002581640728749335 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..5de30ddfd644d0970c72acb5cafce8a5ff977b89 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 11.056785583496094, + "total_l1_linf_norm": 98111.953125, + "total_spectral_norm": 11.056787490844727, + "layer_1_update_fnorm": 2.633742094039917, + "layer_1_max_l1_linf_norm": 2.64691162109375, + "layer_1_max_spectral_norm": 0.34130826592445374, + "layer_2_update_fnorm": 2.6433639526367188, + "layer_2_max_l1_linf_norm": 3.074808120727539, + "layer_2_max_spectral_norm": 0.33808717131614685, + "layer_3_update_fnorm": 2.646955728530884, + "layer_3_max_l1_linf_norm": 3.096388578414917, + "layer_3_max_spectral_norm": 0.39700159430503845, + "layer_4_update_fnorm": 2.5924267768859863, + "layer_4_max_l1_linf_norm": 2.8427553176879883, + "layer_4_max_spectral_norm": 0.3286387026309967, + "layer_5_update_fnorm": 2.5195579528808594, + "layer_5_max_l1_linf_norm": 2.4853038787841797, + "layer_5_max_spectral_norm": 0.27748778462409973, + "layer_6_update_fnorm": 2.6103973388671875, + "layer_6_max_l1_linf_norm": 2.7085535526275635, + "layer_6_max_spectral_norm": 0.2648756504058838, + "layer_7_update_fnorm": 2.641683578491211, + "layer_7_max_l1_linf_norm": 2.7767655849456787, + "layer_7_max_spectral_norm": 0.30653098225593567, + "layer_8_update_fnorm": 2.5976996421813965, + "layer_8_max_l1_linf_norm": 2.973989963531494, + "layer_8_max_spectral_norm": 0.3637271821498871, + "layer_9_update_fnorm": 2.584010362625122, + "layer_9_max_l1_linf_norm": 2.8323349952697754, + "layer_9_max_spectral_norm": 0.41273409128189087, + "layer_10_update_fnorm": 2.563535213470459, + "layer_10_max_l1_linf_norm": 2.865572452545166, + "layer_10_max_spectral_norm": 0.4107494056224823, + "layer_11_update_fnorm": 2.5777368545532227, + "layer_11_max_l1_linf_norm": 3.047977924346924, + "layer_11_max_spectral_norm": 0.3911917805671692, + "layer_12_update_fnorm": 2.4919538497924805, + "layer_12_max_l1_linf_norm": 2.973263740539551, + "layer_12_max_spectral_norm": 0.43186062574386597, + "total_sharpness": 0.000186786986887455, + "ip_v_neg_g": 0.012492051348090172, + "cos_v_neg_g": 0.002957576885819435, + "v_norm": 11.056785583496094, + "g_norm": 0.38200482726097107, + "hv_norm": 0.20219086110591888, + "cos_v_hv": 0.01021442748606205, + "hg_norm": 2.1060733795166016, + "cos_g_hg": 0.46092161536216736, + "v_parallel_norm": 0.0027047465555369854, + "v_perp_norm": 11.056785583496094, + "layer_1_v_norm": 2.633742094039917, + "layer_1_cos_v_neg_g": 0.011060941033065319, + "layer_2_v_norm": 2.6433639526367188, + "layer_2_cos_v_neg_g": 0.007078532595187426, + "layer_3_v_norm": 2.6469554901123047, + "layer_3_cos_v_neg_g": 0.008078324608504772, + "layer_4_v_norm": 2.5924267768859863, + "layer_4_cos_v_neg_g": 0.0041713109239935875, + "layer_5_v_norm": 2.5195579528808594, + "layer_5_cos_v_neg_g": 0.004001641646027565, + "layer_6_v_norm": 2.6103973388671875, + "layer_6_cos_v_neg_g": 0.003623628057539463, + "layer_7_v_norm": 2.641683578491211, + "layer_7_cos_v_neg_g": 0.003979207947850227, + "layer_8_v_norm": 2.5976996421813965, + "layer_8_cos_v_neg_g": 0.004333599004894495, + "layer_9_v_norm": 2.584010362625122, + 
"layer_9_cos_v_neg_g": 0.006597953382879496, + "layer_10_v_norm": 2.563535213470459, + "layer_10_cos_v_neg_g": 0.00757219223305583, + "layer_11_v_norm": 2.5777368545532227, + "layer_11_cos_v_neg_g": 0.008740865625441074, + "layer_12_v_norm": 2.4919538497924805, + "layer_12_cos_v_neg_g": 0.012985741719603539, + "layer_1_sharpness": 2.173388384107966e-05, + "layer_2_sharpness": -6.14614509686362e-06, + "layer_3_sharpness": -1.2867114492109977e-05, + "layer_4_sharpness": 1.3605025742435828e-05, + "layer_5_sharpness": 2.862211113097146e-05, + "layer_6_sharpness": 1.964831790246535e-05, + "layer_7_sharpness": 3.7709589378209785e-05, + "layer_8_sharpness": 5.602095916401595e-05, + "layer_9_sharpness": 7.13358458597213e-05, + "layer_10_sharpness": 7.304065366042778e-05, + "layer_11_sharpness": 5.649419836117886e-05, + "layer_12_sharpness": 0.0002250840188935399 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/training_log.txt b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..54ea8d150f1227080f2b2fbbe633be012b03009a --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first 
read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have 
to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. 
+ """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. + # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? 
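    # Note: the rows appended by pad_vocab above are zeros (value=0), so the reference
    # gradients beyond row 50257 stay well-defined for any C-side comparison; padding
    # with NaNs instead is the alternative the TODO above hints at.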
+ grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. 
Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. + loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. 
Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = ; cos_v_neg_g = / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for 
group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
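    # Illustrative shape of the assembled string (abridged; the numbers are copied from
    # the step-500 entry logged later in this file, purely as an example):
    #   total_sharp:3.5500e-03 L1_sharp:1.3686e-02 ... total_fnorm:6.4585e+00
    #   total_l1_linf:5.5303e+04 total_spectral:6.4585e+00 ... cos_v_neg_g:2.4140e-02 ...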
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings run a short Tiny Shakespeare configuration + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate for the AdamW optimizer") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="final learning rate, as a fraction of the base learning rate") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how many steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
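        # Typical launch for this DDP path (illustrative only; the script name and flag
        # values are placeholders, not the exact command behind this run):
        #   torchrun --standalone --nproc_per_node=8 train_gpt2.py \
        #       --input_bin "data/fineweb_train_*.bin" --optimizer muon
        # torchrun exports RANK, LOCAL_RANK and WORLD_SIZE, which the ddp check and the
        # rank/world-size reads above rely on.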
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
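        # Concrete example of the split for this model's naming: transformer.h.0.attn.c_attn.weight
        # (2D) falls into the decayed group, while transformer.h.0.attn.c_attn.bias and
        # transformer.h.0.ln_1.weight (1D) fall into the no-decay group.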
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, 
+ device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
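            # Sketch of what this capture feeds (sign handling lives inside the analysis code):
            #   delta_p = p_before - p_after        # realized parameter change over this optimizer step
            # The analysis uses the realized update as its probe direction v and reports
            #   sharpness = (v^T H v) / (v^T v)
            # where H v is obtained from a double-backward Hessian-vector product rather than
            # an explicit Hessian or an eigensolver.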
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026604 +step:0 train loss:11.019225 +step:1 train loss:10.651471 +step:2 train loss:10.261358 +step:3 train loss:9.976452 +step:4 train loss:9.806090 +step:5 train loss:9.672937 +step:6 train loss:9.612681 +step:7 train loss:9.522202 +step:8 train loss:9.506382 +step:9 train loss:9.404758 +step:10 train loss:9.336102 +step:11 train loss:9.246750 +step:12 train loss:9.169037 +step:13 train loss:9.052170 +step:14 train loss:8.979721 +step:15 train loss:8.876817 +step:16 train loss:8.811045 +step:17 train loss:8.702904 +step:18 train loss:8.648784 +step:19 train loss:8.524988 +step:20 train loss:8.423357 +step:21 train loss:8.357502 +step:22 train loss:8.179543 +step:23 train loss:8.124870 +step:24 train loss:7.985281 +step:25 train loss:7.932375 +step:26 train loss:7.831648 +step:27 train loss:7.706161 +step:28 train loss:7.685916 +step:29 train loss:7.619381 +step:30 train loss:7.542132 +step:31 train loss:7.411094 +step:32 train loss:7.380839 +step:33 train loss:7.319015 +step:34 train loss:7.344049 +step:35 train loss:7.258625 +step:36 train loss:7.213071 +step:37 train loss:7.155584 +step:38 train loss:7.188903 +step:39 
train loss:7.107199 +step:40 train loss:7.093133 +step:41 train loss:7.056510 +step:42 train loss:7.081450 +step:43 train loss:7.029290 +step:44 train loss:6.983466 +step:45 train loss:6.996648 +step:46 train loss:6.955415 +step:47 train loss:6.960924 +step:48 train loss:6.891493 +step:49 train loss:6.920848 +step:50 train loss:6.826282 +step:51 train loss:6.877565 +step:52 train loss:6.854701 +step:53 train loss:6.829060 +step:54 train loss:6.808784 +step:55 train loss:6.742470 +step:56 train loss:6.715693 +step:57 train loss:6.743542 +step:58 train loss:6.660424 +step:59 train loss:6.687605 +step:60 train loss:6.677683 +step:61 train loss:6.636292 +step:62 train loss:6.626475 +step:63 train loss:6.691537 +step:64 train loss:6.589521 +step:65 train loss:6.613579 +step:66 train loss:6.609834 +step:67 train loss:6.647625 +step:68 train loss:6.586644 +step:69 train loss:6.574841 +step:70 train loss:6.548091 +step:71 train loss:6.522923 +step:72 train loss:6.540075 +step:73 train loss:6.493841 +step:74 train loss:6.517091 +step:75 train loss:6.468133 +step:76 train loss:6.544918 +step:77 train loss:6.499124 +step:78 train loss:6.274253 +step:79 train loss:6.468691 +step:80 train loss:6.432199 +step:81 train loss:6.545518 +step:82 train loss:6.519169 +step:83 train loss:6.456831 +step:84 train loss:6.433950 +step:85 train loss:6.402084 +step:86 train loss:6.402933 +step:87 train loss:6.387092 +step:88 train loss:6.386302 +step:89 train loss:6.338058 +step:90 train loss:6.396680 +step:91 train loss:6.400297 +step:92 train loss:6.416034 +step:93 train loss:6.369058 +step:94 train loss:6.332676 +step:95 train loss:6.275589 +step:96 train loss:6.374394 +step:97 train loss:6.331461 +step:98 train loss:6.302747 +step:99 train loss:6.283644 +step:100 train loss:6.281329 +step:101 train loss:6.237933 +step:102 train loss:6.263513 +step:103 train loss:6.271356 +step:104 train loss:6.295498 +step:105 train loss:6.352055 +step:106 train loss:6.302783 +step:107 train loss:6.242809 +step:108 train loss:6.283556 +step:109 train loss:6.307353 +step:110 train loss:6.233478 +step:111 train loss:6.254946 +step:112 train loss:6.253875 +step:113 train loss:6.209214 +step:114 train loss:6.269244 +step:115 train loss:6.244458 +step:116 train loss:6.236762 +step:117 train loss:6.189393 +step:118 train loss:6.244820 +step:119 train loss:6.183927 +step:120 train loss:6.199008 +step:121 train loss:6.125618 +step:122 train loss:6.208154 +step:123 train loss:6.139987 +step:124 train loss:6.126417 +step:125 train loss:6.103831 +step:126 train loss:6.206270 +step:127 train loss:6.124193 +step:128 train loss:6.181850 +step:129 train loss:6.144567 +step:130 train loss:6.174741 +step:131 train loss:6.144506 +step:132 train loss:6.093905 +step:133 train loss:6.127281 +step:134 train loss:6.122147 +step:135 train loss:6.036718 +step:136 train loss:6.081192 +step:137 train loss:6.095068 +step:138 train loss:6.041440 +step:139 train loss:6.111451 +step:140 train loss:6.041628 +step:141 train loss:6.131902 +step:142 train loss:6.076623 +step:143 train loss:6.087592 +step:144 train loss:6.057003 +step:145 train loss:5.995790 +step:146 train loss:6.009725 +step:147 train loss:6.060258 +step:148 train loss:6.067174 +step:149 train loss:6.029859 +step:150 train loss:6.036469 +step:151 train loss:5.965943 +step:152 train loss:6.002942 +step:153 train loss:5.992119 +step:154 train loss:6.062020 +step:155 train loss:6.051520 +step:156 train loss:6.077212 +step:157 train loss:6.028303 +step:158 train loss:6.030898 +step:159 train 
loss:6.022952 +step:160 train loss:6.019738 +step:161 train loss:6.020428 +step:162 train loss:5.998006 +step:163 train loss:6.015513 +step:164 train loss:6.006569 +step:165 train loss:6.019999 +step:166 train loss:5.974936 +step:167 train loss:5.980644 +step:168 train loss:5.951769 +step:169 train loss:5.903101 +step:170 train loss:5.885556 +step:171 train loss:6.005754 +step:172 train loss:5.930685 +step:173 train loss:5.993117 +step:174 train loss:5.995285 +step:175 train loss:5.944676 +step:176 train loss:5.916722 +step:177 train loss:5.962249 +step:178 train loss:5.967535 +step:179 train loss:5.924016 +step:180 train loss:5.878422 +step:181 train loss:5.927581 +step:182 train loss:5.857228 +step:183 train loss:5.930611 +step:184 train loss:5.897960 +step:185 train loss:5.837955 +step:186 train loss:5.954567 +step:187 train loss:5.901813 +step:188 train loss:5.749524 +step:189 train loss:5.890527 +step:190 train loss:5.893103 +step:191 train loss:5.815697 +step:192 train loss:5.776677 +step:193 train loss:5.925421 +step:194 train loss:5.953784 +step:195 train loss:5.937577 +step:196 train loss:5.904708 +step:197 train loss:5.894193 +step:198 train loss:5.840966 +step:199 train loss:5.909474 +step:200 train loss:5.965903 +step:201 train loss:5.875873 +step:202 train loss:5.879416 +step:203 train loss:5.850081 +step:204 train loss:5.869786 +step:205 train loss:5.740010 +step:206 train loss:5.870827 +step:207 train loss:5.848151 +step:208 train loss:5.795728 +step:209 train loss:5.782706 +step:210 train loss:5.795074 +step:211 train loss:5.844537 +step:212 train loss:5.817430 +step:213 train loss:5.825307 +step:214 train loss:5.802386 +step:215 train loss:5.822280 +step:216 train loss:5.774833 +step:217 train loss:5.803336 +step:218 train loss:5.763017 +step:219 train loss:5.739607 +step:220 train loss:5.768610 +step:221 train loss:5.738883 +step:222 train loss:5.772779 +step:223 train loss:5.804096 +step:224 train loss:5.785709 +step:225 train loss:5.708727 +step:226 train loss:5.717317 +step:227 train loss:5.779974 +step:228 train loss:5.754941 +step:229 train loss:5.825929 +step:230 train loss:5.733750 +step:231 train loss:5.775956 +step:232 train loss:5.769599 +step:233 train loss:5.747573 +step:234 train loss:5.730211 +step:235 train loss:5.811359 +step:236 train loss:5.770810 +step:237 train loss:5.807449 +step:238 train loss:5.800949 +step:239 train loss:5.710793 +step:240 train loss:5.779760 +step:241 train loss:5.823363 +step:242 train loss:5.820115 +step:243 train loss:5.727960 +step:244 train loss:5.749251 +step:245 train loss:5.728963 +step:246 train loss:5.713609 +step:247 train loss:5.696808 +step:248 train loss:5.671583 +step:249 train loss:5.739144 +step:250 validation loss:5.732673 +step:250 train loss:5.702713 +step:251 train loss:5.748398 +step:252 train loss:5.709223 +step:253 train loss:5.721463 +step:254 train loss:5.704527 +step:255 train loss:5.744260 +step:256 train loss:5.758627 +step:257 train loss:5.787465 +step:258 train loss:5.700770 +step:259 train loss:5.707146 +step:260 train loss:5.671268 +step:261 train loss:5.661060 +step:262 train loss:5.732380 +step:263 train loss:5.701557 +step:264 train loss:5.679404 +step:265 train loss:5.680666 +step:266 train loss:5.665984 +step:267 train loss:5.703923 +step:268 train loss:5.659303 +step:269 train loss:5.694238 +step:270 train loss:5.755236 +step:271 train loss:5.744057 +step:272 train loss:5.657186 +step:273 train loss:5.729654 +step:274 train loss:5.640195 +step:275 train loss:5.708457 +step:276 train 
loss:5.665015 +step:277 train loss:5.660442 +step:278 train loss:5.641987 +step:279 train loss:5.612501 +step:280 train loss:5.686931 +step:281 train loss:5.739974 +step:282 train loss:5.615522 +step:283 train loss:5.653880 +step:284 train loss:5.618821 +step:285 train loss:5.669036 +step:286 train loss:5.627876 +step:287 train loss:5.617411 +step:288 train loss:5.640280 +step:289 train loss:5.636363 +step:290 train loss:5.688361 +step:291 train loss:5.624919 +step:292 train loss:5.682928 +step:293 train loss:5.595788 +step:294 train loss:5.712340 +step:295 train loss:5.596621 +step:296 train loss:5.640273 +step:297 train loss:5.657290 +step:298 train loss:5.559822 +step:299 train loss:5.619608 +step:300 train loss:5.571423 +step:301 train loss:5.607025 +step:302 train loss:5.580053 +step:303 train loss:5.580704 +step:304 train loss:5.610687 +step:305 train loss:5.539066 +step:306 train loss:5.589248 +step:307 train loss:5.645565 +step:308 train loss:5.555987 +step:309 train loss:5.676344 +step:310 train loss:5.631262 +step:311 train loss:5.620658 +step:312 train loss:5.592722 +step:313 train loss:5.618825 +step:314 train loss:5.595451 +step:315 train loss:5.544179 +step:316 train loss:5.545671 +step:317 train loss:5.516363 +step:318 train loss:5.515491 +step:319 train loss:5.587067 +step:320 train loss:5.500234 +step:321 train loss:5.553808 +step:322 train loss:5.537613 +step:323 train loss:5.588831 +step:324 train loss:5.550253 +step:325 train loss:5.600248 +step:326 train loss:5.606890 +step:327 train loss:5.589880 +step:328 train loss:5.548343 +step:329 train loss:5.561252 +step:330 train loss:5.494097 +step:331 train loss:5.539438 +step:332 train loss:5.530910 +step:333 train loss:5.520855 +step:334 train loss:5.561492 +step:335 train loss:5.626993 +step:336 train loss:5.755544 +step:337 train loss:5.634634 +step:338 train loss:5.555681 +step:339 train loss:5.528517 +step:340 train loss:5.583027 +step:341 train loss:5.536096 +step:342 train loss:5.607063 +step:343 train loss:5.580673 +step:344 train loss:5.546659 +step:345 train loss:5.499164 +step:346 train loss:5.544145 +step:347 train loss:5.478448 +step:348 train loss:5.480559 +step:349 train loss:5.404151 +step:350 train loss:5.446884 +step:351 train loss:5.526451 +step:352 train loss:5.475715 +step:353 train loss:5.504994 +step:354 train loss:5.452785 +step:355 train loss:5.509908 +step:356 train loss:5.496931 +step:357 train loss:5.548001 +step:358 train loss:5.568114 +step:359 train loss:5.427167 +step:360 train loss:5.552337 +step:361 train loss:5.524319 +step:362 train loss:5.487034 +step:363 train loss:5.435055 +step:364 train loss:5.550751 +step:365 train loss:5.499832 +step:366 train loss:5.478850 +step:367 train loss:5.512283 +step:368 train loss:5.469895 +step:369 train loss:5.481595 +step:370 train loss:5.537799 +step:371 train loss:5.466199 +step:372 train loss:5.538961 +step:373 train loss:5.466756 +step:374 train loss:5.478438 +step:375 train loss:5.508556 +step:376 train loss:5.481389 +step:377 train loss:5.391718 +step:378 train loss:5.457009 +step:379 train loss:5.493819 +step:380 train loss:5.412738 +step:381 train loss:5.466003 +step:382 train loss:5.472725 +step:383 train loss:5.426486 +step:384 train loss:5.420775 +step:385 train loss:5.405132 +step:386 train loss:5.429888 +step:387 train loss:5.430920 +step:388 train loss:5.393643 +step:389 train loss:5.450493 +step:390 train loss:5.437844 +step:391 train loss:5.483593 +step:392 train loss:5.468060 +step:393 train loss:5.448521 +step:394 train loss:5.498790 
+step:395 train loss:5.420433 +step:396 train loss:5.368946 +step:397 train loss:5.427500 +step:398 train loss:5.413779 +step:399 train loss:5.411328 +step:400 train loss:5.372721 +step:401 train loss:5.431620 +step:402 train loss:5.405313 +step:403 train loss:5.427287 +step:404 train loss:5.425112 +step:405 train loss:5.427017 +step:406 train loss:5.453618 +step:407 train loss:5.423480 +step:408 train loss:5.487111 +step:409 train loss:5.394914 +step:410 train loss:5.368657 +step:411 train loss:5.340629 +step:412 train loss:5.436259 +step:413 train loss:5.318130 +step:414 train loss:5.411835 +step:415 train loss:5.382178 +step:416 train loss:5.381052 +step:417 train loss:5.424827 +step:418 train loss:5.357810 +step:419 train loss:5.334941 +step:420 train loss:5.326044 +step:421 train loss:5.322913 +step:422 train loss:5.327713 +step:423 train loss:5.365068 +step:424 train loss:5.305257 +step:425 train loss:5.396563 +step:426 train loss:5.378819 +step:427 train loss:5.335100 +step:428 train loss:5.404851 +step:429 train loss:5.339094 +step:430 train loss:5.359677 +step:431 train loss:5.416648 +step:432 train loss:5.421447 +step:433 train loss:5.419052 +step:434 train loss:5.372435 +step:435 train loss:5.399943 +step:436 train loss:5.416880 +step:437 train loss:5.357257 +step:438 train loss:5.319709 +step:439 train loss:5.306465 +step:440 train loss:5.334899 +step:441 train loss:5.281320 +step:442 train loss:5.275160 +step:443 train loss:5.312066 +step:444 train loss:5.367121 +step:445 train loss:5.366149 +step:446 train loss:5.306235 +step:447 train loss:5.294856 +step:448 train loss:5.364103 +step:449 train loss:5.318124 +step:450 train loss:5.303653 +step:451 train loss:5.282319 +step:452 train loss:5.352907 +step:453 train loss:5.310555 +step:454 train loss:5.250904 +step:455 train loss:5.336342 +step:456 train loss:5.290693 +step:457 train loss:5.261986 +step:458 train loss:5.282296 +step:459 train loss:5.213109 +step:460 train loss:5.302488 +step:461 train loss:5.271082 +step:462 train loss:5.181150 +step:463 train loss:5.252556 +step:464 train loss:5.327747 +step:465 train loss:5.272294 +step:466 train loss:5.298033 +step:467 train loss:5.267853 +step:468 train loss:5.317510 +step:469 train loss:5.283906 +step:470 train loss:5.254642 +step:471 train loss:5.338830 +step:472 train loss:5.218951 +step:473 train loss:5.308669 +step:474 train loss:5.333612 +step:475 train loss:5.391329 +step:476 train loss:5.354930 +step:477 train loss:5.263107 +step:478 train loss:5.313211 +step:479 train loss:5.324370 +step:480 train loss:5.342711 +step:481 train loss:5.341653 +step:482 train loss:5.267848 +step:483 train loss:5.326469 +step:484 train loss:5.271767 +step:485 train loss:5.245088 +step:486 train loss:5.297780 +step:487 train loss:5.271484 +step:488 train loss:5.266451 +step:489 train loss:5.251808 +step:490 train loss:5.210819 +step:491 train loss:5.212990 +step:492 train loss:5.214106 +step:493 train loss:5.248342 +step:494 train loss:5.277658 +step:495 train loss:5.201437 +step:496 train loss:5.335711 +step:497 train loss:5.179101 +step:498 train loss:5.292582 +step:499 train loss:5.247035 +step:500 validation loss:5.218981 total_sharp:3.5500e-03 L1_sharp:1.3686e-02 L2_sharp:6.1833e-04 L3_sharp:5.9365e-04 L4_sharp:9.0175e-04 L5_sharp:6.1893e-04 L6_sharp:5.4183e-04 L7_sharp:7.3605e-04 L8_sharp:4.5215e-04 L9_sharp:5.6379e-04 L10_sharp:6.7126e-04 L11_sharp:7.4202e-04 L12_sharp:7.4578e-04 total_fnorm:6.4585e+00 total_l1_linf:5.5303e+04 total_spectral:6.4585e+00 L1_fnorm:1.0806e+00 
L2_fnorm:1.1326e+00 L3_fnorm:1.1790e+00 L4_fnorm:1.2308e+00 L5_fnorm:1.2795e+00 L6_fnorm:1.4000e+00 L7_fnorm:1.5037e+00 L8_fnorm:1.5198e+00 L9_fnorm:1.5163e+00 L10_fnorm:1.5266e+00 L11_fnorm:1.4301e+00 L12_fnorm:1.3345e+00 L1_l1linf:2.8743e+00 L2_l1linf:1.4458e+00 L3_l1linf:1.4547e+00 L4_l1linf:1.4232e+00 L5_l1linf:1.5483e+00 L6_l1linf:1.5408e+00 L7_l1linf:1.7127e+00 L8_l1linf:1.7724e+00 L9_l1linf:1.7093e+00 L10_l1linf:1.8292e+00 L11_l1linf:1.8214e+00 L12_l1linf:2.1655e+00 L1_spectral:3.4320e-01 L2_spectral:1.8307e-01 L3_spectral:2.0788e-01 L4_spectral:2.2765e-01 L5_spectral:2.7527e-01 L6_spectral:2.9881e-01 L7_spectral:2.9030e-01 L8_spectral:2.8481e-01 L9_spectral:2.6780e-01 L10_spectral:2.6448e-01 L11_spectral:2.6161e-01 L12_spectral:3.0044e-01 ip_v_neg_g:8.9569e-02 cos_v_neg_g:2.4140e-02 v_norm:6.4585e+00 g_norm:5.7450e-01 hv_norm:4.1424e-01 cos_v_hv:5.5348e-02 hg_norm:1.5306e+00 cos_g_hg:4.6232e-01 v_par:1.2042e-02 v_perp:6.4585e+00 L1_cos_v_neg_g:9.2341e-02 L1_v_norm:1.0806e+00 L2_cos_v_neg_g:6.4537e-02 L2_v_norm:1.1326e+00 L3_cos_v_neg_g:6.1766e-02 L3_v_norm:1.1790e+00 L4_cos_v_neg_g:6.4187e-02 L4_v_norm:1.2308e+00 L5_cos_v_neg_g:4.4753e-02 L5_v_norm:1.2795e+00 L6_cos_v_neg_g:6.4712e-02 L6_v_norm:1.4000e+00 L7_cos_v_neg_g:5.8813e-02 L7_v_norm:1.5037e+00 L8_cos_v_neg_g:5.6687e-02 L8_v_norm:1.5198e+00 L9_cos_v_neg_g:5.5019e-02 L9_v_norm:1.5163e+00 L10_cos_v_neg_g:5.7340e-02 L10_v_norm:1.5266e+00 L11_cos_v_neg_g:6.3248e-02 L11_v_norm:1.4301e+00 L12_cos_v_neg_g:5.1897e-02 L12_v_norm:1.3345e+00 +step:500 train loss:5.240607 +step:501 train loss:5.206547 +step:502 train loss:5.247596 +step:503 train loss:5.174365 +step:504 train loss:5.243079 +step:505 train loss:5.176918 +step:506 train loss:5.185967 +step:507 train loss:5.190735 +step:508 train loss:5.212025 +step:509 train loss:5.232874 +step:510 train loss:5.161682 +step:511 train loss:5.157600 +step:512 train loss:5.144748 +step:513 train loss:5.160919 +step:514 train loss:5.236849 +step:515 train loss:5.183525 +step:516 train loss:5.247167 +step:517 train loss:5.159560 +step:518 train loss:5.147056 +step:519 train loss:5.196306 +step:520 train loss:5.141990 +step:521 train loss:5.135652 +step:522 train loss:5.150629 +step:523 train loss:5.143915 +step:524 train loss:5.101321 +step:525 train loss:5.127205 +step:526 train loss:5.129093 +step:527 train loss:5.113238 +step:528 train loss:5.106590 +step:529 train loss:5.133252 +step:530 train loss:5.096928 +step:531 train loss:5.127001 +step:532 train loss:5.087164 +step:533 train loss:5.049878 +step:534 train loss:5.117647 +step:535 train loss:5.118475 +step:536 train loss:5.166687 +step:537 train loss:5.049929 +step:538 train loss:5.010296 +step:539 train loss:5.114776 +step:540 train loss:5.138701 +step:541 train loss:5.045568 +step:542 train loss:5.074997 +step:543 train loss:5.081744 +step:544 train loss:5.105314 +step:545 train loss:5.119900 +step:546 train loss:5.067789 +step:547 train loss:5.108557 +step:548 train loss:5.065718 +step:549 train loss:5.166128 +step:550 train loss:5.168294 +step:551 train loss:5.185779 +step:552 train loss:5.332976 +step:553 train loss:5.296555 +step:554 train loss:5.229301 +step:555 train loss:5.312698 +step:556 train loss:5.295516 +step:557 train loss:5.260870 +step:558 train loss:5.226705 +step:559 train loss:5.316289 +step:560 train loss:5.345970 +step:561 train loss:5.229111 +step:562 train loss:5.203194 +step:563 train loss:5.276219 +step:564 train loss:5.210228 +step:565 train loss:5.197881 +step:566 train loss:5.189726 +step:567 train 
loss:5.182777 +step:568 train loss:5.228614 +step:569 train loss:5.206917 +step:570 train loss:5.161000 +step:571 train loss:5.181271 +step:572 train loss:5.166698 +step:573 train loss:5.166956 +step:574 train loss:5.217863 +step:575 train loss:5.168269 +step:576 train loss:5.156528 +step:577 train loss:5.154261 +step:578 train loss:5.126575 +step:579 train loss:5.171863 +step:580 train loss:5.088171 +step:581 train loss:5.155236 +step:582 train loss:5.158362 +step:583 train loss:5.159158 +step:584 train loss:5.176084 +step:585 train loss:5.122598 +step:586 train loss:5.104386 +step:587 train loss:5.172551 +step:588 train loss:5.085446 +step:589 train loss:5.132641 +step:590 train loss:5.135300 +step:591 train loss:5.051095 +step:592 train loss:5.033335 +step:593 train loss:5.033633 +step:594 train loss:5.005666 +step:595 train loss:5.041926 +step:596 train loss:5.015141 +step:597 train loss:5.048581 +step:598 train loss:5.026567 +step:599 train loss:5.034139 +step:600 train loss:5.008183 +step:601 train loss:4.999339 +step:602 train loss:5.009269 +step:603 train loss:5.028025 +step:604 train loss:5.012852 +step:605 train loss:5.042019 +step:606 train loss:4.969634 +step:607 train loss:4.962238 +step:608 train loss:4.970896 +step:609 train loss:4.942015 +step:610 train loss:4.942908 +step:611 train loss:4.966094 +step:612 train loss:4.989273 +step:613 train loss:4.896575 +step:614 train loss:4.945755 +step:615 train loss:5.009762 +step:616 train loss:4.959902 +step:617 train loss:5.033251 +step:618 train loss:4.985545 +step:619 train loss:5.033351 +step:620 train loss:5.099299 +step:621 train loss:4.999345 +step:622 train loss:5.100780 +step:623 train loss:5.068158 +step:624 train loss:5.071051 +step:625 train loss:5.060441 +step:626 train loss:5.060080 +step:627 train loss:5.036587 +step:628 train loss:5.024920 +step:629 train loss:4.978204 +step:630 train loss:5.018825 +step:631 train loss:5.057141 +step:632 train loss:5.024545 +step:633 train loss:5.071968 +step:634 train loss:5.064050 +step:635 train loss:4.985822 +step:636 train loss:5.107281 +step:637 train loss:4.995379 +step:638 train loss:4.918836 +step:639 train loss:5.047444 +step:640 train loss:4.993206 +step:641 train loss:5.014813 +step:642 train loss:5.045618 +step:643 train loss:4.951482 +step:644 train loss:5.007874 +step:645 train loss:4.959793 +step:646 train loss:4.942656 +step:647 train loss:4.944882 +step:648 train loss:5.037190 +step:649 train loss:4.939926 +step:650 train loss:5.003975 +step:651 train loss:4.879919 +step:652 train loss:4.922275 +step:653 train loss:4.925973 +step:654 train loss:4.954238 +step:655 train loss:5.009470 +step:656 train loss:4.988321 +step:657 train loss:5.025656 +step:658 train loss:4.929469 +step:659 train loss:5.002394 +step:660 train loss:4.968983 +step:661 train loss:5.027169 +step:662 train loss:4.999800 +step:663 train loss:4.982077 +step:664 train loss:4.882772 +step:665 train loss:4.910180 +step:666 train loss:4.908243 +step:667 train loss:4.968584 +step:668 train loss:4.933701 +step:669 train loss:4.909967 +step:670 train loss:4.935656 +step:671 train loss:4.920361 +step:672 train loss:4.892200 +step:673 train loss:5.005241 +step:674 train loss:4.996798 +step:675 train loss:4.919375 +step:676 train loss:5.030154 +step:677 train loss:4.949196 +step:678 train loss:4.984161 +step:679 train loss:5.057147 +step:680 train loss:4.989498 +step:681 train loss:5.062452 +step:682 train loss:5.041337 +step:683 train loss:5.048788 +step:684 train loss:5.122859 +step:685 train loss:5.024883 
+step:686 train loss:5.146771 +step:687 train loss:5.092597 +step:688 train loss:5.052050 +step:689 train loss:5.087133 +step:690 train loss:5.070021 +step:691 train loss:5.094754 +step:692 train loss:5.077781 +step:693 train loss:5.044607 +step:694 train loss:4.996229 +step:695 train loss:4.957898 +step:696 train loss:4.919304 +step:697 train loss:5.030838 +step:698 train loss:4.944660 +step:699 train loss:4.929683 +step:700 train loss:4.990254 +step:701 train loss:4.885678 +step:702 train loss:4.953431 +step:703 train loss:4.879900 +step:704 train loss:4.841263 +step:705 train loss:4.911584 +step:706 train loss:4.768266 +step:707 train loss:4.835310 +step:708 train loss:4.937790 +step:709 train loss:4.890000 +step:710 train loss:4.844175 +step:711 train loss:4.890082 +step:712 train loss:4.836417 +step:713 train loss:4.799016 +step:714 train loss:4.891548 +step:715 train loss:4.775177 +step:716 train loss:4.918695 +step:717 train loss:4.793028 +step:718 train loss:4.863529 +step:719 train loss:4.807084 +step:720 train loss:4.785514 +step:721 train loss:4.832503 +step:722 train loss:4.825310 +step:723 train loss:4.885195 +step:724 train loss:4.868632 +step:725 train loss:4.836219 +step:726 train loss:4.817460 +step:727 train loss:4.840795 +step:728 train loss:4.821876 +step:729 train loss:4.762523 +step:730 train loss:4.868761 +step:731 train loss:4.902819 +step:732 train loss:4.867271 +step:733 train loss:4.847795 +step:734 train loss:4.835745 +step:735 train loss:4.914881 +step:736 train loss:4.837281 +step:737 train loss:4.816937 +step:738 train loss:4.851843 +step:739 train loss:4.794584 +step:740 train loss:4.817265 +step:741 train loss:4.891353 +step:742 train loss:4.789433 +step:743 train loss:4.779838 +step:744 train loss:4.807309 +step:745 train loss:4.733892 +step:746 train loss:4.750943 +step:747 train loss:4.788102 +step:748 train loss:4.763247 +step:749 train loss:4.792392 +step:750 validation loss:4.750181 +step:750 train loss:4.731457 +step:751 train loss:4.755521 +step:752 train loss:4.704897 +step:753 train loss:4.753504 +step:754 train loss:4.749210 +step:755 train loss:4.809228 +step:756 train loss:4.771765 +step:757 train loss:4.872386 +step:758 train loss:4.758501 +step:759 train loss:4.754683 +step:760 train loss:4.708783 +step:761 train loss:4.785105 +step:762 train loss:4.754597 +step:763 train loss:4.764844 +step:764 train loss:4.742773 +step:765 train loss:4.751660 +step:766 train loss:4.863919 +step:767 train loss:4.983052 +step:768 train loss:4.841362 +step:769 train loss:4.882744 +step:770 train loss:4.927709 +step:771 train loss:5.008695 +step:772 train loss:4.939541 +step:773 train loss:4.865820 +step:774 train loss:4.924408 +step:775 train loss:4.869212 +step:776 train loss:4.868041 +step:777 train loss:4.819016 +step:778 train loss:4.841574 +step:779 train loss:4.815867 +step:780 train loss:4.858882 +step:781 train loss:4.791918 +step:782 train loss:4.839364 +step:783 train loss:4.804392 +step:784 train loss:4.862094 +step:785 train loss:4.850796 +step:786 train loss:4.821696 +step:787 train loss:4.745920 +step:788 train loss:4.804141 +step:789 train loss:4.777717 +step:790 train loss:4.721722 +step:791 train loss:4.799892 +step:792 train loss:4.800163 +step:793 train loss:4.764873 +step:794 train loss:4.748244 +step:795 train loss:4.711094 +step:796 train loss:4.961069 +step:797 train loss:4.728494 +step:798 train loss:4.724597 +step:799 train loss:4.716923 +step:800 train loss:4.795494 +step:801 train loss:4.694800 +step:802 train loss:4.814599 +step:803 
train loss:4.707555 +step:804 train loss:4.658747 +step:805 train loss:4.723293 +step:806 train loss:4.616317 +step:807 train loss:4.672684 +step:808 train loss:4.659967 +step:809 train loss:4.627996 +step:810 train loss:4.596694 +step:811 train loss:4.685262 +step:812 train loss:4.642008 +step:813 train loss:4.654063 +step:814 train loss:4.719550 +step:815 train loss:4.677108 +step:816 train loss:4.608561 +step:817 train loss:4.634624 +step:818 train loss:4.619447 +step:819 train loss:4.676758 +step:820 train loss:4.821384 +step:821 train loss:4.735387 +step:822 train loss:4.820673 +step:823 train loss:4.846044 +step:824 train loss:4.791836 +step:825 train loss:4.829204 +step:826 train loss:4.874894 +step:827 train loss:4.768939 +step:828 train loss:4.795606 +step:829 train loss:4.794201 +step:830 train loss:4.781878 +step:831 train loss:4.796186 +step:832 train loss:4.861799 +step:833 train loss:4.794522 +step:834 train loss:4.774642 +step:835 train loss:4.755874 +step:836 train loss:4.730971 +step:837 train loss:4.752272 +step:838 train loss:4.762909 +step:839 train loss:4.761149 +step:840 train loss:4.785092 +step:841 train loss:4.739358 +step:842 train loss:4.722836 +step:843 train loss:4.696704 +step:844 train loss:4.664218 +step:845 train loss:4.632680 +step:846 train loss:4.719532 +step:847 train loss:4.670461 +step:848 train loss:4.616556 +step:849 train loss:4.655692 +step:850 train loss:4.650934 +step:851 train loss:4.605944 +step:852 train loss:4.680511 +step:853 train loss:4.564542 +step:854 train loss:4.606825 +step:855 train loss:4.599626 +step:856 train loss:4.545303 +step:857 train loss:4.582173 +step:858 train loss:4.615587 +step:859 train loss:4.535199 +step:860 train loss:4.553246 +step:861 train loss:4.591273 +step:862 train loss:4.534860 +step:863 train loss:4.551192 +step:864 train loss:4.547132 +step:865 train loss:4.559939 +step:866 train loss:4.584126 +step:867 train loss:4.676725 +step:868 train loss:4.539847 +step:869 train loss:4.561172 +step:870 train loss:4.507763 +step:871 train loss:4.500488 +step:872 train loss:4.550637 +step:873 train loss:4.526005 +step:874 train loss:4.542477 +step:875 train loss:4.457119 +step:876 train loss:4.549236 +step:877 train loss:4.476892 +step:878 train loss:4.573832 +step:879 train loss:4.479753 +step:880 train loss:4.585624 +step:881 train loss:4.523914 +step:882 train loss:4.484082 +step:883 train loss:4.527385 +step:884 train loss:4.544402 +step:885 train loss:4.488353 +step:886 train loss:4.484313 +step:887 train loss:4.507980 +step:888 train loss:4.604457 +step:889 train loss:4.541698 +step:890 train loss:4.489468 +step:891 train loss:4.449977 +step:892 train loss:4.441066 +step:893 train loss:4.528446 +step:894 train loss:4.523245 +step:895 train loss:4.510127 +step:896 train loss:4.620881 +step:897 train loss:4.583404 +step:898 train loss:4.600959 +step:899 train loss:4.580833 +step:900 train loss:4.603900 +step:901 train loss:4.532779 +step:902 train loss:4.580375 +step:903 train loss:4.665044 +step:904 train loss:4.658995 +step:905 train loss:4.575444 +step:906 train loss:4.634180 +step:907 train loss:4.607245 +step:908 train loss:4.667038 +step:909 train loss:4.699909 +step:910 train loss:4.666125 +step:911 train loss:4.855572 +step:912 train loss:4.656916 +step:913 train loss:4.792835 +step:914 train loss:4.736464 +step:915 train loss:4.690436 +step:916 train loss:4.754382 +step:917 train loss:4.672323 +step:918 train loss:4.730805 +step:919 train loss:4.776286 +step:920 train loss:4.539719 +step:921 train 
loss:4.634155 +step:922 train loss:4.601873 +step:923 train loss:4.510997 +step:924 train loss:4.550389 +step:925 train loss:4.496581 +step:926 train loss:4.587488 +step:927 train loss:4.488752 +step:928 train loss:4.564892 +step:929 train loss:4.529826 +step:930 train loss:4.520408 +step:931 train loss:4.558424 +step:932 train loss:4.504879 +step:933 train loss:4.528592 +step:934 train loss:4.560494 +step:935 train loss:4.536252 +step:936 train loss:4.508392 +step:937 train loss:4.505473 +step:938 train loss:4.497697 +step:939 train loss:4.388808 +step:940 train loss:4.484597 +step:941 train loss:4.426785 +step:942 train loss:4.405770 +step:943 train loss:4.502920 +step:944 train loss:4.451040 +step:945 train loss:4.460172 +step:946 train loss:4.480070 +step:947 train loss:4.612499 +step:948 train loss:4.426111 +step:949 train loss:4.472548 +step:950 train loss:4.406619 +step:951 train loss:4.436573 +step:952 train loss:4.490350 +step:953 train loss:4.422898 +step:954 train loss:4.451317 +step:955 train loss:4.401169 +step:956 train loss:4.420483 +step:957 train loss:4.421645 +step:958 train loss:4.493232 +step:959 train loss:4.428512 +step:960 train loss:4.528250 +step:961 train loss:4.497039 +step:962 train loss:4.467622 +step:963 train loss:4.450484 +step:964 train loss:4.481376 +step:965 train loss:4.393007 +step:966 train loss:4.396857 +step:967 train loss:4.455537 +step:968 train loss:4.457311 +step:969 train loss:4.416815 +step:970 train loss:4.478947 +step:971 train loss:4.441356 +step:972 train loss:4.368526 +step:973 train loss:4.465706 +step:974 train loss:4.415157 +step:975 train loss:4.479206 +step:976 train loss:4.460967 +step:977 train loss:4.441831 +step:978 train loss:4.438557 +step:979 train loss:4.418446 +step:980 train loss:4.417321 +step:981 train loss:4.391318 +step:982 train loss:4.402564 +step:983 train loss:4.412606 +step:984 train loss:4.442987 +step:985 train loss:4.406016 +step:986 train loss:4.431053 +step:987 train loss:4.460835 +step:988 train loss:4.437172 +step:989 train loss:4.412218 +step:990 train loss:4.401849 +step:991 train loss:4.327442 +step:992 train loss:4.390273 +step:993 train loss:4.413893 +step:994 train loss:4.360937 +step:995 train loss:4.366508 +step:996 train loss:4.405193 +step:997 train loss:4.367565 +step:998 train loss:4.360974 +step:999 train loss:4.418088 +step:1000 validation loss:4.355477 total_sharp:1.8484e-03 L1_sharp:3.8880e-03 L2_sharp:1.0387e-04 L3_sharp:3.5022e-04 L4_sharp:3.0916e-04 L5_sharp:2.7223e-04 L6_sharp:1.8572e-04 L7_sharp:1.4292e-04 L8_sharp:3.0019e-04 L9_sharp:4.2977e-04 L10_sharp:5.5469e-04 L11_sharp:6.2242e-04 L12_sharp:1.2637e-03 total_fnorm:9.2446e+00 total_l1_linf:7.9548e+04 total_spectral:9.2446e+00 L1_fnorm:1.6946e+00 L2_fnorm:1.8525e+00 L3_fnorm:1.6963e+00 L4_fnorm:1.7498e+00 L5_fnorm:1.8525e+00 L6_fnorm:1.9937e+00 L7_fnorm:2.0648e+00 L8_fnorm:2.0371e+00 L9_fnorm:2.0655e+00 L10_fnorm:2.0635e+00 L11_fnorm:2.1124e+00 L12_fnorm:2.1414e+00 L1_l1linf:2.8631e+00 L2_l1linf:2.1689e+00 L3_l1linf:2.3108e+00 L4_l1linf:2.1628e+00 L5_l1linf:2.1657e+00 L6_l1linf:2.2288e+00 L7_l1linf:2.4183e+00 L8_l1linf:2.4966e+00 L9_l1linf:2.7146e+00 L10_l1linf:2.7518e+00 L11_l1linf:2.7502e+00 L12_l1linf:3.1740e+00 L1_spectral:4.9187e-01 L2_spectral:2.7566e-01 L3_spectral:2.5913e-01 L4_spectral:2.6633e-01 L5_spectral:3.3807e-01 L6_spectral:2.9700e-01 L7_spectral:3.2696e-01 L8_spectral:3.7371e-01 L9_spectral:4.7506e-01 L10_spectral:4.8868e-01 L11_spectral:4.9346e-01 L12_spectral:5.4173e-01 ip_v_neg_g:1.0689e-01 cos_v_neg_g:2.2234e-02 
v_norm:9.2446e+00 g_norm:5.2003e-01 hv_norm:4.9738e-01 cos_v_hv:3.4354e-02 hg_norm:2.8909e+00 cos_g_hg:5.2822e-01 v_par:1.0650e-02 v_perp:9.2446e+00 L1_cos_v_neg_g:1.1556e-01 L1_v_norm:1.6946e+00 L2_cos_v_neg_g:2.7179e-02 L2_v_norm:1.8525e+00 L3_cos_v_neg_g:3.4215e-02 L3_v_norm:1.6963e+00 L4_cos_v_neg_g:4.1419e-02 L4_v_norm:1.7498e+00 L5_cos_v_neg_g:3.4327e-02 L5_v_norm:1.8525e+00 L6_cos_v_neg_g:3.3290e-02 L6_v_norm:1.9937e+00 L7_cos_v_neg_g:3.4245e-02 L7_v_norm:2.0648e+00 L8_cos_v_neg_g:3.8963e-02 L8_v_norm:2.0371e+00 L9_cos_v_neg_g:4.7450e-02 L9_v_norm:2.0655e+00 L10_cos_v_neg_g:5.2391e-02 L10_v_norm:2.0635e+00 L11_cos_v_neg_g:4.8253e-02 L11_v_norm:2.1124e+00 L12_cos_v_neg_g:5.9125e-02 L12_v_norm:2.1414e+00 +step:1000 train loss:4.430173 +step:1001 train loss:4.428345 +step:1002 train loss:4.414354 +step:1003 train loss:4.387729 +step:1004 train loss:4.359750 +step:1005 train loss:4.369598 +step:1006 train loss:4.462379 +step:1007 train loss:4.432208 +step:1008 train loss:4.444749 +step:1009 train loss:4.514098 +step:1010 train loss:4.460661 +step:1011 train loss:4.481466 +step:1012 train loss:4.425139 +step:1013 train loss:4.446110 +step:1014 train loss:4.522092 +step:1015 train loss:4.672770 +step:1016 train loss:4.699427 +step:1017 train loss:4.616827 +step:1018 train loss:4.663913 +step:1019 train loss:4.681207 +step:1020 train loss:4.920141 +step:1021 train loss:4.892916 +step:1022 train loss:4.734508 +step:1023 train loss:4.697925 +step:1024 train loss:4.808365 +step:1025 train loss:4.689691 +step:1026 train loss:4.699564 +step:1027 train loss:4.709383 +step:1028 train loss:4.658450 +step:1029 train loss:4.576643 +step:1030 train loss:4.633949 +step:1031 train loss:4.601757 +step:1032 train loss:4.542065 +step:1033 train loss:4.499325 +step:1034 train loss:4.552972 +step:1035 train loss:4.545496 +step:1036 train loss:4.455049 +step:1037 train loss:4.533661 +step:1038 train loss:4.533073 +step:1039 train loss:4.660270 +step:1040 train loss:4.488171 +step:1041 train loss:4.465542 +step:1042 train loss:4.469790 +step:1043 train loss:4.466080 +step:1044 train loss:4.444364 +step:1045 train loss:4.443739 +step:1046 train loss:4.379993 +step:1047 train loss:4.412048 +step:1048 train loss:4.401104 +step:1049 train loss:4.451897 +step:1050 train loss:4.405887 +step:1051 train loss:4.375674 +step:1052 train loss:4.466931 +step:1053 train loss:4.373277 +step:1054 train loss:4.359760 +step:1055 train loss:4.421691 +step:1056 train loss:4.369534 +step:1057 train loss:4.263121 +step:1058 train loss:4.364504 +step:1059 train loss:4.349622 +step:1060 train loss:4.340876 +step:1061 train loss:4.391577 +step:1062 train loss:4.348864 +step:1063 train loss:4.352017 +step:1064 train loss:4.340761 +step:1065 train loss:4.350955 +step:1066 train loss:4.325929 +step:1067 train loss:4.355416 +step:1068 train loss:4.313811 +step:1069 train loss:4.330623 +step:1070 train loss:4.338792 +step:1071 train loss:4.351455 +step:1072 train loss:4.377029 +step:1073 train loss:4.292428 +step:1074 train loss:4.306302 +step:1075 train loss:4.314335 +step:1076 train loss:4.381289 +step:1077 train loss:4.316099 +step:1078 train loss:4.367768 +step:1079 train loss:4.404070 +step:1080 train loss:4.279087 +step:1081 train loss:4.350446 +step:1082 train loss:4.355450 +step:1083 train loss:4.329535 +step:1084 train loss:4.303785 +step:1085 train loss:4.363255 +step:1086 train loss:4.351560 +step:1087 train loss:4.333248 +step:1088 train loss:4.331282 +step:1089 train loss:4.334435 +step:1090 train loss:4.281063 +step:1091 
train loss:4.271089 +step:1092 train loss:4.379544 +step:1093 train loss:4.264721 +step:1094 train loss:4.318560 +step:1095 train loss:4.363184 +step:1096 train loss:4.294600 +step:1097 train loss:4.292908 +step:1098 train loss:4.266798 +step:1099 train loss:4.321441 +step:1100 train loss:4.366589 +step:1101 train loss:4.359962 +step:1102 train loss:4.370947 +step:1103 train loss:4.294876 +step:1104 train loss:4.324545 +step:1105 train loss:4.373786 +step:1106 train loss:4.312741 +step:1107 train loss:4.432577 +step:1108 train loss:4.370339 +step:1109 train loss:4.340547 +step:1110 train loss:4.289880 +step:1111 train loss:4.351251 +step:1112 train loss:4.267655 +step:1113 train loss:4.250419 +step:1114 train loss:4.243134 +step:1115 train loss:4.290901 +step:1116 train loss:4.360139 +step:1117 train loss:4.367898 +step:1118 train loss:4.406277 +step:1119 train loss:4.338468 +step:1120 train loss:4.343488 +step:1121 train loss:4.335876 +step:1122 train loss:4.332973 +step:1123 train loss:4.447784 +step:1124 train loss:4.325533 +step:1125 train loss:4.351331 +step:1126 train loss:4.296542 +step:1127 train loss:4.320162 +step:1128 train loss:4.312157 +step:1129 train loss:4.361815 +step:1130 train loss:4.279914 +step:1131 train loss:4.367091 +step:1132 train loss:4.313733 +step:1133 train loss:4.322353 +step:1134 train loss:4.291724 +step:1135 train loss:4.343916 +step:1136 train loss:4.354866 +step:1137 train loss:4.275570 +step:1138 train loss:4.347223 +step:1139 train loss:4.297127 +step:1140 train loss:4.368762 +step:1141 train loss:4.321730 +step:1142 train loss:4.257704 +step:1143 train loss:4.331625 +step:1144 train loss:4.352135 +step:1145 train loss:4.299828 +step:1146 train loss:4.247208 +step:1147 train loss:4.259554 +step:1148 train loss:4.287129 +step:1149 train loss:4.333688 +step:1150 train loss:4.343555 +step:1151 train loss:4.348479 +step:1152 train loss:4.255821 +step:1153 train loss:4.254938 +step:1154 train loss:4.231919 +step:1155 train loss:4.340910 +step:1156 train loss:4.248322 +step:1157 train loss:4.308655 +step:1158 train loss:4.367306 +step:1159 train loss:4.372606 +step:1160 train loss:4.298866 +step:1161 train loss:4.388969 +step:1162 train loss:4.326351 +step:1163 train loss:4.317888 +step:1164 train loss:4.224151 +step:1165 train loss:4.354448 +step:1166 train loss:4.283834 +step:1167 train loss:4.280910 +step:1168 train loss:4.334708 +step:1169 train loss:4.287851 +step:1170 train loss:4.292960 +step:1171 train loss:4.321635 +step:1172 train loss:4.287257 +step:1173 train loss:4.306588 +step:1174 train loss:4.243433 +step:1175 train loss:4.270048 +step:1176 train loss:4.377548 +step:1177 train loss:4.227974 +step:1178 train loss:4.286119 +step:1179 train loss:4.247676 +step:1180 train loss:4.283460 +step:1181 train loss:4.258874 +step:1182 train loss:4.315832 +step:1183 train loss:4.296928 +step:1184 train loss:4.235348 +step:1185 train loss:4.264338 +step:1186 train loss:4.246051 +step:1187 train loss:4.221884 +step:1188 train loss:4.253557 +step:1189 train loss:4.190804 +step:1190 train loss:4.256620 +step:1191 train loss:4.310934 +step:1192 train loss:4.260370 +step:1193 train loss:4.257226 +step:1194 train loss:4.371982 +step:1195 train loss:4.345492 +step:1196 train loss:4.237983 +step:1197 train loss:4.260124 +step:1198 train loss:4.238580 +step:1199 train loss:4.237693 +step:1200 train loss:4.292829 +step:1201 train loss:4.265478 +step:1202 train loss:4.205972 +step:1203 train loss:4.201017 +step:1204 train loss:4.237218 +step:1205 train loss:4.250132 
+step:1206 train loss:4.208762 +step:1207 train loss:4.358488 +step:1208 train loss:4.317328 +step:1209 train loss:4.241951 +step:1210 train loss:4.341085 +step:1211 train loss:4.275895 +step:1212 train loss:4.297925 +step:1213 train loss:4.225582 +step:1214 train loss:4.312875 +step:1215 train loss:4.274287 +step:1216 train loss:4.282830 +step:1217 train loss:4.225797 +step:1218 train loss:4.277776 +step:1219 train loss:4.216327 +step:1220 train loss:4.242539 +step:1221 train loss:4.256810 +step:1222 train loss:4.300164 +step:1223 train loss:4.267517 +step:1224 train loss:4.240115 +step:1225 train loss:4.285303 +step:1226 train loss:4.222778 +step:1227 train loss:4.227262 +step:1228 train loss:4.232536 +step:1229 train loss:4.198971 +step:1230 train loss:4.191973 +step:1231 train loss:4.241437 +step:1232 train loss:4.193509 +step:1233 train loss:4.191097 +step:1234 train loss:4.278015 +step:1235 train loss:4.266802 +step:1236 train loss:4.180031 +step:1237 train loss:4.284156 +step:1238 train loss:4.231626 +step:1239 train loss:4.267294 +step:1240 train loss:4.168631 +step:1241 train loss:4.203638 +step:1242 train loss:4.224777 +step:1243 train loss:4.172656 +step:1244 train loss:4.282120 +step:1245 train loss:4.300142 +step:1246 train loss:4.232888 +step:1247 train loss:4.207375 +step:1248 train loss:4.232580 +step:1249 train loss:4.163619 +step:1250 validation loss:4.162017 +step:1250 train loss:4.174712 +step:1251 train loss:4.245660 +step:1252 train loss:4.194247 +step:1253 train loss:4.154642 +step:1254 train loss:4.180440 +step:1255 train loss:4.177019 +step:1256 train loss:4.223412 +step:1257 train loss:4.203199 +step:1258 train loss:4.256716 +step:1259 train loss:4.248785 +step:1260 train loss:4.143730 +step:1261 train loss:4.378057 +step:1262 train loss:4.223951 +step:1263 train loss:4.196104 +step:1264 train loss:4.205783 +step:1265 train loss:4.252403 +step:1266 train loss:4.200376 +step:1267 train loss:4.219612 +step:1268 train loss:4.264816 +step:1269 train loss:4.266047 +step:1270 train loss:4.196443 +step:1271 train loss:4.194475 +step:1272 train loss:4.219663 +step:1273 train loss:4.269082 +step:1274 train loss:4.232741 +step:1275 train loss:4.253013 +step:1276 train loss:4.249336 +step:1277 train loss:4.255322 +step:1278 train loss:4.197439 +step:1279 train loss:4.200336 +step:1280 train loss:4.213556 +step:1281 train loss:4.267273 +step:1282 train loss:4.189920 +step:1283 train loss:4.265018 +step:1284 train loss:4.203510 +step:1285 train loss:4.251833 +step:1286 train loss:4.148621 +step:1287 train loss:4.189731 +step:1288 train loss:4.215144 +step:1289 train loss:4.269206 +step:1290 train loss:4.216424 +step:1291 train loss:4.182810 +step:1292 train loss:4.165953 +step:1293 train loss:4.153402 +step:1294 train loss:4.201976 +step:1295 train loss:4.183372 +step:1296 train loss:4.232574 +step:1297 train loss:4.184811 +step:1298 train loss:4.202834 +step:1299 train loss:4.235328 +step:1300 train loss:4.158679 +step:1301 train loss:4.199604 +step:1302 train loss:4.161578 +step:1303 train loss:4.197338 +step:1304 train loss:4.225577 +step:1305 train loss:4.202805 +step:1306 train loss:4.192643 +step:1307 train loss:4.182474 +step:1308 train loss:4.141018 +step:1309 train loss:4.159714 +step:1310 train loss:4.154421 +step:1311 train loss:4.157495 +step:1312 train loss:4.225552 +step:1313 train loss:4.135843 +step:1314 train loss:4.143080 +step:1315 train loss:4.193132 +step:1316 train loss:4.165329 +step:1317 train loss:4.058988 +step:1318 train loss:4.215621 +step:1319 
train loss:4.249064 +step:1320 train loss:4.165220 +step:1321 train loss:4.137465 +step:1322 train loss:4.243478 +step:1323 train loss:4.193269 +step:1324 train loss:4.288892 +step:1325 train loss:4.180111 +step:1326 train loss:4.222121 +step:1327 train loss:4.241972 +step:1328 train loss:4.151570 +step:1329 train loss:4.180190 +step:1330 train loss:4.195994 +step:1331 train loss:4.100484 +step:1332 train loss:4.241521 +step:1333 train loss:4.224686 +step:1334 train loss:4.215885 +step:1335 train loss:4.239433 +step:1336 train loss:4.245525 +step:1337 train loss:4.220665 +step:1338 train loss:4.189353 +step:1339 train loss:4.263385 +step:1340 train loss:4.224336 +step:1341 train loss:4.200114 +step:1342 train loss:4.172063 +step:1343 train loss:4.157803 +step:1344 train loss:4.223548 +step:1345 train loss:4.185245 +step:1346 train loss:4.260104 +step:1347 train loss:4.183998 +step:1348 train loss:4.148042 +step:1349 train loss:4.096502 +step:1350 train loss:4.125823 +step:1351 train loss:4.193008 +step:1352 train loss:4.163380 +step:1353 train loss:4.140792 +step:1354 train loss:4.145358 +step:1355 train loss:4.216636 +step:1356 train loss:4.130119 +step:1357 train loss:4.154194 +step:1358 train loss:4.146887 +step:1359 train loss:4.143098 +step:1360 train loss:4.174783 +step:1361 train loss:4.288319 +step:1362 train loss:4.208296 +step:1363 train loss:4.091114 +step:1364 train loss:4.115067 +step:1365 train loss:4.107852 +step:1366 train loss:4.148054 +step:1367 train loss:4.079428 +step:1368 train loss:4.115909 +step:1369 train loss:4.154650 +step:1370 train loss:4.174786 +step:1371 train loss:4.139657 +step:1372 train loss:4.191669 +step:1373 train loss:4.238231 +step:1374 train loss:4.240927 +step:1375 train loss:4.210516 +step:1376 train loss:4.239168 +step:1377 train loss:4.214998 +step:1378 train loss:4.220325 +step:1379 train loss:4.208708 +step:1380 train loss:4.276540 +step:1381 train loss:4.220624 +step:1382 train loss:4.178680 +step:1383 train loss:4.154086 +step:1384 train loss:4.230517 +step:1385 train loss:4.132400 +step:1386 train loss:4.194149 +step:1387 train loss:4.198418 +step:1388 train loss:4.161449 +step:1389 train loss:4.132428 +step:1390 train loss:4.163249 +step:1391 train loss:4.193461 +step:1392 train loss:4.172709 +step:1393 train loss:4.226853 +step:1394 train loss:4.154840 +step:1395 train loss:4.197567 +step:1396 train loss:4.175264 +step:1397 train loss:4.189154 +step:1398 train loss:4.190502 +step:1399 train loss:4.156798 +step:1400 train loss:4.131734 +step:1401 train loss:4.122127 +step:1402 train loss:4.125784 +step:1403 train loss:4.083438 +step:1404 train loss:4.143039 +step:1405 train loss:4.105970 +step:1406 train loss:4.133810 +step:1407 train loss:4.127919 +step:1408 train loss:4.109459 +step:1409 train loss:4.095014 +step:1410 train loss:4.113825 +step:1411 train loss:4.145256 +step:1412 train loss:4.203519 +step:1413 train loss:4.120225 +step:1414 train loss:4.148256 +step:1415 train loss:4.111641 +step:1416 train loss:4.163175 +step:1417 train loss:4.133765 +step:1418 train loss:4.077014 +step:1419 train loss:4.082008 +step:1420 train loss:4.109040 +step:1421 train loss:4.150094 +step:1422 train loss:4.127252 +step:1423 train loss:4.225863 +step:1424 train loss:4.123524 +step:1425 train loss:4.088277 +step:1426 train loss:4.110010 +step:1427 train loss:4.103935 +step:1428 train loss:4.089578 +step:1429 train loss:4.124390 +step:1430 train loss:4.122951 +step:1431 train loss:4.141344 +step:1432 train loss:4.128751 +step:1433 train loss:4.109859 
+step:1434 train loss:4.088092 +step:1435 train loss:4.076112 +step:1436 train loss:4.152417 +step:1437 train loss:4.083082 +step:1438 train loss:4.084853 +step:1439 train loss:4.069202 +step:1440 train loss:4.106157 +step:1441 train loss:4.183128 +step:1442 train loss:4.141475 +step:1443 train loss:4.069849 +step:1444 train loss:4.081963 +step:1445 train loss:4.080335 +step:1446 train loss:4.104995 +step:1447 train loss:4.123072 +step:1448 train loss:4.098508 +step:1449 train loss:4.131941 +step:1450 train loss:4.146579 +step:1451 train loss:4.065467 +step:1452 train loss:4.121992 +step:1453 train loss:4.113476 +step:1454 train loss:4.108996 +step:1455 train loss:4.041591 +step:1456 train loss:4.117758 +step:1457 train loss:4.045496 +step:1458 train loss:4.187079 +step:1459 train loss:4.108459 +step:1460 train loss:4.074920 +step:1461 train loss:4.131665 +step:1462 train loss:4.137675 +step:1463 train loss:4.102983 +step:1464 train loss:4.084810 +step:1465 train loss:4.079948 +step:1466 train loss:4.041760 +step:1467 train loss:4.182593 +step:1468 train loss:4.077246 +step:1469 train loss:4.158039 +step:1470 train loss:4.087210 +step:1471 train loss:4.087982 +step:1472 train loss:4.089238 +step:1473 train loss:4.088423 +step:1474 train loss:4.032206 +step:1475 train loss:4.089846 +step:1476 train loss:4.168504 +step:1477 train loss:4.120037 +step:1478 train loss:4.049366 +step:1479 train loss:4.084636 +step:1480 train loss:4.082156 +step:1481 train loss:4.053263 +step:1482 train loss:4.119382 +step:1483 train loss:4.109535 +step:1484 train loss:4.134028 +step:1485 train loss:4.146722 +step:1486 train loss:4.081848 +step:1487 train loss:4.066819 +step:1488 train loss:4.072197 +step:1489 train loss:4.063178 +step:1490 train loss:4.119173 +step:1491 train loss:4.108770 +step:1492 train loss:4.109031 +step:1493 train loss:4.044662 +step:1494 train loss:4.082057 +step:1495 train loss:4.064459 +step:1496 train loss:4.031343 +step:1497 train loss:4.104862 +step:1498 train loss:4.015309 +step:1499 train loss:4.060545 +step:1500 validation loss:4.035630 total_sharp:9.1264e-04 L1_sharp:1.2473e-03 L2_sharp:7.3260e-05 L3_sharp:3.0768e-04 L4_sharp:2.2075e-04 L5_sharp:1.3391e-04 L6_sharp:6.1273e-05 L7_sharp:7.8636e-05 L8_sharp:1.5605e-04 L9_sharp:2.4123e-04 L10_sharp:2.4303e-04 L11_sharp:2.2360e-04 L12_sharp:5.1247e-04 total_fnorm:9.9154e+00 total_l1_linf:8.6438e+04 total_spectral:9.9154e+00 L1_fnorm:2.0439e+00 L2_fnorm:2.1880e+00 L3_fnorm:2.0478e+00 L4_fnorm:2.0601e+00 L5_fnorm:2.1051e+00 L6_fnorm:2.2137e+00 L7_fnorm:2.2835e+00 L8_fnorm:2.2407e+00 L9_fnorm:2.2329e+00 L10_fnorm:2.2239e+00 L11_fnorm:2.2380e+00 L12_fnorm:2.2196e+00 L1_l1linf:2.5398e+00 L2_l1linf:2.3677e+00 L3_l1linf:2.3326e+00 L4_l1linf:2.3216e+00 L5_l1linf:2.2259e+00 L6_l1linf:2.3282e+00 L7_l1linf:2.4635e+00 L8_l1linf:2.5046e+00 L9_l1linf:2.6165e+00 L10_l1linf:2.4825e+00 L11_l1linf:2.7223e+00 L12_l1linf:2.5844e+00 L1_spectral:3.9369e-01 L2_spectral:2.7003e-01 L3_spectral:2.7257e-01 L4_spectral:3.2537e-01 L5_spectral:3.4665e-01 L6_spectral:2.6903e-01 L7_spectral:2.8285e-01 L8_spectral:3.1893e-01 L9_spectral:3.6975e-01 L10_spectral:3.8125e-01 L11_spectral:3.7618e-01 L12_spectral:4.0001e-01 ip_v_neg_g:4.9429e-02 cos_v_neg_g:9.3583e-03 v_norm:9.9154e+00 g_norm:5.3270e-01 hv_norm:5.3500e-01 cos_v_hv:1.6914e-02 hg_norm:5.9257e+00 cos_g_hg:5.8149e-01 v_par:6.0587e-03 v_perp:9.9154e+00 L1_cos_v_neg_g:4.9938e-02 L1_v_norm:2.0439e+00 L2_cos_v_neg_g:1.8411e-02 L2_v_norm:2.1880e+00 L3_cos_v_neg_g:2.7329e-02 L3_v_norm:2.0478e+00 
L4_cos_v_neg_g:2.1034e-02 L4_v_norm:2.0601e+00 L5_cos_v_neg_g:1.7772e-02 L5_v_norm:2.1051e+00 L6_cos_v_neg_g:1.2579e-02 L6_v_norm:2.2137e+00 L7_cos_v_neg_g:1.6025e-02 L7_v_norm:2.2835e+00 L8_cos_v_neg_g:1.9214e-02 L8_v_norm:2.2407e+00 L9_cos_v_neg_g:2.1617e-02 L9_v_norm:2.2329e+00 L10_cos_v_neg_g:2.1013e-02 L10_v_norm:2.2239e+00 L11_cos_v_neg_g:2.0160e-02 L11_v_norm:2.2380e+00 L12_cos_v_neg_g:2.3818e-02 L12_v_norm:2.2196e+00 +step:1500 train loss:4.059323 +step:1501 train loss:4.082567 +step:1502 train loss:4.018510 +step:1503 train loss:4.077516 +step:1504 train loss:4.044542 +step:1505 train loss:4.020530 +step:1506 train loss:4.005510 +step:1507 train loss:4.036702 +step:1508 train loss:4.053032 +step:1509 train loss:4.115467 +step:1510 train loss:4.046856 +step:1511 train loss:4.074304 +step:1512 train loss:4.054205 +step:1513 train loss:4.115774 +step:1514 train loss:4.059701 +step:1515 train loss:4.120036 +step:1516 train loss:4.046870 +step:1517 train loss:4.052618 +step:1518 train loss:4.129534 +step:1519 train loss:4.095157 +step:1520 train loss:4.138166 +step:1521 train loss:4.037304 +step:1522 train loss:4.095464 +step:1523 train loss:4.097504 +step:1524 train loss:4.017514 +step:1525 train loss:4.100399 +step:1526 train loss:4.021218 +step:1527 train loss:4.072831 +step:1528 train loss:4.120801 +step:1529 train loss:4.077294 +step:1530 train loss:4.119031 +step:1531 train loss:4.035347 +step:1532 train loss:4.106461 +step:1533 train loss:4.078667 +step:1534 train loss:4.026426 +step:1535 train loss:4.077444 +step:1536 train loss:4.101018 +step:1537 train loss:4.061234 +step:1538 train loss:4.063821 +step:1539 train loss:4.065570 +step:1540 train loss:4.084455 +step:1541 train loss:4.048526 +step:1542 train loss:4.133515 +step:1543 train loss:4.160025 +step:1544 train loss:4.031926 +step:1545 train loss:4.009497 +step:1546 train loss:4.049876 +step:1547 train loss:4.035872 +step:1548 train loss:4.078047 +step:1549 train loss:4.007409 +step:1550 train loss:4.122172 +step:1551 train loss:4.054230 +step:1552 train loss:4.083899 +step:1553 train loss:4.090874 +step:1554 train loss:4.100608 +step:1555 train loss:4.058294 +step:1556 train loss:4.038350 +step:1557 train loss:4.046705 +step:1558 train loss:4.065233 +step:1559 train loss:4.034197 +step:1560 train loss:4.114661 +step:1561 train loss:4.084609 +step:1562 train loss:3.974481 +step:1563 train loss:3.942024 +step:1564 train loss:4.084198 +step:1565 train loss:4.061134 +step:1566 train loss:4.079595 +step:1567 train loss:4.081426 +step:1568 train loss:4.034432 +step:1569 train loss:4.030510 +step:1570 train loss:4.053219 +step:1571 train loss:4.048156 +step:1572 train loss:4.043828 +step:1573 train loss:4.083672 +step:1574 train loss:4.042244 +step:1575 train loss:4.067364 +step:1576 train loss:4.021560 +step:1577 train loss:4.042992 +step:1578 train loss:4.024127 +step:1579 train loss:4.096610 +step:1580 train loss:4.049500 +step:1581 train loss:4.083465 +step:1582 train loss:4.088754 +step:1583 train loss:4.062375 +step:1584 train loss:3.986187 +step:1585 train loss:4.074092 +step:1586 train loss:4.035892 +step:1587 train loss:4.047026 +step:1588 train loss:4.030576 +step:1589 train loss:4.077430 +step:1590 train loss:3.982967 +step:1591 train loss:4.042533 +step:1592 train loss:3.992829 +step:1593 train loss:4.026566 +step:1594 train loss:4.030572 +step:1595 train loss:4.021109 +step:1596 train loss:4.028175 +step:1597 train loss:3.963748 +step:1598 train loss:4.061516 +step:1599 train loss:4.074160 +step:1600 train 
loss:3.954170 +step:1601 train loss:4.026929 +step:1602 train loss:4.088769 +step:1603 train loss:4.086916 +step:1604 train loss:4.012266 +step:1605 train loss:4.060162 +step:1606 train loss:4.111055 +step:1607 train loss:3.990813 +step:1608 train loss:4.017885 +step:1609 train loss:4.031873 +step:1610 train loss:4.089924 +step:1611 train loss:4.020588 +step:1612 train loss:3.949714 +step:1613 train loss:4.031790 +step:1614 train loss:4.135510 +step:1615 train loss:4.054802 +step:1616 train loss:4.077858 +step:1617 train loss:4.048333 +step:1618 train loss:4.053278 +step:1619 train loss:4.221143 +step:1620 train loss:4.016820 +step:1621 train loss:4.075910 +step:1622 train loss:3.995330 +step:1623 train loss:4.053125 +step:1624 train loss:4.027997 +step:1625 train loss:4.100095 +step:1626 train loss:3.991705 +step:1627 train loss:3.995784 +step:1628 train loss:4.014362 +step:1629 train loss:4.043393 +step:1630 train loss:4.057855 +step:1631 train loss:4.008247 +step:1632 train loss:3.982241 +step:1633 train loss:3.999198 +step:1634 train loss:4.050437 +step:1635 train loss:3.994402 +step:1636 train loss:3.979206 +step:1637 train loss:4.052066 +step:1638 train loss:4.157362 +step:1639 train loss:3.961671 +step:1640 train loss:4.037949 +step:1641 train loss:4.005314 +step:1642 train loss:4.095709 +step:1643 train loss:3.998506 +step:1644 train loss:4.012147 +step:1645 train loss:3.990366 +step:1646 train loss:4.074288 +step:1647 train loss:3.977533 +step:1648 train loss:4.069284 +step:1649 train loss:4.029966 +step:1650 train loss:4.041034 +step:1651 train loss:4.069991 +step:1652 train loss:4.071899 +step:1653 train loss:4.071591 +step:1654 train loss:4.062079 +step:1655 train loss:4.035502 +step:1656 train loss:4.040836 +step:1657 train loss:4.031298 +step:1658 train loss:4.005702 +step:1659 train loss:4.073580 +step:1660 train loss:3.972964 +step:1661 train loss:4.080354 +step:1662 train loss:4.014151 +step:1663 train loss:4.007069 +step:1664 train loss:4.098707 +step:1665 train loss:4.017978 +step:1666 train loss:4.030217 +step:1667 train loss:4.046570 +step:1668 train loss:4.019255 +step:1669 train loss:3.980467 +step:1670 train loss:4.031013 +step:1671 train loss:4.030360 +step:1672 train loss:4.026297 +step:1673 train loss:3.983510 +step:1674 train loss:3.980090 +step:1675 train loss:4.020875 +step:1676 train loss:4.283796 +step:1677 train loss:4.052814 +step:1678 train loss:3.995098 +step:1679 train loss:4.128189 +step:1680 train loss:4.034140 +step:1681 train loss:4.100582 +step:1682 train loss:4.052238 +step:1683 train loss:4.041137 +step:1684 train loss:3.990767 +step:1685 train loss:4.041958 +step:1686 train loss:4.023780 +step:1687 train loss:4.035295 +step:1688 train loss:4.014029 +step:1689 train loss:4.006627 +step:1690 train loss:4.035077 +step:1691 train loss:4.017541 +step:1692 train loss:4.029153 +step:1693 train loss:4.001168 +step:1694 train loss:3.953840 +step:1695 train loss:3.976603 +step:1696 train loss:3.983799 +step:1697 train loss:4.027396 +step:1698 train loss:4.019599 +step:1699 train loss:3.977219 +step:1700 train loss:4.054038 +step:1701 train loss:3.991709 +step:1702 train loss:3.982209 +step:1703 train loss:4.006241 +step:1704 train loss:4.013890 +step:1705 train loss:4.021828 +step:1706 train loss:4.031888 +step:1707 train loss:4.031976 +step:1708 train loss:3.952468 +step:1709 train loss:4.054320 +step:1710 train loss:3.968532 +step:1711 train loss:3.970431 +step:1712 train loss:4.000122 +step:1713 train loss:3.962319 +step:1714 train loss:4.329069 
+step:1715 train loss:3.984735 +step:1716 train loss:3.967726 +step:1717 train loss:3.971885 +step:1718 train loss:4.046656 +step:1719 train loss:3.964456 +step:1720 train loss:4.043386 +step:1721 train loss:3.981786 +step:1722 train loss:3.956148 +step:1723 train loss:4.048706 +step:1724 train loss:4.006371 +step:1725 train loss:3.999286 +step:1726 train loss:4.004179 +step:1727 train loss:4.034890 +step:1728 train loss:4.041862 +step:1729 train loss:3.967154 +step:1730 train loss:4.036214 +step:1731 train loss:3.967297 +step:1732 train loss:3.979533 +step:1733 train loss:3.979893 +step:1734 train loss:4.023842 +step:1735 train loss:4.087841 +step:1736 train loss:3.990815 +step:1737 train loss:4.020705 +step:1738 train loss:3.981673 +step:1739 train loss:4.043328 +step:1740 train loss:4.038643 +step:1741 train loss:4.085979 +step:1742 train loss:4.076743 +step:1743 train loss:3.968059 +step:1744 train loss:3.980729 +step:1745 train loss:3.967410 +step:1746 train loss:3.951122 +step:1747 train loss:3.988204 +step:1748 train loss:3.937064 +step:1749 train loss:3.972117 +step:1750 validation loss:3.949267 +step:1750 train loss:4.007934 +step:1751 train loss:4.024390 +step:1752 train loss:3.985207 +step:1753 train loss:4.012458 +step:1754 train loss:4.006856 +step:1755 train loss:4.001143 +step:1756 train loss:4.023479 +step:1757 train loss:4.024396 +step:1758 train loss:3.945619 +step:1759 train loss:4.034910 +step:1760 train loss:3.987398 +step:1761 train loss:3.966775 +step:1762 train loss:3.964965 +step:1763 train loss:3.965334 +step:1764 train loss:4.254941 +step:1765 train loss:3.975504 +step:1766 train loss:4.059507 +step:1767 train loss:3.973040 +step:1768 train loss:3.951128 +step:1769 train loss:3.980572 +step:1770 train loss:3.987537 +step:1771 train loss:3.959274 +step:1772 train loss:4.066030 +step:1773 train loss:3.989529 +step:1774 train loss:3.995458 +step:1775 train loss:4.111488 +step:1776 train loss:3.992991 +step:1777 train loss:3.984040 +step:1778 train loss:4.039943 +step:1779 train loss:3.977308 +step:1780 train loss:4.028287 +step:1781 train loss:4.033891 +step:1782 train loss:4.057897 +step:1783 train loss:3.984666 +step:1784 train loss:4.074576 +step:1785 train loss:3.978001 +step:1786 train loss:3.976642 +step:1787 train loss:3.976516 +step:1788 train loss:3.998223 +step:1789 train loss:3.951016 +step:1790 train loss:3.963284 +step:1791 train loss:4.041035 +step:1792 train loss:4.039665 +step:1793 train loss:3.961379 +step:1794 train loss:4.004073 +step:1795 train loss:3.954325 +step:1796 train loss:3.935039 +step:1797 train loss:4.000111 +step:1798 train loss:3.942966 +step:1799 train loss:3.993257 +step:1800 train loss:4.018577 +step:1801 train loss:4.010605 +step:1802 train loss:4.018770 +step:1803 train loss:4.009838 +step:1804 train loss:4.006991 +step:1805 train loss:3.994221 +step:1806 train loss:4.005055 +step:1807 train loss:3.938362 +step:1808 train loss:4.000458 +step:1809 train loss:3.983860 +step:1810 train loss:3.979986 +step:1811 train loss:3.999830 +step:1812 train loss:3.977419 +step:1813 train loss:3.992179 +step:1814 train loss:4.050582 +step:1815 train loss:3.993051 +step:1816 train loss:3.947945 +step:1817 train loss:3.939108 +step:1818 train loss:3.995799 +step:1819 train loss:3.965836 +step:1820 train loss:3.998938 +step:1821 train loss:3.966475 +step:1822 train loss:3.942493 +step:1823 train loss:3.939570 +step:1824 train loss:4.013092 +step:1825 train loss:3.927343 +step:1826 train loss:3.974064 +step:1827 train loss:3.935552 +step:1828 
train loss:3.986354 +step:1829 train loss:3.947715 +step:1830 train loss:4.145007 +step:1831 train loss:3.905872 +step:1832 train loss:3.951142 +step:1833 train loss:4.010003 +step:1834 train loss:3.953130 +step:1835 train loss:3.969249 +step:1836 train loss:4.010250 +step:1837 train loss:3.934286 +step:1838 train loss:4.027896 +step:1839 train loss:4.005318 +step:1840 train loss:3.974692 +step:1841 train loss:3.999835 +step:1842 train loss:3.968253 +step:1843 train loss:3.919043 +step:1844 train loss:3.992170 +step:1845 train loss:3.974782 +step:1846 train loss:4.138668 +step:1847 train loss:4.203512 +step:1848 train loss:4.423929 +step:1849 train loss:4.166107 +step:1850 train loss:4.136028 +step:1851 train loss:4.305248 +step:1852 train loss:4.268807 +step:1853 train loss:4.243567 +step:1854 train loss:4.155213 +step:1855 train loss:4.116452 +step:1856 train loss:4.103361 +step:1857 train loss:4.090870 +step:1858 train loss:4.114796 +step:1859 train loss:4.062682 +step:1860 train loss:4.021874 +step:1861 train loss:4.031578 +step:1862 train loss:4.069813 +step:1863 train loss:4.100262 +step:1864 train loss:3.988691 +step:1865 train loss:4.008039 +step:1866 train loss:4.008910 +step:1867 train loss:4.030436 +step:1868 train loss:4.076350 +step:1869 train loss:3.990479 +step:1870 train loss:4.017533 +step:1871 train loss:3.950395 +step:1872 train loss:4.019347 +step:1873 train loss:4.076800 +step:1874 train loss:3.938265 +step:1875 train loss:4.014249 +step:1876 train loss:3.973187 +step:1877 train loss:4.013943 +step:1878 train loss:3.934035 +step:1879 train loss:3.990803 +step:1880 train loss:4.065726 +step:1881 train loss:3.990242 +step:1882 train loss:4.008020 +step:1883 train loss:4.031108 +step:1884 train loss:4.037294 +step:1885 train loss:3.994539 +step:1886 train loss:3.925444 +step:1887 train loss:3.941763 +step:1888 train loss:3.943783 +step:1889 train loss:3.961877 +step:1890 train loss:3.958935 +step:1891 train loss:3.898355 +step:1892 train loss:3.987867 +step:1893 train loss:3.909405 +step:1894 train loss:3.931260 +step:1895 train loss:3.967721 +step:1896 train loss:4.013848 +step:1897 train loss:3.910730 +step:1898 train loss:3.954374 +step:1899 train loss:3.969559 +step:1900 train loss:3.921214 +step:1901 train loss:3.996516 +step:1902 train loss:3.988762 +step:1903 train loss:3.927596 +step:1904 train loss:3.920697 +step:1905 train loss:3.922067 +step:1906 train loss:3.975475 +step:1907 train loss:3.918310 +step:1908 train loss:3.936850 +step:1909 train loss:4.031275 +step:1910 train loss:3.918499 +step:1911 train loss:3.924846 +step:1912 train loss:3.977305 +step:1913 train loss:3.917895 +step:1914 train loss:3.952134 +step:1915 train loss:3.915465 +step:1916 train loss:3.966975 +step:1917 train loss:3.947091 +step:1918 train loss:3.859437 +step:1919 train loss:4.012037 +step:1920 train loss:4.115913 +step:1921 train loss:3.900189 +step:1922 train loss:3.891936 +step:1923 train loss:3.987998 +step:1924 train loss:4.027495 +step:1925 train loss:3.978364 +step:1926 train loss:3.909628 +step:1927 train loss:3.990911 +step:1928 train loss:3.906761 +step:1929 train loss:3.928228 +step:1930 train loss:3.996176 +step:1931 train loss:3.911191 +step:1932 train loss:3.959133 +step:1933 train loss:3.954276 +step:1934 train loss:4.025069 +step:1935 train loss:3.980131 +step:1936 train loss:3.948925 +step:1937 train loss:3.886613 +step:1938 train loss:4.243265 +step:1939 train loss:3.999647 +step:1940 train loss:3.984950 +step:1941 train loss:3.982138 +step:1942 train loss:3.979921 
+step:1943 train loss:3.972865 +step:1944 train loss:3.933481 +step:1945 train loss:3.930254 +step:1946 train loss:3.950768 +step:1947 train loss:3.972503 +step:1948 train loss:3.877138 +step:1949 train loss:3.990311 +step:1950 train loss:3.927809 +step:1951 train loss:3.947196 +step:1952 train loss:3.974643 +step:1953 train loss:3.908580 +step:1954 train loss:3.944495 +step:1955 train loss:3.899493 +step:1956 train loss:3.984646 +step:1957 train loss:4.007029 +step:1958 train loss:4.025764 +step:1959 train loss:3.895605 +step:1960 train loss:3.939048 +step:1961 train loss:3.968508 +step:1962 train loss:3.956545 +step:1963 train loss:3.936715 +step:1964 train loss:3.972136 +step:1965 train loss:4.006664 +step:1966 train loss:3.916304 +step:1967 train loss:3.971280 +step:1968 train loss:3.911654 +step:1969 train loss:3.931235 +step:1970 train loss:3.989790 +step:1971 train loss:3.891421 +step:1972 train loss:3.996310 +step:1973 train loss:3.902758 +step:1974 train loss:3.951046 +step:1975 train loss:3.903964 +step:1976 train loss:3.926806 +step:1977 train loss:3.969125 +step:1978 train loss:3.911161 +step:1979 train loss:3.889747 +step:1980 train loss:3.930643 +step:1981 train loss:3.908043 +step:1982 train loss:3.989532 +step:1983 train loss:3.932956 +step:1984 train loss:3.973930 +step:1985 train loss:3.960713 +step:1986 train loss:3.955145 +step:1987 train loss:3.912774 +step:1988 train loss:3.937284 +step:1989 train loss:4.073982 +step:1990 train loss:3.914516 +step:1991 train loss:3.905485 +step:1992 train loss:3.915677 +step:1993 train loss:3.944596 +step:1994 train loss:3.937809 +step:1995 train loss:3.890367 +step:1996 train loss:3.943427 +step:1997 train loss:3.948589 +step:1998 train loss:3.900002 +step:1999 train loss:4.013748 +step:2000 validation loss:3.885110 total_sharp:6.1164e-04 L1_sharp:4.8154e-04 L2_sharp:4.0484e-05 L3_sharp:9.3821e-05 L4_sharp:1.3862e-04 L5_sharp:1.6509e-04 L6_sharp:5.6371e-05 L7_sharp:6.2795e-05 L8_sharp:1.2219e-04 L9_sharp:2.3103e-04 L10_sharp:2.3030e-04 L11_sharp:1.9119e-04 L12_sharp:4.6242e-04 total_fnorm:1.0263e+01 total_l1_linf:8.9781e+04 total_spectral:1.0263e+01 L1_fnorm:2.2863e+00 L2_fnorm:2.2469e+00 L3_fnorm:2.1304e+00 L4_fnorm:2.1723e+00 L5_fnorm:2.2258e+00 L6_fnorm:2.3674e+00 L7_fnorm:2.3968e+00 L8_fnorm:2.3490e+00 L9_fnorm:2.3520e+00 L10_fnorm:2.3331e+00 L11_fnorm:2.3600e+00 L12_fnorm:2.2995e+00 L1_l1linf:2.7187e+00 L2_l1linf:2.5188e+00 L3_l1linf:2.4384e+00 L4_l1linf:2.4420e+00 L5_l1linf:2.2867e+00 L6_l1linf:2.3970e+00 L7_l1linf:2.4738e+00 L8_l1linf:2.5833e+00 L9_l1linf:2.7337e+00 L10_l1linf:2.7761e+00 L11_l1linf:2.8013e+00 L12_l1linf:2.9414e+00 L1_spectral:3.1521e-01 L2_spectral:2.5203e-01 L3_spectral:2.3232e-01 L4_spectral:3.2737e-01 L5_spectral:3.2535e-01 L6_spectral:2.8059e-01 L7_spectral:2.7878e-01 L8_spectral:3.2996e-01 L9_spectral:4.0986e-01 L10_spectral:3.7589e-01 L11_spectral:3.8417e-01 L12_spectral:3.9214e-01 ip_v_neg_g:4.0696e-02 cos_v_neg_g:7.2386e-03 v_norm:1.0263e+01 g_norm:5.4778e-01 hv_norm:3.7627e-01 cos_v_hv:1.6683e-02 hg_norm:5.0012e+00 cos_g_hg:5.3089e-01 v_par:2.9963e-03 v_perp:1.0263e+01 L1_cos_v_neg_g:2.6619e-02 L1_v_norm:2.2863e+00 L2_cos_v_neg_g:8.0688e-03 L2_v_norm:2.2469e+00 L3_cos_v_neg_g:1.1313e-02 L3_v_norm:2.1304e+00 L4_cos_v_neg_g:1.3805e-02 L4_v_norm:2.1723e+00 L5_cos_v_neg_g:1.2961e-02 L5_v_norm:2.2258e+00 L6_cos_v_neg_g:1.0703e-02 L6_v_norm:2.3674e+00 L7_cos_v_neg_g:9.1742e-03 L7_v_norm:2.3968e+00 L8_cos_v_neg_g:1.3434e-02 L8_v_norm:2.3490e+00 L9_cos_v_neg_g:1.8103e-02 L9_v_norm:2.3520e+00 
L10_cos_v_neg_g:2.0113e-02 L10_v_norm:2.3331e+00 L11_cos_v_neg_g:1.8566e-02 L11_v_norm:2.3600e+00 L12_cos_v_neg_g:2.1296e-02 L12_v_norm:2.2995e+00 +step:2000 train loss:3.983163 +step:2001 train loss:3.910469 +step:2002 train loss:4.005392 +step:2003 train loss:4.048542 +step:2004 train loss:3.918876 +step:2005 train loss:4.017233 +step:2006 train loss:3.904878 +step:2007 train loss:3.978321 +step:2008 train loss:3.920320 +step:2009 train loss:3.922214 +step:2010 train loss:4.049455 +step:2011 train loss:3.902888 +step:2012 train loss:3.928052 +step:2013 train loss:3.942199 +step:2014 train loss:3.840565 +step:2015 train loss:3.967725 +step:2016 train loss:3.959681 +step:2017 train loss:3.958193 +step:2018 train loss:3.927855 +step:2019 train loss:3.953490 +step:2020 train loss:3.962245 +step:2021 train loss:3.923715 +step:2022 train loss:3.964420 +step:2023 train loss:3.938857 +step:2024 train loss:3.987953 +step:2025 train loss:3.930233 +step:2026 train loss:3.908112 +step:2027 train loss:3.937261 +step:2028 train loss:3.867118 +step:2029 train loss:3.898096 +step:2030 train loss:3.900126 +step:2031 train loss:3.860677 +step:2032 train loss:3.913356 +step:2033 train loss:3.910500 +step:2034 train loss:3.906163 +step:2035 train loss:3.948362 +step:2036 train loss:3.939660 +step:2037 train loss:3.925553 +step:2038 train loss:3.919750 +step:2039 train loss:3.910008 +step:2040 train loss:3.938989 +step:2041 train loss:3.941837 +step:2042 train loss:3.870906 +step:2043 train loss:4.026331 +step:2044 train loss:3.894005 +step:2045 train loss:3.919425 +step:2046 train loss:3.929452 +step:2047 train loss:3.906337 +step:2048 train loss:3.944729 +step:2049 train loss:3.903572 +step:2050 train loss:3.921130 +step:2051 train loss:3.889854 +step:2052 train loss:3.936981 +step:2053 train loss:3.944919 +step:2054 train loss:3.905065 +step:2055 train loss:3.911960 +step:2056 train loss:3.955130 +step:2057 train loss:3.960322 +step:2058 train loss:3.926836 +step:2059 train loss:4.005567 +step:2060 train loss:3.956699 +step:2061 train loss:3.907875 +step:2062 train loss:3.932294 +step:2063 train loss:3.838110 +step:2064 train loss:3.952462 +step:2065 train loss:3.958606 +step:2066 train loss:3.816348 +step:2067 train loss:3.865204 +step:2068 train loss:3.971755 +step:2069 train loss:3.903955 +step:2070 train loss:3.907353 +step:2071 train loss:3.949431 +step:2072 train loss:3.880309 +step:2073 train loss:3.937963 +step:2074 train loss:3.915730 +step:2075 train loss:4.000967 +step:2076 train loss:3.943795 +step:2077 train loss:3.958546 +step:2078 train loss:3.911767 +step:2079 train loss:4.058452 +step:2080 train loss:3.876837 +step:2081 train loss:3.984339 +step:2082 train loss:3.917056 +step:2083 train loss:3.903956 +step:2084 train loss:3.880714 +step:2085 train loss:3.925643 +step:2086 train loss:3.938843 +step:2087 train loss:3.979524 +step:2088 train loss:3.851230 +step:2089 train loss:3.878008 +step:2090 train loss:3.911141 +step:2091 train loss:3.931057 +step:2092 train loss:3.907890 +step:2093 train loss:3.895244 +step:2094 train loss:3.930933 +step:2095 train loss:3.883549 +step:2096 train loss:3.865611 +step:2097 train loss:3.902451 +step:2098 train loss:3.903737 +step:2099 train loss:3.880909 +step:2100 train loss:3.954283 +step:2101 train loss:3.944571 +step:2102 train loss:3.917723 +step:2103 train loss:3.929169 +step:2104 train loss:3.901368 +step:2105 train loss:3.907619 +step:2106 train loss:3.902862 +step:2107 train loss:3.968801 +step:2108 train loss:3.890173 +step:2109 train 
loss:3.849000 +step:2110 train loss:3.953091 +step:2111 train loss:3.898249 +step:2112 train loss:3.956338 +step:2113 train loss:3.893923 +step:2114 train loss:3.899023 +step:2115 train loss:3.949047 +step:2116 train loss:3.886820 +step:2117 train loss:3.901372 +step:2118 train loss:3.898296 +step:2119 train loss:3.847317 +step:2120 train loss:3.931160 +step:2121 train loss:3.916452 +step:2122 train loss:3.925356 +step:2123 train loss:3.978937 +step:2124 train loss:3.981726 +step:2125 train loss:3.884295 +step:2126 train loss:3.900166 +step:2127 train loss:3.887868 +step:2128 train loss:3.884725 +step:2129 train loss:3.905457 +step:2130 train loss:3.909588 +step:2131 train loss:3.933756 +step:2132 train loss:3.856933 +step:2133 train loss:3.963164 +step:2134 train loss:3.918219 +step:2135 train loss:3.871566 +step:2136 train loss:3.961930 +step:2137 train loss:3.927089 +step:2138 train loss:3.883654 +step:2139 train loss:3.886422 +step:2140 train loss:3.888169 +step:2141 train loss:3.935685 +step:2142 train loss:3.909943 +step:2143 train loss:3.828929 +step:2144 train loss:3.935688 +step:2145 train loss:3.904019 +step:2146 train loss:3.939986 +step:2147 train loss:4.042250 +step:2148 train loss:3.849199 +step:2149 train loss:3.858989 +step:2150 train loss:3.885212 +step:2151 train loss:3.918440 +step:2152 train loss:3.909181 +step:2153 train loss:3.947869 +step:2154 train loss:3.868667 +step:2155 train loss:3.948755 +step:2156 train loss:3.870925 +step:2157 train loss:3.947098 +step:2158 train loss:3.986096 +step:2159 train loss:3.910841 +step:2160 train loss:3.986543 +step:2161 train loss:3.883604 +step:2162 train loss:3.892881 +step:2163 train loss:3.868035 +step:2164 train loss:3.892569 +step:2165 train loss:3.871861 +step:2166 train loss:3.991918 +step:2167 train loss:3.906377 +step:2168 train loss:3.913066 +step:2169 train loss:3.863795 +step:2170 train loss:4.006586 +step:2171 train loss:3.964049 +step:2172 train loss:3.903282 +step:2173 train loss:3.888104 +step:2174 train loss:3.953094 +step:2175 train loss:3.885158 +step:2176 train loss:3.962935 +step:2177 train loss:3.931084 +step:2178 train loss:3.859718 +step:2179 train loss:3.924781 +step:2180 train loss:3.943693 +step:2181 train loss:3.869011 +step:2182 train loss:3.922698 +step:2183 train loss:3.916070 +step:2184 train loss:3.867142 +step:2185 train loss:3.848060 +step:2186 train loss:3.886123 +step:2187 train loss:3.895598 +step:2188 train loss:3.951858 +step:2189 train loss:3.836347 +step:2190 train loss:3.882972 +step:2191 train loss:3.941239 +step:2192 train loss:3.869240 +step:2193 train loss:3.836179 +step:2194 train loss:3.848577 +step:2195 train loss:3.867321 +step:2196 train loss:3.870290 +step:2197 train loss:3.856145 +step:2198 train loss:3.878059 +step:2199 train loss:3.950250 +step:2200 train loss:3.882418 +step:2201 train loss:3.887613 +step:2202 train loss:3.849186 +step:2203 train loss:3.871421 +step:2204 train loss:3.902557 +step:2205 train loss:3.883247 +step:2206 train loss:3.880738 +step:2207 train loss:3.880681 +step:2208 train loss:3.858095 +step:2209 train loss:4.137335 +step:2210 train loss:3.912942 +step:2211 train loss:3.915115 +step:2212 train loss:3.900001 +step:2213 train loss:3.973636 +step:2214 train loss:3.971481 +step:2215 train loss:3.897012 +step:2216 train loss:3.859922 +step:2217 train loss:3.884313 +step:2218 train loss:3.879375 +step:2219 train loss:3.915380 +step:2220 train loss:3.854929 +step:2221 train loss:3.883731 +step:2222 train loss:3.898497 +step:2223 train loss:3.938369 
+step:2224 train loss:3.913729 +step:2225 train loss:3.849534 +step:2226 train loss:3.915322 +step:2227 train loss:3.915313 +step:2228 train loss:3.915191 +step:2229 train loss:3.856983 +step:2230 train loss:3.979579 +step:2231 train loss:3.895657 +step:2232 train loss:3.895197 +step:2233 train loss:3.938101 +step:2234 train loss:3.833851 +step:2235 train loss:3.921546 +step:2236 train loss:3.862743 +step:2237 train loss:4.000147 +step:2238 train loss:3.803805 +step:2239 train loss:3.880945 +step:2240 train loss:3.893957 +step:2241 train loss:3.806812 +step:2242 train loss:3.947300 +step:2243 train loss:3.987518 +step:2244 train loss:3.859675 +step:2245 train loss:3.864133 +step:2246 train loss:3.830007 +step:2247 train loss:3.833909 +step:2248 train loss:3.889004 +step:2249 train loss:3.870273 +step:2250 validation loss:3.821701 +step:2250 train loss:3.880835 +step:2251 train loss:3.846739 +step:2252 train loss:3.845106 +step:2253 train loss:3.873300 +step:2254 train loss:3.875234 +step:2255 train loss:3.836345 +step:2256 train loss:3.891205 +step:2257 train loss:3.876783 +step:2258 train loss:3.869330 +step:2259 train loss:3.883488 +step:2260 train loss:3.838057 +step:2261 train loss:3.916935 +step:2262 train loss:3.932864 +step:2263 train loss:3.891922 +step:2264 train loss:4.002973 +step:2265 train loss:3.851700 +step:2266 train loss:3.893848 +step:2267 train loss:3.857549 +step:2268 train loss:3.858154 +step:2269 train loss:3.864169 +step:2270 train loss:3.850121 +step:2271 train loss:3.866891 +step:2272 train loss:3.902360 +step:2273 train loss:3.826678 +step:2274 train loss:3.858321 +step:2275 train loss:3.812573 +step:2276 train loss:3.885732 +step:2277 train loss:3.900210 +step:2278 train loss:3.878865 +step:2279 train loss:3.862286 +step:2280 train loss:3.772779 +step:2281 train loss:3.920666 +step:2282 train loss:3.850478 +step:2283 train loss:3.838970 +step:2284 train loss:3.853098 +step:2285 train loss:3.903960 +step:2286 train loss:3.865268 +step:2287 train loss:3.900416 +step:2288 train loss:3.871262 +step:2289 train loss:3.870959 +step:2290 train loss:3.875134 +step:2291 train loss:3.863094 +step:2292 train loss:3.906087 +step:2293 train loss:3.881383 +step:2294 train loss:3.876646 +step:2295 train loss:3.934505 +step:2296 train loss:3.864554 +step:2297 train loss:3.838257 +step:2298 train loss:3.899873 +step:2299 train loss:3.876791 +step:2300 train loss:3.789075 +step:2301 train loss:3.889916 +step:2302 train loss:3.906060 +step:2303 train loss:3.872555 +step:2304 train loss:3.857260 +step:2305 train loss:3.902623 +step:2306 train loss:3.893417 +step:2307 train loss:3.870168 +step:2308 train loss:3.892306 +step:2309 train loss:3.846551 +step:2310 train loss:3.836341 +step:2311 train loss:3.822297 +step:2312 train loss:3.891077 +step:2313 train loss:3.807509 +step:2314 train loss:3.880427 +step:2315 train loss:3.899010 +step:2316 train loss:3.934579 +step:2317 train loss:3.797288 +step:2318 train loss:3.843663 +step:2319 train loss:3.897244 +step:2320 train loss:3.863726 +step:2321 train loss:3.837909 +step:2322 train loss:3.853954 +step:2323 train loss:3.849656 +step:2324 train loss:3.879737 +step:2325 train loss:3.815296 +step:2326 train loss:3.845327 +step:2327 train loss:3.959860 +step:2328 train loss:3.909356 +step:2329 train loss:3.868381 +step:2330 train loss:3.823410 +step:2331 train loss:3.867128 +step:2332 train loss:3.791005 +step:2333 train loss:3.855382 +step:2334 train loss:3.829157 +step:2335 train loss:3.813421 +step:2336 train loss:4.065193 +step:2337 
train loss:3.846677 +step:2338 train loss:3.884676 +step:2339 train loss:3.887505 +step:2340 train loss:3.909451 +step:2341 train loss:3.889035 +step:2342 train loss:3.843086 +step:2343 train loss:3.862908 +step:2344 train loss:3.907062 +step:2345 train loss:3.857903 +step:2346 train loss:3.889133 +step:2347 train loss:3.812598 +step:2348 train loss:3.870253 +step:2349 train loss:3.822322 +step:2350 train loss:3.876692 +step:2351 train loss:3.884583 +step:2352 train loss:3.886329 +step:2353 train loss:3.849047 +step:2354 train loss:3.895430 +step:2355 train loss:3.888706 +step:2356 train loss:3.926905 +step:2357 train loss:3.840471 +step:2358 train loss:3.858561 +step:2359 train loss:3.877888 +step:2360 train loss:3.892750 +step:2361 train loss:3.928849 +step:2362 train loss:3.757867 +step:2363 train loss:3.948119 +step:2364 train loss:3.897440 +step:2365 train loss:3.865117 +step:2366 train loss:3.816093 +step:2367 train loss:3.882229 +step:2368 train loss:3.869285 +step:2369 train loss:3.857485 +step:2370 train loss:3.876470 +step:2371 train loss:3.927866 +step:2372 train loss:3.785006 +step:2373 train loss:3.924284 +step:2374 train loss:3.906564 +step:2375 train loss:3.891570 +step:2376 train loss:3.882491 +step:2377 train loss:3.828359 +step:2378 train loss:3.875424 +step:2379 train loss:3.857573 +step:2380 train loss:3.918078 +step:2381 train loss:4.011930 +step:2382 train loss:3.799998 +step:2383 train loss:3.846848 +step:2384 train loss:3.876374 +step:2385 train loss:3.779534 +step:2386 train loss:3.933632 +step:2387 train loss:3.814469 +step:2388 train loss:3.864319 +step:2389 train loss:3.883049 +step:2390 train loss:3.837846 +step:2391 train loss:3.858171 +step:2392 train loss:3.886610 +step:2393 train loss:3.840546 +step:2394 train loss:3.867671 +step:2395 train loss:3.855102 +step:2396 train loss:3.861603 +step:2397 train loss:3.837129 +step:2398 train loss:3.886678 +step:2399 train loss:3.850066 +step:2400 train loss:3.831108 +step:2401 train loss:3.869542 +step:2402 train loss:3.821882 +step:2403 train loss:3.875001 +step:2404 train loss:3.831956 +step:2405 train loss:3.831863 +step:2406 train loss:3.861474 +step:2407 train loss:3.807258 +step:2408 train loss:3.848150 +step:2409 train loss:3.839363 +step:2410 train loss:3.838909 +step:2411 train loss:3.918342 +step:2412 train loss:3.904124 +step:2413 train loss:3.943496 +step:2414 train loss:3.833008 +step:2415 train loss:3.823232 +step:2416 train loss:3.837474 +step:2417 train loss:3.873906 +step:2418 train loss:3.893045 +step:2419 train loss:3.820247 +step:2420 train loss:3.840871 +step:2421 train loss:3.866366 +step:2422 train loss:3.914030 +step:2423 train loss:3.854386 +step:2424 train loss:3.821788 +step:2425 train loss:3.883618 +step:2426 train loss:3.824490 +step:2427 train loss:3.850707 +step:2428 train loss:3.925910 +step:2429 train loss:3.881737 +step:2430 train loss:3.975343 +step:2431 train loss:3.885815 +step:2432 train loss:3.855753 +step:2433 train loss:3.831645 +step:2434 train loss:3.817350 +step:2435 train loss:3.874917 +step:2436 train loss:3.831077 +step:2437 train loss:3.862182 +step:2438 train loss:3.906268 +step:2439 train loss:3.887770 +step:2440 train loss:3.832197 +step:2441 train loss:3.868451 +step:2442 train loss:3.858089 +step:2443 train loss:3.821612 +step:2444 train loss:3.858377 +step:2445 train loss:3.854965 +step:2446 train loss:3.824991 +step:2447 train loss:3.805680 +step:2448 train loss:3.853767 +step:2449 train loss:3.883699 +step:2450 train loss:3.845472 +step:2451 train loss:3.768718 
+step:2452 train loss:3.874494 +step:2453 train loss:3.841467 +step:2454 train loss:3.838046 +step:2455 train loss:3.888837 +step:2456 train loss:3.840476 +step:2457 train loss:3.899112 +step:2458 train loss:3.875913 +step:2459 train loss:3.851642 +step:2460 train loss:3.856836 +step:2461 train loss:3.892556 +step:2462 train loss:3.862103 +step:2463 train loss:3.839676 +step:2464 train loss:3.855373 +step:2465 train loss:3.930475 +step:2466 train loss:4.018417 +step:2467 train loss:3.923363 +step:2468 train loss:3.816880 +step:2469 train loss:3.886297 +step:2470 train loss:3.932698 +step:2471 train loss:3.937520 +step:2472 train loss:3.915728 +step:2473 train loss:3.854171 +step:2474 train loss:3.816123 +step:2475 train loss:3.870278 +step:2476 train loss:3.940803 +step:2477 train loss:3.866453 +step:2478 train loss:3.822427 +step:2479 train loss:3.862790 +step:2480 train loss:3.849522 +step:2481 train loss:4.042915 +step:2482 train loss:3.851701 +step:2483 train loss:3.886619 +step:2484 train loss:3.830456 +step:2485 train loss:3.816068 +step:2486 train loss:3.853629 +step:2487 train loss:3.886874 +step:2488 train loss:3.799522 +step:2489 train loss:3.902676 +step:2490 train loss:3.827698 +step:2491 train loss:3.838624 +step:2492 train loss:3.879292 +step:2493 train loss:3.917149 +step:2494 train loss:3.835397 +step:2495 train loss:3.869632 +step:2496 train loss:3.844893 +step:2497 train loss:3.861151 +step:2498 train loss:3.866576 +step:2499 train loss:3.860824 +step:2500 validation loss:3.782071 total_sharp:4.7371e-04 L1_sharp:2.5953e-04 L2_sharp:1.9191e-05 L3_sharp:1.1153e-04 L4_sharp:7.8768e-05 L5_sharp:6.0365e-05 L6_sharp:3.3260e-05 L7_sharp:6.2388e-05 L8_sharp:8.6666e-05 L9_sharp:1.4607e-04 L10_sharp:1.7432e-04 L11_sharp:1.6376e-04 L12_sharp:3.6752e-04 total_fnorm:1.0383e+01 total_l1_linf:9.1098e+04 total_spectral:1.0383e+01 L1_fnorm:2.3514e+00 L2_fnorm:2.3750e+00 L3_fnorm:2.2701e+00 L4_fnorm:2.2803e+00 L5_fnorm:2.2745e+00 L6_fnorm:2.4040e+00 L7_fnorm:2.4350e+00 L8_fnorm:2.3774e+00 L9_fnorm:2.3836e+00 L10_fnorm:2.3707e+00 L11_fnorm:2.3876e+00 L12_fnorm:2.2964e+00 L1_l1linf:2.5545e+00 L2_l1linf:2.5332e+00 L3_l1linf:2.4809e+00 L4_l1linf:2.5306e+00 L5_l1linf:2.3332e+00 L6_l1linf:2.4573e+00 L7_l1linf:2.4897e+00 L8_l1linf:2.4633e+00 L9_l1linf:2.5369e+00 L10_l1linf:2.6307e+00 L11_l1linf:2.7351e+00 L12_l1linf:2.8546e+00 L1_spectral:2.9653e-01 L2_spectral:2.4210e-01 L3_spectral:2.4574e-01 L4_spectral:3.1039e-01 L5_spectral:3.1630e-01 L6_spectral:2.9154e-01 L7_spectral:2.7442e-01 L8_spectral:2.9271e-01 L9_spectral:3.5169e-01 L10_spectral:3.7819e-01 L11_spectral:3.6726e-01 L12_spectral:3.8703e-01 ip_v_neg_g:2.8316e-02 cos_v_neg_g:5.7423e-03 v_norm:1.0383e+01 g_norm:4.7491e-01 hv_norm:3.2750e-01 cos_v_hv:1.5018e-02 hg_norm:3.3020e+00 cos_g_hg:5.0669e-01 v_par:3.5775e-03 v_perp:1.0383e+01 L1_cos_v_neg_g:2.3652e-02 L1_v_norm:2.3514e+00 L2_cos_v_neg_g:5.1536e-03 L2_v_norm:2.3750e+00 L3_cos_v_neg_g:6.2617e-03 L3_v_norm:2.2701e+00 L4_cos_v_neg_g:8.9317e-03 L4_v_norm:2.2803e+00 L5_cos_v_neg_g:7.6816e-03 L5_v_norm:2.2745e+00 L6_cos_v_neg_g:7.2671e-03 L6_v_norm:2.4040e+00 L7_cos_v_neg_g:9.1556e-03 L7_v_norm:2.4350e+00 L8_cos_v_neg_g:1.1639e-02 L8_v_norm:2.3774e+00 L9_cos_v_neg_g:1.2576e-02 L9_v_norm:2.3836e+00 L10_cos_v_neg_g:1.5553e-02 L10_v_norm:2.3707e+00 L11_cos_v_neg_g:1.6319e-02 L11_v_norm:2.3876e+00 L12_cos_v_neg_g:1.7731e-02 L12_v_norm:2.2964e+00 +step:2500 train loss:3.808276 +step:2501 train loss:3.871275 +step:2502 train loss:3.866103 +step:2503 train loss:3.794124 +step:2504 train 
loss:3.829139 +step:2505 train loss:3.854254 +step:2506 train loss:3.814780 +step:2507 train loss:3.848641 +step:2508 train loss:3.791369 +step:2509 train loss:3.817529 +step:2510 train loss:3.809401 +step:2511 train loss:3.850456 +step:2512 train loss:3.898570 +step:2513 train loss:3.847224 +step:2514 train loss:3.831553 +step:2515 train loss:3.973598 +step:2516 train loss:3.861480 +step:2517 train loss:3.925774 +step:2518 train loss:3.890710 +step:2519 train loss:3.862251 +step:2520 train loss:3.869792 +step:2521 train loss:3.848620 +step:2522 train loss:3.888734 +step:2523 train loss:3.806853 +step:2524 train loss:3.860178 +step:2525 train loss:3.846559 +step:2526 train loss:3.896748 +step:2527 train loss:3.888580 +step:2528 train loss:3.869178 +step:2529 train loss:3.888627 +step:2530 train loss:3.869155 +step:2531 train loss:3.804719 +step:2532 train loss:3.902162 +step:2533 train loss:3.791595 +step:2534 train loss:3.889341 +step:2535 train loss:3.841996 +step:2536 train loss:3.763811 +step:2537 train loss:3.879276 +step:2538 train loss:3.858819 +step:2539 train loss:3.876454 +step:2540 train loss:3.812357 +step:2541 train loss:3.837692 +step:2542 train loss:3.846397 +step:2543 train loss:3.838233 +step:2544 train loss:3.824088 +step:2545 train loss:3.813517 +step:2546 train loss:3.778874 +step:2547 train loss:3.823528 +step:2548 train loss:3.845992 +step:2549 train loss:3.851070 +step:2550 train loss:3.979213 +step:2551 train loss:4.048695 +step:2552 train loss:3.788867 +step:2553 train loss:3.821141 +step:2554 train loss:3.965395 +step:2555 train loss:3.858644 +step:2556 train loss:3.782230 +step:2557 train loss:3.879262 +step:2558 train loss:3.866702 +step:2559 train loss:3.821161 +step:2560 train loss:3.807719 +step:2561 train loss:3.901129 +step:2562 train loss:3.855375 +step:2563 train loss:3.791761 +step:2564 train loss:3.863969 +step:2565 train loss:3.841799 +step:2566 train loss:3.819799 +step:2567 train loss:3.799588 +step:2568 train loss:3.856544 +step:2569 train loss:3.862515 +step:2570 train loss:3.813162 +step:2571 train loss:3.896966 +step:2572 train loss:3.857865 +step:2573 train loss:3.792721 +step:2574 train loss:3.842712 +step:2575 train loss:3.883802 +step:2576 train loss:3.835907 +step:2577 train loss:3.797457 +step:2578 train loss:3.839927 +step:2579 train loss:3.816142 +step:2580 train loss:3.788952 +step:2581 train loss:3.804419 +step:2582 train loss:3.809407 +step:2583 train loss:3.832156 +step:2584 train loss:3.845931 +step:2585 train loss:3.811309 +step:2586 train loss:3.834102 +step:2587 train loss:3.766075 +step:2588 train loss:3.798466 +step:2589 train loss:3.874913 +step:2590 train loss:3.797600 +step:2591 train loss:3.857211 +step:2592 train loss:3.908608 +step:2593 train loss:3.864681 +step:2594 train loss:3.827698 +step:2595 train loss:3.835812 +step:2596 train loss:3.876329 +step:2597 train loss:3.760995 +step:2598 train loss:3.909851 +step:2599 train loss:3.863548 +step:2600 train loss:3.891531 +step:2601 train loss:3.828150 +step:2602 train loss:3.860873 +step:2603 train loss:3.851788 +step:2604 train loss:3.774471 +step:2605 train loss:3.901627 +step:2606 train loss:3.851404 +step:2607 train loss:3.811810 +step:2608 train loss:3.786731 +step:2609 train loss:3.810349 +step:2610 train loss:3.834192 +step:2611 train loss:3.876059 +step:2612 train loss:3.837026 +step:2613 train loss:3.812543 +step:2614 train loss:3.799816 +step:2615 train loss:3.797146 +step:2616 train loss:3.874599 +step:2617 train loss:3.829353 +step:2618 train loss:3.794742 
+step:2619 train loss:3.813159 +step:2620 train loss:3.808882 +step:2621 train loss:3.820612 +step:2622 train loss:3.894292 +step:2623 train loss:3.764069 +step:2624 train loss:3.780602 +step:2625 train loss:3.851823 +step:2626 train loss:3.846581 +step:2627 train loss:3.826460 +step:2628 train loss:3.875477 +step:2629 train loss:3.827465 +step:2630 train loss:3.820549 +step:2631 train loss:3.848023 +step:2632 train loss:3.815187 +step:2633 train loss:3.801568 +step:2634 train loss:3.852201 +step:2635 train loss:3.845937 +step:2636 train loss:3.893115 +step:2637 train loss:3.849221 +step:2638 train loss:3.831096 +step:2639 train loss:3.883258 +step:2640 train loss:3.797142 +step:2641 train loss:3.850508 +step:2642 train loss:3.770053 +step:2643 train loss:3.768202 +step:2644 train loss:3.860451 +step:2645 train loss:3.795991 +step:2646 train loss:3.830671 +step:2647 train loss:3.846054 +step:2648 train loss:3.882046 +step:2649 train loss:3.793967 +step:2650 train loss:3.781918 +step:2651 train loss:3.822861 +step:2652 train loss:3.795026 +step:2653 train loss:3.864028 +step:2654 train loss:3.821760 +step:2655 train loss:3.810086 +step:2656 train loss:3.829749 +step:2657 train loss:3.856671 +step:2658 train loss:3.866565 +step:2659 train loss:3.844828 +step:2660 train loss:3.828554 +step:2661 train loss:3.875508 +step:2662 train loss:3.851000 +step:2663 train loss:3.825110 +step:2664 train loss:3.834356 +step:2665 train loss:3.784502 +step:2666 train loss:3.816310 +step:2667 train loss:3.822550 +step:2668 train loss:3.800011 +step:2669 train loss:3.813315 +step:2670 train loss:3.834507 +step:2671 train loss:3.810102 +step:2672 train loss:3.833542 +step:2673 train loss:3.768658 +step:2674 train loss:3.865475 +step:2675 train loss:3.831932 +step:2676 train loss:3.854517 +step:2677 train loss:3.833284 +step:2678 train loss:3.820709 +step:2679 train loss:3.802131 +step:2680 train loss:3.786618 +step:2681 train loss:3.757908 +step:2682 train loss:3.842961 +step:2683 train loss:3.818115 +step:2684 train loss:3.848665 +step:2685 train loss:3.762707 +step:2686 train loss:3.779454 +step:2687 train loss:3.858326 +step:2688 train loss:3.874059 +step:2689 train loss:3.778631 +step:2690 train loss:3.863955 +step:2691 train loss:3.836762 +step:2692 train loss:3.860117 +step:2693 train loss:3.915673 +step:2694 train loss:3.810577 +step:2695 train loss:3.829617 +step:2696 train loss:3.834243 +step:2697 train loss:3.823154 +step:2698 train loss:3.833488 +step:2699 train loss:3.850271 +step:2700 train loss:3.823271 +step:2701 train loss:3.890310 +step:2702 train loss:3.825535 +step:2703 train loss:3.784459 +step:2704 train loss:3.861170 +step:2705 train loss:3.855676 +step:2706 train loss:3.799657 +step:2707 train loss:3.775459 +step:2708 train loss:3.870341 +step:2709 train loss:3.846780 +step:2710 train loss:3.855078 +step:2711 train loss:3.812593 +step:2712 train loss:3.873579 +step:2713 train loss:3.873629 +step:2714 train loss:3.814974 +step:2715 train loss:3.807006 +step:2716 train loss:3.873100 +step:2717 train loss:3.841382 +step:2718 train loss:3.834263 +step:2719 train loss:3.830958 +step:2720 train loss:3.795973 +step:2721 train loss:3.874797 +step:2722 train loss:3.804281 +step:2723 train loss:3.790244 +step:2724 train loss:3.813837 +step:2725 train loss:3.814201 +step:2726 train loss:3.785171 +step:2727 train loss:3.841007 +step:2728 train loss:3.781731 +step:2729 train loss:3.912175 +step:2730 train loss:3.852864 +step:2731 train loss:3.890251 +step:2732 train loss:3.805106 +step:2733 train 
loss:3.800770 +step:2734 train loss:3.851849 +step:2735 train loss:3.848112 +step:2736 train loss:3.770350 +step:2737 train loss:3.823469 +step:2738 train loss:3.879654 +step:2739 train loss:3.800064 +step:2740 train loss:3.798372 +step:2741 train loss:3.786099 +step:2742 train loss:3.711983 +step:2743 train loss:3.827190 +step:2744 train loss:3.853447 +step:2745 train loss:3.804021 +step:2746 train loss:3.815181 +step:2747 train loss:3.804458 +step:2748 train loss:3.759015 +step:2749 train loss:3.825663 +step:2750 validation loss:3.750137 +step:2750 train loss:3.833769 +step:2751 train loss:3.857234 +step:2752 train loss:3.844062 +step:2753 train loss:3.831442 +step:2754 train loss:3.770384 +step:2755 train loss:3.840664 +step:2756 train loss:3.811466 +step:2757 train loss:3.798973 +step:2758 train loss:3.825792 +step:2759 train loss:3.835873 +step:2760 train loss:3.746490 +step:2761 train loss:3.761943 +step:2762 train loss:3.778506 +step:2763 train loss:3.808249 +step:2764 train loss:3.746316 +step:2765 train loss:3.793443 +step:2766 train loss:3.885529 +step:2767 train loss:3.755345 +step:2768 train loss:3.817379 +step:2769 train loss:3.791888 +step:2770 train loss:3.812724 +step:2771 train loss:3.836361 +step:2772 train loss:3.798276 +step:2773 train loss:3.799197 +step:2774 train loss:3.792509 +step:2775 train loss:3.809544 +step:2776 train loss:3.760728 +step:2777 train loss:3.796201 +step:2778 train loss:3.804051 +step:2779 train loss:3.830355 +step:2780 train loss:3.799750 +step:2781 train loss:3.790290 +step:2782 train loss:3.775942 +step:2783 train loss:3.809198 +step:2784 train loss:3.814560 +step:2785 train loss:3.885319 +step:2786 train loss:3.853516 +step:2787 train loss:3.811393 +step:2788 train loss:3.805226 +step:2789 train loss:3.800486 +step:2790 train loss:3.740009 +step:2791 train loss:3.839476 +step:2792 train loss:3.831067 +step:2793 train loss:3.799647 +step:2794 train loss:3.811305 +step:2795 train loss:3.823929 +step:2796 train loss:3.811043 +step:2797 train loss:3.861118 +step:2798 train loss:3.847611 +step:2799 train loss:3.754393 +step:2800 train loss:3.798946 +step:2801 train loss:3.834615 +step:2802 train loss:3.862703 +step:2803 train loss:3.835142 +step:2804 train loss:3.771764 +step:2805 train loss:3.811367 +step:2806 train loss:3.802947 +step:2807 train loss:3.835156 +step:2808 train loss:3.772900 +step:2809 train loss:3.841266 +step:2810 train loss:3.828741 +step:2811 train loss:3.819125 +step:2812 train loss:3.868227 +step:2813 train loss:3.837687 +step:2814 train loss:3.823645 +step:2815 train loss:3.834951 +step:2816 train loss:3.839857 +step:2817 train loss:3.778842 +step:2818 train loss:3.879230 +step:2819 train loss:3.805499 +step:2820 train loss:3.802755 +step:2821 train loss:3.785882 +step:2822 train loss:3.823314 +step:2823 train loss:3.777969 +step:2824 train loss:3.672770 +step:2825 train loss:3.823506 +step:2826 train loss:3.823077 +step:2827 train loss:3.846208 +step:2828 train loss:3.841369 +step:2829 train loss:3.823937 +step:2830 train loss:3.861736 +step:2831 train loss:3.792940 +step:2832 train loss:3.763874 +step:2833 train loss:3.824301 +step:2834 train loss:3.777728 +step:2835 train loss:3.809820 +step:2836 train loss:3.815961 +step:2837 train loss:3.814973 +step:2838 train loss:3.758765 +step:2839 train loss:3.854670 +step:2840 train loss:3.814152 +step:2841 train loss:3.894015 +step:2842 train loss:3.834849 +step:2843 train loss:3.831485 +step:2844 train loss:3.861027 +step:2845 train loss:3.813962 +step:2846 train loss:3.760772 
+step:2847 train loss:3.854244 +step:2848 train loss:3.809972 +step:2849 train loss:3.800232 +step:2850 train loss:3.858677 +step:2851 train loss:3.809151 +step:2852 train loss:3.889308 +step:2853 train loss:3.804825 +step:2854 train loss:3.749282 +step:2855 train loss:3.826095 +step:2856 train loss:3.752994 +step:2857 train loss:3.852784 +step:2858 train loss:3.808113 +step:2859 train loss:3.811352 +step:2860 train loss:3.787488 +step:2861 train loss:3.772739 +step:2862 train loss:3.802104 +step:2863 train loss:3.787967 +step:2864 train loss:3.794235 +step:2865 train loss:3.868441 +step:2866 train loss:3.882803 +step:2867 train loss:3.819259 +step:2868 train loss:3.813389 +step:2869 train loss:3.776755 +step:2870 train loss:3.858948 +step:2871 train loss:3.859331 +step:2872 train loss:3.820786 +step:2873 train loss:3.829632 +step:2874 train loss:3.805554 +step:2875 train loss:3.758525 +step:2876 train loss:3.807071 +step:2877 train loss:3.785165 +step:2878 train loss:3.802317 +step:2879 train loss:3.769563 +step:2880 train loss:3.785880 +step:2881 train loss:3.780364 +step:2882 train loss:3.717566 +step:2883 train loss:3.800093 +step:2884 train loss:3.869985 +step:2885 train loss:3.765916 +step:2886 train loss:3.815228 +step:2887 train loss:3.838688 +step:2888 train loss:3.812611 +step:2889 train loss:3.792847 +step:2890 train loss:3.765016 +step:2891 train loss:3.807411 +step:2892 train loss:3.814539 +step:2893 train loss:3.793342 +step:2894 train loss:3.766205 +step:2895 train loss:3.815887 +step:2896 train loss:3.865976 +step:2897 train loss:3.844468 +step:2898 train loss:3.975566 +step:2899 train loss:3.735634 +step:2900 train loss:3.808064 +step:2901 train loss:3.756737 +step:2902 train loss:3.757859 +step:2903 train loss:3.771177 +step:2904 train loss:3.796976 +step:2905 train loss:3.859098 +step:2906 train loss:3.829000 +step:2907 train loss:4.002854 +step:2908 train loss:3.762636 +step:2909 train loss:3.856621 +step:2910 train loss:3.865829 +step:2911 train loss:3.862337 +step:2912 train loss:3.825742 +step:2913 train loss:3.888048 +step:2914 train loss:3.933273 +step:2915 train loss:3.908556 +step:2916 train loss:3.854038 +step:2917 train loss:3.871836 +step:2918 train loss:3.860631 +step:2919 train loss:3.800995 +step:2920 train loss:3.851153 +step:2921 train loss:3.800357 +step:2922 train loss:3.822739 +step:2923 train loss:3.891424 +step:2924 train loss:3.822665 +step:2925 train loss:3.771447 +step:2926 train loss:3.861803 +step:2927 train loss:3.766581 +step:2928 train loss:3.739261 +step:2929 train loss:3.750788 +step:2930 train loss:3.768977 +step:2931 train loss:3.925266 +step:2932 train loss:3.842035 +step:2933 train loss:3.804234 +step:2934 train loss:3.795817 +step:2935 train loss:3.820834 +step:2936 train loss:3.768520 +step:2937 train loss:3.786230 +step:2938 train loss:3.803315 +step:2939 train loss:3.875830 +step:2940 train loss:3.775491 +step:2941 train loss:3.810106 +step:2942 train loss:3.771743 +step:2943 train loss:4.044346 +step:2944 train loss:3.877404 +step:2945 train loss:3.834910 +step:2946 train loss:3.847035 +step:2947 train loss:3.802864 +step:2948 train loss:3.764858 +step:2949 train loss:3.861292 +step:2950 train loss:3.808691 +step:2951 train loss:3.707452 +step:2952 train loss:3.777139 +step:2953 train loss:3.689401 +step:2954 train loss:3.778125 +step:2955 train loss:3.856392 +step:2956 train loss:3.797782 +step:2957 train loss:3.798437 +step:2958 train loss:3.751882 +step:2959 train loss:3.774703 +step:2960 train loss:3.871250 +step:2961 train 
loss:3.732728 +step:2962 train loss:3.806094 +step:2963 train loss:3.804101 +step:2964 train loss:3.780076 +step:2965 train loss:3.807467 +step:2966 train loss:3.785175 +step:2967 train loss:3.781626 +step:2968 train loss:3.761396 +step:2969 train loss:3.769241 +step:2970 train loss:3.839726 +step:2971 train loss:3.768246 +step:2972 train loss:3.751074 +step:2973 train loss:3.750631 +step:2974 train loss:3.787127 +step:2975 train loss:3.749551 +step:2976 train loss:3.791779 +step:2977 train loss:3.780199 +step:2978 train loss:3.861841 +step:2979 train loss:3.841285 +step:2980 train loss:3.851536 +step:2981 train loss:3.809626 +step:2982 train loss:3.800119 +step:2983 train loss:3.752208 +step:2984 train loss:3.726035 +step:2985 train loss:3.838398 +step:2986 train loss:3.731112 +step:2987 train loss:3.860356 +step:2988 train loss:3.785506 +step:2989 train loss:3.814127 +step:2990 train loss:3.768956 +step:2991 train loss:3.837255 +step:2992 train loss:3.827554 +step:2993 train loss:3.797045 +step:2994 train loss:3.786575 +step:2995 train loss:3.852653 +step:2996 train loss:3.775715 +step:2997 train loss:3.687549 +step:2998 train loss:3.801160 +step:2999 train loss:3.843845 +step:3000 validation loss:3.725525 total_sharp:3.8204e-04 L1_sharp:1.5624e-04 L2_sharp:9.9153e-06 L3_sharp:1.6439e-05 L4_sharp:3.7171e-05 L5_sharp:4.3021e-05 L6_sharp:2.6060e-05 L7_sharp:4.0466e-05 L8_sharp:8.3142e-05 L9_sharp:1.4917e-04 L10_sharp:1.3113e-04 L11_sharp:1.2391e-04 L12_sharp:6.2178e-04 total_fnorm:1.0673e+01 total_l1_linf:9.4002e+04 total_spectral:1.0673e+01 L1_fnorm:2.4690e+00 L2_fnorm:2.4273e+00 L3_fnorm:2.3398e+00 L4_fnorm:2.3645e+00 L5_fnorm:2.3414e+00 L6_fnorm:2.4786e+00 L7_fnorm:2.5187e+00 L8_fnorm:2.4925e+00 L9_fnorm:2.4906e+00 L10_fnorm:2.4903e+00 L11_fnorm:2.5497e+00 L12_fnorm:2.5042e+00 L1_l1linf:2.5237e+00 L2_l1linf:2.6924e+00 L3_l1linf:2.6296e+00 L4_l1linf:2.6324e+00 L5_l1linf:2.3660e+00 L6_l1linf:2.5219e+00 L7_l1linf:2.7316e+00 L8_l1linf:2.7157e+00 L9_l1linf:3.0299e+00 L10_l1linf:2.7986e+00 L11_l1linf:2.8803e+00 L12_l1linf:3.0798e+00 L1_spectral:2.9176e-01 L2_spectral:2.5733e-01 L3_spectral:2.6044e-01 L4_spectral:3.3254e-01 L5_spectral:3.1008e-01 L6_spectral:2.8683e-01 L7_spectral:2.9295e-01 L8_spectral:3.0588e-01 L9_spectral:3.7464e-01 L10_spectral:3.8627e-01 L11_spectral:4.1241e-01 L12_spectral:5.0503e-01 ip_v_neg_g:2.7050e-02 cos_v_neg_g:5.5127e-03 v_norm:1.0673e+01 g_norm:4.5974e-01 hv_norm:3.4846e-01 cos_v_hv:1.1702e-02 hg_norm:3.1498e+00 cos_g_hg:5.2198e-01 v_par:5.3245e-03 v_perp:1.0673e+01 L1_cos_v_neg_g:1.5746e-02 L1_v_norm:2.4690e+00 L2_cos_v_neg_g:3.9063e-03 L2_v_norm:2.4273e+00 L3_cos_v_neg_g:5.5727e-03 L3_v_norm:2.3398e+00 L4_cos_v_neg_g:7.4147e-03 L4_v_norm:2.3645e+00 L5_cos_v_neg_g:6.5616e-03 L5_v_norm:2.3414e+00 L6_cos_v_neg_g:6.0957e-03 L6_v_norm:2.4786e+00 L7_cos_v_neg_g:7.6942e-03 L7_v_norm:2.5187e+00 L8_cos_v_neg_g:1.0646e-02 L8_v_norm:2.4925e+00 L9_cos_v_neg_g:1.3145e-02 L9_v_norm:2.4906e+00 L10_cos_v_neg_g:1.4270e-02 L10_v_norm:2.4903e+00 L11_cos_v_neg_g:1.6169e-02 L11_v_norm:2.5497e+00 L12_cos_v_neg_g:2.8165e-02 L12_v_norm:2.5042e+00 +step:3000 train loss:3.738516 +step:3001 train loss:3.791651 +step:3002 train loss:3.787308 +step:3003 train loss:3.786479 +step:3004 train loss:3.817005 +step:3005 train loss:3.708499 +step:3006 train loss:3.760194 +step:3007 train loss:3.795301 +step:3008 train loss:3.841702 +step:3009 train loss:3.793761 +step:3010 train loss:3.815641 +step:3011 train loss:3.802639 +step:3012 train loss:3.781828 +step:3013 train loss:3.823111 +step:3014 
train loss:3.775582 +step:3015 train loss:3.773780 +step:3016 train loss:3.794150 +step:3017 train loss:3.813410 +step:3018 train loss:3.745946 +step:3019 train loss:3.782588 +step:3020 train loss:3.802128 +step:3021 train loss:3.766801 +step:3022 train loss:3.858259 +step:3023 train loss:3.806173 +step:3024 train loss:3.793684 +step:3025 train loss:3.801901 +step:3026 train loss:3.778203 +step:3027 train loss:3.756488 +step:3028 train loss:3.802717 +step:3029 train loss:3.790981 +step:3030 train loss:3.768473 +step:3031 train loss:3.750256 +step:3032 train loss:3.736973 +step:3033 train loss:3.768930 +step:3034 train loss:3.811589 +step:3035 train loss:3.790924 +step:3036 train loss:3.749929 +step:3037 train loss:3.709803 +step:3038 train loss:3.826491 +step:3039 train loss:3.708951 +step:3040 train loss:3.692491 +step:3041 train loss:3.821495 +step:3042 train loss:3.757036 +step:3043 train loss:3.816731 +step:3044 train loss:3.711556 +step:3045 train loss:3.759501 +step:3046 train loss:3.734485 +step:3047 train loss:3.761698 +step:3048 train loss:3.728895 +step:3049 train loss:3.809617 +step:3050 train loss:3.695733 +step:3051 train loss:3.714547 +step:3052 train loss:3.736031 +step:3053 train loss:3.804618 +step:3054 train loss:3.874958 +step:3055 train loss:3.717289 +step:3056 train loss:3.747703 +step:3057 train loss:3.783731 +step:3058 train loss:3.730422 +step:3059 train loss:3.757007 +step:3060 train loss:3.755045 +step:3061 train loss:3.739489 +step:3062 train loss:3.792588 +step:3063 train loss:3.777929 +step:3064 train loss:3.802129 +step:3065 train loss:3.822220 +step:3066 train loss:3.718288 +step:3067 train loss:3.768989 +step:3068 train loss:3.819222 +step:3069 train loss:3.834717 +step:3070 train loss:3.762600 +step:3071 train loss:3.778407 +step:3072 train loss:3.782010 +step:3073 train loss:3.814019 +step:3074 train loss:3.751581 +step:3075 train loss:3.784682 +step:3076 train loss:3.722305 +step:3077 train loss:3.715415 +step:3078 train loss:3.749417 +step:3079 train loss:3.796546 +step:3080 train loss:3.786627 +step:3081 train loss:3.835513 +step:3082 train loss:3.816756 +step:3083 train loss:3.740496 +step:3084 train loss:3.824336 +step:3085 train loss:3.752865 +step:3086 train loss:3.812120 +step:3087 train loss:3.777304 +step:3088 train loss:3.853930 +step:3089 train loss:3.733888 +step:3090 train loss:3.807221 +step:3091 train loss:3.730072 +step:3092 train loss:3.751651 +step:3093 train loss:3.775702 +step:3094 train loss:3.765912 +step:3095 train loss:3.846156 +step:3096 train loss:3.777088 +step:3097 train loss:3.787600 +step:3098 train loss:3.759900 +step:3099 train loss:3.771987 +step:3100 train loss:3.798486 +step:3101 train loss:3.878058 +step:3102 train loss:3.804193 +step:3103 train loss:3.732379 +step:3104 train loss:3.815613 +step:3105 train loss:3.789700 +step:3106 train loss:3.784104 +step:3107 train loss:3.764091 +step:3108 train loss:3.742932 +step:3109 train loss:3.796568 +step:3110 train loss:3.722760 +step:3111 train loss:3.758347 +step:3112 train loss:3.693963 +step:3113 train loss:3.814907 +step:3114 train loss:3.727347 +step:3115 train loss:3.770404 +step:3116 train loss:3.650814 +step:3117 train loss:3.667876 +step:3118 train loss:3.772475 +step:3119 train loss:3.778975 +step:3120 train loss:3.780224 +step:3121 train loss:3.723536 +step:3122 train loss:3.808575 +step:3123 train loss:3.721396 +step:3124 train loss:3.784132 +step:3125 train loss:3.795477 +step:3126 train loss:3.905009 +step:3127 train loss:3.753302 +step:3128 train loss:3.780350 
+step:3129 train loss:3.765531 +step:3130 train loss:3.738494 +step:3131 train loss:3.815567 +step:3132 train loss:3.804524 +step:3133 train loss:3.774964 +step:3134 train loss:3.668849 +step:3135 train loss:3.759610 +step:3136 train loss:3.736403 +step:3137 train loss:3.866732 +step:3138 train loss:3.767386 +step:3139 train loss:3.747726 +step:3140 train loss:3.770444 +step:3141 train loss:3.769573 +step:3142 train loss:3.710003 +step:3143 train loss:3.792876 +step:3144 train loss:3.740273 +step:3145 train loss:3.723964 +step:3146 train loss:3.739185 +step:3147 train loss:3.847108 +step:3148 train loss:3.753348 +step:3149 train loss:3.809083 +step:3150 train loss:3.791164 +step:3151 train loss:3.762178 +step:3152 train loss:3.761757 +step:3153 train loss:3.717142 +step:3154 train loss:3.800838 +step:3155 train loss:3.744427 +step:3156 train loss:3.795763 +step:3157 train loss:3.799515 +step:3158 train loss:3.772010 +step:3159 train loss:3.708139 +step:3160 train loss:3.759914 +step:3161 train loss:3.725907 +step:3162 train loss:3.787368 +step:3163 train loss:3.769140 +step:3164 train loss:3.747087 +step:3165 train loss:3.762773 +step:3166 train loss:3.801280 +step:3167 train loss:3.761589 +step:3168 train loss:3.848152 +step:3169 train loss:3.757445 +step:3170 train loss:3.742478 +step:3171 train loss:3.730517 +step:3172 train loss:3.735677 +step:3173 train loss:3.677181 +step:3174 train loss:3.796049 +step:3175 train loss:3.763968 +step:3176 train loss:3.775283 +step:3177 train loss:3.740835 +step:3178 train loss:3.723022 +step:3179 train loss:3.796109 +step:3180 train loss:3.727735 +step:3181 train loss:3.803486 +step:3182 train loss:3.810360 +step:3183 train loss:3.752288 +step:3184 train loss:3.748874 +step:3185 train loss:3.809669 +step:3186 train loss:3.769188 +step:3187 train loss:3.786987 +step:3188 train loss:3.825832 +step:3189 train loss:3.771152 +step:3190 train loss:3.726000 +step:3191 train loss:3.730653 +step:3192 train loss:3.696779 +step:3193 train loss:3.774539 +step:3194 train loss:3.738635 +step:3195 train loss:3.722992 +step:3196 train loss:3.774462 +step:3197 train loss:3.736072 +step:3198 train loss:3.768902 +step:3199 train loss:3.753407 +step:3200 train loss:3.759036 +step:3201 train loss:3.723973 +step:3202 train loss:3.787491 +step:3203 train loss:3.847563 +step:3204 train loss:3.811256 +step:3205 train loss:3.656679 +step:3206 train loss:3.938827 +step:3207 train loss:3.697158 +step:3208 train loss:3.762718 +step:3209 train loss:3.751718 +step:3210 train loss:3.735632 +step:3211 train loss:3.760892 +step:3212 train loss:3.777050 +step:3213 train loss:3.713488 +step:3214 train loss:3.821033 +step:3215 train loss:3.823195 +step:3216 train loss:3.690606 +step:3217 train loss:3.771133 +step:3218 train loss:3.814756 +step:3219 train loss:3.731601 +step:3220 train loss:3.799772 +step:3221 train loss:3.716090 +step:3222 train loss:3.758017 +step:3223 train loss:3.773134 +step:3224 train loss:3.779640 +step:3225 train loss:3.708783 +step:3226 train loss:3.740625 +step:3227 train loss:3.772316 +step:3228 train loss:3.766880 +step:3229 train loss:3.798533 +step:3230 train loss:3.811041 +step:3231 train loss:3.748004 +step:3232 train loss:3.759099 +step:3233 train loss:3.729690 +step:3234 train loss:3.718298 +step:3235 train loss:3.724589 +step:3236 train loss:3.740513 +step:3237 train loss:3.741977 +step:3238 train loss:3.758101 +step:3239 train loss:3.665972 +step:3240 train loss:3.782743 +step:3241 train loss:3.772074 +step:3242 train loss:3.830817 +step:3243 train 
loss:3.770268 +step:3244 train loss:3.787965 +step:3245 train loss:3.695044 +step:3246 train loss:3.819931 +step:3247 train loss:3.760315 +step:3248 train loss:3.780267 +step:3249 train loss:3.726124 +step:3250 validation loss:3.692203 +step:3250 train loss:3.725955 +step:3251 train loss:3.834935 +step:3252 train loss:3.767071 +step:3253 train loss:3.763051 +step:3254 train loss:3.832054 +step:3255 train loss:3.774131 +step:3256 train loss:3.769638 +step:3257 train loss:3.750819 +step:3258 train loss:3.682478 +step:3259 train loss:3.662507 +step:3260 train loss:3.775198 +step:3261 train loss:3.760265 +step:3262 train loss:3.747367 +step:3263 train loss:3.732533 +step:3264 train loss:3.843191 +step:3265 train loss:3.754403 +step:3266 train loss:3.781001 +step:3267 train loss:3.744819 +step:3268 train loss:3.744912 +step:3269 train loss:3.757169 +step:3270 train loss:3.787840 +step:3271 train loss:3.751581 +step:3272 train loss:3.727014 +step:3273 train loss:3.737159 +step:3274 train loss:3.872466 +step:3275 train loss:3.746286 +step:3276 train loss:3.814625 +step:3277 train loss:3.750263 +step:3278 train loss:3.728919 +step:3279 train loss:3.754346 +step:3280 train loss:3.777194 +step:3281 train loss:3.710586 +step:3282 train loss:3.777276 +step:3283 train loss:3.750244 +step:3284 train loss:3.711995 +step:3285 train loss:3.729772 +step:3286 train loss:3.760065 +step:3287 train loss:3.696374 +step:3288 train loss:3.777626 +step:3289 train loss:3.719339 +step:3290 train loss:3.753018 +step:3291 train loss:3.712815 +step:3292 train loss:3.736139 +step:3293 train loss:3.777652 +step:3294 train loss:3.793737 +step:3295 train loss:3.708664 +step:3296 train loss:3.761788 +step:3297 train loss:3.720062 +step:3298 train loss:3.722164 +step:3299 train loss:3.848660 +step:3300 train loss:3.689989 +step:3301 train loss:3.770435 +step:3302 train loss:3.742335 +step:3303 train loss:3.775703 +step:3304 train loss:3.737902 +step:3305 train loss:3.838170 +step:3306 train loss:3.761393 +step:3307 train loss:3.780375 +step:3308 train loss:3.734474 +step:3309 train loss:3.790191 +step:3310 train loss:3.706487 +step:3311 train loss:3.759817 +step:3312 train loss:3.725903 +step:3313 train loss:3.760712 +step:3314 train loss:3.755821 +step:3315 train loss:3.837555 +step:3316 train loss:3.688628 +step:3317 train loss:3.779639 +step:3318 train loss:3.792289 +step:3319 train loss:3.716664 +step:3320 train loss:3.872320 +step:3321 train loss:3.781171 +step:3322 train loss:3.777973 +step:3323 train loss:3.879514 +step:3324 train loss:3.797547 +step:3325 train loss:3.771975 +step:3326 train loss:3.762932 +step:3327 train loss:3.777467 +step:3328 train loss:3.753849 +step:3329 train loss:3.754293 +step:3330 train loss:3.743575 +step:3331 train loss:3.789090 +step:3332 train loss:3.809170 +step:3333 train loss:3.777296 +step:3334 train loss:3.709238 +step:3335 train loss:3.721963 +step:3336 train loss:3.756080 +step:3337 train loss:3.752251 +step:3338 train loss:3.745408 +step:3339 train loss:3.735906 +step:3340 train loss:3.774734 +step:3341 train loss:3.721583 +step:3342 train loss:3.771063 +step:3343 train loss:3.710019 +step:3344 train loss:3.766150 +step:3345 train loss:3.714821 +step:3346 train loss:3.731663 +step:3347 train loss:3.738357 +step:3348 train loss:3.764911 +step:3349 train loss:3.750753 +step:3350 train loss:3.777199 +step:3351 train loss:3.834887 +step:3352 train loss:3.772754 +step:3353 train loss:3.868708 +step:3354 train loss:3.713035 +step:3355 train loss:3.817429 +step:3356 train loss:3.767232 
+step:3357 train loss:3.775571 +step:3358 train loss:3.718130 +step:3359 train loss:3.748181 +step:3360 train loss:3.737375 +step:3361 train loss:3.739944 +step:3362 train loss:3.732038 +step:3363 train loss:3.732877 +step:3364 train loss:3.712683 +step:3365 train loss:3.750580 +step:3366 train loss:3.777297 +step:3367 train loss:3.732294 +step:3368 train loss:3.827897 +step:3369 train loss:3.742688 +step:3370 train loss:3.831095 +step:3371 train loss:3.795152 +step:3372 train loss:3.766522 +step:3373 train loss:3.770696 +step:3374 train loss:3.822676 +step:3375 train loss:3.753788 +step:3376 train loss:3.761621 +step:3377 train loss:3.742894 +step:3378 train loss:3.720608 +step:3379 train loss:3.800134 +step:3380 train loss:3.778357 +step:3381 train loss:3.765085 +step:3382 train loss:3.779628 +step:3383 train loss:3.787874 +step:3384 train loss:3.719696 +step:3385 train loss:3.765567 +step:3386 train loss:3.744714 +step:3387 train loss:3.819852 +step:3388 train loss:3.722844 +step:3389 train loss:3.915742 +step:3390 train loss:3.684848 +step:3391 train loss:3.772019 +step:3392 train loss:3.748993 +step:3393 train loss:3.774741 +step:3394 train loss:3.729293 +step:3395 train loss:3.806418 +step:3396 train loss:3.712742 +step:3397 train loss:3.792215 +step:3398 train loss:3.749692 +step:3399 train loss:3.766580 +step:3400 train loss:3.715219 +step:3401 train loss:3.749147 +step:3402 train loss:3.904111 +step:3403 train loss:3.796493 +step:3404 train loss:3.915436 +step:3405 train loss:3.766784 +step:3406 train loss:3.744985 +step:3407 train loss:3.748655 +step:3408 train loss:3.723766 +step:3409 train loss:3.691118 +step:3410 train loss:3.719932 +step:3411 train loss:3.790391 +step:3412 train loss:3.710460 +step:3413 train loss:3.705210 +step:3414 train loss:3.742317 +step:3415 train loss:3.716045 +step:3416 train loss:3.719886 +step:3417 train loss:3.801093 +step:3418 train loss:3.798924 +step:3419 train loss:3.756985 +step:3420 train loss:3.732865 +step:3421 train loss:3.764328 +step:3422 train loss:3.781519 +step:3423 train loss:3.798508 +step:3424 train loss:3.681288 +step:3425 train loss:3.702693 +step:3426 train loss:3.699533 +step:3427 train loss:3.761437 +step:3428 train loss:3.685637 +step:3429 train loss:3.748641 +step:3430 train loss:3.717277 +step:3431 train loss:3.771048 +step:3432 train loss:3.753812 +step:3433 train loss:3.716076 +step:3434 train loss:3.805560 +step:3435 train loss:3.742631 +step:3436 train loss:3.833044 +step:3437 train loss:3.659352 +step:3438 train loss:3.767543 +step:3439 train loss:3.742985 +step:3440 train loss:3.835178 +step:3441 train loss:3.731066 +step:3442 train loss:3.798013 +step:3443 train loss:3.730477 +step:3444 train loss:3.752280 +step:3445 train loss:3.796054 +step:3446 train loss:3.700722 +step:3447 train loss:3.775055 +step:3448 train loss:3.728994 +step:3449 train loss:3.759938 +step:3450 train loss:3.671269 +step:3451 train loss:3.787920 +step:3452 train loss:3.735013 +step:3453 train loss:3.793861 +step:3454 train loss:3.816965 +step:3455 train loss:3.880904 +step:3456 train loss:3.824302 +step:3457 train loss:3.820439 +step:3458 train loss:3.739877 +step:3459 train loss:3.756576 +step:3460 train loss:3.695225 +step:3461 train loss:3.758349 +step:3462 train loss:3.758820 +step:3463 train loss:3.726588 +step:3464 train loss:3.781153 +step:3465 train loss:3.714822 +step:3466 train loss:3.781991 +step:3467 train loss:3.750350 +step:3468 train loss:3.758430 +step:3469 train loss:3.771144 +step:3470 train loss:3.744667 +step:3471 train 
loss:3.787306 +step:3472 train loss:3.670198 +step:3473 train loss:3.792117 +step:3474 train loss:3.688564 +step:3475 train loss:3.769016 +step:3476 train loss:3.737204 +step:3477 train loss:3.758657 +step:3478 train loss:3.732458 +step:3479 train loss:3.760283 +step:3480 train loss:3.779613 +step:3481 train loss:3.757794 +step:3482 train loss:3.742251 +step:3483 train loss:3.882982 +step:3484 train loss:3.724628 +step:3485 train loss:3.711308 +step:3486 train loss:3.765882 +step:3487 train loss:3.803117 +step:3488 train loss:3.708375 +step:3489 train loss:3.763479 +step:3490 train loss:3.730905 +step:3491 train loss:3.766823 +step:3492 train loss:3.801365 +step:3493 train loss:3.776374 +step:3494 train loss:3.768808 +step:3495 train loss:3.741948 +step:3496 train loss:3.712034 +step:3497 train loss:3.821350 +step:3498 train loss:3.769529 +step:3499 train loss:3.701712 +step:3500 validation loss:3.665079 total_sharp:2.7102e-04 L1_sharp:9.2135e-05 L2_sharp:1.3381e-05 L3_sharp:2.4505e-05 L4_sharp:3.8046e-05 L5_sharp:3.9986e-05 L6_sharp:2.4139e-05 L7_sharp:3.0258e-05 L8_sharp:6.1079e-05 L9_sharp:9.5064e-05 L10_sharp:1.2117e-04 L11_sharp:1.0714e-04 L12_sharp:3.7824e-04 total_fnorm:1.0564e+01 total_l1_linf:9.2933e+04 total_spectral:1.0564e+01 L1_fnorm:2.4521e+00 L2_fnorm:2.4079e+00 L3_fnorm:2.3362e+00 L4_fnorm:2.3407e+00 L5_fnorm:2.3106e+00 L6_fnorm:2.4494e+00 L7_fnorm:2.4876e+00 L8_fnorm:2.4369e+00 L9_fnorm:2.4391e+00 L10_fnorm:2.4283e+00 L11_fnorm:2.4653e+00 L12_fnorm:2.3714e+00 L1_l1linf:2.5435e+00 L2_l1linf:2.5309e+00 L3_l1linf:2.4991e+00 L4_l1linf:2.5501e+00 L5_l1linf:2.3891e+00 L6_l1linf:2.4856e+00 L7_l1linf:2.6363e+00 L8_l1linf:2.6553e+00 L9_l1linf:2.8922e+00 L10_l1linf:2.8841e+00 L11_l1linf:2.6706e+00 L12_l1linf:2.8225e+00 L1_spectral:2.7115e-01 L2_spectral:2.4621e-01 L3_spectral:2.5643e-01 L4_spectral:3.2048e-01 L5_spectral:2.9505e-01 L6_spectral:2.6346e-01 L7_spectral:2.7213e-01 L8_spectral:3.0070e-01 L9_spectral:3.3670e-01 L10_spectral:3.7114e-01 L11_spectral:3.6773e-01 L12_spectral:4.2109e-01 ip_v_neg_g:1.5359e-02 cos_v_neg_g:2.8483e-03 v_norm:1.0564e+01 g_norm:5.1046e-01 hv_norm:2.8377e-01 cos_v_hv:1.0089e-02 hg_norm:3.9001e+00 cos_g_hg:5.4018e-01 v_par:2.4621e-03 v_perp:1.0564e+01 L1_cos_v_neg_g:7.7809e-03 L1_v_norm:2.4521e+00 L2_cos_v_neg_g:2.5830e-03 L2_v_norm:2.4079e+00 L3_cos_v_neg_g:2.6058e-03 L3_v_norm:2.3362e+00 L4_cos_v_neg_g:4.4842e-03 L4_v_norm:2.3407e+00 L5_cos_v_neg_g:3.7350e-03 L5_v_norm:2.3106e+00 L6_cos_v_neg_g:3.6194e-03 L6_v_norm:2.4494e+00 L7_cos_v_neg_g:4.0571e-03 L7_v_norm:2.4876e+00 L8_cos_v_neg_g:5.7778e-03 L8_v_norm:2.4369e+00 L9_cos_v_neg_g:6.7109e-03 L9_v_norm:2.4391e+00 L10_cos_v_neg_g:7.5482e-03 L10_v_norm:2.4283e+00 L11_cos_v_neg_g:9.3699e-03 L11_v_norm:2.4653e+00 L12_cos_v_neg_g:1.5105e-02 L12_v_norm:2.3714e+00 +step:3500 train loss:3.721437 +step:3501 train loss:3.849520 +step:3502 train loss:3.826883 +step:3503 train loss:3.782982 +step:3504 train loss:3.731799 +step:3505 train loss:3.743121 +step:3506 train loss:3.643419 +step:3507 train loss:3.763343 +step:3508 train loss:3.708301 +step:3509 train loss:3.774643 +step:3510 train loss:3.709063 +step:3511 train loss:3.745291 +step:3512 train loss:3.885913 +step:3513 train loss:3.707262 +step:3514 train loss:3.719218 +step:3515 train loss:3.972275 +step:3516 train loss:3.765450 +step:3517 train loss:3.728472 +step:3518 train loss:3.730047 +step:3519 train loss:3.721781 +step:3520 train loss:3.750391 +step:3521 train loss:3.741713 +step:3522 train loss:3.650520 +step:3523 train loss:3.754606 +step:3524 
train loss:3.739748 +step:3525 train loss:3.728267 +step:3526 train loss:3.750274 +step:3527 train loss:3.699717 +step:3528 train loss:3.754782 +step:3529 train loss:3.730586 +step:3530 train loss:3.722732 +step:3531 train loss:3.718255 +step:3532 train loss:3.900856 +step:3533 train loss:3.723109 +step:3534 train loss:3.739024 +step:3535 train loss:3.714730 +step:3536 train loss:3.711626 +step:3537 train loss:3.724884 +step:3538 train loss:3.754538 +step:3539 train loss:3.702838 +step:3540 train loss:3.769681 +step:3541 train loss:3.736649 +step:3542 train loss:3.748801 +step:3543 train loss:3.666483 +step:3544 train loss:3.688732 +step:3545 train loss:3.692744 +step:3546 train loss:3.761213 +step:3547 train loss:3.764883 +step:3548 train loss:3.741517 +step:3549 train loss:3.733877 +step:3550 train loss:3.724331 +step:3551 train loss:3.754137 +step:3552 train loss:3.648745 +step:3553 train loss:3.771724 +step:3554 train loss:3.765715 +step:3555 train loss:3.751604 +step:3556 train loss:3.778886 +step:3557 train loss:3.758951 +step:3558 train loss:3.736112 +step:3559 train loss:3.683029 +step:3560 train loss:3.772591 +step:3561 train loss:3.770241 +step:3562 train loss:3.944129 +step:3563 train loss:3.812028 +step:3564 train loss:3.766368 +step:3565 train loss:3.774245 +step:3566 train loss:3.744565 +step:3567 train loss:3.681988 +step:3568 train loss:3.710338 +step:3569 train loss:3.788813 +step:3570 train loss:3.812412 +step:3571 train loss:3.788976 +step:3572 train loss:3.779979 +step:3573 train loss:3.737710 +step:3574 train loss:3.733656 +step:3575 train loss:3.725554 +step:3576 train loss:3.706279 +step:3577 train loss:3.715178 +step:3578 train loss:3.799416 +step:3579 train loss:3.711150 +step:3580 train loss:3.790087 +step:3581 train loss:3.730204 +step:3582 train loss:3.786129 +step:3583 train loss:3.722935 +step:3584 train loss:3.696406 +step:3585 train loss:3.747313 +step:3586 train loss:3.695784 +step:3587 train loss:3.791617 +step:3588 train loss:3.912807 +step:3589 train loss:3.752324 +step:3590 train loss:3.739549 +step:3591 train loss:3.750813 +step:3592 train loss:3.707854 +step:3593 train loss:3.678507 +step:3594 train loss:3.731728 +step:3595 train loss:3.708068 +step:3596 train loss:3.788866 +step:3597 train loss:3.761080 +step:3598 train loss:3.714423 +step:3599 train loss:3.762629 +step:3600 train loss:3.702442 +step:3601 train loss:3.719105 +step:3602 train loss:3.709518 +step:3603 train loss:3.725225 +step:3604 train loss:3.749045 +step:3605 train loss:3.852310 +step:3606 train loss:3.757092 +step:3607 train loss:3.740063 +step:3608 train loss:3.756390 +step:3609 train loss:3.739089 +step:3610 train loss:3.711990 +step:3611 train loss:3.709997 +step:3612 train loss:3.777594 +step:3613 train loss:3.749535 +step:3614 train loss:3.687676 +step:3615 train loss:3.731835 +step:3616 train loss:3.703639 +step:3617 train loss:3.778281 +step:3618 train loss:3.741438 +step:3619 train loss:3.724498 +step:3620 train loss:3.740624 +step:3621 train loss:3.699837 +step:3622 train loss:3.806405 +step:3623 train loss:3.792312 +step:3624 train loss:3.759166 +step:3625 train loss:3.739645 +step:3626 train loss:3.746676 +step:3627 train loss:3.740073 +step:3628 train loss:3.725532 +step:3629 train loss:3.729550 +step:3630 train loss:3.815831 +step:3631 train loss:3.734516 +step:3632 train loss:3.768745 +step:3633 train loss:3.726827 +step:3634 train loss:3.728108 +step:3635 train loss:3.716743 +step:3636 train loss:3.788031 +step:3637 train loss:3.864625 +step:3638 train loss:3.778404 
+step:3639 train loss:3.768853 +step:3640 train loss:3.773912 +step:3641 train loss:3.810385 +step:3642 train loss:3.704521 +step:3643 train loss:3.878197 +step:3644 train loss:3.767373 +step:3645 train loss:3.739311 +step:3646 train loss:3.860091 +step:3647 train loss:3.753952 +step:3648 train loss:3.743520 +step:3649 train loss:3.696849 +step:3650 train loss:3.735115 +step:3651 train loss:3.731106 +step:3652 train loss:3.718066 +step:3653 train loss:3.651093 +step:3654 train loss:3.714533 +step:3655 train loss:3.711943 +step:3656 train loss:3.741403 +step:3657 train loss:3.756824 +step:3658 train loss:3.751909 +step:3659 train loss:3.736858 +step:3660 train loss:3.709752 +step:3661 train loss:3.732258 +step:3662 train loss:3.706698 +step:3663 train loss:3.743299 +step:3664 train loss:3.699589 +step:3665 train loss:3.746605 +step:3666 train loss:3.778059 +step:3667 train loss:3.867963 +step:3668 train loss:3.753511 +step:3669 train loss:3.706561 +step:3670 train loss:3.757093 +step:3671 train loss:3.711772 +step:3672 train loss:3.750247 +step:3673 train loss:3.736955 +step:3674 train loss:3.748953 +step:3675 train loss:3.762106 +step:3676 train loss:3.727408 +step:3677 train loss:3.686678 +step:3678 train loss:3.746662 +step:3679 train loss:3.646742 +step:3680 train loss:3.749716 +step:3681 train loss:3.782704 +step:3682 train loss:3.760858 +step:3683 train loss:3.709135 +step:3684 train loss:3.708098 +step:3685 train loss:3.737062 +step:3686 train loss:3.763821 +step:3687 train loss:3.717938 +step:3688 train loss:3.692586 +step:3689 train loss:3.730557 +step:3690 train loss:3.717174 +step:3691 train loss:3.696155 +step:3692 train loss:3.756486 +step:3693 train loss:3.889931 +step:3694 train loss:3.707552 +step:3695 train loss:3.766014 +step:3696 train loss:3.729116 +step:3697 train loss:3.722206 +step:3698 train loss:3.661243 +step:3699 train loss:3.684397 +step:3700 train loss:3.715219 +step:3701 train loss:3.733614 +step:3702 train loss:3.753097 +step:3703 train loss:3.713768 +step:3704 train loss:3.761449 +step:3705 train loss:3.736276 +step:3706 train loss:3.691996 +step:3707 train loss:3.744044 +step:3708 train loss:3.718672 +step:3709 train loss:3.640335 +step:3710 train loss:3.764403 +step:3711 train loss:3.713741 +step:3712 train loss:3.753951 +step:3713 train loss:3.703561 +step:3714 train loss:3.719449 +step:3715 train loss:3.842778 +step:3716 train loss:3.745820 +step:3717 train loss:3.717991 +step:3718 train loss:3.725154 +step:3719 train loss:3.724761 +step:3720 train loss:3.732813 +step:3721 train loss:3.790436 +step:3722 train loss:3.802925 +step:3723 train loss:3.687051 +step:3724 train loss:3.742792 +step:3725 train loss:3.722283 +step:3726 train loss:3.744389 +step:3727 train loss:3.814663 +step:3728 train loss:3.781563 +step:3729 train loss:3.685919 +step:3730 train loss:3.701074 +step:3731 train loss:3.721850 +step:3732 train loss:3.875587 +step:3733 train loss:3.735540 +step:3734 train loss:3.737442 +step:3735 train loss:3.674620 +step:3736 train loss:3.729756 +step:3737 train loss:3.781754 +step:3738 train loss:3.802455 +step:3739 train loss:3.721157 +step:3740 train loss:3.622313 +step:3741 train loss:3.832161 +step:3742 train loss:3.742186 +step:3743 train loss:3.714184 +step:3744 train loss:3.718218 +step:3745 train loss:3.734642 +step:3746 train loss:3.707858 +step:3747 train loss:3.716076 +step:3748 train loss:3.758676 +step:3749 train loss:3.742713 +step:3750 validation loss:3.652423 +step:3750 train loss:3.755863 +step:3751 train loss:3.841008 +step:3752 
train loss:3.778537 +step:3753 train loss:3.701961 +step:3754 train loss:3.747509 +step:3755 train loss:3.920126 +step:3756 train loss:3.705874 +step:3757 train loss:3.697497 +step:3758 train loss:3.730959 +step:3759 train loss:3.675700 +step:3760 train loss:3.669963 +step:3761 train loss:3.723790 +step:3762 train loss:3.715781 +step:3763 train loss:3.716312 +step:3764 train loss:3.710733 +step:3765 train loss:3.706467 +step:3766 train loss:3.678531 +step:3767 train loss:3.759978 +step:3768 train loss:3.702949 +step:3769 train loss:3.965544 +step:3770 train loss:3.757766 +step:3771 train loss:3.765618 +step:3772 train loss:3.723879 +step:3773 train loss:3.717160 +step:3774 train loss:3.720597 +step:3775 train loss:3.718563 +step:3776 train loss:3.716716 +step:3777 train loss:3.676368 +step:3778 train loss:3.694305 +step:3779 train loss:3.679049 +step:3780 train loss:3.757942 +step:3781 train loss:3.724638 +step:3782 train loss:3.645176 +step:3783 train loss:3.751151 +step:3784 train loss:3.759602 +step:3785 train loss:3.667847 +step:3786 train loss:3.780410 +step:3787 train loss:3.687209 +step:3788 train loss:3.702538 +step:3789 train loss:3.614667 +step:3790 train loss:3.729697 +step:3791 train loss:3.749579 +step:3792 train loss:3.721567 +step:3793 train loss:3.720252 +step:3794 train loss:3.743574 +step:3795 train loss:3.714798 +step:3796 train loss:3.732659 +step:3797 train loss:3.706902 +step:3798 train loss:3.715667 +step:3799 train loss:3.725119 +step:3800 train loss:3.634451 +step:3801 train loss:3.748725 +step:3802 train loss:3.676389 +step:3803 train loss:3.757879 +step:3804 train loss:3.778732 +step:3805 train loss:3.731046 +step:3806 train loss:3.745539 +step:3807 train loss:3.767449 +step:3808 train loss:3.722955 +step:3809 train loss:3.734577 +step:3810 train loss:3.736116 +step:3811 train loss:3.723190 +step:3812 train loss:3.725226 +step:3813 train loss:3.677847 +step:3814 train loss:3.720987 +step:3815 train loss:3.722582 +step:3816 train loss:3.742048 +step:3817 train loss:3.760786 +step:3818 train loss:3.733617 +step:3819 train loss:3.744150 +step:3820 train loss:3.741495 +step:3821 train loss:3.699934 +step:3822 train loss:3.780776 +step:3823 train loss:3.677056 +step:3824 train loss:3.691112 +step:3825 train loss:3.700498 +step:3826 train loss:3.767738 +step:3827 train loss:3.791108 +step:3828 train loss:3.685769 +step:3829 train loss:3.702629 +step:3830 train loss:3.761554 +step:3831 train loss:3.696071 +step:3832 train loss:3.755386 +step:3833 train loss:3.697066 +step:3834 train loss:3.661754 +step:3835 train loss:3.703379 +step:3836 train loss:3.677932 +step:3837 train loss:3.747249 +step:3838 train loss:3.700869 +step:3839 train loss:3.739521 +step:3840 train loss:3.758079 +step:3841 train loss:3.706723 +step:3842 train loss:3.739490 +step:3843 train loss:3.757284 +step:3844 train loss:3.721653 +step:3845 train loss:3.743652 +step:3846 train loss:3.785981 +step:3847 train loss:3.680058 +step:3848 train loss:3.690732 +step:3849 train loss:3.704692 +step:3850 train loss:3.727741 +step:3851 train loss:3.861341 +step:3852 train loss:3.842066 +step:3853 train loss:3.743936 +step:3854 train loss:3.706571 +step:3855 train loss:3.756901 +step:3856 train loss:3.676995 +step:3857 train loss:3.738201 +step:3858 train loss:3.649404 +step:3859 train loss:3.700199 +step:3860 train loss:3.769850 +step:3861 train loss:3.740990 +step:3862 train loss:3.676676 +step:3863 train loss:3.727600 +step:3864 train loss:3.698066 +step:3865 train loss:3.732017 +step:3866 train loss:3.753886 
+step:3867 train loss:3.750701 +step:3868 train loss:3.700900 +step:3869 train loss:3.699413 +step:3870 train loss:3.674173 +step:3871 train loss:3.678853 +step:3872 train loss:3.810733 +step:3873 train loss:3.731259 +step:3874 train loss:3.745646 +step:3875 train loss:3.855259 +step:3876 train loss:3.728629 +step:3877 train loss:3.757702 +step:3878 train loss:3.781276 +step:3879 train loss:3.769387 +step:3880 train loss:3.851139 +step:3881 train loss:3.672592 +step:3882 train loss:3.703822 +step:3883 train loss:3.715481 +step:3884 train loss:3.708990 +step:3885 train loss:3.721593 +step:3886 train loss:3.781661 +step:3887 train loss:3.761641 +step:3888 train loss:3.722980 +step:3889 train loss:3.695577 +step:3890 train loss:3.730110 +step:3891 train loss:3.747238 +step:3892 train loss:3.653755 +step:3893 train loss:3.761124 +step:3894 train loss:3.709571 +step:3895 train loss:3.728261 +step:3896 train loss:3.720315 +step:3897 train loss:3.687017 +step:3898 train loss:3.746821 +step:3899 train loss:3.787015 +step:3900 train loss:3.739777 +step:3901 train loss:3.757392 +step:3902 train loss:3.681182 +step:3903 train loss:3.696670 +step:3904 train loss:3.729752 +step:3905 train loss:3.666351 +step:3906 train loss:3.700269 +step:3907 train loss:3.736526 +step:3908 train loss:3.812097 +step:3909 train loss:3.703546 +step:3910 train loss:3.732013 +step:3911 train loss:3.745737 +step:3912 train loss:3.694034 +step:3913 train loss:3.710301 +step:3914 train loss:3.729540 +step:3915 train loss:3.698293 +step:3916 train loss:3.734087 +step:3917 train loss:3.780040 +step:3918 train loss:3.756265 +step:3919 train loss:3.730690 +step:3920 train loss:3.707551 +step:3921 train loss:3.749467 +step:3922 train loss:3.750608 +step:3923 train loss:3.742043 +step:3924 train loss:3.679555 +step:3925 train loss:3.875923 +step:3926 train loss:3.725632 +step:3927 train loss:3.707284 +step:3928 train loss:3.784385 +step:3929 train loss:3.842168 +step:3930 train loss:3.745830 +step:3931 train loss:3.684381 +step:3932 train loss:3.734477 +step:3933 train loss:3.751135 +step:3934 train loss:3.702482 +step:3935 train loss:3.676063 +step:3936 train loss:3.768787 +step:3937 train loss:3.725799 +step:3938 train loss:3.736133 +step:3939 train loss:3.761612 +step:3940 train loss:3.713698 +step:3941 train loss:3.798993 +step:3942 train loss:3.754951 +step:3943 train loss:3.739068 +step:3944 train loss:3.788883 +step:3945 train loss:3.700183 +step:3946 train loss:3.644966 +step:3947 train loss:3.771820 +step:3948 train loss:3.742030 +step:3949 train loss:3.904605 +step:3950 train loss:3.711775 +step:3951 train loss:3.632122 +step:3952 train loss:3.596906 +step:3953 train loss:3.671652 +step:3954 train loss:3.721407 +step:3955 train loss:3.751133 +step:3956 train loss:3.708746 +step:3957 train loss:3.758492 +step:3958 train loss:3.739200 +step:3959 train loss:3.773358 +step:3960 train loss:3.698549 +step:3961 train loss:3.723085 +step:3962 train loss:3.729109 +step:3963 train loss:3.704312 +step:3964 train loss:3.685347 +step:3965 train loss:3.737679 +step:3966 train loss:3.695435 +step:3967 train loss:3.739865 +step:3968 train loss:3.759979 +step:3969 train loss:3.666717 +step:3970 train loss:3.782559 +step:3971 train loss:3.697491 +step:3972 train loss:3.729876 +step:3973 train loss:3.686536 +step:3974 train loss:3.779196 +step:3975 train loss:3.734461 +step:3976 train loss:3.685565 +step:3977 train loss:3.743174 +step:3978 train loss:3.711242 +step:3979 train loss:3.695577 +step:3980 train loss:3.764791 +step:3981 train 
loss:3.696777 +step:3982 train loss:3.723235 +step:3983 train loss:3.703501 +step:3984 train loss:3.736903 +step:3985 train loss:3.713640 +step:3986 train loss:3.726703 +step:3987 train loss:3.734040 +step:3988 train loss:3.669729 +step:3989 train loss:3.742324 +step:3990 train loss:3.736283 +step:3991 train loss:3.750101 +step:3992 train loss:3.708789 +step:3993 train loss:3.740982 +step:3994 train loss:3.690402 +step:3995 train loss:3.744512 +step:3996 train loss:3.659963 +step:3997 train loss:3.737539 +step:3998 train loss:3.623427 +step:3999 train loss:3.777334 +step:4000 validation loss:3.636326 total_sharp:2.8613e-04 L1_sharp:8.6085e-05 L2_sharp:1.0088e-05 L3_sharp:1.9532e-05 L4_sharp:3.9263e-05 L5_sharp:3.9830e-05 L6_sharp:2.1246e-05 L7_sharp:3.8776e-05 L8_sharp:7.0403e-05 L9_sharp:1.0701e-04 L10_sharp:1.5040e-04 L11_sharp:1.1231e-04 L12_sharp:3.3167e-04 total_fnorm:1.0695e+01 total_l1_linf:9.4311e+04 total_spectral:1.0695e+01 L1_fnorm:2.4562e+00 L2_fnorm:2.4586e+00 L3_fnorm:2.3979e+00 L4_fnorm:2.3997e+00 L5_fnorm:2.3670e+00 L6_fnorm:2.4961e+00 L7_fnorm:2.5310e+00 L8_fnorm:2.4888e+00 L9_fnorm:2.4790e+00 L10_fnorm:2.4854e+00 L11_fnorm:2.5089e+00 L12_fnorm:2.4220e+00 L1_l1linf:2.5332e+00 L2_l1linf:2.5560e+00 L3_l1linf:2.5838e+00 L4_l1linf:2.5981e+00 L5_l1linf:2.4204e+00 L6_l1linf:2.4720e+00 L7_l1linf:2.7052e+00 L8_l1linf:2.7744e+00 L9_l1linf:2.8992e+00 L10_l1linf:3.2423e+00 L11_l1linf:2.9685e+00 L12_l1linf:2.9874e+00 L1_spectral:2.7314e-01 L2_spectral:2.5504e-01 L3_spectral:2.6358e-01 L4_spectral:3.3302e-01 L5_spectral:3.0297e-01 L6_spectral:2.7022e-01 L7_spectral:2.8736e-01 L8_spectral:3.2865e-01 L9_spectral:3.6862e-01 L10_spectral:4.1189e-01 L11_spectral:4.0061e-01 L12_spectral:4.2704e-01 ip_v_neg_g:1.6636e-02 cos_v_neg_g:3.9113e-03 v_norm:1.0695e+01 g_norm:3.9768e-01 hv_norm:2.1617e-01 cos_v_hv:1.4157e-02 hg_norm:2.4571e+00 cos_g_hg:4.4233e-01 v_par:4.2549e-03 v_perp:1.0695e+01 L1_cos_v_neg_g:1.0271e-02 L1_v_norm:2.4562e+00 L2_cos_v_neg_g:2.4849e-03 L2_v_norm:2.4586e+00 L3_cos_v_neg_g:3.7126e-03 L3_v_norm:2.3979e+00 L4_cos_v_neg_g:5.3207e-03 L4_v_norm:2.3997e+00 L5_cos_v_neg_g:5.1652e-03 L5_v_norm:2.3670e+00 L6_cos_v_neg_g:4.0090e-03 L6_v_norm:2.4961e+00 L7_cos_v_neg_g:5.9535e-03 L7_v_norm:2.5310e+00 L8_cos_v_neg_g:7.4147e-03 L8_v_norm:2.4888e+00 L9_cos_v_neg_g:8.5920e-03 L9_v_norm:2.4790e+00 L10_cos_v_neg_g:1.0110e-02 L10_v_norm:2.4854e+00 L11_cos_v_neg_g:1.2316e-02 L11_v_norm:2.5089e+00 L12_cos_v_neg_g:1.5391e-02 L12_v_norm:2.4220e+00 +step:4000 train loss:3.655919 +step:4001 train loss:3.733519 +step:4002 train loss:3.711759 +step:4003 train loss:3.746214 +step:4004 train loss:3.653293 +step:4005 train loss:3.748859 +step:4006 train loss:3.757139 +step:4007 train loss:3.680431 +step:4008 train loss:3.638076 +step:4009 train loss:3.719453 +step:4010 train loss:3.696559 +step:4011 train loss:3.704252 +step:4012 train loss:3.718592 +step:4013 train loss:3.692291 +step:4014 train loss:3.706653 +step:4015 train loss:3.696293 +step:4016 train loss:3.706640 +step:4017 train loss:3.667549 +step:4018 train loss:3.612145 +step:4019 train loss:3.666729 +step:4020 train loss:3.734471 +step:4021 train loss:3.680624 +step:4022 train loss:3.685948 +step:4023 train loss:3.696846 +step:4024 train loss:3.611215 +step:4025 train loss:3.734545 +step:4026 train loss:3.720193 +step:4027 train loss:3.728943 +step:4028 train loss:3.745607 +step:4029 train loss:3.777529 +step:4030 train loss:3.690893 +step:4031 train loss:3.734209 +step:4032 train loss:3.692569 +step:4033 train loss:3.727385 +step:4034 
train loss:3.740160 +step:4035 train loss:3.716126 +step:4036 train loss:3.715253 +step:4037 train loss:3.732746 +step:4038 train loss:3.652000 +step:4039 train loss:3.707649 +step:4040 train loss:3.685744 +step:4041 train loss:3.680234 +step:4042 train loss:3.700144 +step:4043 train loss:3.684648 +step:4044 train loss:3.719140 +step:4045 train loss:3.724811 +step:4046 train loss:3.680488 +step:4047 train loss:3.707391 +step:4048 train loss:3.718128 +step:4049 train loss:3.680715 +step:4050 train loss:3.782937 +step:4051 train loss:3.696185 +step:4052 train loss:3.717288 +step:4053 train loss:3.767345 +step:4054 train loss:3.737340 +step:4055 train loss:3.754131 +step:4056 train loss:3.749567 +step:4057 train loss:3.691073 +step:4058 train loss:3.673321 +step:4059 train loss:3.751840 +step:4060 train loss:3.694585 +step:4061 train loss:3.665563 +step:4062 train loss:3.779817 +step:4063 train loss:3.727692 +step:4064 train loss:3.695366 +step:4065 train loss:3.681124 +step:4066 train loss:3.710279 +step:4067 train loss:3.733921 +step:4068 train loss:3.700664 +step:4069 train loss:3.758888 +step:4070 train loss:3.673405 +step:4071 train loss:3.648503 +step:4072 train loss:3.722564 +step:4073 train loss:3.656075 +step:4074 train loss:3.714958 +step:4075 train loss:3.778538 +step:4076 train loss:3.632690 +step:4077 train loss:3.715113 +step:4078 train loss:3.812102 +step:4079 train loss:3.757313 +step:4080 train loss:3.700389 +step:4081 train loss:3.670920 +step:4082 train loss:3.724961 +step:4083 train loss:3.660628 +step:4084 train loss:3.681966 +step:4085 train loss:3.912700 +step:4086 train loss:3.682116 +step:4087 train loss:3.724526 +step:4088 train loss:3.711333 +step:4089 train loss:3.699720 +step:4090 train loss:3.717337 +step:4091 train loss:3.741251 +step:4092 train loss:3.665406 +step:4093 train loss:3.694216 +step:4094 train loss:3.715461 +step:4095 train loss:3.669382 +step:4096 train loss:3.704187 +step:4097 train loss:3.704475 +step:4098 train loss:3.677678 +step:4099 train loss:3.682075 +step:4100 train loss:3.736123 +step:4101 train loss:3.658539 +step:4102 train loss:3.690757 +step:4103 train loss:3.893759 +step:4104 train loss:3.710043 +step:4105 train loss:3.678009 +step:4106 train loss:3.749557 +step:4107 train loss:3.667612 +step:4108 train loss:3.675668 +step:4109 train loss:3.729259 +step:4110 train loss:3.738919 +step:4111 train loss:3.713511 +step:4112 train loss:3.732959 +step:4113 train loss:3.690634 +step:4114 train loss:3.639256 +step:4115 train loss:3.675563 +step:4116 train loss:3.662646 +step:4117 train loss:3.683082 +step:4118 train loss:3.735872 +step:4119 train loss:3.759024 +step:4120 train loss:3.680918 +step:4121 train loss:3.671931 +step:4122 train loss:3.737565 +step:4123 train loss:3.753793 +step:4124 train loss:3.732058 +step:4125 train loss:3.767388 +step:4126 train loss:3.702878 +step:4127 train loss:3.721456 +step:4128 train loss:3.711310 +step:4129 train loss:3.756747 +step:4130 train loss:3.688722 +step:4131 train loss:3.722993 +step:4132 train loss:3.737961 +step:4133 train loss:3.688143 +step:4134 train loss:3.745111 +step:4135 train loss:3.676316 +step:4136 train loss:3.698843 +step:4137 train loss:3.669920 +step:4138 train loss:3.677476 +step:4139 train loss:3.723458 +step:4140 train loss:3.682322 +step:4141 train loss:3.647361 +step:4142 train loss:3.690145 +step:4143 train loss:3.727472 +step:4144 train loss:3.680472 +step:4145 train loss:3.645662 +step:4146 train loss:3.714456 +step:4147 train loss:3.687416 +step:4148 train loss:3.682579 
+step:4149 train loss:3.761754 +step:4150 train loss:3.728698 +step:4151 train loss:3.706951 +step:4152 train loss:3.731351 +step:4153 train loss:3.737128 +step:4154 train loss:3.743480 +step:4155 train loss:3.765495 +step:4156 train loss:3.641862 +step:4157 train loss:3.664203 +step:4158 train loss:3.720845 +step:4159 train loss:3.623990 +step:4160 train loss:3.712331 +step:4161 train loss:3.716152 +step:4162 train loss:3.623346 +step:4163 train loss:3.703516 +step:4164 train loss:3.654354 +step:4165 train loss:3.655148 +step:4166 train loss:3.720681 +step:4167 train loss:3.718177 +step:4168 train loss:3.707944 +step:4169 train loss:3.738357 +step:4170 train loss:3.856596 +step:4171 train loss:3.714504 +step:4172 train loss:3.727978 +step:4173 train loss:3.723459 +step:4174 train loss:3.689140 +step:4175 train loss:3.774553 +step:4176 train loss:3.704257 +step:4177 train loss:3.725004 +step:4178 train loss:3.701814 +step:4179 train loss:3.653276 +step:4180 train loss:3.650045 +step:4181 train loss:3.701855 +step:4182 train loss:3.685543 +step:4183 train loss:3.619882 +step:4184 train loss:3.693431 +step:4185 train loss:3.757154 +step:4186 train loss:3.739064 +step:4187 train loss:3.744838 +step:4188 train loss:3.716300 +step:4189 train loss:3.680828 +step:4190 train loss:3.719100 +step:4191 train loss:3.668681 +step:4192 train loss:3.756910 +step:4193 train loss:3.663967 +step:4194 train loss:3.646412 +step:4195 train loss:3.645803 +step:4196 train loss:3.712674 +step:4197 train loss:3.729596 +step:4198 train loss:3.650716 +step:4199 train loss:3.732000 +step:4200 train loss:3.696047 +step:4201 train loss:3.676686 +step:4202 train loss:3.689096 +step:4203 train loss:3.701639 +step:4204 train loss:3.696007 +step:4205 train loss:3.710241 +step:4206 train loss:3.728764 +step:4207 train loss:3.728353 +step:4208 train loss:3.689290 +step:4209 train loss:3.755970 +step:4210 train loss:3.783934 +step:4211 train loss:3.665354 +step:4212 train loss:3.708266 +step:4213 train loss:3.658181 +step:4214 train loss:3.669001 +step:4215 train loss:3.685905 +step:4216 train loss:3.655615 +step:4217 train loss:3.679451 +step:4218 train loss:3.723268 +step:4219 train loss:3.721587 +step:4220 train loss:3.803905 +step:4221 train loss:3.697941 +step:4222 train loss:3.758095 +step:4223 train loss:3.684614 +step:4224 train loss:3.754245 +step:4225 train loss:3.679493 +step:4226 train loss:3.735557 +step:4227 train loss:3.708291 +step:4228 train loss:3.683022 +step:4229 train loss:3.689189 +step:4230 train loss:3.672018 +step:4231 train loss:3.660026 +step:4232 train loss:3.708748 +step:4233 train loss:3.618109 +step:4234 train loss:3.699774 +step:4235 train loss:3.774784 +step:4236 train loss:3.743941 +step:4237 train loss:3.728351 +step:4238 train loss:3.736178 +step:4239 train loss:3.786637 +step:4240 train loss:3.694045 +step:4241 train loss:3.619499 +step:4242 train loss:3.737755 +step:4243 train loss:3.739512 +step:4244 train loss:3.750165 +step:4245 train loss:3.808590 +step:4246 train loss:3.678228 +step:4247 train loss:3.740817 +step:4248 train loss:3.689793 +step:4249 train loss:3.692448 +step:4250 validation loss:3.617519 +step:4250 train loss:3.675831 +step:4251 train loss:3.771456 +step:4252 train loss:3.679797 +step:4253 train loss:3.675601 +step:4254 train loss:3.684255 +step:4255 train loss:3.667105 +step:4256 train loss:3.681789 +step:4257 train loss:3.742146 +step:4258 train loss:3.599499 +step:4259 train loss:3.666774 +step:4260 train loss:3.729383 +step:4261 train loss:3.714963 +step:4262 
train loss:3.847733 +step:4263 train loss:3.794129 +step:4264 train loss:3.734009 +step:4265 train loss:3.724723 +step:4266 train loss:3.721951 +step:4267 train loss:3.723411 +step:4268 train loss:3.665866 +step:4269 train loss:3.758599 +step:4270 train loss:3.739222 +step:4271 train loss:3.652547 +step:4272 train loss:3.708341 +step:4273 train loss:3.684230 +step:4274 train loss:3.669734 +step:4275 train loss:3.689482 +step:4276 train loss:3.656426 +step:4277 train loss:3.790042 +step:4278 train loss:3.640781 +step:4279 train loss:3.669854 +step:4280 train loss:3.752765 +step:4281 train loss:3.735479 +step:4282 train loss:3.801913 +step:4283 train loss:3.656121 +step:4284 train loss:3.685675 +step:4285 train loss:3.689797 +step:4286 train loss:3.749842 +step:4287 train loss:3.752188 +step:4288 train loss:3.733362 +step:4289 train loss:3.682726 +step:4290 train loss:3.694724 +step:4291 train loss:3.654634 +step:4292 train loss:3.696784 +step:4293 train loss:3.707801 +step:4294 train loss:3.692260 +step:4295 train loss:3.628069 +step:4296 train loss:3.701051 +step:4297 train loss:3.683449 +step:4298 train loss:3.694346 +step:4299 train loss:3.689447 +step:4300 train loss:3.807140 +step:4301 train loss:3.624676 +step:4302 train loss:3.765640 +step:4303 train loss:3.643630 +step:4304 train loss:3.650945 +step:4305 train loss:3.672811 +step:4306 train loss:3.744707 +step:4307 train loss:3.660722 +step:4308 train loss:3.659628 +step:4309 train loss:3.728535 +step:4310 train loss:3.665704 +step:4311 train loss:3.716112 +step:4312 train loss:3.714853 +step:4313 train loss:3.706122 +step:4314 train loss:3.654327 +step:4315 train loss:3.683515 +step:4316 train loss:3.631465 +step:4317 train loss:3.688314 +step:4318 train loss:3.728279 +step:4319 train loss:3.676031 +step:4320 train loss:3.735857 +step:4321 train loss:3.720927 +step:4322 train loss:3.677442 +step:4323 train loss:3.616753 +step:4324 train loss:3.706574 +step:4325 train loss:3.685090 +step:4326 train loss:3.675981 +step:4327 train loss:3.779216 +step:4328 train loss:3.692250 +step:4329 train loss:3.646394 +step:4330 train loss:3.692754 +step:4331 train loss:3.707307 +step:4332 train loss:3.733561 +step:4333 train loss:3.694103 +step:4334 train loss:3.714140 +step:4335 train loss:3.712960 +step:4336 train loss:3.722198 +step:4337 train loss:3.685749 +step:4338 train loss:3.810982 +step:4339 train loss:3.715707 +step:4340 train loss:3.720272 +step:4341 train loss:3.690352 +step:4342 train loss:3.702541 +step:4343 train loss:3.822741 +step:4344 train loss:3.712109 +step:4345 train loss:3.727265 +step:4346 train loss:3.742177 +step:4347 train loss:3.750809 +step:4348 train loss:3.663306 +step:4349 train loss:3.745510 +step:4350 train loss:3.685546 +step:4351 train loss:3.638848 +step:4352 train loss:3.716944 +step:4353 train loss:3.662604 +step:4354 train loss:3.718453 +step:4355 train loss:3.678975 +step:4356 train loss:3.702118 +step:4357 train loss:3.683445 +step:4358 train loss:3.776237 +step:4359 train loss:3.727325 +step:4360 train loss:3.643876 +step:4361 train loss:3.688964 +step:4362 train loss:3.712677 +step:4363 train loss:3.728502 +step:4364 train loss:3.697170 +step:4365 train loss:3.678679 +step:4366 train loss:3.721527 +step:4367 train loss:3.736655 +step:4368 train loss:3.714394 +step:4369 train loss:3.583083 +step:4370 train loss:3.714348 +step:4371 train loss:3.622622 +step:4372 train loss:3.770432 +step:4373 train loss:3.712457 +step:4374 train loss:3.680503 +step:4375 train loss:3.725146 +step:4376 train loss:3.735282 
+step:4377 train loss:3.668495 +step:4378 train loss:3.680839 +step:4379 train loss:3.760778 +step:4380 train loss:3.744486 +step:4381 train loss:3.643673 +step:4382 train loss:3.688321 +step:4383 train loss:3.719551 +step:4384 train loss:3.711104 +step:4385 train loss:3.640503 +step:4386 train loss:3.696285 +step:4387 train loss:3.666754 +step:4388 train loss:3.685339 +step:4389 train loss:3.716302 +step:4390 train loss:3.753605 +step:4391 train loss:3.682782 +step:4392 train loss:3.754722 +step:4393 train loss:3.718246 +step:4394 train loss:3.654965 +step:4395 train loss:3.710377 +step:4396 train loss:3.686485 +step:4397 train loss:3.728089 +step:4398 train loss:3.675063 +step:4399 train loss:3.665097 +step:4400 train loss:3.669883 +step:4401 train loss:3.734004 +step:4402 train loss:3.730201 +step:4403 train loss:3.684032 +step:4404 train loss:3.710495 +step:4405 train loss:3.632176 +step:4406 train loss:3.709517 +step:4407 train loss:3.645971 +step:4408 train loss:3.740685 +step:4409 train loss:3.697192 +step:4410 train loss:3.705407 +step:4411 train loss:3.665412 +step:4412 train loss:3.777927 +step:4413 train loss:3.676224 +step:4414 train loss:3.684302 +step:4415 train loss:3.669655 +step:4416 train loss:3.663067 +step:4417 train loss:3.656196 +step:4418 train loss:3.729557 +step:4419 train loss:3.699038 +step:4420 train loss:3.703693 +step:4421 train loss:3.732418 +step:4422 train loss:3.752086 +step:4423 train loss:3.708532 +step:4424 train loss:3.693666 +step:4425 train loss:3.652820 +step:4426 train loss:3.730288 +step:4427 train loss:3.690237 +step:4428 train loss:3.630317 +step:4429 train loss:3.690217 +step:4430 train loss:3.727657 +step:4431 train loss:3.721481 +step:4432 train loss:3.627730 +step:4433 train loss:3.679858 +step:4434 train loss:3.678748 +step:4435 train loss:3.709372 +step:4436 train loss:3.647551 +step:4437 train loss:3.724203 +step:4438 train loss:3.690152 +step:4439 train loss:3.697950 +step:4440 train loss:3.695778 +step:4441 train loss:3.695954 +step:4442 train loss:3.743219 +step:4443 train loss:3.681098 +step:4444 train loss:3.766737 +step:4445 train loss:3.731210 +step:4446 train loss:3.664839 +step:4447 train loss:3.706413 +step:4448 train loss:3.728680 +step:4449 train loss:3.667881 +step:4450 train loss:3.683018 +step:4451 train loss:3.736107 +step:4452 train loss:3.792694 +step:4453 train loss:3.728666 +step:4454 train loss:3.699568 +step:4455 train loss:3.748099 +step:4456 train loss:3.694568 +step:4457 train loss:3.694877 +step:4458 train loss:3.701883 +step:4459 train loss:3.737691 +step:4460 train loss:3.649363 +step:4461 train loss:3.621973 +step:4462 train loss:3.676425 +step:4463 train loss:3.699077 +step:4464 train loss:3.666468 +step:4465 train loss:3.702555 +step:4466 train loss:3.793265 +step:4467 train loss:3.674505 +step:4468 train loss:3.667784 +step:4469 train loss:3.662504 +step:4470 train loss:3.636976 +step:4471 train loss:3.696465 +step:4472 train loss:3.623981 +step:4473 train loss:3.710045 +step:4474 train loss:3.737371 +step:4475 train loss:3.693702 +step:4476 train loss:3.661041 +step:4477 train loss:3.645939 +step:4478 train loss:3.702471 +step:4479 train loss:3.803834 +step:4480 train loss:3.639630 +step:4481 train loss:3.711430 +step:4482 train loss:3.671235 +step:4483 train loss:3.668495 +step:4484 train loss:3.713858 +step:4485 train loss:3.672881 +step:4486 train loss:3.774523 +step:4487 train loss:3.669796 +step:4488 train loss:3.669250 +step:4489 train loss:3.623896 +step:4490 train loss:3.705773 +step:4491 train 
loss:3.660178 +step:4492 train loss:3.689059 +step:4493 train loss:3.677018 +step:4494 train loss:3.669050 +step:4495 train loss:3.735314 +step:4496 train loss:3.677571 +step:4497 train loss:3.759235 +step:4498 train loss:3.649821 +step:4499 train loss:3.706745 +step:4500 validation loss:3.605747 total_sharp:2.8772e-04 L1_sharp:1.0545e-04 L2_sharp:9.9240e-06 L3_sharp:1.5065e-05 L4_sharp:3.1300e-05 L5_sharp:3.3491e-05 L6_sharp:2.5060e-05 L7_sharp:5.2300e-05 L8_sharp:7.0049e-05 L9_sharp:1.2507e-04 L10_sharp:1.3851e-04 L11_sharp:9.8350e-05 L12_sharp:2.9554e-04 total_fnorm:1.0883e+01 total_l1_linf:9.6128e+04 total_spectral:1.0883e+01 L1_fnorm:2.5803e+00 L2_fnorm:2.5243e+00 L3_fnorm:2.4626e+00 L4_fnorm:2.4791e+00 L5_fnorm:2.4524e+00 L6_fnorm:2.5482e+00 L7_fnorm:2.5646e+00 L8_fnorm:2.5228e+00 L9_fnorm:2.5207e+00 L10_fnorm:2.5209e+00 L11_fnorm:2.5458e+00 L12_fnorm:2.4613e+00 L1_l1linf:2.7006e+00 L2_l1linf:2.6981e+00 L3_l1linf:2.7383e+00 L4_l1linf:2.6541e+00 L5_l1linf:2.4946e+00 L6_l1linf:2.5231e+00 L7_l1linf:2.6933e+00 L8_l1linf:2.9144e+00 L9_l1linf:2.7932e+00 L10_l1linf:2.8104e+00 L11_l1linf:2.9545e+00 L12_l1linf:2.8899e+00 L1_spectral:3.0163e-01 L2_spectral:2.6945e-01 L3_spectral:2.8356e-01 L4_spectral:3.2322e-01 L5_spectral:3.1002e-01 L6_spectral:2.7112e-01 L7_spectral:2.9730e-01 L8_spectral:3.3748e-01 L9_spectral:3.8143e-01 L10_spectral:4.0533e-01 L11_spectral:4.0674e-01 L12_spectral:4.3223e-01 ip_v_neg_g:1.9394e-02 cos_v_neg_g:4.4064e-03 v_norm:1.0883e+01 g_norm:4.0444e-01 hv_norm:2.3844e-01 cos_v_hv:1.3132e-02 hg_norm:2.5000e+00 cos_g_hg:4.7141e-01 v_par:3.6143e-03 v_perp:1.0883e+01 L1_cos_v_neg_g:1.2772e-02 L1_v_norm:2.5803e+00 L2_cos_v_neg_g:3.2609e-03 L2_v_norm:2.5243e+00 L3_cos_v_neg_g:4.3238e-03 L3_v_norm:2.4626e+00 L4_cos_v_neg_g:5.7021e-03 L4_v_norm:2.4791e+00 L5_cos_v_neg_g:4.7724e-03 L5_v_norm:2.4524e+00 L6_cos_v_neg_g:5.4106e-03 L6_v_norm:2.5482e+00 L7_cos_v_neg_g:6.6405e-03 L7_v_norm:2.5646e+00 L8_cos_v_neg_g:8.1258e-03 L8_v_norm:2.5228e+00 L9_cos_v_neg_g:1.1265e-02 L9_v_norm:2.5207e+00 L10_cos_v_neg_g:1.3747e-02 L10_v_norm:2.5209e+00 L11_cos_v_neg_g:1.5081e-02 L11_v_norm:2.5458e+00 L12_cos_v_neg_g:1.5539e-02 L12_v_norm:2.4613e+00 +step:4500 train loss:3.614058 +step:4501 train loss:3.672440 +step:4502 train loss:3.798174 +step:4503 train loss:3.699708 +step:4504 train loss:3.708711 +step:4505 train loss:3.693373 +step:4506 train loss:3.666007 +step:4507 train loss:3.738549 +step:4508 train loss:3.674710 +step:4509 train loss:3.675029 +step:4510 train loss:3.707293 +step:4511 train loss:3.661463 +step:4512 train loss:3.683709 +step:4513 train loss:3.740010 +step:4514 train loss:3.647105 +step:4515 train loss:3.760748 +step:4516 train loss:3.737853 +step:4517 train loss:3.691322 +step:4518 train loss:3.630220 +step:4519 train loss:3.666443 +step:4520 train loss:3.678650 +step:4521 train loss:3.619195 +step:4522 train loss:3.674902 +step:4523 train loss:3.719977 +step:4524 train loss:3.704319 +step:4525 train loss:3.626072 +step:4526 train loss:3.666673 +step:4527 train loss:3.654847 +step:4528 train loss:3.682966 +step:4529 train loss:3.681418 +step:4530 train loss:3.775115 +step:4531 train loss:3.666249 +step:4532 train loss:3.688406 +step:4533 train loss:3.664599 +step:4534 train loss:3.756201 +step:4535 train loss:3.656293 +step:4536 train loss:3.723434 +step:4537 train loss:3.705921 +step:4538 train loss:3.686075 +step:4539 train loss:3.708768 +step:4540 train loss:3.688378 +step:4541 train loss:3.651142 +step:4542 train loss:3.703798 +step:4543 train loss:3.784552 +step:4544 
train loss:3.729594 +step:4545 train loss:3.670755 +step:4546 train loss:3.765892 +step:4547 train loss:3.724759 +step:4548 train loss:3.724473 +step:4549 train loss:3.681624 +step:4550 train loss:3.649172 +step:4551 train loss:3.665426 +step:4552 train loss:3.669201 +step:4553 train loss:3.748558 +step:4554 train loss:3.646651 +step:4555 train loss:3.757961 +step:4556 train loss:3.697997 +step:4557 train loss:3.625764 +step:4558 train loss:3.710762 +step:4559 train loss:3.718572 +step:4560 train loss:3.658072 +step:4561 train loss:3.646079 +step:4562 train loss:3.687175 +step:4563 train loss:3.639505 +step:4564 train loss:3.664652 +step:4565 train loss:3.665564 +step:4566 train loss:3.638579 +step:4567 train loss:3.667096 +step:4568 train loss:3.664004 +step:4569 train loss:3.648583 +step:4570 train loss:3.702054 +step:4571 train loss:3.680178 +step:4572 train loss:3.672475 +step:4573 train loss:3.680236 +step:4574 train loss:3.826472 +step:4575 train loss:3.661440 +step:4576 train loss:3.649398 +step:4577 train loss:3.691115 +step:4578 train loss:3.731580 +step:4579 train loss:3.679092 +step:4580 train loss:3.741511 +step:4581 train loss:3.679104 +step:4582 train loss:3.670640 +step:4583 train loss:3.677345 +step:4584 train loss:3.648065 +step:4585 train loss:3.729957 +step:4586 train loss:3.715824 +step:4587 train loss:3.617255 +step:4588 train loss:3.660973 +step:4589 train loss:3.738122 +step:4590 train loss:3.706580 +step:4591 train loss:3.646903 +step:4592 train loss:3.731353 +step:4593 train loss:3.652400 +step:4594 train loss:3.681128 +step:4595 train loss:3.705228 +step:4596 train loss:3.642133 +step:4597 train loss:3.776220 +step:4598 train loss:3.695113 +step:4599 train loss:3.650509 +step:4600 train loss:3.656871 +step:4601 train loss:3.681001 +step:4602 train loss:3.631098 +step:4603 train loss:3.641356 +step:4604 train loss:3.748516 +step:4605 train loss:3.669173 +step:4606 train loss:3.697800 +step:4607 train loss:3.676152 +step:4608 train loss:3.714345 +step:4609 train loss:3.672249 +step:4610 train loss:3.717469 +step:4611 train loss:3.740160 +step:4612 train loss:3.743860 +step:4613 train loss:3.720514 +step:4614 train loss:3.713967 +step:4615 train loss:3.655079 +step:4616 train loss:3.637223 +step:4617 train loss:3.680244 +step:4618 train loss:3.696544 +step:4619 train loss:3.658453 +step:4620 train loss:3.669790 +step:4621 train loss:3.675896 +step:4622 train loss:3.609888 +step:4623 train loss:3.716487 +step:4624 train loss:3.699618 +step:4625 train loss:3.658030 +step:4626 train loss:3.705553 +step:4627 train loss:3.676001 +step:4628 train loss:3.661394 +step:4629 train loss:3.701159 +step:4630 train loss:3.755879 +step:4631 train loss:3.757675 +step:4632 train loss:3.651913 +step:4633 train loss:3.664516 +step:4634 train loss:3.738760 +step:4635 train loss:3.703457 +step:4636 train loss:3.716316 +step:4637 train loss:3.654449 +step:4638 train loss:3.658031 +step:4639 train loss:3.656834 +step:4640 train loss:3.666236 +step:4641 train loss:3.674435 +step:4642 train loss:3.706757 +step:4643 train loss:3.669492 +step:4644 train loss:3.693561 +step:4645 train loss:3.711128 +step:4646 train loss:3.662403 +step:4647 train loss:3.621668 +step:4648 train loss:3.726378 +step:4649 train loss:3.740197 +step:4650 train loss:3.686070 +step:4651 train loss:3.686351 +step:4652 train loss:3.679391 +step:4653 train loss:3.733483 +step:4654 train loss:3.729929 +step:4655 train loss:3.632805 +step:4656 train loss:3.669025 +step:4657 train loss:3.721681 +step:4658 train loss:3.676960 
+step:4659 train loss:3.689988 +step:4660 train loss:3.734797 +step:4661 train loss:3.651577 +step:4662 train loss:3.665986 +step:4663 train loss:3.671964 +step:4664 train loss:3.729692 +step:4665 train loss:3.722091 +step:4666 train loss:3.720872 +step:4667 train loss:3.710534 +step:4668 train loss:3.680041 +step:4669 train loss:3.689905 +step:4670 train loss:3.717167 +step:4671 train loss:3.737881 +step:4672 train loss:3.608657 +step:4673 train loss:3.638717 +step:4674 train loss:3.765887 +step:4675 train loss:3.670135 +step:4676 train loss:3.630565 +step:4677 train loss:3.636795 +step:4678 train loss:3.605990 +step:4679 train loss:3.707589 +step:4680 train loss:3.641237 +step:4681 train loss:3.697268 +step:4682 train loss:3.642631 +step:4683 train loss:3.615953 +step:4684 train loss:3.733685 +step:4685 train loss:3.666114 +step:4686 train loss:3.680248 +step:4687 train loss:3.713971 +step:4688 train loss:3.644115 +step:4689 train loss:3.720926 +step:4690 train loss:3.659048 +step:4691 train loss:3.693977 +step:4692 train loss:3.621278 +step:4693 train loss:3.661902 +step:4694 train loss:3.703700 +step:4695 train loss:3.720878 +step:4696 train loss:3.708261 +step:4697 train loss:3.617350 +step:4698 train loss:3.638422 +step:4699 train loss:3.691279 +step:4700 train loss:3.660994 +step:4701 train loss:3.666591 +step:4702 train loss:3.623471 +step:4703 train loss:3.702526 +step:4704 train loss:3.688622 +step:4705 train loss:3.636158 +step:4706 train loss:3.642460 +step:4707 train loss:3.627341 +step:4708 train loss:3.696235 +step:4709 train loss:3.643152 +step:4710 train loss:3.659042 +step:4711 train loss:3.715368 +step:4712 train loss:3.615129 +step:4713 train loss:3.718559 +step:4714 train loss:3.619319 +step:4715 train loss:3.708927 +step:4716 train loss:3.675397 +step:4717 train loss:3.607566 +step:4718 train loss:3.695008 +step:4719 train loss:3.625526 +step:4720 train loss:3.723406 +step:4721 train loss:3.676175 +step:4722 train loss:3.729624 +step:4723 train loss:3.629266 +step:4724 train loss:3.678396 +step:4725 train loss:3.614267 +step:4726 train loss:3.661703 +step:4727 train loss:3.667096 +step:4728 train loss:3.675378 +step:4729 train loss:3.702986 +step:4730 train loss:3.600756 +step:4731 train loss:3.663054 +step:4732 train loss:3.615709 +step:4733 train loss:3.556188 +step:4734 train loss:3.691264 +step:4735 train loss:3.645283 +step:4736 train loss:3.685451 +step:4737 train loss:3.562835 +step:4738 train loss:3.711256 +step:4739 train loss:3.588716 +step:4740 train loss:3.700701 +step:4741 train loss:3.666315 +step:4742 train loss:3.629286 +step:4743 train loss:3.626711 +step:4744 train loss:3.670458 +step:4745 train loss:3.687072 +step:4746 train loss:3.727167 +step:4747 train loss:3.689629 +step:4748 train loss:3.589578 +step:4749 train loss:3.653627 +step:4750 validation loss:3.589072 +step:4750 train loss:3.601820 +step:4751 train loss:3.694737 +step:4752 train loss:3.628570 +step:4753 train loss:3.737587 +step:4754 train loss:3.604329 +step:4755 train loss:3.648393 +step:4756 train loss:3.718266 +step:4757 train loss:3.647028 +step:4758 train loss:3.663267 +step:4759 train loss:3.666921 +step:4760 train loss:3.692081 +step:4761 train loss:3.611578 +step:4762 train loss:3.644858 +step:4763 train loss:3.665192 +step:4764 train loss:3.724839 +step:4765 train loss:3.618051 +step:4766 train loss:3.642388 +step:4767 train loss:3.592257 +step:4768 train loss:3.652768 +step:4769 train loss:3.674584 +step:4770 train loss:3.635057 +step:4771 train loss:3.644739 +step:4772 
train loss:3.618948 +step:4773 train loss:3.655782 +step:4774 train loss:3.597127 +step:4775 train loss:3.729145 +step:4776 train loss:3.597058 +step:4777 train loss:3.671769 +step:4778 train loss:3.608545 +step:4779 train loss:3.659913 +step:4780 train loss:3.596089 +step:4781 train loss:3.602596 +step:4782 train loss:3.706852 +step:4783 train loss:3.698171 +step:4784 train loss:3.659954 +step:4785 train loss:3.658720 +step:4786 train loss:3.766344 +step:4787 train loss:3.602609 +step:4788 train loss:3.626431 +step:4789 train loss:3.647524 +step:4790 train loss:3.703321 +step:4791 train loss:3.663935 +step:4792 train loss:3.705542 +step:4793 train loss:3.625658 +step:4794 train loss:3.699701 +step:4795 train loss:3.648838 +step:4796 train loss:3.639246 +step:4797 train loss:3.647366 +step:4798 train loss:3.656721 +step:4799 train loss:3.651440 +step:4800 train loss:3.684005 +step:4801 train loss:3.670745 +step:4802 train loss:3.711199 +step:4803 train loss:3.695606 +step:4804 train loss:3.651016 +step:4805 train loss:3.648610 +step:4806 train loss:3.626186 +step:4807 train loss:3.733405 +step:4808 train loss:3.606167 +step:4809 train loss:3.709681 +step:4810 train loss:3.648730 +step:4811 train loss:3.670090 +step:4812 train loss:3.642152 +step:4813 train loss:3.600354 +step:4814 train loss:3.594809 +step:4815 train loss:3.588776 +step:4816 train loss:3.652585 +step:4817 train loss:3.593770 +step:4818 train loss:3.654463 +step:4819 train loss:3.653505 +step:4820 train loss:3.901620 +step:4821 train loss:3.680013 +step:4822 train loss:3.690416 +step:4823 train loss:3.620849 +step:4824 train loss:3.629058 +step:4825 train loss:3.609014 +step:4826 train loss:3.694809 +step:4827 train loss:3.641909 +step:4828 train loss:3.584967 +step:4829 train loss:3.687700 +step:4830 train loss:3.630992 +step:4831 train loss:3.776120 +step:4832 train loss:3.648843 +step:4833 train loss:3.684983 +step:4834 train loss:3.585254 +step:4835 train loss:3.678112 +step:4836 train loss:3.656870 +step:4837 train loss:3.686316 +step:4838 train loss:3.624510 +step:4839 train loss:3.689130 +step:4840 train loss:3.595081 +step:4841 train loss:3.690681 +step:4842 train loss:3.608110 +step:4843 train loss:3.682241 +step:4844 train loss:3.684980 +step:4845 train loss:3.625767 +step:4846 train loss:3.638548 +step:4847 train loss:3.625850 +step:4848 train loss:3.645762 +step:4849 train loss:3.602291 +step:4850 train loss:3.610941 +step:4851 train loss:3.606339 +step:4852 train loss:3.682079 +step:4853 train loss:3.658289 +step:4854 train loss:3.638432 +step:4855 train loss:3.701000 +step:4856 train loss:3.668679 +step:4857 train loss:3.678444 +step:4858 train loss:3.757846 +step:4859 train loss:3.602875 +step:4860 train loss:3.681102 +step:4861 train loss:3.653006 +step:4862 train loss:3.689201 +step:4863 train loss:3.623290 +step:4864 train loss:3.633357 +step:4865 train loss:3.626316 +step:4866 train loss:3.673010 +step:4867 train loss:3.639649 +step:4868 train loss:3.660545 +step:4869 train loss:3.610046 +step:4870 train loss:3.640738 +step:4871 train loss:3.722555 +step:4872 train loss:3.667182 +step:4873 train loss:3.665315 +step:4874 train loss:3.639844 +step:4875 train loss:3.602885 +step:4876 train loss:3.617191 +step:4877 train loss:3.621661 +step:4878 train loss:3.662869 +step:4879 train loss:3.618975 +step:4880 train loss:3.649140 +step:4881 train loss:3.590412 +step:4882 train loss:3.790078 +step:4883 train loss:3.600716 +step:4884 train loss:3.633214 +step:4885 train loss:3.608704 +step:4886 train loss:3.682820 
+step:4887 train loss:3.631927 +step:4888 train loss:3.647895 +step:4889 train loss:3.632397 +step:4890 train loss:3.682303 +step:4891 train loss:3.614317 +step:4892 train loss:3.619988 +step:4893 train loss:3.669139 +step:4894 train loss:3.606347 +step:4895 train loss:3.635566 +step:4896 train loss:3.622534 +step:4897 train loss:3.688760 +step:4898 train loss:3.646406 +step:4899 train loss:3.627106 +step:4900 train loss:3.672068 +step:4901 train loss:3.621111 +step:4902 train loss:3.615067 +step:4903 train loss:3.634647 +step:4904 train loss:3.651591 +step:4905 train loss:3.645611 +step:4906 train loss:3.644892 +step:4907 train loss:3.717851 +step:4908 train loss:3.625046 +step:4909 train loss:3.630674 +step:4910 train loss:3.653132 +step:4911 train loss:3.705308 +step:4912 train loss:3.679581 +step:4913 train loss:3.656380 +step:4914 train loss:3.647562 +step:4915 train loss:3.628452 +step:4916 train loss:3.570881 +step:4917 train loss:3.593912 +step:4918 train loss:3.627325 +step:4919 train loss:3.618968 +step:4920 train loss:3.616580 +step:4921 train loss:3.779958 +step:4922 train loss:3.676013 +step:4923 train loss:3.690914 +step:4924 train loss:3.690588 +step:4925 train loss:3.623406 +step:4926 train loss:3.619868 +step:4927 train loss:3.646152 +step:4928 train loss:3.687326 +step:4929 train loss:3.644240 +step:4930 train loss:3.623772 +step:4931 train loss:3.622509 +step:4932 train loss:3.624441 +step:4933 train loss:3.622050 +step:4934 train loss:3.686519 +step:4935 train loss:3.677977 +step:4936 train loss:3.633784 +step:4937 train loss:3.744301 +step:4938 train loss:3.732237 +step:4939 train loss:3.599347 +step:4940 train loss:3.677759 +step:4941 train loss:3.579888 +step:4942 train loss:3.621091 +step:4943 train loss:3.622967 +step:4944 train loss:3.623501 +step:4945 train loss:3.672626 +step:4946 train loss:3.647110 +step:4947 train loss:3.629769 +step:4948 train loss:3.664073 +step:4949 train loss:3.572952 +step:4950 train loss:3.652854 +step:4951 train loss:3.701543 +step:4952 train loss:3.646300 +step:4953 train loss:3.677952 +step:4954 train loss:3.582316 +step:4955 train loss:3.655085 +step:4956 train loss:3.683182 +step:4957 train loss:3.680496 +step:4958 train loss:3.591520 +step:4959 train loss:3.709758 +step:4960 train loss:3.635265 +step:4961 train loss:3.656714 +step:4962 train loss:3.616830 +step:4963 train loss:3.664425 +step:4964 train loss:3.618255 +step:4965 train loss:3.767876 +step:4966 train loss:3.619053 +step:4967 train loss:3.723665 +step:4968 train loss:3.614311 +step:4969 train loss:3.657760 +step:4970 train loss:3.647718 +step:4971 train loss:3.599857 +step:4972 train loss:3.644765 +step:4973 train loss:3.651072 +step:4974 train loss:3.640569 +step:4975 train loss:3.722964 +step:4976 train loss:3.704076 +step:4977 train loss:3.648201 +step:4978 train loss:3.634667 +step:4979 train loss:3.635435 +step:4980 train loss:3.741926 +step:4981 train loss:3.579223 +step:4982 train loss:3.662993 +step:4983 train loss:3.583245 +step:4984 train loss:3.772721 +step:4985 train loss:3.671830 +step:4986 train loss:3.615779 +step:4987 train loss:3.633070 +step:4988 train loss:3.830510 +step:4989 train loss:3.637309 +step:4990 train loss:3.633789 +step:4991 train loss:3.647042 +step:4992 train loss:3.631840 +step:4993 train loss:3.609925 +step:4994 train loss:3.718322 +step:4995 train loss:3.642573 +step:4996 train loss:3.733169 +step:4997 train loss:3.630523 +step:4998 train loss:3.632229 +step:4999 train loss:3.613411 +step:5000 validation loss:3.580704 
total_sharp:1.8586e-04 L1_sharp:6.1332e-05 L2_sharp:8.2852e-06 L3_sharp:1.1634e-05 L4_sharp:2.3566e-05 L5_sharp:3.0970e-05 L6_sharp:2.3491e-05 L7_sharp:4.0215e-05 L8_sharp:7.0227e-05 L9_sharp:8.3540e-05 L10_sharp:7.6109e-05 L11_sharp:6.1305e-05 L12_sharp:2.0912e-04 total_fnorm:1.0750e+01 total_l1_linf:9.4822e+04 total_spectral:1.0750e+01 L1_fnorm:2.4997e+00 L2_fnorm:2.4951e+00 L3_fnorm:2.4383e+00 L4_fnorm:2.4314e+00 L5_fnorm:2.4171e+00 L6_fnorm:2.5269e+00 L7_fnorm:2.5629e+00 L8_fnorm:2.5220e+00 L9_fnorm:2.4949e+00 L10_fnorm:2.4726e+00 L11_fnorm:2.5070e+00 L12_fnorm:2.3999e+00 L1_l1linf:2.6143e+00 L2_l1linf:2.6044e+00 L3_l1linf:2.5748e+00 L4_l1linf:2.6319e+00 L5_l1linf:2.4005e+00 L6_l1linf:2.4766e+00 L7_l1linf:2.6120e+00 L8_l1linf:2.7312e+00 L9_l1linf:2.6291e+00 L10_l1linf:2.6971e+00 L11_l1linf:2.7367e+00 L12_l1linf:2.8416e+00 L1_spectral:2.8817e-01 L2_spectral:2.5804e-01 L3_spectral:2.6946e-01 L4_spectral:3.3117e-01 L5_spectral:3.0056e-01 L6_spectral:2.6363e-01 L7_spectral:2.9159e-01 L8_spectral:3.1704e-01 L9_spectral:3.4972e-01 L10_spectral:3.3574e-01 L11_spectral:3.4745e-01 L12_spectral:3.7909e-01 ip_v_neg_g:1.4380e-02 cos_v_neg_g:2.1342e-03 v_norm:1.0750e+01 g_norm:6.2677e-01 hv_norm:1.8043e-01 cos_v_hv:1.1074e-02 hg_norm:8.2422e+00 cos_g_hg:7.2708e-01 v_par:2.4878e-03 v_perp:1.0750e+01 L1_cos_v_neg_g:8.1093e-03 L1_v_norm:2.4997e+00 L2_cos_v_neg_g:2.2473e-03 L2_v_norm:2.4951e+00 L3_cos_v_neg_g:2.5254e-03 L3_v_norm:2.4383e+00 L4_cos_v_neg_g:2.6563e-03 L4_v_norm:2.4314e+00 L5_cos_v_neg_g:2.6859e-03 L5_v_norm:2.4171e+00 L6_cos_v_neg_g:4.3583e-03 L6_v_norm:2.5269e+00 L7_cos_v_neg_g:3.9636e-03 L7_v_norm:2.5629e+00 L8_cos_v_neg_g:5.2629e-03 L8_v_norm:2.5220e+00 L9_cos_v_neg_g:6.6311e-03 L9_v_norm:2.4949e+00 L10_cos_v_neg_g:7.3205e-03 L10_v_norm:2.4726e+00 L11_cos_v_neg_g:7.5163e-03 L11_v_norm:2.5070e+00 L12_cos_v_neg_g:7.6934e-03 L12_v_norm:2.3999e+00 +step:5000 train loss:3.730173 +step:5001 train loss:3.595753 +step:5002 train loss:3.652652 +step:5003 train loss:3.647431 +step:5004 train loss:3.639047 +step:5005 train loss:3.637821 +step:5006 train loss:3.676195 +step:5007 train loss:3.682135 +step:5008 train loss:3.618314 +step:5009 train loss:3.661323 +step:5010 train loss:3.612493 +step:5011 train loss:3.642130 +step:5012 train loss:3.616229 +step:5013 train loss:3.720448 +step:5014 train loss:3.634875 +step:5015 train loss:3.711077 +step:5016 train loss:3.637287 +step:5017 train loss:3.684110 +step:5018 train loss:3.603381 +step:5019 train loss:3.637151 +step:5020 train loss:3.628980 +step:5021 train loss:3.644640 +step:5022 train loss:3.676138 +step:5023 train loss:3.646983 +step:5024 train loss:3.698216 +step:5025 train loss:3.586738 +step:5026 train loss:3.710573 +step:5027 train loss:3.643644 +step:5028 train loss:3.710311 +step:5029 train loss:3.604713 +step:5030 train loss:3.641647 +step:5031 train loss:3.629566 +step:5032 train loss:3.662004 +step:5033 train loss:3.644037 +step:5034 train loss:3.640860 +step:5035 train loss:3.723176 +step:5036 train loss:3.673719 +step:5037 train loss:3.624154 +step:5038 train loss:3.674346 +step:5039 train loss:3.686732 +step:5040 train loss:3.648203 +step:5041 train loss:3.665339 +step:5042 train loss:3.568941 +step:5043 train loss:3.711980 +step:5044 train loss:3.631679 +step:5045 train loss:3.679364 +step:5046 train loss:3.599855 +step:5047 train loss:3.677057 +step:5048 train loss:3.594989 +step:5049 train loss:3.727186 +step:5050 train loss:3.614532 +step:5051 train loss:3.657824 +step:5052 train loss:3.556479 +step:5053 train loss:3.742328 
+step:5054 train loss:3.626856 +step:5055 train loss:3.650899 +step:5056 train loss:3.687767 +step:5057 train loss:3.615233 +step:5058 train loss:3.650534 +step:5059 train loss:3.612291 +step:5060 train loss:3.657205 +step:5061 train loss:3.653330 +step:5062 train loss:3.621059 +step:5063 train loss:3.617579 +step:5064 train loss:3.623312 +step:5065 train loss:3.608986 +step:5066 train loss:3.666819 +step:5067 train loss:3.653405 +step:5068 train loss:3.636371 +step:5069 train loss:3.611085 +step:5070 train loss:3.638705 +step:5071 train loss:3.707515 +step:5072 train loss:3.603848 +step:5073 train loss:3.607408 +step:5074 train loss:3.555528 +step:5075 train loss:3.624625 +step:5076 train loss:3.554784 +step:5077 train loss:3.619884 +step:5078 train loss:3.620243 +step:5079 train loss:3.658077 +step:5080 train loss:3.637640 +step:5081 train loss:3.645490 +step:5082 train loss:3.637871 +step:5083 train loss:3.697114 +step:5084 train loss:3.675866 +step:5085 train loss:3.634923 +step:5086 train loss:3.710603 +step:5087 train loss:3.697856 +step:5088 train loss:3.618289 +step:5089 train loss:3.682914 +step:5090 train loss:3.627763 +step:5091 train loss:3.628975 +step:5092 train loss:3.730856 +step:5093 train loss:3.614143 +step:5094 train loss:3.609750 +step:5095 train loss:3.661622 +step:5096 train loss:3.626703 +step:5097 train loss:3.636369 +step:5098 train loss:3.640130 +step:5099 train loss:3.599926 +step:5100 train loss:3.613443 +step:5101 train loss:3.804490 +step:5102 train loss:3.652872 +step:5103 train loss:3.660922 +step:5104 train loss:3.708426 +step:5105 train loss:3.654848 +step:5106 train loss:3.610030 +step:5107 train loss:3.625762 +step:5108 train loss:3.620759 +step:5109 train loss:3.701302 +step:5110 train loss:3.611382 +step:5111 train loss:3.700688 +step:5112 train loss:3.613215 +step:5113 train loss:3.595081 +step:5114 train loss:3.640339 +step:5115 train loss:3.602094 +step:5116 train loss:3.658653 +step:5117 train loss:3.600017 +step:5118 train loss:3.633276 +step:5119 train loss:3.614764 +step:5120 train loss:3.652708 +step:5121 train loss:3.601537 +step:5122 train loss:3.612710 +step:5123 train loss:3.608891 +step:5124 train loss:3.558681 +step:5125 train loss:3.668867 +step:5126 train loss:3.655456 +step:5127 train loss:3.658849 +step:5128 train loss:3.671113 +step:5129 train loss:3.601484 +step:5130 train loss:3.612741 +step:5131 train loss:3.550733 +step:5132 train loss:3.672988 +step:5133 train loss:3.639521 +step:5134 train loss:3.641222 +step:5135 train loss:3.591715 +step:5136 train loss:3.658487 +step:5137 train loss:3.658069 +step:5138 train loss:3.639258 +step:5139 train loss:3.676106 +step:5140 train loss:3.646625 +step:5141 train loss:3.679237 +step:5142 train loss:3.625708 +step:5143 train loss:3.654246 +step:5144 train loss:3.653293 +step:5145 train loss:3.595742 +step:5146 train loss:3.588556 +step:5147 train loss:3.665568 +step:5148 train loss:3.612213 +step:5149 train loss:3.668976 +step:5150 train loss:3.647921 +step:5151 train loss:3.612633 +step:5152 train loss:3.659683 +step:5153 train loss:3.627125 +step:5154 train loss:3.642576 +step:5155 train loss:3.649688 +step:5156 train loss:3.632914 +step:5157 train loss:3.627000 +step:5158 train loss:3.652592 +step:5159 train loss:3.685529 +step:5160 train loss:3.750244 +step:5161 train loss:3.680768 +step:5162 train loss:3.697590 +step:5163 train loss:3.613514 +step:5164 train loss:3.680250 +step:5165 train loss:3.691804 +step:5166 train loss:3.628091 +step:5167 train loss:3.728631 +step:5168 train 
loss:3.644118 +step:5169 train loss:3.675754 +step:5170 train loss:3.654952 +step:5171 train loss:3.697084 +step:5172 train loss:3.614281 +step:5173 train loss:3.678533 +step:5174 train loss:3.614048 +step:5175 train loss:3.645565 +step:5176 train loss:3.635910 +step:5177 train loss:3.635123 +step:5178 train loss:3.697071 +step:5179 train loss:3.610072 +step:5180 train loss:3.687881 +step:5181 train loss:3.630703 +step:5182 train loss:3.688144 +step:5183 train loss:3.620854 +step:5184 train loss:3.600245 +step:5185 train loss:3.627251 +step:5186 train loss:3.682244 +step:5187 train loss:3.677061 +step:5188 train loss:3.609130 +step:5189 train loss:3.652749 +step:5190 train loss:3.637374 +step:5191 train loss:3.616907 +step:5192 train loss:3.601965 +step:5193 train loss:3.684507 +step:5194 train loss:3.636977 +step:5195 train loss:3.609715 +step:5196 train loss:3.678783 +step:5197 train loss:3.724792 +step:5198 train loss:3.641300 +step:5199 train loss:3.630925 +step:5200 train loss:3.652462 +step:5201 train loss:3.643423 +step:5202 train loss:3.646009 +step:5203 train loss:3.652616 +step:5204 train loss:3.623464 +step:5205 train loss:3.664756 +step:5206 train loss:3.600367 +step:5207 train loss:3.605400 +step:5208 train loss:3.666154 +step:5209 train loss:3.683718 +step:5210 train loss:3.595640 +step:5211 train loss:3.641528 +step:5212 train loss:3.653919 +step:5213 train loss:3.627006 +step:5214 train loss:3.677307 +step:5215 train loss:3.784055 +step:5216 train loss:3.637635 +step:5217 train loss:3.614526 +step:5218 train loss:3.620921 +step:5219 train loss:3.683052 +step:5220 train loss:3.598410 +step:5221 train loss:3.600090 +step:5222 train loss:3.686138 +step:5223 train loss:3.675595 +step:5224 train loss:3.575835 +step:5225 train loss:3.724037 +step:5226 train loss:3.635218 +step:5227 train loss:3.709649 +step:5228 train loss:3.678239 +step:5229 train loss:3.619303 +step:5230 train loss:3.634663 +step:5231 train loss:3.579586 +step:5232 train loss:3.702857 +step:5233 train loss:3.663342 +step:5234 train loss:3.667417 +step:5235 train loss:3.614660 +step:5236 train loss:3.691319 +step:5237 train loss:3.747475 +step:5238 train loss:3.646766 +step:5239 train loss:3.708290 +step:5240 train loss:3.589556 +step:5241 train loss:3.650841 +step:5242 train loss:3.622713 +step:5243 train loss:3.626102 +step:5244 train loss:3.626699 +step:5245 train loss:3.670398 +step:5246 train loss:3.710131 +step:5247 train loss:3.640783 +step:5248 train loss:3.608975 +step:5249 train loss:3.665787 +step:5250 validation loss:3.565110 +step:5250 train loss:3.633065 +step:5251 train loss:3.700109 +step:5252 train loss:3.591903 +step:5253 train loss:3.739766 +step:5254 train loss:3.617548 +step:5255 train loss:3.686759 +step:5256 train loss:3.605711 +step:5257 train loss:3.655139 +step:5258 train loss:3.658819 +step:5259 train loss:3.644362 +step:5260 train loss:3.637779 +step:5261 train loss:3.628211 +step:5262 train loss:3.670291 +step:5263 train loss:3.655846 +step:5264 train loss:3.606316 +step:5265 train loss:3.685329 +step:5266 train loss:3.600693 +step:5267 train loss:3.612978 +step:5268 train loss:3.595918 +step:5269 train loss:3.597267 +step:5270 train loss:3.650281 +step:5271 train loss:3.573547 +step:5272 train loss:3.667309 +step:5273 train loss:3.575281 +step:5274 train loss:3.624484 +step:5275 train loss:3.638977 +step:5276 train loss:3.762621 +step:5277 train loss:3.665769 +step:5278 train loss:3.611083 +step:5279 train loss:3.659295 +step:5280 train loss:3.636510 +step:5281 train loss:3.628737 
+step:5282 train loss:3.602524 +step:5283 train loss:3.603627 +step:5284 train loss:3.610866 +step:5285 train loss:3.677927 +step:5286 train loss:3.586104 +step:5287 train loss:3.689084 +step:5288 train loss:3.662362 +step:5289 train loss:3.631072 +step:5290 train loss:3.685942 +step:5291 train loss:3.636927 +step:5292 train loss:3.656718 +step:5293 train loss:3.626117 +step:5294 train loss:3.613865 +step:5295 train loss:3.622815 +step:5296 train loss:3.611309 +step:5297 train loss:3.631368 +step:5298 train loss:3.581064 +step:5299 train loss:3.675807 +step:5300 train loss:3.623276 +step:5301 train loss:3.692828 +step:5302 train loss:3.695423 +step:5303 train loss:3.557304 +step:5304 train loss:3.592306 +step:5305 train loss:3.566423 +step:5306 train loss:3.601520 +step:5307 train loss:3.605312 +step:5308 train loss:3.696568 +step:5309 train loss:3.647340 +step:5310 train loss:3.632097 +step:5311 train loss:3.703593 +step:5312 train loss:3.587414 +step:5313 train loss:3.675246 +step:5314 train loss:3.667961 +step:5315 train loss:3.628300 +step:5316 train loss:3.662848 +step:5317 train loss:3.677286 +step:5318 train loss:3.630843 +step:5319 train loss:3.659127 +step:5320 train loss:3.609560 +step:5321 train loss:3.730050 +step:5322 train loss:3.641889 +step:5323 train loss:3.645841 +step:5324 train loss:3.586220 +step:5325 train loss:3.668120 +step:5326 train loss:3.663405 +step:5327 train loss:3.550997 +step:5328 train loss:3.689020 +step:5329 train loss:3.656137 +step:5330 train loss:3.654725 +step:5331 train loss:3.702096 +step:5332 train loss:3.623239 +step:5333 train loss:3.689383 +step:5334 train loss:3.663615 +step:5335 train loss:3.719423 +step:5336 train loss:3.755130 +step:5337 train loss:3.589383 +step:5338 train loss:3.600141 +step:5339 train loss:3.617499 +step:5340 train loss:3.643268 +step:5341 train loss:3.660962 +step:5342 train loss:3.561590 +step:5343 train loss:3.719737 +step:5344 train loss:3.600860 +step:5345 train loss:3.601822 +step:5346 train loss:3.605169 +step:5347 train loss:3.629223 +step:5348 train loss:3.670460 +step:5349 train loss:3.612695 +step:5350 train loss:3.651678 +step:5351 train loss:3.726649 +step:5352 train loss:3.758067 +step:5353 train loss:3.675626 +step:5354 train loss:3.647371 +step:5355 train loss:3.612277 +step:5356 train loss:3.635759 +step:5357 train loss:3.613659 +step:5358 train loss:3.634547 +step:5359 train loss:3.647402 +step:5360 train loss:3.618371 +step:5361 train loss:3.622300 +step:5362 train loss:3.611109 +step:5363 train loss:3.599769 +step:5364 train loss:3.602563 +step:5365 train loss:3.633860 +step:5366 train loss:3.666490 +step:5367 train loss:3.589265 +step:5368 train loss:3.659766 +step:5369 train loss:3.676149 +step:5370 train loss:3.580703 +step:5371 train loss:3.632305 +step:5372 train loss:3.647613 +step:5373 train loss:3.695024 +step:5374 train loss:3.574948 +step:5375 train loss:3.620928 +step:5376 train loss:3.690535 +step:5377 train loss:3.623667 +step:5378 train loss:3.601414 +step:5379 train loss:3.602271 +step:5380 train loss:3.640337 +step:5381 train loss:3.679986 +step:5382 train loss:3.593057 +step:5383 train loss:3.641100 +step:5384 train loss:3.658916 +step:5385 train loss:3.663232 +step:5386 train loss:3.641723 +step:5387 train loss:3.647220 +step:5388 train loss:3.663209 +step:5389 train loss:3.589856 +step:5390 train loss:3.625378 +step:5391 train loss:3.561579 +step:5392 train loss:3.626629 +step:5393 train loss:3.610223 +step:5394 train loss:3.612358 +step:5395 train loss:3.687240 +step:5396 train 
loss:3.652166 +step:5397 train loss:3.670634 +step:5398 train loss:3.668865 +step:5399 train loss:3.697050 +step:5400 train loss:3.705316 +step:5401 train loss:3.664391 +step:5402 train loss:3.766095 +step:5403 train loss:3.675128 +step:5404 train loss:3.650923 +step:5405 train loss:3.716706 +step:5406 train loss:3.675536 +step:5407 train loss:3.605267 +step:5408 train loss:3.749983 +step:5409 train loss:3.592299 +step:5410 train loss:3.656933 +step:5411 train loss:3.639084 +step:5412 train loss:3.614440 +step:5413 train loss:3.667612 +step:5414 train loss:3.641450 +step:5415 train loss:3.623000 +step:5416 train loss:3.615484 +step:5417 train loss:3.684608 +step:5418 train loss:3.701925 +step:5419 train loss:3.604931 +step:5420 train loss:3.664971 +step:5421 train loss:3.633790 +step:5422 train loss:3.678277 +step:5423 train loss:3.653523 +step:5424 train loss:3.557223 +step:5425 train loss:3.627509 +step:5426 train loss:3.712147 +step:5427 train loss:3.601940 +step:5428 train loss:3.640419 +step:5429 train loss:3.575300 +step:5430 train loss:3.607707 +step:5431 train loss:3.676691 +step:5432 train loss:3.650727 +step:5433 train loss:3.655293 +step:5434 train loss:3.606066 +step:5435 train loss:3.602185 +step:5436 train loss:3.603330 +step:5437 train loss:3.642638 +step:5438 train loss:3.618972 +step:5439 train loss:3.625780 +step:5440 train loss:3.666469 +step:5441 train loss:3.693277 +step:5442 train loss:3.617012 +step:5443 train loss:3.616807 +step:5444 train loss:3.562497 +step:5445 train loss:3.651808 +step:5446 train loss:3.618509 +step:5447 train loss:3.655920 +step:5448 train loss:3.708707 +step:5449 train loss:3.599030 +step:5450 train loss:3.632369 +step:5451 train loss:3.623497 +step:5452 train loss:3.640010 +step:5453 train loss:3.694815 +step:5454 train loss:3.622074 +step:5455 train loss:3.607188 +step:5456 train loss:3.745586 +step:5457 train loss:3.629517 +step:5458 train loss:3.660840 +step:5459 train loss:3.606097 +step:5460 train loss:3.623862 +step:5461 train loss:3.627043 +step:5462 train loss:3.625254 +step:5463 train loss:3.635317 +step:5464 train loss:3.638263 +step:5465 train loss:3.581840 +step:5466 train loss:3.653807 +step:5467 train loss:3.639792 +step:5468 train loss:3.645755 +step:5469 train loss:3.739195 +step:5470 train loss:3.631167 +step:5471 train loss:3.707700 +step:5472 train loss:3.652610 +step:5473 train loss:3.558846 +step:5474 train loss:3.891051 +step:5475 train loss:3.569531 +step:5476 train loss:3.646375 +step:5477 train loss:3.645551 +step:5478 train loss:3.644493 +step:5479 train loss:3.789994 +step:5480 train loss:3.633101 +step:5481 train loss:3.701399 +step:5482 train loss:3.608128 +step:5483 train loss:3.644924 +step:5484 train loss:3.686066 +step:5485 train loss:3.600674 +step:5486 train loss:3.648449 +step:5487 train loss:3.650609 +step:5488 train loss:3.561873 +step:5489 train loss:3.665905 +step:5490 train loss:3.611529 +step:5491 train loss:3.710938 +step:5492 train loss:3.640650 +step:5493 train loss:3.570965 +step:5494 train loss:3.626029 +step:5495 train loss:3.603317 +step:5496 train loss:3.600431 +step:5497 train loss:3.718032 +step:5498 train loss:3.589097 +step:5499 train loss:3.723559 +step:5500 validation loss:3.562794 total_sharp:1.6817e-04 L1_sharp:5.1401e-05 L2_sharp:1.7261e-05 L3_sharp:2.3613e-05 L4_sharp:3.2233e-05 L5_sharp:3.2434e-05 L6_sharp:1.4862e-05 L7_sharp:1.7323e-05 L8_sharp:3.9430e-05 L9_sharp:5.3556e-05 L10_sharp:6.1926e-05 L11_sharp:5.9882e-05 L12_sharp:2.5269e-04 total_fnorm:1.0856e+01 
total_l1_linf:9.5953e+04 total_spectral:1.0856e+01 L1_fnorm:2.5399e+00 L2_fnorm:2.5094e+00 L3_fnorm:2.4613e+00 L4_fnorm:2.4736e+00 L5_fnorm:2.4508e+00 L6_fnorm:2.5463e+00 L7_fnorm:2.5884e+00 L8_fnorm:2.5352e+00 L9_fnorm:2.5401e+00 L10_fnorm:2.5334e+00 L11_fnorm:2.5526e+00 L12_fnorm:2.4463e+00 L1_l1linf:2.6295e+00 L2_l1linf:2.6439e+00 L3_l1linf:2.5554e+00 L4_l1linf:2.6242e+00 L5_l1linf:2.3936e+00 L6_l1linf:2.5094e+00 L7_l1linf:2.6352e+00 L8_l1linf:2.6543e+00 L9_l1linf:2.6282e+00 L10_l1linf:2.8346e+00 L11_l1linf:2.8694e+00 L12_l1linf:2.8320e+00 L1_spectral:2.9586e-01 L2_spectral:2.7534e-01 L3_spectral:2.8098e-01 L4_spectral:3.2510e-01 L5_spectral:2.9031e-01 L6_spectral:2.5884e-01 L7_spectral:3.0306e-01 L8_spectral:3.1140e-01 L9_spectral:3.7048e-01 L10_spectral:3.7887e-01 L11_spectral:3.8964e-01 L12_spectral:4.2323e-01 ip_v_neg_g:1.3785e-02 cos_v_neg_g:1.7357e-03 v_norm:1.0856e+01 g_norm:7.3158e-01 hv_norm:2.4061e-01 cos_v_hv:7.5875e-03 hg_norm:3.2475e+00 cos_g_hg:6.3471e-01 v_par:1.6562e-03 v_perp:1.0856e+01 L1_cos_v_neg_g:5.2075e-03 L1_v_norm:2.5399e+00 L2_cos_v_neg_g:7.5252e-04 L2_v_norm:2.5094e+00 L3_cos_v_neg_g:2.1409e-03 L3_v_norm:2.4613e+00 L4_cos_v_neg_g:3.1922e-03 L4_v_norm:2.4736e+00 L5_cos_v_neg_g:4.4147e-03 L5_v_norm:2.4508e+00 L6_cos_v_neg_g:3.4017e-03 L6_v_norm:2.5463e+00 L7_cos_v_neg_g:2.9537e-03 L7_v_norm:2.5884e+00 L8_cos_v_neg_g:4.0745e-03 L8_v_norm:2.5352e+00 L9_cos_v_neg_g:4.8967e-03 L9_v_norm:2.5401e+00 L10_cos_v_neg_g:5.3564e-03 L10_v_norm:2.5334e+00 L11_cos_v_neg_g:4.7634e-03 L11_v_norm:2.5526e+00 L12_cos_v_neg_g:6.9834e-03 L12_v_norm:2.4463e+00 +step:5500 train loss:3.636940 +step:5501 train loss:3.712574 +step:5502 train loss:3.662243 +step:5503 train loss:3.625364 +step:5504 train loss:3.671450 +step:5505 train loss:3.635631 +step:5506 train loss:3.675895 +step:5507 train loss:3.664244 +step:5508 train loss:3.690229 +step:5509 train loss:3.694804 +step:5510 train loss:3.669132 +step:5511 train loss:3.662837 +step:5512 train loss:3.785411 +step:5513 train loss:3.586899 +step:5514 train loss:3.647800 +step:5515 train loss:3.672423 +step:5516 train loss:3.697574 +step:5517 train loss:3.652365 +step:5518 train loss:3.681819 +step:5519 train loss:3.716080 +step:5520 train loss:3.622678 +step:5521 train loss:3.637963 +step:5522 train loss:3.600703 +step:5523 train loss:3.649118 +step:5524 train loss:3.695815 +step:5525 train loss:3.606251 +step:5526 train loss:3.618245 +step:5527 train loss:3.637730 +step:5528 train loss:3.739028 +step:5529 train loss:3.707644 +step:5530 train loss:3.676636 +step:5531 train loss:3.608980 +step:5532 train loss:3.636313 +step:5533 train loss:3.673539 +step:5534 train loss:3.584319 +step:5535 train loss:3.636502 +step:5536 train loss:3.573431 +step:5537 train loss:3.621781 +step:5538 train loss:3.612009 +step:5539 train loss:3.560186 +step:5540 train loss:3.778901 +step:5541 train loss:3.595246 +step:5542 train loss:3.644545 +step:5543 train loss:3.631605 +step:5544 train loss:3.628021 +step:5545 train loss:3.613557 +step:5546 train loss:3.652487 +step:5547 train loss:3.584077 +step:5548 train loss:3.623664 +step:5549 train loss:3.628931 +step:5550 train loss:3.652722 +step:5551 train loss:3.657408 +step:5552 train loss:3.611922 +step:5553 train loss:3.641666 +step:5554 train loss:3.611726 +step:5555 train loss:3.618355 +step:5556 train loss:3.636244 +step:5557 train loss:3.700021 +step:5558 train loss:3.622874 +step:5559 train loss:3.626544 +step:5560 train loss:3.618656 +step:5561 train loss:3.654693 +step:5562 train loss:3.607997 
+step:5563 train loss:3.592604 +step:5564 train loss:3.627198 +step:5565 train loss:3.691263 +step:5566 train loss:3.594564 +step:5567 train loss:3.710628 +step:5568 train loss:3.831595 +step:5569 train loss:3.622169 +step:5570 train loss:3.550549 +step:5571 train loss:3.643855 +step:5572 train loss:3.581568 +step:5573 train loss:3.571871 +step:5574 train loss:3.540135 +step:5575 train loss:3.635838 +step:5576 train loss:3.623750 +step:5577 train loss:3.630353 +step:5578 train loss:3.655347 +step:5579 train loss:3.611820 +step:5580 train loss:3.637588 +step:5581 train loss:3.657309 +step:5582 train loss:3.639165 +step:5583 train loss:3.647777 +step:5584 train loss:3.766856 +step:5585 train loss:3.673118 +step:5586 train loss:3.606673 +step:5587 train loss:3.643266 +step:5588 train loss:3.655998 +step:5589 train loss:3.655736 +step:5590 train loss:3.713690 +step:5591 train loss:3.578603 +step:5592 train loss:3.754743 +step:5593 train loss:3.645661 +step:5594 train loss:3.669358 +step:5595 train loss:3.656151 +step:5596 train loss:3.611009 +step:5597 train loss:3.620133 +step:5598 train loss:3.621073 +step:5599 train loss:3.628163 +step:5600 train loss:3.663809 +step:5601 train loss:3.691355 +step:5602 train loss:3.619464 +step:5603 train loss:3.656861 +step:5604 train loss:3.656471 +step:5605 train loss:3.627803 +step:5606 train loss:3.633090 +step:5607 train loss:3.661120 +step:5608 train loss:3.605486 +step:5609 train loss:3.656165 +step:5610 train loss:3.609828 +step:5611 train loss:3.647622 +step:5612 train loss:3.678533 +step:5613 train loss:3.638858 +step:5614 train loss:3.602484 +step:5615 train loss:3.702846 +step:5616 train loss:3.600574 +step:5617 train loss:3.690258 +step:5618 train loss:3.672030 +step:5619 train loss:3.628445 +step:5620 train loss:3.628042 +step:5621 train loss:3.702077 +step:5622 train loss:3.588519 +step:5623 train loss:3.622483 +step:5624 train loss:3.611354 +step:5625 train loss:3.645739 +step:5626 train loss:3.636865 +step:5627 train loss:3.610792 +step:5628 train loss:3.652194 +step:5629 train loss:3.632836 +step:5630 train loss:3.560959 +step:5631 train loss:3.602561 +step:5632 train loss:3.648666 +step:5633 train loss:3.640676 +step:5634 train loss:3.594828 +step:5635 train loss:3.635560 +step:5636 train loss:3.614454 +step:5637 train loss:3.749736 +step:5638 train loss:3.662894 +step:5639 train loss:3.641847 +step:5640 train loss:3.644186 +step:5641 train loss:3.681805 +step:5642 train loss:3.614153 +step:5643 train loss:3.633373 +step:5644 train loss:3.715050 +step:5645 train loss:3.668587 +step:5646 train loss:3.670463 +step:5647 train loss:3.659949 +step:5648 train loss:3.644485 +step:5649 train loss:3.562387 +step:5650 train loss:3.568345 +step:5651 train loss:3.642354 +step:5652 train loss:3.642010 +step:5653 train loss:3.611107 +step:5654 train loss:3.737495 +step:5655 train loss:3.598672 +step:5656 train loss:3.625449 +step:5657 train loss:3.689772 +step:5658 train loss:3.594273 +step:5659 train loss:3.630456 +step:5660 train loss:3.681229 +step:5661 train loss:3.619341 +step:5662 train loss:3.660608 +step:5663 train loss:3.550899 +step:5664 train loss:3.520538 +step:5665 train loss:3.644435 +step:5666 train loss:3.647284 +step:5667 train loss:3.679698 +step:5668 train loss:3.613915 +step:5669 train loss:3.629364 +step:5670 train loss:3.625416 +step:5671 train loss:3.616534 +step:5672 train loss:3.661438 +step:5673 train loss:3.630711 +step:5674 train loss:3.702122 +step:5675 train loss:3.614575 +step:5676 train loss:3.763076 +step:5677 train 
loss:3.661303 +step:5678 train loss:3.639055 +step:5679 train loss:3.631990 +step:5680 train loss:3.663459 +step:5681 train loss:3.630834 +step:5682 train loss:3.642956 +step:5683 train loss:3.602245 +step:5684 train loss:3.609437 +step:5685 train loss:3.653548 +step:5686 train loss:3.671535 +step:5687 train loss:3.614397 +step:5688 train loss:3.709293 +step:5689 train loss:3.610876 +step:5690 train loss:3.759734 +step:5691 train loss:3.592286 +step:5692 train loss:3.583355 +step:5693 train loss:3.586809 +step:5694 train loss:3.606881 +step:5695 train loss:3.627218 +step:5696 train loss:3.674337 +step:5697 train loss:3.603848 +step:5698 train loss:3.622701 +step:5699 train loss:3.632112 +step:5700 train loss:3.627342 +step:5701 train loss:3.622509 +step:5702 train loss:3.688507 +step:5703 train loss:3.587570 +step:5704 train loss:3.632135 +step:5705 train loss:3.643512 +step:5706 train loss:3.663708 +step:5707 train loss:3.580493 +step:5708 train loss:3.666324 +step:5709 train loss:3.670901 +step:5710 train loss:3.661252 +step:5711 train loss:3.682216 +step:5712 train loss:3.665077 +step:5713 train loss:3.588576 +step:5714 train loss:3.675784 +step:5715 train loss:3.629108 +step:5716 train loss:3.633171 +step:5717 train loss:3.664477 +step:5718 train loss:3.601324 +step:5719 train loss:3.675773 +step:5720 train loss:3.651445 +step:5721 train loss:3.579452 +step:5722 train loss:3.591100 +step:5723 train loss:3.674948 +step:5724 train loss:3.589561 +step:5725 train loss:3.660096 +step:5726 train loss:3.655190 +step:5727 train loss:3.614462 +step:5728 train loss:3.618726 +step:5729 train loss:3.619910 +step:5730 train loss:3.688266 +step:5731 train loss:3.561109 +step:5732 train loss:3.618069 +step:5733 train loss:3.611725 +step:5734 train loss:3.625095 +step:5735 train loss:3.619570 +step:5736 train loss:3.619692 +step:5737 train loss:3.641746 +step:5738 train loss:3.608451 +step:5739 train loss:3.617865 +step:5740 train loss:3.658244 +step:5741 train loss:3.635338 +step:5742 train loss:3.683864 +step:5743 train loss:3.648620 +step:5744 train loss:3.609986 +step:5745 train loss:3.614853 +step:5746 train loss:3.644791 +step:5747 train loss:3.628513 +step:5748 train loss:3.675276 +step:5749 train loss:3.632431 +step:5750 validation loss:3.553654 +step:5750 train loss:3.638576 +step:5751 train loss:3.651014 +step:5752 train loss:3.637858 +step:5753 train loss:3.608052 +step:5754 train loss:3.615028 +step:5755 train loss:3.632148 +step:5756 train loss:3.621998 +step:5757 train loss:3.686601 +step:5758 train loss:3.618475 +step:5759 train loss:3.581459 +step:5760 train loss:3.660880 +step:5761 train loss:3.657293 +step:5762 train loss:3.616366 +step:5763 train loss:3.642420 +step:5764 train loss:3.605745 +step:5765 train loss:3.725131 +step:5766 train loss:3.632637 +step:5767 train loss:3.667675 +step:5768 train loss:3.602706 +step:5769 train loss:3.724964 +step:5770 train loss:3.648154 +step:5771 train loss:3.675201 +step:5772 train loss:3.625738 +step:5773 train loss:3.604307 +step:5774 train loss:3.615298 +step:5775 train loss:3.684587 +step:5776 train loss:3.669409 +step:5777 train loss:3.587739 +step:5778 train loss:3.667975 +step:5779 train loss:3.634443 +step:5780 train loss:3.605522 +step:5781 train loss:3.669668 +step:5782 train loss:3.629766 +step:5783 train loss:3.591370 +step:5784 train loss:3.693385 +step:5785 train loss:3.684950 +step:5786 train loss:3.595412 +step:5787 train loss:3.642101 +step:5788 train loss:3.648714 +step:5789 train loss:3.593412 +step:5790 train loss:3.696214 
+step:5791 train loss:3.621239 +step:5792 train loss:3.894308 +step:5793 train loss:3.662443 +step:5794 train loss:3.684741 +step:5795 train loss:3.676094 +step:5796 train loss:3.660824 +step:5797 train loss:3.643025 +step:5798 train loss:3.640014 +step:5799 train loss:3.610935 +step:5800 train loss:3.769447 +step:5801 train loss:3.639493 +step:5802 train loss:3.632272 +step:5803 train loss:3.640880 +step:5804 train loss:3.659906 +step:5805 train loss:3.624032 +step:5806 train loss:3.664503 +step:5807 train loss:3.587539 +step:5808 train loss:3.616100 +step:5809 train loss:3.628534 +step:5810 train loss:3.601991 +step:5811 train loss:3.615402 +step:5812 train loss:3.596519 +step:5813 train loss:3.607485 +step:5814 train loss:3.603663 +step:5815 train loss:3.603292 +step:5816 train loss:3.663711 +step:5817 train loss:3.678653 +step:5818 train loss:3.651602 +step:5819 train loss:3.703957 +step:5820 train loss:3.641776 +step:5821 train loss:3.636037 +step:5822 train loss:3.653783 +step:5823 train loss:3.655022 +step:5824 train loss:3.606049 +step:5825 train loss:3.699209 +step:5826 train loss:3.614289 +step:5827 train loss:3.577024 +step:5828 train loss:3.564556 +step:5829 train loss:3.629437 +step:5830 train loss:3.602570 +step:5831 train loss:3.573826 +step:5832 train loss:3.690520 +step:5833 train loss:3.667148 +step:5834 train loss:3.649756 +step:5835 train loss:3.601737 +step:5836 train loss:3.565527 +step:5837 train loss:3.688412 +step:5838 train loss:3.667781 +step:5839 train loss:3.643116 +step:5840 train loss:3.726038 +step:5841 train loss:3.649230 +step:5842 train loss:3.661723 +step:5843 train loss:3.607261 +step:5844 train loss:3.671683 +step:5845 train loss:3.585434 +step:5846 train loss:3.629235 +step:5847 train loss:3.658650 +step:5848 train loss:3.722640 +step:5849 train loss:3.618882 +step:5850 train loss:3.648805 +step:5851 train loss:3.617135 +step:5852 train loss:3.702696 +step:5853 train loss:3.795658 +step:5854 train loss:3.586863 +step:5855 train loss:3.644930 +step:5856 train loss:3.617236 +step:5857 train loss:3.629883 +step:5858 train loss:3.601171 +step:5859 train loss:3.607175 +step:5860 train loss:3.708895 +step:5861 train loss:3.593574 +step:5862 train loss:3.706440 +step:5863 train loss:3.645104 +step:5864 train loss:3.634066 +step:5865 train loss:3.638294 +step:5866 train loss:3.629575 +step:5867 train loss:3.710166 +step:5868 train loss:3.634228 +step:5869 train loss:3.657692 +step:5870 train loss:3.633753 +step:5871 train loss:3.617259 +step:5872 train loss:3.643752 +step:5873 train loss:3.622403 +step:5874 train loss:3.703293 +step:5875 train loss:3.635851 +step:5876 train loss:3.612942 +step:5877 train loss:3.621480 +step:5878 train loss:3.620641 +step:5879 train loss:3.593070 +step:5880 train loss:3.789720 +step:5881 train loss:3.631405 +step:5882 train loss:3.605910 +step:5883 train loss:3.605043 +step:5884 train loss:3.624196 +step:5885 train loss:3.619620 +step:5886 train loss:3.639119 +step:5887 train loss:3.639666 +step:5888 train loss:3.616825 +step:5889 train loss:3.597796 +step:5890 train loss:3.643476 +step:5891 train loss:3.587290 +step:5892 train loss:3.669972 +step:5893 train loss:3.592746 +step:5894 train loss:3.586108 +step:5895 train loss:3.589188 +step:5896 train loss:3.602440 +step:5897 train loss:3.666761 +step:5898 train loss:3.886431 +step:5899 train loss:3.617326 +step:5900 train loss:3.668085 +step:5901 train loss:3.624700 +step:5902 train loss:3.636564 +step:5903 train loss:3.623849 +step:5904 train loss:3.649525 +step:5905 train 
loss:3.756155 +step:5906 train loss:3.699866 +step:5907 train loss:3.642259 +step:5908 train loss:3.619302 +step:5909 train loss:3.612798 +step:5910 train loss:3.600976 +step:5911 train loss:3.616133 +step:5912 train loss:3.645885 +step:5913 train loss:3.650414 +step:5914 train loss:3.633576 +step:5915 train loss:3.771390 +step:5916 train loss:3.649899 +step:5917 train loss:3.617269 +step:5918 train loss:3.614910 +step:5919 train loss:3.642493 +step:5920 train loss:3.638507 +step:5921 train loss:3.606766 +step:5922 train loss:3.664068 +step:5923 train loss:3.655512 +step:5924 train loss:3.610712 +step:5925 train loss:3.733529 +step:5926 train loss:3.620394 +step:5927 train loss:3.595683 +step:5928 train loss:3.628229 +step:5929 train loss:3.653888 +step:5930 train loss:3.604635 +step:5931 train loss:3.583108 +step:5932 train loss:3.622817 +step:5933 train loss:3.679459 +step:5934 train loss:3.595111 +step:5935 train loss:3.622325 +step:5936 train loss:3.611409 +step:5937 train loss:3.589981 +step:5938 train loss:3.607596 +step:5939 train loss:3.586309 +step:5940 train loss:3.666828 +step:5941 train loss:3.603146 +step:5942 train loss:3.616485 +step:5943 train loss:3.622582 +step:5944 train loss:3.674889 +step:5945 train loss:3.605109 +step:5946 train loss:3.586424 +step:5947 train loss:3.598960 +step:5948 train loss:3.635676 +step:5949 train loss:3.682867 +step:5950 train loss:3.642296 +step:5951 train loss:3.646429 +step:5952 train loss:3.569134 +step:5953 train loss:3.611607 +step:5954 train loss:3.620696 +step:5955 train loss:3.626807 +step:5956 train loss:3.602978 +step:5957 train loss:3.568039 +step:5958 train loss:3.643312 +step:5959 train loss:3.602371 +step:5960 train loss:3.576707 +step:5961 train loss:3.604247 +step:5962 train loss:3.634592 +step:5963 train loss:3.666170 +step:5964 train loss:3.624497 +step:5965 train loss:3.642081 +step:5966 train loss:3.637388 +step:5967 train loss:3.603361 +step:5968 train loss:3.674904 +step:5969 train loss:3.619174 +step:5970 train loss:3.638043 +step:5971 train loss:3.587770 +step:5972 train loss:3.614051 +step:5973 train loss:3.605878 +step:5974 train loss:3.626591 +step:5975 train loss:3.596296 +step:5976 train loss:3.642891 +step:5977 train loss:3.592811 +step:5978 train loss:3.584858 +step:5979 train loss:3.618983 +step:5980 train loss:3.691570 +step:5981 train loss:3.582811 +step:5982 train loss:3.593649 +step:5983 train loss:3.662775 +step:5984 train loss:3.606696 +step:5985 train loss:3.649086 +step:5986 train loss:3.622590 +step:5987 train loss:3.608757 +step:5988 train loss:3.616774 +step:5989 train loss:3.636410 +step:5990 train loss:3.564809 +step:5991 train loss:3.630405 +step:5992 train loss:3.665157 +step:5993 train loss:3.615810 +step:5994 train loss:3.633968 +step:5995 train loss:3.530360 +step:5996 train loss:3.691627 +step:5997 train loss:3.671147 +step:5998 train loss:3.549301 +step:5999 train loss:3.577185 +step:6000 validation loss:3.544871 total_sharp:1.8539e-04 L1_sharp:6.3396e-05 L2_sharp:1.7143e-05 L3_sharp:1.9293e-05 L4_sharp:2.0234e-05 L5_sharp:1.9818e-05 L6_sharp:1.7785e-05 L7_sharp:2.6002e-05 L8_sharp:5.4525e-05 L9_sharp:7.4889e-05 L10_sharp:8.8758e-05 L11_sharp:6.9319e-05 L12_sharp:2.1186e-04 total_fnorm:1.0943e+01 total_l1_linf:9.6878e+04 total_spectral:1.0943e+01 L1_fnorm:2.5976e+00 L2_fnorm:2.5718e+00 L3_fnorm:2.5470e+00 L4_fnorm:2.5268e+00 L5_fnorm:2.4798e+00 L6_fnorm:2.5745e+00 L7_fnorm:2.6108e+00 L8_fnorm:2.5650e+00 L9_fnorm:2.5604e+00 L10_fnorm:2.5507e+00 L11_fnorm:2.5483e+00 L12_fnorm:2.4635e+00 
L1_l1linf:2.5744e+00 L2_l1linf:2.7454e+00 L3_l1linf:2.7482e+00 L4_l1linf:2.6299e+00 L5_l1linf:2.4676e+00 L6_l1linf:2.5213e+00 L7_l1linf:2.7402e+00 L8_l1linf:2.7743e+00 L9_l1linf:2.8229e+00 L10_l1linf:3.1398e+00 L11_l1linf:3.0718e+00 L12_l1linf:3.2171e+00 L1_spectral:3.0177e-01 L2_spectral:3.0053e-01 L3_spectral:3.1347e-01 L4_spectral:3.3216e-01 L5_spectral:2.8323e-01 L6_spectral:2.6370e-01 L7_spectral:2.8957e-01 L8_spectral:3.4541e-01 L9_spectral:3.6207e-01 L10_spectral:4.0151e-01 L11_spectral:3.6602e-01 L12_spectral:3.9371e-01 ip_v_neg_g:1.1995e-02 cos_v_neg_g:2.8179e-03 v_norm:1.0943e+01 g_norm:3.8898e-01 hv_norm:2.0214e-01 cos_v_hv:1.0036e-02 hg_norm:2.9063e+00 cos_g_hg:5.0563e-01 v_par:3.4958e-03 v_perp:1.0943e+01 L1_cos_v_neg_g:8.6335e-03 L1_v_norm:2.5976e+00 L2_cos_v_neg_g:2.8099e-03 L2_v_norm:2.5718e+00 L3_cos_v_neg_g:2.2606e-03 L3_v_norm:2.5470e+00 L4_cos_v_neg_g:2.6277e-03 L4_v_norm:2.5268e+00 L5_cos_v_neg_g:3.0721e-03 L5_v_norm:2.4798e+00 L6_cos_v_neg_g:2.9174e-03 L6_v_norm:2.5745e+00 L7_cos_v_neg_g:4.6441e-03 L7_v_norm:2.6108e+00 L8_cos_v_neg_g:6.1775e-03 L8_v_norm:2.5650e+00 L9_cos_v_neg_g:8.1216e-03 L9_v_norm:2.5604e+00 L10_cos_v_neg_g:8.2749e-03 L10_v_norm:2.5507e+00 L11_cos_v_neg_g:9.2310e-03 L11_v_norm:2.5483e+00 L12_cos_v_neg_g:1.3335e-02 L12_v_norm:2.4635e+00 +step:6000 train loss:3.628344 +step:6001 train loss:3.590173 +step:6002 train loss:3.623929 +step:6003 train loss:3.647203 +step:6004 train loss:3.594970 +step:6005 train loss:3.661541 +step:6006 train loss:3.576864 +step:6007 train loss:3.589852 +step:6008 train loss:3.604810 +step:6009 train loss:3.645177 +step:6010 train loss:3.635933 +step:6011 train loss:3.627422 +step:6012 train loss:3.592070 +step:6013 train loss:3.651549 +step:6014 train loss:3.674015 +step:6015 train loss:3.666880 +step:6016 train loss:3.635584 +step:6017 train loss:3.646792 +step:6018 train loss:3.582368 +step:6019 train loss:3.619168 +step:6020 train loss:3.605267 +step:6021 train loss:3.533428 +step:6022 train loss:3.645330 +step:6023 train loss:3.579961 +step:6024 train loss:3.655627 +step:6025 train loss:3.620013 +step:6026 train loss:3.593399 +step:6027 train loss:3.637360 +step:6028 train loss:3.551661 +step:6029 train loss:3.664491 +step:6030 train loss:3.638338 +step:6031 train loss:3.604978 +step:6032 train loss:3.566803 +step:6033 train loss:3.622741 +step:6034 train loss:3.653352 +step:6035 train loss:3.569667 +step:6036 train loss:3.544684 +step:6037 train loss:3.657492 +step:6038 train loss:3.662898 +step:6039 train loss:3.644803 +step:6040 train loss:3.602669 +step:6041 train loss:3.586719 +step:6042 train loss:3.566855 +step:6043 train loss:3.620713 +step:6044 train loss:3.741699 +step:6045 train loss:3.584315 +step:6046 train loss:3.594796 +step:6047 train loss:3.635822 +step:6048 train loss:3.643324 +step:6049 train loss:3.615247 +step:6050 train loss:3.585866 +step:6051 train loss:3.636366 +step:6052 train loss:3.609230 +step:6053 train loss:3.727719 +step:6054 train loss:3.762264 +step:6055 train loss:3.581164 +step:6056 train loss:3.572569 +step:6057 train loss:3.606023 +step:6058 train loss:3.634782 +step:6059 train loss:3.637147 +step:6060 train loss:3.645997 +step:6061 train loss:3.659917 +step:6062 train loss:3.609292 +step:6063 train loss:3.630755 +step:6064 train loss:3.623421 +step:6065 train loss:3.620847 +step:6066 train loss:3.608285 +step:6067 train loss:3.649654 +step:6068 train loss:3.588141 +step:6069 train loss:3.546309 +step:6070 train loss:3.694635 +step:6071 train loss:3.639364 +step:6072 train 
loss:3.584905 +step:6073 train loss:3.620498 +step:6074 train loss:3.702897 +step:6075 train loss:3.626621 +step:6076 train loss:3.635230 +step:6077 train loss:3.635778 +step:6078 train loss:3.573165 +step:6079 train loss:3.601690 +step:6080 train loss:3.608030 +step:6081 train loss:3.646426 +step:6082 train loss:3.594379 +step:6083 train loss:3.604594 +step:6084 train loss:3.668050 +step:6085 train loss:3.666853 +step:6086 train loss:3.566795 +step:6087 train loss:3.612374 +step:6088 train loss:3.596797 +step:6089 train loss:3.655761 +step:6090 train loss:3.658328 +step:6091 train loss:3.607397 +step:6092 train loss:3.570161 +step:6093 train loss:3.630827 +step:6094 train loss:3.546441 +step:6095 train loss:3.709633 +step:6096 train loss:3.580129 +step:6097 train loss:3.659046 +step:6098 train loss:3.631781 +step:6099 train loss:3.690680 +step:6100 train loss:3.682889 +step:6101 train loss:3.617219 +step:6102 train loss:3.729234 +step:6103 train loss:3.619898 +step:6104 train loss:3.731325 +step:6105 train loss:3.666285 +step:6106 train loss:3.602941 +step:6107 train loss:3.667807 +step:6108 train loss:3.629735 +step:6109 train loss:3.700641 +step:6110 train loss:3.632328 +step:6111 train loss:3.664524 +step:6112 train loss:3.607143 +step:6113 train loss:3.632879 +step:6114 train loss:3.602176 +step:6115 train loss:3.669688 +step:6116 train loss:3.609380 +step:6117 train loss:3.660111 +step:6118 train loss:3.642560 +step:6119 train loss:3.652230 +step:6120 train loss:3.795595 +step:6121 train loss:3.630435 +step:6122 train loss:3.641880 +step:6123 train loss:3.618893 +step:6124 train loss:3.597768 +step:6125 train loss:3.589141 +step:6126 train loss:3.608167 +step:6127 train loss:3.597262 +step:6128 train loss:3.577639 +step:6129 train loss:3.800768 +step:6130 train loss:3.587116 +step:6131 train loss:3.562648 +step:6132 train loss:3.632797 +step:6133 train loss:3.600617 +step:6134 train loss:3.631646 +step:6135 train loss:3.715562 +step:6136 train loss:3.732835 +step:6137 train loss:3.592518 +step:6138 train loss:3.651131 +step:6139 train loss:3.627917 +step:6140 train loss:3.627941 +step:6141 train loss:3.584929 +step:6142 train loss:3.649721 +step:6143 train loss:3.615440 +step:6144 train loss:3.633749 +step:6145 train loss:3.885989 +step:6146 train loss:3.718763 +step:6147 train loss:3.803507 +step:6148 train loss:3.572957 +step:6149 train loss:3.694545 +step:6150 train loss:3.651912 +step:6151 train loss:3.603049 +step:6152 train loss:3.599586 +step:6153 train loss:3.668709 +step:6154 train loss:3.752634 +step:6155 train loss:3.619041 +step:6156 train loss:3.717051 +step:6157 train loss:3.643737 +step:6158 train loss:3.639544 +step:6159 train loss:3.600233 +step:6160 train loss:3.768343 +step:6161 train loss:3.615978 +step:6162 train loss:3.637565 +step:6163 train loss:3.667816 +step:6164 train loss:3.580570 +step:6165 train loss:3.650592 +step:6166 train loss:3.640790 +step:6167 train loss:3.661941 +step:6168 train loss:3.637464 +step:6169 train loss:3.629380 +step:6170 train loss:3.632339 +step:6171 train loss:3.602545 +step:6172 train loss:3.586766 +step:6173 train loss:3.641588 +step:6174 train loss:3.567746 +step:6175 train loss:3.579844 +step:6176 train loss:3.562875 +step:6177 train loss:3.657290 +step:6178 train loss:3.602039 +step:6179 train loss:3.610291 +step:6180 train loss:3.617074 +step:6181 train loss:3.650646 +step:6182 train loss:3.533377 +step:6183 train loss:3.542482 +step:6184 train loss:3.660494 +step:6185 train loss:3.615823 +step:6186 train loss:3.576289 
+step:6187 train loss:3.618789 +step:6188 train loss:3.585965 +step:6189 train loss:3.623477 +step:6190 train loss:3.585366 +step:6191 train loss:3.616920 +step:6192 train loss:3.583941 +step:6193 train loss:3.650423 +step:6194 train loss:3.642417 +step:6195 train loss:3.623726 +step:6196 train loss:3.637882 +step:6197 train loss:3.662295 +step:6198 train loss:3.577507 +step:6199 train loss:3.598874 +step:6200 train loss:3.640807 +step:6201 train loss:3.683259 +step:6202 train loss:3.685824 +step:6203 train loss:3.682113 +step:6204 train loss:3.668647 +step:6205 train loss:3.605772 +step:6206 train loss:3.596053 +step:6207 train loss:3.657694 +step:6208 train loss:3.678307 +step:6209 train loss:3.645678 +step:6210 train loss:3.680683 +step:6211 train loss:3.596036 +step:6212 train loss:3.590579 +step:6213 train loss:3.603863 +step:6214 train loss:3.581119 +step:6215 train loss:3.753003 +step:6216 train loss:3.624950 +step:6217 train loss:3.683927 +step:6218 train loss:3.654136 +step:6219 train loss:3.670638 +step:6220 train loss:3.627621 +step:6221 train loss:3.593423 +step:6222 train loss:3.829141 +step:6223 train loss:3.609896 +step:6224 train loss:3.653549 +step:6225 train loss:3.628584 +step:6226 train loss:3.631562 +step:6227 train loss:3.630978 +step:6228 train loss:3.629029 +step:6229 train loss:3.661258 +step:6230 train loss:3.620160 +step:6231 train loss:3.730107 +step:6232 train loss:3.569702 +step:6233 train loss:3.607896 +step:6234 train loss:3.616551 +step:6235 train loss:3.640534 +step:6236 train loss:3.576900 +step:6237 train loss:3.602325 +step:6238 train loss:3.624043 +step:6239 train loss:3.611023 +step:6240 train loss:3.629622 +step:6241 train loss:3.616482 +step:6242 train loss:3.610516 +step:6243 train loss:3.648765 +step:6244 train loss:3.806711 +step:6245 train loss:3.600736 +step:6246 train loss:3.585423 +step:6247 train loss:3.582409 +step:6248 train loss:3.583090 +step:6249 train loss:3.524847 +step:6250 validation loss:3.532305 +step:6250 train loss:3.562099 +step:6251 train loss:3.577644 +step:6252 train loss:3.623499 +step:6253 train loss:3.633028 +step:6254 train loss:3.623132 +step:6255 train loss:3.589977 +step:6256 train loss:3.639532 +step:6257 train loss:3.639006 +step:6258 train loss:3.617739 +step:6259 train loss:3.624796 +step:6260 train loss:3.650767 +step:6261 train loss:3.674400 +step:6262 train loss:3.572833 +step:6263 train loss:3.602757 +step:6264 train loss:3.612463 +step:6265 train loss:3.597996 +step:6266 train loss:3.802246 +step:6267 train loss:3.607033 +step:6268 train loss:3.692887 +step:6269 train loss:3.568038 +step:6270 train loss:3.580584 +step:6271 train loss:3.629146 +step:6272 train loss:3.622034 +step:6273 train loss:3.827325 +step:6274 train loss:3.597425 +step:6275 train loss:3.635177 +step:6276 train loss:3.606153 +step:6277 train loss:3.589692 +step:6278 train loss:3.571425 +step:6279 train loss:3.626441 +step:6280 train loss:3.632922 +step:6281 train loss:3.564068 +step:6282 train loss:3.580131 +step:6283 train loss:3.664109 +step:6284 train loss:3.636468 +step:6285 train loss:3.637569 +step:6286 train loss:3.583019 +step:6287 train loss:3.610854 +step:6288 train loss:3.705913 +step:6289 train loss:3.570085 +step:6290 train loss:3.564578 +step:6291 train loss:3.602829 +step:6292 train loss:3.623106 +step:6293 train loss:3.605859 +step:6294 train loss:3.595527 +step:6295 train loss:3.616154 +step:6296 train loss:3.581090 +step:6297 train loss:3.705555 +step:6298 train loss:3.650304 +step:6299 train loss:3.545942 +step:6300 
train loss:3.629597 +step:6301 train loss:3.658634 +step:6302 train loss:3.640245 +step:6303 train loss:3.603808 +step:6304 train loss:3.622326 +step:6305 train loss:3.591321 +step:6306 train loss:3.605280 +step:6307 train loss:3.614189 +step:6308 train loss:3.589481 +step:6309 train loss:3.587029 +step:6310 train loss:3.637450 +step:6311 train loss:3.594217 +step:6312 train loss:3.632347 +step:6313 train loss:3.564356 +step:6314 train loss:3.590913 +step:6315 train loss:3.644327 +step:6316 train loss:3.567303 +step:6317 train loss:3.562459 +step:6318 train loss:3.673675 +step:6319 train loss:3.606578 +step:6320 train loss:3.620242 +step:6321 train loss:3.603276 +step:6322 train loss:3.607089 +step:6323 train loss:3.541687 +step:6324 train loss:3.547042 +step:6325 train loss:3.645743 +step:6326 train loss:3.565021 +step:6327 train loss:3.639458 +step:6328 train loss:3.617176 +step:6329 train loss:3.536304 +step:6330 train loss:3.566990 +step:6331 train loss:3.583052 +step:6332 train loss:3.718410 +step:6333 train loss:3.593285 +step:6334 train loss:3.571398 +step:6335 train loss:3.541256 +step:6336 train loss:3.573592 +step:6337 train loss:3.596723 +step:6338 train loss:3.554102 +step:6339 train loss:3.597827 +step:6340 train loss:3.578707 +step:6341 train loss:3.592362 +step:6342 train loss:3.587292 +step:6343 train loss:3.686450 +step:6344 train loss:3.537823 +step:6345 train loss:3.553929 +step:6346 train loss:3.635511 +step:6347 train loss:3.507820 +step:6348 train loss:3.602008 +step:6349 train loss:3.582299 +step:6350 train loss:3.557374 +step:6351 train loss:3.553699 +step:6352 train loss:3.567795 +step:6353 train loss:3.589465 +step:6354 train loss:3.603486 +step:6355 train loss:3.606692 +step:6356 train loss:3.622552 +step:6357 train loss:3.476199 +step:6358 train loss:3.568821 +step:6359 train loss:3.624780 +step:6360 train loss:3.537625 +step:6361 train loss:3.540305 +step:6362 train loss:3.576461 +step:6363 train loss:3.561100 +step:6364 train loss:3.542574 +step:6365 train loss:3.618168 +step:6366 train loss:3.628754 +step:6367 train loss:3.555021 +step:6368 train loss:3.600167 +step:6369 train loss:3.565595 +step:6370 train loss:3.616211 +step:6371 train loss:3.534268 +step:6372 train loss:3.565027 +step:6373 train loss:3.587909 +step:6374 train loss:3.620268 +step:6375 train loss:3.578228 +step:6376 train loss:3.605772 +step:6377 train loss:3.600467 +step:6378 train loss:3.551364 +step:6379 train loss:3.590198 +step:6380 train loss:3.632041 +step:6381 train loss:3.598876 +step:6382 train loss:3.551505 +step:6383 train loss:3.617718 +step:6384 train loss:3.593763 +step:6385 train loss:3.572461 +step:6386 train loss:3.607531 +step:6387 train loss:3.583195 +step:6388 train loss:3.627141 +step:6389 train loss:3.633552 +step:6390 train loss:3.582968 +step:6391 train loss:3.568820 +step:6392 train loss:3.554834 +step:6393 train loss:3.609703 +step:6394 train loss:3.597144 +step:6395 train loss:3.781805 +step:6396 train loss:3.601371 +step:6397 train loss:3.545084 +step:6398 train loss:3.613041 +step:6399 train loss:3.557607 +step:6400 train loss:3.632899 +step:6401 train loss:3.662522 +step:6402 train loss:3.601201 +step:6403 train loss:3.587668 +step:6404 train loss:3.569322 +step:6405 train loss:3.593539 +step:6406 train loss:3.599888 +step:6407 train loss:3.658166 +step:6408 train loss:3.551152 +step:6409 train loss:3.536229 +step:6410 train loss:3.667859 +step:6411 train loss:3.598582 +step:6412 train loss:3.599936 +step:6413 train loss:3.603623 +step:6414 train loss:3.551892 
+step:6415 train loss:3.607065 +step:6416 train loss:3.584764 +step:6417 train loss:3.553340 +step:6418 train loss:3.545468 +step:6419 train loss:3.630264 +step:6420 train loss:3.556635 +step:6421 train loss:3.584850 +step:6422 train loss:3.570601 +step:6423 train loss:3.582637 +step:6424 train loss:3.603525 +step:6425 train loss:3.599651 +step:6426 train loss:3.640557 +step:6427 train loss:3.602477 +step:6428 train loss:3.640123 +step:6429 train loss:3.604925 +step:6430 train loss:3.581206 +step:6431 train loss:3.554882 +step:6432 train loss:3.588515 +step:6433 train loss:3.601365 +step:6434 train loss:3.484329 +step:6435 train loss:3.669771 +step:6436 train loss:3.600024 +step:6437 train loss:3.564835 +step:6438 train loss:3.594974 +step:6439 train loss:3.572752 +step:6440 train loss:3.584006 +step:6441 train loss:3.576243 +step:6442 train loss:3.518021 +step:6443 train loss:3.570547 +step:6444 train loss:3.713140 +step:6445 train loss:3.614840 +step:6446 train loss:3.618570 +step:6447 train loss:3.598781 +step:6448 train loss:3.546575 +step:6449 train loss:3.571245 +step:6450 train loss:3.553920 +step:6451 train loss:3.541014 +step:6452 train loss:3.545822 +step:6453 train loss:3.589044 +step:6454 train loss:3.609807 +step:6455 train loss:3.601666 +step:6456 train loss:3.616962 +step:6457 train loss:3.596262 +step:6458 train loss:3.569934 +step:6459 train loss:3.551165 +step:6460 train loss:3.558548 +step:6461 train loss:3.558726 +step:6462 train loss:3.553970 +step:6463 train loss:3.649452 +step:6464 train loss:3.555886 +step:6465 train loss:3.600455 +step:6466 train loss:3.615815 +step:6467 train loss:3.540394 +step:6468 train loss:3.617430 +step:6469 train loss:3.526169 +step:6470 train loss:3.648592 +step:6471 train loss:3.557137 +step:6472 train loss:3.711504 +step:6473 train loss:3.597301 +step:6474 train loss:3.629169 +step:6475 train loss:3.574380 +step:6476 train loss:3.643897 +step:6477 train loss:3.574840 +step:6478 train loss:3.706513 +step:6479 train loss:3.623726 +step:6480 train loss:3.557874 +step:6481 train loss:3.612548 +step:6482 train loss:3.556344 +step:6483 train loss:3.615210 +step:6484 train loss:3.570829 +step:6485 train loss:3.633374 +step:6486 train loss:3.565936 +step:6487 train loss:3.566036 +step:6488 train loss:3.561134 +step:6489 train loss:3.563319 +step:6490 train loss:3.590063 +step:6491 train loss:3.558339 +step:6492 train loss:3.664276 +step:6493 train loss:3.565742 +step:6494 train loss:3.570684 +step:6495 train loss:3.568833 +step:6496 train loss:3.604147 +step:6497 train loss:3.620188 +step:6498 train loss:3.728614 +step:6499 train loss:3.700610 +step:6500 validation loss:3.527849 total_sharp:3.5095e-04 L1_sharp:7.9196e-05 L2_sharp:9.3531e-06 L3_sharp:1.3721e-05 L4_sharp:2.5461e-05 L5_sharp:4.1160e-05 L6_sharp:2.6558e-05 L7_sharp:4.1558e-05 L8_sharp:7.6924e-05 L9_sharp:1.2126e-04 L10_sharp:1.7485e-04 L11_sharp:1.0390e-04 L12_sharp:4.7817e-04 total_fnorm:1.1105e+01 total_l1_linf:9.8446e+04 total_spectral:1.1105e+01 L1_fnorm:2.6253e+00 L2_fnorm:2.5822e+00 L3_fnorm:2.5273e+00 L4_fnorm:2.5509e+00 L5_fnorm:2.5602e+00 L6_fnorm:2.6546e+00 L7_fnorm:2.6741e+00 L8_fnorm:2.6167e+00 L9_fnorm:2.5939e+00 L10_fnorm:2.5939e+00 L11_fnorm:2.6255e+00 L12_fnorm:2.5473e+00 L1_l1linf:2.7581e+00 L2_l1linf:2.7896e+00 L3_l1linf:2.7808e+00 L4_l1linf:2.7163e+00 L5_l1linf:2.6196e+00 L6_l1linf:2.6835e+00 L7_l1linf:2.7360e+00 L8_l1linf:2.8725e+00 L9_l1linf:3.0062e+00 L10_l1linf:2.9967e+00 L11_l1linf:3.1939e+00 L12_l1linf:3.1328e+00 L1_spectral:3.2128e-01 L2_spectral:3.0641e-01 
L3_spectral:3.0927e-01 L4_spectral:3.2583e-01 L5_spectral:3.1840e-01 L6_spectral:3.0048e-01 L7_spectral:3.3242e-01 L8_spectral:3.9329e-01 L9_spectral:4.3766e-01 L10_spectral:4.6537e-01 L11_spectral:4.1948e-01 L12_spectral:4.9012e-01 ip_v_neg_g:1.9816e-02 cos_v_neg_g:4.1756e-03 v_norm:1.1105e+01 g_norm:4.2734e-01 hv_norm:4.0841e-01 cos_v_hv:9.5424e-03 hg_norm:3.2340e+00 cos_g_hg:5.4630e-01 v_par:4.1243e-03 v_perp:1.1105e+01 L1_cos_v_neg_g:1.0311e-02 L1_v_norm:2.6253e+00 L2_cos_v_neg_g:3.6579e-03 L2_v_norm:2.5822e+00 L3_cos_v_neg_g:5.1525e-03 L3_v_norm:2.5273e+00 L4_cos_v_neg_g:5.4310e-03 L4_v_norm:2.5509e+00 L5_cos_v_neg_g:5.5289e-03 L5_v_norm:2.5602e+00 L6_cos_v_neg_g:5.5288e-03 L6_v_norm:2.6546e+00 L7_cos_v_neg_g:6.4584e-03 L7_v_norm:2.6741e+00 L8_cos_v_neg_g:8.2294e-03 L8_v_norm:2.6167e+00 L9_cos_v_neg_g:1.1081e-02 L9_v_norm:2.5939e+00 L10_cos_v_neg_g:1.4972e-02 L10_v_norm:2.5939e+00 L11_cos_v_neg_g:1.6358e-02 L11_v_norm:2.6255e+00 L12_cos_v_neg_g:2.2378e-02 L12_v_norm:2.5473e+00 +step:6500 train loss:3.548316 +step:6501 train loss:3.562557 +step:6502 train loss:3.583225 +step:6503 train loss:3.637062 +step:6504 train loss:3.586958 +step:6505 train loss:3.594786 +step:6506 train loss:3.554322 +step:6507 train loss:3.622698 +step:6508 train loss:3.586388 +step:6509 train loss:3.571887 +step:6510 train loss:3.580247 +step:6511 train loss:3.596118 +step:6512 train loss:3.536611 +step:6513 train loss:3.605519 +step:6514 train loss:3.477829 +step:6515 train loss:3.570102 +step:6516 train loss:3.622392 +step:6517 train loss:3.532602 +step:6518 train loss:3.571980 +step:6519 train loss:3.566678 +step:6520 train loss:3.653812 +step:6521 train loss:3.626938 +step:6522 train loss:3.641429 +step:6523 train loss:3.534216 +step:6524 train loss:3.621704 +step:6525 train loss:3.602724 +step:6526 train loss:3.543880 +step:6527 train loss:3.595682 +step:6528 train loss:3.616175 +step:6529 train loss:3.642026 +step:6530 train loss:3.549405 +step:6531 train loss:3.628358 +step:6532 train loss:3.556730 +step:6533 train loss:3.595016 +step:6534 train loss:3.601801 +step:6535 train loss:3.579448 +step:6536 train loss:3.710169 +step:6537 train loss:3.521439 +step:6538 train loss:3.627719 +step:6539 train loss:3.555192 +step:6540 train loss:3.663655 +step:6541 train loss:3.642195 +step:6542 train loss:3.601927 +step:6543 train loss:3.555735 +step:6544 train loss:3.537558 +step:6545 train loss:3.527360 +step:6546 train loss:3.591118 +step:6547 train loss:3.643175 +step:6548 train loss:3.588557 +step:6549 train loss:3.600303 +step:6550 train loss:3.714787 +step:6551 train loss:3.589793 +step:6552 train loss:3.584055 +step:6553 train loss:3.623721 +step:6554 train loss:3.515607 +step:6555 train loss:3.600946 +step:6556 train loss:3.471062 +step:6557 train loss:3.819255 +step:6558 train loss:3.653728 +step:6559 train loss:3.567359 +step:6560 train loss:3.603086 +step:6561 train loss:3.579050 +step:6562 train loss:3.594424 +step:6563 train loss:3.489590 +step:6564 train loss:3.590128 +step:6565 train loss:3.500506 +step:6566 train loss:3.615062 +step:6567 train loss:3.582333 +step:6568 train loss:3.628901 +step:6569 train loss:3.572396 +step:6570 train loss:3.612219 +step:6571 train loss:3.539313 +step:6572 train loss:3.619302 +step:6573 train loss:3.627883 +step:6574 train loss:3.612403 +step:6575 train loss:3.557009 +step:6576 train loss:3.548594 +step:6577 train loss:3.618946 +step:6578 train loss:3.489098 +step:6579 train loss:3.591329 +step:6580 train loss:3.546692 +step:6581 train loss:3.555691 +step:6582 
train loss:3.536674 +step:6583 train loss:3.634233 +step:6584 train loss:3.565060 +step:6585 train loss:3.604324 +step:6586 train loss:3.608634 +step:6587 train loss:3.618311 +step:6588 train loss:3.581866 +step:6589 train loss:3.607969 +step:6590 train loss:3.553155 +step:6591 train loss:3.606388 +step:6592 train loss:3.547087 +step:6593 train loss:3.560720 +step:6594 train loss:3.582885 +step:6595 train loss:3.566366 +step:6596 train loss:3.566064 +step:6597 train loss:3.588189 +step:6598 train loss:3.627688 +step:6599 train loss:3.523662 +step:6600 train loss:3.572920 +step:6601 train loss:3.636778 +step:6602 train loss:3.559766 +step:6603 train loss:3.585687 +step:6604 train loss:3.597877 +step:6605 train loss:3.576631 +step:6606 train loss:3.637753 +step:6607 train loss:3.556695 +step:6608 train loss:3.570493 +step:6609 train loss:3.544456 +step:6610 train loss:3.648059 +step:6611 train loss:3.573780 +step:6612 train loss:3.616666 +step:6613 train loss:3.535790 +step:6614 train loss:3.565348 +step:6615 train loss:3.562425 +step:6616 train loss:3.543096 +step:6617 train loss:3.582488 +step:6618 train loss:3.569412 +step:6619 train loss:3.540333 +step:6620 train loss:3.647815 +step:6621 train loss:3.523622 +step:6622 train loss:3.598449 +step:6623 train loss:3.528346 +step:6624 train loss:3.602581 +step:6625 train loss:3.644270 +step:6626 train loss:3.608524 +step:6627 train loss:3.559106 +step:6628 train loss:3.619011 +step:6629 train loss:3.520308 +step:6630 train loss:3.556951 +step:6631 train loss:3.592051 +step:6632 train loss:3.629445 +step:6633 train loss:3.582803 +step:6634 train loss:3.644510 +step:6635 train loss:3.544074 +step:6636 train loss:3.586058 +step:6637 train loss:3.548890 +step:6638 train loss:3.551600 +step:6639 train loss:3.565143 +step:6640 train loss:3.550496 +step:6641 train loss:3.563438 +step:6642 train loss:3.564894 +step:6643 train loss:3.643878 +step:6644 train loss:3.652018 +step:6645 train loss:3.524472 +step:6646 train loss:3.615783 +step:6647 train loss:3.571684 +step:6648 train loss:3.677558 +step:6649 train loss:3.602768 +step:6650 train loss:3.556311 +step:6651 train loss:3.597079 +step:6652 train loss:3.613706 +step:6653 train loss:3.555497 +step:6654 train loss:3.550796 +step:6655 train loss:3.594504 +step:6656 train loss:3.562438 +step:6657 train loss:3.588425 +step:6658 train loss:3.572880 +step:6659 train loss:3.721767 +step:6660 train loss:3.621500 +step:6661 train loss:3.545830 +step:6662 train loss:3.577605 +step:6663 train loss:3.510561 +step:6664 train loss:3.593100 +step:6665 train loss:3.599638 +step:6666 train loss:3.618449 +step:6667 train loss:3.530296 +step:6668 train loss:3.661605 +step:6669 train loss:3.542354 +step:6670 train loss:3.553093 +step:6671 train loss:3.633729 +step:6672 train loss:3.588220 +step:6673 train loss:3.593558 +step:6674 train loss:3.570239 +step:6675 train loss:3.584650 +step:6676 train loss:3.599006 +step:6677 train loss:3.554688 +step:6678 train loss:3.625044 +step:6679 train loss:3.659021 +step:6680 train loss:3.660743 +step:6681 train loss:3.614071 +step:6682 train loss:3.554134 +step:6683 train loss:3.580693 +step:6684 train loss:3.590705 +step:6685 train loss:3.603449 +step:6686 train loss:3.537188 +step:6687 train loss:3.555115 +step:6688 train loss:3.602755 +step:6689 train loss:3.607382 +step:6690 train loss:3.582123 +step:6691 train loss:3.618990 +step:6692 train loss:3.625948 +step:6693 train loss:3.657015 +step:6694 train loss:3.610250 +step:6695 train loss:3.584393 +step:6696 train loss:3.520833 
+step:6697 train loss:3.735788 +step:6698 train loss:3.581605 +step:6699 train loss:3.578537 +step:6700 train loss:3.589183 +step:6701 train loss:3.647688 +step:6702 train loss:3.538552 +step:6703 train loss:3.584836 +step:6704 train loss:3.568809 +step:6705 train loss:3.581307 +step:6706 train loss:3.558435 +step:6707 train loss:3.633850 +step:6708 train loss:3.586393 +step:6709 train loss:3.616508 +step:6710 train loss:3.603199 +step:6711 train loss:3.557267 +step:6712 train loss:3.541511 +step:6713 train loss:3.569957 +step:6714 train loss:3.613785 +step:6715 train loss:3.555912 +step:6716 train loss:3.631591 +step:6717 train loss:3.578080 +step:6718 train loss:3.598467 +step:6719 train loss:3.635050 +step:6720 train loss:3.563336 +step:6721 train loss:3.580559 +step:6722 train loss:3.559150 +step:6723 train loss:3.685118 +step:6724 train loss:3.542406 +step:6725 train loss:3.603231 +step:6726 train loss:3.558712 +step:6727 train loss:3.622723 +step:6728 train loss:3.715362 +step:6729 train loss:3.580517 +step:6730 train loss:3.573067 +step:6731 train loss:3.618906 +step:6732 train loss:3.490645 +step:6733 train loss:3.628907 +step:6734 train loss:3.552501 +step:6735 train loss:3.584055 +step:6736 train loss:3.584417 +step:6737 train loss:3.578731 +step:6738 train loss:3.615808 +step:6739 train loss:3.568351 +step:6740 train loss:3.519805 +step:6741 train loss:3.630569 +step:6742 train loss:3.589492 +step:6743 train loss:3.593764 +step:6744 train loss:3.485100 +step:6745 train loss:3.642668 +step:6746 train loss:3.569870 +step:6747 train loss:3.561987 +step:6748 train loss:3.638446 +step:6749 train loss:3.615897 +step:6750 validation loss:3.517457 +step:6750 train loss:3.541605 +step:6751 train loss:3.574852 +step:6752 train loss:3.576531 +step:6753 train loss:3.609615 +step:6754 train loss:3.590128 +step:6755 train loss:3.600521 +step:6756 train loss:3.545703 +step:6757 train loss:3.514972 +step:6758 train loss:3.692167 +step:6759 train loss:3.580964 +step:6760 train loss:3.637859 +step:6761 train loss:3.569133 +step:6762 train loss:3.592400 +step:6763 train loss:3.494273 +step:6764 train loss:3.572077 +step:6765 train loss:3.572876 +step:6766 train loss:3.570229 +step:6767 train loss:3.522335 +step:6768 train loss:3.526584 +step:6769 train loss:3.492589 +step:6770 train loss:3.575192 +step:6771 train loss:3.579765 +step:6772 train loss:3.589435 +step:6773 train loss:3.567592 +step:6774 train loss:3.581269 +step:6775 train loss:3.624139 +step:6776 train loss:3.577209 +step:6777 train loss:3.657847 +step:6778 train loss:3.539855 +step:6779 train loss:3.596801 +step:6780 train loss:3.526581 +step:6781 train loss:3.591789 +step:6782 train loss:3.504723 +step:6783 train loss:3.538255 +step:6784 train loss:3.564353 +step:6785 train loss:3.551407 +step:6786 train loss:3.567734 +step:6787 train loss:3.641110 +step:6788 train loss:3.581083 +step:6789 train loss:3.589998 +step:6790 train loss:3.586983 +step:6791 train loss:3.599025 +step:6792 train loss:3.597837 +step:6793 train loss:3.599184 +step:6794 train loss:3.566800 +step:6795 train loss:3.567800 +step:6796 train loss:3.570618 +step:6797 train loss:3.669906 +step:6798 train loss:3.571402 +step:6799 train loss:3.563631 +step:6800 train loss:3.529893 +step:6801 train loss:3.662686 +step:6802 train loss:3.610380 +step:6803 train loss:3.601487 +step:6804 train loss:3.628560 +step:6805 train loss:3.587887 +step:6806 train loss:3.522640 +step:6807 train loss:3.583126 +step:6808 train loss:3.567107 +step:6809 train loss:3.593634 +step:6810 
train loss:3.717949 +step:6811 train loss:3.620550 +step:6812 train loss:3.592252 +step:6813 train loss:3.604971 +step:6814 train loss:3.612290 +step:6815 train loss:3.658036 +step:6816 train loss:3.574787 +step:6817 train loss:3.597504 +step:6818 train loss:3.578923 +step:6819 train loss:3.558964 +step:6820 train loss:3.587395 +step:6821 train loss:3.552696 +step:6822 train loss:3.657503 +step:6823 train loss:3.638809 +step:6824 train loss:3.615816 +step:6825 train loss:3.559166 +step:6826 train loss:3.607585 +step:6827 train loss:3.592134 +step:6828 train loss:3.607856 +step:6829 train loss:3.594456 +step:6830 train loss:3.562417 +step:6831 train loss:3.521708 +step:6832 train loss:3.509367 +step:6833 train loss:3.526433 +step:6834 train loss:3.610235 +step:6835 train loss:3.584182 +step:6836 train loss:3.502242 +step:6837 train loss:3.569739 +step:6838 train loss:3.626876 +step:6839 train loss:3.714607 +step:6840 train loss:3.583393 +step:6841 train loss:3.542408 +step:6842 train loss:3.594978 +step:6843 train loss:3.695781 +step:6844 train loss:3.576349 +step:6845 train loss:3.629858 +step:6846 train loss:3.695300 +step:6847 train loss:3.621522 +step:6848 train loss:3.613327 +step:6849 train loss:3.640679 +step:6850 train loss:3.610910 +step:6851 train loss:3.537529 +step:6852 train loss:3.531765 +step:6853 train loss:3.519926 +step:6854 train loss:3.599518 +step:6855 train loss:3.569865 +step:6856 train loss:3.555040 +step:6857 train loss:3.609043 +step:6858 train loss:3.635966 +step:6859 train loss:3.544273 +step:6860 train loss:3.658851 +step:6861 train loss:3.677436 +step:6862 train loss:3.589752 +step:6863 train loss:3.586386 +step:6864 train loss:3.531343 +step:6865 train loss:3.600245 +step:6866 train loss:3.528408 +step:6867 train loss:3.709544 +step:6868 train loss:3.582058 +step:6869 train loss:3.616297 +step:6870 train loss:3.648553 +step:6871 train loss:3.563887 +step:6872 train loss:3.566659 +step:6873 train loss:3.586576 +step:6874 train loss:3.538746 +step:6875 train loss:3.548636 +step:6876 train loss:3.578014 +step:6877 train loss:3.616472 +step:6878 train loss:3.530936 +step:6879 train loss:3.578399 +step:6880 train loss:3.586621 +step:6881 train loss:3.549714 +step:6882 train loss:3.615407 +step:6883 train loss:3.614537 +step:6884 train loss:3.834401 +step:6885 train loss:3.604379 +step:6886 train loss:3.586356 +step:6887 train loss:3.520963 +step:6888 train loss:3.624684 +step:6889 train loss:3.506854 +step:6890 train loss:3.614944 +step:6891 train loss:3.622301 +step:6892 train loss:3.724253 +step:6893 train loss:3.552039 +step:6894 train loss:3.615350 +step:6895 train loss:3.615036 +step:6896 train loss:3.588886 +step:6897 train loss:3.544382 +step:6898 train loss:3.543164 +step:6899 train loss:3.628866 +step:6900 train loss:3.602956 +step:6901 train loss:3.551114 +step:6902 train loss:3.489039 +step:6903 train loss:3.532526 +step:6904 train loss:3.643097 +step:6905 train loss:3.676219 +step:6906 train loss:3.599183 +step:6907 train loss:3.612241 +step:6908 train loss:3.648260 +step:6909 train loss:3.643465 +step:6910 train loss:3.519749 +step:6911 train loss:3.647473 +step:6912 train loss:3.539700 +step:6913 train loss:3.576324 +step:6914 train loss:3.535670 +step:6915 train loss:3.561886 +step:6916 train loss:3.536421 +step:6917 train loss:3.662292 +step:6918 train loss:3.607312 +step:6919 train loss:3.601723 +step:6920 train loss:3.586601 +step:6921 train loss:3.653133 +step:6922 train loss:3.645595 +step:6923 train loss:3.509027 +step:6924 train loss:3.589525 
+step:6925 train loss:3.565296 +step:6926 train loss:3.601444 +step:6927 train loss:3.652677 +step:6928 train loss:3.540219 +step:6929 train loss:3.558083 +step:6930 train loss:3.586335 +step:6931 train loss:3.587021 +step:6932 train loss:3.816597 +step:6933 train loss:3.652032 +step:6934 train loss:3.591027 +step:6935 train loss:3.575688 +step:6936 train loss:3.615052 +step:6937 train loss:3.559354 +step:6938 train loss:3.623691 +step:6939 train loss:3.556664 +step:6940 train loss:3.610735 +step:6941 train loss:3.527412 +step:6942 train loss:3.613921 +step:6943 train loss:3.506629 +step:6944 train loss:3.601266 +step:6945 train loss:3.541305 +step:6946 train loss:3.628719 +step:6947 train loss:3.555386 +step:6948 train loss:3.548623 +step:6949 train loss:3.625137 +step:6950 train loss:3.615297 +step:6951 train loss:3.618996 +step:6952 train loss:3.549808 +step:6953 train loss:3.595691 +step:6954 train loss:3.658581 +step:6955 train loss:3.569183 +step:6956 train loss:3.609982 +step:6957 train loss:3.597353 +step:6958 train loss:3.559265 +step:6959 train loss:3.598349 +step:6960 train loss:3.566982 +step:6961 train loss:3.572308 +step:6962 train loss:3.553040 +step:6963 train loss:3.526840 +step:6964 train loss:3.568330 +step:6965 train loss:3.558758 +step:6966 train loss:3.604676 +step:6967 train loss:3.543603 +step:6968 train loss:3.582407 +step:6969 train loss:3.601367 +step:6970 train loss:3.576619 +step:6971 train loss:3.638954 +step:6972 train loss:3.586366 +step:6973 train loss:3.547204 +step:6974 train loss:3.672808 +step:6975 train loss:3.577142 +step:6976 train loss:3.550359 +step:6977 train loss:3.585841 +step:6978 train loss:3.578366 +step:6979 train loss:3.589955 +step:6980 train loss:3.567621 +step:6981 train loss:3.625034 +step:6982 train loss:3.580655 +step:6983 train loss:3.569866 +step:6984 train loss:3.687129 +step:6985 train loss:3.533670 +step:6986 train loss:3.523433 +step:6987 train loss:3.576593 +step:6988 train loss:3.578218 +step:6989 train loss:3.724065 +step:6990 train loss:3.587655 +step:6991 train loss:3.544386 +step:6992 train loss:3.593075 +step:6993 train loss:3.661993 +step:6994 train loss:3.604918 +step:6995 train loss:3.556768 +step:6996 train loss:3.561702 +step:6997 train loss:3.638792 +step:6998 train loss:3.542146 +step:6999 train loss:3.588638 +step:7000 validation loss:3.513538 total_sharp:2.1178e-04 L1_sharp:5.3868e-05 L2_sharp:9.1493e-06 L3_sharp:1.1927e-05 L4_sharp:3.3066e-05 L5_sharp:3.3153e-05 L6_sharp:2.1363e-05 L7_sharp:3.7800e-05 L8_sharp:5.6120e-05 L9_sharp:7.5778e-05 L10_sharp:9.5900e-05 L11_sharp:6.0082e-05 L12_sharp:2.9494e-04 total_fnorm:1.1080e+01 total_l1_linf:9.8300e+04 total_spectral:1.1080e+01 L1_fnorm:2.6237e+00 L2_fnorm:2.6155e+00 L3_fnorm:2.5799e+00 L4_fnorm:2.5736e+00 L5_fnorm:2.5243e+00 L6_fnorm:2.6179e+00 L7_fnorm:2.6472e+00 L8_fnorm:2.5983e+00 L9_fnorm:2.5953e+00 L10_fnorm:2.5839e+00 L11_fnorm:2.5983e+00 L12_fnorm:2.5231e+00 L1_l1linf:2.6034e+00 L2_l1linf:2.6778e+00 L3_l1linf:2.7675e+00 L4_l1linf:2.6627e+00 L5_l1linf:2.5443e+00 L6_l1linf:2.6620e+00 L7_l1linf:2.7660e+00 L8_l1linf:2.8535e+00 L9_l1linf:2.9000e+00 L10_l1linf:3.1861e+00 L11_l1linf:3.2471e+00 L12_l1linf:3.3820e+00 L1_spectral:3.3234e-01 L2_spectral:3.0078e-01 L3_spectral:2.9447e-01 L4_spectral:3.2575e-01 L5_spectral:2.8144e-01 L6_spectral:2.6257e-01 L7_spectral:3.0420e-01 L8_spectral:3.3941e-01 L9_spectral:3.8464e-01 L10_spectral:3.9958e-01 L11_spectral:3.9047e-01 L12_spectral:4.4637e-01 ip_v_neg_g:1.3263e-02 cos_v_neg_g:3.0412e-03 v_norm:1.1080e+01 
g_norm:3.9359e-01 hv_norm:2.5234e-01 cos_v_hv:9.2988e-03 hg_norm:2.5850e+00 cos_g_hg:4.6623e-01 v_par:2.8932e-03 v_perp:1.1080e+01 L1_cos_v_neg_g:9.7826e-03 L1_v_norm:2.6237e+00 L2_cos_v_neg_g:5.5846e-03 L2_v_norm:2.6155e+00 L3_cos_v_neg_g:6.6971e-03 L3_v_norm:2.5799e+00 L4_cos_v_neg_g:5.4162e-03 L4_v_norm:2.5736e+00 L5_cos_v_neg_g:3.6941e-03 L5_v_norm:2.5243e+00 L6_cos_v_neg_g:3.4669e-03 L6_v_norm:2.6179e+00 L7_cos_v_neg_g:4.7298e-03 L7_v_norm:2.6472e+00 L8_cos_v_neg_g:4.2943e-03 L8_v_norm:2.5983e+00 L9_cos_v_neg_g:5.9695e-03 L9_v_norm:2.5953e+00 L10_cos_v_neg_g:8.6587e-03 L10_v_norm:2.5839e+00 L11_cos_v_neg_g:8.9201e-03 L11_v_norm:2.5983e+00 L12_cos_v_neg_g:1.3036e-02 L12_v_norm:2.5231e+00 +step:7000 train loss:3.665562 +step:7001 train loss:3.568665 +step:7002 train loss:3.561357 +step:7003 train loss:3.585124 +step:7004 train loss:3.578826 +step:7005 train loss:3.565481 +step:7006 train loss:3.568071 +step:7007 train loss:3.622375 +step:7008 train loss:3.561169 +step:7009 train loss:3.603450 +step:7010 train loss:3.535223 +step:7011 train loss:3.593320 +step:7012 train loss:3.565661 +step:7013 train loss:3.637741 +step:7014 train loss:3.544292 +step:7015 train loss:3.605144 +step:7016 train loss:3.594749 +step:7017 train loss:3.559774 +step:7018 train loss:3.638498 +step:7019 train loss:3.567947 +step:7020 train loss:3.613571 +step:7021 train loss:3.556943 +step:7022 train loss:3.570155 +step:7023 train loss:3.590852 +step:7024 train loss:3.551904 +step:7025 train loss:3.600470 +step:7026 train loss:3.560201 +step:7027 train loss:3.621356 +step:7028 train loss:3.544847 +step:7029 train loss:3.535649 +step:7030 train loss:3.537396 +step:7031 train loss:3.590982 +step:7032 train loss:3.600442 +step:7033 train loss:3.574497 +step:7034 train loss:3.594542 +step:7035 train loss:3.645762 +step:7036 train loss:3.565116 +step:7037 train loss:3.592363 +step:7038 train loss:3.552981 +step:7039 train loss:3.604221 +step:7040 train loss:3.526604 +step:7041 train loss:3.615501 +step:7042 train loss:3.548896 +step:7043 train loss:3.521060 +step:7044 train loss:3.569935 +step:7045 train loss:3.567322 +step:7046 train loss:3.561122 +step:7047 train loss:3.601279 +step:7048 train loss:3.547860 +step:7049 train loss:3.558053 +step:7050 train loss:3.580431 +step:7051 train loss:3.598052 +step:7052 train loss:3.601858 +step:7053 train loss:3.563854 +step:7054 train loss:3.545802 +step:7055 train loss:3.611819 +step:7056 train loss:3.611897 +step:7057 train loss:3.535317 +step:7058 train loss:3.653097 +step:7059 train loss:3.559996 +step:7060 train loss:3.572769 +step:7061 train loss:3.549241 +step:7062 train loss:3.570728 +step:7063 train loss:3.627640 +step:7064 train loss:3.553071 +step:7065 train loss:3.600232 +step:7066 train loss:3.560935 +step:7067 train loss:3.598828 +step:7068 train loss:3.570786 +step:7069 train loss:3.535001 +step:7070 train loss:3.560363 +step:7071 train loss:3.527673 +step:7072 train loss:3.532479 +step:7073 train loss:3.523338 +step:7074 train loss:3.521289 +step:7075 train loss:3.539010 +step:7076 train loss:3.552390 +step:7077 train loss:3.562840 +step:7078 train loss:3.608264 +step:7079 train loss:3.619245 +step:7080 train loss:3.563469 +step:7081 train loss:3.583833 +step:7082 train loss:3.553095 +step:7083 train loss:3.580669 +step:7084 train loss:3.572692 +step:7085 train loss:3.535136 +step:7086 train loss:3.573473 +step:7087 train loss:3.548634 +step:7088 train loss:3.670553 +step:7089 train loss:3.565655 +step:7090 train loss:3.529377 +step:7091 train loss:3.545869 
+step:7092 train loss:3.524464 +step:7093 train loss:3.619150 +step:7094 train loss:3.541516 +step:7095 train loss:3.555730 +step:7096 train loss:3.576582 +step:7097 train loss:3.563537 +step:7098 train loss:3.587283 +step:7099 train loss:3.540492 +step:7100 train loss:3.574618 +step:7101 train loss:3.642064 +step:7102 train loss:3.534347 +step:7103 train loss:3.560349 +step:7104 train loss:3.588736 +step:7105 train loss:3.568505 +step:7106 train loss:3.555220 +step:7107 train loss:3.586738 +step:7108 train loss:3.656803 +step:7109 train loss:3.583953 +step:7110 train loss:3.611326 +step:7111 train loss:3.590720 +step:7112 train loss:3.581466 +step:7113 train loss:3.581817 +step:7114 train loss:3.600931 +step:7115 train loss:3.635075 +step:7116 train loss:3.566221 +step:7117 train loss:3.603118 +step:7118 train loss:3.614441 +step:7119 train loss:3.576581 +step:7120 train loss:3.634586 +step:7121 train loss:3.548731 +step:7122 train loss:3.551491 +step:7123 train loss:3.492402 +step:7124 train loss:3.648239 +step:7125 train loss:3.501836 +step:7126 train loss:3.664385 +step:7127 train loss:3.627277 +step:7128 train loss:3.569173 +step:7129 train loss:3.576997 +step:7130 train loss:3.565030 +step:7131 train loss:3.505433 +step:7132 train loss:3.546665 +step:7133 train loss:3.594748 +step:7134 train loss:3.521779 +step:7135 train loss:3.581766 +step:7136 train loss:3.563920 +step:7137 train loss:3.541853 +step:7138 train loss:3.531195 +step:7139 train loss:3.536654 +step:7140 train loss:3.568468 +step:7141 train loss:3.569943 +step:7142 train loss:3.563293 +step:7143 train loss:3.599518 +step:7144 train loss:3.549703 +step:7145 train loss:3.565083 +step:7146 train loss:3.573034 +step:7147 train loss:3.595996 +step:7148 train loss:3.597843 +step:7149 train loss:3.606024 +step:7150 train loss:3.578251 +step:7151 train loss:3.544796 +step:7152 train loss:3.517888 +step:7153 train loss:3.549552 +step:7154 train loss:3.569652 +step:7155 train loss:3.588657 +step:7156 train loss:3.557665 +step:7157 train loss:3.578346 +step:7158 train loss:3.533066 +step:7159 train loss:3.584342 +step:7160 train loss:3.596636 +step:7161 train loss:3.546035 +step:7162 train loss:3.593930 +step:7163 train loss:3.527991 +step:7164 train loss:3.568106 +step:7165 train loss:3.569933 +step:7166 train loss:3.626205 +step:7167 train loss:3.606749 +step:7168 train loss:3.582017 +step:7169 train loss:3.558969 +step:7170 train loss:3.590673 +step:7171 train loss:3.537432 +step:7172 train loss:3.699116 +step:7173 train loss:3.545698 +step:7174 train loss:3.588330 +step:7175 train loss:3.563622 +step:7176 train loss:3.569604 +step:7177 train loss:3.588158 +step:7178 train loss:3.587167 +step:7179 train loss:3.570907 +step:7180 train loss:3.572377 +step:7181 train loss:3.601130 +step:7182 train loss:3.554336 +step:7183 train loss:3.625389 +step:7184 train loss:3.715751 +step:7185 train loss:3.627302 +step:7186 train loss:3.564891 +step:7187 train loss:3.580704 +step:7188 train loss:3.566313 +step:7189 train loss:3.565598 +step:7190 train loss:3.566907 +step:7191 train loss:3.559684 +step:7192 train loss:3.592469 +step:7193 train loss:3.512009 +step:7194 train loss:3.574332 +step:7195 train loss:3.551040 +step:7196 train loss:3.599019 +step:7197 train loss:3.577336 +step:7198 train loss:3.633323 +step:7199 train loss:3.591474 +step:7200 train loss:3.586620 +step:7201 train loss:3.595316 +step:7202 train loss:3.570568 +step:7203 train loss:3.587415 +step:7204 train loss:3.556243 +step:7205 train loss:3.514380 +step:7206 train 
loss:3.540517 +step:7207 train loss:3.716632 +step:7208 train loss:3.551214 +step:7209 train loss:3.632338 +step:7210 train loss:3.570837 +step:7211 train loss:3.600500 +step:7212 train loss:3.677120 +step:7213 train loss:3.531777 +step:7214 train loss:3.598465 +step:7215 train loss:3.569072 +step:7216 train loss:3.619806 +step:7217 train loss:3.575351 +step:7218 train loss:3.664053 +step:7219 train loss:3.573272 +step:7220 train loss:3.650699 +step:7221 train loss:3.529589 +step:7222 train loss:3.615119 +step:7223 train loss:3.529589 +step:7224 train loss:3.593998 +step:7225 train loss:3.569671 +step:7226 train loss:3.537106 +step:7227 train loss:3.560658 +step:7228 train loss:3.547506 +step:7229 train loss:3.551873 +step:7230 train loss:3.534713 +step:7231 train loss:3.665533 +step:7232 train loss:3.535089 +step:7233 train loss:3.607348 +step:7234 train loss:3.594906 +step:7235 train loss:3.567301 +step:7236 train loss:3.606105 +step:7237 train loss:3.556936 +step:7238 train loss:3.594298 +step:7239 train loss:3.550538 +step:7240 train loss:3.545706 +step:7241 train loss:3.561205 +step:7242 train loss:3.540903 +step:7243 train loss:3.587815 +step:7244 train loss:3.562326 +step:7245 train loss:3.567082 +step:7246 train loss:3.606292 +step:7247 train loss:3.562189 +step:7248 train loss:3.601365 +step:7249 train loss:3.549584 +step:7250 validation loss:3.500528 +step:7250 train loss:3.574718 +step:7251 train loss:3.619628 +step:7252 train loss:3.536208 +step:7253 train loss:3.624865 +step:7254 train loss:3.560303 +step:7255 train loss:3.528819 +step:7256 train loss:3.572429 +step:7257 train loss:3.617780 +step:7258 train loss:3.569524 +step:7259 train loss:3.555836 +step:7260 train loss:3.642256 +step:7261 train loss:3.600369 +step:7262 train loss:3.551812 +step:7263 train loss:3.594433 +step:7264 train loss:3.582847 +step:7265 train loss:3.483207 +step:7266 train loss:3.609457 +step:7267 train loss:3.527484 +step:7268 train loss:3.588436 +step:7269 train loss:3.595880 +step:7270 train loss:3.551124 +step:7271 train loss:3.566053 +step:7272 train loss:3.572489 +step:7273 train loss:3.568276 +step:7274 train loss:3.546488 +step:7275 train loss:3.618070 +step:7276 train loss:3.524605 +step:7277 train loss:3.572654 +step:7278 train loss:3.540015 +step:7279 train loss:3.524488 +step:7280 train loss:3.592156 +step:7281 train loss:3.611009 +step:7282 train loss:3.613877 +step:7283 train loss:3.504194 +step:7284 train loss:3.546817 +step:7285 train loss:3.573063 +step:7286 train loss:3.702606 +step:7287 train loss:3.614081 +step:7288 train loss:3.570363 +step:7289 train loss:3.572025 +step:7290 train loss:3.619823 +step:7291 train loss:3.583460 +step:7292 train loss:3.651027 +step:7293 train loss:3.548809 +step:7294 train loss:3.632947 +step:7295 train loss:3.524979 +step:7296 train loss:3.521629 +step:7297 train loss:3.567888 +step:7298 train loss:3.545606 +step:7299 train loss:3.582721 +step:7300 train loss:3.569213 +step:7301 train loss:3.522275 +step:7302 train loss:3.664450 +step:7303 train loss:3.557966 +step:7304 train loss:3.502990 +step:7305 train loss:3.576181 +step:7306 train loss:3.607262 +step:7307 train loss:3.613069 +step:7308 train loss:3.562674 +step:7309 train loss:3.528951 +step:7310 train loss:3.556481 +step:7311 train loss:3.542930 +step:7312 train loss:3.581818 +step:7313 train loss:3.620233 +step:7314 train loss:3.516671 +step:7315 train loss:3.509268 +step:7316 train loss:3.648637 +step:7317 train loss:3.591971 +step:7318 train loss:3.534305 +step:7319 train loss:3.558244 
+step:7320 train loss:3.591474 +step:7321 train loss:3.617346 +step:7322 train loss:3.499657 +step:7323 train loss:3.553387 +step:7324 train loss:3.578494 +step:7325 train loss:3.542140 +step:7326 train loss:3.572077 +step:7327 train loss:3.547552 +step:7328 train loss:3.668130 +step:7329 train loss:3.510814 +step:7330 train loss:3.563796 +step:7331 train loss:3.558937 +step:7332 train loss:3.599729 +step:7333 train loss:3.580801 +step:7334 train loss:3.550052 +step:7335 train loss:3.546404 +step:7336 train loss:3.800177 +step:7337 train loss:3.587389 +step:7338 train loss:3.582697 +step:7339 train loss:3.593821 +step:7340 train loss:3.580488 +step:7341 train loss:3.571590 +step:7342 train loss:3.562463 +step:7343 train loss:3.574595 +step:7344 train loss:3.654400 +step:7345 train loss:3.512830 +step:7346 train loss:3.549303 +step:7347 train loss:3.543435 +step:7348 train loss:3.546707 +step:7349 train loss:3.645784 +step:7350 train loss:3.629691 +step:7351 train loss:3.564567 +step:7352 train loss:3.593303 +step:7353 train loss:3.575585 +step:7354 train loss:3.524394 +step:7355 train loss:3.707190 +step:7356 train loss:3.679399 +step:7357 train loss:3.604892 +step:7358 train loss:3.583044 +step:7359 train loss:3.552302 +step:7360 train loss:3.562072 +step:7361 train loss:3.514067 +step:7362 train loss:3.563606 +step:7363 train loss:3.577054 +step:7364 train loss:3.609193 +step:7365 train loss:3.595759 +step:7366 train loss:3.560219 +step:7367 train loss:3.636257 +step:7368 train loss:3.614890 +step:7369 train loss:3.608198 +step:7370 train loss:3.573465 +step:7371 train loss:3.530983 +step:7372 train loss:3.590690 +step:7373 train loss:3.610090 +step:7374 train loss:3.703193 +step:7375 train loss:3.532215 +step:7376 train loss:3.553236 +step:7377 train loss:3.595276 +step:7378 train loss:3.549628 +step:7379 train loss:3.669722 +step:7380 train loss:3.636418 +step:7381 train loss:3.599360 +step:7382 train loss:3.561750 +step:7383 train loss:3.655967 +step:7384 train loss:3.597260 +step:7385 train loss:3.555626 +step:7386 train loss:3.560095 +step:7387 train loss:3.603622 +step:7388 train loss:3.633574 +step:7389 train loss:3.577003 +step:7390 train loss:3.519996 +step:7391 train loss:3.556453 +step:7392 train loss:3.612160 +step:7393 train loss:3.578970 +step:7394 train loss:3.621789 +step:7395 train loss:3.509142 +step:7396 train loss:3.602180 +step:7397 train loss:3.538089 +step:7398 train loss:3.553536 +step:7399 train loss:3.601649 +step:7400 train loss:3.603088 +step:7401 train loss:3.520335 +step:7402 train loss:3.639943 +step:7403 train loss:3.522645 +step:7404 train loss:3.592204 +step:7405 train loss:3.713363 +step:7406 train loss:3.540135 +step:7407 train loss:3.588905 +step:7408 train loss:3.586884 +step:7409 train loss:3.560609 +step:7410 train loss:3.727122 +step:7411 train loss:3.573800 +step:7412 train loss:3.577975 +step:7413 train loss:3.629017 +step:7414 train loss:3.539451 +step:7415 train loss:3.598302 +step:7416 train loss:3.482732 +step:7417 train loss:3.600581 +step:7418 train loss:3.581153 +step:7419 train loss:3.551840 +step:7420 train loss:3.540198 +step:7421 train loss:3.577412 +step:7422 train loss:3.536293 +step:7423 train loss:3.673522 +step:7424 train loss:3.734337 +step:7425 train loss:3.625789 +step:7426 train loss:3.590461 +step:7427 train loss:3.559966 +step:7428 train loss:3.579997 +step:7429 train loss:3.597720 +step:7430 train loss:3.527666 +step:7431 train loss:3.529678 +step:7432 train loss:3.540833 +step:7433 train loss:3.635282 +step:7434 train 
loss:3.552249 +step:7435 train loss:3.636523 +step:7436 train loss:3.677669 +step:7437 train loss:3.499720 +step:7438 train loss:3.559666 +step:7439 train loss:3.572085 +step:7440 train loss:3.543903 +step:7441 train loss:3.509128 +step:7442 train loss:3.740056 +step:7443 train loss:3.562540 +step:7444 train loss:3.607133 +step:7445 train loss:3.535697 +step:7446 train loss:3.557679 +step:7447 train loss:3.490050 +step:7448 train loss:3.541779 +step:7449 train loss:3.555449 +step:7450 train loss:3.588281 +step:7451 train loss:3.620001 +step:7452 train loss:3.548339 +step:7453 train loss:3.573240 +step:7454 train loss:3.560968 +step:7455 train loss:3.568137 +step:7456 train loss:3.542378 +step:7457 train loss:3.550046 +step:7458 train loss:3.590755 +step:7459 train loss:3.568392 +step:7460 train loss:3.576298 +step:7461 train loss:3.611089 +step:7462 train loss:3.543958 +step:7463 train loss:3.609491 +step:7464 train loss:3.533978 +step:7465 train loss:3.541022 +step:7466 train loss:3.543590 +step:7467 train loss:3.556128 +step:7468 train loss:3.602962 +step:7469 train loss:3.536405 +step:7470 train loss:3.564642 +step:7471 train loss:3.555991 +step:7472 train loss:3.591861 +step:7473 train loss:3.532524 +step:7474 train loss:3.516233 +step:7475 train loss:3.546599 +step:7476 train loss:3.582340 +step:7477 train loss:3.557388 +step:7478 train loss:3.555451 +step:7479 train loss:3.567979 +step:7480 train loss:3.849662 +step:7481 train loss:3.502443 +step:7482 train loss:3.573467 +step:7483 train loss:3.565262 +step:7484 train loss:3.588819 +step:7485 train loss:3.572498 +step:7486 train loss:3.596620 +step:7487 train loss:3.590879 +step:7488 train loss:3.611913 +step:7489 train loss:3.604901 +step:7490 train loss:3.552925 +step:7491 train loss:3.573477 +step:7492 train loss:3.682139 +step:7493 train loss:3.657271 +step:7494 train loss:3.680198 +step:7495 train loss:3.551600 +step:7496 train loss:3.538750 +step:7497 train loss:3.636349 +step:7498 train loss:3.568307 +step:7499 train loss:3.608185 +step:7500 validation loss:3.497371 total_sharp:1.7703e-04 L1_sharp:4.8609e-05 L2_sharp:6.9416e-06 L3_sharp:1.4291e-05 L4_sharp:2.1890e-05 L5_sharp:3.5114e-05 L6_sharp:1.8016e-05 L7_sharp:2.6973e-05 L8_sharp:4.8646e-05 L9_sharp:7.6379e-05 L10_sharp:8.1348e-05 L11_sharp:6.2690e-05 L12_sharp:2.0052e-04 total_fnorm:1.1055e+01 total_l1_linf:9.7901e+04 total_spectral:1.1055e+01 L1_fnorm:2.6227e+00 L2_fnorm:2.5985e+00 L3_fnorm:2.5684e+00 L4_fnorm:2.5545e+00 L5_fnorm:2.5181e+00 L6_fnorm:2.6157e+00 L7_fnorm:2.6382e+00 L8_fnorm:2.5832e+00 L9_fnorm:2.5792e+00 L10_fnorm:2.5717e+00 L11_fnorm:2.6057e+00 L12_fnorm:2.5206e+00 L1_l1linf:2.5831e+00 L2_l1linf:2.7971e+00 L3_l1linf:2.7050e+00 L4_l1linf:2.6646e+00 L5_l1linf:2.4718e+00 L6_l1linf:2.5485e+00 L7_l1linf:2.7527e+00 L8_l1linf:2.6440e+00 L9_l1linf:2.9735e+00 L10_l1linf:2.9208e+00 L11_l1linf:2.8964e+00 L12_l1linf:2.7336e+00 L1_spectral:3.2803e-01 L2_spectral:3.0582e-01 L3_spectral:2.8462e-01 L4_spectral:3.3322e-01 L5_spectral:2.8599e-01 L6_spectral:2.5790e-01 L7_spectral:3.0115e-01 L8_spectral:3.3215e-01 L9_spectral:3.8543e-01 L10_spectral:4.0140e-01 L11_spectral:3.8551e-01 L12_spectral:4.0983e-01 ip_v_neg_g:7.5950e-03 cos_v_neg_g:1.6555e-03 v_norm:1.1055e+01 g_norm:4.1500e-01 hv_norm:2.2585e-01 cos_v_hv:8.6651e-03 hg_norm:2.8579e+00 cos_g_hg:4.6034e-01 v_par:1.5544e-03 v_perp:1.1055e+01 L1_cos_v_neg_g:4.8235e-03 L1_v_norm:2.6227e+00 L2_cos_v_neg_g:1.2501e-03 L2_v_norm:2.5985e+00 L3_cos_v_neg_g:1.3319e-03 L3_v_norm:2.5684e+00 L4_cos_v_neg_g:2.1872e-03 
L4_v_norm:2.5545e+00 L5_cos_v_neg_g:1.8960e-03 L5_v_norm:2.5181e+00 L6_cos_v_neg_g:1.5220e-03 L6_v_norm:2.6157e+00 L7_cos_v_neg_g:2.3982e-03 L7_v_norm:2.6382e+00 L8_cos_v_neg_g:3.6546e-03 L8_v_norm:2.5832e+00 L9_cos_v_neg_g:4.6255e-03 L9_v_norm:2.5792e+00 L10_cos_v_neg_g:5.6361e-03 L10_v_norm:2.5717e+00 L11_cos_v_neg_g:6.1942e-03 L11_v_norm:2.6057e+00 L12_cos_v_neg_g:6.8238e-03 L12_v_norm:2.5206e+00 +step:7500 train loss:3.553285 +step:7501 train loss:3.544984 +step:7502 train loss:3.536026 +step:7503 train loss:3.515773 +step:7504 train loss:3.536753 +step:7505 train loss:3.524958 +step:7506 train loss:3.586741 +step:7507 train loss:3.502799 +step:7508 train loss:3.574164 +step:7509 train loss:3.546490 +step:7510 train loss:3.576111 +step:7511 train loss:3.580299 +step:7512 train loss:3.838614 +step:7513 train loss:3.534041 +step:7514 train loss:3.567102 +step:7515 train loss:3.530268 +step:7516 train loss:3.543298 +step:7517 train loss:3.575567 +step:7518 train loss:3.553308 +step:7519 train loss:3.563544 +step:7520 train loss:3.627107 +step:7521 train loss:3.518025 +step:7522 train loss:3.571147 +step:7523 train loss:3.603607 +step:7524 train loss:3.552647 +step:7525 train loss:3.555631 +step:7526 train loss:3.505361 +step:7527 train loss:3.511715 +step:7528 train loss:3.608914 +step:7529 train loss:3.586820 +step:7530 train loss:3.535483 +step:7531 train loss:3.607872 +step:7532 train loss:3.597158 +step:7533 train loss:3.522352 +step:7534 train loss:3.588648 +step:7535 train loss:3.590838 +step:7536 train loss:3.622466 +step:7537 train loss:3.640614 +step:7538 train loss:3.668177 +step:7539 train loss:3.568165 +step:7540 train loss:3.555449 +step:7541 train loss:3.609425 +step:7542 train loss:3.571262 +step:7543 train loss:3.528189 +step:7544 train loss:3.571295 +step:7545 train loss:3.556352 +step:7546 train loss:3.511698 +step:7547 train loss:3.561313 +step:7548 train loss:3.572150 +step:7549 train loss:3.552902 +step:7550 train loss:3.552465 +step:7551 train loss:3.650529 +step:7552 train loss:3.566472 +step:7553 train loss:3.604178 +step:7554 train loss:3.527974 +step:7555 train loss:3.618069 +step:7556 train loss:3.522130 +step:7557 train loss:3.618533 +step:7558 train loss:3.605790 +step:7559 train loss:3.562450 +step:7560 train loss:3.657544 +step:7561 train loss:3.630198 +step:7562 train loss:3.533649 +step:7563 train loss:3.528068 +step:7564 train loss:3.585545 +step:7565 train loss:3.601894 +step:7566 train loss:3.591274 +step:7567 train loss:3.609960 +step:7568 train loss:3.554216 +step:7569 train loss:3.612273 +step:7570 train loss:3.594990 +step:7571 train loss:3.678171 +step:7572 train loss:3.523511 +step:7573 train loss:3.594337 +step:7574 train loss:3.555351 +step:7575 train loss:3.551522 +step:7576 train loss:3.559072 +step:7577 train loss:3.575946 +step:7578 train loss:3.631962 +step:7579 train loss:3.569122 +step:7580 train loss:3.557981 +step:7581 train loss:3.544080 +step:7582 train loss:3.600542 +step:7583 train loss:3.534997 +step:7584 train loss:3.520030 +step:7585 train loss:3.491571 +step:7586 train loss:3.526816 +step:7587 train loss:3.586352 +step:7588 train loss:3.715075 +step:7589 train loss:3.536757 +step:7590 train loss:3.602529 +step:7591 train loss:3.606310 +step:7592 train loss:3.564670 +step:7593 train loss:3.589515 +step:7594 train loss:3.587390 +step:7595 train loss:3.556755 +step:7596 train loss:3.609968 +step:7597 train loss:3.515140 +step:7598 train loss:3.577196 +step:7599 train loss:3.570990 +step:7600 train loss:3.527728 +step:7601 train 
loss:3.642324 +step:7602 train loss:3.583626 +step:7603 train loss:3.540910 +step:7604 train loss:3.688211 +step:7605 train loss:3.577603 +step:7606 train loss:3.610792 +step:7607 train loss:3.565285 +step:7608 train loss:3.573995 +step:7609 train loss:3.607308 +step:7610 train loss:3.566177 +step:7611 train loss:3.544025 +step:7612 train loss:3.484611 +step:7613 train loss:3.534091 +step:7614 train loss:3.603689 +step:7615 train loss:3.564932 +step:7616 train loss:3.629775 +step:7617 train loss:3.528914 +step:7618 train loss:3.617539 +step:7619 train loss:3.559522 +step:7620 train loss:3.547563 +step:7621 train loss:3.494915 +step:7622 train loss:3.767807 +step:7623 train loss:3.783446 +step:7624 train loss:3.601482 +step:7625 train loss:3.635751 +step:7626 train loss:3.555183 +step:7627 train loss:3.626093 +step:7628 train loss:3.507617 +step:7629 train loss:3.565195 +step:7630 train loss:3.582666 +step:7631 train loss:3.562177 +step:7632 train loss:3.611377 +step:7633 train loss:3.681213 +step:7634 train loss:3.643152 +step:7635 train loss:3.544958 +step:7636 train loss:3.575353 +step:7637 train loss:3.520074 +step:7638 train loss:3.629827 +step:7639 train loss:3.562046 +step:7640 train loss:3.538843 +step:7641 train loss:3.571068 +step:7642 train loss:3.907085 +step:7643 train loss:3.658212 +step:7644 train loss:3.581183 +step:7645 train loss:3.572558 +step:7646 train loss:3.559470 +step:7647 train loss:3.551031 +step:7648 train loss:3.586280 +step:7649 train loss:3.546232 +step:7650 train loss:3.594803 +step:7651 train loss:3.616087 +step:7652 train loss:3.492006 +step:7653 train loss:3.686550 +step:7654 train loss:3.547935 +step:7655 train loss:3.565217 +step:7656 train loss:3.541554 +step:7657 train loss:3.559378 +step:7658 train loss:3.511258 +step:7659 train loss:3.573387 +step:7660 train loss:3.506860 +step:7661 train loss:3.523296 +step:7662 train loss:3.522196 +step:7663 train loss:3.573832 +step:7664 train loss:3.533131 +step:7665 train loss:3.505679 +step:7666 train loss:3.613827 +step:7667 train loss:3.527286 +step:7668 train loss:3.636297 +step:7669 train loss:3.571810 +step:7670 train loss:3.524797 +step:7671 train loss:3.582750 +step:7672 train loss:3.601145 +step:7673 train loss:3.562858 +step:7674 train loss:3.603539 +step:7675 train loss:3.656945 +step:7676 train loss:3.624500 +step:7677 train loss:3.653476 +step:7678 train loss:3.591761 +step:7679 train loss:3.614721 +step:7680 train loss:3.621877 +step:7681 train loss:3.589636 +step:7682 train loss:3.555795 +step:7683 train loss:3.562740 +step:7684 train loss:3.534745 +step:7685 train loss:3.512142 +step:7686 train loss:3.631271 +step:7687 train loss:3.549234 +step:7688 train loss:3.515114 +step:7689 train loss:3.565256 +step:7690 train loss:3.527711 +step:7691 train loss:3.556530 +step:7692 train loss:3.590407 +step:7693 train loss:3.591742 +step:7694 train loss:3.646018 +step:7695 train loss:3.575416 +step:7696 train loss:3.549109 +step:7697 train loss:3.534832 +step:7698 train loss:3.598053 +step:7699 train loss:3.590675 +step:7700 train loss:3.492750 +step:7701 train loss:3.606769 +step:7702 train loss:3.550044 +step:7703 train loss:3.551511 +step:7704 train loss:3.601752 +step:7705 train loss:3.562338 +step:7706 train loss:3.497875 +step:7707 train loss:3.617287 +step:7708 train loss:3.559935 +step:7709 train loss:3.575849 +step:7710 train loss:3.638713 +step:7711 train loss:3.600724 +step:7712 train loss:3.543358 +step:7713 train loss:3.624917 +step:7714 train loss:3.568534 +step:7715 train loss:3.521511 
+step:7716 train loss:3.561348 +step:7717 train loss:3.587529 +step:7718 train loss:3.589663 +step:7719 train loss:3.546227 +step:7720 train loss:3.559579 +step:7721 train loss:3.605910 +step:7722 train loss:3.531219 +step:7723 train loss:3.901664 +step:7724 train loss:3.569321 +step:7725 train loss:3.476159 +step:7726 train loss:3.555658 +step:7727 train loss:3.582140 +step:7728 train loss:3.543346 +step:7729 train loss:3.544604 +step:7730 train loss:3.566180 +step:7731 train loss:3.593835 +step:7732 train loss:3.617360 +step:7733 train loss:3.528195 +step:7734 train loss:3.554186 +step:7735 train loss:3.642351 +step:7736 train loss:3.587644 +step:7737 train loss:3.607556 +step:7738 train loss:3.510629 +step:7739 train loss:3.584957 +step:7740 train loss:3.532691 +step:7741 train loss:3.568937 +step:7742 train loss:3.570611 +step:7743 train loss:3.522001 +step:7744 train loss:3.642443 +step:7745 train loss:3.536992 +step:7746 train loss:3.513522 +step:7747 train loss:3.607771 +step:7748 train loss:3.587277 +step:7749 train loss:3.511454 +step:7750 validation loss:3.496646 +step:7750 train loss:3.671356 +step:7751 train loss:3.551803 +step:7752 train loss:3.545398 +step:7753 train loss:3.549692 +step:7754 train loss:3.521446 +step:7755 train loss:3.585029 +step:7756 train loss:3.614486 +step:7757 train loss:3.564311 +step:7758 train loss:3.533605 +step:7759 train loss:3.559889 +step:7760 train loss:3.589920 +step:7761 train loss:3.579239 +step:7762 train loss:3.564290 +step:7763 train loss:3.552262 +step:7764 train loss:3.551430 +step:7765 train loss:3.510212 +step:7766 train loss:3.573908 +step:7767 train loss:3.578949 +step:7768 train loss:3.532476 +step:7769 train loss:3.596530 +step:7770 train loss:3.615700 +step:7771 train loss:3.587807 +step:7772 train loss:3.560937 +step:7773 train loss:3.619894 +step:7774 train loss:3.517452 +step:7775 train loss:3.506321 +step:7776 train loss:3.607404 +step:7777 train loss:3.563282 +step:7778 train loss:3.521832 +step:7779 train loss:3.562954 +step:7780 train loss:3.557095 +step:7781 train loss:3.568685 +step:7782 train loss:3.544831 +step:7783 train loss:3.530255 +step:7784 train loss:3.529977 +step:7785 train loss:3.572624 +step:7786 train loss:3.529222 +step:7787 train loss:3.609905 +step:7788 train loss:3.561892 +step:7789 train loss:3.496427 +step:7790 train loss:3.552402 +step:7791 train loss:3.587633 +step:7792 train loss:3.549777 +step:7793 train loss:3.570637 +step:7794 train loss:3.557424 +step:7795 train loss:3.591269 +step:7796 train loss:3.553549 +step:7797 train loss:3.568871 +step:7798 train loss:3.567235 +step:7799 train loss:3.552625 +step:7800 train loss:3.510640 +step:7801 train loss:3.575187 +step:7802 train loss:3.555775 +step:7803 train loss:3.604834 +step:7804 train loss:3.567187 +step:7805 train loss:3.564859 +step:7806 train loss:3.581571 +step:7807 train loss:3.655919 +step:7808 train loss:3.521484 +step:7809 train loss:3.496212 +step:7810 train loss:3.583727 +step:7811 train loss:3.518729 +step:7812 train loss:3.540249 +step:7813 train loss:3.625109 +step:7814 train loss:3.698705 +step:7815 train loss:3.506111 +step:7816 train loss:3.593567 +step:7817 train loss:3.623082 +step:7818 train loss:3.522147 +step:7819 train loss:3.572427 +step:7820 train loss:3.614832 +step:7821 train loss:3.542526 +step:7822 train loss:3.505282 +step:7823 train loss:3.574703 +step:7824 train loss:3.560859 +step:7825 train loss:3.550705 +step:7826 train loss:3.546554 +step:7827 train loss:3.594603 +step:7828 train loss:3.581103 +step:7829 
train loss:3.532873 +step:7830 train loss:3.545659 +step:7831 train loss:3.549164 +step:7832 train loss:3.612236 +step:7833 train loss:3.590266 +step:7834 train loss:3.555589 +step:7835 train loss:3.578552 +step:7836 train loss:3.691945 +step:7837 train loss:3.575627 +step:7838 train loss:3.543814 +step:7839 train loss:3.509285 +step:7840 train loss:3.521006 +step:7841 train loss:3.616146 +step:7842 train loss:3.601136 +step:7843 train loss:3.658438 +step:7844 train loss:3.585591 +step:7845 train loss:3.562806 +step:7846 train loss:3.674840 +step:7847 train loss:3.562917 +step:7848 train loss:3.576926 +step:7849 train loss:3.586965 +step:7850 train loss:3.558439 +step:7851 train loss:3.583554 +step:7852 train loss:3.561363 +step:7853 train loss:3.531324 +step:7854 train loss:3.561783 +step:7855 train loss:3.561779 +step:7856 train loss:3.565255 +step:7857 train loss:3.552382 +step:7858 train loss:3.559806 +step:7859 train loss:3.569324 +step:7860 train loss:3.603214 +step:7861 train loss:3.588973 +step:7862 train loss:3.535731 +step:7863 train loss:3.637569 +step:7864 train loss:3.478207 +step:7865 train loss:3.553550 +step:7866 train loss:3.530705 +step:7867 train loss:3.575067 +step:7868 train loss:3.551626 +step:7869 train loss:3.559542 +step:7870 train loss:3.482412 +step:7871 train loss:3.548505 +step:7872 train loss:3.547328 +step:7873 train loss:3.620162 +step:7874 train loss:3.564202 +step:7875 train loss:3.567966 +step:7876 train loss:3.586914 +step:7877 train loss:3.539994 +step:7878 train loss:3.573470 +step:7879 train loss:3.907876 +step:7880 train loss:3.566385 +step:7881 train loss:3.593869 +step:7882 train loss:3.672946 +step:7883 train loss:3.486559 +step:7884 train loss:3.576269 +step:7885 train loss:3.561172 +step:7886 train loss:3.562580 +step:7887 train loss:3.553236 +step:7888 train loss:3.585221 +step:7889 train loss:3.631445 +step:7890 train loss:3.536320 +step:7891 train loss:3.586794 +step:7892 train loss:3.557701 +step:7893 train loss:3.532864 +step:7894 train loss:3.553671 +step:7895 train loss:3.534423 +step:7896 train loss:3.537348 +step:7897 train loss:3.557140 +step:7898 train loss:3.569035 +step:7899 train loss:3.555063 +step:7900 train loss:3.525724 +step:7901 train loss:3.514085 +step:7902 train loss:3.664227 +step:7903 train loss:3.509213 +step:7904 train loss:3.560365 +step:7905 train loss:3.627267 +step:7906 train loss:3.524794 +step:7907 train loss:3.551915 +step:7908 train loss:3.602938 +step:7909 train loss:3.653326 +step:7910 train loss:3.532033 +step:7911 train loss:3.553032 +step:7912 train loss:3.556366 +step:7913 train loss:3.531802 +step:7914 train loss:3.566650 +step:7915 train loss:3.669051 +step:7916 train loss:3.538958 +step:7917 train loss:3.597816 +step:7918 train loss:3.543677 +step:7919 train loss:3.531575 +step:7920 train loss:3.572944 +step:7921 train loss:3.574750 +step:7922 train loss:3.552435 +step:7923 train loss:3.600661 +step:7924 train loss:3.560310 +step:7925 train loss:3.585207 +step:7926 train loss:3.486374 +step:7927 train loss:3.767117 +step:7928 train loss:3.590789 +step:7929 train loss:3.558895 +step:7930 train loss:3.516951 +step:7931 train loss:3.540226 +step:7932 train loss:3.561889 +step:7933 train loss:3.577277 +step:7934 train loss:3.672464 +step:7935 train loss:3.590883 +step:7936 train loss:3.564808 +step:7937 train loss:3.511416 +step:7938 train loss:3.528099 +step:7939 train loss:3.574874 +step:7940 train loss:3.560152 +step:7941 train loss:3.590972 +step:7942 train loss:3.580326 +step:7943 train loss:3.589482 
+step:7944 train loss:3.509444 +step:7945 train loss:3.616670 +step:7946 train loss:3.562298 +step:7947 train loss:3.574674 +step:7948 train loss:3.535207 +step:7949 train loss:3.588178 +step:7950 train loss:3.642829 +step:7951 train loss:3.606367 +step:7952 train loss:3.751743 +step:7953 train loss:3.644226 +step:7954 train loss:3.548610 +step:7955 train loss:3.536814 +step:7956 train loss:3.538301 +step:7957 train loss:3.615996 +step:7958 train loss:3.620896 +step:7959 train loss:3.580083 +step:7960 train loss:3.645608 +step:7961 train loss:3.554547 +step:7962 train loss:3.522134 +step:7963 train loss:3.560589 +step:7964 train loss:3.558720 +step:7965 train loss:3.567078 +step:7966 train loss:3.538568 +step:7967 train loss:3.561362 +step:7968 train loss:3.572173 +step:7969 train loss:3.527512 +step:7970 train loss:3.496014 +step:7971 train loss:3.581967 +step:7972 train loss:3.557980 +step:7973 train loss:3.528259 +step:7974 train loss:3.568846 +step:7975 train loss:3.557797 +step:7976 train loss:3.577782 +step:7977 train loss:3.606829 +step:7978 train loss:3.627897 +step:7979 train loss:3.578267 +step:7980 train loss:3.482195 +step:7981 train loss:3.522564 +step:7982 train loss:3.568138 +step:7983 train loss:3.585357 +step:7984 train loss:3.624007 +step:7985 train loss:3.551484 +step:7986 train loss:3.575010 +step:7987 train loss:3.627444 +step:7988 train loss:3.600720 +step:7989 train loss:3.506595 +step:7990 train loss:3.519382 +step:7991 train loss:3.536994 +step:7992 train loss:3.561544 +step:7993 train loss:3.541289 +step:7994 train loss:3.593607 +step:7995 train loss:3.595634 +step:7996 train loss:3.564270 +step:7997 train loss:3.580256 +step:7998 train loss:3.606262 +step:7999 train loss:3.538002 +step:8000 validation loss:3.486318 total_sharp:1.8849e-04 L1_sharp:8.9951e-05 L2_sharp:3.1735e-05 L3_sharp:5.3480e-05 L4_sharp:4.3387e-05 L5_sharp:3.7943e-05 L6_sharp:2.1412e-05 L7_sharp:3.2352e-05 L8_sharp:4.7609e-05 L9_sharp:5.4489e-05 L10_sharp:6.5437e-05 L11_sharp:4.5934e-05 L12_sharp:1.6797e-04 total_fnorm:1.1056e+01 total_l1_linf:9.8036e+04 total_spectral:1.1056e+01 L1_fnorm:2.6386e+00 L2_fnorm:2.6242e+00 L3_fnorm:2.6001e+00 L4_fnorm:2.5637e+00 L5_fnorm:2.5227e+00 L6_fnorm:2.6193e+00 L7_fnorm:2.6453e+00 L8_fnorm:2.5941e+00 L9_fnorm:2.5921e+00 L10_fnorm:2.5738e+00 L11_fnorm:2.6024e+00 L12_fnorm:2.5199e+00 L1_l1linf:2.6139e+00 L2_l1linf:2.7077e+00 L3_l1linf:2.7806e+00 L4_l1linf:2.7742e+00 L5_l1linf:2.5410e+00 L6_l1linf:2.6372e+00 L7_l1linf:2.7876e+00 L8_l1linf:2.8077e+00 L9_l1linf:2.7094e+00 L10_l1linf:2.8949e+00 L11_l1linf:2.8331e+00 L12_l1linf:2.8622e+00 L1_spectral:3.3913e-01 L2_spectral:3.0270e-01 L3_spectral:3.0754e-01 L4_spectral:3.3102e-01 L5_spectral:2.8020e-01 L6_spectral:2.6033e-01 L7_spectral:3.0347e-01 L8_spectral:3.3196e-01 L9_spectral:3.6048e-01 L10_spectral:3.6249e-01 L11_spectral:3.6655e-01 L12_spectral:4.0444e-01 ip_v_neg_g:9.6348e-03 cos_v_neg_g:2.1945e-03 v_norm:1.1056e+01 g_norm:3.9710e-01 hv_norm:2.1123e-01 cos_v_hv:9.8659e-03 hg_norm:2.8884e+00 cos_g_hg:4.7572e-01 v_par:2.3796e-03 v_perp:1.1056e+01 L1_cos_v_neg_g:1.0518e-02 L1_v_norm:2.6386e+00 L2_cos_v_neg_g:5.3970e-03 L2_v_norm:2.6242e+00 L3_cos_v_neg_g:7.1143e-03 L3_v_norm:2.6001e+00 L4_cos_v_neg_g:5.5526e-03 L4_v_norm:2.5637e+00 L5_cos_v_neg_g:2.9073e-03 L5_v_norm:2.5227e+00 L6_cos_v_neg_g:2.3378e-03 L6_v_norm:2.6193e+00 L7_cos_v_neg_g:1.7327e-03 L7_v_norm:2.6453e+00 L8_cos_v_neg_g:2.0127e-03 L8_v_norm:2.5941e+00 L9_cos_v_neg_g:4.1872e-03 L9_v_norm:2.5921e+00 L10_cos_v_neg_g:5.0256e-03 L10_v_norm:2.5738e+00 
L11_cos_v_neg_g:4.3534e-03 L11_v_norm:2.6024e+00 L12_cos_v_neg_g:8.7261e-03 L12_v_norm:2.5199e+00 +step:8000 train loss:3.603716 +step:8001 train loss:3.565585 +step:8002 train loss:3.587336 +step:8003 train loss:3.604223 +step:8004 train loss:3.578563 +step:8005 train loss:3.500489 +step:8006 train loss:3.575146 +step:8007 train loss:3.547642 +step:8008 train loss:3.571594 +step:8009 train loss:3.648130 +step:8010 train loss:3.875620 +step:8011 train loss:3.541428 +step:8012 train loss:3.620003 +step:8013 train loss:3.570237 +step:8014 train loss:3.581506 +step:8015 train loss:3.579865 +step:8016 train loss:3.567801 +step:8017 train loss:3.590289 +step:8018 train loss:3.549771 +step:8019 train loss:3.519904 +step:8020 train loss:3.552878 +step:8021 train loss:3.629671 +step:8022 train loss:3.542602 +step:8023 train loss:3.577328 +step:8024 train loss:3.445832 +step:8025 train loss:3.553577 +step:8026 train loss:3.564044 +step:8027 train loss:3.567934 +step:8028 train loss:3.628000 +step:8029 train loss:3.556224 +step:8030 train loss:3.513138 +step:8031 train loss:3.573560 +step:8032 train loss:3.560602 +step:8033 train loss:3.509707 +step:8034 train loss:3.549565 +step:8035 train loss:3.536029 +step:8036 train loss:3.528486 +step:8037 train loss:3.495084 +step:8038 train loss:3.508451 +step:8039 train loss:3.602803 +step:8040 train loss:3.537522 +step:8041 train loss:3.536356 +step:8042 train loss:3.571964 +step:8043 train loss:3.513805 +step:8044 train loss:3.526231 +step:8045 train loss:3.592049 +step:8046 train loss:3.520725 +step:8047 train loss:3.527973 +step:8048 train loss:3.554379 +step:8049 train loss:3.601574 +step:8050 train loss:3.541952 +step:8051 train loss:3.521183 +step:8052 train loss:3.582142 +step:8053 train loss:3.531967 +step:8054 train loss:3.570089 +step:8055 train loss:3.594709 +step:8056 train loss:3.566506 +step:8057 train loss:3.641781 +step:8058 train loss:3.547668 +step:8059 train loss:3.606273 +step:8060 train loss:3.577336 +step:8061 train loss:3.466658 +step:8062 train loss:3.607032 +step:8063 train loss:3.570549 +step:8064 train loss:3.527857 +step:8065 train loss:3.591378 +step:8066 train loss:3.551639 +step:8067 train loss:3.614948 +step:8068 train loss:3.540011 +step:8069 train loss:3.567849 +step:8070 train loss:3.528026 +step:8071 train loss:3.536857 +step:8072 train loss:3.578416 +step:8073 train loss:3.531831 +step:8074 train loss:3.541448 +step:8075 train loss:3.522293 +step:8076 train loss:3.576627 +step:8077 train loss:3.583703 +step:8078 train loss:3.525722 +step:8079 train loss:3.549057 +step:8080 train loss:3.533005 +step:8081 train loss:3.551303 +step:8082 train loss:3.567685 +step:8083 train loss:3.466952 +step:8084 train loss:3.603758 +step:8085 train loss:3.476610 +step:8086 train loss:3.603193 +step:8087 train loss:3.500141 +step:8088 train loss:3.549254 +step:8089 train loss:3.579841 +step:8090 train loss:3.605323 +step:8091 train loss:3.549347 +step:8092 train loss:3.530460 +step:8093 train loss:3.539741 +step:8094 train loss:3.540273 +step:8095 train loss:3.567114 +step:8096 train loss:3.565362 +step:8097 train loss:3.497754 +step:8098 train loss:3.510996 +step:8099 train loss:3.502276 +step:8100 train loss:3.558336 +step:8101 train loss:3.628199 +step:8102 train loss:3.569877 +step:8103 train loss:3.522249 +step:8104 train loss:3.569674 +step:8105 train loss:3.566802 +step:8106 train loss:3.529856 +step:8107 train loss:3.512985 +step:8108 train loss:3.528306 +step:8109 train loss:3.523416 +step:8110 train loss:3.585621 +step:8111 
train loss:3.510368 +step:8112 train loss:3.529123 +step:8113 train loss:3.515727 +step:8114 train loss:3.463675 +step:8115 train loss:3.518216 +step:8116 train loss:3.553408 +step:8117 train loss:3.522113 +step:8118 train loss:3.516437 +step:8119 train loss:3.556322 +step:8120 train loss:3.503515 +step:8121 train loss:3.561293 +step:8122 train loss:3.544315 +step:8123 train loss:3.553257 +step:8124 train loss:3.511987 +step:8125 train loss:3.496235 +step:8126 train loss:3.486932 +step:8127 train loss:3.584250 +step:8128 train loss:3.590028 +step:8129 train loss:3.506920 +step:8130 train loss:3.538766 +step:8131 train loss:3.505273 +step:8132 train loss:3.578424 +step:8133 train loss:3.497740 +step:8134 train loss:3.534991 +step:8135 train loss:3.526076 +step:8136 train loss:3.538815 +step:8137 train loss:3.599743 +step:8138 train loss:3.512136 +step:8139 train loss:3.584109 +step:8140 train loss:3.513705 +step:8141 train loss:3.535988 +step:8142 train loss:3.516146 +step:8143 train loss:3.565470 +step:8144 train loss:3.543546 +step:8145 train loss:3.513297 +step:8146 train loss:3.518827 +step:8147 train loss:3.543357 +step:8148 train loss:3.633612 +step:8149 train loss:3.549022 +step:8150 train loss:3.526093 +step:8151 train loss:3.521026 +step:8152 train loss:3.617770 +step:8153 train loss:3.493172 +step:8154 train loss:3.512220 +step:8155 train loss:3.536248 +step:8156 train loss:3.522369 +step:8157 train loss:3.539215 +step:8158 train loss:3.553007 +step:8159 train loss:3.564980 +step:8160 train loss:3.518714 +step:8161 train loss:3.560476 +step:8162 train loss:3.493078 +step:8163 train loss:3.552462 +step:8164 train loss:3.537637 +step:8165 train loss:3.591234 +step:8166 train loss:3.593504 +step:8167 train loss:3.497818 +step:8168 train loss:3.477819 +step:8169 train loss:3.529264 +step:8170 train loss:3.474231 +step:8171 train loss:3.538898 +step:8172 train loss:3.534667 +step:8173 train loss:3.536420 +step:8174 train loss:3.545292 +step:8175 train loss:3.508960 +step:8176 train loss:3.503110 +step:8177 train loss:3.549399 +step:8178 train loss:3.639804 +step:8179 train loss:3.543887 +step:8180 train loss:3.563792 +step:8181 train loss:3.565886 +step:8182 train loss:3.524404 +step:8183 train loss:3.517373 +step:8184 train loss:3.508536 +step:8185 train loss:3.544842 +step:8186 train loss:3.545482 +step:8187 train loss:3.558659 +step:8188 train loss:3.485725 +step:8189 train loss:3.633383 +step:8190 train loss:3.566730 +step:8191 train loss:3.569459 +step:8192 train loss:3.677341 +step:8193 train loss:3.550280 +step:8194 train loss:3.481004 +step:8195 train loss:3.582405 +step:8196 train loss:3.500739 +step:8197 train loss:3.528043 +step:8198 train loss:3.535651 +step:8199 train loss:3.538389 +step:8200 train loss:3.514985 +step:8201 train loss:3.632267 +step:8202 train loss:3.549779 +step:8203 train loss:3.567667 +step:8204 train loss:3.477040 +step:8205 train loss:3.485744 +step:8206 train loss:3.609269 +step:8207 train loss:3.534375 +step:8208 train loss:3.552325 +step:8209 train loss:3.597024 +step:8210 train loss:3.581200 +step:8211 train loss:3.513135 +step:8212 train loss:3.571486 +step:8213 train loss:3.580723 +step:8214 train loss:3.619320 +step:8215 train loss:3.593286 +step:8216 train loss:3.575382 +step:8217 train loss:3.553909 +step:8218 train loss:3.563667 +step:8219 train loss:3.695036 +step:8220 train loss:3.526280 +step:8221 train loss:3.547844 +step:8222 train loss:3.498184 +step:8223 train loss:3.518478 +step:8224 train loss:3.527425 +step:8225 train loss:3.579493 
+step:8226 train loss:3.507157 +step:8227 train loss:3.579825 +step:8228 train loss:3.461674 +step:8229 train loss:3.507467 +step:8230 train loss:3.519150 +step:8231 train loss:3.546362 +step:8232 train loss:3.545751 +step:8233 train loss:3.592495 +step:8234 train loss:3.586760 +step:8235 train loss:3.559026 +step:8236 train loss:3.544338 +step:8237 train loss:3.494086 +step:8238 train loss:3.744527 +step:8239 train loss:3.577589 +step:8240 train loss:3.528308 +step:8241 train loss:3.498301 +step:8242 train loss:3.535749 +step:8243 train loss:3.523235 +step:8244 train loss:3.540814 +step:8245 train loss:3.523794 +step:8246 train loss:3.592057 +step:8247 train loss:3.620592 +step:8248 train loss:3.541080 +step:8249 train loss:3.535343 +step:8250 validation loss:3.477887 +step:8250 train loss:3.519908 +step:8251 train loss:3.615057 +step:8252 train loss:3.552160 +step:8253 train loss:3.523534 +step:8254 train loss:3.491191 +step:8255 train loss:3.527212 +step:8256 train loss:3.506023 +step:8257 train loss:3.617903 +step:8258 train loss:3.536966 +step:8259 train loss:3.523624 +step:8260 train loss:3.522371 +step:8261 train loss:3.524622 +step:8262 train loss:3.534974 +step:8263 train loss:3.550966 +step:8264 train loss:3.516302 +step:8265 train loss:3.506679 +step:8266 train loss:3.514608 +step:8267 train loss:3.447473 +step:8268 train loss:3.567516 +step:8269 train loss:3.502853 +step:8270 train loss:3.554209 +step:8271 train loss:3.580643 +step:8272 train loss:3.604064 +step:8273 train loss:3.483009 +step:8274 train loss:3.546192 +step:8275 train loss:3.510397 +step:8276 train loss:3.544194 +step:8277 train loss:3.609544 +step:8278 train loss:3.625424 +step:8279 train loss:3.537441 +step:8280 train loss:3.530354 +step:8281 train loss:3.491669 +step:8282 train loss:3.554174 +step:8283 train loss:3.536209 +step:8284 train loss:3.526405 +step:8285 train loss:3.515484 +step:8286 train loss:3.624616 +step:8287 train loss:3.560660 +step:8288 train loss:3.535571 +step:8289 train loss:3.547924 +step:8290 train loss:3.488224 +step:8291 train loss:3.527382 +step:8292 train loss:3.555771 +step:8293 train loss:3.533526 +step:8294 train loss:3.501547 +step:8295 train loss:3.538996 +step:8296 train loss:3.606050 +step:8297 train loss:3.684780 +step:8298 train loss:3.509557 +step:8299 train loss:3.545821 +step:8300 train loss:3.553786 +step:8301 train loss:3.523231 +step:8302 train loss:3.583189 +step:8303 train loss:3.715550 +step:8304 train loss:3.528467 +step:8305 train loss:3.572332 +step:8306 train loss:3.548340 +step:8307 train loss:3.564375 +step:8308 train loss:3.563811 +step:8309 train loss:3.585230 +step:8310 train loss:3.501786 +step:8311 train loss:3.592755 +step:8312 train loss:3.584149 +step:8313 train loss:3.646911 +step:8314 train loss:3.519890 +step:8315 train loss:3.465689 +step:8316 train loss:3.523968 +step:8317 train loss:3.549328 +step:8318 train loss:3.536315 +step:8319 train loss:3.573279 +step:8320 train loss:3.594555 +step:8321 train loss:3.503181 +step:8322 train loss:3.520776 +step:8323 train loss:3.557528 +step:8324 train loss:3.532623 +step:8325 train loss:3.587999 +step:8326 train loss:3.551696 +step:8327 train loss:3.543573 +step:8328 train loss:3.616661 +step:8329 train loss:3.524465 +step:8330 train loss:3.563131 +step:8331 train loss:3.492779 +step:8332 train loss:3.591174 +step:8333 train loss:3.608751 +step:8334 train loss:3.474683 +step:8335 train loss:3.541791 +step:8336 train loss:3.632116 +step:8337 train loss:3.562114 +step:8338 train loss:3.529790 +step:8339 
train loss:3.509120 +step:8340 train loss:3.599417 +step:8341 train loss:3.499748 +step:8342 train loss:3.570800 +step:8343 train loss:3.485135 +step:8344 train loss:3.531033 +step:8345 train loss:3.564219 +step:8346 train loss:3.645927 +step:8347 train loss:3.539211 +step:8348 train loss:3.565392 +step:8349 train loss:3.537233 +step:8350 train loss:3.558779 +step:8351 train loss:3.498537 +step:8352 train loss:3.583728 +step:8353 train loss:3.538963 +step:8354 train loss:3.521214 +step:8355 train loss:3.522270 +step:8356 train loss:3.512878 +step:8357 train loss:3.529696 +step:8358 train loss:3.505164 +step:8359 train loss:3.501433 +step:8360 train loss:3.547574 +step:8361 train loss:3.563251 +step:8362 train loss:3.579988 +step:8363 train loss:3.579167 +step:8364 train loss:3.541543 +step:8365 train loss:3.688856 +step:8366 train loss:3.531404 +step:8367 train loss:3.507795 +step:8368 train loss:3.475746 +step:8369 train loss:3.509181 +step:8370 train loss:3.589548 +step:8371 train loss:3.560212 +step:8372 train loss:3.538782 +step:8373 train loss:3.551573 +step:8374 train loss:3.481758 +step:8375 train loss:3.547418 +step:8376 train loss:3.582019 +step:8377 train loss:3.407221 +step:8378 train loss:3.625980 +step:8379 train loss:3.491014 +step:8380 train loss:3.500137 +step:8381 train loss:3.504659 +step:8382 train loss:3.533845 +step:8383 train loss:3.491807 +step:8384 train loss:3.534793 +step:8385 train loss:3.547266 +step:8386 train loss:3.526450 +step:8387 train loss:3.690943 +step:8388 train loss:3.596782 +step:8389 train loss:3.574363 +step:8390 train loss:3.576580 +step:8391 train loss:3.507750 +step:8392 train loss:3.522940 +step:8393 train loss:3.476020 +step:8394 train loss:3.570161 +step:8395 train loss:3.574198 +step:8396 train loss:3.597664 +step:8397 train loss:3.531503 +step:8398 train loss:3.550421 +step:8399 train loss:3.515617 +step:8400 train loss:3.524499 +step:8401 train loss:3.536663 +step:8402 train loss:3.515397 +step:8403 train loss:3.528041 +step:8404 train loss:3.535106 +step:8405 train loss:3.487464 +step:8406 train loss:3.530149 +step:8407 train loss:3.569462 +step:8408 train loss:3.543408 +step:8409 train loss:3.465576 +step:8410 train loss:3.528295 +step:8411 train loss:3.555313 +step:8412 train loss:3.611318 +step:8413 train loss:3.589762 +step:8414 train loss:3.584623 +step:8415 train loss:3.507142 +step:8416 train loss:3.555668 +step:8417 train loss:3.473258 +step:8418 train loss:3.575591 +step:8419 train loss:3.525941 +step:8420 train loss:3.605423 +step:8421 train loss:3.520922 +step:8422 train loss:3.539283 +step:8423 train loss:3.554118 +step:8424 train loss:3.558103 +step:8425 train loss:3.615654 +step:8426 train loss:3.585699 +step:8427 train loss:3.502822 +step:8428 train loss:3.517171 +step:8429 train loss:3.580774 +step:8430 train loss:3.519208 +step:8431 train loss:3.524307 +step:8432 train loss:3.525562 +step:8433 train loss:3.502247 +step:8434 train loss:3.536297 +step:8435 train loss:3.457080 +step:8436 train loss:3.537057 +step:8437 train loss:3.582202 +step:8438 train loss:3.560205 +step:8439 train loss:3.501491 +step:8440 train loss:3.472415 +step:8441 train loss:3.527345 +step:8442 train loss:3.551672 +step:8443 train loss:3.507812 +step:8444 train loss:3.540102 +step:8445 train loss:3.491189 +step:8446 train loss:3.540661 +step:8447 train loss:3.552490 +step:8448 train loss:3.536558 +step:8449 train loss:3.528723 +step:8450 train loss:3.515363 +step:8451 train loss:3.548620 +step:8452 train loss:3.521160 +step:8453 train loss:3.504990 
+step:8454 train loss:3.555735 +step:8455 train loss:3.626605 +step:8456 train loss:3.602545 +step:8457 train loss:3.658358 +step:8458 train loss:3.546341 +step:8459 train loss:3.551407 +step:8460 train loss:3.478842 +step:8461 train loss:3.636147 +step:8462 train loss:3.507053 +step:8463 train loss:3.548279 +step:8464 train loss:3.561220 +step:8465 train loss:3.567256 +step:8466 train loss:3.541055 +step:8467 train loss:3.542831 +step:8468 train loss:3.791228 +step:8469 train loss:3.505579 +step:8470 train loss:3.498260 +step:8471 train loss:3.543721 +step:8472 train loss:3.563536 +step:8473 train loss:3.521606 +step:8474 train loss:3.644217 +step:8475 train loss:3.604191 +step:8476 train loss:3.550178 +step:8477 train loss:3.541576 +step:8478 train loss:3.522451 +step:8479 train loss:3.521607 +step:8480 train loss:3.597036 +step:8481 train loss:3.525256 +step:8482 train loss:3.517095 +step:8483 train loss:3.664918 +step:8484 train loss:3.550715 +step:8485 train loss:3.593912 +step:8486 train loss:3.501953 +step:8487 train loss:3.557591 +step:8488 train loss:3.504653 +step:8489 train loss:3.579576 +step:8490 train loss:3.569285 +step:8491 train loss:3.586473 +step:8492 train loss:3.544566 +step:8493 train loss:3.612195 +step:8494 train loss:3.477002 +step:8495 train loss:3.574173 +step:8496 train loss:3.521505 +step:8497 train loss:3.553361 +step:8498 train loss:3.566983 +step:8499 train loss:3.545452 +step:8500 validation loss:3.475029 total_sharp:1.7635e-04 L1_sharp:4.2037e-05 L2_sharp:6.4310e-06 L3_sharp:1.0540e-05 L4_sharp:1.7485e-05 L5_sharp:2.9183e-05 L6_sharp:1.8175e-05 L7_sharp:3.2535e-05 L8_sharp:6.5176e-05 L9_sharp:7.1608e-05 L10_sharp:7.7955e-05 L11_sharp:4.9427e-05 L12_sharp:2.2157e-04 total_fnorm:1.0874e+01 total_l1_linf:9.6020e+04 total_spectral:1.0874e+01 L1_fnorm:2.5402e+00 L2_fnorm:2.5127e+00 L3_fnorm:2.4968e+00 L4_fnorm:2.5005e+00 L5_fnorm:2.4635e+00 L6_fnorm:2.5805e+00 L7_fnorm:2.6133e+00 L8_fnorm:2.5608e+00 L9_fnorm:2.5621e+00 L10_fnorm:2.5467e+00 L11_fnorm:2.5394e+00 L12_fnorm:2.4243e+00 L1_l1linf:2.6124e+00 L2_l1linf:2.6761e+00 L3_l1linf:2.6275e+00 L4_l1linf:2.6337e+00 L5_l1linf:2.5217e+00 L6_l1linf:2.7024e+00 L7_l1linf:3.0067e+00 L8_l1linf:2.9225e+00 L9_l1linf:3.0809e+00 L10_l1linf:3.0163e+00 L11_l1linf:3.2503e+00 L12_l1linf:3.1251e+00 L1_spectral:3.3249e-01 L2_spectral:3.1076e-01 L3_spectral:2.9334e-01 L4_spectral:3.1420e-01 L5_spectral:2.7114e-01 L6_spectral:2.5932e-01 L7_spectral:3.1247e-01 L8_spectral:3.6248e-01 L9_spectral:3.9479e-01 L10_spectral:3.9680e-01 L11_spectral:3.9826e-01 L12_spectral:4.3677e-01 ip_v_neg_g:1.1608e-02 cos_v_neg_g:2.7678e-03 v_norm:1.0874e+01 g_norm:3.8568e-01 hv_norm:1.9173e-01 cos_v_hv:1.0002e-02 hg_norm:2.3740e+00 cos_g_hg:4.7870e-01 v_par:3.1500e-03 v_perp:1.0874e+01 L1_cos_v_neg_g:7.1312e-03 L1_v_norm:2.5402e+00 L2_cos_v_neg_g:2.1844e-03 L2_v_norm:2.5127e+00 L3_cos_v_neg_g:2.4472e-03 L3_v_norm:2.4968e+00 L4_cos_v_neg_g:3.4823e-03 L4_v_norm:2.5005e+00 L5_cos_v_neg_g:4.4655e-03 L5_v_norm:2.4635e+00 L6_cos_v_neg_g:5.0896e-03 L6_v_norm:2.5805e+00 L7_cos_v_neg_g:5.1076e-03 L7_v_norm:2.6133e+00 L8_cos_v_neg_g:6.8775e-03 L8_v_norm:2.5608e+00 L9_cos_v_neg_g:8.2805e-03 L9_v_norm:2.5621e+00 L10_cos_v_neg_g:8.8189e-03 L10_v_norm:2.5467e+00 L11_cos_v_neg_g:8.9112e-03 L11_v_norm:2.5394e+00 L12_cos_v_neg_g:1.3130e-02 L12_v_norm:2.4243e+00 +step:8500 train loss:3.540998 +step:8501 train loss:3.756804 +step:8502 train loss:3.775451 +step:8503 train loss:3.535453 +step:8504 train loss:3.534021 +step:8505 train loss:3.507176 +step:8506 train 
loss:3.579350 +step:8507 train loss:3.517283 +step:8508 train loss:3.551980 +step:8509 train loss:3.489488 +step:8510 train loss:3.513982 +step:8511 train loss:3.472092 +step:8512 train loss:3.569693 +step:8513 train loss:3.573504 +step:8514 train loss:3.521089 +step:8515 train loss:3.614739 +step:8516 train loss:3.531572 +step:8517 train loss:3.555623 +step:8518 train loss:3.441928 +step:8519 train loss:3.539284 +step:8520 train loss:3.502114 +step:8521 train loss:3.546288 +step:8522 train loss:3.438567 +step:8523 train loss:3.534715 +step:8524 train loss:3.524306 +step:8525 train loss:3.590143 +step:8526 train loss:3.573526 +step:8527 train loss:3.513281 +step:8528 train loss:3.595740 +step:8529 train loss:3.549139 +step:8530 train loss:3.586705 +step:8531 train loss:3.575187 +step:8532 train loss:3.612318 +step:8533 train loss:3.565022 +step:8534 train loss:3.564261 +step:8535 train loss:3.539617 +step:8536 train loss:3.628317 +step:8537 train loss:3.543957 +step:8538 train loss:3.610280 +step:8539 train loss:3.534149 +step:8540 train loss:3.559752 +step:8541 train loss:3.501230 +step:8542 train loss:3.563854 +step:8543 train loss:3.481245 +step:8544 train loss:3.476581 +step:8545 train loss:3.529419 +step:8546 train loss:3.479970 +step:8547 train loss:3.532590 +step:8548 train loss:3.507491 +step:8549 train loss:3.548439 +step:8550 train loss:3.504633 +step:8551 train loss:3.549298 +step:8552 train loss:3.556887 +step:8553 train loss:3.557210 +step:8554 train loss:3.532296 +step:8555 train loss:3.543297 +step:8556 train loss:3.619751 +step:8557 train loss:3.517715 +step:8558 train loss:3.558770 +step:8559 train loss:3.547412 +step:8560 train loss:3.531051 +step:8561 train loss:3.485679 +step:8562 train loss:3.515946 +step:8563 train loss:3.512655 +step:8564 train loss:3.582489 +step:8565 train loss:3.556519 +step:8566 train loss:3.577619 +step:8567 train loss:3.522424 +step:8568 train loss:3.541152 +step:8569 train loss:3.549362 +step:8570 train loss:3.492552 +step:8571 train loss:3.535574 +step:8572 train loss:3.553787 +step:8573 train loss:3.626719 +step:8574 train loss:3.556439 +step:8575 train loss:3.556614 +step:8576 train loss:3.588151 +step:8577 train loss:3.673312 +step:8578 train loss:3.579329 +step:8579 train loss:3.565294 +step:8580 train loss:3.497939 +step:8581 train loss:3.542825 +step:8582 train loss:3.544373 +step:8583 train loss:3.545471 +step:8584 train loss:3.534629 +step:8585 train loss:3.613291 +step:8586 train loss:3.532456 +step:8587 train loss:3.543600 +step:8588 train loss:3.588944 +step:8589 train loss:3.534415 +step:8590 train loss:3.527263 +step:8591 train loss:3.530009 +step:8592 train loss:3.490691 +step:8593 train loss:3.570000 +step:8594 train loss:3.591118 +step:8595 train loss:3.515719 +step:8596 train loss:3.557490 +step:8597 train loss:3.520840 +step:8598 train loss:3.570238 +step:8599 train loss:3.537745 +step:8600 train loss:3.548184 +step:8601 train loss:3.535295 +step:8602 train loss:3.511288 +step:8603 train loss:3.568182 +step:8604 train loss:3.513138 +step:8605 train loss:3.526287 +step:8606 train loss:3.539730 +step:8607 train loss:3.545269 +step:8608 train loss:3.590021 +step:8609 train loss:3.487686 +step:8610 train loss:3.559429 +step:8611 train loss:3.493243 +step:8612 train loss:3.568453 +step:8613 train loss:3.503470 +step:8614 train loss:3.564944 +step:8615 train loss:3.607072 +step:8616 train loss:3.489948 +step:8617 train loss:3.557836 +step:8618 train loss:3.532224 +step:8619 train loss:3.485338 +step:8620 train loss:3.529821 
+step:8621 train loss:3.557910 +step:8622 train loss:3.519140 +step:8623 train loss:3.530937 +step:8624 train loss:3.606516 +step:8625 train loss:3.530357 +step:8626 train loss:3.538459 +step:8627 train loss:3.533532 +step:8628 train loss:3.565959 +step:8629 train loss:3.473901 +step:8630 train loss:3.575494 +step:8631 train loss:3.518899 +step:8632 train loss:3.577759 +step:8633 train loss:3.520642 +step:8634 train loss:3.753588 +step:8635 train loss:3.548477 +step:8636 train loss:3.595526 +step:8637 train loss:3.519991 +step:8638 train loss:3.521077 +step:8639 train loss:3.577810 +step:8640 train loss:3.491367 +step:8641 train loss:3.590310 +step:8642 train loss:3.542028 +step:8643 train loss:3.659162 +step:8644 train loss:3.493280 +step:8645 train loss:3.567364 +step:8646 train loss:3.527109 +step:8647 train loss:3.551229 +step:8648 train loss:3.501318 +step:8649 train loss:3.585654 +step:8650 train loss:3.538449 +step:8651 train loss:3.552636 +step:8652 train loss:3.519152 +step:8653 train loss:3.550470 +step:8654 train loss:3.596527 +step:8655 train loss:3.524436 +step:8656 train loss:3.566194 +step:8657 train loss:3.567818 +step:8658 train loss:3.540297 +step:8659 train loss:3.531417 +step:8660 train loss:3.477911 +step:8661 train loss:3.536732 +step:8662 train loss:3.476984 +step:8663 train loss:3.551198 +step:8664 train loss:3.467072 +step:8665 train loss:3.489237 +step:8666 train loss:3.568007 +step:8667 train loss:3.459827 +step:8668 train loss:3.566808 +step:8669 train loss:3.605309 +step:8670 train loss:3.507525 +step:8671 train loss:3.502032 +step:8672 train loss:3.721846 +step:8673 train loss:3.489571 +step:8674 train loss:3.556718 +step:8675 train loss:3.598492 +step:8676 train loss:3.539176 +step:8677 train loss:3.563492 +step:8678 train loss:3.510408 +step:8679 train loss:3.567087 +step:8680 train loss:3.549479 +step:8681 train loss:3.550084 +step:8682 train loss:3.505940 +step:8683 train loss:3.520322 +step:8684 train loss:3.596773 +step:8685 train loss:3.541409 +step:8686 train loss:3.532668 +step:8687 train loss:3.483720 +step:8688 train loss:3.504188 +step:8689 train loss:3.574217 +step:8690 train loss:3.510482 +step:8691 train loss:3.587801 +step:8692 train loss:3.476011 +step:8693 train loss:3.565955 +step:8694 train loss:3.566953 +step:8695 train loss:3.552585 +step:8696 train loss:3.576894 +step:8697 train loss:3.529018 +step:8698 train loss:3.570294 +step:8699 train loss:3.523071 +step:8700 train loss:3.548532 +step:8701 train loss:3.509100 +step:8702 train loss:3.493577 +step:8703 train loss:3.509840 +step:8704 train loss:3.463247 +step:8705 train loss:3.544651 +step:8706 train loss:3.565889 +step:8707 train loss:3.562057 +step:8708 train loss:3.506451 +step:8709 train loss:3.570823 +step:8710 train loss:3.498848 +step:8711 train loss:3.555603 +step:8712 train loss:3.459804 +step:8713 train loss:3.535897 +step:8714 train loss:3.640542 +step:8715 train loss:3.500368 +step:8716 train loss:3.556660 +step:8717 train loss:3.524642 +step:8718 train loss:3.565592 +step:8719 train loss:3.534262 +step:8720 train loss:3.647152 +step:8721 train loss:3.539980 +step:8722 train loss:3.630914 +step:8723 train loss:3.498640 +step:8724 train loss:3.507941 +step:8725 train loss:3.539633 +step:8726 train loss:3.495785 +step:8727 train loss:3.571587 +step:8728 train loss:3.528937 +step:8729 train loss:3.531713 +step:8730 train loss:3.507805 +step:8731 train loss:3.514936 +step:8732 train loss:3.615287 +step:8733 train loss:3.538784 +step:8734 train loss:3.574477 +step:8735 train 
loss:3.647202 +step:8736 train loss:3.502350 +step:8737 train loss:3.527908 +step:8738 train loss:3.508604 +step:8739 train loss:3.571047 +step:8740 train loss:3.491971 +step:8741 train loss:3.543688 +step:8742 train loss:3.502720 +step:8743 train loss:3.537647 +step:8744 train loss:3.562490 +step:8745 train loss:3.602105 +step:8746 train loss:3.498307 +step:8747 train loss:3.605728 +step:8748 train loss:3.512015 +step:8749 train loss:3.549717 +step:8750 validation loss:3.469800 +step:8750 train loss:3.564260 +step:8751 train loss:3.600644 +step:8752 train loss:3.460504 +step:8753 train loss:3.506698 +step:8754 train loss:3.561712 +step:8755 train loss:3.538980 +step:8756 train loss:3.586994 +step:8757 train loss:3.499238 +step:8758 train loss:3.651563 +step:8759 train loss:3.499969 +step:8760 train loss:3.531385 +step:8761 train loss:3.609201 +step:8762 train loss:3.507179 +step:8763 train loss:3.481051 +step:8764 train loss:3.551893 +step:8765 train loss:3.623194 +step:8766 train loss:3.552541 +step:8767 train loss:3.509166 +step:8768 train loss:3.549270 +step:8769 train loss:3.523529 +step:8770 train loss:3.567902 +step:8771 train loss:3.542555 +step:8772 train loss:3.561989 +step:8773 train loss:3.521652 +step:8774 train loss:3.553356 +step:8775 train loss:3.551937 +step:8776 train loss:3.497406 +step:8777 train loss:3.534744 +step:8778 train loss:3.542815 +step:8779 train loss:3.561573 +step:8780 train loss:3.530251 +step:8781 train loss:3.532719 +step:8782 train loss:3.555251 +step:8783 train loss:3.537410 +step:8784 train loss:3.560817 +step:8785 train loss:3.545835 +step:8786 train loss:3.619233 +step:8787 train loss:3.566737 +step:8788 train loss:3.466222 +step:8789 train loss:3.563468 +step:8790 train loss:3.493314 +step:8791 train loss:3.545441 +step:8792 train loss:3.482872 +step:8793 train loss:3.571842 +step:8794 train loss:3.500230 +step:8795 train loss:3.566814 +step:8796 train loss:3.713305 +step:8797 train loss:3.460001 +step:8798 train loss:3.619230 +step:8799 train loss:3.535835 +step:8800 train loss:3.527808 +step:8801 train loss:3.550035 +step:8802 train loss:3.606101 +step:8803 train loss:3.563239 +step:8804 train loss:3.541798 +step:8805 train loss:3.562070 +step:8806 train loss:3.530940 +step:8807 train loss:3.521222 +step:8808 train loss:3.478237 +step:8809 train loss:3.601511 +step:8810 train loss:3.507999 +step:8811 train loss:3.494842 +step:8812 train loss:3.538581 +step:8813 train loss:3.448649 +step:8814 train loss:3.638319 +step:8815 train loss:3.483414 +step:8816 train loss:3.598117 +step:8817 train loss:3.536922 +step:8818 train loss:3.468933 +step:8819 train loss:3.587153 +step:8820 train loss:3.513343 +step:8821 train loss:3.540403 +step:8822 train loss:3.521972 +step:8823 train loss:3.536706 +step:8824 train loss:3.598934 +step:8825 train loss:3.573439 +step:8826 train loss:3.546347 +step:8827 train loss:3.502777 +step:8828 train loss:3.544054 +step:8829 train loss:3.525662 +step:8830 train loss:3.502761 +step:8831 train loss:3.576177 +step:8832 train loss:3.519391 +step:8833 train loss:3.547692 +step:8834 train loss:3.516606 +step:8835 train loss:3.454591 +step:8836 train loss:3.584304 +step:8837 train loss:3.486870 +step:8838 train loss:3.533149 +step:8839 train loss:3.515258 +step:8840 train loss:3.517298 +step:8841 train loss:3.533414 +step:8842 train loss:3.544122 +step:8843 train loss:3.553258 +step:8844 train loss:3.521816 +step:8845 train loss:3.539227 +step:8846 train loss:3.508996 +step:8847 train loss:3.543624 +step:8848 train loss:3.592326 
+step:8849 train loss:3.568892 +step:8850 train loss:3.563688 +step:8851 train loss:3.449063 +step:8852 train loss:3.549372 +step:8853 train loss:3.532685 +step:8854 train loss:3.502583 +step:8855 train loss:3.575029 +step:8856 train loss:3.567320 +step:8857 train loss:3.632385 +step:8858 train loss:3.499985 +step:8859 train loss:3.566696 +step:8860 train loss:3.527661 +step:8861 train loss:3.508200 +step:8862 train loss:3.509835 +step:8863 train loss:3.493773 +step:8864 train loss:3.562761 +step:8865 train loss:3.552783 +step:8866 train loss:3.433326 +step:8867 train loss:3.541209 +step:8868 train loss:3.565315 +step:8869 train loss:3.646661 +step:8870 train loss:3.531432 +step:8871 train loss:3.554066 +step:8872 train loss:3.536620 +step:8873 train loss:3.537222 +step:8874 train loss:3.590105 +step:8875 train loss:3.524806 +step:8876 train loss:3.561601 +step:8877 train loss:3.544756 +step:8878 train loss:3.594452 +step:8879 train loss:3.555597 +step:8880 train loss:3.502947 +step:8881 train loss:3.466126 +step:8882 train loss:3.538384 +step:8883 train loss:3.523179 +step:8884 train loss:3.612294 +step:8885 train loss:3.549479 +step:8886 train loss:3.551511 +step:8887 train loss:3.577829 +step:8888 train loss:3.533424 +step:8889 train loss:3.541191 +step:8890 train loss:3.533714 +step:8891 train loss:3.507082 +step:8892 train loss:3.587229 +step:8893 train loss:3.523961 +step:8894 train loss:3.543745 +step:8895 train loss:3.575389 +step:8896 train loss:3.490047 +step:8897 train loss:3.581560 +step:8898 train loss:3.513667 +step:8899 train loss:3.539458 +step:8900 train loss:3.503031 +step:8901 train loss:3.518495 +step:8902 train loss:3.559374 +step:8903 train loss:3.499551 +step:8904 train loss:3.549050 +step:8905 train loss:3.520889 +step:8906 train loss:3.513807 +step:8907 train loss:3.527691 +step:8908 train loss:3.591114 +step:8909 train loss:3.537822 +step:8910 train loss:3.496371 +step:8911 train loss:3.597676 +step:8912 train loss:3.492993 +step:8913 train loss:3.506947 +step:8914 train loss:3.604263 +step:8915 train loss:3.542390 +step:8916 train loss:3.570876 +step:8917 train loss:3.524713 +step:8918 train loss:3.531949 +step:8919 train loss:3.518571 +step:8920 train loss:3.544327 +step:8921 train loss:3.541349 +step:8922 train loss:3.522862 +step:8923 train loss:3.704498 +step:8924 train loss:3.605525 +step:8925 train loss:3.535419 +step:8926 train loss:3.548251 +step:8927 train loss:3.575006 +step:8928 train loss:3.532007 +step:8929 train loss:3.524179 +step:8930 train loss:3.579293 +step:8931 train loss:3.489664 +step:8932 train loss:3.592754 +step:8933 train loss:3.498368 +step:8934 train loss:3.537792 +step:8935 train loss:3.552137 +step:8936 train loss:3.585460 +step:8937 train loss:3.590971 +step:8938 train loss:3.524678 +step:8939 train loss:3.590489 +step:8940 train loss:3.546244 +step:8941 train loss:3.491151 +step:8942 train loss:3.564189 +step:8943 train loss:3.498737 +step:8944 train loss:3.548972 +step:8945 train loss:3.568264 +step:8946 train loss:3.413492 +step:8947 train loss:3.603935 +step:8948 train loss:3.449749 +step:8949 train loss:3.454965 +step:8950 train loss:3.496586 +step:8951 train loss:3.536939 +step:8952 train loss:3.559210 +step:8953 train loss:3.512070 +step:8954 train loss:3.615416 +step:8955 train loss:3.530636 +step:8956 train loss:3.557085 +step:8957 train loss:3.548071 +step:8958 train loss:3.525195 +step:8959 train loss:3.514384 +step:8960 train loss:3.480603 +step:8961 train loss:3.502923 +step:8962 train loss:3.557478 +step:8963 train 
loss:3.534154 +step:8964 train loss:3.519707 +step:8965 train loss:3.561934 +step:8966 train loss:3.519915 +step:8967 train loss:3.495771 +step:8968 train loss:3.482883 +step:8969 train loss:3.470733 +step:8970 train loss:3.552105 +step:8971 train loss:3.500513 +step:8972 train loss:3.698633 +step:8973 train loss:3.586573 +step:8974 train loss:3.545680 +step:8975 train loss:3.546176 +step:8976 train loss:3.509257 +step:8977 train loss:3.598818 +step:8978 train loss:3.579069 +step:8979 train loss:3.498953 +step:8980 train loss:3.593871 +step:8981 train loss:3.545657 +step:8982 train loss:3.516450 +step:8983 train loss:3.463367 +step:8984 train loss:3.590867 +step:8985 train loss:3.504819 +step:8986 train loss:3.541405 +step:8987 train loss:3.514921 +step:8988 train loss:3.563397 +step:8989 train loss:3.474086 +step:8990 train loss:3.614539 +step:8991 train loss:3.467834 +step:8992 train loss:3.523847 +step:8993 train loss:3.615221 +step:8994 train loss:3.518870 +step:8995 train loss:3.546986 +step:8996 train loss:3.517244 +step:8997 train loss:3.465155 +step:8998 train loss:3.468534 +step:8999 train loss:3.491389 +step:9000 validation loss:3.464936 total_sharp:1.5718e-04 L1_sharp:3.7152e-05 L2_sharp:9.1541e-06 L3_sharp:8.5649e-06 L4_sharp:1.6923e-05 L5_sharp:1.9269e-05 L6_sharp:1.3938e-05 L7_sharp:2.6517e-05 L8_sharp:4.2672e-05 L9_sharp:6.0916e-05 L10_sharp:6.0794e-05 L11_sharp:4.5404e-05 L12_sharp:2.5816e-04 total_fnorm:1.1015e+01 total_l1_linf:9.7583e+04 total_spectral:1.1015e+01 L1_fnorm:2.5896e+00 L2_fnorm:2.5823e+00 L3_fnorm:2.5558e+00 L4_fnorm:2.5525e+00 L5_fnorm:2.5002e+00 L6_fnorm:2.5995e+00 L7_fnorm:2.6307e+00 L8_fnorm:2.5924e+00 L9_fnorm:2.5930e+00 L10_fnorm:2.5733e+00 L11_fnorm:2.6000e+00 L12_fnorm:2.5191e+00 L1_l1linf:2.6146e+00 L2_l1linf:2.7177e+00 L3_l1linf:2.7022e+00 L4_l1linf:2.7694e+00 L5_l1linf:2.4900e+00 L6_l1linf:2.5444e+00 L7_l1linf:2.7304e+00 L8_l1linf:2.8602e+00 L9_l1linf:3.0587e+00 L10_l1linf:2.8416e+00 L11_l1linf:3.0401e+00 L12_l1linf:3.1151e+00 L1_spectral:3.4117e-01 L2_spectral:3.1641e-01 L3_spectral:2.9292e-01 L4_spectral:3.1736e-01 L5_spectral:2.7580e-01 L6_spectral:2.6382e-01 L7_spectral:3.0402e-01 L8_spectral:3.2895e-01 L9_spectral:3.7238e-01 L10_spectral:3.7025e-01 L11_spectral:3.7816e-01 L12_spectral:4.4167e-01 ip_v_neg_g:1.0576e-02 cos_v_neg_g:2.3309e-03 v_norm:1.1015e+01 g_norm:4.1191e-01 hv_norm:1.9960e-01 cos_v_hv:8.6747e-03 hg_norm:2.6016e+00 cos_g_hg:4.9979e-01 v_par:3.6427e-03 v_perp:1.1015e+01 L1_cos_v_neg_g:4.0611e-03 L1_v_norm:2.5896e+00 L2_cos_v_neg_g:1.9707e-03 L2_v_norm:2.5823e+00 L3_cos_v_neg_g:1.6591e-03 L3_v_norm:2.5558e+00 L4_cos_v_neg_g:2.7812e-03 L4_v_norm:2.5525e+00 L5_cos_v_neg_g:2.9474e-03 L5_v_norm:2.5002e+00 L6_cos_v_neg_g:3.6472e-03 L6_v_norm:2.5995e+00 L7_cos_v_neg_g:4.2243e-03 L7_v_norm:2.6307e+00 L8_cos_v_neg_g:4.5627e-03 L8_v_norm:2.5924e+00 L9_cos_v_neg_g:6.8508e-03 L9_v_norm:2.5930e+00 L10_cos_v_neg_g:7.9210e-03 L10_v_norm:2.5733e+00 L11_cos_v_neg_g:7.8952e-03 L11_v_norm:2.6000e+00 L12_cos_v_neg_g:1.3144e-02 L12_v_norm:2.5191e+00 +step:9000 train loss:3.578679 +step:9001 train loss:3.544611 +step:9002 train loss:3.552346 +step:9003 train loss:3.492142 +step:9004 train loss:3.490705 +step:9005 train loss:3.504790 +step:9006 train loss:3.508798 +step:9007 train loss:3.524646 +step:9008 train loss:3.483341 +step:9009 train loss:3.478270 +step:9010 train loss:3.514389 +step:9011 train loss:3.510185 +step:9012 train loss:3.627487 +step:9013 train loss:3.452202 +step:9014 train loss:3.523841 +step:9015 train loss:3.526591 +step:9016 
train loss:3.598285 +step:9017 train loss:3.541433 +step:9018 train loss:3.465478 +step:9019 train loss:3.548036 +step:9020 train loss:3.558916 +step:9021 train loss:3.517059 +step:9022 train loss:3.527025 +step:9023 train loss:3.524908 +step:9024 train loss:3.544225 +step:9025 train loss:3.530101 +step:9026 train loss:3.486159 +step:9027 train loss:3.536519 +step:9028 train loss:3.552400 +step:9029 train loss:3.570728 +step:9030 train loss:3.570280 +step:9031 train loss:3.531966 +step:9032 train loss:3.543641 +step:9033 train loss:3.528779 +step:9034 train loss:3.537829 +step:9035 train loss:3.540672 +step:9036 train loss:3.493559 +step:9037 train loss:3.486358 +step:9038 train loss:3.608940 +step:9039 train loss:3.512311 +step:9040 train loss:3.528027 +step:9041 train loss:3.577533 +step:9042 train loss:3.432881 +step:9043 train loss:3.526648 +step:9044 train loss:3.547355 +step:9045 train loss:3.491129 +step:9046 train loss:3.533194 +step:9047 train loss:3.528867 +step:9048 train loss:3.507855 +step:9049 train loss:3.544060 +step:9050 train loss:3.496462 +step:9051 train loss:3.536249 +step:9052 train loss:3.465816 +step:9053 train loss:3.589407 +step:9054 train loss:3.603190 +step:9055 train loss:3.524842 +step:9056 train loss:3.587234 +step:9057 train loss:3.443594 +step:9058 train loss:3.525065 +step:9059 train loss:3.604425 +step:9060 train loss:3.535882 +step:9061 train loss:3.560158 +step:9062 train loss:3.492227 +step:9063 train loss:3.619244 +step:9064 train loss:3.509347 +step:9065 train loss:3.519598 +step:9066 train loss:3.539932 +step:9067 train loss:3.504728 +step:9068 train loss:3.576332 +step:9069 train loss:3.534364 +step:9070 train loss:3.582775 +step:9071 train loss:3.516311 +step:9072 train loss:3.536197 +step:9073 train loss:3.499092 +step:9074 train loss:3.581237 +step:9075 train loss:3.525975 +step:9076 train loss:3.492757 +step:9077 train loss:3.570946 +step:9078 train loss:3.509717 +step:9079 train loss:3.554093 +step:9080 train loss:3.483949 +step:9081 train loss:3.524135 +step:9082 train loss:3.550779 +step:9083 train loss:3.579174 +step:9084 train loss:3.470782 +step:9085 train loss:3.543326 +step:9086 train loss:3.527383 +step:9087 train loss:3.472758 +step:9088 train loss:3.535419 +step:9089 train loss:3.551199 +step:9090 train loss:3.484674 +step:9091 train loss:3.585414 +step:9092 train loss:3.512737 +step:9093 train loss:3.507010 +step:9094 train loss:3.635867 +step:9095 train loss:3.502889 +step:9096 train loss:3.517980 +step:9097 train loss:3.502205 +step:9098 train loss:3.497089 +step:9099 train loss:3.622030 +step:9100 train loss:3.653365 +step:9101 train loss:3.572166 +step:9102 train loss:3.515156 +step:9103 train loss:3.521569 +step:9104 train loss:3.604809 +step:9105 train loss:3.468224 +step:9106 train loss:3.597307 +step:9107 train loss:3.529752 +step:9108 train loss:3.513054 +step:9109 train loss:3.536564 +step:9110 train loss:3.542497 +step:9111 train loss:3.521239 +step:9112 train loss:3.521738 +step:9113 train loss:3.546377 +step:9114 train loss:3.500520 +step:9115 train loss:3.530259 +step:9116 train loss:3.552750 +step:9117 train loss:3.562955 +step:9118 train loss:3.537035 +step:9119 train loss:3.457403 +step:9120 train loss:3.552927 +step:9121 train loss:3.586669 +step:9122 train loss:3.529393 +step:9123 train loss:3.549883 +step:9124 train loss:3.580803 +step:9125 train loss:3.529526 +step:9126 train loss:3.506696 +step:9127 train loss:3.541178 +step:9128 train loss:3.596119 +step:9129 train loss:3.549496 +step:9130 train loss:3.563792 
+step:9131 train loss:3.540108 +step:9132 train loss:3.548469 +step:9133 train loss:3.536339 +step:9134 train loss:3.510645 +step:9135 train loss:3.538799 +step:9136 train loss:3.534276 +step:9137 train loss:3.589870 +step:9138 train loss:3.508396 +step:9139 train loss:3.584054 +step:9140 train loss:3.508925 +step:9141 train loss:3.484269 +step:9142 train loss:3.659779 +step:9143 train loss:3.491385 +step:9144 train loss:3.584713 +step:9145 train loss:3.593592 +step:9146 train loss:3.506183 +step:9147 train loss:3.577547 +step:9148 train loss:3.599208 +step:9149 train loss:3.507452 +step:9150 train loss:3.530722 +step:9151 train loss:3.593981 +step:9152 train loss:3.546201 +step:9153 train loss:3.514501 +step:9154 train loss:3.527711 +step:9155 train loss:3.495473 +step:9156 train loss:3.497304 +step:9157 train loss:3.515145 +step:9158 train loss:3.496555 +step:9159 train loss:3.586377 +step:9160 train loss:3.468864 +step:9161 train loss:3.495784 +step:9162 train loss:3.584813 +step:9163 train loss:3.527498 +step:9164 train loss:3.498720 +step:9165 train loss:3.495356 +step:9166 train loss:3.549759 +step:9167 train loss:3.492959 +step:9168 train loss:3.539207 +step:9169 train loss:3.474910 +step:9170 train loss:3.494211 +step:9171 train loss:3.562702 +step:9172 train loss:3.483756 +step:9173 train loss:3.603252 +step:9174 train loss:3.534501 +step:9175 train loss:3.511284 +step:9176 train loss:3.496272 +step:9177 train loss:3.542382 +step:9178 train loss:3.487172 +step:9179 train loss:3.447467 +step:9180 train loss:3.537573 +step:9181 train loss:3.548635 +step:9182 train loss:3.521556 +step:9183 train loss:3.527514 +step:9184 train loss:3.520424 +step:9185 train loss:3.537660 +step:9186 train loss:3.500117 +step:9187 train loss:3.570789 +step:9188 train loss:3.607240 +step:9189 train loss:3.531820 +step:9190 train loss:3.536559 +step:9191 train loss:3.526765 +step:9192 train loss:3.538831 +step:9193 train loss:3.544135 +step:9194 train loss:3.481086 +step:9195 train loss:3.470887 +step:9196 train loss:3.519943 +step:9197 train loss:3.476974 +step:9198 train loss:3.552476 +step:9199 train loss:3.501075 +step:9200 train loss:3.525742 +step:9201 train loss:3.562096 +step:9202 train loss:3.550723 +step:9203 train loss:3.505229 +step:9204 train loss:3.702125 +step:9205 train loss:3.614461 +step:9206 train loss:3.530416 +step:9207 train loss:3.581408 +step:9208 train loss:3.558892 +step:9209 train loss:3.580269 +step:9210 train loss:3.475115 +step:9211 train loss:3.496862 +step:9212 train loss:3.501016 +step:9213 train loss:3.561534 +step:9214 train loss:3.501988 +step:9215 train loss:3.569026 +step:9216 train loss:3.533636 +step:9217 train loss:3.473202 +step:9218 train loss:3.563919 +step:9219 train loss:3.524875 +step:9220 train loss:3.569929 +step:9221 train loss:3.621830 +step:9222 train loss:3.566917 +step:9223 train loss:3.732497 +step:9224 train loss:3.571393 +step:9225 train loss:3.503495 +step:9226 train loss:3.519973 +step:9227 train loss:3.539974 +step:9228 train loss:3.537744 +step:9229 train loss:3.498160 +step:9230 train loss:3.560542 +step:9231 train loss:3.446354 +step:9232 train loss:3.502089 +step:9233 train loss:3.524723 +step:9234 train loss:3.583330 +step:9235 train loss:3.584640 +step:9236 train loss:3.492152 +step:9237 train loss:3.552599 +step:9238 train loss:3.524938 +step:9239 train loss:3.519564 +step:9240 train loss:3.485012 +step:9241 train loss:3.517595 +step:9242 train loss:3.526488 +step:9243 train loss:3.524255 +step:9244 train loss:3.500473 +step:9245 train 
loss:3.507099 +step:9246 train loss:3.503129 +step:9247 train loss:3.519290 +step:9248 train loss:3.526844 +step:9249 train loss:3.524551 +step:9250 validation loss:3.464524 +step:9250 train loss:3.567751 +step:9251 train loss:3.508607 +step:9252 train loss:3.576934 +step:9253 train loss:3.570631 +step:9254 train loss:3.498004 +step:9255 train loss:3.615227 +step:9256 train loss:3.495228 +step:9257 train loss:3.438218 +step:9258 train loss:3.518518 +step:9259 train loss:3.520252 +step:9260 train loss:3.617530 +step:9261 train loss:3.496770 +step:9262 train loss:3.566877 +step:9263 train loss:3.467646 +step:9264 train loss:3.620336 +step:9265 train loss:3.640405 +step:9266 train loss:3.575458 +step:9267 train loss:3.523257 +step:9268 train loss:3.514795 +step:9269 train loss:3.540979 +step:9270 train loss:3.462869 +step:9271 train loss:3.574160 +step:9272 train loss:3.516910 +step:9273 train loss:3.534555 +step:9274 train loss:3.539437 +step:9275 train loss:3.536830 +step:9276 train loss:3.562942 +step:9277 train loss:3.535713 +step:9278 train loss:3.550411 +step:9279 train loss:3.543843 +step:9280 train loss:3.542905 +step:9281 train loss:3.517747 +step:9282 train loss:3.634169 +step:9283 train loss:3.524925 +step:9284 train loss:3.488678 +step:9285 train loss:3.507586 +step:9286 train loss:3.560034 +step:9287 train loss:3.528473 +step:9288 train loss:3.536297 +step:9289 train loss:3.508382 +step:9290 train loss:3.535369 +step:9291 train loss:3.512926 +step:9292 train loss:3.563922 +step:9293 train loss:3.611791 +step:9294 train loss:3.532429 +step:9295 train loss:3.518029 +step:9296 train loss:3.469650 +step:9297 train loss:3.540239 +step:9298 train loss:3.479683 +step:9299 train loss:3.462914 +step:9300 train loss:3.565704 +step:9301 train loss:3.594164 +step:9302 train loss:3.531050 +step:9303 train loss:3.580693 +step:9304 train loss:3.503819 +step:9305 train loss:3.495974 +step:9306 train loss:3.497239 +step:9307 train loss:3.499787 +step:9308 train loss:3.469934 +step:9309 train loss:3.458248 +step:9310 train loss:3.515358 +step:9311 train loss:3.577120 +step:9312 train loss:3.528882 +step:9313 train loss:3.470842 +step:9314 train loss:3.503921 +step:9315 train loss:3.536939 +step:9316 train loss:3.522078 +step:9317 train loss:3.496719 +step:9318 train loss:3.582414 +step:9319 train loss:3.491444 +step:9320 train loss:3.510469 +step:9321 train loss:3.526017 +step:9322 train loss:3.532498 +step:9323 train loss:3.609003 +step:9324 train loss:3.552315 +step:9325 train loss:3.494247 +step:9326 train loss:3.568934 +step:9327 train loss:3.566575 +step:9328 train loss:3.567593 +step:9329 train loss:3.453616 +step:9330 train loss:3.623037 +step:9331 train loss:3.553265 +step:9332 train loss:3.573770 +step:9333 train loss:3.593862 +step:9334 train loss:3.528475 +step:9335 train loss:3.624896 +step:9336 train loss:3.584435 +step:9337 train loss:3.536179 +step:9338 train loss:3.590040 +step:9339 train loss:3.571625 +step:9340 train loss:3.529207 +step:9341 train loss:3.618889 +step:9342 train loss:3.515131 +step:9343 train loss:3.507158 +step:9344 train loss:3.511538 +step:9345 train loss:3.651941 +step:9346 train loss:3.486732 +step:9347 train loss:3.506145 +step:9348 train loss:3.530794 +step:9349 train loss:3.475132 +step:9350 train loss:3.552465 +step:9351 train loss:3.527717 +step:9352 train loss:3.516968 +step:9353 train loss:3.545823 +step:9354 train loss:3.515795 +step:9355 train loss:3.508322 +step:9356 train loss:3.553292 +step:9357 train loss:3.509791 +step:9358 train loss:3.541800 
+step:9359 train loss:3.480527 +step:9360 train loss:3.500326 +step:9361 train loss:3.497859 +step:9362 train loss:3.486580 +step:9363 train loss:3.551291 +step:9364 train loss:3.529674 +step:9365 train loss:3.532409 +step:9366 train loss:3.528666 +step:9367 train loss:3.539723 +step:9368 train loss:3.517989 +step:9369 train loss:3.515827 +step:9370 train loss:3.523590 +step:9371 train loss:3.541495 +step:9372 train loss:3.510562 +step:9373 train loss:3.492544 +step:9374 train loss:3.530030 +step:9375 train loss:3.540240 +step:9376 train loss:3.481628 +step:9377 train loss:3.552617 +step:9378 train loss:3.555513 +step:9379 train loss:3.582693 +step:9380 train loss:3.514055 +step:9381 train loss:3.522657 +step:9382 train loss:3.497776 +step:9383 train loss:3.493934 +step:9384 train loss:3.464309 +step:9385 train loss:3.538949 +step:9386 train loss:3.565476 +step:9387 train loss:3.541762 +step:9388 train loss:3.478671 +step:9389 train loss:3.494869 +step:9390 train loss:3.542501 +step:9391 train loss:3.547473 +step:9392 train loss:3.510439 +step:9393 train loss:3.504613 +step:9394 train loss:3.529582 +step:9395 train loss:3.525027 +step:9396 train loss:3.671841 +step:9397 train loss:3.560408 +step:9398 train loss:3.579427 +step:9399 train loss:3.532419 +step:9400 train loss:3.533973 +step:9401 train loss:3.529672 +step:9402 train loss:3.531351 +step:9403 train loss:3.462037 +step:9404 train loss:3.538634 +step:9405 train loss:3.498800 +step:9406 train loss:3.552860 +step:9407 train loss:3.493216 +step:9408 train loss:3.433337 +step:9409 train loss:3.495208 +step:9410 train loss:3.577415 +step:9411 train loss:3.538076 +step:9412 train loss:3.566870 +step:9413 train loss:3.588118 +step:9414 train loss:3.524337 +step:9415 train loss:3.515115 +step:9416 train loss:3.531261 +step:9417 train loss:3.484363 +step:9418 train loss:3.512104 +step:9419 train loss:3.481628 +step:9420 train loss:3.497626 +step:9421 train loss:3.545576 +step:9422 train loss:3.500342 +step:9423 train loss:3.559402 +step:9424 train loss:3.500275 +step:9425 train loss:3.543779 +step:9426 train loss:3.547371 +step:9427 train loss:3.518142 +step:9428 train loss:3.624810 +step:9429 train loss:3.517261 +step:9430 train loss:3.471835 +step:9431 train loss:3.564414 +step:9432 train loss:3.528832 +step:9433 train loss:3.564307 +step:9434 train loss:3.518852 +step:9435 train loss:3.544388 +step:9436 train loss:3.514031 +step:9437 train loss:3.524715 +step:9438 train loss:3.520565 +step:9439 train loss:3.519456 +step:9440 train loss:3.511253 +step:9441 train loss:3.524706 +step:9442 train loss:3.464398 +step:9443 train loss:3.517668 +step:9444 train loss:3.584292 +step:9445 train loss:3.512064 +step:9446 train loss:3.494329 +step:9447 train loss:3.559465 +step:9448 train loss:3.493077 +step:9449 train loss:3.517601 +step:9450 train loss:3.556107 +step:9451 train loss:3.474508 +step:9452 train loss:3.527933 +step:9453 train loss:3.503242 +step:9454 train loss:3.568889 +step:9455 train loss:3.546733 +step:9456 train loss:3.479041 +step:9457 train loss:3.520904 +step:9458 train loss:3.509789 +step:9459 train loss:3.501515 +step:9460 train loss:3.543880 +step:9461 train loss:3.570659 +step:9462 train loss:3.519330 +step:9463 train loss:3.548617 +step:9464 train loss:3.503961 +step:9465 train loss:3.593856 +step:9466 train loss:3.541598 +step:9467 train loss:3.567822 +step:9468 train loss:3.514090 +step:9469 train loss:3.501244 +step:9470 train loss:3.500428 +step:9471 train loss:3.538800 +step:9472 train loss:3.561294 +step:9473 train 
loss:3.553315 +step:9474 train loss:3.498520 +step:9475 train loss:3.489989 +step:9476 train loss:3.707275 +step:9477 train loss:3.580733 +step:9478 train loss:3.556351 +step:9479 train loss:3.654201 +step:9480 train loss:3.501638 +step:9481 train loss:3.535919 +step:9482 train loss:3.562276 +step:9483 train loss:3.519086 +step:9484 train loss:3.548985 +step:9485 train loss:3.473926 +step:9486 train loss:3.506763 +step:9487 train loss:3.538210 +step:9488 train loss:3.491476 +step:9489 train loss:3.541714 +step:9490 train loss:3.506082 +step:9491 train loss:3.548930 +step:9492 train loss:3.568752 +step:9493 train loss:3.543000 +step:9494 train loss:3.549822 +step:9495 train loss:3.504230 +step:9496 train loss:3.563488 +step:9497 train loss:3.579111 +step:9498 train loss:3.524805 +step:9499 train loss:3.577754 +step:9500 validation loss:3.463929 total_sharp:1.8679e-04 L1_sharp:2.1734e-05 L2_sharp:-6.1461e-06 L3_sharp:-1.2867e-05 L4_sharp:1.3605e-05 L5_sharp:2.8622e-05 L6_sharp:1.9648e-05 L7_sharp:3.7710e-05 L8_sharp:5.6021e-05 L9_sharp:7.1336e-05 L10_sharp:7.3041e-05 L11_sharp:5.6494e-05 L12_sharp:2.2508e-04 total_fnorm:1.1057e+01 total_l1_linf:9.8112e+04 total_spectral:1.1057e+01 L1_fnorm:2.6337e+00 L2_fnorm:2.6434e+00 L3_fnorm:2.6470e+00 L4_fnorm:2.5924e+00 L5_fnorm:2.5196e+00 L6_fnorm:2.6104e+00 L7_fnorm:2.6417e+00 L8_fnorm:2.5977e+00 L9_fnorm:2.5840e+00 L10_fnorm:2.5635e+00 L11_fnorm:2.5777e+00 L12_fnorm:2.4920e+00 L1_l1linf:2.6469e+00 L2_l1linf:3.0748e+00 L3_l1linf:3.0964e+00 L4_l1linf:2.8428e+00 L5_l1linf:2.4853e+00 L6_l1linf:2.7086e+00 L7_l1linf:2.7768e+00 L8_l1linf:2.9740e+00 L9_l1linf:2.8323e+00 L10_l1linf:2.8656e+00 L11_l1linf:3.0480e+00 L12_l1linf:2.9733e+00 L1_spectral:3.4131e-01 L2_spectral:3.3809e-01 L3_spectral:3.9700e-01 L4_spectral:3.2864e-01 L5_spectral:2.7749e-01 L6_spectral:2.6488e-01 L7_spectral:3.0653e-01 L8_spectral:3.6373e-01 L9_spectral:4.1273e-01 L10_spectral:4.1075e-01 L11_spectral:3.9119e-01 L12_spectral:4.3186e-01 ip_v_neg_g:1.2492e-02 cos_v_neg_g:2.9576e-03 v_norm:1.1057e+01 g_norm:3.8200e-01 hv_norm:2.0219e-01 cos_v_hv:1.0214e-02 hg_norm:2.1061e+00 cos_g_hg:4.6092e-01 v_par:2.7047e-03 v_perp:1.1057e+01 L1_cos_v_neg_g:1.1061e-02 L1_v_norm:2.6337e+00 L2_cos_v_neg_g:7.0785e-03 L2_v_norm:2.6434e+00 L3_cos_v_neg_g:8.0783e-03 L3_v_norm:2.6470e+00 L4_cos_v_neg_g:4.1713e-03 L4_v_norm:2.5924e+00 L5_cos_v_neg_g:4.0016e-03 L5_v_norm:2.5196e+00 L6_cos_v_neg_g:3.6236e-03 L6_v_norm:2.6104e+00 L7_cos_v_neg_g:3.9792e-03 L7_v_norm:2.6417e+00 L8_cos_v_neg_g:4.3336e-03 L8_v_norm:2.5977e+00 L9_cos_v_neg_g:6.5980e-03 L9_v_norm:2.5840e+00 L10_cos_v_neg_g:7.5722e-03 L10_v_norm:2.5635e+00 L11_cos_v_neg_g:8.7409e-03 L11_v_norm:2.5777e+00 L12_cos_v_neg_g:1.2986e-02 L12_v_norm:2.4920e+00 +step:9500 train loss:3.569025 +step:9501 train loss:3.545155 +step:9502 train loss:3.518573 +step:9503 train loss:3.533195 +step:9504 train loss:3.489210 +step:9505 train loss:3.513613 +step:9506 train loss:3.527208 +step:9507 train loss:3.513269 +step:9508 train loss:3.706649 +step:9509 train loss:3.523592 +step:9510 train loss:3.512482 +step:9511 train loss:3.540532 +step:9512 train loss:3.565249 +step:9513 train loss:3.560768 +step:9514 train loss:3.526593 +step:9515 train loss:3.425030 +step:9516 train loss:3.526607 +step:9517 train loss:3.563833 +step:9518 train loss:3.537372 +step:9519 train loss:3.547844 +step:9520 train loss:3.435943 +step:9521 train loss:3.431191 +step:9522 train loss:3.548384 +step:9523 train loss:3.543832 +step:9524 train loss:3.545334 +step:9525 train loss:3.589298 
+step:9526 train loss:3.605863 +step:9527 train loss:3.564748 +step:9528 train loss:3.494070 +step:9529 train loss:3.540680 +step:9530 train loss:3.588883 +step:9531 train loss:3.492097 +step:9532 train loss:3.541605 +step:9533 train loss:3.512290 +step:9534 train loss:3.596157 +step:9535 train loss:3.515661 +step:9536 train loss:3.495401 +step:9537 train loss:3.444033 +step:9538 train loss:3.461179 +step:9539 train loss:3.533397 +step:9540 train loss:3.451486 +step:9541 train loss:3.512014 +step:9542 train loss:3.636475 +step:9543 train loss:3.536429 +step:9544 train loss:3.576497 +step:9545 train loss:3.508342 +step:9546 train loss:3.532857 +step:9547 train loss:3.579355 +step:9548 train loss:3.518110 +step:9549 train loss:3.488740 +step:9550 train loss:3.520376 +step:9551 train loss:3.513143 +step:9552 train loss:3.535302 +step:9553 train loss:3.532681 +step:9554 train loss:3.577644 +step:9555 train loss:3.582932 +step:9556 train loss:3.492909 +step:9557 train loss:3.509372 +step:9558 train loss:3.574659 +step:9559 train loss:3.580156 +step:9560 train loss:3.492444 +step:9561 train loss:3.518777 +step:9562 train loss:3.557483 +step:9563 train loss:3.505084 +step:9564 train loss:3.537657 +step:9565 train loss:3.516478 +step:9566 train loss:3.490177 +step:9567 train loss:3.554688 +step:9568 train loss:3.529383 +step:9569 train loss:3.569705 +step:9570 train loss:3.463838 +step:9571 train loss:3.538678 +step:9572 train loss:3.481943 +step:9573 train loss:3.514325 +step:9574 train loss:3.488152 +step:9575 train loss:3.562549 +step:9576 train loss:3.453468 +step:9577 train loss:3.502473 +step:9578 train loss:3.507020 +step:9579 train loss:3.505219 +step:9580 train loss:3.568053 +step:9581 train loss:3.558681 +step:9582 train loss:3.532863 +step:9583 train loss:3.557559 +step:9584 train loss:3.493999 +step:9585 train loss:3.513193 +step:9586 train loss:3.562934 +step:9587 train loss:3.534191 +step:9588 train loss:3.518184 +step:9589 train loss:3.577977 +step:9590 train loss:3.543215 +step:9591 train loss:3.506711 +step:9592 train loss:3.529515 +step:9593 train loss:3.529039 +step:9594 train loss:3.544788 +step:9595 train loss:3.522016 +step:9596 train loss:3.605853 +step:9597 train loss:3.516327 +step:9598 train loss:3.478708 +step:9599 train loss:3.483888 +step:9600 train loss:3.571261 +step:9601 train loss:3.486683 +step:9602 train loss:3.570050 +step:9603 train loss:3.563371 +step:9604 train loss:3.446031 +step:9605 train loss:3.532327 +step:9606 train loss:3.588768 +step:9607 train loss:3.509560 +step:9608 train loss:3.515030 +step:9609 train loss:3.525037 +step:9610 train loss:3.568551 +step:9611 train loss:3.498870 +step:9612 train loss:3.507771 +step:9613 train loss:3.545823 +step:9614 train loss:3.518803 +step:9615 train loss:3.703848 +step:9616 train loss:3.517312 +step:9617 train loss:3.508405 +step:9618 train loss:3.464414 +step:9619 train loss:3.528192 +step:9620 train loss:3.584935 +step:9621 train loss:3.502589 +step:9622 train loss:3.519372 +step:9623 train loss:3.561875 +step:9624 train loss:3.546256 +step:9625 train loss:3.560015 +step:9626 train loss:3.529817 +step:9627 train loss:3.613312 +step:9628 train loss:3.574186 +step:9629 train loss:3.490768 +step:9630 train loss:3.544169 +step:9631 train loss:3.532055 +step:9632 train loss:3.500534 +step:9633 train loss:3.543443 +step:9634 train loss:3.609902 +step:9635 train loss:3.514466 +step:9636 train loss:3.459684 +step:9637 train loss:3.593805 +step:9638 train loss:3.476754 +step:9639 train loss:3.444603 +step:9640 train 
loss:3.570156 +step:9641 train loss:3.539895 +step:9642 train loss:3.518572 +step:9643 train loss:3.522640 +step:9644 train loss:3.577749 +step:9645 train loss:3.505164 +step:9646 train loss:3.543121 +step:9647 train loss:3.551364 +step:9648 train loss:3.501091 +step:9649 train loss:3.473771 +step:9650 train loss:3.492030 +step:9651 train loss:3.582476 +step:9652 train loss:3.563026 +step:9653 train loss:3.503503 +step:9654 train loss:3.485880 +step:9655 train loss:3.480521 +step:9656 train loss:3.475605 +step:9657 train loss:3.504087 +step:9658 train loss:3.561114 +step:9659 train loss:3.667896 +step:9660 train loss:3.450663 +step:9661 train loss:3.472263 +step:9662 train loss:3.491763 +step:9663 train loss:3.533375 +step:9664 train loss:3.583516 +step:9665 train loss:3.425273 +step:9666 train loss:3.470320 +step:9667 train loss:3.604209 +step:9668 train loss:3.587031 +step:9669 train loss:3.602036 +step:9670 train loss:3.584570 +step:9671 train loss:3.582321 +step:9672 train loss:3.494919 +step:9673 train loss:3.518867 +step:9674 train loss:3.528845 +step:9675 train loss:3.524569 +step:9676 train loss:3.486561 +step:9677 train loss:3.493663 +step:9678 train loss:3.526254 +step:9679 train loss:3.518200 +step:9680 train loss:3.516469 +step:9681 train loss:3.503845 +step:9682 train loss:3.571428 +step:9683 train loss:3.543427 +step:9684 train loss:3.463303 +step:9685 train loss:3.546400 +step:9686 train loss:3.581957 +step:9687 train loss:3.487020 +step:9688 train loss:3.572362 +step:9689 train loss:3.672464 +step:9690 train loss:3.517823 +step:9691 train loss:3.506187 +step:9692 train loss:3.467180 +step:9693 train loss:3.464970 +step:9694 train loss:3.489618 +step:9695 train loss:3.593654 +step:9696 train loss:3.625880 +step:9697 train loss:3.538100 +step:9698 train loss:3.572797 +step:9699 train loss:3.530107 +step:9700 train loss:3.531202 +step:9701 train loss:3.587458 +step:9702 train loss:3.499019 +step:9703 train loss:3.522065 +step:9704 train loss:3.603514 +step:9705 train loss:3.503567 +step:9706 train loss:3.494306 +step:9707 train loss:3.545100 +step:9708 train loss:3.494487 +step:9709 train loss:3.515274 +step:9710 train loss:3.531429 +step:9711 train loss:3.507533 +step:9712 train loss:3.517695 +step:9713 train loss:3.567983 +step:9714 train loss:3.523114 +step:9715 train loss:3.544043 +step:9716 train loss:3.566259 +step:9717 train loss:3.486394 +step:9718 train loss:3.493150 +step:9719 train loss:3.574369 +step:9720 train loss:3.509045 +step:9721 train loss:3.498412 +step:9722 train loss:3.565216 +step:9723 train loss:3.507387 +step:9724 train loss:3.537104 +step:9725 train loss:3.590431 +step:9726 train loss:3.528707 +step:9727 train loss:3.510612 +step:9728 train loss:3.544981 +step:9729 train loss:3.573646 +step:9730 train loss:3.643170 +step:9731 train loss:3.564710 +step:9732 train loss:3.525606 +step:9733 train loss:3.568055 +step:9734 train loss:3.487155 +step:9735 train loss:3.597875 +step:9736 train loss:3.498444 +step:9737 train loss:3.555319 +step:9738 train loss:3.522217 +step:9739 train loss:3.594747 +step:9740 train loss:3.557733 +step:9741 train loss:3.501265 +step:9742 train loss:3.591602 +step:9743 train loss:3.465389 +step:9744 train loss:3.527133 +step:9745 train loss:3.483887 +step:9746 train loss:3.521534 +step:9747 train loss:3.510223 +step:9748 train loss:3.412494 +step:9749 train loss:3.509319 +step:9750 validation loss:3.455682 +step:9750 train loss:3.487702 +step:9751 train loss:3.630507 +step:9752 train loss:3.514732 +step:9753 train loss:3.472946 
+step:9754 train loss:3.507258 +step:9755 train loss:3.500753 +step:9756 train loss:3.503589 +step:9757 train loss:3.465950 +step:9758 train loss:3.463908 +step:9759 train loss:3.510030 +step:9760 train loss:3.452893 +step:9761 train loss:3.494995 +step:9762 train loss:3.490281 +step:9763 train loss:3.512777 +step:9764 train loss:3.495823 +step:9765 train loss:3.459882 +step:9766 train loss:3.551324 +step:9767 train loss:3.505013 +step:9768 train loss:3.518382 +step:9769 train loss:3.469832 +step:9770 train loss:3.472331 +step:9771 train loss:3.519900 +step:9772 train loss:3.534221 +step:9773 train loss:3.510913 +step:9774 train loss:3.481722 +step:9775 train loss:3.566943 +step:9776 train loss:3.567471 +step:9777 train loss:3.461496 +step:9778 train loss:3.464103 +step:9779 train loss:3.471596 +step:9780 train loss:3.467820 +step:9781 train loss:3.486156 +step:9782 train loss:3.565700 +step:9783 train loss:3.477074 +step:9784 train loss:3.504768 +step:9785 train loss:3.492160 +step:9786 train loss:3.531123 +step:9787 train loss:3.553181 +step:9788 train loss:3.480435 +step:9789 train loss:3.491311 +step:9790 train loss:3.452357 +step:9791 train loss:3.499283 +step:9792 train loss:3.516976 +step:9793 train loss:3.533752 +step:9794 train loss:3.508370 +step:9795 train loss:3.514120 +step:9796 train loss:3.498952 +step:9797 train loss:3.495134 +step:9798 train loss:3.509974 +step:9799 train loss:3.514994 +step:9800 train loss:3.580720 +step:9801 train loss:3.508680 +step:9802 train loss:3.564412 +step:9803 train loss:3.424438 +step:9804 train loss:3.518131 +step:9805 train loss:3.524916 +step:9806 train loss:3.499407 +step:9807 train loss:3.468686 +step:9808 train loss:3.382627 +step:9809 train loss:3.570853 +step:9810 train loss:3.525775 +step:9811 train loss:3.511528 +step:9812 train loss:3.486634 +step:9813 train loss:3.568123 +step:9814 train loss:3.556822 +step:9815 train loss:3.459342 +step:9816 train loss:3.462584 +step:9817 train loss:3.494558 +step:9818 train loss:3.521213 +step:9819 train loss:3.490932 +step:9820 train loss:3.559582 +step:9821 train loss:3.537162 +step:9822 train loss:3.513126 +step:9823 train loss:3.572469 +step:9824 train loss:3.478721 +step:9825 train loss:3.561965 +step:9826 train loss:3.557601 +step:9827 train loss:3.561310 +step:9828 train loss:3.480142 +step:9829 train loss:3.487456 +step:9830 train loss:3.474832 +step:9831 train loss:3.533367 +step:9832 train loss:3.547116 +step:9833 train loss:3.462891 +step:9834 train loss:3.511139 +step:9835 train loss:3.476652 +step:9836 train loss:3.538939 +step:9837 train loss:3.515286 +step:9838 train loss:3.552592 +step:9839 train loss:3.526499 +step:9840 train loss:3.491321 +step:9841 train loss:3.501379 +step:9842 train loss:3.561801 +step:9843 train loss:3.555976 +step:9844 train loss:3.505369 +step:9845 train loss:3.533103 +step:9846 train loss:3.469091 +step:9847 train loss:3.597438 +step:9848 train loss:3.522194 +step:9849 train loss:3.550673 +step:9850 train loss:3.465083 +step:9851 train loss:3.520541 +step:9852 train loss:3.484732 +step:9853 train loss:3.507607 +step:9854 train loss:3.517882 +step:9855 train loss:3.466993 +step:9856 train loss:3.469497 +step:9857 train loss:3.459579 +step:9858 train loss:3.525498 +step:9859 train loss:3.442827 +step:9860 train loss:3.681356 +step:9861 train loss:3.509973 +step:9862 train loss:3.472936 +step:9863 train loss:3.457223 +step:9864 train loss:3.582215 +step:9865 train loss:3.454548 +step:9866 train loss:3.500166 +step:9867 train loss:3.495411 +step:9868 train 
loss:3.553452 +step:9869 train loss:3.513372 +step:9870 train loss:3.488321 +step:9871 train loss:3.529868 +step:9872 train loss:3.472787 +step:9873 train loss:3.522724 +step:9874 train loss:3.490638 +step:9875 train loss:3.492872 +step:9876 train loss:3.460893 +step:9877 train loss:3.509587 +step:9878 train loss:3.541588 +step:9879 train loss:3.538355 +step:9880 train loss:3.472915 +step:9881 train loss:3.523603 +step:9882 train loss:3.484408 +step:9883 train loss:3.494370 +step:9884 train loss:3.485692 +step:9885 train loss:3.550660 +step:9886 train loss:3.516932 +step:9887 train loss:3.519983 +step:9888 train loss:3.542531 +step:9889 train loss:3.574303 +step:9890 train loss:3.486072 +step:9891 train loss:3.493175 +step:9892 train loss:3.463201 +step:9893 train loss:3.582194 +step:9894 train loss:3.494800 +step:9895 train loss:3.431389 +step:9896 train loss:3.584450 +step:9897 train loss:3.461919 +step:9898 train loss:3.531395 +step:9899 train loss:3.510580 +step:9900 train loss:3.552392 +step:9901 train loss:3.477030 +step:9902 train loss:3.519011 +step:9903 train loss:3.491178 +step:9904 train loss:3.541695 +step:9905 train loss:3.447418 +step:9906 train loss:3.488503 +step:9907 train loss:3.493008 +step:9908 train loss:3.493572 +step:9909 train loss:3.508405 +step:9910 train loss:3.532734 +step:9911 train loss:3.615865 +step:9912 train loss:3.492762 +step:9913 train loss:3.493870 +step:9914 train loss:3.507330 +step:9915 train loss:3.503807 +step:9916 train loss:3.456470 +step:9917 train loss:3.490067 +step:9918 train loss:3.489087 +step:9919 train loss:3.649630 +step:9920 train loss:3.432987 +step:9921 train loss:3.528877 +step:9922 train loss:3.489581 +step:9923 train loss:3.544522 +step:9924 train loss:3.460263 +step:9925 train loss:3.519984 +step:9926 train loss:3.498195 +step:9927 train loss:3.540628 +step:9928 train loss:3.468473 +step:9929 train loss:3.505781 +step:9930 train loss:3.595374 +step:9931 train loss:3.561275 +step:9932 train loss:3.446993 +step:9933 train loss:3.541052 +step:9934 train loss:3.457170 +step:9935 train loss:3.576166 +step:9936 train loss:3.481226 +step:9937 train loss:3.509622 +step:9938 train loss:3.494718 +step:9939 train loss:3.560652 +step:9940 train loss:3.594671 +step:9941 train loss:3.475107 +step:9942 train loss:3.514956 +step:9943 train loss:3.650612 +step:9944 train loss:3.513335 +step:9945 train loss:3.534558 +step:9946 train loss:3.507412 +step:9947 train loss:3.458494 +step:9948 train loss:3.501238 +step:9949 train loss:3.396169 +step:9950 train loss:3.544784 +step:9951 train loss:3.470636 +step:9952 train loss:3.543024 +step:9953 train loss:3.502010 +step:9954 train loss:3.557069 +step:9955 train loss:3.534175 +step:9956 train loss:3.536099 +step:9957 train loss:3.513022 +step:9958 train loss:3.562820 +step:9959 train loss:3.464320 +step:9960 train loss:3.497372 +step:9961 train loss:3.508610 +step:9962 train loss:3.554298 +step:9963 train loss:3.443990 +step:9964 train loss:3.499366 +step:9965 train loss:3.500599 +step:9966 train loss:3.558244 +step:9967 train loss:3.471767 +step:9968 train loss:3.539133 +step:9969 train loss:3.452725 +step:9970 train loss:3.489050 +step:9971 train loss:3.538898 +step:9972 train loss:3.558806 +step:9973 train loss:3.535126 +step:9974 train loss:3.522186 +step:9975 train loss:3.491112 +step:9976 train loss:3.448366 +step:9977 train loss:3.503284 +step:9978 train loss:3.499377 +step:9979 train loss:3.508279 +step:9980 train loss:3.565022 +step:9981 train loss:3.472672 +step:9982 train loss:3.535835 
+step:9983 train loss:3.452557 +step:9984 train loss:3.518356 +step:9985 train loss:3.459738 +step:9986 train loss:3.513380 +step:9987 train loss:3.569665 +step:9988 train loss:3.572630 +step:9989 train loss:3.464809 +step:9990 train loss:3.604451 +step:9991 train loss:3.449071 +step:9992 train loss:3.528203 +step:9993 train loss:3.516053 +step:9994 train loss:3.632417 +step:9995 train loss:3.572632 +step:9996 train loss:3.487683 +step:9997 train loss:3.529204 +step:9998 train loss:3.581078 +step:9999 train loss:3.545379 +step:10000 validation loss:3.452758 total_sharp:1.8702e-04 L1_sharp:7.2037e-05 L2_sharp:1.5767e-05 L3_sharp:3.1936e-05 L4_sharp:2.8453e-05 L5_sharp:2.2049e-05 L6_sharp:1.6529e-05 L7_sharp:3.0082e-05 L8_sharp:4.4987e-05 L9_sharp:6.5519e-05 L10_sharp:7.8575e-05 L11_sharp:5.3201e-05 L12_sharp:1.9074e-04 total_fnorm:1.1052e+01 total_l1_linf:9.7956e+04 total_spectral:1.1052e+01 L1_fnorm:2.6105e+00 L2_fnorm:2.5624e+00 L3_fnorm:2.5693e+00 L4_fnorm:2.5638e+00 L5_fnorm:2.5219e+00 L6_fnorm:2.6258e+00 L7_fnorm:2.6475e+00 L8_fnorm:2.6137e+00 L9_fnorm:2.6106e+00 L10_fnorm:2.5994e+00 L11_fnorm:2.6229e+00 L12_fnorm:2.5319e+00 L1_l1linf:2.6371e+00 L2_l1linf:2.7899e+00 L3_l1linf:2.8683e+00 L4_l1linf:2.8275e+00 L5_l1linf:2.5098e+00 L6_l1linf:2.6292e+00 L7_l1linf:2.7514e+00 L8_l1linf:3.0496e+00 L9_l1linf:2.9712e+00 L10_l1linf:3.0833e+00 L11_l1linf:3.0358e+00 L12_l1linf:3.0039e+00 L1_spectral:3.4356e-01 L2_spectral:3.2541e-01 L3_spectral:3.0093e-01 L4_spectral:3.1858e-01 L5_spectral:2.7504e-01 L6_spectral:2.5714e-01 L7_spectral:3.1466e-01 L8_spectral:3.7312e-01 L9_spectral:4.1798e-01 L10_spectral:4.3818e-01 L11_spectral:4.3017e-01 L12_spectral:4.3363e-01 ip_v_neg_g:1.3558e-02 cos_v_neg_g:2.7867e-03 v_norm:1.1052e+01 g_norm:4.4023e-01 hv_norm:1.9569e-01 cos_v_hv:1.0562e-02 hg_norm:3.1140e+00 cos_g_hg:4.3158e-01 v_par:2.2069e-03 v_perp:1.1052e+01 L1_cos_v_neg_g:4.4441e-03 L1_v_norm:2.6105e+00 L2_cos_v_neg_g:1.7394e-03 L2_v_norm:2.5624e+00 L3_cos_v_neg_g:2.5793e-03 L3_v_norm:2.5693e+00 L4_cos_v_neg_g:2.2723e-03 L4_v_norm:2.5638e+00 L5_cos_v_neg_g:2.2216e-03 L5_v_norm:2.5219e+00 L6_cos_v_neg_g:2.7449e-03 L6_v_norm:2.6258e+00 L7_cos_v_neg_g:2.0087e-03 L7_v_norm:2.6475e+00 L8_cos_v_neg_g:4.5962e-03 L8_v_norm:2.6137e+00 L9_cos_v_neg_g:8.2970e-03 L9_v_norm:2.6106e+00 L10_cos_v_neg_g:1.1523e-02 L10_v_norm:2.5994e+00 L11_cos_v_neg_g:1.2932e-02 L11_v_norm:2.6229e+00 L12_cos_v_neg_g:1.4470e-02 L12_v_norm:2.5319e+00 diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/config.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..4a61d99c05b04c31de7cb2099124b4d8a55caf57 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/adam_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.005, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": 
"", + "compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "adam", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 43, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "9a8e9bb9-b936-4132-8b51-1e35b9bce77b", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_1000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..dd6004a91f45f3b232e6da5756fd09a7629412c6 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 9.2918119430542, + "total_l1_linf_norm": 79811.65625, + "total_spectral_norm": 9.2918119430542, + "layer_1_update_fnorm": 1.6484299898147583, + "layer_1_max_l1_linf_norm": 3.4539666175842285, + "layer_1_max_spectral_norm": 0.3975818455219269, + "layer_2_update_fnorm": 1.856082558631897, + "layer_2_max_l1_linf_norm": 2.0501856803894043, + "layer_2_max_spectral_norm": 0.22083517909049988, + "layer_3_update_fnorm": 1.7541563510894775, + "layer_3_max_l1_linf_norm": 2.135124683380127, + "layer_3_max_spectral_norm": 0.2218143492937088, + "layer_4_update_fnorm": 1.7467689514160156, + "layer_4_max_l1_linf_norm": 2.0507678985595703, + "layer_4_max_spectral_norm": 0.25329676270484924, + "layer_5_update_fnorm": 1.8887864351272583, + "layer_5_max_l1_linf_norm": 2.101533889770508, + "layer_5_max_spectral_norm": 0.3423948884010315, + "layer_6_update_fnorm": 2.0351529121398926, + "layer_6_max_l1_linf_norm": 2.2137298583984375, + "layer_6_max_spectral_norm": 0.33946743607521057, + "layer_7_update_fnorm": 2.1487579345703125, + "layer_7_max_l1_linf_norm": 2.4086523056030273, + "layer_7_max_spectral_norm": 0.33348608016967773, + "layer_8_update_fnorm": 2.0984859466552734, + "layer_8_max_l1_linf_norm": 2.695167064666748, + "layer_8_max_spectral_norm": 0.3438078463077545, + "layer_9_update_fnorm": 2.0518062114715576, + "layer_9_max_l1_linf_norm": 2.9002485275268555, + "layer_9_max_spectral_norm": 0.3574732840061188, + "layer_10_update_fnorm": 2.0411558151245117, + "layer_10_max_l1_linf_norm": 2.8282368183135986, + "layer_10_max_spectral_norm": 0.37845367193222046, + "layer_11_update_fnorm": 2.1009182929992676, + "layer_11_max_l1_linf_norm": 2.6112892627716064, + "layer_11_max_spectral_norm": 0.4017374813556671, + "layer_12_update_fnorm": 2.10556960105896, + "layer_12_max_l1_linf_norm": 2.708024740219116, + "layer_12_max_spectral_norm": 0.3790830075740814, + "total_sharpness": 0.0009884873870760202, + "ip_v_neg_g": 0.041583023965358734, + "cos_v_neg_g": 0.009523113258183002, + "v_norm": 9.2918119430542, + "g_norm": 0.4699338376522064, + "hv_norm": 0.344952791929245, + "cos_v_hv": 0.026626363396644592, + "hg_norm": 2.8446929454803467, + "cos_g_hg": 0.5046699047088623, + "v_parallel_norm": 0.007658477872610092, + "v_perp_norm": 9.29180908203125, + "layer_1_v_norm": 1.6484299898147583, + "layer_1_cos_v_neg_g": 0.04496842622756958, + "layer_2_v_norm": 1.856082558631897, + "layer_2_cos_v_neg_g": 0.019146546721458435, + "layer_3_v_norm": 1.7541563510894775, + "layer_3_cos_v_neg_g": 0.023223623633384705, + "layer_4_v_norm": 1.7467689514160156, + "layer_4_cos_v_neg_g": 
0.018388064578175545, + "layer_5_v_norm": 1.8887864351272583, + "layer_5_cos_v_neg_g": 0.01674877479672432, + "layer_6_v_norm": 2.0351529121398926, + "layer_6_cos_v_neg_g": 0.01656186394393444, + "layer_7_v_norm": 2.1487579345703125, + "layer_7_cos_v_neg_g": 0.01595297083258629, + "layer_8_v_norm": 2.0984857082366943, + "layer_8_cos_v_neg_g": 0.017667317762970924, + "layer_9_v_norm": 2.0518062114715576, + "layer_9_cos_v_neg_g": 0.018990444019436836, + "layer_10_v_norm": 2.0411558151245117, + "layer_10_cos_v_neg_g": 0.021837839856743813, + "layer_11_v_norm": 2.1009182929992676, + "layer_11_cos_v_neg_g": 0.02261536382138729, + "layer_12_v_norm": 2.10556960105896, + "layer_12_cos_v_neg_g": 0.026667295023798943, + "layer_1_sharpness": 0.0020110984332859516, + "layer_2_sharpness": 7.491892029065639e-05, + "layer_3_sharpness": 0.00017764409130904824, + "layer_4_sharpness": 0.00013632382615469396, + "layer_5_sharpness": 0.00019296376558486372, + "layer_6_sharpness": 0.00012820457050111145, + "layer_7_sharpness": 0.00010803186160046607, + "layer_8_sharpness": 0.00020236005366314203, + "layer_9_sharpness": 0.00030862772837281227, + "layer_10_sharpness": 0.00028431840473785996, + "layer_11_sharpness": 0.00030938227428123355, + "layer_12_sharpness": 0.0004803370975423604 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_10000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..3f11946075c41762afd26866425d63610f68e153 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 11.032038688659668, + "total_l1_linf_norm": 97669.734375, + "total_spectral_norm": 11.032038688659668, + "layer_1_update_fnorm": 2.598966598510742, + "layer_1_max_l1_linf_norm": 2.6269824504852295, + "layer_1_max_spectral_norm": 0.35629934072494507, + "layer_2_update_fnorm": 2.5683839321136475, + "layer_2_max_l1_linf_norm": 2.681238889694214, + "layer_2_max_spectral_norm": 0.33612334728240967, + "layer_3_update_fnorm": 2.5806219577789307, + "layer_3_max_l1_linf_norm": 2.7222251892089844, + "layer_3_max_spectral_norm": 0.3120930790901184, + "layer_4_update_fnorm": 2.561830520629883, + "layer_4_max_l1_linf_norm": 2.6663403511047363, + "layer_4_max_spectral_norm": 0.33325254917144775, + "layer_5_update_fnorm": 2.5278892517089844, + "layer_5_max_l1_linf_norm": 2.554281711578369, + "layer_5_max_spectral_norm": 0.27946731448173523, + "layer_6_update_fnorm": 2.5843067169189453, + "layer_6_max_l1_linf_norm": 2.5812020301818848, + "layer_6_max_spectral_norm": 0.25632062554359436, + "layer_7_update_fnorm": 2.6424453258514404, + "layer_7_max_l1_linf_norm": 2.749660015106201, + "layer_7_max_spectral_norm": 0.3160971403121948, + "layer_8_update_fnorm": 2.5796287059783936, + "layer_8_max_l1_linf_norm": 2.9380147457122803, + "layer_8_max_spectral_norm": 0.38116782903671265, + "layer_9_update_fnorm": 2.5923454761505127, + "layer_9_max_l1_linf_norm": 3.008397102355957, + "layer_9_max_spectral_norm": 0.4424395263195038, + "layer_10_update_fnorm": 2.584707498550415, + "layer_10_max_l1_linf_norm": 3.128085136413574, + "layer_10_max_spectral_norm": 0.4752359390258789, + "layer_11_update_fnorm": 2.6217031478881836, + "layer_11_max_l1_linf_norm": 3.233473777770996, + "layer_11_max_spectral_norm": 0.4775351285934448, + "layer_12_update_fnorm": 
2.553884983062744, + "layer_12_max_l1_linf_norm": 3.1180267333984375, + "layer_12_max_spectral_norm": 0.5017610192298889, + "total_sharpness": 0.000180545641342178, + "ip_v_neg_g": 0.007929024286568165, + "cos_v_neg_g": 0.0016944727394729853, + "v_norm": 11.032038688659668, + "g_norm": 0.4241596758365631, + "hv_norm": 0.16834968328475952, + "cos_v_hv": 0.011831245385110378, + "hg_norm": 3.2974963188171387, + "cos_g_hg": 0.4173028767108917, + "v_parallel_norm": 0.002438795519992709, + "v_perp_norm": 11.032037734985352, + "layer_1_v_norm": 2.598966598510742, + "layer_1_cos_v_neg_g": -0.0003554424038156867, + "layer_2_v_norm": 2.5683839321136475, + "layer_2_cos_v_neg_g": -0.0006512359832413495, + "layer_3_v_norm": 2.5806219577789307, + "layer_3_cos_v_neg_g": 1.3169404155632947e-05, + "layer_4_v_norm": 2.561830520629883, + "layer_4_cos_v_neg_g": -0.00027135893469676375, + "layer_5_v_norm": 2.5278892517089844, + "layer_5_cos_v_neg_g": -0.001104332972317934, + "layer_6_v_norm": 2.5843067169189453, + "layer_6_cos_v_neg_g": 0.0005459341919049621, + "layer_7_v_norm": 2.6424453258514404, + "layer_7_cos_v_neg_g": 0.0010215338552370667, + "layer_8_v_norm": 2.5796287059783936, + "layer_8_cos_v_neg_g": 0.003617140231654048, + "layer_9_v_norm": 2.5923454761505127, + "layer_9_cos_v_neg_g": 0.005113318562507629, + "layer_10_v_norm": 2.584707498550415, + "layer_10_cos_v_neg_g": 0.008138122037053108, + "layer_11_v_norm": 2.6217031478881836, + "layer_11_cos_v_neg_g": 0.01184616144746542, + "layer_12_v_norm": 2.553884983062744, + "layer_12_cos_v_neg_g": 0.015624839812517166, + "layer_1_sharpness": 3.686223499244079e-05, + "layer_2_sharpness": 1.0769762411655392e-05, + "layer_3_sharpness": 1.3068450243736152e-05, + "layer_4_sharpness": 1.8676604668144137e-05, + "layer_5_sharpness": 2.5263556381105445e-05, + "layer_6_sharpness": 1.3767265954811592e-05, + "layer_7_sharpness": 1.7572552678757347e-05, + "layer_8_sharpness": 4.244630690664053e-05, + "layer_9_sharpness": 6.208218110259622e-05, + "layer_10_sharpness": 7.154954073484987e-05, + "layer_11_sharpness": 7.030551205389202e-05, + "layer_12_sharpness": 0.0002344615204492584 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_1500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..083bac101e1cc8e64fa689d907f4399afbc589a2 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 9.912532806396484, + "total_l1_linf_norm": 86332.5625, + "total_spectral_norm": 9.9125337600708, + "layer_1_update_fnorm": 2.0502514839172363, + "layer_1_max_l1_linf_norm": 2.39587664604187, + "layer_1_max_spectral_norm": 0.35928115248680115, + "layer_2_update_fnorm": 2.13907527923584, + "layer_2_max_l1_linf_norm": 2.275629997253418, + "layer_2_max_spectral_norm": 0.26910528540611267, + "layer_3_update_fnorm": 2.069040060043335, + "layer_3_max_l1_linf_norm": 2.325525999069214, + "layer_3_max_spectral_norm": 0.250946044921875, + "layer_4_update_fnorm": 2.0408458709716797, + "layer_4_max_l1_linf_norm": 2.279287338256836, + "layer_4_max_spectral_norm": 0.2975994646549225, + "layer_5_update_fnorm": 2.1077353954315186, + "layer_5_max_l1_linf_norm": 2.261303663253784, + "layer_5_max_spectral_norm": 0.3585335910320282, + "layer_6_update_fnorm": 2.219980478286743, + "layer_6_max_l1_linf_norm": 
2.2789955139160156, + "layer_6_max_spectral_norm": 0.30522897839546204, + "layer_7_update_fnorm": 2.3111519813537598, + "layer_7_max_l1_linf_norm": 2.463653087615967, + "layer_7_max_spectral_norm": 0.27135515213012695, + "layer_8_update_fnorm": 2.248063325881958, + "layer_8_max_l1_linf_norm": 2.436189889907837, + "layer_8_max_spectral_norm": 0.2959287464618683, + "layer_9_update_fnorm": 2.229365348815918, + "layer_9_max_l1_linf_norm": 2.8490066528320312, + "layer_9_max_spectral_norm": 0.35986363887786865, + "layer_10_update_fnorm": 2.2055442333221436, + "layer_10_max_l1_linf_norm": 2.6593194007873535, + "layer_10_max_spectral_norm": 0.370064914226532, + "layer_11_update_fnorm": 2.242227792739868, + "layer_11_max_l1_linf_norm": 2.6802072525024414, + "layer_11_max_spectral_norm": 0.35889023542404175, + "layer_12_update_fnorm": 2.2215051651000977, + "layer_12_max_l1_linf_norm": 2.716306447982788, + "layer_12_max_spectral_norm": 0.38989871740341187, + "total_sharpness": 0.0007931676809675992, + "ip_v_neg_g": 0.03767399489879608, + "cos_v_neg_g": 0.00816737487912178, + "v_norm": 9.912532806396484, + "g_norm": 0.46534445881843567, + "hv_norm": 0.3540329933166504, + "cos_v_hv": 0.022207822650671005, + "hg_norm": 2.8379507064819336, + "cos_g_hg": 0.46551063656806946, + "v_parallel_norm": 0.004721136298030615, + "v_perp_norm": 9.912531852722168, + "layer_1_v_norm": 2.0502514839172363, + "layer_1_cos_v_neg_g": 0.038148682564496994, + "layer_2_v_norm": 2.13907527923584, + "layer_2_cos_v_neg_g": 0.018641000613570213, + "layer_3_v_norm": 2.069040060043335, + "layer_3_cos_v_neg_g": 0.03234035521745682, + "layer_4_v_norm": 2.0408458709716797, + "layer_4_cos_v_neg_g": 0.018910305574536324, + "layer_5_v_norm": 2.1077353954315186, + "layer_5_cos_v_neg_g": 0.016077997162938118, + "layer_6_v_norm": 2.219980478286743, + "layer_6_cos_v_neg_g": 0.011377894319593906, + "layer_7_v_norm": 2.3111519813537598, + "layer_7_cos_v_neg_g": 0.011374544352293015, + "layer_8_v_norm": 2.248063325881958, + "layer_8_cos_v_neg_g": 0.014872276224195957, + "layer_9_v_norm": 2.229365348815918, + "layer_9_cos_v_neg_g": 0.01567845791578293, + "layer_10_v_norm": 2.2055442333221436, + "layer_10_cos_v_neg_g": 0.015510737895965576, + "layer_11_v_norm": 2.242227792739868, + "layer_11_cos_v_neg_g": 0.017694618552923203, + "layer_12_v_norm": 2.2215051651000977, + "layer_12_cos_v_neg_g": 0.019845569506287575, + "layer_1_sharpness": 0.0008731743437238038, + "layer_2_sharpness": 0.00010128703434020281, + "layer_3_sharpness": 0.0004760108422487974, + "layer_4_sharpness": 0.00026392933796159923, + "layer_5_sharpness": 0.00015867788169998676, + "layer_6_sharpness": 6.098963785916567e-05, + "layer_7_sharpness": 7.377128349617124e-05, + "layer_8_sharpness": 0.00017613166710361838, + "layer_9_sharpness": 0.0002599433355499059, + "layer_10_sharpness": 0.00024685889366082847, + "layer_11_sharpness": 0.00022306836035568267, + "layer_12_sharpness": 0.0003723402041941881 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_2000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..970d154032e400e9c5d85d88c24babb0f6f06837 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.288349151611328, + "total_l1_linf_norm": 90019.5703125, + "total_spectral_norm": 
10.288347244262695, + "layer_1_update_fnorm": 2.3019936084747314, + "layer_1_max_l1_linf_norm": 3.022897720336914, + "layer_1_max_spectral_norm": 0.355823814868927, + "layer_2_update_fnorm": 2.2839088439941406, + "layer_2_max_l1_linf_norm": 2.398240566253662, + "layer_2_max_spectral_norm": 0.2509942054748535, + "layer_3_update_fnorm": 2.157331943511963, + "layer_3_max_l1_linf_norm": 2.4225013256073, + "layer_3_max_spectral_norm": 0.2543847858905792, + "layer_4_update_fnorm": 2.2026867866516113, + "layer_4_max_l1_linf_norm": 2.492269515991211, + "layer_4_max_spectral_norm": 0.3143415153026581, + "layer_5_update_fnorm": 2.256978988647461, + "layer_5_max_l1_linf_norm": 2.4646425247192383, + "layer_5_max_spectral_norm": 0.32777705788612366, + "layer_6_update_fnorm": 2.3579294681549072, + "layer_6_max_l1_linf_norm": 2.3976950645446777, + "layer_6_max_spectral_norm": 0.2727806270122528, + "layer_7_update_fnorm": 2.427180051803589, + "layer_7_max_l1_linf_norm": 2.6848318576812744, + "layer_7_max_spectral_norm": 0.31148380041122437, + "layer_8_update_fnorm": 2.3457846641540527, + "layer_8_max_l1_linf_norm": 2.840564250946045, + "layer_8_max_spectral_norm": 0.33296242356300354, + "layer_9_update_fnorm": 2.3355839252471924, + "layer_9_max_l1_linf_norm": 2.746367931365967, + "layer_9_max_spectral_norm": 0.3742099106311798, + "layer_10_update_fnorm": 2.309612989425659, + "layer_10_max_l1_linf_norm": 2.7991180419921875, + "layer_10_max_spectral_norm": 0.41181376576423645, + "layer_11_update_fnorm": 2.3682825565338135, + "layer_11_max_l1_linf_norm": 2.8844661712646484, + "layer_11_max_spectral_norm": 0.3951311409473419, + "layer_12_update_fnorm": 2.309662342071533, + "layer_12_max_l1_linf_norm": 3.0639095306396484, + "layer_12_max_spectral_norm": 0.4202518165111542, + "total_sharpness": 0.0006136025185696781, + "ip_v_neg_g": 0.035388845950365067, + "cos_v_neg_g": 0.006974559742957354, + "v_norm": 10.288349151611328, + "g_norm": 0.49317821860313416, + "hv_norm": 0.3207188546657562, + "cos_v_hv": 0.019683770835399628, + "hg_norm": 2.9035727977752686, + "cos_g_hg": 0.4556567072868347, + "v_parallel_norm": 0.0032921016681939363, + "v_perp_norm": 10.288348197937012, + "layer_1_v_norm": 2.3019936084747314, + "layer_1_cos_v_neg_g": 0.03153770789504051, + "layer_2_v_norm": 2.2839088439941406, + "layer_2_cos_v_neg_g": 0.009840693324804306, + "layer_3_v_norm": 2.157331705093384, + "layer_3_cos_v_neg_g": 0.011312523856759071, + "layer_4_v_norm": 2.2026867866516113, + "layer_4_cos_v_neg_g": 0.012455251067876816, + "layer_5_v_norm": 2.256978988647461, + "layer_5_cos_v_neg_g": 0.012596029788255692, + "layer_6_v_norm": 2.3579294681549072, + "layer_6_cos_v_neg_g": 0.010948739014565945, + "layer_7_v_norm": 2.427180051803589, + "layer_7_cos_v_neg_g": 0.009394554421305656, + "layer_8_v_norm": 2.3457846641540527, + "layer_8_cos_v_neg_g": 0.01194453239440918, + "layer_9_v_norm": 2.3355839252471924, + "layer_9_cos_v_neg_g": 0.014498438686132431, + "layer_10_v_norm": 2.309612989425659, + "layer_10_cos_v_neg_g": 0.017824161797761917, + "layer_11_v_norm": 2.3682825565338135, + "layer_11_cos_v_neg_g": 0.01719600148499012, + "layer_12_v_norm": 2.309662342071533, + "layer_12_cos_v_neg_g": 0.016964254900813103, + "layer_1_sharpness": 0.00047612827620469034, + "layer_2_sharpness": 3.705461494973861e-05, + "layer_3_sharpness": 8.45934555400163e-05, + "layer_4_sharpness": 8.856276690494269e-05, + "layer_5_sharpness": 9.900864824885502e-05, + "layer_6_sharpness": 6.297136860666797e-05, + "layer_7_sharpness": 6.009087883285247e-05, + 
"layer_8_sharpness": 0.00011956653906963766, + "layer_9_sharpness": 0.00020094896899536252, + "layer_10_sharpness": 0.0002548811899032444, + "layer_11_sharpness": 0.00023300718748942018, + "layer_12_sharpness": 0.0003629521233960986 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_2500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..698837bdb2ceef90000d62571ac2ae58b59aa677 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.408519744873047, + "total_l1_linf_norm": 91412.203125, + "total_spectral_norm": 10.408519744873047, + "layer_1_update_fnorm": 2.3520753383636475, + "layer_1_max_l1_linf_norm": 2.654740333557129, + "layer_1_max_spectral_norm": 0.3005821406841278, + "layer_2_update_fnorm": 2.3590304851531982, + "layer_2_max_l1_linf_norm": 2.5780577659606934, + "layer_2_max_spectral_norm": 0.2583797872066498, + "layer_3_update_fnorm": 2.278252124786377, + "layer_3_max_l1_linf_norm": 2.5802135467529297, + "layer_3_max_spectral_norm": 0.29757559299468994, + "layer_4_update_fnorm": 2.2856242656707764, + "layer_4_max_l1_linf_norm": 2.4462695121765137, + "layer_4_max_spectral_norm": 0.33028674125671387, + "layer_5_update_fnorm": 2.30487322807312, + "layer_5_max_l1_linf_norm": 2.462651252746582, + "layer_5_max_spectral_norm": 0.3293600082397461, + "layer_6_update_fnorm": 2.362299680709839, + "layer_6_max_l1_linf_norm": 2.404581308364868, + "layer_6_max_spectral_norm": 0.26919493079185486, + "layer_7_update_fnorm": 2.461510419845581, + "layer_7_max_l1_linf_norm": 2.5495059490203857, + "layer_7_max_spectral_norm": 0.27144280076026917, + "layer_8_update_fnorm": 2.3990557193756104, + "layer_8_max_l1_linf_norm": 2.694335460662842, + "layer_8_max_spectral_norm": 0.3163668215274811, + "layer_9_update_fnorm": 2.4110159873962402, + "layer_9_max_l1_linf_norm": 2.780512571334839, + "layer_9_max_spectral_norm": 0.3974562883377075, + "layer_10_update_fnorm": 2.3813648223876953, + "layer_10_max_l1_linf_norm": 2.9010846614837646, + "layer_10_max_spectral_norm": 0.4232676327228546, + "layer_11_update_fnorm": 2.4131224155426025, + "layer_11_max_l1_linf_norm": 3.0156960487365723, + "layer_11_max_spectral_norm": 0.38989895582199097, + "layer_12_update_fnorm": 2.3146302700042725, + "layer_12_max_l1_linf_norm": 2.9439339637756348, + "layer_12_max_spectral_norm": 0.40890082716941833, + "total_sharpness": 0.0006547432858496904, + "ip_v_neg_g": 0.043568696826696396, + "cos_v_neg_g": 0.008616896346211433, + "v_norm": 10.408519744873047, + "g_norm": 0.48577454686164856, + "hv_norm": 0.41031473875045776, + "cos_v_hv": 0.01660897769033909, + "hg_norm": 2.939622402191162, + "cos_g_hg": 0.5163513422012329, + "v_parallel_norm": 0.004184533841907978, + "v_perp_norm": 10.40851879119873, + "layer_1_v_norm": 2.3520753383636475, + "layer_1_cos_v_neg_g": 0.031136129051446915, + "layer_2_v_norm": 2.3590304851531982, + "layer_2_cos_v_neg_g": 0.010338223539292812, + "layer_3_v_norm": 2.278252363204956, + "layer_3_cos_v_neg_g": 0.018186122179031372, + "layer_4_v_norm": 2.2856242656707764, + "layer_4_cos_v_neg_g": 0.016610370948910713, + "layer_5_v_norm": 2.30487322807312, + "layer_5_cos_v_neg_g": 0.016050608828663826, + "layer_6_v_norm": 2.362299680709839, + "layer_6_cos_v_neg_g": 0.011679315939545631, + "layer_7_v_norm": 
2.461510419845581, + "layer_7_cos_v_neg_g": 0.012092442251741886, + "layer_8_v_norm": 2.3990557193756104, + "layer_8_cos_v_neg_g": 0.01643231138586998, + "layer_9_v_norm": 2.4110159873962402, + "layer_9_cos_v_neg_g": 0.019311506301164627, + "layer_10_v_norm": 2.3813648223876953, + "layer_10_cos_v_neg_g": 0.02070046216249466, + "layer_11_v_norm": 2.4131224155426025, + "layer_11_cos_v_neg_g": 0.020597385242581367, + "layer_12_v_norm": 2.3146302700042725, + "layer_12_cos_v_neg_g": 0.02309078350663185, + "layer_1_sharpness": 0.00034093548310920596, + "layer_2_sharpness": 4.1593986679799855e-05, + "layer_3_sharpness": 0.00014973795623518527, + "layer_4_sharpness": 0.0001135600105044432, + "layer_5_sharpness": 0.0001458392944186926, + "layer_6_sharpness": 6.584983930224553e-05, + "layer_7_sharpness": 7.860207551857457e-05, + "layer_8_sharpness": 0.0001393145794281736, + "layer_9_sharpness": 0.00021871387434657663, + "layer_10_sharpness": 0.0002481382107362151, + "layer_11_sharpness": 0.00020685253548435867, + "layer_12_sharpness": 0.00048117010737769306 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_3000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..6e6d7f4757c701ffa29d807c0d34ebce41af709b --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.640807151794434, + "total_l1_linf_norm": 93467.03125, + "total_spectral_norm": 10.640807151794434, + "layer_1_update_fnorm": 2.434699296951294, + "layer_1_max_l1_linf_norm": 2.5822205543518066, + "layer_1_max_spectral_norm": 0.3007369935512543, + "layer_2_update_fnorm": 2.3943397998809814, + "layer_2_max_l1_linf_norm": 2.5681824684143066, + "layer_2_max_spectral_norm": 0.25676438212394714, + "layer_3_update_fnorm": 2.322101593017578, + "layer_3_max_l1_linf_norm": 2.69942045211792, + "layer_3_max_spectral_norm": 0.2682972550392151, + "layer_4_update_fnorm": 2.3554816246032715, + "layer_4_max_l1_linf_norm": 2.601613998413086, + "layer_4_max_spectral_norm": 0.3084697425365448, + "layer_5_update_fnorm": 2.365025281906128, + "layer_5_max_l1_linf_norm": 2.445218086242676, + "layer_5_max_spectral_norm": 0.317781001329422, + "layer_6_update_fnorm": 2.43144154548645, + "layer_6_max_l1_linf_norm": 2.4403789043426514, + "layer_6_max_spectral_norm": 0.2669981122016907, + "layer_7_update_fnorm": 2.509073257446289, + "layer_7_max_l1_linf_norm": 2.6533517837524414, + "layer_7_max_spectral_norm": 0.2755717635154724, + "layer_8_update_fnorm": 2.4813339710235596, + "layer_8_max_l1_linf_norm": 2.835939407348633, + "layer_8_max_spectral_norm": 0.33373093605041504, + "layer_9_update_fnorm": 2.4772424697875977, + "layer_9_max_l1_linf_norm": 3.160310983657837, + "layer_9_max_spectral_norm": 0.38383790850639343, + "layer_10_update_fnorm": 2.4913265705108643, + "layer_10_max_l1_linf_norm": 2.9234867095947266, + "layer_10_max_spectral_norm": 0.4221661388874054, + "layer_11_update_fnorm": 2.552401065826416, + "layer_11_max_l1_linf_norm": 3.2478280067443848, + "layer_11_max_spectral_norm": 0.4145395755767822, + "layer_12_update_fnorm": 2.506828784942627, + "layer_12_max_l1_linf_norm": 3.4294846057891846, + "layer_12_max_spectral_norm": 0.479800820350647, + "total_sharpness": 0.00035575052606873214, + "ip_v_neg_g": 0.02195502631366253, + "cos_v_neg_g": 
0.0047998810186982155, + "v_norm": 10.640807151794434, + "g_norm": 0.429861843585968, + "hv_norm": 0.21983225643634796, + "cos_v_hv": 0.017219822853803635, + "hg_norm": 2.635772466659546, + "cos_g_hg": 0.47278451919555664, + "v_parallel_norm": 0.00408665556460619, + "v_perp_norm": 10.640806198120117, + "layer_1_v_norm": 2.434699296951294, + "layer_1_cos_v_neg_g": 0.015686195343732834, + "layer_2_v_norm": 2.3943397998809814, + "layer_2_cos_v_neg_g": 0.004075673874467611, + "layer_3_v_norm": 2.322101593017578, + "layer_3_cos_v_neg_g": 0.004699054639786482, + "layer_4_v_norm": 2.3554816246032715, + "layer_4_cos_v_neg_g": 0.005651318933814764, + "layer_5_v_norm": 2.365025281906128, + "layer_5_cos_v_neg_g": 0.006182925309985876, + "layer_6_v_norm": 2.43144154548645, + "layer_6_cos_v_neg_g": 0.00522434851154685, + "layer_7_v_norm": 2.509073257446289, + "layer_7_cos_v_neg_g": 0.0059939841739833355, + "layer_8_v_norm": 2.4813342094421387, + "layer_8_cos_v_neg_g": 0.009803648106753826, + "layer_9_v_norm": 2.4772424697875977, + "layer_9_cos_v_neg_g": 0.01201677042990923, + "layer_10_v_norm": 2.4913265705108643, + "layer_10_cos_v_neg_g": 0.015167638659477234, + "layer_11_v_norm": 2.552401065826416, + "layer_11_cos_v_neg_g": 0.014959355816245079, + "layer_12_v_norm": 2.506828784942627, + "layer_12_cos_v_neg_g": 0.021529268473386765, + "layer_1_sharpness": 0.00013157953799236566, + "layer_2_sharpness": 1.1984273442067206e-05, + "layer_3_sharpness": 1.4446773093368392e-05, + "layer_4_sharpness": 2.586789196357131e-05, + "layer_5_sharpness": 4.5127730118110776e-05, + "layer_6_sharpness": 3.8392994611058384e-05, + "layer_7_sharpness": 4.112342867301777e-05, + "layer_8_sharpness": 9.281932580051944e-05, + "layer_9_sharpness": 0.00013315789692569524, + "layer_10_sharpness": 0.00015591931878589094, + "layer_11_sharpness": 0.00013585227134171873, + "layer_12_sharpness": 0.00041708332719281316 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_3500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..f3295272e0e903ccf5d87fdcb54054708a96f28f --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.6151123046875, + "total_l1_linf_norm": 93390.8359375, + "total_spectral_norm": 10.6151123046875, + "layer_1_update_fnorm": 2.463484525680542, + "layer_1_max_l1_linf_norm": 2.5868582725524902, + "layer_1_max_spectral_norm": 0.2821098268032074, + "layer_2_update_fnorm": 2.4080560207366943, + "layer_2_max_l1_linf_norm": 2.541163206100464, + "layer_2_max_spectral_norm": 0.2559903860092163, + "layer_3_update_fnorm": 2.357696294784546, + "layer_3_max_l1_linf_norm": 2.547513484954834, + "layer_3_max_spectral_norm": 0.2747863829135895, + "layer_4_update_fnorm": 2.3637149333953857, + "layer_4_max_l1_linf_norm": 2.580780267715454, + "layer_4_max_spectral_norm": 0.3168995976448059, + "layer_5_update_fnorm": 2.3634896278381348, + "layer_5_max_l1_linf_norm": 2.4793310165405273, + "layer_5_max_spectral_norm": 0.3113875687122345, + "layer_6_update_fnorm": 2.4449894428253174, + "layer_6_max_l1_linf_norm": 2.6404356956481934, + "layer_6_max_spectral_norm": 0.2680272161960602, + "layer_7_update_fnorm": 2.527355909347534, + "layer_7_max_l1_linf_norm": 3.030724048614502, + "layer_7_max_spectral_norm": 0.3035317659378052, + 
"layer_8_update_fnorm": 2.4649438858032227, + "layer_8_max_l1_linf_norm": 3.1881096363067627, + "layer_8_max_spectral_norm": 0.35931485891342163, + "layer_9_update_fnorm": 2.469041347503662, + "layer_9_max_l1_linf_norm": 2.719470977783203, + "layer_9_max_spectral_norm": 0.3792484700679779, + "layer_10_update_fnorm": 2.4439809322357178, + "layer_10_max_l1_linf_norm": 2.7426939010620117, + "layer_10_max_spectral_norm": 0.3881489932537079, + "layer_11_update_fnorm": 2.4853689670562744, + "layer_11_max_l1_linf_norm": 2.7984256744384766, + "layer_11_max_spectral_norm": 0.37355080246925354, + "layer_12_update_fnorm": 2.374232530593872, + "layer_12_max_l1_linf_norm": 2.6342933177948, + "layer_12_max_spectral_norm": 0.4023936688899994, + "total_sharpness": 0.00030482670990750194, + "ip_v_neg_g": 0.019020909443497658, + "cos_v_neg_g": 0.003811858594417572, + "v_norm": 10.6151123046875, + "g_norm": 0.4700779914855957, + "hv_norm": 0.21311983466148376, + "cos_v_hv": 0.015182867646217346, + "hg_norm": 2.671250581741333, + "cos_g_hg": 0.4813729226589203, + "v_parallel_norm": 0.002044126857072115, + "v_perp_norm": 10.6151123046875, + "layer_1_v_norm": 2.463484525680542, + "layer_1_cos_v_neg_g": 0.014756598509848118, + "layer_2_v_norm": 2.4080560207366943, + "layer_2_cos_v_neg_g": 0.003932601306587458, + "layer_3_v_norm": 2.357696056365967, + "layer_3_cos_v_neg_g": 0.004211120307445526, + "layer_4_v_norm": 2.3637149333953857, + "layer_4_cos_v_neg_g": 0.004698419477790594, + "layer_5_v_norm": 2.3634896278381348, + "layer_5_cos_v_neg_g": 0.005352874286472797, + "layer_6_v_norm": 2.4449892044067383, + "layer_6_cos_v_neg_g": 0.005097164772450924, + "layer_7_v_norm": 2.527355909347534, + "layer_7_cos_v_neg_g": 0.006555821280926466, + "layer_8_v_norm": 2.4649436473846436, + "layer_8_cos_v_neg_g": 0.009129718877375126, + "layer_9_v_norm": 2.469041347503662, + "layer_9_cos_v_neg_g": 0.008582633920013905, + "layer_10_v_norm": 2.4439809322357178, + "layer_10_cos_v_neg_g": 0.011290477588772774, + "layer_11_v_norm": 2.4853689670562744, + "layer_11_cos_v_neg_g": 0.011905319057404995, + "layer_12_v_norm": 2.374232530593872, + "layer_12_cos_v_neg_g": 0.011730107478797436, + "layer_1_sharpness": 0.00010685731831472367, + "layer_2_sharpness": 9.460096407565288e-06, + "layer_3_sharpness": 2.0495901480899192e-05, + "layer_4_sharpness": 3.7124718801351264e-05, + "layer_5_sharpness": 6.108837987994775e-05, + "layer_6_sharpness": 3.604259109124541e-05, + "layer_7_sharpness": 4.5697703171754256e-05, + "layer_8_sharpness": 0.00010385664063505828, + "layer_9_sharpness": 0.00012597655586432666, + "layer_10_sharpness": 0.00012960036110598594, + "layer_11_sharpness": 0.00010722481238190085, + "layer_12_sharpness": 0.00024095734988804907 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_4000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..ab388a38732c1c53d146547e3dd5fcdcff2987dd --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.662513732910156, + "total_l1_linf_norm": 93954.8359375, + "total_spectral_norm": 10.662514686584473, + "layer_1_update_fnorm": 2.440528392791748, + "layer_1_max_l1_linf_norm": 2.5019357204437256, + "layer_1_max_spectral_norm": 0.29036808013916016, + "layer_2_update_fnorm": 2.469943046569824, + 
"layer_2_max_l1_linf_norm": 2.5833280086517334, + "layer_2_max_spectral_norm": 0.2754170894622803, + "layer_3_update_fnorm": 2.437042713165283, + "layer_3_max_l1_linf_norm": 2.6028923988342285, + "layer_3_max_spectral_norm": 0.3134225606918335, + "layer_4_update_fnorm": 2.402261257171631, + "layer_4_max_l1_linf_norm": 2.5452921390533447, + "layer_4_max_spectral_norm": 0.33975228667259216, + "layer_5_update_fnorm": 2.361348867416382, + "layer_5_max_l1_linf_norm": 2.400205612182617, + "layer_5_max_spectral_norm": 0.3141518831253052, + "layer_6_update_fnorm": 2.4430184364318848, + "layer_6_max_l1_linf_norm": 2.3900582790374756, + "layer_6_max_spectral_norm": 0.26199400424957275, + "layer_7_update_fnorm": 2.5303328037261963, + "layer_7_max_l1_linf_norm": 2.5803189277648926, + "layer_7_max_spectral_norm": 0.2744041383266449, + "layer_8_update_fnorm": 2.4605398178100586, + "layer_8_max_l1_linf_norm": 2.715113878250122, + "layer_8_max_spectral_norm": 0.3101756274700165, + "layer_9_update_fnorm": 2.475576162338257, + "layer_9_max_l1_linf_norm": 2.7983250617980957, + "layer_9_max_spectral_norm": 0.3913750648498535, + "layer_10_update_fnorm": 2.4649415016174316, + "layer_10_max_l1_linf_norm": 2.818850517272949, + "layer_10_max_spectral_norm": 0.4032765328884125, + "layer_11_update_fnorm": 2.4863240718841553, + "layer_11_max_l1_linf_norm": 2.904714822769165, + "layer_11_max_spectral_norm": 0.3745626211166382, + "layer_12_update_fnorm": 2.3833835124969482, + "layer_12_max_l1_linf_norm": 2.9557273387908936, + "layer_12_max_spectral_norm": 0.4130367934703827, + "total_sharpness": 0.0003727528383024037, + "ip_v_neg_g": 0.02356790378689766, + "cos_v_neg_g": 0.005387154407799244, + "v_norm": 10.662513732910156, + "g_norm": 0.4103003740310669, + "hv_norm": 0.2848755717277527, + "cos_v_hv": 0.013951643370091915, + "hg_norm": 2.811699390411377, + "cos_g_hg": 0.5016055703163147, + "v_parallel_norm": 0.003959129564464092, + "v_perp_norm": 10.66251277923584, + "layer_1_v_norm": 2.440528392791748, + "layer_1_cos_v_neg_g": 0.02004094608128071, + "layer_2_v_norm": 2.469943046569824, + "layer_2_cos_v_neg_g": 0.008579396642744541, + "layer_3_v_norm": 2.437042474746704, + "layer_3_cos_v_neg_g": 0.016200393438339233, + "layer_4_v_norm": 2.402261257171631, + "layer_4_cos_v_neg_g": 0.009577048011124134, + "layer_5_v_norm": 2.361348867416382, + "layer_5_cos_v_neg_g": 0.00657013151794672, + "layer_6_v_norm": 2.4430181980133057, + "layer_6_cos_v_neg_g": 0.005406621843576431, + "layer_7_v_norm": 2.5303328037261963, + "layer_7_cos_v_neg_g": 0.006502988748252392, + "layer_8_v_norm": 2.4605398178100586, + "layer_8_cos_v_neg_g": 0.008686703629791737, + "layer_9_v_norm": 2.475576162338257, + "layer_9_cos_v_neg_g": 0.011974257417023182, + "layer_10_v_norm": 2.4649415016174316, + "layer_10_cos_v_neg_g": 0.013661859557032585, + "layer_11_v_norm": 2.4863240718841553, + "layer_11_cos_v_neg_g": 0.013017399236559868, + "layer_12_v_norm": 2.3833835124969482, + "layer_12_cos_v_neg_g": 0.014156167395412922, + "layer_1_sharpness": 0.00020321772899478674, + "layer_2_sharpness": 5.473390410770662e-05, + "layer_3_sharpness": 0.00018854781228583306, + "layer_4_sharpness": 0.00011782661022152752, + "layer_5_sharpness": 6.295134517131373e-05, + "layer_6_sharpness": 3.455156183918007e-05, + "layer_7_sharpness": 3.986187948612496e-05, + "layer_8_sharpness": 8.626887574791908e-05, + "layer_9_sharpness": 0.0001495412871008739, + "layer_10_sharpness": 0.00014848308637738228, + "layer_11_sharpness": 0.00010011544509325176, + "layer_12_sharpness": 
0.0003027409256901592 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_4500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..b766998c25d307c95c6f23f308cd938f8035cd44 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.832443237304688, + "total_l1_linf_norm": 95505.40625, + "total_spectral_norm": 10.83244514465332, + "layer_1_update_fnorm": 2.540900945663452, + "layer_1_max_l1_linf_norm": 2.716062068939209, + "layer_1_max_spectral_norm": 0.3169368505477905, + "layer_2_update_fnorm": 2.5010666847229004, + "layer_2_max_l1_linf_norm": 2.619601249694824, + "layer_2_max_spectral_norm": 0.29517364501953125, + "layer_3_update_fnorm": 2.4606664180755615, + "layer_3_max_l1_linf_norm": 2.7233364582061768, + "layer_3_max_spectral_norm": 0.31792595982551575, + "layer_4_update_fnorm": 2.4619557857513428, + "layer_4_max_l1_linf_norm": 2.727252721786499, + "layer_4_max_spectral_norm": 0.3331766426563263, + "layer_5_update_fnorm": 2.4560208320617676, + "layer_5_max_l1_linf_norm": 2.5555386543273926, + "layer_5_max_spectral_norm": 0.2981284558773041, + "layer_6_update_fnorm": 2.4973883628845215, + "layer_6_max_l1_linf_norm": 2.4786906242370605, + "layer_6_max_spectral_norm": 0.2629282772541046, + "layer_7_update_fnorm": 2.5690085887908936, + "layer_7_max_l1_linf_norm": 2.697613477706909, + "layer_7_max_spectral_norm": 0.2919842004776001, + "layer_8_update_fnorm": 2.4908368587493896, + "layer_8_max_l1_linf_norm": 2.6904001235961914, + "layer_8_max_spectral_norm": 0.33534613251686096, + "layer_9_update_fnorm": 2.4909732341766357, + "layer_9_max_l1_linf_norm": 2.890852928161621, + "layer_9_max_spectral_norm": 0.38034895062446594, + "layer_10_update_fnorm": 2.4831366539001465, + "layer_10_max_l1_linf_norm": 2.834150791168213, + "layer_10_max_spectral_norm": 0.3971809446811676, + "layer_11_update_fnorm": 2.5376360416412354, + "layer_11_max_l1_linf_norm": 2.8100080490112305, + "layer_11_max_spectral_norm": 0.42142701148986816, + "layer_12_update_fnorm": 2.471438407897949, + "layer_12_max_l1_linf_norm": 2.9533019065856934, + "layer_12_max_spectral_norm": 0.440552681684494, + "total_sharpness": 0.00031397512066178024, + "ip_v_neg_g": 0.020116835832595825, + "cos_v_neg_g": 0.004613503348082304, + "v_norm": 10.832443237304688, + "g_norm": 0.4025338292121887, + "hv_norm": 0.2710968255996704, + "cos_v_hv": 0.012545767240226269, + "hg_norm": 2.430245876312256, + "cos_g_hg": 0.4599730968475342, + "v_parallel_norm": 0.003659273264929652, + "v_perp_norm": 10.832442283630371, + "layer_1_v_norm": 2.540900945663452, + "layer_1_cos_v_neg_g": 0.016796434298157692, + "layer_2_v_norm": 2.5010666847229004, + "layer_2_cos_v_neg_g": 0.005414952524006367, + "layer_3_v_norm": 2.4606666564941406, + "layer_3_cos_v_neg_g": 0.006607661955058575, + "layer_4_v_norm": 2.4619557857513428, + "layer_4_cos_v_neg_g": 0.006424614693969488, + "layer_5_v_norm": 2.4560208320617676, + "layer_5_cos_v_neg_g": 0.0069018141366541386, + "layer_6_v_norm": 2.4973883628845215, + "layer_6_cos_v_neg_g": 0.005427999887615442, + "layer_7_v_norm": 2.5690085887908936, + "layer_7_cos_v_neg_g": 0.00641341507434845, + "layer_8_v_norm": 2.4908368587493896, + "layer_8_cos_v_neg_g": 0.008163166232407093, + "layer_9_v_norm": 2.4909732341766357, + 
"layer_9_cos_v_neg_g": 0.010481749661266804, + "layer_10_v_norm": 2.4831366539001465, + "layer_10_cos_v_neg_g": 0.012747008353471756, + "layer_11_v_norm": 2.5376358032226562, + "layer_11_cos_v_neg_g": 0.013599623925983906, + "layer_12_v_norm": 2.471438407897949, + "layer_12_cos_v_neg_g": 0.01428859494626522, + "layer_1_sharpness": 0.0001249589113285765, + "layer_2_sharpness": 1.7415695765521377e-05, + "layer_3_sharpness": 3.161308995913714e-05, + "layer_4_sharpness": 3.997840758529492e-05, + "layer_5_sharpness": 5.020648677600548e-05, + "layer_6_sharpness": 2.5639135856181383e-05, + "layer_7_sharpness": 3.210503564332612e-05, + "layer_8_sharpness": 7.21966935088858e-05, + "layer_9_sharpness": 0.00011506073497002944, + "layer_10_sharpness": 0.00011486487346701324, + "layer_11_sharpness": 0.00011080983676947653, + "layer_12_sharpness": 0.0003293879854027182 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..3f06031c06b4b4387241f3dbe955c44245813f1e --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 6.324275970458984, + "total_l1_linf_norm": 53912.8125, + "total_spectral_norm": 6.324276447296143, + "layer_1_update_fnorm": 1.067452311515808, + "layer_1_max_l1_linf_norm": 1.9840431213378906, + "layer_1_max_spectral_norm": 0.30886000394821167, + "layer_2_update_fnorm": 1.1187175512313843, + "layer_2_max_l1_linf_norm": 1.252407431602478, + "layer_2_max_spectral_norm": 0.20231084525585175, + "layer_3_update_fnorm": 1.116664171218872, + "layer_3_max_l1_linf_norm": 1.2666174173355103, + "layer_3_max_spectral_norm": 0.20308411121368408, + "layer_4_update_fnorm": 1.1600477695465088, + "layer_4_max_l1_linf_norm": 1.3739326000213623, + "layer_4_max_spectral_norm": 0.20468401908874512, + "layer_5_update_fnorm": 1.2273463010787964, + "layer_5_max_l1_linf_norm": 1.4400205612182617, + "layer_5_max_spectral_norm": 0.24211707711219788, + "layer_6_update_fnorm": 1.2962430715560913, + "layer_6_max_l1_linf_norm": 1.4435756206512451, + "layer_6_max_spectral_norm": 0.26408466696739197, + "layer_7_update_fnorm": 1.374430775642395, + "layer_7_max_l1_linf_norm": 1.448999285697937, + "layer_7_max_spectral_norm": 0.2377752810716629, + "layer_8_update_fnorm": 1.4129459857940674, + "layer_8_max_l1_linf_norm": 1.4893217086791992, + "layer_8_max_spectral_norm": 0.24260962009429932, + "layer_9_update_fnorm": 1.4686375856399536, + "layer_9_max_l1_linf_norm": 1.7719430923461914, + "layer_9_max_spectral_norm": 0.23683875799179077, + "layer_10_update_fnorm": 1.481635332107544, + "layer_10_max_l1_linf_norm": 2.050588846206665, + "layer_10_max_spectral_norm": 0.25782644748687744, + "layer_11_update_fnorm": 1.4026474952697754, + "layer_11_max_l1_linf_norm": 2.3382110595703125, + "layer_11_max_spectral_norm": 0.2588537335395813, + "layer_12_update_fnorm": 1.3425406217575073, + "layer_12_max_l1_linf_norm": 2.4804115295410156, + "layer_12_max_spectral_norm": 0.2737457752227783, + "total_sharpness": 0.0035592711064964533, + "ip_v_neg_g": 0.08698448538780212, + "cos_v_neg_g": 0.0245169997215271, + "v_norm": 6.324275970458984, + "g_norm": 0.5610010623931885, + "hv_norm": 0.3980485200881958, + "cos_v_hv": 0.05655042454600334, + "hg_norm": 1.4608628749847412, + "cos_g_hg": 
0.45035168528556824, + "v_parallel_norm": 0.012172253802418709, + "v_perp_norm": 6.3242645263671875, + "layer_1_v_norm": 1.067452311515808, + "layer_1_cos_v_neg_g": 0.08775202184915543, + "layer_2_v_norm": 1.1187175512313843, + "layer_2_cos_v_neg_g": 0.08368474245071411, + "layer_3_v_norm": 1.1166642904281616, + "layer_3_cos_v_neg_g": 0.08571884781122208, + "layer_4_v_norm": 1.1600477695465088, + "layer_4_cos_v_neg_g": 0.07940199971199036, + "layer_5_v_norm": 1.2273463010787964, + "layer_5_cos_v_neg_g": 0.077145054936409, + "layer_6_v_norm": 1.2962430715560913, + "layer_6_cos_v_neg_g": 0.05655398592352867, + "layer_7_v_norm": 1.374430775642395, + "layer_7_cos_v_neg_g": 0.050352878868579865, + "layer_8_v_norm": 1.4129458665847778, + "layer_8_cos_v_neg_g": 0.05028756707906723, + "layer_9_v_norm": 1.4686375856399536, + "layer_9_cos_v_neg_g": 0.053936686366796494, + "layer_10_v_norm": 1.481635332107544, + "layer_10_cos_v_neg_g": 0.04397471621632576, + "layer_11_v_norm": 1.4026474952697754, + "layer_11_cos_v_neg_g": 0.04377233609557152, + "layer_12_v_norm": 1.3425406217575073, + "layer_12_cos_v_neg_g": 0.0543462298810482, + "layer_1_sharpness": 0.0140866469591856, + "layer_2_sharpness": 0.001063710544258356, + "layer_3_sharpness": 0.0014521778794005513, + "layer_4_sharpness": 0.001201861654408276, + "layer_5_sharpness": 0.0013594747288152575, + "layer_6_sharpness": 0.0003425873874220997, + "layer_7_sharpness": 0.0004480943316593766, + "layer_8_sharpness": 0.00042746574035845697, + "layer_9_sharpness": 0.0006174648297019303, + "layer_10_sharpness": 0.0005470554460771382, + "layer_11_sharpness": 0.00042131292866542935, + "layer_12_sharpness": 0.001122767454944551 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_5000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..6bfa11e34d8c42b3c7f60c314f58db06c19d701f --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.767459869384766, + "total_l1_linf_norm": 94850.046875, + "total_spectral_norm": 10.767461776733398, + "layer_1_update_fnorm": 2.5048317909240723, + "layer_1_max_l1_linf_norm": 2.5458741188049316, + "layer_1_max_spectral_norm": 0.29569554328918457, + "layer_2_update_fnorm": 2.4627482891082764, + "layer_2_max_l1_linf_norm": 2.5695815086364746, + "layer_2_max_spectral_norm": 0.275940477848053, + "layer_3_update_fnorm": 2.411391496658325, + "layer_3_max_l1_linf_norm": 2.629634380340576, + "layer_3_max_spectral_norm": 0.2772161662578583, + "layer_4_update_fnorm": 2.4280197620391846, + "layer_4_max_l1_linf_norm": 2.578273057937622, + "layer_4_max_spectral_norm": 0.33724433183670044, + "layer_5_update_fnorm": 2.428309679031372, + "layer_5_max_l1_linf_norm": 2.441462516784668, + "layer_5_max_spectral_norm": 0.28869494795799255, + "layer_6_update_fnorm": 2.516585350036621, + "layer_6_max_l1_linf_norm": 2.460200786590576, + "layer_6_max_spectral_norm": 0.2537359297275543, + "layer_7_update_fnorm": 2.5909206867218018, + "layer_7_max_l1_linf_norm": 2.768629789352417, + "layer_7_max_spectral_norm": 0.27820920944213867, + "layer_8_update_fnorm": 2.5085809230804443, + "layer_8_max_l1_linf_norm": 2.774839401245117, + "layer_8_max_spectral_norm": 0.3074953556060791, + "layer_9_update_fnorm": 2.5162744522094727, + "layer_9_max_l1_linf_norm": 
2.804276466369629, + "layer_9_max_spectral_norm": 0.3593945801258087, + "layer_10_update_fnorm": 2.4984512329101562, + "layer_10_max_l1_linf_norm": 3.230334520339966, + "layer_10_max_spectral_norm": 0.3717653751373291, + "layer_11_update_fnorm": 2.5246946811676025, + "layer_11_max_l1_linf_norm": 2.9516258239746094, + "layer_11_max_spectral_norm": 0.37846440076828003, + "layer_12_update_fnorm": 2.4217870235443115, + "layer_12_max_l1_linf_norm": 2.937711238861084, + "layer_12_max_spectral_norm": 0.4027349054813385, + "total_sharpness": 0.00020127990865148604, + "ip_v_neg_g": 0.015933843329548836, + "cos_v_neg_g": 0.002258692169561982, + "v_norm": 10.767459869384766, + "g_norm": 0.655164361000061, + "hv_norm": 0.17994049191474915, + "cos_v_hv": 0.0120443906635046, + "hg_norm": 9.164546012878418, + "cos_g_hg": 0.7648957967758179, + "v_parallel_norm": 0.0024024276062846184, + "v_perp_norm": 10.767459869384766, + "layer_1_v_norm": 2.5048317909240723, + "layer_1_cos_v_neg_g": 0.004320340696722269, + "layer_2_v_norm": 2.4627482891082764, + "layer_2_cos_v_neg_g": 0.0012998085003346205, + "layer_3_v_norm": 2.411391258239746, + "layer_3_cos_v_neg_g": 0.0032690048683434725, + "layer_4_v_norm": 2.4280197620391846, + "layer_4_cos_v_neg_g": 0.003530238289386034, + "layer_5_v_norm": 2.428309679031372, + "layer_5_cos_v_neg_g": 0.003198038786649704, + "layer_6_v_norm": 2.516585350036621, + "layer_6_cos_v_neg_g": 0.004927723668515682, + "layer_7_v_norm": 2.5909206867218018, + "layer_7_cos_v_neg_g": 0.0045733582228422165, + "layer_8_v_norm": 2.5085809230804443, + "layer_8_cos_v_neg_g": 0.0058828191831707954, + "layer_9_v_norm": 2.5162744522094727, + "layer_9_cos_v_neg_g": 0.006372060161083937, + "layer_10_v_norm": 2.4984512329101562, + "layer_10_cos_v_neg_g": 0.008699699304997921, + "layer_11_v_norm": 2.5246949195861816, + "layer_11_cos_v_neg_g": 0.009698447771370411, + "layer_12_v_norm": 2.4217870235443115, + "layer_12_cos_v_neg_g": 0.007564837578684092, + "layer_1_sharpness": 6.941193714737892e-05, + "layer_2_sharpness": 1.0473646398168057e-05, + "layer_3_sharpness": 1.1007158718712162e-05, + "layer_4_sharpness": 3.0303321182145737e-05, + "layer_5_sharpness": 3.219157952116802e-05, + "layer_6_sharpness": 2.696386945899576e-05, + "layer_7_sharpness": 2.2592863388126716e-05, + "layer_8_sharpness": 6.30674185231328e-05, + "layer_9_sharpness": 9.686357225291431e-05, + "layer_10_sharpness": 8.579748828196898e-05, + "layer_11_sharpness": 7.08673833287321e-05, + "layer_12_sharpness": 0.0002558420819696039 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_5500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..42faffd3ef7b8bdd47407303978b87b7bf302612 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.822486877441406, + "total_l1_linf_norm": 95523.59375, + "total_spectral_norm": 10.822486877441406, + "layer_1_update_fnorm": 2.527947425842285, + "layer_1_max_l1_linf_norm": 2.5469260215759277, + "layer_1_max_spectral_norm": 0.3128260672092438, + "layer_2_update_fnorm": 2.49151349067688, + "layer_2_max_l1_linf_norm": 2.5907976627349854, + "layer_2_max_spectral_norm": 0.28331634402275085, + "layer_3_update_fnorm": 2.458282947540283, + "layer_3_max_l1_linf_norm": 2.637281894683838, + "layer_3_max_spectral_norm": 
0.2858390212059021, + "layer_4_update_fnorm": 2.4497857093811035, + "layer_4_max_l1_linf_norm": 2.5425238609313965, + "layer_4_max_spectral_norm": 0.33425411581993103, + "layer_5_update_fnorm": 2.440844774246216, + "layer_5_max_l1_linf_norm": 2.444875478744507, + "layer_5_max_spectral_norm": 0.273785263299942, + "layer_6_update_fnorm": 2.501628875732422, + "layer_6_max_l1_linf_norm": 2.368929147720337, + "layer_6_max_spectral_norm": 0.23929859697818756, + "layer_7_update_fnorm": 2.5834624767303467, + "layer_7_max_l1_linf_norm": 2.7521257400512695, + "layer_7_max_spectral_norm": 0.27818965911865234, + "layer_8_update_fnorm": 2.5215189456939697, + "layer_8_max_l1_linf_norm": 2.6800425052642822, + "layer_8_max_spectral_norm": 0.31504037976264954, + "layer_9_update_fnorm": 2.536482095718384, + "layer_9_max_l1_linf_norm": 3.2514867782592773, + "layer_9_max_spectral_norm": 0.3712466359138489, + "layer_10_update_fnorm": 2.526071548461914, + "layer_10_max_l1_linf_norm": 2.907980442047119, + "layer_10_max_spectral_norm": 0.3828004002571106, + "layer_11_update_fnorm": 2.5562281608581543, + "layer_11_max_l1_linf_norm": 3.104245185852051, + "layer_11_max_spectral_norm": 0.4012663662433624, + "layer_12_update_fnorm": 2.4751148223876953, + "layer_12_max_l1_linf_norm": 3.3496718406677246, + "layer_12_max_spectral_norm": 0.458507239818573, + "total_sharpness": 0.0001716361875878647, + "ip_v_neg_g": 0.017374083399772644, + "cos_v_neg_g": 0.00213531288318336, + "v_norm": 10.822486877441406, + "g_norm": 0.7518190145492554, + "hv_norm": 0.24504691362380981, + "cos_v_hv": 0.007580304518342018, + "hg_norm": 4.141212463378906, + "cos_g_hg": 0.6639787554740906, + "v_parallel_norm": 0.0025397599674761295, + "v_perp_norm": 10.82248592376709, + "layer_1_v_norm": 2.527947425842285, + "layer_1_cos_v_neg_g": 0.004655706230551004, + "layer_2_v_norm": 2.49151349067688, + "layer_2_cos_v_neg_g": 0.002271259669214487, + "layer_3_v_norm": 2.4582831859588623, + "layer_3_cos_v_neg_g": 0.001840998767875135, + "layer_4_v_norm": 2.4497857093811035, + "layer_4_cos_v_neg_g": 0.0025243291165679693, + "layer_5_v_norm": 2.440844774246216, + "layer_5_cos_v_neg_g": 0.0037522597704082727, + "layer_6_v_norm": 2.5016286373138428, + "layer_6_cos_v_neg_g": 0.003953663166612387, + "layer_7_v_norm": 2.5834624767303467, + "layer_7_cos_v_neg_g": 0.0025715476367622614, + "layer_8_v_norm": 2.5215189456939697, + "layer_8_cos_v_neg_g": 0.004860227461904287, + "layer_9_v_norm": 2.536482095718384, + "layer_9_cos_v_neg_g": 0.00571038294583559, + "layer_10_v_norm": 2.526071548461914, + "layer_10_cos_v_neg_g": 0.007617473602294922, + "layer_11_v_norm": 2.5562281608581543, + "layer_11_cos_v_neg_g": 0.007983637042343616, + "layer_12_v_norm": 2.4751148223876953, + "layer_12_cos_v_neg_g": 0.009635427966713905, + "layer_1_sharpness": 3.854012174997479e-05, + "layer_2_sharpness": 6.465268597821705e-06, + "layer_3_sharpness": 1.382027767249383e-05, + "layer_4_sharpness": 1.437885202903999e-05, + "layer_5_sharpness": 3.085141725023277e-05, + "layer_6_sharpness": 1.8415485101286322e-05, + "layer_7_sharpness": 1.8209546396974474e-05, + "layer_8_sharpness": 4.264567178324796e-05, + "layer_9_sharpness": 5.609961590380408e-05, + "layer_10_sharpness": 6.828517507528886e-05, + "layer_11_sharpness": 6.828260666225106e-05, + "layer_12_sharpness": 0.0002766582474578172 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_6000.json 
b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..d61b4f817e63b54327140c8a1692b66a9f85c84f --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.866066932678223, + "total_l1_linf_norm": 95931.125, + "total_spectral_norm": 10.866067886352539, + "layer_1_update_fnorm": 2.5473573207855225, + "layer_1_max_l1_linf_norm": 2.550948143005371, + "layer_1_max_spectral_norm": 0.3042279779911041, + "layer_2_update_fnorm": 2.4984774589538574, + "layer_2_max_l1_linf_norm": 2.580806016921997, + "layer_2_max_spectral_norm": 0.28732800483703613, + "layer_3_update_fnorm": 2.4825587272644043, + "layer_3_max_l1_linf_norm": 2.6925294399261475, + "layer_3_max_spectral_norm": 0.2889402210712433, + "layer_4_update_fnorm": 2.4803099632263184, + "layer_4_max_l1_linf_norm": 2.619616985321045, + "layer_4_max_spectral_norm": 0.33385664224624634, + "layer_5_update_fnorm": 2.4630558490753174, + "layer_5_max_l1_linf_norm": 2.4795632362365723, + "layer_5_max_spectral_norm": 0.2773365378379822, + "layer_6_update_fnorm": 2.543287515640259, + "layer_6_max_l1_linf_norm": 2.4672000408172607, + "layer_6_max_spectral_norm": 0.2577938139438629, + "layer_7_update_fnorm": 2.609238862991333, + "layer_7_max_l1_linf_norm": 2.6962904930114746, + "layer_7_max_spectral_norm": 0.28328242897987366, + "layer_8_update_fnorm": 2.5400702953338623, + "layer_8_max_l1_linf_norm": 2.8364267349243164, + "layer_8_max_spectral_norm": 0.36686766147613525, + "layer_9_update_fnorm": 2.5482003688812256, + "layer_9_max_l1_linf_norm": 2.816140651702881, + "layer_9_max_spectral_norm": 0.3717193305492401, + "layer_10_update_fnorm": 2.5345916748046875, + "layer_10_max_l1_linf_norm": 2.941295623779297, + "layer_10_max_spectral_norm": 0.38293221592903137, + "layer_11_update_fnorm": 2.553109884262085, + "layer_11_max_l1_linf_norm": 2.8895599842071533, + "layer_11_max_spectral_norm": 0.38090434670448303, + "layer_12_update_fnorm": 2.4569599628448486, + "layer_12_max_l1_linf_norm": 2.778067111968994, + "layer_12_max_spectral_norm": 0.4227031469345093, + "total_sharpness": 0.000201734495931305, + "ip_v_neg_g": 0.011670950800180435, + "cos_v_neg_g": 0.002790820086374879, + "v_norm": 10.866066932678223, + "g_norm": 0.3848593235015869, + "hv_norm": 0.1646740436553955, + "cos_v_hv": 0.01331151183694601, + "hg_norm": 2.387467861175537, + "cos_g_hg": 0.5035554766654968, + "v_parallel_norm": 0.0035307235084474087, + "v_perp_norm": 10.866065979003906, + "layer_1_v_norm": 2.5473573207855225, + "layer_1_cos_v_neg_g": 0.007451446261256933, + "layer_2_v_norm": 2.4984774589538574, + "layer_2_cos_v_neg_g": 0.002323053777217865, + "layer_3_v_norm": 2.4825587272644043, + "layer_3_cos_v_neg_g": 0.003012038068845868, + "layer_4_v_norm": 2.4803099632263184, + "layer_4_cos_v_neg_g": 0.0038832081481814384, + "layer_5_v_norm": 2.4630558490753174, + "layer_5_cos_v_neg_g": 0.004757123999297619, + "layer_6_v_norm": 2.543287515640259, + "layer_6_cos_v_neg_g": 0.0032629158813506365, + "layer_7_v_norm": 2.609238862991333, + "layer_7_cos_v_neg_g": 0.004204247146844864, + "layer_8_v_norm": 2.540070056915283, + "layer_8_cos_v_neg_g": 0.00598188815638423, + "layer_9_v_norm": 2.5482003688812256, + "layer_9_cos_v_neg_g": 0.006494240369647741, + "layer_10_v_norm": 2.5345916748046875, + "layer_10_cos_v_neg_g": 0.006376140285283327, + "layer_11_v_norm": 2.553109884262085, + 
"layer_11_cos_v_neg_g": 0.008263004012405872, + "layer_12_v_norm": 2.4569599628448486, + "layer_12_cos_v_neg_g": 0.017363013699650764, + "layer_1_sharpness": 7.731409277766943e-05, + "layer_2_sharpness": 1.1395328328944743e-05, + "layer_3_sharpness": 1.1334309419908095e-05, + "layer_4_sharpness": 2.3606484319316223e-05, + "layer_5_sharpness": 3.3692609576974064e-05, + "layer_6_sharpness": 1.677844011283014e-05, + "layer_7_sharpness": 2.9002903829677962e-05, + "layer_8_sharpness": 7.958782225614414e-05, + "layer_9_sharpness": 8.64535104483366e-05, + "layer_10_sharpness": 8.637538849143311e-05, + "layer_11_sharpness": 6.559060420840979e-05, + "layer_12_sharpness": 0.00027714853058569133 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_6500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..47ee1eebac283eaac3683ebe5f82697108b343c8 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 11.029056549072266, + "total_l1_linf_norm": 97578.234375, + "total_spectral_norm": 11.02905559539795, + "layer_1_update_fnorm": 2.6315112113952637, + "layer_1_max_l1_linf_norm": 2.6796762943267822, + "layer_1_max_spectral_norm": 0.3359679579734802, + "layer_2_update_fnorm": 2.56453537940979, + "layer_2_max_l1_linf_norm": 2.6438117027282715, + "layer_2_max_spectral_norm": 0.31154415011405945, + "layer_3_update_fnorm": 2.5484988689422607, + "layer_3_max_l1_linf_norm": 2.737588405609131, + "layer_3_max_spectral_norm": 0.30991989374160767, + "layer_4_update_fnorm": 2.527681350708008, + "layer_4_max_l1_linf_norm": 2.7411623001098633, + "layer_4_max_spectral_norm": 0.33705875277519226, + "layer_5_update_fnorm": 2.525348424911499, + "layer_5_max_l1_linf_norm": 2.6035075187683105, + "layer_5_max_spectral_norm": 0.2806074917316437, + "layer_6_update_fnorm": 2.579068660736084, + "layer_6_max_l1_linf_norm": 2.480167865753174, + "layer_6_max_spectral_norm": 0.2550855576992035, + "layer_7_update_fnorm": 2.633012294769287, + "layer_7_max_l1_linf_norm": 2.687986135482788, + "layer_7_max_spectral_norm": 0.29223909974098206, + "layer_8_update_fnorm": 2.570258140563965, + "layer_8_max_l1_linf_norm": 2.830946445465088, + "layer_8_max_spectral_norm": 0.3372350335121155, + "layer_9_update_fnorm": 2.5626838207244873, + "layer_9_max_l1_linf_norm": 2.741884231567383, + "layer_9_max_spectral_norm": 0.3778058588504791, + "layer_10_update_fnorm": 2.565047264099121, + "layer_10_max_l1_linf_norm": 2.9872684478759766, + "layer_10_max_spectral_norm": 0.39252135157585144, + "layer_11_update_fnorm": 2.59379506111145, + "layer_11_max_l1_linf_norm": 2.946896553039551, + "layer_11_max_spectral_norm": 0.37819868326187134, + "layer_12_update_fnorm": 2.5307445526123047, + "layer_12_max_l1_linf_norm": 2.966371536254883, + "layer_12_max_spectral_norm": 0.45224687457084656, + "total_sharpness": 0.00024052395019680262, + "ip_v_neg_g": 0.013728838413953781, + "cos_v_neg_g": 0.0030671488493680954, + "v_norm": 11.029056549072266, + "g_norm": 0.4058453440666199, + "hv_norm": 0.22899886965751648, + "cos_v_hv": 0.011584129184484482, + "hg_norm": 2.5299293994903564, + "cos_g_hg": 0.4983285963535309, + "v_parallel_norm": 0.003545205807313323, + "v_perp_norm": 11.029056549072266, + "layer_1_v_norm": 2.6315112113952637, + "layer_1_cos_v_neg_g": 
0.00761040672659874, + "layer_2_v_norm": 2.56453537940979, + "layer_2_cos_v_neg_g": 0.0025765765458345413, + "layer_3_v_norm": 2.54849910736084, + "layer_3_cos_v_neg_g": 0.002813741797581315, + "layer_4_v_norm": 2.527681350708008, + "layer_4_cos_v_neg_g": 0.0034027695655822754, + "layer_5_v_norm": 2.525348424911499, + "layer_5_cos_v_neg_g": 0.004427982959896326, + "layer_6_v_norm": 2.579068422317505, + "layer_6_cos_v_neg_g": 0.004025065805763006, + "layer_7_v_norm": 2.633012294769287, + "layer_7_cos_v_neg_g": 0.003684495110064745, + "layer_8_v_norm": 2.570258378982544, + "layer_8_cos_v_neg_g": 0.006126052234321833, + "layer_9_v_norm": 2.5626838207244873, + "layer_9_cos_v_neg_g": 0.008808002807199955, + "layer_10_v_norm": 2.565047264099121, + "layer_10_cos_v_neg_g": 0.010915289632976055, + "layer_11_v_norm": 2.59379506111145, + "layer_11_cos_v_neg_g": 0.01158411055803299, + "layer_12_v_norm": 2.5307445526123047, + "layer_12_cos_v_neg_g": 0.016670703887939453, + "layer_1_sharpness": 6.445332110160962e-05, + "layer_2_sharpness": 1.7411108274245635e-05, + "layer_3_sharpness": 2.948529254354071e-05, + "layer_4_sharpness": 2.8220150852575898e-05, + "layer_5_sharpness": 4.390387039165944e-05, + "layer_6_sharpness": 2.262281850562431e-05, + "layer_7_sharpness": 3.292474866611883e-05, + "layer_8_sharpness": 6.546185613842681e-05, + "layer_9_sharpness": 9.580609912518412e-05, + "layer_10_sharpness": 0.00010909344564424828, + "layer_11_sharpness": 7.564410771010444e-05, + "layer_12_sharpness": 0.00029678549617528915 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_7000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..cd38ca633d8c11e8b0a9558baba50c98846c512a --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.99418830871582, + "total_l1_linf_norm": 97266.125, + "total_spectral_norm": 10.994187355041504, + "layer_1_update_fnorm": 2.5691354274749756, + "layer_1_max_l1_linf_norm": 2.630760669708252, + "layer_1_max_spectral_norm": 0.33259305357933044, + "layer_2_update_fnorm": 2.572232723236084, + "layer_2_max_l1_linf_norm": 2.6984713077545166, + "layer_2_max_spectral_norm": 0.31373563408851624, + "layer_3_update_fnorm": 2.5237700939178467, + "layer_3_max_l1_linf_norm": 2.6431007385253906, + "layer_3_max_spectral_norm": 0.3026607632637024, + "layer_4_update_fnorm": 2.5223894119262695, + "layer_4_max_l1_linf_norm": 2.613743782043457, + "layer_4_max_spectral_norm": 0.34163787961006165, + "layer_5_update_fnorm": 2.4992971420288086, + "layer_5_max_l1_linf_norm": 2.500960350036621, + "layer_5_max_spectral_norm": 0.278647780418396, + "layer_6_update_fnorm": 2.5719285011291504, + "layer_6_max_l1_linf_norm": 2.501634120941162, + "layer_6_max_spectral_norm": 0.26576200127601624, + "layer_7_update_fnorm": 2.6334781646728516, + "layer_7_max_l1_linf_norm": 2.7534866333007812, + "layer_7_max_spectral_norm": 0.2841494679450989, + "layer_8_update_fnorm": 2.568127393722534, + "layer_8_max_l1_linf_norm": 2.7862884998321533, + "layer_8_max_spectral_norm": 0.3387257158756256, + "layer_9_update_fnorm": 2.573880672454834, + "layer_9_max_l1_linf_norm": 2.8544323444366455, + "layer_9_max_spectral_norm": 0.3848297894001007, + "layer_10_update_fnorm": 2.5648183822631836, + "layer_10_max_l1_linf_norm": 2.990593671798706, 
+ "layer_10_max_spectral_norm": 0.4201887249946594, + "layer_11_update_fnorm": 2.588543176651001, + "layer_11_max_l1_linf_norm": 2.932436943054199, + "layer_11_max_spectral_norm": 0.39430224895477295, + "layer_12_update_fnorm": 2.5209431648254395, + "layer_12_max_l1_linf_norm": 3.072686195373535, + "layer_12_max_spectral_norm": 0.4518556594848633, + "total_sharpness": 0.00022153423924464732, + "ip_v_neg_g": 0.014425965026021004, + "cos_v_neg_g": 0.003395980456843972, + "v_norm": 10.99418830871582, + "g_norm": 0.38638168573379517, + "hv_norm": 0.24757201969623566, + "cos_v_hv": 0.009837901219725609, + "hg_norm": 2.2461252212524414, + "cos_g_hg": 0.45277345180511475, + "v_parallel_norm": 0.0032855842728167772, + "v_perp_norm": 10.99418830871582, + "layer_1_v_norm": 2.5691354274749756, + "layer_1_cos_v_neg_g": 0.011260700412094593, + "layer_2_v_norm": 2.572232723236084, + "layer_2_cos_v_neg_g": 0.004389453213661909, + "layer_3_v_norm": 2.5237700939178467, + "layer_3_cos_v_neg_g": 0.003954247571527958, + "layer_4_v_norm": 2.5223894119262695, + "layer_4_cos_v_neg_g": 0.004779375623911619, + "layer_5_v_norm": 2.4992971420288086, + "layer_5_cos_v_neg_g": 0.0032465197145938873, + "layer_6_v_norm": 2.5719287395477295, + "layer_6_cos_v_neg_g": 0.004640203434973955, + "layer_7_v_norm": 2.6334781646728516, + "layer_7_cos_v_neg_g": 0.0043799071572721004, + "layer_8_v_norm": 2.568127393722534, + "layer_8_cos_v_neg_g": 0.006085855420678854, + "layer_9_v_norm": 2.573880672454834, + "layer_9_cos_v_neg_g": 0.00817311741411686, + "layer_10_v_norm": 2.5648183822631836, + "layer_10_cos_v_neg_g": 0.010066046379506588, + "layer_11_v_norm": 2.588543176651001, + "layer_11_cos_v_neg_g": 0.011011222377419472, + "layer_12_v_norm": 2.5209431648254395, + "layer_12_cos_v_neg_g": 0.01634683832526207, + "layer_1_sharpness": 6.50836227578111e-05, + "layer_2_sharpness": 1.6008330931072123e-05, + "layer_3_sharpness": 1.780995080480352e-05, + "layer_4_sharpness": 3.0936047551222146e-05, + "layer_5_sharpness": 3.0784827686147764e-05, + "layer_6_sharpness": 1.9986933693871833e-05, + "layer_7_sharpness": 3.00594274449395e-05, + "layer_8_sharpness": 5.198858707444742e-05, + "layer_9_sharpness": 8.127062028506771e-05, + "layer_10_sharpness": 9.707854042062536e-05, + "layer_11_sharpness": 7.62521885917522e-05, + "layer_12_sharpness": 0.0003058522706851363 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_7500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..16cffe16026041cb7c8226adfcdd149d1c3f1f44 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 11.048707008361816, + "total_l1_linf_norm": 97727.5703125, + "total_spectral_norm": 11.048707008361816, + "layer_1_update_fnorm": 2.605064868927002, + "layer_1_max_l1_linf_norm": 2.6172633171081543, + "layer_1_max_spectral_norm": 0.3482551574707031, + "layer_2_update_fnorm": 2.5752854347229004, + "layer_2_max_l1_linf_norm": 2.6850385665893555, + "layer_2_max_spectral_norm": 0.31660133600234985, + "layer_3_update_fnorm": 2.5698435306549072, + "layer_3_max_l1_linf_norm": 2.6917176246643066, + "layer_3_max_spectral_norm": 0.31499844789505005, + "layer_4_update_fnorm": 2.536696195602417, + "layer_4_max_l1_linf_norm": 2.710818290710449, + "layer_4_max_spectral_norm": 0.34030815958976746, 
+ "layer_5_update_fnorm": 2.521083354949951, + "layer_5_max_l1_linf_norm": 2.6162915229797363, + "layer_5_max_spectral_norm": 0.27279147505760193, + "layer_6_update_fnorm": 2.5900557041168213, + "layer_6_max_l1_linf_norm": 2.577810049057007, + "layer_6_max_spectral_norm": 0.2513599097728729, + "layer_7_update_fnorm": 2.6552326679229736, + "layer_7_max_l1_linf_norm": 2.74672269821167, + "layer_7_max_spectral_norm": 0.2841908633708954, + "layer_8_update_fnorm": 2.591564416885376, + "layer_8_max_l1_linf_norm": 2.7805685997009277, + "layer_8_max_spectral_norm": 0.3404390513896942, + "layer_9_update_fnorm": 2.590656280517578, + "layer_9_max_l1_linf_norm": 3.0107593536376953, + "layer_9_max_spectral_norm": 0.39213982224464417, + "layer_10_update_fnorm": 2.5848419666290283, + "layer_10_max_l1_linf_norm": 2.9113924503326416, + "layer_10_max_spectral_norm": 0.4059130549430847, + "layer_11_update_fnorm": 2.6047379970550537, + "layer_11_max_l1_linf_norm": 2.850111484527588, + "layer_11_max_spectral_norm": 0.3893303871154785, + "layer_12_update_fnorm": 2.5359411239624023, + "layer_12_max_l1_linf_norm": 2.8467607498168945, + "layer_12_max_spectral_norm": 0.42490482330322266, + "total_sharpness": 0.00018629408441483974, + "ip_v_neg_g": 0.00883820466697216, + "cos_v_neg_g": 0.0018894609529525042, + "v_norm": 11.048707008361816, + "g_norm": 0.4233647286891937, + "hv_norm": 0.19814160466194153, + "cos_v_hv": 0.010388068854808807, + "hg_norm": 2.9576187133789062, + "cos_g_hg": 0.4878533184528351, + "v_parallel_norm": 0.0014743995852768421, + "v_perp_norm": 11.048707008361816, + "layer_1_v_norm": 2.605064868927002, + "layer_1_cos_v_neg_g": 0.0034777061082422733, + "layer_2_v_norm": 2.5752854347229004, + "layer_2_cos_v_neg_g": 0.0014132953947409987, + "layer_3_v_norm": 2.5698437690734863, + "layer_3_cos_v_neg_g": 0.0017302428605034947, + "layer_4_v_norm": 2.536696195602417, + "layer_4_cos_v_neg_g": 0.002503889612853527, + "layer_5_v_norm": 2.521083354949951, + "layer_5_cos_v_neg_g": 0.0038778844755142927, + "layer_6_v_norm": 2.590055465698242, + "layer_6_cos_v_neg_g": 0.0025660148821771145, + "layer_7_v_norm": 2.6552326679229736, + "layer_7_cos_v_neg_g": 0.002768442500382662, + "layer_8_v_norm": 2.591564416885376, + "layer_8_cos_v_neg_g": 0.0042822654359042645, + "layer_9_v_norm": 2.590656280517578, + "layer_9_cos_v_neg_g": 0.0062148976139724255, + "layer_10_v_norm": 2.5848419666290283, + "layer_10_cos_v_neg_g": 0.006996415089815855, + "layer_11_v_norm": 2.604738235473633, + "layer_11_cos_v_neg_g": 0.006655052304267883, + "layer_12_v_norm": 2.5359411239624023, + "layer_12_cos_v_neg_g": 0.006790472660213709, + "layer_1_sharpness": 3.9903668948682025e-05, + "layer_2_sharpness": 6.83598864270607e-06, + "layer_3_sharpness": 1.0359552106820047e-05, + "layer_4_sharpness": 2.5853008992271498e-05, + "layer_5_sharpness": 4.1556271753506735e-05, + "layer_6_sharpness": 1.820293618948199e-05, + "layer_7_sharpness": 2.411637979093939e-05, + "layer_8_sharpness": 6.494065746665001e-05, + "layer_9_sharpness": 8.380110375583172e-05, + "layer_10_sharpness": 9.84784055617638e-05, + "layer_11_sharpness": 7.756937702652067e-05, + "layer_12_sharpness": 0.00020777661120519042 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_8000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..1fe581dceac4b937417f59798cb9d361965b6f7a --- /dev/null 
+++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 11.018402099609375, + "total_l1_linf_norm": 97537.953125, + "total_spectral_norm": 11.018403053283691, + "layer_1_update_fnorm": 2.61287522315979, + "layer_1_max_l1_linf_norm": 2.590320348739624, + "layer_1_max_spectral_norm": 0.3420000374317169, + "layer_2_update_fnorm": 2.5843608379364014, + "layer_2_max_l1_linf_norm": 2.632929563522339, + "layer_2_max_spectral_norm": 0.31608977913856506, + "layer_3_update_fnorm": 2.583656072616577, + "layer_3_max_l1_linf_norm": 2.676370620727539, + "layer_3_max_spectral_norm": 0.31158044934272766, + "layer_4_update_fnorm": 2.5508856773376465, + "layer_4_max_l1_linf_norm": 2.6183276176452637, + "layer_4_max_spectral_norm": 0.3366183638572693, + "layer_5_update_fnorm": 2.5376062393188477, + "layer_5_max_l1_linf_norm": 2.5269088745117188, + "layer_5_max_spectral_norm": 0.2776784300804138, + "layer_6_update_fnorm": 2.5818755626678467, + "layer_6_max_l1_linf_norm": 2.4511494636535645, + "layer_6_max_spectral_norm": 0.25094714760780334, + "layer_7_update_fnorm": 2.643493890762329, + "layer_7_max_l1_linf_norm": 2.7427122592926025, + "layer_7_max_spectral_norm": 0.2758033871650696, + "layer_8_update_fnorm": 2.5851473808288574, + "layer_8_max_l1_linf_norm": 2.720241069793701, + "layer_8_max_spectral_norm": 0.3248271942138672, + "layer_9_update_fnorm": 2.5829882621765137, + "layer_9_max_l1_linf_norm": 3.1220545768737793, + "layer_9_max_spectral_norm": 0.3633964955806732, + "layer_10_update_fnorm": 2.573756217956543, + "layer_10_max_l1_linf_norm": 2.9093761444091797, + "layer_10_max_spectral_norm": 0.38045862317085266, + "layer_11_update_fnorm": 2.591094732284546, + "layer_11_max_l1_linf_norm": 2.9082815647125244, + "layer_11_max_spectral_norm": 0.365648090839386, + "layer_12_update_fnorm": 2.511073350906372, + "layer_12_max_l1_linf_norm": 2.939089059829712, + "layer_12_max_spectral_norm": 0.4068818986415863, + "total_sharpness": 0.00017237629799637944, + "ip_v_neg_g": 0.010224214754998684, + "cos_v_neg_g": 0.002376553835347295, + "v_norm": 11.018402099609375, + "g_norm": 0.3904484510421753, + "hv_norm": 0.1982458084821701, + "cos_v_hv": 0.009580588899552822, + "hg_norm": 3.0261597633361816, + "cos_g_hg": 0.4452417492866516, + "v_parallel_norm": 0.0030573972035199404, + "v_perp_norm": 11.018402099609375, + "layer_1_v_norm": 2.61287522315979, + "layer_1_cos_v_neg_g": 0.009861363098025322, + "layer_2_v_norm": 2.5843608379364014, + "layer_2_cos_v_neg_g": 0.0048558954149484634, + "layer_3_v_norm": 2.583656072616577, + "layer_3_cos_v_neg_g": 0.007276113145053387, + "layer_4_v_norm": 2.5508856773376465, + "layer_4_cos_v_neg_g": 0.006122508551925421, + "layer_5_v_norm": 2.5376062393188477, + "layer_5_cos_v_neg_g": 0.004569969139993191, + "layer_6_v_norm": 2.5818753242492676, + "layer_6_cos_v_neg_g": 0.0021465937606990337, + "layer_7_v_norm": 2.643493890762329, + "layer_7_cos_v_neg_g": 0.0026493454352021217, + "layer_8_v_norm": 2.5851473808288574, + "layer_8_cos_v_neg_g": 0.0024306359700858593, + "layer_9_v_norm": 2.5829882621765137, + "layer_9_cos_v_neg_g": 0.0028600196819752455, + "layer_10_v_norm": 2.573756217956543, + "layer_10_cos_v_neg_g": 0.005472567863762379, + "layer_11_v_norm": 2.591094732284546, + "layer_11_cos_v_neg_g": 0.004975974094122648, + "layer_12_v_norm": 2.511073350906372, + "layer_12_cos_v_neg_g": 0.009667824022471905, + "layer_1_sharpness": 8.387486741412431e-05, + "layer_2_sharpness": 
2.2824417101219296e-05, + "layer_3_sharpness": 3.806317181442864e-05, + "layer_4_sharpness": 4.3284129787934944e-05, + "layer_5_sharpness": 3.467394344625063e-05, + "layer_6_sharpness": 1.557540599606e-05, + "layer_7_sharpness": 2.6255735065205954e-05, + "layer_8_sharpness": 4.410034671309404e-05, + "layer_9_sharpness": 5.705187140847556e-05, + "layer_10_sharpness": 6.585222581634298e-05, + "layer_11_sharpness": 4.820105459657498e-05, + "layer_12_sharpness": 0.00016021875489968807 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_8500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..a51666fd3ba41ca6510a758f1ed1fe56a371f4fc --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.804460525512695, + "total_l1_linf_norm": 95194.078125, + "total_spectral_norm": 10.804461479187012, + "layer_1_update_fnorm": 2.4787487983703613, + "layer_1_max_l1_linf_norm": 2.5546789169311523, + "layer_1_max_spectral_norm": 0.33238139748573303, + "layer_2_update_fnorm": 2.4699270725250244, + "layer_2_max_l1_linf_norm": 2.588775634765625, + "layer_2_max_spectral_norm": 0.32771363854408264, + "layer_3_update_fnorm": 2.466106653213501, + "layer_3_max_l1_linf_norm": 2.6854894161224365, + "layer_3_max_spectral_norm": 0.3056803345680237, + "layer_4_update_fnorm": 2.4640450477600098, + "layer_4_max_l1_linf_norm": 2.622455596923828, + "layer_4_max_spectral_norm": 0.3344191908836365, + "layer_5_update_fnorm": 2.4598708152770996, + "layer_5_max_l1_linf_norm": 2.5819497108459473, + "layer_5_max_spectral_norm": 0.2734818756580353, + "layer_6_update_fnorm": 2.5425925254821777, + "layer_6_max_l1_linf_norm": 2.6405978202819824, + "layer_6_max_spectral_norm": 0.25948071479797363, + "layer_7_update_fnorm": 2.6133499145507812, + "layer_7_max_l1_linf_norm": 2.9595518112182617, + "layer_7_max_spectral_norm": 0.27969399094581604, + "layer_8_update_fnorm": 2.5384016036987305, + "layer_8_max_l1_linf_norm": 2.720719814300537, + "layer_8_max_spectral_norm": 0.3357873260974884, + "layer_9_update_fnorm": 2.540586471557617, + "layer_9_max_l1_linf_norm": 2.7820441722869873, + "layer_9_max_spectral_norm": 0.3705163598060608, + "layer_10_update_fnorm": 2.5150063037872314, + "layer_10_max_l1_linf_norm": 3.111665964126587, + "layer_10_max_spectral_norm": 0.37503647804260254, + "layer_11_update_fnorm": 2.519137144088745, + "layer_11_max_l1_linf_norm": 3.172607898712158, + "layer_11_max_spectral_norm": 0.3831631541252136, + "layer_12_update_fnorm": 2.415107488632202, + "layer_12_max_l1_linf_norm": 3.2116358280181885, + "layer_12_max_spectral_norm": 0.42924773693084717, + "total_sharpness": 0.00017530660261400044, + "ip_v_neg_g": 0.011807873845100403, + "cos_v_neg_g": 0.0028160251677036285, + "v_norm": 10.804460525512695, + "g_norm": 0.38808968663215637, + "hv_norm": 0.1892281472682953, + "cos_v_hv": 0.010009574703872204, + "hg_norm": 2.316485643386841, + "cos_g_hg": 0.482914537191391, + "v_parallel_norm": 0.003008246188983321, + "v_perp_norm": 10.804459571838379, + "layer_1_v_norm": 2.4787487983703613, + "layer_1_cos_v_neg_g": 0.005463889800012112, + "layer_2_v_norm": 2.4699270725250244, + "layer_2_cos_v_neg_g": 0.0028990546707063913, + "layer_3_v_norm": 2.466106653213501, + "layer_3_cos_v_neg_g": 0.003758054692298174, + "layer_4_v_norm": 
2.4640450477600098, + "layer_4_cos_v_neg_g": 0.003669311059638858, + "layer_5_v_norm": 2.4598708152770996, + "layer_5_cos_v_neg_g": 0.005082899238914251, + "layer_6_v_norm": 2.5425925254821777, + "layer_6_cos_v_neg_g": 0.00466996431350708, + "layer_7_v_norm": 2.6133499145507812, + "layer_7_cos_v_neg_g": 0.004922647029161453, + "layer_8_v_norm": 2.5384016036987305, + "layer_8_cos_v_neg_g": 0.005383338779211044, + "layer_9_v_norm": 2.540586471557617, + "layer_9_cos_v_neg_g": 0.007805961184203625, + "layer_10_v_norm": 2.5150063037872314, + "layer_10_cos_v_neg_g": 0.009115487337112427, + "layer_11_v_norm": 2.519137144088745, + "layer_11_cos_v_neg_g": 0.01006573997437954, + "layer_12_v_norm": 2.415107488632202, + "layer_12_cos_v_neg_g": 0.01484366599470377, + "layer_1_sharpness": 3.850086432066746e-05, + "layer_2_sharpness": 6.501573352579726e-06, + "layer_3_sharpness": 1.1536165402503684e-05, + "layer_4_sharpness": 2.775109351205174e-05, + "layer_5_sharpness": 3.3748321584425867e-05, + "layer_6_sharpness": 1.971575511561241e-05, + "layer_7_sharpness": 2.7784657504525967e-05, + "layer_8_sharpness": 4.55775843875017e-05, + "layer_9_sharpness": 6.676602060906589e-05, + "layer_10_sharpness": 6.691319867968559e-05, + "layer_11_sharpness": 5.29313474544324e-05, + "layer_12_sharpness": 0.00026366885867901146 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_9000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..ca1b23f3352e365c5b402ee33c753eaa847fbfd9 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 11.048823356628418, + "total_l1_linf_norm": 97808.15625, + "total_spectral_norm": 11.048823356628418, + "layer_1_update_fnorm": 2.6081104278564453, + "layer_1_max_l1_linf_norm": 2.5723464488983154, + "layer_1_max_spectral_norm": 0.3509295880794525, + "layer_2_update_fnorm": 2.5832552909851074, + "layer_2_max_l1_linf_norm": 2.621255874633789, + "layer_2_max_spectral_norm": 0.3299889862537384, + "layer_3_update_fnorm": 2.5651636123657227, + "layer_3_max_l1_linf_norm": 2.7382822036743164, + "layer_3_max_spectral_norm": 0.3050539493560791, + "layer_4_update_fnorm": 2.569775104522705, + "layer_4_max_l1_linf_norm": 2.8443708419799805, + "layer_4_max_spectral_norm": 0.33068928122520447, + "layer_5_update_fnorm": 2.5391690731048584, + "layer_5_max_l1_linf_norm": 2.48197340965271, + "layer_5_max_spectral_norm": 0.2664453387260437, + "layer_6_update_fnorm": 2.603661298751831, + "layer_6_max_l1_linf_norm": 2.5441436767578125, + "layer_6_max_spectral_norm": 0.2506136894226074, + "layer_7_update_fnorm": 2.653676986694336, + "layer_7_max_l1_linf_norm": 2.731919050216675, + "layer_7_max_spectral_norm": 0.2753710150718689, + "layer_8_update_fnorm": 2.5895981788635254, + "layer_8_max_l1_linf_norm": 2.7300665378570557, + "layer_8_max_spectral_norm": 0.31299158930778503, + "layer_9_update_fnorm": 2.5973072052001953, + "layer_9_max_l1_linf_norm": 2.9281153678894043, + "layer_9_max_spectral_norm": 0.3541557788848877, + "layer_10_update_fnorm": 2.5738697052001953, + "layer_10_max_l1_linf_norm": 3.017085075378418, + "layer_10_max_spectral_norm": 0.36568570137023926, + "layer_11_update_fnorm": 2.6023919582366943, + "layer_11_max_l1_linf_norm": 3.0996241569519043, + "layer_11_max_spectral_norm": 0.3848645091056824, + 
"layer_12_update_fnorm": 2.5213897228240967, + "layer_12_max_l1_linf_norm": 3.574449062347412, + "layer_12_max_spectral_norm": 0.42935243248939514, + "total_sharpness": 0.0001629954349482432, + "ip_v_neg_g": 0.01088343933224678, + "cos_v_neg_g": 0.0024073361419141293, + "v_norm": 11.048823356628418, + "g_norm": 0.40917906165122986, + "hv_norm": 0.20881842076778412, + "cos_v_hv": 0.008624276146292686, + "hg_norm": 3.3706860542297363, + "cos_g_hg": 0.5032000541687012, + "v_parallel_norm": 0.0039763180539011955, + "v_perp_norm": 11.048822402954102, + "layer_1_v_norm": 2.6081104278564453, + "layer_1_cos_v_neg_g": 0.007040954660624266, + "layer_2_v_norm": 2.5832552909851074, + "layer_2_cos_v_neg_g": 0.0037401732988655567, + "layer_3_v_norm": 2.5651636123657227, + "layer_3_cos_v_neg_g": 0.0040661790408194065, + "layer_4_v_norm": 2.569775104522705, + "layer_4_cos_v_neg_g": 0.005104848649352789, + "layer_5_v_norm": 2.5391690731048584, + "layer_5_cos_v_neg_g": 0.0045273457653820515, + "layer_6_v_norm": 2.603661298751831, + "layer_6_cos_v_neg_g": 0.0030758180655539036, + "layer_7_v_norm": 2.653676986694336, + "layer_7_cos_v_neg_g": 0.004076093435287476, + "layer_8_v_norm": 2.5895981788635254, + "layer_8_cos_v_neg_g": 0.004761841613799334, + "layer_9_v_norm": 2.5973072052001953, + "layer_9_cos_v_neg_g": 0.004517795518040657, + "layer_10_v_norm": 2.5738697052001953, + "layer_10_cos_v_neg_g": 0.005748869385570288, + "layer_11_v_norm": 2.6023919582366943, + "layer_11_cos_v_neg_g": 0.007346508093178272, + "layer_12_v_norm": 2.5213897228240967, + "layer_12_cos_v_neg_g": 0.01153133437037468, + "layer_1_sharpness": 7.778142753522843e-05, + "layer_2_sharpness": 1.6363197573809884e-05, + "layer_3_sharpness": 3.278522126493044e-05, + "layer_4_sharpness": 4.81270435557235e-05, + "layer_5_sharpness": 3.401295543881133e-05, + "layer_6_sharpness": 1.630764381843619e-05, + "layer_7_sharpness": 2.518817927921191e-05, + "layer_8_sharpness": 4.0907027141656727e-05, + "layer_9_sharpness": 4.8018198867794126e-05, + "layer_10_sharpness": 5.048225648351945e-05, + "layer_11_sharpness": 4.628931128536351e-05, + "layer_12_sharpness": 0.00021973071852698922 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_9500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..d25d68828ecb55d962dd06e5ce9f947c752a874c --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.991706848144531, + "total_l1_linf_norm": 97189.5078125, + "total_spectral_norm": 10.991706848144531, + "layer_1_update_fnorm": 2.5893640518188477, + "layer_1_max_l1_linf_norm": 2.5885233879089355, + "layer_1_max_spectral_norm": 0.3434082567691803, + "layer_2_update_fnorm": 2.5590200424194336, + "layer_2_max_l1_linf_norm": 2.648050308227539, + "layer_2_max_spectral_norm": 0.3220635652542114, + "layer_3_update_fnorm": 2.5435702800750732, + "layer_3_max_l1_linf_norm": 2.699418067932129, + "layer_3_max_spectral_norm": 0.3040994703769684, + "layer_4_update_fnorm": 2.5430471897125244, + "layer_4_max_l1_linf_norm": 2.6536641120910645, + "layer_4_max_spectral_norm": 0.32256555557250977, + "layer_5_update_fnorm": 2.5075225830078125, + "layer_5_max_l1_linf_norm": 2.582134962081909, + "layer_5_max_spectral_norm": 0.27404361963272095, + "layer_6_update_fnorm": 2.5850613117218018, + 
"layer_6_max_l1_linf_norm": 2.5587222576141357, + "layer_6_max_spectral_norm": 0.2490183264017105, + "layer_7_update_fnorm": 2.6473186016082764, + "layer_7_max_l1_linf_norm": 2.6735265254974365, + "layer_7_max_spectral_norm": 0.2885143756866455, + "layer_8_update_fnorm": 2.5995829105377197, + "layer_8_max_l1_linf_norm": 2.942854881286621, + "layer_8_max_spectral_norm": 0.36096590757369995, + "layer_9_update_fnorm": 2.596895933151245, + "layer_9_max_l1_linf_norm": 3.0258936882019043, + "layer_9_max_spectral_norm": 0.39419883489608765, + "layer_10_update_fnorm": 2.577414035797119, + "layer_10_max_l1_linf_norm": 2.9985837936401367, + "layer_10_max_spectral_norm": 0.416296124458313, + "layer_11_update_fnorm": 2.5902130603790283, + "layer_11_max_l1_linf_norm": 3.0231380462646484, + "layer_11_max_spectral_norm": 0.4096452593803406, + "layer_12_update_fnorm": 2.492332696914673, + "layer_12_max_l1_linf_norm": 3.080366849899292, + "layer_12_max_spectral_norm": 0.42001286149024963, + "total_sharpness": 0.00016870534454938024, + "ip_v_neg_g": 0.009150288999080658, + "cos_v_neg_g": 0.0021064390894025564, + "v_norm": 10.991706848144531, + "g_norm": 0.39520347118377686, + "hv_norm": 0.14891192317008972, + "cos_v_hv": 0.012452727183699608, + "hg_norm": 2.833372116088867, + "cos_g_hg": 0.5160974264144897, + "v_parallel_norm": 0.0025001615285873413, + "v_perp_norm": 10.991705894470215, + "layer_1_v_norm": 2.5893640518188477, + "layer_1_cos_v_neg_g": 0.005580368917435408, + "layer_2_v_norm": 2.5590200424194336, + "layer_2_cos_v_neg_g": 0.001499298494309187, + "layer_3_v_norm": 2.543570041656494, + "layer_3_cos_v_neg_g": 0.0022613494656980038, + "layer_4_v_norm": 2.5430471897125244, + "layer_4_cos_v_neg_g": 0.0023402702063322067, + "layer_5_v_norm": 2.5075225830078125, + "layer_5_cos_v_neg_g": 0.0027907623443752527, + "layer_6_v_norm": 2.5850610733032227, + "layer_6_cos_v_neg_g": 0.002812228398397565, + "layer_7_v_norm": 2.6473186016082764, + "layer_7_cos_v_neg_g": 0.0029948006849735975, + "layer_8_v_norm": 2.599583148956299, + "layer_8_cos_v_neg_g": 0.004429360385984182, + "layer_9_v_norm": 2.596895933151245, + "layer_9_cos_v_neg_g": 0.005981840193271637, + "layer_10_v_norm": 2.577414035797119, + "layer_10_cos_v_neg_g": 0.008092869073152542, + "layer_11_v_norm": 2.5902130603790283, + "layer_11_cos_v_neg_g": 0.009074832312762737, + "layer_12_v_norm": 2.492332696914673, + "layer_12_cos_v_neg_g": 0.012229932472109795, + "layer_1_sharpness": 3.630108403740451e-05, + "layer_2_sharpness": 5.875705937796738e-06, + "layer_3_sharpness": 9.527230758976657e-06, + "layer_4_sharpness": 1.8167798771173693e-05, + "layer_5_sharpness": 2.0996436433051713e-05, + "layer_6_sharpness": 1.2819424227927811e-05, + "layer_7_sharpness": 2.168439641536679e-05, + "layer_8_sharpness": 4.8380072257714346e-05, + "layer_9_sharpness": 6.485912308562547e-05, + "layer_10_sharpness": 7.479413761757314e-05, + "layer_11_sharpness": 6.963191117392853e-05, + "layer_12_sharpness": 0.00021534056577365845 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/training_log.txt b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..8d7fa0ba0215217799fb34f2df1f58959aa9a35c --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.005_mlr_0.01_seed_43/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. 
+Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" 
+ return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in 
range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. 
+ # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. 
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
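+ # A minimal sketch of what the double-backward below estimates (illustrative only;
+ # the variable names in this comment are not from this script): for update direction v
+ # and loss L(theta), the reported directional sharpness is S(v) = v^T H v / (v^T v),
+ # with H the Hessian of L, computed roughly as
+ #   g = torch.autograd.grad(loss, params, create_graph=True)
+ #   gv = sum((gi * vi).sum() for gi, vi in zip(g, v))
+ #   hv = torch.autograd.grad(gv, params)   # Hessian-vector product H v
+ #   sharpness = sum((hi * vi).sum() for hi, vi in zip(hv, v)) / sum((vi * vi).sum() for vi in v)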
+ loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = <v, -g> ; cos_v_neg_g = <v, -g> / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = 
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, 
+ device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
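+ # Note (descriptive): the direction captured around the optimizer step below is
+ # v = theta_before_step - theta_after_step, i.e. the update the optimizer actually applied,
+ # with learning rate, momentum and any preconditioning folded in. It is stored as
+ # last_training_update and consumed by the sharpness analysis as update_direction_v;
+ # for plain AdamW with zero weight decay this is lr * m_hat / (sqrt(v_hat) + eps) elementwise.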
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026602 +step:0 train loss:11.019227 +step:1 train loss:10.651477 +step:2 train loss:10.261354 +step:3 train loss:9.976444 +step:4 train loss:9.806094 +step:5 train loss:9.672942 +step:6 train loss:9.612673 +step:7 train loss:9.522190 +step:8 train loss:9.506386 +step:9 train loss:9.404762 +step:10 train loss:9.336097 +step:11 train loss:9.246762 +step:12 train loss:9.169094 +step:13 train loss:9.052175 +step:14 train loss:8.979706 +step:15 train loss:8.876831 +step:16 train loss:8.811038 +step:17 train loss:8.702903 +step:18 train loss:8.648781 +step:19 train loss:8.524993 +step:20 train loss:8.423347 +step:21 train loss:8.357477 +step:22 train loss:8.179523 +step:23 train loss:8.124855 +step:24 train loss:7.985216 +step:25 train loss:7.932238 +step:26 train loss:7.831684 +step:27 train loss:7.706348 +step:28 train loss:7.685682 +step:29 train loss:7.619422 +step:30 train loss:7.542893 +step:31 train loss:7.410525 +step:32 train loss:7.381474 +step:33 train loss:7.318817 +step:34 train loss:7.343979 +step:35 train loss:7.259536 +step:36 train loss:7.214106 +step:37 train loss:7.158415 +step:38 train loss:7.189099 +step:39 
train loss:7.108748 +step:40 train loss:7.093450 +step:41 train loss:7.051092 +step:42 train loss:7.078508 +step:43 train loss:7.005951 +step:44 train loss:6.981624 +step:45 train loss:6.982143 +step:46 train loss:6.986427 +step:47 train loss:6.954516 +step:48 train loss:6.922225 +step:49 train loss:6.902924 +step:50 train loss:6.821846 +step:51 train loss:6.838815 +step:52 train loss:6.833956 +step:53 train loss:6.810765 +step:54 train loss:6.810460 +step:55 train loss:6.728861 +step:56 train loss:6.707646 +step:57 train loss:6.753854 +step:58 train loss:6.665743 +step:59 train loss:6.696834 +step:60 train loss:6.681886 +step:61 train loss:6.651755 +step:62 train loss:6.626246 +step:63 train loss:6.691125 +step:64 train loss:6.567445 +step:65 train loss:6.601740 +step:66 train loss:6.603513 +step:67 train loss:6.664975 +step:68 train loss:6.627843 +step:69 train loss:6.574377 +step:70 train loss:6.556942 +step:71 train loss:6.556903 +step:72 train loss:6.557484 +step:73 train loss:6.524875 +step:74 train loss:6.541226 +step:75 train loss:6.492582 +step:76 train loss:6.563810 +step:77 train loss:6.515646 +step:78 train loss:6.282352 +step:79 train loss:6.467750 +step:80 train loss:6.429571 +step:81 train loss:6.539211 +step:82 train loss:6.485224 +step:83 train loss:6.452591 +step:84 train loss:6.427296 +step:85 train loss:6.394438 +step:86 train loss:6.389487 +step:87 train loss:6.377009 +step:88 train loss:6.381126 +step:89 train loss:6.331511 +step:90 train loss:6.382986 +step:91 train loss:6.384558 +step:92 train loss:6.392732 +step:93 train loss:6.351485 +step:94 train loss:6.309232 +step:95 train loss:6.257412 +step:96 train loss:6.359293 +step:97 train loss:6.315110 +step:98 train loss:6.299713 +step:99 train loss:6.269906 +step:100 train loss:6.266052 +step:101 train loss:6.245290 +step:102 train loss:6.268411 +step:103 train loss:6.284889 +step:104 train loss:6.310628 +step:105 train loss:6.376681 +step:106 train loss:6.322681 +step:107 train loss:6.276124 +step:108 train loss:6.309752 +step:109 train loss:6.334323 +step:110 train loss:6.261678 +step:111 train loss:6.315596 +step:112 train loss:6.326577 +step:113 train loss:6.238226 +step:114 train loss:6.298920 +step:115 train loss:6.257271 +step:116 train loss:6.231506 +step:117 train loss:6.183203 +step:118 train loss:6.240054 +step:119 train loss:6.197568 +step:120 train loss:6.211314 +step:121 train loss:6.121696 +step:122 train loss:6.208084 +step:123 train loss:6.140877 +step:124 train loss:6.131348 +step:125 train loss:6.103235 +step:126 train loss:6.207461 +step:127 train loss:6.115655 +step:128 train loss:6.176116 +step:129 train loss:6.131977 +step:130 train loss:6.158952 +step:131 train loss:6.123324 +step:132 train loss:6.078347 +step:133 train loss:6.134724 +step:134 train loss:6.125115 +step:135 train loss:6.032884 +step:136 train loss:6.067860 +step:137 train loss:6.079499 +step:138 train loss:6.028833 +step:139 train loss:6.088027 +step:140 train loss:6.012625 +step:141 train loss:6.098505 +step:142 train loss:6.055116 +step:143 train loss:6.076653 +step:144 train loss:6.052804 +step:145 train loss:5.995026 +step:146 train loss:5.997288 +step:147 train loss:6.051122 +step:148 train loss:6.060309 +step:149 train loss:6.025221 +step:150 train loss:6.030192 +step:151 train loss:5.949610 +step:152 train loss:5.991196 +step:153 train loss:5.984315 +step:154 train loss:6.052904 +step:155 train loss:6.036779 +step:156 train loss:6.053847 +step:157 train loss:5.991110 +step:158 train loss:6.001208 +step:159 train 
loss:6.035642 +step:160 train loss:6.042710 +step:161 train loss:6.038799 +step:162 train loss:6.002388 +step:163 train loss:6.029034 +step:164 train loss:6.005327 +step:165 train loss:6.042577 +step:166 train loss:6.000160 +step:167 train loss:6.030852 +step:168 train loss:5.994174 +step:169 train loss:5.947657 +step:170 train loss:5.899360 +step:171 train loss:6.037514 +step:172 train loss:5.964104 +step:173 train loss:6.027739 +step:174 train loss:6.018936 +step:175 train loss:5.977740 +step:176 train loss:5.937507 +step:177 train loss:5.971131 +step:178 train loss:5.967841 +step:179 train loss:5.927056 +step:180 train loss:5.894490 +step:181 train loss:5.938709 +step:182 train loss:5.866736 +step:183 train loss:5.944852 +step:184 train loss:5.911599 +step:185 train loss:5.854526 +step:186 train loss:5.974281 +step:187 train loss:5.918491 +step:188 train loss:5.775621 +step:189 train loss:5.909720 +step:190 train loss:5.915123 +step:191 train loss:5.832421 +step:192 train loss:5.770502 +step:193 train loss:5.916622 +step:194 train loss:5.928253 +step:195 train loss:5.905343 +step:196 train loss:5.883470 +step:197 train loss:5.870683 +step:198 train loss:5.834753 +step:199 train loss:5.920440 +step:200 train loss:6.005408 +step:201 train loss:5.931986 +step:202 train loss:5.913824 +step:203 train loss:5.888720 +step:204 train loss:5.886737 +step:205 train loss:5.751433 +step:206 train loss:5.868694 +step:207 train loss:5.858169 +step:208 train loss:5.795493 +step:209 train loss:5.775764 +step:210 train loss:5.794661 +step:211 train loss:5.855309 +step:212 train loss:5.836061 +step:213 train loss:5.836341 +step:214 train loss:5.823072 +step:215 train loss:5.846087 +step:216 train loss:5.807106 +step:217 train loss:5.829292 +step:218 train loss:5.779421 +step:219 train loss:5.759874 +step:220 train loss:5.790783 +step:221 train loss:5.751935 +step:222 train loss:5.784566 +step:223 train loss:5.824264 +step:224 train loss:5.796353 +step:225 train loss:5.721067 +step:226 train loss:5.730317 +step:227 train loss:5.783230 +step:228 train loss:5.758622 +step:229 train loss:5.811783 +step:230 train loss:5.712709 +step:231 train loss:5.788659 +step:232 train loss:5.772682 +step:233 train loss:5.750140 +step:234 train loss:5.730288 +step:235 train loss:5.830147 +step:236 train loss:5.782961 +step:237 train loss:5.812034 +step:238 train loss:5.804787 +step:239 train loss:5.713814 +step:240 train loss:5.780307 +step:241 train loss:5.828405 +step:242 train loss:5.832641 +step:243 train loss:5.730062 +step:244 train loss:5.741899 +step:245 train loss:5.714258 +step:246 train loss:5.715131 +step:247 train loss:5.698288 +step:248 train loss:5.687782 +step:249 train loss:5.784995 +step:250 validation loss:5.775362 +step:250 train loss:5.748722 +step:251 train loss:5.794914 +step:252 train loss:5.726120 +step:253 train loss:5.727076 +step:254 train loss:5.694750 +step:255 train loss:5.727774 +step:256 train loss:5.721599 +step:257 train loss:5.776384 +step:258 train loss:5.681479 +step:259 train loss:5.700642 +step:260 train loss:5.678601 +step:261 train loss:5.672491 +step:262 train loss:5.737668 +step:263 train loss:5.695458 +step:264 train loss:5.688168 +step:265 train loss:5.729829 +step:266 train loss:5.693939 +step:267 train loss:5.702623 +step:268 train loss:5.653318 +step:269 train loss:5.675600 +step:270 train loss:5.686097 +step:271 train loss:5.694312 +step:272 train loss:5.626854 +step:273 train loss:5.695986 +step:274 train loss:5.615568 +step:275 train loss:5.684805 +step:276 train 
loss:5.659492 +step:277 train loss:5.668230 +step:278 train loss:5.629266 +step:279 train loss:5.596508 +step:280 train loss:5.683447 +step:281 train loss:5.759451 +step:282 train loss:5.651745 +step:283 train loss:5.675384 +step:284 train loss:5.626523 +step:285 train loss:5.678189 +step:286 train loss:5.641345 +step:287 train loss:5.625260 +step:288 train loss:5.601268 +step:289 train loss:5.613591 +step:290 train loss:5.680071 +step:291 train loss:5.631561 +step:292 train loss:5.675351 +step:293 train loss:5.588808 +step:294 train loss:5.703092 +step:295 train loss:5.599204 +step:296 train loss:5.657468 +step:297 train loss:5.698054 +step:298 train loss:5.594975 +step:299 train loss:5.641886 +step:300 train loss:5.586297 +step:301 train loss:5.614848 +step:302 train loss:5.587243 +step:303 train loss:5.594812 +step:304 train loss:5.629338 +step:305 train loss:5.578970 +step:306 train loss:5.593318 +step:307 train loss:5.597645 +step:308 train loss:5.508494 +step:309 train loss:5.640310 +step:310 train loss:5.587554 +step:311 train loss:5.580227 +step:312 train loss:5.555078 +step:313 train loss:5.584464 +step:314 train loss:5.562563 +step:315 train loss:5.555789 +step:316 train loss:5.596450 +step:317 train loss:5.539557 +step:318 train loss:5.553508 +step:319 train loss:5.619201 +step:320 train loss:5.535516 +step:321 train loss:5.591397 +step:322 train loss:5.573319 +step:323 train loss:5.631240 +step:324 train loss:5.592807 +step:325 train loss:5.630916 +step:326 train loss:5.628118 +step:327 train loss:5.611360 +step:328 train loss:5.561348 +step:329 train loss:5.588316 +step:330 train loss:5.507458 +step:331 train loss:5.547844 +step:332 train loss:5.536443 +step:333 train loss:5.487717 +step:334 train loss:5.573917 +step:335 train loss:5.622687 +step:336 train loss:5.747955 +step:337 train loss:5.609210 +step:338 train loss:5.529485 +step:339 train loss:5.482771 +step:340 train loss:5.494041 +step:341 train loss:5.497896 +step:342 train loss:5.552986 +step:343 train loss:5.533921 +step:344 train loss:5.479044 +step:345 train loss:5.443347 +step:346 train loss:5.512931 +step:347 train loss:5.469324 +step:348 train loss:5.496369 +step:349 train loss:5.488036 +step:350 train loss:5.484584 +step:351 train loss:5.557215 +step:352 train loss:5.502005 +step:353 train loss:5.524873 +step:354 train loss:5.492462 +step:355 train loss:5.519670 +step:356 train loss:5.486508 +step:357 train loss:5.554216 +step:358 train loss:5.574530 +step:359 train loss:5.413882 +step:360 train loss:5.536598 +step:361 train loss:5.528964 +step:362 train loss:5.527734 +step:363 train loss:5.456088 +step:364 train loss:5.592863 +step:365 train loss:5.533945 +step:366 train loss:5.490280 +step:367 train loss:5.533314 +step:368 train loss:5.508026 +step:369 train loss:5.493020 +step:370 train loss:5.529632 +step:371 train loss:5.452849 +step:372 train loss:5.528637 +step:373 train loss:5.477636 +step:374 train loss:5.461472 +step:375 train loss:5.499634 +step:376 train loss:5.470733 +step:377 train loss:5.369527 +step:378 train loss:5.435220 +step:379 train loss:5.478310 +step:380 train loss:5.397838 +step:381 train loss:5.445836 +step:382 train loss:5.450679 +step:383 train loss:5.401037 +step:384 train loss:5.390069 +step:385 train loss:5.405328 +step:386 train loss:5.441012 +step:387 train loss:5.466928 +step:388 train loss:5.414776 +step:389 train loss:5.437622 +step:390 train loss:5.398866 +step:391 train loss:5.420889 +step:392 train loss:5.426223 +step:393 train loss:5.430775 +step:394 train loss:5.477891 
+step:395 train loss:5.387173 +step:396 train loss:5.339775 +step:397 train loss:5.400427 +step:398 train loss:5.391231 +step:399 train loss:5.386673 +step:400 train loss:5.330047 +step:401 train loss:5.391616 +step:402 train loss:5.360518 +step:403 train loss:5.363578 +step:404 train loss:5.347380 +step:405 train loss:5.333144 +step:406 train loss:5.389501 +step:407 train loss:5.391477 +step:408 train loss:5.464744 +step:409 train loss:5.411954 +step:410 train loss:5.438172 +step:411 train loss:5.395073 +step:412 train loss:5.520644 +step:413 train loss:5.386763 +step:414 train loss:5.469730 +step:415 train loss:5.409725 +step:416 train loss:5.430062 +step:417 train loss:5.474753 +step:418 train loss:5.411033 +step:419 train loss:5.394361 +step:420 train loss:5.367846 +step:421 train loss:5.403473 +step:422 train loss:5.421937 +step:423 train loss:5.402365 +step:424 train loss:5.355111 +step:425 train loss:5.417380 +step:426 train loss:5.410494 +step:427 train loss:5.355574 +step:428 train loss:5.426705 +step:429 train loss:5.301857 +step:430 train loss:5.355036 +step:431 train loss:5.361310 +step:432 train loss:5.376000 +step:433 train loss:5.369906 +step:434 train loss:5.329621 +step:435 train loss:5.367549 +step:436 train loss:5.392657 +step:437 train loss:5.354814 +step:438 train loss:5.301828 +step:439 train loss:5.283728 +step:440 train loss:5.317257 +step:441 train loss:5.248715 +step:442 train loss:5.255774 +step:443 train loss:5.291626 +step:444 train loss:5.344720 +step:445 train loss:5.323435 +step:446 train loss:5.267577 +step:447 train loss:5.285239 +step:448 train loss:5.367475 +step:449 train loss:5.325028 +step:450 train loss:5.316110 +step:451 train loss:5.300299 +step:452 train loss:5.364850 +step:453 train loss:5.318234 +step:454 train loss:5.257895 +step:455 train loss:5.332836 +step:456 train loss:5.288864 +step:457 train loss:5.260650 +step:458 train loss:5.291742 +step:459 train loss:5.232047 +step:460 train loss:5.348721 +step:461 train loss:5.310884 +step:462 train loss:5.200499 +step:463 train loss:5.252318 +step:464 train loss:5.299823 +step:465 train loss:5.245129 +step:466 train loss:5.258250 +step:467 train loss:5.206330 +step:468 train loss:5.265520 +step:469 train loss:5.259137 +step:470 train loss:5.214772 +step:471 train loss:5.312422 +step:472 train loss:5.235167 +step:473 train loss:5.319467 +step:474 train loss:5.299520 +step:475 train loss:5.323139 +step:476 train loss:5.304700 +step:477 train loss:5.221677 +step:478 train loss:5.245411 +step:479 train loss:5.240932 +step:480 train loss:5.255967 +step:481 train loss:5.285547 +step:482 train loss:5.239095 +step:483 train loss:5.312573 +step:484 train loss:5.270420 +step:485 train loss:5.246006 +step:486 train loss:5.299376 +step:487 train loss:5.256220 +step:488 train loss:5.251555 +step:489 train loss:5.229934 +step:490 train loss:5.214483 +step:491 train loss:5.222601 +step:492 train loss:5.241540 +step:493 train loss:5.250990 +step:494 train loss:5.230387 +step:495 train loss:5.178827 +step:496 train loss:5.274351 +step:497 train loss:5.157247 +step:498 train loss:5.259309 +step:499 train loss:5.209328 +step:500 validation loss:5.180855 total_sharp:3.5593e-03 L1_sharp:1.4087e-02 L2_sharp:1.0637e-03 L3_sharp:1.4522e-03 L4_sharp:1.2019e-03 L5_sharp:1.3595e-03 L6_sharp:3.4259e-04 L7_sharp:4.4809e-04 L8_sharp:4.2747e-04 L9_sharp:6.1746e-04 L10_sharp:5.4706e-04 L11_sharp:4.2131e-04 L12_sharp:1.1228e-03 total_fnorm:6.3243e+00 total_l1_linf:5.3913e+04 total_spectral:6.3243e+00 L1_fnorm:1.0675e+00 
L2_fnorm:1.1187e+00 L3_fnorm:1.1167e+00 L4_fnorm:1.1600e+00 L5_fnorm:1.2273e+00 L6_fnorm:1.2962e+00 L7_fnorm:1.3744e+00 L8_fnorm:1.4129e+00 L9_fnorm:1.4686e+00 L10_fnorm:1.4816e+00 L11_fnorm:1.4026e+00 L12_fnorm:1.3425e+00 L1_l1linf:1.9840e+00 L2_l1linf:1.2524e+00 L3_l1linf:1.2666e+00 L4_l1linf:1.3739e+00 L5_l1linf:1.4400e+00 L6_l1linf:1.4436e+00 L7_l1linf:1.4490e+00 L8_l1linf:1.4893e+00 L9_l1linf:1.7719e+00 L10_l1linf:2.0506e+00 L11_l1linf:2.3382e+00 L12_l1linf:2.4804e+00 L1_spectral:3.0886e-01 L2_spectral:2.0231e-01 L3_spectral:2.0308e-01 L4_spectral:2.0468e-01 L5_spectral:2.4212e-01 L6_spectral:2.6408e-01 L7_spectral:2.3778e-01 L8_spectral:2.4261e-01 L9_spectral:2.3684e-01 L10_spectral:2.5783e-01 L11_spectral:2.5885e-01 L12_spectral:2.7375e-01 ip_v_neg_g:8.6984e-02 cos_v_neg_g:2.4517e-02 v_norm:6.3243e+00 g_norm:5.6100e-01 hv_norm:3.9805e-01 cos_v_hv:5.6550e-02 hg_norm:1.4609e+00 cos_g_hg:4.5035e-01 v_par:1.2172e-02 v_perp:6.3243e+00 L1_cos_v_neg_g:8.7752e-02 L1_v_norm:1.0675e+00 L2_cos_v_neg_g:8.3685e-02 L2_v_norm:1.1187e+00 L3_cos_v_neg_g:8.5719e-02 L3_v_norm:1.1167e+00 L4_cos_v_neg_g:7.9402e-02 L4_v_norm:1.1600e+00 L5_cos_v_neg_g:7.7145e-02 L5_v_norm:1.2273e+00 L6_cos_v_neg_g:5.6554e-02 L6_v_norm:1.2962e+00 L7_cos_v_neg_g:5.0353e-02 L7_v_norm:1.3744e+00 L8_cos_v_neg_g:5.0288e-02 L8_v_norm:1.4129e+00 L9_cos_v_neg_g:5.3937e-02 L9_v_norm:1.4686e+00 L10_cos_v_neg_g:4.3975e-02 L10_v_norm:1.4816e+00 L11_cos_v_neg_g:4.3772e-02 L11_v_norm:1.4026e+00 L12_cos_v_neg_g:5.4346e-02 L12_v_norm:1.3425e+00 +step:500 train loss:5.203619 +step:501 train loss:5.175073 +step:502 train loss:5.220325 +step:503 train loss:5.143078 +step:504 train loss:5.229140 +step:505 train loss:5.153146 +step:506 train loss:5.159822 +step:507 train loss:5.159142 +step:508 train loss:5.189857 +step:509 train loss:5.208802 +step:510 train loss:5.130175 +step:511 train loss:5.153513 +step:512 train loss:5.143678 +step:513 train loss:5.158025 +step:514 train loss:5.250971 +step:515 train loss:5.175194 +step:516 train loss:5.255334 +step:517 train loss:5.150557 +step:518 train loss:5.141032 +step:519 train loss:5.198751 +step:520 train loss:5.156660 +step:521 train loss:5.214794 +step:522 train loss:5.191436 +step:523 train loss:5.214288 +step:524 train loss:5.139520 +step:525 train loss:5.139407 +step:526 train loss:5.148370 +step:527 train loss:5.116819 +step:528 train loss:5.117425 +step:529 train loss:5.146747 +step:530 train loss:5.113175 +step:531 train loss:5.151165 +step:532 train loss:5.131903 +step:533 train loss:5.090636 +step:534 train loss:5.150819 +step:535 train loss:5.135091 +step:536 train loss:5.204501 +step:537 train loss:5.090640 +step:538 train loss:5.083808 +step:539 train loss:5.169765 +step:540 train loss:5.241642 +step:541 train loss:5.115313 +step:542 train loss:5.126085 +step:543 train loss:5.125803 +step:544 train loss:5.142297 +step:545 train loss:5.180291 +step:546 train loss:5.102830 +step:547 train loss:5.118683 +step:548 train loss:5.081761 +step:549 train loss:5.142042 +step:550 train loss:5.125325 +step:551 train loss:5.146688 +step:552 train loss:5.221955 +step:553 train loss:5.174803 +step:554 train loss:5.096207 +step:555 train loss:5.146615 +step:556 train loss:5.106414 +step:557 train loss:5.089800 +step:558 train loss:5.044476 +step:559 train loss:5.107182 +step:560 train loss:5.178745 +step:561 train loss:5.029854 +step:562 train loss:4.998441 +step:563 train loss:5.066804 +step:564 train loss:5.023518 +step:565 train loss:5.034208 +step:566 train loss:5.039243 +step:567 train 
loss:5.035986 +step:568 train loss:5.124685 +step:569 train loss:5.076741 +step:570 train loss:5.013985 +step:571 train loss:5.034440 +step:572 train loss:5.018851 +step:573 train loss:5.040479 +step:574 train loss:5.085987 +step:575 train loss:5.051423 +step:576 train loss:5.073046 +step:577 train loss:5.060233 +step:578 train loss:5.046577 +step:579 train loss:5.114405 +step:580 train loss:5.013191 +step:581 train loss:5.065716 +step:582 train loss:5.029280 +step:583 train loss:5.052993 +step:584 train loss:5.024165 +step:585 train loss:5.011165 +step:586 train loss:4.995990 +step:587 train loss:5.065484 +step:588 train loss:4.968722 +step:589 train loss:5.028788 +step:590 train loss:5.036820 +step:591 train loss:4.969543 +step:592 train loss:4.948853 +step:593 train loss:4.967521 +step:594 train loss:4.942657 +step:595 train loss:5.040337 +step:596 train loss:5.032199 +step:597 train loss:5.063330 +step:598 train loss:5.038144 +step:599 train loss:5.043787 +step:600 train loss:5.002324 +step:601 train loss:5.020691 +step:602 train loss:5.018266 +step:603 train loss:5.066499 +step:604 train loss:5.042449 +step:605 train loss:5.076282 +step:606 train loss:5.020605 +step:607 train loss:5.012396 +step:608 train loss:5.036506 +step:609 train loss:5.025595 +step:610 train loss:5.017620 +step:611 train loss:5.086242 +step:612 train loss:5.085066 +step:613 train loss:5.019992 +step:614 train loss:5.069318 +step:615 train loss:5.108089 +step:616 train loss:5.009655 +step:617 train loss:5.055351 +step:618 train loss:5.020814 +step:619 train loss:5.035457 +step:620 train loss:5.062306 +step:621 train loss:4.970748 +step:622 train loss:5.039600 +step:623 train loss:5.023132 +step:624 train loss:4.999785 +step:625 train loss:5.006929 +step:626 train loss:4.998260 +step:627 train loss:4.970272 +step:628 train loss:4.973512 +step:629 train loss:4.902684 +step:630 train loss:4.934571 +step:631 train loss:4.921974 +step:632 train loss:4.923831 +step:633 train loss:4.948692 +step:634 train loss:4.932816 +step:635 train loss:4.869441 +step:636 train loss:4.955897 +step:637 train loss:4.864449 +step:638 train loss:4.794137 +step:639 train loss:4.925936 +step:640 train loss:4.877104 +step:641 train loss:4.908088 +step:642 train loss:4.928582 +step:643 train loss:4.825281 +step:644 train loss:4.910347 +step:645 train loss:4.867421 +step:646 train loss:4.853552 +step:647 train loss:4.875041 +step:648 train loss:4.995115 +step:649 train loss:4.914179 +step:650 train loss:4.967509 +step:651 train loss:4.841437 +step:652 train loss:4.867089 +step:653 train loss:4.870535 +step:654 train loss:4.913430 +step:655 train loss:4.996408 +step:656 train loss:5.007045 +step:657 train loss:5.083174 +step:658 train loss:5.146026 +step:659 train loss:5.246306 +step:660 train loss:5.369871 +step:661 train loss:5.326247 +step:662 train loss:5.311628 +step:663 train loss:5.392523 +step:664 train loss:5.298063 +step:665 train loss:5.264760 +step:666 train loss:5.315334 +step:667 train loss:5.536341 +step:668 train loss:5.447227 +step:669 train loss:5.408012 +step:670 train loss:5.379858 +step:671 train loss:5.332297 +step:672 train loss:5.273668 +step:673 train loss:5.331175 +step:674 train loss:5.302813 +step:675 train loss:5.184813 +step:676 train loss:5.226933 +step:677 train loss:5.134023 +step:678 train loss:5.098333 +step:679 train loss:5.133264 +step:680 train loss:5.085725 +step:681 train loss:5.166022 +step:682 train loss:5.048857 +step:683 train loss:5.166029 +step:684 train loss:5.204339 +step:685 train loss:5.188471 
+step:686 train loss:5.263255 +step:687 train loss:5.168469 +step:688 train loss:5.089799 +step:689 train loss:5.109107 +step:690 train loss:5.044343 +step:691 train loss:5.035962 +step:692 train loss:5.044604 +step:693 train loss:5.026215 +step:694 train loss:4.999557 +step:695 train loss:4.937335 +step:696 train loss:4.901210 +step:697 train loss:5.049881 +step:698 train loss:4.960286 +step:699 train loss:4.940235 +step:700 train loss:5.013213 +step:701 train loss:4.897750 +step:702 train loss:4.967131 +step:703 train loss:4.897959 +step:704 train loss:4.845331 +step:705 train loss:4.897941 +step:706 train loss:4.781855 +step:707 train loss:4.841218 +step:708 train loss:4.939684 +step:709 train loss:4.895494 +step:710 train loss:4.853026 +step:711 train loss:4.912086 +step:712 train loss:4.849075 +step:713 train loss:4.820186 +step:714 train loss:4.912874 +step:715 train loss:4.789318 +step:716 train loss:4.934045 +step:717 train loss:4.807248 +step:718 train loss:4.876340 +step:719 train loss:4.842008 +step:720 train loss:4.825994 +step:721 train loss:4.835740 +step:722 train loss:4.839160 +step:723 train loss:4.887369 +step:724 train loss:4.865256 +step:725 train loss:4.839938 +step:726 train loss:4.823386 +step:727 train loss:4.866397 +step:728 train loss:4.868340 +step:729 train loss:4.920485 +step:730 train loss:5.005970 +step:731 train loss:5.094909 +step:732 train loss:5.116783 +step:733 train loss:5.186221 +step:734 train loss:5.108405 +step:735 train loss:5.133085 +step:736 train loss:5.051124 +step:737 train loss:5.017558 +step:738 train loss:5.027299 +step:739 train loss:4.968362 +step:740 train loss:4.994675 +step:741 train loss:5.118510 +step:742 train loss:5.036347 +step:743 train loss:5.070449 +step:744 train loss:5.084177 +step:745 train loss:5.029690 +step:746 train loss:5.003454 +step:747 train loss:4.983458 +step:748 train loss:4.936139 +step:749 train loss:4.962164 +step:750 validation loss:4.914859 +step:750 train loss:4.893374 +step:751 train loss:4.896566 +step:752 train loss:4.829057 +step:753 train loss:4.875041 +step:754 train loss:4.868205 +step:755 train loss:4.919298 +step:756 train loss:4.888566 +step:757 train loss:4.991040 +step:758 train loss:4.864844 +step:759 train loss:4.854213 +step:760 train loss:4.814117 +step:761 train loss:4.852561 +step:762 train loss:4.814822 +step:763 train loss:4.803695 +step:764 train loss:4.773847 +step:765 train loss:4.774479 +step:766 train loss:4.849784 +step:767 train loss:4.945627 +step:768 train loss:4.780322 +step:769 train loss:4.824615 +step:770 train loss:4.873093 +step:771 train loss:4.960449 +step:772 train loss:4.957143 +step:773 train loss:4.896794 +step:774 train loss:4.935198 +step:775 train loss:4.879329 +step:776 train loss:4.899442 +step:777 train loss:4.845873 +step:778 train loss:4.841409 +step:779 train loss:4.815041 +step:780 train loss:4.891437 +step:781 train loss:4.815695 +step:782 train loss:4.842701 +step:783 train loss:4.808618 +step:784 train loss:4.807065 +step:785 train loss:4.769237 +step:786 train loss:4.784176 +step:787 train loss:4.723804 +step:788 train loss:4.786610 +step:789 train loss:4.768676 +step:790 train loss:4.724254 +step:791 train loss:4.806947 +step:792 train loss:4.805801 +step:793 train loss:4.761946 +step:794 train loss:4.748763 +step:795 train loss:4.708454 +step:796 train loss:4.976242 +step:797 train loss:4.752436 +step:798 train loss:4.757441 +step:799 train loss:4.750358 +step:800 train loss:4.825058 +step:801 train loss:4.724257 +step:802 train loss:4.848878 +step:803 
train loss:4.761614 +step:804 train loss:4.699859 +step:805 train loss:4.749600 +step:806 train loss:4.668930 +step:807 train loss:4.736188 +step:808 train loss:4.735925 +step:809 train loss:4.692320 +step:810 train loss:4.660937 +step:811 train loss:4.751251 +step:812 train loss:4.705422 +step:813 train loss:4.718209 +step:814 train loss:4.767971 +step:815 train loss:4.724431 +step:816 train loss:4.653491 +step:817 train loss:4.685927 +step:818 train loss:4.662433 +step:819 train loss:4.653760 +step:820 train loss:4.653245 +step:821 train loss:4.617160 +step:822 train loss:4.602676 +step:823 train loss:4.686659 +step:824 train loss:4.596592 +step:825 train loss:4.584845 +step:826 train loss:4.629982 +step:827 train loss:4.549596 +step:828 train loss:4.628930 +step:829 train loss:4.641902 +step:830 train loss:4.692448 +step:831 train loss:4.698804 +step:832 train loss:4.769811 +step:833 train loss:4.734908 +step:834 train loss:4.726810 +step:835 train loss:4.706608 +step:836 train loss:4.719889 +step:837 train loss:4.672473 +step:838 train loss:4.672372 +step:839 train loss:4.661284 +step:840 train loss:4.722184 +step:841 train loss:4.711364 +step:842 train loss:4.687340 +step:843 train loss:4.674594 +step:844 train loss:4.641915 +step:845 train loss:4.628815 +step:846 train loss:4.743796 +step:847 train loss:4.717586 +step:848 train loss:4.649587 +step:849 train loss:4.707195 +step:850 train loss:4.722336 +step:851 train loss:4.681074 +step:852 train loss:4.748042 +step:853 train loss:4.631934 +step:854 train loss:4.657151 +step:855 train loss:4.646517 +step:856 train loss:4.588665 +step:857 train loss:4.618697 +step:858 train loss:4.650618 +step:859 train loss:4.578138 +step:860 train loss:4.591021 +step:861 train loss:4.624822 +step:862 train loss:4.569083 +step:863 train loss:4.582643 +step:864 train loss:4.563843 +step:865 train loss:4.574407 +step:866 train loss:4.593997 +step:867 train loss:4.696407 +step:868 train loss:4.551166 +step:869 train loss:4.572572 +step:870 train loss:4.523646 +step:871 train loss:4.520283 +step:872 train loss:4.593226 +step:873 train loss:4.569079 +step:874 train loss:4.589303 +step:875 train loss:4.491572 +step:876 train loss:4.592778 +step:877 train loss:4.519901 +step:878 train loss:4.623255 +step:879 train loss:4.531616 +step:880 train loss:4.639446 +step:881 train loss:4.562469 +step:882 train loss:4.518809 +step:883 train loss:4.557322 +step:884 train loss:4.572224 +step:885 train loss:4.511755 +step:886 train loss:4.506373 +step:887 train loss:4.530869 +step:888 train loss:4.634254 +step:889 train loss:4.558864 +step:890 train loss:4.499176 +step:891 train loss:4.459210 +step:892 train loss:4.440526 +step:893 train loss:4.505777 +step:894 train loss:4.484964 +step:895 train loss:4.475972 +step:896 train loss:4.567527 +step:897 train loss:4.494197 +step:898 train loss:4.506928 +step:899 train loss:4.544783 +step:900 train loss:4.571786 +step:901 train loss:4.501766 +step:902 train loss:4.537817 +step:903 train loss:4.627593 +step:904 train loss:4.653643 +step:905 train loss:4.541401 +step:906 train loss:4.544859 +step:907 train loss:4.566495 +step:908 train loss:4.564059 +step:909 train loss:4.508760 +step:910 train loss:4.540572 +step:911 train loss:4.658373 +step:912 train loss:4.460416 +step:913 train loss:4.522056 +step:914 train loss:4.480092 +step:915 train loss:4.499628 +step:916 train loss:4.553948 +step:917 train loss:4.505970 +step:918 train loss:4.577187 +step:919 train loss:4.649368 +step:920 train loss:4.406486 +step:921 train 
loss:4.523804 +step:922 train loss:4.496347 +step:923 train loss:4.416617 +step:924 train loss:4.483130 +step:925 train loss:4.431426 +step:926 train loss:4.519087 +step:927 train loss:4.428829 +step:928 train loss:4.510311 +step:929 train loss:4.477163 +step:930 train loss:4.483751 +step:931 train loss:4.545617 +step:932 train loss:4.495607 +step:933 train loss:4.523465 +step:934 train loss:4.560405 +step:935 train loss:4.534700 +step:936 train loss:4.511860 +step:937 train loss:4.501859 +step:938 train loss:4.499055 +step:939 train loss:4.378954 +step:940 train loss:4.485855 +step:941 train loss:4.426249 +step:942 train loss:4.414875 +step:943 train loss:4.506474 +step:944 train loss:4.450958 +step:945 train loss:4.451530 +step:946 train loss:4.478462 +step:947 train loss:4.618196 +step:948 train loss:4.435627 +step:949 train loss:4.481166 +step:950 train loss:4.416127 +step:951 train loss:4.444509 +step:952 train loss:4.492692 +step:953 train loss:4.427090 +step:954 train loss:4.455034 +step:955 train loss:4.406074 +step:956 train loss:4.426384 +step:957 train loss:4.427099 +step:958 train loss:4.495639 +step:959 train loss:4.429442 +step:960 train loss:4.515671 +step:961 train loss:4.474961 +step:962 train loss:4.428998 +step:963 train loss:4.414695 +step:964 train loss:4.452927 +step:965 train loss:4.365215 +step:966 train loss:4.380452 +step:967 train loss:4.439857 +step:968 train loss:4.441948 +step:969 train loss:4.391631 +step:970 train loss:4.440488 +step:971 train loss:4.420773 +step:972 train loss:4.339725 +step:973 train loss:4.434869 +step:974 train loss:4.394404 +step:975 train loss:4.468646 +step:976 train loss:4.431961 +step:977 train loss:4.427886 +step:978 train loss:4.424165 +step:979 train loss:4.398186 +step:980 train loss:4.409558 +step:981 train loss:4.389247 +step:982 train loss:4.403720 +step:983 train loss:4.408200 +step:984 train loss:4.434012 +step:985 train loss:4.414406 +step:986 train loss:4.443476 +step:987 train loss:4.480708 +step:988 train loss:4.450324 +step:989 train loss:4.420744 +step:990 train loss:4.404448 +step:991 train loss:4.333189 +step:992 train loss:4.397522 +step:993 train loss:4.433002 +step:994 train loss:4.388315 +step:995 train loss:4.390330 +step:996 train loss:4.424715 +step:997 train loss:4.381265 +step:998 train loss:4.364965 +step:999 train loss:4.409895 +step:1000 validation loss:4.341752 total_sharp:9.8849e-04 L1_sharp:2.0111e-03 L2_sharp:7.4919e-05 L3_sharp:1.7764e-04 L4_sharp:1.3632e-04 L5_sharp:1.9296e-04 L6_sharp:1.2820e-04 L7_sharp:1.0803e-04 L8_sharp:2.0236e-04 L9_sharp:3.0863e-04 L10_sharp:2.8432e-04 L11_sharp:3.0938e-04 L12_sharp:4.8034e-04 total_fnorm:9.2918e+00 total_l1_linf:7.9812e+04 total_spectral:9.2918e+00 L1_fnorm:1.6484e+00 L2_fnorm:1.8561e+00 L3_fnorm:1.7542e+00 L4_fnorm:1.7468e+00 L5_fnorm:1.8888e+00 L6_fnorm:2.0352e+00 L7_fnorm:2.1488e+00 L8_fnorm:2.0985e+00 L9_fnorm:2.0518e+00 L10_fnorm:2.0412e+00 L11_fnorm:2.1009e+00 L12_fnorm:2.1056e+00 L1_l1linf:3.4540e+00 L2_l1linf:2.0502e+00 L3_l1linf:2.1351e+00 L4_l1linf:2.0508e+00 L5_l1linf:2.1015e+00 L6_l1linf:2.2137e+00 L7_l1linf:2.4087e+00 L8_l1linf:2.6952e+00 L9_l1linf:2.9002e+00 L10_l1linf:2.8282e+00 L11_l1linf:2.6113e+00 L12_l1linf:2.7080e+00 L1_spectral:3.9758e-01 L2_spectral:2.2084e-01 L3_spectral:2.2181e-01 L4_spectral:2.5330e-01 L5_spectral:3.4239e-01 L6_spectral:3.3947e-01 L7_spectral:3.3349e-01 L8_spectral:3.4381e-01 L9_spectral:3.5747e-01 L10_spectral:3.7845e-01 L11_spectral:4.0174e-01 L12_spectral:3.7908e-01 ip_v_neg_g:4.1583e-02 cos_v_neg_g:9.5231e-03 
v_norm:9.2918e+00 g_norm:4.6993e-01 hv_norm:3.4495e-01 cos_v_hv:2.6626e-02 hg_norm:2.8447e+00 cos_g_hg:5.0467e-01 v_par:7.6585e-03 v_perp:9.2918e+00 L1_cos_v_neg_g:4.4968e-02 L1_v_norm:1.6484e+00 L2_cos_v_neg_g:1.9147e-02 L2_v_norm:1.8561e+00 L3_cos_v_neg_g:2.3224e-02 L3_v_norm:1.7542e+00 L4_cos_v_neg_g:1.8388e-02 L4_v_norm:1.7468e+00 L5_cos_v_neg_g:1.6749e-02 L5_v_norm:1.8888e+00 L6_cos_v_neg_g:1.6562e-02 L6_v_norm:2.0352e+00 L7_cos_v_neg_g:1.5953e-02 L7_v_norm:2.1488e+00 L8_cos_v_neg_g:1.7667e-02 L8_v_norm:2.0985e+00 L9_cos_v_neg_g:1.8990e-02 L9_v_norm:2.0518e+00 L10_cos_v_neg_g:2.1838e-02 L10_v_norm:2.0412e+00 L11_cos_v_neg_g:2.2615e-02 L11_v_norm:2.1009e+00 L12_cos_v_neg_g:2.6667e-02 L12_v_norm:2.1056e+00 +step:1000 train loss:4.414564 +step:1001 train loss:4.413089 +step:1002 train loss:4.407874 +step:1003 train loss:4.387099 +step:1004 train loss:4.372099 +step:1005 train loss:4.386324 +step:1006 train loss:4.465281 +step:1007 train loss:4.404422 +step:1008 train loss:4.396027 +step:1009 train loss:4.459552 +step:1010 train loss:4.423798 +step:1011 train loss:4.441461 +step:1012 train loss:4.379019 +step:1013 train loss:4.353123 +step:1014 train loss:4.352948 +step:1015 train loss:4.378853 +step:1016 train loss:4.389400 +step:1017 train loss:4.341686 +step:1018 train loss:4.401389 +step:1019 train loss:4.342533 +step:1020 train loss:4.355200 +step:1021 train loss:4.477819 +step:1022 train loss:4.375725 +step:1023 train loss:4.388251 +step:1024 train loss:4.472997 +step:1025 train loss:4.418115 +step:1026 train loss:4.349077 +step:1027 train loss:4.387691 +step:1028 train loss:4.389698 +step:1029 train loss:4.327230 +step:1030 train loss:4.413636 +step:1031 train loss:4.398831 +step:1032 train loss:4.352793 +step:1033 train loss:4.323404 +step:1034 train loss:4.382324 +step:1035 train loss:4.380669 +step:1036 train loss:4.297905 +step:1037 train loss:4.363966 +step:1038 train loss:4.381698 +step:1039 train loss:4.518833 +step:1040 train loss:4.358538 +step:1041 train loss:4.348866 +step:1042 train loss:4.366800 +step:1043 train loss:4.372016 +step:1044 train loss:4.365676 +step:1045 train loss:4.371986 +step:1046 train loss:4.309488 +step:1047 train loss:4.331736 +step:1048 train loss:4.321599 +step:1049 train loss:4.377552 +step:1050 train loss:4.338864 +step:1051 train loss:4.315218 +step:1052 train loss:4.408877 +step:1053 train loss:4.316216 +step:1054 train loss:4.310807 +step:1055 train loss:4.372964 +step:1056 train loss:4.323488 +step:1057 train loss:4.215787 +step:1058 train loss:4.316316 +step:1059 train loss:4.305775 +step:1060 train loss:4.298505 +step:1061 train loss:4.350809 +step:1062 train loss:4.307073 +step:1063 train loss:4.318613 +step:1064 train loss:4.312449 +step:1065 train loss:4.311039 +step:1066 train loss:4.285593 +step:1067 train loss:4.315048 +step:1068 train loss:4.271353 +step:1069 train loss:4.292101 +step:1070 train loss:4.309259 +step:1071 train loss:4.383531 +step:1072 train loss:4.401128 +step:1073 train loss:4.319948 +step:1074 train loss:4.343077 +step:1075 train loss:4.373026 +step:1076 train loss:4.427821 +step:1077 train loss:4.351824 +step:1078 train loss:4.403958 +step:1079 train loss:4.429989 +step:1080 train loss:4.292155 +step:1081 train loss:4.360799 +step:1082 train loss:4.355724 +step:1083 train loss:4.317551 +step:1084 train loss:4.295177 +step:1085 train loss:4.352463 +step:1086 train loss:4.337054 +step:1087 train loss:4.324168 +step:1088 train loss:4.329887 +step:1089 train loss:4.329329 +step:1090 train loss:4.272766 +step:1091 
train loss:4.266713 +step:1092 train loss:4.382191 +step:1093 train loss:4.261182 +step:1094 train loss:4.311964 +step:1095 train loss:4.345153 +step:1096 train loss:4.277590 +step:1097 train loss:4.275489 +step:1098 train loss:4.241207 +step:1099 train loss:4.289791 +step:1100 train loss:4.333306 +step:1101 train loss:4.324565 +step:1102 train loss:4.332828 +step:1103 train loss:4.252096 +step:1104 train loss:4.284345 +step:1105 train loss:4.339834 +step:1106 train loss:4.286031 +step:1107 train loss:4.420782 +step:1108 train loss:4.348740 +step:1109 train loss:4.319005 +step:1110 train loss:4.264147 +step:1111 train loss:4.318103 +step:1112 train loss:4.236343 +step:1113 train loss:4.237203 +step:1114 train loss:4.245183 +step:1115 train loss:4.272649 +step:1116 train loss:4.337000 +step:1117 train loss:4.369856 +step:1118 train loss:4.407359 +step:1119 train loss:4.338121 +step:1120 train loss:4.350910 +step:1121 train loss:4.330945 +step:1122 train loss:4.307630 +step:1123 train loss:4.404637 +step:1124 train loss:4.275850 +step:1125 train loss:4.290929 +step:1126 train loss:4.255679 +step:1127 train loss:4.279886 +step:1128 train loss:4.278910 +step:1129 train loss:4.335986 +step:1130 train loss:4.248176 +step:1131 train loss:4.337699 +step:1132 train loss:4.283738 +step:1133 train loss:4.290568 +step:1134 train loss:4.257209 +step:1135 train loss:4.305310 +step:1136 train loss:4.314211 +step:1137 train loss:4.235548 +step:1138 train loss:4.314362 +step:1139 train loss:4.271661 +step:1140 train loss:4.340435 +step:1141 train loss:4.300322 +step:1142 train loss:4.231624 +step:1143 train loss:4.308086 +step:1144 train loss:4.328011 +step:1145 train loss:4.272300 +step:1146 train loss:4.220556 +step:1147 train loss:4.231071 +step:1148 train loss:4.255616 +step:1149 train loss:4.305111 +step:1150 train loss:4.318245 +step:1151 train loss:4.337784 +step:1152 train loss:4.254979 +step:1153 train loss:4.246916 +step:1154 train loss:4.229123 +step:1155 train loss:4.331033 +step:1156 train loss:4.231258 +step:1157 train loss:4.252398 +step:1158 train loss:4.303991 +step:1159 train loss:4.302168 +step:1160 train loss:4.229192 +step:1161 train loss:4.320054 +step:1162 train loss:4.262263 +step:1163 train loss:4.240216 +step:1164 train loss:4.154659 +step:1165 train loss:4.288138 +step:1166 train loss:4.210479 +step:1167 train loss:4.214197 +step:1168 train loss:4.277278 +step:1169 train loss:4.232493 +step:1170 train loss:4.245769 +step:1171 train loss:4.267221 +step:1172 train loss:4.232681 +step:1173 train loss:4.257841 +step:1174 train loss:4.191200 +step:1175 train loss:4.219282 +step:1176 train loss:4.334394 +step:1177 train loss:4.183940 +step:1178 train loss:4.245638 +step:1179 train loss:4.203889 +step:1180 train loss:4.238454 +step:1181 train loss:4.228164 +step:1182 train loss:4.286288 +step:1183 train loss:4.279929 +step:1184 train loss:4.212382 +step:1185 train loss:4.247139 +step:1186 train loss:4.226280 +step:1187 train loss:4.198050 +step:1188 train loss:4.239218 +step:1189 train loss:4.168529 +step:1190 train loss:4.240169 +step:1191 train loss:4.306285 +step:1192 train loss:4.241498 +step:1193 train loss:4.245128 +step:1194 train loss:4.352474 +step:1195 train loss:4.323270 +step:1196 train loss:4.217807 +step:1197 train loss:4.233358 +step:1198 train loss:4.213758 +step:1199 train loss:4.212660 +step:1200 train loss:4.272254 +step:1201 train loss:4.247901 +step:1202 train loss:4.189130 +step:1203 train loss:4.180056 +step:1204 train loss:4.214606 +step:1205 train loss:4.217974 
+step:1206 train loss:4.164728 +step:1207 train loss:4.256207 +step:1208 train loss:4.230289 +step:1209 train loss:4.153989 +step:1210 train loss:4.249649 +step:1211 train loss:4.206603 +step:1212 train loss:4.224120 +step:1213 train loss:4.156751 +step:1214 train loss:4.247677 +step:1215 train loss:4.215433 +step:1216 train loss:4.221897 +step:1217 train loss:4.165135 +step:1218 train loss:4.232446 +step:1219 train loss:4.171639 +step:1220 train loss:4.206603 +step:1221 train loss:4.228470 +step:1222 train loss:4.274463 +step:1223 train loss:4.245754 +step:1224 train loss:4.218695 +step:1225 train loss:4.271619 +step:1226 train loss:4.218736 +step:1227 train loss:4.221882 +step:1228 train loss:4.229463 +step:1229 train loss:4.191900 +step:1230 train loss:4.180040 +step:1231 train loss:4.227120 +step:1232 train loss:4.177671 +step:1233 train loss:4.170947 +step:1234 train loss:4.254363 +step:1235 train loss:4.230730 +step:1236 train loss:4.135011 +step:1237 train loss:4.236982 +step:1238 train loss:4.183288 +step:1239 train loss:4.227664 +step:1240 train loss:4.143078 +step:1241 train loss:4.192412 +step:1242 train loss:4.222966 +step:1243 train loss:4.159276 +step:1244 train loss:4.279850 +step:1245 train loss:4.301213 +step:1246 train loss:4.246104 +step:1247 train loss:4.217805 +step:1248 train loss:4.242290 +step:1249 train loss:4.172927 +step:1250 validation loss:4.167559 +step:1250 train loss:4.180403 +step:1251 train loss:4.246443 +step:1252 train loss:4.191844 +step:1253 train loss:4.145269 +step:1254 train loss:4.173310 +step:1255 train loss:4.167020 +step:1256 train loss:4.210294 +step:1257 train loss:4.190207 +step:1258 train loss:4.236927 +step:1259 train loss:4.219526 +step:1260 train loss:4.119582 +step:1261 train loss:4.357409 +step:1262 train loss:4.218781 +step:1263 train loss:4.187420 +step:1264 train loss:4.183238 +step:1265 train loss:4.238527 +step:1266 train loss:4.177380 +step:1267 train loss:4.181348 +step:1268 train loss:4.190765 +step:1269 train loss:4.180048 +step:1270 train loss:4.105729 +step:1271 train loss:4.115347 +step:1272 train loss:4.144617 +step:1273 train loss:4.202128 +step:1274 train loss:4.174186 +step:1275 train loss:4.199495 +step:1276 train loss:4.194779 +step:1277 train loss:4.206630 +step:1278 train loss:4.146295 +step:1279 train loss:4.150952 +step:1280 train loss:4.169251 +step:1281 train loss:4.224216 +step:1282 train loss:4.150895 +step:1283 train loss:4.224492 +step:1284 train loss:4.164846 +step:1285 train loss:4.211729 +step:1286 train loss:4.110067 +step:1287 train loss:4.154742 +step:1288 train loss:4.179053 +step:1289 train loss:4.235195 +step:1290 train loss:4.184194 +step:1291 train loss:4.151539 +step:1292 train loss:4.139321 +step:1293 train loss:4.130195 +step:1294 train loss:4.185330 +step:1295 train loss:4.170298 +step:1296 train loss:4.223174 +step:1297 train loss:4.181052 +step:1298 train loss:4.204227 +step:1299 train loss:4.307294 +step:1300 train loss:4.194726 +step:1301 train loss:4.302005 +step:1302 train loss:4.311685 +step:1303 train loss:4.501642 +step:1304 train loss:4.707776 +step:1305 train loss:4.578559 +step:1306 train loss:4.502111 +step:1307 train loss:4.411312 +step:1308 train loss:4.454416 +step:1309 train loss:4.475607 +step:1310 train loss:4.391404 +step:1311 train loss:4.379448 +step:1312 train loss:4.413845 +step:1313 train loss:4.318965 +step:1314 train loss:4.310229 +step:1315 train loss:4.323715 +step:1316 train loss:4.284082 +step:1317 train loss:4.170938 +step:1318 train loss:4.320189 +step:1319 
train loss:4.357336 +step:1320 train loss:4.271246 +step:1321 train loss:4.216540 +step:1322 train loss:4.318434 +step:1323 train loss:4.260422 +step:1324 train loss:4.351872 +step:1325 train loss:4.235039 +step:1326 train loss:4.258186 +step:1327 train loss:4.261303 +step:1328 train loss:4.169095 +step:1329 train loss:4.194620 +step:1330 train loss:4.209773 +step:1331 train loss:4.071434 +step:1332 train loss:4.247688 +step:1333 train loss:4.208622 +step:1334 train loss:4.214268 +step:1335 train loss:4.233405 +step:1336 train loss:4.238133 +step:1337 train loss:4.200294 +step:1338 train loss:4.176690 +step:1339 train loss:4.255792 +step:1340 train loss:4.216023 +step:1341 train loss:4.197233 +step:1342 train loss:4.163098 +step:1343 train loss:4.149091 +step:1344 train loss:4.216716 +step:1345 train loss:4.172997 +step:1346 train loss:4.249284 +step:1347 train loss:4.170830 +step:1348 train loss:4.139827 +step:1349 train loss:4.084865 +step:1350 train loss:4.115826 +step:1351 train loss:4.183481 +step:1352 train loss:4.155582 +step:1353 train loss:4.128409 +step:1354 train loss:4.130188 +step:1355 train loss:4.202678 +step:1356 train loss:4.115366 +step:1357 train loss:4.142911 +step:1358 train loss:4.136549 +step:1359 train loss:4.142234 +step:1360 train loss:4.181767 +step:1361 train loss:4.295053 +step:1362 train loss:4.210635 +step:1363 train loss:4.089260 +step:1364 train loss:4.118665 +step:1365 train loss:4.113513 +step:1366 train loss:4.151253 +step:1367 train loss:4.076294 +step:1368 train loss:4.113381 +step:1369 train loss:4.147612 +step:1370 train loss:4.163045 +step:1371 train loss:4.121999 +step:1372 train loss:4.155216 +step:1373 train loss:4.189524 +step:1374 train loss:4.195735 +step:1375 train loss:4.153912 +step:1376 train loss:4.169974 +step:1377 train loss:4.163882 +step:1378 train loss:4.155071 +step:1379 train loss:4.131287 +step:1380 train loss:4.197113 +step:1381 train loss:4.144582 +step:1382 train loss:4.118828 +step:1383 train loss:4.105154 +step:1384 train loss:4.183771 +step:1385 train loss:4.091203 +step:1386 train loss:4.155115 +step:1387 train loss:4.161725 +step:1388 train loss:4.124803 +step:1389 train loss:4.096877 +step:1390 train loss:4.135108 +step:1391 train loss:4.165436 +step:1392 train loss:4.143900 +step:1393 train loss:4.198237 +step:1394 train loss:4.123873 +step:1395 train loss:4.162095 +step:1396 train loss:4.148779 +step:1397 train loss:4.164762 +step:1398 train loss:4.168156 +step:1399 train loss:4.136350 +step:1400 train loss:4.113783 +step:1401 train loss:4.105290 +step:1402 train loss:4.124694 +step:1403 train loss:4.096453 +step:1404 train loss:4.170971 +step:1405 train loss:4.119547 +step:1406 train loss:4.149317 +step:1407 train loss:4.141183 +step:1408 train loss:4.119431 +step:1409 train loss:4.106374 +step:1410 train loss:4.118325 +step:1411 train loss:4.147246 +step:1412 train loss:4.207626 +step:1413 train loss:4.125606 +step:1414 train loss:4.148757 +step:1415 train loss:4.111078 +step:1416 train loss:4.169283 +step:1417 train loss:4.129786 +step:1418 train loss:4.068280 +step:1419 train loss:4.077377 +step:1420 train loss:4.106316 +step:1421 train loss:4.149464 +step:1422 train loss:4.123374 +step:1423 train loss:4.227360 +step:1424 train loss:4.123873 +step:1425 train loss:4.090915 +step:1426 train loss:4.113624 +step:1427 train loss:4.100444 +step:1428 train loss:4.081713 +step:1429 train loss:4.101102 +step:1430 train loss:4.104389 +step:1431 train loss:4.128824 +step:1432 train loss:4.109875 +step:1433 train loss:4.092673 
+step:1434 train loss:4.065939 +step:1435 train loss:4.062738 +step:1436 train loss:4.135280 +step:1437 train loss:4.064503 +step:1438 train loss:4.064507 +step:1439 train loss:4.049565 +step:1440 train loss:4.087192 +step:1441 train loss:4.164459 +step:1442 train loss:4.129688 +step:1443 train loss:4.058558 +step:1444 train loss:4.069170 +step:1445 train loss:4.072626 +step:1446 train loss:4.105903 +step:1447 train loss:4.116049 +step:1448 train loss:4.084275 +step:1449 train loss:4.127421 +step:1450 train loss:4.144786 +step:1451 train loss:4.070927 +step:1452 train loss:4.125022 +step:1453 train loss:4.105888 +step:1454 train loss:4.101550 +step:1455 train loss:4.032387 +step:1456 train loss:4.113111 +step:1457 train loss:4.045961 +step:1458 train loss:4.185276 +step:1459 train loss:4.103850 +step:1460 train loss:4.070231 +step:1461 train loss:4.126958 +step:1462 train loss:4.128646 +step:1463 train loss:4.092123 +step:1464 train loss:4.073344 +step:1465 train loss:4.070026 +step:1466 train loss:4.032123 +step:1467 train loss:4.171999 +step:1468 train loss:4.059409 +step:1469 train loss:4.135857 +step:1470 train loss:4.072168 +step:1471 train loss:4.066920 +step:1472 train loss:4.074368 +step:1473 train loss:4.076196 +step:1474 train loss:4.022893 +step:1475 train loss:4.078954 +step:1476 train loss:4.160198 +step:1477 train loss:4.111495 +step:1478 train loss:4.039968 +step:1479 train loss:4.071746 +step:1480 train loss:4.065972 +step:1481 train loss:4.036080 +step:1482 train loss:4.099718 +step:1483 train loss:4.091329 +step:1484 train loss:4.119198 +step:1485 train loss:4.132014 +step:1486 train loss:4.068325 +step:1487 train loss:4.057366 +step:1488 train loss:4.062176 +step:1489 train loss:4.055284 +step:1490 train loss:4.107078 +step:1491 train loss:4.105156 +step:1492 train loss:4.107010 +step:1493 train loss:4.051515 +step:1494 train loss:4.081783 +step:1495 train loss:4.060959 +step:1496 train loss:4.031105 +step:1497 train loss:4.102638 +step:1498 train loss:4.010715 +step:1499 train loss:4.050195 +step:1500 validation loss:4.024103 total_sharp:7.9317e-04 L1_sharp:8.7317e-04 L2_sharp:1.0129e-04 L3_sharp:4.7601e-04 L4_sharp:2.6393e-04 L5_sharp:1.5868e-04 L6_sharp:6.0990e-05 L7_sharp:7.3771e-05 L8_sharp:1.7613e-04 L9_sharp:2.5994e-04 L10_sharp:2.4686e-04 L11_sharp:2.2307e-04 L12_sharp:3.7234e-04 total_fnorm:9.9125e+00 total_l1_linf:8.6333e+04 total_spectral:9.9125e+00 L1_fnorm:2.0503e+00 L2_fnorm:2.1391e+00 L3_fnorm:2.0690e+00 L4_fnorm:2.0408e+00 L5_fnorm:2.1077e+00 L6_fnorm:2.2200e+00 L7_fnorm:2.3112e+00 L8_fnorm:2.2481e+00 L9_fnorm:2.2294e+00 L10_fnorm:2.2055e+00 L11_fnorm:2.2422e+00 L12_fnorm:2.2215e+00 L1_l1linf:2.3959e+00 L2_l1linf:2.2756e+00 L3_l1linf:2.3255e+00 L4_l1linf:2.2793e+00 L5_l1linf:2.2613e+00 L6_l1linf:2.2790e+00 L7_l1linf:2.4637e+00 L8_l1linf:2.4362e+00 L9_l1linf:2.8490e+00 L10_l1linf:2.6593e+00 L11_l1linf:2.6802e+00 L12_l1linf:2.7163e+00 L1_spectral:3.5928e-01 L2_spectral:2.6911e-01 L3_spectral:2.5095e-01 L4_spectral:2.9760e-01 L5_spectral:3.5853e-01 L6_spectral:3.0523e-01 L7_spectral:2.7136e-01 L8_spectral:2.9593e-01 L9_spectral:3.5986e-01 L10_spectral:3.7006e-01 L11_spectral:3.5889e-01 L12_spectral:3.8990e-01 ip_v_neg_g:3.7674e-02 cos_v_neg_g:8.1674e-03 v_norm:9.9125e+00 g_norm:4.6534e-01 hv_norm:3.5403e-01 cos_v_hv:2.2208e-02 hg_norm:2.8380e+00 cos_g_hg:4.6551e-01 v_par:4.7211e-03 v_perp:9.9125e+00 L1_cos_v_neg_g:3.8149e-02 L1_v_norm:2.0503e+00 L2_cos_v_neg_g:1.8641e-02 L2_v_norm:2.1391e+00 L3_cos_v_neg_g:3.2340e-02 L3_v_norm:2.0690e+00 
L4_cos_v_neg_g:1.8910e-02 L4_v_norm:2.0408e+00 L5_cos_v_neg_g:1.6078e-02 L5_v_norm:2.1077e+00 L6_cos_v_neg_g:1.1378e-02 L6_v_norm:2.2200e+00 L7_cos_v_neg_g:1.1375e-02 L7_v_norm:2.3112e+00 L8_cos_v_neg_g:1.4872e-02 L8_v_norm:2.2481e+00 L9_cos_v_neg_g:1.5678e-02 L9_v_norm:2.2294e+00 L10_cos_v_neg_g:1.5511e-02 L10_v_norm:2.2055e+00 L11_cos_v_neg_g:1.7695e-02 L11_v_norm:2.2422e+00 L12_cos_v_neg_g:1.9846e-02 L12_v_norm:2.2215e+00 +step:1500 train loss:4.047730 +step:1501 train loss:4.069458 +step:1502 train loss:4.007379 +step:1503 train loss:4.061267 +step:1504 train loss:4.025670 +step:1505 train loss:3.999517 +step:1506 train loss:3.994372 +step:1507 train loss:4.015235 +step:1508 train loss:4.029017 +step:1509 train loss:4.080603 +step:1510 train loss:4.017835 +step:1511 train loss:4.057536 +step:1512 train loss:4.035375 +step:1513 train loss:4.101176 +step:1514 train loss:4.051688 +step:1515 train loss:4.108442 +step:1516 train loss:4.034519 +step:1517 train loss:4.041155 +step:1518 train loss:4.125053 +step:1519 train loss:4.082793 +step:1520 train loss:4.124796 +step:1521 train loss:4.028919 +step:1522 train loss:4.089158 +step:1523 train loss:4.087806 +step:1524 train loss:4.007684 +step:1525 train loss:4.093172 +step:1526 train loss:4.007576 +step:1527 train loss:4.057990 +step:1528 train loss:4.108501 +step:1529 train loss:4.066045 +step:1530 train loss:4.107835 +step:1531 train loss:4.026718 +step:1532 train loss:4.100309 +step:1533 train loss:4.071424 +step:1534 train loss:4.018554 +step:1535 train loss:4.073817 +step:1536 train loss:4.097502 +step:1537 train loss:4.050635 +step:1538 train loss:4.048608 +step:1539 train loss:4.052009 +step:1540 train loss:4.069438 +step:1541 train loss:4.033843 +step:1542 train loss:4.125397 +step:1543 train loss:4.147895 +step:1544 train loss:4.018807 +step:1545 train loss:4.002220 +step:1546 train loss:4.039429 +step:1547 train loss:4.027657 +step:1548 train loss:4.068680 +step:1549 train loss:3.995639 +step:1550 train loss:4.117967 +step:1551 train loss:4.055094 +step:1552 train loss:4.083150 +step:1553 train loss:4.088951 +step:1554 train loss:4.093177 +step:1555 train loss:4.050635 +step:1556 train loss:4.025099 +step:1557 train loss:4.040845 +step:1558 train loss:4.059471 +step:1559 train loss:4.024708 +step:1560 train loss:4.102163 +step:1561 train loss:4.073444 +step:1562 train loss:3.966149 +step:1563 train loss:3.938341 +step:1564 train loss:4.077840 +step:1565 train loss:4.057431 +step:1566 train loss:4.069748 +step:1567 train loss:4.069485 +step:1568 train loss:4.021294 +step:1569 train loss:4.018087 +step:1570 train loss:4.035509 +step:1571 train loss:4.010146 +step:1572 train loss:4.016887 +step:1573 train loss:4.054911 +step:1574 train loss:4.015951 +step:1575 train loss:4.037301 +step:1576 train loss:3.997198 +step:1577 train loss:4.022764 +step:1578 train loss:4.012953 +step:1579 train loss:4.089596 +step:1580 train loss:4.042548 +step:1581 train loss:4.073399 +step:1582 train loss:4.077966 +step:1583 train loss:4.048977 +step:1584 train loss:3.970471 +step:1585 train loss:4.057228 +step:1586 train loss:4.026873 +step:1587 train loss:4.036551 +step:1588 train loss:4.020385 +step:1589 train loss:4.068529 +step:1590 train loss:3.976244 +step:1591 train loss:4.035110 +step:1592 train loss:3.984308 +step:1593 train loss:4.021059 +step:1594 train loss:4.022670 +step:1595 train loss:4.015117 +step:1596 train loss:4.020113 +step:1597 train loss:3.953313 +step:1598 train loss:4.055415 +step:1599 train loss:4.063653 +step:1600 train 
loss:3.941944 +step:1601 train loss:4.014518 +step:1602 train loss:4.070240 +step:1603 train loss:4.066421 +step:1604 train loss:3.994363 +step:1605 train loss:4.041283 +step:1606 train loss:4.093266 +step:1607 train loss:3.978938 +step:1608 train loss:4.005732 +step:1609 train loss:4.025848 +step:1610 train loss:4.090066 +step:1611 train loss:4.033742 +step:1612 train loss:3.953313 +step:1613 train loss:4.023317 +step:1614 train loss:4.134382 +step:1615 train loss:4.057181 +step:1616 train loss:4.091053 +step:1617 train loss:4.070261 +step:1618 train loss:4.069319 +step:1619 train loss:4.233404 +step:1620 train loss:4.021674 +step:1621 train loss:4.080221 +step:1622 train loss:4.001071 +step:1623 train loss:4.058548 +step:1624 train loss:4.021446 +step:1625 train loss:4.092818 +step:1626 train loss:3.982821 +step:1627 train loss:3.991847 +step:1628 train loss:4.012813 +step:1629 train loss:4.042014 +step:1630 train loss:4.049669 +step:1631 train loss:4.003295 +step:1632 train loss:3.976451 +step:1633 train loss:3.989422 +step:1634 train loss:4.042941 +step:1635 train loss:3.987887 +step:1636 train loss:3.972208 +step:1637 train loss:4.044597 +step:1638 train loss:4.150200 +step:1639 train loss:3.956005 +step:1640 train loss:4.032937 +step:1641 train loss:3.998543 +step:1642 train loss:4.090517 +step:1643 train loss:3.990946 +step:1644 train loss:4.003391 +step:1645 train loss:3.979270 +step:1646 train loss:4.060076 +step:1647 train loss:3.954236 +step:1648 train loss:4.015787 +step:1649 train loss:3.983935 +step:1650 train loss:3.993498 +step:1651 train loss:4.012429 +step:1652 train loss:4.031733 +step:1653 train loss:4.031432 +step:1654 train loss:4.029333 +step:1655 train loss:4.002982 +step:1656 train loss:4.001345 +step:1657 train loss:4.006923 +step:1658 train loss:3.974354 +step:1659 train loss:4.050649 +step:1660 train loss:3.950357 +step:1661 train loss:4.059850 +step:1662 train loss:3.997494 +step:1663 train loss:3.990583 +step:1664 train loss:4.083510 +step:1665 train loss:4.012324 +step:1666 train loss:4.023341 +step:1667 train loss:4.042181 +step:1668 train loss:4.017894 +step:1669 train loss:3.973710 +step:1670 train loss:4.022765 +step:1671 train loss:4.020669 +step:1672 train loss:4.013546 +step:1673 train loss:3.972839 +step:1674 train loss:3.971260 +step:1675 train loss:4.013766 +step:1676 train loss:4.279771 +step:1677 train loss:4.088439 +step:1678 train loss:4.020848 +step:1679 train loss:4.128623 +step:1680 train loss:4.074792 +step:1681 train loss:4.484697 +step:1682 train loss:4.273059 +step:1683 train loss:4.757969 +step:1684 train loss:4.402094 +step:1685 train loss:4.376161 +step:1686 train loss:4.255139 +step:1687 train loss:4.304095 +step:1688 train loss:4.232327 +step:1689 train loss:4.182388 +step:1690 train loss:4.177129 +step:1691 train loss:4.151782 +step:1692 train loss:4.146664 +step:1693 train loss:4.106828 +step:1694 train loss:4.050551 +step:1695 train loss:4.073864 +step:1696 train loss:4.091455 +step:1697 train loss:4.122323 +step:1698 train loss:4.109637 +step:1699 train loss:4.055215 +step:1700 train loss:4.126214 +step:1701 train loss:4.057976 +step:1702 train loss:4.040476 +step:1703 train loss:4.057609 +step:1704 train loss:4.059996 +step:1705 train loss:4.065632 +step:1706 train loss:4.075554 +step:1707 train loss:4.069259 +step:1708 train loss:3.988033 +step:1709 train loss:4.087694 +step:1710 train loss:3.998649 +step:1711 train loss:3.996608 +step:1712 train loss:4.023020 +step:1713 train loss:3.984310 +step:1714 train loss:4.348366 
+step:1715 train loss:3.995584 +step:1716 train loss:3.976276 +step:1717 train loss:3.977519 +step:1718 train loss:4.051670 +step:1719 train loss:3.967784 +step:1720 train loss:4.044944 +step:1721 train loss:3.981482 +step:1722 train loss:3.953295 +step:1723 train loss:4.049940 +step:1724 train loss:3.999342 +step:1725 train loss:3.997245 +step:1726 train loss:3.996524 +step:1727 train loss:4.028683 +step:1728 train loss:4.033799 +step:1729 train loss:3.971372 +step:1730 train loss:4.043718 +step:1731 train loss:3.970908 +step:1732 train loss:3.987130 +step:1733 train loss:3.992517 +step:1734 train loss:4.022061 +step:1735 train loss:4.084751 +step:1736 train loss:3.989603 +step:1737 train loss:4.018537 +step:1738 train loss:3.982386 +step:1739 train loss:4.050225 +step:1740 train loss:4.040092 +step:1741 train loss:4.093711 +step:1742 train loss:4.083070 +step:1743 train loss:3.967285 +step:1744 train loss:3.985718 +step:1745 train loss:3.969974 +step:1746 train loss:3.954251 +step:1747 train loss:3.991662 +step:1748 train loss:3.934581 +step:1749 train loss:3.968160 +step:1750 validation loss:3.942251 +step:1750 train loss:4.000164 +step:1751 train loss:4.015184 +step:1752 train loss:3.978872 +step:1753 train loss:4.009624 +step:1754 train loss:4.001665 +step:1755 train loss:3.998013 +step:1756 train loss:4.024210 +step:1757 train loss:4.027448 +step:1758 train loss:3.943583 +step:1759 train loss:4.035458 +step:1760 train loss:3.983468 +step:1761 train loss:3.959581 +step:1762 train loss:3.958689 +step:1763 train loss:3.961756 +step:1764 train loss:4.250535 +step:1765 train loss:3.969001 +step:1766 train loss:4.060223 +step:1767 train loss:3.970822 +step:1768 train loss:3.951544 +step:1769 train loss:3.981592 +step:1770 train loss:3.983477 +step:1771 train loss:3.954203 +step:1772 train loss:4.061491 +step:1773 train loss:3.988976 +step:1774 train loss:3.994361 +step:1775 train loss:4.110717 +step:1776 train loss:3.991735 +step:1777 train loss:3.987687 +step:1778 train loss:4.042256 +step:1779 train loss:3.969640 +step:1780 train loss:4.026930 +step:1781 train loss:4.048319 +step:1782 train loss:4.069564 +step:1783 train loss:3.997144 +step:1784 train loss:4.087638 +step:1785 train loss:3.989344 +step:1786 train loss:3.982852 +step:1787 train loss:3.983750 +step:1788 train loss:3.998709 +step:1789 train loss:3.953852 +step:1790 train loss:3.963367 +step:1791 train loss:4.045856 +step:1792 train loss:4.044196 +step:1793 train loss:3.957681 +step:1794 train loss:4.003005 +step:1795 train loss:3.956792 +step:1796 train loss:3.939022 +step:1797 train loss:4.003705 +step:1798 train loss:3.945816 +step:1799 train loss:4.003575 +step:1800 train loss:4.025130 +step:1801 train loss:4.017073 +step:1802 train loss:4.018944 +step:1803 train loss:4.013021 +step:1804 train loss:4.010542 +step:1805 train loss:3.997681 +step:1806 train loss:4.011146 +step:1807 train loss:3.940695 +step:1808 train loss:4.002801 +step:1809 train loss:3.985620 +step:1810 train loss:3.980189 +step:1811 train loss:3.998058 +step:1812 train loss:3.983189 +step:1813 train loss:3.992818 +step:1814 train loss:4.050738 +step:1815 train loss:3.993549 +step:1816 train loss:3.948248 +step:1817 train loss:3.937062 +step:1818 train loss:3.991439 +step:1819 train loss:3.962219 +step:1820 train loss:3.998983 +step:1821 train loss:3.961989 +step:1822 train loss:3.941593 +step:1823 train loss:3.941039 +step:1824 train loss:4.016041 +step:1825 train loss:3.934199 +step:1826 train loss:3.978531 +step:1827 train loss:3.940127 +step:1828 
train loss:3.992754 +step:1829 train loss:3.956343 +step:1830 train loss:4.157189 +step:1831 train loss:3.908722 +step:1832 train loss:3.957453 +step:1833 train loss:4.011349 +step:1834 train loss:3.954860 +step:1835 train loss:3.975842 +step:1836 train loss:4.006674 +step:1837 train loss:3.929019 +step:1838 train loss:4.023482 +step:1839 train loss:3.998336 +step:1840 train loss:3.967843 +step:1841 train loss:3.993517 +step:1842 train loss:3.968809 +step:1843 train loss:3.928683 +step:1844 train loss:3.991525 +step:1845 train loss:3.951683 +step:1846 train loss:4.004753 +step:1847 train loss:4.056801 +step:1848 train loss:3.852051 +step:1849 train loss:3.950931 +step:1850 train loss:3.924215 +step:1851 train loss:3.964382 +step:1852 train loss:3.949816 +step:1853 train loss:4.005497 +step:1854 train loss:3.968060 +step:1855 train loss:3.954076 +step:1856 train loss:3.955837 +step:1857 train loss:3.963099 +step:1858 train loss:4.019126 +step:1859 train loss:3.986855 +step:1860 train loss:3.953360 +step:1861 train loss:3.961571 +step:1862 train loss:3.998954 +step:1863 train loss:4.030713 +step:1864 train loss:3.927026 +step:1865 train loss:3.946654 +step:1866 train loss:3.950256 +step:1867 train loss:3.983309 +step:1868 train loss:4.027579 +step:1869 train loss:3.945081 +step:1870 train loss:3.976434 +step:1871 train loss:3.916691 +step:1872 train loss:3.993084 +step:1873 train loss:4.052659 +step:1874 train loss:3.913037 +step:1875 train loss:3.994714 +step:1876 train loss:3.954991 +step:1877 train loss:3.991385 +step:1878 train loss:3.914018 +step:1879 train loss:3.972884 +step:1880 train loss:4.047966 +step:1881 train loss:3.974558 +step:1882 train loss:3.993429 +step:1883 train loss:4.017551 +step:1884 train loss:4.026866 +step:1885 train loss:3.984235 +step:1886 train loss:3.913253 +step:1887 train loss:3.933576 +step:1888 train loss:3.935405 +step:1889 train loss:3.955542 +step:1890 train loss:3.956766 +step:1891 train loss:3.903618 +step:1892 train loss:3.991226 +step:1893 train loss:3.920650 +step:1894 train loss:3.931799 +step:1895 train loss:3.968838 +step:1896 train loss:4.012998 +step:1897 train loss:3.909792 +step:1898 train loss:3.955415 +step:1899 train loss:3.969798 +step:1900 train loss:3.919209 +step:1901 train loss:3.998579 +step:1902 train loss:3.988307 +step:1903 train loss:3.929313 +step:1904 train loss:3.916950 +step:1905 train loss:3.917419 +step:1906 train loss:3.972513 +step:1907 train loss:3.916415 +step:1908 train loss:3.927321 +step:1909 train loss:4.024633 +step:1910 train loss:3.919858 +step:1911 train loss:3.926836 +step:1912 train loss:3.972389 +step:1913 train loss:3.916129 +step:1914 train loss:3.952456 +step:1915 train loss:3.914937 +step:1916 train loss:3.965640 +step:1917 train loss:3.946537 +step:1918 train loss:3.855830 +step:1919 train loss:4.007054 +step:1920 train loss:4.109826 +step:1921 train loss:3.892478 +step:1922 train loss:3.870419 +step:1923 train loss:3.973026 +step:1924 train loss:4.022518 +step:1925 train loss:3.963804 +step:1926 train loss:3.900899 +step:1927 train loss:3.982046 +step:1928 train loss:3.896726 +step:1929 train loss:3.918351 +step:1930 train loss:3.991528 +step:1931 train loss:3.900918 +step:1932 train loss:3.956031 +step:1933 train loss:3.949123 +step:1934 train loss:4.018982 +step:1935 train loss:3.973690 +step:1936 train loss:3.943563 +step:1937 train loss:3.881486 +step:1938 train loss:4.253340 +step:1939 train loss:4.006532 +step:1940 train loss:3.990845 +step:1941 train loss:3.988845 +step:1942 train loss:3.983247 
+step:1943 train loss:3.972138 +step:1944 train loss:3.928364 +step:1945 train loss:3.929771 +step:1946 train loss:3.961505 +step:1947 train loss:3.973257 +step:1948 train loss:3.880934 +step:1949 train loss:3.987352 +step:1950 train loss:3.923873 +step:1951 train loss:3.947390 +step:1952 train loss:3.973082 +step:1953 train loss:3.906875 +step:1954 train loss:3.940815 +step:1955 train loss:3.895388 +step:1956 train loss:3.982517 +step:1957 train loss:4.001370 +step:1958 train loss:4.020164 +step:1959 train loss:3.888662 +step:1960 train loss:3.932717 +step:1961 train loss:3.965557 +step:1962 train loss:3.951775 +step:1963 train loss:3.929693 +step:1964 train loss:3.963498 +step:1965 train loss:4.002924 +step:1966 train loss:3.909245 +step:1967 train loss:3.965319 +step:1968 train loss:3.901795 +step:1969 train loss:3.920423 +step:1970 train loss:3.980905 +step:1971 train loss:3.882571 +step:1972 train loss:3.988991 +step:1973 train loss:3.893506 +step:1974 train loss:3.939828 +step:1975 train loss:3.893895 +step:1976 train loss:3.919450 +step:1977 train loss:3.963084 +step:1978 train loss:3.905416 +step:1979 train loss:3.885432 +step:1980 train loss:3.928602 +step:1981 train loss:3.906878 +step:1982 train loss:3.986263 +step:1983 train loss:3.929358 +step:1984 train loss:3.967601 +step:1985 train loss:3.953626 +step:1986 train loss:3.946153 +step:1987 train loss:3.898384 +step:1988 train loss:3.929745 +step:1989 train loss:4.069606 +step:1990 train loss:3.910748 +step:1991 train loss:3.902527 +step:1992 train loss:3.912600 +step:1993 train loss:3.943219 +step:1994 train loss:3.931740 +step:1995 train loss:3.885594 +step:1996 train loss:3.940467 +step:1997 train loss:3.944505 +step:1998 train loss:3.898017 +step:1999 train loss:4.008618 +step:2000 validation loss:3.875219 total_sharp:6.1360e-04 L1_sharp:4.7613e-04 L2_sharp:3.7055e-05 L3_sharp:8.4593e-05 L4_sharp:8.8563e-05 L5_sharp:9.9009e-05 L6_sharp:6.2971e-05 L7_sharp:6.0091e-05 L8_sharp:1.1957e-04 L9_sharp:2.0095e-04 L10_sharp:2.5488e-04 L11_sharp:2.3301e-04 L12_sharp:3.6295e-04 total_fnorm:1.0288e+01 total_l1_linf:9.0020e+04 total_spectral:1.0288e+01 L1_fnorm:2.3020e+00 L2_fnorm:2.2839e+00 L3_fnorm:2.1573e+00 L4_fnorm:2.2027e+00 L5_fnorm:2.2570e+00 L6_fnorm:2.3579e+00 L7_fnorm:2.4272e+00 L8_fnorm:2.3458e+00 L9_fnorm:2.3356e+00 L10_fnorm:2.3096e+00 L11_fnorm:2.3683e+00 L12_fnorm:2.3097e+00 L1_l1linf:3.0229e+00 L2_l1linf:2.3982e+00 L3_l1linf:2.4225e+00 L4_l1linf:2.4923e+00 L5_l1linf:2.4646e+00 L6_l1linf:2.3977e+00 L7_l1linf:2.6848e+00 L8_l1linf:2.8406e+00 L9_l1linf:2.7464e+00 L10_l1linf:2.7991e+00 L11_l1linf:2.8845e+00 L12_l1linf:3.0639e+00 L1_spectral:3.5582e-01 L2_spectral:2.5099e-01 L3_spectral:2.5438e-01 L4_spectral:3.1434e-01 L5_spectral:3.2778e-01 L6_spectral:2.7278e-01 L7_spectral:3.1148e-01 L8_spectral:3.3296e-01 L9_spectral:3.7421e-01 L10_spectral:4.1181e-01 L11_spectral:3.9513e-01 L12_spectral:4.2025e-01 ip_v_neg_g:3.5389e-02 cos_v_neg_g:6.9746e-03 v_norm:1.0288e+01 g_norm:4.9318e-01 hv_norm:3.2072e-01 cos_v_hv:1.9684e-02 hg_norm:2.9036e+00 cos_g_hg:4.5566e-01 v_par:3.2921e-03 v_perp:1.0288e+01 L1_cos_v_neg_g:3.1538e-02 L1_v_norm:2.3020e+00 L2_cos_v_neg_g:9.8407e-03 L2_v_norm:2.2839e+00 L3_cos_v_neg_g:1.1313e-02 L3_v_norm:2.1573e+00 L4_cos_v_neg_g:1.2455e-02 L4_v_norm:2.2027e+00 L5_cos_v_neg_g:1.2596e-02 L5_v_norm:2.2570e+00 L6_cos_v_neg_g:1.0949e-02 L6_v_norm:2.3579e+00 L7_cos_v_neg_g:9.3946e-03 L7_v_norm:2.4272e+00 L8_cos_v_neg_g:1.1945e-02 L8_v_norm:2.3458e+00 L9_cos_v_neg_g:1.4498e-02 L9_v_norm:2.3356e+00 
L10_cos_v_neg_g:1.7824e-02 L10_v_norm:2.3096e+00 L11_cos_v_neg_g:1.7196e-02 L11_v_norm:2.3683e+00 L12_cos_v_neg_g:1.6964e-02 L12_v_norm:2.3097e+00 +step:2000 train loss:3.972678 +step:2001 train loss:3.898404 +step:2002 train loss:3.998164 +step:2003 train loss:4.040480 +step:2004 train loss:3.912385 +step:2005 train loss:4.010886 +step:2006 train loss:3.903619 +step:2007 train loss:3.978896 +step:2008 train loss:3.923357 +step:2009 train loss:3.919186 +step:2010 train loss:4.047628 +step:2011 train loss:3.897507 +step:2012 train loss:3.924870 +step:2013 train loss:3.934942 +step:2014 train loss:3.832041 +step:2015 train loss:3.957320 +step:2016 train loss:3.942473 +step:2017 train loss:3.943445 +step:2018 train loss:3.913874 +step:2019 train loss:3.939144 +step:2020 train loss:3.946106 +step:2021 train loss:3.909962 +step:2022 train loss:3.950083 +step:2023 train loss:3.926218 +step:2024 train loss:3.978918 +step:2025 train loss:3.920316 +step:2026 train loss:3.900621 +step:2027 train loss:3.928434 +step:2028 train loss:3.857586 +step:2029 train loss:3.889867 +step:2030 train loss:3.896041 +step:2031 train loss:3.857114 +step:2032 train loss:3.909298 +step:2033 train loss:3.908709 +step:2034 train loss:3.903136 +step:2035 train loss:3.945117 +step:2036 train loss:3.933665 +step:2037 train loss:3.916565 +step:2038 train loss:3.908650 +step:2039 train loss:3.902055 +step:2040 train loss:3.933795 +step:2041 train loss:3.933633 +step:2042 train loss:3.864305 +step:2043 train loss:4.017377 +step:2044 train loss:3.884112 +step:2045 train loss:3.907258 +step:2046 train loss:3.916798 +step:2047 train loss:3.890143 +step:2048 train loss:3.935437 +step:2049 train loss:3.895479 +step:2050 train loss:3.912800 +step:2051 train loss:3.881586 +step:2052 train loss:3.931599 +step:2053 train loss:3.932800 +step:2054 train loss:3.900753 +step:2055 train loss:3.908725 +step:2056 train loss:3.951905 +step:2057 train loss:3.954277 +step:2058 train loss:3.920947 +step:2059 train loss:4.000293 +step:2060 train loss:3.946922 +step:2061 train loss:3.898937 +step:2062 train loss:3.922987 +step:2063 train loss:3.826828 +step:2064 train loss:3.945448 +step:2065 train loss:3.953741 +step:2066 train loss:3.814016 +step:2067 train loss:3.874871 +step:2068 train loss:3.975609 +step:2069 train loss:3.913258 +step:2070 train loss:3.917551 +step:2071 train loss:3.952836 +step:2072 train loss:3.882717 +step:2073 train loss:3.934978 +step:2074 train loss:3.907371 +step:2075 train loss:3.995205 +step:2076 train loss:3.936710 +step:2077 train loss:3.950174 +step:2078 train loss:3.907284 +step:2079 train loss:4.055776 +step:2080 train loss:3.874622 +step:2081 train loss:3.982303 +step:2082 train loss:3.912482 +step:2083 train loss:3.897248 +step:2084 train loss:3.876859 +step:2085 train loss:3.919668 +step:2086 train loss:3.931340 +step:2087 train loss:3.972000 +step:2088 train loss:3.837563 +step:2089 train loss:3.866648 +step:2090 train loss:3.902127 +step:2091 train loss:3.919514 +step:2092 train loss:3.897757 +step:2093 train loss:3.887933 +step:2094 train loss:3.923516 +step:2095 train loss:3.872232 +step:2096 train loss:3.858168 +step:2097 train loss:3.895269 +step:2098 train loss:3.896471 +step:2099 train loss:3.876009 +step:2100 train loss:3.945936 +step:2101 train loss:3.936035 +step:2102 train loss:3.905406 +step:2103 train loss:3.922538 +step:2104 train loss:3.894651 +step:2105 train loss:3.903031 +step:2106 train loss:3.897032 +step:2107 train loss:3.965094 +step:2108 train loss:3.888597 +step:2109 train 
loss:3.847020 +step:2110 train loss:3.944380 +step:2111 train loss:3.893232 +step:2112 train loss:3.952914 +step:2113 train loss:3.900623 +step:2114 train loss:3.904397 +step:2115 train loss:3.952127 +step:2116 train loss:3.886880 +step:2117 train loss:3.902804 +step:2118 train loss:3.889386 +step:2119 train loss:3.827380 +step:2120 train loss:3.916159 +step:2121 train loss:3.899577 +step:2122 train loss:3.907312 +step:2123 train loss:3.962387 +step:2124 train loss:3.966530 +step:2125 train loss:3.873873 +step:2126 train loss:3.882101 +step:2127 train loss:3.878085 +step:2128 train loss:3.871492 +step:2129 train loss:3.894180 +step:2130 train loss:3.894350 +step:2131 train loss:3.919293 +step:2132 train loss:3.849402 +step:2133 train loss:3.957405 +step:2134 train loss:3.912255 +step:2135 train loss:3.866801 +step:2136 train loss:3.960835 +step:2137 train loss:3.923394 +step:2138 train loss:3.879310 +step:2139 train loss:3.879998 +step:2140 train loss:3.884012 +step:2141 train loss:3.930788 +step:2142 train loss:3.903793 +step:2143 train loss:3.826260 +step:2144 train loss:3.931077 +step:2145 train loss:3.901731 +step:2146 train loss:3.936480 +step:2147 train loss:4.043325 +step:2148 train loss:3.842120 +step:2149 train loss:3.855927 +step:2150 train loss:3.880557 +step:2151 train loss:3.916897 +step:2152 train loss:3.905458 +step:2153 train loss:3.943847 +step:2154 train loss:3.863378 +step:2155 train loss:3.944803 +step:2156 train loss:3.867871 +step:2157 train loss:3.944294 +step:2158 train loss:3.980816 +step:2159 train loss:3.907745 +step:2160 train loss:3.980675 +step:2161 train loss:3.878830 +step:2162 train loss:3.886001 +step:2163 train loss:3.862969 +step:2164 train loss:3.884079 +step:2165 train loss:3.862358 +step:2166 train loss:3.979141 +step:2167 train loss:3.888014 +step:2168 train loss:3.900081 +step:2169 train loss:3.851173 +step:2170 train loss:3.995214 +step:2171 train loss:3.955255 +step:2172 train loss:3.892121 +step:2173 train loss:3.884407 +step:2174 train loss:3.950321 +step:2175 train loss:3.881515 +step:2176 train loss:3.974804 +step:2177 train loss:3.933328 +step:2178 train loss:3.864751 +step:2179 train loss:3.930621 +step:2180 train loss:3.943532 +step:2181 train loss:3.871011 +step:2182 train loss:3.920139 +step:2183 train loss:3.912320 +step:2184 train loss:3.864476 +step:2185 train loss:3.847136 +step:2186 train loss:3.883422 +step:2187 train loss:3.893500 +step:2188 train loss:3.948303 +step:2189 train loss:3.835715 +step:2190 train loss:3.877918 +step:2191 train loss:3.936058 +step:2192 train loss:3.863972 +step:2193 train loss:3.833251 +step:2194 train loss:3.844061 +step:2195 train loss:3.860779 +step:2196 train loss:3.865561 +step:2197 train loss:3.852345 +step:2198 train loss:3.874180 +step:2199 train loss:3.949082 +step:2200 train loss:3.880411 +step:2201 train loss:3.881156 +step:2202 train loss:3.843235 +step:2203 train loss:3.864362 +step:2204 train loss:3.896710 +step:2205 train loss:3.878159 +step:2206 train loss:3.878943 +step:2207 train loss:3.875223 +step:2208 train loss:3.853145 +step:2209 train loss:4.131395 +step:2210 train loss:3.906825 +step:2211 train loss:3.911945 +step:2212 train loss:3.876350 +step:2213 train loss:3.957099 +step:2214 train loss:3.947551 +step:2215 train loss:3.873475 +step:2216 train loss:3.839601 +step:2217 train loss:3.869469 +step:2218 train loss:3.867639 +step:2219 train loss:3.899759 +step:2220 train loss:3.844602 +step:2221 train loss:3.876099 +step:2222 train loss:3.893418 +step:2223 train loss:3.928364 
+step:2224 train loss:3.903869 +step:2225 train loss:3.846593 +step:2226 train loss:3.910034 +step:2227 train loss:3.912600 +step:2228 train loss:3.905923 +step:2229 train loss:3.850502 +step:2230 train loss:3.971509 +step:2231 train loss:3.891478 +step:2232 train loss:3.890310 +step:2233 train loss:3.930563 +step:2234 train loss:3.831134 +step:2235 train loss:3.917006 +step:2236 train loss:3.856955 +step:2237 train loss:3.989816 +step:2238 train loss:3.796048 +step:2239 train loss:3.870871 +step:2240 train loss:3.888795 +step:2241 train loss:3.818198 +step:2242 train loss:4.057567 +step:2243 train loss:4.059660 +step:2244 train loss:4.225480 +step:2245 train loss:4.014575 +step:2246 train loss:3.975679 +step:2247 train loss:3.984187 +step:2248 train loss:4.005049 +step:2249 train loss:3.959704 +step:2250 validation loss:3.902814 +step:2250 train loss:3.952914 +step:2251 train loss:3.909235 +step:2252 train loss:3.902195 +step:2253 train loss:3.929001 +step:2254 train loss:3.924973 +step:2255 train loss:3.882753 +step:2256 train loss:3.929681 +step:2257 train loss:3.913012 +step:2258 train loss:3.901476 +step:2259 train loss:3.913178 +step:2260 train loss:3.867413 +step:2261 train loss:3.942134 +step:2262 train loss:3.956690 +step:2263 train loss:3.911207 +step:2264 train loss:4.021412 +step:2265 train loss:3.871760 +step:2266 train loss:3.914100 +step:2267 train loss:3.876893 +step:2268 train loss:3.871329 +step:2269 train loss:3.876254 +step:2270 train loss:3.863736 +step:2271 train loss:3.873714 +step:2272 train loss:3.910843 +step:2273 train loss:3.829652 +step:2274 train loss:3.862474 +step:2275 train loss:3.817126 +step:2276 train loss:3.888152 +step:2277 train loss:3.900562 +step:2278 train loss:3.879864 +step:2279 train loss:3.866641 +step:2280 train loss:3.773270 +step:2281 train loss:3.916496 +step:2282 train loss:3.850276 +step:2283 train loss:3.831661 +step:2284 train loss:3.850645 +step:2285 train loss:3.899031 +step:2286 train loss:3.859580 +step:2287 train loss:3.897717 +step:2288 train loss:3.870648 +step:2289 train loss:3.867821 +step:2290 train loss:3.873022 +step:2291 train loss:3.861594 +step:2292 train loss:3.900749 +step:2293 train loss:3.878671 +step:2294 train loss:3.875700 +step:2295 train loss:3.930758 +step:2296 train loss:3.860126 +step:2297 train loss:3.834227 +step:2298 train loss:3.895067 +step:2299 train loss:3.871171 +step:2300 train loss:3.785169 +step:2301 train loss:3.882918 +step:2302 train loss:3.895045 +step:2303 train loss:3.862081 +step:2304 train loss:3.853763 +step:2305 train loss:3.897058 +step:2306 train loss:3.888219 +step:2307 train loss:3.866554 +step:2308 train loss:3.887597 +step:2309 train loss:3.845093 +step:2310 train loss:3.830392 +step:2311 train loss:3.817829 +step:2312 train loss:3.885694 +step:2313 train loss:3.803640 +step:2314 train loss:3.878850 +step:2315 train loss:3.891546 +step:2316 train loss:3.929485 +step:2317 train loss:3.796127 +step:2318 train loss:3.842573 +step:2319 train loss:3.898557 +step:2320 train loss:3.862315 +step:2321 train loss:3.835778 +step:2322 train loss:3.851697 +step:2323 train loss:3.846315 +step:2324 train loss:3.874326 +step:2325 train loss:3.815484 +step:2326 train loss:3.845515 +step:2327 train loss:3.957434 +step:2328 train loss:3.906426 +step:2329 train loss:3.864419 +step:2330 train loss:3.823696 +step:2331 train loss:3.866201 +step:2332 train loss:3.788180 +step:2333 train loss:3.851519 +step:2334 train loss:3.826582 +step:2335 train loss:3.817995 +step:2336 train loss:4.068498 +step:2337 
train loss:3.842227 +step:2338 train loss:3.883126 +step:2339 train loss:3.885361 +step:2340 train loss:3.901732 +step:2341 train loss:3.886871 +step:2342 train loss:3.838901 +step:2343 train loss:3.859076 +step:2344 train loss:3.901056 +step:2345 train loss:3.854468 +step:2346 train loss:3.886248 +step:2347 train loss:3.809985 +step:2348 train loss:3.869262 +step:2349 train loss:3.815331 +step:2350 train loss:3.879204 +step:2351 train loss:3.884534 +step:2352 train loss:3.888362 +step:2353 train loss:3.846658 +step:2354 train loss:3.892799 +step:2355 train loss:3.884509 +step:2356 train loss:3.921896 +step:2357 train loss:3.821853 +step:2358 train loss:3.841721 +step:2359 train loss:3.867260 +step:2360 train loss:3.888148 +step:2361 train loss:3.924160 +step:2362 train loss:3.751504 +step:2363 train loss:3.950922 +step:2364 train loss:3.894869 +step:2365 train loss:3.863739 +step:2366 train loss:3.812104 +step:2367 train loss:3.879406 +step:2368 train loss:3.866946 +step:2369 train loss:3.859462 +step:2370 train loss:3.872866 +step:2371 train loss:3.926945 +step:2372 train loss:3.785999 +step:2373 train loss:3.920114 +step:2374 train loss:3.907012 +step:2375 train loss:3.891514 +step:2376 train loss:3.881350 +step:2377 train loss:3.825682 +step:2378 train loss:3.874819 +step:2379 train loss:3.860163 +step:2380 train loss:3.912810 +step:2381 train loss:4.007719 +step:2382 train loss:3.796924 +step:2383 train loss:3.845025 +step:2384 train loss:3.875572 +step:2385 train loss:3.775094 +step:2386 train loss:3.932119 +step:2387 train loss:3.816821 +step:2388 train loss:3.871314 +step:2389 train loss:3.893856 +step:2390 train loss:3.844545 +step:2391 train loss:3.868593 +step:2392 train loss:3.890347 +step:2393 train loss:3.846243 +step:2394 train loss:3.872059 +step:2395 train loss:3.859059 +step:2396 train loss:3.864117 +step:2397 train loss:3.844066 +step:2398 train loss:3.895010 +step:2399 train loss:3.851544 +step:2400 train loss:3.833704 +step:2401 train loss:3.871379 +step:2402 train loss:3.827833 +step:2403 train loss:3.877834 +step:2404 train loss:3.832119 +step:2405 train loss:3.830806 +step:2406 train loss:3.865841 +step:2407 train loss:3.809101 +step:2408 train loss:3.850079 +step:2409 train loss:3.837776 +step:2410 train loss:3.837692 +step:2411 train loss:3.911341 +step:2412 train loss:3.897415 +step:2413 train loss:3.934047 +step:2414 train loss:3.826375 +step:2415 train loss:3.818191 +step:2416 train loss:3.833276 +step:2417 train loss:3.869689 +step:2418 train loss:3.888086 +step:2419 train loss:3.818370 +step:2420 train loss:3.839095 +step:2421 train loss:3.866772 +step:2422 train loss:3.921786 +step:2423 train loss:3.858119 +step:2424 train loss:3.821096 +step:2425 train loss:3.882641 +step:2426 train loss:3.824958 +step:2427 train loss:3.847676 +step:2428 train loss:3.927058 +step:2429 train loss:3.878745 +step:2430 train loss:3.967279 +step:2431 train loss:3.882040 +step:2432 train loss:3.851422 +step:2433 train loss:3.829184 +step:2434 train loss:3.816493 +step:2435 train loss:3.871211 +step:2436 train loss:3.831886 +step:2437 train loss:3.863639 +step:2438 train loss:3.905715 +step:2439 train loss:3.888914 +step:2440 train loss:3.832580 +step:2441 train loss:3.866608 +step:2442 train loss:3.857489 +step:2443 train loss:3.820993 +step:2444 train loss:3.857661 +step:2445 train loss:3.854415 +step:2446 train loss:3.821645 +step:2447 train loss:3.804173 +step:2448 train loss:3.856230 +step:2449 train loss:3.884073 +step:2450 train loss:3.842464 +step:2451 train loss:3.769246 
+step:2452 train loss:3.868397 +step:2453 train loss:3.838291 +step:2454 train loss:3.835533 +step:2455 train loss:3.888290 +step:2456 train loss:3.843990 +step:2457 train loss:3.900255 +step:2458 train loss:3.876092 +step:2459 train loss:3.848853 +step:2460 train loss:3.856313 +step:2461 train loss:3.890051 +step:2462 train loss:3.863578 +step:2463 train loss:3.840409 +step:2464 train loss:3.858956 +step:2465 train loss:3.932955 +step:2466 train loss:4.017750 +step:2467 train loss:3.923995 +step:2468 train loss:3.818646 +step:2469 train loss:3.882045 +step:2470 train loss:3.931763 +step:2471 train loss:3.938845 +step:2472 train loss:3.917846 +step:2473 train loss:3.855954 +step:2474 train loss:3.818234 +step:2475 train loss:3.870633 +step:2476 train loss:3.944675 +step:2477 train loss:3.861404 +step:2478 train loss:3.818263 +step:2479 train loss:3.858459 +step:2480 train loss:3.847481 +step:2481 train loss:4.034606 +step:2482 train loss:3.848447 +step:2483 train loss:3.880716 +step:2484 train loss:3.827646 +step:2485 train loss:3.813968 +step:2486 train loss:3.846608 +step:2487 train loss:3.885043 +step:2488 train loss:3.798122 +step:2489 train loss:3.901623 +step:2490 train loss:3.824582 +step:2491 train loss:3.836920 +step:2492 train loss:3.880363 +step:2493 train loss:3.915437 +step:2494 train loss:3.833906 +step:2495 train loss:3.869232 +step:2496 train loss:3.846427 +step:2497 train loss:3.863899 +step:2498 train loss:3.867319 +step:2499 train loss:3.862799 +step:2500 validation loss:3.787708 total_sharp:6.5474e-04 L1_sharp:3.4094e-04 L2_sharp:4.1594e-05 L3_sharp:1.4974e-04 L4_sharp:1.1356e-04 L5_sharp:1.4584e-04 L6_sharp:6.5850e-05 L7_sharp:7.8602e-05 L8_sharp:1.3931e-04 L9_sharp:2.1871e-04 L10_sharp:2.4814e-04 L11_sharp:2.0685e-04 L12_sharp:4.8117e-04 total_fnorm:1.0409e+01 total_l1_linf:9.1412e+04 total_spectral:1.0409e+01 L1_fnorm:2.3521e+00 L2_fnorm:2.3590e+00 L3_fnorm:2.2783e+00 L4_fnorm:2.2856e+00 L5_fnorm:2.3049e+00 L6_fnorm:2.3623e+00 L7_fnorm:2.4615e+00 L8_fnorm:2.3991e+00 L9_fnorm:2.4110e+00 L10_fnorm:2.3814e+00 L11_fnorm:2.4131e+00 L12_fnorm:2.3146e+00 L1_l1linf:2.6547e+00 L2_l1linf:2.5781e+00 L3_l1linf:2.5802e+00 L4_l1linf:2.4463e+00 L5_l1linf:2.4627e+00 L6_l1linf:2.4046e+00 L7_l1linf:2.5495e+00 L8_l1linf:2.6943e+00 L9_l1linf:2.7805e+00 L10_l1linf:2.9011e+00 L11_l1linf:3.0157e+00 L12_l1linf:2.9439e+00 L1_spectral:3.0058e-01 L2_spectral:2.5838e-01 L3_spectral:2.9758e-01 L4_spectral:3.3029e-01 L5_spectral:3.2936e-01 L6_spectral:2.6919e-01 L7_spectral:2.7144e-01 L8_spectral:3.1637e-01 L9_spectral:3.9746e-01 L10_spectral:4.2327e-01 L11_spectral:3.8990e-01 L12_spectral:4.0890e-01 ip_v_neg_g:4.3569e-02 cos_v_neg_g:8.6169e-03 v_norm:1.0409e+01 g_norm:4.8577e-01 hv_norm:4.1031e-01 cos_v_hv:1.6609e-02 hg_norm:2.9396e+00 cos_g_hg:5.1635e-01 v_par:4.1845e-03 v_perp:1.0409e+01 L1_cos_v_neg_g:3.1136e-02 L1_v_norm:2.3521e+00 L2_cos_v_neg_g:1.0338e-02 L2_v_norm:2.3590e+00 L3_cos_v_neg_g:1.8186e-02 L3_v_norm:2.2783e+00 L4_cos_v_neg_g:1.6610e-02 L4_v_norm:2.2856e+00 L5_cos_v_neg_g:1.6051e-02 L5_v_norm:2.3049e+00 L6_cos_v_neg_g:1.1679e-02 L6_v_norm:2.3623e+00 L7_cos_v_neg_g:1.2092e-02 L7_v_norm:2.4615e+00 L8_cos_v_neg_g:1.6432e-02 L8_v_norm:2.3991e+00 L9_cos_v_neg_g:1.9312e-02 L9_v_norm:2.4110e+00 L10_cos_v_neg_g:2.0700e-02 L10_v_norm:2.3814e+00 L11_cos_v_neg_g:2.0597e-02 L11_v_norm:2.4131e+00 L12_cos_v_neg_g:2.3091e-02 L12_v_norm:2.3146e+00 +step:2500 train loss:3.813627 +step:2501 train loss:3.875242 +step:2502 train loss:3.866100 +step:2503 train loss:3.795459 +step:2504 train 
loss:3.828257 +step:2505 train loss:3.853415 +step:2506 train loss:3.813158 +step:2507 train loss:3.840464 +step:2508 train loss:3.794772 +step:2509 train loss:3.812503 +step:2510 train loss:3.804389 +step:2511 train loss:3.845778 +step:2512 train loss:3.892033 +step:2513 train loss:3.843424 +step:2514 train loss:3.825761 +step:2515 train loss:3.964446 +step:2516 train loss:3.856362 +step:2517 train loss:3.920901 +step:2518 train loss:3.890783 +step:2519 train loss:3.869901 +step:2520 train loss:3.888141 +step:2521 train loss:3.857918 +step:2522 train loss:3.888136 +step:2523 train loss:3.804810 +step:2524 train loss:3.863195 +step:2525 train loss:3.846552 +step:2526 train loss:3.899078 +step:2527 train loss:3.890310 +step:2528 train loss:3.869498 +step:2529 train loss:3.881540 +step:2530 train loss:3.867029 +step:2531 train loss:3.798959 +step:2532 train loss:3.897485 +step:2533 train loss:3.790937 +step:2534 train loss:3.885130 +step:2535 train loss:3.840212 +step:2536 train loss:3.761824 +step:2537 train loss:3.881106 +step:2538 train loss:3.855829 +step:2539 train loss:3.872630 +step:2540 train loss:3.811909 +step:2541 train loss:3.836882 +step:2542 train loss:3.847996 +step:2543 train loss:3.838944 +step:2544 train loss:3.822685 +step:2545 train loss:3.813699 +step:2546 train loss:3.780847 +step:2547 train loss:3.822289 +step:2548 train loss:3.847128 +step:2549 train loss:3.851355 +step:2550 train loss:3.984909 +step:2551 train loss:4.059221 +step:2552 train loss:3.785861 +step:2553 train loss:3.820346 +step:2554 train loss:3.966378 +step:2555 train loss:3.857028 +step:2556 train loss:3.777024 +step:2557 train loss:3.873176 +step:2558 train loss:3.864262 +step:2559 train loss:3.817868 +step:2560 train loss:3.803138 +step:2561 train loss:3.897826 +step:2562 train loss:3.853520 +step:2563 train loss:3.785928 +step:2564 train loss:3.860476 +step:2565 train loss:3.840561 +step:2566 train loss:3.819668 +step:2567 train loss:3.797605 +step:2568 train loss:3.852055 +step:2569 train loss:3.860312 +step:2570 train loss:3.809245 +step:2571 train loss:3.893607 +step:2572 train loss:3.854807 +step:2573 train loss:3.787335 +step:2574 train loss:3.834963 +step:2575 train loss:3.879212 +step:2576 train loss:3.833696 +step:2577 train loss:3.793601 +step:2578 train loss:3.835275 +step:2579 train loss:3.815830 +step:2580 train loss:3.784556 +step:2581 train loss:3.797488 +step:2582 train loss:3.806736 +step:2583 train loss:3.832697 +step:2584 train loss:3.848761 +step:2585 train loss:3.811882 +step:2586 train loss:3.839000 +step:2587 train loss:3.772518 +step:2588 train loss:3.801686 +step:2589 train loss:3.875815 +step:2590 train loss:3.799241 +step:2591 train loss:3.857915 +step:2592 train loss:3.907958 +step:2593 train loss:3.865411 +step:2594 train loss:3.827090 +step:2595 train loss:3.832662 +step:2596 train loss:3.875070 +step:2597 train loss:3.756721 +step:2598 train loss:3.909759 +step:2599 train loss:3.860397 +step:2600 train loss:3.889470 +step:2601 train loss:3.828505 +step:2602 train loss:3.862678 +step:2603 train loss:3.887213 +step:2604 train loss:3.799733 +step:2605 train loss:3.930177 +step:2606 train loss:3.888150 +step:2607 train loss:3.842976 +step:2608 train loss:3.805541 +step:2609 train loss:3.828735 +step:2610 train loss:3.851369 +step:2611 train loss:3.892274 +step:2612 train loss:3.858281 +step:2613 train loss:3.820001 +step:2614 train loss:3.808731 +step:2615 train loss:3.807266 +step:2616 train loss:3.876996 +step:2617 train loss:3.835484 +step:2618 train loss:3.800312 
+step:2619 train loss:3.814434 +step:2620 train loss:3.808240 +step:2621 train loss:3.818632 +step:2622 train loss:3.893909 +step:2623 train loss:3.764304 +step:2624 train loss:3.780001 +step:2625 train loss:3.851603 +step:2626 train loss:3.846221 +step:2627 train loss:3.824235 +step:2628 train loss:3.873719 +step:2629 train loss:3.827048 +step:2630 train loss:3.819740 +step:2631 train loss:3.846940 +step:2632 train loss:3.813090 +step:2633 train loss:3.802987 +step:2634 train loss:3.846992 +step:2635 train loss:3.831065 +step:2636 train loss:3.878562 +step:2637 train loss:3.829068 +step:2638 train loss:3.812644 +step:2639 train loss:3.867046 +step:2640 train loss:3.788863 +step:2641 train loss:3.842325 +step:2642 train loss:3.763003 +step:2643 train loss:3.763671 +step:2644 train loss:3.856436 +step:2645 train loss:3.791166 +step:2646 train loss:3.824449 +step:2647 train loss:3.844300 +step:2648 train loss:3.879541 +step:2649 train loss:3.790244 +step:2650 train loss:3.779314 +step:2651 train loss:3.820363 +step:2652 train loss:3.793072 +step:2653 train loss:3.861203 +step:2654 train loss:3.821964 +step:2655 train loss:3.807511 +step:2656 train loss:3.830695 +step:2657 train loss:3.855171 +step:2658 train loss:3.864587 +step:2659 train loss:3.842056 +step:2660 train loss:3.828397 +step:2661 train loss:3.874016 +step:2662 train loss:3.850659 +step:2663 train loss:3.825666 +step:2664 train loss:3.838844 +step:2665 train loss:3.784989 +step:2666 train loss:3.814560 +step:2667 train loss:3.819743 +step:2668 train loss:3.800574 +step:2669 train loss:3.821483 +step:2670 train loss:3.835068 +step:2671 train loss:3.811710 +step:2672 train loss:3.832752 +step:2673 train loss:3.767714 +step:2674 train loss:3.860785 +step:2675 train loss:3.828439 +step:2676 train loss:3.851417 +step:2677 train loss:3.834532 +step:2678 train loss:3.820628 +step:2679 train loss:3.805915 +step:2680 train loss:3.787621 +step:2681 train loss:3.763600 +step:2682 train loss:3.847881 +step:2683 train loss:3.818111 +step:2684 train loss:3.849878 +step:2685 train loss:3.766244 +step:2686 train loss:3.781168 +step:2687 train loss:3.860410 +step:2688 train loss:3.874096 +step:2689 train loss:3.777455 +step:2690 train loss:3.859030 +step:2691 train loss:3.829845 +step:2692 train loss:3.854262 +step:2693 train loss:3.908308 +step:2694 train loss:3.806188 +step:2695 train loss:3.830442 +step:2696 train loss:3.833893 +step:2697 train loss:3.823664 +step:2698 train loss:3.833464 +step:2699 train loss:3.852151 +step:2700 train loss:3.823477 +step:2701 train loss:3.890298 +step:2702 train loss:3.826898 +step:2703 train loss:3.780987 +step:2704 train loss:3.857922 +step:2705 train loss:3.854473 +step:2706 train loss:3.790051 +step:2707 train loss:3.749827 +step:2708 train loss:3.844814 +step:2709 train loss:3.823600 +step:2710 train loss:3.833842 +step:2711 train loss:3.797279 +step:2712 train loss:3.859384 +step:2713 train loss:3.863588 +step:2714 train loss:3.804134 +step:2715 train loss:3.796608 +step:2716 train loss:3.865906 +step:2717 train loss:3.832268 +step:2718 train loss:3.826718 +step:2719 train loss:3.823907 +step:2720 train loss:3.789725 +step:2721 train loss:3.872907 +step:2722 train loss:3.803086 +step:2723 train loss:3.792611 +step:2724 train loss:3.814971 +step:2725 train loss:3.809770 +step:2726 train loss:3.786124 +step:2727 train loss:3.842136 +step:2728 train loss:3.781334 +step:2729 train loss:3.910645 +step:2730 train loss:3.850991 +step:2731 train loss:3.890957 +step:2732 train loss:3.804856 +step:2733 train 
loss:3.797552 +step:2734 train loss:3.844303 +step:2735 train loss:3.846938 +step:2736 train loss:3.769489 +step:2737 train loss:3.824610 +step:2738 train loss:3.879243 +step:2739 train loss:3.797870 +step:2740 train loss:3.798193 +step:2741 train loss:3.788398 +step:2742 train loss:3.713563 +step:2743 train loss:3.827141 +step:2744 train loss:3.854329 +step:2745 train loss:3.803339 +step:2746 train loss:3.818642 +step:2747 train loss:3.802254 +step:2748 train loss:3.759695 +step:2749 train loss:3.826021 +step:2750 validation loss:3.750372 +step:2750 train loss:3.835353 +step:2751 train loss:3.858530 +step:2752 train loss:3.842286 +step:2753 train loss:3.831543 +step:2754 train loss:3.772871 +step:2755 train loss:3.841349 +step:2756 train loss:3.814917 +step:2757 train loss:3.798403 +step:2758 train loss:3.827847 +step:2759 train loss:3.837309 +step:2760 train loss:3.746551 +step:2761 train loss:3.762542 +step:2762 train loss:3.777559 +step:2763 train loss:3.801885 +step:2764 train loss:3.746550 +step:2765 train loss:3.796462 +step:2766 train loss:3.887590 +step:2767 train loss:3.758497 +step:2768 train loss:3.823024 +step:2769 train loss:3.793648 +step:2770 train loss:3.811822 +step:2771 train loss:3.835337 +step:2772 train loss:3.801807 +step:2773 train loss:3.800314 +step:2774 train loss:3.790778 +step:2775 train loss:3.807702 +step:2776 train loss:3.762116 +step:2777 train loss:3.794081 +step:2778 train loss:3.803751 +step:2779 train loss:3.832272 +step:2780 train loss:3.797077 +step:2781 train loss:3.787920 +step:2782 train loss:3.773721 +step:2783 train loss:3.807197 +step:2784 train loss:3.813871 +step:2785 train loss:3.886610 +step:2786 train loss:3.850067 +step:2787 train loss:3.811751 +step:2788 train loss:3.807360 +step:2789 train loss:3.800366 +step:2790 train loss:3.737833 +step:2791 train loss:3.839407 +step:2792 train loss:3.831429 +step:2793 train loss:3.797476 +step:2794 train loss:3.802955 +step:2795 train loss:3.821811 +step:2796 train loss:3.811876 +step:2797 train loss:3.855246 +step:2798 train loss:3.842970 +step:2799 train loss:3.750994 +step:2800 train loss:3.796016 +step:2801 train loss:3.829821 +step:2802 train loss:3.855072 +step:2803 train loss:3.830061 +step:2804 train loss:3.768010 +step:2805 train loss:3.807384 +step:2806 train loss:3.801870 +step:2807 train loss:3.832174 +step:2808 train loss:3.769283 +step:2809 train loss:3.838935 +step:2810 train loss:3.830011 +step:2811 train loss:3.816216 +step:2812 train loss:3.866579 +step:2813 train loss:3.839289 +step:2814 train loss:3.829238 +step:2815 train loss:3.834247 +step:2816 train loss:3.840438 +step:2817 train loss:3.773373 +step:2818 train loss:3.878630 +step:2819 train loss:3.805912 +step:2820 train loss:3.802528 +step:2821 train loss:3.781453 +step:2822 train loss:3.821174 +step:2823 train loss:3.773055 +step:2824 train loss:3.673461 +step:2825 train loss:3.829267 +step:2826 train loss:3.829940 +step:2827 train loss:3.853505 +step:2828 train loss:3.845799 +step:2829 train loss:3.830216 +step:2830 train loss:3.864322 +step:2831 train loss:3.797104 +step:2832 train loss:3.767183 +step:2833 train loss:3.825859 +step:2834 train loss:3.779881 +step:2835 train loss:3.810695 +step:2836 train loss:3.816729 +step:2837 train loss:3.814195 +step:2838 train loss:3.757024 +step:2839 train loss:3.853234 +step:2840 train loss:3.814560 +step:2841 train loss:3.889668 +step:2842 train loss:3.835161 +step:2843 train loss:3.829934 +step:2844 train loss:3.858420 +step:2845 train loss:3.813117 +step:2846 train loss:3.759974 
+step:2847 train loss:3.851157 +step:2848 train loss:3.807963 +step:2849 train loss:3.803980 +step:2850 train loss:3.861001 +step:2851 train loss:3.812469 +step:2852 train loss:3.889778 +step:2853 train loss:3.806534 +step:2854 train loss:3.748887 +step:2855 train loss:3.828011 +step:2856 train loss:3.751443 +step:2857 train loss:3.853634 +step:2858 train loss:3.808043 +step:2859 train loss:3.797213 +step:2860 train loss:3.786093 +step:2861 train loss:3.769418 +step:2862 train loss:3.802065 +step:2863 train loss:3.784617 +step:2864 train loss:3.789589 +step:2865 train loss:3.864559 +step:2866 train loss:3.878988 +step:2867 train loss:3.815634 +step:2868 train loss:3.812433 +step:2869 train loss:3.772038 +step:2870 train loss:3.861199 +step:2871 train loss:3.861359 +step:2872 train loss:3.819012 +step:2873 train loss:3.826965 +step:2874 train loss:3.806244 +step:2875 train loss:3.759334 +step:2876 train loss:3.802893 +step:2877 train loss:3.789939 +step:2878 train loss:3.802281 +step:2879 train loss:3.769359 +step:2880 train loss:3.787781 +step:2881 train loss:3.781457 +step:2882 train loss:3.713640 +step:2883 train loss:3.798333 +step:2884 train loss:3.868443 +step:2885 train loss:3.761384 +step:2886 train loss:3.812198 +step:2887 train loss:3.838019 +step:2888 train loss:3.809557 +step:2889 train loss:3.791810 +step:2890 train loss:3.762717 +step:2891 train loss:3.805622 +step:2892 train loss:3.813430 +step:2893 train loss:3.794325 +step:2894 train loss:3.764880 +step:2895 train loss:3.815350 +step:2896 train loss:3.864542 +step:2897 train loss:3.842106 +step:2898 train loss:3.976416 +step:2899 train loss:3.734806 +step:2900 train loss:3.809067 +step:2901 train loss:3.757752 +step:2902 train loss:3.759643 +step:2903 train loss:3.773953 +step:2904 train loss:3.797646 +step:2905 train loss:3.859506 +step:2906 train loss:3.828778 +step:2907 train loss:3.997224 +step:2908 train loss:3.755320 +step:2909 train loss:3.835456 +step:2910 train loss:3.804344 +step:2911 train loss:3.832400 +step:2912 train loss:3.787399 +step:2913 train loss:3.824478 +step:2914 train loss:3.850835 +step:2915 train loss:3.844921 +step:2916 train loss:3.802579 +step:2917 train loss:3.836844 +step:2918 train loss:3.828079 +step:2919 train loss:3.769910 +step:2920 train loss:3.824141 +step:2921 train loss:3.779366 +step:2922 train loss:3.800438 +step:2923 train loss:3.872027 +step:2924 train loss:3.802973 +step:2925 train loss:3.756154 +step:2926 train loss:3.848271 +step:2927 train loss:3.755172 +step:2928 train loss:3.725324 +step:2929 train loss:3.742117 +step:2930 train loss:3.763304 +step:2931 train loss:3.918482 +step:2932 train loss:3.836955 +step:2933 train loss:3.798305 +step:2934 train loss:3.793754 +step:2935 train loss:3.815225 +step:2936 train loss:3.764946 +step:2937 train loss:3.783617 +step:2938 train loss:3.801861 +step:2939 train loss:3.874835 +step:2940 train loss:3.773344 +step:2941 train loss:3.809718 +step:2942 train loss:3.769275 +step:2943 train loss:4.052186 +step:2944 train loss:3.878888 +step:2945 train loss:3.836499 +step:2946 train loss:3.852932 +step:2947 train loss:3.807794 +step:2948 train loss:3.771481 +step:2949 train loss:3.872644 +step:2950 train loss:3.811383 +step:2951 train loss:3.710045 +step:2952 train loss:3.779603 +step:2953 train loss:3.698448 +step:2954 train loss:3.783324 +step:2955 train loss:3.865812 +step:2956 train loss:3.802574 +step:2957 train loss:3.802770 +step:2958 train loss:3.756474 +step:2959 train loss:3.776430 +step:2960 train loss:3.875787 +step:2961 train 
loss:3.734218 +step:2962 train loss:3.812314 +step:2963 train loss:3.804824 +step:2964 train loss:3.782675 +step:2965 train loss:3.809449 +step:2966 train loss:3.783861 +step:2967 train loss:3.783339 +step:2968 train loss:3.759863 +step:2969 train loss:3.769258 +step:2970 train loss:3.840497 +step:2971 train loss:3.764182 +step:2972 train loss:3.748962 +step:2973 train loss:3.745860 +step:2974 train loss:3.786025 +step:2975 train loss:3.748719 +step:2976 train loss:3.788268 +step:2977 train loss:3.778147 +step:2978 train loss:3.861708 +step:2979 train loss:3.842002 +step:2980 train loss:3.850896 +step:2981 train loss:3.804265 +step:2982 train loss:3.801161 +step:2983 train loss:3.769106 +step:2984 train loss:3.735869 +step:2985 train loss:3.852570 +step:2986 train loss:3.738849 +step:2987 train loss:3.865019 +step:2988 train loss:3.786939 +step:2989 train loss:3.819180 +step:2990 train loss:3.768432 +step:2991 train loss:3.838161 +step:2992 train loss:3.828930 +step:2993 train loss:3.799513 +step:2994 train loss:3.783135 +step:2995 train loss:3.853065 +step:2996 train loss:3.776899 +step:2997 train loss:3.688282 +step:2998 train loss:3.798566 +step:2999 train loss:3.841718 +step:3000 validation loss:3.723254 total_sharp:3.5575e-04 L1_sharp:1.3158e-04 L2_sharp:1.1984e-05 L3_sharp:1.4447e-05 L4_sharp:2.5868e-05 L5_sharp:4.5128e-05 L6_sharp:3.8393e-05 L7_sharp:4.1123e-05 L8_sharp:9.2819e-05 L9_sharp:1.3316e-04 L10_sharp:1.5592e-04 L11_sharp:1.3585e-04 L12_sharp:4.1708e-04 total_fnorm:1.0641e+01 total_l1_linf:9.3467e+04 total_spectral:1.0641e+01 L1_fnorm:2.4347e+00 L2_fnorm:2.3943e+00 L3_fnorm:2.3221e+00 L4_fnorm:2.3555e+00 L5_fnorm:2.3650e+00 L6_fnorm:2.4314e+00 L7_fnorm:2.5091e+00 L8_fnorm:2.4813e+00 L9_fnorm:2.4772e+00 L10_fnorm:2.4913e+00 L11_fnorm:2.5524e+00 L12_fnorm:2.5068e+00 L1_l1linf:2.5822e+00 L2_l1linf:2.5682e+00 L3_l1linf:2.6994e+00 L4_l1linf:2.6016e+00 L5_l1linf:2.4452e+00 L6_l1linf:2.4404e+00 L7_l1linf:2.6534e+00 L8_l1linf:2.8359e+00 L9_l1linf:3.1603e+00 L10_l1linf:2.9235e+00 L11_l1linf:3.2478e+00 L12_l1linf:3.4295e+00 L1_spectral:3.0074e-01 L2_spectral:2.5676e-01 L3_spectral:2.6830e-01 L4_spectral:3.0847e-01 L5_spectral:3.1778e-01 L6_spectral:2.6700e-01 L7_spectral:2.7557e-01 L8_spectral:3.3373e-01 L9_spectral:3.8384e-01 L10_spectral:4.2217e-01 L11_spectral:4.1454e-01 L12_spectral:4.7980e-01 ip_v_neg_g:2.1955e-02 cos_v_neg_g:4.7999e-03 v_norm:1.0641e+01 g_norm:4.2986e-01 hv_norm:2.1983e-01 cos_v_hv:1.7220e-02 hg_norm:2.6358e+00 cos_g_hg:4.7278e-01 v_par:4.0867e-03 v_perp:1.0641e+01 L1_cos_v_neg_g:1.5686e-02 L1_v_norm:2.4347e+00 L2_cos_v_neg_g:4.0757e-03 L2_v_norm:2.3943e+00 L3_cos_v_neg_g:4.6991e-03 L3_v_norm:2.3221e+00 L4_cos_v_neg_g:5.6513e-03 L4_v_norm:2.3555e+00 L5_cos_v_neg_g:6.1829e-03 L5_v_norm:2.3650e+00 L6_cos_v_neg_g:5.2243e-03 L6_v_norm:2.4314e+00 L7_cos_v_neg_g:5.9940e-03 L7_v_norm:2.5091e+00 L8_cos_v_neg_g:9.8036e-03 L8_v_norm:2.4813e+00 L9_cos_v_neg_g:1.2017e-02 L9_v_norm:2.4772e+00 L10_cos_v_neg_g:1.5168e-02 L10_v_norm:2.4913e+00 L11_cos_v_neg_g:1.4959e-02 L11_v_norm:2.5524e+00 L12_cos_v_neg_g:2.1529e-02 L12_v_norm:2.5068e+00 +step:3000 train loss:3.738185 +step:3001 train loss:3.791787 +step:3002 train loss:3.785797 +step:3003 train loss:3.783278 +step:3004 train loss:3.811103 +step:3005 train loss:3.705830 +step:3006 train loss:3.761370 +step:3007 train loss:3.793287 +step:3008 train loss:3.834229 +step:3009 train loss:3.790186 +step:3010 train loss:3.810162 +step:3011 train loss:3.798390 +step:3012 train loss:3.774452 +step:3013 train loss:3.820868 +step:3014 
train loss:3.776798 +step:3015 train loss:3.772368 +step:3016 train loss:3.792444 +step:3017 train loss:3.813473 +step:3018 train loss:3.743213 +step:3019 train loss:3.779614 +step:3020 train loss:3.801886 +step:3021 train loss:3.764270 +step:3022 train loss:3.852311 +step:3023 train loss:3.805458 +step:3024 train loss:3.791657 +step:3025 train loss:3.800360 +step:3026 train loss:3.775509 +step:3027 train loss:3.755609 +step:3028 train loss:3.802878 +step:3029 train loss:3.789235 +step:3030 train loss:3.766417 +step:3031 train loss:3.749301 +step:3032 train loss:3.737744 +step:3033 train loss:3.761251 +step:3034 train loss:3.809417 +step:3035 train loss:3.787046 +step:3036 train loss:3.747470 +step:3037 train loss:3.711720 +step:3038 train loss:3.826608 +step:3039 train loss:3.704348 +step:3040 train loss:3.689793 +step:3041 train loss:3.817988 +step:3042 train loss:3.752837 +step:3043 train loss:3.814507 +step:3044 train loss:3.710804 +step:3045 train loss:3.755941 +step:3046 train loss:3.730186 +step:3047 train loss:3.758360 +step:3048 train loss:3.727607 +step:3049 train loss:3.806916 +step:3050 train loss:3.696718 +step:3051 train loss:3.710950 +step:3052 train loss:3.733565 +step:3053 train loss:3.802625 +step:3054 train loss:3.872948 +step:3055 train loss:3.713818 +step:3056 train loss:3.743452 +step:3057 train loss:3.779504 +step:3058 train loss:3.729058 +step:3059 train loss:3.755359 +step:3060 train loss:3.751026 +step:3061 train loss:3.738927 +step:3062 train loss:3.798381 +step:3063 train loss:3.789371 +step:3064 train loss:3.809600 +step:3065 train loss:3.829232 +step:3066 train loss:3.724520 +step:3067 train loss:3.772899 +step:3068 train loss:3.828019 +step:3069 train loss:3.836756 +step:3070 train loss:3.764139 +step:3071 train loss:3.782060 +step:3072 train loss:3.782568 +step:3073 train loss:3.816467 +step:3074 train loss:3.750913 +step:3075 train loss:3.786383 +step:3076 train loss:3.722747 +step:3077 train loss:3.718415 +step:3078 train loss:3.749408 +step:3079 train loss:3.799860 +step:3080 train loss:3.785025 +step:3081 train loss:3.835062 +step:3082 train loss:3.810783 +step:3083 train loss:3.738239 +step:3084 train loss:3.822317 +step:3085 train loss:3.747828 +step:3086 train loss:3.809303 +step:3087 train loss:3.773949 +step:3088 train loss:3.852985 +step:3089 train loss:3.733257 +step:3090 train loss:3.802243 +step:3091 train loss:3.724066 +step:3092 train loss:3.748415 +step:3093 train loss:3.776453 +step:3094 train loss:3.761539 +step:3095 train loss:3.840713 +step:3096 train loss:3.772287 +step:3097 train loss:3.782929 +step:3098 train loss:3.759561 +step:3099 train loss:3.767586 +step:3100 train loss:3.791653 +step:3101 train loss:3.876307 +step:3102 train loss:3.804997 +step:3103 train loss:3.730524 +step:3104 train loss:3.816387 +step:3105 train loss:3.786977 +step:3106 train loss:3.781652 +step:3107 train loss:3.767057 +step:3108 train loss:3.740965 +step:3109 train loss:3.795662 +step:3110 train loss:3.724681 +step:3111 train loss:3.762162 +step:3112 train loss:3.700089 +step:3113 train loss:3.816872 +step:3114 train loss:3.729081 +step:3115 train loss:3.771818 +step:3116 train loss:3.649623 +step:3117 train loss:3.672413 +step:3118 train loss:3.770823 +step:3119 train loss:3.779632 +step:3120 train loss:3.779633 +step:3121 train loss:3.722269 +step:3122 train loss:3.805600 +step:3123 train loss:3.720711 +step:3124 train loss:3.783772 +step:3125 train loss:3.798248 +step:3126 train loss:3.904207 +step:3127 train loss:3.749473 +step:3128 train loss:3.777700 
+step:3129 train loss:3.762793 +step:3130 train loss:3.741750 +step:3131 train loss:3.819098 +step:3132 train loss:3.809248 +step:3133 train loss:3.774297 +step:3134 train loss:3.667935 +step:3135 train loss:3.758012 +step:3136 train loss:3.735597 +step:3137 train loss:3.863196 +step:3138 train loss:3.766844 +step:3139 train loss:3.747170 +step:3140 train loss:3.770226 +step:3141 train loss:3.769792 +step:3142 train loss:3.706211 +step:3143 train loss:3.789479 +step:3144 train loss:3.740011 +step:3145 train loss:3.724077 +step:3146 train loss:3.738795 +step:3147 train loss:3.846933 +step:3148 train loss:3.751138 +step:3149 train loss:3.807096 +step:3150 train loss:3.791258 +step:3151 train loss:3.762078 +step:3152 train loss:3.760462 +step:3153 train loss:3.715661 +step:3154 train loss:3.800175 +step:3155 train loss:3.742009 +step:3156 train loss:3.791245 +step:3157 train loss:3.795982 +step:3158 train loss:3.771032 +step:3159 train loss:3.709749 +step:3160 train loss:3.755338 +step:3161 train loss:3.725684 +step:3162 train loss:3.784845 +step:3163 train loss:3.764861 +step:3164 train loss:3.744124 +step:3165 train loss:3.759506 +step:3166 train loss:3.796824 +step:3167 train loss:3.759699 +step:3168 train loss:3.838542 +step:3169 train loss:3.752174 +step:3170 train loss:3.737663 +step:3171 train loss:3.725976 +step:3172 train loss:3.728001 +step:3173 train loss:3.670637 +step:3174 train loss:3.791400 +step:3175 train loss:3.759040 +step:3176 train loss:3.772465 +step:3177 train loss:3.741055 +step:3178 train loss:3.719953 +step:3179 train loss:3.792336 +step:3180 train loss:3.723072 +step:3181 train loss:3.802910 +step:3182 train loss:3.811415 +step:3183 train loss:3.747604 +step:3184 train loss:3.748558 +step:3185 train loss:3.808144 +step:3186 train loss:3.767627 +step:3187 train loss:3.784846 +step:3188 train loss:3.824840 +step:3189 train loss:3.769653 +step:3190 train loss:3.722200 +step:3191 train loss:3.727959 +step:3192 train loss:3.694078 +step:3193 train loss:3.772433 +step:3194 train loss:3.736132 +step:3195 train loss:3.721828 +step:3196 train loss:3.771827 +step:3197 train loss:3.733141 +step:3198 train loss:3.770778 +step:3199 train loss:3.748597 +step:3200 train loss:3.756348 +step:3201 train loss:3.720528 +step:3202 train loss:3.783553 +step:3203 train loss:3.841798 +step:3204 train loss:3.805718 +step:3205 train loss:3.655406 +step:3206 train loss:3.935604 +step:3207 train loss:3.694732 +step:3208 train loss:3.764746 +step:3209 train loss:3.756153 +step:3210 train loss:3.736860 +step:3211 train loss:3.759509 +step:3212 train loss:3.775096 +step:3213 train loss:3.717247 +step:3214 train loss:3.820908 +step:3215 train loss:3.824035 +step:3216 train loss:3.691528 +step:3217 train loss:3.771141 +step:3218 train loss:3.813517 +step:3219 train loss:3.732649 +step:3220 train loss:3.798391 +step:3221 train loss:3.710168 +step:3222 train loss:3.753150 +step:3223 train loss:3.768771 +step:3224 train loss:3.776129 +step:3225 train loss:3.708602 +step:3226 train loss:3.737571 +step:3227 train loss:3.767810 +step:3228 train loss:3.760449 +step:3229 train loss:3.795916 +step:3230 train loss:3.809458 +step:3231 train loss:3.748559 +step:3232 train loss:3.756935 +step:3233 train loss:3.731435 +step:3234 train loss:3.719446 +step:3235 train loss:3.723356 +step:3236 train loss:3.740368 +step:3237 train loss:3.742259 +step:3238 train loss:3.762540 +step:3239 train loss:3.661871 +step:3240 train loss:3.775526 +step:3241 train loss:3.771980 +step:3242 train loss:3.829426 +step:3243 train 
loss:3.768898 +step:3244 train loss:3.785526 +step:3245 train loss:3.690223 +step:3246 train loss:3.816649 +step:3247 train loss:3.756324 +step:3248 train loss:3.778615 +step:3249 train loss:3.724736 +step:3250 validation loss:3.690193 +step:3250 train loss:3.725679 +step:3251 train loss:3.833843 +step:3252 train loss:3.766490 +step:3253 train loss:3.766274 +step:3254 train loss:3.838192 +step:3255 train loss:3.775136 +step:3256 train loss:3.770629 +step:3257 train loss:3.752050 +step:3258 train loss:3.680568 +step:3259 train loss:3.658875 +step:3260 train loss:3.774562 +step:3261 train loss:3.755602 +step:3262 train loss:3.745843 +step:3263 train loss:3.730787 +step:3264 train loss:3.842130 +step:3265 train loss:3.750292 +step:3266 train loss:3.776538 +step:3267 train loss:3.741962 +step:3268 train loss:3.742312 +step:3269 train loss:3.754463 +step:3270 train loss:3.781656 +step:3271 train loss:3.748085 +step:3272 train loss:3.724158 +step:3273 train loss:3.733348 +step:3274 train loss:3.869966 +step:3275 train loss:3.747175 +step:3276 train loss:3.809048 +step:3277 train loss:3.750362 +step:3278 train loss:3.727241 +step:3279 train loss:3.752126 +step:3280 train loss:3.776415 +step:3281 train loss:3.704757 +step:3282 train loss:3.772671 +step:3283 train loss:3.745453 +step:3284 train loss:3.706774 +step:3285 train loss:3.725131 +step:3286 train loss:3.753003 +step:3287 train loss:3.693633 +step:3288 train loss:3.773911 +step:3289 train loss:3.717055 +step:3290 train loss:3.751494 +step:3291 train loss:3.713652 +step:3292 train loss:3.735348 +step:3293 train loss:3.776188 +step:3294 train loss:3.791928 +step:3295 train loss:3.700818 +step:3296 train loss:3.762776 +step:3297 train loss:3.718666 +step:3298 train loss:3.720157 +step:3299 train loss:3.852944 +step:3300 train loss:3.696587 +step:3301 train loss:3.771103 +step:3302 train loss:3.742729 +step:3303 train loss:3.772342 +step:3304 train loss:3.747539 +step:3305 train loss:3.839684 +step:3306 train loss:3.759881 +step:3307 train loss:3.779809 +step:3308 train loss:3.730918 +step:3309 train loss:3.790210 +step:3310 train loss:3.708170 +step:3311 train loss:3.756528 +step:3312 train loss:3.726602 +step:3313 train loss:3.759502 +step:3314 train loss:3.754757 +step:3315 train loss:3.835611 +step:3316 train loss:3.684077 +step:3317 train loss:3.777898 +step:3318 train loss:3.786849 +step:3319 train loss:3.713489 +step:3320 train loss:3.870711 +step:3321 train loss:3.776164 +step:3322 train loss:3.776318 +step:3323 train loss:3.878658 +step:3324 train loss:3.794456 +step:3325 train loss:3.770976 +step:3326 train loss:3.761240 +step:3327 train loss:3.773857 +step:3328 train loss:3.752256 +step:3329 train loss:3.753461 +step:3330 train loss:3.741375 +step:3331 train loss:3.788264 +step:3332 train loss:3.805969 +step:3333 train loss:3.775347 +step:3334 train loss:3.708318 +step:3335 train loss:3.719782 +step:3336 train loss:3.757415 +step:3337 train loss:3.755132 +step:3338 train loss:3.743499 +step:3339 train loss:3.735953 +step:3340 train loss:3.775833 +step:3341 train loss:3.727694 +step:3342 train loss:3.772314 +step:3343 train loss:3.710639 +step:3344 train loss:3.767155 +step:3345 train loss:3.717537 +step:3346 train loss:3.730695 +step:3347 train loss:3.738143 +step:3348 train loss:3.757082 +step:3349 train loss:3.749473 +step:3350 train loss:3.773892 +step:3351 train loss:3.825707 +step:3352 train loss:3.764591 +step:3353 train loss:3.864943 +step:3354 train loss:3.709855 +step:3355 train loss:3.818841 +step:3356 train loss:3.773781 
+step:3357 train loss:3.777872 +step:3358 train loss:3.719874 +step:3359 train loss:3.747224 +step:3360 train loss:3.737240 +step:3361 train loss:3.739320 +step:3362 train loss:3.735239 +step:3363 train loss:3.732319 +step:3364 train loss:3.712033 +step:3365 train loss:3.748309 +step:3366 train loss:3.778511 +step:3367 train loss:3.732597 +step:3368 train loss:3.830014 +step:3369 train loss:3.742631 +step:3370 train loss:3.824907 +step:3371 train loss:3.796706 +step:3372 train loss:3.761492 +step:3373 train loss:3.770206 +step:3374 train loss:3.821828 +step:3375 train loss:3.751363 +step:3376 train loss:3.761971 +step:3377 train loss:3.745690 +step:3378 train loss:3.721672 +step:3379 train loss:3.798930 +step:3380 train loss:3.777460 +step:3381 train loss:3.760260 +step:3382 train loss:3.777678 +step:3383 train loss:3.789353 +step:3384 train loss:3.717262 +step:3385 train loss:3.763165 +step:3386 train loss:3.742272 +step:3387 train loss:3.819486 +step:3388 train loss:3.725804 +step:3389 train loss:3.918306 +step:3390 train loss:3.687979 +step:3391 train loss:3.788657 +step:3392 train loss:3.785612 +step:3393 train loss:3.790247 +step:3394 train loss:3.744920 +step:3395 train loss:3.811037 +step:3396 train loss:3.720767 +step:3397 train loss:3.796116 +step:3398 train loss:3.761162 +step:3399 train loss:3.770406 +step:3400 train loss:3.720571 +step:3401 train loss:3.755178 +step:3402 train loss:3.909778 +step:3403 train loss:3.801188 +step:3404 train loss:3.922024 +step:3405 train loss:3.772316 +step:3406 train loss:3.749254 +step:3407 train loss:3.746092 +step:3408 train loss:3.726754 +step:3409 train loss:3.690311 +step:3410 train loss:3.719905 +step:3411 train loss:3.789968 +step:3412 train loss:3.712797 +step:3413 train loss:3.704911 +step:3414 train loss:3.743231 +step:3415 train loss:3.717280 +step:3416 train loss:3.719638 +step:3417 train loss:3.801365 +step:3418 train loss:3.798040 +step:3419 train loss:3.756201 +step:3420 train loss:3.730413 +step:3421 train loss:3.764175 +step:3422 train loss:3.778775 +step:3423 train loss:3.800009 +step:3424 train loss:3.681055 +step:3425 train loss:3.701727 +step:3426 train loss:3.698573 +step:3427 train loss:3.759311 +step:3428 train loss:3.686448 +step:3429 train loss:3.748625 +step:3430 train loss:3.716187 +step:3431 train loss:3.768020 +step:3432 train loss:3.754378 +step:3433 train loss:3.719530 +step:3434 train loss:3.801231 +step:3435 train loss:3.740762 +step:3436 train loss:3.832167 +step:3437 train loss:3.659511 +step:3438 train loss:3.766169 +step:3439 train loss:3.739806 +step:3440 train loss:3.833080 +step:3441 train loss:3.730419 +step:3442 train loss:3.794954 +step:3443 train loss:3.728760 +step:3444 train loss:3.750399 +step:3445 train loss:3.795979 +step:3446 train loss:3.700419 +step:3447 train loss:3.774208 +step:3448 train loss:3.727713 +step:3449 train loss:3.761343 +step:3450 train loss:3.673131 +step:3451 train loss:3.786743 +step:3452 train loss:3.736323 +step:3453 train loss:3.789593 +step:3454 train loss:3.815233 +step:3455 train loss:3.877688 +step:3456 train loss:3.822101 +step:3457 train loss:3.818117 +step:3458 train loss:3.739339 +step:3459 train loss:3.753424 +step:3460 train loss:3.697156 +step:3461 train loss:3.757324 +step:3462 train loss:3.756691 +step:3463 train loss:3.728281 +step:3464 train loss:3.779208 +step:3465 train loss:3.711468 +step:3466 train loss:3.778327 +step:3467 train loss:3.734904 +step:3468 train loss:3.748670 +step:3469 train loss:3.760026 +step:3470 train loss:3.741291 +step:3471 train 
loss:3.779208 +step:3472 train loss:3.665336 +step:3473 train loss:3.787654 +step:3474 train loss:3.684492 +step:3475 train loss:3.766909 +step:3476 train loss:3.738160 +step:3477 train loss:3.756631 +step:3478 train loss:3.732281 +step:3479 train loss:3.758879 +step:3480 train loss:3.778397 +step:3481 train loss:3.759253 +step:3482 train loss:3.740866 +step:3483 train loss:3.883401 +step:3484 train loss:3.724564 +step:3485 train loss:3.711629 +step:3486 train loss:3.764238 +step:3487 train loss:3.802932 +step:3488 train loss:3.710804 +step:3489 train loss:3.762922 +step:3490 train loss:3.729122 +step:3491 train loss:3.764185 +step:3492 train loss:3.805548 +step:3493 train loss:3.777050 +step:3494 train loss:3.771828 +step:3495 train loss:3.746922 +step:3496 train loss:3.713716 +step:3497 train loss:3.824813 +step:3498 train loss:3.773515 +step:3499 train loss:3.703688 +step:3500 validation loss:3.671392 total_sharp:3.0483e-04 L1_sharp:1.0686e-04 L2_sharp:9.4601e-06 L3_sharp:2.0496e-05 L4_sharp:3.7125e-05 L5_sharp:6.1088e-05 L6_sharp:3.6043e-05 L7_sharp:4.5698e-05 L8_sharp:1.0386e-04 L9_sharp:1.2598e-04 L10_sharp:1.2960e-04 L11_sharp:1.0722e-04 L12_sharp:2.4096e-04 total_fnorm:1.0615e+01 total_l1_linf:9.3391e+04 total_spectral:1.0615e+01 L1_fnorm:2.4635e+00 L2_fnorm:2.4081e+00 L3_fnorm:2.3577e+00 L4_fnorm:2.3637e+00 L5_fnorm:2.3635e+00 L6_fnorm:2.4450e+00 L7_fnorm:2.5274e+00 L8_fnorm:2.4649e+00 L9_fnorm:2.4690e+00 L10_fnorm:2.4440e+00 L11_fnorm:2.4854e+00 L12_fnorm:2.3742e+00 L1_l1linf:2.5869e+00 L2_l1linf:2.5412e+00 L3_l1linf:2.5475e+00 L4_l1linf:2.5808e+00 L5_l1linf:2.4793e+00 L6_l1linf:2.6404e+00 L7_l1linf:3.0307e+00 L8_l1linf:3.1881e+00 L9_l1linf:2.7195e+00 L10_l1linf:2.7427e+00 L11_l1linf:2.7984e+00 L12_l1linf:2.6343e+00 L1_spectral:2.8211e-01 L2_spectral:2.5599e-01 L3_spectral:2.7479e-01 L4_spectral:3.1690e-01 L5_spectral:3.1139e-01 L6_spectral:2.6803e-01 L7_spectral:3.0353e-01 L8_spectral:3.5931e-01 L9_spectral:3.7925e-01 L10_spectral:3.8815e-01 L11_spectral:3.7355e-01 L12_spectral:4.0239e-01 ip_v_neg_g:1.9021e-02 cos_v_neg_g:3.8119e-03 v_norm:1.0615e+01 g_norm:4.7008e-01 hv_norm:2.1312e-01 cos_v_hv:1.5183e-02 hg_norm:2.6713e+00 cos_g_hg:4.8137e-01 v_par:2.0441e-03 v_perp:1.0615e+01 L1_cos_v_neg_g:1.4757e-02 L1_v_norm:2.4635e+00 L2_cos_v_neg_g:3.9326e-03 L2_v_norm:2.4081e+00 L3_cos_v_neg_g:4.2111e-03 L3_v_norm:2.3577e+00 L4_cos_v_neg_g:4.6984e-03 L4_v_norm:2.3637e+00 L5_cos_v_neg_g:5.3529e-03 L5_v_norm:2.3635e+00 L6_cos_v_neg_g:5.0972e-03 L6_v_norm:2.4450e+00 L7_cos_v_neg_g:6.5558e-03 L7_v_norm:2.5274e+00 L8_cos_v_neg_g:9.1297e-03 L8_v_norm:2.4649e+00 L9_cos_v_neg_g:8.5826e-03 L9_v_norm:2.4690e+00 L10_cos_v_neg_g:1.1290e-02 L10_v_norm:2.4440e+00 L11_cos_v_neg_g:1.1905e-02 L11_v_norm:2.4854e+00 L12_cos_v_neg_g:1.1730e-02 L12_v_norm:2.3742e+00 +step:3500 train loss:3.723734 +step:3501 train loss:3.850388 +step:3502 train loss:3.828601 +step:3503 train loss:3.779939 +step:3504 train loss:3.732251 +step:3505 train loss:3.751140 +step:3506 train loss:3.646072 +step:3507 train loss:3.763772 +step:3508 train loss:3.709404 +step:3509 train loss:3.776586 +step:3510 train loss:3.710018 +step:3511 train loss:3.746114 +step:3512 train loss:3.888209 +step:3513 train loss:3.707307 +step:3514 train loss:3.720482 +step:3515 train loss:3.971857 +step:3516 train loss:3.770986 +step:3517 train loss:3.728658 +step:3518 train loss:3.729179 +step:3519 train loss:3.721272 +step:3520 train loss:3.752122 +step:3521 train loss:3.743463 +step:3522 train loss:3.651972 +step:3523 train loss:3.754979 +step:3524 
train loss:3.738481 +step:3525 train loss:3.729518 +step:3526 train loss:3.751287 +step:3527 train loss:3.700447 +step:3528 train loss:3.753484 +step:3529 train loss:3.729128 +step:3530 train loss:3.722582 +step:3531 train loss:3.717804 +step:3532 train loss:3.906195 +step:3533 train loss:3.728444 +step:3534 train loss:3.749209 +step:3535 train loss:3.719199 +step:3536 train loss:3.715554 +step:3537 train loss:3.726842 +step:3538 train loss:3.758911 +step:3539 train loss:3.703807 +step:3540 train loss:3.770781 +step:3541 train loss:3.736667 +step:3542 train loss:3.748103 +step:3543 train loss:3.668118 +step:3544 train loss:3.690267 +step:3545 train loss:3.691718 +step:3546 train loss:3.761400 +step:3547 train loss:3.763709 +step:3548 train loss:3.741454 +step:3549 train loss:3.732642 +step:3550 train loss:3.723983 +step:3551 train loss:3.752930 +step:3552 train loss:3.653973 +step:3553 train loss:3.771073 +step:3554 train loss:3.765564 +step:3555 train loss:3.750407 +step:3556 train loss:3.775350 +step:3557 train loss:3.757655 +step:3558 train loss:3.734210 +step:3559 train loss:3.680393 +step:3560 train loss:3.771540 +step:3561 train loss:3.769261 +step:3562 train loss:3.941468 +step:3563 train loss:3.803471 +step:3564 train loss:3.761194 +step:3565 train loss:3.760645 +step:3566 train loss:3.735033 +step:3567 train loss:3.673127 +step:3568 train loss:3.700085 +step:3569 train loss:3.783092 +step:3570 train loss:3.806947 +step:3571 train loss:3.786692 +step:3572 train loss:3.774772 +step:3573 train loss:3.733702 +step:3574 train loss:3.729296 +step:3575 train loss:3.721194 +step:3576 train loss:3.705588 +step:3577 train loss:3.711927 +step:3578 train loss:3.796513 +step:3579 train loss:3.708482 +step:3580 train loss:3.788115 +step:3581 train loss:3.728874 +step:3582 train loss:3.782515 +step:3583 train loss:3.720250 +step:3584 train loss:3.696256 +step:3585 train loss:3.746262 +step:3586 train loss:3.692986 +step:3587 train loss:3.789404 +step:3588 train loss:3.913640 +step:3589 train loss:3.750685 +step:3590 train loss:3.736846 +step:3591 train loss:3.745753 +step:3592 train loss:3.710496 +step:3593 train loss:3.677988 +step:3594 train loss:3.731614 +step:3595 train loss:3.710300 +step:3596 train loss:3.786778 +step:3597 train loss:3.758851 +step:3598 train loss:3.713293 +step:3599 train loss:3.759687 +step:3600 train loss:3.701117 +step:3601 train loss:3.718351 +step:3602 train loss:3.708278 +step:3603 train loss:3.722651 +step:3604 train loss:3.748491 +step:3605 train loss:3.850968 +step:3606 train loss:3.759019 +step:3607 train loss:3.742835 +step:3608 train loss:3.759807 +step:3609 train loss:3.741093 +step:3610 train loss:3.711476 +step:3611 train loss:3.710650 +step:3612 train loss:3.778117 +step:3613 train loss:3.747672 +step:3614 train loss:3.685726 +step:3615 train loss:3.734273 +step:3616 train loss:3.719662 +step:3617 train loss:3.787342 +step:3618 train loss:3.742396 +step:3619 train loss:3.736312 +step:3620 train loss:3.765953 +step:3621 train loss:3.705477 +step:3622 train loss:3.810598 +step:3623 train loss:3.802499 +step:3624 train loss:3.767208 +step:3625 train loss:3.748791 +step:3626 train loss:3.749681 +step:3627 train loss:3.745627 +step:3628 train loss:3.730887 +step:3629 train loss:3.733903 +step:3630 train loss:3.817169 +step:3631 train loss:3.742302 +step:3632 train loss:3.771462 +step:3633 train loss:3.727820 +step:3634 train loss:3.730740 +step:3635 train loss:3.717417 +step:3636 train loss:3.788583 +step:3637 train loss:3.865640 +step:3638 train loss:3.779511 
+step:3639 train loss:3.769818 +step:3640 train loss:3.775306 +step:3641 train loss:3.814161 +step:3642 train loss:3.705494 +step:3643 train loss:3.877235 +step:3644 train loss:3.767681 +step:3645 train loss:3.736421 +step:3646 train loss:3.862407 +step:3647 train loss:3.753699 +step:3648 train loss:3.742333 +step:3649 train loss:3.693178 +step:3650 train loss:3.734013 +step:3651 train loss:3.727853 +step:3652 train loss:3.714734 +step:3653 train loss:3.649096 +step:3654 train loss:3.717099 +step:3655 train loss:3.709915 +step:3656 train loss:3.741729 +step:3657 train loss:3.756603 +step:3658 train loss:3.752973 +step:3659 train loss:3.737978 +step:3660 train loss:3.707341 +step:3661 train loss:3.735466 +step:3662 train loss:3.709426 +step:3663 train loss:3.745122 +step:3664 train loss:3.700460 +step:3665 train loss:3.746175 +step:3666 train loss:3.779489 +step:3667 train loss:3.869031 +step:3668 train loss:3.750672 +step:3669 train loss:3.706874 +step:3670 train loss:3.755206 +step:3671 train loss:3.712832 +step:3672 train loss:3.747409 +step:3673 train loss:3.733502 +step:3674 train loss:3.750575 +step:3675 train loss:3.761109 +step:3676 train loss:3.725691 +step:3677 train loss:3.686557 +step:3678 train loss:3.743887 +step:3679 train loss:3.649404 +step:3680 train loss:3.750092 +step:3681 train loss:3.781611 +step:3682 train loss:3.763483 +step:3683 train loss:3.706867 +step:3684 train loss:3.705228 +step:3685 train loss:3.737876 +step:3686 train loss:3.764509 +step:3687 train loss:3.716112 +step:3688 train loss:3.692996 +step:3689 train loss:3.730104 +step:3690 train loss:3.717092 +step:3691 train loss:3.696470 +step:3692 train loss:3.755340 +step:3693 train loss:3.887293 +step:3694 train loss:3.706030 +step:3695 train loss:3.765969 +step:3696 train loss:3.726635 +step:3697 train loss:3.718438 +step:3698 train loss:3.656343 +step:3699 train loss:3.684281 +step:3700 train loss:3.714384 +step:3701 train loss:3.734128 +step:3702 train loss:3.755062 +step:3703 train loss:3.714274 +step:3704 train loss:3.759915 +step:3705 train loss:3.738330 +step:3706 train loss:3.690149 +step:3707 train loss:3.743289 +step:3708 train loss:3.717726 +step:3709 train loss:3.639809 +step:3710 train loss:3.763049 +step:3711 train loss:3.714436 +step:3712 train loss:3.752083 +step:3713 train loss:3.705531 +step:3714 train loss:3.720248 +step:3715 train loss:3.838251 +step:3716 train loss:3.747211 +step:3717 train loss:3.714819 +step:3718 train loss:3.724127 +step:3719 train loss:3.723823 +step:3720 train loss:3.732902 +step:3721 train loss:3.786088 +step:3722 train loss:3.798688 +step:3723 train loss:3.689300 +step:3724 train loss:3.746497 +step:3725 train loss:3.723719 +step:3726 train loss:3.742572 +step:3727 train loss:3.817817 +step:3728 train loss:3.781331 +step:3729 train loss:3.678298 +step:3730 train loss:3.697575 +step:3731 train loss:3.718948 +step:3732 train loss:3.874049 +step:3733 train loss:3.734329 +step:3734 train loss:3.737151 +step:3735 train loss:3.678453 +step:3736 train loss:3.733567 +step:3737 train loss:3.783640 +step:3738 train loss:3.802684 +step:3739 train loss:3.720583 +step:3740 train loss:3.623783 +step:3741 train loss:3.831180 +step:3742 train loss:3.743732 +step:3743 train loss:3.715106 +step:3744 train loss:3.719972 +step:3745 train loss:3.731703 +step:3746 train loss:3.708467 +step:3747 train loss:3.714092 +step:3748 train loss:3.757228 +step:3749 train loss:3.742700 +step:3750 validation loss:3.654924 +step:3750 train loss:3.753064 +step:3751 train loss:3.842224 +step:3752 
train loss:3.773650 +step:3753 train loss:3.690803 +step:3754 train loss:3.743272 +step:3755 train loss:3.921031 +step:3756 train loss:3.703667 +step:3757 train loss:3.695267 +step:3758 train loss:3.729177 +step:3759 train loss:3.672735 +step:3760 train loss:3.669867 +step:3761 train loss:3.723817 +step:3762 train loss:3.714533 +step:3763 train loss:3.718085 +step:3764 train loss:3.711040 +step:3765 train loss:3.705387 +step:3766 train loss:3.676799 +step:3767 train loss:3.760334 +step:3768 train loss:3.701199 +step:3769 train loss:3.966889 +step:3770 train loss:3.756660 +step:3771 train loss:3.766836 +step:3772 train loss:3.721904 +step:3773 train loss:3.714395 +step:3774 train loss:3.724082 +step:3775 train loss:3.713392 +step:3776 train loss:3.717804 +step:3777 train loss:3.675696 +step:3778 train loss:3.692377 +step:3779 train loss:3.680121 +step:3780 train loss:3.759509 +step:3781 train loss:3.723584 +step:3782 train loss:3.645510 +step:3783 train loss:3.747919 +step:3784 train loss:3.760327 +step:3785 train loss:3.666869 +step:3786 train loss:3.777892 +step:3787 train loss:3.687405 +step:3788 train loss:3.699226 +step:3789 train loss:3.604710 +step:3790 train loss:3.726510 +step:3791 train loss:3.746540 +step:3792 train loss:3.718668 +step:3793 train loss:3.716014 +step:3794 train loss:3.743463 +step:3795 train loss:3.710629 +step:3796 train loss:3.729831 +step:3797 train loss:3.706203 +step:3798 train loss:3.713991 +step:3799 train loss:3.723732 +step:3800 train loss:3.632111 +step:3801 train loss:3.748562 +step:3802 train loss:3.674014 +step:3803 train loss:3.756061 +step:3804 train loss:3.771187 +step:3805 train loss:3.725964 +step:3806 train loss:3.741812 +step:3807 train loss:3.765572 +step:3808 train loss:3.719015 +step:3809 train loss:3.735443 +step:3810 train loss:3.736988 +step:3811 train loss:3.723079 +step:3812 train loss:3.723045 +step:3813 train loss:3.680850 +step:3814 train loss:3.728191 +step:3815 train loss:3.729439 +step:3816 train loss:3.748011 +step:3817 train loss:3.762879 +step:3818 train loss:3.733572 +step:3819 train loss:3.746325 +step:3820 train loss:3.743516 +step:3821 train loss:3.703494 +step:3822 train loss:3.783847 +step:3823 train loss:3.679426 +step:3824 train loss:3.692681 +step:3825 train loss:3.701399 +step:3826 train loss:3.759934 +step:3827 train loss:3.788334 +step:3828 train loss:3.680463 +step:3829 train loss:3.699459 +step:3830 train loss:3.760689 +step:3831 train loss:3.692276 +step:3832 train loss:3.753102 +step:3833 train loss:3.694578 +step:3834 train loss:3.657958 +step:3835 train loss:3.704302 +step:3836 train loss:3.675872 +step:3837 train loss:3.746066 +step:3838 train loss:3.702901 +step:3839 train loss:3.740343 +step:3840 train loss:3.757032 +step:3841 train loss:3.704218 +step:3842 train loss:3.738371 +step:3843 train loss:3.754774 +step:3844 train loss:3.719985 +step:3845 train loss:3.743933 +step:3846 train loss:3.783663 +step:3847 train loss:3.681969 +step:3848 train loss:3.688227 +step:3849 train loss:3.709000 +step:3850 train loss:3.727726 +step:3851 train loss:3.859020 +step:3852 train loss:3.839651 +step:3853 train loss:3.741052 +step:3854 train loss:3.706851 +step:3855 train loss:3.754844 +step:3856 train loss:3.675715 +step:3857 train loss:3.737384 +step:3858 train loss:3.653303 +step:3859 train loss:3.700864 +step:3860 train loss:3.766414 +step:3861 train loss:3.738984 +step:3862 train loss:3.675732 +step:3863 train loss:3.725095 +step:3864 train loss:3.696929 +step:3865 train loss:3.733047 +step:3866 train loss:3.755695 
+step:3867 train loss:3.750151 +step:3868 train loss:3.701855 +step:3869 train loss:3.702962 +step:3870 train loss:3.679335 +step:3871 train loss:3.672277 +step:3872 train loss:3.806745 +step:3873 train loss:3.729055 +step:3874 train loss:3.740184 +step:3875 train loss:3.850202 +step:3876 train loss:3.723872 +step:3877 train loss:3.752615 +step:3878 train loss:3.778867 +step:3879 train loss:3.765241 +step:3880 train loss:3.845709 +step:3881 train loss:3.668532 +step:3882 train loss:3.704031 +step:3883 train loss:3.715704 +step:3884 train loss:3.706489 +step:3885 train loss:3.718587 +step:3886 train loss:3.780302 +step:3887 train loss:3.760103 +step:3888 train loss:3.722552 +step:3889 train loss:3.693859 +step:3890 train loss:3.728476 +step:3891 train loss:3.743911 +step:3892 train loss:3.652643 +step:3893 train loss:3.760876 +step:3894 train loss:3.709795 +step:3895 train loss:3.725814 +step:3896 train loss:3.721692 +step:3897 train loss:3.687807 +step:3898 train loss:3.745896 +step:3899 train loss:3.786961 +step:3900 train loss:3.739637 +step:3901 train loss:3.758994 +step:3902 train loss:3.683287 +step:3903 train loss:3.696808 +step:3904 train loss:3.731007 +step:3905 train loss:3.667258 +step:3906 train loss:3.701244 +step:3907 train loss:3.734793 +step:3908 train loss:3.810529 +step:3909 train loss:3.702476 +step:3910 train loss:3.731657 +step:3911 train loss:3.746522 +step:3912 train loss:3.694266 +step:3913 train loss:3.708645 +step:3914 train loss:3.730654 +step:3915 train loss:3.698716 +step:3916 train loss:3.734138 +step:3917 train loss:3.777877 +step:3918 train loss:3.757162 +step:3919 train loss:3.728989 +step:3920 train loss:3.706335 +step:3921 train loss:3.748219 +step:3922 train loss:3.751149 +step:3923 train loss:3.740165 +step:3924 train loss:3.677641 +step:3925 train loss:3.875915 +step:3926 train loss:3.725079 +step:3927 train loss:3.702008 +step:3928 train loss:3.780193 +step:3929 train loss:3.845197 +step:3930 train loss:3.747766 +step:3931 train loss:3.694762 +step:3932 train loss:3.735117 +step:3933 train loss:3.756943 +step:3934 train loss:3.706710 +step:3935 train loss:3.680001 +step:3936 train loss:3.768749 +step:3937 train loss:3.727707 +step:3938 train loss:3.740408 +step:3939 train loss:3.761709 +step:3940 train loss:3.712533 +step:3941 train loss:3.797527 +step:3942 train loss:3.755954 +step:3943 train loss:3.738756 +step:3944 train loss:3.791355 +step:3945 train loss:3.699422 +step:3946 train loss:3.644014 +step:3947 train loss:3.771863 +step:3948 train loss:3.741990 +step:3949 train loss:3.905126 +step:3950 train loss:3.709121 +step:3951 train loss:3.637259 +step:3952 train loss:3.597721 +step:3953 train loss:3.674184 +step:3954 train loss:3.724417 +step:3955 train loss:3.749346 +step:3956 train loss:3.708635 +step:3957 train loss:3.758904 +step:3958 train loss:3.734259 +step:3959 train loss:3.776057 +step:3960 train loss:3.696873 +step:3961 train loss:3.725244 +step:3962 train loss:3.733499 +step:3963 train loss:3.708561 +step:3964 train loss:3.686989 +step:3965 train loss:3.742463 +step:3966 train loss:3.695638 +step:3967 train loss:3.738714 +step:3968 train loss:3.760578 +step:3969 train loss:3.665979 +step:3970 train loss:3.779804 +step:3971 train loss:3.699353 +step:3972 train loss:3.728635 +step:3973 train loss:3.685458 +step:3974 train loss:3.780840 +step:3975 train loss:3.731925 +step:3976 train loss:3.686212 +step:3977 train loss:3.741578 +step:3978 train loss:3.709954 +step:3979 train loss:3.694909 +step:3980 train loss:3.764222 +step:3981 train 
loss:3.696731 +step:3982 train loss:3.718899 +step:3983 train loss:3.701288 +step:3984 train loss:3.735928 +step:3985 train loss:3.709108 +step:3986 train loss:3.721792 +step:3987 train loss:3.731973 +step:3988 train loss:3.667824 +step:3989 train loss:3.739436 +step:3990 train loss:3.734481 +step:3991 train loss:3.746695 +step:3992 train loss:3.703712 +step:3993 train loss:3.738339 +step:3994 train loss:3.687295 +step:3995 train loss:3.740369 +step:3996 train loss:3.658098 +step:3997 train loss:3.734309 +step:3998 train loss:3.621986 +step:3999 train loss:3.776894 +step:4000 validation loss:3.635707 total_sharp:3.7275e-04 L1_sharp:2.0322e-04 L2_sharp:5.4734e-05 L3_sharp:1.8855e-04 L4_sharp:1.1783e-04 L5_sharp:6.2951e-05 L6_sharp:3.4552e-05 L7_sharp:3.9862e-05 L8_sharp:8.6269e-05 L9_sharp:1.4954e-04 L10_sharp:1.4848e-04 L11_sharp:1.0012e-04 L12_sharp:3.0274e-04 total_fnorm:1.0663e+01 total_l1_linf:9.3955e+04 total_spectral:1.0663e+01 L1_fnorm:2.4405e+00 L2_fnorm:2.4699e+00 L3_fnorm:2.4370e+00 L4_fnorm:2.4023e+00 L5_fnorm:2.3613e+00 L6_fnorm:2.4430e+00 L7_fnorm:2.5303e+00 L8_fnorm:2.4605e+00 L9_fnorm:2.4756e+00 L10_fnorm:2.4649e+00 L11_fnorm:2.4863e+00 L12_fnorm:2.3834e+00 L1_l1linf:2.5019e+00 L2_l1linf:2.5833e+00 L3_l1linf:2.6029e+00 L4_l1linf:2.5453e+00 L5_l1linf:2.4002e+00 L6_l1linf:2.3901e+00 L7_l1linf:2.5803e+00 L8_l1linf:2.7151e+00 L9_l1linf:2.7983e+00 L10_l1linf:2.8189e+00 L11_l1linf:2.9047e+00 L12_l1linf:2.9557e+00 L1_spectral:2.9037e-01 L2_spectral:2.7542e-01 L3_spectral:3.1342e-01 L4_spectral:3.3975e-01 L5_spectral:3.1415e-01 L6_spectral:2.6199e-01 L7_spectral:2.7440e-01 L8_spectral:3.1018e-01 L9_spectral:3.9138e-01 L10_spectral:4.0328e-01 L11_spectral:3.7456e-01 L12_spectral:4.1304e-01 ip_v_neg_g:2.3568e-02 cos_v_neg_g:5.3872e-03 v_norm:1.0663e+01 g_norm:4.1030e-01 hv_norm:2.8488e-01 cos_v_hv:1.3952e-02 hg_norm:2.8117e+00 cos_g_hg:5.0161e-01 v_par:3.9591e-03 v_perp:1.0663e+01 L1_cos_v_neg_g:2.0041e-02 L1_v_norm:2.4405e+00 L2_cos_v_neg_g:8.5794e-03 L2_v_norm:2.4699e+00 L3_cos_v_neg_g:1.6200e-02 L3_v_norm:2.4370e+00 L4_cos_v_neg_g:9.5770e-03 L4_v_norm:2.4023e+00 L5_cos_v_neg_g:6.5701e-03 L5_v_norm:2.3613e+00 L6_cos_v_neg_g:5.4066e-03 L6_v_norm:2.4430e+00 L7_cos_v_neg_g:6.5030e-03 L7_v_norm:2.5303e+00 L8_cos_v_neg_g:8.6867e-03 L8_v_norm:2.4605e+00 L9_cos_v_neg_g:1.1974e-02 L9_v_norm:2.4756e+00 L10_cos_v_neg_g:1.3662e-02 L10_v_norm:2.4649e+00 L11_cos_v_neg_g:1.3017e-02 L11_v_norm:2.4863e+00 L12_cos_v_neg_g:1.4156e-02 L12_v_norm:2.3834e+00 +step:4000 train loss:3.656543 +step:4001 train loss:3.735368 +step:4002 train loss:3.713351 +step:4003 train loss:3.745699 +step:4004 train loss:3.658736 +step:4005 train loss:3.746808 +step:4006 train loss:3.757283 +step:4007 train loss:3.678238 +step:4008 train loss:3.637475 +step:4009 train loss:3.718540 +step:4010 train loss:3.693172 +step:4011 train loss:3.700005 +step:4012 train loss:3.717695 +step:4013 train loss:3.693071 +step:4014 train loss:3.703970 +step:4015 train loss:3.696449 +step:4016 train loss:3.704794 +step:4017 train loss:3.669820 +step:4018 train loss:3.611521 +step:4019 train loss:3.664889 +step:4020 train loss:3.735795 +step:4021 train loss:3.682144 +step:4022 train loss:3.683200 +step:4023 train loss:3.697832 +step:4024 train loss:3.607941 +step:4025 train loss:3.735660 +step:4026 train loss:3.722601 +step:4027 train loss:3.728112 +step:4028 train loss:3.743562 +step:4029 train loss:3.776971 +step:4030 train loss:3.689387 +step:4031 train loss:3.735184 +step:4032 train loss:3.692361 +step:4033 train loss:3.724293 +step:4034 
train loss:3.739757 +step:4035 train loss:3.716043 +step:4036 train loss:3.715723 +step:4037 train loss:3.730985 +step:4038 train loss:3.651515 +step:4039 train loss:3.706527 +step:4040 train loss:3.684882 +step:4041 train loss:3.679549 +step:4042 train loss:3.701710 +step:4043 train loss:3.684122 +step:4044 train loss:3.718940 +step:4045 train loss:3.721897 +step:4046 train loss:3.679980 +step:4047 train loss:3.711415 +step:4048 train loss:3.719019 +step:4049 train loss:3.681142 +step:4050 train loss:3.781942 +step:4051 train loss:3.697375 +step:4052 train loss:3.716712 +step:4053 train loss:3.765864 +step:4054 train loss:3.737155 +step:4055 train loss:3.751525 +step:4056 train loss:3.747870 +step:4057 train loss:3.687335 +step:4058 train loss:3.670551 +step:4059 train loss:3.749833 +step:4060 train loss:3.693424 +step:4061 train loss:3.664736 +step:4062 train loss:3.776994 +step:4063 train loss:3.727188 +step:4064 train loss:3.696616 +step:4065 train loss:3.681561 +step:4066 train loss:3.709158 +step:4067 train loss:3.733171 +step:4068 train loss:3.702401 +step:4069 train loss:3.759963 +step:4070 train loss:3.675551 +step:4071 train loss:3.648972 +step:4072 train loss:3.722231 +step:4073 train loss:3.657360 +step:4074 train loss:3.711333 +step:4075 train loss:3.780664 +step:4076 train loss:3.634789 +step:4077 train loss:3.714090 +step:4078 train loss:3.811869 +step:4079 train loss:3.757423 +step:4080 train loss:3.702208 +step:4081 train loss:3.669554 +step:4082 train loss:3.724187 +step:4083 train loss:3.661055 +step:4084 train loss:3.678583 +step:4085 train loss:3.912274 +step:4086 train loss:3.680326 +step:4087 train loss:3.726587 +step:4088 train loss:3.709004 +step:4089 train loss:3.698984 +step:4090 train loss:3.719014 +step:4091 train loss:3.743536 +step:4092 train loss:3.668133 +step:4093 train loss:3.693944 +step:4094 train loss:3.717754 +step:4095 train loss:3.669366 +step:4096 train loss:3.701918 +step:4097 train loss:3.704837 +step:4098 train loss:3.682949 +step:4099 train loss:3.681353 +step:4100 train loss:3.733448 +step:4101 train loss:3.655941 +step:4102 train loss:3.690372 +step:4103 train loss:3.893800 +step:4104 train loss:3.711795 +step:4105 train loss:3.676975 +step:4106 train loss:3.749919 +step:4107 train loss:3.670373 +step:4108 train loss:3.676428 +step:4109 train loss:3.740679 +step:4110 train loss:3.744096 +step:4111 train loss:3.715439 +step:4112 train loss:3.734235 +step:4113 train loss:3.691048 +step:4114 train loss:3.640893 +step:4115 train loss:3.677985 +step:4116 train loss:3.662821 +step:4117 train loss:3.683084 +step:4118 train loss:3.734073 +step:4119 train loss:3.757830 +step:4120 train loss:3.679131 +step:4121 train loss:3.672915 +step:4122 train loss:3.737831 +step:4123 train loss:3.749699 +step:4124 train loss:3.730118 +step:4125 train loss:3.764780 +step:4126 train loss:3.701285 +step:4127 train loss:3.719660 +step:4128 train loss:3.710229 +step:4129 train loss:3.755791 +step:4130 train loss:3.687093 +step:4131 train loss:3.721911 +step:4132 train loss:3.737558 +step:4133 train loss:3.688728 +step:4134 train loss:3.743869 +step:4135 train loss:3.676877 +step:4136 train loss:3.698111 +step:4137 train loss:3.670083 +step:4138 train loss:3.676770 +step:4139 train loss:3.723859 +step:4140 train loss:3.686274 +step:4141 train loss:3.648139 +step:4142 train loss:3.691205 +step:4143 train loss:3.729828 +step:4144 train loss:3.679799 +step:4145 train loss:3.645134 +step:4146 train loss:3.713222 +step:4147 train loss:3.688490 +step:4148 train loss:3.683365 
+step:4149 train loss:3.761493 +step:4150 train loss:3.728275 +step:4151 train loss:3.706995 +step:4152 train loss:3.731934 +step:4153 train loss:3.737480 +step:4154 train loss:3.741367 +step:4155 train loss:3.765395 +step:4156 train loss:3.641149 +step:4157 train loss:3.666145 +step:4158 train loss:3.722294 +step:4159 train loss:3.624959 +step:4160 train loss:3.714393 +step:4161 train loss:3.716758 +step:4162 train loss:3.624482 +step:4163 train loss:3.701870 +step:4164 train loss:3.653970 +step:4165 train loss:3.654460 +step:4166 train loss:3.723718 +step:4167 train loss:3.716916 +step:4168 train loss:3.711301 +step:4169 train loss:3.736937 +step:4170 train loss:3.858492 +step:4171 train loss:3.713211 +step:4172 train loss:3.728273 +step:4173 train loss:3.724858 +step:4174 train loss:3.690388 +step:4175 train loss:3.777041 +step:4176 train loss:3.702355 +step:4177 train loss:3.725993 +step:4178 train loss:3.701227 +step:4179 train loss:3.654129 +step:4180 train loss:3.652303 +step:4181 train loss:3.700869 +step:4182 train loss:3.686314 +step:4183 train loss:3.621922 +step:4184 train loss:3.696143 +step:4185 train loss:3.757705 +step:4186 train loss:3.733091 +step:4187 train loss:3.747728 +step:4188 train loss:3.718509 +step:4189 train loss:3.679004 +step:4190 train loss:3.719206 +step:4191 train loss:3.669840 +step:4192 train loss:3.756268 +step:4193 train loss:3.666550 +step:4194 train loss:3.647047 +step:4195 train loss:3.645542 +step:4196 train loss:3.711526 +step:4197 train loss:3.727336 +step:4198 train loss:3.650672 +step:4199 train loss:3.733489 +step:4200 train loss:3.695023 +step:4201 train loss:3.673525 +step:4202 train loss:3.690050 +step:4203 train loss:3.702749 +step:4204 train loss:3.695804 +step:4205 train loss:3.712216 +step:4206 train loss:3.729484 +step:4207 train loss:3.732451 +step:4208 train loss:3.694640 +step:4209 train loss:3.758664 +step:4210 train loss:3.785780 +step:4211 train loss:3.667481 +step:4212 train loss:3.707726 +step:4213 train loss:3.661099 +step:4214 train loss:3.667997 +step:4215 train loss:3.684472 +step:4216 train loss:3.656368 +step:4217 train loss:3.680454 +step:4218 train loss:3.721116 +step:4219 train loss:3.718541 +step:4220 train loss:3.807427 +step:4221 train loss:3.696078 +step:4222 train loss:3.751168 +step:4223 train loss:3.669231 +step:4224 train loss:3.742524 +step:4225 train loss:3.668655 +step:4226 train loss:3.727938 +step:4227 train loss:3.702783 +step:4228 train loss:3.674537 +step:4229 train loss:3.687014 +step:4230 train loss:3.667056 +step:4231 train loss:3.655500 +step:4232 train loss:3.705452 +step:4233 train loss:3.615038 +step:4234 train loss:3.696525 +step:4235 train loss:3.773046 +step:4236 train loss:3.742615 +step:4237 train loss:3.725824 +step:4238 train loss:3.734907 +step:4239 train loss:3.785028 +step:4240 train loss:3.694227 +step:4241 train loss:3.621518 +step:4242 train loss:3.735847 +step:4243 train loss:3.737738 +step:4244 train loss:3.750571 +step:4245 train loss:3.807121 +step:4246 train loss:3.679168 +step:4247 train loss:3.738648 +step:4248 train loss:3.688210 +step:4249 train loss:3.695669 +step:4250 validation loss:3.621261 +step:4250 train loss:3.676879 +step:4251 train loss:3.770863 +step:4252 train loss:3.681074 +step:4253 train loss:3.676149 +step:4254 train loss:3.682415 +step:4255 train loss:3.667592 +step:4256 train loss:3.681791 +step:4257 train loss:3.741379 +step:4258 train loss:3.601142 +step:4259 train loss:3.665451 +step:4260 train loss:3.729964 +step:4261 train loss:3.718103 +step:4262 
train loss:3.851389 +step:4263 train loss:3.798458 +step:4264 train loss:3.741703 +step:4265 train loss:3.733603 +step:4266 train loss:3.726944 +step:4267 train loss:3.724916 +step:4268 train loss:3.669008 +step:4269 train loss:3.764491 +step:4270 train loss:3.744761 +step:4271 train loss:3.655180 +step:4272 train loss:3.708979 +step:4273 train loss:3.686220 +step:4274 train loss:3.671556 +step:4275 train loss:3.688893 +step:4276 train loss:3.655256 +step:4277 train loss:3.790367 +step:4278 train loss:3.640227 +step:4279 train loss:3.667895 +step:4280 train loss:3.751167 +step:4281 train loss:3.736720 +step:4282 train loss:3.799753 +step:4283 train loss:3.656550 +step:4284 train loss:3.686404 +step:4285 train loss:3.689024 +step:4286 train loss:3.751548 +step:4287 train loss:3.752620 +step:4288 train loss:3.732085 +step:4289 train loss:3.684585 +step:4290 train loss:3.694298 +step:4291 train loss:3.653573 +step:4292 train loss:3.695818 +step:4293 train loss:3.708473 +step:4294 train loss:3.692198 +step:4295 train loss:3.628046 +step:4296 train loss:3.702231 +step:4297 train loss:3.681956 +step:4298 train loss:3.693143 +step:4299 train loss:3.688517 +step:4300 train loss:3.805954 +step:4301 train loss:3.621588 +step:4302 train loss:3.764793 +step:4303 train loss:3.641760 +step:4304 train loss:3.651803 +step:4305 train loss:3.672360 +step:4306 train loss:3.743579 +step:4307 train loss:3.658876 +step:4308 train loss:3.659180 +step:4309 train loss:3.728304 +step:4310 train loss:3.665920 +step:4311 train loss:3.723130 +step:4312 train loss:3.714673 +step:4313 train loss:3.708753 +step:4314 train loss:3.654763 +step:4315 train loss:3.684353 +step:4316 train loss:3.632047 +step:4317 train loss:3.688289 +step:4318 train loss:3.727844 +step:4319 train loss:3.675974 +step:4320 train loss:3.738162 +step:4321 train loss:3.723079 +step:4322 train loss:3.677053 +step:4323 train loss:3.617411 +step:4324 train loss:3.707438 +step:4325 train loss:3.685973 +step:4326 train loss:3.675233 +step:4327 train loss:3.780295 +step:4328 train loss:3.692964 +step:4329 train loss:3.648990 +step:4330 train loss:3.693323 +step:4331 train loss:3.706716 +step:4332 train loss:3.734193 +step:4333 train loss:3.693417 +step:4334 train loss:3.714115 +step:4335 train loss:3.711633 +step:4336 train loss:3.720890 +step:4337 train loss:3.685503 +step:4338 train loss:3.806941 +step:4339 train loss:3.714729 +step:4340 train loss:3.719701 +step:4341 train loss:3.690373 +step:4342 train loss:3.701557 +step:4343 train loss:3.819630 +step:4344 train loss:3.712917 +step:4345 train loss:3.729887 +step:4346 train loss:3.740646 +step:4347 train loss:3.750382 +step:4348 train loss:3.665729 +step:4349 train loss:3.746792 +step:4350 train loss:3.687828 +step:4351 train loss:3.639276 +step:4352 train loss:3.714970 +step:4353 train loss:3.661442 +step:4354 train loss:3.719058 +step:4355 train loss:3.679140 +step:4356 train loss:3.702428 +step:4357 train loss:3.681779 +step:4358 train loss:3.775104 +step:4359 train loss:3.726803 +step:4360 train loss:3.643383 +step:4361 train loss:3.688679 +step:4362 train loss:3.711762 +step:4363 train loss:3.729500 +step:4364 train loss:3.695617 +step:4365 train loss:3.675370 +step:4366 train loss:3.723180 +step:4367 train loss:3.734540 +step:4368 train loss:3.715660 +step:4369 train loss:3.582427 +step:4370 train loss:3.714422 +step:4371 train loss:3.622378 +step:4372 train loss:3.772615 +step:4373 train loss:3.711456 +step:4374 train loss:3.680898 +step:4375 train loss:3.725063 +step:4376 train loss:3.736073 
+step:4377 train loss:3.668638 +step:4378 train loss:3.679616 +step:4379 train loss:3.761567 +step:4380 train loss:3.743513 +step:4381 train loss:3.644304 +step:4382 train loss:3.688658 +step:4383 train loss:3.718905 +step:4384 train loss:3.711924 +step:4385 train loss:3.639114 +step:4386 train loss:3.696522 +step:4387 train loss:3.666966 +step:4388 train loss:3.684114 +step:4389 train loss:3.713335 +step:4390 train loss:3.756450 +step:4391 train loss:3.681607 +step:4392 train loss:3.752397 +step:4393 train loss:3.718611 +step:4394 train loss:3.656489 +step:4395 train loss:3.711859 +step:4396 train loss:3.687081 +step:4397 train loss:3.728253 +step:4398 train loss:3.674550 +step:4399 train loss:3.665663 +step:4400 train loss:3.669481 +step:4401 train loss:3.734049 +step:4402 train loss:3.734769 +step:4403 train loss:3.683559 +step:4404 train loss:3.710727 +step:4405 train loss:3.629936 +step:4406 train loss:3.708301 +step:4407 train loss:3.643458 +step:4408 train loss:3.739491 +step:4409 train loss:3.697770 +step:4410 train loss:3.705186 +step:4411 train loss:3.664022 +step:4412 train loss:3.778488 +step:4413 train loss:3.675640 +step:4414 train loss:3.680443 +step:4415 train loss:3.673172 +step:4416 train loss:3.664173 +step:4417 train loss:3.655814 +step:4418 train loss:3.729918 +step:4419 train loss:3.698862 +step:4420 train loss:3.709280 +step:4421 train loss:3.735784 +step:4422 train loss:3.752463 +step:4423 train loss:3.708239 +step:4424 train loss:3.694944 +step:4425 train loss:3.656549 +step:4426 train loss:3.731387 +step:4427 train loss:3.690606 +step:4428 train loss:3.631795 +step:4429 train loss:3.690128 +step:4430 train loss:3.728849 +step:4431 train loss:3.722722 +step:4432 train loss:3.628111 +step:4433 train loss:3.681057 +step:4434 train loss:3.679712 +step:4435 train loss:3.710275 +step:4436 train loss:3.643057 +step:4437 train loss:3.726614 +step:4438 train loss:3.691041 +step:4439 train loss:3.694067 +step:4440 train loss:3.693416 +step:4441 train loss:3.693352 +step:4442 train loss:3.748656 +step:4443 train loss:3.682335 +step:4444 train loss:3.766544 +step:4445 train loss:3.730422 +step:4446 train loss:3.662784 +step:4447 train loss:3.707043 +step:4448 train loss:3.729077 +step:4449 train loss:3.664508 +step:4450 train loss:3.683967 +step:4451 train loss:3.736916 +step:4452 train loss:3.794690 +step:4453 train loss:3.728951 +step:4454 train loss:3.700877 +step:4455 train loss:3.751664 +step:4456 train loss:3.695458 +step:4457 train loss:3.692403 +step:4458 train loss:3.702482 +step:4459 train loss:3.739143 +step:4460 train loss:3.648440 +step:4461 train loss:3.617084 +step:4462 train loss:3.673814 +step:4463 train loss:3.699319 +step:4464 train loss:3.663318 +step:4465 train loss:3.699982 +step:4466 train loss:3.794315 +step:4467 train loss:3.673140 +step:4468 train loss:3.667388 +step:4469 train loss:3.659749 +step:4470 train loss:3.638381 +step:4471 train loss:3.697391 +step:4472 train loss:3.622737 +step:4473 train loss:3.712386 +step:4474 train loss:3.738480 +step:4475 train loss:3.695049 +step:4476 train loss:3.661693 +step:4477 train loss:3.645501 +step:4478 train loss:3.704559 +step:4479 train loss:3.805066 +step:4480 train loss:3.640590 +step:4481 train loss:3.712461 +step:4482 train loss:3.674117 +step:4483 train loss:3.666623 +step:4484 train loss:3.717337 +step:4485 train loss:3.676778 +step:4486 train loss:3.777589 +step:4487 train loss:3.674138 +step:4488 train loss:3.672834 +step:4489 train loss:3.626110 +step:4490 train loss:3.708127 +step:4491 train 
loss:3.660027 +step:4492 train loss:3.689353 +step:4493 train loss:3.677441 +step:4494 train loss:3.667518 +step:4495 train loss:3.737441 +step:4496 train loss:3.678313 +step:4497 train loss:3.760653 +step:4498 train loss:3.650111 +step:4499 train loss:3.705224 +step:4500 validation loss:3.606457 total_sharp:3.1398e-04 L1_sharp:1.2496e-04 L2_sharp:1.7416e-05 L3_sharp:3.1613e-05 L4_sharp:3.9978e-05 L5_sharp:5.0206e-05 L6_sharp:2.5639e-05 L7_sharp:3.2105e-05 L8_sharp:7.2197e-05 L9_sharp:1.1506e-04 L10_sharp:1.1486e-04 L11_sharp:1.1081e-04 L12_sharp:3.2939e-04 total_fnorm:1.0832e+01 total_l1_linf:9.5505e+04 total_spectral:1.0832e+01 L1_fnorm:2.5409e+00 L2_fnorm:2.5011e+00 L3_fnorm:2.4607e+00 L4_fnorm:2.4620e+00 L5_fnorm:2.4560e+00 L6_fnorm:2.4974e+00 L7_fnorm:2.5690e+00 L8_fnorm:2.4908e+00 L9_fnorm:2.4910e+00 L10_fnorm:2.4831e+00 L11_fnorm:2.5376e+00 L12_fnorm:2.4714e+00 L1_l1linf:2.7161e+00 L2_l1linf:2.6196e+00 L3_l1linf:2.7233e+00 L4_l1linf:2.7273e+00 L5_l1linf:2.5555e+00 L6_l1linf:2.4787e+00 L7_l1linf:2.6976e+00 L8_l1linf:2.6904e+00 L9_l1linf:2.8909e+00 L10_l1linf:2.8342e+00 L11_l1linf:2.8100e+00 L12_l1linf:2.9533e+00 L1_spectral:3.1694e-01 L2_spectral:2.9517e-01 L3_spectral:3.1793e-01 L4_spectral:3.3318e-01 L5_spectral:2.9813e-01 L6_spectral:2.6293e-01 L7_spectral:2.9198e-01 L8_spectral:3.3535e-01 L9_spectral:3.8035e-01 L10_spectral:3.9718e-01 L11_spectral:4.2143e-01 L12_spectral:4.4055e-01 ip_v_neg_g:2.0117e-02 cos_v_neg_g:4.6135e-03 v_norm:1.0832e+01 g_norm:4.0253e-01 hv_norm:2.7110e-01 cos_v_hv:1.2546e-02 hg_norm:2.4302e+00 cos_g_hg:4.5997e-01 v_par:3.6593e-03 v_perp:1.0832e+01 L1_cos_v_neg_g:1.6796e-02 L1_v_norm:2.5409e+00 L2_cos_v_neg_g:5.4150e-03 L2_v_norm:2.5011e+00 L3_cos_v_neg_g:6.6077e-03 L3_v_norm:2.4607e+00 L4_cos_v_neg_g:6.4246e-03 L4_v_norm:2.4620e+00 L5_cos_v_neg_g:6.9018e-03 L5_v_norm:2.4560e+00 L6_cos_v_neg_g:5.4280e-03 L6_v_norm:2.4974e+00 L7_cos_v_neg_g:6.4134e-03 L7_v_norm:2.5690e+00 L8_cos_v_neg_g:8.1632e-03 L8_v_norm:2.4908e+00 L9_cos_v_neg_g:1.0482e-02 L9_v_norm:2.4910e+00 L10_cos_v_neg_g:1.2747e-02 L10_v_norm:2.4831e+00 L11_cos_v_neg_g:1.3600e-02 L11_v_norm:2.5376e+00 L12_cos_v_neg_g:1.4289e-02 L12_v_norm:2.4714e+00 +step:4500 train loss:3.614150 +step:4501 train loss:3.672902 +step:4502 train loss:3.796280 +step:4503 train loss:3.699839 +step:4504 train loss:3.709800 +step:4505 train loss:3.694045 +step:4506 train loss:3.664928 +step:4507 train loss:3.738592 +step:4508 train loss:3.673616 +step:4509 train loss:3.672848 +step:4510 train loss:3.708457 +step:4511 train loss:3.660205 +step:4512 train loss:3.684426 +step:4513 train loss:3.739963 +step:4514 train loss:3.647316 +step:4515 train loss:3.761852 +step:4516 train loss:3.738209 +step:4517 train loss:3.692519 +step:4518 train loss:3.631390 +step:4519 train loss:3.666152 +step:4520 train loss:3.678453 +step:4521 train loss:3.617860 +step:4522 train loss:3.674768 +step:4523 train loss:3.720986 +step:4524 train loss:3.703253 +step:4525 train loss:3.627019 +step:4526 train loss:3.665280 +step:4527 train loss:3.653797 +step:4528 train loss:3.683495 +step:4529 train loss:3.681132 +step:4530 train loss:3.773648 +step:4531 train loss:3.665888 +step:4532 train loss:3.689906 +step:4533 train loss:3.663419 +step:4534 train loss:3.753891 +step:4535 train loss:3.653177 +step:4536 train loss:3.725250 +step:4537 train loss:3.706696 +step:4538 train loss:3.685602 +step:4539 train loss:3.707312 +step:4540 train loss:3.688013 +step:4541 train loss:3.653707 +step:4542 train loss:3.704852 +step:4543 train loss:3.785200 +step:4544 
train loss:3.728554 +step:4545 train loss:3.674109 +step:4546 train loss:3.768641 +step:4547 train loss:3.726267 +step:4548 train loss:3.729587 +step:4549 train loss:3.683101 +step:4550 train loss:3.651444 +step:4551 train loss:3.667678 +step:4552 train loss:3.670785 +step:4553 train loss:3.752513 +step:4554 train loss:3.647133 +step:4555 train loss:3.760009 +step:4556 train loss:3.697283 +step:4557 train loss:3.626974 +step:4558 train loss:3.709349 +step:4559 train loss:3.718231 +step:4560 train loss:3.658563 +step:4561 train loss:3.646903 +step:4562 train loss:3.687181 +step:4563 train loss:3.636546 +step:4564 train loss:3.665556 +step:4565 train loss:3.664701 +step:4566 train loss:3.640136 +step:4567 train loss:3.666922 +step:4568 train loss:3.665291 +step:4569 train loss:3.649586 +step:4570 train loss:3.699018 +step:4571 train loss:3.678854 +step:4572 train loss:3.671308 +step:4573 train loss:3.681147 +step:4574 train loss:3.825927 +step:4575 train loss:3.660351 +step:4576 train loss:3.649432 +step:4577 train loss:3.688502 +step:4578 train loss:3.732964 +step:4579 train loss:3.678823 +step:4580 train loss:3.738755 +step:4581 train loss:3.677448 +step:4582 train loss:3.670903 +step:4583 train loss:3.677064 +step:4584 train loss:3.650517 +step:4585 train loss:3.730435 +step:4586 train loss:3.720159 +step:4587 train loss:3.619943 +step:4588 train loss:3.659333 +step:4589 train loss:3.737983 +step:4590 train loss:3.707060 +step:4591 train loss:3.645141 +step:4592 train loss:3.728911 +step:4593 train loss:3.653091 +step:4594 train loss:3.680442 +step:4595 train loss:3.702624 +step:4596 train loss:3.639257 +step:4597 train loss:3.776997 +step:4598 train loss:3.695755 +step:4599 train loss:3.649137 +step:4600 train loss:3.655516 +step:4601 train loss:3.681052 +step:4602 train loss:3.629666 +step:4603 train loss:3.643391 +step:4604 train loss:3.748479 +step:4605 train loss:3.668503 +step:4606 train loss:3.694578 +step:4607 train loss:3.676902 +step:4608 train loss:3.712853 +step:4609 train loss:3.672640 +step:4610 train loss:3.717954 +step:4611 train loss:3.740343 +step:4612 train loss:3.741557 +step:4613 train loss:3.719115 +step:4614 train loss:3.713943 +step:4615 train loss:3.654850 +step:4616 train loss:3.638120 +step:4617 train loss:3.680065 +step:4618 train loss:3.696417 +step:4619 train loss:3.655454 +step:4620 train loss:3.671766 +step:4621 train loss:3.674279 +step:4622 train loss:3.609132 +step:4623 train loss:3.718867 +step:4624 train loss:3.698266 +step:4625 train loss:3.661360 +step:4626 train loss:3.704705 +step:4627 train loss:3.674217 +step:4628 train loss:3.659859 +step:4629 train loss:3.701875 +step:4630 train loss:3.758334 +step:4631 train loss:3.755347 +step:4632 train loss:3.653013 +step:4633 train loss:3.664002 +step:4634 train loss:3.740937 +step:4635 train loss:3.702791 +step:4636 train loss:3.714485 +step:4637 train loss:3.653014 +step:4638 train loss:3.656287 +step:4639 train loss:3.657104 +step:4640 train loss:3.665571 +step:4641 train loss:3.673393 +step:4642 train loss:3.704587 +step:4643 train loss:3.668450 +step:4644 train loss:3.692619 +step:4645 train loss:3.707522 +step:4646 train loss:3.661836 +step:4647 train loss:3.619917 +step:4648 train loss:3.728109 +step:4649 train loss:3.740748 +step:4650 train loss:3.686108 +step:4651 train loss:3.689036 +step:4652 train loss:3.679036 +step:4653 train loss:3.734761 +step:4654 train loss:3.730712 +step:4655 train loss:3.633888 +step:4656 train loss:3.668905 +step:4657 train loss:3.721405 +step:4658 train loss:3.674881 
+step:4659 train loss:3.689146 +step:4660 train loss:3.732253 +step:4661 train loss:3.651958 +step:4662 train loss:3.668307 +step:4663 train loss:3.670278 +step:4664 train loss:3.729372 +step:4665 train loss:3.725022 +step:4666 train loss:3.722209 +step:4667 train loss:3.710814 +step:4668 train loss:3.679638 +step:4669 train loss:3.688095 +step:4670 train loss:3.717658 +step:4671 train loss:3.730851 +step:4672 train loss:3.602962 +step:4673 train loss:3.635981 +step:4674 train loss:3.765641 +step:4675 train loss:3.667576 +step:4676 train loss:3.628835 +step:4677 train loss:3.634508 +step:4678 train loss:3.607529 +step:4679 train loss:3.708594 +step:4680 train loss:3.642716 +step:4681 train loss:3.695864 +step:4682 train loss:3.644890 +step:4683 train loss:3.616642 +step:4684 train loss:3.732107 +step:4685 train loss:3.665361 +step:4686 train loss:3.683030 +step:4687 train loss:3.711582 +step:4688 train loss:3.643418 +step:4689 train loss:3.718960 +step:4690 train loss:3.663539 +step:4691 train loss:3.694572 +step:4692 train loss:3.622591 +step:4693 train loss:3.660170 +step:4694 train loss:3.702384 +step:4695 train loss:3.721791 +step:4696 train loss:3.709152 +step:4697 train loss:3.620174 +step:4698 train loss:3.640644 +step:4699 train loss:3.689554 +step:4700 train loss:3.658550 +step:4701 train loss:3.666763 +step:4702 train loss:3.623574 +step:4703 train loss:3.701915 +step:4704 train loss:3.686838 +step:4705 train loss:3.635015 +step:4706 train loss:3.640160 +step:4707 train loss:3.628040 +step:4708 train loss:3.696100 +step:4709 train loss:3.641915 +step:4710 train loss:3.656102 +step:4711 train loss:3.714427 +step:4712 train loss:3.614299 +step:4713 train loss:3.717901 +step:4714 train loss:3.617385 +step:4715 train loss:3.707648 +step:4716 train loss:3.673948 +step:4717 train loss:3.606255 +step:4718 train loss:3.694541 +step:4719 train loss:3.622901 +step:4720 train loss:3.721905 +step:4721 train loss:3.675574 +step:4722 train loss:3.727653 +step:4723 train loss:3.626446 +step:4724 train loss:3.677192 +step:4725 train loss:3.613538 +step:4726 train loss:3.660243 +step:4727 train loss:3.664101 +step:4728 train loss:3.672795 +step:4729 train loss:3.700540 +step:4730 train loss:3.603266 +step:4731 train loss:3.663103 +step:4732 train loss:3.614597 +step:4733 train loss:3.557825 +step:4734 train loss:3.693961 +step:4735 train loss:3.647891 +step:4736 train loss:3.685131 +step:4737 train loss:3.568839 +step:4738 train loss:3.714801 +step:4739 train loss:3.589005 +step:4740 train loss:3.702599 +step:4741 train loss:3.668498 +step:4742 train loss:3.630092 +step:4743 train loss:3.626161 +step:4744 train loss:3.670627 +step:4745 train loss:3.688715 +step:4746 train loss:3.727423 +step:4747 train loss:3.687158 +step:4748 train loss:3.590031 +step:4749 train loss:3.654365 +step:4750 validation loss:3.591954 +step:4750 train loss:3.601556 +step:4751 train loss:3.697388 +step:4752 train loss:3.627671 +step:4753 train loss:3.739608 +step:4754 train loss:3.604382 +step:4755 train loss:3.647823 +step:4756 train loss:3.720254 +step:4757 train loss:3.646257 +step:4758 train loss:3.662350 +step:4759 train loss:3.662274 +step:4760 train loss:3.693303 +step:4761 train loss:3.613651 +step:4762 train loss:3.645776 +step:4763 train loss:3.664200 +step:4764 train loss:3.726302 +step:4765 train loss:3.615562 +step:4766 train loss:3.642369 +step:4767 train loss:3.595921 +step:4768 train loss:3.651101 +step:4769 train loss:3.677372 +step:4770 train loss:3.633501 +step:4771 train loss:3.647064 +step:4772 
train loss:3.623422 +step:4773 train loss:3.659441 +step:4774 train loss:3.597966 +step:4775 train loss:3.730839 +step:4776 train loss:3.599445 +step:4777 train loss:3.674212 +step:4778 train loss:3.609756 +step:4779 train loss:3.660487 +step:4780 train loss:3.595183 +step:4781 train loss:3.601626 +step:4782 train loss:3.708265 +step:4783 train loss:3.697728 +step:4784 train loss:3.657745 +step:4785 train loss:3.658394 +step:4786 train loss:3.767479 +step:4787 train loss:3.602326 +step:4788 train loss:3.624951 +step:4789 train loss:3.646219 +step:4790 train loss:3.701309 +step:4791 train loss:3.666964 +step:4792 train loss:3.707019 +step:4793 train loss:3.624482 +step:4794 train loss:3.696832 +step:4795 train loss:3.645828 +step:4796 train loss:3.640731 +step:4797 train loss:3.645271 +step:4798 train loss:3.654361 +step:4799 train loss:3.652061 +step:4800 train loss:3.683129 +step:4801 train loss:3.674335 +step:4802 train loss:3.711835 +step:4803 train loss:3.693429 +step:4804 train loss:3.650145 +step:4805 train loss:3.651623 +step:4806 train loss:3.629705 +step:4807 train loss:3.734350 +step:4808 train loss:3.605013 +step:4809 train loss:3.710539 +step:4810 train loss:3.649402 +step:4811 train loss:3.669286 +step:4812 train loss:3.642678 +step:4813 train loss:3.600647 +step:4814 train loss:3.593219 +step:4815 train loss:3.587480 +step:4816 train loss:3.654658 +step:4817 train loss:3.593167 +step:4818 train loss:3.656508 +step:4819 train loss:3.653329 +step:4820 train loss:3.898842 +step:4821 train loss:3.678937 +step:4822 train loss:3.689718 +step:4823 train loss:3.622506 +step:4824 train loss:3.629550 +step:4825 train loss:3.608597 +step:4826 train loss:3.695416 +step:4827 train loss:3.642464 +step:4828 train loss:3.584908 +step:4829 train loss:3.687701 +step:4830 train loss:3.630924 +step:4831 train loss:3.775197 +step:4832 train loss:3.647208 +step:4833 train loss:3.684996 +step:4834 train loss:3.587791 +step:4835 train loss:3.677886 +step:4836 train loss:3.658355 +step:4837 train loss:3.689120 +step:4838 train loss:3.624185 +step:4839 train loss:3.689016 +step:4840 train loss:3.595811 +step:4841 train loss:3.693549 +step:4842 train loss:3.606375 +step:4843 train loss:3.684905 +step:4844 train loss:3.683924 +step:4845 train loss:3.626245 +step:4846 train loss:3.639237 +step:4847 train loss:3.625096 +step:4848 train loss:3.648047 +step:4849 train loss:3.605079 +step:4850 train loss:3.613781 +step:4851 train loss:3.602981 +step:4852 train loss:3.684885 +step:4853 train loss:3.661462 +step:4854 train loss:3.637121 +step:4855 train loss:3.702005 +step:4856 train loss:3.669371 +step:4857 train loss:3.678467 +step:4858 train loss:3.758993 +step:4859 train loss:3.603649 +step:4860 train loss:3.681546 +step:4861 train loss:3.654872 +step:4862 train loss:3.687009 +step:4863 train loss:3.624749 +step:4864 train loss:3.633077 +step:4865 train loss:3.626570 +step:4866 train loss:3.671718 +step:4867 train loss:3.642842 +step:4868 train loss:3.657836 +step:4869 train loss:3.609863 +step:4870 train loss:3.636701 +step:4871 train loss:3.723148 +step:4872 train loss:3.668170 +step:4873 train loss:3.665812 +step:4874 train loss:3.639808 +step:4875 train loss:3.603934 +step:4876 train loss:3.617065 +step:4877 train loss:3.619053 +step:4878 train loss:3.659031 +step:4879 train loss:3.619820 +step:4880 train loss:3.644219 +step:4881 train loss:3.589506 +step:4882 train loss:3.790894 +step:4883 train loss:3.603205 +step:4884 train loss:3.634302 +step:4885 train loss:3.609114 +step:4886 train loss:3.685105 
+step:4887 train loss:3.636998 +step:4888 train loss:3.647891 +step:4889 train loss:3.638514 +step:4890 train loss:3.681786 +step:4891 train loss:3.616253 +step:4892 train loss:3.619307 +step:4893 train loss:3.667257 +step:4894 train loss:3.604865 +step:4895 train loss:3.636422 +step:4896 train loss:3.620054 +step:4897 train loss:3.690176 +step:4898 train loss:3.644232 +step:4899 train loss:3.624203 +step:4900 train loss:3.669441 +step:4901 train loss:3.621503 +step:4902 train loss:3.614639 +step:4903 train loss:3.635112 +step:4904 train loss:3.651760 +step:4905 train loss:3.648323 +step:4906 train loss:3.647669 +step:4907 train loss:3.716882 +step:4908 train loss:3.623859 +step:4909 train loss:3.630365 +step:4910 train loss:3.652660 +step:4911 train loss:3.703200 +step:4912 train loss:3.681066 +step:4913 train loss:3.656752 +step:4914 train loss:3.646722 +step:4915 train loss:3.633394 +step:4916 train loss:3.574796 +step:4917 train loss:3.598608 +step:4918 train loss:3.627904 +step:4919 train loss:3.619962 +step:4920 train loss:3.616060 +step:4921 train loss:3.781497 +step:4922 train loss:3.678650 +step:4923 train loss:3.691677 +step:4924 train loss:3.691930 +step:4925 train loss:3.623112 +step:4926 train loss:3.621291 +step:4927 train loss:3.648028 +step:4928 train loss:3.687743 +step:4929 train loss:3.640746 +step:4930 train loss:3.625016 +step:4931 train loss:3.621913 +step:4932 train loss:3.628258 +step:4933 train loss:3.623214 +step:4934 train loss:3.683236 +step:4935 train loss:3.673903 +step:4936 train loss:3.634273 +step:4937 train loss:3.743947 +step:4938 train loss:3.730243 +step:4939 train loss:3.596818 +step:4940 train loss:3.677361 +step:4941 train loss:3.577329 +step:4942 train loss:3.620341 +step:4943 train loss:3.621108 +step:4944 train loss:3.622158 +step:4945 train loss:3.669881 +step:4946 train loss:3.645309 +step:4947 train loss:3.627775 +step:4948 train loss:3.661379 +step:4949 train loss:3.571758 +step:4950 train loss:3.652795 +step:4951 train loss:3.701586 +step:4952 train loss:3.643057 +step:4953 train loss:3.679696 +step:4954 train loss:3.581508 +step:4955 train loss:3.655345 +step:4956 train loss:3.681094 +step:4957 train loss:3.676383 +step:4958 train loss:3.593483 +step:4959 train loss:3.711212 +step:4960 train loss:3.636256 +step:4961 train loss:3.655259 +step:4962 train loss:3.616396 +step:4963 train loss:3.663423 +step:4964 train loss:3.612542 +step:4965 train loss:3.766261 +step:4966 train loss:3.619052 +step:4967 train loss:3.724005 +step:4968 train loss:3.613843 +step:4969 train loss:3.655561 +step:4970 train loss:3.646545 +step:4971 train loss:3.597617 +step:4972 train loss:3.643405 +step:4973 train loss:3.652040 +step:4974 train loss:3.641191 +step:4975 train loss:3.722424 +step:4976 train loss:3.703521 +step:4977 train loss:3.649178 +step:4978 train loss:3.637146 +step:4979 train loss:3.638196 +step:4980 train loss:3.741417 +step:4981 train loss:3.580415 +step:4982 train loss:3.663194 +step:4983 train loss:3.586241 +step:4984 train loss:3.772712 +step:4985 train loss:3.672261 +step:4986 train loss:3.612793 +step:4987 train loss:3.632256 +step:4988 train loss:3.829760 +step:4989 train loss:3.635790 +step:4990 train loss:3.631300 +step:4991 train loss:3.644728 +step:4992 train loss:3.630755 +step:4993 train loss:3.607027 +step:4994 train loss:3.717421 +step:4995 train loss:3.645991 +step:4996 train loss:3.734628 +step:4997 train loss:3.626561 +step:4998 train loss:3.634069 +step:4999 train loss:3.612882 +step:5000 validation loss:3.587608 
total_sharp:2.0128e-04 L1_sharp:6.9412e-05 L2_sharp:1.0474e-05 L3_sharp:1.1007e-05 L4_sharp:3.0303e-05 L5_sharp:3.2192e-05 L6_sharp:2.6964e-05 L7_sharp:2.2593e-05 L8_sharp:6.3067e-05 L9_sharp:9.6864e-05 L10_sharp:8.5797e-05 L11_sharp:7.0867e-05 L12_sharp:2.5584e-04 total_fnorm:1.0767e+01 total_l1_linf:9.4850e+04 total_spectral:1.0767e+01 L1_fnorm:2.5048e+00 L2_fnorm:2.4627e+00 L3_fnorm:2.4114e+00 L4_fnorm:2.4280e+00 L5_fnorm:2.4283e+00 L6_fnorm:2.5166e+00 L7_fnorm:2.5909e+00 L8_fnorm:2.5086e+00 L9_fnorm:2.5163e+00 L10_fnorm:2.4985e+00 L11_fnorm:2.5247e+00 L12_fnorm:2.4218e+00 L1_l1linf:2.5459e+00 L2_l1linf:2.5696e+00 L3_l1linf:2.6296e+00 L4_l1linf:2.5783e+00 L5_l1linf:2.4415e+00 L6_l1linf:2.4602e+00 L7_l1linf:2.7686e+00 L8_l1linf:2.7748e+00 L9_l1linf:2.8043e+00 L10_l1linf:3.2303e+00 L11_l1linf:2.9516e+00 L12_l1linf:2.9377e+00 L1_spectral:2.9570e-01 L2_spectral:2.7594e-01 L3_spectral:2.7722e-01 L4_spectral:3.3724e-01 L5_spectral:2.8869e-01 L6_spectral:2.5374e-01 L7_spectral:2.7821e-01 L8_spectral:3.0750e-01 L9_spectral:3.5939e-01 L10_spectral:3.7177e-01 L11_spectral:3.7846e-01 L12_spectral:4.0273e-01 ip_v_neg_g:1.5934e-02 cos_v_neg_g:2.2587e-03 v_norm:1.0767e+01 g_norm:6.5516e-01 hv_norm:1.7994e-01 cos_v_hv:1.2044e-02 hg_norm:9.1645e+00 cos_g_hg:7.6490e-01 v_par:2.4024e-03 v_perp:1.0767e+01 L1_cos_v_neg_g:4.3203e-03 L1_v_norm:2.5048e+00 L2_cos_v_neg_g:1.2998e-03 L2_v_norm:2.4627e+00 L3_cos_v_neg_g:3.2690e-03 L3_v_norm:2.4114e+00 L4_cos_v_neg_g:3.5302e-03 L4_v_norm:2.4280e+00 L5_cos_v_neg_g:3.1980e-03 L5_v_norm:2.4283e+00 L6_cos_v_neg_g:4.9277e-03 L6_v_norm:2.5166e+00 L7_cos_v_neg_g:4.5734e-03 L7_v_norm:2.5909e+00 L8_cos_v_neg_g:5.8828e-03 L8_v_norm:2.5086e+00 L9_cos_v_neg_g:6.3721e-03 L9_v_norm:2.5163e+00 L10_cos_v_neg_g:8.6997e-03 L10_v_norm:2.4985e+00 L11_cos_v_neg_g:9.6984e-03 L11_v_norm:2.5247e+00 L12_cos_v_neg_g:7.5648e-03 L12_v_norm:2.4218e+00 +step:5000 train loss:3.730164 +step:5001 train loss:3.596728 +step:5002 train loss:3.651465 +step:5003 train loss:3.648509 +step:5004 train loss:3.638722 +step:5005 train loss:3.637654 +step:5006 train loss:3.679241 +step:5007 train loss:3.681934 +step:5008 train loss:3.620058 +step:5009 train loss:3.663510 +step:5010 train loss:3.613271 +step:5011 train loss:3.642925 +step:5012 train loss:3.618646 +step:5013 train loss:3.722764 +step:5014 train loss:3.635523 +step:5015 train loss:3.708477 +step:5016 train loss:3.638497 +step:5017 train loss:3.682716 +step:5018 train loss:3.603162 +step:5019 train loss:3.636732 +step:5020 train loss:3.629427 +step:5021 train loss:3.643203 +step:5022 train loss:3.677001 +step:5023 train loss:3.645973 +step:5024 train loss:3.700244 +step:5025 train loss:3.585833 +step:5026 train loss:3.711219 +step:5027 train loss:3.643561 +step:5028 train loss:3.709366 +step:5029 train loss:3.604827 +step:5030 train loss:3.644765 +step:5031 train loss:3.628692 +step:5032 train loss:3.660241 +step:5033 train loss:3.645378 +step:5034 train loss:3.642458 +step:5035 train loss:3.723238 +step:5036 train loss:3.672470 +step:5037 train loss:3.625513 +step:5038 train loss:3.673314 +step:5039 train loss:3.687024 +step:5040 train loss:3.648272 +step:5041 train loss:3.665589 +step:5042 train loss:3.572203 +step:5043 train loss:3.715101 +step:5044 train loss:3.631230 +step:5045 train loss:3.677667 +step:5046 train loss:3.602604 +step:5047 train loss:3.677004 +step:5048 train loss:3.594166 +step:5049 train loss:3.729575 +step:5050 train loss:3.614829 +step:5051 train loss:3.658037 +step:5052 train loss:3.555754 +step:5053 train loss:3.737527 
+step:5054 train loss:3.630044 +step:5055 train loss:3.656569 +step:5056 train loss:3.688365 +step:5057 train loss:3.621439 +step:5058 train loss:3.650866 +step:5059 train loss:3.611306 +step:5060 train loss:3.658080 +step:5061 train loss:3.654310 +step:5062 train loss:3.622884 +step:5063 train loss:3.617236 +step:5064 train loss:3.625433 +step:5065 train loss:3.609709 +step:5066 train loss:3.667861 +step:5067 train loss:3.652306 +step:5068 train loss:3.636451 +step:5069 train loss:3.611558 +step:5070 train loss:3.641016 +step:5071 train loss:3.707952 +step:5072 train loss:3.601847 +step:5073 train loss:3.609010 +step:5074 train loss:3.555933 +step:5075 train loss:3.625413 +step:5076 train loss:3.555769 +step:5077 train loss:3.619504 +step:5078 train loss:3.622442 +step:5079 train loss:3.660760 +step:5080 train loss:3.638048 +step:5081 train loss:3.649136 +step:5082 train loss:3.640006 +step:5083 train loss:3.701722 +step:5084 train loss:3.679564 +step:5085 train loss:3.638436 +step:5086 train loss:3.713753 +step:5087 train loss:3.698335 +step:5088 train loss:3.616753 +step:5089 train loss:3.681309 +step:5090 train loss:3.627490 +step:5091 train loss:3.631810 +step:5092 train loss:3.729959 +step:5093 train loss:3.613177 +step:5094 train loss:3.609644 +step:5095 train loss:3.661604 +step:5096 train loss:3.626554 +step:5097 train loss:3.638037 +step:5098 train loss:3.636946 +step:5099 train loss:3.600552 +step:5100 train loss:3.611706 +step:5101 train loss:3.800091 +step:5102 train loss:3.650967 +step:5103 train loss:3.658979 +step:5104 train loss:3.709649 +step:5105 train loss:3.653738 +step:5106 train loss:3.610464 +step:5107 train loss:3.625560 +step:5108 train loss:3.622277 +step:5109 train loss:3.701338 +step:5110 train loss:3.610886 +step:5111 train loss:3.699085 +step:5112 train loss:3.613116 +step:5113 train loss:3.593420 +step:5114 train loss:3.641190 +step:5115 train loss:3.603717 +step:5116 train loss:3.656449 +step:5117 train loss:3.601649 +step:5118 train loss:3.631565 +step:5119 train loss:3.613514 +step:5120 train loss:3.652021 +step:5121 train loss:3.602636 +step:5122 train loss:3.613921 +step:5123 train loss:3.603411 +step:5124 train loss:3.558646 +step:5125 train loss:3.667371 +step:5126 train loss:3.653091 +step:5127 train loss:3.660860 +step:5128 train loss:3.670551 +step:5129 train loss:3.598072 +step:5130 train loss:3.612764 +step:5131 train loss:3.547775 +step:5132 train loss:3.671010 +step:5133 train loss:3.640207 +step:5134 train loss:3.640737 +step:5135 train loss:3.593259 +step:5136 train loss:3.658710 +step:5137 train loss:3.660601 +step:5138 train loss:3.640200 +step:5139 train loss:3.674792 +step:5140 train loss:3.646839 +step:5141 train loss:3.680919 +step:5142 train loss:3.627283 +step:5143 train loss:3.656434 +step:5144 train loss:3.652941 +step:5145 train loss:3.597373 +step:5146 train loss:3.591139 +step:5147 train loss:3.667074 +step:5148 train loss:3.607723 +step:5149 train loss:3.668047 +step:5150 train loss:3.644640 +step:5151 train loss:3.610616 +step:5152 train loss:3.654197 +step:5153 train loss:3.627770 +step:5154 train loss:3.641044 +step:5155 train loss:3.647869 +step:5156 train loss:3.629786 +step:5157 train loss:3.626248 +step:5158 train loss:3.652238 +step:5159 train loss:3.685299 +step:5160 train loss:3.750367 +step:5161 train loss:3.680102 +step:5162 train loss:3.695939 +step:5163 train loss:3.613606 +step:5164 train loss:3.688407 +step:5165 train loss:3.693100 +step:5166 train loss:3.626065 +step:5167 train loss:3.729558 +step:5168 train 
loss:3.648520 +step:5169 train loss:3.675519 +step:5170 train loss:3.655243 +step:5171 train loss:3.696951 +step:5172 train loss:3.615108 +step:5173 train loss:3.678043 +step:5174 train loss:3.613027 +step:5175 train loss:3.645161 +step:5176 train loss:3.637655 +step:5177 train loss:3.632398 +step:5178 train loss:3.697500 +step:5179 train loss:3.609004 +step:5180 train loss:3.685925 +step:5181 train loss:3.633111 +step:5182 train loss:3.692374 +step:5183 train loss:3.620811 +step:5184 train loss:3.598915 +step:5185 train loss:3.625605 +step:5186 train loss:3.682734 +step:5187 train loss:3.675340 +step:5188 train loss:3.608489 +step:5189 train loss:3.654119 +step:5190 train loss:3.635268 +step:5191 train loss:3.618792 +step:5192 train loss:3.603387 +step:5193 train loss:3.685212 +step:5194 train loss:3.635036 +step:5195 train loss:3.609282 +step:5196 train loss:3.678702 +step:5197 train loss:3.720496 +step:5198 train loss:3.639862 +step:5199 train loss:3.629516 +step:5200 train loss:3.651140 +step:5201 train loss:3.641621 +step:5202 train loss:3.646338 +step:5203 train loss:3.647735 +step:5204 train loss:3.617839 +step:5205 train loss:3.665547 +step:5206 train loss:3.599801 +step:5207 train loss:3.606272 +step:5208 train loss:3.665903 +step:5209 train loss:3.679726 +step:5210 train loss:3.595922 +step:5211 train loss:3.637478 +step:5212 train loss:3.653827 +step:5213 train loss:3.629180 +step:5214 train loss:3.675763 +step:5215 train loss:3.782228 +step:5216 train loss:3.639726 +step:5217 train loss:3.616532 +step:5218 train loss:3.622067 +step:5219 train loss:3.681685 +step:5220 train loss:3.596143 +step:5221 train loss:3.601867 +step:5222 train loss:3.685814 +step:5223 train loss:3.677754 +step:5224 train loss:3.576079 +step:5225 train loss:3.723425 +step:5226 train loss:3.636344 +step:5227 train loss:3.710511 +step:5228 train loss:3.679626 +step:5229 train loss:3.619988 +step:5230 train loss:3.632516 +step:5231 train loss:3.578446 +step:5232 train loss:3.702553 +step:5233 train loss:3.662923 +step:5234 train loss:3.664828 +step:5235 train loss:3.614344 +step:5236 train loss:3.692296 +step:5237 train loss:3.746994 +step:5238 train loss:3.645412 +step:5239 train loss:3.708649 +step:5240 train loss:3.590569 +step:5241 train loss:3.652312 +step:5242 train loss:3.624310 +step:5243 train loss:3.623761 +step:5244 train loss:3.625100 +step:5245 train loss:3.667070 +step:5246 train loss:3.709601 +step:5247 train loss:3.641093 +step:5248 train loss:3.609958 +step:5249 train loss:3.667705 +step:5250 validation loss:3.568277 +step:5250 train loss:3.634275 +step:5251 train loss:3.698564 +step:5252 train loss:3.591494 +step:5253 train loss:3.743136 +step:5254 train loss:3.618081 +step:5255 train loss:3.686721 +step:5256 train loss:3.604035 +step:5257 train loss:3.658484 +step:5258 train loss:3.656386 +step:5259 train loss:3.642431 +step:5260 train loss:3.636335 +step:5261 train loss:3.626150 +step:5262 train loss:3.667796 +step:5263 train loss:3.653991 +step:5264 train loss:3.606038 +step:5265 train loss:3.684839 +step:5266 train loss:3.601160 +step:5267 train loss:3.614392 +step:5268 train loss:3.596044 +step:5269 train loss:3.595123 +step:5270 train loss:3.650867 +step:5271 train loss:3.578401 +step:5272 train loss:3.668259 +step:5273 train loss:3.573570 +step:5274 train loss:3.623720 +step:5275 train loss:3.641518 +step:5276 train loss:3.766832 +step:5277 train loss:3.667463 +step:5278 train loss:3.614331 +step:5279 train loss:3.661221 +step:5280 train loss:3.639861 +step:5281 train loss:3.630552 
+step:5282 train loss:3.601963 +step:5283 train loss:3.604417 +step:5284 train loss:3.610067 +step:5285 train loss:3.678446 +step:5286 train loss:3.584865 +step:5287 train loss:3.686756 +step:5288 train loss:3.662460 +step:5289 train loss:3.628806 +step:5290 train loss:3.686696 +step:5291 train loss:3.637759 +step:5292 train loss:3.656924 +step:5293 train loss:3.627837 +step:5294 train loss:3.613105 +step:5295 train loss:3.620534 +step:5296 train loss:3.610161 +step:5297 train loss:3.633654 +step:5298 train loss:3.578527 +step:5299 train loss:3.671086 +step:5300 train loss:3.622046 +step:5301 train loss:3.691637 +step:5302 train loss:3.692823 +step:5303 train loss:3.556543 +step:5304 train loss:3.590932 +step:5305 train loss:3.565458 +step:5306 train loss:3.600561 +step:5307 train loss:3.605591 +step:5308 train loss:3.698239 +step:5309 train loss:3.647459 +step:5310 train loss:3.632561 +step:5311 train loss:3.701445 +step:5312 train loss:3.587897 +step:5313 train loss:3.677482 +step:5314 train loss:3.666976 +step:5315 train loss:3.627589 +step:5316 train loss:3.660190 +step:5317 train loss:3.677814 +step:5318 train loss:3.632209 +step:5319 train loss:3.659437 +step:5320 train loss:3.607570 +step:5321 train loss:3.731464 +step:5322 train loss:3.640931 +step:5323 train loss:3.645544 +step:5324 train loss:3.586556 +step:5325 train loss:3.667244 +step:5326 train loss:3.662691 +step:5327 train loss:3.549405 +step:5328 train loss:3.689167 +step:5329 train loss:3.653267 +step:5330 train loss:3.654670 +step:5331 train loss:3.699673 +step:5332 train loss:3.625363 +step:5333 train loss:3.686601 +step:5334 train loss:3.663062 +step:5335 train loss:3.718882 +step:5336 train loss:3.756938 +step:5337 train loss:3.591556 +step:5338 train loss:3.598300 +step:5339 train loss:3.618593 +step:5340 train loss:3.642834 +step:5341 train loss:3.659363 +step:5342 train loss:3.562554 +step:5343 train loss:3.718958 +step:5344 train loss:3.603551 +step:5345 train loss:3.604700 +step:5346 train loss:3.605265 +step:5347 train loss:3.629235 +step:5348 train loss:3.671716 +step:5349 train loss:3.611019 +step:5350 train loss:3.651317 +step:5351 train loss:3.725698 +step:5352 train loss:3.763077 +step:5353 train loss:3.679828 +step:5354 train loss:3.646131 +step:5355 train loss:3.612370 +step:5356 train loss:3.634690 +step:5357 train loss:3.613604 +step:5358 train loss:3.637245 +step:5359 train loss:3.646873 +step:5360 train loss:3.618674 +step:5361 train loss:3.622815 +step:5362 train loss:3.608732 +step:5363 train loss:3.598896 +step:5364 train loss:3.601142 +step:5365 train loss:3.633602 +step:5366 train loss:3.667195 +step:5367 train loss:3.592909 +step:5368 train loss:3.658377 +step:5369 train loss:3.678595 +step:5370 train loss:3.579180 +step:5371 train loss:3.630710 +step:5372 train loss:3.647715 +step:5373 train loss:3.691823 +step:5374 train loss:3.573790 +step:5375 train loss:3.621198 +step:5376 train loss:3.688039 +step:5377 train loss:3.623852 +step:5378 train loss:3.601127 +step:5379 train loss:3.601614 +step:5380 train loss:3.641929 +step:5381 train loss:3.678873 +step:5382 train loss:3.590705 +step:5383 train loss:3.648029 +step:5384 train loss:3.659666 +step:5385 train loss:3.664625 +step:5386 train loss:3.640883 +step:5387 train loss:3.648240 +step:5388 train loss:3.662422 +step:5389 train loss:3.592213 +step:5390 train loss:3.619720 +step:5391 train loss:3.562239 +step:5392 train loss:3.626537 +step:5393 train loss:3.611057 +step:5394 train loss:3.613827 +step:5395 train loss:3.697210 +step:5396 train 
loss:3.660408 +step:5397 train loss:3.685645 +step:5398 train loss:3.672573 +step:5399 train loss:3.706435 +step:5400 train loss:3.709399 +step:5401 train loss:3.669586 +step:5402 train loss:3.772826 +step:5403 train loss:3.679163 +step:5404 train loss:3.652099 +step:5405 train loss:3.720923 +step:5406 train loss:3.679133 +step:5407 train loss:3.607437 +step:5408 train loss:3.754243 +step:5409 train loss:3.594207 +step:5410 train loss:3.657039 +step:5411 train loss:3.641903 +step:5412 train loss:3.616130 +step:5413 train loss:3.667142 +step:5414 train loss:3.642767 +step:5415 train loss:3.622416 +step:5416 train loss:3.615927 +step:5417 train loss:3.682710 +step:5418 train loss:3.700897 +step:5419 train loss:3.603851 +step:5420 train loss:3.665417 +step:5421 train loss:3.633667 +step:5422 train loss:3.677660 +step:5423 train loss:3.654350 +step:5424 train loss:3.557606 +step:5425 train loss:3.623068 +step:5426 train loss:3.711800 +step:5427 train loss:3.602300 +step:5428 train loss:3.640733 +step:5429 train loss:3.575999 +step:5430 train loss:3.611018 +step:5431 train loss:3.675891 +step:5432 train loss:3.653949 +step:5433 train loss:3.654535 +step:5434 train loss:3.605081 +step:5435 train loss:3.602279 +step:5436 train loss:3.602211 +step:5437 train loss:3.641011 +step:5438 train loss:3.619645 +step:5439 train loss:3.625048 +step:5440 train loss:3.667473 +step:5441 train loss:3.694242 +step:5442 train loss:3.617634 +step:5443 train loss:3.611950 +step:5444 train loss:3.561673 +step:5445 train loss:3.646050 +step:5446 train loss:3.617174 +step:5447 train loss:3.650051 +step:5448 train loss:3.706974 +step:5449 train loss:3.596706 +step:5450 train loss:3.629509 +step:5451 train loss:3.621187 +step:5452 train loss:3.637959 +step:5453 train loss:3.694838 +step:5454 train loss:3.620365 +step:5455 train loss:3.609031 +step:5456 train loss:3.745890 +step:5457 train loss:3.627550 +step:5458 train loss:3.663800 +step:5459 train loss:3.604794 +step:5460 train loss:3.621131 +step:5461 train loss:3.626158 +step:5462 train loss:3.628277 +step:5463 train loss:3.633395 +step:5464 train loss:3.636662 +step:5465 train loss:3.582839 +step:5466 train loss:3.657269 +step:5467 train loss:3.638891 +step:5468 train loss:3.646548 +step:5469 train loss:3.739062 +step:5470 train loss:3.634017 +step:5471 train loss:3.707710 +step:5472 train loss:3.652547 +step:5473 train loss:3.559134 +step:5474 train loss:3.892457 +step:5475 train loss:3.568743 +step:5476 train loss:3.646111 +step:5477 train loss:3.645993 +step:5478 train loss:3.645095 +step:5479 train loss:3.789464 +step:5480 train loss:3.632801 +step:5481 train loss:3.695773 +step:5482 train loss:3.611648 +step:5483 train loss:3.644999 +step:5484 train loss:3.686156 +step:5485 train loss:3.598914 +step:5486 train loss:3.645540 +step:5487 train loss:3.647763 +step:5488 train loss:3.563145 +step:5489 train loss:3.663702 +step:5490 train loss:3.609782 +step:5491 train loss:3.710202 +step:5492 train loss:3.640311 +step:5493 train loss:3.570470 +step:5494 train loss:3.625827 +step:5495 train loss:3.602446 +step:5496 train loss:3.601399 +step:5497 train loss:3.718422 +step:5498 train loss:3.588877 +step:5499 train loss:3.723985 +step:5500 validation loss:3.563248 total_sharp:1.7164e-04 L1_sharp:3.8540e-05 L2_sharp:6.4653e-06 L3_sharp:1.3820e-05 L4_sharp:1.4379e-05 L5_sharp:3.0851e-05 L6_sharp:1.8415e-05 L7_sharp:1.8210e-05 L8_sharp:4.2646e-05 L9_sharp:5.6100e-05 L10_sharp:6.8285e-05 L11_sharp:6.8283e-05 L12_sharp:2.7666e-04 total_fnorm:1.0822e+01 
total_l1_linf:9.5524e+04 total_spectral:1.0822e+01 L1_fnorm:2.5279e+00 L2_fnorm:2.4915e+00 L3_fnorm:2.4583e+00 L4_fnorm:2.4498e+00 L5_fnorm:2.4408e+00 L6_fnorm:2.5016e+00 L7_fnorm:2.5835e+00 L8_fnorm:2.5215e+00 L9_fnorm:2.5365e+00 L10_fnorm:2.5261e+00 L11_fnorm:2.5562e+00 L12_fnorm:2.4751e+00 L1_l1linf:2.5469e+00 L2_l1linf:2.5908e+00 L3_l1linf:2.6373e+00 L4_l1linf:2.5425e+00 L5_l1linf:2.4449e+00 L6_l1linf:2.3689e+00 L7_l1linf:2.7521e+00 L8_l1linf:2.6800e+00 L9_l1linf:3.2515e+00 L10_l1linf:2.9080e+00 L11_l1linf:3.1042e+00 L12_l1linf:3.3497e+00 L1_spectral:3.1283e-01 L2_spectral:2.8332e-01 L3_spectral:2.8584e-01 L4_spectral:3.3425e-01 L5_spectral:2.7379e-01 L6_spectral:2.3930e-01 L7_spectral:2.7819e-01 L8_spectral:3.1504e-01 L9_spectral:3.7125e-01 L10_spectral:3.8280e-01 L11_spectral:4.0127e-01 L12_spectral:4.5851e-01 ip_v_neg_g:1.7374e-02 cos_v_neg_g:2.1353e-03 v_norm:1.0822e+01 g_norm:7.5182e-01 hv_norm:2.4505e-01 cos_v_hv:7.5803e-03 hg_norm:4.1412e+00 cos_g_hg:6.6398e-01 v_par:2.5398e-03 v_perp:1.0822e+01 L1_cos_v_neg_g:4.6557e-03 L1_v_norm:2.5279e+00 L2_cos_v_neg_g:2.2713e-03 L2_v_norm:2.4915e+00 L3_cos_v_neg_g:1.8410e-03 L3_v_norm:2.4583e+00 L4_cos_v_neg_g:2.5243e-03 L4_v_norm:2.4498e+00 L5_cos_v_neg_g:3.7523e-03 L5_v_norm:2.4408e+00 L6_cos_v_neg_g:3.9537e-03 L6_v_norm:2.5016e+00 L7_cos_v_neg_g:2.5715e-03 L7_v_norm:2.5835e+00 L8_cos_v_neg_g:4.8602e-03 L8_v_norm:2.5215e+00 L9_cos_v_neg_g:5.7104e-03 L9_v_norm:2.5365e+00 L10_cos_v_neg_g:7.6175e-03 L10_v_norm:2.5261e+00 L11_cos_v_neg_g:7.9836e-03 L11_v_norm:2.5562e+00 L12_cos_v_neg_g:9.6354e-03 L12_v_norm:2.4751e+00 +step:5500 train loss:3.636242 +step:5501 train loss:3.712032 +step:5502 train loss:3.663721 +step:5503 train loss:3.626754 +step:5504 train loss:3.669906 +step:5505 train loss:3.635525 +step:5506 train loss:3.674378 +step:5507 train loss:3.664443 +step:5508 train loss:3.687299 +step:5509 train loss:3.693305 +step:5510 train loss:3.668106 +step:5511 train loss:3.662194 +step:5512 train loss:3.783896 +step:5513 train loss:3.587157 +step:5514 train loss:3.648576 +step:5515 train loss:3.672547 +step:5516 train loss:3.696338 +step:5517 train loss:3.652811 +step:5518 train loss:3.682290 +step:5519 train loss:3.715991 +step:5520 train loss:3.621766 +step:5521 train loss:3.637600 +step:5522 train loss:3.599649 +step:5523 train loss:3.651051 +step:5524 train loss:3.693191 +step:5525 train loss:3.602342 +step:5526 train loss:3.618112 +step:5527 train loss:3.634210 +step:5528 train loss:3.742171 +step:5529 train loss:3.709051 +step:5530 train loss:3.673616 +step:5531 train loss:3.610148 +step:5532 train loss:3.634243 +step:5533 train loss:3.672473 +step:5534 train loss:3.582796 +step:5535 train loss:3.637490 +step:5536 train loss:3.575531 +step:5537 train loss:3.621429 +step:5538 train loss:3.612527 +step:5539 train loss:3.558529 +step:5540 train loss:3.777735 +step:5541 train loss:3.595361 +step:5542 train loss:3.645049 +step:5543 train loss:3.631690 +step:5544 train loss:3.623963 +step:5545 train loss:3.615534 +step:5546 train loss:3.652882 +step:5547 train loss:3.583826 +step:5548 train loss:3.622647 +step:5549 train loss:3.627313 +step:5550 train loss:3.650902 +step:5551 train loss:3.659118 +step:5552 train loss:3.612843 +step:5553 train loss:3.639070 +step:5554 train loss:3.610678 +step:5555 train loss:3.618617 +step:5556 train loss:3.635776 +step:5557 train loss:3.701475 +step:5558 train loss:3.622667 +step:5559 train loss:3.627528 +step:5560 train loss:3.620668 +step:5561 train loss:3.653166 +step:5562 train loss:3.607858 
+step:5563 train loss:3.591208 +step:5564 train loss:3.625542 +step:5565 train loss:3.690899 +step:5566 train loss:3.592658 +step:5567 train loss:3.709641 +step:5568 train loss:3.832450 +step:5569 train loss:3.625594 +step:5570 train loss:3.552247 +step:5571 train loss:3.643014 +step:5572 train loss:3.580763 +step:5573 train loss:3.572604 +step:5574 train loss:3.539084 +step:5575 train loss:3.636656 +step:5576 train loss:3.621726 +step:5577 train loss:3.628258 +step:5578 train loss:3.656782 +step:5579 train loss:3.612769 +step:5580 train loss:3.638340 +step:5581 train loss:3.654837 +step:5582 train loss:3.637696 +step:5583 train loss:3.643802 +step:5584 train loss:3.769867 +step:5585 train loss:3.673157 +step:5586 train loss:3.605687 +step:5587 train loss:3.640554 +step:5588 train loss:3.656730 +step:5589 train loss:3.653542 +step:5590 train loss:3.714405 +step:5591 train loss:3.577729 +step:5592 train loss:3.758133 +step:5593 train loss:3.635900 +step:5594 train loss:3.650379 +step:5595 train loss:3.638145 +step:5596 train loss:3.592547 +step:5597 train loss:3.605490 +step:5598 train loss:3.609344 +step:5599 train loss:3.620125 +step:5600 train loss:3.659169 +step:5601 train loss:3.683804 +step:5602 train loss:3.613981 +step:5603 train loss:3.651991 +step:5604 train loss:3.654022 +step:5605 train loss:3.621698 +step:5606 train loss:3.626655 +step:5607 train loss:3.658072 +step:5608 train loss:3.602051 +step:5609 train loss:3.652992 +step:5610 train loss:3.613092 +step:5611 train loss:3.646729 +step:5612 train loss:3.675559 +step:5613 train loss:3.638388 +step:5614 train loss:3.603909 +step:5615 train loss:3.702485 +step:5616 train loss:3.600185 +step:5617 train loss:3.690073 +step:5618 train loss:3.673280 +step:5619 train loss:3.625890 +step:5620 train loss:3.627703 +step:5621 train loss:3.703180 +step:5622 train loss:3.583818 +step:5623 train loss:3.623172 +step:5624 train loss:3.612498 +step:5625 train loss:3.644620 +step:5626 train loss:3.638920 +step:5627 train loss:3.610465 +step:5628 train loss:3.652414 +step:5629 train loss:3.632571 +step:5630 train loss:3.564175 +step:5631 train loss:3.608380 +step:5632 train loss:3.648145 +step:5633 train loss:3.640048 +step:5634 train loss:3.596558 +step:5635 train loss:3.632833 +step:5636 train loss:3.613552 +step:5637 train loss:3.754546 +step:5638 train loss:3.661257 +step:5639 train loss:3.642503 +step:5640 train loss:3.644250 +step:5641 train loss:3.683254 +step:5642 train loss:3.613999 +step:5643 train loss:3.632920 +step:5644 train loss:3.714105 +step:5645 train loss:3.671167 +step:5646 train loss:3.669778 +step:5647 train loss:3.657658 +step:5648 train loss:3.645666 +step:5649 train loss:3.561386 +step:5650 train loss:3.567697 +step:5651 train loss:3.641788 +step:5652 train loss:3.642407 +step:5653 train loss:3.608756 +step:5654 train loss:3.737361 +step:5655 train loss:3.598509 +step:5656 train loss:3.624634 +step:5657 train loss:3.690248 +step:5658 train loss:3.594545 +step:5659 train loss:3.629011 +step:5660 train loss:3.678143 +step:5661 train loss:3.620062 +step:5662 train loss:3.660229 +step:5663 train loss:3.549602 +step:5664 train loss:3.520362 +step:5665 train loss:3.643192 +step:5666 train loss:3.646692 +step:5667 train loss:3.678816 +step:5668 train loss:3.614363 +step:5669 train loss:3.628380 +step:5670 train loss:3.627048 +step:5671 train loss:3.617452 +step:5672 train loss:3.661571 +step:5673 train loss:3.632194 +step:5674 train loss:3.703545 +step:5675 train loss:3.616224 +step:5676 train loss:3.763841 +step:5677 train 
loss:3.662852 +step:5678 train loss:3.640607 +step:5679 train loss:3.632316 +step:5680 train loss:3.662971 +step:5681 train loss:3.630750 +step:5682 train loss:3.645389 +step:5683 train loss:3.602901 +step:5684 train loss:3.611404 +step:5685 train loss:3.654477 +step:5686 train loss:3.671823 +step:5687 train loss:3.615055 +step:5688 train loss:3.709983 +step:5689 train loss:3.613938 +step:5690 train loss:3.761286 +step:5691 train loss:3.591933 +step:5692 train loss:3.581317 +step:5693 train loss:3.588639 +step:5694 train loss:3.610270 +step:5695 train loss:3.624983 +step:5696 train loss:3.673967 +step:5697 train loss:3.599057 +step:5698 train loss:3.619148 +step:5699 train loss:3.633122 +step:5700 train loss:3.629249 +step:5701 train loss:3.622529 +step:5702 train loss:3.691433 +step:5703 train loss:3.588305 +step:5704 train loss:3.631748 +step:5705 train loss:3.638997 +step:5706 train loss:3.663846 +step:5707 train loss:3.578470 +step:5708 train loss:3.664392 +step:5709 train loss:3.670065 +step:5710 train loss:3.662511 +step:5711 train loss:3.685833 +step:5712 train loss:3.663263 +step:5713 train loss:3.588839 +step:5714 train loss:3.676345 +step:5715 train loss:3.630610 +step:5716 train loss:3.633182 +step:5717 train loss:3.665263 +step:5718 train loss:3.601732 +step:5719 train loss:3.675563 +step:5720 train loss:3.648514 +step:5721 train loss:3.580611 +step:5722 train loss:3.594183 +step:5723 train loss:3.673069 +step:5724 train loss:3.590801 +step:5725 train loss:3.661553 +step:5726 train loss:3.655869 +step:5727 train loss:3.614282 +step:5728 train loss:3.618036 +step:5729 train loss:3.616765 +step:5730 train loss:3.688186 +step:5731 train loss:3.561285 +step:5732 train loss:3.619519 +step:5733 train loss:3.612018 +step:5734 train loss:3.625883 +step:5735 train loss:3.619024 +step:5736 train loss:3.620710 +step:5737 train loss:3.640643 +step:5738 train loss:3.606835 +step:5739 train loss:3.616224 +step:5740 train loss:3.656173 +step:5741 train loss:3.630452 +step:5742 train loss:3.685530 +step:5743 train loss:3.649849 +step:5744 train loss:3.610745 +step:5745 train loss:3.612206 +step:5746 train loss:3.643557 +step:5747 train loss:3.627229 +step:5748 train loss:3.672183 +step:5749 train loss:3.631803 +step:5750 validation loss:3.554612 +step:5750 train loss:3.637080 +step:5751 train loss:3.649318 +step:5752 train loss:3.636461 +step:5753 train loss:3.603883 +step:5754 train loss:3.615874 +step:5755 train loss:3.632848 +step:5756 train loss:3.619393 +step:5757 train loss:3.685402 +step:5758 train loss:3.617614 +step:5759 train loss:3.583001 +step:5760 train loss:3.663794 +step:5761 train loss:3.658835 +step:5762 train loss:3.614008 +step:5763 train loss:3.642074 +step:5764 train loss:3.602109 +step:5765 train loss:3.724321 +step:5766 train loss:3.631259 +step:5767 train loss:3.666871 +step:5768 train loss:3.602915 +step:5769 train loss:3.723980 +step:5770 train loss:3.647208 +step:5771 train loss:3.674444 +step:5772 train loss:3.626035 +step:5773 train loss:3.604654 +step:5774 train loss:3.615391 +step:5775 train loss:3.683697 +step:5776 train loss:3.668681 +step:5777 train loss:3.587703 +step:5778 train loss:3.670115 +step:5779 train loss:3.637860 +step:5780 train loss:3.605409 +step:5781 train loss:3.670486 +step:5782 train loss:3.628642 +step:5783 train loss:3.590288 +step:5784 train loss:3.691998 +step:5785 train loss:3.683888 +step:5786 train loss:3.595502 +step:5787 train loss:3.639709 +step:5788 train loss:3.648257 +step:5789 train loss:3.592695 +step:5790 train loss:3.693436 
+step:5791 train loss:3.623489 +step:5792 train loss:3.894609 +step:5793 train loss:3.662136 +step:5794 train loss:3.686334 +step:5795 train loss:3.676356 +step:5796 train loss:3.660650 +step:5797 train loss:3.642955 +step:5798 train loss:3.642205 +step:5799 train loss:3.608077 +step:5800 train loss:3.771216 +step:5801 train loss:3.641071 +step:5802 train loss:3.634734 +step:5803 train loss:3.641552 +step:5804 train loss:3.660433 +step:5805 train loss:3.623856 +step:5806 train loss:3.667297 +step:5807 train loss:3.584291 +step:5808 train loss:3.619454 +step:5809 train loss:3.628040 +step:5810 train loss:3.603431 +step:5811 train loss:3.617924 +step:5812 train loss:3.595714 +step:5813 train loss:3.607723 +step:5814 train loss:3.603283 +step:5815 train loss:3.604920 +step:5816 train loss:3.669093 +step:5817 train loss:3.677477 +step:5818 train loss:3.652382 +step:5819 train loss:3.700613 +step:5820 train loss:3.646502 +step:5821 train loss:3.634728 +step:5822 train loss:3.654147 +step:5823 train loss:3.656757 +step:5824 train loss:3.606590 +step:5825 train loss:3.699113 +step:5826 train loss:3.613924 +step:5827 train loss:3.577220 +step:5828 train loss:3.564427 +step:5829 train loss:3.628928 +step:5830 train loss:3.604151 +step:5831 train loss:3.575652 +step:5832 train loss:3.689415 +step:5833 train loss:3.666985 +step:5834 train loss:3.649899 +step:5835 train loss:3.600997 +step:5836 train loss:3.565409 +step:5837 train loss:3.688591 +step:5838 train loss:3.664673 +step:5839 train loss:3.641408 +step:5840 train loss:3.721846 +step:5841 train loss:3.649508 +step:5842 train loss:3.661911 +step:5843 train loss:3.606885 +step:5844 train loss:3.674704 +step:5845 train loss:3.582348 +step:5846 train loss:3.629997 +step:5847 train loss:3.658771 +step:5848 train loss:3.722479 +step:5849 train loss:3.618646 +step:5850 train loss:3.647482 +step:5851 train loss:3.617099 +step:5852 train loss:3.704149 +step:5853 train loss:3.797597 +step:5854 train loss:3.585745 +step:5855 train loss:3.646304 +step:5856 train loss:3.616806 +step:5857 train loss:3.634389 +step:5858 train loss:3.600012 +step:5859 train loss:3.607149 +step:5860 train loss:3.710511 +step:5861 train loss:3.592420 +step:5862 train loss:3.707681 +step:5863 train loss:3.645196 +step:5864 train loss:3.633774 +step:5865 train loss:3.640240 +step:5866 train loss:3.629264 +step:5867 train loss:3.709100 +step:5868 train loss:3.633191 +step:5869 train loss:3.658754 +step:5870 train loss:3.634732 +step:5871 train loss:3.616576 +step:5872 train loss:3.644210 +step:5873 train loss:3.621733 +step:5874 train loss:3.703977 +step:5875 train loss:3.632065 +step:5876 train loss:3.613491 +step:5877 train loss:3.620045 +step:5878 train loss:3.621856 +step:5879 train loss:3.593497 +step:5880 train loss:3.787691 +step:5881 train loss:3.629519 +step:5882 train loss:3.603636 +step:5883 train loss:3.608535 +step:5884 train loss:3.625287 +step:5885 train loss:3.621009 +step:5886 train loss:3.637136 +step:5887 train loss:3.637984 +step:5888 train loss:3.615382 +step:5889 train loss:3.595968 +step:5890 train loss:3.643473 +step:5891 train loss:3.587077 +step:5892 train loss:3.670390 +step:5893 train loss:3.594451 +step:5894 train loss:3.584328 +step:5895 train loss:3.590291 +step:5896 train loss:3.602355 +step:5897 train loss:3.668687 +step:5898 train loss:3.884737 +step:5899 train loss:3.618526 +step:5900 train loss:3.668344 +step:5901 train loss:3.620247 +step:5902 train loss:3.635191 +step:5903 train loss:3.622404 +step:5904 train loss:3.650111 +step:5905 train 
loss:3.757457 +step:5906 train loss:3.698508 +step:5907 train loss:3.641161 +step:5908 train loss:3.616226 +step:5909 train loss:3.611620 +step:5910 train loss:3.599030 +step:5911 train loss:3.618612 +step:5912 train loss:3.646235 +step:5913 train loss:3.648675 +step:5914 train loss:3.631617 +step:5915 train loss:3.763374 +step:5916 train loss:3.641416 +step:5917 train loss:3.614099 +step:5918 train loss:3.608790 +step:5919 train loss:3.640278 +step:5920 train loss:3.635020 +step:5921 train loss:3.604433 +step:5922 train loss:3.663881 +step:5923 train loss:3.653754 +step:5924 train loss:3.609447 +step:5925 train loss:3.731876 +step:5926 train loss:3.617130 +step:5927 train loss:3.593609 +step:5928 train loss:3.627046 +step:5929 train loss:3.652041 +step:5930 train loss:3.600519 +step:5931 train loss:3.583518 +step:5932 train loss:3.622734 +step:5933 train loss:3.677039 +step:5934 train loss:3.592617 +step:5935 train loss:3.619587 +step:5936 train loss:3.610734 +step:5937 train loss:3.590260 +step:5938 train loss:3.605524 +step:5939 train loss:3.584599 +step:5940 train loss:3.674548 +step:5941 train loss:3.602176 +step:5942 train loss:3.614254 +step:5943 train loss:3.619684 +step:5944 train loss:3.677212 +step:5945 train loss:3.603958 +step:5946 train loss:3.585631 +step:5947 train loss:3.601177 +step:5948 train loss:3.637565 +step:5949 train loss:3.685446 +step:5950 train loss:3.645774 +step:5951 train loss:3.644576 +step:5952 train loss:3.569909 +step:5953 train loss:3.613253 +step:5954 train loss:3.620122 +step:5955 train loss:3.627823 +step:5956 train loss:3.603868 +step:5957 train loss:3.571312 +step:5958 train loss:3.640533 +step:5959 train loss:3.605322 +step:5960 train loss:3.577431 +step:5961 train loss:3.603219 +step:5962 train loss:3.634597 +step:5963 train loss:3.668605 +step:5964 train loss:3.626002 +step:5965 train loss:3.642568 +step:5966 train loss:3.637271 +step:5967 train loss:3.603272 +step:5968 train loss:3.677765 +step:5969 train loss:3.616418 +step:5970 train loss:3.635839 +step:5971 train loss:3.586220 +step:5972 train loss:3.612216 +step:5973 train loss:3.604592 +step:5974 train loss:3.628582 +step:5975 train loss:3.597177 +step:5976 train loss:3.635789 +step:5977 train loss:3.594054 +step:5978 train loss:3.585241 +step:5979 train loss:3.621839 +step:5980 train loss:3.690268 +step:5981 train loss:3.581154 +step:5982 train loss:3.595144 +step:5983 train loss:3.661541 +step:5984 train loss:3.604532 +step:5985 train loss:3.649229 +step:5986 train loss:3.622829 +step:5987 train loss:3.609643 +step:5988 train loss:3.615958 +step:5989 train loss:3.635242 +step:5990 train loss:3.563845 +step:5991 train loss:3.628381 +step:5992 train loss:3.666281 +step:5993 train loss:3.612927 +step:5994 train loss:3.633446 +step:5995 train loss:3.526265 +step:5996 train loss:3.692567 +step:5997 train loss:3.670167 +step:5998 train loss:3.549115 +step:5999 train loss:3.575750 +step:6000 validation loss:3.543875 total_sharp:2.0173e-04 L1_sharp:7.7314e-05 L2_sharp:1.1395e-05 L3_sharp:1.1334e-05 L4_sharp:2.3606e-05 L5_sharp:3.3693e-05 L6_sharp:1.6778e-05 L7_sharp:2.9003e-05 L8_sharp:7.9588e-05 L9_sharp:8.6454e-05 L10_sharp:8.6375e-05 L11_sharp:6.5591e-05 L12_sharp:2.7715e-04 total_fnorm:1.0866e+01 total_l1_linf:9.5931e+04 total_spectral:1.0866e+01 L1_fnorm:2.5474e+00 L2_fnorm:2.4985e+00 L3_fnorm:2.4826e+00 L4_fnorm:2.4803e+00 L5_fnorm:2.4631e+00 L6_fnorm:2.5433e+00 L7_fnorm:2.6092e+00 L8_fnorm:2.5401e+00 L9_fnorm:2.5482e+00 L10_fnorm:2.5346e+00 L11_fnorm:2.5531e+00 L12_fnorm:2.4570e+00 
L1_l1linf:2.5509e+00 L2_l1linf:2.5808e+00 L3_l1linf:2.6925e+00 L4_l1linf:2.6196e+00 L5_l1linf:2.4796e+00 L6_l1linf:2.4672e+00 L7_l1linf:2.6963e+00 L8_l1linf:2.8364e+00 L9_l1linf:2.8161e+00 L10_l1linf:2.9413e+00 L11_l1linf:2.8896e+00 L12_l1linf:2.7781e+00 L1_spectral:3.0423e-01 L2_spectral:2.8733e-01 L3_spectral:2.8894e-01 L4_spectral:3.3386e-01 L5_spectral:2.7734e-01 L6_spectral:2.5779e-01 L7_spectral:2.8328e-01 L8_spectral:3.6687e-01 L9_spectral:3.7172e-01 L10_spectral:3.8293e-01 L11_spectral:3.8090e-01 L12_spectral:4.2270e-01 ip_v_neg_g:1.1671e-02 cos_v_neg_g:2.7908e-03 v_norm:1.0866e+01 g_norm:3.8486e-01 hv_norm:1.6467e-01 cos_v_hv:1.3312e-02 hg_norm:2.3875e+00 cos_g_hg:5.0356e-01 v_par:3.5307e-03 v_perp:1.0866e+01 L1_cos_v_neg_g:7.4514e-03 L1_v_norm:2.5474e+00 L2_cos_v_neg_g:2.3231e-03 L2_v_norm:2.4985e+00 L3_cos_v_neg_g:3.0120e-03 L3_v_norm:2.4826e+00 L4_cos_v_neg_g:3.8832e-03 L4_v_norm:2.4803e+00 L5_cos_v_neg_g:4.7571e-03 L5_v_norm:2.4631e+00 L6_cos_v_neg_g:3.2629e-03 L6_v_norm:2.5433e+00 L7_cos_v_neg_g:4.2042e-03 L7_v_norm:2.6092e+00 L8_cos_v_neg_g:5.9819e-03 L8_v_norm:2.5401e+00 L9_cos_v_neg_g:6.4942e-03 L9_v_norm:2.5482e+00 L10_cos_v_neg_g:6.3761e-03 L10_v_norm:2.5346e+00 L11_cos_v_neg_g:8.2630e-03 L11_v_norm:2.5531e+00 L12_cos_v_neg_g:1.7363e-02 L12_v_norm:2.4570e+00 +step:6000 train loss:3.625666 +step:6001 train loss:3.588077 +step:6002 train loss:3.618747 +step:6003 train loss:3.643183 +step:6004 train loss:3.593349 +step:6005 train loss:3.661229 +step:6006 train loss:3.570489 +step:6007 train loss:3.588350 +step:6008 train loss:3.603599 +step:6009 train loss:3.643977 +step:6010 train loss:3.634230 +step:6011 train loss:3.625529 +step:6012 train loss:3.590806 +step:6013 train loss:3.650674 +step:6014 train loss:3.669925 +step:6015 train loss:3.663335 +step:6016 train loss:3.633907 +step:6017 train loss:3.642643 +step:6018 train loss:3.580858 +step:6019 train loss:3.618979 +step:6020 train loss:3.606147 +step:6021 train loss:3.532662 +step:6022 train loss:3.644506 +step:6023 train loss:3.579823 +step:6024 train loss:3.654218 +step:6025 train loss:3.621644 +step:6026 train loss:3.594133 +step:6027 train loss:3.635810 +step:6028 train loss:3.551922 +step:6029 train loss:3.662887 +step:6030 train loss:3.635517 +step:6031 train loss:3.603591 +step:6032 train loss:3.565621 +step:6033 train loss:3.622184 +step:6034 train loss:3.653734 +step:6035 train loss:3.569173 +step:6036 train loss:3.543847 +step:6037 train loss:3.656366 +step:6038 train loss:3.661345 +step:6039 train loss:3.642979 +step:6040 train loss:3.601200 +step:6041 train loss:3.583465 +step:6042 train loss:3.561717 +step:6043 train loss:3.621904 +step:6044 train loss:3.739678 +step:6045 train loss:3.582066 +step:6046 train loss:3.595843 +step:6047 train loss:3.632514 +step:6048 train loss:3.641575 +step:6049 train loss:3.615687 +step:6050 train loss:3.584857 +step:6051 train loss:3.636612 +step:6052 train loss:3.606948 +step:6053 train loss:3.727359 +step:6054 train loss:3.765646 +step:6055 train loss:3.580341 +step:6056 train loss:3.572595 +step:6057 train loss:3.606358 +step:6058 train loss:3.636770 +step:6059 train loss:3.637464 +step:6060 train loss:3.641810 +step:6061 train loss:3.658153 +step:6062 train loss:3.610044 +step:6063 train loss:3.625568 +step:6064 train loss:3.620411 +step:6065 train loss:3.620847 +step:6066 train loss:3.607315 +step:6067 train loss:3.648135 +step:6068 train loss:3.587156 +step:6069 train loss:3.544830 +step:6070 train loss:3.693360 +step:6071 train loss:3.640296 +step:6072 train 
loss:3.582979 +step:6073 train loss:3.620326 +step:6074 train loss:3.702685 +step:6075 train loss:3.623432 +step:6076 train loss:3.634923 +step:6077 train loss:3.635525 +step:6078 train loss:3.572479 +step:6079 train loss:3.601379 +step:6080 train loss:3.606226 +step:6081 train loss:3.646220 +step:6082 train loss:3.592849 +step:6083 train loss:3.605839 +step:6084 train loss:3.669536 +step:6085 train loss:3.668337 +step:6086 train loss:3.567141 +step:6087 train loss:3.613687 +step:6088 train loss:3.597294 +step:6089 train loss:3.656216 +step:6090 train loss:3.657473 +step:6091 train loss:3.606280 +step:6092 train loss:3.573294 +step:6093 train loss:3.631767 +step:6094 train loss:3.544252 +step:6095 train loss:3.710112 +step:6096 train loss:3.580999 +step:6097 train loss:3.659483 +step:6098 train loss:3.629228 +step:6099 train loss:3.690005 +step:6100 train loss:3.683337 +step:6101 train loss:3.617260 +step:6102 train loss:3.730824 +step:6103 train loss:3.620663 +step:6104 train loss:3.736299 +step:6105 train loss:3.667852 +step:6106 train loss:3.605136 +step:6107 train loss:3.669434 +step:6108 train loss:3.629910 +step:6109 train loss:3.703091 +step:6110 train loss:3.632685 +step:6111 train loss:3.669471 +step:6112 train loss:3.605535 +step:6113 train loss:3.633693 +step:6114 train loss:3.603834 +step:6115 train loss:3.667166 +step:6116 train loss:3.608032 +step:6117 train loss:3.661825 +step:6118 train loss:3.642338 +step:6119 train loss:3.651633 +step:6120 train loss:3.794314 +step:6121 train loss:3.629253 +step:6122 train loss:3.640419 +step:6123 train loss:3.621401 +step:6124 train loss:3.596459 +step:6125 train loss:3.588477 +step:6126 train loss:3.606468 +step:6127 train loss:3.593279 +step:6128 train loss:3.570039 +step:6129 train loss:3.799129 +step:6130 train loss:3.582717 +step:6131 train loss:3.560632 +step:6132 train loss:3.635852 +step:6133 train loss:3.597647 +step:6134 train loss:3.629341 +step:6135 train loss:3.710980 +step:6136 train loss:3.730876 +step:6137 train loss:3.590198 +step:6138 train loss:3.647124 +step:6139 train loss:3.626783 +step:6140 train loss:3.625680 +step:6141 train loss:3.585618 +step:6142 train loss:3.650353 +step:6143 train loss:3.614944 +step:6144 train loss:3.632647 +step:6145 train loss:3.878129 +step:6146 train loss:3.716883 +step:6147 train loss:3.795743 +step:6148 train loss:3.567581 +step:6149 train loss:3.693077 +step:6150 train loss:3.648255 +step:6151 train loss:3.599798 +step:6152 train loss:3.598861 +step:6153 train loss:3.666388 +step:6154 train loss:3.752788 +step:6155 train loss:3.617317 +step:6156 train loss:3.710357 +step:6157 train loss:3.643712 +step:6158 train loss:3.637930 +step:6159 train loss:3.604185 +step:6160 train loss:3.768466 +step:6161 train loss:3.619030 +step:6162 train loss:3.635996 +step:6163 train loss:3.669320 +step:6164 train loss:3.582172 +step:6165 train loss:3.650453 +step:6166 train loss:3.642759 +step:6167 train loss:3.663110 +step:6168 train loss:3.636980 +step:6169 train loss:3.629438 +step:6170 train loss:3.633400 +step:6171 train loss:3.601742 +step:6172 train loss:3.586950 +step:6173 train loss:3.637385 +step:6174 train loss:3.567763 +step:6175 train loss:3.578341 +step:6176 train loss:3.562321 +step:6177 train loss:3.656886 +step:6178 train loss:3.602330 +step:6179 train loss:3.610143 +step:6180 train loss:3.614921 +step:6181 train loss:3.650678 +step:6182 train loss:3.533653 +step:6183 train loss:3.543408 +step:6184 train loss:3.661787 +step:6185 train loss:3.616060 +step:6186 train loss:3.576307 
+step:6187 train loss:3.617763 +step:6188 train loss:3.585357 +step:6189 train loss:3.626133 +step:6190 train loss:3.584405 +step:6191 train loss:3.616347 +step:6192 train loss:3.582664 +step:6193 train loss:3.653734 +step:6194 train loss:3.642230 +step:6195 train loss:3.623559 +step:6196 train loss:3.635554 +step:6197 train loss:3.662210 +step:6198 train loss:3.575085 +step:6199 train loss:3.599484 +step:6200 train loss:3.639371 +step:6201 train loss:3.685725 +step:6202 train loss:3.683955 +step:6203 train loss:3.685193 +step:6204 train loss:3.667851 +step:6205 train loss:3.607354 +step:6206 train loss:3.593380 +step:6207 train loss:3.654917 +step:6208 train loss:3.680212 +step:6209 train loss:3.646966 +step:6210 train loss:3.677581 +step:6211 train loss:3.594641 +step:6212 train loss:3.588814 +step:6213 train loss:3.603217 +step:6214 train loss:3.578702 +step:6215 train loss:3.751135 +step:6216 train loss:3.625467 +step:6217 train loss:3.682284 +step:6218 train loss:3.658624 +step:6219 train loss:3.669897 +step:6220 train loss:3.626086 +step:6221 train loss:3.593387 +step:6222 train loss:3.827250 +step:6223 train loss:3.600251 +step:6224 train loss:3.632726 +step:6225 train loss:3.610550 +step:6226 train loss:3.616490 +step:6227 train loss:3.619501 +step:6228 train loss:3.614336 +step:6229 train loss:3.652873 +step:6230 train loss:3.611436 +step:6231 train loss:3.723227 +step:6232 train loss:3.562327 +step:6233 train loss:3.601873 +step:6234 train loss:3.609748 +step:6235 train loss:3.638126 +step:6236 train loss:3.574808 +step:6237 train loss:3.598692 +step:6238 train loss:3.622725 +step:6239 train loss:3.608681 +step:6240 train loss:3.630836 +step:6241 train loss:3.615628 +step:6242 train loss:3.610937 +step:6243 train loss:3.647317 +step:6244 train loss:3.804036 +step:6245 train loss:3.600854 +step:6246 train loss:3.584796 +step:6247 train loss:3.581190 +step:6248 train loss:3.585001 +step:6249 train loss:3.525098 +step:6250 validation loss:3.533498 +step:6250 train loss:3.560769 +step:6251 train loss:3.580368 +step:6252 train loss:3.621876 +step:6253 train loss:3.635288 +step:6254 train loss:3.623400 +step:6255 train loss:3.590369 +step:6256 train loss:3.641341 +step:6257 train loss:3.637691 +step:6258 train loss:3.622330 +step:6259 train loss:3.624631 +step:6260 train loss:3.651953 +step:6261 train loss:3.673079 +step:6262 train loss:3.571377 +step:6263 train loss:3.603680 +step:6264 train loss:3.612290 +step:6265 train loss:3.598529 +step:6266 train loss:3.805240 +step:6267 train loss:3.608577 +step:6268 train loss:3.697413 +step:6269 train loss:3.569592 +step:6270 train loss:3.581230 +step:6271 train loss:3.628886 +step:6272 train loss:3.620174 +step:6273 train loss:3.825446 +step:6274 train loss:3.600001 +step:6275 train loss:3.637282 +step:6276 train loss:3.608489 +step:6277 train loss:3.592426 +step:6278 train loss:3.574378 +step:6279 train loss:3.626880 +step:6280 train loss:3.634613 +step:6281 train loss:3.566994 +step:6282 train loss:3.581487 +step:6283 train loss:3.667876 +step:6284 train loss:3.636600 +step:6285 train loss:3.637924 +step:6286 train loss:3.585229 +step:6287 train loss:3.611604 +step:6288 train loss:3.708045 +step:6289 train loss:3.573018 +step:6290 train loss:3.559501 +step:6291 train loss:3.601473 +step:6292 train loss:3.621124 +step:6293 train loss:3.607311 +step:6294 train loss:3.594496 +step:6295 train loss:3.613958 +step:6296 train loss:3.578600 +step:6297 train loss:3.706497 +step:6298 train loss:3.648908 +step:6299 train loss:3.544841 +step:6300 
train loss:3.630100 +step:6301 train loss:3.655507 +step:6302 train loss:3.637692 +step:6303 train loss:3.605440 +step:6304 train loss:3.622232 +step:6305 train loss:3.588855 +step:6306 train loss:3.604437 +step:6307 train loss:3.615191 +step:6308 train loss:3.587217 +step:6309 train loss:3.584627 +step:6310 train loss:3.639606 +step:6311 train loss:3.591032 +step:6312 train loss:3.632375 +step:6313 train loss:3.564503 +step:6314 train loss:3.590524 +step:6315 train loss:3.644319 +step:6316 train loss:3.565603 +step:6317 train loss:3.560899 +step:6318 train loss:3.672986 +step:6319 train loss:3.605498 +step:6320 train loss:3.622840 +step:6321 train loss:3.602543 +step:6322 train loss:3.604246 +step:6323 train loss:3.540187 +step:6324 train loss:3.545163 +step:6325 train loss:3.644171 +step:6326 train loss:3.561646 +step:6327 train loss:3.636130 +step:6328 train loss:3.615919 +step:6329 train loss:3.535137 +step:6330 train loss:3.567214 +step:6331 train loss:3.581632 +step:6332 train loss:3.717879 +step:6333 train loss:3.592437 +step:6334 train loss:3.573609 +step:6335 train loss:3.541676 +step:6336 train loss:3.575474 +step:6337 train loss:3.597189 +step:6338 train loss:3.554487 +step:6339 train loss:3.598205 +step:6340 train loss:3.580996 +step:6341 train loss:3.592045 +step:6342 train loss:3.587001 +step:6343 train loss:3.686984 +step:6344 train loss:3.539298 +step:6345 train loss:3.554699 +step:6346 train loss:3.633418 +step:6347 train loss:3.507997 +step:6348 train loss:3.603820 +step:6349 train loss:3.580886 +step:6350 train loss:3.554508 +step:6351 train loss:3.552851 +step:6352 train loss:3.569521 +step:6353 train loss:3.589129 +step:6354 train loss:3.603887 +step:6355 train loss:3.609262 +step:6356 train loss:3.622643 +step:6357 train loss:3.478571 +step:6358 train loss:3.570823 +step:6359 train loss:3.624268 +step:6360 train loss:3.537967 +step:6361 train loss:3.540156 +step:6362 train loss:3.579633 +step:6363 train loss:3.560447 +step:6364 train loss:3.545621 +step:6365 train loss:3.614896 +step:6366 train loss:3.630811 +step:6367 train loss:3.554785 +step:6368 train loss:3.600671 +step:6369 train loss:3.566981 +step:6370 train loss:3.615882 +step:6371 train loss:3.534398 +step:6372 train loss:3.563085 +step:6373 train loss:3.588784 +step:6374 train loss:3.619585 +step:6375 train loss:3.577762 +step:6376 train loss:3.602396 +step:6377 train loss:3.599825 +step:6378 train loss:3.551624 +step:6379 train loss:3.591242 +step:6380 train loss:3.632572 +step:6381 train loss:3.599851 +step:6382 train loss:3.553597 +step:6383 train loss:3.617953 +step:6384 train loss:3.591280 +step:6385 train loss:3.570662 +step:6386 train loss:3.604354 +step:6387 train loss:3.582869 +step:6388 train loss:3.623554 +step:6389 train loss:3.631597 +step:6390 train loss:3.583726 +step:6391 train loss:3.568152 +step:6392 train loss:3.553111 +step:6393 train loss:3.609939 +step:6394 train loss:3.597047 +step:6395 train loss:3.779201 +step:6396 train loss:3.600881 +step:6397 train loss:3.545622 +step:6398 train loss:3.614386 +step:6399 train loss:3.557546 +step:6400 train loss:3.631467 +step:6401 train loss:3.665734 +step:6402 train loss:3.600767 +step:6403 train loss:3.590982 +step:6404 train loss:3.568879 +step:6405 train loss:3.596357 +step:6406 train loss:3.598205 +step:6407 train loss:3.657490 +step:6408 train loss:3.549139 +step:6409 train loss:3.536601 +step:6410 train loss:3.664727 +step:6411 train loss:3.597640 +step:6412 train loss:3.601732 +step:6413 train loss:3.602735 +step:6414 train loss:3.554454 
+step:6415 train loss:3.610162 +step:6416 train loss:3.584265 +step:6417 train loss:3.554917 +step:6418 train loss:3.545014 +step:6419 train loss:3.629093 +step:6420 train loss:3.559688 +step:6421 train loss:3.584055 +step:6422 train loss:3.568089 +step:6423 train loss:3.584222 +step:6424 train loss:3.603783 +step:6425 train loss:3.598239 +step:6426 train loss:3.640425 +step:6427 train loss:3.602179 +step:6428 train loss:3.638827 +step:6429 train loss:3.602264 +step:6430 train loss:3.581234 +step:6431 train loss:3.555059 +step:6432 train loss:3.587917 +step:6433 train loss:3.602591 +step:6434 train loss:3.487746 +step:6435 train loss:3.667484 +step:6436 train loss:3.597900 +step:6437 train loss:3.562731 +step:6438 train loss:3.593143 +step:6439 train loss:3.571363 +step:6440 train loss:3.581153 +step:6441 train loss:3.574837 +step:6442 train loss:3.517914 +step:6443 train loss:3.570238 +step:6444 train loss:3.715005 +step:6445 train loss:3.615660 +step:6446 train loss:3.622326 +step:6447 train loss:3.602777 +step:6448 train loss:3.550496 +step:6449 train loss:3.574844 +step:6450 train loss:3.553321 +step:6451 train loss:3.545856 +step:6452 train loss:3.546676 +step:6453 train loss:3.591574 +step:6454 train loss:3.612879 +step:6455 train loss:3.602937 +step:6456 train loss:3.620078 +step:6457 train loss:3.599329 +step:6458 train loss:3.571386 +step:6459 train loss:3.552521 +step:6460 train loss:3.559845 +step:6461 train loss:3.561496 +step:6462 train loss:3.553054 +step:6463 train loss:3.652654 +step:6464 train loss:3.557071 +step:6465 train loss:3.603142 +step:6466 train loss:3.618340 +step:6467 train loss:3.543457 +step:6468 train loss:3.619086 +step:6469 train loss:3.529820 +step:6470 train loss:3.648262 +step:6471 train loss:3.559335 +step:6472 train loss:3.713334 +step:6473 train loss:3.598727 +step:6474 train loss:3.630554 +step:6475 train loss:3.573558 +step:6476 train loss:3.644095 +step:6477 train loss:3.575255 +step:6478 train loss:3.706384 +step:6479 train loss:3.620422 +step:6480 train loss:3.556803 +step:6481 train loss:3.613726 +step:6482 train loss:3.554969 +step:6483 train loss:3.614480 +step:6484 train loss:3.569541 +step:6485 train loss:3.631047 +step:6486 train loss:3.564545 +step:6487 train loss:3.564342 +step:6488 train loss:3.559964 +step:6489 train loss:3.563000 +step:6490 train loss:3.587123 +step:6491 train loss:3.554219 +step:6492 train loss:3.661527 +step:6493 train loss:3.564450 +step:6494 train loss:3.567025 +step:6495 train loss:3.566743 +step:6496 train loss:3.601539 +step:6497 train loss:3.616981 +step:6498 train loss:3.727204 +step:6499 train loss:3.697335 +step:6500 validation loss:3.528052 total_sharp:2.4052e-04 L1_sharp:6.4453e-05 L2_sharp:1.7411e-05 L3_sharp:2.9485e-05 L4_sharp:2.8220e-05 L5_sharp:4.3904e-05 L6_sharp:2.2623e-05 L7_sharp:3.2925e-05 L8_sharp:6.5462e-05 L9_sharp:9.5806e-05 L10_sharp:1.0909e-04 L11_sharp:7.5644e-05 L12_sharp:2.9679e-04 total_fnorm:1.1029e+01 total_l1_linf:9.7578e+04 total_spectral:1.1029e+01 L1_fnorm:2.6315e+00 L2_fnorm:2.5645e+00 L3_fnorm:2.5485e+00 L4_fnorm:2.5277e+00 L5_fnorm:2.5253e+00 L6_fnorm:2.5791e+00 L7_fnorm:2.6330e+00 L8_fnorm:2.5703e+00 L9_fnorm:2.5627e+00 L10_fnorm:2.5650e+00 L11_fnorm:2.5938e+00 L12_fnorm:2.5307e+00 L1_l1linf:2.6797e+00 L2_l1linf:2.6438e+00 L3_l1linf:2.7376e+00 L4_l1linf:2.7412e+00 L5_l1linf:2.6035e+00 L6_l1linf:2.4802e+00 L7_l1linf:2.6880e+00 L8_l1linf:2.8309e+00 L9_l1linf:2.7419e+00 L10_l1linf:2.9873e+00 L11_l1linf:2.9469e+00 L12_l1linf:2.9664e+00 L1_spectral:3.3597e-01 L2_spectral:3.1154e-01 
L3_spectral:3.0992e-01 L4_spectral:3.3706e-01 L5_spectral:2.8061e-01 L6_spectral:2.5509e-01 L7_spectral:2.9224e-01 L8_spectral:3.3724e-01 L9_spectral:3.7781e-01 L10_spectral:3.9252e-01 L11_spectral:3.7820e-01 L12_spectral:4.5225e-01 ip_v_neg_g:1.3729e-02 cos_v_neg_g:3.0671e-03 v_norm:1.1029e+01 g_norm:4.0585e-01 hv_norm:2.2900e-01 cos_v_hv:1.1584e-02 hg_norm:2.5299e+00 cos_g_hg:4.9833e-01 v_par:3.5452e-03 v_perp:1.1029e+01 L1_cos_v_neg_g:7.6104e-03 L1_v_norm:2.6315e+00 L2_cos_v_neg_g:2.5766e-03 L2_v_norm:2.5645e+00 L3_cos_v_neg_g:2.8137e-03 L3_v_norm:2.5485e+00 L4_cos_v_neg_g:3.4028e-03 L4_v_norm:2.5277e+00 L5_cos_v_neg_g:4.4280e-03 L5_v_norm:2.5253e+00 L6_cos_v_neg_g:4.0251e-03 L6_v_norm:2.5791e+00 L7_cos_v_neg_g:3.6845e-03 L7_v_norm:2.6330e+00 L8_cos_v_neg_g:6.1261e-03 L8_v_norm:2.5703e+00 L9_cos_v_neg_g:8.8080e-03 L9_v_norm:2.5627e+00 L10_cos_v_neg_g:1.0915e-02 L10_v_norm:2.5650e+00 L11_cos_v_neg_g:1.1584e-02 L11_v_norm:2.5938e+00 L12_cos_v_neg_g:1.6671e-02 L12_v_norm:2.5307e+00 +step:6500 train loss:3.546939 +step:6501 train loss:3.561104 +step:6502 train loss:3.579025 +step:6503 train loss:3.636754 +step:6504 train loss:3.585066 +step:6505 train loss:3.595116 +step:6506 train loss:3.553889 +step:6507 train loss:3.622037 +step:6508 train loss:3.586153 +step:6509 train loss:3.570378 +step:6510 train loss:3.579187 +step:6511 train loss:3.597506 +step:6512 train loss:3.536948 +step:6513 train loss:3.606699 +step:6514 train loss:3.478776 +step:6515 train loss:3.570915 +step:6516 train loss:3.623396 +step:6517 train loss:3.535923 +step:6518 train loss:3.574359 +step:6519 train loss:3.565123 +step:6520 train loss:3.655557 +step:6521 train loss:3.631446 +step:6522 train loss:3.643985 +step:6523 train loss:3.535062 +step:6524 train loss:3.621875 +step:6525 train loss:3.603943 +step:6526 train loss:3.545137 +step:6527 train loss:3.595615 +step:6528 train loss:3.616875 +step:6529 train loss:3.644488 +step:6530 train loss:3.553863 +step:6531 train loss:3.634113 +step:6532 train loss:3.559742 +step:6533 train loss:3.597500 +step:6534 train loss:3.605897 +step:6535 train loss:3.580527 +step:6536 train loss:3.713835 +step:6537 train loss:3.530440 +step:6538 train loss:3.629296 +step:6539 train loss:3.553739 +step:6540 train loss:3.664386 +step:6541 train loss:3.646370 +step:6542 train loss:3.604437 +step:6543 train loss:3.555193 +step:6544 train loss:3.538623 +step:6545 train loss:3.528359 +step:6546 train loss:3.591358 +step:6547 train loss:3.642943 +step:6548 train loss:3.587483 +step:6549 train loss:3.602424 +step:6550 train loss:3.716714 +step:6551 train loss:3.591148 +step:6552 train loss:3.585991 +step:6553 train loss:3.623914 +step:6554 train loss:3.513973 +step:6555 train loss:3.601349 +step:6556 train loss:3.471944 +step:6557 train loss:3.817981 +step:6558 train loss:3.654117 +step:6559 train loss:3.564599 +step:6560 train loss:3.600743 +step:6561 train loss:3.578758 +step:6562 train loss:3.594898 +step:6563 train loss:3.489125 +step:6564 train loss:3.590450 +step:6565 train loss:3.497978 +step:6566 train loss:3.611315 +step:6567 train loss:3.581003 +step:6568 train loss:3.629067 +step:6569 train loss:3.571824 +step:6570 train loss:3.611222 +step:6571 train loss:3.539396 +step:6572 train loss:3.615849 +step:6573 train loss:3.628092 +step:6574 train loss:3.613383 +step:6575 train loss:3.557549 +step:6576 train loss:3.547288 +step:6577 train loss:3.620233 +step:6578 train loss:3.492465 +step:6579 train loss:3.590491 +step:6580 train loss:3.546804 +step:6581 train loss:3.558146 +step:6582 
train loss:3.538981 +step:6583 train loss:3.635863 +step:6584 train loss:3.566266 +step:6585 train loss:3.603815 +step:6586 train loss:3.609288 +step:6587 train loss:3.619651 +step:6588 train loss:3.581352 +step:6589 train loss:3.611665 +step:6590 train loss:3.553715 +step:6591 train loss:3.606883 +step:6592 train loss:3.547095 +step:6593 train loss:3.556368 +step:6594 train loss:3.583017 +step:6595 train loss:3.564888 +step:6596 train loss:3.562468 +step:6597 train loss:3.589099 +step:6598 train loss:3.630113 +step:6599 train loss:3.521679 +step:6600 train loss:3.575099 +step:6601 train loss:3.635818 +step:6602 train loss:3.557796 +step:6603 train loss:3.583259 +step:6604 train loss:3.597018 +step:6605 train loss:3.576885 +step:6606 train loss:3.638173 +step:6607 train loss:3.555744 +step:6608 train loss:3.570081 +step:6609 train loss:3.540394 +step:6610 train loss:3.652472 +step:6611 train loss:3.575069 +step:6612 train loss:3.617721 +step:6613 train loss:3.536061 +step:6614 train loss:3.566282 +step:6615 train loss:3.564450 +step:6616 train loss:3.546510 +step:6617 train loss:3.583433 +step:6618 train loss:3.568505 +step:6619 train loss:3.542237 +step:6620 train loss:3.647323 +step:6621 train loss:3.528248 +step:6622 train loss:3.600040 +step:6623 train loss:3.529128 +step:6624 train loss:3.599193 +step:6625 train loss:3.645633 +step:6626 train loss:3.608106 +step:6627 train loss:3.561309 +step:6628 train loss:3.618033 +step:6629 train loss:3.521574 +step:6630 train loss:3.556962 +step:6631 train loss:3.590977 +step:6632 train loss:3.631855 +step:6633 train loss:3.582464 +step:6634 train loss:3.643074 +step:6635 train loss:3.544394 +step:6636 train loss:3.583571 +step:6637 train loss:3.550377 +step:6638 train loss:3.552007 +step:6639 train loss:3.563998 +step:6640 train loss:3.549529 +step:6641 train loss:3.563161 +step:6642 train loss:3.562939 +step:6643 train loss:3.644539 +step:6644 train loss:3.651163 +step:6645 train loss:3.523604 +step:6646 train loss:3.615701 +step:6647 train loss:3.571125 +step:6648 train loss:3.673894 +step:6649 train loss:3.602498 +step:6650 train loss:3.554413 +step:6651 train loss:3.598055 +step:6652 train loss:3.612442 +step:6653 train loss:3.554927 +step:6654 train loss:3.553119 +step:6655 train loss:3.593396 +step:6656 train loss:3.562060 +step:6657 train loss:3.587082 +step:6658 train loss:3.571803 +step:6659 train loss:3.720479 +step:6660 train loss:3.619856 +step:6661 train loss:3.546566 +step:6662 train loss:3.579187 +step:6663 train loss:3.512848 +step:6664 train loss:3.594738 +step:6665 train loss:3.600213 +step:6666 train loss:3.619877 +step:6667 train loss:3.530057 +step:6668 train loss:3.661699 +step:6669 train loss:3.542459 +step:6670 train loss:3.552035 +step:6671 train loss:3.634125 +step:6672 train loss:3.588319 +step:6673 train loss:3.596536 +step:6674 train loss:3.569873 +step:6675 train loss:3.587962 +step:6676 train loss:3.600103 +step:6677 train loss:3.550800 +step:6678 train loss:3.627020 +step:6679 train loss:3.660160 +step:6680 train loss:3.656532 +step:6681 train loss:3.615292 +step:6682 train loss:3.555561 +step:6683 train loss:3.578786 +step:6684 train loss:3.591200 +step:6685 train loss:3.605597 +step:6686 train loss:3.538521 +step:6687 train loss:3.555490 +step:6688 train loss:3.601312 +step:6689 train loss:3.607984 +step:6690 train loss:3.583158 +step:6691 train loss:3.615369 +step:6692 train loss:3.625331 +step:6693 train loss:3.657860 +step:6694 train loss:3.608546 +step:6695 train loss:3.584044 +step:6696 train loss:3.522784 
+step:6697 train loss:3.732858 +step:6698 train loss:3.582153 +step:6699 train loss:3.579230 +step:6700 train loss:3.588877 +step:6701 train loss:3.649417 +step:6702 train loss:3.538226 +step:6703 train loss:3.584534 +step:6704 train loss:3.570927 +step:6705 train loss:3.580626 +step:6706 train loss:3.556357 +step:6707 train loss:3.633352 +step:6708 train loss:3.587080 +step:6709 train loss:3.615311 +step:6710 train loss:3.602720 +step:6711 train loss:3.558329 +step:6712 train loss:3.544097 +step:6713 train loss:3.567689 +step:6714 train loss:3.614731 +step:6715 train loss:3.553812 +step:6716 train loss:3.629866 +step:6717 train loss:3.577118 +step:6718 train loss:3.598838 +step:6719 train loss:3.633573 +step:6720 train loss:3.561530 +step:6721 train loss:3.578857 +step:6722 train loss:3.558858 +step:6723 train loss:3.684073 +step:6724 train loss:3.540985 +step:6725 train loss:3.601296 +step:6726 train loss:3.558006 +step:6727 train loss:3.622170 +step:6728 train loss:3.714137 +step:6729 train loss:3.578494 +step:6730 train loss:3.574911 +step:6731 train loss:3.616741 +step:6732 train loss:3.492442 +step:6733 train loss:3.627856 +step:6734 train loss:3.554102 +step:6735 train loss:3.581674 +step:6736 train loss:3.584334 +step:6737 train loss:3.580507 +step:6738 train loss:3.613366 +step:6739 train loss:3.568721 +step:6740 train loss:3.518965 +step:6741 train loss:3.630385 +step:6742 train loss:3.589802 +step:6743 train loss:3.592579 +step:6744 train loss:3.485678 +step:6745 train loss:3.642172 +step:6746 train loss:3.571465 +step:6747 train loss:3.562083 +step:6748 train loss:3.639568 +step:6749 train loss:3.617200 +step:6750 validation loss:3.518125 +step:6750 train loss:3.538554 +step:6751 train loss:3.573862 +step:6752 train loss:3.577317 +step:6753 train loss:3.611127 +step:6754 train loss:3.590596 +step:6755 train loss:3.600468 +step:6756 train loss:3.545828 +step:6757 train loss:3.515605 +step:6758 train loss:3.689946 +step:6759 train loss:3.579981 +step:6760 train loss:3.636388 +step:6761 train loss:3.570036 +step:6762 train loss:3.591251 +step:6763 train loss:3.492963 +step:6764 train loss:3.573510 +step:6765 train loss:3.573947 +step:6766 train loss:3.569809 +step:6767 train loss:3.521863 +step:6768 train loss:3.529543 +step:6769 train loss:3.490262 +step:6770 train loss:3.576694 +step:6771 train loss:3.577595 +step:6772 train loss:3.587001 +step:6773 train loss:3.569091 +step:6774 train loss:3.578145 +step:6775 train loss:3.625348 +step:6776 train loss:3.580135 +step:6777 train loss:3.657680 +step:6778 train loss:3.538967 +step:6779 train loss:3.593927 +step:6780 train loss:3.525181 +step:6781 train loss:3.589775 +step:6782 train loss:3.502473 +step:6783 train loss:3.538872 +step:6784 train loss:3.563835 +step:6785 train loss:3.551850 +step:6786 train loss:3.568239 +step:6787 train loss:3.642816 +step:6788 train loss:3.578694 +step:6789 train loss:3.589310 +step:6790 train loss:3.587059 +step:6791 train loss:3.598410 +step:6792 train loss:3.596816 +step:6793 train loss:3.597438 +step:6794 train loss:3.566513 +step:6795 train loss:3.569289 +step:6796 train loss:3.571657 +step:6797 train loss:3.671287 +step:6798 train loss:3.572756 +step:6799 train loss:3.566997 +step:6800 train loss:3.528016 +step:6801 train loss:3.663162 +step:6802 train loss:3.611101 +step:6803 train loss:3.602504 +step:6804 train loss:3.628895 +step:6805 train loss:3.589499 +step:6806 train loss:3.524683 +step:6807 train loss:3.581679 +step:6808 train loss:3.565392 +step:6809 train loss:3.595347 +step:6810 
train loss:3.717336 +step:6811 train loss:3.620569 +step:6812 train loss:3.593245 +step:6813 train loss:3.606858 +step:6814 train loss:3.616453 +step:6815 train loss:3.659403 +step:6816 train loss:3.574281 +step:6817 train loss:3.599836 +step:6818 train loss:3.579137 +step:6819 train loss:3.560503 +step:6820 train loss:3.587259 +step:6821 train loss:3.554109 +step:6822 train loss:3.656598 +step:6823 train loss:3.637712 +step:6824 train loss:3.615407 +step:6825 train loss:3.557611 +step:6826 train loss:3.605262 +step:6827 train loss:3.590979 +step:6828 train loss:3.609121 +step:6829 train loss:3.594505 +step:6830 train loss:3.562359 +step:6831 train loss:3.521501 +step:6832 train loss:3.506104 +step:6833 train loss:3.526751 +step:6834 train loss:3.610911 +step:6835 train loss:3.583808 +step:6836 train loss:3.502599 +step:6837 train loss:3.568277 +step:6838 train loss:3.626535 +step:6839 train loss:3.711354 +step:6840 train loss:3.586684 +step:6841 train loss:3.545269 +step:6842 train loss:3.592987 +step:6843 train loss:3.694836 +step:6844 train loss:3.577945 +step:6845 train loss:3.629013 +step:6846 train loss:3.693452 +step:6847 train loss:3.623322 +step:6848 train loss:3.611697 +step:6849 train loss:3.640656 +step:6850 train loss:3.612005 +step:6851 train loss:3.538220 +step:6852 train loss:3.531827 +step:6853 train loss:3.520341 +step:6854 train loss:3.602468 +step:6855 train loss:3.568275 +step:6856 train loss:3.555826 +step:6857 train loss:3.608357 +step:6858 train loss:3.636532 +step:6859 train loss:3.546115 +step:6860 train loss:3.655834 +step:6861 train loss:3.677082 +step:6862 train loss:3.591767 +step:6863 train loss:3.584326 +step:6864 train loss:3.533141 +step:6865 train loss:3.599920 +step:6866 train loss:3.528070 +step:6867 train loss:3.706095 +step:6868 train loss:3.580224 +step:6869 train loss:3.618572 +step:6870 train loss:3.653680 +step:6871 train loss:3.569825 +step:6872 train loss:3.569995 +step:6873 train loss:3.584407 +step:6874 train loss:3.541946 +step:6875 train loss:3.548887 +step:6876 train loss:3.577666 +step:6877 train loss:3.617708 +step:6878 train loss:3.529622 +step:6879 train loss:3.577521 +step:6880 train loss:3.585847 +step:6881 train loss:3.553022 +step:6882 train loss:3.610851 +step:6883 train loss:3.580687 +step:6884 train loss:3.828254 +step:6885 train loss:3.601152 +step:6886 train loss:3.583001 +step:6887 train loss:3.518282 +step:6888 train loss:3.622615 +step:6889 train loss:3.503507 +step:6890 train loss:3.614481 +step:6891 train loss:3.619453 +step:6892 train loss:3.720366 +step:6893 train loss:3.552139 +step:6894 train loss:3.614868 +step:6895 train loss:3.615829 +step:6896 train loss:3.585664 +step:6897 train loss:3.540607 +step:6898 train loss:3.542585 +step:6899 train loss:3.629063 +step:6900 train loss:3.600517 +step:6901 train loss:3.553828 +step:6902 train loss:3.486357 +step:6903 train loss:3.532933 +step:6904 train loss:3.640279 +step:6905 train loss:3.677052 +step:6906 train loss:3.594091 +step:6907 train loss:3.613739 +step:6908 train loss:3.646865 +step:6909 train loss:3.641308 +step:6910 train loss:3.521502 +step:6911 train loss:3.644238 +step:6912 train loss:3.539861 +step:6913 train loss:3.577894 +step:6914 train loss:3.539793 +step:6915 train loss:3.563999 +step:6916 train loss:3.541724 +step:6917 train loss:3.663538 +step:6918 train loss:3.610570 +step:6919 train loss:3.604868 +step:6920 train loss:3.587898 +step:6921 train loss:3.653793 +step:6922 train loss:3.649189 +step:6923 train loss:3.510714 +step:6924 train loss:3.593801 
+step:6925 train loss:3.564933 +step:6926 train loss:3.603341 +step:6927 train loss:3.657799 +step:6928 train loss:3.542536 +step:6929 train loss:3.558065 +step:6930 train loss:3.586570 +step:6931 train loss:3.587818 +step:6932 train loss:3.817411 +step:6933 train loss:3.651968 +step:6934 train loss:3.587224 +step:6935 train loss:3.577640 +step:6936 train loss:3.615960 +step:6937 train loss:3.557055 +step:6938 train loss:3.624223 +step:6939 train loss:3.557016 +step:6940 train loss:3.608697 +step:6941 train loss:3.525877 +step:6942 train loss:3.612572 +step:6943 train loss:3.505104 +step:6944 train loss:3.600460 +step:6945 train loss:3.542934 +step:6946 train loss:3.626842 +step:6947 train loss:3.555308 +step:6948 train loss:3.548837 +step:6949 train loss:3.626717 +step:6950 train loss:3.614096 +step:6951 train loss:3.618538 +step:6952 train loss:3.550079 +step:6953 train loss:3.595907 +step:6954 train loss:3.658687 +step:6955 train loss:3.568695 +step:6956 train loss:3.609797 +step:6957 train loss:3.599073 +step:6958 train loss:3.557336 +step:6959 train loss:3.597970 +step:6960 train loss:3.567544 +step:6961 train loss:3.572594 +step:6962 train loss:3.553255 +step:6963 train loss:3.525815 +step:6964 train loss:3.570466 +step:6965 train loss:3.558854 +step:6966 train loss:3.607325 +step:6967 train loss:3.543459 +step:6968 train loss:3.582451 +step:6969 train loss:3.600960 +step:6970 train loss:3.577075 +step:6971 train loss:3.640552 +step:6972 train loss:3.585797 +step:6973 train loss:3.546716 +step:6974 train loss:3.672079 +step:6975 train loss:3.577740 +step:6976 train loss:3.551509 +step:6977 train loss:3.584697 +step:6978 train loss:3.577686 +step:6979 train loss:3.588432 +step:6980 train loss:3.569206 +step:6981 train loss:3.625259 +step:6982 train loss:3.580018 +step:6983 train loss:3.570443 +step:6984 train loss:3.685195 +step:6985 train loss:3.534871 +step:6986 train loss:3.525107 +step:6987 train loss:3.576401 +step:6988 train loss:3.577941 +step:6989 train loss:3.722332 +step:6990 train loss:3.588016 +step:6991 train loss:3.544197 +step:6992 train loss:3.591828 +step:6993 train loss:3.660494 +step:6994 train loss:3.605903 +step:6995 train loss:3.557091 +step:6996 train loss:3.558813 +step:6997 train loss:3.642351 +step:6998 train loss:3.543241 +step:6999 train loss:3.590292 +step:7000 validation loss:3.513332 total_sharp:2.2153e-04 L1_sharp:6.5084e-05 L2_sharp:1.6008e-05 L3_sharp:1.7810e-05 L4_sharp:3.0936e-05 L5_sharp:3.0785e-05 L6_sharp:1.9987e-05 L7_sharp:3.0059e-05 L8_sharp:5.1989e-05 L9_sharp:8.1271e-05 L10_sharp:9.7079e-05 L11_sharp:7.6252e-05 L12_sharp:3.0585e-04 total_fnorm:1.0994e+01 total_l1_linf:9.7266e+04 total_spectral:1.0994e+01 L1_fnorm:2.5691e+00 L2_fnorm:2.5722e+00 L3_fnorm:2.5238e+00 L4_fnorm:2.5224e+00 L5_fnorm:2.4993e+00 L6_fnorm:2.5719e+00 L7_fnorm:2.6335e+00 L8_fnorm:2.5681e+00 L9_fnorm:2.5739e+00 L10_fnorm:2.5648e+00 L11_fnorm:2.5885e+00 L12_fnorm:2.5209e+00 L1_l1linf:2.6308e+00 L2_l1linf:2.6985e+00 L3_l1linf:2.6431e+00 L4_l1linf:2.6137e+00 L5_l1linf:2.5010e+00 L6_l1linf:2.5016e+00 L7_l1linf:2.7535e+00 L8_l1linf:2.7863e+00 L9_l1linf:2.8544e+00 L10_l1linf:2.9906e+00 L11_l1linf:2.9324e+00 L12_l1linf:3.0727e+00 L1_spectral:3.3259e-01 L2_spectral:3.1374e-01 L3_spectral:3.0266e-01 L4_spectral:3.4164e-01 L5_spectral:2.7865e-01 L6_spectral:2.6576e-01 L7_spectral:2.8415e-01 L8_spectral:3.3873e-01 L9_spectral:3.8483e-01 L10_spectral:4.2019e-01 L11_spectral:3.9430e-01 L12_spectral:4.5186e-01 ip_v_neg_g:1.4426e-02 cos_v_neg_g:3.3960e-03 v_norm:1.0994e+01 
g_norm:3.8638e-01 hv_norm:2.4757e-01 cos_v_hv:9.8379e-03 hg_norm:2.2461e+00 cos_g_hg:4.5277e-01 v_par:3.2856e-03 v_perp:1.0994e+01 L1_cos_v_neg_g:1.1261e-02 L1_v_norm:2.5691e+00 L2_cos_v_neg_g:4.3895e-03 L2_v_norm:2.5722e+00 L3_cos_v_neg_g:3.9542e-03 L3_v_norm:2.5238e+00 L4_cos_v_neg_g:4.7794e-03 L4_v_norm:2.5224e+00 L5_cos_v_neg_g:3.2465e-03 L5_v_norm:2.4993e+00 L6_cos_v_neg_g:4.6402e-03 L6_v_norm:2.5719e+00 L7_cos_v_neg_g:4.3799e-03 L7_v_norm:2.6335e+00 L8_cos_v_neg_g:6.0859e-03 L8_v_norm:2.5681e+00 L9_cos_v_neg_g:8.1731e-03 L9_v_norm:2.5739e+00 L10_cos_v_neg_g:1.0066e-02 L10_v_norm:2.5648e+00 L11_cos_v_neg_g:1.1011e-02 L11_v_norm:2.5885e+00 L12_cos_v_neg_g:1.6347e-02 L12_v_norm:2.5209e+00 +step:7000 train loss:3.664596 +step:7001 train loss:3.567990 +step:7002 train loss:3.561001 +step:7003 train loss:3.583293 +step:7004 train loss:3.579578 +step:7005 train loss:3.563492 +step:7006 train loss:3.569746 +step:7007 train loss:3.620360 +step:7008 train loss:3.559398 +step:7009 train loss:3.605174 +step:7010 train loss:3.534212 +step:7011 train loss:3.595481 +step:7012 train loss:3.564568 +step:7013 train loss:3.637763 +step:7014 train loss:3.543325 +step:7015 train loss:3.604962 +step:7016 train loss:3.594173 +step:7017 train loss:3.559419 +step:7018 train loss:3.636852 +step:7019 train loss:3.566353 +step:7020 train loss:3.613754 +step:7021 train loss:3.557303 +step:7022 train loss:3.572757 +step:7023 train loss:3.591142 +step:7024 train loss:3.552601 +step:7025 train loss:3.602781 +step:7026 train loss:3.560014 +step:7027 train loss:3.622366 +step:7028 train loss:3.543526 +step:7029 train loss:3.535506 +step:7030 train loss:3.534012 +step:7031 train loss:3.590339 +step:7032 train loss:3.600260 +step:7033 train loss:3.573512 +step:7034 train loss:3.595178 +step:7035 train loss:3.646015 +step:7036 train loss:3.566700 +step:7037 train loss:3.592066 +step:7038 train loss:3.552983 +step:7039 train loss:3.607851 +step:7040 train loss:3.524380 +step:7041 train loss:3.617034 +step:7042 train loss:3.551749 +step:7043 train loss:3.522771 +step:7044 train loss:3.570391 +step:7045 train loss:3.569055 +step:7046 train loss:3.560439 +step:7047 train loss:3.601006 +step:7048 train loss:3.549413 +step:7049 train loss:3.559375 +step:7050 train loss:3.580328 +step:7051 train loss:3.599844 +step:7052 train loss:3.601213 +step:7053 train loss:3.561574 +step:7054 train loss:3.544575 +step:7055 train loss:3.610967 +step:7056 train loss:3.609327 +step:7057 train loss:3.531435 +step:7058 train loss:3.653987 +step:7059 train loss:3.555380 +step:7060 train loss:3.571138 +step:7061 train loss:3.546032 +step:7062 train loss:3.568160 +step:7063 train loss:3.625977 +step:7064 train loss:3.549124 +step:7065 train loss:3.601773 +step:7066 train loss:3.557808 +step:7067 train loss:3.596532 +step:7068 train loss:3.573256 +step:7069 train loss:3.536685 +step:7070 train loss:3.561281 +step:7071 train loss:3.529430 +step:7072 train loss:3.534543 +step:7073 train loss:3.524726 +step:7074 train loss:3.520386 +step:7075 train loss:3.542127 +step:7076 train loss:3.553522 +step:7077 train loss:3.564698 +step:7078 train loss:3.608562 +step:7079 train loss:3.618439 +step:7080 train loss:3.561677 +step:7081 train loss:3.586039 +step:7082 train loss:3.552246 +step:7083 train loss:3.579936 +step:7084 train loss:3.574321 +step:7085 train loss:3.537312 +step:7086 train loss:3.572518 +step:7087 train loss:3.549380 +step:7088 train loss:3.668566 +step:7089 train loss:3.566249 +step:7090 train loss:3.530990 +step:7091 train loss:3.548860 
+step:7092 train loss:3.528230 +step:7093 train loss:3.620659 +step:7094 train loss:3.541378 +step:7095 train loss:3.558044 +step:7096 train loss:3.576859 +step:7097 train loss:3.563346 +step:7098 train loss:3.587032 +step:7099 train loss:3.542662 +step:7100 train loss:3.573666 +step:7101 train loss:3.643726 +step:7102 train loss:3.535967 +step:7103 train loss:3.562287 +step:7104 train loss:3.589633 +step:7105 train loss:3.569330 +step:7106 train loss:3.554838 +step:7107 train loss:3.589559 +step:7108 train loss:3.660889 +step:7109 train loss:3.582468 +step:7110 train loss:3.612316 +step:7111 train loss:3.590971 +step:7112 train loss:3.581367 +step:7113 train loss:3.580533 +step:7114 train loss:3.598374 +step:7115 train loss:3.634729 +step:7116 train loss:3.565393 +step:7117 train loss:3.603268 +step:7118 train loss:3.614506 +step:7119 train loss:3.576224 +step:7120 train loss:3.633566 +step:7121 train loss:3.548405 +step:7122 train loss:3.548676 +step:7123 train loss:3.492967 +step:7124 train loss:3.645154 +step:7125 train loss:3.501670 +step:7126 train loss:3.665015 +step:7127 train loss:3.627258 +step:7128 train loss:3.567905 +step:7129 train loss:3.574925 +step:7130 train loss:3.565557 +step:7131 train loss:3.505901 +step:7132 train loss:3.544885 +step:7133 train loss:3.593253 +step:7134 train loss:3.523018 +step:7135 train loss:3.580379 +step:7136 train loss:3.563115 +step:7137 train loss:3.541731 +step:7138 train loss:3.528570 +step:7139 train loss:3.537374 +step:7140 train loss:3.569245 +step:7141 train loss:3.567726 +step:7142 train loss:3.564838 +step:7143 train loss:3.601304 +step:7144 train loss:3.549893 +step:7145 train loss:3.564342 +step:7146 train loss:3.572450 +step:7147 train loss:3.595324 +step:7148 train loss:3.598653 +step:7149 train loss:3.605333 +step:7150 train loss:3.582063 +step:7151 train loss:3.542850 +step:7152 train loss:3.519297 +step:7153 train loss:3.553579 +step:7154 train loss:3.572179 +step:7155 train loss:3.589165 +step:7156 train loss:3.558441 +step:7157 train loss:3.578345 +step:7158 train loss:3.534490 +step:7159 train loss:3.586571 +step:7160 train loss:3.597325 +step:7161 train loss:3.547810 +step:7162 train loss:3.596918 +step:7163 train loss:3.529726 +step:7164 train loss:3.567058 +step:7165 train loss:3.572715 +step:7166 train loss:3.626582 +step:7167 train loss:3.605524 +step:7168 train loss:3.580784 +step:7169 train loss:3.558380 +step:7170 train loss:3.589160 +step:7171 train loss:3.534936 +step:7172 train loss:3.699241 +step:7173 train loss:3.546679 +step:7174 train loss:3.587622 +step:7175 train loss:3.563631 +step:7176 train loss:3.570854 +step:7177 train loss:3.588032 +step:7178 train loss:3.587416 +step:7179 train loss:3.571523 +step:7180 train loss:3.574925 +step:7181 train loss:3.599817 +step:7182 train loss:3.552233 +step:7183 train loss:3.624427 +step:7184 train loss:3.718122 +step:7185 train loss:3.629360 +step:7186 train loss:3.567394 +step:7187 train loss:3.580390 +step:7188 train loss:3.566456 +step:7189 train loss:3.564734 +step:7190 train loss:3.569011 +step:7191 train loss:3.560879 +step:7192 train loss:3.595100 +step:7193 train loss:3.511183 +step:7194 train loss:3.574053 +step:7195 train loss:3.550901 +step:7196 train loss:3.599494 +step:7197 train loss:3.576317 +step:7198 train loss:3.635973 +step:7199 train loss:3.592351 +step:7200 train loss:3.587493 +step:7201 train loss:3.595736 +step:7202 train loss:3.573349 +step:7203 train loss:3.588611 +step:7204 train loss:3.558372 +step:7205 train loss:3.516624 +step:7206 train 
loss:3.542876 +step:7207 train loss:3.718555 +step:7208 train loss:3.552440 +step:7209 train loss:3.634384 +step:7210 train loss:3.571107 +step:7211 train loss:3.600333 +step:7212 train loss:3.678211 +step:7213 train loss:3.530535 +step:7214 train loss:3.596464 +step:7215 train loss:3.567412 +step:7216 train loss:3.619161 +step:7217 train loss:3.573027 +step:7218 train loss:3.665635 +step:7219 train loss:3.571075 +step:7220 train loss:3.649266 +step:7221 train loss:3.530747 +step:7222 train loss:3.613513 +step:7223 train loss:3.529553 +step:7224 train loss:3.594021 +step:7225 train loss:3.569471 +step:7226 train loss:3.540116 +step:7227 train loss:3.559276 +step:7228 train loss:3.547616 +step:7229 train loss:3.550283 +step:7230 train loss:3.535563 +step:7231 train loss:3.668213 +step:7232 train loss:3.539591 +step:7233 train loss:3.608295 +step:7234 train loss:3.595205 +step:7235 train loss:3.570461 +step:7236 train loss:3.609232 +step:7237 train loss:3.557998 +step:7238 train loss:3.598425 +step:7239 train loss:3.549995 +step:7240 train loss:3.547950 +step:7241 train loss:3.563420 +step:7242 train loss:3.543035 +step:7243 train loss:3.588396 +step:7244 train loss:3.561976 +step:7245 train loss:3.564877 +step:7246 train loss:3.606370 +step:7247 train loss:3.560818 +step:7248 train loss:3.601842 +step:7249 train loss:3.550280 +step:7250 validation loss:3.502932 +step:7250 train loss:3.574231 +step:7251 train loss:3.618609 +step:7252 train loss:3.533339 +step:7253 train loss:3.621645 +step:7254 train loss:3.559980 +step:7255 train loss:3.528230 +step:7256 train loss:3.571776 +step:7257 train loss:3.615375 +step:7258 train loss:3.568484 +step:7259 train loss:3.555064 +step:7260 train loss:3.640351 +step:7261 train loss:3.596862 +step:7262 train loss:3.551942 +step:7263 train loss:3.592216 +step:7264 train loss:3.579951 +step:7265 train loss:3.483232 +step:7266 train loss:3.605652 +step:7267 train loss:3.524885 +step:7268 train loss:3.588819 +step:7269 train loss:3.596112 +step:7270 train loss:3.550009 +step:7271 train loss:3.566588 +step:7272 train loss:3.572101 +step:7273 train loss:3.568669 +step:7274 train loss:3.546163 +step:7275 train loss:3.618469 +step:7276 train loss:3.524591 +step:7277 train loss:3.573084 +step:7278 train loss:3.540259 +step:7279 train loss:3.524859 +step:7280 train loss:3.591570 +step:7281 train loss:3.614239 +step:7282 train loss:3.614229 +step:7283 train loss:3.503892 +step:7284 train loss:3.546892 +step:7285 train loss:3.574918 +step:7286 train loss:3.705492 +step:7287 train loss:3.615284 +step:7288 train loss:3.571270 +step:7289 train loss:3.574966 +step:7290 train loss:3.620672 +step:7291 train loss:3.582942 +step:7292 train loss:3.653789 +step:7293 train loss:3.548446 +step:7294 train loss:3.633611 +step:7295 train loss:3.524781 +step:7296 train loss:3.521054 +step:7297 train loss:3.566803 +step:7298 train loss:3.543419 +step:7299 train loss:3.583273 +step:7300 train loss:3.569391 +step:7301 train loss:3.520335 +step:7302 train loss:3.665420 +step:7303 train loss:3.556981 +step:7304 train loss:3.501225 +step:7305 train loss:3.577931 +step:7306 train loss:3.606181 +step:7307 train loss:3.611431 +step:7308 train loss:3.564252 +step:7309 train loss:3.527319 +step:7310 train loss:3.559290 +step:7311 train loss:3.543863 +step:7312 train loss:3.580302 +step:7313 train loss:3.617622 +step:7314 train loss:3.515337 +step:7315 train loss:3.508191 +step:7316 train loss:3.649980 +step:7317 train loss:3.591035 +step:7318 train loss:3.531875 +step:7319 train loss:3.556040 
+step:7320 train loss:3.592428 +step:7321 train loss:3.618691 +step:7322 train loss:3.495252 +step:7323 train loss:3.553517 +step:7324 train loss:3.578829 +step:7325 train loss:3.542579 +step:7326 train loss:3.570361 +step:7327 train loss:3.547846 +step:7328 train loss:3.666862 +step:7329 train loss:3.511774 +step:7330 train loss:3.566725 +step:7331 train loss:3.558989 +step:7332 train loss:3.600944 +step:7333 train loss:3.582079 +step:7334 train loss:3.550547 +step:7335 train loss:3.544490 +step:7336 train loss:3.800369 +step:7337 train loss:3.586474 +step:7338 train loss:3.581613 +step:7339 train loss:3.593240 +step:7340 train loss:3.579009 +step:7341 train loss:3.571039 +step:7342 train loss:3.564708 +step:7343 train loss:3.575854 +step:7344 train loss:3.652577 +step:7345 train loss:3.512582 +step:7346 train loss:3.549508 +step:7347 train loss:3.542574 +step:7348 train loss:3.547096 +step:7349 train loss:3.645390 +step:7350 train loss:3.630808 +step:7351 train loss:3.564632 +step:7352 train loss:3.593843 +step:7353 train loss:3.575484 +step:7354 train loss:3.522706 +step:7355 train loss:3.708128 +step:7356 train loss:3.680208 +step:7357 train loss:3.606830 +step:7358 train loss:3.585586 +step:7359 train loss:3.552773 +step:7360 train loss:3.565172 +step:7361 train loss:3.516140 +step:7362 train loss:3.563550 +step:7363 train loss:3.579253 +step:7364 train loss:3.612130 +step:7365 train loss:3.597645 +step:7366 train loss:3.559080 +step:7367 train loss:3.634491 +step:7368 train loss:3.614424 +step:7369 train loss:3.606092 +step:7370 train loss:3.573145 +step:7371 train loss:3.532389 +step:7372 train loss:3.589813 +step:7373 train loss:3.609483 +step:7374 train loss:3.703855 +step:7375 train loss:3.530420 +step:7376 train loss:3.550260 +step:7377 train loss:3.593920 +step:7378 train loss:3.549215 +step:7379 train loss:3.670910 +step:7380 train loss:3.632486 +step:7381 train loss:3.599951 +step:7382 train loss:3.562815 +step:7383 train loss:3.655811 +step:7384 train loss:3.599767 +step:7385 train loss:3.558163 +step:7386 train loss:3.562533 +step:7387 train loss:3.603096 +step:7388 train loss:3.635174 +step:7389 train loss:3.577859 +step:7390 train loss:3.516047 +step:7391 train loss:3.555636 +step:7392 train loss:3.612703 +step:7393 train loss:3.579848 +step:7394 train loss:3.620155 +step:7395 train loss:3.507298 +step:7396 train loss:3.597035 +step:7397 train loss:3.538348 +step:7398 train loss:3.551231 +step:7399 train loss:3.601458 +step:7400 train loss:3.601911 +step:7401 train loss:3.520228 +step:7402 train loss:3.638721 +step:7403 train loss:3.521186 +step:7404 train loss:3.592855 +step:7405 train loss:3.712573 +step:7406 train loss:3.539633 +step:7407 train loss:3.588259 +step:7408 train loss:3.589038 +step:7409 train loss:3.561942 +step:7410 train loss:3.726739 +step:7411 train loss:3.573921 +step:7412 train loss:3.578055 +step:7413 train loss:3.630216 +step:7414 train loss:3.538428 +step:7415 train loss:3.599333 +step:7416 train loss:3.482457 +step:7417 train loss:3.598729 +step:7418 train loss:3.585085 +step:7419 train loss:3.551289 +step:7420 train loss:3.541519 +step:7421 train loss:3.579003 +step:7422 train loss:3.536519 +step:7423 train loss:3.674514 +step:7424 train loss:3.735666 +step:7425 train loss:3.628670 +step:7426 train loss:3.592436 +step:7427 train loss:3.561440 +step:7428 train loss:3.579642 +step:7429 train loss:3.603906 +step:7430 train loss:3.528540 +step:7431 train loss:3.530786 +step:7432 train loss:3.540220 +step:7433 train loss:3.636919 +step:7434 train 
loss:3.553741 +step:7435 train loss:3.639890 +step:7436 train loss:3.677127 +step:7437 train loss:3.502008 +step:7438 train loss:3.562757 +step:7439 train loss:3.573737 +step:7440 train loss:3.543591 +step:7441 train loss:3.510481 +step:7442 train loss:3.741147 +step:7443 train loss:3.563965 +step:7444 train loss:3.607191 +step:7445 train loss:3.535565 +step:7446 train loss:3.559189 +step:7447 train loss:3.487958 +step:7448 train loss:3.540731 +step:7449 train loss:3.557039 +step:7450 train loss:3.588491 +step:7451 train loss:3.618739 +step:7452 train loss:3.549072 +step:7453 train loss:3.574607 +step:7454 train loss:3.560321 +step:7455 train loss:3.568638 +step:7456 train loss:3.542009 +step:7457 train loss:3.550667 +step:7458 train loss:3.588604 +step:7459 train loss:3.567078 +step:7460 train loss:3.575704 +step:7461 train loss:3.610335 +step:7462 train loss:3.546572 +step:7463 train loss:3.609013 +step:7464 train loss:3.531713 +step:7465 train loss:3.540749 +step:7466 train loss:3.541162 +step:7467 train loss:3.554350 +step:7468 train loss:3.603109 +step:7469 train loss:3.535076 +step:7470 train loss:3.565472 +step:7471 train loss:3.557043 +step:7472 train loss:3.590548 +step:7473 train loss:3.531865 +step:7474 train loss:3.517104 +step:7475 train loss:3.545921 +step:7476 train loss:3.587384 +step:7477 train loss:3.558053 +step:7478 train loss:3.557988 +step:7479 train loss:3.572275 +step:7480 train loss:3.848413 +step:7481 train loss:3.500672 +step:7482 train loss:3.573227 +step:7483 train loss:3.566112 +step:7484 train loss:3.587314 +step:7485 train loss:3.572339 +step:7486 train loss:3.597601 +step:7487 train loss:3.591211 +step:7488 train loss:3.611930 +step:7489 train loss:3.605271 +step:7490 train loss:3.552804 +step:7491 train loss:3.575528 +step:7492 train loss:3.686454 +step:7493 train loss:3.658052 +step:7494 train loss:3.680586 +step:7495 train loss:3.551747 +step:7496 train loss:3.538333 +step:7497 train loss:3.638842 +step:7498 train loss:3.569795 +step:7499 train loss:3.608033 +step:7500 validation loss:3.500004 total_sharp:1.8629e-04 L1_sharp:3.9904e-05 L2_sharp:6.8360e-06 L3_sharp:1.0360e-05 L4_sharp:2.5853e-05 L5_sharp:4.1556e-05 L6_sharp:1.8203e-05 L7_sharp:2.4116e-05 L8_sharp:6.4941e-05 L9_sharp:8.3801e-05 L10_sharp:9.8478e-05 L11_sharp:7.7569e-05 L12_sharp:2.0778e-04 total_fnorm:1.1049e+01 total_l1_linf:9.7728e+04 total_spectral:1.1049e+01 L1_fnorm:2.6051e+00 L2_fnorm:2.5753e+00 L3_fnorm:2.5698e+00 L4_fnorm:2.5367e+00 L5_fnorm:2.5211e+00 L6_fnorm:2.5901e+00 L7_fnorm:2.6552e+00 L8_fnorm:2.5916e+00 L9_fnorm:2.5907e+00 L10_fnorm:2.5848e+00 L11_fnorm:2.6047e+00 L12_fnorm:2.5359e+00 L1_l1linf:2.6173e+00 L2_l1linf:2.6850e+00 L3_l1linf:2.6917e+00 L4_l1linf:2.7108e+00 L5_l1linf:2.6163e+00 L6_l1linf:2.5778e+00 L7_l1linf:2.7467e+00 L8_l1linf:2.7806e+00 L9_l1linf:3.0108e+00 L10_l1linf:2.9114e+00 L11_l1linf:2.8501e+00 L12_l1linf:2.8468e+00 L1_spectral:3.4826e-01 L2_spectral:3.1660e-01 L3_spectral:3.1500e-01 L4_spectral:3.4031e-01 L5_spectral:2.7279e-01 L6_spectral:2.5136e-01 L7_spectral:2.8419e-01 L8_spectral:3.4044e-01 L9_spectral:3.9214e-01 L10_spectral:4.0591e-01 L11_spectral:3.8933e-01 L12_spectral:4.2490e-01 ip_v_neg_g:8.8382e-03 cos_v_neg_g:1.8895e-03 v_norm:1.1049e+01 g_norm:4.2336e-01 hv_norm:1.9814e-01 cos_v_hv:1.0388e-02 hg_norm:2.9576e+00 cos_g_hg:4.8785e-01 v_par:1.4744e-03 v_perp:1.1049e+01 L1_cos_v_neg_g:3.4777e-03 L1_v_norm:2.6051e+00 L2_cos_v_neg_g:1.4133e-03 L2_v_norm:2.5753e+00 L3_cos_v_neg_g:1.7302e-03 L3_v_norm:2.5698e+00 L4_cos_v_neg_g:2.5039e-03 
L4_v_norm:2.5367e+00 L5_cos_v_neg_g:3.8779e-03 L5_v_norm:2.5211e+00 L6_cos_v_neg_g:2.5660e-03 L6_v_norm:2.5901e+00 L7_cos_v_neg_g:2.7684e-03 L7_v_norm:2.6552e+00 L8_cos_v_neg_g:4.2823e-03 L8_v_norm:2.5916e+00 L9_cos_v_neg_g:6.2149e-03 L9_v_norm:2.5907e+00 L10_cos_v_neg_g:6.9964e-03 L10_v_norm:2.5848e+00 L11_cos_v_neg_g:6.6551e-03 L11_v_norm:2.6047e+00 L12_cos_v_neg_g:6.7905e-03 L12_v_norm:2.5359e+00 +step:7500 train loss:3.554982 +step:7501 train loss:3.545454 +step:7502 train loss:3.535636 +step:7503 train loss:3.516403 +step:7504 train loss:3.537350 +step:7505 train loss:3.525331 +step:7506 train loss:3.586640 +step:7507 train loss:3.504797 +step:7508 train loss:3.575743 +step:7509 train loss:3.545474 +step:7510 train loss:3.577417 +step:7511 train loss:3.582449 +step:7512 train loss:3.840877 +step:7513 train loss:3.536912 +step:7514 train loss:3.568263 +step:7515 train loss:3.532517 +step:7516 train loss:3.546436 +step:7517 train loss:3.577194 +step:7518 train loss:3.556438 +step:7519 train loss:3.565600 +step:7520 train loss:3.631916 +step:7521 train loss:3.519512 +step:7522 train loss:3.578906 +step:7523 train loss:3.607561 +step:7524 train loss:3.557242 +step:7525 train loss:3.557011 +step:7526 train loss:3.505950 +step:7527 train loss:3.514788 +step:7528 train loss:3.611719 +step:7529 train loss:3.588512 +step:7530 train loss:3.538146 +step:7531 train loss:3.610110 +step:7532 train loss:3.597014 +step:7533 train loss:3.524445 +step:7534 train loss:3.589968 +step:7535 train loss:3.591689 +step:7536 train loss:3.623356 +step:7537 train loss:3.641015 +step:7538 train loss:3.668330 +step:7539 train loss:3.571042 +step:7540 train loss:3.557542 +step:7541 train loss:3.611211 +step:7542 train loss:3.572945 +step:7543 train loss:3.529291 +step:7544 train loss:3.569546 +step:7545 train loss:3.558207 +step:7546 train loss:3.513719 +step:7547 train loss:3.560441 +step:7548 train loss:3.571236 +step:7549 train loss:3.554613 +step:7550 train loss:3.553903 +step:7551 train loss:3.651656 +step:7552 train loss:3.566728 +step:7553 train loss:3.604283 +step:7554 train loss:3.530340 +step:7555 train loss:3.620819 +step:7556 train loss:3.523328 +step:7557 train loss:3.617998 +step:7558 train loss:3.609075 +step:7559 train loss:3.563646 +step:7560 train loss:3.658730 +step:7561 train loss:3.629278 +step:7562 train loss:3.532783 +step:7563 train loss:3.529894 +step:7564 train loss:3.583475 +step:7565 train loss:3.602919 +step:7566 train loss:3.593159 +step:7567 train loss:3.608925 +step:7568 train loss:3.552441 +step:7569 train loss:3.613531 +step:7570 train loss:3.594197 +step:7571 train loss:3.676439 +step:7572 train loss:3.524971 +step:7573 train loss:3.594788 +step:7574 train loss:3.557137 +step:7575 train loss:3.550476 +step:7576 train loss:3.561113 +step:7577 train loss:3.576475 +step:7578 train loss:3.630509 +step:7579 train loss:3.567315 +step:7580 train loss:3.556977 +step:7581 train loss:3.542563 +step:7582 train loss:3.600091 +step:7583 train loss:3.537702 +step:7584 train loss:3.523059 +step:7585 train loss:3.490569 +step:7586 train loss:3.527437 +step:7587 train loss:3.587798 +step:7588 train loss:3.717269 +step:7589 train loss:3.536406 +step:7590 train loss:3.605763 +step:7591 train loss:3.609420 +step:7592 train loss:3.567121 +step:7593 train loss:3.590295 +step:7594 train loss:3.591537 +step:7595 train loss:3.561771 +step:7596 train loss:3.612952 +step:7597 train loss:3.516594 +step:7598 train loss:3.578442 +step:7599 train loss:3.570546 +step:7600 train loss:3.528105 +step:7601 train 
loss:3.643945 +step:7602 train loss:3.584993 +step:7603 train loss:3.542455 +step:7604 train loss:3.690012 +step:7605 train loss:3.577826 +step:7606 train loss:3.613827 +step:7607 train loss:3.566059 +step:7608 train loss:3.574946 +step:7609 train loss:3.609439 +step:7610 train loss:3.566726 +step:7611 train loss:3.544238 +step:7612 train loss:3.485809 +step:7613 train loss:3.534639 +step:7614 train loss:3.601791 +step:7615 train loss:3.564757 +step:7616 train loss:3.631608 +step:7617 train loss:3.530743 +step:7618 train loss:3.617738 +step:7619 train loss:3.558777 +step:7620 train loss:3.545662 +step:7621 train loss:3.496624 +step:7622 train loss:3.767892 +step:7623 train loss:3.785132 +step:7624 train loss:3.601005 +step:7625 train loss:3.637608 +step:7626 train loss:3.556419 +step:7627 train loss:3.627785 +step:7628 train loss:3.509438 +step:7629 train loss:3.569203 +step:7630 train loss:3.585261 +step:7631 train loss:3.561466 +step:7632 train loss:3.610810 +step:7633 train loss:3.680068 +step:7634 train loss:3.641185 +step:7635 train loss:3.544399 +step:7636 train loss:3.575138 +step:7637 train loss:3.521120 +step:7638 train loss:3.630008 +step:7639 train loss:3.559740 +step:7640 train loss:3.538899 +step:7641 train loss:3.570540 +step:7642 train loss:3.904171 +step:7643 train loss:3.658338 +step:7644 train loss:3.583215 +step:7645 train loss:3.572779 +step:7646 train loss:3.560184 +step:7647 train loss:3.552071 +step:7648 train loss:3.585161 +step:7649 train loss:3.544533 +step:7650 train loss:3.596871 +step:7651 train loss:3.617646 +step:7652 train loss:3.491627 +step:7653 train loss:3.687606 +step:7654 train loss:3.549477 +step:7655 train loss:3.566701 +step:7656 train loss:3.543226 +step:7657 train loss:3.558962 +step:7658 train loss:3.509629 +step:7659 train loss:3.573061 +step:7660 train loss:3.506717 +step:7661 train loss:3.523665 +step:7662 train loss:3.524492 +step:7663 train loss:3.574163 +step:7664 train loss:3.533831 +step:7665 train loss:3.509419 +step:7666 train loss:3.616096 +step:7667 train loss:3.528381 +step:7668 train loss:3.636985 +step:7669 train loss:3.570663 +step:7670 train loss:3.526388 +step:7671 train loss:3.583501 +step:7672 train loss:3.600573 +step:7673 train loss:3.564965 +step:7674 train loss:3.603043 +step:7675 train loss:3.655541 +step:7676 train loss:3.625758 +step:7677 train loss:3.652364 +step:7678 train loss:3.593124 +step:7679 train loss:3.615262 +step:7680 train loss:3.623385 +step:7681 train loss:3.589166 +step:7682 train loss:3.557153 +step:7683 train loss:3.562669 +step:7684 train loss:3.533081 +step:7685 train loss:3.513002 +step:7686 train loss:3.634207 +step:7687 train loss:3.548167 +step:7688 train loss:3.516152 +step:7689 train loss:3.564651 +step:7690 train loss:3.531273 +step:7691 train loss:3.556992 +step:7692 train loss:3.592076 +step:7693 train loss:3.593840 +step:7694 train loss:3.643920 +step:7695 train loss:3.573411 +step:7696 train loss:3.550448 +step:7697 train loss:3.534557 +step:7698 train loss:3.597726 +step:7699 train loss:3.590617 +step:7700 train loss:3.494153 +step:7701 train loss:3.607689 +step:7702 train loss:3.550163 +step:7703 train loss:3.552511 +step:7704 train loss:3.601999 +step:7705 train loss:3.562106 +step:7706 train loss:3.498948 +step:7707 train loss:3.619311 +step:7708 train loss:3.563992 +step:7709 train loss:3.577725 +step:7710 train loss:3.641367 +step:7711 train loss:3.601489 +step:7712 train loss:3.546202 +step:7713 train loss:3.626803 +step:7714 train loss:3.573435 +step:7715 train loss:3.522608 
+step:7716 train loss:3.560596 +step:7717 train loss:3.586366 +step:7718 train loss:3.591328 +step:7719 train loss:3.547280 +step:7720 train loss:3.561056 +step:7721 train loss:3.604562 +step:7722 train loss:3.529313 +step:7723 train loss:3.901997 +step:7724 train loss:3.568336 +step:7725 train loss:3.473202 +step:7726 train loss:3.556521 +step:7727 train loss:3.583222 +step:7728 train loss:3.545676 +step:7729 train loss:3.544851 +step:7730 train loss:3.567889 +step:7731 train loss:3.595524 +step:7732 train loss:3.617310 +step:7733 train loss:3.527093 +step:7734 train loss:3.557212 +step:7735 train loss:3.645173 +step:7736 train loss:3.590498 +step:7737 train loss:3.605229 +step:7738 train loss:3.509002 +step:7739 train loss:3.585613 +step:7740 train loss:3.534331 +step:7741 train loss:3.569211 +step:7742 train loss:3.572338 +step:7743 train loss:3.522684 +step:7744 train loss:3.641988 +step:7745 train loss:3.536484 +step:7746 train loss:3.514184 +step:7747 train loss:3.606237 +step:7748 train loss:3.588303 +step:7749 train loss:3.510346 +step:7750 validation loss:3.498213 +step:7750 train loss:3.668792 +step:7751 train loss:3.552473 +step:7752 train loss:3.546009 +step:7753 train loss:3.550096 +step:7754 train loss:3.519626 +step:7755 train loss:3.584570 +step:7756 train loss:3.616048 +step:7757 train loss:3.564362 +step:7758 train loss:3.533732 +step:7759 train loss:3.559303 +step:7760 train loss:3.590323 +step:7761 train loss:3.577368 +step:7762 train loss:3.566088 +step:7763 train loss:3.552299 +step:7764 train loss:3.555302 +step:7765 train loss:3.510815 +step:7766 train loss:3.574188 +step:7767 train loss:3.578806 +step:7768 train loss:3.534327 +step:7769 train loss:3.599359 +step:7770 train loss:3.617009 +step:7771 train loss:3.590339 +step:7772 train loss:3.563809 +step:7773 train loss:3.620705 +step:7774 train loss:3.518864 +step:7775 train loss:3.505957 +step:7776 train loss:3.611075 +step:7777 train loss:3.566357 +step:7778 train loss:3.524022 +step:7779 train loss:3.564690 +step:7780 train loss:3.558873 +step:7781 train loss:3.570948 +step:7782 train loss:3.550161 +step:7783 train loss:3.531312 +step:7784 train loss:3.532076 +step:7785 train loss:3.573265 +step:7786 train loss:3.530866 +step:7787 train loss:3.610587 +step:7788 train loss:3.561102 +step:7789 train loss:3.497106 +step:7790 train loss:3.554105 +step:7791 train loss:3.589108 +step:7792 train loss:3.548129 +step:7793 train loss:3.569868 +step:7794 train loss:3.556852 +step:7795 train loss:3.588923 +step:7796 train loss:3.552055 +step:7797 train loss:3.569994 +step:7798 train loss:3.565190 +step:7799 train loss:3.554054 +step:7800 train loss:3.507627 +step:7801 train loss:3.573474 +step:7802 train loss:3.555602 +step:7803 train loss:3.604289 +step:7804 train loss:3.567307 +step:7805 train loss:3.565718 +step:7806 train loss:3.582683 +step:7807 train loss:3.654140 +step:7808 train loss:3.516985 +step:7809 train loss:3.494960 +step:7810 train loss:3.584281 +step:7811 train loss:3.517248 +step:7812 train loss:3.538870 +step:7813 train loss:3.625412 +step:7814 train loss:3.698397 +step:7815 train loss:3.507512 +step:7816 train loss:3.595122 +step:7817 train loss:3.624884 +step:7818 train loss:3.522849 +step:7819 train loss:3.572540 +step:7820 train loss:3.615159 +step:7821 train loss:3.543298 +step:7822 train loss:3.503975 +step:7823 train loss:3.603332 +step:7824 train loss:3.565624 +step:7825 train loss:3.551682 +step:7826 train loss:3.548365 +step:7827 train loss:3.593942 +step:7828 train loss:3.580007 +step:7829 
train loss:3.534080 +step:7830 train loss:3.544042 +step:7831 train loss:3.550859 +step:7832 train loss:3.614510 +step:7833 train loss:3.593242 +step:7834 train loss:3.559017 +step:7835 train loss:3.579279 +step:7836 train loss:3.694385 +step:7837 train loss:3.577226 +step:7838 train loss:3.545185 +step:7839 train loss:3.509958 +step:7840 train loss:3.522305 +step:7841 train loss:3.614418 +step:7842 train loss:3.602246 +step:7843 train loss:3.659324 +step:7844 train loss:3.585169 +step:7845 train loss:3.565250 +step:7846 train loss:3.676219 +step:7847 train loss:3.563729 +step:7848 train loss:3.575518 +step:7849 train loss:3.590236 +step:7850 train loss:3.559148 +step:7851 train loss:3.583180 +step:7852 train loss:3.563100 +step:7853 train loss:3.532271 +step:7854 train loss:3.563368 +step:7855 train loss:3.562845 +step:7856 train loss:3.566696 +step:7857 train loss:3.549389 +step:7858 train loss:3.560189 +step:7859 train loss:3.570256 +step:7860 train loss:3.603865 +step:7861 train loss:3.589864 +step:7862 train loss:3.534339 +step:7863 train loss:3.640450 +step:7864 train loss:3.478751 +step:7865 train loss:3.556031 +step:7866 train loss:3.529673 +step:7867 train loss:3.575468 +step:7868 train loss:3.552065 +step:7869 train loss:3.558082 +step:7870 train loss:3.482697 +step:7871 train loss:3.549134 +step:7872 train loss:3.545651 +step:7873 train loss:3.623313 +step:7874 train loss:3.566275 +step:7875 train loss:3.570220 +step:7876 train loss:3.585720 +step:7877 train loss:3.540356 +step:7878 train loss:3.574153 +step:7879 train loss:3.910582 +step:7880 train loss:3.567562 +step:7881 train loss:3.591389 +step:7882 train loss:3.671906 +step:7883 train loss:3.485054 +step:7884 train loss:3.573687 +step:7885 train loss:3.559743 +step:7886 train loss:3.555450 +step:7887 train loss:3.549526 +step:7888 train loss:3.584588 +step:7889 train loss:3.631990 +step:7890 train loss:3.534255 +step:7891 train loss:3.585462 +step:7892 train loss:3.555732 +step:7893 train loss:3.531878 +step:7894 train loss:3.552542 +step:7895 train loss:3.534997 +step:7896 train loss:3.534686 +step:7897 train loss:3.558350 +step:7898 train loss:3.568186 +step:7899 train loss:3.555712 +step:7900 train loss:3.525449 +step:7901 train loss:3.515747 +step:7902 train loss:3.660963 +step:7903 train loss:3.511689 +step:7904 train loss:3.559935 +step:7905 train loss:3.627983 +step:7906 train loss:3.527638 +step:7907 train loss:3.554234 +step:7908 train loss:3.606545 +step:7909 train loss:3.654624 +step:7910 train loss:3.536148 +step:7911 train loss:3.554955 +step:7912 train loss:3.557176 +step:7913 train loss:3.533561 +step:7914 train loss:3.567714 +step:7915 train loss:3.670118 +step:7916 train loss:3.541419 +step:7917 train loss:3.600430 +step:7918 train loss:3.540486 +step:7919 train loss:3.533740 +step:7920 train loss:3.571656 +step:7921 train loss:3.576034 +step:7922 train loss:3.551273 +step:7923 train loss:3.598074 +step:7924 train loss:3.560092 +step:7925 train loss:3.582737 +step:7926 train loss:3.489633 +step:7927 train loss:3.764024 +step:7928 train loss:3.592683 +step:7929 train loss:3.556950 +step:7930 train loss:3.515526 +step:7931 train loss:3.540283 +step:7932 train loss:3.561167 +step:7933 train loss:3.576274 +step:7934 train loss:3.670036 +step:7935 train loss:3.591972 +step:7936 train loss:3.564062 +step:7937 train loss:3.513119 +step:7938 train loss:3.528826 +step:7939 train loss:3.578879 +step:7940 train loss:3.561077 +step:7941 train loss:3.590176 +step:7942 train loss:3.579470 +step:7943 train loss:3.589840 
+step:7944 train loss:3.510240 +step:7945 train loss:3.615144 +step:7946 train loss:3.563578 +step:7947 train loss:3.576038 +step:7948 train loss:3.533784 +step:7949 train loss:3.586905 +step:7950 train loss:3.642545 +step:7951 train loss:3.609535 +step:7952 train loss:3.753724 +step:7953 train loss:3.644187 +step:7954 train loss:3.548052 +step:7955 train loss:3.538635 +step:7956 train loss:3.542664 +step:7957 train loss:3.616046 +step:7958 train loss:3.623804 +step:7959 train loss:3.581353 +step:7960 train loss:3.643322 +step:7961 train loss:3.553663 +step:7962 train loss:3.522172 +step:7963 train loss:3.562432 +step:7964 train loss:3.560047 +step:7965 train loss:3.566214 +step:7966 train loss:3.538594 +step:7967 train loss:3.561303 +step:7968 train loss:3.571981 +step:7969 train loss:3.528683 +step:7970 train loss:3.496288 +step:7971 train loss:3.581567 +step:7972 train loss:3.556434 +step:7973 train loss:3.530647 +step:7974 train loss:3.569308 +step:7975 train loss:3.555804 +step:7976 train loss:3.575118 +step:7977 train loss:3.606012 +step:7978 train loss:3.629645 +step:7979 train loss:3.577772 +step:7980 train loss:3.481060 +step:7981 train loss:3.522567 +step:7982 train loss:3.567951 +step:7983 train loss:3.586102 +step:7984 train loss:3.625633 +step:7985 train loss:3.550007 +step:7986 train loss:3.575377 +step:7987 train loss:3.627643 +step:7988 train loss:3.600745 +step:7989 train loss:3.505704 +step:7990 train loss:3.520689 +step:7991 train loss:3.536517 +step:7992 train loss:3.561620 +step:7993 train loss:3.543078 +step:7994 train loss:3.593740 +step:7995 train loss:3.594940 +step:7996 train loss:3.564852 +step:7997 train loss:3.582405 +step:7998 train loss:3.605907 +step:7999 train loss:3.537395 +step:8000 validation loss:3.489560 total_sharp:1.7238e-04 L1_sharp:8.3875e-05 L2_sharp:2.2824e-05 L3_sharp:3.8063e-05 L4_sharp:4.3284e-05 L5_sharp:3.4674e-05 L6_sharp:1.5575e-05 L7_sharp:2.6256e-05 L8_sharp:4.4100e-05 L9_sharp:5.7052e-05 L10_sharp:6.5852e-05 L11_sharp:4.8201e-05 L12_sharp:1.6022e-04 total_fnorm:1.1018e+01 total_l1_linf:9.7538e+04 total_spectral:1.1018e+01 L1_fnorm:2.6129e+00 L2_fnorm:2.5844e+00 L3_fnorm:2.5837e+00 L4_fnorm:2.5509e+00 L5_fnorm:2.5376e+00 L6_fnorm:2.5819e+00 L7_fnorm:2.6435e+00 L8_fnorm:2.5851e+00 L9_fnorm:2.5830e+00 L10_fnorm:2.5738e+00 L11_fnorm:2.5911e+00 L12_fnorm:2.5111e+00 L1_l1linf:2.5903e+00 L2_l1linf:2.6329e+00 L3_l1linf:2.6764e+00 L4_l1linf:2.6183e+00 L5_l1linf:2.5269e+00 L6_l1linf:2.4511e+00 L7_l1linf:2.7427e+00 L8_l1linf:2.7202e+00 L9_l1linf:3.1221e+00 L10_l1linf:2.9094e+00 L11_l1linf:2.9083e+00 L12_l1linf:2.9391e+00 L1_spectral:3.4200e-01 L2_spectral:3.1609e-01 L3_spectral:3.1158e-01 L4_spectral:3.3662e-01 L5_spectral:2.7768e-01 L6_spectral:2.5095e-01 L7_spectral:2.7580e-01 L8_spectral:3.2483e-01 L9_spectral:3.6340e-01 L10_spectral:3.8046e-01 L11_spectral:3.6565e-01 L12_spectral:4.0688e-01 ip_v_neg_g:1.0224e-02 cos_v_neg_g:2.3766e-03 v_norm:1.1018e+01 g_norm:3.9045e-01 hv_norm:1.9825e-01 cos_v_hv:9.5806e-03 hg_norm:3.0262e+00 cos_g_hg:4.4524e-01 v_par:3.0574e-03 v_perp:1.1018e+01 L1_cos_v_neg_g:9.8614e-03 L1_v_norm:2.6129e+00 L2_cos_v_neg_g:4.8559e-03 L2_v_norm:2.5844e+00 L3_cos_v_neg_g:7.2761e-03 L3_v_norm:2.5837e+00 L4_cos_v_neg_g:6.1225e-03 L4_v_norm:2.5509e+00 L5_cos_v_neg_g:4.5700e-03 L5_v_norm:2.5376e+00 L6_cos_v_neg_g:2.1466e-03 L6_v_norm:2.5819e+00 L7_cos_v_neg_g:2.6493e-03 L7_v_norm:2.6435e+00 L8_cos_v_neg_g:2.4306e-03 L8_v_norm:2.5851e+00 L9_cos_v_neg_g:2.8600e-03 L9_v_norm:2.5830e+00 L10_cos_v_neg_g:5.4726e-03 L10_v_norm:2.5738e+00 
L11_cos_v_neg_g:4.9760e-03 L11_v_norm:2.5911e+00 L12_cos_v_neg_g:9.6678e-03 L12_v_norm:2.5111e+00 +step:8000 train loss:3.603390 +step:8001 train loss:3.564462 +step:8002 train loss:3.585183 +step:8003 train loss:3.604445 +step:8004 train loss:3.579174 +step:8005 train loss:3.498727 +step:8006 train loss:3.576819 +step:8007 train loss:3.548103 +step:8008 train loss:3.569661 +step:8009 train loss:3.646076 +step:8010 train loss:3.878917 +step:8011 train loss:3.539343 +step:8012 train loss:3.614085 +step:8013 train loss:3.568576 +step:8014 train loss:3.580065 +step:8015 train loss:3.577556 +step:8016 train loss:3.565830 +step:8017 train loss:3.587235 +step:8018 train loss:3.550199 +step:8019 train loss:3.519346 +step:8020 train loss:3.551436 +step:8021 train loss:3.628773 +step:8022 train loss:3.544250 +step:8023 train loss:3.576267 +step:8024 train loss:3.441192 +step:8025 train loss:3.553716 +step:8026 train loss:3.562053 +step:8027 train loss:3.568650 +step:8028 train loss:3.626518 +step:8029 train loss:3.555167 +step:8030 train loss:3.513667 +step:8031 train loss:3.570297 +step:8032 train loss:3.558192 +step:8033 train loss:3.511518 +step:8034 train loss:3.551566 +step:8035 train loss:3.536799 +step:8036 train loss:3.529618 +step:8037 train loss:3.497629 +step:8038 train loss:3.511100 +step:8039 train loss:3.605597 +step:8040 train loss:3.538919 +step:8041 train loss:3.536197 +step:8042 train loss:3.573508 +step:8043 train loss:3.513382 +step:8044 train loss:3.527534 +step:8045 train loss:3.595529 +step:8046 train loss:3.521405 +step:8047 train loss:3.528670 +step:8048 train loss:3.553980 +step:8049 train loss:3.605014 +step:8050 train loss:3.542871 +step:8051 train loss:3.521277 +step:8052 train loss:3.581085 +step:8053 train loss:3.532216 +step:8054 train loss:3.570258 +step:8055 train loss:3.599640 +step:8056 train loss:3.564502 +step:8057 train loss:3.642481 +step:8058 train loss:3.547587 +step:8059 train loss:3.609968 +step:8060 train loss:3.575684 +step:8061 train loss:3.464478 +step:8062 train loss:3.604774 +step:8063 train loss:3.568871 +step:8064 train loss:3.527320 +step:8065 train loss:3.587822 +step:8066 train loss:3.548030 +step:8067 train loss:3.611795 +step:8068 train loss:3.537621 +step:8069 train loss:3.566217 +step:8070 train loss:3.526670 +step:8071 train loss:3.537929 +step:8072 train loss:3.576947 +step:8073 train loss:3.531335 +step:8074 train loss:3.541651 +step:8075 train loss:3.528647 +step:8076 train loss:3.575362 +step:8077 train loss:3.584617 +step:8078 train loss:3.525296 +step:8079 train loss:3.548890 +step:8080 train loss:3.532009 +step:8081 train loss:3.550372 +step:8082 train loss:3.566781 +step:8083 train loss:3.472873 +step:8084 train loss:3.605433 +step:8085 train loss:3.479139 +step:8086 train loss:3.606600 +step:8087 train loss:3.500537 +step:8088 train loss:3.551353 +step:8089 train loss:3.582704 +step:8090 train loss:3.605481 +step:8091 train loss:3.550167 +step:8092 train loss:3.531203 +step:8093 train loss:3.539027 +step:8094 train loss:3.543131 +step:8095 train loss:3.565010 +step:8096 train loss:3.568175 +step:8097 train loss:3.498312 +step:8098 train loss:3.509130 +step:8099 train loss:3.510727 +step:8100 train loss:3.558458 +step:8101 train loss:3.628419 +step:8102 train loss:3.572022 +step:8103 train loss:3.522595 +step:8104 train loss:3.571173 +step:8105 train loss:3.567120 +step:8106 train loss:3.529345 +step:8107 train loss:3.512346 +step:8108 train loss:3.528570 +step:8109 train loss:3.525255 +step:8110 train loss:3.587106 +step:8111 
train loss:3.511807 +step:8112 train loss:3.530559 +step:8113 train loss:3.517895 +step:8114 train loss:3.464127 +step:8115 train loss:3.521942 +step:8116 train loss:3.554454 +step:8117 train loss:3.523600 +step:8118 train loss:3.516734 +step:8119 train loss:3.559459 +step:8120 train loss:3.506000 +step:8121 train loss:3.562862 +step:8122 train loss:3.545203 +step:8123 train loss:3.553415 +step:8124 train loss:3.512725 +step:8125 train loss:3.498797 +step:8126 train loss:3.488587 +step:8127 train loss:3.585227 +step:8128 train loss:3.590880 +step:8129 train loss:3.507376 +step:8130 train loss:3.537844 +step:8131 train loss:3.508618 +step:8132 train loss:3.578403 +step:8133 train loss:3.499976 +step:8134 train loss:3.536729 +step:8135 train loss:3.526600 +step:8136 train loss:3.539972 +step:8137 train loss:3.602563 +step:8138 train loss:3.513796 +step:8139 train loss:3.583620 +step:8140 train loss:3.514351 +step:8141 train loss:3.536994 +step:8142 train loss:3.515283 +step:8143 train loss:3.565678 +step:8144 train loss:3.544045 +step:8145 train loss:3.514689 +step:8146 train loss:3.516903 +step:8147 train loss:3.540005 +step:8148 train loss:3.633956 +step:8149 train loss:3.547855 +step:8150 train loss:3.526381 +step:8151 train loss:3.520895 +step:8152 train loss:3.619114 +step:8153 train loss:3.494396 +step:8154 train loss:3.512629 +step:8155 train loss:3.535708 +step:8156 train loss:3.523139 +step:8157 train loss:3.537672 +step:8158 train loss:3.552230 +step:8159 train loss:3.566530 +step:8160 train loss:3.521427 +step:8161 train loss:3.567552 +step:8162 train loss:3.497403 +step:8163 train loss:3.554529 +step:8164 train loss:3.542888 +step:8165 train loss:3.593494 +step:8166 train loss:3.594881 +step:8167 train loss:3.500484 +step:8168 train loss:3.477021 +step:8169 train loss:3.529906 +step:8170 train loss:3.474580 +step:8171 train loss:3.539824 +step:8172 train loss:3.537122 +step:8173 train loss:3.537697 +step:8174 train loss:3.548122 +step:8175 train loss:3.507621 +step:8176 train loss:3.503002 +step:8177 train loss:3.547266 +step:8178 train loss:3.638972 +step:8179 train loss:3.540373 +step:8180 train loss:3.565451 +step:8181 train loss:3.565901 +step:8182 train loss:3.526024 +step:8183 train loss:3.511974 +step:8184 train loss:3.507748 +step:8185 train loss:3.545026 +step:8186 train loss:3.544195 +step:8187 train loss:3.556063 +step:8188 train loss:3.484602 +step:8189 train loss:3.633997 +step:8190 train loss:3.565415 +step:8191 train loss:3.569878 +step:8192 train loss:3.679104 +step:8193 train loss:3.549387 +step:8194 train loss:3.485549 +step:8195 train loss:3.583730 +step:8196 train loss:3.496635 +step:8197 train loss:3.527454 +step:8198 train loss:3.534570 +step:8199 train loss:3.537628 +step:8200 train loss:3.515740 +step:8201 train loss:3.631545 +step:8202 train loss:3.544117 +step:8203 train loss:3.568366 +step:8204 train loss:3.477642 +step:8205 train loss:3.486287 +step:8206 train loss:3.607039 +step:8207 train loss:3.533470 +step:8208 train loss:3.554348 +step:8209 train loss:3.598779 +step:8210 train loss:3.580594 +step:8211 train loss:3.515629 +step:8212 train loss:3.572428 +step:8213 train loss:3.581551 +step:8214 train loss:3.621294 +step:8215 train loss:3.596879 +step:8216 train loss:3.577786 +step:8217 train loss:3.553842 +step:8218 train loss:3.563976 +step:8219 train loss:3.695553 +step:8220 train loss:3.527665 +step:8221 train loss:3.546215 +step:8222 train loss:3.499094 +step:8223 train loss:3.518637 +step:8224 train loss:3.528402 +step:8225 train loss:3.580129 
+step:8226 train loss:3.508253 +step:8227 train loss:3.577985 +step:8228 train loss:3.463002 +step:8229 train loss:3.507100 +step:8230 train loss:3.519872 +step:8231 train loss:3.548583 +step:8232 train loss:3.548608 +step:8233 train loss:3.593906 +step:8234 train loss:3.588832 +step:8235 train loss:3.560894 +step:8236 train loss:3.544412 +step:8237 train loss:3.495088 +step:8238 train loss:3.743959 +step:8239 train loss:3.579529 +step:8240 train loss:3.527915 +step:8241 train loss:3.498689 +step:8242 train loss:3.536981 +step:8243 train loss:3.524856 +step:8244 train loss:3.540112 +step:8245 train loss:3.521820 +step:8246 train loss:3.590078 +step:8247 train loss:3.619508 +step:8248 train loss:3.540327 +step:8249 train loss:3.533748 +step:8250 validation loss:3.480676 +step:8250 train loss:3.521199 +step:8251 train loss:3.615776 +step:8252 train loss:3.555766 +step:8253 train loss:3.523212 +step:8254 train loss:3.491362 +step:8255 train loss:3.527479 +step:8256 train loss:3.505508 +step:8257 train loss:3.617259 +step:8258 train loss:3.532492 +step:8259 train loss:3.521906 +step:8260 train loss:3.522879 +step:8261 train loss:3.521893 +step:8262 train loss:3.534690 +step:8263 train loss:3.550349 +step:8264 train loss:3.516204 +step:8265 train loss:3.505618 +step:8266 train loss:3.515043 +step:8267 train loss:3.448514 +step:8268 train loss:3.567313 +step:8269 train loss:3.501911 +step:8270 train loss:3.557025 +step:8271 train loss:3.580223 +step:8272 train loss:3.606869 +step:8273 train loss:3.484384 +step:8274 train loss:3.547716 +step:8275 train loss:3.510776 +step:8276 train loss:3.545918 +step:8277 train loss:3.612974 +step:8278 train loss:3.625629 +step:8279 train loss:3.541882 +step:8280 train loss:3.530493 +step:8281 train loss:3.493425 +step:8282 train loss:3.556279 +step:8283 train loss:3.537582 +step:8284 train loss:3.529215 +step:8285 train loss:3.517514 +step:8286 train loss:3.624495 +step:8287 train loss:3.565275 +step:8288 train loss:3.536822 +step:8289 train loss:3.547716 +step:8290 train loss:3.489317 +step:8291 train loss:3.530191 +step:8292 train loss:3.557786 +step:8293 train loss:3.531908 +step:8294 train loss:3.502362 +step:8295 train loss:3.538708 +step:8296 train loss:3.606084 +step:8297 train loss:3.685728 +step:8298 train loss:3.508090 +step:8299 train loss:3.544823 +step:8300 train loss:3.552824 +step:8301 train loss:3.523416 +step:8302 train loss:3.585829 +step:8303 train loss:3.717801 +step:8304 train loss:3.529573 +step:8305 train loss:3.572203 +step:8306 train loss:3.548249 +step:8307 train loss:3.563690 +step:8308 train loss:3.561997 +step:8309 train loss:3.583588 +step:8310 train loss:3.499465 +step:8311 train loss:3.592086 +step:8312 train loss:3.584036 +step:8313 train loss:3.645636 +step:8314 train loss:3.520841 +step:8315 train loss:3.468183 +step:8316 train loss:3.527190 +step:8317 train loss:3.546470 +step:8318 train loss:3.542109 +step:8319 train loss:3.572627 +step:8320 train loss:3.594577 +step:8321 train loss:3.503492 +step:8322 train loss:3.519936 +step:8323 train loss:3.559581 +step:8324 train loss:3.532388 +step:8325 train loss:3.589285 +step:8326 train loss:3.551365 +step:8327 train loss:3.542898 +step:8328 train loss:3.617588 +step:8329 train loss:3.526531 +step:8330 train loss:3.566030 +step:8331 train loss:3.491109 +step:8332 train loss:3.591889 +step:8333 train loss:3.609680 +step:8334 train loss:3.475389 +step:8335 train loss:3.541014 +step:8336 train loss:3.634046 +step:8337 train loss:3.563806 +step:8338 train loss:3.531614 +step:8339 
train loss:3.508924 +step:8340 train loss:3.600533 +step:8341 train loss:3.500712 +step:8342 train loss:3.573570 +step:8343 train loss:3.486439 +step:8344 train loss:3.533098 +step:8345 train loss:3.567019 +step:8346 train loss:3.647894 +step:8347 train loss:3.539344 +step:8348 train loss:3.572216 +step:8349 train loss:3.541447 +step:8350 train loss:3.558484 +step:8351 train loss:3.502642 +step:8352 train loss:3.582677 +step:8353 train loss:3.541206 +step:8354 train loss:3.521601 +step:8355 train loss:3.521760 +step:8356 train loss:3.512147 +step:8357 train loss:3.531938 +step:8358 train loss:3.505322 +step:8359 train loss:3.501146 +step:8360 train loss:3.546649 +step:8361 train loss:3.563223 +step:8362 train loss:3.581496 +step:8363 train loss:3.578826 +step:8364 train loss:3.542326 +step:8365 train loss:3.688179 +step:8366 train loss:3.529826 +step:8367 train loss:3.505993 +step:8368 train loss:3.476075 +step:8369 train loss:3.508169 +step:8370 train loss:3.590301 +step:8371 train loss:3.561791 +step:8372 train loss:3.539326 +step:8373 train loss:3.550455 +step:8374 train loss:3.482689 +step:8375 train loss:3.546756 +step:8376 train loss:3.583095 +step:8377 train loss:3.413507 +step:8378 train loss:3.625017 +step:8379 train loss:3.490310 +step:8380 train loss:3.501920 +step:8381 train loss:3.506053 +step:8382 train loss:3.536247 +step:8383 train loss:3.494155 +step:8384 train loss:3.534707 +step:8385 train loss:3.548668 +step:8386 train loss:3.529657 +step:8387 train loss:3.690331 +step:8388 train loss:3.599836 +step:8389 train loss:3.570888 +step:8390 train loss:3.575978 +step:8391 train loss:3.508679 +step:8392 train loss:3.522962 +step:8393 train loss:3.477969 +step:8394 train loss:3.569909 +step:8395 train loss:3.572134 +step:8396 train loss:3.597545 +step:8397 train loss:3.532822 +step:8398 train loss:3.554110 +step:8399 train loss:3.515619 +step:8400 train loss:3.523422 +step:8401 train loss:3.534364 +step:8402 train loss:3.515238 +step:8403 train loss:3.529489 +step:8404 train loss:3.536323 +step:8405 train loss:3.490005 +step:8406 train loss:3.530931 +step:8407 train loss:3.571421 +step:8408 train loss:3.542709 +step:8409 train loss:3.465092 +step:8410 train loss:3.529343 +step:8411 train loss:3.555399 +step:8412 train loss:3.611933 +step:8413 train loss:3.588478 +step:8414 train loss:3.585829 +step:8415 train loss:3.506312 +step:8416 train loss:3.553869 +step:8417 train loss:3.473020 +step:8418 train loss:3.575127 +step:8419 train loss:3.524551 +step:8420 train loss:3.606935 +step:8421 train loss:3.522757 +step:8422 train loss:3.539392 +step:8423 train loss:3.551956 +step:8424 train loss:3.557577 +step:8425 train loss:3.616345 +step:8426 train loss:3.585549 +step:8427 train loss:3.505852 +step:8428 train loss:3.518373 +step:8429 train loss:3.580413 +step:8430 train loss:3.518653 +step:8431 train loss:3.524471 +step:8432 train loss:3.526856 +step:8433 train loss:3.507766 +step:8434 train loss:3.539353 +step:8435 train loss:3.458129 +step:8436 train loss:3.541612 +step:8437 train loss:3.582338 +step:8438 train loss:3.562832 +step:8439 train loss:3.504176 +step:8440 train loss:3.472631 +step:8441 train loss:3.528988 +step:8442 train loss:3.553329 +step:8443 train loss:3.509461 +step:8444 train loss:3.541729 +step:8445 train loss:3.493888 +step:8446 train loss:3.541811 +step:8447 train loss:3.553565 +step:8448 train loss:3.538977 +step:8449 train loss:3.529959 +step:8450 train loss:3.517108 +step:8451 train loss:3.550370 +step:8452 train loss:3.525431 +step:8453 train loss:3.507637 
+step:8454 train loss:3.552520 +step:8455 train loss:3.628988 +step:8456 train loss:3.603734 +step:8457 train loss:3.658456 +step:8458 train loss:3.546927 +step:8459 train loss:3.553363 +step:8460 train loss:3.480730 +step:8461 train loss:3.634200 +step:8462 train loss:3.506338 +step:8463 train loss:3.548829 +step:8464 train loss:3.560302 +step:8465 train loss:3.566293 +step:8466 train loss:3.541945 +step:8467 train loss:3.543153 +step:8468 train loss:3.793936 +step:8469 train loss:3.509322 +step:8470 train loss:3.499422 +step:8471 train loss:3.543644 +step:8472 train loss:3.565587 +step:8473 train loss:3.520715 +step:8474 train loss:3.647256 +step:8475 train loss:3.603868 +step:8476 train loss:3.551023 +step:8477 train loss:3.542570 +step:8478 train loss:3.523316 +step:8479 train loss:3.523422 +step:8480 train loss:3.610030 +step:8481 train loss:3.524831 +step:8482 train loss:3.519356 +step:8483 train loss:3.668103 +step:8484 train loss:3.550102 +step:8485 train loss:3.595313 +step:8486 train loss:3.503375 +step:8487 train loss:3.560536 +step:8488 train loss:3.506426 +step:8489 train loss:3.584646 +step:8490 train loss:3.570777 +step:8491 train loss:3.589369 +step:8492 train loss:3.545209 +step:8493 train loss:3.614994 +step:8494 train loss:3.478826 +step:8495 train loss:3.574790 +step:8496 train loss:3.520945 +step:8497 train loss:3.557248 +step:8498 train loss:3.569347 +step:8499 train loss:3.546227 +step:8500 validation loss:3.478882 total_sharp:1.7531e-04 L1_sharp:3.8501e-05 L2_sharp:6.5016e-06 L3_sharp:1.1536e-05 L4_sharp:2.7751e-05 L5_sharp:3.3748e-05 L6_sharp:1.9716e-05 L7_sharp:2.7785e-05 L8_sharp:4.5578e-05 L9_sharp:6.6766e-05 L10_sharp:6.6913e-05 L11_sharp:5.2931e-05 L12_sharp:2.6367e-04 total_fnorm:1.0804e+01 total_l1_linf:9.5194e+04 total_spectral:1.0804e+01 L1_fnorm:2.4787e+00 L2_fnorm:2.4699e+00 L3_fnorm:2.4661e+00 L4_fnorm:2.4640e+00 L5_fnorm:2.4599e+00 L6_fnorm:2.5426e+00 L7_fnorm:2.6133e+00 L8_fnorm:2.5384e+00 L9_fnorm:2.5406e+00 L10_fnorm:2.5150e+00 L11_fnorm:2.5191e+00 L12_fnorm:2.4151e+00 L1_l1linf:2.5547e+00 L2_l1linf:2.5888e+00 L3_l1linf:2.6855e+00 L4_l1linf:2.6225e+00 L5_l1linf:2.5819e+00 L6_l1linf:2.6406e+00 L7_l1linf:2.9596e+00 L8_l1linf:2.7207e+00 L9_l1linf:2.7820e+00 L10_l1linf:3.1117e+00 L11_l1linf:3.1726e+00 L12_l1linf:3.2116e+00 L1_spectral:3.3238e-01 L2_spectral:3.2771e-01 L3_spectral:3.0568e-01 L4_spectral:3.3442e-01 L5_spectral:2.7348e-01 L6_spectral:2.5948e-01 L7_spectral:2.7969e-01 L8_spectral:3.3579e-01 L9_spectral:3.7052e-01 L10_spectral:3.7504e-01 L11_spectral:3.8316e-01 L12_spectral:4.2925e-01 ip_v_neg_g:1.1808e-02 cos_v_neg_g:2.8160e-03 v_norm:1.0804e+01 g_norm:3.8809e-01 hv_norm:1.8923e-01 cos_v_hv:1.0010e-02 hg_norm:2.3165e+00 cos_g_hg:4.8291e-01 v_par:3.0082e-03 v_perp:1.0804e+01 L1_cos_v_neg_g:5.4639e-03 L1_v_norm:2.4787e+00 L2_cos_v_neg_g:2.8991e-03 L2_v_norm:2.4699e+00 L3_cos_v_neg_g:3.7581e-03 L3_v_norm:2.4661e+00 L4_cos_v_neg_g:3.6693e-03 L4_v_norm:2.4640e+00 L5_cos_v_neg_g:5.0829e-03 L5_v_norm:2.4599e+00 L6_cos_v_neg_g:4.6700e-03 L6_v_norm:2.5426e+00 L7_cos_v_neg_g:4.9226e-03 L7_v_norm:2.6133e+00 L8_cos_v_neg_g:5.3833e-03 L8_v_norm:2.5384e+00 L9_cos_v_neg_g:7.8060e-03 L9_v_norm:2.5406e+00 L10_cos_v_neg_g:9.1155e-03 L10_v_norm:2.5150e+00 L11_cos_v_neg_g:1.0066e-02 L11_v_norm:2.5191e+00 L12_cos_v_neg_g:1.4844e-02 L12_v_norm:2.4151e+00 +step:8500 train loss:3.542084 +step:8501 train loss:3.757361 +step:8502 train loss:3.776735 +step:8503 train loss:3.535248 +step:8504 train loss:3.535019 +step:8505 train loss:3.510145 +step:8506 train 
loss:3.580687 +step:8507 train loss:3.520742 +step:8508 train loss:3.552886 +step:8509 train loss:3.491877 +step:8510 train loss:3.517352 +step:8511 train loss:3.472857 +step:8512 train loss:3.571540 +step:8513 train loss:3.575452 +step:8514 train loss:3.519413 +step:8515 train loss:3.617868 +step:8516 train loss:3.534065 +step:8517 train loss:3.555552 +step:8518 train loss:3.444691 +step:8519 train loss:3.539436 +step:8520 train loss:3.504619 +step:8521 train loss:3.546534 +step:8522 train loss:3.440612 +step:8523 train loss:3.534537 +step:8524 train loss:3.527135 +step:8525 train loss:3.589952 +step:8526 train loss:3.572591 +step:8527 train loss:3.514582 +step:8528 train loss:3.597347 +step:8529 train loss:3.549530 +step:8530 train loss:3.587685 +step:8531 train loss:3.576179 +step:8532 train loss:3.612644 +step:8533 train loss:3.565020 +step:8534 train loss:3.565396 +step:8535 train loss:3.540345 +step:8536 train loss:3.628875 +step:8537 train loss:3.541176 +step:8538 train loss:3.611352 +step:8539 train loss:3.536731 +step:8540 train loss:3.558903 +step:8541 train loss:3.501163 +step:8542 train loss:3.566912 +step:8543 train loss:3.481508 +step:8544 train loss:3.475839 +step:8545 train loss:3.531261 +step:8546 train loss:3.480031 +step:8547 train loss:3.533677 +step:8548 train loss:3.507075 +step:8549 train loss:3.550350 +step:8550 train loss:3.501574 +step:8551 train loss:3.552128 +step:8552 train loss:3.556919 +step:8553 train loss:3.556426 +step:8554 train loss:3.530277 +step:8555 train loss:3.543230 +step:8556 train loss:3.620721 +step:8557 train loss:3.520987 +step:8558 train loss:3.559620 +step:8559 train loss:3.547777 +step:8560 train loss:3.532269 +step:8561 train loss:3.486555 +step:8562 train loss:3.515878 +step:8563 train loss:3.513035 +step:8564 train loss:3.583045 +step:8565 train loss:3.557636 +step:8566 train loss:3.579072 +step:8567 train loss:3.523137 +step:8568 train loss:3.539220 +step:8569 train loss:3.551005 +step:8570 train loss:3.494004 +step:8571 train loss:3.535095 +step:8572 train loss:3.549975 +step:8573 train loss:3.626437 +step:8574 train loss:3.557991 +step:8575 train loss:3.555026 +step:8576 train loss:3.586969 +step:8577 train loss:3.672105 +step:8578 train loss:3.576871 +step:8579 train loss:3.567467 +step:8580 train loss:3.496072 +step:8581 train loss:3.542679 +step:8582 train loss:3.547126 +step:8583 train loss:3.545232 +step:8584 train loss:3.537214 +step:8585 train loss:3.614434 +step:8586 train loss:3.531903 +step:8587 train loss:3.542307 +step:8588 train loss:3.588987 +step:8589 train loss:3.533578 +step:8590 train loss:3.528993 +step:8591 train loss:3.532372 +step:8592 train loss:3.491398 +step:8593 train loss:3.568563 +step:8594 train loss:3.592836 +step:8595 train loss:3.513936 +step:8596 train loss:3.557137 +step:8597 train loss:3.521706 +step:8598 train loss:3.572453 +step:8599 train loss:3.542933 +step:8600 train loss:3.550353 +step:8601 train loss:3.535460 +step:8602 train loss:3.512832 +step:8603 train loss:3.568599 +step:8604 train loss:3.515209 +step:8605 train loss:3.526539 +step:8606 train loss:3.543176 +step:8607 train loss:3.547985 +step:8608 train loss:3.591414 +step:8609 train loss:3.488892 +step:8610 train loss:3.562824 +step:8611 train loss:3.493047 +step:8612 train loss:3.570893 +step:8613 train loss:3.504203 +step:8614 train loss:3.566429 +step:8615 train loss:3.609909 +step:8616 train loss:3.489434 +step:8617 train loss:3.558503 +step:8618 train loss:3.533073 +step:8619 train loss:3.487427 +step:8620 train loss:3.530959 
+step:8621 train loss:3.560598 +step:8622 train loss:3.520290 +step:8623 train loss:3.533693 +step:8624 train loss:3.606673 +step:8625 train loss:3.529612 +step:8626 train loss:3.539133 +step:8627 train loss:3.532557 +step:8628 train loss:3.568823 +step:8629 train loss:3.474080 +step:8630 train loss:3.574778 +step:8631 train loss:3.520860 +step:8632 train loss:3.579085 +step:8633 train loss:3.521941 +step:8634 train loss:3.751898 +step:8635 train loss:3.552982 +step:8636 train loss:3.596296 +step:8637 train loss:3.523939 +step:8638 train loss:3.520046 +step:8639 train loss:3.580052 +step:8640 train loss:3.490098 +step:8641 train loss:3.590144 +step:8642 train loss:3.541632 +step:8643 train loss:3.658755 +step:8644 train loss:3.496055 +step:8645 train loss:3.567209 +step:8646 train loss:3.526167 +step:8647 train loss:3.553159 +step:8648 train loss:3.502556 +step:8649 train loss:3.585504 +step:8650 train loss:3.539073 +step:8651 train loss:3.552574 +step:8652 train loss:3.521282 +step:8653 train loss:3.552255 +step:8654 train loss:3.596065 +step:8655 train loss:3.525504 +step:8656 train loss:3.566627 +step:8657 train loss:3.568471 +step:8658 train loss:3.542444 +step:8659 train loss:3.533106 +step:8660 train loss:3.478170 +step:8661 train loss:3.538085 +step:8662 train loss:3.479051 +step:8663 train loss:3.551873 +step:8664 train loss:3.467486 +step:8665 train loss:3.489161 +step:8666 train loss:3.567608 +step:8667 train loss:3.460243 +step:8668 train loss:3.570897 +step:8669 train loss:3.606265 +step:8670 train loss:3.508633 +step:8671 train loss:3.502620 +step:8672 train loss:3.721380 +step:8673 train loss:3.489653 +step:8674 train loss:3.556275 +step:8675 train loss:3.596345 +step:8676 train loss:3.538947 +step:8677 train loss:3.564155 +step:8678 train loss:3.512719 +step:8679 train loss:3.568933 +step:8680 train loss:3.551310 +step:8681 train loss:3.550351 +step:8682 train loss:3.507213 +step:8683 train loss:3.523210 +step:8684 train loss:3.598425 +step:8685 train loss:3.540864 +step:8686 train loss:3.531656 +step:8687 train loss:3.484479 +step:8688 train loss:3.504456 +step:8689 train loss:3.574717 +step:8690 train loss:3.511117 +step:8691 train loss:3.590346 +step:8692 train loss:3.477686 +step:8693 train loss:3.566942 +step:8694 train loss:3.568536 +step:8695 train loss:3.552979 +step:8696 train loss:3.577266 +step:8697 train loss:3.529805 +step:8698 train loss:3.571204 +step:8699 train loss:3.521969 +step:8700 train loss:3.547253 +step:8701 train loss:3.508137 +step:8702 train loss:3.493793 +step:8703 train loss:3.510398 +step:8704 train loss:3.464955 +step:8705 train loss:3.546950 +step:8706 train loss:3.566701 +step:8707 train loss:3.563106 +step:8708 train loss:3.507623 +step:8709 train loss:3.570898 +step:8710 train loss:3.499480 +step:8711 train loss:3.554569 +step:8712 train loss:3.458894 +step:8713 train loss:3.536853 +step:8714 train loss:3.644387 +step:8715 train loss:3.501964 +step:8716 train loss:3.558203 +step:8717 train loss:3.527168 +step:8718 train loss:3.564488 +step:8719 train loss:3.535112 +step:8720 train loss:3.646523 +step:8721 train loss:3.539320 +step:8722 train loss:3.629655 +step:8723 train loss:3.500408 +step:8724 train loss:3.507827 +step:8725 train loss:3.539876 +step:8726 train loss:3.495275 +step:8727 train loss:3.570350 +step:8728 train loss:3.529774 +step:8729 train loss:3.531547 +step:8730 train loss:3.510722 +step:8731 train loss:3.514706 +step:8732 train loss:3.617290 +step:8733 train loss:3.539820 +step:8734 train loss:3.574327 +step:8735 train 
loss:3.647945 +step:8736 train loss:3.501847 +step:8737 train loss:3.530765 +step:8738 train loss:3.512360 +step:8739 train loss:3.570937 +step:8740 train loss:3.494287 +step:8741 train loss:3.545030 +step:8742 train loss:3.504360 +step:8743 train loss:3.541478 +step:8744 train loss:3.564581 +step:8745 train loss:3.601538 +step:8746 train loss:3.502238 +step:8747 train loss:3.606336 +step:8748 train loss:3.514512 +step:8749 train loss:3.550166 +step:8750 validation loss:3.472650 +step:8750 train loss:3.564194 +step:8751 train loss:3.600631 +step:8752 train loss:3.463108 +step:8753 train loss:3.507549 +step:8754 train loss:3.563442 +step:8755 train loss:3.542834 +step:8756 train loss:3.589380 +step:8757 train loss:3.500468 +step:8758 train loss:3.652700 +step:8759 train loss:3.501325 +step:8760 train loss:3.533864 +step:8761 train loss:3.609893 +step:8762 train loss:3.506562 +step:8763 train loss:3.481862 +step:8764 train loss:3.552622 +step:8765 train loss:3.624202 +step:8766 train loss:3.554940 +step:8767 train loss:3.510417 +step:8768 train loss:3.552093 +step:8769 train loss:3.523222 +step:8770 train loss:3.570086 +step:8771 train loss:3.541585 +step:8772 train loss:3.561624 +step:8773 train loss:3.523575 +step:8774 train loss:3.553375 +step:8775 train loss:3.553488 +step:8776 train loss:3.497997 +step:8777 train loss:3.535504 +step:8778 train loss:3.543936 +step:8779 train loss:3.565706 +step:8780 train loss:3.532308 +step:8781 train loss:3.534867 +step:8782 train loss:3.557623 +step:8783 train loss:3.536819 +step:8784 train loss:3.562411 +step:8785 train loss:3.546711 +step:8786 train loss:3.620184 +step:8787 train loss:3.565548 +step:8788 train loss:3.466777 +step:8789 train loss:3.566718 +step:8790 train loss:3.494090 +step:8791 train loss:3.545552 +step:8792 train loss:3.484219 +step:8793 train loss:3.571400 +step:8794 train loss:3.499992 +step:8795 train loss:3.568441 +step:8796 train loss:3.715485 +step:8797 train loss:3.461870 +step:8798 train loss:3.616644 +step:8799 train loss:3.536639 +step:8800 train loss:3.529858 +step:8801 train loss:3.549719 +step:8802 train loss:3.606894 +step:8803 train loss:3.563342 +step:8804 train loss:3.543704 +step:8805 train loss:3.564915 +step:8806 train loss:3.531738 +step:8807 train loss:3.525036 +step:8808 train loss:3.476954 +step:8809 train loss:3.603984 +step:8810 train loss:3.507920 +step:8811 train loss:3.493128 +step:8812 train loss:3.542155 +step:8813 train loss:3.451168 +step:8814 train loss:3.639600 +step:8815 train loss:3.483053 +step:8816 train loss:3.600197 +step:8817 train loss:3.538519 +step:8818 train loss:3.469231 +step:8819 train loss:3.587025 +step:8820 train loss:3.514619 +step:8821 train loss:3.541401 +step:8822 train loss:3.522564 +step:8823 train loss:3.537644 +step:8824 train loss:3.598629 +step:8825 train loss:3.572678 +step:8826 train loss:3.545900 +step:8827 train loss:3.505880 +step:8828 train loss:3.544679 +step:8829 train loss:3.525848 +step:8830 train loss:3.504966 +step:8831 train loss:3.577407 +step:8832 train loss:3.519915 +step:8833 train loss:3.548550 +step:8834 train loss:3.518262 +step:8835 train loss:3.455322 +step:8836 train loss:3.581162 +step:8837 train loss:3.486753 +step:8838 train loss:3.533960 +step:8839 train loss:3.516942 +step:8840 train loss:3.517821 +step:8841 train loss:3.532124 +step:8842 train loss:3.543073 +step:8843 train loss:3.555213 +step:8844 train loss:3.520768 +step:8845 train loss:3.541220 +step:8846 train loss:3.507824 +step:8847 train loss:3.543580 +step:8848 train loss:3.591959 
+step:8849 train loss:3.571081 +step:8850 train loss:3.563889 +step:8851 train loss:3.452125 +step:8852 train loss:3.555206 +step:8853 train loss:3.537192 +step:8854 train loss:3.503237 +step:8855 train loss:3.576551 +step:8856 train loss:3.567393 +step:8857 train loss:3.632467 +step:8858 train loss:3.500175 +step:8859 train loss:3.567022 +step:8860 train loss:3.528965 +step:8861 train loss:3.508666 +step:8862 train loss:3.511584 +step:8863 train loss:3.493158 +step:8864 train loss:3.561419 +step:8865 train loss:3.555815 +step:8866 train loss:3.433924 +step:8867 train loss:3.541433 +step:8868 train loss:3.566881 +step:8869 train loss:3.649364 +step:8870 train loss:3.531896 +step:8871 train loss:3.554368 +step:8872 train loss:3.538513 +step:8873 train loss:3.536999 +step:8874 train loss:3.589666 +step:8875 train loss:3.524543 +step:8876 train loss:3.560487 +step:8877 train loss:3.545782 +step:8878 train loss:3.593824 +step:8879 train loss:3.553872 +step:8880 train loss:3.501263 +step:8881 train loss:3.463809 +step:8882 train loss:3.537182 +step:8883 train loss:3.521569 +step:8884 train loss:3.612325 +step:8885 train loss:3.547609 +step:8886 train loss:3.552583 +step:8887 train loss:3.579330 +step:8888 train loss:3.536038 +step:8889 train loss:3.540984 +step:8890 train loss:3.534165 +step:8891 train loss:3.505668 +step:8892 train loss:3.586078 +step:8893 train loss:3.527070 +step:8894 train loss:3.544307 +step:8895 train loss:3.576762 +step:8896 train loss:3.490487 +step:8897 train loss:3.583383 +step:8898 train loss:3.514591 +step:8899 train loss:3.540717 +step:8900 train loss:3.505759 +step:8901 train loss:3.518905 +step:8902 train loss:3.560667 +step:8903 train loss:3.499474 +step:8904 train loss:3.551003 +step:8905 train loss:3.522174 +step:8906 train loss:3.518134 +step:8907 train loss:3.529919 +step:8908 train loss:3.591352 +step:8909 train loss:3.538818 +step:8910 train loss:3.500292 +step:8911 train loss:3.598304 +step:8912 train loss:3.495341 +step:8913 train loss:3.506427 +step:8914 train loss:3.603998 +step:8915 train loss:3.541059 +step:8916 train loss:3.573394 +step:8917 train loss:3.527483 +step:8918 train loss:3.532883 +step:8919 train loss:3.517891 +step:8920 train loss:3.543598 +step:8921 train loss:3.543033 +step:8922 train loss:3.524046 +step:8923 train loss:3.706284 +step:8924 train loss:3.608336 +step:8925 train loss:3.534512 +step:8926 train loss:3.547254 +step:8927 train loss:3.575427 +step:8928 train loss:3.530600 +step:8929 train loss:3.522952 +step:8930 train loss:3.580673 +step:8931 train loss:3.490910 +step:8932 train loss:3.589670 +step:8933 train loss:3.502542 +step:8934 train loss:3.540028 +step:8935 train loss:3.552636 +step:8936 train loss:3.587685 +step:8937 train loss:3.585127 +step:8938 train loss:3.528005 +step:8939 train loss:3.589814 +step:8940 train loss:3.545514 +step:8941 train loss:3.490984 +step:8942 train loss:3.565701 +step:8943 train loss:3.499439 +step:8944 train loss:3.547834 +step:8945 train loss:3.566190 +step:8946 train loss:3.414052 +step:8947 train loss:3.604105 +step:8948 train loss:3.450720 +step:8949 train loss:3.453779 +step:8950 train loss:3.497445 +step:8951 train loss:3.536596 +step:8952 train loss:3.558759 +step:8953 train loss:3.511679 +step:8954 train loss:3.613941 +step:8955 train loss:3.530402 +step:8956 train loss:3.554840 +step:8957 train loss:3.547867 +step:8958 train loss:3.520610 +step:8959 train loss:3.512707 +step:8960 train loss:3.481347 +step:8961 train loss:3.504073 +step:8962 train loss:3.560326 +step:8963 train 
loss:3.537303 +step:8964 train loss:3.518421 +step:8965 train loss:3.560966 +step:8966 train loss:3.519666 +step:8967 train loss:3.495541 +step:8968 train loss:3.480165 +step:8969 train loss:3.471263 +step:8970 train loss:3.550254 +step:8971 train loss:3.500084 +step:8972 train loss:3.697387 +step:8973 train loss:3.586770 +step:8974 train loss:3.546308 +step:8975 train loss:3.546969 +step:8976 train loss:3.510780 +step:8977 train loss:3.600452 +step:8978 train loss:3.580133 +step:8979 train loss:3.497638 +step:8980 train loss:3.595494 +step:8981 train loss:3.543180 +step:8982 train loss:3.515785 +step:8983 train loss:3.463079 +step:8984 train loss:3.585484 +step:8985 train loss:3.504663 +step:8986 train loss:3.541407 +step:8987 train loss:3.514346 +step:8988 train loss:3.563426 +step:8989 train loss:3.476556 +step:8990 train loss:3.616309 +step:8991 train loss:3.469157 +step:8992 train loss:3.525516 +step:8993 train loss:3.614401 +step:8994 train loss:3.519232 +step:8995 train loss:3.548506 +step:8996 train loss:3.517056 +step:8997 train loss:3.462942 +step:8998 train loss:3.469402 +step:8999 train loss:3.491629 +step:9000 validation loss:3.467221 total_sharp:1.6300e-04 L1_sharp:7.7781e-05 L2_sharp:1.6363e-05 L3_sharp:3.2785e-05 L4_sharp:4.8127e-05 L5_sharp:3.4013e-05 L6_sharp:1.6308e-05 L7_sharp:2.5188e-05 L8_sharp:4.0907e-05 L9_sharp:4.8018e-05 L10_sharp:5.0482e-05 L11_sharp:4.6289e-05 L12_sharp:2.1973e-04 total_fnorm:1.1049e+01 total_l1_linf:9.7808e+04 total_spectral:1.1049e+01 L1_fnorm:2.6081e+00 L2_fnorm:2.5833e+00 L3_fnorm:2.5652e+00 L4_fnorm:2.5698e+00 L5_fnorm:2.5392e+00 L6_fnorm:2.6037e+00 L7_fnorm:2.6537e+00 L8_fnorm:2.5896e+00 L9_fnorm:2.5973e+00 L10_fnorm:2.5739e+00 L11_fnorm:2.6024e+00 L12_fnorm:2.5214e+00 L1_l1linf:2.5723e+00 L2_l1linf:2.6213e+00 L3_l1linf:2.7383e+00 L4_l1linf:2.8444e+00 L5_l1linf:2.4820e+00 L6_l1linf:2.5441e+00 L7_l1linf:2.7319e+00 L8_l1linf:2.7301e+00 L9_l1linf:2.9281e+00 L10_l1linf:3.0171e+00 L11_l1linf:3.0996e+00 L12_l1linf:3.5744e+00 L1_spectral:3.5093e-01 L2_spectral:3.2999e-01 L3_spectral:3.0505e-01 L4_spectral:3.3069e-01 L5_spectral:2.6645e-01 L6_spectral:2.5061e-01 L7_spectral:2.7537e-01 L8_spectral:3.1299e-01 L9_spectral:3.5416e-01 L10_spectral:3.6569e-01 L11_spectral:3.8486e-01 L12_spectral:4.2935e-01 ip_v_neg_g:1.0883e-02 cos_v_neg_g:2.4073e-03 v_norm:1.1049e+01 g_norm:4.0918e-01 hv_norm:2.0882e-01 cos_v_hv:8.6243e-03 hg_norm:3.3707e+00 cos_g_hg:5.0320e-01 v_par:3.9763e-03 v_perp:1.1049e+01 L1_cos_v_neg_g:7.0410e-03 L1_v_norm:2.6081e+00 L2_cos_v_neg_g:3.7402e-03 L2_v_norm:2.5833e+00 L3_cos_v_neg_g:4.0662e-03 L3_v_norm:2.5652e+00 L4_cos_v_neg_g:5.1048e-03 L4_v_norm:2.5698e+00 L5_cos_v_neg_g:4.5273e-03 L5_v_norm:2.5392e+00 L6_cos_v_neg_g:3.0758e-03 L6_v_norm:2.6037e+00 L7_cos_v_neg_g:4.0761e-03 L7_v_norm:2.6537e+00 L8_cos_v_neg_g:4.7618e-03 L8_v_norm:2.5896e+00 L9_cos_v_neg_g:4.5178e-03 L9_v_norm:2.5973e+00 L10_cos_v_neg_g:5.7489e-03 L10_v_norm:2.5739e+00 L11_cos_v_neg_g:7.3465e-03 L11_v_norm:2.6024e+00 L12_cos_v_neg_g:1.1531e-02 L12_v_norm:2.5214e+00 +step:9000 train loss:3.577582 +step:9001 train loss:3.546184 +step:9002 train loss:3.553774 +step:9003 train loss:3.495050 +step:9004 train loss:3.490978 +step:9005 train loss:3.507581 +step:9006 train loss:3.509454 +step:9007 train loss:3.526536 +step:9008 train loss:3.483890 +step:9009 train loss:3.480006 +step:9010 train loss:3.515824 +step:9011 train loss:3.510849 +step:9012 train loss:3.621505 +step:9013 train loss:3.452472 +step:9014 train loss:3.523511 +step:9015 train loss:3.526238 +step:9016 
train loss:3.600134 +step:9017 train loss:3.542175 +step:9018 train loss:3.467095 +step:9019 train loss:3.548022 +step:9020 train loss:3.559848 +step:9021 train loss:3.515898 +step:9022 train loss:3.528236 +step:9023 train loss:3.523370 +step:9024 train loss:3.544236 +step:9025 train loss:3.528280 +step:9026 train loss:3.487438 +step:9027 train loss:3.533843 +step:9028 train loss:3.554873 +step:9029 train loss:3.572032 +step:9030 train loss:3.569923 +step:9031 train loss:3.534805 +step:9032 train loss:3.547089 +step:9033 train loss:3.530750 +step:9034 train loss:3.541689 +step:9035 train loss:3.541369 +step:9036 train loss:3.494784 +step:9037 train loss:3.486499 +step:9038 train loss:3.609013 +step:9039 train loss:3.513922 +step:9040 train loss:3.529724 +step:9041 train loss:3.579896 +step:9042 train loss:3.431928 +step:9043 train loss:3.529341 +step:9044 train loss:3.547333 +step:9045 train loss:3.488614 +step:9046 train loss:3.532505 +step:9047 train loss:3.529580 +step:9048 train loss:3.508368 +step:9049 train loss:3.544252 +step:9050 train loss:3.495396 +step:9051 train loss:3.536773 +step:9052 train loss:3.464425 +step:9053 train loss:3.592172 +step:9054 train loss:3.604239 +step:9055 train loss:3.525587 +step:9056 train loss:3.589683 +step:9057 train loss:3.441976 +step:9058 train loss:3.525283 +step:9059 train loss:3.603134 +step:9060 train loss:3.532683 +step:9061 train loss:3.559196 +step:9062 train loss:3.493584 +step:9063 train loss:3.622685 +step:9064 train loss:3.510247 +step:9065 train loss:3.521649 +step:9066 train loss:3.540805 +step:9067 train loss:3.503707 +step:9068 train loss:3.577857 +step:9069 train loss:3.536496 +step:9070 train loss:3.581899 +step:9071 train loss:3.517436 +step:9072 train loss:3.539281 +step:9073 train loss:3.502363 +step:9074 train loss:3.582876 +step:9075 train loss:3.528017 +step:9076 train loss:3.495020 +step:9077 train loss:3.574580 +step:9078 train loss:3.509408 +step:9079 train loss:3.554200 +step:9080 train loss:3.484490 +step:9081 train loss:3.524650 +step:9082 train loss:3.550447 +step:9083 train loss:3.579974 +step:9084 train loss:3.472950 +step:9085 train loss:3.543454 +step:9086 train loss:3.528127 +step:9087 train loss:3.471061 +step:9088 train loss:3.535037 +step:9089 train loss:3.551872 +step:9090 train loss:3.485715 +step:9091 train loss:3.584143 +step:9092 train loss:3.516548 +step:9093 train loss:3.507771 +step:9094 train loss:3.639245 +step:9095 train loss:3.503969 +step:9096 train loss:3.518388 +step:9097 train loss:3.501086 +step:9098 train loss:3.495493 +step:9099 train loss:3.623478 +step:9100 train loss:3.651944 +step:9101 train loss:3.573029 +step:9102 train loss:3.516090 +step:9103 train loss:3.522111 +step:9104 train loss:3.607249 +step:9105 train loss:3.469081 +step:9106 train loss:3.596149 +step:9107 train loss:3.533826 +step:9108 train loss:3.512620 +step:9109 train loss:3.537918 +step:9110 train loss:3.544091 +step:9111 train loss:3.520889 +step:9112 train loss:3.524094 +step:9113 train loss:3.547423 +step:9114 train loss:3.501076 +step:9115 train loss:3.530150 +step:9116 train loss:3.551642 +step:9117 train loss:3.562061 +step:9118 train loss:3.535213 +step:9119 train loss:3.457610 +step:9120 train loss:3.555453 +step:9121 train loss:3.587168 +step:9122 train loss:3.531279 +step:9123 train loss:3.552784 +step:9124 train loss:3.580962 +step:9125 train loss:3.529468 +step:9126 train loss:3.509346 +step:9127 train loss:3.542377 +step:9128 train loss:3.597025 +step:9129 train loss:3.551416 +step:9130 train loss:3.564519 
+step:9131 train loss:3.542778 +step:9132 train loss:3.553495 +step:9133 train loss:3.537875 +step:9134 train loss:3.511132 +step:9135 train loss:3.539781 +step:9136 train loss:3.538364 +step:9137 train loss:3.592335 +step:9138 train loss:3.510363 +step:9139 train loss:3.586124 +step:9140 train loss:3.507530 +step:9141 train loss:3.483981 +step:9142 train loss:3.660870 +step:9143 train loss:3.490760 +step:9144 train loss:3.583792 +step:9145 train loss:3.592315 +step:9146 train loss:3.507638 +step:9147 train loss:3.577396 +step:9148 train loss:3.601809 +step:9149 train loss:3.514461 +step:9150 train loss:3.534285 +step:9151 train loss:3.598632 +step:9152 train loss:3.551062 +step:9153 train loss:3.515139 +step:9154 train loss:3.528908 +step:9155 train loss:3.493928 +step:9156 train loss:3.498557 +step:9157 train loss:3.515402 +step:9158 train loss:3.497688 +step:9159 train loss:3.585383 +step:9160 train loss:3.470848 +step:9161 train loss:3.495823 +step:9162 train loss:3.584813 +step:9163 train loss:3.528757 +step:9164 train loss:3.499239 +step:9165 train loss:3.497153 +step:9166 train loss:3.551280 +step:9167 train loss:3.493430 +step:9168 train loss:3.538631 +step:9169 train loss:3.472136 +step:9170 train loss:3.494050 +step:9171 train loss:3.562894 +step:9172 train loss:3.483943 +step:9173 train loss:3.603949 +step:9174 train loss:3.534572 +step:9175 train loss:3.510613 +step:9176 train loss:3.495438 +step:9177 train loss:3.542527 +step:9178 train loss:3.486992 +step:9179 train loss:3.444711 +step:9180 train loss:3.539999 +step:9181 train loss:3.549155 +step:9182 train loss:3.520814 +step:9183 train loss:3.528527 +step:9184 train loss:3.521068 +step:9185 train loss:3.539304 +step:9186 train loss:3.500036 +step:9187 train loss:3.572763 +step:9188 train loss:3.609205 +step:9189 train loss:3.531770 +step:9190 train loss:3.536309 +step:9191 train loss:3.528538 +step:9192 train loss:3.539416 +step:9193 train loss:3.542748 +step:9194 train loss:3.479966 +step:9195 train loss:3.469306 +step:9196 train loss:3.520739 +step:9197 train loss:3.480311 +step:9198 train loss:3.556428 +step:9199 train loss:3.503591 +step:9200 train loss:3.528656 +step:9201 train loss:3.563902 +step:9202 train loss:3.551206 +step:9203 train loss:3.506546 +step:9204 train loss:3.706100 +step:9205 train loss:3.615620 +step:9206 train loss:3.531821 +step:9207 train loss:3.584018 +step:9208 train loss:3.560096 +step:9209 train loss:3.582082 +step:9210 train loss:3.476290 +step:9211 train loss:3.501028 +step:9212 train loss:3.501277 +step:9213 train loss:3.562665 +step:9214 train loss:3.505045 +step:9215 train loss:3.570745 +step:9216 train loss:3.536000 +step:9217 train loss:3.475084 +step:9218 train loss:3.564618 +step:9219 train loss:3.527912 +step:9220 train loss:3.571304 +step:9221 train loss:3.623663 +step:9222 train loss:3.567801 +step:9223 train loss:3.730769 +step:9224 train loss:3.573540 +step:9225 train loss:3.505315 +step:9226 train loss:3.521115 +step:9227 train loss:3.540933 +step:9228 train loss:3.542519 +step:9229 train loss:3.498398 +step:9230 train loss:3.562032 +step:9231 train loss:3.446964 +step:9232 train loss:3.504559 +step:9233 train loss:3.525978 +step:9234 train loss:3.584306 +step:9235 train loss:3.585946 +step:9236 train loss:3.492059 +step:9237 train loss:3.553008 +step:9238 train loss:3.527541 +step:9239 train loss:3.520891 +step:9240 train loss:3.488162 +step:9241 train loss:3.516247 +step:9242 train loss:3.527553 +step:9243 train loss:3.525545 +step:9244 train loss:3.501162 +step:9245 train 
loss:3.508843 +step:9246 train loss:3.504195 +step:9247 train loss:3.518815 +step:9248 train loss:3.526372 +step:9249 train loss:3.526016 +step:9250 validation loss:3.463521 +step:9250 train loss:3.566850 +step:9251 train loss:3.510507 +step:9252 train loss:3.576634 +step:9253 train loss:3.572578 +step:9254 train loss:3.500331 +step:9255 train loss:3.617209 +step:9256 train loss:3.494651 +step:9257 train loss:3.436996 +step:9258 train loss:3.519330 +step:9259 train loss:3.522065 +step:9260 train loss:3.614522 +step:9261 train loss:3.497432 +step:9262 train loss:3.568910 +step:9263 train loss:3.467888 +step:9264 train loss:3.618816 +step:9265 train loss:3.643892 +step:9266 train loss:3.576507 +step:9267 train loss:3.523531 +step:9268 train loss:3.515967 +step:9269 train loss:3.542645 +step:9270 train loss:3.464915 +step:9271 train loss:3.575312 +step:9272 train loss:3.518661 +step:9273 train loss:3.534086 +step:9274 train loss:3.539473 +step:9275 train loss:3.534108 +step:9276 train loss:3.564359 +step:9277 train loss:3.536700 +step:9278 train loss:3.551887 +step:9279 train loss:3.543846 +step:9280 train loss:3.545610 +step:9281 train loss:3.518017 +step:9282 train loss:3.635042 +step:9283 train loss:3.523993 +step:9284 train loss:3.487854 +step:9285 train loss:3.508331 +step:9286 train loss:3.562056 +step:9287 train loss:3.531091 +step:9288 train loss:3.540104 +step:9289 train loss:3.510196 +step:9290 train loss:3.537387 +step:9291 train loss:3.514053 +step:9292 train loss:3.574718 +step:9293 train loss:3.612392 +step:9294 train loss:3.531653 +step:9295 train loss:3.520474 +step:9296 train loss:3.473965 +step:9297 train loss:3.543240 +step:9298 train loss:3.481948 +step:9299 train loss:3.465379 +step:9300 train loss:3.570075 +step:9301 train loss:3.594211 +step:9302 train loss:3.533878 +step:9303 train loss:3.584459 +step:9304 train loss:3.506611 +step:9305 train loss:3.497684 +step:9306 train loss:3.500620 +step:9307 train loss:3.498900 +step:9308 train loss:3.471834 +step:9309 train loss:3.459693 +step:9310 train loss:3.520074 +step:9311 train loss:3.576183 +step:9312 train loss:3.530776 +step:9313 train loss:3.471569 +step:9314 train loss:3.503681 +step:9315 train loss:3.536006 +step:9316 train loss:3.522532 +step:9317 train loss:3.495772 +step:9318 train loss:3.584189 +step:9319 train loss:3.492152 +step:9320 train loss:3.512623 +step:9321 train loss:3.527080 +step:9322 train loss:3.534444 +step:9323 train loss:3.609507 +step:9324 train loss:3.553578 +step:9325 train loss:3.492853 +step:9326 train loss:3.570451 +step:9327 train loss:3.565028 +step:9328 train loss:3.567752 +step:9329 train loss:3.454339 +step:9330 train loss:3.624402 +step:9331 train loss:3.553533 +step:9332 train loss:3.575086 +step:9333 train loss:3.594681 +step:9334 train loss:3.530298 +step:9335 train loss:3.622723 +step:9336 train loss:3.585367 +step:9337 train loss:3.537182 +step:9338 train loss:3.590587 +step:9339 train loss:3.569926 +step:9340 train loss:3.530965 +step:9341 train loss:3.619663 +step:9342 train loss:3.517186 +step:9343 train loss:3.507885 +step:9344 train loss:3.513384 +step:9345 train loss:3.654407 +step:9346 train loss:3.488517 +step:9347 train loss:3.508222 +step:9348 train loss:3.531807 +step:9349 train loss:3.477299 +step:9350 train loss:3.554100 +step:9351 train loss:3.529950 +step:9352 train loss:3.514814 +step:9353 train loss:3.544767 +step:9354 train loss:3.515420 +step:9355 train loss:3.506717 +step:9356 train loss:3.553554 +step:9357 train loss:3.510123 +step:9358 train loss:3.541605 
+step:9359 train loss:3.482898 +step:9360 train loss:3.500583 +step:9361 train loss:3.498381 +step:9362 train loss:3.487894 +step:9363 train loss:3.551273 +step:9364 train loss:3.527868 +step:9365 train loss:3.533360 +step:9366 train loss:3.528493 +step:9367 train loss:3.543067 +step:9368 train loss:3.516655 +step:9369 train loss:3.515487 +step:9370 train loss:3.523054 +step:9371 train loss:3.544068 +step:9372 train loss:3.511742 +step:9373 train loss:3.493142 +step:9374 train loss:3.528733 +step:9375 train loss:3.542908 +step:9376 train loss:3.482619 +step:9377 train loss:3.555521 +step:9378 train loss:3.558977 +step:9379 train loss:3.583739 +step:9380 train loss:3.515190 +step:9381 train loss:3.525465 +step:9382 train loss:3.497488 +step:9383 train loss:3.494736 +step:9384 train loss:3.464059 +step:9385 train loss:3.540302 +step:9386 train loss:3.565877 +step:9387 train loss:3.544217 +step:9388 train loss:3.481334 +step:9389 train loss:3.499146 +step:9390 train loss:3.539279 +step:9391 train loss:3.550238 +step:9392 train loss:3.510157 +step:9393 train loss:3.505761 +step:9394 train loss:3.530648 +step:9395 train loss:3.523151 +step:9396 train loss:3.673644 +step:9397 train loss:3.558759 +step:9398 train loss:3.582254 +step:9399 train loss:3.532446 +step:9400 train loss:3.534109 +step:9401 train loss:3.530014 +step:9402 train loss:3.533314 +step:9403 train loss:3.462295 +step:9404 train loss:3.539913 +step:9405 train loss:3.501639 +step:9406 train loss:3.551885 +step:9407 train loss:3.495060 +step:9408 train loss:3.435182 +step:9409 train loss:3.496861 +step:9410 train loss:3.577636 +step:9411 train loss:3.538161 +step:9412 train loss:3.566280 +step:9413 train loss:3.586535 +step:9414 train loss:3.522468 +step:9415 train loss:3.516984 +step:9416 train loss:3.530080 +step:9417 train loss:3.483952 +step:9418 train loss:3.512586 +step:9419 train loss:3.479714 +step:9420 train loss:3.499835 +step:9421 train loss:3.546240 +step:9422 train loss:3.501765 +step:9423 train loss:3.560534 +step:9424 train loss:3.499244 +step:9425 train loss:3.544542 +step:9426 train loss:3.546993 +step:9427 train loss:3.520336 +step:9428 train loss:3.626602 +step:9429 train loss:3.514064 +step:9430 train loss:3.472106 +step:9431 train loss:3.562899 +step:9432 train loss:3.527379 +step:9433 train loss:3.565722 +step:9434 train loss:3.518940 +step:9435 train loss:3.547076 +step:9436 train loss:3.514610 +step:9437 train loss:3.526980 +step:9438 train loss:3.520760 +step:9439 train loss:3.519843 +step:9440 train loss:3.510127 +step:9441 train loss:3.524945 +step:9442 train loss:3.465369 +step:9443 train loss:3.515956 +step:9444 train loss:3.582748 +step:9445 train loss:3.516600 +step:9446 train loss:3.494454 +step:9447 train loss:3.560829 +step:9448 train loss:3.493859 +step:9449 train loss:3.516135 +step:9450 train loss:3.556654 +step:9451 train loss:3.475011 +step:9452 train loss:3.529081 +step:9453 train loss:3.506030 +step:9454 train loss:3.570485 +step:9455 train loss:3.549034 +step:9456 train loss:3.474796 +step:9457 train loss:3.522159 +step:9458 train loss:3.508518 +step:9459 train loss:3.499808 +step:9460 train loss:3.545719 +step:9461 train loss:3.569644 +step:9462 train loss:3.520815 +step:9463 train loss:3.547656 +step:9464 train loss:3.504568 +step:9465 train loss:3.593249 +step:9466 train loss:3.541632 +step:9467 train loss:3.567158 +step:9468 train loss:3.513631 +step:9469 train loss:3.502599 +step:9470 train loss:3.499643 +step:9471 train loss:3.541813 +step:9472 train loss:3.563856 +step:9473 train 
loss:3.555509 +step:9474 train loss:3.499574 +step:9475 train loss:3.489549 +step:9476 train loss:3.708227 +step:9477 train loss:3.579695 +step:9478 train loss:3.558714 +step:9479 train loss:3.653612 +step:9480 train loss:3.505677 +step:9481 train loss:3.536511 +step:9482 train loss:3.565862 +step:9483 train loss:3.519997 +step:9484 train loss:3.547780 +step:9485 train loss:3.473030 +step:9486 train loss:3.508046 +step:9487 train loss:3.539313 +step:9488 train loss:3.493609 +step:9489 train loss:3.543224 +step:9490 train loss:3.507306 +step:9491 train loss:3.548726 +step:9492 train loss:3.569732 +step:9493 train loss:3.540732 +step:9494 train loss:3.551168 +step:9495 train loss:3.504353 +step:9496 train loss:3.563924 +step:9497 train loss:3.577223 +step:9498 train loss:3.525902 +step:9499 train loss:3.574327 +step:9500 validation loss:3.463183 total_sharp:1.6871e-04 L1_sharp:3.6301e-05 L2_sharp:5.8757e-06 L3_sharp:9.5272e-06 L4_sharp:1.8168e-05 L5_sharp:2.0996e-05 L6_sharp:1.2819e-05 L7_sharp:2.1684e-05 L8_sharp:4.8380e-05 L9_sharp:6.4859e-05 L10_sharp:7.4794e-05 L11_sharp:6.9632e-05 L12_sharp:2.1534e-04 total_fnorm:1.0992e+01 total_l1_linf:9.7190e+04 total_spectral:1.0992e+01 L1_fnorm:2.5894e+00 L2_fnorm:2.5590e+00 L3_fnorm:2.5436e+00 L4_fnorm:2.5430e+00 L5_fnorm:2.5075e+00 L6_fnorm:2.5851e+00 L7_fnorm:2.6473e+00 L8_fnorm:2.5996e+00 L9_fnorm:2.5969e+00 L10_fnorm:2.5774e+00 L11_fnorm:2.5902e+00 L12_fnorm:2.4923e+00 L1_l1linf:2.5885e+00 L2_l1linf:2.6481e+00 L3_l1linf:2.6994e+00 L4_l1linf:2.6537e+00 L5_l1linf:2.5821e+00 L6_l1linf:2.5587e+00 L7_l1linf:2.6735e+00 L8_l1linf:2.9429e+00 L9_l1linf:3.0259e+00 L10_l1linf:2.9986e+00 L11_l1linf:3.0231e+00 L12_l1linf:3.0804e+00 L1_spectral:3.4341e-01 L2_spectral:3.2206e-01 L3_spectral:3.0410e-01 L4_spectral:3.2257e-01 L5_spectral:2.7404e-01 L6_spectral:2.4902e-01 L7_spectral:2.8851e-01 L8_spectral:3.6097e-01 L9_spectral:3.9420e-01 L10_spectral:4.1630e-01 L11_spectral:4.0965e-01 L12_spectral:4.2001e-01 ip_v_neg_g:9.1503e-03 cos_v_neg_g:2.1064e-03 v_norm:1.0992e+01 g_norm:3.9520e-01 hv_norm:1.4891e-01 cos_v_hv:1.2453e-02 hg_norm:2.8334e+00 cos_g_hg:5.1610e-01 v_par:2.5002e-03 v_perp:1.0992e+01 L1_cos_v_neg_g:5.5804e-03 L1_v_norm:2.5894e+00 L2_cos_v_neg_g:1.4993e-03 L2_v_norm:2.5590e+00 L3_cos_v_neg_g:2.2613e-03 L3_v_norm:2.5436e+00 L4_cos_v_neg_g:2.3403e-03 L4_v_norm:2.5430e+00 L5_cos_v_neg_g:2.7908e-03 L5_v_norm:2.5075e+00 L6_cos_v_neg_g:2.8122e-03 L6_v_norm:2.5851e+00 L7_cos_v_neg_g:2.9948e-03 L7_v_norm:2.6473e+00 L8_cos_v_neg_g:4.4294e-03 L8_v_norm:2.5996e+00 L9_cos_v_neg_g:5.9818e-03 L9_v_norm:2.5969e+00 L10_cos_v_neg_g:8.0929e-03 L10_v_norm:2.5774e+00 L11_cos_v_neg_g:9.0748e-03 L11_v_norm:2.5902e+00 L12_cos_v_neg_g:1.2230e-02 L12_v_norm:2.4923e+00 +step:9500 train loss:3.566807 +step:9501 train loss:3.545350 +step:9502 train loss:3.515891 +step:9503 train loss:3.534059 +step:9504 train loss:3.487731 +step:9505 train loss:3.510997 +step:9506 train loss:3.526026 +step:9507 train loss:3.514791 +step:9508 train loss:3.706466 +step:9509 train loss:3.524046 +step:9510 train loss:3.511615 +step:9511 train loss:3.539948 +step:9512 train loss:3.567383 +step:9513 train loss:3.559660 +step:9514 train loss:3.525375 +step:9515 train loss:3.424722 +step:9516 train loss:3.527800 +step:9517 train loss:3.564198 +step:9518 train loss:3.537596 +step:9519 train loss:3.548534 +step:9520 train loss:3.436163 +step:9521 train loss:3.431015 +step:9522 train loss:3.546483 +step:9523 train loss:3.546126 +step:9524 train loss:3.546845 +step:9525 train loss:3.590791 +step:9526 
train loss:3.607578 +step:9527 train loss:3.564354 +step:9528 train loss:3.496684 +step:9529 train loss:3.542468 +step:9530 train loss:3.589080 +step:9531 train loss:3.492482 +step:9532 train loss:3.541319 +step:9533 train loss:3.512016 +step:9534 train loss:3.596335 +step:9535 train loss:3.517808 +step:9536 train loss:3.500355 +step:9537 train loss:3.443898 +step:9538 train loss:3.460851 +step:9539 train loss:3.534364 +step:9540 train loss:3.454494 +step:9541 train loss:3.513694 +step:9542 train loss:3.638811 +step:9543 train loss:3.539217 +step:9544 train loss:3.576900 +step:9545 train loss:3.508335 +step:9546 train loss:3.534782 +step:9547 train loss:3.579504 +step:9548 train loss:3.519334 +step:9549 train loss:3.489055 +step:9550 train loss:3.519461 +step:9551 train loss:3.514431 +step:9552 train loss:3.535403 +step:9553 train loss:3.534500 +step:9554 train loss:3.578979 +step:9555 train loss:3.580518 +step:9556 train loss:3.490243 +step:9557 train loss:3.508947 +step:9558 train loss:3.575390 +step:9559 train loss:3.585205 +step:9560 train loss:3.494521 +step:9561 train loss:3.520145 +step:9562 train loss:3.559020 +step:9563 train loss:3.506513 +step:9564 train loss:3.541780 +step:9565 train loss:3.516804 +step:9566 train loss:3.493377 +step:9567 train loss:3.556981 +step:9568 train loss:3.529409 +step:9569 train loss:3.571478 +step:9570 train loss:3.466561 +step:9571 train loss:3.539957 +step:9572 train loss:3.483834 +step:9573 train loss:3.514130 +step:9574 train loss:3.490922 +step:9575 train loss:3.563194 +step:9576 train loss:3.452575 +step:9577 train loss:3.503455 +step:9578 train loss:3.507949 +step:9579 train loss:3.506745 +step:9580 train loss:3.570972 +step:9581 train loss:3.558839 +step:9582 train loss:3.544018 +step:9583 train loss:3.558012 +step:9584 train loss:3.494552 +step:9585 train loss:3.513787 +step:9586 train loss:3.565038 +step:9587 train loss:3.536383 +step:9588 train loss:3.522073 +step:9589 train loss:3.578061 +step:9590 train loss:3.545476 +step:9591 train loss:3.508299 +step:9592 train loss:3.530553 +step:9593 train loss:3.530691 +step:9594 train loss:3.548220 +step:9595 train loss:3.525524 +step:9596 train loss:3.608946 +step:9597 train loss:3.514858 +step:9598 train loss:3.480712 +step:9599 train loss:3.486538 +step:9600 train loss:3.572834 +step:9601 train loss:3.488409 +step:9602 train loss:3.570645 +step:9603 train loss:3.564432 +step:9604 train loss:3.447736 +step:9605 train loss:3.533679 +step:9606 train loss:3.589198 +step:9607 train loss:3.510699 +step:9608 train loss:3.517770 +step:9609 train loss:3.525009 +step:9610 train loss:3.569094 +step:9611 train loss:3.500610 +step:9612 train loss:3.507773 +step:9613 train loss:3.546564 +step:9614 train loss:3.519246 +step:9615 train loss:3.703934 +step:9616 train loss:3.520207 +step:9617 train loss:3.512538 +step:9618 train loss:3.469606 +step:9619 train loss:3.527348 +step:9620 train loss:3.588321 +step:9621 train loss:3.505754 +step:9622 train loss:3.517398 +step:9623 train loss:3.563240 +step:9624 train loss:3.546990 +step:9625 train loss:3.560699 +step:9626 train loss:3.531319 +step:9627 train loss:3.612255 +step:9628 train loss:3.573384 +step:9629 train loss:3.492516 +step:9630 train loss:3.546017 +step:9631 train loss:3.532887 +step:9632 train loss:3.504733 +step:9633 train loss:3.544768 +step:9634 train loss:3.612642 +step:9635 train loss:3.514375 +step:9636 train loss:3.460196 +step:9637 train loss:3.595506 +step:9638 train loss:3.477024 +step:9639 train loss:3.444632 +step:9640 train loss:3.572397 
+step:9641 train loss:3.543366 +step:9642 train loss:3.519940 +step:9643 train loss:3.521930 +step:9644 train loss:3.578049 +step:9645 train loss:3.505782 +step:9646 train loss:3.543040 +step:9647 train loss:3.552657 +step:9648 train loss:3.502799 +step:9649 train loss:3.476000 +step:9650 train loss:3.490529 +step:9651 train loss:3.583339 +step:9652 train loss:3.563869 +step:9653 train loss:3.505747 +step:9654 train loss:3.489628 +step:9655 train loss:3.483793 +step:9656 train loss:3.475932 +step:9657 train loss:3.503444 +step:9658 train loss:3.561321 +step:9659 train loss:3.667204 +step:9660 train loss:3.451756 +step:9661 train loss:3.477265 +step:9662 train loss:3.491876 +step:9663 train loss:3.534453 +step:9664 train loss:3.583581 +step:9665 train loss:3.428976 +step:9666 train loss:3.468686 +step:9667 train loss:3.605459 +step:9668 train loss:3.586981 +step:9669 train loss:3.603245 +step:9670 train loss:3.584750 +step:9671 train loss:3.584853 +step:9672 train loss:3.499523 +step:9673 train loss:3.520255 +step:9674 train loss:3.529577 +step:9675 train loss:3.527298 +step:9676 train loss:3.486973 +step:9677 train loss:3.493881 +step:9678 train loss:3.525923 +step:9679 train loss:3.520291 +step:9680 train loss:3.516571 +step:9681 train loss:3.506049 +step:9682 train loss:3.571672 +step:9683 train loss:3.544864 +step:9684 train loss:3.463651 +step:9685 train loss:3.546400 +step:9686 train loss:3.580776 +step:9687 train loss:3.487221 +step:9688 train loss:3.573145 +step:9689 train loss:3.672688 +step:9690 train loss:3.517457 +step:9691 train loss:3.508310 +step:9692 train loss:3.469993 +step:9693 train loss:3.465488 +step:9694 train loss:3.488651 +step:9695 train loss:3.593956 +step:9696 train loss:3.629313 +step:9697 train loss:3.536960 +step:9698 train loss:3.572166 +step:9699 train loss:3.530740 +step:9700 train loss:3.530242 +step:9701 train loss:3.592387 +step:9702 train loss:3.499369 +step:9703 train loss:3.523592 +step:9704 train loss:3.602827 +step:9705 train loss:3.505619 +step:9706 train loss:3.495667 +step:9707 train loss:3.551147 +step:9708 train loss:3.495098 +step:9709 train loss:3.514004 +step:9710 train loss:3.533397 +step:9711 train loss:3.510419 +step:9712 train loss:3.518354 +step:9713 train loss:3.569430 +step:9714 train loss:3.524026 +step:9715 train loss:3.544665 +step:9716 train loss:3.565475 +step:9717 train loss:3.485067 +step:9718 train loss:3.496582 +step:9719 train loss:3.579215 +step:9720 train loss:3.509593 +step:9721 train loss:3.498818 +step:9722 train loss:3.564216 +step:9723 train loss:3.508923 +step:9724 train loss:3.538651 +step:9725 train loss:3.589073 +step:9726 train loss:3.530540 +step:9727 train loss:3.511272 +step:9728 train loss:3.545835 +step:9729 train loss:3.576402 +step:9730 train loss:3.643109 +step:9731 train loss:3.565203 +step:9732 train loss:3.527254 +step:9733 train loss:3.568299 +step:9734 train loss:3.486281 +step:9735 train loss:3.597386 +step:9736 train loss:3.497180 +step:9737 train loss:3.556147 +step:9738 train loss:3.523849 +step:9739 train loss:3.596014 +step:9740 train loss:3.557991 +step:9741 train loss:3.499193 +step:9742 train loss:3.590503 +step:9743 train loss:3.466788 +step:9744 train loss:3.527079 +step:9745 train loss:3.485029 +step:9746 train loss:3.521157 +step:9747 train loss:3.513515 +step:9748 train loss:3.415790 +step:9749 train loss:3.510997 +step:9750 validation loss:3.456149 +step:9750 train loss:3.487865 +step:9751 train loss:3.630114 +step:9752 train loss:3.516803 +step:9753 train loss:3.474489 +step:9754 
train loss:3.506474 +step:9755 train loss:3.504822 +step:9756 train loss:3.504836 +step:9757 train loss:3.470241 +step:9758 train loss:3.463918 +step:9759 train loss:3.511263 +step:9760 train loss:3.455178 +step:9761 train loss:3.497211 +step:9762 train loss:3.491928 +step:9763 train loss:3.514146 +step:9764 train loss:3.496601 +step:9765 train loss:3.460979 +step:9766 train loss:3.551686 +step:9767 train loss:3.505726 +step:9768 train loss:3.519623 +step:9769 train loss:3.472897 +step:9770 train loss:3.473050 +step:9771 train loss:3.521934 +step:9772 train loss:3.535355 +step:9773 train loss:3.512036 +step:9774 train loss:3.482409 +step:9775 train loss:3.569356 +step:9776 train loss:3.570645 +step:9777 train loss:3.461788 +step:9778 train loss:3.464579 +step:9779 train loss:3.472348 +step:9780 train loss:3.467881 +step:9781 train loss:3.486098 +step:9782 train loss:3.567302 +step:9783 train loss:3.475945 +step:9784 train loss:3.503911 +step:9785 train loss:3.494204 +step:9786 train loss:3.531729 +step:9787 train loss:3.554224 +step:9788 train loss:3.483856 +step:9789 train loss:3.492086 +step:9790 train loss:3.454725 +step:9791 train loss:3.501320 +step:9792 train loss:3.516972 +step:9793 train loss:3.533576 +step:9794 train loss:3.511785 +step:9795 train loss:3.515959 +step:9796 train loss:3.500688 +step:9797 train loss:3.494156 +step:9798 train loss:3.509486 +step:9799 train loss:3.513700 +step:9800 train loss:3.584918 +step:9801 train loss:3.508960 +step:9802 train loss:3.566864 +step:9803 train loss:3.423720 +step:9804 train loss:3.518547 +step:9805 train loss:3.524364 +step:9806 train loss:3.498552 +step:9807 train loss:3.466909 +step:9808 train loss:3.383473 +step:9809 train loss:3.571784 +step:9810 train loss:3.526435 +step:9811 train loss:3.513815 +step:9812 train loss:3.486055 +step:9813 train loss:3.567781 +step:9814 train loss:3.557597 +step:9815 train loss:3.464496 +step:9816 train loss:3.463674 +step:9817 train loss:3.495794 +step:9818 train loss:3.522878 +step:9819 train loss:3.491476 +step:9820 train loss:3.562274 +step:9821 train loss:3.538871 +step:9822 train loss:3.515186 +step:9823 train loss:3.573155 +step:9824 train loss:3.478612 +step:9825 train loss:3.562426 +step:9826 train loss:3.558603 +step:9827 train loss:3.562271 +step:9828 train loss:3.480803 +step:9829 train loss:3.487733 +step:9830 train loss:3.471661 +step:9831 train loss:3.536313 +step:9832 train loss:3.547463 +step:9833 train loss:3.458019 +step:9834 train loss:3.511309 +step:9835 train loss:3.479532 +step:9836 train loss:3.539633 +step:9837 train loss:3.513187 +step:9838 train loss:3.552564 +step:9839 train loss:3.527960 +step:9840 train loss:3.492696 +step:9841 train loss:3.501342 +step:9842 train loss:3.562611 +step:9843 train loss:3.554378 +step:9844 train loss:3.509205 +step:9845 train loss:3.534956 +step:9846 train loss:3.471654 +step:9847 train loss:3.600597 +step:9848 train loss:3.524462 +step:9849 train loss:3.551449 +step:9850 train loss:3.467647 +step:9851 train loss:3.523151 +step:9852 train loss:3.485066 +step:9853 train loss:3.508621 +step:9854 train loss:3.518409 +step:9855 train loss:3.468126 +step:9856 train loss:3.470602 +step:9857 train loss:3.462931 +step:9858 train loss:3.525466 +step:9859 train loss:3.443979 +step:9860 train loss:3.682139 +step:9861 train loss:3.510187 +step:9862 train loss:3.475784 +step:9863 train loss:3.457952 +step:9864 train loss:3.581739 +step:9865 train loss:3.457198 +step:9866 train loss:3.499476 +step:9867 train loss:3.497746 +step:9868 train loss:3.555451 
+step:9869 train loss:3.515373 +step:9870 train loss:3.490409 +step:9871 train loss:3.529286 +step:9872 train loss:3.477927 +step:9873 train loss:3.522608 +step:9874 train loss:3.491901 +step:9875 train loss:3.492300 +step:9876 train loss:3.459188 +step:9877 train loss:3.511322 +step:9878 train loss:3.542376 +step:9879 train loss:3.541468 +step:9880 train loss:3.475425 +step:9881 train loss:3.525637 +step:9882 train loss:3.485417 +step:9883 train loss:3.496811 +step:9884 train loss:3.487616 +step:9885 train loss:3.553373 +step:9886 train loss:3.518101 +step:9887 train loss:3.522976 +step:9888 train loss:3.543969 +step:9889 train loss:3.577528 +step:9890 train loss:3.487618 +step:9891 train loss:3.493342 +step:9892 train loss:3.464658 +step:9893 train loss:3.586308 +step:9894 train loss:3.494987 +step:9895 train loss:3.432363 +step:9896 train loss:3.585414 +step:9897 train loss:3.461821 +step:9898 train loss:3.531927 +step:9899 train loss:3.509365 +step:9900 train loss:3.554141 +step:9901 train loss:3.475929 +step:9902 train loss:3.520504 +step:9903 train loss:3.492399 +step:9904 train loss:3.544436 +step:9905 train loss:3.447475 +step:9906 train loss:3.488442 +step:9907 train loss:3.495133 +step:9908 train loss:3.491839 +step:9909 train loss:3.510376 +step:9910 train loss:3.532841 +step:9911 train loss:3.617076 +step:9912 train loss:3.494855 +step:9913 train loss:3.494096 +step:9914 train loss:3.506528 +step:9915 train loss:3.504181 +step:9916 train loss:3.454840 +step:9917 train loss:3.490856 +step:9918 train loss:3.489734 +step:9919 train loss:3.650067 +step:9920 train loss:3.441671 +step:9921 train loss:3.528717 +step:9922 train loss:3.489833 +step:9923 train loss:3.545605 +step:9924 train loss:3.460312 +step:9925 train loss:3.520541 +step:9926 train loss:3.499535 +step:9927 train loss:3.540576 +step:9928 train loss:3.470406 +step:9929 train loss:3.506606 +step:9930 train loss:3.597164 +step:9931 train loss:3.562658 +step:9932 train loss:3.449381 +step:9933 train loss:3.541533 +step:9934 train loss:3.469206 +step:9935 train loss:3.577721 +step:9936 train loss:3.483621 +step:9937 train loss:3.510836 +step:9938 train loss:3.495869 +step:9939 train loss:3.561442 +step:9940 train loss:3.596257 +step:9941 train loss:3.475296 +step:9942 train loss:3.516998 +step:9943 train loss:3.655534 +step:9944 train loss:3.515279 +step:9945 train loss:3.542219 +step:9946 train loss:3.507776 +step:9947 train loss:3.458839 +step:9948 train loss:3.502197 +step:9949 train loss:3.395994 +step:9950 train loss:3.544324 +step:9951 train loss:3.469625 +step:9952 train loss:3.541732 +step:9953 train loss:3.502543 +step:9954 train loss:3.558687 +step:9955 train loss:3.533888 +step:9956 train loss:3.535236 +step:9957 train loss:3.511909 +step:9958 train loss:3.566716 +step:9959 train loss:3.463942 +step:9960 train loss:3.497657 +step:9961 train loss:3.509359 +step:9962 train loss:3.554850 +step:9963 train loss:3.447801 +step:9964 train loss:3.501711 +step:9965 train loss:3.501249 +step:9966 train loss:3.560641 +step:9967 train loss:3.475961 +step:9968 train loss:3.539509 +step:9969 train loss:3.452056 +step:9970 train loss:3.491635 +step:9971 train loss:3.538513 +step:9972 train loss:3.559189 +step:9973 train loss:3.535625 +step:9974 train loss:3.521875 +step:9975 train loss:3.491464 +step:9976 train loss:3.450627 +step:9977 train loss:3.501027 +step:9978 train loss:3.498405 +step:9979 train loss:3.511257 +step:9980 train loss:3.563829 +step:9981 train loss:3.473401 +step:9982 train loss:3.537745 +step:9983 train 
loss:3.452933 +step:9984 train loss:3.518433 +step:9985 train loss:3.460718 +step:9986 train loss:3.515728 +step:9987 train loss:3.574917 +step:9988 train loss:3.574725 +step:9989 train loss:3.464916 +step:9990 train loss:3.605439 +step:9991 train loss:3.448492 +step:9992 train loss:3.530294 +step:9993 train loss:3.520254 +step:9994 train loss:3.632527 +step:9995 train loss:3.572690 +step:9996 train loss:3.493098 +step:9997 train loss:3.530017 +step:9998 train loss:3.583281 +step:9999 train loss:3.546678 +step:10000 validation loss:3.452084 total_sharp:1.8055e-04 L1_sharp:3.6862e-05 L2_sharp:1.0770e-05 L3_sharp:1.3068e-05 L4_sharp:1.8677e-05 L5_sharp:2.5264e-05 L6_sharp:1.3767e-05 L7_sharp:1.7573e-05 L8_sharp:4.2446e-05 L9_sharp:6.2082e-05 L10_sharp:7.1550e-05 L11_sharp:7.0306e-05 L12_sharp:2.3446e-04 total_fnorm:1.1032e+01 total_l1_linf:9.7670e+04 total_spectral:1.1032e+01 L1_fnorm:2.5990e+00 L2_fnorm:2.5684e+00 L3_fnorm:2.5806e+00 L4_fnorm:2.5618e+00 L5_fnorm:2.5279e+00 L6_fnorm:2.5843e+00 L7_fnorm:2.6424e+00 L8_fnorm:2.5796e+00 L9_fnorm:2.5923e+00 L10_fnorm:2.5847e+00 L11_fnorm:2.6217e+00 L12_fnorm:2.5539e+00 L1_l1linf:2.6270e+00 L2_l1linf:2.6812e+00 L3_l1linf:2.7222e+00 L4_l1linf:2.6663e+00 L5_l1linf:2.5543e+00 L6_l1linf:2.5812e+00 L7_l1linf:2.7497e+00 L8_l1linf:2.9380e+00 L9_l1linf:3.0084e+00 L10_l1linf:3.1281e+00 L11_l1linf:3.2335e+00 L12_l1linf:3.1180e+00 L1_spectral:3.5630e-01 L2_spectral:3.3612e-01 L3_spectral:3.1209e-01 L4_spectral:3.3325e-01 L5_spectral:2.7947e-01 L6_spectral:2.5632e-01 L7_spectral:3.1610e-01 L8_spectral:3.8117e-01 L9_spectral:4.4244e-01 L10_spectral:4.7524e-01 L11_spectral:4.7754e-01 L12_spectral:5.0176e-01 ip_v_neg_g:7.9290e-03 cos_v_neg_g:1.6945e-03 v_norm:1.1032e+01 g_norm:4.2416e-01 hv_norm:1.6835e-01 cos_v_hv:1.1831e-02 hg_norm:3.2975e+00 cos_g_hg:4.1730e-01 v_par:2.4388e-03 v_perp:1.1032e+01 L1_cos_v_neg_g:-3.5544e-04 L1_v_norm:2.5990e+00 L2_cos_v_neg_g:-6.5124e-04 L2_v_norm:2.5684e+00 L3_cos_v_neg_g:1.3169e-05 L3_v_norm:2.5806e+00 L4_cos_v_neg_g:-2.7136e-04 L4_v_norm:2.5618e+00 L5_cos_v_neg_g:-1.1043e-03 L5_v_norm:2.5279e+00 L6_cos_v_neg_g:5.4593e-04 L6_v_norm:2.5843e+00 L7_cos_v_neg_g:1.0215e-03 L7_v_norm:2.6424e+00 L8_cos_v_neg_g:3.6171e-03 L8_v_norm:2.5796e+00 L9_cos_v_neg_g:5.1133e-03 L9_v_norm:2.5923e+00 L10_cos_v_neg_g:8.1381e-03 L10_v_norm:2.5847e+00 L11_cos_v_neg_g:1.1846e-02 L11_v_norm:2.6217e+00 L12_cos_v_neg_g:1.5625e-02 L12_v_norm:2.5539e+00 diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/config.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..7307122d57d0b3d792a71fdb90179063ebbf1cfa --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/adam_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.01, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 1, 
+ "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "adam", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "9edc1e5f-66fa-43b3-a3e9-58c6c9f9524b", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..3244a6d7a146b53085ae48cfa2d5b6b2631ed832 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 17.753005981445312, + "total_l1_linf_norm": 151032.625, + "total_spectral_norm": 17.753005981445312, + "layer_1_update_fnorm": 3.330369234085083, + "layer_1_max_l1_linf_norm": 5.40902853012085, + "layer_1_max_spectral_norm": 0.7689023613929749, + "layer_2_update_fnorm": 3.234208345413208, + "layer_2_max_l1_linf_norm": 4.314440727233887, + "layer_2_max_spectral_norm": 0.6776873469352722, + "layer_3_update_fnorm": 2.915846586227417, + "layer_3_max_l1_linf_norm": 4.530951023101807, + "layer_3_max_spectral_norm": 0.5849562287330627, + "layer_4_update_fnorm": 3.20123291015625, + "layer_4_max_l1_linf_norm": 4.292622089385986, + "layer_4_max_spectral_norm": 0.5749169588088989, + "layer_5_update_fnorm": 3.504340887069702, + "layer_5_max_l1_linf_norm": 4.477747917175293, + "layer_5_max_spectral_norm": 0.6410690546035767, + "layer_6_update_fnorm": 3.6283771991729736, + "layer_6_max_l1_linf_norm": 4.347526550292969, + "layer_6_max_spectral_norm": 0.5942925810813904, + "layer_7_update_fnorm": 3.8500821590423584, + "layer_7_max_l1_linf_norm": 4.971179008483887, + "layer_7_max_spectral_norm": 0.6699299216270447, + "layer_8_update_fnorm": 3.8416690826416016, + "layer_8_max_l1_linf_norm": 5.030657768249512, + "layer_8_max_spectral_norm": 0.7283706068992615, + "layer_9_update_fnorm": 3.974426746368408, + "layer_9_max_l1_linf_norm": 5.282060623168945, + "layer_9_max_spectral_norm": 0.8334036469459534, + "layer_10_update_fnorm": 4.032845497131348, + "layer_10_max_l1_linf_norm": 5.795698165893555, + "layer_10_max_spectral_norm": 0.9395682215690613, + "layer_11_update_fnorm": 4.093038558959961, + "layer_11_max_l1_linf_norm": 7.5553717613220215, + "layer_11_max_spectral_norm": 0.9874979853630066, + "layer_12_update_fnorm": 3.9101104736328125, + "layer_12_max_l1_linf_norm": 7.027392387390137, + "layer_12_max_spectral_norm": 0.998779296875, + "total_sharpness": 0.00046779928379692137, + "ip_v_neg_g": 0.08318533003330231, + "cos_v_neg_g": 0.010132924653589725, + "v_norm": 17.753005981445312, + "g_norm": 0.4624236226081848, + "hv_norm": 0.45060303807258606, + "cos_v_hv": 0.01843050867319107, + "hg_norm": 2.9104409217834473, + "cos_g_hg": 0.5930625200271606, + "v_parallel_norm": 0.018938574939966202, + "v_perp_norm": 17.75299644470215, + "layer_1_v_norm": 3.330369234085083, + "layer_1_cos_v_neg_g": 0.08174244314432144, + "layer_2_v_norm": 3.234208345413208, + "layer_2_cos_v_neg_g": 0.031815189868211746, + "layer_3_v_norm": 2.915846586227417, + "layer_3_cos_v_neg_g": 0.04021076485514641, + "layer_4_v_norm": 3.20123291015625, + "layer_4_cos_v_neg_g": 0.03439107909798622, + "layer_5_v_norm": 
3.504340887069702, + "layer_5_cos_v_neg_g": 0.0342828594148159, + "layer_6_v_norm": 3.6283774375915527, + "layer_6_cos_v_neg_g": 0.023101454600691795, + "layer_7_v_norm": 3.8500821590423584, + "layer_7_cos_v_neg_g": 0.018771953880786896, + "layer_8_v_norm": 3.8416690826416016, + "layer_8_cos_v_neg_g": 0.027604492381215096, + "layer_9_v_norm": 3.974426746368408, + "layer_9_cos_v_neg_g": 0.032618265599012375, + "layer_10_v_norm": 4.032845497131348, + "layer_10_cos_v_neg_g": 0.038703396916389465, + "layer_11_v_norm": 4.093038558959961, + "layer_11_cos_v_neg_g": 0.03541378676891327, + "layer_12_v_norm": 3.9101104736328125, + "layer_12_cos_v_neg_g": 0.048419155180454254, + "layer_1_sharpness": 0.0004591096949297935, + "layer_2_sharpness": 7.203311542980373e-05, + "layer_3_sharpness": 0.0002548236516304314, + "layer_4_sharpness": 0.00013898151519242674, + "layer_5_sharpness": 9.165330993710086e-05, + "layer_6_sharpness": 3.800193371716887e-05, + "layer_7_sharpness": 2.5111419745371677e-05, + "layer_8_sharpness": 6.35613760096021e-05, + "layer_9_sharpness": 0.00010671128984540701, + "layer_10_sharpness": 0.00013898138422518969, + "layer_11_sharpness": 0.0001495398610131815, + "layer_12_sharpness": 0.00038797041634097695 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_10000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..50feddded08773bd88453e4c8c58dcc2102a7602 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 21.52871322631836, + "total_l1_linf_norm": 189386.84375, + "total_spectral_norm": 21.528711318969727, + "layer_1_update_fnorm": 5.005324363708496, + "layer_1_max_l1_linf_norm": 4.810749530792236, + "layer_1_max_spectral_norm": 0.9751498103141785, + "layer_2_update_fnorm": 4.850612163543701, + "layer_2_max_l1_linf_norm": 4.8797125816345215, + "layer_2_max_spectral_norm": 0.6314032673835754, + "layer_3_update_fnorm": 4.920040130615234, + "layer_3_max_l1_linf_norm": 5.032121658325195, + "layer_3_max_spectral_norm": 0.6079773306846619, + "layer_4_update_fnorm": 4.851810455322266, + "layer_4_max_l1_linf_norm": 5.07845401763916, + "layer_4_max_spectral_norm": 0.6749182343482971, + "layer_5_update_fnorm": 5.053580284118652, + "layer_5_max_l1_linf_norm": 5.0613555908203125, + "layer_5_max_spectral_norm": 0.5547962188720703, + "layer_6_update_fnorm": 5.122228145599365, + "layer_6_max_l1_linf_norm": 5.039349555969238, + "layer_6_max_spectral_norm": 0.5492259860038757, + "layer_7_update_fnorm": 5.1998724937438965, + "layer_7_max_l1_linf_norm": 5.363152027130127, + "layer_7_max_spectral_norm": 0.5696063041687012, + "layer_8_update_fnorm": 5.141791820526123, + "layer_8_max_l1_linf_norm": 5.35352897644043, + "layer_8_max_spectral_norm": 0.7055315971374512, + "layer_9_update_fnorm": 5.168773651123047, + "layer_9_max_l1_linf_norm": 6.090542316436768, + "layer_9_max_spectral_norm": 0.8640213012695312, + "layer_10_update_fnorm": 5.128912448883057, + "layer_10_max_l1_linf_norm": 5.917961120605469, + "layer_10_max_spectral_norm": 0.9061164259910583, + "layer_11_update_fnorm": 5.144669055938721, + "layer_11_max_l1_linf_norm": 6.138904571533203, + "layer_11_max_spectral_norm": 0.8804420828819275, + "layer_12_update_fnorm": 4.890619277954102, + "layer_12_max_l1_linf_norm": 5.817718505859375, + 
"layer_12_max_spectral_norm": 0.9663490056991577, + "total_sharpness": 6.070451490813866e-05, + "ip_v_neg_g": 0.010726116597652435, + "cos_v_neg_g": 0.0013153983745723963, + "v_norm": 21.52871322631836, + "g_norm": 0.3787626326084137, + "hv_norm": 0.18307390809059143, + "cos_v_hv": 0.0071385931223630905, + "hg_norm": 4.447868824005127, + "cos_g_hg": 0.5493252873420715, + "v_parallel_norm": 0.006239546928554773, + "v_perp_norm": 21.52871322631836, + "layer_1_v_norm": 5.005324363708496, + "layer_1_cos_v_neg_g": -0.0009346444276161492, + "layer_2_v_norm": 4.850612163543701, + "layer_2_cos_v_neg_g": 0.0007060820935294032, + "layer_3_v_norm": 4.920040130615234, + "layer_3_cos_v_neg_g": 0.0008361838408745825, + "layer_4_v_norm": 4.851810455322266, + "layer_4_cos_v_neg_g": 0.0017743363277986646, + "layer_5_v_norm": 5.053580284118652, + "layer_5_cos_v_neg_g": 0.0009446904296055436, + "layer_6_v_norm": 5.122228145599365, + "layer_6_cos_v_neg_g": 0.001124925329349935, + "layer_7_v_norm": 5.1998724937438965, + "layer_7_cos_v_neg_g": 0.0005020180833525956, + "layer_8_v_norm": 5.141791820526123, + "layer_8_cos_v_neg_g": 0.0043146247044205666, + "layer_9_v_norm": 5.168773651123047, + "layer_9_cos_v_neg_g": 0.0068411012180149555, + "layer_10_v_norm": 5.128912448883057, + "layer_10_cos_v_neg_g": 0.00851409137248993, + "layer_11_v_norm": 5.144669055938721, + "layer_11_cos_v_neg_g": 0.01106720045208931, + "layer_12_v_norm": 4.890619277954102, + "layer_12_cos_v_neg_g": 0.013767681084573269, + "layer_1_sharpness": 1.539674667583313e-05, + "layer_2_sharpness": 5.066513040219434e-06, + "layer_3_sharpness": 1.3746434888162185e-05, + "layer_4_sharpness": 1.2274786058696918e-05, + "layer_5_sharpness": 6.4302521423087455e-06, + "layer_6_sharpness": 4.582664587360341e-06, + "layer_7_sharpness": 5.290644821798196e-06, + "layer_8_sharpness": 9.46675118029816e-06, + "layer_9_sharpness": 1.7410609871149063e-05, + "layer_10_sharpness": 1.9749339116970077e-05, + "layer_11_sharpness": 1.3963377568870783e-05, + "layer_12_sharpness": 9.986754594137892e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..2e440973f914f36d9ea222b8f770adea8a34c11d --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 19.380096435546875, + "total_l1_linf_norm": 167347.71875, + "total_spectral_norm": 19.380096435546875, + "layer_1_update_fnorm": 4.03411340713501, + "layer_1_max_l1_linf_norm": 4.453674793243408, + "layer_1_max_spectral_norm": 0.6142440438270569, + "layer_2_update_fnorm": 3.899470329284668, + "layer_2_max_l1_linf_norm": 4.5614213943481445, + "layer_2_max_spectral_norm": 0.5756098031997681, + "layer_3_update_fnorm": 3.742245674133301, + "layer_3_max_l1_linf_norm": 4.701664924621582, + "layer_3_max_spectral_norm": 0.6089986562728882, + "layer_4_update_fnorm": 3.978976011276245, + "layer_4_max_l1_linf_norm": 4.259932041168213, + "layer_4_max_spectral_norm": 0.6333116292953491, + "layer_5_update_fnorm": 4.20025634765625, + "layer_5_max_l1_linf_norm": 4.689790725708008, + "layer_5_max_spectral_norm": 0.6509745121002197, + "layer_6_update_fnorm": 4.392881870269775, + "layer_6_max_l1_linf_norm": 4.623373985290527, + "layer_6_max_spectral_norm": 0.6896891593933105, + 
"layer_7_update_fnorm": 4.525204181671143, + "layer_7_max_l1_linf_norm": 5.119574546813965, + "layer_7_max_spectral_norm": 0.6648283004760742, + "layer_8_update_fnorm": 4.463282108306885, + "layer_8_max_l1_linf_norm": 5.237505912780762, + "layer_8_max_spectral_norm": 0.7314948439598083, + "layer_9_update_fnorm": 4.424633979797363, + "layer_9_max_l1_linf_norm": 5.654844284057617, + "layer_9_max_spectral_norm": 0.7806624174118042, + "layer_10_update_fnorm": 4.407309055328369, + "layer_10_max_l1_linf_norm": 5.567608833312988, + "layer_10_max_spectral_norm": 0.7886268496513367, + "layer_11_update_fnorm": 4.383020401000977, + "layer_11_max_l1_linf_norm": 5.5035080909729, + "layer_11_max_spectral_norm": 0.8224895000457764, + "layer_12_update_fnorm": 4.103225231170654, + "layer_12_max_l1_linf_norm": 5.909735679626465, + "layer_12_max_spectral_norm": 0.8663978576660156, + "total_sharpness": 0.00022858752345200628, + "ip_v_neg_g": 0.04429827257990837, + "cos_v_neg_g": 0.005109037272632122, + "v_norm": 19.380096435546875, + "g_norm": 0.4473956823348999, + "hv_norm": 0.40919172763824463, + "cos_v_hv": 0.010826338082551956, + "hg_norm": 4.243737697601318, + "cos_g_hg": 0.5532695651054382, + "v_parallel_norm": 0.011277598328888416, + "v_perp_norm": 19.38009262084961, + "layer_1_v_norm": 4.03411340713501, + "layer_1_cos_v_neg_g": 0.030242063105106354, + "layer_2_v_norm": 3.899470329284668, + "layer_2_cos_v_neg_g": 0.012598937377333641, + "layer_3_v_norm": 3.742245674133301, + "layer_3_cos_v_neg_g": 0.010969361290335655, + "layer_4_v_norm": 3.978976011276245, + "layer_4_cos_v_neg_g": 0.01332606840878725, + "layer_5_v_norm": 4.20025634765625, + "layer_5_cos_v_neg_g": 0.01672576181590557, + "layer_6_v_norm": 4.392881870269775, + "layer_6_cos_v_neg_g": 0.01410659495741129, + "layer_7_v_norm": 4.525204181671143, + "layer_7_cos_v_neg_g": 0.015430031344294548, + "layer_8_v_norm": 4.463282108306885, + "layer_8_cos_v_neg_g": 0.01699223183095455, + "layer_9_v_norm": 4.424633979797363, + "layer_9_cos_v_neg_g": 0.020768631249666214, + "layer_10_v_norm": 4.407309055328369, + "layer_10_cos_v_neg_g": 0.022193629294633865, + "layer_11_v_norm": 4.383020877838135, + "layer_11_cos_v_neg_g": 0.026408225297927856, + "layer_12_v_norm": 4.103225231170654, + "layer_12_cos_v_neg_g": 0.03191066533327103, + "layer_1_sharpness": 0.00011976534005952999, + "layer_2_sharpness": 1.4587299119739328e-05, + "layer_3_sharpness": 1.8757405996439047e-05, + "layer_4_sharpness": 2.1017956896685064e-05, + "layer_5_sharpness": 3.060375456698239e-05, + "layer_6_sharpness": 2.754397428361699e-05, + "layer_7_sharpness": 2.8279695470700972e-05, + "layer_8_sharpness": 4.4326916395220906e-05, + "layer_9_sharpness": 6.77474235999398e-05, + "layer_10_sharpness": 6.013798338244669e-05, + "layer_11_sharpness": 7.763678877381608e-05, + "layer_12_sharpness": 0.00026965441065840423 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..635dd4daa1f2abdc9da206d5bf914f6c5ce128c1 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 19.935556411743164, + "total_l1_linf_norm": 173113.578125, + "total_spectral_norm": 19.93556022644043, + "layer_1_update_fnorm": 4.299106597900391, + "layer_1_max_l1_linf_norm": 
4.427061557769775, + "layer_1_max_spectral_norm": 0.6659882068634033, + "layer_2_update_fnorm": 4.2860541343688965, + "layer_2_max_l1_linf_norm": 4.531638145446777, + "layer_2_max_spectral_norm": 0.561767041683197, + "layer_3_update_fnorm": 4.14844274520874, + "layer_3_max_l1_linf_norm": 4.588823318481445, + "layer_3_max_spectral_norm": 0.5656903982162476, + "layer_4_update_fnorm": 4.214837074279785, + "layer_4_max_l1_linf_norm": 4.333456993103027, + "layer_4_max_spectral_norm": 0.6683666706085205, + "layer_5_update_fnorm": 4.419923305511475, + "layer_5_max_l1_linf_norm": 4.6019287109375, + "layer_5_max_spectral_norm": 0.6434960961341858, + "layer_6_update_fnorm": 4.537291049957275, + "layer_6_max_l1_linf_norm": 4.5786614418029785, + "layer_6_max_spectral_norm": 0.5254372358322144, + "layer_7_update_fnorm": 4.626996994018555, + "layer_7_max_l1_linf_norm": 5.004222869873047, + "layer_7_max_spectral_norm": 0.5273209810256958, + "layer_8_update_fnorm": 4.555572986602783, + "layer_8_max_l1_linf_norm": 5.212281227111816, + "layer_8_max_spectral_norm": 0.5779269337654114, + "layer_9_update_fnorm": 4.553448677062988, + "layer_9_max_l1_linf_norm": 5.244572639465332, + "layer_9_max_spectral_norm": 0.7158926129341125, + "layer_10_update_fnorm": 4.556350231170654, + "layer_10_max_l1_linf_norm": 5.374973297119141, + "layer_10_max_spectral_norm": 0.7749281525611877, + "layer_11_update_fnorm": 4.575545787811279, + "layer_11_max_l1_linf_norm": 5.593353271484375, + "layer_11_max_spectral_norm": 0.756730854511261, + "layer_12_update_fnorm": 4.183842658996582, + "layer_12_max_l1_linf_norm": 5.351097106933594, + "layer_12_max_spectral_norm": 0.7702118754386902, + "total_sharpness": 0.00020387313270475715, + "ip_v_neg_g": 0.0359385684132576, + "cos_v_neg_g": 0.003618850139901042, + "v_norm": 19.935556411743164, + "g_norm": 0.4981519281864166, + "hv_norm": 0.39658647775650024, + "cos_v_hv": 0.010248268023133278, + "hg_norm": 5.462404251098633, + "cos_g_hg": 0.5937953591346741, + "v_parallel_norm": 0.005715859122574329, + "v_perp_norm": 19.935556411743164, + "layer_1_v_norm": 4.299106597900391, + "layer_1_cos_v_neg_g": 0.021333111450076103, + "layer_2_v_norm": 4.2860541343688965, + "layer_2_cos_v_neg_g": 0.014509390108287334, + "layer_3_v_norm": 4.148443222045898, + "layer_3_cos_v_neg_g": 0.020941931754350662, + "layer_4_v_norm": 4.214837074279785, + "layer_4_cos_v_neg_g": 0.011994188651442528, + "layer_5_v_norm": 4.419923305511475, + "layer_5_cos_v_neg_g": 0.011274858377873898, + "layer_6_v_norm": 4.537291049957275, + "layer_6_cos_v_neg_g": 0.008675028569996357, + "layer_7_v_norm": 4.626996994018555, + "layer_7_cos_v_neg_g": 0.0077060433104634285, + "layer_8_v_norm": 4.555572986602783, + "layer_8_cos_v_neg_g": 0.008268744684755802, + "layer_9_v_norm": 4.553448677062988, + "layer_9_cos_v_neg_g": 0.011234759353101254, + "layer_10_v_norm": 4.556350231170654, + "layer_10_cos_v_neg_g": 0.016917811706662178, + "layer_11_v_norm": 4.575545787811279, + "layer_11_cos_v_neg_g": 0.017356937751173973, + "layer_12_v_norm": 4.183842658996582, + "layer_12_cos_v_neg_g": 0.025328755378723145, + "layer_1_sharpness": 8.049274765653536e-05, + "layer_2_sharpness": 4.40658004663419e-05, + "layer_3_sharpness": 0.00023591065837536007, + "layer_4_sharpness": 6.537498120451346e-05, + "layer_5_sharpness": 3.3975647966144606e-05, + "layer_6_sharpness": 2.2143118258100003e-05, + "layer_7_sharpness": 1.5786028598085977e-05, + "layer_8_sharpness": 2.612629032228142e-05, + "layer_9_sharpness": 4.336524580139667e-05, + "layer_10_sharpness": 
5.608442734228447e-05, + "layer_11_sharpness": 4.975278352503665e-05, + "layer_12_sharpness": 0.0002160174335585907 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..e222d9dc144019ad66569398a528ebd1e4354356 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 20.212034225463867, + "total_l1_linf_norm": 176037.0, + "total_spectral_norm": 20.212034225463867, + "layer_1_update_fnorm": 4.503045558929443, + "layer_1_max_l1_linf_norm": 4.540431022644043, + "layer_1_max_spectral_norm": 0.7343961596488953, + "layer_2_update_fnorm": 4.39249324798584, + "layer_2_max_l1_linf_norm": 4.920454978942871, + "layer_2_max_spectral_norm": 0.6035897731781006, + "layer_3_update_fnorm": 4.269756317138672, + "layer_3_max_l1_linf_norm": 4.598601341247559, + "layer_3_max_spectral_norm": 0.5822853446006775, + "layer_4_update_fnorm": 4.283863544464111, + "layer_4_max_l1_linf_norm": 4.3443403244018555, + "layer_4_max_spectral_norm": 0.6805903315544128, + "layer_5_update_fnorm": 4.467389106750488, + "layer_5_max_l1_linf_norm": 4.708404541015625, + "layer_5_max_spectral_norm": 0.6103185415267944, + "layer_6_update_fnorm": 4.614538669586182, + "layer_6_max_l1_linf_norm": 4.647468566894531, + "layer_6_max_spectral_norm": 0.5503875613212585, + "layer_7_update_fnorm": 4.732608318328857, + "layer_7_max_l1_linf_norm": 4.997769832611084, + "layer_7_max_spectral_norm": 0.5273169875144958, + "layer_8_update_fnorm": 4.705328464508057, + "layer_8_max_l1_linf_norm": 5.662029266357422, + "layer_8_max_spectral_norm": 0.6567811369895935, + "layer_9_update_fnorm": 4.726790904998779, + "layer_9_max_l1_linf_norm": 5.415905475616455, + "layer_9_max_spectral_norm": 0.8277666568756104, + "layer_10_update_fnorm": 4.733288288116455, + "layer_10_max_l1_linf_norm": 6.389261245727539, + "layer_10_max_spectral_norm": 0.836116373538971, + "layer_11_update_fnorm": 4.71497917175293, + "layer_11_max_l1_linf_norm": 5.624500751495361, + "layer_11_max_spectral_norm": 0.7656236290931702, + "layer_12_update_fnorm": 4.319314479827881, + "layer_12_max_l1_linf_norm": 5.2711687088012695, + "layer_12_max_spectral_norm": 0.8919767737388611, + "total_sharpness": 0.0001821098558139056, + "ip_v_neg_g": 0.039004672318696976, + "cos_v_neg_g": 0.00441908510401845, + "v_norm": 20.212034225463867, + "g_norm": 0.4366909861564636, + "hv_norm": 0.4033339023590088, + "cos_v_hv": 0.009125963784754276, + "hg_norm": 3.6221578121185303, + "cos_g_hg": 0.592481791973114, + "v_parallel_norm": 0.00830780528485775, + "v_perp_norm": 20.212032318115234, + "layer_1_v_norm": 4.503045558929443, + "layer_1_cos_v_neg_g": 0.028999241068959236, + "layer_2_v_norm": 4.39249324798584, + "layer_2_cos_v_neg_g": 0.013850017450749874, + "layer_3_v_norm": 4.269756317138672, + "layer_3_cos_v_neg_g": 0.017163394019007683, + "layer_4_v_norm": 4.283863544464111, + "layer_4_cos_v_neg_g": 0.012377016246318817, + "layer_5_v_norm": 4.467389106750488, + "layer_5_cos_v_neg_g": 0.010531718842685223, + "layer_6_v_norm": 4.614538669586182, + "layer_6_cos_v_neg_g": 0.00847586989402771, + "layer_7_v_norm": 4.732608318328857, + "layer_7_cos_v_neg_g": 0.010614755563437939, + "layer_8_v_norm": 4.705328464508057, + "layer_8_cos_v_neg_g": 0.013197941705584526, + 
"layer_9_v_norm": 4.726790904998779, + "layer_9_cos_v_neg_g": 0.019348040223121643, + "layer_10_v_norm": 4.733288288116455, + "layer_10_cos_v_neg_g": 0.02069459669291973, + "layer_11_v_norm": 4.7149786949157715, + "layer_11_cos_v_neg_g": 0.021790198981761932, + "layer_12_v_norm": 4.319314479827881, + "layer_12_cos_v_neg_g": 0.032108135521411896, + "layer_1_sharpness": 8.037203224375844e-05, + "layer_2_sharpness": 1.5250667274813168e-05, + "layer_3_sharpness": 3.7431389500852674e-05, + "layer_4_sharpness": 2.7018197215511464e-05, + "layer_5_sharpness": 1.6766165572335012e-05, + "layer_6_sharpness": 1.2718039215542376e-05, + "layer_7_sharpness": 1.54723984451266e-05, + "layer_8_sharpness": 3.044899676751811e-05, + "layer_9_sharpness": 6.790639599785209e-05, + "layer_10_sharpness": 6.0948124882997945e-05, + "layer_11_sharpness": 4.509412974584848e-05, + "layer_12_sharpness": 0.0003861681907437742 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..d8779dc402c2011243fc263de2c27f2aeb27fb2f --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 20.993480682373047, + "total_l1_linf_norm": 183978.453125, + "total_spectral_norm": 20.993480682373047, + "layer_1_update_fnorm": 4.740405082702637, + "layer_1_max_l1_linf_norm": 4.458680629730225, + "layer_1_max_spectral_norm": 0.7740747332572937, + "layer_2_update_fnorm": 4.763822078704834, + "layer_2_max_l1_linf_norm": 4.921521186828613, + "layer_2_max_spectral_norm": 0.6485388278961182, + "layer_3_update_fnorm": 4.6187005043029785, + "layer_3_max_l1_linf_norm": 5.016980171203613, + "layer_3_max_spectral_norm": 0.6078768968582153, + "layer_4_update_fnorm": 4.618789196014404, + "layer_4_max_l1_linf_norm": 4.892423629760742, + "layer_4_max_spectral_norm": 0.6749882698059082, + "layer_5_update_fnorm": 4.790266990661621, + "layer_5_max_l1_linf_norm": 5.0911455154418945, + "layer_5_max_spectral_norm": 0.6045520901679993, + "layer_6_update_fnorm": 4.829002380371094, + "layer_6_max_l1_linf_norm": 4.802654266357422, + "layer_6_max_spectral_norm": 0.5454087257385254, + "layer_7_update_fnorm": 4.960903167724609, + "layer_7_max_l1_linf_norm": 5.313200950622559, + "layer_7_max_spectral_norm": 0.5906931161880493, + "layer_8_update_fnorm": 4.895791530609131, + "layer_8_max_l1_linf_norm": 5.547791481018066, + "layer_8_max_spectral_norm": 0.698998212814331, + "layer_9_update_fnorm": 4.916499137878418, + "layer_9_max_l1_linf_norm": 5.712166786193848, + "layer_9_max_spectral_norm": 0.8094433546066284, + "layer_10_update_fnorm": 4.948986530303955, + "layer_10_max_l1_linf_norm": 5.862912178039551, + "layer_10_max_spectral_norm": 0.8698210120201111, + "layer_11_update_fnorm": 5.05017614364624, + "layer_11_max_l1_linf_norm": 6.818748474121094, + "layer_11_max_spectral_norm": 0.8994287848472595, + "layer_12_update_fnorm": 4.800408363342285, + "layer_12_max_l1_linf_norm": 6.229004383087158, + "layer_12_max_spectral_norm": 1.0969685316085815, + "total_sharpness": 0.00016507500549778342, + "ip_v_neg_g": 0.042092472314834595, + "cos_v_neg_g": 0.004985675681382418, + "v_norm": 20.993480682373047, + "g_norm": 0.4021573066711426, + "hv_norm": 0.33004918694496155, + "cos_v_hv": 0.010499945841729641, + "hg_norm": 2.7430882453918457, 
+ "cos_g_hg": 0.5105940103530884, + "v_parallel_norm": 0.011691156774759293, + "v_perp_norm": 20.99347686767578, + "layer_1_v_norm": 4.740405082702637, + "layer_1_cos_v_neg_g": 0.02948734164237976, + "layer_2_v_norm": 4.763822078704834, + "layer_2_cos_v_neg_g": 0.014507218264043331, + "layer_3_v_norm": 4.6187005043029785, + "layer_3_cos_v_neg_g": 0.01667722687125206, + "layer_4_v_norm": 4.618789196014404, + "layer_4_cos_v_neg_g": 0.011308769695460796, + "layer_5_v_norm": 4.790266990661621, + "layer_5_cos_v_neg_g": 0.010570727288722992, + "layer_6_v_norm": 4.829002380371094, + "layer_6_cos_v_neg_g": 0.008642545901238918, + "layer_7_v_norm": 4.960903167724609, + "layer_7_cos_v_neg_g": 0.008598488755524158, + "layer_8_v_norm": 4.895791530609131, + "layer_8_cos_v_neg_g": 0.012784622609615326, + "layer_9_v_norm": 4.916499137878418, + "layer_9_cos_v_neg_g": 0.017540834844112396, + "layer_10_v_norm": 4.948986530303955, + "layer_10_cos_v_neg_g": 0.018951863050460815, + "layer_11_v_norm": 5.05017614364624, + "layer_11_cos_v_neg_g": 0.0219077467918396, + "layer_12_v_norm": 4.800408363342285, + "layer_12_cos_v_neg_g": 0.04038948565721512, + "layer_1_sharpness": 6.463723548222333e-05, + "layer_2_sharpness": 1.6293468434014358e-05, + "layer_3_sharpness": 3.617507536546327e-05, + "layer_4_sharpness": 2.2393798644770868e-05, + "layer_5_sharpness": 1.3308820598467719e-05, + "layer_6_sharpness": 9.035401490109507e-06, + "layer_7_sharpness": 1.1630129847617354e-05, + "layer_8_sharpness": 2.5385945264133625e-05, + "layer_9_sharpness": 3.8415746530517936e-05, + "layer_10_sharpness": 4.447413812158629e-05, + "layer_11_sharpness": 3.7678255466744304e-05, + "layer_12_sharpness": 0.0003874322574120015 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..ae04e46cd73fbf04019535d58faadffe26908976 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 20.528940200805664, + "total_l1_linf_norm": 179139.4375, + "total_spectral_norm": 20.528940200805664, + "layer_1_update_fnorm": 4.649204254150391, + "layer_1_max_l1_linf_norm": 4.523554801940918, + "layer_1_max_spectral_norm": 0.7451485991477966, + "layer_2_update_fnorm": 4.4841532707214355, + "layer_2_max_l1_linf_norm": 4.805505752563477, + "layer_2_max_spectral_norm": 0.4866662621498108, + "layer_3_update_fnorm": 4.399540901184082, + "layer_3_max_l1_linf_norm": 4.630030155181885, + "layer_3_max_spectral_norm": 0.5301638841629028, + "layer_4_update_fnorm": 4.433605670928955, + "layer_4_max_l1_linf_norm": 4.554644584655762, + "layer_4_max_spectral_norm": 0.7060633897781372, + "layer_5_update_fnorm": 4.648045539855957, + "layer_5_max_l1_linf_norm": 4.751317024230957, + "layer_5_max_spectral_norm": 0.5691937208175659, + "layer_6_update_fnorm": 4.767122268676758, + "layer_6_max_l1_linf_norm": 4.835308074951172, + "layer_6_max_spectral_norm": 0.5137003660202026, + "layer_7_update_fnorm": 4.847724437713623, + "layer_7_max_l1_linf_norm": 4.964856147766113, + "layer_7_max_spectral_norm": 0.5340455770492554, + "layer_8_update_fnorm": 4.817875385284424, + "layer_8_max_l1_linf_norm": 4.926020622253418, + "layer_8_max_spectral_norm": 0.6059513092041016, + "layer_9_update_fnorm": 4.799391269683838, + "layer_9_max_l1_linf_norm": 
4.902547836303711, + "layer_9_max_spectral_norm": 0.6418488621711731, + "layer_10_update_fnorm": 4.789061069488525, + "layer_10_max_l1_linf_norm": 5.199568748474121, + "layer_10_max_spectral_norm": 0.6974296569824219, + "layer_11_update_fnorm": 4.812004089355469, + "layer_11_max_l1_linf_norm": 5.710603713989258, + "layer_11_max_spectral_norm": 0.6624519228935242, + "layer_12_update_fnorm": 4.417119026184082, + "layer_12_max_l1_linf_norm": 5.328802108764648, + "layer_12_max_spectral_norm": 0.7328308820724487, + "total_sharpness": 9.011470683617517e-05, + "ip_v_neg_g": 0.019168242812156677, + "cos_v_neg_g": 0.0021983031183481216, + "v_norm": 20.528940200805664, + "g_norm": 0.42474493384361267, + "hv_norm": 0.24081575870513916, + "cos_v_hv": 0.007682052440941334, + "hg_norm": 2.9971518516540527, + "cos_g_hg": 0.5002729892730713, + "v_parallel_norm": 0.006585872266441584, + "v_perp_norm": 20.52893829345703, + "layer_1_v_norm": 4.649204254150391, + "layer_1_cos_v_neg_g": 0.01288689486682415, + "layer_2_v_norm": 4.4841532707214355, + "layer_2_cos_v_neg_g": 0.004973543342202902, + "layer_3_v_norm": 4.39954137802124, + "layer_3_cos_v_neg_g": 0.00571239972487092, + "layer_4_v_norm": 4.433605670928955, + "layer_4_cos_v_neg_g": 0.006050014868378639, + "layer_5_v_norm": 4.648045539855957, + "layer_5_cos_v_neg_g": 0.006787844002246857, + "layer_6_v_norm": 4.767122745513916, + "layer_6_cos_v_neg_g": 0.005629224237054586, + "layer_7_v_norm": 4.847724437713623, + "layer_7_cos_v_neg_g": 0.005576748866587877, + "layer_8_v_norm": 4.817875385284424, + "layer_8_cos_v_neg_g": 0.007584334351122379, + "layer_9_v_norm": 4.799391269683838, + "layer_9_cos_v_neg_g": 0.007891540415585041, + "layer_10_v_norm": 4.789061069488525, + "layer_10_cos_v_neg_g": 0.01011490449309349, + "layer_11_v_norm": 4.812004089355469, + "layer_11_cos_v_neg_g": 0.01055733859539032, + "layer_12_v_norm": 4.417119026184082, + "layer_12_cos_v_neg_g": 0.014949105679988861, + "layer_1_sharpness": 2.9686341804335825e-05, + "layer_2_sharpness": 5.505731223820476e-06, + "layer_3_sharpness": 1.9319913917570375e-05, + "layer_4_sharpness": 1.2548089216579683e-05, + "layer_5_sharpness": 1.2905802577733994e-05, + "layer_6_sharpness": 1.1420536793593783e-05, + "layer_7_sharpness": 1.0687147550925147e-05, + "layer_8_sharpness": 2.2420637833420187e-05, + "layer_9_sharpness": 2.562633562774863e-05, + "layer_10_sharpness": 3.08650778606534e-05, + "layer_11_sharpness": 2.408644104434643e-05, + "layer_12_sharpness": 0.00013690498599316925 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..471b37387f703bcdae688223a55e493445c314a2 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 20.813493728637695, + "total_l1_linf_norm": 181904.140625, + "total_spectral_norm": 20.81348991394043, + "layer_1_update_fnorm": 4.600827693939209, + "layer_1_max_l1_linf_norm": 4.617715358734131, + "layer_1_max_spectral_norm": 0.7429611086845398, + "layer_2_update_fnorm": 4.569876194000244, + "layer_2_max_l1_linf_norm": 4.638580322265625, + "layer_2_max_spectral_norm": 0.5271137356758118, + "layer_3_update_fnorm": 4.388751029968262, + "layer_3_max_l1_linf_norm": 4.601042747497559, + "layer_3_max_spectral_norm": 0.5455111861228943, 
+ "layer_4_update_fnorm": 4.5348405838012695, + "layer_4_max_l1_linf_norm": 4.604710578918457, + "layer_4_max_spectral_norm": 0.7049440145492554, + "layer_5_update_fnorm": 4.754286766052246, + "layer_5_max_l1_linf_norm": 4.7539191246032715, + "layer_5_max_spectral_norm": 0.5771114826202393, + "layer_6_update_fnorm": 4.87628173828125, + "layer_6_max_l1_linf_norm": 5.181018829345703, + "layer_6_max_spectral_norm": 0.5428074598312378, + "layer_7_update_fnorm": 4.955214977264404, + "layer_7_max_l1_linf_norm": 5.199823379516602, + "layer_7_max_spectral_norm": 0.5815059542655945, + "layer_8_update_fnorm": 4.904874801635742, + "layer_8_max_l1_linf_norm": 5.435468673706055, + "layer_8_max_spectral_norm": 0.7228012084960938, + "layer_9_update_fnorm": 4.9324727058410645, + "layer_9_max_l1_linf_norm": 6.314116954803467, + "layer_9_max_spectral_norm": 0.8595116138458252, + "layer_10_update_fnorm": 4.921615123748779, + "layer_10_max_l1_linf_norm": 6.023798942565918, + "layer_10_max_spectral_norm": 0.9210484623908997, + "layer_11_update_fnorm": 4.944281101226807, + "layer_11_max_l1_linf_norm": 6.3798699378967285, + "layer_11_max_spectral_norm": 0.8487975001335144, + "layer_12_update_fnorm": 4.591728210449219, + "layer_12_max_l1_linf_norm": 6.550904273986816, + "layer_12_max_spectral_norm": 0.9266603589057922, + "total_sharpness": 0.00011863371037179604, + "ip_v_neg_g": 0.028117068111896515, + "cos_v_neg_g": 0.0034351376816630363, + "v_norm": 20.813493728637695, + "g_norm": 0.39326104521751404, + "hv_norm": 0.37322697043418884, + "cos_v_hv": 0.006615765392780304, + "hg_norm": 3.7591323852539062, + "cos_g_hg": 0.641351044178009, + "v_parallel_norm": 0.008924291469156742, + "v_perp_norm": 20.813491821289062, + "layer_1_v_norm": 4.600827693939209, + "layer_1_cos_v_neg_g": 0.014446994289755821, + "layer_2_v_norm": 4.569876194000244, + "layer_2_cos_v_neg_g": 0.00561875244602561, + "layer_3_v_norm": 4.388751029968262, + "layer_3_cos_v_neg_g": 0.004993709735572338, + "layer_4_v_norm": 4.5348405838012695, + "layer_4_cos_v_neg_g": 0.005197419319301844, + "layer_5_v_norm": 4.754286766052246, + "layer_5_cos_v_neg_g": 0.004889883566647768, + "layer_6_v_norm": 4.87628173828125, + "layer_6_cos_v_neg_g": 0.0054909042082726955, + "layer_7_v_norm": 4.955214977264404, + "layer_7_cos_v_neg_g": 0.007795338053256273, + "layer_8_v_norm": 4.904874801635742, + "layer_8_cos_v_neg_g": 0.01293869223445654, + "layer_9_v_norm": 4.9324727058410645, + "layer_9_cos_v_neg_g": 0.018436435610055923, + "layer_10_v_norm": 4.921615123748779, + "layer_10_cos_v_neg_g": 0.0217919759452343, + "layer_11_v_norm": 4.944281578063965, + "layer_11_cos_v_neg_g": 0.02103576250374317, + "layer_12_v_norm": 4.591728210449219, + "layer_12_cos_v_neg_g": 0.031712234020233154, + "layer_1_sharpness": 2.3049415176501498e-05, + "layer_2_sharpness": 3.5698965348274214e-06, + "layer_3_sharpness": 3.847667812806321e-06, + "layer_4_sharpness": 1.0989756447088439e-05, + "layer_5_sharpness": 8.340230124304071e-06, + "layer_6_sharpness": 6.9429570430656895e-06, + "layer_7_sharpness": 9.021215191751253e-06, + "layer_8_sharpness": 2.6984422220266424e-05, + "layer_9_sharpness": 4.687091495725326e-05, + "layer_10_sharpness": 6.015405961079523e-05, + "layer_11_sharpness": 3.459460276644677e-05, + "layer_12_sharpness": 0.0002747340186033398 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_4500.json new 
file mode 100644 index 0000000000000000000000000000000000000000..f0ef8a71273f53a6034a7bdb920631a72b9d2b62 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 21.22403907775879, + "total_l1_linf_norm": 186008.546875, + "total_spectral_norm": 21.224037170410156, + "layer_1_update_fnorm": 4.887924671173096, + "layer_1_max_l1_linf_norm": 4.631319522857666, + "layer_1_max_spectral_norm": 0.8322649598121643, + "layer_2_update_fnorm": 4.7532830238342285, + "layer_2_max_l1_linf_norm": 4.804813385009766, + "layer_2_max_spectral_norm": 0.5728445649147034, + "layer_3_update_fnorm": 4.764646053314209, + "layer_3_max_l1_linf_norm": 4.978477478027344, + "layer_3_max_spectral_norm": 0.6046046018600464, + "layer_4_update_fnorm": 4.73769998550415, + "layer_4_max_l1_linf_norm": 5.186203479766846, + "layer_4_max_spectral_norm": 0.6900038123130798, + "layer_5_update_fnorm": 4.894599437713623, + "layer_5_max_l1_linf_norm": 5.1037774085998535, + "layer_5_max_spectral_norm": 0.568708598613739, + "layer_6_update_fnorm": 4.9690165519714355, + "layer_6_max_l1_linf_norm": 4.956426620483398, + "layer_6_max_spectral_norm": 0.5546836256980896, + "layer_7_update_fnorm": 5.049245357513428, + "layer_7_max_l1_linf_norm": 5.251799583435059, + "layer_7_max_spectral_norm": 0.5700982809066772, + "layer_8_update_fnorm": 4.938060283660889, + "layer_8_max_l1_linf_norm": 5.902762413024902, + "layer_8_max_spectral_norm": 0.6744959354400635, + "layer_9_update_fnorm": 4.950692653656006, + "layer_9_max_l1_linf_norm": 5.738384246826172, + "layer_9_max_spectral_norm": 0.7120225429534912, + "layer_10_update_fnorm": 4.9507341384887695, + "layer_10_max_l1_linf_norm": 5.662572860717773, + "layer_10_max_spectral_norm": 0.7962280511856079, + "layer_11_update_fnorm": 5.00590181350708, + "layer_11_max_l1_linf_norm": 6.246165752410889, + "layer_11_max_spectral_norm": 0.8024412393569946, + "layer_12_update_fnorm": 4.737405776977539, + "layer_12_max_l1_linf_norm": 6.263922214508057, + "layer_12_max_spectral_norm": 0.9528371095657349, + "total_sharpness": 0.00010224289871985093, + "ip_v_neg_g": 0.027111956849694252, + "cos_v_neg_g": 0.003340605879202485, + "v_norm": 21.22403907775879, + "g_norm": 0.38239094614982605, + "hv_norm": 0.37898457050323486, + "cos_v_hv": 0.005725845694541931, + "hg_norm": 3.811384677886963, + "cos_g_hg": 0.5870416164398193, + "v_parallel_norm": 0.0073524778708815575, + "v_perp_norm": 21.224037170410156, + "layer_1_v_norm": 4.887924671173096, + "layer_1_cos_v_neg_g": 0.02121177315711975, + "layer_2_v_norm": 4.7532830238342285, + "layer_2_cos_v_neg_g": 0.008823125623166561, + "layer_3_v_norm": 4.764646530151367, + "layer_3_cos_v_neg_g": 0.012164879590272903, + "layer_4_v_norm": 4.73769998550415, + "layer_4_cos_v_neg_g": 0.009235315024852753, + "layer_5_v_norm": 4.894599437713623, + "layer_5_cos_v_neg_g": 0.008834095671772957, + "layer_6_v_norm": 4.9690165519714355, + "layer_6_cos_v_neg_g": 0.005664822645485401, + "layer_7_v_norm": 5.049245357513428, + "layer_7_cos_v_neg_g": 0.007472934201359749, + "layer_8_v_norm": 4.938060283660889, + "layer_8_cos_v_neg_g": 0.009864925406873226, + "layer_9_v_norm": 4.950692653656006, + "layer_9_cos_v_neg_g": 0.011893528513610363, + "layer_10_v_norm": 4.9507341384887695, + "layer_10_cos_v_neg_g": 0.015312360599637032, + "layer_11_v_norm": 5.00590181350708, + "layer_11_cos_v_neg_g": 0.019241295754909515, + "layer_12_v_norm": 4.737405776977539, + "layer_12_cos_v_neg_g": 
0.02784554474055767, + "layer_1_sharpness": 3.1452258554054424e-05, + "layer_2_sharpness": 2.7211349333811086e-06, + "layer_3_sharpness": 1.3270923773234244e-05, + "layer_4_sharpness": 1.5424682715092786e-05, + "layer_5_sharpness": 1.0277900400978979e-05, + "layer_6_sharpness": 7.287821063073352e-06, + "layer_7_sharpness": 9.917575880535878e-06, + "layer_8_sharpness": 2.1802605260745622e-05, + "layer_9_sharpness": 2.7057789338869043e-05, + "layer_10_sharpness": 3.0506813345709816e-05, + "layer_11_sharpness": 2.385580592090264e-05, + "layer_12_sharpness": 0.00021446970640681684 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..b38630e36b6e6ca6d645e4cd51a729c004ea0efb --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 12.852071762084961, + "total_l1_linf_norm": 110297.71875, + "total_spectral_norm": 12.852070808410645, + "layer_1_update_fnorm": 2.0411641597747803, + "layer_1_max_l1_linf_norm": 5.918032646179199, + "layer_1_max_spectral_norm": 0.7033106684684753, + "layer_2_update_fnorm": 2.3812382221221924, + "layer_2_max_l1_linf_norm": 2.6079516410827637, + "layer_2_max_spectral_norm": 0.4043969511985779, + "layer_3_update_fnorm": 2.452237606048584, + "layer_3_max_l1_linf_norm": 2.718398094177246, + "layer_3_max_spectral_norm": 0.5524057149887085, + "layer_4_update_fnorm": 2.5442287921905518, + "layer_4_max_l1_linf_norm": 3.018995761871338, + "layer_4_max_spectral_norm": 0.46977099776268005, + "layer_5_update_fnorm": 2.671553373336792, + "layer_5_max_l1_linf_norm": 3.2997875213623047, + "layer_5_max_spectral_norm": 0.6335961818695068, + "layer_6_update_fnorm": 2.750037908554077, + "layer_6_max_l1_linf_norm": 3.1972224712371826, + "layer_6_max_spectral_norm": 0.6125772595405579, + "layer_7_update_fnorm": 2.867539644241333, + "layer_7_max_l1_linf_norm": 3.080625057220459, + "layer_7_max_spectral_norm": 0.5836646556854248, + "layer_8_update_fnorm": 2.9349582195281982, + "layer_8_max_l1_linf_norm": 3.8543808460235596, + "layer_8_max_spectral_norm": 0.578004002571106, + "layer_9_update_fnorm": 2.967374324798584, + "layer_9_max_l1_linf_norm": 4.136910438537598, + "layer_9_max_spectral_norm": 0.650541365146637, + "layer_10_update_fnorm": 3.049791097640991, + "layer_10_max_l1_linf_norm": 4.915254592895508, + "layer_10_max_spectral_norm": 0.5867013335227966, + "layer_11_update_fnorm": 2.8937387466430664, + "layer_11_max_l1_linf_norm": 4.578734397888184, + "layer_11_max_spectral_norm": 0.6012803912162781, + "layer_12_update_fnorm": 2.8216912746429443, + "layer_12_max_l1_linf_norm": 3.8744850158691406, + "layer_12_max_spectral_norm": 0.6110708713531494, + "total_sharpness": 0.0015528410440310836, + "ip_v_neg_g": 0.16041193902492523, + "cos_v_neg_g": 0.02075372263789177, + "v_norm": 12.852071762084961, + "g_norm": 0.601405680179596, + "hv_norm": 0.5981667637825012, + "cos_v_hv": 0.033363983035087585, + "hg_norm": 5.640716552734375, + "cos_g_hg": 0.5648027658462524, + "v_parallel_norm": 0.01614959165453911, + "v_perp_norm": 12.852062225341797, + "layer_1_v_norm": 2.0411641597747803, + "layer_1_cos_v_neg_g": 0.1841733753681183, + "layer_2_v_norm": 2.3812382221221924, + "layer_2_cos_v_neg_g": 0.07611238956451416, + "layer_3_v_norm": 2.452237606048584, 
+ "layer_3_cos_v_neg_g": 0.08891744911670685, + "layer_4_v_norm": 2.5442287921905518, + "layer_4_cos_v_neg_g": 0.055630672723054886, + "layer_5_v_norm": 2.671553373336792, + "layer_5_cos_v_neg_g": 0.05877775698900223, + "layer_6_v_norm": 2.750037908554077, + "layer_6_cos_v_neg_g": 0.04470624402165413, + "layer_7_v_norm": 2.867539644241333, + "layer_7_cos_v_neg_g": 0.03458894416689873, + "layer_8_v_norm": 2.9349582195281982, + "layer_8_cos_v_neg_g": 0.029178308323025703, + "layer_9_v_norm": 2.967374324798584, + "layer_9_cos_v_neg_g": 0.03454971686005592, + "layer_10_v_norm": 3.049791097640991, + "layer_10_cos_v_neg_g": 0.03849432244896889, + "layer_11_v_norm": 2.8937387466430664, + "layer_11_cos_v_neg_g": 0.043682828545570374, + "layer_12_v_norm": 2.8216912746429443, + "layer_12_cos_v_neg_g": 0.03732800856232643, + "layer_1_sharpness": 0.005845577921718359, + "layer_2_sharpness": 0.0005575752002187073, + "layer_3_sharpness": 0.0018101349705830216, + "layer_4_sharpness": 0.0009527661604806781, + "layer_5_sharpness": 0.00035457726335152984, + "layer_6_sharpness": 0.00016906371456570923, + "layer_7_sharpness": 0.00011850405280711129, + "layer_8_sharpness": 0.00011943489516852424, + "layer_9_sharpness": 0.0001243913866346702, + "layer_10_sharpness": 0.00017898344958666712, + "layer_11_sharpness": 0.0002109704219037667, + "layer_12_sharpness": 0.0002616186102386564 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..dd67bd31e9142a76ebd7ecda25320ecaafe0dc82 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 21.046281814575195, + "total_l1_linf_norm": 184383.96875, + "total_spectral_norm": 21.046283721923828, + "layer_1_update_fnorm": 4.8058061599731445, + "layer_1_max_l1_linf_norm": 4.555922508239746, + "layer_1_max_spectral_norm": 0.7864726185798645, + "layer_2_update_fnorm": 4.710298538208008, + "layer_2_max_l1_linf_norm": 4.766532897949219, + "layer_2_max_spectral_norm": 0.5228899717330933, + "layer_3_update_fnorm": 4.672447204589844, + "layer_3_max_l1_linf_norm": 4.774532794952393, + "layer_3_max_spectral_norm": 0.5467727184295654, + "layer_4_update_fnorm": 4.617176532745361, + "layer_4_max_l1_linf_norm": 4.728889465332031, + "layer_4_max_spectral_norm": 0.6699106693267822, + "layer_5_update_fnorm": 4.844180583953857, + "layer_5_max_l1_linf_norm": 4.938606262207031, + "layer_5_max_spectral_norm": 0.5585166811943054, + "layer_6_update_fnorm": 4.9047932624816895, + "layer_6_max_l1_linf_norm": 4.920845031738281, + "layer_6_max_spectral_norm": 0.4825829565525055, + "layer_7_update_fnorm": 5.03452730178833, + "layer_7_max_l1_linf_norm": 5.519214630126953, + "layer_7_max_spectral_norm": 0.5514573454856873, + "layer_8_update_fnorm": 4.987414836883545, + "layer_8_max_l1_linf_norm": 5.830284118652344, + "layer_8_max_spectral_norm": 0.6297935843467712, + "layer_9_update_fnorm": 4.976454734802246, + "layer_9_max_l1_linf_norm": 5.479311943054199, + "layer_9_max_spectral_norm": 0.6892759203910828, + "layer_10_update_fnorm": 4.9416913986206055, + "layer_10_max_l1_linf_norm": 5.318289756774902, + "layer_10_max_spectral_norm": 0.7194643020629883, + "layer_11_update_fnorm": 4.999913215637207, + "layer_11_max_l1_linf_norm": 5.763460159301758, + 
"layer_11_max_spectral_norm": 0.7373834252357483, + "layer_12_update_fnorm": 4.625738143920898, + "layer_12_max_l1_linf_norm": 5.655141353607178, + "layer_12_max_spectral_norm": 0.8141258358955383, + "total_sharpness": 6.366040906868875e-05, + "ip_v_neg_g": 0.015644624829292297, + "cos_v_neg_g": 0.0011696373112499714, + "v_norm": 21.046281814575195, + "g_norm": 0.6355336904525757, + "hv_norm": 0.21021825075149536, + "cos_v_hv": 0.006373446900397539, + "hg_norm": 11.451029777526855, + "cos_g_hg": 0.77430260181427, + "v_parallel_norm": 0.003831098787486553, + "v_perp_norm": 21.046281814575195, + "layer_1_v_norm": 4.8058061599731445, + "layer_1_cos_v_neg_g": 0.005069628357887268, + "layer_2_v_norm": 4.710298538208008, + "layer_2_cos_v_neg_g": 0.00213826447725296, + "layer_3_v_norm": 4.672447204589844, + "layer_3_cos_v_neg_g": 0.0042365421541035175, + "layer_4_v_norm": 4.617176532745361, + "layer_4_cos_v_neg_g": 0.0029206941835582256, + "layer_5_v_norm": 4.844180583953857, + "layer_5_cos_v_neg_g": 0.0034382673911750317, + "layer_6_v_norm": 4.9047932624816895, + "layer_6_cos_v_neg_g": 0.00322708603926003, + "layer_7_v_norm": 5.03452730178833, + "layer_7_cos_v_neg_g": 0.004379891324788332, + "layer_8_v_norm": 4.987414836883545, + "layer_8_cos_v_neg_g": 0.0066948821768164635, + "layer_9_v_norm": 4.976454734802246, + "layer_9_cos_v_neg_g": 0.008881213143467903, + "layer_10_v_norm": 4.9416913986206055, + "layer_10_cos_v_neg_g": 0.008066941983997822, + "layer_11_v_norm": 4.999913215637207, + "layer_11_cos_v_neg_g": 0.008452148176729679, + "layer_12_v_norm": 4.625738143920898, + "layer_12_cos_v_neg_g": 0.006473914720118046, + "layer_1_sharpness": 2.0951010810676962e-05, + "layer_2_sharpness": 2.7002181468560593e-06, + "layer_3_sharpness": 6.65974584990181e-06, + "layer_4_sharpness": 1.2349198186711874e-05, + "layer_5_sharpness": 1.0024712537415326e-05, + "layer_6_sharpness": 6.2321892073669005e-06, + "layer_7_sharpness": 9.074696208699606e-06, + "layer_8_sharpness": 1.6031770428526215e-05, + "layer_9_sharpness": 2.1961550373816863e-05, + "layer_10_sharpness": 2.2448663003160618e-05, + "layer_11_sharpness": 1.7417494746041484e-05, + "layer_12_sharpness": 0.00011495386570459232 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..bf031984402476e929e22e5064b9564b4b0e9058 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 21.128538131713867, + "total_l1_linf_norm": 185397.28125, + "total_spectral_norm": 21.128538131713867, + "layer_1_update_fnorm": 4.779438495635986, + "layer_1_max_l1_linf_norm": 4.533751964569092, + "layer_1_max_spectral_norm": 0.8235089182853699, + "layer_2_update_fnorm": 4.726933479309082, + "layer_2_max_l1_linf_norm": 4.794520378112793, + "layer_2_max_spectral_norm": 0.5400344133377075, + "layer_3_update_fnorm": 4.742366790771484, + "layer_3_max_l1_linf_norm": 4.961355209350586, + "layer_3_max_spectral_norm": 0.5724079012870789, + "layer_4_update_fnorm": 4.650689601898193, + "layer_4_max_l1_linf_norm": 4.5994157791137695, + "layer_4_max_spectral_norm": 0.7069040536880493, + "layer_5_update_fnorm": 4.87357759475708, + "layer_5_max_l1_linf_norm": 4.906936168670654, + "layer_5_max_spectral_norm": 0.5422182679176331, + 
"layer_6_update_fnorm": 4.941236972808838, + "layer_6_max_l1_linf_norm": 4.932635307312012, + "layer_6_max_spectral_norm": 0.4776615798473358, + "layer_7_update_fnorm": 5.053431034088135, + "layer_7_max_l1_linf_norm": 5.139193534851074, + "layer_7_max_spectral_norm": 0.5322932004928589, + "layer_8_update_fnorm": 5.010265827178955, + "layer_8_max_l1_linf_norm": 5.323296546936035, + "layer_8_max_spectral_norm": 0.6088186502456665, + "layer_9_update_fnorm": 4.990063667297363, + "layer_9_max_l1_linf_norm": 5.640338897705078, + "layer_9_max_spectral_norm": 0.6720752716064453, + "layer_10_update_fnorm": 5.001919746398926, + "layer_10_max_l1_linf_norm": 5.585179328918457, + "layer_10_max_spectral_norm": 0.7021519541740417, + "layer_11_update_fnorm": 5.032907485961914, + "layer_11_max_l1_linf_norm": 5.429305076599121, + "layer_11_max_spectral_norm": 0.7030912637710571, + "layer_12_update_fnorm": 4.755692481994629, + "layer_12_max_l1_linf_norm": 6.929317474365234, + "layer_12_max_spectral_norm": 0.9439668655395508, + "total_sharpness": 7.423406350426376e-05, + "ip_v_neg_g": 0.015203924849629402, + "cos_v_neg_g": 0.0009580807527527213, + "v_norm": 21.128538131713867, + "g_norm": 0.7510764598846436, + "hv_norm": 0.326711505651474, + "cos_v_hv": 0.004800740629434586, + "hg_norm": 5.829710960388184, + "cos_g_hg": 0.6589349508285522, + "v_parallel_norm": 0.0036609950475394726, + "v_perp_norm": 21.128538131713867, + "layer_1_v_norm": 4.779438495635986, + "layer_1_cos_v_neg_g": 0.00513227004557848, + "layer_2_v_norm": 4.726933479309082, + "layer_2_cos_v_neg_g": 0.0003365623124409467, + "layer_3_v_norm": 4.742366790771484, + "layer_3_cos_v_neg_g": 0.0002709802065510303, + "layer_4_v_norm": 4.650689601898193, + "layer_4_cos_v_neg_g": 0.0016493016155436635, + "layer_5_v_norm": 4.87357759475708, + "layer_5_cos_v_neg_g": 0.0021220319904386997, + "layer_6_v_norm": 4.941236972808838, + "layer_6_cos_v_neg_g": 0.0019170950399711728, + "layer_7_v_norm": 5.053431034088135, + "layer_7_cos_v_neg_g": 0.00128794030752033, + "layer_8_v_norm": 5.010265827178955, + "layer_8_cos_v_neg_g": 0.0022609757725149393, + "layer_9_v_norm": 4.990063667297363, + "layer_9_cos_v_neg_g": 0.0028186566196382046, + "layer_10_v_norm": 5.001919746398926, + "layer_10_cos_v_neg_g": 0.003165000583976507, + "layer_11_v_norm": 5.032907485961914, + "layer_11_cos_v_neg_g": 0.004510608501732349, + "layer_12_v_norm": 4.755692481994629, + "layer_12_cos_v_neg_g": 0.015432581305503845, + "layer_1_sharpness": 3.1065977964317426e-05, + "layer_2_sharpness": 6.074282282497734e-06, + "layer_3_sharpness": 2.993905945913866e-05, + "layer_4_sharpness": 2.0302433767938055e-05, + "layer_5_sharpness": 6.5470794652355835e-06, + "layer_6_sharpness": 4.22374023401062e-06, + "layer_7_sharpness": 7.221331998152891e-06, + "layer_8_sharpness": 1.2602727110788692e-05, + "layer_9_sharpness": 1.2561852599901613e-05, + "layer_10_sharpness": 1.3175568710721564e-05, + "layer_11_sharpness": 1.2765632163791452e-05, + "layer_12_sharpness": 0.00019851673278026283 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..c9dd0459f9ccb12f4861b7e7eff2b71c241154bf --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 21.349227905273438, + 
"total_l1_linf_norm": 187583.328125, + "total_spectral_norm": 21.349225997924805, + "layer_1_update_fnorm": 4.93538761138916, + "layer_1_max_l1_linf_norm": 4.584513187408447, + "layer_1_max_spectral_norm": 0.8616721034049988, + "layer_2_update_fnorm": 4.84356164932251, + "layer_2_max_l1_linf_norm": 4.921326160430908, + "layer_2_max_spectral_norm": 0.5631535649299622, + "layer_3_update_fnorm": 4.831028938293457, + "layer_3_max_l1_linf_norm": 4.975825309753418, + "layer_3_max_spectral_norm": 0.5698674321174622, + "layer_4_update_fnorm": 4.788726806640625, + "layer_4_max_l1_linf_norm": 4.983154296875, + "layer_4_max_spectral_norm": 0.6857524514198303, + "layer_5_update_fnorm": 4.972353935241699, + "layer_5_max_l1_linf_norm": 4.999467849731445, + "layer_5_max_spectral_norm": 0.562804102897644, + "layer_6_update_fnorm": 5.010195255279541, + "layer_6_max_l1_linf_norm": 4.89307165145874, + "layer_6_max_spectral_norm": 0.482753723859787, + "layer_7_update_fnorm": 5.127697467803955, + "layer_7_max_l1_linf_norm": 5.23498010635376, + "layer_7_max_spectral_norm": 0.5689364671707153, + "layer_8_update_fnorm": 5.066160202026367, + "layer_8_max_l1_linf_norm": 5.82785701751709, + "layer_8_max_spectral_norm": 0.6783166527748108, + "layer_9_update_fnorm": 5.052347660064697, + "layer_9_max_l1_linf_norm": 6.197835445404053, + "layer_9_max_spectral_norm": 0.7235143780708313, + "layer_10_update_fnorm": 5.047706604003906, + "layer_10_max_l1_linf_norm": 6.596912384033203, + "layer_10_max_spectral_norm": 0.765771746635437, + "layer_11_update_fnorm": 5.077579021453857, + "layer_11_max_l1_linf_norm": 5.651093482971191, + "layer_11_max_spectral_norm": 0.7588379383087158, + "layer_12_update_fnorm": 4.776913166046143, + "layer_12_max_l1_linf_norm": 6.066167831420898, + "layer_12_max_spectral_norm": 0.866544783115387, + "total_sharpness": 7.18242590664886e-05, + "ip_v_neg_g": 0.016301589086651802, + "cos_v_neg_g": 0.002241196110844612, + "v_norm": 21.349227905273438, + "g_norm": 0.34069669246673584, + "hv_norm": 0.20738288760185242, + "cos_v_hv": 0.00739401625469327, + "hg_norm": 1.9999661445617676, + "cos_g_hg": 0.5307050943374634, + "v_parallel_norm": 0.005365424323827028, + "v_perp_norm": 21.349225997924805, + "layer_1_v_norm": 4.93538761138916, + "layer_1_cos_v_neg_g": 0.014983078464865685, + "layer_2_v_norm": 4.84356164932251, + "layer_2_cos_v_neg_g": 0.006877138279378414, + "layer_3_v_norm": 4.831028938293457, + "layer_3_cos_v_neg_g": 0.007967628538608551, + "layer_4_v_norm": 4.788726806640625, + "layer_4_cos_v_neg_g": 0.006855374667793512, + "layer_5_v_norm": 4.972353935241699, + "layer_5_cos_v_neg_g": 0.006791302934288979, + "layer_6_v_norm": 5.010195255279541, + "layer_6_cos_v_neg_g": 0.00579268392175436, + "layer_7_v_norm": 5.127697467803955, + "layer_7_cos_v_neg_g": 0.006317093037068844, + "layer_8_v_norm": 5.066160202026367, + "layer_8_cos_v_neg_g": 0.007647858001291752, + "layer_9_v_norm": 5.052347660064697, + "layer_9_cos_v_neg_g": 0.010084467008709908, + "layer_10_v_norm": 5.047706604003906, + "layer_10_cos_v_neg_g": 0.008967665955424309, + "layer_11_v_norm": 5.077579021453857, + "layer_11_cos_v_neg_g": 0.009094560518860817, + "layer_12_v_norm": 4.776913166046143, + "layer_12_cos_v_neg_g": 0.014969099313020706, + "layer_1_sharpness": 2.8966031095478684e-05, + "layer_2_sharpness": 8.52466018841369e-06, + "layer_3_sharpness": 1.4500813449558336e-05, + "layer_4_sharpness": 1.393064758303808e-05, + "layer_5_sharpness": 8.952867574407719e-06, + "layer_6_sharpness": 7.135237865441013e-06, + "layer_7_sharpness": 
7.844556421332527e-06, + "layer_8_sharpness": 1.6813819456729107e-05, + "layer_9_sharpness": 1.925068318087142e-05, + "layer_10_sharpness": 1.958408029167913e-05, + "layer_11_sharpness": 1.537498610559851e-05, + "layer_12_sharpness": 0.00010752227535704151 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..7ed543a198a5f2fce8c9265ea632baebb3c22380 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 21.587865829467773, + "total_l1_linf_norm": 189895.765625, + "total_spectral_norm": 21.587865829467773, + "layer_1_update_fnorm": 4.970032691955566, + "layer_1_max_l1_linf_norm": 4.6800127029418945, + "layer_1_max_spectral_norm": 0.8753938674926758, + "layer_2_update_fnorm": 4.867230415344238, + "layer_2_max_l1_linf_norm": 5.002788543701172, + "layer_2_max_spectral_norm": 0.5801168084144592, + "layer_3_update_fnorm": 4.871804237365723, + "layer_3_max_l1_linf_norm": 5.123744964599609, + "layer_3_max_spectral_norm": 0.6120581030845642, + "layer_4_update_fnorm": 4.815035820007324, + "layer_4_max_l1_linf_norm": 4.947514533996582, + "layer_4_max_spectral_norm": 0.6799240112304688, + "layer_5_update_fnorm": 5.048771381378174, + "layer_5_max_l1_linf_norm": 5.110003471374512, + "layer_5_max_spectral_norm": 0.5412216186523438, + "layer_6_update_fnorm": 5.133573532104492, + "layer_6_max_l1_linf_norm": 5.143847465515137, + "layer_6_max_spectral_norm": 0.5480538606643677, + "layer_7_update_fnorm": 5.170472621917725, + "layer_7_max_l1_linf_norm": 5.388370513916016, + "layer_7_max_spectral_norm": 0.5816015601158142, + "layer_8_update_fnorm": 5.141138553619385, + "layer_8_max_l1_linf_norm": 5.608377456665039, + "layer_8_max_spectral_norm": 0.7673571109771729, + "layer_9_update_fnorm": 5.125766277313232, + "layer_9_max_l1_linf_norm": 6.017078876495361, + "layer_9_max_spectral_norm": 0.8525882959365845, + "layer_10_update_fnorm": 5.134021759033203, + "layer_10_max_l1_linf_norm": 5.971071243286133, + "layer_10_max_spectral_norm": 0.8965135812759399, + "layer_11_update_fnorm": 5.166045188903809, + "layer_11_max_l1_linf_norm": 6.397073745727539, + "layer_11_max_spectral_norm": 0.7808995246887207, + "layer_12_update_fnorm": 4.868468761444092, + "layer_12_max_l1_linf_norm": 6.089331150054932, + "layer_12_max_spectral_norm": 0.932396650314331, + "total_sharpness": 0.00011182120942976326, + "ip_v_neg_g": 0.024891655892133713, + "cos_v_neg_g": 0.002641758183017373, + "v_norm": 21.587865829467773, + "g_norm": 0.43646660447120667, + "hv_norm": 0.4138982594013214, + "cos_v_hv": 0.005832306109368801, + "hg_norm": 4.792891502380371, + "cos_g_hg": 0.6735793948173523, + "v_parallel_norm": 0.0069014825858175755, + "v_perp_norm": 21.58786392211914, + "layer_1_v_norm": 4.970032691955566, + "layer_1_cos_v_neg_g": 0.010957491584122181, + "layer_2_v_norm": 4.867230415344238, + "layer_2_cos_v_neg_g": 0.002612778451293707, + "layer_3_v_norm": 4.871804714202881, + "layer_3_cos_v_neg_g": 0.002736347494646907, + "layer_4_v_norm": 4.815035820007324, + "layer_4_cos_v_neg_g": 0.003725440939888358, + "layer_5_v_norm": 5.048771381378174, + "layer_5_cos_v_neg_g": 0.006087505724281073, + "layer_6_v_norm": 5.13357400894165, + "layer_6_cos_v_neg_g": 0.008049516007304192, + "layer_7_v_norm": 
5.170472621917725, + "layer_7_cos_v_neg_g": 0.0075139617547392845, + "layer_8_v_norm": 5.141139030456543, + "layer_8_cos_v_neg_g": 0.011942853219807148, + "layer_9_v_norm": 5.125766277313232, + "layer_9_cos_v_neg_g": 0.015747953206300735, + "layer_10_v_norm": 5.134021759033203, + "layer_10_cos_v_neg_g": 0.019233103841543198, + "layer_11_v_norm": 5.166045188903809, + "layer_11_cos_v_neg_g": 0.020726051181554794, + "layer_12_v_norm": 4.868468761444092, + "layer_12_cos_v_neg_g": 0.03222068399190903, + "layer_1_sharpness": 2.0824258172069676e-05, + "layer_2_sharpness": 2.5098636342590908e-06, + "layer_3_sharpness": 1.776743920345325e-05, + "layer_4_sharpness": 1.176900968857808e-05, + "layer_5_sharpness": 9.972586667572614e-06, + "layer_6_sharpness": 9.805585250433069e-06, + "layer_7_sharpness": 8.677866389916744e-06, + "layer_8_sharpness": 2.294793739565648e-05, + "layer_9_sharpness": 3.763022687053308e-05, + "layer_10_sharpness": 4.516710760071874e-05, + "layer_11_sharpness": 2.5838869987637736e-05, + "layer_12_sharpness": 0.0002130910288542509 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..62d23c87ab7c35560ccf8de5276db4ab70a55884 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 21.431167602539062, + "total_l1_linf_norm": 188443.328125, + "total_spectral_norm": 21.431169509887695, + "layer_1_update_fnorm": 4.868854999542236, + "layer_1_max_l1_linf_norm": 4.53377103805542, + "layer_1_max_spectral_norm": 0.8740261197090149, + "layer_2_update_fnorm": 4.8594651222229, + "layer_2_max_l1_linf_norm": 4.9156646728515625, + "layer_2_max_spectral_norm": 0.5732304453849792, + "layer_3_update_fnorm": 4.815456390380859, + "layer_3_max_l1_linf_norm": 4.870425224304199, + "layer_3_max_spectral_norm": 0.588548481464386, + "layer_4_update_fnorm": 4.774237155914307, + "layer_4_max_l1_linf_norm": 4.70429801940918, + "layer_4_max_spectral_norm": 0.6842601299285889, + "layer_5_update_fnorm": 4.958930015563965, + "layer_5_max_l1_linf_norm": 4.857335090637207, + "layer_5_max_spectral_norm": 0.5464507341384888, + "layer_6_update_fnorm": 5.059928894042969, + "layer_6_max_l1_linf_norm": 4.920614242553711, + "layer_6_max_spectral_norm": 0.486642062664032, + "layer_7_update_fnorm": 5.156691074371338, + "layer_7_max_l1_linf_norm": 5.267112731933594, + "layer_7_max_spectral_norm": 0.5519863963127136, + "layer_8_update_fnorm": 5.110672473907471, + "layer_8_max_l1_linf_norm": 5.838413238525391, + "layer_8_max_spectral_norm": 0.6963159441947937, + "layer_9_update_fnorm": 5.101220607757568, + "layer_9_max_l1_linf_norm": 5.4690680503845215, + "layer_9_max_spectral_norm": 0.7830363512039185, + "layer_10_update_fnorm": 5.071040630340576, + "layer_10_max_l1_linf_norm": 5.87711238861084, + "layer_10_max_spectral_norm": 0.8135461211204529, + "layer_11_update_fnorm": 5.119908809661865, + "layer_11_max_l1_linf_norm": 5.686832904815674, + "layer_11_max_spectral_norm": 0.7771334052085876, + "layer_12_update_fnorm": 4.818779945373535, + "layer_12_max_l1_linf_norm": 5.868354797363281, + "layer_12_max_spectral_norm": 0.8675390481948853, + "total_sharpness": 7.938696217024699e-05, + "ip_v_neg_g": 0.015450761653482914, + "cos_v_neg_g": 0.0020336390007287264, + "v_norm": 
21.431167602539062, + "g_norm": 0.3545113801956177, + "hv_norm": 0.2783719599246979, + "cos_v_hv": 0.006111806258559227, + "hg_norm": 2.6614561080932617, + "cos_g_hg": 0.5332536697387695, + "v_parallel_norm": 0.00662632891908288, + "v_perp_norm": 21.431167602539062, + "layer_1_v_norm": 4.868854999542236, + "layer_1_cos_v_neg_g": 0.013355366885662079, + "layer_2_v_norm": 4.8594651222229, + "layer_2_cos_v_neg_g": 0.00413974653929472, + "layer_3_v_norm": 4.815456390380859, + "layer_3_cos_v_neg_g": 0.003469145158305764, + "layer_4_v_norm": 4.774237155914307, + "layer_4_cos_v_neg_g": 0.0027063805609941483, + "layer_5_v_norm": 4.958930015563965, + "layer_5_cos_v_neg_g": 0.0036509435158222914, + "layer_6_v_norm": 5.059928894042969, + "layer_6_cos_v_neg_g": 0.0040220832452178, + "layer_7_v_norm": 5.156691074371338, + "layer_7_cos_v_neg_g": 0.005922682583332062, + "layer_8_v_norm": 5.110672950744629, + "layer_8_cos_v_neg_g": 0.008163612335920334, + "layer_9_v_norm": 5.101220607757568, + "layer_9_cos_v_neg_g": 0.008422249928116798, + "layer_10_v_norm": 5.071040630340576, + "layer_10_cos_v_neg_g": 0.009475228376686573, + "layer_11_v_norm": 5.119908809661865, + "layer_11_cos_v_neg_g": 0.01036045141518116, + "layer_12_v_norm": 4.818779945373535, + "layer_12_cos_v_neg_g": 0.01878315955400467, + "layer_1_sharpness": 3.2466075936099514e-05, + "layer_2_sharpness": 9.65925573837012e-06, + "layer_3_sharpness": 1.703733687463682e-05, + "layer_4_sharpness": 1.2885800970252603e-05, + "layer_5_sharpness": 7.946953701321036e-06, + "layer_6_sharpness": 5.981789854558883e-06, + "layer_7_sharpness": 1.1195049410162028e-05, + "layer_8_sharpness": 1.838341631810181e-05, + "layer_9_sharpness": 2.3330425392487086e-05, + "layer_10_sharpness": 2.2800915758125484e-05, + "layer_11_sharpness": 1.5050189176690765e-05, + "layer_12_sharpness": 0.00012238553608767688 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..4be69c43665b0bb1f74ff0c1479adea668235a64 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 21.278459548950195, + "total_l1_linf_norm": 186438.59375, + "total_spectral_norm": 21.278459548950195, + "layer_1_update_fnorm": 4.8639817237854, + "layer_1_max_l1_linf_norm": 4.659607887268066, + "layer_1_max_spectral_norm": 0.8952616453170776, + "layer_2_update_fnorm": 4.734237194061279, + "layer_2_max_l1_linf_norm": 4.848986625671387, + "layer_2_max_spectral_norm": 0.5893778800964355, + "layer_3_update_fnorm": 4.698522090911865, + "layer_3_max_l1_linf_norm": 4.775003910064697, + "layer_3_max_spectral_norm": 0.5737539529800415, + "layer_4_update_fnorm": 4.690336227416992, + "layer_4_max_l1_linf_norm": 4.880223274230957, + "layer_4_max_spectral_norm": 0.6061646938323975, + "layer_5_update_fnorm": 4.906155586242676, + "layer_5_max_l1_linf_norm": 4.958622455596924, + "layer_5_max_spectral_norm": 0.5258752703666687, + "layer_6_update_fnorm": 5.024054527282715, + "layer_6_max_l1_linf_norm": 4.884937286376953, + "layer_6_max_spectral_norm": 0.4752638638019562, + "layer_7_update_fnorm": 5.116203308105469, + "layer_7_max_l1_linf_norm": 5.114831924438477, + "layer_7_max_spectral_norm": 0.5313042998313904, + "layer_8_update_fnorm": 5.068325042724609, + 
"layer_8_max_l1_linf_norm": 5.3496012687683105, + "layer_8_max_spectral_norm": 0.6333922147750854, + "layer_9_update_fnorm": 5.055103302001953, + "layer_9_max_l1_linf_norm": 5.48937463760376, + "layer_9_max_spectral_norm": 0.7056276798248291, + "layer_10_update_fnorm": 5.047800540924072, + "layer_10_max_l1_linf_norm": 5.506924152374268, + "layer_10_max_spectral_norm": 0.7429357171058655, + "layer_11_update_fnorm": 5.123275279998779, + "layer_11_max_l1_linf_norm": 5.697254657745361, + "layer_11_max_spectral_norm": 0.7303240299224854, + "layer_12_update_fnorm": 4.815974235534668, + "layer_12_max_l1_linf_norm": 5.35347843170166, + "layer_12_max_spectral_norm": 0.7812186479568481, + "total_sharpness": 5.3410447435453534e-05, + "ip_v_neg_g": 0.011297836899757385, + "cos_v_neg_g": 0.001385343843139708, + "v_norm": 21.278459548950195, + "g_norm": 0.38326355814933777, + "hv_norm": 0.18267717957496643, + "cos_v_hv": 0.0062213134951889515, + "hg_norm": 3.070725440979004, + "cos_g_hg": 0.538242757320404, + "v_parallel_norm": 0.004874433856457472, + "v_perp_norm": 21.278459548950195, + "layer_1_v_norm": 4.8639817237854, + "layer_1_cos_v_neg_g": 0.0075326114892959595, + "layer_2_v_norm": 4.734237194061279, + "layer_2_cos_v_neg_g": 0.0016673170030117035, + "layer_3_v_norm": 4.698522090911865, + "layer_3_cos_v_neg_g": 0.0026435034815222025, + "layer_4_v_norm": 4.690336227416992, + "layer_4_cos_v_neg_g": 0.0025324763264507055, + "layer_5_v_norm": 4.906155586242676, + "layer_5_cos_v_neg_g": 0.0034897015430033207, + "layer_6_v_norm": 5.024054527282715, + "layer_6_cos_v_neg_g": 0.0033663774374872446, + "layer_7_v_norm": 5.116203308105469, + "layer_7_cos_v_neg_g": 0.00401103962212801, + "layer_8_v_norm": 5.068325042724609, + "layer_8_cos_v_neg_g": 0.005389383994042873, + "layer_9_v_norm": 5.055103302001953, + "layer_9_cos_v_neg_g": 0.0058453441597521305, + "layer_10_v_norm": 5.047800540924072, + "layer_10_cos_v_neg_g": 0.006398688070476055, + "layer_11_v_norm": 5.1232757568359375, + "layer_11_cos_v_neg_g": 0.005506087094545364, + "layer_12_v_norm": 4.815974235534668, + "layer_12_cos_v_neg_g": 0.011218302883207798, + "layer_1_sharpness": 1.4941213521524332e-05, + "layer_2_sharpness": 2.899875198636437e-06, + "layer_3_sharpness": 3.019199311893317e-06, + "layer_4_sharpness": 6.992766429902986e-06, + "layer_5_sharpness": 6.689675501547754e-06, + "layer_6_sharpness": 4.841305781155825e-06, + "layer_7_sharpness": 5.866707397217397e-06, + "layer_8_sharpness": 1.7001579180941917e-05, + "layer_9_sharpness": 1.6451005649287254e-05, + "layer_10_sharpness": 1.96374676306732e-05, + "layer_11_sharpness": 1.4681257198390085e-05, + "layer_12_sharpness": 7.297086995095015e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_8000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..2f8184337332f06ec92179524bacf4c54ac16fdf --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 21.359195709228516, + "total_l1_linf_norm": 187593.3125, + "total_spectral_norm": 21.35919761657715, + "layer_1_update_fnorm": 4.925800323486328, + "layer_1_max_l1_linf_norm": 4.585933685302734, + "layer_1_max_spectral_norm": 0.9008193016052246, + "layer_2_update_fnorm": 4.792983531951904, + "layer_2_max_l1_linf_norm": 4.838018417358398, + 
"layer_2_max_spectral_norm": 0.5997118353843689, + "layer_3_update_fnorm": 4.872208595275879, + "layer_3_max_l1_linf_norm": 4.944285869598389, + "layer_3_max_spectral_norm": 0.5960335731506348, + "layer_4_update_fnorm": 4.798768520355225, + "layer_4_max_l1_linf_norm": 4.760061264038086, + "layer_4_max_spectral_norm": 0.6546119451522827, + "layer_5_update_fnorm": 4.974027156829834, + "layer_5_max_l1_linf_norm": 5.013103485107422, + "layer_5_max_spectral_norm": 0.5332805514335632, + "layer_6_update_fnorm": 5.057684421539307, + "layer_6_max_l1_linf_norm": 5.071195602416992, + "layer_6_max_spectral_norm": 0.46453389525413513, + "layer_7_update_fnorm": 5.139117240905762, + "layer_7_max_l1_linf_norm": 5.382437229156494, + "layer_7_max_spectral_norm": 0.5385956764221191, + "layer_8_update_fnorm": 5.0811991691589355, + "layer_8_max_l1_linf_norm": 5.6807332038879395, + "layer_8_max_spectral_norm": 0.6321067810058594, + "layer_9_update_fnorm": 5.069036960601807, + "layer_9_max_l1_linf_norm": 5.390695571899414, + "layer_9_max_spectral_norm": 0.6988760232925415, + "layer_10_update_fnorm": 5.064784526824951, + "layer_10_max_l1_linf_norm": 5.832603931427002, + "layer_10_max_spectral_norm": 0.7619823217391968, + "layer_11_update_fnorm": 5.102881908416748, + "layer_11_max_l1_linf_norm": 6.076744556427002, + "layer_11_max_spectral_norm": 0.7721640467643738, + "layer_12_update_fnorm": 4.844942569732666, + "layer_12_max_l1_linf_norm": 6.461037635803223, + "layer_12_max_spectral_norm": 0.8648363351821899, + "total_sharpness": 5.0550723244668916e-05, + "ip_v_neg_g": 0.008952396921813488, + "cos_v_neg_g": 0.0012537123402580619, + "v_norm": 21.359195709228516, + "g_norm": 0.3343155086040497, + "hv_norm": 0.1313047558069229, + "cos_v_hv": 0.008223028853535652, + "hg_norm": 2.0125389099121094, + "cos_g_hg": 0.4632464051246643, + "v_parallel_norm": 0.00529287476092577, + "v_perp_norm": 21.359195709228516, + "layer_1_v_norm": 4.925800323486328, + "layer_1_cos_v_neg_g": 0.005861942190676928, + "layer_2_v_norm": 4.792983531951904, + "layer_2_cos_v_neg_g": 0.002255238126963377, + "layer_3_v_norm": 4.872208595275879, + "layer_3_cos_v_neg_g": 0.0021656963508576155, + "layer_4_v_norm": 4.798768520355225, + "layer_4_cos_v_neg_g": 0.0036604818888008595, + "layer_5_v_norm": 4.974027156829834, + "layer_5_cos_v_neg_g": 0.00302309961989522, + "layer_6_v_norm": 5.057684421539307, + "layer_6_cos_v_neg_g": 0.0036829535383731127, + "layer_7_v_norm": 5.139117240905762, + "layer_7_cos_v_neg_g": 0.0032976616639643908, + "layer_8_v_norm": 5.081199645996094, + "layer_8_cos_v_neg_g": 0.004152174107730389, + "layer_9_v_norm": 5.069036960601807, + "layer_9_cos_v_neg_g": 0.004290335346013308, + "layer_10_v_norm": 5.064784526824951, + "layer_10_cos_v_neg_g": 0.0044942800886929035, + "layer_11_v_norm": 5.102881908416748, + "layer_11_cos_v_neg_g": 0.006003268528729677, + "layer_12_v_norm": 4.844942569732666, + "layer_12_cos_v_neg_g": 0.011348199099302292, + "layer_1_sharpness": 1.1861521670653019e-05, + "layer_2_sharpness": 1.4056502095627366e-06, + "layer_3_sharpness": 6.289460998232244e-06, + "layer_4_sharpness": 9.936468813975807e-06, + "layer_5_sharpness": 5.995567789796041e-06, + "layer_6_sharpness": 4.143584646953968e-06, + "layer_7_sharpness": 5.311796485329978e-06, + "layer_8_sharpness": 1.088473800336942e-05, + "layer_9_sharpness": 1.4399650353880133e-05, + "layer_10_sharpness": 1.6831556422403082e-05, + "layer_11_sharpness": 1.3271868738229387e-05, + "layer_12_sharpness": 7.396208820864558e-05 +} \ No newline at end of file diff 
--git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..e77457206f3c91284736b1ce7e28d77dbcac4556 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 21.09912109375, + "total_l1_linf_norm": 184715.40625, + "total_spectral_norm": 21.099117279052734, + "layer_1_update_fnorm": 4.780517101287842, + "layer_1_max_l1_linf_norm": 4.604593276977539, + "layer_1_max_spectral_norm": 0.9120487570762634, + "layer_2_update_fnorm": 4.667755126953125, + "layer_2_max_l1_linf_norm": 4.963862895965576, + "layer_2_max_spectral_norm": 0.572922945022583, + "layer_3_update_fnorm": 4.732508659362793, + "layer_3_max_l1_linf_norm": 5.1576313972473145, + "layer_3_max_spectral_norm": 0.5853120684623718, + "layer_4_update_fnorm": 4.686544895172119, + "layer_4_max_l1_linf_norm": 4.6914567947387695, + "layer_4_max_spectral_norm": 0.6488045454025269, + "layer_5_update_fnorm": 4.923516750335693, + "layer_5_max_l1_linf_norm": 5.177582740783691, + "layer_5_max_spectral_norm": 0.5456505417823792, + "layer_6_update_fnorm": 5.031282424926758, + "layer_6_max_l1_linf_norm": 5.0722503662109375, + "layer_6_max_spectral_norm": 0.49214687943458557, + "layer_7_update_fnorm": 5.095968723297119, + "layer_7_max_l1_linf_norm": 5.20572566986084, + "layer_7_max_spectral_norm": 0.5577515363693237, + "layer_8_update_fnorm": 5.03609561920166, + "layer_8_max_l1_linf_norm": 5.304032325744629, + "layer_8_max_spectral_norm": 0.6033177971839905, + "layer_9_update_fnorm": 4.997780799865723, + "layer_9_max_l1_linf_norm": 5.635193347930908, + "layer_9_max_spectral_norm": 0.6774187088012695, + "layer_10_update_fnorm": 4.991576194763184, + "layer_10_max_l1_linf_norm": 5.423253059387207, + "layer_10_max_spectral_norm": 0.7112141847610474, + "layer_11_update_fnorm": 5.006870746612549, + "layer_11_max_l1_linf_norm": 5.573630332946777, + "layer_11_max_spectral_norm": 0.7235091328620911, + "layer_12_update_fnorm": 4.638451099395752, + "layer_12_max_l1_linf_norm": 5.856202125549316, + "layer_12_max_spectral_norm": 0.8152996897697449, + "total_sharpness": 4.0791484934743494e-05, + "ip_v_neg_g": 0.009236386977136135, + "cos_v_neg_g": 0.001257675001397729, + "v_norm": 21.09912109375, + "g_norm": 0.34807220101356506, + "hv_norm": 0.12116944789886475, + "cos_v_hv": 0.0071029821410775185, + "hg_norm": 2.437371015548706, + "cos_g_hg": 0.5150071382522583, + "v_parallel_norm": 0.004390822723507881, + "v_perp_norm": 21.099119186401367, + "layer_1_v_norm": 4.780517101287842, + "layer_1_cos_v_neg_g": 0.00887882150709629, + "layer_2_v_norm": 4.667755126953125, + "layer_2_cos_v_neg_g": 0.003369406331330538, + "layer_3_v_norm": 4.732508182525635, + "layer_3_cos_v_neg_g": 0.0037560483906418085, + "layer_4_v_norm": 4.686544895172119, + "layer_4_cos_v_neg_g": 0.004703552462160587, + "layer_5_v_norm": 4.923516750335693, + "layer_5_cos_v_neg_g": 0.004340823739767075, + "layer_6_v_norm": 5.031282424926758, + "layer_6_cos_v_neg_g": 0.0029896534979343414, + "layer_7_v_norm": 5.095968723297119, + "layer_7_cos_v_neg_g": 0.0029000649228692055, + "layer_8_v_norm": 5.03609561920166, + "layer_8_cos_v_neg_g": 0.004977704491466284, + "layer_9_v_norm": 4.997780799865723, + "layer_9_cos_v_neg_g": 0.004333764314651489, + "layer_10_v_norm": 4.991576194763184, + 
"layer_10_cos_v_neg_g": 0.005172288976609707, + "layer_11_v_norm": 5.006870269775391, + "layer_11_cos_v_neg_g": 0.007109309080988169, + "layer_12_v_norm": 4.638451099395752, + "layer_12_cos_v_neg_g": 0.010525143705308437, + "layer_1_sharpness": 9.521741048956756e-06, + "layer_2_sharpness": 8.251112149082473e-07, + "layer_3_sharpness": 1.566737751090841e-06, + "layer_4_sharpness": 6.56674592391937e-06, + "layer_5_sharpness": 5.992590558889788e-06, + "layer_6_sharpness": 4.184735189483035e-06, + "layer_7_sharpness": 5.2531972869473975e-06, + "layer_8_sharpness": 1.1965201338171028e-05, + "layer_9_sharpness": 1.269501353817759e-05, + "layer_10_sharpness": 1.3722634321311489e-05, + "layer_11_sharpness": 9.476316336076707e-06, + "layer_12_sharpness": 7.000532059464604e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..b635f7312a9f63a69f00e16450ada0250164c185 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 21.413803100585938, + "total_l1_linf_norm": 188183.546875, + "total_spectral_norm": 21.413803100585938, + "layer_1_update_fnorm": 4.909921646118164, + "layer_1_max_l1_linf_norm": 4.637662410736084, + "layer_1_max_spectral_norm": 0.9185486435890198, + "layer_2_update_fnorm": 4.8411664962768555, + "layer_2_max_l1_linf_norm": 4.966485977172852, + "layer_2_max_spectral_norm": 0.6043694019317627, + "layer_3_update_fnorm": 4.845261573791504, + "layer_3_max_l1_linf_norm": 4.809751987457275, + "layer_3_max_spectral_norm": 0.5883041024208069, + "layer_4_update_fnorm": 4.787600040435791, + "layer_4_max_l1_linf_norm": 4.945263862609863, + "layer_4_max_spectral_norm": 0.6449378132820129, + "layer_5_update_fnorm": 5.011597156524658, + "layer_5_max_l1_linf_norm": 4.969185829162598, + "layer_5_max_spectral_norm": 0.5435275435447693, + "layer_6_update_fnorm": 5.060194492340088, + "layer_6_max_l1_linf_norm": 4.861860752105713, + "layer_6_max_spectral_norm": 0.4679107964038849, + "layer_7_update_fnorm": 5.148223400115967, + "layer_7_max_l1_linf_norm": 5.204025745391846, + "layer_7_max_spectral_norm": 0.5168821215629578, + "layer_8_update_fnorm": 5.103413105010986, + "layer_8_max_l1_linf_norm": 5.504249572753906, + "layer_8_max_spectral_norm": 0.629188597202301, + "layer_9_update_fnorm": 5.082645416259766, + "layer_9_max_l1_linf_norm": 5.509544849395752, + "layer_9_max_spectral_norm": 0.696133017539978, + "layer_10_update_fnorm": 5.084353923797607, + "layer_10_max_l1_linf_norm": 5.425848960876465, + "layer_10_max_spectral_norm": 0.7691676020622253, + "layer_11_update_fnorm": 5.1384196281433105, + "layer_11_max_l1_linf_norm": 5.651287078857422, + "layer_11_max_spectral_norm": 0.721445620059967, + "layer_12_update_fnorm": 4.870181560516357, + "layer_12_max_l1_linf_norm": 5.980575084686279, + "layer_12_max_spectral_norm": 0.8632177710533142, + "total_sharpness": 4.903621811536141e-05, + "ip_v_neg_g": 0.013193304650485516, + "cos_v_neg_g": 0.0016222101403400302, + "v_norm": 21.413803100585938, + "g_norm": 0.37979796528816223, + "hv_norm": 0.20727218687534332, + "cos_v_hv": 0.0050660534761846066, + "hg_norm": 2.3308193683624268, + "cos_g_hg": 0.5503305196762085, + "v_parallel_norm": 0.005999304354190826, + "v_perp_norm": 21.413803100585938, + 
"layer_1_v_norm": 4.909921646118164, + "layer_1_cos_v_neg_g": 0.008027108386158943, + "layer_2_v_norm": 4.8411664962768555, + "layer_2_cos_v_neg_g": 0.0044036791659891605, + "layer_3_v_norm": 4.845261573791504, + "layer_3_cos_v_neg_g": 0.004211426712572575, + "layer_4_v_norm": 4.787600040435791, + "layer_4_cos_v_neg_g": 0.0029426750261336565, + "layer_5_v_norm": 5.011597156524658, + "layer_5_cos_v_neg_g": 0.003013008274137974, + "layer_6_v_norm": 5.060194492340088, + "layer_6_cos_v_neg_g": 0.0034009956289082766, + "layer_7_v_norm": 5.148223400115967, + "layer_7_cos_v_neg_g": 0.0034718471579253674, + "layer_8_v_norm": 5.103413105010986, + "layer_8_cos_v_neg_g": 0.00515500595793128, + "layer_9_v_norm": 5.082645416259766, + "layer_9_cos_v_neg_g": 0.006334131583571434, + "layer_10_v_norm": 5.084353923797607, + "layer_10_cos_v_neg_g": 0.007765802554786205, + "layer_11_v_norm": 5.1384196281433105, + "layer_11_cos_v_neg_g": 0.009648085571825504, + "layer_12_v_norm": 4.870181560516357, + "layer_12_cos_v_neg_g": 0.018964864313602448, + "layer_1_sharpness": 1.4594040294468869e-05, + "layer_2_sharpness": 3.3421320040361024e-06, + "layer_3_sharpness": 7.065673798933858e-06, + "layer_4_sharpness": 6.9075263127160724e-06, + "layer_5_sharpness": 4.544409421214368e-06, + "layer_6_sharpness": 3.3976821214309894e-06, + "layer_7_sharpness": 6.2101439652906265e-06, + "layer_8_sharpness": 1.1906815416296013e-05, + "layer_9_sharpness": 1.4750419722986408e-05, + "layer_10_sharpness": 1.5399627955048345e-05, + "layer_11_sharpness": 1.0850058970390819e-05, + "layer_12_sharpness": 0.00010355677659390494 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..887add2e1e457e1e7ecbd409040be4776b457403 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 21.384910583496094, + "total_l1_linf_norm": 187874.9375, + "total_spectral_norm": 21.38490867614746, + "layer_1_update_fnorm": 4.894913196563721, + "layer_1_max_l1_linf_norm": 4.623693466186523, + "layer_1_max_spectral_norm": 0.905981183052063, + "layer_2_update_fnorm": 4.841567039489746, + "layer_2_max_l1_linf_norm": 4.858397006988525, + "layer_2_max_spectral_norm": 0.613101065158844, + "layer_3_update_fnorm": 4.837564945220947, + "layer_3_max_l1_linf_norm": 4.767063140869141, + "layer_3_max_spectral_norm": 0.6043534278869629, + "layer_4_update_fnorm": 4.768747329711914, + "layer_4_max_l1_linf_norm": 4.679686069488525, + "layer_4_max_spectral_norm": 0.6544691920280457, + "layer_5_update_fnorm": 5.0222487449646, + "layer_5_max_l1_linf_norm": 5.015384674072266, + "layer_5_max_spectral_norm": 0.5478691458702087, + "layer_6_update_fnorm": 5.080304145812988, + "layer_6_max_l1_linf_norm": 4.885174751281738, + "layer_6_max_spectral_norm": 0.487746000289917, + "layer_7_update_fnorm": 5.159106254577637, + "layer_7_max_l1_linf_norm": 5.210066795349121, + "layer_7_max_spectral_norm": 0.539889395236969, + "layer_8_update_fnorm": 5.118068695068359, + "layer_8_max_l1_linf_norm": 5.569347381591797, + "layer_8_max_spectral_norm": 0.6723052263259888, + "layer_9_update_fnorm": 5.106100082397461, + "layer_9_max_l1_linf_norm": 6.212451457977295, + "layer_9_max_spectral_norm": 0.7266808152198792, + "layer_10_update_fnorm": 
5.0775651931762695, + "layer_10_max_l1_linf_norm": 6.017547607421875, + "layer_10_max_spectral_norm": 0.7636504769325256, + "layer_11_update_fnorm": 5.118518352508545, + "layer_11_max_l1_linf_norm": 5.720320224761963, + "layer_11_max_spectral_norm": 0.7591449022293091, + "layer_12_update_fnorm": 4.845089435577393, + "layer_12_max_l1_linf_norm": 7.490239143371582, + "layer_12_max_spectral_norm": 0.8719568252563477, + "total_sharpness": 5.386395423556678e-05, + "ip_v_neg_g": 0.011225122027099133, + "cos_v_neg_g": 0.001438902341760695, + "v_norm": 21.384910583496094, + "g_norm": 0.3647978901863098, + "hv_norm": 0.2116166353225708, + "cos_v_hv": 0.005443219095468521, + "hg_norm": 2.9288158416748047, + "cos_g_hg": 0.547015368938446, + "v_parallel_norm": 0.005079095717519522, + "v_perp_norm": 21.38490867614746, + "layer_1_v_norm": 4.894913196563721, + "layer_1_cos_v_neg_g": 0.008015661500394344, + "layer_2_v_norm": 4.841567039489746, + "layer_2_cos_v_neg_g": 0.002709761494770646, + "layer_3_v_norm": 4.837564945220947, + "layer_3_cos_v_neg_g": 0.002823363523930311, + "layer_4_v_norm": 4.768747329711914, + "layer_4_cos_v_neg_g": 0.002193029737100005, + "layer_5_v_norm": 5.0222487449646, + "layer_5_cos_v_neg_g": 0.0032887100242078304, + "layer_6_v_norm": 5.080304145812988, + "layer_6_cos_v_neg_g": 0.0032494983170181513, + "layer_7_v_norm": 5.159106254577637, + "layer_7_cos_v_neg_g": 0.004236855544149876, + "layer_8_v_norm": 5.118068695068359, + "layer_8_cos_v_neg_g": 0.005345140118151903, + "layer_9_v_norm": 5.106100082397461, + "layer_9_cos_v_neg_g": 0.007005280815064907, + "layer_10_v_norm": 5.0775651931762695, + "layer_10_cos_v_neg_g": 0.007020959630608559, + "layer_11_v_norm": 5.118517875671387, + "layer_11_cos_v_neg_g": 0.008797137066721916, + "layer_12_v_norm": 4.845089435577393, + "layer_12_cos_v_neg_g": 0.015904219821095467, + "layer_1_sharpness": 1.3001497791265137e-05, + "layer_2_sharpness": 1.45766023251781e-06, + "layer_3_sharpness": 3.0635594612249406e-06, + "layer_4_sharpness": 6.63080800222815e-06, + "layer_5_sharpness": 5.621966010949109e-06, + "layer_6_sharpness": 4.353004896984203e-06, + "layer_7_sharpness": 6.683757419523317e-06, + "layer_8_sharpness": 1.2543700904643629e-05, + "layer_9_sharpness": 1.4063343769521452e-05, + "layer_10_sharpness": 1.6460944607388228e-05, + "layer_11_sharpness": 1.4404494322661776e-05, + "layer_12_sharpness": 9.555781434755772e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/training_log.txt b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..0b048f1eb63fb383cc967ed97fb4e07e399ba2a1 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.01_mlr_0.01_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. 
+ +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" 
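# Illustrative aside (not part of the original script): the shard layout that
# _peek_data_shard/_load_data_shard expect is a 256*int32 header carrying the
# magic number 20240520, version 1 and the claimed token count, followed by the
# tokens as uint16. A hypothetical helper that writes a compatible toy shard:
def _write_toy_shard(filename, tokens):
    import numpy as np  # already imported at the top of this script
    tokens = np.asarray(list(tokens), dtype=np.uint16)
    header = np.zeros(256, dtype=np.int32)
    header[0] = 20240520      # magic number the readers above assert on
    header[1] = 1             # version they assert on
    header[2] = len(tokens)   # claimed token count
    with open(filename, "wb") as f:
        f.write(header.tobytes())
        f.write(tokens.tobytes())
# e.g. _write_toy_shard("toy.bin", range(32)) produces a file for which
# _peek_data_shard reports 32 tokens and _load_data_shard returns them unchanged.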
+ return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in 
range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. 
+ # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack("<B", length)) + file.write(b) + # Note: update_direction_v is identical across ranks in DDP (same gradients -> same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3.
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
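# Illustrative aside (not part of the original script): the recipe used in
# sections 5-6 below, distilled onto a toy model. The directional sharpness of a
# loss along a direction v is (v^T H v) / (v^T v); the Hessian-vector product Hv
# comes from double backward, so the Hessian is never formed explicitly.
def _directional_sharpness_sketch():
    import torch
    toy = torch.nn.Linear(4, 1)
    params = list(toy.parameters())
    x, y = torch.randn(8, 4), torch.randn(8, 1)
    loss = torch.nn.functional.mse_loss(toy(x), y)
    v = [torch.randn_like(p) for p in params]                     # probe direction
    grads = torch.autograd.grad(loss, params, create_graph=True)  # keep graph for 2nd backward
    v_dot_g = sum(torch.sum(g * vi) for g, vi in zip(grads, v))
    hv = torch.autograd.grad(v_dot_g, params)                     # Hv via double backward
    v_dot_hv = sum(torch.sum(h * vi) for h, vi in zip(hv, v))
    v_dot_v = sum(torch.sum(vi * vi) for vi in v)
    return (v_dot_hv / v_dot_v).item()                            # sharpness along v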
+ loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = ; cos_v_neg_g = / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = 
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
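# Illustrative aside (not part of the original script): the grouping rule in the
# comment above, applied to a toy module. Only 2-D tensors receive weight decay;
# AdamW then applies the per-group 'weight_decay' in a decoupled fashion.
def _toy_decay_grouping():
    import torch
    toy = torch.nn.Sequential(torch.nn.Linear(4, 4), torch.nn.LayerNorm(4))
    decay = [p for p in toy.parameters() if p.dim() >= 2]    # Linear.weight only
    nodecay = [p for p in toy.parameters() if p.dim() < 2]   # biases + LayerNorm affine
    return torch.optim.AdamW(
        [{'params': decay, 'weight_decay': 0.1},
         {'params': nodecay, 'weight_decay': 0.0}],
        lr=1e-3, betas=(0.9, 0.95))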
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, 
+if device == "cuda":
+    torch.cuda.reset_peak_memory_stats()
+timings = []
+norm = -1.0  # dummy value to print in inference-only mode
+for step in range(args.num_iterations + 1):
+    t0 = time.time()
+    last_step = (step == args.num_iterations)
+
+    # once in a while evaluate the validation dataset
+    if (args.val_loss_every > 0 \
+        and (step % args.val_loss_every == 0 or last_step)) \
+        and (val_loader is not None):
+        model.eval()
+        val_loader.reset()
+        with torch.no_grad():
+            val_loss = 0.0
+            for _ in range(args.val_max_steps):
+                x, y = val_loader.next_batch()
+                x, y = x.to(device), y.to(device)
+                _, loss = model(x, y, return_logits=False)
+                val_loss += loss.item()
+            val_loss /= args.val_max_steps
+
+        # --- Comprehensive Sharpness Analysis ---
+        sharpness_log_str = ""
+        # Skip step 0 since we don't have a previous training update yet
+        if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step):
+            print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...")
+
+            comprehensive_results = calculate_comprehensive_sharpness(
+                model=raw_model_uncompiled,  # Use uncompiled model for HVP (double backward)
+                model_for_forward=model,     # Use compiled+DDP model for forward pass
+                optimizers=optimizers,
+                step=step,
+                train_loader=train_loader,
+                val_loader=val_loader,
+                rank=ddp_rank,
+                world_size=ddp_world_size,
+                device=device,
+                B=B,
+                T=T,
+                ptdtype=ptdtype,
+                grad_accum_steps=grad_accum_steps,  # Pass grad accumulation steps to scale loss correctly
+                sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps),
+                last_training_update=last_training_update  # Pass the real update captured from training
+            )
+            sharpness_log_str = format_comprehensive_results(comprehensive_results)
+
+            # Save sharpness results to file
+            if master_process and run_dir_path:
+                sharpness_file = run_dir_path / f"sharpness_step_{step}.json"
+                with open(sharpness_file, "w") as f:
+                    json.dump(comprehensive_results, f, indent=4)
+                print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}")
+
+            # Clean up memory after sharpness analysis
+            del comprehensive_results
+            # Ensure all CUDA operations are complete before cleaning up
+            if device == "cuda":
+                torch.cuda.synchronize()
+                torch.cuda.empty_cache()
+            gc.collect()
+            if ddp:
+                dist.barrier()  # Sync all ranks after cleanup
+            print0(f"[Step {step}] Memory cleaned up after sharpness analysis")
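Since each analysis step dumps a standalone sharpness_step_<step>.json into the run directory, the trajectory can be re-assembled offline; a small sketch (assuming the dumps expose a scalar total_sharpness entry, matching the total_sharp values printed in the log below):

import json
import re
from pathlib import Path

run_dir = Path("path/to/run_dir")  # placeholder: one opt_..._seed_... run folder created above
points = []
for f in run_dir.glob("sharpness_step_*.json"):
    step = int(re.search(r"sharpness_step_(\d+)\.json", f.name).group(1))
    with open(f) as fh:
        points.append((step, json.load(fh).get("total_sharpness")))
for step, sharp in sorted(points):
    print(f"step {step:6d}  total_sharpness {sharp}")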
+        # log to console and to file
+        if sharpness_log_str:
+            print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}")
+        else:
+            print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}")
+
+        if master_process and logfile is not None:
+            with open(logfile, "a") as f:
+                f.write("step:%d validation loss:%f" % (step, val_loss))
+                if sharpness_log_str:
+                    f.write(" %s" % sharpness_log_str)
+                f.write("\n")
+
+    # once in a while perform model inference on the master process
+    if (args.sample_every > 0 \
+        and (step % args.sample_every == 0 or last_step)) \
+        and master_process:
+        model.eval()
+        # before we end, let's also do one round of inference
+        # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence
+        start_ids = [enc.eot_token]
+        xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...])
+        max_new_tokens = 32
+        temperature = 1.0
+        top_k = 40
+        yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k)
+        print0('---------------')
+        print0(enc.decode(yg[0].tolist()))
+        print0('---------------')
+
+    # bit confusing: we want to make sure to eval and sample on 0th iteration
+    # but also after the very last iteration. so we loop for step <= num_iterations
+    # instead of just < num_iterations (one extra due to <=), only to do
+    # the validation/sampling one last time, and then we break right here as we're done.
+    if last_step:
+        break
+
+    # --------------- TRAINING SECTION BEGIN -----------------
+    model.train()
+    # Zero gradients for the appropriate optimizer(s)
+
+    for optimizer in optimizers:
+        if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW):
+            optimizer.zero_grad(set_to_none=True)
+        elif isinstance(optimizer, Muon):
+            optimizer.zero_grad()
+    # if args.optimizer == "adam":
+    #     optimizer.zero_grad(set_to_none=True)
+    # else:  # muon
+    #     if muon_optimizer is not None:
+    #         muon_optimizer.zero_grad()
+    #     if adam_optimizer is not None:
+    #         adam_optimizer.zero_grad(set_to_none=True)
+    # if we are trying to overfit a single batch, we reset the loader here
+    if args.overfit_single_batch:
+        train_loader.reset()
+    # micro-batch loop where we do gradient accumulation to reach desired total batch size
+    lossf = 0.0  # for getting the mean loss (as simple float) over the accumulation steps
+    for micro_step in range(grad_accum_steps):
+        # fetch a batch
+        x, y = train_loader.next_batch()
+        x, y = x.to(device), y.to(device)
+        if ddp:
+            # we want only the last micro-step to sync grads in a DDP model
+            # the official way to do this is with model.no_sync(), but that is a
+            # context manager that bloats the code, so we just toggle this variable
+            model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1)
+        # forward pass
+        with ctx:
+            _, loss = model(x, y, return_logits=False)
+            # we have to scale the loss to account for gradient accumulation,
+            # because the gradients just add on each successive backward().
+            # addition of gradients corresponds to a SUM in the objective, but
+            # instead of a SUM we want MEAN, so we scale the loss here
+            loss = loss / grad_accum_steps
+            lossf += loss.detach()  # keep track of the mean loss
+        # backward pass
+        if not args.inference_only:
+            loss.backward()
+    if ddp:
+        dist.all_reduce(lossf, op=dist.ReduceOp.AVG)
+    lossf = lossf.item()
+    norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip)
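The loss / grad_accum_steps scaling in the micro-batch loop is what turns gradient accumulation (a SUM over successive backward calls) into the gradient of the MEAN loss; a tiny self-contained check of that arithmetic on a scalar parameter:

import torch

def grad(accumulate):
    w = torch.tensor(2.0, requires_grad=True)
    xs = (1.0, 2.0, 3.0, 4.0)  # stand-in for grad_accum_steps = 4 micro-batches
    if accumulate:
        for x in xs:
            ((w * x) ** 2 / len(xs)).backward()  # scaled loss; gradients add up across calls
    else:
        torch.stack([(w * x) ** 2 for x in xs]).mean().backward()  # mean loss in one shot
    return w.grad

print(torch.allclose(grad(True), grad(False)))  # True: accumulation reproduces the mean-loss gradient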
+    # Capture params before optimizer step ONLY when sharpness analysis will run next
+    # This minimizes memory overhead while ensuring we have the correct update
+    next_step = step + 1
+    will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and (
+        (next_step % args.sharpness_analysis_interval == 0) or
+        (next_step == args.num_iterations)
+    )
+
+    if will_analyze_sharpness_next:
+        # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths.
+        # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.)
+        params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()]
+    else:
+        params_before_optimizer_step = None
+
+    # Update learning rate and step optimizers
+    for optimizer in optimizers:
+        if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW):
+            adam_lr = get_wsd_lr(step, args.adam_lr)
+            for param_group in optimizer.param_groups:
+                param_group['lr'] = adam_lr
+            optimizer.step()
+        elif isinstance(optimizer, Muon):
+            muon_lr = get_wsd_lr(step, args.muon_lr)
+            for param_group in optimizer.param_groups:
+                param_group['lr'] = muon_lr
+            optimizer.step()
+        else:
+            raise ValueError(f"Unsupported optimizer: {type(optimizer)}")
+
+    # Calculate and store the actual training update (only when needed)
+    if params_before_optimizer_step is not None:
+        # Clean up old update to save memory
+        if last_training_update is not None:
+            del last_training_update
+
+        last_training_update = [
+            p_before - p.detach()
+            for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters())
+        ]
+        del params_before_optimizer_step
+
+    # --------------- TRAINING SECTION END -------------------
+
+    # wait on the CPU for all device work to end so we get accurate per-iteration timings below
+    if device == "mps":
+        torch.mps.synchronize()
+    elif device == "cuda":
+        torch.cuda.synchronize()
+    # time and print
+    t1 = time.time()
+    # the 0th iteration is often an outlier (much slower) => skip logging it
+    tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0)
+    print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)")
+    # log to logfile
+    if master_process and logfile is not None:
+        with open(logfile, "a") as f:
+            f.write("step:%d train loss:%f\n" % (step, lossf))
+
+    # keep track of smooth timings, last 20 iterations
+    if step > 0 and step > args.num_iterations - 20:
+        timings.append(t1-t0)
+
+# print the average of the last 20 timings, to get something smooth-ish
+timings = timings[-20:]
+print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms")
+print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB")
+
+# -------------------------------------------------------------------------
+# clean up nice
+if ddp:
+    destroy_process_group()
+step:0 validation loss:11.026602 +step:0 train loss:11.019230 +step:1 train loss:10.434136 +step:2 train loss:10.018497 +step:3 train loss:9.771797 +step:4 train loss:9.659418 +step:5 train loss:9.559887 +step:6 train loss:9.488479 +step:7 train loss:9.347619 +step:8 train loss:9.276689 +step:9 train loss:9.119946 +step:10 train loss:9.053164 +step:11 train loss:8.921634 +step:12 train loss:8.772142 +step:13 train loss:8.613052 +step:14 train loss:8.496427 +step:15 train loss:8.350893 +step:16 train loss:8.230795 +step:17 train loss:8.121971 +step:18 train loss:8.028851 +step:19 train loss:7.841505 +step:20 train loss:7.751853 +step:21 train loss:7.696122 +step:22 train loss:7.504427 +step:23 train loss:7.493006 +step:24 train loss:7.378595 +step:25 train loss:7.372848 +step:26 train loss:7.344984 +step:27 train loss:7.235605 +step:28 train loss:7.279245 +step:29 train loss:7.237541 +step:30 train loss:7.223936 +step:31 train loss:7.115438 +step:32 train loss:7.130903 +step:33 train loss:7.067097 +step:34 train loss:7.125850 +step:35 train loss:7.057283 +step:36 train loss:7.024206 +step:37 train loss:6.950800 +step:38 train loss:6.980027 +step:39
train loss:6.898288 +step:40 train loss:6.891150 +step:41 train loss:6.845909 +step:42 train loss:6.890083 +step:43 train loss:6.785035 +step:44 train loss:6.784314 +step:45 train loss:6.765918 +step:46 train loss:6.766308 +step:47 train loss:6.807031 +step:48 train loss:6.704327 +step:49 train loss:6.727849 +step:50 train loss:6.695040 +step:51 train loss:6.690838 +step:52 train loss:6.693235 +step:53 train loss:6.679952 +step:54 train loss:6.678197 +step:55 train loss:6.625392 +step:56 train loss:6.598184 +step:57 train loss:6.619463 +step:58 train loss:6.555601 +step:59 train loss:6.593370 +step:60 train loss:6.573270 +step:61 train loss:6.543673 +step:62 train loss:6.522599 +step:63 train loss:6.582654 +step:64 train loss:6.488786 +step:65 train loss:6.519042 +step:66 train loss:6.549438 +step:67 train loss:6.604216 +step:68 train loss:6.540657 +step:69 train loss:6.529864 +step:70 train loss:6.505377 +step:71 train loss:6.491295 +step:72 train loss:6.505880 +step:73 train loss:6.471416 +step:74 train loss:6.480637 +step:75 train loss:6.437113 +step:76 train loss:6.508811 +step:77 train loss:6.466663 +step:78 train loss:6.232283 +step:79 train loss:6.405202 +step:80 train loss:6.375537 +step:81 train loss:6.492462 +step:82 train loss:6.448244 +step:83 train loss:6.475057 +step:84 train loss:6.396180 +step:85 train loss:6.375554 +step:86 train loss:6.376769 +step:87 train loss:6.358914 +step:88 train loss:6.355967 +step:89 train loss:6.311910 +step:90 train loss:6.365910 +step:91 train loss:6.363672 +step:92 train loss:6.370719 +step:93 train loss:6.336669 +step:94 train loss:6.294443 +step:95 train loss:6.242675 +step:96 train loss:6.341794 +step:97 train loss:6.293420 +step:98 train loss:6.287604 +step:99 train loss:6.263258 +step:100 train loss:6.276206 +step:101 train loss:6.235438 +step:102 train loss:6.245697 +step:103 train loss:6.251078 +step:104 train loss:6.274176 +step:105 train loss:6.308785 +step:106 train loss:6.260495 +step:107 train loss:6.215752 +step:108 train loss:6.253354 +step:109 train loss:6.281596 +step:110 train loss:6.208408 +step:111 train loss:6.232978 +step:112 train loss:6.231448 +step:113 train loss:6.178707 +step:114 train loss:6.218654 +step:115 train loss:6.200879 +step:116 train loss:6.174008 +step:117 train loss:6.133520 +step:118 train loss:6.177207 +step:119 train loss:6.143301 +step:120 train loss:6.171557 +step:121 train loss:6.097219 +step:122 train loss:6.176427 +step:123 train loss:6.101509 +step:124 train loss:6.096533 +step:125 train loss:6.068445 +step:126 train loss:6.176770 +step:127 train loss:6.085730 +step:128 train loss:6.146970 +step:129 train loss:6.103400 +step:130 train loss:6.149361 +step:131 train loss:6.120932 +step:132 train loss:6.069652 +step:133 train loss:6.096457 +step:134 train loss:6.106059 +step:135 train loss:6.010242 +step:136 train loss:6.059834 +step:137 train loss:6.074332 +step:138 train loss:6.040085 +step:139 train loss:6.108274 +step:140 train loss:6.020812 +step:141 train loss:6.107220 +step:142 train loss:6.057299 +step:143 train loss:6.082646 +step:144 train loss:6.045773 +step:145 train loss:5.979738 +step:146 train loss:5.990032 +step:147 train loss:6.038523 +step:148 train loss:6.050579 +step:149 train loss:6.020360 +step:150 train loss:6.025282 +step:151 train loss:5.935011 +step:152 train loss:5.971184 +step:153 train loss:5.971440 +step:154 train loss:6.052125 +step:155 train loss:6.052685 +step:156 train loss:6.077483 +step:157 train loss:5.989749 +step:158 train loss:5.964338 +step:159 train 
loss:6.002094 +step:160 train loss:5.998402 +step:161 train loss:6.000382 +step:162 train loss:5.948258 +step:163 train loss:5.973339 +step:164 train loss:5.940493 +step:165 train loss:5.977041 +step:166 train loss:5.929440 +step:167 train loss:5.932813 +step:168 train loss:5.913537 +step:169 train loss:5.868436 +step:170 train loss:5.853051 +step:171 train loss:5.995830 +step:172 train loss:5.919501 +step:173 train loss:5.976571 +step:174 train loss:5.973892 +step:175 train loss:5.942076 +step:176 train loss:5.923004 +step:177 train loss:5.956483 +step:178 train loss:5.956837 +step:179 train loss:5.913417 +step:180 train loss:5.869174 +step:181 train loss:5.918660 +step:182 train loss:5.848569 +step:183 train loss:5.921313 +step:184 train loss:5.889851 +step:185 train loss:5.831059 +step:186 train loss:5.959142 +step:187 train loss:5.907922 +step:188 train loss:5.765745 +step:189 train loss:5.885756 +step:190 train loss:5.906925 +step:191 train loss:5.807876 +step:192 train loss:5.761451 +step:193 train loss:5.915018 +step:194 train loss:5.956063 +step:195 train loss:5.976234 +step:196 train loss:5.947755 +step:197 train loss:5.935172 +step:198 train loss:5.883968 +step:199 train loss:5.929327 +step:200 train loss:5.985602 +step:201 train loss:5.913998 +step:202 train loss:5.898059 +step:203 train loss:5.867711 +step:204 train loss:5.875212 +step:205 train loss:5.762680 +step:206 train loss:5.890835 +step:207 train loss:5.857371 +step:208 train loss:5.799220 +step:209 train loss:5.770548 +step:210 train loss:5.789158 +step:211 train loss:5.834258 +step:212 train loss:5.815726 +step:213 train loss:5.829636 +step:214 train loss:5.795520 +step:215 train loss:5.817606 +step:216 train loss:5.779833 +step:217 train loss:5.811854 +step:218 train loss:5.797510 +step:219 train loss:5.768144 +step:220 train loss:5.786032 +step:221 train loss:5.778878 +step:222 train loss:5.813422 +step:223 train loss:5.858713 +step:224 train loss:5.844062 +step:225 train loss:5.761115 +step:226 train loss:5.778042 +step:227 train loss:5.818107 +step:228 train loss:5.787271 +step:229 train loss:5.833498 +step:230 train loss:5.723359 +step:231 train loss:5.785731 +step:232 train loss:5.764789 +step:233 train loss:5.754300 +step:234 train loss:5.748564 +step:235 train loss:5.834629 +step:236 train loss:5.772778 +step:237 train loss:5.808777 +step:238 train loss:5.798570 +step:239 train loss:5.705580 +step:240 train loss:5.780121 +step:241 train loss:5.820803 +step:242 train loss:5.811289 +step:243 train loss:5.734832 +step:244 train loss:5.750468 +step:245 train loss:5.717035 +step:246 train loss:5.712404 +step:247 train loss:5.708907 +step:248 train loss:5.679728 +step:249 train loss:5.770352 +step:250 validation loss:5.767549 +step:250 train loss:5.726367 +step:251 train loss:5.777663 +step:252 train loss:5.712132 +step:253 train loss:5.713701 +step:254 train loss:5.684225 +step:255 train loss:5.724206 +step:256 train loss:5.721080 +step:257 train loss:5.792632 +step:258 train loss:5.700288 +step:259 train loss:5.723581 +step:260 train loss:5.674371 +step:261 train loss:5.673326 +step:262 train loss:5.749360 +step:263 train loss:5.705654 +step:264 train loss:5.676260 +step:265 train loss:5.690024 +step:266 train loss:5.702356 +step:267 train loss:5.718194 +step:268 train loss:5.663395 +step:269 train loss:5.715195 +step:270 train loss:5.722107 +step:271 train loss:5.722028 +step:272 train loss:5.670770 +step:273 train loss:5.722195 +step:274 train loss:5.633854 +step:275 train loss:5.683669 +step:276 train 
loss:5.656628 +step:277 train loss:5.657694 +step:278 train loss:5.630175 +step:279 train loss:5.610124 +step:280 train loss:5.672053 +step:281 train loss:5.740986 +step:282 train loss:5.625181 +step:283 train loss:5.662580 +step:284 train loss:5.633459 +step:285 train loss:5.702698 +step:286 train loss:5.648038 +step:287 train loss:5.622793 +step:288 train loss:5.603859 +step:289 train loss:5.622557 +step:290 train loss:5.675276 +step:291 train loss:5.611012 +step:292 train loss:5.669971 +step:293 train loss:5.595090 +step:294 train loss:5.709820 +step:295 train loss:5.623543 +step:296 train loss:5.653975 +step:297 train loss:5.670992 +step:298 train loss:5.581976 +step:299 train loss:5.672898 +step:300 train loss:5.595028 +step:301 train loss:5.617827 +step:302 train loss:5.597772 +step:303 train loss:5.617866 +step:304 train loss:5.643425 +step:305 train loss:5.581295 +step:306 train loss:5.592929 +step:307 train loss:5.612214 +step:308 train loss:5.528769 +step:309 train loss:5.657889 +step:310 train loss:5.621603 +step:311 train loss:5.610181 +step:312 train loss:5.583730 +step:313 train loss:5.621712 +step:314 train loss:5.601716 +step:315 train loss:5.541661 +step:316 train loss:5.540644 +step:317 train loss:5.509203 +step:318 train loss:5.516592 +step:319 train loss:5.622742 +step:320 train loss:5.540291 +step:321 train loss:5.590104 +step:322 train loss:5.587104 +step:323 train loss:5.635553 +step:324 train loss:5.569693 +step:325 train loss:5.593371 +step:326 train loss:5.599435 +step:327 train loss:5.593479 +step:328 train loss:5.551143 +step:329 train loss:5.582767 +step:330 train loss:5.513039 +step:331 train loss:5.559365 +step:332 train loss:5.526023 +step:333 train loss:5.491112 +step:334 train loss:5.592897 +step:335 train loss:5.645402 +step:336 train loss:5.773666 +step:337 train loss:5.654043 +step:338 train loss:5.562125 +step:339 train loss:5.564559 +step:340 train loss:5.560511 +step:341 train loss:5.574386 +step:342 train loss:5.611006 +step:343 train loss:5.594155 +step:344 train loss:5.532818 +step:345 train loss:5.488512 +step:346 train loss:5.532464 +step:347 train loss:5.472878 +step:348 train loss:5.476729 +step:349 train loss:5.409134 +step:350 train loss:5.445349 +step:351 train loss:5.536350 +step:352 train loss:5.500362 +step:353 train loss:5.570294 +step:354 train loss:5.569237 +step:355 train loss:5.628418 +step:356 train loss:5.550133 +step:357 train loss:5.628540 +step:358 train loss:5.640408 +step:359 train loss:5.472902 +step:360 train loss:5.600790 +step:361 train loss:5.587594 +step:362 train loss:5.555481 +step:363 train loss:5.513802 +step:364 train loss:5.654529 +step:365 train loss:5.560895 +step:366 train loss:5.520417 +step:367 train loss:5.542397 +step:368 train loss:5.517327 +step:369 train loss:5.492689 +step:370 train loss:5.544748 +step:371 train loss:5.486388 +step:372 train loss:5.541646 +step:373 train loss:5.477152 +step:374 train loss:5.478659 +step:375 train loss:5.496827 +step:376 train loss:5.494465 +step:377 train loss:5.400813 +step:378 train loss:5.460254 +step:379 train loss:5.524404 +step:380 train loss:5.482386 +step:381 train loss:5.524969 +step:382 train loss:5.546640 +step:383 train loss:5.496845 +step:384 train loss:5.486866 +step:385 train loss:5.469268 +step:386 train loss:5.490623 +step:387 train loss:5.476112 +step:388 train loss:5.447321 +step:389 train loss:5.445165 +step:390 train loss:5.421193 +step:391 train loss:5.444665 +step:392 train loss:5.417174 +step:393 train loss:5.407028 +step:394 train loss:5.459908 
+step:395 train loss:5.382906 +step:396 train loss:5.339646 +step:397 train loss:5.428674 +step:398 train loss:5.401609 +step:399 train loss:5.402749 +step:400 train loss:5.375325 +step:401 train loss:5.435174 +step:402 train loss:5.440716 +step:403 train loss:5.443866 +step:404 train loss:5.450245 +step:405 train loss:5.491681 +step:406 train loss:5.538494 +step:407 train loss:5.490546 +step:408 train loss:5.539553 +step:409 train loss:5.459232 +step:410 train loss:5.435949 +step:411 train loss:5.424792 +step:412 train loss:5.515049 +step:413 train loss:5.375016 +step:414 train loss:5.462742 +step:415 train loss:5.430147 +step:416 train loss:5.431857 +step:417 train loss:5.489211 +step:418 train loss:5.420055 +step:419 train loss:5.396358 +step:420 train loss:5.373641 +step:421 train loss:5.369212 +step:422 train loss:5.377718 +step:423 train loss:5.394028 +step:424 train loss:5.352895 +step:425 train loss:5.427069 +step:426 train loss:5.421389 +step:427 train loss:5.353630 +step:428 train loss:5.417704 +step:429 train loss:5.316387 +step:430 train loss:5.346689 +step:431 train loss:5.386617 +step:432 train loss:5.408862 +step:433 train loss:5.397227 +step:434 train loss:5.345723 +step:435 train loss:5.394184 +step:436 train loss:5.399373 +step:437 train loss:5.350057 +step:438 train loss:5.307545 +step:439 train loss:5.294209 +step:440 train loss:5.366497 +step:441 train loss:5.325688 +step:442 train loss:5.359267 +step:443 train loss:5.343184 +step:444 train loss:5.382773 +step:445 train loss:5.400525 +step:446 train loss:5.325145 +step:447 train loss:5.336174 +step:448 train loss:5.399930 +step:449 train loss:5.349293 +step:450 train loss:5.351181 +step:451 train loss:5.331150 +step:452 train loss:5.419765 +step:453 train loss:5.359112 +step:454 train loss:5.302805 +step:455 train loss:5.373611 +step:456 train loss:5.332177 +step:457 train loss:5.307393 +step:458 train loss:5.349910 +step:459 train loss:5.304203 +step:460 train loss:5.479832 +step:461 train loss:5.403613 +step:462 train loss:5.293123 +step:463 train loss:5.363542 +step:464 train loss:5.400072 +step:465 train loss:5.347925 +step:466 train loss:5.366935 +step:467 train loss:5.313565 +step:468 train loss:5.348790 +step:469 train loss:5.314223 +step:470 train loss:5.282047 +step:471 train loss:5.401307 +step:472 train loss:5.303107 +step:473 train loss:5.419477 +step:474 train loss:5.372377 +step:475 train loss:5.373424 +step:476 train loss:5.357117 +step:477 train loss:5.321578 +step:478 train loss:5.314292 +step:479 train loss:5.311037 +step:480 train loss:5.329101 +step:481 train loss:5.370849 +step:482 train loss:5.281340 +step:483 train loss:5.341527 +step:484 train loss:5.273657 +step:485 train loss:5.229002 +step:486 train loss:5.283611 +step:487 train loss:5.238726 +step:488 train loss:5.245965 +step:489 train loss:5.250137 +step:490 train loss:5.230614 +step:491 train loss:5.248301 +step:492 train loss:5.251617 +step:493 train loss:5.258176 +step:494 train loss:5.259466 +step:495 train loss:5.226944 +step:496 train loss:5.348572 +step:497 train loss:5.193845 +step:498 train loss:5.324233 +step:499 train loss:5.280258 +step:500 validation loss:5.256884 total_sharp:1.5528e-03 L1_sharp:5.8456e-03 L2_sharp:5.5758e-04 L3_sharp:1.8101e-03 L4_sharp:9.5277e-04 L5_sharp:3.5458e-04 L6_sharp:1.6906e-04 L7_sharp:1.1850e-04 L8_sharp:1.1943e-04 L9_sharp:1.2439e-04 L10_sharp:1.7898e-04 L11_sharp:2.1097e-04 L12_sharp:2.6162e-04 total_fnorm:1.2852e+01 total_l1_linf:1.1030e+05 total_spectral:1.2852e+01 L1_fnorm:2.0412e+00 
L2_fnorm:2.3812e+00 L3_fnorm:2.4522e+00 L4_fnorm:2.5442e+00 L5_fnorm:2.6716e+00 L6_fnorm:2.7500e+00 L7_fnorm:2.8675e+00 L8_fnorm:2.9350e+00 L9_fnorm:2.9674e+00 L10_fnorm:3.0498e+00 L11_fnorm:2.8937e+00 L12_fnorm:2.8217e+00 L1_l1linf:5.9180e+00 L2_l1linf:2.6080e+00 L3_l1linf:2.7184e+00 L4_l1linf:3.0190e+00 L5_l1linf:3.2998e+00 L6_l1linf:3.1972e+00 L7_l1linf:3.0806e+00 L8_l1linf:3.8544e+00 L9_l1linf:4.1369e+00 L10_l1linf:4.9153e+00 L11_l1linf:4.5787e+00 L12_l1linf:3.8745e+00 L1_spectral:7.0331e-01 L2_spectral:4.0440e-01 L3_spectral:5.5241e-01 L4_spectral:4.6977e-01 L5_spectral:6.3360e-01 L6_spectral:6.1258e-01 L7_spectral:5.8366e-01 L8_spectral:5.7800e-01 L9_spectral:6.5054e-01 L10_spectral:5.8670e-01 L11_spectral:6.0128e-01 L12_spectral:6.1107e-01 ip_v_neg_g:1.6041e-01 cos_v_neg_g:2.0754e-02 v_norm:1.2852e+01 g_norm:6.0141e-01 hv_norm:5.9817e-01 cos_v_hv:3.3364e-02 hg_norm:5.6407e+00 cos_g_hg:5.6480e-01 v_par:1.6150e-02 v_perp:1.2852e+01 L1_cos_v_neg_g:1.8417e-01 L1_v_norm:2.0412e+00 L2_cos_v_neg_g:7.6112e-02 L2_v_norm:2.3812e+00 L3_cos_v_neg_g:8.8917e-02 L3_v_norm:2.4522e+00 L4_cos_v_neg_g:5.5631e-02 L4_v_norm:2.5442e+00 L5_cos_v_neg_g:5.8778e-02 L5_v_norm:2.6716e+00 L6_cos_v_neg_g:4.4706e-02 L6_v_norm:2.7500e+00 L7_cos_v_neg_g:3.4589e-02 L7_v_norm:2.8675e+00 L8_cos_v_neg_g:2.9178e-02 L8_v_norm:2.9350e+00 L9_cos_v_neg_g:3.4550e-02 L9_v_norm:2.9674e+00 L10_cos_v_neg_g:3.8494e-02 L10_v_norm:3.0498e+00 L11_cos_v_neg_g:4.3683e-02 L11_v_norm:2.8937e+00 L12_cos_v_neg_g:3.7328e-02 L12_v_norm:2.8217e+00 +step:500 train loss:5.277511 +step:501 train loss:5.253760 +step:502 train loss:5.287648 +step:503 train loss:5.198792 +step:504 train loss:5.288245 +step:505 train loss:5.221491 +step:506 train loss:5.221036 +step:507 train loss:5.243596 +step:508 train loss:5.250187 +step:509 train loss:5.246706 +step:510 train loss:5.172826 +step:511 train loss:5.176150 +step:512 train loss:5.172037 +step:513 train loss:5.177195 +step:514 train loss:5.260611 +step:515 train loss:5.220312 +step:516 train loss:5.335572 +step:517 train loss:5.266881 +step:518 train loss:5.248785 +step:519 train loss:5.313983 +step:520 train loss:5.244127 +step:521 train loss:5.250275 +step:522 train loss:5.284588 +step:523 train loss:5.285341 +step:524 train loss:5.225100 +step:525 train loss:5.223717 +step:526 train loss:5.261649 +step:527 train loss:5.273693 +step:528 train loss:5.320516 +step:529 train loss:5.381476 +step:530 train loss:5.323692 +step:531 train loss:5.336556 +step:532 train loss:5.279981 +step:533 train loss:5.255784 +step:534 train loss:5.337662 +step:535 train loss:5.333273 +step:536 train loss:5.377772 +step:537 train loss:5.236750 +step:538 train loss:5.204262 +step:539 train loss:5.308575 +step:540 train loss:5.313179 +step:541 train loss:5.211847 +step:542 train loss:5.229023 +step:543 train loss:5.234828 +step:544 train loss:5.225423 +step:545 train loss:5.194956 +step:546 train loss:5.159329 +step:547 train loss:5.161082 +step:548 train loss:5.130096 +step:549 train loss:5.189758 +step:550 train loss:5.145847 +step:551 train loss:5.161386 +step:552 train loss:5.254236 +step:553 train loss:5.210811 +step:554 train loss:5.131570 +step:555 train loss:5.181825 +step:556 train loss:5.125625 +step:557 train loss:5.108940 +step:558 train loss:5.067644 +step:559 train loss:5.129041 +step:560 train loss:5.185941 +step:561 train loss:5.065450 +step:562 train loss:5.081234 +step:563 train loss:5.180305 +step:564 train loss:5.132608 +step:565 train loss:5.131992 +step:566 train loss:5.129747 +step:567 train 
loss:5.121515 +step:568 train loss:5.161741 +step:569 train loss:5.127859 +step:570 train loss:5.059169 +step:571 train loss:5.109982 +step:572 train loss:5.115961 +step:573 train loss:5.129641 +step:574 train loss:5.178934 +step:575 train loss:5.156491 +step:576 train loss:5.130102 +step:577 train loss:5.140648 +step:578 train loss:5.122701 +step:579 train loss:5.170028 +step:580 train loss:5.099121 +step:581 train loss:5.149387 +step:582 train loss:5.104141 +step:583 train loss:5.119603 +step:584 train loss:5.098077 +step:585 train loss:5.074266 +step:586 train loss:5.075617 +step:587 train loss:5.142123 +step:588 train loss:5.060975 +step:589 train loss:5.122730 +step:590 train loss:5.146921 +step:591 train loss:5.068840 +step:592 train loss:5.032306 +step:593 train loss:5.049772 +step:594 train loss:5.018888 +step:595 train loss:5.062645 +step:596 train loss:5.054846 +step:597 train loss:5.103276 +step:598 train loss:5.063293 +step:599 train loss:5.091523 +step:600 train loss:5.130000 +step:601 train loss:5.080627 +step:602 train loss:5.111808 +step:603 train loss:5.198634 +step:604 train loss:5.205807 +step:605 train loss:5.251144 +step:606 train loss:5.179944 +step:607 train loss:5.186016 +step:608 train loss:5.233122 +step:609 train loss:5.183446 +step:610 train loss:5.177136 +step:611 train loss:5.212622 +step:612 train loss:5.258583 +step:613 train loss:5.129553 +step:614 train loss:5.206213 +step:615 train loss:5.267537 +step:616 train loss:5.146304 +step:617 train loss:5.162300 +step:618 train loss:5.123434 +step:619 train loss:5.163422 +step:620 train loss:5.170217 +step:621 train loss:5.084869 +step:622 train loss:5.151773 +step:623 train loss:5.136176 +step:624 train loss:5.120790 +step:625 train loss:5.126610 +step:626 train loss:5.130220 +step:627 train loss:5.099026 +step:628 train loss:5.076697 +step:629 train loss:5.017425 +step:630 train loss:5.040026 +step:631 train loss:5.040145 +step:632 train loss:5.027470 +step:633 train loss:5.070859 +step:634 train loss:5.075964 +step:635 train loss:4.987870 +step:636 train loss:5.075976 +step:637 train loss:4.983743 +step:638 train loss:4.916243 +step:639 train loss:5.046424 +step:640 train loss:4.994074 +step:641 train loss:5.010892 +step:642 train loss:5.038673 +step:643 train loss:4.936637 +step:644 train loss:5.015908 +step:645 train loss:4.970165 +step:646 train loss:4.973006 +step:647 train loss:5.005241 +step:648 train loss:5.100915 +step:649 train loss:5.006703 +step:650 train loss:5.072017 +step:651 train loss:4.947916 +step:652 train loss:4.985646 +step:653 train loss:4.981512 +step:654 train loss:5.026306 +step:655 train loss:5.072309 +step:656 train loss:4.988281 +step:657 train loss:5.073963 +step:658 train loss:4.998969 +step:659 train loss:5.075567 +step:660 train loss:5.033502 +step:661 train loss:5.071469 +step:662 train loss:5.067784 +step:663 train loss:5.092936 +step:664 train loss:4.977290 +step:665 train loss:5.017174 +step:666 train loss:4.982980 +step:667 train loss:5.045321 +step:668 train loss:5.005038 +step:669 train loss:4.975203 +step:670 train loss:4.991045 +step:671 train loss:4.955378 +step:672 train loss:4.917824 +step:673 train loss:5.000909 +step:674 train loss:4.997241 +step:675 train loss:4.910922 +step:676 train loss:4.991125 +step:677 train loss:4.919250 +step:678 train loss:4.907573 +step:679 train loss:4.967305 +step:680 train loss:4.936172 +step:681 train loss:5.014251 +step:682 train loss:4.946131 +step:683 train loss:5.009771 +step:684 train loss:5.067024 +step:685 train loss:5.081918 
+step:686 train loss:5.188659 +step:687 train loss:5.113762 +step:688 train loss:5.060712 +step:689 train loss:5.121777 +step:690 train loss:5.103427 +step:691 train loss:5.133235 +step:692 train loss:5.139208 +step:693 train loss:5.150578 +step:694 train loss:5.155026 +step:695 train loss:5.148039 +step:696 train loss:5.059998 +step:697 train loss:5.174162 +step:698 train loss:5.075156 +step:699 train loss:5.041207 +step:700 train loss:5.136156 +step:701 train loss:5.019616 +step:702 train loss:5.087492 +step:703 train loss:5.011621 +step:704 train loss:4.969149 +step:705 train loss:5.004940 +step:706 train loss:4.873370 +step:707 train loss:4.939406 +step:708 train loss:5.047907 +step:709 train loss:4.989041 +step:710 train loss:4.943241 +step:711 train loss:4.994072 +step:712 train loss:4.929048 +step:713 train loss:4.889337 +step:714 train loss:4.964481 +step:715 train loss:4.850224 +step:716 train loss:4.995414 +step:717 train loss:4.869226 +step:718 train loss:4.932404 +step:719 train loss:4.886197 +step:720 train loss:4.874660 +step:721 train loss:4.884912 +step:722 train loss:4.895557 +step:723 train loss:4.943718 +step:724 train loss:4.906740 +step:725 train loss:4.872343 +step:726 train loss:4.856915 +step:727 train loss:4.887297 +step:728 train loss:4.867367 +step:729 train loss:4.811697 +step:730 train loss:4.902431 +step:731 train loss:4.940304 +step:732 train loss:4.915713 +step:733 train loss:4.878147 +step:734 train loss:4.878056 +step:735 train loss:4.959731 +step:736 train loss:4.887079 +step:737 train loss:4.875843 +step:738 train loss:4.900778 +step:739 train loss:4.840279 +step:740 train loss:4.864744 +step:741 train loss:4.932526 +step:742 train loss:4.838601 +step:743 train loss:4.825428 +step:744 train loss:4.880296 +step:745 train loss:4.819032 +step:746 train loss:4.841343 +step:747 train loss:4.866810 +step:748 train loss:4.842932 +step:749 train loss:4.890846 +step:750 validation loss:4.840802 +step:750 train loss:4.826691 +step:751 train loss:4.856854 +step:752 train loss:4.790665 +step:753 train loss:4.848134 +step:754 train loss:4.845474 +step:755 train loss:4.894796 +step:756 train loss:4.854229 +step:757 train loss:4.934247 +step:758 train loss:4.823618 +step:759 train loss:4.826208 +step:760 train loss:4.793843 +step:761 train loss:4.833071 +step:762 train loss:4.804904 +step:763 train loss:4.859569 +step:764 train loss:4.820339 +step:765 train loss:4.819694 +step:766 train loss:4.891619 +step:767 train loss:5.005405 +step:768 train loss:4.836907 +step:769 train loss:4.862835 +step:770 train loss:4.898455 +step:771 train loss:4.935987 +step:772 train loss:4.853325 +step:773 train loss:4.785898 +step:774 train loss:4.832172 +step:775 train loss:4.787062 +step:776 train loss:4.812238 +step:777 train loss:4.756855 +step:778 train loss:4.738112 +step:779 train loss:4.726135 +step:780 train loss:4.789918 +step:781 train loss:4.726295 +step:782 train loss:4.747256 +step:783 train loss:4.734074 +step:784 train loss:4.754230 +step:785 train loss:4.743315 +step:786 train loss:4.755006 +step:787 train loss:4.706965 +step:788 train loss:4.762588 +step:789 train loss:4.737068 +step:790 train loss:4.695625 +step:791 train loss:4.769982 +step:792 train loss:4.774724 +step:793 train loss:4.731366 +step:794 train loss:4.715689 +step:795 train loss:4.680937 +step:796 train loss:4.948247 +step:797 train loss:4.790343 +step:798 train loss:4.798719 +step:799 train loss:4.914465 +step:800 train loss:5.067670 +step:801 train loss:4.965338 +step:802 train loss:5.030481 +step:803 
train loss:4.911943 +step:804 train loss:4.959329 +step:805 train loss:5.082479 +step:806 train loss:4.999744 +step:807 train loss:5.354280 +step:808 train loss:5.147858 +step:809 train loss:5.115977 +step:810 train loss:5.044142 +step:811 train loss:5.130454 +step:812 train loss:5.102392 +step:813 train loss:5.038638 +step:814 train loss:5.077550 +step:815 train loss:5.093392 +step:816 train loss:5.010508 +step:817 train loss:4.960492 +step:818 train loss:4.919292 +step:819 train loss:4.885369 +step:820 train loss:4.881527 +step:821 train loss:4.803426 +step:822 train loss:4.782227 +step:823 train loss:4.856913 +step:824 train loss:4.751154 +step:825 train loss:4.736155 +step:826 train loss:4.841955 +step:827 train loss:4.769589 +step:828 train loss:4.826080 +step:829 train loss:4.828142 +step:830 train loss:4.847466 +step:831 train loss:4.843001 +step:832 train loss:4.878003 +step:833 train loss:4.818675 +step:834 train loss:4.790863 +step:835 train loss:4.755990 +step:836 train loss:4.732883 +step:837 train loss:4.702937 +step:838 train loss:4.697429 +step:839 train loss:4.694741 +step:840 train loss:4.736564 +step:841 train loss:4.699362 +step:842 train loss:4.693031 +step:843 train loss:4.687701 +step:844 train loss:4.650522 +step:845 train loss:4.627288 +step:846 train loss:4.717780 +step:847 train loss:4.677258 +step:848 train loss:4.632476 +step:849 train loss:4.675725 +step:850 train loss:4.676098 +step:851 train loss:4.633605 +step:852 train loss:4.704941 +step:853 train loss:4.596313 +step:854 train loss:4.640477 +step:855 train loss:4.649084 +step:856 train loss:4.608791 +step:857 train loss:4.659689 +step:858 train loss:4.698739 +step:859 train loss:4.627769 +step:860 train loss:4.649230 +step:861 train loss:4.675571 +step:862 train loss:4.621665 +step:863 train loss:4.633218 +step:864 train loss:4.614177 +step:865 train loss:4.622663 +step:866 train loss:4.646917 +step:867 train loss:4.744343 +step:868 train loss:4.615877 +step:869 train loss:4.626794 +step:870 train loss:4.584498 +step:871 train loss:4.569679 +step:872 train loss:4.623108 +step:873 train loss:4.598116 +step:874 train loss:4.612265 +step:875 train loss:4.512535 +step:876 train loss:4.613748 +step:877 train loss:4.539990 +step:878 train loss:4.636179 +step:879 train loss:4.545022 +step:880 train loss:4.645730 +step:881 train loss:4.597198 +step:882 train loss:4.554631 +step:883 train loss:4.595031 +step:884 train loss:4.613208 +step:885 train loss:4.566029 +step:886 train loss:4.563210 +step:887 train loss:4.582473 +step:888 train loss:4.673522 +step:889 train loss:4.605997 +step:890 train loss:4.555437 +step:891 train loss:4.516111 +step:892 train loss:4.521881 +step:893 train loss:4.615942 +step:894 train loss:4.579297 +step:895 train loss:4.565498 +step:896 train loss:4.645856 +step:897 train loss:4.585508 +step:898 train loss:4.611609 +step:899 train loss:4.606520 +step:900 train loss:4.654022 +step:901 train loss:4.601525 +step:902 train loss:4.608124 +step:903 train loss:4.691532 +step:904 train loss:4.691232 +step:905 train loss:4.569918 +step:906 train loss:4.577196 +step:907 train loss:4.594296 +step:908 train loss:4.600022 +step:909 train loss:4.545074 +step:910 train loss:4.571090 +step:911 train loss:4.702752 +step:912 train loss:4.505044 +step:913 train loss:4.570029 +step:914 train loss:4.535665 +step:915 train loss:4.561864 +step:916 train loss:4.615462 +step:917 train loss:4.562840 +step:918 train loss:4.629265 +step:919 train loss:4.696318 +step:920 train loss:4.454016 +step:921 train 
loss:4.586705 +step:922 train loss:4.565952 +step:923 train loss:4.485920 +step:924 train loss:4.518143 +step:925 train loss:4.472595 +step:926 train loss:4.568372 +step:927 train loss:4.476549 +step:928 train loss:4.559431 +step:929 train loss:4.534668 +step:930 train loss:4.533081 +step:931 train loss:4.571633 +step:932 train loss:4.503838 +step:933 train loss:4.541143 +step:934 train loss:4.581877 +step:935 train loss:4.560833 +step:936 train loss:4.534050 +step:937 train loss:4.531538 +step:938 train loss:4.519596 +step:939 train loss:4.407461 +step:940 train loss:4.511095 +step:941 train loss:4.456647 +step:942 train loss:4.448830 +step:943 train loss:4.546679 +step:944 train loss:4.492412 +step:945 train loss:4.491290 +step:946 train loss:4.527390 +step:947 train loss:4.687901 +step:948 train loss:4.528933 +step:949 train loss:4.604478 +step:950 train loss:4.534242 +step:951 train loss:4.570312 +step:952 train loss:4.691653 +step:953 train loss:4.605063 +step:954 train loss:4.628795 +step:955 train loss:4.572165 +step:956 train loss:4.614543 +step:957 train loss:4.609389 +step:958 train loss:4.659487 +step:959 train loss:4.594244 +step:960 train loss:4.673701 +step:961 train loss:4.617042 +step:962 train loss:4.566159 +step:963 train loss:4.547713 +step:964 train loss:4.574003 +step:965 train loss:4.494360 +step:966 train loss:4.488143 +step:967 train loss:4.540431 +step:968 train loss:4.531444 +step:969 train loss:4.480758 +step:970 train loss:4.535697 +step:971 train loss:4.504383 +step:972 train loss:4.426052 +step:973 train loss:4.513731 +step:974 train loss:4.452350 +step:975 train loss:4.521756 +step:976 train loss:4.482773 +step:977 train loss:4.473638 +step:978 train loss:4.474906 +step:979 train loss:4.454330 +step:980 train loss:4.457312 +step:981 train loss:4.433716 +step:982 train loss:4.441866 +step:983 train loss:4.450635 +step:984 train loss:4.479975 +step:985 train loss:4.443284 +step:986 train loss:4.471624 +step:987 train loss:4.506388 +step:988 train loss:4.483086 +step:989 train loss:4.452770 +step:990 train loss:4.442513 +step:991 train loss:4.376188 +step:992 train loss:4.438632 +step:993 train loss:4.462739 +step:994 train loss:4.401132 +step:995 train loss:4.410662 +step:996 train loss:4.456188 +step:997 train loss:4.421207 +step:998 train loss:4.428001 +step:999 train loss:4.462381 +step:1000 validation loss:4.400223 total_sharp:4.6780e-04 L1_sharp:4.5911e-04 L2_sharp:7.2033e-05 L3_sharp:2.5482e-04 L4_sharp:1.3898e-04 L5_sharp:9.1653e-05 L6_sharp:3.8002e-05 L7_sharp:2.5111e-05 L8_sharp:6.3561e-05 L9_sharp:1.0671e-04 L10_sharp:1.3898e-04 L11_sharp:1.4954e-04 L12_sharp:3.8797e-04 total_fnorm:1.7753e+01 total_l1_linf:1.5103e+05 total_spectral:1.7753e+01 L1_fnorm:3.3304e+00 L2_fnorm:3.2342e+00 L3_fnorm:2.9158e+00 L4_fnorm:3.2012e+00 L5_fnorm:3.5043e+00 L6_fnorm:3.6284e+00 L7_fnorm:3.8501e+00 L8_fnorm:3.8417e+00 L9_fnorm:3.9744e+00 L10_fnorm:4.0328e+00 L11_fnorm:4.0930e+00 L12_fnorm:3.9101e+00 L1_l1linf:5.4090e+00 L2_l1linf:4.3144e+00 L3_l1linf:4.5310e+00 L4_l1linf:4.2926e+00 L5_l1linf:4.4777e+00 L6_l1linf:4.3475e+00 L7_l1linf:4.9712e+00 L8_l1linf:5.0307e+00 L9_l1linf:5.2821e+00 L10_l1linf:5.7957e+00 L11_l1linf:7.5554e+00 L12_l1linf:7.0274e+00 L1_spectral:7.6890e-01 L2_spectral:6.7769e-01 L3_spectral:5.8496e-01 L4_spectral:5.7492e-01 L5_spectral:6.4107e-01 L6_spectral:5.9429e-01 L7_spectral:6.6993e-01 L8_spectral:7.2837e-01 L9_spectral:8.3340e-01 L10_spectral:9.3957e-01 L11_spectral:9.8750e-01 L12_spectral:9.9878e-01 ip_v_neg_g:8.3185e-02 cos_v_neg_g:1.0133e-02 
v_norm:1.7753e+01 g_norm:4.6242e-01 hv_norm:4.5060e-01 cos_v_hv:1.8431e-02 hg_norm:2.9104e+00 cos_g_hg:5.9306e-01 v_par:1.8939e-02 v_perp:1.7753e+01 L1_cos_v_neg_g:8.1742e-02 L1_v_norm:3.3304e+00 L2_cos_v_neg_g:3.1815e-02 L2_v_norm:3.2342e+00 L3_cos_v_neg_g:4.0211e-02 L3_v_norm:2.9158e+00 L4_cos_v_neg_g:3.4391e-02 L4_v_norm:3.2012e+00 L5_cos_v_neg_g:3.4283e-02 L5_v_norm:3.5043e+00 L6_cos_v_neg_g:2.3101e-02 L6_v_norm:3.6284e+00 L7_cos_v_neg_g:1.8772e-02 L7_v_norm:3.8501e+00 L8_cos_v_neg_g:2.7604e-02 L8_v_norm:3.8417e+00 L9_cos_v_neg_g:3.2618e-02 L9_v_norm:3.9744e+00 L10_cos_v_neg_g:3.8703e-02 L10_v_norm:4.0328e+00 L11_cos_v_neg_g:3.5414e-02 L11_v_norm:4.0930e+00 L12_cos_v_neg_g:4.8419e-02 L12_v_norm:3.9101e+00 +step:1000 train loss:4.469402 +step:1001 train loss:4.464935 +step:1002 train loss:4.455595 +step:1003 train loss:4.434402 +step:1004 train loss:4.408016 +step:1005 train loss:4.415012 +step:1006 train loss:4.492220 +step:1007 train loss:4.433957 +step:1008 train loss:4.409951 +step:1009 train loss:4.483935 +step:1010 train loss:4.444534 +step:1011 train loss:4.466014 +step:1012 train loss:4.412441 +step:1013 train loss:4.383463 +step:1014 train loss:4.390486 +step:1015 train loss:4.421669 +step:1016 train loss:4.441999 +step:1017 train loss:4.409322 +step:1018 train loss:4.483801 +step:1019 train loss:4.424219 +step:1020 train loss:4.423278 +step:1021 train loss:4.503685 +step:1022 train loss:4.410529 +step:1023 train loss:4.416411 +step:1024 train loss:4.495754 +step:1025 train loss:4.451600 +step:1026 train loss:4.393963 +step:1027 train loss:4.437870 +step:1028 train loss:4.445325 +step:1029 train loss:4.377920 +step:1030 train loss:4.457629 +step:1031 train loss:4.444170 +step:1032 train loss:4.403623 +step:1033 train loss:4.368599 +step:1034 train loss:4.444088 +step:1035 train loss:4.511819 +step:1036 train loss:4.448753 +step:1037 train loss:4.500966 +step:1038 train loss:4.539247 +step:1039 train loss:4.684290 +step:1040 train loss:4.513465 +step:1041 train loss:4.502317 +step:1042 train loss:4.531299 +step:1043 train loss:4.520102 +step:1044 train loss:4.479527 +step:1045 train loss:4.477022 +step:1046 train loss:4.417983 +step:1047 train loss:4.457443 +step:1048 train loss:4.516404 +step:1049 train loss:4.610904 +step:1050 train loss:4.691599 +step:1051 train loss:4.565030 +step:1052 train loss:4.736175 +step:1053 train loss:4.682347 +step:1054 train loss:4.727929 +step:1055 train loss:4.761264 +step:1056 train loss:4.643146 +step:1057 train loss:4.495088 +step:1058 train loss:4.577279 +step:1059 train loss:4.554447 +step:1060 train loss:4.525686 +step:1061 train loss:4.573227 +step:1062 train loss:4.518548 +step:1063 train loss:4.514084 +step:1064 train loss:4.486544 +step:1065 train loss:4.487660 +step:1066 train loss:4.454491 +step:1067 train loss:4.474662 +step:1068 train loss:4.430944 +step:1069 train loss:4.441977 +step:1070 train loss:4.442694 +step:1071 train loss:4.449970 +step:1072 train loss:4.471864 +step:1073 train loss:4.383207 +step:1074 train loss:4.394661 +step:1075 train loss:4.394053 +step:1076 train loss:4.460032 +step:1077 train loss:4.385621 +step:1078 train loss:4.430171 +step:1079 train loss:4.470342 +step:1080 train loss:4.344381 +step:1081 train loss:4.410256 +step:1082 train loss:4.401394 +step:1083 train loss:4.360742 +step:1084 train loss:4.334679 +step:1085 train loss:4.392679 +step:1086 train loss:4.384775 +step:1087 train loss:4.361525 +step:1088 train loss:4.361650 +step:1089 train loss:4.370456 +step:1090 train loss:4.315146 +step:1091 
train loss:4.302826 +step:1092 train loss:4.413674 +step:1093 train loss:4.305801 +step:1094 train loss:4.360377 +step:1095 train loss:4.400104 +step:1096 train loss:4.341598 +step:1097 train loss:4.342359 +step:1098 train loss:4.315976 +step:1099 train loss:4.367114 +step:1100 train loss:4.435324 +step:1101 train loss:4.420627 +step:1102 train loss:4.429354 +step:1103 train loss:4.344746 +step:1104 train loss:4.377917 +step:1105 train loss:4.431685 +step:1106 train loss:4.358725 +step:1107 train loss:4.476171 +step:1108 train loss:4.414967 +step:1109 train loss:4.379914 +step:1110 train loss:4.335809 +step:1111 train loss:4.384933 +step:1112 train loss:4.295766 +step:1113 train loss:4.280308 +step:1114 train loss:4.271751 +step:1115 train loss:4.314022 +step:1116 train loss:4.378498 +step:1117 train loss:4.400556 +step:1118 train loss:4.426114 +step:1119 train loss:4.349861 +step:1120 train loss:4.368560 +step:1121 train loss:4.369885 +step:1122 train loss:4.353324 +step:1123 train loss:4.447050 +step:1124 train loss:4.329917 +step:1125 train loss:4.348704 +step:1126 train loss:4.312973 +step:1127 train loss:4.328458 +step:1128 train loss:4.334294 +step:1129 train loss:4.380370 +step:1130 train loss:4.302862 +step:1131 train loss:4.387988 +step:1132 train loss:4.334790 +step:1133 train loss:4.341972 +step:1134 train loss:4.306337 +step:1135 train loss:4.353640 +step:1136 train loss:4.366806 +step:1137 train loss:4.290129 +step:1138 train loss:4.364134 +step:1139 train loss:4.321005 +step:1140 train loss:4.403510 +step:1141 train loss:4.361035 +step:1142 train loss:4.298886 +step:1143 train loss:4.372613 +step:1144 train loss:4.393546 +step:1145 train loss:4.342105 +step:1146 train loss:4.285677 +step:1147 train loss:4.298445 +step:1148 train loss:4.327625 +step:1149 train loss:4.379282 +step:1150 train loss:4.421380 +step:1151 train loss:4.431612 +step:1152 train loss:4.334776 +step:1153 train loss:4.334471 +step:1154 train loss:4.304255 +step:1155 train loss:4.405940 +step:1156 train loss:4.304538 +step:1157 train loss:4.331467 +step:1158 train loss:4.382481 +step:1159 train loss:4.377202 +step:1160 train loss:4.305601 +step:1161 train loss:4.391861 +step:1162 train loss:4.332520 +step:1163 train loss:4.309888 +step:1164 train loss:4.222449 +step:1165 train loss:4.360240 +step:1166 train loss:4.285873 +step:1167 train loss:4.286726 +step:1168 train loss:4.339563 +step:1169 train loss:4.296043 +step:1170 train loss:4.300438 +step:1171 train loss:4.326646 +step:1172 train loss:4.289095 +step:1173 train loss:4.314676 +step:1174 train loss:4.252544 +step:1175 train loss:4.284828 +step:1176 train loss:4.397614 +step:1177 train loss:4.250964 +step:1178 train loss:4.312778 +step:1179 train loss:4.271267 +step:1180 train loss:4.297789 +step:1181 train loss:4.280666 +step:1182 train loss:4.346049 +step:1183 train loss:4.326694 +step:1184 train loss:4.270598 +step:1185 train loss:4.303709 +step:1186 train loss:4.299189 +step:1187 train loss:4.254385 +step:1188 train loss:4.292189 +step:1189 train loss:4.217404 +step:1190 train loss:4.286119 +step:1191 train loss:4.341516 +step:1192 train loss:4.293758 +step:1193 train loss:4.299166 +step:1194 train loss:4.424933 +step:1195 train loss:4.394898 +step:1196 train loss:4.286645 +step:1197 train loss:4.299553 +step:1198 train loss:4.279230 +step:1199 train loss:4.282924 +step:1200 train loss:4.333903 +step:1201 train loss:4.308504 +step:1202 train loss:4.250097 +step:1203 train loss:4.243200 +step:1204 train loss:4.274728 +step:1205 train loss:4.279602 
+step:1206 train loss:4.234616 +step:1207 train loss:4.332935 +step:1208 train loss:4.302336 +step:1209 train loss:4.221689 +step:1210 train loss:4.311579 +step:1211 train loss:4.260309 +step:1212 train loss:4.282657 +step:1213 train loss:4.211750 +step:1214 train loss:4.300902 +step:1215 train loss:4.265265 +step:1216 train loss:4.274239 +step:1217 train loss:4.221567 +step:1218 train loss:4.283883 +step:1219 train loss:4.225197 +step:1220 train loss:4.264178 +step:1221 train loss:4.283601 +step:1222 train loss:4.324006 +step:1223 train loss:4.297733 +step:1224 train loss:4.264494 +step:1225 train loss:4.314865 +step:1226 train loss:4.259294 +step:1227 train loss:4.262320 +step:1228 train loss:4.271450 +step:1229 train loss:4.235989 +step:1230 train loss:4.229496 +step:1231 train loss:4.278688 +step:1232 train loss:4.234288 +step:1233 train loss:4.228423 +step:1234 train loss:4.311904 +step:1235 train loss:4.287930 +step:1236 train loss:4.199707 +step:1237 train loss:4.320156 +step:1238 train loss:4.270455 +step:1239 train loss:4.299007 +step:1240 train loss:4.209498 +step:1241 train loss:4.250349 +step:1242 train loss:4.269024 +step:1243 train loss:4.212923 +step:1244 train loss:4.322028 +step:1245 train loss:4.341218 +step:1246 train loss:4.270072 +step:1247 train loss:4.246369 +step:1248 train loss:4.270103 +step:1249 train loss:4.204222 +step:1250 validation loss:4.199530 +step:1250 train loss:4.211847 +step:1251 train loss:4.279225 +step:1252 train loss:4.235393 +step:1253 train loss:4.196221 +step:1254 train loss:4.236240 +step:1255 train loss:4.228511 +step:1256 train loss:4.278779 +step:1257 train loss:4.258340 +step:1258 train loss:4.299836 +step:1259 train loss:4.293437 +step:1260 train loss:4.186913 +step:1261 train loss:4.424042 +step:1262 train loss:4.275888 +step:1263 train loss:4.229866 +step:1264 train loss:4.236737 +step:1265 train loss:4.283743 +step:1266 train loss:4.225922 +step:1267 train loss:4.232747 +step:1268 train loss:4.245364 +step:1269 train loss:4.233799 +step:1270 train loss:4.163158 +step:1271 train loss:4.170509 +step:1272 train loss:4.196120 +step:1273 train loss:4.254641 +step:1274 train loss:4.220610 +step:1275 train loss:4.250069 +step:1276 train loss:4.249848 +step:1277 train loss:4.258712 +step:1278 train loss:4.195263 +step:1279 train loss:4.206005 +step:1280 train loss:4.223289 +step:1281 train loss:4.274536 +step:1282 train loss:4.195941 +step:1283 train loss:4.275779 +step:1284 train loss:4.218048 +step:1285 train loss:4.262339 +step:1286 train loss:4.162392 +step:1287 train loss:4.203043 +step:1288 train loss:4.231882 +step:1289 train loss:4.288097 +step:1290 train loss:4.242999 +step:1291 train loss:4.213886 +step:1292 train loss:4.195067 +step:1293 train loss:4.183055 +step:1294 train loss:4.240334 +step:1295 train loss:4.224316 +step:1296 train loss:4.274353 +step:1297 train loss:4.228087 +step:1298 train loss:4.247491 +step:1299 train loss:4.282564 +step:1300 train loss:4.200509 +step:1301 train loss:4.239114 +step:1302 train loss:4.200501 +step:1303 train loss:4.236919 +step:1304 train loss:4.262339 +step:1305 train loss:4.236519 +step:1306 train loss:4.228584 +step:1307 train loss:4.215860 +step:1308 train loss:4.175148 +step:1309 train loss:4.201618 +step:1310 train loss:4.185939 +step:1311 train loss:4.192523 +step:1312 train loss:4.258028 +step:1313 train loss:4.174271 +step:1314 train loss:4.180631 +step:1315 train loss:4.225926 +step:1316 train loss:4.189899 +step:1317 train loss:4.083244 +step:1318 train loss:4.237855 +step:1319 
train loss:4.272772 +step:1320 train loss:4.186355 +step:1321 train loss:4.165885 +step:1322 train loss:4.271190 +step:1323 train loss:4.218817 +step:1324 train loss:4.315359 +step:1325 train loss:4.204270 +step:1326 train loss:4.250803 +step:1327 train loss:4.277231 +step:1328 train loss:4.174285 +step:1329 train loss:4.206614 +step:1330 train loss:4.221478 +step:1331 train loss:4.097311 +step:1332 train loss:4.261130 +step:1333 train loss:4.222416 +step:1334 train loss:4.229793 +step:1335 train loss:4.250892 +step:1336 train loss:4.256216 +step:1337 train loss:4.222935 +step:1338 train loss:4.203495 +step:1339 train loss:4.279163 +step:1340 train loss:4.242344 +step:1341 train loss:4.226569 +step:1342 train loss:4.187423 +step:1343 train loss:4.177629 +step:1344 train loss:4.250940 +step:1345 train loss:4.206191 +step:1346 train loss:4.285145 +step:1347 train loss:4.223086 +step:1348 train loss:4.191252 +step:1349 train loss:4.136595 +step:1350 train loss:4.159822 +step:1351 train loss:4.225838 +step:1352 train loss:4.192895 +step:1353 train loss:4.170377 +step:1354 train loss:4.173625 +step:1355 train loss:4.244461 +step:1356 train loss:4.156065 +step:1357 train loss:4.181389 +step:1358 train loss:4.177558 +step:1359 train loss:4.172452 +step:1360 train loss:4.207808 +step:1361 train loss:4.327360 +step:1362 train loss:4.243246 +step:1363 train loss:4.125405 +step:1364 train loss:4.147461 +step:1365 train loss:4.141669 +step:1366 train loss:4.178134 +step:1367 train loss:4.110333 +step:1368 train loss:4.145628 +step:1369 train loss:4.184899 +step:1370 train loss:4.199358 +step:1371 train loss:4.157536 +step:1372 train loss:4.181850 +step:1373 train loss:4.218770 +step:1374 train loss:4.233239 +step:1375 train loss:4.180356 +step:1376 train loss:4.204651 +step:1377 train loss:4.225696 +step:1378 train loss:4.225229 +step:1379 train loss:4.243274 +step:1380 train loss:4.316860 +step:1381 train loss:4.248354 +step:1382 train loss:4.225240 +step:1383 train loss:4.211540 +step:1384 train loss:4.304844 +step:1385 train loss:4.186801 +step:1386 train loss:4.258112 +step:1387 train loss:4.240719 +step:1388 train loss:4.200922 +step:1389 train loss:4.169200 +step:1390 train loss:4.200127 +step:1391 train loss:4.226888 +step:1392 train loss:4.201500 +step:1393 train loss:4.256551 +step:1394 train loss:4.177159 +step:1395 train loss:4.214742 +step:1396 train loss:4.196569 +step:1397 train loss:4.211715 +step:1398 train loss:4.212699 +step:1399 train loss:4.183345 +step:1400 train loss:4.160921 +step:1401 train loss:4.152606 +step:1402 train loss:4.161349 +step:1403 train loss:4.117960 +step:1404 train loss:4.182805 +step:1405 train loss:4.138192 +step:1406 train loss:4.163587 +step:1407 train loss:4.157812 +step:1408 train loss:4.138446 +step:1409 train loss:4.125210 +step:1410 train loss:4.143486 +step:1411 train loss:4.175076 +step:1412 train loss:4.235201 +step:1413 train loss:4.152834 +step:1414 train loss:4.179193 +step:1415 train loss:4.139357 +step:1416 train loss:4.191476 +step:1417 train loss:4.164253 +step:1418 train loss:4.099619 +step:1419 train loss:4.101942 +step:1420 train loss:4.133663 +step:1421 train loss:4.172895 +step:1422 train loss:4.147120 +step:1423 train loss:4.243160 +step:1424 train loss:4.143590 +step:1425 train loss:4.109797 +step:1426 train loss:4.134600 +step:1427 train loss:4.128038 +step:1428 train loss:4.114990 +step:1429 train loss:4.140450 +step:1430 train loss:4.150738 +step:1431 train loss:4.177831 +step:1432 train loss:4.169676 +step:1433 train loss:4.150460 
+step:1434 train loss:4.124862 +step:1435 train loss:4.112988 +step:1436 train loss:4.192792 +step:1437 train loss:4.110284 +step:1438 train loss:4.114430 +step:1439 train loss:4.095911 +step:1440 train loss:4.131611 +step:1441 train loss:4.202840 +step:1442 train loss:4.164992 +step:1443 train loss:4.091457 +step:1444 train loss:4.103463 +step:1445 train loss:4.107559 +step:1446 train loss:4.137062 +step:1447 train loss:4.150891 +step:1448 train loss:4.116153 +step:1449 train loss:4.145508 +step:1450 train loss:4.154432 +step:1451 train loss:4.080888 +step:1452 train loss:4.131306 +step:1453 train loss:4.128873 +step:1454 train loss:4.125468 +step:1455 train loss:4.060525 +step:1456 train loss:4.140570 +step:1457 train loss:4.078754 +step:1458 train loss:4.222309 +step:1459 train loss:4.144991 +step:1460 train loss:4.110740 +step:1461 train loss:4.164768 +step:1462 train loss:4.167292 +step:1463 train loss:4.124235 +step:1464 train loss:4.114370 +step:1465 train loss:4.104620 +step:1466 train loss:4.065979 +step:1467 train loss:4.204720 +step:1468 train loss:4.094057 +step:1469 train loss:4.174755 +step:1470 train loss:4.108270 +step:1471 train loss:4.108647 +step:1472 train loss:4.113344 +step:1473 train loss:4.107949 +step:1474 train loss:4.046432 +step:1475 train loss:4.110393 +step:1476 train loss:4.193577 +step:1477 train loss:4.145401 +step:1478 train loss:4.080944 +step:1479 train loss:4.108509 +step:1480 train loss:4.114190 +step:1481 train loss:4.079259 +step:1482 train loss:4.141047 +step:1483 train loss:4.130957 +step:1484 train loss:4.154177 +step:1485 train loss:4.164345 +step:1486 train loss:4.106479 +step:1487 train loss:4.093407 +step:1488 train loss:4.100162 +step:1489 train loss:4.095339 +step:1490 train loss:4.151080 +step:1491 train loss:4.140516 +step:1492 train loss:4.143798 +step:1493 train loss:4.101192 +step:1494 train loss:4.137030 +step:1495 train loss:4.114305 +step:1496 train loss:4.076701 +step:1497 train loss:4.146578 +step:1498 train loss:4.050707 +step:1499 train loss:4.090375 +step:1500 validation loss:4.065738 total_sharp:2.2859e-04 L1_sharp:1.1977e-04 L2_sharp:1.4587e-05 L3_sharp:1.8757e-05 L4_sharp:2.1018e-05 L5_sharp:3.0604e-05 L6_sharp:2.7544e-05 L7_sharp:2.8280e-05 L8_sharp:4.4327e-05 L9_sharp:6.7747e-05 L10_sharp:6.0138e-05 L11_sharp:7.7637e-05 L12_sharp:2.6965e-04 total_fnorm:1.9380e+01 total_l1_linf:1.6735e+05 total_spectral:1.9380e+01 L1_fnorm:4.0341e+00 L2_fnorm:3.8995e+00 L3_fnorm:3.7422e+00 L4_fnorm:3.9790e+00 L5_fnorm:4.2003e+00 L6_fnorm:4.3929e+00 L7_fnorm:4.5252e+00 L8_fnorm:4.4633e+00 L9_fnorm:4.4246e+00 L10_fnorm:4.4073e+00 L11_fnorm:4.3830e+00 L12_fnorm:4.1032e+00 L1_l1linf:4.4537e+00 L2_l1linf:4.5614e+00 L3_l1linf:4.7017e+00 L4_l1linf:4.2599e+00 L5_l1linf:4.6898e+00 L6_l1linf:4.6234e+00 L7_l1linf:5.1196e+00 L8_l1linf:5.2375e+00 L9_l1linf:5.6548e+00 L10_l1linf:5.5676e+00 L11_l1linf:5.5035e+00 L12_l1linf:5.9097e+00 L1_spectral:6.1424e-01 L2_spectral:5.7561e-01 L3_spectral:6.0900e-01 L4_spectral:6.3331e-01 L5_spectral:6.5097e-01 L6_spectral:6.8969e-01 L7_spectral:6.6483e-01 L8_spectral:7.3149e-01 L9_spectral:7.8066e-01 L10_spectral:7.8863e-01 L11_spectral:8.2249e-01 L12_spectral:8.6640e-01 ip_v_neg_g:4.4298e-02 cos_v_neg_g:5.1090e-03 v_norm:1.9380e+01 g_norm:4.4740e-01 hv_norm:4.0919e-01 cos_v_hv:1.0826e-02 hg_norm:4.2437e+00 cos_g_hg:5.5327e-01 v_par:1.1278e-02 v_perp:1.9380e+01 L1_cos_v_neg_g:3.0242e-02 L1_v_norm:4.0341e+00 L2_cos_v_neg_g:1.2599e-02 L2_v_norm:3.8995e+00 L3_cos_v_neg_g:1.0969e-02 L3_v_norm:3.7422e+00 
L4_cos_v_neg_g:1.3326e-02 L4_v_norm:3.9790e+00 L5_cos_v_neg_g:1.6726e-02 L5_v_norm:4.2003e+00 L6_cos_v_neg_g:1.4107e-02 L6_v_norm:4.3929e+00 L7_cos_v_neg_g:1.5430e-02 L7_v_norm:4.5252e+00 L8_cos_v_neg_g:1.6992e-02 L8_v_norm:4.4633e+00 L9_cos_v_neg_g:2.0769e-02 L9_v_norm:4.4246e+00 L10_cos_v_neg_g:2.2194e-02 L10_v_norm:4.4073e+00 L11_cos_v_neg_g:2.6408e-02 L11_v_norm:4.3830e+00 L12_cos_v_neg_g:3.1911e-02 L12_v_norm:4.1032e+00 +step:1500 train loss:4.087751 +step:1501 train loss:4.107284 +step:1502 train loss:4.045575 +step:1503 train loss:4.096853 +step:1504 train loss:4.066655 +step:1505 train loss:4.039565 +step:1506 train loss:4.031533 +step:1507 train loss:4.052954 +step:1508 train loss:4.061627 +step:1509 train loss:4.109463 +step:1510 train loss:4.052522 +step:1511 train loss:4.082569 +step:1512 train loss:4.055174 +step:1513 train loss:4.121294 +step:1514 train loss:4.072530 +step:1515 train loss:4.132487 +step:1516 train loss:4.060596 +step:1517 train loss:4.070937 +step:1518 train loss:4.152563 +step:1519 train loss:4.114042 +step:1520 train loss:4.164574 +step:1521 train loss:4.105097 +step:1522 train loss:4.156237 +step:1523 train loss:4.192607 +step:1524 train loss:4.186799 +step:1525 train loss:4.208274 +step:1526 train loss:4.118016 +step:1527 train loss:4.191796 +step:1528 train loss:4.224609 +step:1529 train loss:4.160815 +step:1530 train loss:4.205449 +step:1531 train loss:4.130106 +step:1532 train loss:4.241880 +step:1533 train loss:4.206367 +step:1534 train loss:4.397426 +step:1535 train loss:4.330468 +step:1536 train loss:4.319099 +step:1537 train loss:4.319292 +step:1538 train loss:4.397535 +step:1539 train loss:4.332569 +step:1540 train loss:4.407266 +step:1541 train loss:4.299129 +step:1542 train loss:4.340759 +step:1543 train loss:4.346950 +step:1544 train loss:4.206604 +step:1545 train loss:4.166346 +step:1546 train loss:4.196704 +step:1547 train loss:4.175922 +step:1548 train loss:4.212640 +step:1549 train loss:4.132805 +step:1550 train loss:4.235151 +step:1551 train loss:4.165476 +step:1552 train loss:4.189057 +step:1553 train loss:4.189382 +step:1554 train loss:4.191782 +step:1555 train loss:4.143179 +step:1556 train loss:4.121184 +step:1557 train loss:4.124566 +step:1558 train loss:4.141305 +step:1559 train loss:4.104807 +step:1560 train loss:4.181450 +step:1561 train loss:4.151825 +step:1562 train loss:4.041399 +step:1563 train loss:4.010462 +step:1564 train loss:4.144951 +step:1565 train loss:4.122784 +step:1566 train loss:4.133097 +step:1567 train loss:4.132238 +step:1568 train loss:4.082338 +step:1569 train loss:4.075933 +step:1570 train loss:4.090605 +step:1571 train loss:4.066174 +step:1572 train loss:4.073306 +step:1573 train loss:4.110352 +step:1574 train loss:4.067300 +step:1575 train loss:4.092420 +step:1576 train loss:4.042990 +step:1577 train loss:4.066739 +step:1578 train loss:4.054008 +step:1579 train loss:4.123944 +step:1580 train loss:4.083198 +step:1581 train loss:4.117803 +step:1582 train loss:4.118593 +step:1583 train loss:4.089786 +step:1584 train loss:4.016634 +step:1585 train loss:4.106390 +step:1586 train loss:4.066543 +step:1587 train loss:4.081228 +step:1588 train loss:4.063476 +step:1589 train loss:4.113837 +step:1590 train loss:4.021364 +step:1591 train loss:4.078448 +step:1592 train loss:4.027452 +step:1593 train loss:4.060984 +step:1594 train loss:4.062927 +step:1595 train loss:4.053416 +step:1596 train loss:4.059781 +step:1597 train loss:3.994747 +step:1598 train loss:4.092574 +step:1599 train loss:4.102900 +step:1600 train 
loss:3.993279 +step:1601 train loss:4.067976 +step:1602 train loss:4.126295 +step:1603 train loss:4.120528 +step:1604 train loss:4.040002 +step:1605 train loss:4.090174 +step:1606 train loss:4.140615 +step:1607 train loss:4.025319 +step:1608 train loss:4.057100 +step:1609 train loss:4.069517 +step:1610 train loss:4.127878 +step:1611 train loss:4.055517 +step:1612 train loss:3.985296 +step:1613 train loss:4.055474 +step:1614 train loss:4.163018 +step:1615 train loss:4.083589 +step:1616 train loss:4.110410 +step:1617 train loss:4.094357 +step:1618 train loss:4.105264 +step:1619 train loss:4.297172 +step:1620 train loss:4.071542 +step:1621 train loss:4.130469 +step:1622 train loss:4.044180 +step:1623 train loss:4.102948 +step:1624 train loss:4.067844 +step:1625 train loss:4.143250 +step:1626 train loss:4.029466 +step:1627 train loss:4.037467 +step:1628 train loss:4.053597 +step:1629 train loss:4.082524 +step:1630 train loss:4.098074 +step:1631 train loss:4.046939 +step:1632 train loss:4.020993 +step:1633 train loss:4.039215 +step:1634 train loss:4.089498 +step:1635 train loss:4.036739 +step:1636 train loss:4.021450 +step:1637 train loss:4.095143 +step:1638 train loss:4.199807 +step:1639 train loss:4.006310 +step:1640 train loss:4.079666 +step:1641 train loss:4.051134 +step:1642 train loss:4.133761 +step:1643 train loss:4.039649 +step:1644 train loss:4.051481 +step:1645 train loss:4.026691 +step:1646 train loss:4.107129 +step:1647 train loss:4.000398 +step:1648 train loss:4.057523 +step:1649 train loss:4.027571 +step:1650 train loss:4.040951 +step:1651 train loss:4.059270 +step:1652 train loss:4.080853 +step:1653 train loss:4.089386 +step:1654 train loss:4.077612 +step:1655 train loss:4.055376 +step:1656 train loss:4.047937 +step:1657 train loss:4.049037 +step:1658 train loss:4.027024 +step:1659 train loss:4.098902 +step:1660 train loss:3.999082 +step:1661 train loss:4.106620 +step:1662 train loss:4.041354 +step:1663 train loss:4.039465 +step:1664 train loss:4.133625 +step:1665 train loss:4.054921 +step:1666 train loss:4.067244 +step:1667 train loss:4.079148 +step:1668 train loss:4.058662 +step:1669 train loss:4.015853 +step:1670 train loss:4.067998 +step:1671 train loss:4.063820 +step:1672 train loss:4.059234 +step:1673 train loss:4.018783 +step:1674 train loss:4.012613 +step:1675 train loss:4.056545 +step:1676 train loss:4.321914 +step:1677 train loss:4.081236 +step:1678 train loss:4.009375 +step:1679 train loss:4.134798 +step:1680 train loss:4.064687 +step:1681 train loss:4.115992 +step:1682 train loss:4.065739 +step:1683 train loss:4.062519 +step:1684 train loss:4.025211 +step:1685 train loss:4.074875 +step:1686 train loss:4.053277 +step:1687 train loss:4.069463 +step:1688 train loss:4.049045 +step:1689 train loss:4.032150 +step:1690 train loss:4.061650 +step:1691 train loss:4.049755 +step:1692 train loss:4.061572 +step:1693 train loss:4.031526 +step:1694 train loss:3.987753 +step:1695 train loss:4.006345 +step:1696 train loss:4.016473 +step:1697 train loss:4.055801 +step:1698 train loss:4.053032 +step:1699 train loss:4.010009 +step:1700 train loss:4.085624 +step:1701 train loss:4.025577 +step:1702 train loss:4.015114 +step:1703 train loss:4.040058 +step:1704 train loss:4.044328 +step:1705 train loss:4.054221 +step:1706 train loss:4.065440 +step:1707 train loss:4.063187 +step:1708 train loss:3.988003 +step:1709 train loss:4.086400 +step:1710 train loss:4.006076 +step:1711 train loss:4.011123 +step:1712 train loss:4.035260 +step:1713 train loss:3.997308 +step:1714 train loss:4.359226 
+step:1715 train loss:4.017282 +step:1716 train loss:3.998608 +step:1717 train loss:4.002729 +step:1718 train loss:4.072814 +step:1719 train loss:3.988564 +step:1720 train loss:4.074790 +step:1721 train loss:4.014053 +step:1722 train loss:3.985038 +step:1723 train loss:4.084871 +step:1724 train loss:4.040536 +step:1725 train loss:4.027946 +step:1726 train loss:4.030968 +step:1727 train loss:4.068744 +step:1728 train loss:4.084311 +step:1729 train loss:4.012020 +step:1730 train loss:4.086074 +step:1731 train loss:4.010367 +step:1732 train loss:4.020563 +step:1733 train loss:4.038601 +step:1734 train loss:4.063365 +step:1735 train loss:4.124991 +step:1736 train loss:4.030627 +step:1737 train loss:4.060185 +step:1738 train loss:4.022573 +step:1739 train loss:4.080924 +step:1740 train loss:4.070247 +step:1741 train loss:4.121385 +step:1742 train loss:4.111031 +step:1743 train loss:4.004326 +step:1744 train loss:4.014944 +step:1745 train loss:4.002678 +step:1746 train loss:3.988340 +step:1747 train loss:4.024885 +step:1748 train loss:3.965084 +step:1749 train loss:4.004038 +step:1750 validation loss:3.976967 +step:1750 train loss:4.036738 +step:1751 train loss:4.052790 +step:1752 train loss:4.013900 +step:1753 train loss:4.041745 +step:1754 train loss:4.036582 +step:1755 train loss:4.032708 +step:1756 train loss:4.054421 +step:1757 train loss:4.057990 +step:1758 train loss:3.977105 +step:1759 train loss:4.072764 +step:1760 train loss:4.026788 +step:1761 train loss:3.996385 +step:1762 train loss:3.998340 +step:1763 train loss:4.000397 +step:1764 train loss:4.288812 +step:1765 train loss:4.005613 +step:1766 train loss:4.093864 +step:1767 train loss:4.004591 +step:1768 train loss:3.983305 +step:1769 train loss:4.006957 +step:1770 train loss:4.016793 +step:1771 train loss:3.992689 +step:1772 train loss:4.099540 +step:1773 train loss:4.025002 +step:1774 train loss:4.030152 +step:1775 train loss:4.146980 +step:1776 train loss:4.026117 +step:1777 train loss:4.015575 +step:1778 train loss:4.074601 +step:1779 train loss:4.005562 +step:1780 train loss:4.057779 +step:1781 train loss:4.066044 +step:1782 train loss:4.086156 +step:1783 train loss:4.014584 +step:1784 train loss:4.108014 +step:1785 train loss:4.015617 +step:1786 train loss:4.011801 +step:1787 train loss:4.007575 +step:1788 train loss:4.031628 +step:1789 train loss:3.980798 +step:1790 train loss:3.993804 +step:1791 train loss:4.071856 +step:1792 train loss:4.069619 +step:1793 train loss:3.984190 +step:1794 train loss:4.030925 +step:1795 train loss:3.983772 +step:1796 train loss:3.967057 +step:1797 train loss:4.032729 +step:1798 train loss:3.973247 +step:1799 train loss:4.026091 +step:1800 train loss:4.051095 +step:1801 train loss:4.046488 +step:1802 train loss:4.049007 +step:1803 train loss:4.041828 +step:1804 train loss:4.040584 +step:1805 train loss:4.028402 +step:1806 train loss:4.039048 +step:1807 train loss:3.969020 +step:1808 train loss:4.033230 +step:1809 train loss:4.012680 +step:1810 train loss:4.010690 +step:1811 train loss:4.024383 +step:1812 train loss:4.005751 +step:1813 train loss:4.019100 +step:1814 train loss:4.082726 +step:1815 train loss:4.054779 +step:1816 train loss:3.997514 +step:1817 train loss:4.009716 +step:1818 train loss:4.085674 +step:1819 train loss:4.048838 +step:1820 train loss:4.083034 +step:1821 train loss:4.040834 +step:1822 train loss:4.032461 +step:1823 train loss:4.078966 +step:1824 train loss:4.112790 +step:1825 train loss:4.013064 +step:1826 train loss:4.058123 +step:1827 train loss:4.018417 +step:1828 
train loss:4.056559 +step:1829 train loss:4.015056 +step:1830 train loss:4.211092 +step:1831 train loss:3.968083 +step:1832 train loss:4.010854 +step:1833 train loss:4.056221 +step:1834 train loss:4.001760 +step:1835 train loss:4.019299 +step:1836 train loss:4.045870 +step:1837 train loss:3.969205 +step:1838 train loss:4.067595 +step:1839 train loss:4.043267 +step:1840 train loss:4.010134 +step:1841 train loss:4.030210 +step:1842 train loss:4.007350 +step:1843 train loss:3.953212 +step:1844 train loss:4.018137 +step:1845 train loss:3.981842 +step:1846 train loss:4.035051 +step:1847 train loss:4.087170 +step:1848 train loss:3.883281 +step:1849 train loss:3.980594 +step:1850 train loss:3.960207 +step:1851 train loss:3.996099 +step:1852 train loss:3.979351 +step:1853 train loss:4.035404 +step:1854 train loss:4.000175 +step:1855 train loss:3.987869 +step:1856 train loss:3.985340 +step:1857 train loss:3.987569 +step:1858 train loss:4.036855 +step:1859 train loss:3.985986 +step:1860 train loss:3.957553 +step:1861 train loss:3.969654 +step:1862 train loss:4.012449 +step:1863 train loss:4.050959 +step:1864 train loss:3.950742 +step:1865 train loss:3.973873 +step:1866 train loss:3.980103 +step:1867 train loss:4.011145 +step:1868 train loss:4.057566 +step:1869 train loss:3.974403 +step:1870 train loss:4.006666 +step:1871 train loss:3.945715 +step:1872 train loss:4.011662 +step:1873 train loss:4.079706 +step:1874 train loss:3.943707 +step:1875 train loss:4.022825 +step:1876 train loss:3.983427 +step:1877 train loss:4.019862 +step:1878 train loss:3.945331 +step:1879 train loss:4.008336 +step:1880 train loss:4.079334 +step:1881 train loss:4.010618 +step:1882 train loss:4.026420 +step:1883 train loss:4.051342 +step:1884 train loss:4.059917 +step:1885 train loss:4.025355 +step:1886 train loss:3.955443 +step:1887 train loss:3.975219 +step:1888 train loss:3.976262 +step:1889 train loss:3.990695 +step:1890 train loss:3.986860 +step:1891 train loss:3.921851 +step:1892 train loss:4.016755 +step:1893 train loss:3.937931 +step:1894 train loss:3.957649 +step:1895 train loss:3.994361 +step:1896 train loss:4.039963 +step:1897 train loss:3.935575 +step:1898 train loss:3.982953 +step:1899 train loss:4.000521 +step:1900 train loss:3.949212 +step:1901 train loss:4.018100 +step:1902 train loss:4.015200 +step:1903 train loss:3.955269 +step:1904 train loss:3.948482 +step:1905 train loss:3.947905 +step:1906 train loss:4.004365 +step:1907 train loss:3.952045 +step:1908 train loss:3.969762 +step:1909 train loss:4.065038 +step:1910 train loss:3.951358 +step:1911 train loss:3.960775 +step:1912 train loss:4.010882 +step:1913 train loss:3.951938 +step:1914 train loss:3.984007 +step:1915 train loss:3.950860 +step:1916 train loss:3.997721 +step:1917 train loss:3.983494 +step:1918 train loss:3.893073 +step:1919 train loss:4.043865 +step:1920 train loss:4.147624 +step:1921 train loss:3.927744 +step:1922 train loss:3.906712 +step:1923 train loss:4.009447 +step:1924 train loss:4.049923 +step:1925 train loss:4.003300 +step:1926 train loss:3.936430 +step:1927 train loss:4.020262 +step:1928 train loss:3.931035 +step:1929 train loss:3.958046 +step:1930 train loss:4.023317 +step:1931 train loss:3.937225 +step:1932 train loss:3.985418 +step:1933 train loss:3.980065 +step:1934 train loss:4.047788 +step:1935 train loss:4.000551 +step:1936 train loss:3.969660 +step:1937 train loss:3.908848 +step:1938 train loss:4.269184 +step:1939 train loss:4.032532 +step:1940 train loss:4.020591 +step:1941 train loss:4.010364 +step:1942 train loss:4.005075 
+step:1943 train loss:3.999301 +step:1944 train loss:3.956431 +step:1945 train loss:3.956244 +step:1946 train loss:3.978222 +step:1947 train loss:4.001799 +step:1948 train loss:3.905095 +step:1949 train loss:4.016066 +step:1950 train loss:3.953324 +step:1951 train loss:3.971945 +step:1952 train loss:4.004266 +step:1953 train loss:3.939312 +step:1954 train loss:3.973250 +step:1955 train loss:3.927512 +step:1956 train loss:4.008352 +step:1957 train loss:4.028722 +step:1958 train loss:4.047493 +step:1959 train loss:3.915199 +step:1960 train loss:3.965588 +step:1961 train loss:3.993049 +step:1962 train loss:3.981310 +step:1963 train loss:3.959867 +step:1964 train loss:3.993751 +step:1965 train loss:4.035266 +step:1966 train loss:3.936083 +step:1967 train loss:3.996226 +step:1968 train loss:3.934801 +step:1969 train loss:3.950585 +step:1970 train loss:4.011003 +step:1971 train loss:3.915948 +step:1972 train loss:4.019823 +step:1973 train loss:3.925285 +step:1974 train loss:3.972640 +step:1975 train loss:3.926984 +step:1976 train loss:3.949433 +step:1977 train loss:3.998466 +step:1978 train loss:3.947747 +step:1979 train loss:3.919784 +step:1980 train loss:3.962695 +step:1981 train loss:3.937358 +step:1982 train loss:4.018971 +step:1983 train loss:3.964235 +step:1984 train loss:3.999552 +step:1985 train loss:3.986278 +step:1986 train loss:3.978645 +step:1987 train loss:3.930771 +step:1988 train loss:3.964687 +step:1989 train loss:4.098174 +step:1990 train loss:3.944350 +step:1991 train loss:3.933423 +step:1992 train loss:3.942376 +step:1993 train loss:3.972558 +step:1994 train loss:3.963953 +step:1995 train loss:3.916491 +step:1996 train loss:3.972498 +step:1997 train loss:3.975140 +step:1998 train loss:3.925289 +step:1999 train loss:4.034358 +step:2000 validation loss:3.903081 total_sharp:2.0387e-04 L1_sharp:8.0493e-05 L2_sharp:4.4066e-05 L3_sharp:2.3591e-04 L4_sharp:6.5375e-05 L5_sharp:3.3976e-05 L6_sharp:2.2143e-05 L7_sharp:1.5786e-05 L8_sharp:2.6126e-05 L9_sharp:4.3365e-05 L10_sharp:5.6084e-05 L11_sharp:4.9753e-05 L12_sharp:2.1602e-04 total_fnorm:1.9936e+01 total_l1_linf:1.7311e+05 total_spectral:1.9936e+01 L1_fnorm:4.2991e+00 L2_fnorm:4.2861e+00 L3_fnorm:4.1484e+00 L4_fnorm:4.2148e+00 L5_fnorm:4.4199e+00 L6_fnorm:4.5373e+00 L7_fnorm:4.6270e+00 L8_fnorm:4.5556e+00 L9_fnorm:4.5534e+00 L10_fnorm:4.5564e+00 L11_fnorm:4.5755e+00 L12_fnorm:4.1838e+00 L1_l1linf:4.4271e+00 L2_l1linf:4.5316e+00 L3_l1linf:4.5888e+00 L4_l1linf:4.3335e+00 L5_l1linf:4.6019e+00 L6_l1linf:4.5787e+00 L7_l1linf:5.0042e+00 L8_l1linf:5.2123e+00 L9_l1linf:5.2446e+00 L10_l1linf:5.3750e+00 L11_l1linf:5.5934e+00 L12_l1linf:5.3511e+00 L1_spectral:6.6599e-01 L2_spectral:5.6177e-01 L3_spectral:5.6569e-01 L4_spectral:6.6837e-01 L5_spectral:6.4350e-01 L6_spectral:5.2544e-01 L7_spectral:5.2732e-01 L8_spectral:5.7793e-01 L9_spectral:7.1589e-01 L10_spectral:7.7493e-01 L11_spectral:7.5673e-01 L12_spectral:7.7021e-01 ip_v_neg_g:3.5939e-02 cos_v_neg_g:3.6189e-03 v_norm:1.9936e+01 g_norm:4.9815e-01 hv_norm:3.9659e-01 cos_v_hv:1.0248e-02 hg_norm:5.4624e+00 cos_g_hg:5.9380e-01 v_par:5.7159e-03 v_perp:1.9936e+01 L1_cos_v_neg_g:2.1333e-02 L1_v_norm:4.2991e+00 L2_cos_v_neg_g:1.4509e-02 L2_v_norm:4.2861e+00 L3_cos_v_neg_g:2.0942e-02 L3_v_norm:4.1484e+00 L4_cos_v_neg_g:1.1994e-02 L4_v_norm:4.2148e+00 L5_cos_v_neg_g:1.1275e-02 L5_v_norm:4.4199e+00 L6_cos_v_neg_g:8.6750e-03 L6_v_norm:4.5373e+00 L7_cos_v_neg_g:7.7060e-03 L7_v_norm:4.6270e+00 L8_cos_v_neg_g:8.2687e-03 L8_v_norm:4.5556e+00 L9_cos_v_neg_g:1.1235e-02 L9_v_norm:4.5534e+00 
L10_cos_v_neg_g:1.6918e-02 L10_v_norm:4.5564e+00 L11_cos_v_neg_g:1.7357e-02 L11_v_norm:4.5755e+00 L12_cos_v_neg_g:2.5329e-02 L12_v_norm:4.1838e+00 +step:2000 train loss:4.003658 +step:2001 train loss:3.931821 +step:2002 train loss:4.032448 +step:2003 train loss:4.073141 +step:2004 train loss:3.944903 +step:2005 train loss:4.044119 +step:2006 train loss:3.931863 +step:2007 train loss:4.005633 +step:2008 train loss:3.947991 +step:2009 train loss:3.949754 +step:2010 train loss:4.078517 +step:2011 train loss:3.926793 +step:2012 train loss:3.956422 +step:2013 train loss:3.965615 +step:2014 train loss:3.863811 +step:2015 train loss:3.998312 +step:2016 train loss:4.010652 +step:2017 train loss:4.000093 +step:2018 train loss:3.982663 +step:2019 train loss:4.002140 +step:2020 train loss:4.001231 +step:2021 train loss:3.962086 +step:2022 train loss:3.996260 +step:2023 train loss:3.975996 +step:2024 train loss:4.019108 +step:2025 train loss:3.963918 +step:2026 train loss:3.940338 +step:2027 train loss:3.970357 +step:2028 train loss:3.900676 +step:2029 train loss:3.929338 +step:2030 train loss:3.928244 +step:2031 train loss:3.892593 +step:2032 train loss:3.944520 +step:2033 train loss:3.943543 +step:2034 train loss:3.935249 +step:2035 train loss:3.976110 +step:2036 train loss:3.966948 +step:2037 train loss:3.949836 +step:2038 train loss:3.945178 +step:2039 train loss:3.936118 +step:2040 train loss:3.965889 +step:2041 train loss:3.969631 +step:2042 train loss:3.896252 +step:2043 train loss:4.051251 +step:2044 train loss:3.915796 +step:2045 train loss:3.939003 +step:2046 train loss:3.949431 +step:2047 train loss:3.919458 +step:2048 train loss:3.964046 +step:2049 train loss:3.922385 +step:2050 train loss:3.942533 +step:2051 train loss:3.909954 +step:2052 train loss:3.956735 +step:2053 train loss:3.965334 +step:2054 train loss:3.926473 +step:2055 train loss:3.936343 +step:2056 train loss:3.983370 +step:2057 train loss:3.986273 +step:2058 train loss:3.948719 +step:2059 train loss:4.027747 +step:2060 train loss:3.982605 +step:2061 train loss:3.928450 +step:2062 train loss:3.954435 +step:2063 train loss:3.857326 +step:2064 train loss:3.972688 +step:2065 train loss:3.983480 +step:2066 train loss:3.844104 +step:2067 train loss:3.884705 +step:2068 train loss:3.996002 +step:2069 train loss:3.930374 +step:2070 train loss:3.935428 +step:2071 train loss:3.979048 +step:2072 train loss:3.914880 +step:2073 train loss:3.966984 +step:2074 train loss:3.937797 +step:2075 train loss:4.027356 +step:2076 train loss:3.973661 +step:2077 train loss:3.984171 +step:2078 train loss:3.938288 +step:2079 train loss:4.085468 +step:2080 train loss:3.905455 +step:2081 train loss:4.008684 +step:2082 train loss:3.943014 +step:2083 train loss:3.934167 +step:2084 train loss:3.913474 +step:2085 train loss:3.957136 +step:2086 train loss:3.965367 +step:2087 train loss:4.004074 +step:2088 train loss:3.870565 +step:2089 train loss:3.900764 +step:2090 train loss:3.936803 +step:2091 train loss:3.954562 +step:2092 train loss:3.930258 +step:2093 train loss:3.921578 +step:2094 train loss:3.957227 +step:2095 train loss:3.906167 +step:2096 train loss:3.889798 +step:2097 train loss:3.922184 +step:2098 train loss:3.925504 +step:2099 train loss:3.903797 +step:2100 train loss:3.976824 +step:2101 train loss:3.967264 +step:2102 train loss:3.939019 +step:2103 train loss:3.948023 +step:2104 train loss:3.924625 +step:2105 train loss:3.929882 +step:2106 train loss:3.928082 +step:2107 train loss:3.996097 +step:2108 train loss:3.923619 +step:2109 train 
loss:3.878064 +step:2110 train loss:3.975496 +step:2111 train loss:3.923667 +step:2112 train loss:3.978218 +step:2113 train loss:3.919502 +step:2114 train loss:3.919971 +step:2115 train loss:3.972437 +step:2116 train loss:3.911337 +step:2117 train loss:3.931476 +step:2118 train loss:3.919338 +step:2119 train loss:3.853112 +step:2120 train loss:3.942392 +step:2121 train loss:3.934729 +step:2122 train loss:3.939536 +step:2123 train loss:3.997597 +step:2124 train loss:3.994843 +step:2125 train loss:3.904409 +step:2126 train loss:3.911900 +step:2127 train loss:3.901454 +step:2128 train loss:3.895725 +step:2129 train loss:3.920993 +step:2130 train loss:3.922278 +step:2131 train loss:3.948093 +step:2132 train loss:3.876179 +step:2133 train loss:3.988658 +step:2134 train loss:3.937806 +step:2135 train loss:3.896379 +step:2136 train loss:3.985428 +step:2137 train loss:3.950700 +step:2138 train loss:3.903248 +step:2139 train loss:3.910342 +step:2140 train loss:3.911768 +step:2141 train loss:3.959711 +step:2142 train loss:3.934263 +step:2143 train loss:3.853105 +step:2144 train loss:3.961178 +step:2145 train loss:3.931231 +step:2146 train loss:3.968302 +step:2147 train loss:4.068384 +step:2148 train loss:3.871582 +step:2149 train loss:3.885605 +step:2150 train loss:3.911418 +step:2151 train loss:3.943030 +step:2152 train loss:3.936668 +step:2153 train loss:3.975782 +step:2154 train loss:3.893966 +step:2155 train loss:3.976842 +step:2156 train loss:3.895973 +step:2157 train loss:3.974696 +step:2158 train loss:4.008266 +step:2159 train loss:3.936189 +step:2160 train loss:4.008310 +step:2161 train loss:3.905385 +step:2162 train loss:3.912032 +step:2163 train loss:3.891297 +step:2164 train loss:3.912811 +step:2165 train loss:3.890190 +step:2166 train loss:4.011595 +step:2167 train loss:3.917566 +step:2168 train loss:3.933580 +step:2169 train loss:3.883474 +step:2170 train loss:4.027926 +step:2171 train loss:3.989951 +step:2172 train loss:3.928747 +step:2173 train loss:3.914419 +step:2174 train loss:3.983290 +step:2175 train loss:3.914723 +step:2176 train loss:3.989438 +step:2177 train loss:3.957720 +step:2178 train loss:3.882561 +step:2179 train loss:3.952437 +step:2180 train loss:3.965857 +step:2181 train loss:3.894405 +step:2182 train loss:3.944894 +step:2183 train loss:3.936679 +step:2184 train loss:3.890132 +step:2185 train loss:3.870659 +step:2186 train loss:3.908560 +step:2187 train loss:3.920226 +step:2188 train loss:3.970949 +step:2189 train loss:3.861433 +step:2190 train loss:3.903701 +step:2191 train loss:3.963110 +step:2192 train loss:3.892217 +step:2193 train loss:3.860057 +step:2194 train loss:3.870125 +step:2195 train loss:3.891136 +step:2196 train loss:3.894006 +step:2197 train loss:3.877297 +step:2198 train loss:3.900678 +step:2199 train loss:3.970589 +step:2200 train loss:3.905704 +step:2201 train loss:3.911657 +step:2202 train loss:3.874225 +step:2203 train loss:3.893712 +step:2204 train loss:3.926302 +step:2205 train loss:3.909096 +step:2206 train loss:3.905095 +step:2207 train loss:3.905392 +step:2208 train loss:3.881659 +step:2209 train loss:4.160219 +step:2210 train loss:3.940172 +step:2211 train loss:3.930050 +step:2212 train loss:3.901091 +step:2213 train loss:3.986322 +step:2214 train loss:3.975096 +step:2215 train loss:3.901021 +step:2216 train loss:3.867999 +step:2217 train loss:3.894271 +step:2218 train loss:3.897536 +step:2219 train loss:3.929508 +step:2220 train loss:3.871067 +step:2221 train loss:3.902767 +step:2222 train loss:3.920350 +step:2223 train loss:3.957051 
+step:2224 train loss:3.933057 +step:2225 train loss:3.875439 +step:2226 train loss:3.938308 +step:2227 train loss:3.941347 +step:2228 train loss:3.944930 +step:2229 train loss:3.884156 +step:2230 train loss:4.008007 +step:2231 train loss:3.921841 +step:2232 train loss:3.923005 +step:2233 train loss:3.957356 +step:2234 train loss:3.858801 +step:2235 train loss:3.942334 +step:2236 train loss:3.884490 +step:2237 train loss:4.021298 +step:2238 train loss:3.824277 +step:2239 train loss:3.900523 +step:2240 train loss:3.913034 +step:2241 train loss:3.830790 +step:2242 train loss:3.967222 +step:2243 train loss:4.002285 +step:2244 train loss:3.880528 +step:2245 train loss:3.884466 +step:2246 train loss:3.853261 +step:2247 train loss:3.856321 +step:2248 train loss:3.912897 +step:2249 train loss:3.894135 +step:2250 validation loss:3.847225 +step:2250 train loss:3.903998 +step:2251 train loss:3.870192 +step:2252 train loss:3.867219 +step:2253 train loss:3.896012 +step:2254 train loss:3.897873 +step:2255 train loss:3.860933 +step:2256 train loss:3.913217 +step:2257 train loss:3.899601 +step:2258 train loss:3.894969 +step:2259 train loss:3.904617 +step:2260 train loss:3.864563 +step:2261 train loss:3.939600 +step:2262 train loss:3.955912 +step:2263 train loss:3.913602 +step:2264 train loss:4.024847 +step:2265 train loss:3.876445 +step:2266 train loss:3.917831 +step:2267 train loss:3.877975 +step:2268 train loss:3.883777 +step:2269 train loss:3.892790 +step:2270 train loss:3.877961 +step:2271 train loss:3.894953 +step:2272 train loss:3.927569 +step:2273 train loss:3.847712 +step:2274 train loss:3.878991 +step:2275 train loss:3.834732 +step:2276 train loss:3.906404 +step:2277 train loss:3.919863 +step:2278 train loss:3.900124 +step:2279 train loss:3.885664 +step:2280 train loss:3.795347 +step:2281 train loss:3.938863 +step:2282 train loss:3.870788 +step:2283 train loss:3.857686 +step:2284 train loss:3.870837 +step:2285 train loss:3.927001 +step:2286 train loss:3.886957 +step:2287 train loss:3.929192 +step:2288 train loss:3.898933 +step:2289 train loss:3.895340 +step:2290 train loss:3.894884 +step:2291 train loss:3.884729 +step:2292 train loss:3.927701 +step:2293 train loss:3.909909 +step:2294 train loss:3.904001 +step:2295 train loss:3.961204 +step:2296 train loss:3.893209 +step:2297 train loss:3.865225 +step:2298 train loss:3.923200 +step:2299 train loss:3.894488 +step:2300 train loss:3.810869 +step:2301 train loss:3.905343 +step:2302 train loss:3.923988 +step:2303 train loss:3.888089 +step:2304 train loss:3.881482 +step:2305 train loss:3.918339 +step:2306 train loss:3.914443 +step:2307 train loss:3.891105 +step:2308 train loss:3.910440 +step:2309 train loss:3.870846 +step:2310 train loss:3.857956 +step:2311 train loss:3.847549 +step:2312 train loss:3.914243 +step:2313 train loss:3.831580 +step:2314 train loss:3.904027 +step:2315 train loss:3.921364 +step:2316 train loss:3.957554 +step:2317 train loss:3.824972 +step:2318 train loss:3.864727 +step:2319 train loss:3.922408 +step:2320 train loss:3.891239 +step:2321 train loss:3.862429 +step:2322 train loss:3.877386 +step:2323 train loss:3.874564 +step:2324 train loss:3.900057 +step:2325 train loss:3.838725 +step:2326 train loss:3.868301 +step:2327 train loss:3.982236 +step:2328 train loss:3.927715 +step:2329 train loss:3.889059 +step:2330 train loss:3.845057 +step:2331 train loss:3.886724 +step:2332 train loss:3.811376 +step:2333 train loss:3.874702 +step:2334 train loss:3.855810 +step:2335 train loss:3.838820 +step:2336 train loss:4.085756 +step:2337 
train loss:3.872357 +step:2338 train loss:3.907290 +step:2339 train loss:3.911086 +step:2340 train loss:3.925833 +step:2341 train loss:3.912509 +step:2342 train loss:3.864980 +step:2343 train loss:3.887627 +step:2344 train loss:3.931229 +step:2345 train loss:3.884134 +step:2346 train loss:3.914188 +step:2347 train loss:3.834483 +step:2348 train loss:3.892881 +step:2349 train loss:3.845320 +step:2350 train loss:3.899183 +step:2351 train loss:3.907835 +step:2352 train loss:3.909658 +step:2353 train loss:3.868002 +step:2354 train loss:3.918630 +step:2355 train loss:3.907551 +step:2356 train loss:3.944279 +step:2357 train loss:3.852876 +step:2358 train loss:3.863821 +step:2359 train loss:3.891269 +step:2360 train loss:3.910632 +step:2361 train loss:3.946289 +step:2362 train loss:3.774633 +step:2363 train loss:3.967822 +step:2364 train loss:3.912736 +step:2365 train loss:3.882079 +step:2366 train loss:3.838052 +step:2367 train loss:3.899253 +step:2368 train loss:3.889955 +step:2369 train loss:3.876762 +step:2370 train loss:3.894300 +step:2371 train loss:3.950737 +step:2372 train loss:3.813243 +step:2373 train loss:3.950284 +step:2374 train loss:3.931082 +step:2375 train loss:3.917138 +step:2376 train loss:3.901721 +step:2377 train loss:3.853030 +step:2378 train loss:3.897891 +step:2379 train loss:3.885414 +step:2380 train loss:3.939642 +step:2381 train loss:4.032222 +step:2382 train loss:3.821877 +step:2383 train loss:3.871690 +step:2384 train loss:3.899620 +step:2385 train loss:3.799161 +step:2386 train loss:3.954834 +step:2387 train loss:3.836096 +step:2388 train loss:3.886802 +step:2389 train loss:3.907388 +step:2390 train loss:3.857313 +step:2391 train loss:3.880030 +step:2392 train loss:3.906084 +step:2393 train loss:3.861722 +step:2394 train loss:3.886246 +step:2395 train loss:3.873733 +step:2396 train loss:3.877934 +step:2397 train loss:3.857794 +step:2398 train loss:3.910141 +step:2399 train loss:3.870944 +step:2400 train loss:3.850085 +step:2401 train loss:3.890663 +step:2402 train loss:3.846890 +step:2403 train loss:3.897856 +step:2404 train loss:3.854558 +step:2405 train loss:3.859103 +step:2406 train loss:3.885247 +step:2407 train loss:3.835428 +step:2408 train loss:3.871660 +step:2409 train loss:3.862651 +step:2410 train loss:3.861274 +step:2411 train loss:3.934680 +step:2412 train loss:3.923078 +step:2413 train loss:3.965152 +step:2414 train loss:3.859686 +step:2415 train loss:3.845994 +step:2416 train loss:3.861227 +step:2417 train loss:3.896282 +step:2418 train loss:3.914962 +step:2419 train loss:3.843316 +step:2420 train loss:3.863201 +step:2421 train loss:3.887137 +step:2422 train loss:3.939979 +step:2423 train loss:3.876389 +step:2424 train loss:3.844165 +step:2425 train loss:3.906477 +step:2426 train loss:3.841257 +step:2427 train loss:3.872227 +step:2428 train loss:3.949177 +step:2429 train loss:3.901767 +step:2430 train loss:3.991225 +step:2431 train loss:3.906541 +step:2432 train loss:3.880099 +step:2433 train loss:3.864468 +step:2434 train loss:3.845402 +step:2435 train loss:3.908923 +step:2436 train loss:3.860409 +step:2437 train loss:3.886625 +step:2438 train loss:3.929652 +step:2439 train loss:3.911582 +step:2440 train loss:3.854385 +step:2441 train loss:3.889361 +step:2442 train loss:3.879690 +step:2443 train loss:3.845550 +step:2444 train loss:3.881867 +step:2445 train loss:3.876760 +step:2446 train loss:3.844011 +step:2447 train loss:3.826721 +step:2448 train loss:3.877732 +step:2449 train loss:3.905730 +step:2450 train loss:3.866041 +step:2451 train loss:3.788688 
+step:2452 train loss:3.890393 +step:2453 train loss:3.861417 +step:2454 train loss:3.857595 +step:2455 train loss:3.906634 +step:2456 train loss:3.859795 +step:2457 train loss:3.919672 +step:2458 train loss:3.896236 +step:2459 train loss:3.873179 +step:2460 train loss:3.880636 +step:2461 train loss:3.918194 +step:2462 train loss:3.889551 +step:2463 train loss:3.867350 +step:2464 train loss:3.879191 +step:2465 train loss:3.953091 +step:2466 train loss:4.044262 +step:2467 train loss:3.944208 +step:2468 train loss:3.839720 +step:2469 train loss:3.907413 +step:2470 train loss:3.954314 +step:2471 train loss:3.960631 +step:2472 train loss:3.947964 +step:2473 train loss:3.877478 +step:2474 train loss:3.847003 +step:2475 train loss:3.898485 +step:2476 train loss:3.971883 +step:2477 train loss:3.889850 +step:2478 train loss:3.841171 +step:2479 train loss:3.885890 +step:2480 train loss:3.874279 +step:2481 train loss:4.065338 +step:2482 train loss:3.871485 +step:2483 train loss:3.908104 +step:2484 train loss:3.852281 +step:2485 train loss:3.844520 +step:2486 train loss:3.873541 +step:2487 train loss:3.909031 +step:2488 train loss:3.821107 +step:2489 train loss:3.926074 +step:2490 train loss:3.847594 +step:2491 train loss:3.860120 +step:2492 train loss:3.901885 +step:2493 train loss:3.941500 +step:2494 train loss:3.862984 +step:2495 train loss:3.895901 +step:2496 train loss:3.872861 +step:2497 train loss:3.887411 +step:2498 train loss:3.893485 +step:2499 train loss:3.886808 +step:2500 validation loss:3.810332 total_sharp:1.8211e-04 L1_sharp:8.0372e-05 L2_sharp:1.5251e-05 L3_sharp:3.7431e-05 L4_sharp:2.7018e-05 L5_sharp:1.6766e-05 L6_sharp:1.2718e-05 L7_sharp:1.5472e-05 L8_sharp:3.0449e-05 L9_sharp:6.7906e-05 L10_sharp:6.0948e-05 L11_sharp:4.5094e-05 L12_sharp:3.8617e-04 total_fnorm:2.0212e+01 total_l1_linf:1.7604e+05 total_spectral:2.0212e+01 L1_fnorm:4.5030e+00 L2_fnorm:4.3925e+00 L3_fnorm:4.2698e+00 L4_fnorm:4.2839e+00 L5_fnorm:4.4674e+00 L6_fnorm:4.6145e+00 L7_fnorm:4.7326e+00 L8_fnorm:4.7053e+00 L9_fnorm:4.7268e+00 L10_fnorm:4.7333e+00 L11_fnorm:4.7150e+00 L12_fnorm:4.3193e+00 L1_l1linf:4.5404e+00 L2_l1linf:4.9205e+00 L3_l1linf:4.5986e+00 L4_l1linf:4.3443e+00 L5_l1linf:4.7084e+00 L6_l1linf:4.6475e+00 L7_l1linf:4.9978e+00 L8_l1linf:5.6620e+00 L9_l1linf:5.4159e+00 L10_l1linf:6.3893e+00 L11_l1linf:5.6245e+00 L12_l1linf:5.2712e+00 L1_spectral:7.3440e-01 L2_spectral:6.0359e-01 L3_spectral:5.8229e-01 L4_spectral:6.8059e-01 L5_spectral:6.1032e-01 L6_spectral:5.5039e-01 L7_spectral:5.2732e-01 L8_spectral:6.5678e-01 L9_spectral:8.2777e-01 L10_spectral:8.3612e-01 L11_spectral:7.6562e-01 L12_spectral:8.9198e-01 ip_v_neg_g:3.9005e-02 cos_v_neg_g:4.4191e-03 v_norm:2.0212e+01 g_norm:4.3669e-01 hv_norm:4.0333e-01 cos_v_hv:9.1260e-03 hg_norm:3.6222e+00 cos_g_hg:5.9248e-01 v_par:8.3078e-03 v_perp:2.0212e+01 L1_cos_v_neg_g:2.8999e-02 L1_v_norm:4.5030e+00 L2_cos_v_neg_g:1.3850e-02 L2_v_norm:4.3925e+00 L3_cos_v_neg_g:1.7163e-02 L3_v_norm:4.2698e+00 L4_cos_v_neg_g:1.2377e-02 L4_v_norm:4.2839e+00 L5_cos_v_neg_g:1.0532e-02 L5_v_norm:4.4674e+00 L6_cos_v_neg_g:8.4759e-03 L6_v_norm:4.6145e+00 L7_cos_v_neg_g:1.0615e-02 L7_v_norm:4.7326e+00 L8_cos_v_neg_g:1.3198e-02 L8_v_norm:4.7053e+00 L9_cos_v_neg_g:1.9348e-02 L9_v_norm:4.7268e+00 L10_cos_v_neg_g:2.0695e-02 L10_v_norm:4.7333e+00 L11_cos_v_neg_g:2.1790e-02 L11_v_norm:4.7150e+00 L12_cos_v_neg_g:3.2108e-02 L12_v_norm:4.3193e+00 +step:2500 train loss:3.832088 +step:2501 train loss:3.898619 +step:2502 train loss:3.888883 +step:2503 train loss:3.819648 +step:2504 train 
loss:3.852707 +step:2505 train loss:3.876002 +step:2506 train loss:3.837122 +step:2507 train loss:3.860748 +step:2508 train loss:3.815818 +step:2509 train loss:3.834656 +step:2510 train loss:3.831441 +step:2511 train loss:3.868687 +step:2512 train loss:3.915302 +step:2513 train loss:3.864758 +step:2514 train loss:3.848711 +step:2515 train loss:3.984340 +step:2516 train loss:3.883939 +step:2517 train loss:3.944451 +step:2518 train loss:3.913338 +step:2519 train loss:3.882596 +step:2520 train loss:3.889944 +step:2521 train loss:3.866516 +step:2522 train loss:3.900503 +step:2523 train loss:3.823259 +step:2524 train loss:3.880792 +step:2525 train loss:3.865402 +step:2526 train loss:3.920290 +step:2527 train loss:3.914927 +step:2528 train loss:3.892507 +step:2529 train loss:3.911851 +step:2530 train loss:3.888267 +step:2531 train loss:3.823760 +step:2532 train loss:3.920578 +step:2533 train loss:3.815053 +step:2534 train loss:3.909782 +step:2535 train loss:3.866754 +step:2536 train loss:3.786721 +step:2537 train loss:3.903000 +step:2538 train loss:3.880898 +step:2539 train loss:3.897520 +step:2540 train loss:3.833451 +step:2541 train loss:3.857555 +step:2542 train loss:3.868722 +step:2543 train loss:3.865785 +step:2544 train loss:3.850480 +step:2545 train loss:3.838261 +step:2546 train loss:3.807123 +step:2547 train loss:3.847081 +step:2548 train loss:3.872719 +step:2549 train loss:3.872709 +step:2550 train loss:4.003727 +step:2551 train loss:4.073048 +step:2552 train loss:3.807845 +step:2553 train loss:3.844720 +step:2554 train loss:3.989278 +step:2555 train loss:3.877066 +step:2556 train loss:3.808635 +step:2557 train loss:3.905975 +step:2558 train loss:3.891813 +step:2559 train loss:3.845121 +step:2560 train loss:3.826900 +step:2561 train loss:3.926550 +step:2562 train loss:3.879799 +step:2563 train loss:3.815607 +step:2564 train loss:3.885697 +step:2565 train loss:3.862789 +step:2566 train loss:3.844024 +step:2567 train loss:3.819682 +step:2568 train loss:3.872752 +step:2569 train loss:3.881922 +step:2570 train loss:3.832616 +step:2571 train loss:3.914414 +step:2572 train loss:3.876904 +step:2573 train loss:3.807544 +step:2574 train loss:3.854958 +step:2575 train loss:3.899721 +step:2576 train loss:3.855973 +step:2577 train loss:3.816364 +step:2578 train loss:3.860987 +step:2579 train loss:3.842369 +step:2580 train loss:3.810595 +step:2581 train loss:3.825822 +step:2582 train loss:3.831706 +step:2583 train loss:3.854782 +step:2584 train loss:3.870853 +step:2585 train loss:3.832499 +step:2586 train loss:3.853846 +step:2587 train loss:3.790834 +step:2588 train loss:3.821433 +step:2589 train loss:3.896955 +step:2590 train loss:3.820446 +step:2591 train loss:3.881469 +step:2592 train loss:3.926815 +step:2593 train loss:3.886100 +step:2594 train loss:3.845048 +step:2595 train loss:3.852406 +step:2596 train loss:3.893272 +step:2597 train loss:3.774971 +step:2598 train loss:3.929482 +step:2599 train loss:3.884224 +step:2600 train loss:3.913755 +step:2601 train loss:3.849154 +step:2602 train loss:3.881100 +step:2603 train loss:3.873113 +step:2604 train loss:3.793700 +step:2605 train loss:3.923830 +step:2606 train loss:3.872622 +step:2607 train loss:3.833390 +step:2608 train loss:3.806252 +step:2609 train loss:3.830889 +step:2610 train loss:3.859468 +step:2611 train loss:3.897245 +step:2612 train loss:3.858789 +step:2613 train loss:3.833655 +step:2614 train loss:3.823400 +step:2615 train loss:3.817914 +step:2616 train loss:3.894076 +step:2617 train loss:3.854489 +step:2618 train loss:3.815957 
+step:2619 train loss:3.832750 +step:2620 train loss:3.824214 +step:2621 train loss:3.839389 +step:2622 train loss:3.917462 +step:2623 train loss:3.786817 +step:2624 train loss:3.805364 +step:2625 train loss:3.874712 +step:2626 train loss:3.868999 +step:2627 train loss:3.847754 +step:2628 train loss:3.900835 +step:2629 train loss:3.847840 +step:2630 train loss:3.838655 +step:2631 train loss:3.867787 +step:2632 train loss:3.834038 +step:2633 train loss:3.822750 +step:2634 train loss:3.866824 +step:2635 train loss:3.852446 +step:2636 train loss:3.903065 +step:2637 train loss:3.852410 +step:2638 train loss:3.834479 +step:2639 train loss:3.890424 +step:2640 train loss:3.809888 +step:2641 train loss:3.870279 +step:2642 train loss:3.786534 +step:2643 train loss:3.789137 +step:2644 train loss:3.881064 +step:2645 train loss:3.818946 +step:2646 train loss:3.846381 +step:2647 train loss:3.869075 +step:2648 train loss:3.903446 +step:2649 train loss:3.816740 +step:2650 train loss:3.806303 +step:2651 train loss:3.844598 +step:2652 train loss:3.814242 +step:2653 train loss:3.884558 +step:2654 train loss:3.846803 +step:2655 train loss:3.835230 +step:2656 train loss:3.852075 +step:2657 train loss:3.877597 +step:2658 train loss:3.886263 +step:2659 train loss:3.863366 +step:2660 train loss:3.848325 +step:2661 train loss:3.895478 +step:2662 train loss:3.870476 +step:2663 train loss:3.846915 +step:2664 train loss:3.856816 +step:2665 train loss:3.806784 +step:2666 train loss:3.838075 +step:2667 train loss:3.841258 +step:2668 train loss:3.818856 +step:2669 train loss:3.829795 +step:2670 train loss:3.852355 +step:2671 train loss:3.827267 +step:2672 train loss:3.849297 +step:2673 train loss:3.785174 +step:2674 train loss:3.879125 +step:2675 train loss:3.847249 +step:2676 train loss:3.869105 +step:2677 train loss:3.850904 +step:2678 train loss:3.837206 +step:2679 train loss:3.817891 +step:2680 train loss:3.804677 +step:2681 train loss:3.776061 +step:2682 train loss:3.863713 +step:2683 train loss:3.835204 +step:2684 train loss:3.863974 +step:2685 train loss:3.781684 +step:2686 train loss:3.806123 +step:2687 train loss:3.881046 +step:2688 train loss:3.899537 +step:2689 train loss:3.808082 +step:2690 train loss:3.885107 +step:2691 train loss:3.857661 +step:2692 train loss:3.877148 +step:2693 train loss:3.931338 +step:2694 train loss:3.828990 +step:2695 train loss:3.851679 +step:2696 train loss:3.854036 +step:2697 train loss:3.845359 +step:2698 train loss:3.852484 +step:2699 train loss:3.873383 +step:2700 train loss:3.843960 +step:2701 train loss:3.911807 +step:2702 train loss:3.847178 +step:2703 train loss:3.803405 +step:2704 train loss:3.870595 +step:2705 train loss:3.871868 +step:2706 train loss:3.817002 +step:2707 train loss:3.778346 +step:2708 train loss:3.868321 +step:2709 train loss:3.847537 +step:2710 train loss:3.854066 +step:2711 train loss:3.821696 +step:2712 train loss:3.884553 +step:2713 train loss:3.888441 +step:2714 train loss:3.826748 +step:2715 train loss:3.821295 +step:2716 train loss:3.884142 +step:2717 train loss:3.850010 +step:2718 train loss:3.846556 +step:2719 train loss:3.846171 +step:2720 train loss:3.809693 +step:2721 train loss:3.891169 +step:2722 train loss:3.821464 +step:2723 train loss:3.808132 +step:2724 train loss:3.832054 +step:2725 train loss:3.829475 +step:2726 train loss:3.800473 +step:2727 train loss:3.860313 +step:2728 train loss:3.796728 +step:2729 train loss:3.929347 +step:2730 train loss:3.870440 +step:2731 train loss:3.911026 +step:2732 train loss:3.822646 +step:2733 train 
loss:3.818533 +step:2734 train loss:3.865560 +step:2735 train loss:3.866113 +step:2736 train loss:3.788434 +step:2737 train loss:3.845879 +step:2738 train loss:3.901897 +step:2739 train loss:3.819826 +step:2740 train loss:3.820219 +step:2741 train loss:3.806338 +step:2742 train loss:3.729837 +step:2743 train loss:3.845718 +step:2744 train loss:3.876460 +step:2745 train loss:3.819697 +step:2746 train loss:3.835493 +step:2747 train loss:3.825417 +step:2748 train loss:3.778841 +step:2749 train loss:3.844097 +step:2750 validation loss:3.768585 +step:2750 train loss:3.851686 +step:2751 train loss:3.876540 +step:2752 train loss:3.857635 +step:2753 train loss:3.851360 +step:2754 train loss:3.791888 +step:2755 train loss:3.862406 +step:2756 train loss:3.831694 +step:2757 train loss:3.817860 +step:2758 train loss:3.848679 +step:2759 train loss:3.857498 +step:2760 train loss:3.768188 +step:2761 train loss:3.782290 +step:2762 train loss:3.797636 +step:2763 train loss:3.821963 +step:2764 train loss:3.762544 +step:2765 train loss:3.810264 +step:2766 train loss:3.902820 +step:2767 train loss:3.775713 +step:2768 train loss:3.834484 +step:2769 train loss:3.810356 +step:2770 train loss:3.829609 +step:2771 train loss:3.855059 +step:2772 train loss:3.818774 +step:2773 train loss:3.819569 +step:2774 train loss:3.809648 +step:2775 train loss:3.829216 +step:2776 train loss:3.781049 +step:2777 train loss:3.814998 +step:2778 train loss:3.821060 +step:2779 train loss:3.846066 +step:2780 train loss:3.817716 +step:2781 train loss:3.806667 +step:2782 train loss:3.793061 +step:2783 train loss:3.825138 +step:2784 train loss:3.831916 +step:2785 train loss:3.899980 +step:2786 train loss:3.870657 +step:2787 train loss:3.833963 +step:2788 train loss:3.826551 +step:2789 train loss:3.819554 +step:2790 train loss:3.758366 +step:2791 train loss:3.858323 +step:2792 train loss:3.848921 +step:2793 train loss:3.815404 +step:2794 train loss:3.822186 +step:2795 train loss:3.838340 +step:2796 train loss:3.832701 +step:2797 train loss:3.876422 +step:2798 train loss:3.862317 +step:2799 train loss:3.772931 +step:2800 train loss:3.816716 +step:2801 train loss:3.854297 +step:2802 train loss:3.880548 +step:2803 train loss:3.850332 +step:2804 train loss:3.788847 +step:2805 train loss:3.829375 +step:2806 train loss:3.820733 +step:2807 train loss:3.850781 +step:2808 train loss:3.789350 +step:2809 train loss:3.860121 +step:2810 train loss:3.855507 +step:2811 train loss:3.839674 +step:2812 train loss:3.886816 +step:2813 train loss:3.855941 +step:2814 train loss:3.849830 +step:2815 train loss:3.853407 +step:2816 train loss:3.853109 +step:2817 train loss:3.791563 +step:2818 train loss:3.893757 +step:2819 train loss:3.821266 +step:2820 train loss:3.817680 +step:2821 train loss:3.797479 +step:2822 train loss:3.838272 +step:2823 train loss:3.791773 +step:2824 train loss:3.687634 +step:2825 train loss:3.841446 +step:2826 train loss:3.842685 +step:2827 train loss:3.869651 +step:2828 train loss:3.860546 +step:2829 train loss:3.846014 +step:2830 train loss:3.880666 +step:2831 train loss:3.812244 +step:2832 train loss:3.786243 +step:2833 train loss:3.845091 +step:2834 train loss:3.799807 +step:2835 train loss:3.831957 +step:2836 train loss:3.835718 +step:2837 train loss:3.832339 +step:2838 train loss:3.774034 +step:2839 train loss:3.872220 +step:2840 train loss:3.832081 +step:2841 train loss:3.914340 +step:2842 train loss:3.856893 +step:2843 train loss:3.850221 +step:2844 train loss:3.879195 +step:2845 train loss:3.834466 +step:2846 train loss:3.782872 
+step:2847 train loss:3.871592 +step:2848 train loss:3.825911 +step:2849 train loss:3.817999 +step:2850 train loss:3.877788 +step:2851 train loss:3.828794 +step:2852 train loss:3.907780 +step:2853 train loss:3.823550 +step:2854 train loss:3.767879 +step:2855 train loss:3.848471 +step:2856 train loss:3.773075 +step:2857 train loss:3.874370 +step:2858 train loss:3.828297 +step:2859 train loss:3.825471 +step:2860 train loss:3.808291 +step:2861 train loss:3.790835 +step:2862 train loss:3.821529 +step:2863 train loss:3.804067 +step:2864 train loss:3.806828 +step:2865 train loss:3.883847 +step:2866 train loss:3.900447 +step:2867 train loss:3.835441 +step:2868 train loss:3.831146 +step:2869 train loss:3.794979 +step:2870 train loss:3.885021 +step:2871 train loss:3.886420 +step:2872 train loss:3.838926 +step:2873 train loss:3.850900 +step:2874 train loss:3.827925 +step:2875 train loss:3.780715 +step:2876 train loss:3.823209 +step:2877 train loss:3.808084 +step:2878 train loss:3.821835 +step:2879 train loss:3.789106 +step:2880 train loss:3.807329 +step:2881 train loss:3.799167 +step:2882 train loss:3.736559 +step:2883 train loss:3.819483 +step:2884 train loss:3.889770 +step:2885 train loss:3.783068 +step:2886 train loss:3.832583 +step:2887 train loss:3.857366 +step:2888 train loss:3.831741 +step:2889 train loss:3.813977 +step:2890 train loss:3.779529 +step:2891 train loss:3.823817 +step:2892 train loss:3.829709 +step:2893 train loss:3.813443 +step:2894 train loss:3.782601 +step:2895 train loss:3.831471 +step:2896 train loss:3.880373 +step:2897 train loss:3.859736 +step:2898 train loss:3.994193 +step:2899 train loss:3.750059 +step:2900 train loss:3.827944 +step:2901 train loss:3.775167 +step:2902 train loss:3.776694 +step:2903 train loss:3.789392 +step:2904 train loss:3.817292 +step:2905 train loss:3.879744 +step:2906 train loss:3.844128 +step:2907 train loss:4.015030 +step:2908 train loss:3.771990 +step:2909 train loss:3.847515 +step:2910 train loss:3.819240 +step:2911 train loss:3.849570 +step:2912 train loss:3.804976 +step:2913 train loss:3.840807 +step:2914 train loss:3.866763 +step:2915 train loss:3.864222 +step:2916 train loss:3.821203 +step:2917 train loss:3.856422 +step:2918 train loss:3.850063 +step:2919 train loss:3.791126 +step:2920 train loss:3.845963 +step:2921 train loss:3.801964 +step:2922 train loss:3.822623 +step:2923 train loss:3.892151 +step:2924 train loss:3.822390 +step:2925 train loss:3.779261 +step:2926 train loss:3.869897 +step:2927 train loss:3.774742 +step:2928 train loss:3.746453 +step:2929 train loss:3.764092 +step:2930 train loss:3.782069 +step:2931 train loss:3.935166 +step:2932 train loss:3.857428 +step:2933 train loss:3.816468 +step:2934 train loss:3.810305 +step:2935 train loss:3.836424 +step:2936 train loss:3.786902 +step:2937 train loss:3.803543 +step:2938 train loss:3.822165 +step:2939 train loss:3.893337 +step:2940 train loss:3.795919 +step:2941 train loss:3.828065 +step:2942 train loss:3.790529 +step:2943 train loss:4.069950 +step:2944 train loss:3.901740 +step:2945 train loss:3.856483 +step:2946 train loss:3.869782 +step:2947 train loss:3.825334 +step:2948 train loss:3.786636 +step:2949 train loss:3.888813 +step:2950 train loss:3.827920 +step:2951 train loss:3.728435 +step:2952 train loss:3.795931 +step:2953 train loss:3.713032 +step:2954 train loss:3.802253 +step:2955 train loss:3.883310 +step:2956 train loss:3.819565 +step:2957 train loss:3.817508 +step:2958 train loss:3.772882 +step:2959 train loss:3.796360 +step:2960 train loss:3.893558 +step:2961 train 
loss:3.753344 +step:2962 train loss:3.831765 +step:2963 train loss:3.825549 +step:2964 train loss:3.805073 +step:2965 train loss:3.832702 +step:2966 train loss:3.807212 +step:2967 train loss:3.802069 +step:2968 train loss:3.778677 +step:2969 train loss:3.788939 +step:2970 train loss:3.857187 +step:2971 train loss:3.788952 +step:2972 train loss:3.771039 +step:2973 train loss:3.764445 +step:2974 train loss:3.804703 +step:2975 train loss:3.769450 +step:2976 train loss:3.806274 +step:2977 train loss:3.798470 +step:2978 train loss:3.878906 +step:2979 train loss:3.860537 +step:2980 train loss:3.868999 +step:2981 train loss:3.826348 +step:2982 train loss:3.815629 +step:2983 train loss:3.769952 +step:2984 train loss:3.740813 +step:2985 train loss:3.856852 +step:2986 train loss:3.751778 +step:2987 train loss:3.877516 +step:2988 train loss:3.805993 +step:2989 train loss:3.838363 +step:2990 train loss:3.786196 +step:2991 train loss:3.858985 +step:2992 train loss:3.848413 +step:2993 train loss:3.817086 +step:2994 train loss:3.807301 +step:2995 train loss:3.875051 +step:2996 train loss:3.797313 +step:2997 train loss:3.707518 +step:2998 train loss:3.818972 +step:2999 train loss:3.863076 +step:3000 validation loss:3.750627 total_sharp:1.6508e-04 L1_sharp:6.4637e-05 L2_sharp:1.6293e-05 L3_sharp:3.6175e-05 L4_sharp:2.2394e-05 L5_sharp:1.3309e-05 L6_sharp:9.0354e-06 L7_sharp:1.1630e-05 L8_sharp:2.5386e-05 L9_sharp:3.8416e-05 L10_sharp:4.4474e-05 L11_sharp:3.7678e-05 L12_sharp:3.8743e-04 total_fnorm:2.0993e+01 total_l1_linf:1.8398e+05 total_spectral:2.0993e+01 L1_fnorm:4.7404e+00 L2_fnorm:4.7638e+00 L3_fnorm:4.6187e+00 L4_fnorm:4.6188e+00 L5_fnorm:4.7903e+00 L6_fnorm:4.8290e+00 L7_fnorm:4.9609e+00 L8_fnorm:4.8958e+00 L9_fnorm:4.9165e+00 L10_fnorm:4.9490e+00 L11_fnorm:5.0502e+00 L12_fnorm:4.8004e+00 L1_l1linf:4.4587e+00 L2_l1linf:4.9215e+00 L3_l1linf:5.0170e+00 L4_l1linf:4.8924e+00 L5_l1linf:5.0911e+00 L6_l1linf:4.8027e+00 L7_l1linf:5.3132e+00 L8_l1linf:5.5478e+00 L9_l1linf:5.7122e+00 L10_l1linf:5.8629e+00 L11_l1linf:6.8187e+00 L12_l1linf:6.2290e+00 L1_spectral:7.7407e-01 L2_spectral:6.4854e-01 L3_spectral:6.0788e-01 L4_spectral:6.7499e-01 L5_spectral:6.0455e-01 L6_spectral:5.4541e-01 L7_spectral:5.9069e-01 L8_spectral:6.9900e-01 L9_spectral:8.0944e-01 L10_spectral:8.6982e-01 L11_spectral:8.9943e-01 L12_spectral:1.0970e+00 ip_v_neg_g:4.2092e-02 cos_v_neg_g:4.9857e-03 v_norm:2.0993e+01 g_norm:4.0216e-01 hv_norm:3.3005e-01 cos_v_hv:1.0500e-02 hg_norm:2.7431e+00 cos_g_hg:5.1059e-01 v_par:1.1691e-02 v_perp:2.0993e+01 L1_cos_v_neg_g:2.9487e-02 L1_v_norm:4.7404e+00 L2_cos_v_neg_g:1.4507e-02 L2_v_norm:4.7638e+00 L3_cos_v_neg_g:1.6677e-02 L3_v_norm:4.6187e+00 L4_cos_v_neg_g:1.1309e-02 L4_v_norm:4.6188e+00 L5_cos_v_neg_g:1.0571e-02 L5_v_norm:4.7903e+00 L6_cos_v_neg_g:8.6425e-03 L6_v_norm:4.8290e+00 L7_cos_v_neg_g:8.5985e-03 L7_v_norm:4.9609e+00 L8_cos_v_neg_g:1.2785e-02 L8_v_norm:4.8958e+00 L9_cos_v_neg_g:1.7541e-02 L9_v_norm:4.9165e+00 L10_cos_v_neg_g:1.8952e-02 L10_v_norm:4.9490e+00 L11_cos_v_neg_g:2.1908e-02 L11_v_norm:5.0502e+00 L12_cos_v_neg_g:4.0389e-02 L12_v_norm:4.8004e+00 +step:3000 train loss:3.762662 +step:3001 train loss:3.812480 +step:3002 train loss:3.807842 +step:3003 train loss:3.804715 +step:3004 train loss:3.834745 +step:3005 train loss:3.731804 +step:3006 train loss:3.780408 +step:3007 train loss:3.812943 +step:3008 train loss:3.852348 +step:3009 train loss:3.808024 +step:3010 train loss:3.827359 +step:3011 train loss:3.818449 +step:3012 train loss:3.794403 +step:3013 train loss:3.837276 +step:3014 
train loss:3.791305 +step:3015 train loss:3.791223 +step:3016 train loss:3.810157 +step:3017 train loss:3.831821 +step:3018 train loss:3.762656 +step:3019 train loss:3.801069 +step:3020 train loss:3.821976 +step:3021 train loss:3.783366 +step:3022 train loss:3.876657 +step:3023 train loss:3.824218 +step:3024 train loss:3.812189 +step:3025 train loss:3.816667 +step:3026 train loss:3.795207 +step:3027 train loss:3.774366 +step:3028 train loss:3.821579 +step:3029 train loss:3.811020 +step:3030 train loss:3.786739 +step:3031 train loss:3.768689 +step:3032 train loss:3.756749 +step:3033 train loss:3.786140 +step:3034 train loss:3.829745 +step:3035 train loss:3.809910 +step:3036 train loss:3.769927 +step:3037 train loss:3.731593 +step:3038 train loss:3.845316 +step:3039 train loss:3.723151 +step:3040 train loss:3.709523 +step:3041 train loss:3.838569 +step:3042 train loss:3.776429 +step:3043 train loss:3.836420 +step:3044 train loss:3.729869 +step:3045 train loss:3.775090 +step:3046 train loss:3.748755 +step:3047 train loss:3.777606 +step:3048 train loss:3.746046 +step:3049 train loss:3.826977 +step:3050 train loss:3.712448 +step:3051 train loss:3.727584 +step:3052 train loss:3.748808 +step:3053 train loss:3.821141 +step:3054 train loss:3.893436 +step:3055 train loss:3.730435 +step:3056 train loss:3.764451 +step:3057 train loss:3.799172 +step:3058 train loss:3.747766 +step:3059 train loss:3.777157 +step:3060 train loss:3.771537 +step:3061 train loss:3.759415 +step:3062 train loss:3.812693 +step:3063 train loss:3.793824 +step:3064 train loss:3.817606 +step:3065 train loss:3.833882 +step:3066 train loss:3.735443 +step:3067 train loss:3.789268 +step:3068 train loss:3.835288 +step:3069 train loss:3.853926 +step:3070 train loss:3.782870 +step:3071 train loss:3.793687 +step:3072 train loss:3.795951 +step:3073 train loss:3.833600 +step:3074 train loss:3.768949 +step:3075 train loss:3.803181 +step:3076 train loss:3.739337 +step:3077 train loss:3.735116 +step:3078 train loss:3.765463 +step:3079 train loss:3.813732 +step:3080 train loss:3.806289 +step:3081 train loss:3.855187 +step:3082 train loss:3.830852 +step:3083 train loss:3.756159 +step:3084 train loss:3.839819 +step:3085 train loss:3.769496 +step:3086 train loss:3.829373 +step:3087 train loss:3.794905 +step:3088 train loss:3.871736 +step:3089 train loss:3.750938 +step:3090 train loss:3.822420 +step:3091 train loss:3.741908 +step:3092 train loss:3.767357 +step:3093 train loss:3.791837 +step:3094 train loss:3.782764 +step:3095 train loss:3.862985 +step:3096 train loss:3.791666 +step:3097 train loss:3.806581 +step:3098 train loss:3.779414 +step:3099 train loss:3.790450 +step:3100 train loss:3.818859 +step:3101 train loss:3.900783 +step:3102 train loss:3.825766 +step:3103 train loss:3.752841 +step:3104 train loss:3.833755 +step:3105 train loss:3.807316 +step:3106 train loss:3.801281 +step:3107 train loss:3.782577 +step:3108 train loss:3.757176 +step:3109 train loss:3.811148 +step:3110 train loss:3.741321 +step:3111 train loss:3.776883 +step:3112 train loss:3.711694 +step:3113 train loss:3.833271 +step:3114 train loss:3.743047 +step:3115 train loss:3.786126 +step:3116 train loss:3.668977 +step:3117 train loss:3.687388 +step:3118 train loss:3.789220 +step:3119 train loss:3.799847 +step:3120 train loss:3.797697 +step:3121 train loss:3.745870 +step:3122 train loss:3.825288 +step:3123 train loss:3.741972 +step:3124 train loss:3.802192 +step:3125 train loss:3.817873 +step:3126 train loss:3.925746 +step:3127 train loss:3.768492 +step:3128 train loss:3.795132 
+step:3129 train loss:3.780678 +step:3130 train loss:3.759686 +step:3131 train loss:3.830847 +step:3132 train loss:3.824806 +step:3133 train loss:3.792037 +step:3134 train loss:3.685779 +step:3135 train loss:3.781226 +step:3136 train loss:3.755084 +step:3137 train loss:3.882318 +step:3138 train loss:3.784827 +step:3139 train loss:3.766748 +step:3140 train loss:3.786859 +step:3141 train loss:3.788266 +step:3142 train loss:3.724337 +step:3143 train loss:3.809335 +step:3144 train loss:3.756798 +step:3145 train loss:3.741894 +step:3146 train loss:3.755265 +step:3147 train loss:3.864329 +step:3148 train loss:3.769950 +step:3149 train loss:3.822427 +step:3150 train loss:3.806849 +step:3151 train loss:3.780099 +step:3152 train loss:3.776441 +step:3153 train loss:3.734057 +step:3154 train loss:3.815364 +step:3155 train loss:3.761508 +step:3156 train loss:3.812907 +step:3157 train loss:3.819599 +step:3158 train loss:3.788913 +step:3159 train loss:3.729489 +step:3160 train loss:3.774969 +step:3161 train loss:3.742695 +step:3162 train loss:3.804165 +step:3163 train loss:3.784302 +step:3164 train loss:3.766786 +step:3165 train loss:3.782375 +step:3166 train loss:3.817750 +step:3167 train loss:3.782992 +step:3168 train loss:3.865728 +step:3169 train loss:3.777633 +step:3170 train loss:3.761914 +step:3171 train loss:3.746397 +step:3172 train loss:3.749960 +step:3173 train loss:3.694134 +step:3174 train loss:3.813819 +step:3175 train loss:3.783918 +step:3176 train loss:3.798762 +step:3177 train loss:3.761910 +step:3178 train loss:3.745662 +step:3179 train loss:3.815361 +step:3180 train loss:3.748636 +step:3181 train loss:3.821814 +step:3182 train loss:3.830771 +step:3183 train loss:3.768562 +step:3184 train loss:3.769052 +step:3185 train loss:3.824903 +step:3186 train loss:3.789079 +step:3187 train loss:3.805715 +step:3188 train loss:3.845149 +step:3189 train loss:3.793473 +step:3190 train loss:3.743670 +step:3191 train loss:3.749202 +step:3192 train loss:3.716599 +step:3193 train loss:3.791215 +step:3194 train loss:3.753805 +step:3195 train loss:3.741313 +step:3196 train loss:3.790300 +step:3197 train loss:3.752306 +step:3198 train loss:3.793269 +step:3199 train loss:3.769670 +step:3200 train loss:3.774179 +step:3201 train loss:3.741583 +step:3202 train loss:3.804839 +step:3203 train loss:3.863648 +step:3204 train loss:3.826955 +step:3205 train loss:3.673454 +step:3206 train loss:3.957729 +step:3207 train loss:3.718947 +step:3208 train loss:3.781486 +step:3209 train loss:3.774185 +step:3210 train loss:3.754163 +step:3211 train loss:3.780784 +step:3212 train loss:3.797252 +step:3213 train loss:3.733563 +step:3214 train loss:3.841236 +step:3215 train loss:3.843806 +step:3216 train loss:3.712068 +step:3217 train loss:3.788652 +step:3218 train loss:3.832835 +step:3219 train loss:3.750078 +step:3220 train loss:3.818926 +step:3221 train loss:3.731433 +step:3222 train loss:3.770012 +step:3223 train loss:3.787695 +step:3224 train loss:3.797279 +step:3225 train loss:3.727394 +step:3226 train loss:3.758553 +step:3227 train loss:3.787177 +step:3228 train loss:3.779732 +step:3229 train loss:3.817348 +step:3230 train loss:3.835972 +step:3231 train loss:3.769285 +step:3232 train loss:3.781676 +step:3233 train loss:3.751280 +step:3234 train loss:3.736294 +step:3235 train loss:3.742743 +step:3236 train loss:3.757858 +step:3237 train loss:3.761476 +step:3238 train loss:3.773707 +step:3239 train loss:3.680954 +step:3240 train loss:3.793558 +step:3241 train loss:3.787265 +step:3242 train loss:3.844024 +step:3243 train 
loss:3.786866 +step:3244 train loss:3.801627 +step:3245 train loss:3.708965 +step:3246 train loss:3.831552 +step:3247 train loss:3.774260 +step:3248 train loss:3.796102 +step:3249 train loss:3.742457 +step:3250 validation loss:3.708426 +step:3250 train loss:3.742076 +step:3251 train loss:3.849703 +step:3252 train loss:3.781614 +step:3253 train loss:3.785841 +step:3254 train loss:3.855021 +step:3255 train loss:3.794457 +step:3256 train loss:3.788757 +step:3257 train loss:3.769223 +step:3258 train loss:3.701087 +step:3259 train loss:3.678823 +step:3260 train loss:3.794620 +step:3261 train loss:3.777224 +step:3262 train loss:3.760388 +step:3263 train loss:3.748135 +step:3264 train loss:3.861330 +step:3265 train loss:3.771712 +step:3266 train loss:3.798518 +step:3267 train loss:3.759688 +step:3268 train loss:3.762201 +step:3269 train loss:3.773453 +step:3270 train loss:3.802251 +step:3271 train loss:3.765751 +step:3272 train loss:3.742798 +step:3273 train loss:3.753430 +step:3274 train loss:3.882848 +step:3275 train loss:3.761522 +step:3276 train loss:3.826546 +step:3277 train loss:3.767887 +step:3278 train loss:3.746324 +step:3279 train loss:3.771747 +step:3280 train loss:3.795122 +step:3281 train loss:3.722637 +step:3282 train loss:3.793582 +step:3283 train loss:3.766698 +step:3284 train loss:3.726783 +step:3285 train loss:3.745250 +step:3286 train loss:3.774729 +step:3287 train loss:3.712677 +step:3288 train loss:3.795310 +step:3289 train loss:3.738000 +step:3290 train loss:3.773690 +step:3291 train loss:3.731534 +step:3292 train loss:3.753689 +step:3293 train loss:3.795908 +step:3294 train loss:3.812954 +step:3295 train loss:3.720936 +step:3296 train loss:3.780336 +step:3297 train loss:3.735260 +step:3298 train loss:3.739676 +step:3299 train loss:3.868869 +step:3300 train loss:3.707925 +step:3301 train loss:3.791827 +step:3302 train loss:3.760745 +step:3303 train loss:3.789463 +step:3304 train loss:3.753982 +step:3305 train loss:3.845877 +step:3306 train loss:3.771864 +step:3307 train loss:3.797747 +step:3308 train loss:3.747725 +step:3309 train loss:3.803598 +step:3310 train loss:3.722399 +step:3311 train loss:3.777819 +step:3312 train loss:3.742410 +step:3313 train loss:3.775187 +step:3314 train loss:3.772278 +step:3315 train loss:3.852332 +step:3316 train loss:3.705635 +step:3317 train loss:3.793314 +step:3318 train loss:3.803953 +step:3319 train loss:3.730566 +step:3320 train loss:3.887182 +step:3321 train loss:3.792497 +step:3322 train loss:3.791487 +step:3323 train loss:3.896324 +step:3324 train loss:3.813040 +step:3325 train loss:3.787602 +step:3326 train loss:3.779272 +step:3327 train loss:3.791890 +step:3328 train loss:3.771820 +step:3329 train loss:3.769341 +step:3330 train loss:3.760971 +step:3331 train loss:3.807047 +step:3332 train loss:3.823419 +step:3333 train loss:3.795871 +step:3334 train loss:3.726133 +step:3335 train loss:3.739300 +step:3336 train loss:3.778220 +step:3337 train loss:3.775980 +step:3338 train loss:3.762415 +step:3339 train loss:3.755771 +step:3340 train loss:3.796181 +step:3341 train loss:3.746274 +step:3342 train loss:3.797996 +step:3343 train loss:3.734398 +step:3344 train loss:3.790193 +step:3345 train loss:3.739045 +step:3346 train loss:3.750679 +step:3347 train loss:3.756382 +step:3348 train loss:3.777580 +step:3349 train loss:3.769726 +step:3350 train loss:3.793700 +step:3351 train loss:3.844604 +step:3352 train loss:3.787736 +step:3353 train loss:3.882474 +step:3354 train loss:3.729012 +step:3355 train loss:3.835289 +step:3356 train loss:3.784699 
+step:3357 train loss:3.790755 +step:3358 train loss:3.735882 +step:3359 train loss:3.764126 +step:3360 train loss:3.755747 +step:3361 train loss:3.759688 +step:3362 train loss:3.753248 +step:3363 train loss:3.752254 +step:3364 train loss:3.733047 +step:3365 train loss:3.769777 +step:3366 train loss:3.797513 +step:3367 train loss:3.752456 +step:3368 train loss:3.846790 +step:3369 train loss:3.758201 +step:3370 train loss:3.843137 +step:3371 train loss:3.816782 +step:3372 train loss:3.781458 +step:3373 train loss:3.787332 +step:3374 train loss:3.837189 +step:3375 train loss:3.769158 +step:3376 train loss:3.783437 +step:3377 train loss:3.765102 +step:3378 train loss:3.739679 +step:3379 train loss:3.819993 +step:3380 train loss:3.798544 +step:3381 train loss:3.781558 +step:3382 train loss:3.797343 +step:3383 train loss:3.809165 +step:3384 train loss:3.738893 +step:3385 train loss:3.783615 +step:3386 train loss:3.763577 +step:3387 train loss:3.838101 +step:3388 train loss:3.740482 +step:3389 train loss:3.931207 +step:3390 train loss:3.691907 +step:3391 train loss:3.776025 +step:3392 train loss:3.751521 +step:3393 train loss:3.783070 +step:3394 train loss:3.734964 +step:3395 train loss:3.805257 +step:3396 train loss:3.720097 +step:3397 train loss:3.798779 +step:3398 train loss:3.760805 +step:3399 train loss:3.778980 +step:3400 train loss:3.724394 +step:3401 train loss:3.761102 +step:3402 train loss:3.920272 +step:3403 train loss:3.808891 +step:3404 train loss:3.925392 +step:3405 train loss:3.776611 +step:3406 train loss:3.756465 +step:3407 train loss:3.760926 +step:3408 train loss:3.739097 +step:3409 train loss:3.708350 +step:3410 train loss:3.738003 +step:3411 train loss:3.805187 +step:3412 train loss:3.731126 +step:3413 train loss:3.723162 +step:3414 train loss:3.757679 +step:3415 train loss:3.731083 +step:3416 train loss:3.734233 +step:3417 train loss:3.817829 +step:3418 train loss:3.816901 +step:3419 train loss:3.773550 +step:3420 train loss:3.747862 +step:3421 train loss:3.783167 +step:3422 train loss:3.800098 +step:3423 train loss:3.818587 +step:3424 train loss:3.700361 +step:3425 train loss:3.719685 +step:3426 train loss:3.718413 +step:3427 train loss:3.776985 +step:3428 train loss:3.702891 +step:3429 train loss:3.765691 +step:3430 train loss:3.733542 +step:3431 train loss:3.786744 +step:3432 train loss:3.769642 +step:3433 train loss:3.735704 +step:3434 train loss:3.823147 +step:3435 train loss:3.760945 +step:3436 train loss:3.853298 +step:3437 train loss:3.681860 +step:3438 train loss:3.787036 +step:3439 train loss:3.760842 +step:3440 train loss:3.855579 +step:3441 train loss:3.751958 +step:3442 train loss:3.815362 +step:3443 train loss:3.753244 +step:3444 train loss:3.773389 +step:3445 train loss:3.815536 +step:3446 train loss:3.720208 +step:3447 train loss:3.790736 +step:3448 train loss:3.747165 +step:3449 train loss:3.780607 +step:3450 train loss:3.697149 +step:3451 train loss:3.805769 +step:3452 train loss:3.758937 +step:3453 train loss:3.810644 +step:3454 train loss:3.834483 +step:3455 train loss:3.901495 +step:3456 train loss:3.839435 +step:3457 train loss:3.834621 +step:3458 train loss:3.755177 +step:3459 train loss:3.772299 +step:3460 train loss:3.715550 +step:3461 train loss:3.774916 +step:3462 train loss:3.775120 +step:3463 train loss:3.743238 +step:3464 train loss:3.797745 +step:3465 train loss:3.731810 +step:3466 train loss:3.796852 +step:3467 train loss:3.764520 +step:3468 train loss:3.771109 +step:3469 train loss:3.785327 +step:3470 train loss:3.761780 +step:3471 train 
loss:3.800077 +step:3472 train loss:3.685693 +step:3473 train loss:3.807919 +step:3474 train loss:3.707133 +step:3475 train loss:3.784333 +step:3476 train loss:3.756288 +step:3477 train loss:3.776155 +step:3478 train loss:3.746589 +step:3479 train loss:3.776479 +step:3480 train loss:3.793377 +step:3481 train loss:3.775859 +step:3482 train loss:3.755972 +step:3483 train loss:3.902198 +step:3484 train loss:3.740506 +step:3485 train loss:3.727952 +step:3486 train loss:3.780238 +step:3487 train loss:3.820320 +step:3488 train loss:3.726926 +step:3489 train loss:3.778176 +step:3490 train loss:3.745702 +step:3491 train loss:3.783910 +step:3492 train loss:3.819929 +step:3493 train loss:3.794811 +step:3494 train loss:3.789750 +step:3495 train loss:3.761998 +step:3496 train loss:3.726364 +step:3497 train loss:3.840569 +step:3498 train loss:3.786790 +step:3499 train loss:3.717656 +step:3500 validation loss:3.687883 total_sharp:9.0115e-05 L1_sharp:2.9686e-05 L2_sharp:5.5057e-06 L3_sharp:1.9320e-05 L4_sharp:1.2548e-05 L5_sharp:1.2906e-05 L6_sharp:1.1421e-05 L7_sharp:1.0687e-05 L8_sharp:2.2421e-05 L9_sharp:2.5626e-05 L10_sharp:3.0865e-05 L11_sharp:2.4086e-05 L12_sharp:1.3690e-04 total_fnorm:2.0529e+01 total_l1_linf:1.7914e+05 total_spectral:2.0529e+01 L1_fnorm:4.6492e+00 L2_fnorm:4.4842e+00 L3_fnorm:4.3995e+00 L4_fnorm:4.4336e+00 L5_fnorm:4.6480e+00 L6_fnorm:4.7671e+00 L7_fnorm:4.8477e+00 L8_fnorm:4.8179e+00 L9_fnorm:4.7994e+00 L10_fnorm:4.7891e+00 L11_fnorm:4.8120e+00 L12_fnorm:4.4171e+00 L1_l1linf:4.5236e+00 L2_l1linf:4.8055e+00 L3_l1linf:4.6300e+00 L4_l1linf:4.5546e+00 L5_l1linf:4.7513e+00 L6_l1linf:4.8353e+00 L7_l1linf:4.9649e+00 L8_l1linf:4.9260e+00 L9_l1linf:4.9025e+00 L10_l1linf:5.1996e+00 L11_l1linf:5.7106e+00 L12_l1linf:5.3288e+00 L1_spectral:7.4515e-01 L2_spectral:4.8667e-01 L3_spectral:5.3016e-01 L4_spectral:7.0606e-01 L5_spectral:5.6919e-01 L6_spectral:5.1370e-01 L7_spectral:5.3405e-01 L8_spectral:6.0595e-01 L9_spectral:6.4185e-01 L10_spectral:6.9743e-01 L11_spectral:6.6245e-01 L12_spectral:7.3283e-01 ip_v_neg_g:1.9168e-02 cos_v_neg_g:2.1983e-03 v_norm:2.0529e+01 g_norm:4.2474e-01 hv_norm:2.4082e-01 cos_v_hv:7.6821e-03 hg_norm:2.9972e+00 cos_g_hg:5.0027e-01 v_par:6.5859e-03 v_perp:2.0529e+01 L1_cos_v_neg_g:1.2887e-02 L1_v_norm:4.6492e+00 L2_cos_v_neg_g:4.9735e-03 L2_v_norm:4.4842e+00 L3_cos_v_neg_g:5.7124e-03 L3_v_norm:4.3995e+00 L4_cos_v_neg_g:6.0500e-03 L4_v_norm:4.4336e+00 L5_cos_v_neg_g:6.7878e-03 L5_v_norm:4.6480e+00 L6_cos_v_neg_g:5.6292e-03 L6_v_norm:4.7671e+00 L7_cos_v_neg_g:5.5767e-03 L7_v_norm:4.8477e+00 L8_cos_v_neg_g:7.5843e-03 L8_v_norm:4.8179e+00 L9_cos_v_neg_g:7.8915e-03 L9_v_norm:4.7994e+00 L10_cos_v_neg_g:1.0115e-02 L10_v_norm:4.7891e+00 L11_cos_v_neg_g:1.0557e-02 L11_v_norm:4.8120e+00 L12_cos_v_neg_g:1.4949e-02 L12_v_norm:4.4171e+00 +step:3500 train loss:3.737460 +step:3501 train loss:3.868955 +step:3502 train loss:3.843394 +step:3503 train loss:3.797956 +step:3504 train loss:3.748583 +step:3505 train loss:3.762247 +step:3506 train loss:3.657058 +step:3507 train loss:3.778368 +step:3508 train loss:3.721932 +step:3509 train loss:3.793047 +step:3510 train loss:3.725952 +step:3511 train loss:3.762603 +step:3512 train loss:3.905454 +step:3513 train loss:3.723068 +step:3514 train loss:3.736202 +step:3515 train loss:3.989509 +step:3516 train loss:3.782963 +step:3517 train loss:3.742290 +step:3518 train loss:3.746742 +step:3519 train loss:3.738236 +step:3520 train loss:3.767350 +step:3521 train loss:3.759269 +step:3522 train loss:3.667831 +step:3523 train loss:3.772521 +step:3524 
train loss:3.753378 +step:3525 train loss:3.745097 +step:3526 train loss:3.768398 +step:3527 train loss:3.717966 +step:3528 train loss:3.769336 +step:3529 train loss:3.747448 +step:3530 train loss:3.738539 +step:3531 train loss:3.735570 +step:3532 train loss:3.921616 +step:3533 train loss:3.743977 +step:3534 train loss:3.760689 +step:3535 train loss:3.732688 +step:3536 train loss:3.732701 +step:3537 train loss:3.743445 +step:3538 train loss:3.773590 +step:3539 train loss:3.721798 +step:3540 train loss:3.784842 +step:3541 train loss:3.750637 +step:3542 train loss:3.766248 +step:3543 train loss:3.684024 +step:3544 train loss:3.713953 +step:3545 train loss:3.706504 +step:3546 train loss:3.776921 +step:3547 train loss:3.782439 +step:3548 train loss:3.756931 +step:3549 train loss:3.753715 +step:3550 train loss:3.743807 +step:3551 train loss:3.772882 +step:3552 train loss:3.671483 +step:3553 train loss:3.788656 +step:3554 train loss:3.782147 +step:3555 train loss:3.766637 +step:3556 train loss:3.790739 +step:3557 train loss:3.776308 +step:3558 train loss:3.752219 +step:3559 train loss:3.697267 +step:3560 train loss:3.790522 +step:3561 train loss:3.785511 +step:3562 train loss:3.958288 +step:3563 train loss:3.815726 +step:3564 train loss:3.778459 +step:3565 train loss:3.776178 +step:3566 train loss:3.750087 +step:3567 train loss:3.688289 +step:3568 train loss:3.715363 +step:3569 train loss:3.796704 +step:3570 train loss:3.822758 +step:3571 train loss:3.801907 +step:3572 train loss:3.792284 +step:3573 train loss:3.746319 +step:3574 train loss:3.744757 +step:3575 train loss:3.742497 +step:3576 train loss:3.720027 +step:3577 train loss:3.729693 +step:3578 train loss:3.815580 +step:3579 train loss:3.726104 +step:3580 train loss:3.807002 +step:3581 train loss:3.746405 +step:3582 train loss:3.802070 +step:3583 train loss:3.738521 +step:3584 train loss:3.712718 +step:3585 train loss:3.763487 +step:3586 train loss:3.714343 +step:3587 train loss:3.805884 +step:3588 train loss:3.933160 +step:3589 train loss:3.772130 +step:3590 train loss:3.755237 +step:3591 train loss:3.764277 +step:3592 train loss:3.724995 +step:3593 train loss:3.697990 +step:3594 train loss:3.749382 +step:3595 train loss:3.725991 +step:3596 train loss:3.811748 +step:3597 train loss:3.775138 +step:3598 train loss:3.730714 +step:3599 train loss:3.782701 +step:3600 train loss:3.720556 +step:3601 train loss:3.734619 +step:3602 train loss:3.727166 +step:3603 train loss:3.741192 +step:3604 train loss:3.767726 +step:3605 train loss:3.865945 +step:3606 train loss:3.772404 +step:3607 train loss:3.757028 +step:3608 train loss:3.774477 +step:3609 train loss:3.757318 +step:3610 train loss:3.726518 +step:3611 train loss:3.726083 +step:3612 train loss:3.794181 +step:3613 train loss:3.764436 +step:3614 train loss:3.704656 +step:3615 train loss:3.749351 +step:3616 train loss:3.751786 +step:3617 train loss:3.820138 +step:3618 train loss:3.775827 +step:3619 train loss:3.758002 +step:3620 train loss:3.775537 +step:3621 train loss:3.726565 +step:3622 train loss:3.835036 +step:3623 train loss:3.822123 +step:3624 train loss:3.786430 +step:3625 train loss:3.767792 +step:3626 train loss:3.771789 +step:3627 train loss:3.767183 +step:3628 train loss:3.751993 +step:3629 train loss:3.751287 +step:3630 train loss:3.838847 +step:3631 train loss:3.762208 +step:3632 train loss:3.792053 +step:3633 train loss:3.748232 +step:3634 train loss:3.748256 +step:3635 train loss:3.736278 +step:3636 train loss:3.807394 +step:3637 train loss:3.886382 +step:3638 train loss:3.797920 
+step:3639 train loss:3.787483 +step:3640 train loss:3.794908 +step:3641 train loss:3.830599 +step:3642 train loss:3.722651 +step:3643 train loss:3.892411 +step:3644 train loss:3.785973 +step:3645 train loss:3.758821 +step:3646 train loss:3.880581 +step:3647 train loss:3.772539 +step:3648 train loss:3.761270 +step:3649 train loss:3.712299 +step:3650 train loss:3.750271 +step:3651 train loss:3.746892 +step:3652 train loss:3.732866 +step:3653 train loss:3.667016 +step:3654 train loss:3.730281 +step:3655 train loss:3.726777 +step:3656 train loss:3.759579 +step:3657 train loss:3.774157 +step:3658 train loss:3.769106 +step:3659 train loss:3.751903 +step:3660 train loss:3.726188 +step:3661 train loss:3.749283 +step:3662 train loss:3.725955 +step:3663 train loss:3.761365 +step:3664 train loss:3.716291 +step:3665 train loss:3.762964 +step:3666 train loss:3.795705 +step:3667 train loss:3.884335 +step:3668 train loss:3.767411 +step:3669 train loss:3.722663 +step:3670 train loss:3.772770 +step:3671 train loss:3.731110 +step:3672 train loss:3.768720 +step:3673 train loss:3.752767 +step:3674 train loss:3.765181 +step:3675 train loss:3.776972 +step:3676 train loss:3.743451 +step:3677 train loss:3.703946 +step:3678 train loss:3.760862 +step:3679 train loss:3.662963 +step:3680 train loss:3.765555 +step:3681 train loss:3.797568 +step:3682 train loss:3.777664 +step:3683 train loss:3.724712 +step:3684 train loss:3.722107 +step:3685 train loss:3.751564 +step:3686 train loss:3.782581 +step:3687 train loss:3.732080 +step:3688 train loss:3.709612 +step:3689 train loss:3.744256 +step:3690 train loss:3.733650 +step:3691 train loss:3.712193 +step:3692 train loss:3.773779 +step:3693 train loss:3.902945 +step:3694 train loss:3.722325 +step:3695 train loss:3.782216 +step:3696 train loss:3.741015 +step:3697 train loss:3.734107 +step:3698 train loss:3.673866 +step:3699 train loss:3.700936 +step:3700 train loss:3.728713 +step:3701 train loss:3.748580 +step:3702 train loss:3.768116 +step:3703 train loss:3.728412 +step:3704 train loss:3.771852 +step:3705 train loss:3.750015 +step:3706 train loss:3.705411 +step:3707 train loss:3.758126 +step:3708 train loss:3.734476 +step:3709 train loss:3.657531 +step:3710 train loss:3.779071 +step:3711 train loss:3.731791 +step:3712 train loss:3.770547 +step:3713 train loss:3.722825 +step:3714 train loss:3.738846 +step:3715 train loss:3.857023 +step:3716 train loss:3.763582 +step:3717 train loss:3.736211 +step:3718 train loss:3.744042 +step:3719 train loss:3.741137 +step:3720 train loss:3.752389 +step:3721 train loss:3.806701 +step:3722 train loss:3.820839 +step:3723 train loss:3.704513 +step:3724 train loss:3.762149 +step:3725 train loss:3.739264 +step:3726 train loss:3.759282 +step:3727 train loss:3.835269 +step:3728 train loss:3.798802 +step:3729 train loss:3.700169 +step:3730 train loss:3.719260 +step:3731 train loss:3.739792 +step:3732 train loss:3.892477 +step:3733 train loss:3.749022 +step:3734 train loss:3.753490 +step:3735 train loss:3.693634 +step:3736 train loss:3.747566 +step:3737 train loss:3.798209 +step:3738 train loss:3.818918 +step:3739 train loss:3.736758 +step:3740 train loss:3.640667 +step:3741 train loss:3.849105 +step:3742 train loss:3.757149 +step:3743 train loss:3.733229 +step:3744 train loss:3.735183 +step:3745 train loss:3.750847 +step:3746 train loss:3.732128 +step:3747 train loss:3.735503 +step:3748 train loss:3.772516 +step:3749 train loss:3.762720 +step:3750 validation loss:3.673057 +step:3750 train loss:3.776463 +step:3751 train loss:3.862847 +step:3752 
train loss:3.794583 +step:3753 train loss:3.714755 +step:3754 train loss:3.765240 +step:3755 train loss:3.941423 +step:3756 train loss:3.719675 +step:3757 train loss:3.716896 +step:3758 train loss:3.746935 +step:3759 train loss:3.693101 +step:3760 train loss:3.687785 +step:3761 train loss:3.737206 +step:3762 train loss:3.730917 +step:3763 train loss:3.731561 +step:3764 train loss:3.728042 +step:3765 train loss:3.725638 +step:3766 train loss:3.694248 +step:3767 train loss:3.777014 +step:3768 train loss:3.720104 +step:3769 train loss:3.985093 +step:3770 train loss:3.778486 +step:3771 train loss:3.788080 +step:3772 train loss:3.748414 +step:3773 train loss:3.738791 +step:3774 train loss:3.742692 +step:3775 train loss:3.738718 +step:3776 train loss:3.736489 +step:3777 train loss:3.695094 +step:3778 train loss:3.712742 +step:3779 train loss:3.697162 +step:3780 train loss:3.777273 +step:3781 train loss:3.741336 +step:3782 train loss:3.662621 +step:3783 train loss:3.766727 +step:3784 train loss:3.775571 +step:3785 train loss:3.685579 +step:3786 train loss:3.793908 +step:3787 train loss:3.703049 +step:3788 train loss:3.717342 +step:3789 train loss:3.624351 +step:3790 train loss:3.744075 +step:3791 train loss:3.767031 +step:3792 train loss:3.736241 +step:3793 train loss:3.735518 +step:3794 train loss:3.764390 +step:3795 train loss:3.729763 +step:3796 train loss:3.746638 +step:3797 train loss:3.721831 +step:3798 train loss:3.730173 +step:3799 train loss:3.742568 +step:3800 train loss:3.648449 +step:3801 train loss:3.762446 +step:3802 train loss:3.691536 +step:3803 train loss:3.771105 +step:3804 train loss:3.782879 +step:3805 train loss:3.740227 +step:3806 train loss:3.761469 +step:3807 train loss:3.781457 +step:3808 train loss:3.736911 +step:3809 train loss:3.749341 +step:3810 train loss:3.751703 +step:3811 train loss:3.737285 +step:3812 train loss:3.738978 +step:3813 train loss:3.696675 +step:3814 train loss:3.739732 +step:3815 train loss:3.738838 +step:3816 train loss:3.757789 +step:3817 train loss:3.777979 +step:3818 train loss:3.747464 +step:3819 train loss:3.761796 +step:3820 train loss:3.759837 +step:3821 train loss:3.716838 +step:3822 train loss:3.799703 +step:3823 train loss:3.694842 +step:3824 train loss:3.709570 +step:3825 train loss:3.715691 +step:3826 train loss:3.786011 +step:3827 train loss:3.810402 +step:3828 train loss:3.695906 +step:3829 train loss:3.718764 +step:3830 train loss:3.774819 +step:3831 train loss:3.708560 +step:3832 train loss:3.771557 +step:3833 train loss:3.710008 +step:3834 train loss:3.673367 +step:3835 train loss:3.717105 +step:3836 train loss:3.692219 +step:3837 train loss:3.759978 +step:3838 train loss:3.714885 +step:3839 train loss:3.754887 +step:3840 train loss:3.768791 +step:3841 train loss:3.714389 +step:3842 train loss:3.752715 +step:3843 train loss:3.771230 +step:3844 train loss:3.737312 +step:3845 train loss:3.761327 +step:3846 train loss:3.799851 +step:3847 train loss:3.698820 +step:3848 train loss:3.705767 +step:3849 train loss:3.720582 +step:3850 train loss:3.740904 +step:3851 train loss:3.876769 +step:3852 train loss:3.858252 +step:3853 train loss:3.756883 +step:3854 train loss:3.735576 +step:3855 train loss:3.771158 +step:3856 train loss:3.692483 +step:3857 train loss:3.751615 +step:3858 train loss:3.666674 +step:3859 train loss:3.719186 +step:3860 train loss:3.783747 +step:3861 train loss:3.759497 +step:3862 train loss:3.696230 +step:3863 train loss:3.745359 +step:3864 train loss:3.713209 +step:3865 train loss:3.753071 +step:3866 train loss:3.773683 
+step:3867 train loss:3.767899 +step:3868 train loss:3.718072 +step:3869 train loss:3.718268 +step:3870 train loss:3.693637 +step:3871 train loss:3.693220 +step:3872 train loss:3.825281 +step:3873 train loss:3.743803 +step:3874 train loss:3.758056 +step:3875 train loss:3.864410 +step:3876 train loss:3.741996 +step:3877 train loss:3.769454 +step:3878 train loss:3.795928 +step:3879 train loss:3.779934 +step:3880 train loss:3.863442 +step:3881 train loss:3.683641 +step:3882 train loss:3.718025 +step:3883 train loss:3.730762 +step:3884 train loss:3.719924 +step:3885 train loss:3.734947 +step:3886 train loss:3.797143 +step:3887 train loss:3.776914 +step:3888 train loss:3.737718 +step:3889 train loss:3.711087 +step:3890 train loss:3.744035 +step:3891 train loss:3.758808 +step:3892 train loss:3.668151 +step:3893 train loss:3.774302 +step:3894 train loss:3.723060 +step:3895 train loss:3.742949 +step:3896 train loss:3.732451 +step:3897 train loss:3.703772 +step:3898 train loss:3.760466 +step:3899 train loss:3.801075 +step:3900 train loss:3.754335 +step:3901 train loss:3.774287 +step:3902 train loss:3.698553 +step:3903 train loss:3.714331 +step:3904 train loss:3.746396 +step:3905 train loss:3.682117 +step:3906 train loss:3.718216 +step:3907 train loss:3.749610 +step:3908 train loss:3.827222 +step:3909 train loss:3.720633 +step:3910 train loss:3.748668 +step:3911 train loss:3.760144 +step:3912 train loss:3.710773 +step:3913 train loss:3.729540 +step:3914 train loss:3.747901 +step:3915 train loss:3.714514 +step:3916 train loss:3.752533 +step:3917 train loss:3.797766 +step:3918 train loss:3.774961 +step:3919 train loss:3.748918 +step:3920 train loss:3.724412 +step:3921 train loss:3.767912 +step:3922 train loss:3.767167 +step:3923 train loss:3.757651 +step:3924 train loss:3.696214 +step:3925 train loss:3.891555 +step:3926 train loss:3.740985 +step:3927 train loss:3.720310 +step:3928 train loss:3.795906 +step:3929 train loss:3.868758 +step:3930 train loss:3.771203 +step:3931 train loss:3.713003 +step:3932 train loss:3.756298 +step:3933 train loss:3.776812 +step:3934 train loss:3.727746 +step:3935 train loss:3.701739 +step:3936 train loss:3.790383 +step:3937 train loss:3.748393 +step:3938 train loss:3.759531 +step:3939 train loss:3.781657 +step:3940 train loss:3.730444 +step:3941 train loss:3.816728 +step:3942 train loss:3.774571 +step:3943 train loss:3.755368 +step:3944 train loss:3.809166 +step:3945 train loss:3.718550 +step:3946 train loss:3.662483 +step:3947 train loss:3.789825 +step:3948 train loss:3.762885 +step:3949 train loss:3.921314 +step:3950 train loss:3.727586 +step:3951 train loss:3.651723 +step:3952 train loss:3.613905 +step:3953 train loss:3.692488 +step:3954 train loss:3.741586 +step:3955 train loss:3.769755 +step:3956 train loss:3.723534 +step:3957 train loss:3.780145 +step:3958 train loss:3.754769 +step:3959 train loss:3.791010 +step:3960 train loss:3.716142 +step:3961 train loss:3.737889 +step:3962 train loss:3.749608 +step:3963 train loss:3.720909 +step:3964 train loss:3.702590 +step:3965 train loss:3.760046 +step:3966 train loss:3.717811 +step:3967 train loss:3.757956 +step:3968 train loss:3.777959 +step:3969 train loss:3.687266 +step:3970 train loss:3.806284 +step:3971 train loss:3.716783 +step:3972 train loss:3.746560 +step:3973 train loss:3.706107 +step:3974 train loss:3.796122 +step:3975 train loss:3.756349 +step:3976 train loss:3.707718 +step:3977 train loss:3.764731 +step:3978 train loss:3.728578 +step:3979 train loss:3.718078 +step:3980 train loss:3.783216 +step:3981 train 
loss:3.715428 +step:3982 train loss:3.738038 +step:3983 train loss:3.721811 +step:3984 train loss:3.753002 +step:3985 train loss:3.727776 +step:3986 train loss:3.742810 +step:3987 train loss:3.750086 +step:3988 train loss:3.696385 +step:3989 train loss:3.760916 +step:3990 train loss:3.758948 +step:3991 train loss:3.773306 +step:3992 train loss:3.726393 +step:3993 train loss:3.763344 +step:3994 train loss:3.706203 +step:3995 train loss:3.763405 +step:3996 train loss:3.679223 +step:3997 train loss:3.755968 +step:3998 train loss:3.642506 +step:3999 train loss:3.797953 +step:4000 validation loss:3.657192 total_sharp:1.1863e-04 L1_sharp:2.3049e-05 L2_sharp:3.5699e-06 L3_sharp:3.8477e-06 L4_sharp:1.0990e-05 L5_sharp:8.3402e-06 L6_sharp:6.9430e-06 L7_sharp:9.0212e-06 L8_sharp:2.6984e-05 L9_sharp:4.6871e-05 L10_sharp:6.0154e-05 L11_sharp:3.4595e-05 L12_sharp:2.7473e-04 total_fnorm:2.0813e+01 total_l1_linf:1.8190e+05 total_spectral:2.0813e+01 L1_fnorm:4.6008e+00 L2_fnorm:4.5699e+00 L3_fnorm:4.3888e+00 L4_fnorm:4.5348e+00 L5_fnorm:4.7543e+00 L6_fnorm:4.8763e+00 L7_fnorm:4.9552e+00 L8_fnorm:4.9049e+00 L9_fnorm:4.9325e+00 L10_fnorm:4.9216e+00 L11_fnorm:4.9443e+00 L12_fnorm:4.5917e+00 L1_l1linf:4.6177e+00 L2_l1linf:4.6386e+00 L3_l1linf:4.6010e+00 L4_l1linf:4.6047e+00 L5_l1linf:4.7539e+00 L6_l1linf:5.1810e+00 L7_l1linf:5.1998e+00 L8_l1linf:5.4355e+00 L9_l1linf:6.3141e+00 L10_l1linf:6.0238e+00 L11_l1linf:6.3799e+00 L12_l1linf:6.5509e+00 L1_spectral:7.4296e-01 L2_spectral:5.2711e-01 L3_spectral:5.4551e-01 L4_spectral:7.0494e-01 L5_spectral:5.7711e-01 L6_spectral:5.4281e-01 L7_spectral:5.8151e-01 L8_spectral:7.2280e-01 L9_spectral:8.5951e-01 L10_spectral:9.2105e-01 L11_spectral:8.4880e-01 L12_spectral:9.2666e-01 ip_v_neg_g:2.8117e-02 cos_v_neg_g:3.4351e-03 v_norm:2.0813e+01 g_norm:3.9326e-01 hv_norm:3.7323e-01 cos_v_hv:6.6158e-03 hg_norm:3.7591e+00 cos_g_hg:6.4135e-01 v_par:8.9243e-03 v_perp:2.0813e+01 L1_cos_v_neg_g:1.4447e-02 L1_v_norm:4.6008e+00 L2_cos_v_neg_g:5.6188e-03 L2_v_norm:4.5699e+00 L3_cos_v_neg_g:4.9937e-03 L3_v_norm:4.3888e+00 L4_cos_v_neg_g:5.1974e-03 L4_v_norm:4.5348e+00 L5_cos_v_neg_g:4.8899e-03 L5_v_norm:4.7543e+00 L6_cos_v_neg_g:5.4909e-03 L6_v_norm:4.8763e+00 L7_cos_v_neg_g:7.7953e-03 L7_v_norm:4.9552e+00 L8_cos_v_neg_g:1.2939e-02 L8_v_norm:4.9049e+00 L9_cos_v_neg_g:1.8436e-02 L9_v_norm:4.9325e+00 L10_cos_v_neg_g:2.1792e-02 L10_v_norm:4.9216e+00 L11_cos_v_neg_g:2.1036e-02 L11_v_norm:4.9443e+00 L12_cos_v_neg_g:3.1712e-02 L12_v_norm:4.5917e+00 +step:4000 train loss:3.675063 +step:4001 train loss:3.749565 +step:4002 train loss:3.729030 +step:4003 train loss:3.763680 +step:4004 train loss:3.676046 +step:4005 train loss:3.766512 +step:4006 train loss:3.774355 +step:4007 train loss:3.700032 +step:4008 train loss:3.657638 +step:4009 train loss:3.740132 +step:4010 train loss:3.714764 +step:4011 train loss:3.719854 +step:4012 train loss:3.737137 +step:4013 train loss:3.710818 +step:4014 train loss:3.721731 +step:4015 train loss:3.712082 +step:4016 train loss:3.721648 +step:4017 train loss:3.686110 +step:4018 train loss:3.625996 +step:4019 train loss:3.681626 +step:4020 train loss:3.751594 +step:4021 train loss:3.694623 +step:4022 train loss:3.699365 +step:4023 train loss:3.715048 +step:4024 train loss:3.623164 +step:4025 train loss:3.749386 +step:4026 train loss:3.738633 +step:4027 train loss:3.743644 +step:4028 train loss:3.763513 +step:4029 train loss:3.793718 +step:4030 train loss:3.708488 +step:4031 train loss:3.751488 +step:4032 train loss:3.711170 +step:4033 train loss:3.745969 +step:4034 
train loss:3.757236 +step:4035 train loss:3.735023 +step:4036 train loss:3.732177 +step:4037 train loss:3.748221 +step:4038 train loss:3.667844 +step:4039 train loss:3.722452 +step:4040 train loss:3.701203 +step:4041 train loss:3.697132 +step:4042 train loss:3.714303 +step:4043 train loss:3.697340 +step:4044 train loss:3.734032 +step:4045 train loss:3.735935 +step:4046 train loss:3.693912 +step:4047 train loss:3.720071 +step:4048 train loss:3.733313 +step:4049 train loss:3.695474 +step:4050 train loss:3.799643 +step:4051 train loss:3.716341 +step:4052 train loss:3.733598 +step:4053 train loss:3.785590 +step:4054 train loss:3.753368 +step:4055 train loss:3.767282 +step:4056 train loss:3.765426 +step:4057 train loss:3.704543 +step:4058 train loss:3.688799 +step:4059 train loss:3.768065 +step:4060 train loss:3.710234 +step:4061 train loss:3.680818 +step:4062 train loss:3.793829 +step:4063 train loss:3.745293 +step:4064 train loss:3.711119 +step:4065 train loss:3.696511 +step:4066 train loss:3.725396 +step:4067 train loss:3.748568 +step:4068 train loss:3.715393 +step:4069 train loss:3.776811 +step:4070 train loss:3.690616 +step:4071 train loss:3.663699 +step:4072 train loss:3.735750 +step:4073 train loss:3.672143 +step:4074 train loss:3.727455 +step:4075 train loss:3.796758 +step:4076 train loss:3.652079 +step:4077 train loss:3.728070 +step:4078 train loss:3.832265 +step:4079 train loss:3.772348 +step:4080 train loss:3.720726 +step:4081 train loss:3.689855 +step:4082 train loss:3.740656 +step:4083 train loss:3.680279 +step:4084 train loss:3.699891 +step:4085 train loss:3.930264 +step:4086 train loss:3.699859 +step:4087 train loss:3.742693 +step:4088 train loss:3.727412 +step:4089 train loss:3.719258 +step:4090 train loss:3.735890 +step:4091 train loss:3.758628 +step:4092 train loss:3.684108 +step:4093 train loss:3.711883 +step:4094 train loss:3.730634 +step:4095 train loss:3.684587 +step:4096 train loss:3.718009 +step:4097 train loss:3.719517 +step:4098 train loss:3.695592 +step:4099 train loss:3.700838 +step:4100 train loss:3.753666 +step:4101 train loss:3.675212 +step:4102 train loss:3.707826 +step:4103 train loss:3.915186 +step:4104 train loss:3.728445 +step:4105 train loss:3.695192 +step:4106 train loss:3.768423 +step:4107 train loss:3.687817 +step:4108 train loss:3.696683 +step:4109 train loss:3.747097 +step:4110 train loss:3.754553 +step:4111 train loss:3.729476 +step:4112 train loss:3.749429 +step:4113 train loss:3.706564 +step:4114 train loss:3.653543 +step:4115 train loss:3.690438 +step:4116 train loss:3.677739 +step:4117 train loss:3.695895 +step:4118 train loss:3.749443 +step:4119 train loss:3.774988 +step:4120 train loss:3.698009 +step:4121 train loss:3.688090 +step:4122 train loss:3.753550 +step:4123 train loss:3.764627 +step:4124 train loss:3.743340 +step:4125 train loss:3.781631 +step:4126 train loss:3.717377 +step:4127 train loss:3.736231 +step:4128 train loss:3.727065 +step:4129 train loss:3.770777 +step:4130 train loss:3.702920 +step:4131 train loss:3.738743 +step:4132 train loss:3.752939 +step:4133 train loss:3.703633 +step:4134 train loss:3.761838 +step:4135 train loss:3.690785 +step:4136 train loss:3.714845 +step:4137 train loss:3.686303 +step:4138 train loss:3.692329 +step:4139 train loss:3.739529 +step:4140 train loss:3.701336 +step:4141 train loss:3.663501 +step:4142 train loss:3.706617 +step:4143 train loss:3.745298 +step:4144 train loss:3.696759 +step:4145 train loss:3.660756 +step:4146 train loss:3.729542 +step:4147 train loss:3.703552 +step:4148 train loss:3.698690 
+step:4149 train loss:3.778260 +step:4150 train loss:3.742133 +step:4151 train loss:3.723328 +step:4152 train loss:3.748093 +step:4153 train loss:3.755620 +step:4154 train loss:3.758204 +step:4155 train loss:3.782672 +step:4156 train loss:3.656930 +step:4157 train loss:3.681232 +step:4158 train loss:3.737239 +step:4159 train loss:3.640109 +step:4160 train loss:3.730951 +step:4161 train loss:3.732995 +step:4162 train loss:3.639161 +step:4163 train loss:3.719328 +step:4164 train loss:3.669356 +step:4165 train loss:3.669917 +step:4166 train loss:3.738772 +step:4167 train loss:3.733322 +step:4168 train loss:3.723889 +step:4169 train loss:3.755736 +step:4170 train loss:3.874613 +step:4171 train loss:3.730833 +step:4172 train loss:3.749085 +step:4173 train loss:3.739590 +step:4174 train loss:3.704413 +step:4175 train loss:3.793514 +step:4176 train loss:3.720583 +step:4177 train loss:3.744156 +step:4178 train loss:3.729013 +step:4179 train loss:3.674978 +step:4180 train loss:3.671920 +step:4181 train loss:3.722521 +step:4182 train loss:3.703182 +step:4183 train loss:3.640449 +step:4184 train loss:3.712035 +step:4185 train loss:3.775278 +step:4186 train loss:3.749038 +step:4187 train loss:3.761374 +step:4188 train loss:3.731833 +step:4189 train loss:3.695181 +step:4190 train loss:3.736852 +step:4191 train loss:3.684098 +step:4192 train loss:3.775228 +step:4193 train loss:3.681517 +step:4194 train loss:3.661813 +step:4195 train loss:3.661080 +step:4196 train loss:3.729466 +step:4197 train loss:3.746305 +step:4198 train loss:3.666626 +step:4199 train loss:3.748402 +step:4200 train loss:3.711178 +step:4201 train loss:3.691861 +step:4202 train loss:3.707215 +step:4203 train loss:3.716100 +step:4204 train loss:3.709739 +step:4205 train loss:3.727584 +step:4206 train loss:3.745351 +step:4207 train loss:3.745106 +step:4208 train loss:3.706777 +step:4209 train loss:3.770417 +step:4210 train loss:3.798912 +step:4211 train loss:3.681673 +step:4212 train loss:3.721145 +step:4213 train loss:3.675202 +step:4214 train loss:3.686127 +step:4215 train loss:3.701431 +step:4216 train loss:3.672548 +step:4217 train loss:3.691325 +step:4218 train loss:3.738041 +step:4219 train loss:3.742053 +step:4220 train loss:3.826516 +step:4221 train loss:3.716191 +step:4222 train loss:3.771662 +step:4223 train loss:3.691545 +step:4224 train loss:3.761815 +step:4225 train loss:3.687731 +step:4226 train loss:3.745039 +step:4227 train loss:3.720492 +step:4228 train loss:3.695157 +step:4229 train loss:3.703945 +step:4230 train loss:3.682574 +step:4231 train loss:3.673516 +step:4232 train loss:3.723769 +step:4233 train loss:3.632493 +step:4234 train loss:3.712395 +step:4235 train loss:3.790587 +step:4236 train loss:3.757358 +step:4237 train loss:3.743142 +step:4238 train loss:3.749199 +step:4239 train loss:3.800093 +step:4240 train loss:3.709456 +step:4241 train loss:3.635307 +step:4242 train loss:3.754232 +step:4243 train loss:3.755245 +step:4244 train loss:3.767715 +step:4245 train loss:3.822351 +step:4246 train loss:3.692915 +step:4247 train loss:3.756978 +step:4248 train loss:3.705881 +step:4249 train loss:3.710277 +step:4250 validation loss:3.638832 +step:4250 train loss:3.693165 +step:4251 train loss:3.787305 +step:4252 train loss:3.694587 +step:4253 train loss:3.691947 +step:4254 train loss:3.702866 +step:4255 train loss:3.683946 +step:4256 train loss:3.698467 +step:4257 train loss:3.758154 +step:4258 train loss:3.616776 +step:4259 train loss:3.680271 +step:4260 train loss:3.745777 +step:4261 train loss:3.731664 +step:4262 
train loss:3.866987 +step:4263 train loss:3.814208 +step:4264 train loss:3.762181 +step:4265 train loss:3.750358 +step:4266 train loss:3.742429 +step:4267 train loss:3.741330 +step:4268 train loss:3.685169 +step:4269 train loss:3.779955 +step:4270 train loss:3.756989 +step:4271 train loss:3.671470 +step:4272 train loss:3.725362 +step:4273 train loss:3.704146 +step:4274 train loss:3.687328 +step:4275 train loss:3.706084 +step:4276 train loss:3.673733 +step:4277 train loss:3.806224 +step:4278 train loss:3.655476 +step:4279 train loss:3.684384 +step:4280 train loss:3.769790 +step:4281 train loss:3.750118 +step:4282 train loss:3.818493 +step:4283 train loss:3.671931 +step:4284 train loss:3.701310 +step:4285 train loss:3.703073 +step:4286 train loss:3.764182 +step:4287 train loss:3.768462 +step:4288 train loss:3.749983 +step:4289 train loss:3.701209 +step:4290 train loss:3.710249 +step:4291 train loss:3.666827 +step:4292 train loss:3.709885 +step:4293 train loss:3.724792 +step:4294 train loss:3.709881 +step:4295 train loss:3.642878 +step:4296 train loss:3.715635 +step:4297 train loss:3.697370 +step:4298 train loss:3.708814 +step:4299 train loss:3.703355 +step:4300 train loss:3.826519 +step:4301 train loss:3.636693 +step:4302 train loss:3.778424 +step:4303 train loss:3.658588 +step:4304 train loss:3.666644 +step:4305 train loss:3.685593 +step:4306 train loss:3.759771 +step:4307 train loss:3.674469 +step:4308 train loss:3.675068 +step:4309 train loss:3.742129 +step:4310 train loss:3.679358 +step:4311 train loss:3.736005 +step:4312 train loss:3.730572 +step:4313 train loss:3.719969 +step:4314 train loss:3.667804 +step:4315 train loss:3.700151 +step:4316 train loss:3.643146 +step:4317 train loss:3.702865 +step:4318 train loss:3.741729 +step:4319 train loss:3.691954 +step:4320 train loss:3.754195 +step:4321 train loss:3.735342 +step:4322 train loss:3.693459 +step:4323 train loss:3.633606 +step:4324 train loss:3.722949 +step:4325 train loss:3.705325 +step:4326 train loss:3.694154 +step:4327 train loss:3.798844 +step:4328 train loss:3.709190 +step:4329 train loss:3.665038 +step:4330 train loss:3.714561 +step:4331 train loss:3.722040 +step:4332 train loss:3.749299 +step:4333 train loss:3.709169 +step:4334 train loss:3.731942 +step:4335 train loss:3.732702 +step:4336 train loss:3.736518 +step:4337 train loss:3.705702 +step:4338 train loss:3.827143 +step:4339 train loss:3.735711 +step:4340 train loss:3.737563 +step:4341 train loss:3.707582 +step:4342 train loss:3.722296 +step:4343 train loss:3.838159 +step:4344 train loss:3.726099 +step:4345 train loss:3.745341 +step:4346 train loss:3.758194 +step:4347 train loss:3.765264 +step:4348 train loss:3.678089 +step:4349 train loss:3.761659 +step:4350 train loss:3.700597 +step:4351 train loss:3.651956 +step:4352 train loss:3.732574 +step:4353 train loss:3.679556 +step:4354 train loss:3.742287 +step:4355 train loss:3.691613 +step:4356 train loss:3.718584 +step:4357 train loss:3.699720 +step:4358 train loss:3.792036 +step:4359 train loss:3.742217 +step:4360 train loss:3.659135 +step:4361 train loss:3.702880 +step:4362 train loss:3.727022 +step:4363 train loss:3.744042 +step:4364 train loss:3.709974 +step:4365 train loss:3.690689 +step:4366 train loss:3.739341 +step:4367 train loss:3.751050 +step:4368 train loss:3.730866 +step:4369 train loss:3.597347 +step:4370 train loss:3.732244 +step:4371 train loss:3.638309 +step:4372 train loss:3.790749 +step:4373 train loss:3.724630 +step:4374 train loss:3.695117 +step:4375 train loss:3.739846 +step:4376 train loss:3.749009 
+step:4377 train loss:3.684968 +step:4378 train loss:3.695994 +step:4379 train loss:3.775280 +step:4380 train loss:3.760767 +step:4381 train loss:3.658178 +step:4382 train loss:3.706120 +step:4383 train loss:3.733844 +step:4384 train loss:3.726002 +step:4385 train loss:3.655402 +step:4386 train loss:3.710335 +step:4387 train loss:3.681840 +step:4388 train loss:3.699746 +step:4389 train loss:3.729826 +step:4390 train loss:3.770937 +step:4391 train loss:3.697811 +step:4392 train loss:3.767577 +step:4393 train loss:3.733888 +step:4394 train loss:3.668729 +step:4395 train loss:3.724818 +step:4396 train loss:3.698195 +step:4397 train loss:3.742205 +step:4398 train loss:3.688381 +step:4399 train loss:3.677889 +step:4400 train loss:3.684392 +step:4401 train loss:3.746223 +step:4402 train loss:3.742685 +step:4403 train loss:3.696823 +step:4404 train loss:3.725939 +step:4405 train loss:3.645785 +step:4406 train loss:3.727172 +step:4407 train loss:3.660802 +step:4408 train loss:3.756592 +step:4409 train loss:3.714880 +step:4410 train loss:3.720603 +step:4411 train loss:3.682051 +step:4412 train loss:3.793643 +step:4413 train loss:3.690670 +step:4414 train loss:3.699670 +step:4415 train loss:3.684143 +step:4416 train loss:3.678205 +step:4417 train loss:3.671523 +step:4418 train loss:3.745824 +step:4419 train loss:3.714209 +step:4420 train loss:3.721613 +step:4421 train loss:3.745935 +step:4422 train loss:3.769877 +step:4423 train loss:3.722255 +step:4424 train loss:3.710364 +step:4425 train loss:3.671507 +step:4426 train loss:3.748063 +step:4427 train loss:3.704881 +step:4428 train loss:3.646646 +step:4429 train loss:3.705858 +step:4430 train loss:3.745460 +step:4431 train loss:3.739236 +step:4432 train loss:3.644361 +step:4433 train loss:3.697257 +step:4434 train loss:3.695834 +step:4435 train loss:3.727108 +step:4436 train loss:3.663939 +step:4437 train loss:3.740125 +step:4438 train loss:3.707549 +step:4439 train loss:3.709646 +step:4440 train loss:3.710406 +step:4441 train loss:3.711711 +step:4442 train loss:3.761411 +step:4443 train loss:3.697401 +step:4444 train loss:3.780542 +step:4445 train loss:3.746645 +step:4446 train loss:3.676289 +step:4447 train loss:3.727288 +step:4448 train loss:3.743818 +step:4449 train loss:3.683538 +step:4450 train loss:3.700374 +step:4451 train loss:3.755021 +step:4452 train loss:3.813541 +step:4453 train loss:3.746089 +step:4454 train loss:3.717475 +step:4455 train loss:3.766069 +step:4456 train loss:3.710781 +step:4457 train loss:3.710822 +step:4458 train loss:3.718723 +step:4459 train loss:3.755031 +step:4460 train loss:3.662850 +step:4461 train loss:3.634175 +step:4462 train loss:3.688960 +step:4463 train loss:3.713103 +step:4464 train loss:3.677393 +step:4465 train loss:3.718204 +step:4466 train loss:3.809185 +step:4467 train loss:3.689585 +step:4468 train loss:3.681414 +step:4469 train loss:3.674417 +step:4470 train loss:3.649472 +step:4471 train loss:3.715157 +step:4472 train loss:3.638733 +step:4473 train loss:3.725560 +step:4474 train loss:3.749626 +step:4475 train loss:3.710725 +step:4476 train loss:3.678603 +step:4477 train loss:3.661649 +step:4478 train loss:3.719631 +step:4479 train loss:3.821373 +step:4480 train loss:3.656603 +step:4481 train loss:3.728251 +step:4482 train loss:3.688514 +step:4483 train loss:3.682673 +step:4484 train loss:3.729486 +step:4485 train loss:3.690851 +step:4486 train loss:3.791513 +step:4487 train loss:3.687982 +step:4488 train loss:3.689182 +step:4489 train loss:3.641244 +step:4490 train loss:3.724520 +step:4491 train 
loss:3.678910 +step:4492 train loss:3.706712 +step:4493 train loss:3.691371 +step:4494 train loss:3.684239 +step:4495 train loss:3.753983 +step:4496 train loss:3.693414 +step:4497 train loss:3.776505 +step:4498 train loss:3.666538 +step:4499 train loss:3.723031 +step:4500 validation loss:3.626244 total_sharp:1.0224e-04 L1_sharp:3.1452e-05 L2_sharp:2.7211e-06 L3_sharp:1.3271e-05 L4_sharp:1.5425e-05 L5_sharp:1.0278e-05 L6_sharp:7.2878e-06 L7_sharp:9.9176e-06 L8_sharp:2.1803e-05 L9_sharp:2.7058e-05 L10_sharp:3.0507e-05 L11_sharp:2.3856e-05 L12_sharp:2.1447e-04 total_fnorm:2.1224e+01 total_l1_linf:1.8601e+05 total_spectral:2.1224e+01 L1_fnorm:4.8879e+00 L2_fnorm:4.7533e+00 L3_fnorm:4.7646e+00 L4_fnorm:4.7377e+00 L5_fnorm:4.8946e+00 L6_fnorm:4.9690e+00 L7_fnorm:5.0492e+00 L8_fnorm:4.9381e+00 L9_fnorm:4.9507e+00 L10_fnorm:4.9507e+00 L11_fnorm:5.0059e+00 L12_fnorm:4.7374e+00 L1_l1linf:4.6313e+00 L2_l1linf:4.8048e+00 L3_l1linf:4.9785e+00 L4_l1linf:5.1862e+00 L5_l1linf:5.1038e+00 L6_l1linf:4.9564e+00 L7_l1linf:5.2518e+00 L8_l1linf:5.9028e+00 L9_l1linf:5.7384e+00 L10_l1linf:5.6626e+00 L11_l1linf:6.2462e+00 L12_l1linf:6.2639e+00 L1_spectral:8.3226e-01 L2_spectral:5.7284e-01 L3_spectral:6.0460e-01 L4_spectral:6.9000e-01 L5_spectral:5.6871e-01 L6_spectral:5.5468e-01 L7_spectral:5.7010e-01 L8_spectral:6.7450e-01 L9_spectral:7.1202e-01 L10_spectral:7.9623e-01 L11_spectral:8.0244e-01 L12_spectral:9.5284e-01 ip_v_neg_g:2.7112e-02 cos_v_neg_g:3.3406e-03 v_norm:2.1224e+01 g_norm:3.8239e-01 hv_norm:3.7898e-01 cos_v_hv:5.7258e-03 hg_norm:3.8114e+00 cos_g_hg:5.8704e-01 v_par:7.3525e-03 v_perp:2.1224e+01 L1_cos_v_neg_g:2.1212e-02 L1_v_norm:4.8879e+00 L2_cos_v_neg_g:8.8231e-03 L2_v_norm:4.7533e+00 L3_cos_v_neg_g:1.2165e-02 L3_v_norm:4.7646e+00 L4_cos_v_neg_g:9.2353e-03 L4_v_norm:4.7377e+00 L5_cos_v_neg_g:8.8341e-03 L5_v_norm:4.8946e+00 L6_cos_v_neg_g:5.6648e-03 L6_v_norm:4.9690e+00 L7_cos_v_neg_g:7.4729e-03 L7_v_norm:5.0492e+00 L8_cos_v_neg_g:9.8649e-03 L8_v_norm:4.9381e+00 L9_cos_v_neg_g:1.1894e-02 L9_v_norm:4.9507e+00 L10_cos_v_neg_g:1.5312e-02 L10_v_norm:4.9507e+00 L11_cos_v_neg_g:1.9241e-02 L11_v_norm:5.0059e+00 L12_cos_v_neg_g:2.7846e-02 L12_v_norm:4.7374e+00 +step:4500 train loss:3.631213 +step:4501 train loss:3.692498 +step:4502 train loss:3.820884 +step:4503 train loss:3.718355 +step:4504 train loss:3.725842 +step:4505 train loss:3.712656 +step:4506 train loss:3.682744 +step:4507 train loss:3.752215 +step:4508 train loss:3.693565 +step:4509 train loss:3.690122 +step:4510 train loss:3.724620 +step:4511 train loss:3.676825 +step:4512 train loss:3.697226 +step:4513 train loss:3.755607 +step:4514 train loss:3.660345 +step:4515 train loss:3.773407 +step:4516 train loss:3.751952 +step:4517 train loss:3.707045 +step:4518 train loss:3.643149 +step:4519 train loss:3.678872 +step:4520 train loss:3.692138 +step:4521 train loss:3.631173 +step:4522 train loss:3.687222 +step:4523 train loss:3.734146 +step:4524 train loss:3.717069 +step:4525 train loss:3.639781 +step:4526 train loss:3.680780 +step:4527 train loss:3.669456 +step:4528 train loss:3.697173 +step:4529 train loss:3.694868 +step:4530 train loss:3.788440 +step:4531 train loss:3.680844 +step:4532 train loss:3.702570 +step:4533 train loss:3.678010 +step:4534 train loss:3.768002 +step:4535 train loss:3.668890 +step:4536 train loss:3.737931 +step:4537 train loss:3.724337 +step:4538 train loss:3.704975 +step:4539 train loss:3.726337 +step:4540 train loss:3.702430 +step:4541 train loss:3.668501 +step:4542 train loss:3.719616 +step:4543 train loss:3.803712 +step:4544 
train loss:3.746682 +step:4545 train loss:3.688640 +step:4546 train loss:3.782884 +step:4547 train loss:3.742212 +step:4548 train loss:3.743684 +step:4549 train loss:3.699588 +step:4550 train loss:3.664624 +step:4551 train loss:3.682245 +step:4552 train loss:3.684480 +step:4553 train loss:3.766101 +step:4554 train loss:3.662274 +step:4555 train loss:3.775709 +step:4556 train loss:3.713370 +step:4557 train loss:3.642330 +step:4558 train loss:3.726885 +step:4559 train loss:3.734054 +step:4560 train loss:3.674295 +step:4561 train loss:3.660993 +step:4562 train loss:3.700280 +step:4563 train loss:3.652398 +step:4564 train loss:3.676304 +step:4565 train loss:3.678594 +step:4566 train loss:3.654515 +step:4567 train loss:3.680580 +step:4568 train loss:3.678641 +step:4569 train loss:3.663752 +step:4570 train loss:3.713839 +step:4571 train loss:3.691337 +step:4572 train loss:3.684834 +step:4573 train loss:3.695612 +step:4574 train loss:3.840153 +step:4575 train loss:3.676136 +step:4576 train loss:3.663615 +step:4577 train loss:3.703608 +step:4578 train loss:3.747887 +step:4579 train loss:3.692309 +step:4580 train loss:3.751382 +step:4581 train loss:3.692341 +step:4582 train loss:3.685189 +step:4583 train loss:3.690217 +step:4584 train loss:3.664420 +step:4585 train loss:3.741323 +step:4586 train loss:3.732327 +step:4587 train loss:3.633485 +step:4588 train loss:3.673787 +step:4589 train loss:3.753640 +step:4590 train loss:3.722124 +step:4591 train loss:3.662672 +step:4592 train loss:3.745768 +step:4593 train loss:3.667578 +step:4594 train loss:3.694498 +step:4595 train loss:3.716689 +step:4596 train loss:3.654278 +step:4597 train loss:3.790139 +step:4598 train loss:3.711417 +step:4599 train loss:3.663908 +step:4600 train loss:3.671318 +step:4601 train loss:3.693927 +step:4602 train loss:3.644697 +step:4603 train loss:3.660657 +step:4604 train loss:3.763411 +step:4605 train loss:3.683573 +step:4606 train loss:3.710191 +step:4607 train loss:3.692867 +step:4608 train loss:3.727815 +step:4609 train loss:3.686091 +step:4610 train loss:3.733383 +step:4611 train loss:3.754126 +step:4612 train loss:3.755670 +step:4613 train loss:3.733598 +step:4614 train loss:3.728571 +step:4615 train loss:3.671323 +step:4616 train loss:3.654507 +step:4617 train loss:3.696986 +step:4618 train loss:3.713263 +step:4619 train loss:3.671234 +step:4620 train loss:3.683869 +step:4621 train loss:3.690112 +step:4622 train loss:3.622217 +step:4623 train loss:3.732457 +step:4624 train loss:3.712568 +step:4625 train loss:3.673627 +step:4626 train loss:3.718655 +step:4627 train loss:3.688152 +step:4628 train loss:3.675506 +step:4629 train loss:3.713761 +step:4630 train loss:3.769850 +step:4631 train loss:3.772737 +step:4632 train loss:3.666668 +step:4633 train loss:3.677973 +step:4634 train loss:3.759025 +step:4635 train loss:3.718083 +step:4636 train loss:3.733219 +step:4637 train loss:3.674066 +step:4638 train loss:3.677817 +step:4639 train loss:3.671921 +step:4640 train loss:3.681532 +step:4641 train loss:3.691971 +step:4642 train loss:3.722189 +step:4643 train loss:3.683872 +step:4644 train loss:3.708369 +step:4645 train loss:3.724682 +step:4646 train loss:3.678693 +step:4647 train loss:3.634581 +step:4648 train loss:3.741901 +step:4649 train loss:3.756806 +step:4650 train loss:3.699311 +step:4651 train loss:3.702174 +step:4652 train loss:3.691293 +step:4653 train loss:3.750545 +step:4654 train loss:3.745593 +step:4655 train loss:3.646490 +step:4656 train loss:3.681400 +step:4657 train loss:3.736372 +step:4658 train loss:3.688692 
+step:4659 train loss:3.704871 +step:4660 train loss:3.746445 +step:4661 train loss:3.664420 +step:4662 train loss:3.678319 +step:4663 train loss:3.684965 +step:4664 train loss:3.742754 +step:4665 train loss:3.737143 +step:4666 train loss:3.735029 +step:4667 train loss:3.726642 +step:4668 train loss:3.691593 +step:4669 train loss:3.700099 +step:4670 train loss:3.732345 +step:4671 train loss:3.713854 +step:4672 train loss:3.608226 +step:4673 train loss:3.642155 +step:4674 train loss:3.773831 +step:4675 train loss:3.674709 +step:4676 train loss:3.639238 +step:4677 train loss:3.647506 +step:4678 train loss:3.614839 +step:4679 train loss:3.716587 +step:4680 train loss:3.652803 +step:4681 train loss:3.707242 +step:4682 train loss:3.654655 +step:4683 train loss:3.626919 +step:4684 train loss:3.739719 +step:4685 train loss:3.676177 +step:4686 train loss:3.690142 +step:4687 train loss:3.722376 +step:4688 train loss:3.656220 +step:4689 train loss:3.732387 +step:4690 train loss:3.672065 +step:4691 train loss:3.708009 +step:4692 train loss:3.635918 +step:4693 train loss:3.675116 +step:4694 train loss:3.719724 +step:4695 train loss:3.737443 +step:4696 train loss:3.723292 +step:4697 train loss:3.634143 +step:4698 train loss:3.656123 +step:4699 train loss:3.702531 +step:4700 train loss:3.673236 +step:4701 train loss:3.681277 +step:4702 train loss:3.636197 +step:4703 train loss:3.716193 +step:4704 train loss:3.702972 +step:4705 train loss:3.647352 +step:4706 train loss:3.652984 +step:4707 train loss:3.641128 +step:4708 train loss:3.707027 +step:4709 train loss:3.656283 +step:4710 train loss:3.669994 +step:4711 train loss:3.729272 +step:4712 train loss:3.629924 +step:4713 train loss:3.732312 +step:4714 train loss:3.632659 +step:4715 train loss:3.722756 +step:4716 train loss:3.687031 +step:4717 train loss:3.623022 +step:4718 train loss:3.707894 +step:4719 train loss:3.636214 +step:4720 train loss:3.735650 +step:4721 train loss:3.688217 +step:4722 train loss:3.744648 +step:4723 train loss:3.646080 +step:4724 train loss:3.694355 +step:4725 train loss:3.625824 +step:4726 train loss:3.677887 +step:4727 train loss:3.679512 +step:4728 train loss:3.683965 +step:4729 train loss:3.716796 +step:4730 train loss:3.618130 +step:4731 train loss:3.677772 +step:4732 train loss:3.629905 +step:4733 train loss:3.570930 +step:4734 train loss:3.705846 +step:4735 train loss:3.657507 +step:4736 train loss:3.697745 +step:4737 train loss:3.582527 +step:4738 train loss:3.727651 +step:4739 train loss:3.600546 +step:4740 train loss:3.716911 +step:4741 train loss:3.682393 +step:4742 train loss:3.642148 +step:4743 train loss:3.642567 +step:4744 train loss:3.685609 +step:4745 train loss:3.704874 +step:4746 train loss:3.742849 +step:4747 train loss:3.704077 +step:4748 train loss:3.602897 +step:4749 train loss:3.668199 +step:4750 validation loss:3.608416 +step:4750 train loss:3.618742 +step:4751 train loss:3.715862 +step:4752 train loss:3.642804 +step:4753 train loss:3.754246 +step:4754 train loss:3.621030 +step:4755 train loss:3.662143 +step:4756 train loss:3.734115 +step:4757 train loss:3.662442 +step:4758 train loss:3.676879 +step:4759 train loss:3.679755 +step:4760 train loss:3.707948 +step:4761 train loss:3.628723 +step:4762 train loss:3.661748 +step:4763 train loss:3.681788 +step:4764 train loss:3.738991 +step:4765 train loss:3.631890 +step:4766 train loss:3.655569 +step:4767 train loss:3.606898 +step:4768 train loss:3.664886 +step:4769 train loss:3.689981 +step:4770 train loss:3.648373 +step:4771 train loss:3.659528 +step:4772 
train loss:3.634914 +step:4773 train loss:3.671312 +step:4774 train loss:3.610906 +step:4775 train loss:3.744613 +step:4776 train loss:3.613487 +step:4777 train loss:3.689072 +step:4778 train loss:3.624146 +step:4779 train loss:3.673845 +step:4780 train loss:3.609896 +step:4781 train loss:3.617230 +step:4782 train loss:3.722974 +step:4783 train loss:3.712407 +step:4784 train loss:3.673828 +step:4785 train loss:3.670415 +step:4786 train loss:3.781823 +step:4787 train loss:3.618227 +step:4788 train loss:3.637561 +step:4789 train loss:3.661490 +step:4790 train loss:3.714570 +step:4791 train loss:3.676166 +step:4792 train loss:3.720247 +step:4793 train loss:3.639149 +step:4794 train loss:3.712485 +step:4795 train loss:3.662961 +step:4796 train loss:3.655417 +step:4797 train loss:3.658622 +step:4798 train loss:3.669193 +step:4799 train loss:3.664503 +step:4800 train loss:3.697666 +step:4801 train loss:3.689792 +step:4802 train loss:3.725433 +step:4803 train loss:3.707882 +step:4804 train loss:3.666370 +step:4805 train loss:3.663522 +step:4806 train loss:3.644044 +step:4807 train loss:3.750590 +step:4808 train loss:3.620841 +step:4809 train loss:3.725727 +step:4810 train loss:3.663688 +step:4811 train loss:3.684437 +step:4812 train loss:3.656904 +step:4813 train loss:3.612875 +step:4814 train loss:3.608455 +step:4815 train loss:3.603024 +step:4816 train loss:3.666253 +step:4817 train loss:3.607170 +step:4818 train loss:3.670260 +step:4819 train loss:3.665748 +step:4820 train loss:3.916741 +step:4821 train loss:3.695852 +step:4822 train loss:3.705117 +step:4823 train loss:3.634453 +step:4824 train loss:3.642033 +step:4825 train loss:3.622576 +step:4826 train loss:3.707637 +step:4827 train loss:3.656960 +step:4828 train loss:3.599357 +step:4829 train loss:3.700919 +step:4830 train loss:3.642466 +step:4831 train loss:3.789341 +step:4832 train loss:3.661828 +step:4833 train loss:3.700850 +step:4834 train loss:3.600927 +step:4835 train loss:3.692018 +step:4836 train loss:3.669891 +step:4837 train loss:3.699679 +step:4838 train loss:3.639735 +step:4839 train loss:3.702970 +step:4840 train loss:3.609703 +step:4841 train loss:3.708876 +step:4842 train loss:3.620486 +step:4843 train loss:3.696806 +step:4844 train loss:3.699319 +step:4845 train loss:3.638772 +step:4846 train loss:3.652330 +step:4847 train loss:3.640771 +step:4848 train loss:3.660219 +step:4849 train loss:3.615080 +step:4850 train loss:3.624379 +step:4851 train loss:3.622134 +step:4852 train loss:3.698110 +step:4853 train loss:3.675275 +step:4854 train loss:3.655318 +step:4855 train loss:3.716759 +step:4856 train loss:3.682264 +step:4857 train loss:3.695524 +step:4858 train loss:3.775266 +step:4859 train loss:3.619288 +step:4860 train loss:3.696773 +step:4861 train loss:3.667614 +step:4862 train loss:3.701307 +step:4863 train loss:3.637265 +step:4864 train loss:3.646607 +step:4865 train loss:3.644057 +step:4866 train loss:3.687026 +step:4867 train loss:3.652423 +step:4868 train loss:3.673088 +step:4869 train loss:3.621831 +step:4870 train loss:3.655451 +step:4871 train loss:3.737117 +step:4872 train loss:3.680553 +step:4873 train loss:3.680077 +step:4874 train loss:3.652865 +step:4875 train loss:3.620139 +step:4876 train loss:3.629366 +step:4877 train loss:3.637425 +step:4878 train loss:3.673324 +step:4879 train loss:3.633788 +step:4880 train loss:3.661707 +step:4881 train loss:3.603862 +step:4882 train loss:3.801520 +step:4883 train loss:3.617901 +step:4884 train loss:3.644156 +step:4885 train loss:3.624149 +step:4886 train loss:3.696654 
+step:4887 train loss:3.647303 +step:4888 train loss:3.660533 +step:4889 train loss:3.645101 +step:4890 train loss:3.692819 +step:4891 train loss:3.629890 +step:4892 train loss:3.632102 +step:4893 train loss:3.680433 +step:4894 train loss:3.618984 +step:4895 train loss:3.647208 +step:4896 train loss:3.631871 +step:4897 train loss:3.703736 +step:4898 train loss:3.658517 +step:4899 train loss:3.636564 +step:4900 train loss:3.684401 +step:4901 train loss:3.633985 +step:4902 train loss:3.626741 +step:4903 train loss:3.647471 +step:4904 train loss:3.664178 +step:4905 train loss:3.660058 +step:4906 train loss:3.661070 +step:4907 train loss:3.736341 +step:4908 train loss:3.639065 +step:4909 train loss:3.644128 +step:4910 train loss:3.666257 +step:4911 train loss:3.719037 +step:4912 train loss:3.696604 +step:4913 train loss:3.670679 +step:4914 train loss:3.661047 +step:4915 train loss:3.641020 +step:4916 train loss:3.584496 +step:4917 train loss:3.611182 +step:4918 train loss:3.639092 +step:4919 train loss:3.632310 +step:4920 train loss:3.629628 +step:4921 train loss:3.794952 +step:4922 train loss:3.689918 +step:4923 train loss:3.706001 +step:4924 train loss:3.704265 +step:4925 train loss:3.637830 +step:4926 train loss:3.634931 +step:4927 train loss:3.660359 +step:4928 train loss:3.703875 +step:4929 train loss:3.655709 +step:4930 train loss:3.636209 +step:4931 train loss:3.633661 +step:4932 train loss:3.639857 +step:4933 train loss:3.634860 +step:4934 train loss:3.697274 +step:4935 train loss:3.685680 +step:4936 train loss:3.647230 +step:4937 train loss:3.758163 +step:4938 train loss:3.744841 +step:4939 train loss:3.612220 +step:4940 train loss:3.687874 +step:4941 train loss:3.588712 +step:4942 train loss:3.632438 +step:4943 train loss:3.634168 +step:4944 train loss:3.634491 +step:4945 train loss:3.684766 +step:4946 train loss:3.660477 +step:4947 train loss:3.642194 +step:4948 train loss:3.678938 +step:4949 train loss:3.584642 +step:4950 train loss:3.666450 +step:4951 train loss:3.718345 +step:4952 train loss:3.658701 +step:4953 train loss:3.693928 +step:4954 train loss:3.591402 +step:4955 train loss:3.670398 +step:4956 train loss:3.697644 +step:4957 train loss:3.691525 +step:4958 train loss:3.606516 +step:4959 train loss:3.723823 +step:4960 train loss:3.651731 +step:4961 train loss:3.670471 +step:4962 train loss:3.629150 +step:4963 train loss:3.677877 +step:4964 train loss:3.626134 +step:4965 train loss:3.783269 +step:4966 train loss:3.630548 +step:4967 train loss:3.739125 +step:4968 train loss:3.628499 +step:4969 train loss:3.671463 +step:4970 train loss:3.660795 +step:4971 train loss:3.614304 +step:4972 train loss:3.659306 +step:4973 train loss:3.664857 +step:4974 train loss:3.654603 +step:4975 train loss:3.738867 +step:4976 train loss:3.719070 +step:4977 train loss:3.662540 +step:4978 train loss:3.648349 +step:4979 train loss:3.649528 +step:4980 train loss:3.755380 +step:4981 train loss:3.596597 +step:4982 train loss:3.677077 +step:4983 train loss:3.600302 +step:4984 train loss:3.785840 +step:4985 train loss:3.687479 +step:4986 train loss:3.627500 +step:4987 train loss:3.646831 +step:4988 train loss:3.842871 +step:4989 train loss:3.652217 +step:4990 train loss:3.645566 +step:4991 train loss:3.658979 +step:4992 train loss:3.645105 +step:4993 train loss:3.624285 +step:4994 train loss:3.731168 +step:4995 train loss:3.659496 +step:4996 train loss:3.744967 +step:4997 train loss:3.643821 +step:4998 train loss:3.648433 +step:4999 train loss:3.626797 +step:5000 validation loss:3.600739 
total_sharp:6.3660e-05 L1_sharp:2.0951e-05 L2_sharp:2.7002e-06 L3_sharp:6.6597e-06 L4_sharp:1.2349e-05 L5_sharp:1.0025e-05 L6_sharp:6.2322e-06 L7_sharp:9.0747e-06 L8_sharp:1.6032e-05 L9_sharp:2.1962e-05 L10_sharp:2.2449e-05 L11_sharp:1.7417e-05 L12_sharp:1.1495e-04 total_fnorm:2.1046e+01 total_l1_linf:1.8438e+05 total_spectral:2.1046e+01 L1_fnorm:4.8058e+00 L2_fnorm:4.7103e+00 L3_fnorm:4.6724e+00 L4_fnorm:4.6172e+00 L5_fnorm:4.8442e+00 L6_fnorm:4.9048e+00 L7_fnorm:5.0345e+00 L8_fnorm:4.9874e+00 L9_fnorm:4.9765e+00 L10_fnorm:4.9417e+00 L11_fnorm:4.9999e+00 L12_fnorm:4.6257e+00 L1_l1linf:4.5559e+00 L2_l1linf:4.7665e+00 L3_l1linf:4.7745e+00 L4_l1linf:4.7289e+00 L5_l1linf:4.9386e+00 L6_l1linf:4.9208e+00 L7_l1linf:5.5192e+00 L8_l1linf:5.8303e+00 L9_l1linf:5.4793e+00 L10_l1linf:5.3183e+00 L11_l1linf:5.7635e+00 L12_l1linf:5.6551e+00 L1_spectral:7.8647e-01 L2_spectral:5.2289e-01 L3_spectral:5.4677e-01 L4_spectral:6.6991e-01 L5_spectral:5.5852e-01 L6_spectral:4.8258e-01 L7_spectral:5.5146e-01 L8_spectral:6.2979e-01 L9_spectral:6.8928e-01 L10_spectral:7.1946e-01 L11_spectral:7.3738e-01 L12_spectral:8.1413e-01 ip_v_neg_g:1.5645e-02 cos_v_neg_g:1.1696e-03 v_norm:2.1046e+01 g_norm:6.3553e-01 hv_norm:2.1022e-01 cos_v_hv:6.3734e-03 hg_norm:1.1451e+01 cos_g_hg:7.7430e-01 v_par:3.8311e-03 v_perp:2.1046e+01 L1_cos_v_neg_g:5.0696e-03 L1_v_norm:4.8058e+00 L2_cos_v_neg_g:2.1383e-03 L2_v_norm:4.7103e+00 L3_cos_v_neg_g:4.2365e-03 L3_v_norm:4.6724e+00 L4_cos_v_neg_g:2.9207e-03 L4_v_norm:4.6172e+00 L5_cos_v_neg_g:3.4383e-03 L5_v_norm:4.8442e+00 L6_cos_v_neg_g:3.2271e-03 L6_v_norm:4.9048e+00 L7_cos_v_neg_g:4.3799e-03 L7_v_norm:5.0345e+00 L8_cos_v_neg_g:6.6949e-03 L8_v_norm:4.9874e+00 L9_cos_v_neg_g:8.8812e-03 L9_v_norm:4.9765e+00 L10_cos_v_neg_g:8.0669e-03 L10_v_norm:4.9417e+00 L11_cos_v_neg_g:8.4521e-03 L11_v_norm:4.9999e+00 L12_cos_v_neg_g:6.4739e-03 L12_v_norm:4.6257e+00 +step:5000 train loss:3.746660 +step:5001 train loss:3.611154 +step:5002 train loss:3.665905 +step:5003 train loss:3.663149 +step:5004 train loss:3.651280 +step:5005 train loss:3.653369 +step:5006 train loss:3.691325 +step:5007 train loss:3.695587 +step:5008 train loss:3.633562 +step:5009 train loss:3.677814 +step:5010 train loss:3.628714 +step:5011 train loss:3.656498 +step:5012 train loss:3.630209 +step:5013 train loss:3.732266 +step:5014 train loss:3.649238 +step:5015 train loss:3.724186 +step:5016 train loss:3.651288 +step:5017 train loss:3.696064 +step:5018 train loss:3.619708 +step:5019 train loss:3.651610 +step:5020 train loss:3.642483 +step:5021 train loss:3.656502 +step:5022 train loss:3.690258 +step:5023 train loss:3.660075 +step:5024 train loss:3.713916 +step:5025 train loss:3.598392 +step:5026 train loss:3.724341 +step:5027 train loss:3.653467 +step:5028 train loss:3.723160 +step:5029 train loss:3.615925 +step:5030 train loss:3.653869 +step:5031 train loss:3.644870 +step:5032 train loss:3.671761 +step:5033 train loss:3.656590 +step:5034 train loss:3.653880 +step:5035 train loss:3.738718 +step:5036 train loss:3.686352 +step:5037 train loss:3.638918 +step:5038 train loss:3.689394 +step:5039 train loss:3.700274 +step:5040 train loss:3.660077 +step:5041 train loss:3.678332 +step:5042 train loss:3.585292 +step:5043 train loss:3.730200 +step:5044 train loss:3.645189 +step:5045 train loss:3.694181 +step:5046 train loss:3.617918 +step:5047 train loss:3.692840 +step:5048 train loss:3.608392 +step:5049 train loss:3.741740 +step:5050 train loss:3.630391 +step:5051 train loss:3.671693 +step:5052 train loss:3.569178 +step:5053 train loss:3.754425 
+step:5054 train loss:3.643349 +step:5055 train loss:3.667967 +step:5056 train loss:3.703210 +step:5057 train loss:3.630455 +step:5058 train loss:3.662823 +step:5059 train loss:3.626266 +step:5060 train loss:3.670158 +step:5061 train loss:3.667408 +step:5062 train loss:3.636829 +step:5063 train loss:3.629612 +step:5064 train loss:3.637930 +step:5065 train loss:3.623957 +step:5066 train loss:3.679605 +step:5067 train loss:3.668526 +step:5068 train loss:3.651392 +step:5069 train loss:3.626552 +step:5070 train loss:3.655024 +step:5071 train loss:3.720277 +step:5072 train loss:3.614125 +step:5073 train loss:3.622287 +step:5074 train loss:3.570128 +step:5075 train loss:3.636982 +step:5076 train loss:3.571324 +step:5077 train loss:3.637129 +step:5078 train loss:3.634204 +step:5079 train loss:3.674731 +step:5080 train loss:3.651946 +step:5081 train loss:3.661695 +step:5082 train loss:3.649865 +step:5083 train loss:3.708676 +step:5084 train loss:3.687038 +step:5085 train loss:3.647795 +step:5086 train loss:3.723468 +step:5087 train loss:3.707470 +step:5088 train loss:3.628588 +step:5089 train loss:3.693066 +step:5090 train loss:3.640280 +step:5091 train loss:3.643722 +step:5092 train loss:3.741609 +step:5093 train loss:3.628910 +step:5094 train loss:3.627350 +step:5095 train loss:3.674314 +step:5096 train loss:3.639612 +step:5097 train loss:3.651781 +step:5098 train loss:3.652138 +step:5099 train loss:3.617255 +step:5100 train loss:3.627096 +step:5101 train loss:3.817500 +step:5102 train loss:3.667667 +step:5103 train loss:3.675811 +step:5104 train loss:3.722747 +step:5105 train loss:3.665637 +step:5106 train loss:3.623657 +step:5107 train loss:3.640652 +step:5108 train loss:3.631142 +step:5109 train loss:3.719569 +step:5110 train loss:3.627890 +step:5111 train loss:3.720001 +step:5112 train loss:3.627403 +step:5113 train loss:3.609203 +step:5114 train loss:3.653969 +step:5115 train loss:3.615033 +step:5116 train loss:3.673292 +step:5117 train loss:3.612955 +step:5118 train loss:3.646519 +step:5119 train loss:3.626187 +step:5120 train loss:3.667771 +step:5121 train loss:3.615628 +step:5122 train loss:3.626823 +step:5123 train loss:3.619870 +step:5124 train loss:3.573930 +step:5125 train loss:3.681465 +step:5126 train loss:3.668709 +step:5127 train loss:3.672433 +step:5128 train loss:3.685037 +step:5129 train loss:3.614558 +step:5130 train loss:3.628196 +step:5131 train loss:3.563438 +step:5132 train loss:3.686356 +step:5133 train loss:3.653733 +step:5134 train loss:3.655711 +step:5135 train loss:3.605446 +step:5136 train loss:3.673008 +step:5137 train loss:3.674657 +step:5138 train loss:3.654347 +step:5139 train loss:3.686845 +step:5140 train loss:3.659994 +step:5141 train loss:3.693866 +step:5142 train loss:3.640090 +step:5143 train loss:3.672545 +step:5144 train loss:3.665807 +step:5145 train loss:3.607242 +step:5146 train loss:3.602140 +step:5147 train loss:3.679776 +step:5148 train loss:3.627076 +step:5149 train loss:3.680315 +step:5150 train loss:3.658658 +step:5151 train loss:3.624149 +step:5152 train loss:3.667833 +step:5153 train loss:3.641787 +step:5154 train loss:3.655772 +step:5155 train loss:3.663231 +step:5156 train loss:3.646761 +step:5157 train loss:3.637569 +step:5158 train loss:3.668340 +step:5159 train loss:3.700946 +step:5160 train loss:3.768663 +step:5161 train loss:3.693521 +step:5162 train loss:3.716538 +step:5163 train loss:3.626629 +step:5164 train loss:3.705063 +step:5165 train loss:3.707985 +step:5166 train loss:3.642220 +step:5167 train loss:3.745612 +step:5168 train 
loss:3.660074 +step:5169 train loss:3.690643 +step:5170 train loss:3.668749 +step:5171 train loss:3.710551 +step:5172 train loss:3.627076 +step:5173 train loss:3.689606 +step:5174 train loss:3.628200 +step:5175 train loss:3.659190 +step:5176 train loss:3.651630 +step:5177 train loss:3.647147 +step:5178 train loss:3.713502 +step:5179 train loss:3.624600 +step:5180 train loss:3.703454 +step:5181 train loss:3.646688 +step:5182 train loss:3.705014 +step:5183 train loss:3.636546 +step:5184 train loss:3.614051 +step:5185 train loss:3.640510 +step:5186 train loss:3.694585 +step:5187 train loss:3.690049 +step:5188 train loss:3.622696 +step:5189 train loss:3.667533 +step:5190 train loss:3.651016 +step:5191 train loss:3.631670 +step:5192 train loss:3.616729 +step:5193 train loss:3.698748 +step:5194 train loss:3.650335 +step:5195 train loss:3.622112 +step:5196 train loss:3.692379 +step:5197 train loss:3.737277 +step:5198 train loss:3.649947 +step:5199 train loss:3.644191 +step:5200 train loss:3.663292 +step:5201 train loss:3.655310 +step:5202 train loss:3.659927 +step:5203 train loss:3.661638 +step:5204 train loss:3.633505 +step:5205 train loss:3.679312 +step:5206 train loss:3.612284 +step:5207 train loss:3.618656 +step:5208 train loss:3.679215 +step:5209 train loss:3.696810 +step:5210 train loss:3.608221 +step:5211 train loss:3.651379 +step:5212 train loss:3.670283 +step:5213 train loss:3.640422 +step:5214 train loss:3.691617 +step:5215 train loss:3.797587 +step:5216 train loss:3.649913 +step:5217 train loss:3.626465 +step:5218 train loss:3.635409 +step:5219 train loss:3.696049 +step:5220 train loss:3.611893 +step:5221 train loss:3.613756 +step:5222 train loss:3.698219 +step:5223 train loss:3.694083 +step:5224 train loss:3.590899 +step:5225 train loss:3.737183 +step:5226 train loss:3.650559 +step:5227 train loss:3.725093 +step:5228 train loss:3.694739 +step:5229 train loss:3.635167 +step:5230 train loss:3.647820 +step:5231 train loss:3.595684 +step:5232 train loss:3.717936 +step:5233 train loss:3.679366 +step:5234 train loss:3.680998 +step:5235 train loss:3.628960 +step:5236 train loss:3.704524 +step:5237 train loss:3.761222 +step:5238 train loss:3.658884 +step:5239 train loss:3.721839 +step:5240 train loss:3.601250 +step:5241 train loss:3.662902 +step:5242 train loss:3.636646 +step:5243 train loss:3.639019 +step:5244 train loss:3.638896 +step:5245 train loss:3.683831 +step:5246 train loss:3.721989 +step:5247 train loss:3.654695 +step:5248 train loss:3.620788 +step:5249 train loss:3.677511 +step:5250 validation loss:3.579200 +step:5250 train loss:3.650325 +step:5251 train loss:3.713392 +step:5252 train loss:3.605349 +step:5253 train loss:3.751543 +step:5254 train loss:3.630354 +step:5255 train loss:3.701256 +step:5256 train loss:3.617463 +step:5257 train loss:3.670832 +step:5258 train loss:3.668432 +step:5259 train loss:3.653629 +step:5260 train loss:3.648799 +step:5261 train loss:3.639746 +step:5262 train loss:3.681838 +step:5263 train loss:3.668522 +step:5264 train loss:3.619530 +step:5265 train loss:3.698471 +step:5266 train loss:3.614452 +step:5267 train loss:3.625693 +step:5268 train loss:3.608201 +step:5269 train loss:3.609651 +step:5270 train loss:3.663954 +step:5271 train loss:3.592351 +step:5272 train loss:3.680016 +step:5273 train loss:3.586297 +step:5274 train loss:3.639416 +step:5275 train loss:3.654556 +step:5276 train loss:3.776692 +step:5277 train loss:3.678660 +step:5278 train loss:3.625166 +step:5279 train loss:3.670858 +step:5280 train loss:3.647738 +step:5281 train loss:3.641660 
+step:5282 train loss:3.614799 +step:5283 train loss:3.615161 +step:5284 train loss:3.622502 +step:5285 train loss:3.692309 +step:5286 train loss:3.598360 +step:5287 train loss:3.700530 +step:5288 train loss:3.675342 +step:5289 train loss:3.643511 +step:5290 train loss:3.699927 +step:5291 train loss:3.649367 +step:5292 train loss:3.671896 +step:5293 train loss:3.640273 +step:5294 train loss:3.624221 +step:5295 train loss:3.635224 +step:5296 train loss:3.621771 +step:5297 train loss:3.644534 +step:5298 train loss:3.595165 +step:5299 train loss:3.683016 +step:5300 train loss:3.634042 +step:5301 train loss:3.703042 +step:5302 train loss:3.708986 +step:5303 train loss:3.570798 +step:5304 train loss:3.607522 +step:5305 train loss:3.578898 +step:5306 train loss:3.615995 +step:5307 train loss:3.618722 +step:5308 train loss:3.711985 +step:5309 train loss:3.661500 +step:5310 train loss:3.646667 +step:5311 train loss:3.714888 +step:5312 train loss:3.599383 +step:5313 train loss:3.690136 +step:5314 train loss:3.677414 +step:5315 train loss:3.641118 +step:5316 train loss:3.672408 +step:5317 train loss:3.689494 +step:5318 train loss:3.642815 +step:5319 train loss:3.672007 +step:5320 train loss:3.622542 +step:5321 train loss:3.746202 +step:5322 train loss:3.652807 +step:5323 train loss:3.657296 +step:5324 train loss:3.598892 +step:5325 train loss:3.678444 +step:5326 train loss:3.675563 +step:5327 train loss:3.562711 +step:5328 train loss:3.700505 +step:5329 train loss:3.665423 +step:5330 train loss:3.668178 +step:5331 train loss:3.713472 +step:5332 train loss:3.637345 +step:5333 train loss:3.704044 +step:5334 train loss:3.677029 +step:5335 train loss:3.734902 +step:5336 train loss:3.767941 +step:5337 train loss:3.605475 +step:5338 train loss:3.613352 +step:5339 train loss:3.633724 +step:5340 train loss:3.656494 +step:5341 train loss:3.671887 +step:5342 train loss:3.572315 +step:5343 train loss:3.732163 +step:5344 train loss:3.613865 +step:5345 train loss:3.615174 +step:5346 train loss:3.618391 +step:5347 train loss:3.640653 +step:5348 train loss:3.684186 +step:5349 train loss:3.625200 +step:5350 train loss:3.663955 +step:5351 train loss:3.738494 +step:5352 train loss:3.777105 +step:5353 train loss:3.689672 +step:5354 train loss:3.660417 +step:5355 train loss:3.624570 +step:5356 train loss:3.649664 +step:5357 train loss:3.626888 +step:5358 train loss:3.649751 +step:5359 train loss:3.659826 +step:5360 train loss:3.631706 +step:5361 train loss:3.634386 +step:5362 train loss:3.623605 +step:5363 train loss:3.611994 +step:5364 train loss:3.616660 +step:5365 train loss:3.647213 +step:5366 train loss:3.681465 +step:5367 train loss:3.606473 +step:5368 train loss:3.673530 +step:5369 train loss:3.689716 +step:5370 train loss:3.595660 +step:5371 train loss:3.644779 +step:5372 train loss:3.661779 +step:5373 train loss:3.706096 +step:5374 train loss:3.585848 +step:5375 train loss:3.634281 +step:5376 train loss:3.702389 +step:5377 train loss:3.633934 +step:5378 train loss:3.611050 +step:5379 train loss:3.612972 +step:5380 train loss:3.654831 +step:5381 train loss:3.692077 +step:5382 train loss:3.603643 +step:5383 train loss:3.661804 +step:5384 train loss:3.676058 +step:5385 train loss:3.677867 +step:5386 train loss:3.655899 +step:5387 train loss:3.660885 +step:5388 train loss:3.673888 +step:5389 train loss:3.606432 +step:5390 train loss:3.637786 +step:5391 train loss:3.573051 +step:5392 train loss:3.639158 +step:5393 train loss:3.628540 +step:5394 train loss:3.628330 +step:5395 train loss:3.697275 +step:5396 train 
loss:3.661870 +step:5397 train loss:3.684995 +step:5398 train loss:3.680316 +step:5399 train loss:3.708471 +step:5400 train loss:3.716558 +step:5401 train loss:3.677100 +step:5402 train loss:3.782198 +step:5403 train loss:3.686245 +step:5404 train loss:3.659931 +step:5405 train loss:3.728999 +step:5406 train loss:3.688627 +step:5407 train loss:3.618673 +step:5408 train loss:3.763121 +step:5409 train loss:3.604864 +step:5410 train loss:3.670293 +step:5411 train loss:3.652841 +step:5412 train loss:3.627577 +step:5413 train loss:3.680995 +step:5414 train loss:3.655304 +step:5415 train loss:3.633167 +step:5416 train loss:3.629596 +step:5417 train loss:3.697354 +step:5418 train loss:3.714986 +step:5419 train loss:3.616702 +step:5420 train loss:3.678478 +step:5421 train loss:3.646470 +step:5422 train loss:3.687721 +step:5423 train loss:3.668545 +step:5424 train loss:3.570982 +step:5425 train loss:3.646567 +step:5426 train loss:3.726178 +step:5427 train loss:3.619167 +step:5428 train loss:3.656039 +step:5429 train loss:3.588459 +step:5430 train loss:3.623165 +step:5431 train loss:3.683224 +step:5432 train loss:3.661627 +step:5433 train loss:3.665158 +step:5434 train loss:3.617440 +step:5435 train loss:3.614856 +step:5436 train loss:3.613907 +step:5437 train loss:3.653093 +step:5438 train loss:3.635133 +step:5439 train loss:3.644510 +step:5440 train loss:3.682949 +step:5441 train loss:3.704242 +step:5442 train loss:3.636346 +step:5443 train loss:3.631966 +step:5444 train loss:3.578248 +step:5445 train loss:3.664072 +step:5446 train loss:3.632887 +step:5447 train loss:3.666698 +step:5448 train loss:3.724449 +step:5449 train loss:3.611715 +step:5450 train loss:3.647000 +step:5451 train loss:3.636103 +step:5452 train loss:3.653271 +step:5453 train loss:3.710424 +step:5454 train loss:3.635084 +step:5455 train loss:3.625325 +step:5456 train loss:3.759539 +step:5457 train loss:3.643760 +step:5458 train loss:3.676935 +step:5459 train loss:3.621950 +step:5460 train loss:3.638850 +step:5461 train loss:3.643622 +step:5462 train loss:3.642417 +step:5463 train loss:3.648884 +step:5464 train loss:3.649301 +step:5465 train loss:3.595951 +step:5466 train loss:3.670863 +step:5467 train loss:3.651765 +step:5468 train loss:3.659199 +step:5469 train loss:3.754516 +step:5470 train loss:3.647573 +step:5471 train loss:3.722895 +step:5472 train loss:3.669449 +step:5473 train loss:3.574656 +step:5474 train loss:3.907241 +step:5475 train loss:3.582439 +step:5476 train loss:3.659132 +step:5477 train loss:3.657788 +step:5478 train loss:3.656120 +step:5479 train loss:3.798655 +step:5480 train loss:3.644440 +step:5481 train loss:3.708253 +step:5482 train loss:3.622286 +step:5483 train loss:3.654151 +step:5484 train loss:3.696684 +step:5485 train loss:3.611914 +step:5486 train loss:3.656581 +step:5487 train loss:3.660561 +step:5488 train loss:3.572017 +step:5489 train loss:3.675219 +step:5490 train loss:3.624085 +step:5491 train loss:3.722593 +step:5492 train loss:3.654352 +step:5493 train loss:3.581465 +step:5494 train loss:3.640832 +step:5495 train loss:3.617656 +step:5496 train loss:3.612577 +step:5497 train loss:3.736872 +step:5498 train loss:3.600787 +step:5499 train loss:3.740319 +step:5500 validation loss:3.576577 total_sharp:7.4234e-05 L1_sharp:3.1066e-05 L2_sharp:6.0743e-06 L3_sharp:2.9939e-05 L4_sharp:2.0302e-05 L5_sharp:6.5471e-06 L6_sharp:4.2237e-06 L7_sharp:7.2213e-06 L8_sharp:1.2603e-05 L9_sharp:1.2562e-05 L10_sharp:1.3176e-05 L11_sharp:1.2766e-05 L12_sharp:1.9852e-04 total_fnorm:2.1129e+01 
total_l1_linf:1.8540e+05 total_spectral:2.1129e+01 L1_fnorm:4.7794e+00 L2_fnorm:4.7269e+00 L3_fnorm:4.7424e+00 L4_fnorm:4.6507e+00 L5_fnorm:4.8736e+00 L6_fnorm:4.9412e+00 L7_fnorm:5.0534e+00 L8_fnorm:5.0103e+00 L9_fnorm:4.9901e+00 L10_fnorm:5.0019e+00 L11_fnorm:5.0329e+00 L12_fnorm:4.7557e+00 L1_l1linf:4.5338e+00 L2_l1linf:4.7945e+00 L3_l1linf:4.9614e+00 L4_l1linf:4.5994e+00 L5_l1linf:4.9069e+00 L6_l1linf:4.9326e+00 L7_l1linf:5.1392e+00 L8_l1linf:5.3233e+00 L9_l1linf:5.6403e+00 L10_l1linf:5.5852e+00 L11_l1linf:5.4293e+00 L12_l1linf:6.9293e+00 L1_spectral:8.2351e-01 L2_spectral:5.4003e-01 L3_spectral:5.7241e-01 L4_spectral:7.0690e-01 L5_spectral:5.4222e-01 L6_spectral:4.7766e-01 L7_spectral:5.3229e-01 L8_spectral:6.0882e-01 L9_spectral:6.7208e-01 L10_spectral:7.0215e-01 L11_spectral:7.0309e-01 L12_spectral:9.4397e-01 ip_v_neg_g:1.5204e-02 cos_v_neg_g:9.5808e-04 v_norm:2.1129e+01 g_norm:7.5108e-01 hv_norm:3.2671e-01 cos_v_hv:4.8007e-03 hg_norm:5.8297e+00 cos_g_hg:6.5893e-01 v_par:3.6610e-03 v_perp:2.1129e+01 L1_cos_v_neg_g:5.1323e-03 L1_v_norm:4.7794e+00 L2_cos_v_neg_g:3.3656e-04 L2_v_norm:4.7269e+00 L3_cos_v_neg_g:2.7098e-04 L3_v_norm:4.7424e+00 L4_cos_v_neg_g:1.6493e-03 L4_v_norm:4.6507e+00 L5_cos_v_neg_g:2.1220e-03 L5_v_norm:4.8736e+00 L6_cos_v_neg_g:1.9171e-03 L6_v_norm:4.9412e+00 L7_cos_v_neg_g:1.2879e-03 L7_v_norm:5.0534e+00 L8_cos_v_neg_g:2.2610e-03 L8_v_norm:5.0103e+00 L9_cos_v_neg_g:2.8187e-03 L9_v_norm:4.9901e+00 L10_cos_v_neg_g:3.1650e-03 L10_v_norm:5.0019e+00 L11_cos_v_neg_g:4.5106e-03 L11_v_norm:5.0329e+00 L12_cos_v_neg_g:1.5433e-02 L12_v_norm:4.7557e+00 +step:5500 train loss:3.652202 +step:5501 train loss:3.726514 +step:5502 train loss:3.675382 +step:5503 train loss:3.640164 +step:5504 train loss:3.685536 +step:5505 train loss:3.649235 +step:5506 train loss:3.689995 +step:5507 train loss:3.677052 +step:5508 train loss:3.699457 +step:5509 train loss:3.707942 +step:5510 train loss:3.680136 +step:5511 train loss:3.675465 +step:5512 train loss:3.794022 +step:5513 train loss:3.598060 +step:5514 train loss:3.659496 +step:5515 train loss:3.687701 +step:5516 train loss:3.708201 +step:5517 train loss:3.664193 +step:5518 train loss:3.694352 +step:5519 train loss:3.727856 +step:5520 train loss:3.635343 +step:5521 train loss:3.649258 +step:5522 train loss:3.612739 +step:5523 train loss:3.663406 +step:5524 train loss:3.709981 +step:5525 train loss:3.619264 +step:5526 train loss:3.630794 +step:5527 train loss:3.649767 +step:5528 train loss:3.751343 +step:5529 train loss:3.719087 +step:5530 train loss:3.685811 +step:5531 train loss:3.625143 +step:5532 train loss:3.647988 +step:5533 train loss:3.682490 +step:5534 train loss:3.593530 +step:5535 train loss:3.647850 +step:5536 train loss:3.586942 +step:5537 train loss:3.634392 +step:5538 train loss:3.622756 +step:5539 train loss:3.569928 +step:5540 train loss:3.793513 +step:5541 train loss:3.610620 +step:5542 train loss:3.664132 +step:5543 train loss:3.646754 +step:5544 train loss:3.642505 +step:5545 train loss:3.633202 +step:5546 train loss:3.667653 +step:5547 train loss:3.598630 +step:5548 train loss:3.638855 +step:5549 train loss:3.643198 +step:5550 train loss:3.666856 +step:5551 train loss:3.673012 +step:5552 train loss:3.625897 +step:5553 train loss:3.654351 +step:5554 train loss:3.623074 +step:5555 train loss:3.632474 +step:5556 train loss:3.649590 +step:5557 train loss:3.714478 +step:5558 train loss:3.637613 +step:5559 train loss:3.640969 +step:5560 train loss:3.633313 +step:5561 train loss:3.667011 +step:5562 train loss:3.619861 
+step:5563 train loss:3.604403 +step:5564 train loss:3.639434 +step:5565 train loss:3.703372 +step:5566 train loss:3.605620 +step:5567 train loss:3.724623 +step:5568 train loss:3.842155 +step:5569 train loss:3.637405 +step:5570 train loss:3.565686 +step:5571 train loss:3.658276 +step:5572 train loss:3.593952 +step:5573 train loss:3.585979 +step:5574 train loss:3.551404 +step:5575 train loss:3.649829 +step:5576 train loss:3.636551 +step:5577 train loss:3.640652 +step:5578 train loss:3.669434 +step:5579 train loss:3.625062 +step:5580 train loss:3.650455 +step:5581 train loss:3.670897 +step:5582 train loss:3.649608 +step:5583 train loss:3.659553 +step:5584 train loss:3.782940 +step:5585 train loss:3.685912 +step:5586 train loss:3.620409 +step:5587 train loss:3.655092 +step:5588 train loss:3.668988 +step:5589 train loss:3.667995 +step:5590 train loss:3.727352 +step:5591 train loss:3.591125 +step:5592 train loss:3.770595 +step:5593 train loss:3.659692 +step:5594 train loss:3.684494 +step:5595 train loss:3.667695 +step:5596 train loss:3.624900 +step:5597 train loss:3.634696 +step:5598 train loss:3.632802 +step:5599 train loss:3.642903 +step:5600 train loss:3.678836 +step:5601 train loss:3.702689 +step:5602 train loss:3.635470 +step:5603 train loss:3.673365 +step:5604 train loss:3.669550 +step:5605 train loss:3.639410 +step:5606 train loss:3.643824 +step:5607 train loss:3.675296 +step:5608 train loss:3.617330 +step:5609 train loss:3.667497 +step:5610 train loss:3.623801 +step:5611 train loss:3.662386 +step:5612 train loss:3.692556 +step:5613 train loss:3.653361 +step:5614 train loss:3.616835 +step:5615 train loss:3.718850 +step:5616 train loss:3.615992 +step:5617 train loss:3.702862 +step:5618 train loss:3.685108 +step:5619 train loss:3.643673 +step:5620 train loss:3.643100 +step:5621 train loss:3.716449 +step:5622 train loss:3.601609 +step:5623 train loss:3.634446 +step:5624 train loss:3.623430 +step:5625 train loss:3.657165 +step:5626 train loss:3.649904 +step:5627 train loss:3.624910 +step:5628 train loss:3.664298 +step:5629 train loss:3.644251 +step:5630 train loss:3.578657 +step:5631 train loss:3.616517 +step:5632 train loss:3.661728 +step:5633 train loss:3.652931 +step:5634 train loss:3.606581 +step:5635 train loss:3.644751 +step:5636 train loss:3.625293 +step:5637 train loss:3.761729 +step:5638 train loss:3.674064 +step:5639 train loss:3.650073 +step:5640 train loss:3.658054 +step:5641 train loss:3.694215 +step:5642 train loss:3.628407 +step:5643 train loss:3.642273 +step:5644 train loss:3.726223 +step:5645 train loss:3.682172 +step:5646 train loss:3.681859 +step:5647 train loss:3.671582 +step:5648 train loss:3.657958 +step:5649 train loss:3.575070 +step:5650 train loss:3.579122 +step:5651 train loss:3.655344 +step:5652 train loss:3.655777 +step:5653 train loss:3.621550 +step:5654 train loss:3.748060 +step:5655 train loss:3.612854 +step:5656 train loss:3.637724 +step:5657 train loss:3.706020 +step:5658 train loss:3.604594 +step:5659 train loss:3.642192 +step:5660 train loss:3.691947 +step:5661 train loss:3.630586 +step:5662 train loss:3.670570 +step:5663 train loss:3.560179 +step:5664 train loss:3.532585 +step:5665 train loss:3.657090 +step:5666 train loss:3.663744 +step:5667 train loss:3.689237 +step:5668 train loss:3.628926 +step:5669 train loss:3.641730 +step:5670 train loss:3.637165 +step:5671 train loss:3.628726 +step:5672 train loss:3.674674 +step:5673 train loss:3.641500 +step:5674 train loss:3.716102 +step:5675 train loss:3.630109 +step:5676 train loss:3.779263 +step:5677 train 
loss:3.677926 +step:5678 train loss:3.655444 +step:5679 train loss:3.649596 +step:5680 train loss:3.679760 +step:5681 train loss:3.644809 +step:5682 train loss:3.657655 +step:5683 train loss:3.616017 +step:5684 train loss:3.624084 +step:5685 train loss:3.671541 +step:5686 train loss:3.683404 +step:5687 train loss:3.631503 +step:5688 train loss:3.725929 +step:5689 train loss:3.625779 +step:5690 train loss:3.776467 +step:5691 train loss:3.610452 +step:5692 train loss:3.597260 +step:5693 train loss:3.602982 +step:5694 train loss:3.622375 +step:5695 train loss:3.639360 +step:5696 train loss:3.688487 +step:5697 train loss:3.615473 +step:5698 train loss:3.633993 +step:5699 train loss:3.645059 +step:5700 train loss:3.642044 +step:5701 train loss:3.636075 +step:5702 train loss:3.702418 +step:5703 train loss:3.601593 +step:5704 train loss:3.645708 +step:5705 train loss:3.653296 +step:5706 train loss:3.679564 +step:5707 train loss:3.594934 +step:5708 train loss:3.677993 +step:5709 train loss:3.684578 +step:5710 train loss:3.673715 +step:5711 train loss:3.697264 +step:5712 train loss:3.677759 +step:5713 train loss:3.603499 +step:5714 train loss:3.686995 +step:5715 train loss:3.643660 +step:5716 train loss:3.646785 +step:5717 train loss:3.677067 +step:5718 train loss:3.617465 +step:5719 train loss:3.692070 +step:5720 train loss:3.664912 +step:5721 train loss:3.596187 +step:5722 train loss:3.606775 +step:5723 train loss:3.689246 +step:5724 train loss:3.606061 +step:5725 train loss:3.673723 +step:5726 train loss:3.670081 +step:5727 train loss:3.630232 +step:5728 train loss:3.630907 +step:5729 train loss:3.629182 +step:5730 train loss:3.703224 +step:5731 train loss:3.576158 +step:5732 train loss:3.630005 +step:5733 train loss:3.624801 +step:5734 train loss:3.639334 +step:5735 train loss:3.628016 +step:5736 train loss:3.633126 +step:5737 train loss:3.652784 +step:5738 train loss:3.620958 +step:5739 train loss:3.632250 +step:5740 train loss:3.671480 +step:5741 train loss:3.645073 +step:5742 train loss:3.697132 +step:5743 train loss:3.664411 +step:5744 train loss:3.621362 +step:5745 train loss:3.627184 +step:5746 train loss:3.657077 +step:5747 train loss:3.640443 +step:5748 train loss:3.689496 +step:5749 train loss:3.645171 +step:5750 validation loss:3.569568 +step:5750 train loss:3.650777 +step:5751 train loss:3.662964 +step:5752 train loss:3.648674 +step:5753 train loss:3.619894 +step:5754 train loss:3.626997 +step:5755 train loss:3.646477 +step:5756 train loss:3.634359 +step:5757 train loss:3.696651 +step:5758 train loss:3.629310 +step:5759 train loss:3.593952 +step:5760 train loss:3.676544 +step:5761 train loss:3.672281 +step:5762 train loss:3.628945 +step:5763 train loss:3.655797 +step:5764 train loss:3.616607 +step:5765 train loss:3.736293 +step:5766 train loss:3.644655 +step:5767 train loss:3.680793 +step:5768 train loss:3.616884 +step:5769 train loss:3.742079 +step:5770 train loss:3.659633 +step:5771 train loss:3.686725 +step:5772 train loss:3.639792 +step:5773 train loss:3.618674 +step:5774 train loss:3.627482 +step:5775 train loss:3.698207 +step:5776 train loss:3.684633 +step:5777 train loss:3.604049 +step:5778 train loss:3.688903 +step:5779 train loss:3.651780 +step:5780 train loss:3.620221 +step:5781 train loss:3.685969 +step:5782 train loss:3.644208 +step:5783 train loss:3.607208 +step:5784 train loss:3.707046 +step:5785 train loss:3.698546 +step:5786 train loss:3.607906 +step:5787 train loss:3.654980 +step:5788 train loss:3.661981 +step:5789 train loss:3.604789 +step:5790 train loss:3.708411 
+step:5791 train loss:3.634853 +step:5792 train loss:3.908199 +step:5793 train loss:3.675504 +step:5794 train loss:3.696682 +step:5795 train loss:3.689877 +step:5796 train loss:3.675004 +step:5797 train loss:3.656319 +step:5798 train loss:3.653342 +step:5799 train loss:3.622434 +step:5800 train loss:3.782588 +step:5801 train loss:3.654881 +step:5802 train loss:3.642783 +step:5803 train loss:3.652526 +step:5804 train loss:3.673923 +step:5805 train loss:3.633811 +step:5806 train loss:3.677478 +step:5807 train loss:3.598667 +step:5808 train loss:3.629794 +step:5809 train loss:3.642137 +step:5810 train loss:3.612843 +step:5811 train loss:3.626858 +step:5812 train loss:3.609961 +step:5813 train loss:3.621082 +step:5814 train loss:3.614001 +step:5815 train loss:3.613258 +step:5816 train loss:3.678161 +step:5817 train loss:3.690838 +step:5818 train loss:3.665806 +step:5819 train loss:3.713213 +step:5820 train loss:3.656739 +step:5821 train loss:3.647922 +step:5822 train loss:3.666825 +step:5823 train loss:3.668567 +step:5824 train loss:3.620868 +step:5825 train loss:3.712823 +step:5826 train loss:3.627089 +step:5827 train loss:3.589850 +step:5828 train loss:3.578053 +step:5829 train loss:3.643729 +step:5830 train loss:3.615811 +step:5831 train loss:3.585815 +step:5832 train loss:3.700946 +step:5833 train loss:3.679043 +step:5834 train loss:3.662397 +step:5835 train loss:3.613604 +step:5836 train loss:3.578851 +step:5837 train loss:3.700585 +step:5838 train loss:3.678747 +step:5839 train loss:3.653321 +step:5840 train loss:3.738094 +step:5841 train loss:3.662290 +step:5842 train loss:3.675052 +step:5843 train loss:3.621698 +step:5844 train loss:3.692524 +step:5845 train loss:3.597764 +step:5846 train loss:3.645099 +step:5847 train loss:3.673252 +step:5848 train loss:3.736050 +step:5849 train loss:3.632690 +step:5850 train loss:3.663161 +step:5851 train loss:3.630482 +step:5852 train loss:3.715146 +step:5853 train loss:3.812082 +step:5854 train loss:3.601455 +step:5855 train loss:3.662322 +step:5856 train loss:3.633202 +step:5857 train loss:3.647465 +step:5858 train loss:3.613258 +step:5859 train loss:3.622008 +step:5860 train loss:3.722040 +step:5861 train loss:3.605566 +step:5862 train loss:3.720574 +step:5863 train loss:3.661168 +step:5864 train loss:3.647861 +step:5865 train loss:3.653037 +step:5866 train loss:3.641200 +step:5867 train loss:3.725246 +step:5868 train loss:3.647355 +step:5869 train loss:3.672804 +step:5870 train loss:3.647788 +step:5871 train loss:3.631071 +step:5872 train loss:3.659308 +step:5873 train loss:3.634435 +step:5874 train loss:3.717546 +step:5875 train loss:3.644998 +step:5876 train loss:3.623948 +step:5877 train loss:3.633952 +step:5878 train loss:3.634109 +step:5879 train loss:3.605526 +step:5880 train loss:3.801052 +step:5881 train loss:3.644493 +step:5882 train loss:3.618462 +step:5883 train loss:3.621796 +step:5884 train loss:3.638940 +step:5885 train loss:3.632745 +step:5886 train loss:3.652107 +step:5887 train loss:3.651658 +step:5888 train loss:3.626833 +step:5889 train loss:3.608752 +step:5890 train loss:3.655914 +step:5891 train loss:3.598713 +step:5892 train loss:3.680451 +step:5893 train loss:3.605472 +step:5894 train loss:3.596614 +step:5895 train loss:3.599188 +step:5896 train loss:3.615825 +step:5897 train loss:3.678564 +step:5898 train loss:3.899403 +step:5899 train loss:3.626384 +step:5900 train loss:3.680424 +step:5901 train loss:3.627787 +step:5902 train loss:3.649007 +step:5903 train loss:3.637612 +step:5904 train loss:3.664551 +step:5905 train 
loss:3.769305 +step:5906 train loss:3.712259 +step:5907 train loss:3.655705 +step:5908 train loss:3.631213 +step:5909 train loss:3.626652 +step:5910 train loss:3.609986 +step:5911 train loss:3.629242 +step:5912 train loss:3.649577 +step:5913 train loss:3.663423 +step:5914 train loss:3.647058 +step:5915 train loss:3.770784 +step:5916 train loss:3.650674 +step:5917 train loss:3.622627 +step:5918 train loss:3.620805 +step:5919 train loss:3.643888 +step:5920 train loss:3.646507 +step:5921 train loss:3.612802 +step:5922 train loss:3.670257 +step:5923 train loss:3.664334 +step:5924 train loss:3.617896 +step:5925 train loss:3.744808 +step:5926 train loss:3.632440 +step:5927 train loss:3.607353 +step:5928 train loss:3.636507 +step:5929 train loss:3.663290 +step:5930 train loss:3.612142 +step:5931 train loss:3.596498 +step:5932 train loss:3.635003 +step:5933 train loss:3.687242 +step:5934 train loss:3.608697 +step:5935 train loss:3.634494 +step:5936 train loss:3.621312 +step:5937 train loss:3.602725 +step:5938 train loss:3.618971 +step:5939 train loss:3.597595 +step:5940 train loss:3.677324 +step:5941 train loss:3.617362 +step:5942 train loss:3.627803 +step:5943 train loss:3.630607 +step:5944 train loss:3.687708 +step:5945 train loss:3.617813 +step:5946 train loss:3.597633 +step:5947 train loss:3.612068 +step:5948 train loss:3.647778 +step:5949 train loss:3.697676 +step:5950 train loss:3.652082 +step:5951 train loss:3.657048 +step:5952 train loss:3.579817 +step:5953 train loss:3.623983 +step:5954 train loss:3.634764 +step:5955 train loss:3.639834 +step:5956 train loss:3.615206 +step:5957 train loss:3.581891 +step:5958 train loss:3.654917 +step:5959 train loss:3.614911 +step:5960 train loss:3.591731 +step:5961 train loss:3.614990 +step:5962 train loss:3.647256 +step:5963 train loss:3.681874 +step:5964 train loss:3.638347 +step:5965 train loss:3.655095 +step:5966 train loss:3.649535 +step:5967 train loss:3.616242 +step:5968 train loss:3.686046 +step:5969 train loss:3.630847 +step:5970 train loss:3.648627 +step:5971 train loss:3.601204 +step:5972 train loss:3.625497 +step:5973 train loss:3.616356 +step:5974 train loss:3.640161 +step:5975 train loss:3.607841 +step:5976 train loss:3.649451 +step:5977 train loss:3.606555 +step:5978 train loss:3.593159 +step:5979 train loss:3.632166 +step:5980 train loss:3.703040 +step:5981 train loss:3.593822 +step:5982 train loss:3.605758 +step:5983 train loss:3.671484 +step:5984 train loss:3.616532 +step:5985 train loss:3.658310 +step:5986 train loss:3.631877 +step:5987 train loss:3.621670 +step:5988 train loss:3.627593 +step:5989 train loss:3.648432 +step:5990 train loss:3.574554 +step:5991 train loss:3.639175 +step:5992 train loss:3.676093 +step:5993 train loss:3.624113 +step:5994 train loss:3.643729 +step:5995 train loss:3.539120 +step:5996 train loss:3.703494 +step:5997 train loss:3.683887 +step:5998 train loss:3.560750 +step:5999 train loss:3.587763 +step:6000 validation loss:3.555740 total_sharp:7.1824e-05 L1_sharp:2.8966e-05 L2_sharp:8.5247e-06 L3_sharp:1.4501e-05 L4_sharp:1.3931e-05 L5_sharp:8.9529e-06 L6_sharp:7.1352e-06 L7_sharp:7.8446e-06 L8_sharp:1.6814e-05 L9_sharp:1.9251e-05 L10_sharp:1.9584e-05 L11_sharp:1.5375e-05 L12_sharp:1.0752e-04 total_fnorm:2.1349e+01 total_l1_linf:1.8758e+05 total_spectral:2.1349e+01 L1_fnorm:4.9354e+00 L2_fnorm:4.8436e+00 L3_fnorm:4.8310e+00 L4_fnorm:4.7887e+00 L5_fnorm:4.9724e+00 L6_fnorm:5.0102e+00 L7_fnorm:5.1277e+00 L8_fnorm:5.0662e+00 L9_fnorm:5.0523e+00 L10_fnorm:5.0477e+00 L11_fnorm:5.0776e+00 L12_fnorm:4.7769e+00 
L1_l1linf:4.5845e+00 L2_l1linf:4.9213e+00 L3_l1linf:4.9758e+00 L4_l1linf:4.9832e+00 L5_l1linf:4.9995e+00 L6_l1linf:4.8931e+00 L7_l1linf:5.2350e+00 L8_l1linf:5.8279e+00 L9_l1linf:6.1978e+00 L10_l1linf:6.5969e+00 L11_l1linf:5.6511e+00 L12_l1linf:6.0662e+00 L1_spectral:8.6167e-01 L2_spectral:5.6315e-01 L3_spectral:5.6987e-01 L4_spectral:6.8575e-01 L5_spectral:5.6280e-01 L6_spectral:4.8275e-01 L7_spectral:5.6894e-01 L8_spectral:6.7832e-01 L9_spectral:7.2351e-01 L10_spectral:7.6577e-01 L11_spectral:7.5884e-01 L12_spectral:8.6654e-01 ip_v_neg_g:1.6302e-02 cos_v_neg_g:2.2412e-03 v_norm:2.1349e+01 g_norm:3.4070e-01 hv_norm:2.0738e-01 cos_v_hv:7.3940e-03 hg_norm:2.0000e+00 cos_g_hg:5.3071e-01 v_par:5.3654e-03 v_perp:2.1349e+01 L1_cos_v_neg_g:1.4983e-02 L1_v_norm:4.9354e+00 L2_cos_v_neg_g:6.8771e-03 L2_v_norm:4.8436e+00 L3_cos_v_neg_g:7.9676e-03 L3_v_norm:4.8310e+00 L4_cos_v_neg_g:6.8554e-03 L4_v_norm:4.7887e+00 L5_cos_v_neg_g:6.7913e-03 L5_v_norm:4.9724e+00 L6_cos_v_neg_g:5.7927e-03 L6_v_norm:5.0102e+00 L7_cos_v_neg_g:6.3171e-03 L7_v_norm:5.1277e+00 L8_cos_v_neg_g:7.6479e-03 L8_v_norm:5.0662e+00 L9_cos_v_neg_g:1.0084e-02 L9_v_norm:5.0523e+00 L10_cos_v_neg_g:8.9677e-03 L10_v_norm:5.0477e+00 L11_cos_v_neg_g:9.0946e-03 L11_v_norm:5.0776e+00 L12_cos_v_neg_g:1.4969e-02 L12_v_norm:4.7769e+00 +step:6000 train loss:3.637356 +step:6001 train loss:3.599520 +step:6002 train loss:3.633981 +step:6003 train loss:3.656718 +step:6004 train loss:3.606054 +step:6005 train loss:3.674953 +step:6006 train loss:3.583912 +step:6007 train loss:3.602012 +step:6008 train loss:3.617681 +step:6009 train loss:3.658120 +step:6010 train loss:3.651451 +step:6011 train loss:3.643627 +step:6012 train loss:3.605976 +step:6013 train loss:3.667505 +step:6014 train loss:3.685650 +step:6015 train loss:3.677612 +step:6016 train loss:3.648390 +step:6017 train loss:3.655854 +step:6018 train loss:3.595080 +step:6019 train loss:3.631378 +step:6020 train loss:3.615995 +step:6021 train loss:3.544566 +step:6022 train loss:3.656823 +step:6023 train loss:3.589080 +step:6024 train loss:3.665432 +step:6025 train loss:3.634127 +step:6026 train loss:3.607073 +step:6027 train loss:3.645959 +step:6028 train loss:3.563992 +step:6029 train loss:3.675704 +step:6030 train loss:3.649117 +step:6031 train loss:3.617402 +step:6032 train loss:3.578542 +step:6033 train loss:3.637006 +step:6034 train loss:3.662123 +step:6035 train loss:3.583904 +step:6036 train loss:3.557070 +step:6037 train loss:3.669052 +step:6038 train loss:3.673192 +step:6039 train loss:3.656807 +step:6040 train loss:3.615797 +step:6041 train loss:3.599398 +step:6042 train loss:3.578439 +step:6043 train loss:3.633822 +step:6044 train loss:3.752995 +step:6045 train loss:3.594839 +step:6046 train loss:3.606536 +step:6047 train loss:3.646707 +step:6048 train loss:3.653626 +step:6049 train loss:3.628621 +step:6050 train loss:3.599651 +step:6051 train loss:3.648548 +step:6052 train loss:3.621909 +step:6053 train loss:3.739409 +step:6054 train loss:3.775733 +step:6055 train loss:3.593477 +step:6056 train loss:3.584902 +step:6057 train loss:3.617578 +step:6058 train loss:3.647852 +step:6059 train loss:3.646872 +step:6060 train loss:3.655926 +step:6061 train loss:3.671020 +step:6062 train loss:3.620564 +step:6063 train loss:3.639956 +step:6064 train loss:3.634983 +step:6065 train loss:3.633559 +step:6066 train loss:3.619895 +step:6067 train loss:3.663926 +step:6068 train loss:3.599780 +step:6069 train loss:3.557564 +step:6070 train loss:3.704756 +step:6071 train loss:3.653408 +step:6072 train 
loss:3.592494 +step:6073 train loss:3.630243 +step:6074 train loss:3.716001 +step:6075 train loss:3.637810 +step:6076 train loss:3.650102 +step:6077 train loss:3.650448 +step:6078 train loss:3.587123 +step:6079 train loss:3.612589 +step:6080 train loss:3.621023 +step:6081 train loss:3.657841 +step:6082 train loss:3.607243 +step:6083 train loss:3.619003 +step:6084 train loss:3.680005 +step:6085 train loss:3.678053 +step:6086 train loss:3.578067 +step:6087 train loss:3.624465 +step:6088 train loss:3.608920 +step:6089 train loss:3.669552 +step:6090 train loss:3.669190 +step:6091 train loss:3.619220 +step:6092 train loss:3.584468 +step:6093 train loss:3.644488 +step:6094 train loss:3.557329 +step:6095 train loss:3.721340 +step:6096 train loss:3.595236 +step:6097 train loss:3.671762 +step:6098 train loss:3.644383 +step:6099 train loss:3.702147 +step:6100 train loss:3.695287 +step:6101 train loss:3.631419 +step:6102 train loss:3.739538 +step:6103 train loss:3.631881 +step:6104 train loss:3.749302 +step:6105 train loss:3.677801 +step:6106 train loss:3.614991 +step:6107 train loss:3.681745 +step:6108 train loss:3.641133 +step:6109 train loss:3.715586 +step:6110 train loss:3.643691 +step:6111 train loss:3.676205 +step:6112 train loss:3.617733 +step:6113 train loss:3.644512 +step:6114 train loss:3.615754 +step:6115 train loss:3.678994 +step:6116 train loss:3.622105 +step:6117 train loss:3.673449 +step:6118 train loss:3.655290 +step:6119 train loss:3.663093 +step:6120 train loss:3.811590 +step:6121 train loss:3.642260 +step:6122 train loss:3.651959 +step:6123 train loss:3.632659 +step:6124 train loss:3.609951 +step:6125 train loss:3.598475 +step:6126 train loss:3.620995 +step:6127 train loss:3.606282 +step:6128 train loss:3.586662 +step:6129 train loss:3.811105 +step:6130 train loss:3.596283 +step:6131 train loss:3.573721 +step:6132 train loss:3.646518 +step:6133 train loss:3.612894 +step:6134 train loss:3.644552 +step:6135 train loss:3.725444 +step:6136 train loss:3.743611 +step:6137 train loss:3.605430 +step:6138 train loss:3.665362 +step:6139 train loss:3.641508 +step:6140 train loss:3.638418 +step:6141 train loss:3.597715 +step:6142 train loss:3.663331 +step:6143 train loss:3.626560 +step:6144 train loss:3.644906 +step:6145 train loss:3.894164 +step:6146 train loss:3.731489 +step:6147 train loss:3.809146 +step:6148 train loss:3.586024 +step:6149 train loss:3.708016 +step:6150 train loss:3.662656 +step:6151 train loss:3.612988 +step:6152 train loss:3.610160 +step:6153 train loss:3.678437 +step:6154 train loss:3.764020 +step:6155 train loss:3.628685 +step:6156 train loss:3.736606 +step:6157 train loss:3.654302 +step:6158 train loss:3.650454 +step:6159 train loss:3.615960 +step:6160 train loss:3.780470 +step:6161 train loss:3.631006 +step:6162 train loss:3.648839 +step:6163 train loss:3.681552 +step:6164 train loss:3.595184 +step:6165 train loss:3.662809 +step:6166 train loss:3.654550 +step:6167 train loss:3.675085 +step:6168 train loss:3.653356 +step:6169 train loss:3.640260 +step:6170 train loss:3.646385 +step:6171 train loss:3.614780 +step:6172 train loss:3.600106 +step:6173 train loss:3.651647 +step:6174 train loss:3.580953 +step:6175 train loss:3.592187 +step:6176 train loss:3.573553 +step:6177 train loss:3.670773 +step:6178 train loss:3.614398 +step:6179 train loss:3.623061 +step:6180 train loss:3.630320 +step:6181 train loss:3.664277 +step:6182 train loss:3.545692 +step:6183 train loss:3.554975 +step:6184 train loss:3.672183 +step:6185 train loss:3.629158 +step:6186 train loss:3.588910 
+step:6187 train loss:3.631050 +step:6188 train loss:3.597491 +step:6189 train loss:3.636310 +step:6190 train loss:3.595777 +step:6191 train loss:3.630481 +step:6192 train loss:3.597095 +step:6193 train loss:3.664114 +step:6194 train loss:3.653387 +step:6195 train loss:3.637522 +step:6196 train loss:3.649533 +step:6197 train loss:3.673115 +step:6198 train loss:3.587069 +step:6199 train loss:3.611379 +step:6200 train loss:3.652920 +step:6201 train loss:3.697237 +step:6202 train loss:3.693870 +step:6203 train loss:3.693018 +step:6204 train loss:3.678078 +step:6205 train loss:3.618459 +step:6206 train loss:3.606654 +step:6207 train loss:3.668435 +step:6208 train loss:3.691090 +step:6209 train loss:3.660450 +step:6210 train loss:3.689544 +step:6211 train loss:3.607520 +step:6212 train loss:3.601130 +step:6213 train loss:3.613984 +step:6214 train loss:3.591864 +step:6215 train loss:3.764093 +step:6216 train loss:3.637265 +step:6217 train loss:3.693918 +step:6218 train loss:3.669631 +step:6219 train loss:3.685254 +step:6220 train loss:3.641474 +step:6221 train loss:3.603038 +step:6222 train loss:3.840754 +step:6223 train loss:3.617156 +step:6224 train loss:3.653453 +step:6225 train loss:3.628393 +step:6226 train loss:3.633845 +step:6227 train loss:3.636080 +step:6228 train loss:3.629535 +step:6229 train loss:3.669703 +step:6230 train loss:3.625342 +step:6231 train loss:3.737271 +step:6232 train loss:3.579119 +step:6233 train loss:3.614727 +step:6234 train loss:3.623961 +step:6235 train loss:3.652204 +step:6236 train loss:3.590373 +step:6237 train loss:3.611040 +step:6238 train loss:3.634996 +step:6239 train loss:3.620711 +step:6240 train loss:3.642126 +step:6241 train loss:3.624601 +step:6242 train loss:3.622708 +step:6243 train loss:3.658327 +step:6244 train loss:3.816945 +step:6245 train loss:3.610278 +step:6246 train loss:3.596646 +step:6247 train loss:3.595482 +step:6248 train loss:3.595940 +step:6249 train loss:3.535838 +step:6250 validation loss:3.548120 +step:6250 train loss:3.572829 +step:6251 train loss:3.592148 +step:6252 train loss:3.635654 +step:6253 train loss:3.649054 +step:6254 train loss:3.634387 +step:6255 train loss:3.603176 +step:6256 train loss:3.656010 +step:6257 train loss:3.651410 +step:6258 train loss:3.632445 +step:6259 train loss:3.637805 +step:6260 train loss:3.666607 +step:6261 train loss:3.686581 +step:6262 train loss:3.582106 +step:6263 train loss:3.612319 +step:6264 train loss:3.623179 +step:6265 train loss:3.610418 +step:6266 train loss:3.815961 +step:6267 train loss:3.618802 +step:6268 train loss:3.709139 +step:6269 train loss:3.581211 +step:6270 train loss:3.592476 +step:6271 train loss:3.640646 +step:6272 train loss:3.634866 +step:6273 train loss:3.834050 +step:6274 train loss:3.609587 +step:6275 train loss:3.643502 +step:6276 train loss:3.616086 +step:6277 train loss:3.600289 +step:6278 train loss:3.580738 +step:6279 train loss:3.636904 +step:6280 train loss:3.642087 +step:6281 train loss:3.576253 +step:6282 train loss:3.591979 +step:6283 train loss:3.675322 +step:6284 train loss:3.647812 +step:6285 train loss:3.649521 +step:6286 train loss:3.594131 +step:6287 train loss:3.621202 +step:6288 train loss:3.720184 +step:6289 train loss:3.585242 +step:6290 train loss:3.572151 +step:6291 train loss:3.617251 +step:6292 train loss:3.634578 +step:6293 train loss:3.621560 +step:6294 train loss:3.608441 +step:6295 train loss:3.626362 +step:6296 train loss:3.593822 +step:6297 train loss:3.716783 +step:6298 train loss:3.663440 +step:6299 train loss:3.554904 +step:6300 
train loss:3.639636 +step:6301 train loss:3.663346 +step:6302 train loss:3.649721 +step:6303 train loss:3.616619 +step:6304 train loss:3.631993 +step:6305 train loss:3.602496 +step:6306 train loss:3.615230 +step:6307 train loss:3.628118 +step:6308 train loss:3.598412 +step:6309 train loss:3.596274 +step:6310 train loss:3.649441 +step:6311 train loss:3.606173 +step:6312 train loss:3.644761 +step:6313 train loss:3.576104 +step:6314 train loss:3.605278 +step:6315 train loss:3.656142 +step:6316 train loss:3.577430 +step:6317 train loss:3.575748 +step:6318 train loss:3.683694 +step:6319 train loss:3.617768 +step:6320 train loss:3.634479 +step:6321 train loss:3.615327 +step:6322 train loss:3.615337 +step:6323 train loss:3.551534 +step:6324 train loss:3.557649 +step:6325 train loss:3.656956 +step:6326 train loss:3.576966 +step:6327 train loss:3.651406 +step:6328 train loss:3.628251 +step:6329 train loss:3.548424 +step:6330 train loss:3.578013 +step:6331 train loss:3.594264 +step:6332 train loss:3.731538 +step:6333 train loss:3.605367 +step:6334 train loss:3.584482 +step:6335 train loss:3.552633 +step:6336 train loss:3.588945 +step:6337 train loss:3.609185 +step:6338 train loss:3.568472 +step:6339 train loss:3.611418 +step:6340 train loss:3.593129 +step:6341 train loss:3.606053 +step:6342 train loss:3.598961 +step:6343 train loss:3.700317 +step:6344 train loss:3.552028 +step:6345 train loss:3.566633 +step:6346 train loss:3.648089 +step:6347 train loss:3.521026 +step:6348 train loss:3.617064 +step:6349 train loss:3.593749 +step:6350 train loss:3.569813 +step:6351 train loss:3.563947 +step:6352 train loss:3.580465 +step:6353 train loss:3.601972 +step:6354 train loss:3.616353 +step:6355 train loss:3.622008 +step:6356 train loss:3.634315 +step:6357 train loss:3.488090 +step:6358 train loss:3.582734 +step:6359 train loss:3.636305 +step:6360 train loss:3.548866 +step:6361 train loss:3.551178 +step:6362 train loss:3.586727 +step:6363 train loss:3.572467 +step:6364 train loss:3.554045 +step:6365 train loss:3.626466 +step:6366 train loss:3.641093 +step:6367 train loss:3.565520 +step:6368 train loss:3.611538 +step:6369 train loss:3.577045 +step:6370 train loss:3.629497 +step:6371 train loss:3.544584 +step:6372 train loss:3.576823 +step:6373 train loss:3.602038 +step:6374 train loss:3.630921 +step:6375 train loss:3.590427 +step:6376 train loss:3.616770 +step:6377 train loss:3.610114 +step:6378 train loss:3.562385 +step:6379 train loss:3.602315 +step:6380 train loss:3.645376 +step:6381 train loss:3.610216 +step:6382 train loss:3.563185 +step:6383 train loss:3.631881 +step:6384 train loss:3.604133 +step:6385 train loss:3.582485 +step:6386 train loss:3.618347 +step:6387 train loss:3.596641 +step:6388 train loss:3.638971 +step:6389 train loss:3.644603 +step:6390 train loss:3.594174 +step:6391 train loss:3.582217 +step:6392 train loss:3.566022 +step:6393 train loss:3.622768 +step:6394 train loss:3.609345 +step:6395 train loss:3.792268 +step:6396 train loss:3.612528 +step:6397 train loss:3.556334 +step:6398 train loss:3.627758 +step:6399 train loss:3.568036 +step:6400 train loss:3.645294 +step:6401 train loss:3.675261 +step:6402 train loss:3.611098 +step:6403 train loss:3.603635 +step:6404 train loss:3.578362 +step:6405 train loss:3.609951 +step:6406 train loss:3.609689 +step:6407 train loss:3.670427 +step:6408 train loss:3.561022 +step:6409 train loss:3.545688 +step:6410 train loss:3.677902 +step:6411 train loss:3.606435 +step:6412 train loss:3.611942 +step:6413 train loss:3.615362 +step:6414 train loss:3.562134 
+step:6415 train loss:3.618935 +step:6416 train loss:3.594835 +step:6417 train loss:3.565535 +step:6418 train loss:3.556722 +step:6419 train loss:3.642123 +step:6420 train loss:3.570629 +step:6421 train loss:3.594929 +step:6422 train loss:3.580803 +step:6423 train loss:3.595244 +step:6424 train loss:3.614617 +step:6425 train loss:3.608975 +step:6426 train loss:3.651892 +step:6427 train loss:3.612540 +step:6428 train loss:3.650409 +step:6429 train loss:3.613761 +step:6430 train loss:3.592502 +step:6431 train loss:3.564515 +step:6432 train loss:3.599729 +step:6433 train loss:3.613600 +step:6434 train loss:3.493469 +step:6435 train loss:3.674244 +step:6436 train loss:3.610480 +step:6437 train loss:3.572414 +step:6438 train loss:3.603331 +step:6439 train loss:3.582096 +step:6440 train loss:3.594327 +step:6441 train loss:3.587313 +step:6442 train loss:3.529006 +step:6443 train loss:3.578584 +step:6444 train loss:3.724775 +step:6445 train loss:3.626443 +step:6446 train loss:3.633017 +step:6447 train loss:3.620050 +step:6448 train loss:3.561828 +step:6449 train loss:3.586272 +step:6450 train loss:3.568262 +step:6451 train loss:3.556000 +step:6452 train loss:3.558039 +step:6453 train loss:3.606129 +step:6454 train loss:3.622477 +step:6455 train loss:3.615193 +step:6456 train loss:3.630736 +step:6457 train loss:3.608377 +step:6458 train loss:3.581360 +step:6459 train loss:3.563952 +step:6460 train loss:3.570964 +step:6461 train loss:3.569300 +step:6462 train loss:3.562830 +step:6463 train loss:3.663132 +step:6464 train loss:3.568790 +step:6465 train loss:3.612548 +step:6466 train loss:3.627933 +step:6467 train loss:3.552271 +step:6468 train loss:3.629750 +step:6469 train loss:3.541880 +step:6470 train loss:3.659385 +step:6471 train loss:3.569045 +step:6472 train loss:3.723985 +step:6473 train loss:3.608716 +step:6474 train loss:3.643631 +step:6475 train loss:3.585896 +step:6476 train loss:3.658358 +step:6477 train loss:3.587383 +step:6478 train loss:3.718894 +step:6479 train loss:3.635368 +step:6480 train loss:3.569784 +step:6481 train loss:3.623720 +step:6482 train loss:3.566118 +step:6483 train loss:3.626220 +step:6484 train loss:3.580992 +step:6485 train loss:3.643282 +step:6486 train loss:3.572185 +step:6487 train loss:3.576236 +step:6488 train loss:3.572816 +step:6489 train loss:3.576049 +step:6490 train loss:3.598685 +step:6491 train loss:3.563895 +step:6492 train loss:3.672928 +step:6493 train loss:3.575258 +step:6494 train loss:3.579401 +step:6495 train loss:3.578122 +step:6496 train loss:3.614484 +step:6497 train loss:3.631519 +step:6498 train loss:3.738100 +step:6499 train loss:3.710836 +step:6500 validation loss:3.543269 total_sharp:1.1182e-04 L1_sharp:2.0824e-05 L2_sharp:2.5099e-06 L3_sharp:1.7767e-05 L4_sharp:1.1769e-05 L5_sharp:9.9726e-06 L6_sharp:9.8056e-06 L7_sharp:8.6779e-06 L8_sharp:2.2948e-05 L9_sharp:3.7630e-05 L10_sharp:4.5167e-05 L11_sharp:2.5839e-05 L12_sharp:2.1309e-04 total_fnorm:2.1588e+01 total_l1_linf:1.8990e+05 total_spectral:2.1588e+01 L1_fnorm:4.9700e+00 L2_fnorm:4.8672e+00 L3_fnorm:4.8718e+00 L4_fnorm:4.8150e+00 L5_fnorm:5.0488e+00 L6_fnorm:5.1336e+00 L7_fnorm:5.1705e+00 L8_fnorm:5.1411e+00 L9_fnorm:5.1258e+00 L10_fnorm:5.1340e+00 L11_fnorm:5.1660e+00 L12_fnorm:4.8685e+00 L1_l1linf:4.6800e+00 L2_l1linf:5.0028e+00 L3_l1linf:5.1237e+00 L4_l1linf:4.9475e+00 L5_l1linf:5.1100e+00 L6_l1linf:5.1438e+00 L7_l1linf:5.3884e+00 L8_l1linf:5.6084e+00 L9_l1linf:6.0171e+00 L10_l1linf:5.9711e+00 L11_l1linf:6.3971e+00 L12_l1linf:6.0893e+00 L1_spectral:8.7539e-01 L2_spectral:5.8012e-01 
L3_spectral:6.1206e-01 L4_spectral:6.7992e-01 L5_spectral:5.4122e-01 L6_spectral:5.4805e-01 L7_spectral:5.8160e-01 L8_spectral:7.6736e-01 L9_spectral:8.5259e-01 L10_spectral:8.9651e-01 L11_spectral:7.8090e-01 L12_spectral:9.3240e-01 ip_v_neg_g:2.4892e-02 cos_v_neg_g:2.6418e-03 v_norm:2.1588e+01 g_norm:4.3647e-01 hv_norm:4.1390e-01 cos_v_hv:5.8323e-03 hg_norm:4.7929e+00 cos_g_hg:6.7358e-01 v_par:6.9015e-03 v_perp:2.1588e+01 L1_cos_v_neg_g:1.0957e-02 L1_v_norm:4.9700e+00 L2_cos_v_neg_g:2.6128e-03 L2_v_norm:4.8672e+00 L3_cos_v_neg_g:2.7363e-03 L3_v_norm:4.8718e+00 L4_cos_v_neg_g:3.7254e-03 L4_v_norm:4.8150e+00 L5_cos_v_neg_g:6.0875e-03 L5_v_norm:5.0488e+00 L6_cos_v_neg_g:8.0495e-03 L6_v_norm:5.1336e+00 L7_cos_v_neg_g:7.5140e-03 L7_v_norm:5.1705e+00 L8_cos_v_neg_g:1.1943e-02 L8_v_norm:5.1411e+00 L9_cos_v_neg_g:1.5748e-02 L9_v_norm:5.1258e+00 L10_cos_v_neg_g:1.9233e-02 L10_v_norm:5.1340e+00 L11_cos_v_neg_g:2.0726e-02 L11_v_norm:5.1660e+00 L12_cos_v_neg_g:3.2221e-02 L12_v_norm:4.8685e+00 +step:6500 train loss:3.560095 +step:6501 train loss:3.573624 +step:6502 train loss:3.592558 +step:6503 train loss:3.649108 +step:6504 train loss:3.598537 +step:6505 train loss:3.606287 +step:6506 train loss:3.565936 +step:6507 train loss:3.633846 +step:6508 train loss:3.600476 +step:6509 train loss:3.582778 +step:6510 train loss:3.589648 +step:6511 train loss:3.608327 +step:6512 train loss:3.544591 +step:6513 train loss:3.614603 +step:6514 train loss:3.488006 +step:6515 train loss:3.581936 +step:6516 train loss:3.633840 +step:6517 train loss:3.542835 +step:6518 train loss:3.583839 +step:6519 train loss:3.574141 +step:6520 train loss:3.665455 +step:6521 train loss:3.640594 +step:6522 train loss:3.653125 +step:6523 train loss:3.546412 +step:6524 train loss:3.633038 +step:6525 train loss:3.614162 +step:6526 train loss:3.554664 +step:6527 train loss:3.608298 +step:6528 train loss:3.629069 +step:6529 train loss:3.656781 +step:6530 train loss:3.559767 +step:6531 train loss:3.640821 +step:6532 train loss:3.566779 +step:6533 train loss:3.608794 +step:6534 train loss:3.613828 +step:6535 train loss:3.589492 +step:6536 train loss:3.723833 +step:6537 train loss:3.537975 +step:6538 train loss:3.639571 +step:6539 train loss:3.564541 +step:6540 train loss:3.676576 +step:6541 train loss:3.654330 +step:6542 train loss:3.615319 +step:6543 train loss:3.568701 +step:6544 train loss:3.549889 +step:6545 train loss:3.541257 +step:6546 train loss:3.604289 +step:6547 train loss:3.654696 +step:6548 train loss:3.599174 +step:6549 train loss:3.611263 +step:6550 train loss:3.725960 +step:6551 train loss:3.602719 +step:6552 train loss:3.596854 +step:6553 train loss:3.635006 +step:6554 train loss:3.528191 +step:6555 train loss:3.611088 +step:6556 train loss:3.483498 +step:6557 train loss:3.829701 +step:6558 train loss:3.667529 +step:6559 train loss:3.578770 +step:6560 train loss:3.615680 +step:6561 train loss:3.591675 +step:6562 train loss:3.606968 +step:6563 train loss:3.501367 +step:6564 train loss:3.604889 +step:6565 train loss:3.514720 +step:6566 train loss:3.625372 +step:6567 train loss:3.595427 +step:6568 train loss:3.640289 +step:6569 train loss:3.583690 +step:6570 train loss:3.623694 +step:6571 train loss:3.551607 +step:6572 train loss:3.628304 +step:6573 train loss:3.638210 +step:6574 train loss:3.625514 +step:6575 train loss:3.569762 +step:6576 train loss:3.557847 +step:6577 train loss:3.630226 +step:6578 train loss:3.499138 +step:6579 train loss:3.601166 +step:6580 train loss:3.559026 +step:6581 train loss:3.568213 +step:6582 
train loss:3.549313 +step:6583 train loss:3.648006 +step:6584 train loss:3.579535 +step:6585 train loss:3.614831 +step:6586 train loss:3.621170 +step:6587 train loss:3.628320 +step:6588 train loss:3.593564 +step:6589 train loss:3.620699 +step:6590 train loss:3.565978 +step:6591 train loss:3.619289 +step:6592 train loss:3.558760 +step:6593 train loss:3.567126 +step:6594 train loss:3.591762 +step:6595 train loss:3.577677 +step:6596 train loss:3.573755 +step:6597 train loss:3.597492 +step:6598 train loss:3.640753 +step:6599 train loss:3.536342 +step:6600 train loss:3.588778 +step:6601 train loss:3.652280 +step:6602 train loss:3.571029 +step:6603 train loss:3.599912 +step:6604 train loss:3.610467 +step:6605 train loss:3.589398 +step:6606 train loss:3.649046 +step:6607 train loss:3.569654 +step:6608 train loss:3.583265 +step:6609 train loss:3.555449 +step:6610 train loss:3.662709 +step:6611 train loss:3.584539 +step:6612 train loss:3.626910 +step:6613 train loss:3.546427 +step:6614 train loss:3.575775 +step:6615 train loss:3.575863 +step:6616 train loss:3.555344 +step:6617 train loss:3.594702 +step:6618 train loss:3.580951 +step:6619 train loss:3.554522 +step:6620 train loss:3.661729 +step:6621 train loss:3.537249 +step:6622 train loss:3.609725 +step:6623 train loss:3.540973 +step:6624 train loss:3.613028 +step:6625 train loss:3.654981 +step:6626 train loss:3.622009 +step:6627 train loss:3.569930 +step:6628 train loss:3.627925 +step:6629 train loss:3.530984 +step:6630 train loss:3.568443 +step:6631 train loss:3.603790 +step:6632 train loss:3.640039 +step:6633 train loss:3.594011 +step:6634 train loss:3.655777 +step:6635 train loss:3.554499 +step:6636 train loss:3.596622 +step:6637 train loss:3.561441 +step:6638 train loss:3.562934 +step:6639 train loss:3.575062 +step:6640 train loss:3.560302 +step:6641 train loss:3.577320 +step:6642 train loss:3.578250 +step:6643 train loss:3.655579 +step:6644 train loss:3.661211 +step:6645 train loss:3.537544 +step:6646 train loss:3.629999 +step:6647 train loss:3.581984 +step:6648 train loss:3.688373 +step:6649 train loss:3.623853 +step:6650 train loss:3.572999 +step:6651 train loss:3.613757 +step:6652 train loss:3.627762 +step:6653 train loss:3.569513 +step:6654 train loss:3.566455 +step:6655 train loss:3.605609 +step:6656 train loss:3.578933 +step:6657 train loss:3.600292 +step:6658 train loss:3.584719 +step:6659 train loss:3.735078 +step:6660 train loss:3.634523 +step:6661 train loss:3.556586 +step:6662 train loss:3.591933 +step:6663 train loss:3.525978 +step:6664 train loss:3.603900 +step:6665 train loss:3.613887 +step:6666 train loss:3.628095 +step:6667 train loss:3.541363 +step:6668 train loss:3.671901 +step:6669 train loss:3.555276 +step:6670 train loss:3.565105 +step:6671 train loss:3.644272 +step:6672 train loss:3.599905 +step:6673 train loss:3.608331 +step:6674 train loss:3.581445 +step:6675 train loss:3.599438 +step:6676 train loss:3.609805 +step:6677 train loss:3.561864 +step:6678 train loss:3.637392 +step:6679 train loss:3.670977 +step:6680 train loss:3.670884 +step:6681 train loss:3.625460 +step:6682 train loss:3.566605 +step:6683 train loss:3.593485 +step:6684 train loss:3.601110 +step:6685 train loss:3.616285 +step:6686 train loss:3.552323 +step:6687 train loss:3.565274 +step:6688 train loss:3.614575 +step:6689 train loss:3.621449 +step:6690 train loss:3.595349 +step:6691 train loss:3.628386 +step:6692 train loss:3.634565 +step:6693 train loss:3.668373 +step:6694 train loss:3.619943 +step:6695 train loss:3.595139 +step:6696 train loss:3.531727 
+step:6697 train loss:3.745409 +step:6698 train loss:3.595487 +step:6699 train loss:3.592813 +step:6700 train loss:3.602048 +step:6701 train loss:3.659810 +step:6702 train loss:3.549967 +step:6703 train loss:3.597243 +step:6704 train loss:3.580789 +step:6705 train loss:3.591787 +step:6706 train loss:3.569365 +step:6707 train loss:3.642570 +step:6708 train loss:3.600549 +step:6709 train loss:3.625357 +step:6710 train loss:3.615653 +step:6711 train loss:3.568870 +step:6712 train loss:3.555288 +step:6713 train loss:3.580489 +step:6714 train loss:3.624840 +step:6715 train loss:3.564964 +step:6716 train loss:3.643351 +step:6717 train loss:3.591076 +step:6718 train loss:3.610392 +step:6719 train loss:3.645120 +step:6720 train loss:3.573735 +step:6721 train loss:3.588530 +step:6722 train loss:3.568711 +step:6723 train loss:3.694751 +step:6724 train loss:3.554783 +step:6725 train loss:3.613261 +step:6726 train loss:3.570149 +step:6727 train loss:3.634257 +step:6728 train loss:3.726481 +step:6729 train loss:3.591005 +step:6730 train loss:3.585044 +step:6731 train loss:3.627990 +step:6732 train loss:3.501341 +step:6733 train loss:3.639161 +step:6734 train loss:3.565743 +step:6735 train loss:3.593583 +step:6736 train loss:3.596632 +step:6737 train loss:3.590253 +step:6738 train loss:3.624044 +step:6739 train loss:3.576335 +step:6740 train loss:3.529866 +step:6741 train loss:3.639881 +step:6742 train loss:3.599096 +step:6743 train loss:3.602942 +step:6744 train loss:3.496287 +step:6745 train loss:3.652763 +step:6746 train loss:3.581379 +step:6747 train loss:3.574358 +step:6748 train loss:3.649448 +step:6749 train loss:3.627810 +step:6750 validation loss:3.529684 +step:6750 train loss:3.549204 +step:6751 train loss:3.584144 +step:6752 train loss:3.587675 +step:6753 train loss:3.621766 +step:6754 train loss:3.602932 +step:6755 train loss:3.611557 +step:6756 train loss:3.554715 +step:6757 train loss:3.528580 +step:6758 train loss:3.705289 +step:6759 train loss:3.593259 +step:6760 train loss:3.647490 +step:6761 train loss:3.584845 +step:6762 train loss:3.605501 +step:6763 train loss:3.503691 +step:6764 train loss:3.583967 +step:6765 train loss:3.587111 +step:6766 train loss:3.580895 +step:6767 train loss:3.533184 +step:6768 train loss:3.539990 +step:6769 train loss:3.503677 +step:6770 train loss:3.588753 +step:6771 train loss:3.589525 +step:6772 train loss:3.598563 +step:6773 train loss:3.579785 +step:6774 train loss:3.592999 +step:6775 train loss:3.634108 +step:6776 train loss:3.590863 +step:6777 train loss:3.669107 +step:6778 train loss:3.550757 +step:6779 train loss:3.608361 +step:6780 train loss:3.539274 +step:6781 train loss:3.602343 +step:6782 train loss:3.513519 +step:6783 train loss:3.551736 +step:6784 train loss:3.575774 +step:6785 train loss:3.565493 +step:6786 train loss:3.578049 +step:6787 train loss:3.654762 +step:6788 train loss:3.592236 +step:6789 train loss:3.601025 +step:6790 train loss:3.600353 +step:6791 train loss:3.611189 +step:6792 train loss:3.610285 +step:6793 train loss:3.608668 +step:6794 train loss:3.578842 +step:6795 train loss:3.577033 +step:6796 train loss:3.579174 +step:6797 train loss:3.680058 +step:6798 train loss:3.581949 +step:6799 train loss:3.575243 +step:6800 train loss:3.542068 +step:6801 train loss:3.674484 +step:6802 train loss:3.624068 +step:6803 train loss:3.612571 +step:6804 train loss:3.641869 +step:6805 train loss:3.600741 +step:6806 train loss:3.534981 +step:6807 train loss:3.593669 +step:6808 train loss:3.577612 +step:6809 train loss:3.606426 +step:6810 
train loss:3.729675 +step:6811 train loss:3.631412 +step:6812 train loss:3.606704 +step:6813 train loss:3.615405 +step:6814 train loss:3.625943 +step:6815 train loss:3.671695 +step:6816 train loss:3.587921 +step:6817 train loss:3.611980 +step:6818 train loss:3.588043 +step:6819 train loss:3.572596 +step:6820 train loss:3.600084 +step:6821 train loss:3.563742 +step:6822 train loss:3.671584 +step:6823 train loss:3.647897 +step:6824 train loss:3.627481 +step:6825 train loss:3.573004 +step:6826 train loss:3.615354 +step:6827 train loss:3.602890 +step:6828 train loss:3.618897 +step:6829 train loss:3.607392 +step:6830 train loss:3.571275 +step:6831 train loss:3.533850 +step:6832 train loss:3.519611 +step:6833 train loss:3.536770 +step:6834 train loss:3.624290 +step:6835 train loss:3.597878 +step:6836 train loss:3.514370 +step:6837 train loss:3.580408 +step:6838 train loss:3.637412 +step:6839 train loss:3.724414 +step:6840 train loss:3.594977 +step:6841 train loss:3.549411 +step:6842 train loss:3.601537 +step:6843 train loss:3.706135 +step:6844 train loss:3.589461 +step:6845 train loss:3.642220 +step:6846 train loss:3.703795 +step:6847 train loss:3.634791 +step:6848 train loss:3.622902 +step:6849 train loss:3.648852 +step:6850 train loss:3.624158 +step:6851 train loss:3.549397 +step:6852 train loss:3.543250 +step:6853 train loss:3.529522 +step:6854 train loss:3.613027 +step:6855 train loss:3.579257 +step:6856 train loss:3.566262 +step:6857 train loss:3.620583 +step:6858 train loss:3.649649 +step:6859 train loss:3.555149 +step:6860 train loss:3.667261 +step:6861 train loss:3.689233 +step:6862 train loss:3.602945 +step:6863 train loss:3.598042 +step:6864 train loss:3.542775 +step:6865 train loss:3.612459 +step:6866 train loss:3.538696 +step:6867 train loss:3.716074 +step:6868 train loss:3.594603 +step:6869 train loss:3.628162 +step:6870 train loss:3.663763 +step:6871 train loss:3.577463 +step:6872 train loss:3.576224 +step:6873 train loss:3.596852 +step:6874 train loss:3.553638 +step:6875 train loss:3.558967 +step:6876 train loss:3.588384 +step:6877 train loss:3.627685 +step:6878 train loss:3.538552 +step:6879 train loss:3.587004 +step:6880 train loss:3.596032 +step:6881 train loss:3.562013 +step:6882 train loss:3.626241 +step:6883 train loss:3.606261 +step:6884 train loss:3.839691 +step:6885 train loss:3.612893 +step:6886 train loss:3.596116 +step:6887 train loss:3.532012 +step:6888 train loss:3.637464 +step:6889 train loss:3.515038 +step:6890 train loss:3.626865 +step:6891 train loss:3.631917 +step:6892 train loss:3.734600 +step:6893 train loss:3.563618 +step:6894 train loss:3.627249 +step:6895 train loss:3.628391 +step:6896 train loss:3.599745 +step:6897 train loss:3.554442 +step:6898 train loss:3.556105 +step:6899 train loss:3.639234 +step:6900 train loss:3.612097 +step:6901 train loss:3.561224 +step:6902 train loss:3.497660 +step:6903 train loss:3.543272 +step:6904 train loss:3.656507 +step:6905 train loss:3.686806 +step:6906 train loss:3.609338 +step:6907 train loss:3.625675 +step:6908 train loss:3.659124 +step:6909 train loss:3.653082 +step:6910 train loss:3.530362 +step:6911 train loss:3.658329 +step:6912 train loss:3.551738 +step:6913 train loss:3.590408 +step:6914 train loss:3.551323 +step:6915 train loss:3.572120 +step:6916 train loss:3.552654 +step:6917 train loss:3.678418 +step:6918 train loss:3.621626 +step:6919 train loss:3.615279 +step:6920 train loss:3.601894 +step:6921 train loss:3.667645 +step:6922 train loss:3.658082 +step:6923 train loss:3.523419 +step:6924 train loss:3.605223 
+step:6925 train loss:3.578674 +step:6926 train loss:3.613255 +step:6927 train loss:3.667823 +step:6928 train loss:3.553617 +step:6929 train loss:3.567986 +step:6930 train loss:3.598073 +step:6931 train loss:3.598755 +step:6932 train loss:3.829430 +step:6933 train loss:3.662186 +step:6934 train loss:3.602952 +step:6935 train loss:3.585851 +step:6936 train loss:3.627980 +step:6937 train loss:3.567218 +step:6938 train loss:3.634526 +step:6939 train loss:3.567741 +step:6940 train loss:3.620903 +step:6941 train loss:3.537724 +step:6942 train loss:3.624876 +step:6943 train loss:3.514933 +step:6944 train loss:3.618161 +step:6945 train loss:3.561327 +step:6946 train loss:3.639832 +step:6947 train loss:3.567881 +step:6948 train loss:3.559354 +step:6949 train loss:3.636115 +step:6950 train loss:3.627656 +step:6951 train loss:3.631088 +step:6952 train loss:3.559596 +step:6953 train loss:3.606075 +step:6954 train loss:3.670056 +step:6955 train loss:3.584518 +step:6956 train loss:3.622941 +step:6957 train loss:3.612918 +step:6958 train loss:3.571157 +step:6959 train loss:3.609608 +step:6960 train loss:3.581426 +step:6961 train loss:3.585678 +step:6962 train loss:3.567211 +step:6963 train loss:3.538464 +step:6964 train loss:3.581910 +step:6965 train loss:3.573067 +step:6966 train loss:3.618296 +step:6967 train loss:3.555651 +step:6968 train loss:3.594836 +step:6969 train loss:3.610212 +step:6970 train loss:3.589556 +step:6971 train loss:3.650822 +step:6972 train loss:3.599236 +step:6973 train loss:3.557044 +step:6974 train loss:3.685109 +step:6975 train loss:3.593088 +step:6976 train loss:3.561063 +step:6977 train loss:3.596482 +step:6978 train loss:3.591877 +step:6979 train loss:3.600918 +step:6980 train loss:3.579187 +step:6981 train loss:3.640555 +step:6982 train loss:3.593208 +step:6983 train loss:3.582849 +step:6984 train loss:3.697009 +step:6985 train loss:3.545654 +step:6986 train loss:3.536156 +step:6987 train loss:3.585369 +step:6988 train loss:3.589093 +step:6989 train loss:3.734218 +step:6990 train loss:3.598182 +step:6991 train loss:3.557155 +step:6992 train loss:3.604722 +step:6993 train loss:3.671870 +step:6994 train loss:3.617028 +step:6995 train loss:3.568518 +step:6996 train loss:3.570959 +step:6997 train loss:3.649699 +step:6998 train loss:3.552616 +step:6999 train loss:3.601703 +step:7000 validation loss:3.524760 total_sharp:7.9387e-05 L1_sharp:3.2466e-05 L2_sharp:9.6593e-06 L3_sharp:1.7037e-05 L4_sharp:1.2886e-05 L5_sharp:7.9470e-06 L6_sharp:5.9818e-06 L7_sharp:1.1195e-05 L8_sharp:1.8383e-05 L9_sharp:2.3330e-05 L10_sharp:2.2801e-05 L11_sharp:1.5050e-05 L12_sharp:1.2239e-04 total_fnorm:2.1431e+01 total_l1_linf:1.8844e+05 total_spectral:2.1431e+01 L1_fnorm:4.8689e+00 L2_fnorm:4.8595e+00 L3_fnorm:4.8155e+00 L4_fnorm:4.7742e+00 L5_fnorm:4.9589e+00 L6_fnorm:5.0599e+00 L7_fnorm:5.1567e+00 L8_fnorm:5.1107e+00 L9_fnorm:5.1012e+00 L10_fnorm:5.0710e+00 L11_fnorm:5.1199e+00 L12_fnorm:4.8188e+00 L1_l1linf:4.5338e+00 L2_l1linf:4.9157e+00 L3_l1linf:4.8704e+00 L4_l1linf:4.7043e+00 L5_l1linf:4.8573e+00 L6_l1linf:4.9206e+00 L7_l1linf:5.2671e+00 L8_l1linf:5.8384e+00 L9_l1linf:5.4691e+00 L10_l1linf:5.8771e+00 L11_l1linf:5.6868e+00 L12_l1linf:5.8684e+00 L1_spectral:8.7403e-01 L2_spectral:5.7323e-01 L3_spectral:5.8855e-01 L4_spectral:6.8426e-01 L5_spectral:5.4645e-01 L6_spectral:4.8664e-01 L7_spectral:5.5199e-01 L8_spectral:6.9632e-01 L9_spectral:7.8304e-01 L10_spectral:8.1355e-01 L11_spectral:7.7713e-01 L12_spectral:8.6754e-01 ip_v_neg_g:1.5451e-02 cos_v_neg_g:2.0336e-03 v_norm:2.1431e+01 
g_norm:3.5451e-01 hv_norm:2.7837e-01 cos_v_hv:6.1118e-03 hg_norm:2.6615e+00 cos_g_hg:5.3325e-01 v_par:6.6263e-03 v_perp:2.1431e+01 L1_cos_v_neg_g:1.3355e-02 L1_v_norm:4.8689e+00 L2_cos_v_neg_g:4.1397e-03 L2_v_norm:4.8595e+00 L3_cos_v_neg_g:3.4691e-03 L3_v_norm:4.8155e+00 L4_cos_v_neg_g:2.7064e-03 L4_v_norm:4.7742e+00 L5_cos_v_neg_g:3.6509e-03 L5_v_norm:4.9589e+00 L6_cos_v_neg_g:4.0221e-03 L6_v_norm:5.0599e+00 L7_cos_v_neg_g:5.9227e-03 L7_v_norm:5.1567e+00 L8_cos_v_neg_g:8.1636e-03 L8_v_norm:5.1107e+00 L9_cos_v_neg_g:8.4222e-03 L9_v_norm:5.1012e+00 L10_cos_v_neg_g:9.4752e-03 L10_v_norm:5.0710e+00 L11_cos_v_neg_g:1.0360e-02 L11_v_norm:5.1199e+00 L12_cos_v_neg_g:1.8783e-02 L12_v_norm:4.8188e+00 +step:7000 train loss:3.678976 +step:7001 train loss:3.583118 +step:7002 train loss:3.572235 +step:7003 train loss:3.593945 +step:7004 train loss:3.591825 +step:7005 train loss:3.576528 +step:7006 train loss:3.580712 +step:7007 train loss:3.634896 +step:7008 train loss:3.570701 +step:7009 train loss:3.616393 +step:7010 train loss:3.546605 +step:7011 train loss:3.605475 +step:7012 train loss:3.576406 +step:7013 train loss:3.647942 +step:7014 train loss:3.555500 +step:7015 train loss:3.618484 +step:7016 train loss:3.605719 +step:7017 train loss:3.571702 +step:7018 train loss:3.647788 +step:7019 train loss:3.576289 +step:7020 train loss:3.621884 +step:7021 train loss:3.569027 +step:7022 train loss:3.584987 +step:7023 train loss:3.599159 +step:7024 train loss:3.563776 +step:7025 train loss:3.613089 +step:7026 train loss:3.572089 +step:7027 train loss:3.632533 +step:7028 train loss:3.552718 +step:7029 train loss:3.546016 +step:7030 train loss:3.549678 +step:7031 train loss:3.600046 +step:7032 train loss:3.609886 +step:7033 train loss:3.585675 +step:7034 train loss:3.607137 +step:7035 train loss:3.658171 +step:7036 train loss:3.578182 +step:7037 train loss:3.605759 +step:7038 train loss:3.564286 +step:7039 train loss:3.619293 +step:7040 train loss:3.537255 +step:7041 train loss:3.627305 +step:7042 train loss:3.562649 +step:7043 train loss:3.533605 +step:7044 train loss:3.580061 +step:7045 train loss:3.581244 +step:7046 train loss:3.569292 +step:7047 train loss:3.612661 +step:7048 train loss:3.559346 +step:7049 train loss:3.568924 +step:7050 train loss:3.593157 +step:7051 train loss:3.610016 +step:7052 train loss:3.611997 +step:7053 train loss:3.572385 +step:7054 train loss:3.557055 +step:7055 train loss:3.621992 +step:7056 train loss:3.623449 +step:7057 train loss:3.548163 +step:7058 train loss:3.665995 +step:7059 train loss:3.572313 +step:7060 train loss:3.583631 +step:7061 train loss:3.562206 +step:7062 train loss:3.580265 +step:7063 train loss:3.639526 +step:7064 train loss:3.564268 +step:7065 train loss:3.613477 +step:7066 train loss:3.571363 +step:7067 train loss:3.609609 +step:7068 train loss:3.583831 +step:7069 train loss:3.547243 +step:7070 train loss:3.574449 +step:7071 train loss:3.542102 +step:7072 train loss:3.544654 +step:7073 train loss:3.537653 +step:7074 train loss:3.534450 +step:7075 train loss:3.552154 +step:7076 train loss:3.562755 +step:7077 train loss:3.574135 +step:7078 train loss:3.617790 +step:7079 train loss:3.629388 +step:7080 train loss:3.572713 +step:7081 train loss:3.594692 +step:7082 train loss:3.564735 +step:7083 train loss:3.590512 +step:7084 train loss:3.583730 +step:7085 train loss:3.543920 +step:7086 train loss:3.583793 +step:7087 train loss:3.560135 +step:7088 train loss:3.680614 +step:7089 train loss:3.578744 +step:7090 train loss:3.541180 +step:7091 train loss:3.555341 
+step:7092 train loss:3.536932 +step:7093 train loss:3.629539 +step:7094 train loss:3.550174 +step:7095 train loss:3.566805 +step:7096 train loss:3.587826 +step:7097 train loss:3.573605 +step:7098 train loss:3.597717 +step:7099 train loss:3.552276 +step:7100 train loss:3.585049 +step:7101 train loss:3.653148 +step:7102 train loss:3.543962 +step:7103 train loss:3.569475 +step:7104 train loss:3.599232 +step:7105 train loss:3.580018 +step:7106 train loss:3.564900 +step:7107 train loss:3.599075 +step:7108 train loss:3.668798 +step:7109 train loss:3.595628 +step:7110 train loss:3.623976 +step:7111 train loss:3.601516 +step:7112 train loss:3.589919 +step:7113 train loss:3.589380 +step:7114 train loss:3.606464 +step:7115 train loss:3.646954 +step:7116 train loss:3.574976 +step:7117 train loss:3.612185 +step:7118 train loss:3.623881 +step:7119 train loss:3.584719 +step:7120 train loss:3.643148 +step:7121 train loss:3.556652 +step:7122 train loss:3.560224 +step:7123 train loss:3.501520 +step:7124 train loss:3.654662 +step:7125 train loss:3.512510 +step:7126 train loss:3.675833 +step:7127 train loss:3.638646 +step:7128 train loss:3.575873 +step:7129 train loss:3.584396 +step:7130 train loss:3.573134 +step:7131 train loss:3.514850 +step:7132 train loss:3.553872 +step:7133 train loss:3.604979 +step:7134 train loss:3.534023 +step:7135 train loss:3.592149 +step:7136 train loss:3.575778 +step:7137 train loss:3.554493 +step:7138 train loss:3.542609 +step:7139 train loss:3.545977 +step:7140 train loss:3.581464 +step:7141 train loss:3.578731 +step:7142 train loss:3.573918 +step:7143 train loss:3.611526 +step:7144 train loss:3.559808 +step:7145 train loss:3.575021 +step:7146 train loss:3.585881 +step:7147 train loss:3.605010 +step:7148 train loss:3.609106 +step:7149 train loss:3.613976 +step:7150 train loss:3.588790 +step:7151 train loss:3.556096 +step:7152 train loss:3.529955 +step:7153 train loss:3.561411 +step:7154 train loss:3.582025 +step:7155 train loss:3.600401 +step:7156 train loss:3.567934 +step:7157 train loss:3.587355 +step:7158 train loss:3.545295 +step:7159 train loss:3.597505 +step:7160 train loss:3.608051 +step:7161 train loss:3.559307 +step:7162 train loss:3.606662 +step:7163 train loss:3.542421 +step:7164 train loss:3.579328 +step:7165 train loss:3.583849 +step:7166 train loss:3.636758 +step:7167 train loss:3.620838 +step:7168 train loss:3.593961 +step:7169 train loss:3.572268 +step:7170 train loss:3.600922 +step:7171 train loss:3.547199 +step:7172 train loss:3.712535 +step:7173 train loss:3.557698 +step:7174 train loss:3.596977 +step:7175 train loss:3.574088 +step:7176 train loss:3.582475 +step:7177 train loss:3.598140 +step:7178 train loss:3.596184 +step:7179 train loss:3.581299 +step:7180 train loss:3.584557 +step:7181 train loss:3.610243 +step:7182 train loss:3.565006 +step:7183 train loss:3.634655 +step:7184 train loss:3.726128 +step:7185 train loss:3.639885 +step:7186 train loss:3.575887 +step:7187 train loss:3.589755 +step:7188 train loss:3.577069 +step:7189 train loss:3.576577 +step:7190 train loss:3.579268 +step:7191 train loss:3.570910 +step:7192 train loss:3.605167 +step:7193 train loss:3.523935 +step:7194 train loss:3.585983 +step:7195 train loss:3.562936 +step:7196 train loss:3.610445 +step:7197 train loss:3.589242 +step:7198 train loss:3.646276 +step:7199 train loss:3.601791 +step:7200 train loss:3.599024 +step:7201 train loss:3.605688 +step:7202 train loss:3.581915 +step:7203 train loss:3.598882 +step:7204 train loss:3.568066 +step:7205 train loss:3.522635 +step:7206 train 
loss:3.552721 +step:7207 train loss:3.732051 +step:7208 train loss:3.564686 +step:7209 train loss:3.646776 +step:7210 train loss:3.580711 +step:7211 train loss:3.614343 +step:7212 train loss:3.689374 +step:7213 train loss:3.541830 +step:7214 train loss:3.612985 +step:7215 train loss:3.578398 +step:7216 train loss:3.629926 +step:7217 train loss:3.589678 +step:7218 train loss:3.675794 +step:7219 train loss:3.584963 +step:7220 train loss:3.664716 +step:7221 train loss:3.541562 +step:7222 train loss:3.624622 +step:7223 train loss:3.543522 +step:7224 train loss:3.604612 +step:7225 train loss:3.581837 +step:7226 train loss:3.551044 +step:7227 train loss:3.569182 +step:7228 train loss:3.555419 +step:7229 train loss:3.563339 +step:7230 train loss:3.545717 +step:7231 train loss:3.678007 +step:7232 train loss:3.548938 +step:7233 train loss:3.619578 +step:7234 train loss:3.607041 +step:7235 train loss:3.577497 +step:7236 train loss:3.617832 +step:7237 train loss:3.568673 +step:7238 train loss:3.608057 +step:7239 train loss:3.563434 +step:7240 train loss:3.558253 +step:7241 train loss:3.573188 +step:7242 train loss:3.553660 +step:7243 train loss:3.597939 +step:7244 train loss:3.573678 +step:7245 train loss:3.576943 +step:7246 train loss:3.617544 +step:7247 train loss:3.573800 +step:7248 train loss:3.612779 +step:7249 train loss:3.561392 +step:7250 validation loss:3.512308 +step:7250 train loss:3.584231 +step:7251 train loss:3.629436 +step:7252 train loss:3.547051 +step:7253 train loss:3.632410 +step:7254 train loss:3.571508 +step:7255 train loss:3.539454 +step:7256 train loss:3.581908 +step:7257 train loss:3.625896 +step:7258 train loss:3.580911 +step:7259 train loss:3.565316 +step:7260 train loss:3.653220 +step:7261 train loss:3.607938 +step:7262 train loss:3.561382 +step:7263 train loss:3.602819 +step:7264 train loss:3.591444 +step:7265 train loss:3.493000 +step:7266 train loss:3.618531 +step:7267 train loss:3.535675 +step:7268 train loss:3.599563 +step:7269 train loss:3.605886 +step:7270 train loss:3.560450 +step:7271 train loss:3.577971 +step:7272 train loss:3.581798 +step:7273 train loss:3.579014 +step:7274 train loss:3.555333 +step:7275 train loss:3.628136 +step:7276 train loss:3.536124 +step:7277 train loss:3.579959 +step:7278 train loss:3.552597 +step:7279 train loss:3.534565 +step:7280 train loss:3.600637 +step:7281 train loss:3.622742 +step:7282 train loss:3.624226 +step:7283 train loss:3.512328 +step:7284 train loss:3.558746 +step:7285 train loss:3.584873 +step:7286 train loss:3.717100 +step:7287 train loss:3.625727 +step:7288 train loss:3.580303 +step:7289 train loss:3.583041 +step:7290 train loss:3.632005 +step:7291 train loss:3.593019 +step:7292 train loss:3.664519 +step:7293 train loss:3.558921 +step:7294 train loss:3.646830 +step:7295 train loss:3.537193 +step:7296 train loss:3.533011 +step:7297 train loss:3.578923 +step:7298 train loss:3.554364 +step:7299 train loss:3.593948 +step:7300 train loss:3.582735 +step:7301 train loss:3.533391 +step:7302 train loss:3.677294 +step:7303 train loss:3.569162 +step:7304 train loss:3.511078 +step:7305 train loss:3.588368 +step:7306 train loss:3.618708 +step:7307 train loss:3.623398 +step:7308 train loss:3.574809 +step:7309 train loss:3.537209 +step:7310 train loss:3.567174 +step:7311 train loss:3.552062 +step:7312 train loss:3.589903 +step:7313 train loss:3.631629 +step:7314 train loss:3.525318 +step:7315 train loss:3.521080 +step:7316 train loss:3.661243 +step:7317 train loss:3.601840 +step:7318 train loss:3.542670 +step:7319 train loss:3.568561 
+step:7320 train loss:3.600995 +step:7321 train loss:3.628094 +step:7322 train loss:3.506693 +step:7323 train loss:3.563677 +step:7324 train loss:3.589281 +step:7325 train loss:3.552408 +step:7326 train loss:3.580386 +step:7327 train loss:3.556111 +step:7328 train loss:3.677847 +step:7329 train loss:3.518730 +step:7330 train loss:3.575312 +step:7331 train loss:3.568885 +step:7332 train loss:3.611018 +step:7333 train loss:3.593037 +step:7334 train loss:3.561113 +step:7335 train loss:3.557475 +step:7336 train loss:3.809303 +step:7337 train loss:3.597979 +step:7338 train loss:3.589670 +step:7339 train loss:3.603884 +step:7340 train loss:3.587936 +step:7341 train loss:3.582732 +step:7342 train loss:3.573933 +step:7343 train loss:3.586810 +step:7344 train loss:3.663616 +step:7345 train loss:3.523910 +step:7346 train loss:3.560793 +step:7347 train loss:3.554579 +step:7348 train loss:3.558007 +step:7349 train loss:3.656496 +step:7350 train loss:3.643035 +step:7351 train loss:3.575994 +step:7352 train loss:3.605576 +step:7353 train loss:3.584904 +step:7354 train loss:3.537276 +step:7355 train loss:3.719853 +step:7356 train loss:3.691885 +step:7357 train loss:3.621739 +step:7358 train loss:3.595951 +step:7359 train loss:3.564218 +step:7360 train loss:3.574738 +step:7361 train loss:3.528021 +step:7362 train loss:3.573249 +step:7363 train loss:3.588724 +step:7364 train loss:3.620619 +step:7365 train loss:3.605729 +step:7366 train loss:3.568957 +step:7367 train loss:3.648035 +step:7368 train loss:3.626667 +step:7369 train loss:3.617183 +step:7370 train loss:3.584234 +step:7371 train loss:3.544924 +step:7372 train loss:3.600394 +step:7373 train loss:3.621546 +step:7374 train loss:3.716732 +step:7375 train loss:3.543816 +step:7376 train loss:3.562794 +step:7377 train loss:3.605358 +step:7378 train loss:3.560436 +step:7379 train loss:3.680649 +step:7380 train loss:3.642210 +step:7381 train loss:3.610693 +step:7382 train loss:3.572724 +step:7383 train loss:3.666572 +step:7384 train loss:3.608328 +step:7385 train loss:3.566181 +step:7386 train loss:3.572586 +step:7387 train loss:3.614010 +step:7388 train loss:3.645438 +step:7389 train loss:3.588762 +step:7390 train loss:3.536273 +step:7391 train loss:3.567461 +step:7392 train loss:3.623162 +step:7393 train loss:3.590961 +step:7394 train loss:3.633198 +step:7395 train loss:3.519981 +step:7396 train loss:3.622794 +step:7397 train loss:3.552720 +step:7398 train loss:3.562549 +step:7399 train loss:3.613784 +step:7400 train loss:3.616473 +step:7401 train loss:3.530991 +step:7402 train loss:3.652134 +step:7403 train loss:3.532042 +step:7404 train loss:3.603151 +step:7405 train loss:3.725807 +step:7406 train loss:3.551745 +step:7407 train loss:3.599457 +step:7408 train loss:3.598485 +step:7409 train loss:3.574269 +step:7410 train loss:3.740104 +step:7411 train loss:3.586871 +step:7412 train loss:3.594917 +step:7413 train loss:3.642204 +step:7414 train loss:3.550100 +step:7415 train loss:3.611830 +step:7416 train loss:3.492619 +step:7417 train loss:3.610975 +step:7418 train loss:3.597646 +step:7419 train loss:3.563908 +step:7420 train loss:3.553786 +step:7421 train loss:3.591464 +step:7422 train loss:3.546749 +step:7423 train loss:3.685142 +step:7424 train loss:3.748739 +step:7425 train loss:3.638436 +step:7426 train loss:3.602637 +step:7427 train loss:3.572205 +step:7428 train loss:3.598622 +step:7429 train loss:3.610604 +step:7430 train loss:3.535347 +step:7431 train loss:3.540492 +step:7432 train loss:3.552763 +step:7433 train loss:3.646482 +step:7434 train 
loss:3.562196 +step:7435 train loss:3.649162 +step:7436 train loss:3.688729 +step:7437 train loss:3.511190 +step:7438 train loss:3.571629 +step:7439 train loss:3.583916 +step:7440 train loss:3.553998 +step:7441 train loss:3.521797 +step:7442 train loss:3.752831 +step:7443 train loss:3.574112 +step:7444 train loss:3.619555 +step:7445 train loss:3.549495 +step:7446 train loss:3.572188 +step:7447 train loss:3.499299 +step:7448 train loss:3.552695 +step:7449 train loss:3.567773 +step:7450 train loss:3.603346 +step:7451 train loss:3.632669 +step:7452 train loss:3.559206 +step:7453 train loss:3.587234 +step:7454 train loss:3.570776 +step:7455 train loss:3.581182 +step:7456 train loss:3.553201 +step:7457 train loss:3.560912 +step:7458 train loss:3.601662 +step:7459 train loss:3.577981 +step:7460 train loss:3.585754 +step:7461 train loss:3.622010 +step:7462 train loss:3.555199 +step:7463 train loss:3.620795 +step:7464 train loss:3.544084 +step:7465 train loss:3.554730 +step:7466 train loss:3.553649 +step:7467 train loss:3.566288 +step:7468 train loss:3.614466 +step:7469 train loss:3.545131 +step:7470 train loss:3.576333 +step:7471 train loss:3.568163 +step:7472 train loss:3.602056 +step:7473 train loss:3.542233 +step:7474 train loss:3.527249 +step:7475 train loss:3.557092 +step:7476 train loss:3.594470 +step:7477 train loss:3.572938 +step:7478 train loss:3.577011 +step:7479 train loss:3.589067 +step:7480 train loss:3.869186 +step:7481 train loss:3.516996 +step:7482 train loss:3.588662 +step:7483 train loss:3.582543 +step:7484 train loss:3.600898 +step:7485 train loss:3.587210 +step:7486 train loss:3.611653 +step:7487 train loss:3.606041 +step:7488 train loss:3.622792 +step:7489 train loss:3.620138 +step:7490 train loss:3.565779 +step:7491 train loss:3.588145 +step:7492 train loss:3.695402 +step:7493 train loss:3.668652 +step:7494 train loss:3.690265 +step:7495 train loss:3.562251 +step:7496 train loss:3.548326 +step:7497 train loss:3.647872 +step:7498 train loss:3.579386 +step:7499 train loss:3.619023 +step:7500 validation loss:3.511962 total_sharp:5.3410e-05 L1_sharp:1.4941e-05 L2_sharp:2.8999e-06 L3_sharp:3.0192e-06 L4_sharp:6.9928e-06 L5_sharp:6.6897e-06 L6_sharp:4.8413e-06 L7_sharp:5.8667e-06 L8_sharp:1.7002e-05 L9_sharp:1.6451e-05 L10_sharp:1.9637e-05 L11_sharp:1.4681e-05 L12_sharp:7.2971e-05 total_fnorm:2.1278e+01 total_l1_linf:1.8644e+05 total_spectral:2.1278e+01 L1_fnorm:4.8640e+00 L2_fnorm:4.7342e+00 L3_fnorm:4.6985e+00 L4_fnorm:4.6903e+00 L5_fnorm:4.9062e+00 L6_fnorm:5.0241e+00 L7_fnorm:5.1162e+00 L8_fnorm:5.0683e+00 L9_fnorm:5.0551e+00 L10_fnorm:5.0478e+00 L11_fnorm:5.1233e+00 L12_fnorm:4.8160e+00 L1_l1linf:4.6596e+00 L2_l1linf:4.8490e+00 L3_l1linf:4.7750e+00 L4_l1linf:4.8802e+00 L5_l1linf:4.9586e+00 L6_l1linf:4.8849e+00 L7_l1linf:5.1148e+00 L8_l1linf:5.3496e+00 L9_l1linf:5.4894e+00 L10_l1linf:5.5069e+00 L11_l1linf:5.6973e+00 L12_l1linf:5.3535e+00 L1_spectral:8.9526e-01 L2_spectral:5.8938e-01 L3_spectral:5.7375e-01 L4_spectral:6.0616e-01 L5_spectral:5.2588e-01 L6_spectral:4.7526e-01 L7_spectral:5.3130e-01 L8_spectral:6.3339e-01 L9_spectral:7.0563e-01 L10_spectral:7.4294e-01 L11_spectral:7.3032e-01 L12_spectral:7.8122e-01 ip_v_neg_g:1.1298e-02 cos_v_neg_g:1.3853e-03 v_norm:2.1278e+01 g_norm:3.8326e-01 hv_norm:1.8268e-01 cos_v_hv:6.2213e-03 hg_norm:3.0707e+00 cos_g_hg:5.3824e-01 v_par:4.8744e-03 v_perp:2.1278e+01 L1_cos_v_neg_g:7.5326e-03 L1_v_norm:4.8640e+00 L2_cos_v_neg_g:1.6673e-03 L2_v_norm:4.7342e+00 L3_cos_v_neg_g:2.6435e-03 L3_v_norm:4.6985e+00 L4_cos_v_neg_g:2.5325e-03 
L4_v_norm:4.6903e+00 L5_cos_v_neg_g:3.4897e-03 L5_v_norm:4.9062e+00 L6_cos_v_neg_g:3.3664e-03 L6_v_norm:5.0241e+00 L7_cos_v_neg_g:4.0110e-03 L7_v_norm:5.1162e+00 L8_cos_v_neg_g:5.3894e-03 L8_v_norm:5.0683e+00 L9_cos_v_neg_g:5.8453e-03 L9_v_norm:5.0551e+00 L10_cos_v_neg_g:6.3987e-03 L10_v_norm:5.0478e+00 L11_cos_v_neg_g:5.5061e-03 L11_v_norm:5.1233e+00 L12_cos_v_neg_g:1.1218e-02 L12_v_norm:4.8160e+00 +step:7500 train loss:3.564572 +step:7501 train loss:3.556507 +step:7502 train loss:3.544572 +step:7503 train loss:3.524979 +step:7504 train loss:3.546582 +step:7505 train loss:3.536562 +step:7506 train loss:3.594583 +step:7507 train loss:3.513729 +step:7508 train loss:3.582989 +step:7509 train loss:3.554338 +step:7510 train loss:3.585525 +step:7511 train loss:3.590382 +step:7512 train loss:3.849762 +step:7513 train loss:3.544542 +step:7514 train loss:3.575818 +step:7515 train loss:3.540075 +step:7516 train loss:3.551398 +step:7517 train loss:3.585968 +step:7518 train loss:3.564288 +step:7519 train loss:3.574887 +step:7520 train loss:3.638254 +step:7521 train loss:3.528629 +step:7522 train loss:3.583899 +step:7523 train loss:3.615934 +step:7524 train loss:3.564070 +step:7525 train loss:3.569164 +step:7526 train loss:3.518649 +step:7527 train loss:3.525773 +step:7528 train loss:3.620931 +step:7529 train loss:3.599544 +step:7530 train loss:3.548802 +step:7531 train loss:3.619711 +step:7532 train loss:3.606838 +step:7533 train loss:3.534848 +step:7534 train loss:3.598843 +step:7535 train loss:3.603332 +step:7536 train loss:3.634533 +step:7537 train loss:3.652293 +step:7538 train loss:3.677357 +step:7539 train loss:3.580804 +step:7540 train loss:3.567757 +step:7541 train loss:3.619016 +step:7542 train loss:3.582050 +step:7543 train loss:3.537938 +step:7544 train loss:3.579998 +step:7545 train loss:3.567052 +step:7546 train loss:3.523944 +step:7547 train loss:3.569247 +step:7548 train loss:3.583247 +step:7549 train loss:3.563920 +step:7550 train loss:3.565136 +step:7551 train loss:3.661416 +step:7552 train loss:3.577626 +step:7553 train loss:3.611916 +step:7554 train loss:3.540542 +step:7555 train loss:3.630633 +step:7556 train loss:3.532648 +step:7557 train loss:3.629748 +step:7558 train loss:3.618917 +step:7559 train loss:3.573696 +step:7560 train loss:3.669815 +step:7561 train loss:3.639770 +step:7562 train loss:3.543150 +step:7563 train loss:3.539803 +step:7564 train loss:3.595057 +step:7565 train loss:3.612593 +step:7566 train loss:3.600100 +step:7567 train loss:3.619970 +step:7568 train loss:3.563686 +step:7569 train loss:3.623854 +step:7570 train loss:3.606410 +step:7571 train loss:3.690828 +step:7572 train loss:3.535895 +step:7573 train loss:3.606735 +step:7574 train loss:3.568496 +step:7575 train loss:3.563060 +step:7576 train loss:3.570354 +step:7577 train loss:3.585939 +step:7578 train loss:3.642416 +step:7579 train loss:3.577906 +step:7580 train loss:3.567423 +step:7581 train loss:3.552005 +step:7582 train loss:3.608982 +step:7583 train loss:3.544508 +step:7584 train loss:3.528855 +step:7585 train loss:3.500711 +step:7586 train loss:3.533800 +step:7587 train loss:3.597872 +step:7588 train loss:3.726251 +step:7589 train loss:3.546265 +step:7590 train loss:3.613694 +step:7591 train loss:3.618533 +step:7592 train loss:3.577483 +step:7593 train loss:3.601393 +step:7594 train loss:3.602523 +step:7595 train loss:3.568989 +step:7596 train loss:3.623098 +step:7597 train loss:3.526655 +step:7598 train loss:3.587148 +step:7599 train loss:3.583013 +step:7600 train loss:3.540350 +step:7601 train 
loss:3.653159 +step:7602 train loss:3.596431 +step:7603 train loss:3.552381 +step:7604 train loss:3.698791 +step:7605 train loss:3.590072 +step:7606 train loss:3.621749 +step:7607 train loss:3.574183 +step:7608 train loss:3.582591 +step:7609 train loss:3.615748 +step:7610 train loss:3.576863 +step:7611 train loss:3.553564 +step:7612 train loss:3.496444 +step:7613 train loss:3.544308 +step:7614 train loss:3.614297 +step:7615 train loss:3.574257 +step:7616 train loss:3.642281 +step:7617 train loss:3.540411 +step:7618 train loss:3.629892 +step:7619 train loss:3.569899 +step:7620 train loss:3.555861 +step:7621 train loss:3.506086 +step:7622 train loss:3.778448 +step:7623 train loss:3.797403 +step:7624 train loss:3.612076 +step:7625 train loss:3.647427 +step:7626 train loss:3.565090 +step:7627 train loss:3.637343 +step:7628 train loss:3.520368 +step:7629 train loss:3.578247 +step:7630 train loss:3.592102 +step:7631 train loss:3.572551 +step:7632 train loss:3.621609 +step:7633 train loss:3.690846 +step:7634 train loss:3.649576 +step:7635 train loss:3.552960 +step:7636 train loss:3.583730 +step:7637 train loss:3.530912 +step:7638 train loss:3.637115 +step:7639 train loss:3.568318 +step:7640 train loss:3.548335 +step:7641 train loss:3.577999 +step:7642 train loss:3.915984 +step:7643 train loss:3.668551 +step:7644 train loss:3.593442 +step:7645 train loss:3.583229 +step:7646 train loss:3.570180 +step:7647 train loss:3.562695 +step:7648 train loss:3.596124 +step:7649 train loss:3.556594 +step:7650 train loss:3.605707 +step:7651 train loss:3.627640 +step:7652 train loss:3.504901 +step:7653 train loss:3.697629 +step:7654 train loss:3.556590 +step:7655 train loss:3.576682 +step:7656 train loss:3.551655 +step:7657 train loss:3.569042 +step:7658 train loss:3.520256 +step:7659 train loss:3.584085 +step:7660 train loss:3.517378 +step:7661 train loss:3.532821 +step:7662 train loss:3.533340 +step:7663 train loss:3.583427 +step:7664 train loss:3.542168 +step:7665 train loss:3.518048 +step:7666 train loss:3.626978 +step:7667 train loss:3.538347 +step:7668 train loss:3.648246 +step:7669 train loss:3.580927 +step:7670 train loss:3.536903 +step:7671 train loss:3.595623 +step:7672 train loss:3.611843 +step:7673 train loss:3.575009 +step:7674 train loss:3.614540 +step:7675 train loss:3.667577 +step:7676 train loss:3.638088 +step:7677 train loss:3.664628 +step:7678 train loss:3.602695 +step:7679 train loss:3.624460 +step:7680 train loss:3.632750 +step:7681 train loss:3.599635 +step:7682 train loss:3.567844 +step:7683 train loss:3.570508 +step:7684 train loss:3.544648 +step:7685 train loss:3.524181 +step:7686 train loss:3.643348 +step:7687 train loss:3.560602 +step:7688 train loss:3.526121 +step:7689 train loss:3.575571 +step:7690 train loss:3.541254 +step:7691 train loss:3.567562 +step:7692 train loss:3.602096 +step:7693 train loss:3.605546 +step:7694 train loss:3.659060 +step:7695 train loss:3.587760 +step:7696 train loss:3.563114 +step:7697 train loss:3.549014 +step:7698 train loss:3.610706 +step:7699 train loss:3.600526 +step:7700 train loss:3.503171 +step:7701 train loss:3.617328 +step:7702 train loss:3.559242 +step:7703 train loss:3.562629 +step:7704 train loss:3.615748 +step:7705 train loss:3.574309 +step:7706 train loss:3.513007 +step:7707 train loss:3.630472 +step:7708 train loss:3.573892 +step:7709 train loss:3.588247 +step:7710 train loss:3.651279 +step:7711 train loss:3.612995 +step:7712 train loss:3.555727 +step:7713 train loss:3.636171 +step:7714 train loss:3.583427 +step:7715 train loss:3.532891 
+step:7716 train loss:3.571080 +step:7717 train loss:3.596312 +step:7718 train loss:3.601926 +step:7719 train loss:3.556525 +step:7720 train loss:3.572881 +step:7721 train loss:3.615904 +step:7722 train loss:3.540527 +step:7723 train loss:3.911164 +step:7724 train loss:3.577864 +step:7725 train loss:3.482388 +step:7726 train loss:3.566981 +step:7727 train loss:3.593389 +step:7728 train loss:3.560291 +step:7729 train loss:3.556672 +step:7730 train loss:3.579928 +step:7731 train loss:3.607816 +step:7732 train loss:3.626734 +step:7733 train loss:3.537741 +step:7734 train loss:3.566120 +step:7735 train loss:3.653313 +step:7736 train loss:3.597584 +step:7737 train loss:3.615674 +step:7738 train loss:3.518703 +step:7739 train loss:3.593082 +step:7740 train loss:3.541781 +step:7741 train loss:3.579525 +step:7742 train loss:3.578086 +step:7743 train loss:3.530411 +step:7744 train loss:3.653711 +step:7745 train loss:3.546605 +step:7746 train loss:3.525629 +step:7747 train loss:3.616523 +step:7748 train loss:3.599022 +step:7749 train loss:3.522147 +step:7750 validation loss:3.508390 +step:7750 train loss:3.680947 +step:7751 train loss:3.561495 +step:7752 train loss:3.554535 +step:7753 train loss:3.559416 +step:7754 train loss:3.530392 +step:7755 train loss:3.594146 +step:7756 train loss:3.624739 +step:7757 train loss:3.573006 +step:7758 train loss:3.544636 +step:7759 train loss:3.569232 +step:7760 train loss:3.600927 +step:7761 train loss:3.588487 +step:7762 train loss:3.574229 +step:7763 train loss:3.562916 +step:7764 train loss:3.564297 +step:7765 train loss:3.521248 +step:7766 train loss:3.588322 +step:7767 train loss:3.588359 +step:7768 train loss:3.543838 +step:7769 train loss:3.609720 +step:7770 train loss:3.624958 +step:7771 train loss:3.599685 +step:7772 train loss:3.571254 +step:7773 train loss:3.631921 +step:7774 train loss:3.526869 +step:7775 train loss:3.514651 +step:7776 train loss:3.618750 +step:7777 train loss:3.574562 +step:7778 train loss:3.532102 +step:7779 train loss:3.575539 +step:7780 train loss:3.569614 +step:7781 train loss:3.579174 +step:7782 train loss:3.562785 +step:7783 train loss:3.540790 +step:7784 train loss:3.542597 +step:7785 train loss:3.585783 +step:7786 train loss:3.542140 +step:7787 train loss:3.619642 +step:7788 train loss:3.569788 +step:7789 train loss:3.509714 +step:7790 train loss:3.563739 +step:7791 train loss:3.598160 +step:7792 train loss:3.558101 +step:7793 train loss:3.582895 +step:7794 train loss:3.568356 +step:7795 train loss:3.600486 +step:7796 train loss:3.563761 +step:7797 train loss:3.579346 +step:7798 train loss:3.575275 +step:7799 train loss:3.563694 +step:7800 train loss:3.519388 +step:7801 train loss:3.584222 +step:7802 train loss:3.567384 +step:7803 train loss:3.614826 +step:7804 train loss:3.578606 +step:7805 train loss:3.574934 +step:7806 train loss:3.591020 +step:7807 train loss:3.664721 +step:7808 train loss:3.528232 +step:7809 train loss:3.504194 +step:7810 train loss:3.595621 +step:7811 train loss:3.527800 +step:7812 train loss:3.548538 +step:7813 train loss:3.632604 +step:7814 train loss:3.705685 +step:7815 train loss:3.517180 +step:7816 train loss:3.604734 +step:7817 train loss:3.635106 +step:7818 train loss:3.529812 +step:7819 train loss:3.582561 +step:7820 train loss:3.625823 +step:7821 train loss:3.553644 +step:7822 train loss:3.514426 +step:7823 train loss:3.587875 +step:7824 train loss:3.575915 +step:7825 train loss:3.565545 +step:7826 train loss:3.560950 +step:7827 train loss:3.604311 +step:7828 train loss:3.594796 +step:7829 
train loss:3.544817 +step:7830 train loss:3.556904 +step:7831 train loss:3.561697 +step:7832 train loss:3.621763 +step:7833 train loss:3.602556 +step:7834 train loss:3.568777 +step:7835 train loss:3.589489 +step:7836 train loss:3.704578 +step:7837 train loss:3.588748 +step:7838 train loss:3.556449 +step:7839 train loss:3.518121 +step:7840 train loss:3.534285 +step:7841 train loss:3.625272 +step:7842 train loss:3.613752 +step:7843 train loss:3.671655 +step:7844 train loss:3.597768 +step:7845 train loss:3.576925 +step:7846 train loss:3.687347 +step:7847 train loss:3.575122 +step:7848 train loss:3.587556 +step:7849 train loss:3.601813 +step:7850 train loss:3.570804 +step:7851 train loss:3.594166 +step:7852 train loss:3.572267 +step:7853 train loss:3.542639 +step:7854 train loss:3.573416 +step:7855 train loss:3.574655 +step:7856 train loss:3.575331 +step:7857 train loss:3.561786 +step:7858 train loss:3.570096 +step:7859 train loss:3.580208 +step:7860 train loss:3.615908 +step:7861 train loss:3.599967 +step:7862 train loss:3.543524 +step:7863 train loss:3.649867 +step:7864 train loss:3.488467 +step:7865 train loss:3.563549 +step:7866 train loss:3.539541 +step:7867 train loss:3.583329 +step:7868 train loss:3.563006 +step:7869 train loss:3.568114 +step:7870 train loss:3.490949 +step:7871 train loss:3.557552 +step:7872 train loss:3.556451 +step:7873 train loss:3.628983 +step:7874 train loss:3.573383 +step:7875 train loss:3.576992 +step:7876 train loss:3.593153 +step:7877 train loss:3.547795 +step:7878 train loss:3.584131 +step:7879 train loss:3.916434 +step:7880 train loss:3.574330 +step:7881 train loss:3.599823 +step:7882 train loss:3.684207 +step:7883 train loss:3.494648 +step:7884 train loss:3.586868 +step:7885 train loss:3.568652 +step:7886 train loss:3.568618 +step:7887 train loss:3.560283 +step:7888 train loss:3.595369 +step:7889 train loss:3.640777 +step:7890 train loss:3.546692 +step:7891 train loss:3.597972 +step:7892 train loss:3.567506 +step:7893 train loss:3.543867 +step:7894 train loss:3.565158 +step:7895 train loss:3.545144 +step:7896 train loss:3.545100 +step:7897 train loss:3.569887 +step:7898 train loss:3.576550 +step:7899 train loss:3.564548 +step:7900 train loss:3.534893 +step:7901 train loss:3.524585 +step:7902 train loss:3.671630 +step:7903 train loss:3.519657 +step:7904 train loss:3.570083 +step:7905 train loss:3.637855 +step:7906 train loss:3.534834 +step:7907 train loss:3.561072 +step:7908 train loss:3.611165 +step:7909 train loss:3.665569 +step:7910 train loss:3.544006 +step:7911 train loss:3.562861 +step:7912 train loss:3.566216 +step:7913 train loss:3.543528 +step:7914 train loss:3.577106 +step:7915 train loss:3.679192 +step:7916 train loss:3.551162 +step:7917 train loss:3.610070 +step:7918 train loss:3.558157 +step:7919 train loss:3.543981 +step:7920 train loss:3.585798 +step:7921 train loss:3.586672 +step:7922 train loss:3.564821 +step:7923 train loss:3.608185 +step:7924 train loss:3.569275 +step:7925 train loss:3.592248 +step:7926 train loss:3.499428 +step:7927 train loss:3.778754 +step:7928 train loss:3.603549 +step:7929 train loss:3.567359 +step:7930 train loss:3.526886 +step:7931 train loss:3.551760 +step:7932 train loss:3.572332 +step:7933 train loss:3.587756 +step:7934 train loss:3.683162 +step:7935 train loss:3.603280 +step:7936 train loss:3.575878 +step:7937 train loss:3.522660 +step:7938 train loss:3.539021 +step:7939 train loss:3.587076 +step:7940 train loss:3.573156 +step:7941 train loss:3.601183 +step:7942 train loss:3.592219 +step:7943 train loss:3.600976 
+step:7944 train loss:3.519709 +step:7945 train loss:3.626554 +step:7946 train loss:3.573475 +step:7947 train loss:3.585030 +step:7948 train loss:3.545114 +step:7949 train loss:3.596713 +step:7950 train loss:3.655093 +step:7951 train loss:3.619293 +step:7952 train loss:3.766143 +step:7953 train loss:3.659353 +step:7954 train loss:3.560571 +step:7955 train loss:3.547742 +step:7956 train loss:3.551299 +step:7957 train loss:3.628448 +step:7958 train loss:3.635209 +step:7959 train loss:3.590803 +step:7960 train loss:3.656463 +step:7961 train loss:3.564134 +step:7962 train loss:3.532497 +step:7963 train loss:3.572861 +step:7964 train loss:3.569360 +step:7965 train loss:3.578166 +step:7966 train loss:3.549216 +step:7967 train loss:3.570487 +step:7968 train loss:3.583048 +step:7969 train loss:3.538283 +step:7970 train loss:3.508569 +step:7971 train loss:3.592786 +step:7972 train loss:3.569076 +step:7973 train loss:3.539598 +step:7974 train loss:3.581430 +step:7975 train loss:3.566656 +step:7976 train loss:3.586595 +step:7977 train loss:3.617338 +step:7978 train loss:3.637940 +step:7979 train loss:3.587657 +step:7980 train loss:3.490832 +step:7981 train loss:3.531733 +step:7982 train loss:3.577567 +step:7983 train loss:3.593735 +step:7984 train loss:3.635997 +step:7985 train loss:3.561176 +step:7986 train loss:3.583488 +step:7987 train loss:3.637917 +step:7988 train loss:3.612107 +step:7989 train loss:3.513651 +step:7990 train loss:3.530030 +step:7991 train loss:3.546161 +step:7992 train loss:3.571239 +step:7993 train loss:3.551412 +step:7994 train loss:3.604398 +step:7995 train loss:3.605983 +step:7996 train loss:3.574056 +step:7997 train loss:3.592150 +step:7998 train loss:3.617261 +step:7999 train loss:3.547092 +step:8000 validation loss:3.496602 total_sharp:5.0551e-05 L1_sharp:1.1862e-05 L2_sharp:1.4057e-06 L3_sharp:6.2895e-06 L4_sharp:9.9365e-06 L5_sharp:5.9956e-06 L6_sharp:4.1436e-06 L7_sharp:5.3118e-06 L8_sharp:1.0885e-05 L9_sharp:1.4400e-05 L10_sharp:1.6832e-05 L11_sharp:1.3272e-05 L12_sharp:7.3962e-05 total_fnorm:2.1359e+01 total_l1_linf:1.8759e+05 total_spectral:2.1359e+01 L1_fnorm:4.9258e+00 L2_fnorm:4.7930e+00 L3_fnorm:4.8722e+00 L4_fnorm:4.7988e+00 L5_fnorm:4.9740e+00 L6_fnorm:5.0577e+00 L7_fnorm:5.1391e+00 L8_fnorm:5.0812e+00 L9_fnorm:5.0690e+00 L10_fnorm:5.0648e+00 L11_fnorm:5.1029e+00 L12_fnorm:4.8449e+00 L1_l1linf:4.5859e+00 L2_l1linf:4.8380e+00 L3_l1linf:4.9443e+00 L4_l1linf:4.7601e+00 L5_l1linf:5.0131e+00 L6_l1linf:5.0712e+00 L7_l1linf:5.3824e+00 L8_l1linf:5.6807e+00 L9_l1linf:5.3907e+00 L10_l1linf:5.8326e+00 L11_l1linf:6.0767e+00 L12_l1linf:6.4610e+00 L1_spectral:9.0082e-01 L2_spectral:5.9971e-01 L3_spectral:5.9603e-01 L4_spectral:6.5461e-01 L5_spectral:5.3328e-01 L6_spectral:4.6453e-01 L7_spectral:5.3860e-01 L8_spectral:6.3211e-01 L9_spectral:6.9888e-01 L10_spectral:7.6198e-01 L11_spectral:7.7216e-01 L12_spectral:8.6484e-01 ip_v_neg_g:8.9524e-03 cos_v_neg_g:1.2537e-03 v_norm:2.1359e+01 g_norm:3.3432e-01 hv_norm:1.3130e-01 cos_v_hv:8.2230e-03 hg_norm:2.0125e+00 cos_g_hg:4.6325e-01 v_par:5.2929e-03 v_perp:2.1359e+01 L1_cos_v_neg_g:5.8619e-03 L1_v_norm:4.9258e+00 L2_cos_v_neg_g:2.2552e-03 L2_v_norm:4.7930e+00 L3_cos_v_neg_g:2.1657e-03 L3_v_norm:4.8722e+00 L4_cos_v_neg_g:3.6605e-03 L4_v_norm:4.7988e+00 L5_cos_v_neg_g:3.0231e-03 L5_v_norm:4.9740e+00 L6_cos_v_neg_g:3.6830e-03 L6_v_norm:5.0577e+00 L7_cos_v_neg_g:3.2977e-03 L7_v_norm:5.1391e+00 L8_cos_v_neg_g:4.1522e-03 L8_v_norm:5.0812e+00 L9_cos_v_neg_g:4.2903e-03 L9_v_norm:5.0690e+00 L10_cos_v_neg_g:4.4943e-03 L10_v_norm:5.0648e+00 
L11_cos_v_neg_g:6.0033e-03 L11_v_norm:5.1029e+00 L12_cos_v_neg_g:1.1348e-02 L12_v_norm:4.8449e+00 +step:8000 train loss:3.613741 +step:8001 train loss:3.576591 +step:8002 train loss:3.595976 +step:8003 train loss:3.612761 +step:8004 train loss:3.590724 +step:8005 train loss:3.508724 +step:8006 train loss:3.588397 +step:8007 train loss:3.556350 +step:8008 train loss:3.581253 +step:8009 train loss:3.657750 +step:8010 train loss:3.889302 +step:8011 train loss:3.549638 +step:8012 train loss:3.626884 +step:8013 train loss:3.579107 +step:8014 train loss:3.591004 +step:8015 train loss:3.589801 +step:8016 train loss:3.577288 +step:8017 train loss:3.600651 +step:8018 train loss:3.561701 +step:8019 train loss:3.529325 +step:8020 train loss:3.564168 +step:8021 train loss:3.639755 +step:8022 train loss:3.555904 +step:8023 train loss:3.586466 +step:8024 train loss:3.468541 +step:8025 train loss:3.568005 +step:8026 train loss:3.575025 +step:8027 train loss:3.582678 +step:8028 train loss:3.639789 +step:8029 train loss:3.567752 +step:8030 train loss:3.526902 +step:8031 train loss:3.584342 +step:8032 train loss:3.570725 +step:8033 train loss:3.522434 +step:8034 train loss:3.573631 +step:8035 train loss:3.552534 +step:8036 train loss:3.538934 +step:8037 train loss:3.508858 +step:8038 train loss:3.521951 +step:8039 train loss:3.616950 +step:8040 train loss:3.549450 +step:8041 train loss:3.548748 +step:8042 train loss:3.587704 +step:8043 train loss:3.526752 +step:8044 train loss:3.541193 +step:8045 train loss:3.606624 +step:8046 train loss:3.534573 +step:8047 train loss:3.540127 +step:8048 train loss:3.570817 +step:8049 train loss:3.617740 +step:8050 train loss:3.557145 +step:8051 train loss:3.534803 +step:8052 train loss:3.594175 +step:8053 train loss:3.544269 +step:8054 train loss:3.580491 +step:8055 train loss:3.609614 +step:8056 train loss:3.576409 +step:8057 train loss:3.655994 +step:8058 train loss:3.560200 +step:8059 train loss:3.619010 +step:8060 train loss:3.589343 +step:8061 train loss:3.479305 +step:8062 train loss:3.622209 +step:8063 train loss:3.582090 +step:8064 train loss:3.541110 +step:8065 train loss:3.605571 +step:8066 train loss:3.562864 +step:8067 train loss:3.627923 +step:8068 train loss:3.550008 +step:8069 train loss:3.580650 +step:8070 train loss:3.540566 +step:8071 train loss:3.550179 +step:8072 train loss:3.590648 +step:8073 train loss:3.542916 +step:8074 train loss:3.554460 +step:8075 train loss:3.540952 +step:8076 train loss:3.588383 +step:8077 train loss:3.593360 +step:8078 train loss:3.536670 +step:8079 train loss:3.557576 +step:8080 train loss:3.543930 +step:8081 train loss:3.561852 +step:8082 train loss:3.576771 +step:8083 train loss:3.481151 +step:8084 train loss:3.615581 +step:8085 train loss:3.488868 +step:8086 train loss:3.616138 +step:8087 train loss:3.510322 +step:8088 train loss:3.559493 +step:8089 train loss:3.592436 +step:8090 train loss:3.616246 +step:8091 train loss:3.559308 +step:8092 train loss:3.540405 +step:8093 train loss:3.547066 +step:8094 train loss:3.550992 +step:8095 train loss:3.577420 +step:8096 train loss:3.578971 +step:8097 train loss:3.505450 +step:8098 train loss:3.519373 +step:8099 train loss:3.511787 +step:8100 train loss:3.570081 +step:8101 train loss:3.640841 +step:8102 train loss:3.586461 +step:8103 train loss:3.532535 +step:8104 train loss:3.584067 +step:8105 train loss:3.579072 +step:8106 train loss:3.541353 +step:8107 train loss:3.523088 +step:8108 train loss:3.539832 +step:8109 train loss:3.535455 +step:8110 train loss:3.597589 +step:8111 
train loss:3.520662 +step:8112 train loss:3.540515 +step:8113 train loss:3.527791 +step:8114 train loss:3.474972 +step:8115 train loss:3.527427 +step:8116 train loss:3.562587 +step:8117 train loss:3.532163 +step:8118 train loss:3.526494 +step:8119 train loss:3.566562 +step:8120 train loss:3.516133 +step:8121 train loss:3.575970 +step:8122 train loss:3.557645 +step:8123 train loss:3.562072 +step:8124 train loss:3.523948 +step:8125 train loss:3.509192 +step:8126 train loss:3.499504 +step:8127 train loss:3.594882 +step:8128 train loss:3.599978 +step:8129 train loss:3.519248 +step:8130 train loss:3.550739 +step:8131 train loss:3.517764 +step:8132 train loss:3.587369 +step:8133 train loss:3.510679 +step:8134 train loss:3.546478 +step:8135 train loss:3.536196 +step:8136 train loss:3.547764 +step:8137 train loss:3.613045 +step:8138 train loss:3.520308 +step:8139 train loss:3.592475 +step:8140 train loss:3.524249 +step:8141 train loss:3.546552 +step:8142 train loss:3.525886 +step:8143 train loss:3.575720 +step:8144 train loss:3.555277 +step:8145 train loss:3.524113 +step:8146 train loss:3.531594 +step:8147 train loss:3.551359 +step:8148 train loss:3.642339 +step:8149 train loss:3.557393 +step:8150 train loss:3.537720 +step:8151 train loss:3.527305 +step:8152 train loss:3.629149 +step:8153 train loss:3.504746 +step:8154 train loss:3.522291 +step:8155 train loss:3.546933 +step:8156 train loss:3.529523 +step:8157 train loss:3.547862 +step:8158 train loss:3.564906 +step:8159 train loss:3.576885 +step:8160 train loss:3.530063 +step:8161 train loss:3.572112 +step:8162 train loss:3.501009 +step:8163 train loss:3.562039 +step:8164 train loss:3.551064 +step:8165 train loss:3.601871 +step:8166 train loss:3.602571 +step:8167 train loss:3.512107 +step:8168 train loss:3.489759 +step:8169 train loss:3.538124 +step:8170 train loss:3.483402 +step:8171 train loss:3.548456 +step:8172 train loss:3.547632 +step:8173 train loss:3.548693 +step:8174 train loss:3.556046 +step:8175 train loss:3.517790 +step:8176 train loss:3.515345 +step:8177 train loss:3.558082 +step:8178 train loss:3.648371 +step:8179 train loss:3.553266 +step:8180 train loss:3.573981 +step:8181 train loss:3.576097 +step:8182 train loss:3.538093 +step:8183 train loss:3.523566 +step:8184 train loss:3.516677 +step:8185 train loss:3.556888 +step:8186 train loss:3.556861 +step:8187 train loss:3.568423 +step:8188 train loss:3.496952 +step:8189 train loss:3.644337 +step:8190 train loss:3.576478 +step:8191 train loss:3.581227 +step:8192 train loss:3.688490 +step:8193 train loss:3.560247 +step:8194 train loss:3.494244 +step:8195 train loss:3.592353 +step:8196 train loss:3.509429 +step:8197 train loss:3.538931 +step:8198 train loss:3.544595 +step:8199 train loss:3.547955 +step:8200 train loss:3.525385 +step:8201 train loss:3.643624 +step:8202 train loss:3.558918 +step:8203 train loss:3.579643 +step:8204 train loss:3.486633 +step:8205 train loss:3.496671 +step:8206 train loss:3.621453 +step:8207 train loss:3.545167 +step:8208 train loss:3.562938 +step:8209 train loss:3.608502 +step:8210 train loss:3.589279 +step:8211 train loss:3.525473 +step:8212 train loss:3.581553 +step:8213 train loss:3.591301 +step:8214 train loss:3.629752 +step:8215 train loss:3.604537 +step:8216 train loss:3.586995 +step:8217 train loss:3.562458 +step:8218 train loss:3.574184 +step:8219 train loss:3.706827 +step:8220 train loss:3.536162 +step:8221 train loss:3.558145 +step:8222 train loss:3.510198 +step:8223 train loss:3.529763 +step:8224 train loss:3.540825 +step:8225 train loss:3.587742 
+step:8226 train loss:3.516803 +step:8227 train loss:3.589268 +step:8228 train loss:3.471377 +step:8229 train loss:3.515887 +step:8230 train loss:3.530336 +step:8231 train loss:3.558316 +step:8232 train loss:3.556257 +step:8233 train loss:3.603808 +step:8234 train loss:3.597867 +step:8235 train loss:3.569269 +step:8236 train loss:3.553937 +step:8237 train loss:3.505646 +step:8238 train loss:3.753621 +step:8239 train loss:3.588684 +step:8240 train loss:3.537718 +step:8241 train loss:3.509897 +step:8242 train loss:3.546113 +step:8243 train loss:3.534967 +step:8244 train loss:3.551965 +step:8245 train loss:3.532686 +step:8246 train loss:3.600450 +step:8247 train loss:3.629407 +step:8248 train loss:3.549196 +step:8249 train loss:3.544263 +step:8250 validation loss:3.489673 +step:8250 train loss:3.531728 +step:8251 train loss:3.625828 +step:8252 train loss:3.564698 +step:8253 train loss:3.532334 +step:8254 train loss:3.500166 +step:8255 train loss:3.535975 +step:8256 train loss:3.517981 +step:8257 train loss:3.623896 +step:8258 train loss:3.542948 +step:8259 train loss:3.534649 +step:8260 train loss:3.532157 +step:8261 train loss:3.531085 +step:8262 train loss:3.544826 +step:8263 train loss:3.558764 +step:8264 train loss:3.525551 +step:8265 train loss:3.515603 +step:8266 train loss:3.524348 +step:8267 train loss:3.459265 +step:8268 train loss:3.578546 +step:8269 train loss:3.512027 +step:8270 train loss:3.566540 +step:8271 train loss:3.592216 +step:8272 train loss:3.617140 +step:8273 train loss:3.494219 +step:8274 train loss:3.558344 +step:8275 train loss:3.519519 +step:8276 train loss:3.555261 +step:8277 train loss:3.623654 +step:8278 train loss:3.639611 +step:8279 train loss:3.551443 +step:8280 train loss:3.539005 +step:8281 train loss:3.504026 +step:8282 train loss:3.566181 +step:8283 train loss:3.547986 +step:8284 train loss:3.534419 +step:8285 train loss:3.526218 +step:8286 train loss:3.636373 +step:8287 train loss:3.574400 +step:8288 train loss:3.545141 +step:8289 train loss:3.559227 +step:8290 train loss:3.499376 +step:8291 train loss:3.539413 +step:8292 train loss:3.569129 +step:8293 train loss:3.546626 +step:8294 train loss:3.512209 +step:8295 train loss:3.550061 +step:8296 train loss:3.618927 +step:8297 train loss:3.698571 +step:8298 train loss:3.521329 +step:8299 train loss:3.554323 +step:8300 train loss:3.566182 +step:8301 train loss:3.533289 +step:8302 train loss:3.594608 +step:8303 train loss:3.723763 +step:8304 train loss:3.537993 +step:8305 train loss:3.583355 +step:8306 train loss:3.559761 +step:8307 train loss:3.575040 +step:8308 train loss:3.573898 +step:8309 train loss:3.596914 +step:8310 train loss:3.510251 +step:8311 train loss:3.599169 +step:8312 train loss:3.594645 +step:8313 train loss:3.658942 +step:8314 train loss:3.530463 +step:8315 train loss:3.477422 +step:8316 train loss:3.534424 +step:8317 train loss:3.556558 +step:8318 train loss:3.549603 +step:8319 train loss:3.583164 +step:8320 train loss:3.603699 +step:8321 train loss:3.512343 +step:8322 train loss:3.528904 +step:8323 train loss:3.565106 +step:8324 train loss:3.540008 +step:8325 train loss:3.598252 +step:8326 train loss:3.563987 +step:8327 train loss:3.553252 +step:8328 train loss:3.626769 +step:8329 train loss:3.535015 +step:8330 train loss:3.571815 +step:8331 train loss:3.500879 +step:8332 train loss:3.601467 +step:8333 train loss:3.620105 +step:8334 train loss:3.485718 +step:8335 train loss:3.550600 +step:8336 train loss:3.643668 +step:8337 train loss:3.572968 +step:8338 train loss:3.541035 +step:8339 
train loss:3.520685 +step:8340 train loss:3.611856 +step:8341 train loss:3.510175 +step:8342 train loss:3.586382 +step:8343 train loss:3.495548 +step:8344 train loss:3.542028 +step:8345 train loss:3.575656 +step:8346 train loss:3.658394 +step:8347 train loss:3.547574 +step:8348 train loss:3.574086 +step:8349 train loss:3.548147 +step:8350 train loss:3.567310 +step:8351 train loss:3.508986 +step:8352 train loss:3.591174 +step:8353 train loss:3.548555 +step:8354 train loss:3.530758 +step:8355 train loss:3.530444 +step:8356 train loss:3.523665 +step:8357 train loss:3.539045 +step:8358 train loss:3.515452 +step:8359 train loss:3.511733 +step:8360 train loss:3.558588 +step:8361 train loss:3.571351 +step:8362 train loss:3.586583 +step:8363 train loss:3.588687 +step:8364 train loss:3.549911 +step:8365 train loss:3.697444 +step:8366 train loss:3.541137 +step:8367 train loss:3.517234 +step:8368 train loss:3.485293 +step:8369 train loss:3.518135 +step:8370 train loss:3.598580 +step:8371 train loss:3.573041 +step:8372 train loss:3.550317 +step:8373 train loss:3.561804 +step:8374 train loss:3.493639 +step:8375 train loss:3.556730 +step:8376 train loss:3.590819 +step:8377 train loss:3.420962 +step:8378 train loss:3.633355 +step:8379 train loss:3.497288 +step:8380 train loss:3.510478 +step:8381 train loss:3.515584 +step:8382 train loss:3.544531 +step:8383 train loss:3.502681 +step:8384 train loss:3.545110 +step:8385 train loss:3.557040 +step:8386 train loss:3.538140 +step:8387 train loss:3.699739 +step:8388 train loss:3.609850 +step:8389 train loss:3.586421 +step:8390 train loss:3.589669 +step:8391 train loss:3.519099 +step:8392 train loss:3.535462 +step:8393 train loss:3.491962 +step:8394 train loss:3.577652 +step:8395 train loss:3.585567 +step:8396 train loss:3.609322 +step:8397 train loss:3.542351 +step:8398 train loss:3.561457 +step:8399 train loss:3.524815 +step:8400 train loss:3.533797 +step:8401 train loss:3.540492 +step:8402 train loss:3.527556 +step:8403 train loss:3.539545 +step:8404 train loss:3.545665 +step:8405 train loss:3.498221 +step:8406 train loss:3.541638 +step:8407 train loss:3.576902 +step:8408 train loss:3.551018 +step:8409 train loss:3.473414 +step:8410 train loss:3.540680 +step:8411 train loss:3.563531 +step:8412 train loss:3.619444 +step:8413 train loss:3.596319 +step:8414 train loss:3.596026 +step:8415 train loss:3.517162 +step:8416 train loss:3.565979 +step:8417 train loss:3.479568 +step:8418 train loss:3.585891 +step:8419 train loss:3.536991 +step:8420 train loss:3.613258 +step:8421 train loss:3.532773 +step:8422 train loss:3.549232 +step:8423 train loss:3.564550 +step:8424 train loss:3.567921 +step:8425 train loss:3.627447 +step:8426 train loss:3.595473 +step:8427 train loss:3.516399 +step:8428 train loss:3.529618 +step:8429 train loss:3.591045 +step:8430 train loss:3.529668 +step:8431 train loss:3.532158 +step:8432 train loss:3.535377 +step:8433 train loss:3.511055 +step:8434 train loss:3.548180 +step:8435 train loss:3.467834 +step:8436 train loss:3.548098 +step:8437 train loss:3.591684 +step:8438 train loss:3.570292 +step:8439 train loss:3.511023 +step:8440 train loss:3.483252 +step:8441 train loss:3.538051 +step:8442 train loss:3.563633 +step:8443 train loss:3.519343 +step:8444 train loss:3.552177 +step:8445 train loss:3.501896 +step:8446 train loss:3.551638 +step:8447 train loss:3.562685 +step:8448 train loss:3.547394 +step:8449 train loss:3.539784 +step:8450 train loss:3.528501 +step:8451 train loss:3.560857 +step:8452 train loss:3.532277 +step:8453 train loss:3.517099 
+step:8454 train loss:3.566590 +step:8455 train loss:3.637881 +step:8456 train loss:3.612310 +step:8457 train loss:3.668121 +step:8458 train loss:3.557010 +step:8459 train loss:3.564291 +step:8460 train loss:3.489222 +step:8461 train loss:3.644385 +step:8462 train loss:3.517019 +step:8463 train loss:3.558317 +step:8464 train loss:3.568919 +step:8465 train loss:3.575366 +step:8466 train loss:3.550765 +step:8467 train loss:3.553076 +step:8468 train loss:3.804589 +step:8469 train loss:3.517299 +step:8470 train loss:3.510816 +step:8471 train loss:3.554691 +step:8472 train loss:3.575130 +step:8473 train loss:3.530924 +step:8474 train loss:3.657613 +step:8475 train loss:3.614959 +step:8476 train loss:3.561680 +step:8477 train loss:3.554137 +step:8478 train loss:3.533219 +step:8479 train loss:3.534523 +step:8480 train loss:3.598061 +step:8481 train loss:3.533969 +step:8482 train loss:3.529338 +step:8483 train loss:3.678977 +step:8484 train loss:3.561231 +step:8485 train loss:3.606515 +step:8486 train loss:3.515074 +step:8487 train loss:3.571851 +step:8488 train loss:3.517182 +step:8489 train loss:3.592339 +step:8490 train loss:3.580018 +step:8491 train loss:3.597496 +step:8492 train loss:3.556398 +step:8493 train loss:3.623409 +step:8494 train loss:3.487179 +step:8495 train loss:3.584091 +step:8496 train loss:3.532238 +step:8497 train loss:3.564983 +step:8498 train loss:3.578025 +step:8499 train loss:3.554263 +step:8500 validation loss:3.489847 total_sharp:4.0791e-05 L1_sharp:9.5217e-06 L2_sharp:8.2511e-07 L3_sharp:1.5667e-06 L4_sharp:6.5667e-06 L5_sharp:5.9926e-06 L6_sharp:4.1847e-06 L7_sharp:5.2532e-06 L8_sharp:1.1965e-05 L9_sharp:1.2695e-05 L10_sharp:1.3723e-05 L11_sharp:9.4763e-06 L12_sharp:7.0005e-05 total_fnorm:2.1099e+01 total_l1_linf:1.8472e+05 total_spectral:2.1099e+01 L1_fnorm:4.7805e+00 L2_fnorm:4.6678e+00 L3_fnorm:4.7325e+00 L4_fnorm:4.6865e+00 L5_fnorm:4.9235e+00 L6_fnorm:5.0313e+00 L7_fnorm:5.0960e+00 L8_fnorm:5.0361e+00 L9_fnorm:4.9978e+00 L10_fnorm:4.9916e+00 L11_fnorm:5.0069e+00 L12_fnorm:4.6385e+00 L1_l1linf:4.6046e+00 L2_l1linf:4.9639e+00 L3_l1linf:5.1576e+00 L4_l1linf:4.6915e+00 L5_l1linf:5.1776e+00 L6_l1linf:5.0723e+00 L7_l1linf:5.2057e+00 L8_l1linf:5.3040e+00 L9_l1linf:5.6352e+00 L10_l1linf:5.4233e+00 L11_l1linf:5.5736e+00 L12_l1linf:5.8562e+00 L1_spectral:9.1205e-01 L2_spectral:5.7292e-01 L3_spectral:5.8531e-01 L4_spectral:6.4880e-01 L5_spectral:5.4565e-01 L6_spectral:4.9215e-01 L7_spectral:5.5775e-01 L8_spectral:6.0332e-01 L9_spectral:6.7742e-01 L10_spectral:7.1121e-01 L11_spectral:7.2351e-01 L12_spectral:8.1530e-01 ip_v_neg_g:9.2364e-03 cos_v_neg_g:1.2577e-03 v_norm:2.1099e+01 g_norm:3.4807e-01 hv_norm:1.2117e-01 cos_v_hv:7.1030e-03 hg_norm:2.4374e+00 cos_g_hg:5.1501e-01 v_par:4.3908e-03 v_perp:2.1099e+01 L1_cos_v_neg_g:8.8788e-03 L1_v_norm:4.7805e+00 L2_cos_v_neg_g:3.3694e-03 L2_v_norm:4.6678e+00 L3_cos_v_neg_g:3.7560e-03 L3_v_norm:4.7325e+00 L4_cos_v_neg_g:4.7036e-03 L4_v_norm:4.6865e+00 L5_cos_v_neg_g:4.3408e-03 L5_v_norm:4.9235e+00 L6_cos_v_neg_g:2.9897e-03 L6_v_norm:5.0313e+00 L7_cos_v_neg_g:2.9001e-03 L7_v_norm:5.0960e+00 L8_cos_v_neg_g:4.9777e-03 L8_v_norm:5.0361e+00 L9_cos_v_neg_g:4.3338e-03 L9_v_norm:4.9978e+00 L10_cos_v_neg_g:5.1723e-03 L10_v_norm:4.9916e+00 L11_cos_v_neg_g:7.1093e-03 L11_v_norm:5.0069e+00 L12_cos_v_neg_g:1.0525e-02 L12_v_norm:4.6385e+00 +step:8500 train loss:3.552207 +step:8501 train loss:3.766581 +step:8502 train loss:3.787184 +step:8503 train loss:3.544669 +step:8504 train loss:3.541553 +step:8505 train loss:3.519457 +step:8506 train 
loss:3.590659 +step:8507 train loss:3.529337 +step:8508 train loss:3.562802 +step:8509 train loss:3.500083 +step:8510 train loss:3.524625 +step:8511 train loss:3.481524 +step:8512 train loss:3.578896 +step:8513 train loss:3.585815 +step:8514 train loss:3.529294 +step:8515 train loss:3.625305 +step:8516 train loss:3.542566 +step:8517 train loss:3.565696 +step:8518 train loss:3.452216 +step:8519 train loss:3.550204 +step:8520 train loss:3.512477 +step:8521 train loss:3.555485 +step:8522 train loss:3.447140 +step:8523 train loss:3.543162 +step:8524 train loss:3.533903 +step:8525 train loss:3.598722 +step:8526 train loss:3.582598 +step:8527 train loss:3.524363 +step:8528 train loss:3.604632 +step:8529 train loss:3.559510 +step:8530 train loss:3.598155 +step:8531 train loss:3.584941 +step:8532 train loss:3.625163 +step:8533 train loss:3.575761 +step:8534 train loss:3.576383 +step:8535 train loss:3.547636 +step:8536 train loss:3.637479 +step:8537 train loss:3.553297 +step:8538 train loss:3.620849 +step:8539 train loss:3.544077 +step:8540 train loss:3.570165 +step:8541 train loss:3.513241 +step:8542 train loss:3.576643 +step:8543 train loss:3.492026 +step:8544 train loss:3.489261 +step:8545 train loss:3.541760 +step:8546 train loss:3.489662 +step:8547 train loss:3.546691 +step:8548 train loss:3.514373 +step:8549 train loss:3.560704 +step:8550 train loss:3.512257 +step:8551 train loss:3.560726 +step:8552 train loss:3.567746 +step:8553 train loss:3.567170 +step:8554 train loss:3.539872 +step:8555 train loss:3.552724 +step:8556 train loss:3.631555 +step:8557 train loss:3.527713 +step:8558 train loss:3.569457 +step:8559 train loss:3.558961 +step:8560 train loss:3.540905 +step:8561 train loss:3.496712 +step:8562 train loss:3.524323 +step:8563 train loss:3.521563 +step:8564 train loss:3.592566 +step:8565 train loss:3.567218 +step:8566 train loss:3.586771 +step:8567 train loss:3.533090 +step:8568 train loss:3.549278 +step:8569 train loss:3.560138 +step:8570 train loss:3.502360 +step:8571 train loss:3.541585 +step:8572 train loss:3.559533 +step:8573 train loss:3.635617 +step:8574 train loss:3.562930 +step:8575 train loss:3.565504 +step:8576 train loss:3.597264 +step:8577 train loss:3.680800 +step:8578 train loss:3.587702 +step:8579 train loss:3.573183 +step:8580 train loss:3.507461 +step:8581 train loss:3.551757 +step:8582 train loss:3.555252 +step:8583 train loss:3.554345 +step:8584 train loss:3.545134 +step:8585 train loss:3.622379 +step:8586 train loss:3.541617 +step:8587 train loss:3.551710 +step:8588 train loss:3.600007 +step:8589 train loss:3.545734 +step:8590 train loss:3.538887 +step:8591 train loss:3.540375 +step:8592 train loss:3.501401 +step:8593 train loss:3.579522 +step:8594 train loss:3.601671 +step:8595 train loss:3.523371 +step:8596 train loss:3.567488 +step:8597 train loss:3.530266 +step:8598 train loss:3.582315 +step:8599 train loss:3.549739 +step:8600 train loss:3.558671 +step:8601 train loss:3.545792 +step:8602 train loss:3.522367 +step:8603 train loss:3.579365 +step:8604 train loss:3.523980 +step:8605 train loss:3.536129 +step:8606 train loss:3.549478 +step:8607 train loss:3.558567 +step:8608 train loss:3.601004 +step:8609 train loss:3.497732 +step:8610 train loss:3.571619 +step:8611 train loss:3.502087 +step:8612 train loss:3.579238 +step:8613 train loss:3.512138 +step:8614 train loss:3.576912 +step:8615 train loss:3.618546 +step:8616 train loss:3.502096 +step:8617 train loss:3.569254 +step:8618 train loss:3.542560 +step:8619 train loss:3.497411 +step:8620 train loss:3.541213 
+step:8621 train loss:3.570196 +step:8622 train loss:3.528264 +step:8623 train loss:3.543848 +step:8624 train loss:3.616944 +step:8625 train loss:3.539467 +step:8626 train loss:3.547801 +step:8627 train loss:3.542813 +step:8628 train loss:3.576327 +step:8629 train loss:3.482970 +step:8630 train loss:3.586443 +step:8631 train loss:3.529370 +step:8632 train loss:3.588829 +step:8633 train loss:3.532964 +step:8634 train loss:3.764360 +step:8635 train loss:3.561229 +step:8636 train loss:3.604091 +step:8637 train loss:3.531383 +step:8638 train loss:3.529539 +step:8639 train loss:3.589255 +step:8640 train loss:3.499831 +step:8641 train loss:3.600170 +step:8642 train loss:3.549816 +step:8643 train loss:3.665082 +step:8644 train loss:3.505187 +step:8645 train loss:3.577521 +step:8646 train loss:3.536186 +step:8647 train loss:3.559914 +step:8648 train loss:3.509827 +step:8649 train loss:3.596097 +step:8650 train loss:3.547825 +step:8651 train loss:3.566428 +step:8652 train loss:3.530088 +step:8653 train loss:3.560916 +step:8654 train loss:3.605749 +step:8655 train loss:3.532757 +step:8656 train loss:3.577111 +step:8657 train loss:3.578392 +step:8658 train loss:3.549838 +step:8659 train loss:3.541575 +step:8660 train loss:3.485338 +step:8661 train loss:3.547342 +step:8662 train loss:3.487119 +step:8663 train loss:3.560761 +step:8664 train loss:3.477064 +step:8665 train loss:3.500807 +step:8666 train loss:3.576095 +step:8667 train loss:3.468454 +step:8668 train loss:3.578932 +step:8669 train loss:3.614302 +step:8670 train loss:3.519045 +step:8671 train loss:3.512048 +step:8672 train loss:3.731290 +step:8673 train loss:3.499442 +step:8674 train loss:3.564927 +step:8675 train loss:3.608894 +step:8676 train loss:3.548268 +step:8677 train loss:3.574284 +step:8678 train loss:3.523550 +step:8679 train loss:3.579856 +step:8680 train loss:3.562565 +step:8681 train loss:3.561022 +step:8682 train loss:3.517246 +step:8683 train loss:3.534560 +step:8684 train loss:3.606791 +step:8685 train loss:3.549725 +step:8686 train loss:3.542567 +step:8687 train loss:3.494901 +step:8688 train loss:3.512583 +step:8689 train loss:3.583637 +step:8690 train loss:3.521441 +step:8691 train loss:3.598418 +step:8692 train loss:3.490354 +step:8693 train loss:3.575818 +step:8694 train loss:3.578624 +step:8695 train loss:3.561122 +step:8696 train loss:3.586134 +step:8697 train loss:3.538125 +step:8698 train loss:3.581167 +step:8699 train loss:3.530535 +step:8700 train loss:3.558957 +step:8701 train loss:3.518046 +step:8702 train loss:3.503984 +step:8703 train loss:3.521326 +step:8704 train loss:3.472596 +step:8705 train loss:3.554356 +step:8706 train loss:3.574078 +step:8707 train loss:3.572760 +step:8708 train loss:3.519312 +step:8709 train loss:3.580969 +step:8710 train loss:3.510069 +step:8711 train loss:3.566079 +step:8712 train loss:3.470840 +step:8713 train loss:3.545958 +step:8714 train loss:3.653300 +step:8715 train loss:3.511287 +step:8716 train loss:3.566689 +step:8717 train loss:3.535176 +step:8718 train loss:3.575897 +step:8719 train loss:3.545369 +step:8720 train loss:3.654345 +step:8721 train loss:3.547720 +step:8722 train loss:3.642776 +step:8723 train loss:3.509667 +step:8724 train loss:3.517093 +step:8725 train loss:3.549103 +step:8726 train loss:3.503423 +step:8727 train loss:3.579520 +step:8728 train loss:3.537393 +step:8729 train loss:3.542136 +step:8730 train loss:3.519651 +step:8731 train loss:3.525227 +step:8732 train loss:3.625074 +step:8733 train loss:3.549001 +step:8734 train loss:3.582646 +step:8735 train 
loss:3.653185 +step:8736 train loss:3.511523 +step:8737 train loss:3.539357 +step:8738 train loss:3.519192 +step:8739 train loss:3.581015 +step:8740 train loss:3.501769 +step:8741 train loss:3.555719 +step:8742 train loss:3.510927 +step:8743 train loss:3.548430 +step:8744 train loss:3.572869 +step:8745 train loss:3.610220 +step:8746 train loss:3.511052 +step:8747 train loss:3.615355 +step:8748 train loss:3.523004 +step:8749 train loss:3.560836 +step:8750 validation loss:3.480968 +step:8750 train loss:3.572609 +step:8751 train loss:3.611310 +step:8752 train loss:3.471175 +step:8753 train loss:3.518304 +step:8754 train loss:3.572261 +step:8755 train loss:3.551069 +step:8756 train loss:3.596783 +step:8757 train loss:3.510258 +step:8758 train loss:3.661613 +step:8759 train loss:3.510828 +step:8760 train loss:3.542306 +step:8761 train loss:3.618742 +step:8762 train loss:3.517861 +step:8763 train loss:3.491973 +step:8764 train loss:3.562151 +step:8765 train loss:3.635485 +step:8766 train loss:3.565133 +step:8767 train loss:3.518889 +step:8768 train loss:3.560994 +step:8769 train loss:3.534742 +step:8770 train loss:3.577934 +step:8771 train loss:3.550699 +step:8772 train loss:3.571389 +step:8773 train loss:3.531387 +step:8774 train loss:3.563204 +step:8775 train loss:3.563492 +step:8776 train loss:3.507361 +step:8777 train loss:3.543815 +step:8778 train loss:3.552030 +step:8779 train loss:3.570881 +step:8780 train loss:3.543321 +step:8781 train loss:3.544516 +step:8782 train loss:3.566936 +step:8783 train loss:3.546664 +step:8784 train loss:3.570633 +step:8785 train loss:3.557870 +step:8786 train loss:3.629087 +step:8787 train loss:3.576041 +step:8788 train loss:3.475206 +step:8789 train loss:3.573320 +step:8790 train loss:3.502527 +step:8791 train loss:3.555501 +step:8792 train loss:3.494035 +step:8793 train loss:3.581020 +step:8794 train loss:3.508877 +step:8795 train loss:3.576310 +step:8796 train loss:3.722590 +step:8797 train loss:3.469528 +step:8798 train loss:3.627827 +step:8799 train loss:3.543950 +step:8800 train loss:3.534796 +step:8801 train loss:3.559502 +step:8802 train loss:3.614737 +step:8803 train loss:3.571327 +step:8804 train loss:3.551185 +step:8805 train loss:3.572109 +step:8806 train loss:3.543655 +step:8807 train loss:3.533143 +step:8808 train loss:3.487173 +step:8809 train loss:3.613516 +step:8810 train loss:3.518936 +step:8811 train loss:3.504312 +step:8812 train loss:3.550468 +step:8813 train loss:3.460311 +step:8814 train loss:3.651623 +step:8815 train loss:3.495143 +step:8816 train loss:3.609371 +step:8817 train loss:3.549457 +step:8818 train loss:3.478675 +step:8819 train loss:3.597442 +step:8820 train loss:3.525331 +step:8821 train loss:3.550857 +step:8822 train loss:3.533622 +step:8823 train loss:3.547027 +step:8824 train loss:3.606649 +step:8825 train loss:3.581543 +step:8826 train loss:3.557101 +step:8827 train loss:3.513982 +step:8828 train loss:3.553730 +step:8829 train loss:3.538407 +step:8830 train loss:3.511410 +step:8831 train loss:3.587714 +step:8832 train loss:3.527410 +step:8833 train loss:3.558725 +step:8834 train loss:3.528615 +step:8835 train loss:3.463143 +step:8836 train loss:3.588908 +step:8837 train loss:3.495327 +step:8838 train loss:3.541446 +step:8839 train loss:3.525223 +step:8840 train loss:3.527907 +step:8841 train loss:3.541189 +step:8842 train loss:3.553258 +step:8843 train loss:3.563660 +step:8844 train loss:3.531661 +step:8845 train loss:3.550079 +step:8846 train loss:3.516700 +step:8847 train loss:3.554388 +step:8848 train loss:3.602128 
+step:8849 train loss:3.580792 +step:8850 train loss:3.575956 +step:8851 train loss:3.452631 +step:8852 train loss:3.560018 +step:8853 train loss:3.544560 +step:8854 train loss:3.510939 +step:8855 train loss:3.585308 +step:8856 train loss:3.576926 +step:8857 train loss:3.642267 +step:8858 train loss:3.509113 +step:8859 train loss:3.576698 +step:8860 train loss:3.535799 +step:8861 train loss:3.519022 +step:8862 train loss:3.518740 +step:8863 train loss:3.503531 +step:8864 train loss:3.569242 +step:8865 train loss:3.563496 +step:8866 train loss:3.444421 +step:8867 train loss:3.549671 +step:8868 train loss:3.576909 +step:8869 train loss:3.659113 +step:8870 train loss:3.540744 +step:8871 train loss:3.564723 +step:8872 train loss:3.548607 +step:8873 train loss:3.548512 +step:8874 train loss:3.599667 +step:8875 train loss:3.535326 +step:8876 train loss:3.572886 +step:8877 train loss:3.555616 +step:8878 train loss:3.604163 +step:8879 train loss:3.565177 +step:8880 train loss:3.513051 +step:8881 train loss:3.475351 +step:8882 train loss:3.547303 +step:8883 train loss:3.533062 +step:8884 train loss:3.624171 +step:8885 train loss:3.557683 +step:8886 train loss:3.560009 +step:8887 train loss:3.587537 +step:8888 train loss:3.543096 +step:8889 train loss:3.550290 +step:8890 train loss:3.543332 +step:8891 train loss:3.515465 +step:8892 train loss:3.596329 +step:8893 train loss:3.537408 +step:8894 train loss:3.553599 +step:8895 train loss:3.583840 +step:8896 train loss:3.499490 +step:8897 train loss:3.591659 +step:8898 train loss:3.523588 +step:8899 train loss:3.549329 +step:8900 train loss:3.515433 +step:8901 train loss:3.529844 +step:8902 train loss:3.568829 +step:8903 train loss:3.509297 +step:8904 train loss:3.558511 +step:8905 train loss:3.531267 +step:8906 train loss:3.524069 +step:8907 train loss:3.538660 +step:8908 train loss:3.598529 +step:8909 train loss:3.546299 +step:8910 train loss:3.503443 +step:8911 train loss:3.606895 +step:8912 train loss:3.502041 +step:8913 train loss:3.515601 +step:8914 train loss:3.613832 +step:8915 train loss:3.550527 +step:8916 train loss:3.583440 +step:8917 train loss:3.536240 +step:8918 train loss:3.540317 +step:8919 train loss:3.531224 +step:8920 train loss:3.556508 +step:8921 train loss:3.552839 +step:8922 train loss:3.533117 +step:8923 train loss:3.715371 +step:8924 train loss:3.619277 +step:8925 train loss:3.547226 +step:8926 train loss:3.557608 +step:8927 train loss:3.586706 +step:8928 train loss:3.543132 +step:8929 train loss:3.533599 +step:8930 train loss:3.589638 +step:8931 train loss:3.503701 +step:8932 train loss:3.601812 +step:8933 train loss:3.510818 +step:8934 train loss:3.546445 +step:8935 train loss:3.564262 +step:8936 train loss:3.596634 +step:8937 train loss:3.591627 +step:8938 train loss:3.535431 +step:8939 train loss:3.600377 +step:8940 train loss:3.554812 +step:8941 train loss:3.500282 +step:8942 train loss:3.572341 +step:8943 train loss:3.507280 +step:8944 train loss:3.556772 +step:8945 train loss:3.576521 +step:8946 train loss:3.424045 +step:8947 train loss:3.612040 +step:8948 train loss:3.460596 +step:8949 train loss:3.463457 +step:8950 train loss:3.507810 +step:8951 train loss:3.546035 +step:8952 train loss:3.566408 +step:8953 train loss:3.520588 +step:8954 train loss:3.623143 +step:8955 train loss:3.541399 +step:8956 train loss:3.565754 +step:8957 train loss:3.555936 +step:8958 train loss:3.534056 +step:8959 train loss:3.523634 +step:8960 train loss:3.489692 +step:8961 train loss:3.515131 +step:8962 train loss:3.568643 +step:8963 train 
loss:3.544859 +step:8964 train loss:3.529804 +step:8965 train loss:3.570542 +step:8966 train loss:3.529300 +step:8967 train loss:3.507139 +step:8968 train loss:3.493443 +step:8969 train loss:3.480828 +step:8970 train loss:3.560665 +step:8971 train loss:3.510287 +step:8972 train loss:3.709232 +step:8973 train loss:3.597083 +step:8974 train loss:3.555103 +step:8975 train loss:3.556323 +step:8976 train loss:3.519328 +step:8977 train loss:3.608883 +step:8978 train loss:3.589769 +step:8979 train loss:3.506392 +step:8980 train loss:3.602140 +step:8981 train loss:3.553195 +step:8982 train loss:3.525619 +step:8983 train loss:3.472466 +step:8984 train loss:3.595855 +step:8985 train loss:3.513282 +step:8986 train loss:3.550555 +step:8987 train loss:3.525257 +step:8988 train loss:3.573021 +step:8989 train loss:3.484144 +step:8990 train loss:3.623919 +step:8991 train loss:3.478095 +step:8992 train loss:3.533275 +step:8993 train loss:3.624151 +step:8994 train loss:3.527862 +step:8995 train loss:3.556923 +step:8996 train loss:3.524468 +step:8997 train loss:3.472868 +step:8998 train loss:3.479361 +step:8999 train loss:3.501200 +step:9000 validation loss:3.477365 total_sharp:4.9036e-05 L1_sharp:1.4594e-05 L2_sharp:3.3421e-06 L3_sharp:7.0657e-06 L4_sharp:6.9075e-06 L5_sharp:4.5444e-06 L6_sharp:3.3977e-06 L7_sharp:6.2101e-06 L8_sharp:1.1907e-05 L9_sharp:1.4750e-05 L10_sharp:1.5400e-05 L11_sharp:1.0850e-05 L12_sharp:1.0356e-04 total_fnorm:2.1414e+01 total_l1_linf:1.8818e+05 total_spectral:2.1414e+01 L1_fnorm:4.9099e+00 L2_fnorm:4.8412e+00 L3_fnorm:4.8453e+00 L4_fnorm:4.7876e+00 L5_fnorm:5.0116e+00 L6_fnorm:5.0602e+00 L7_fnorm:5.1482e+00 L8_fnorm:5.1034e+00 L9_fnorm:5.0826e+00 L10_fnorm:5.0844e+00 L11_fnorm:5.1384e+00 L12_fnorm:4.8702e+00 L1_l1linf:4.6377e+00 L2_l1linf:4.9665e+00 L3_l1linf:4.8098e+00 L4_l1linf:4.9453e+00 L5_l1linf:4.9692e+00 L6_l1linf:4.8619e+00 L7_l1linf:5.2040e+00 L8_l1linf:5.5042e+00 L9_l1linf:5.5095e+00 L10_l1linf:5.4258e+00 L11_l1linf:5.6513e+00 L12_l1linf:5.9806e+00 L1_spectral:9.1855e-01 L2_spectral:6.0437e-01 L3_spectral:5.8830e-01 L4_spectral:6.4494e-01 L5_spectral:5.4353e-01 L6_spectral:4.6791e-01 L7_spectral:5.1688e-01 L8_spectral:6.2919e-01 L9_spectral:6.9613e-01 L10_spectral:7.6917e-01 L11_spectral:7.2145e-01 L12_spectral:8.6322e-01 ip_v_neg_g:1.3193e-02 cos_v_neg_g:1.6222e-03 v_norm:2.1414e+01 g_norm:3.7980e-01 hv_norm:2.0727e-01 cos_v_hv:5.0661e-03 hg_norm:2.3308e+00 cos_g_hg:5.5033e-01 v_par:5.9993e-03 v_perp:2.1414e+01 L1_cos_v_neg_g:8.0271e-03 L1_v_norm:4.9099e+00 L2_cos_v_neg_g:4.4037e-03 L2_v_norm:4.8412e+00 L3_cos_v_neg_g:4.2114e-03 L3_v_norm:4.8453e+00 L4_cos_v_neg_g:2.9427e-03 L4_v_norm:4.7876e+00 L5_cos_v_neg_g:3.0130e-03 L5_v_norm:5.0116e+00 L6_cos_v_neg_g:3.4010e-03 L6_v_norm:5.0602e+00 L7_cos_v_neg_g:3.4718e-03 L7_v_norm:5.1482e+00 L8_cos_v_neg_g:5.1550e-03 L8_v_norm:5.1034e+00 L9_cos_v_neg_g:6.3341e-03 L9_v_norm:5.0826e+00 L10_cos_v_neg_g:7.7658e-03 L10_v_norm:5.0844e+00 L11_cos_v_neg_g:9.6481e-03 L11_v_norm:5.1384e+00 L12_cos_v_neg_g:1.8965e-02 L12_v_norm:4.8702e+00 +step:9000 train loss:3.588664 +step:9001 train loss:3.554669 +step:9002 train loss:3.561149 +step:9003 train loss:3.505132 +step:9004 train loss:3.502242 +step:9005 train loss:3.515030 +step:9006 train loss:3.518727 +step:9007 train loss:3.534631 +step:9008 train loss:3.492642 +step:9009 train loss:3.488439 +step:9010 train loss:3.524790 +step:9011 train loss:3.521213 +step:9012 train loss:3.632330 +step:9013 train loss:3.460450 +step:9014 train loss:3.532924 +step:9015 train loss:3.533438 +step:9016 
train loss:3.608704 +step:9017 train loss:3.551214 +step:9018 train loss:3.473590 +step:9019 train loss:3.556345 +step:9020 train loss:3.568891 +step:9021 train loss:3.525016 +step:9022 train loss:3.536829 +step:9023 train loss:3.532593 +step:9024 train loss:3.554633 +step:9025 train loss:3.539846 +step:9026 train loss:3.496298 +step:9027 train loss:3.541669 +step:9028 train loss:3.563495 +step:9029 train loss:3.581697 +step:9030 train loss:3.578127 +step:9031 train loss:3.541986 +step:9032 train loss:3.553707 +step:9033 train loss:3.537590 +step:9034 train loss:3.548274 +step:9035 train loss:3.551888 +step:9036 train loss:3.502006 +step:9037 train loss:3.497061 +step:9038 train loss:3.619648 +step:9039 train loss:3.522190 +step:9040 train loss:3.538448 +step:9041 train loss:3.587685 +step:9042 train loss:3.442787 +step:9043 train loss:3.537799 +step:9044 train loss:3.555695 +step:9045 train loss:3.501597 +step:9046 train loss:3.540794 +step:9047 train loss:3.538597 +step:9048 train loss:3.517036 +step:9049 train loss:3.553731 +step:9050 train loss:3.504808 +step:9051 train loss:3.543969 +step:9052 train loss:3.474083 +step:9053 train loss:3.596893 +step:9054 train loss:3.611766 +step:9055 train loss:3.534104 +step:9056 train loss:3.596376 +step:9057 train loss:3.450965 +step:9058 train loss:3.534532 +step:9059 train loss:3.614121 +step:9060 train loss:3.542481 +step:9061 train loss:3.570414 +step:9062 train loss:3.500803 +step:9063 train loss:3.632611 +step:9064 train loss:3.521605 +step:9065 train loss:3.533136 +step:9066 train loss:3.550942 +step:9067 train loss:3.513234 +step:9068 train loss:3.585810 +step:9069 train loss:3.545847 +step:9070 train loss:3.593906 +step:9071 train loss:3.526434 +step:9072 train loss:3.550511 +step:9073 train loss:3.510899 +step:9074 train loss:3.589821 +step:9075 train loss:3.536242 +step:9076 train loss:3.502953 +step:9077 train loss:3.580439 +step:9078 train loss:3.519530 +step:9079 train loss:3.562892 +step:9080 train loss:3.496311 +step:9081 train loss:3.534318 +step:9082 train loss:3.560899 +step:9083 train loss:3.590554 +step:9084 train loss:3.483160 +step:9085 train loss:3.552557 +step:9086 train loss:3.538408 +step:9087 train loss:3.483749 +step:9088 train loss:3.542839 +step:9089 train loss:3.560910 +step:9090 train loss:3.493500 +step:9091 train loss:3.594455 +step:9092 train loss:3.524514 +step:9093 train loss:3.516211 +step:9094 train loss:3.647025 +step:9095 train loss:3.513495 +step:9096 train loss:3.528181 +step:9097 train loss:3.514333 +step:9098 train loss:3.506742 +step:9099 train loss:3.631424 +step:9100 train loss:3.663142 +step:9101 train loss:3.580959 +step:9102 train loss:3.525419 +step:9103 train loss:3.527765 +step:9104 train loss:3.617007 +step:9105 train loss:3.480645 +step:9106 train loss:3.605801 +step:9107 train loss:3.540791 +step:9108 train loss:3.521842 +step:9109 train loss:3.547395 +step:9110 train loss:3.551008 +step:9111 train loss:3.529912 +step:9112 train loss:3.532956 +step:9113 train loss:3.560407 +step:9114 train loss:3.510850 +step:9115 train loss:3.541080 +step:9116 train loss:3.563678 +step:9117 train loss:3.573454 +step:9118 train loss:3.544667 +step:9119 train loss:3.466218 +step:9120 train loss:3.564797 +step:9121 train loss:3.597072 +step:9122 train loss:3.540732 +step:9123 train loss:3.559123 +step:9124 train loss:3.589843 +step:9125 train loss:3.537027 +step:9126 train loss:3.518596 +step:9127 train loss:3.549701 +step:9128 train loss:3.604698 +step:9129 train loss:3.559585 +step:9130 train loss:3.570747 
+step:9131 train loss:3.554547 +step:9132 train loss:3.560749 +step:9133 train loss:3.549231 +step:9134 train loss:3.521280 +step:9135 train loss:3.548288 +step:9136 train loss:3.545994 +step:9137 train loss:3.598566 +step:9138 train loss:3.519720 +step:9139 train loss:3.592157 +step:9140 train loss:3.516961 +step:9141 train loss:3.493625 +step:9142 train loss:3.671344 +step:9143 train loss:3.501679 +step:9144 train loss:3.594651 +step:9145 train loss:3.603208 +step:9146 train loss:3.516764 +step:9147 train loss:3.587551 +step:9148 train loss:3.608424 +step:9149 train loss:3.518668 +step:9150 train loss:3.539526 +step:9151 train loss:3.601511 +step:9152 train loss:3.556187 +step:9153 train loss:3.523556 +step:9154 train loss:3.538171 +step:9155 train loss:3.504914 +step:9156 train loss:3.508009 +step:9157 train loss:3.523297 +step:9158 train loss:3.507305 +step:9159 train loss:3.596191 +step:9160 train loss:3.479613 +step:9161 train loss:3.505911 +step:9162 train loss:3.594361 +step:9163 train loss:3.535289 +step:9164 train loss:3.508365 +step:9165 train loss:3.505636 +step:9166 train loss:3.557493 +step:9167 train loss:3.503111 +step:9168 train loss:3.548294 +step:9169 train loss:3.484333 +step:9170 train loss:3.505592 +step:9171 train loss:3.568962 +step:9172 train loss:3.493802 +step:9173 train loss:3.614027 +step:9174 train loss:3.540721 +step:9175 train loss:3.521642 +step:9176 train loss:3.503754 +step:9177 train loss:3.550303 +step:9178 train loss:3.496830 +step:9179 train loss:3.454619 +step:9180 train loss:3.550396 +step:9181 train loss:3.557558 +step:9182 train loss:3.528715 +step:9183 train loss:3.536759 +step:9184 train loss:3.529675 +step:9185 train loss:3.546765 +step:9186 train loss:3.509538 +step:9187 train loss:3.582666 +step:9188 train loss:3.618037 +step:9189 train loss:3.540891 +step:9190 train loss:3.546931 +step:9191 train loss:3.538054 +step:9192 train loss:3.549325 +step:9193 train loss:3.553044 +step:9194 train loss:3.488585 +step:9195 train loss:3.476043 +step:9196 train loss:3.529363 +step:9197 train loss:3.485796 +step:9198 train loss:3.567893 +step:9199 train loss:3.512937 +step:9200 train loss:3.535609 +step:9201 train loss:3.573261 +step:9202 train loss:3.560206 +step:9203 train loss:3.516717 +step:9204 train loss:3.713456 +step:9205 train loss:3.626413 +step:9206 train loss:3.539581 +step:9207 train loss:3.593446 +step:9208 train loss:3.570261 +step:9209 train loss:3.592819 +step:9210 train loss:3.485354 +step:9211 train loss:3.508633 +step:9212 train loss:3.510264 +step:9213 train loss:3.571022 +step:9214 train loss:3.512121 +step:9215 train loss:3.579640 +step:9216 train loss:3.542530 +step:9217 train loss:3.484095 +step:9218 train loss:3.572319 +step:9219 train loss:3.533668 +step:9220 train loss:3.576478 +step:9221 train loss:3.631024 +step:9222 train loss:3.576113 +step:9223 train loss:3.740398 +step:9224 train loss:3.580348 +step:9225 train loss:3.511254 +step:9226 train loss:3.530218 +step:9227 train loss:3.548220 +step:9228 train loss:3.547988 +step:9229 train loss:3.510196 +step:9230 train loss:3.570174 +step:9231 train loss:3.455627 +step:9232 train loss:3.512593 +step:9233 train loss:3.534332 +step:9234 train loss:3.593637 +step:9235 train loss:3.595054 +step:9236 train loss:3.500992 +step:9237 train loss:3.559857 +step:9238 train loss:3.534904 +step:9239 train loss:3.528057 +step:9240 train loss:3.495131 +step:9241 train loss:3.525469 +step:9242 train loss:3.536491 +step:9243 train loss:3.533224 +step:9244 train loss:3.508388 +step:9245 train 
loss:3.514364 +step:9246 train loss:3.514492 +step:9247 train loss:3.527057 +step:9248 train loss:3.534061 +step:9249 train loss:3.534304 +step:9250 validation loss:3.474856 +step:9250 train loss:3.574239 +step:9251 train loss:3.516817 +step:9252 train loss:3.588779 +step:9253 train loss:3.579704 +step:9254 train loss:3.505513 +step:9255 train loss:3.623256 +step:9256 train loss:3.505127 +step:9257 train loss:3.447886 +step:9258 train loss:3.527567 +step:9259 train loss:3.528265 +step:9260 train loss:3.624963 +step:9261 train loss:3.506048 +step:9262 train loss:3.576287 +step:9263 train loss:3.478745 +step:9264 train loss:3.632111 +step:9265 train loss:3.652129 +step:9266 train loss:3.583580 +step:9267 train loss:3.532051 +step:9268 train loss:3.525551 +step:9269 train loss:3.551969 +step:9270 train loss:3.471547 +step:9271 train loss:3.585026 +step:9272 train loss:3.528373 +step:9273 train loss:3.542940 +step:9274 train loss:3.547922 +step:9275 train loss:3.544826 +step:9276 train loss:3.574696 +step:9277 train loss:3.546895 +step:9278 train loss:3.559616 +step:9279 train loss:3.552675 +step:9280 train loss:3.552903 +step:9281 train loss:3.528138 +step:9282 train loss:3.642981 +step:9283 train loss:3.535275 +step:9284 train loss:3.496200 +step:9285 train loss:3.516194 +step:9286 train loss:3.569313 +step:9287 train loss:3.539022 +step:9288 train loss:3.547767 +step:9289 train loss:3.518016 +step:9290 train loss:3.546178 +step:9291 train loss:3.524148 +step:9292 train loss:3.562859 +step:9293 train loss:3.617239 +step:9294 train loss:3.540599 +step:9295 train loss:3.524356 +step:9296 train loss:3.478520 +step:9297 train loss:3.547832 +step:9298 train loss:3.488401 +step:9299 train loss:3.472884 +step:9300 train loss:3.577650 +step:9301 train loss:3.602097 +step:9302 train loss:3.542324 +step:9303 train loss:3.589517 +step:9304 train loss:3.512511 +step:9305 train loss:3.504890 +step:9306 train loss:3.509617 +step:9307 train loss:3.507875 +step:9308 train loss:3.479754 +step:9309 train loss:3.466659 +step:9310 train loss:3.525706 +step:9311 train loss:3.586385 +step:9312 train loss:3.539481 +step:9313 train loss:3.480749 +step:9314 train loss:3.513268 +step:9315 train loss:3.544819 +step:9316 train loss:3.533766 +step:9317 train loss:3.507574 +step:9318 train loss:3.593761 +step:9319 train loss:3.502871 +step:9320 train loss:3.522855 +step:9321 train loss:3.537241 +step:9322 train loss:3.542049 +step:9323 train loss:3.619175 +step:9324 train loss:3.562717 +step:9325 train loss:3.505011 +step:9326 train loss:3.579340 +step:9327 train loss:3.576726 +step:9328 train loss:3.575401 +step:9329 train loss:3.465102 +step:9330 train loss:3.630706 +step:9331 train loss:3.563689 +step:9332 train loss:3.584211 +step:9333 train loss:3.603093 +step:9334 train loss:3.538783 +step:9335 train loss:3.633127 +step:9336 train loss:3.592895 +step:9337 train loss:3.547102 +step:9338 train loss:3.600177 +step:9339 train loss:3.580196 +step:9340 train loss:3.537158 +step:9341 train loss:3.625094 +step:9342 train loss:3.527952 +step:9343 train loss:3.516451 +step:9344 train loss:3.521863 +step:9345 train loss:3.664172 +step:9346 train loss:3.498623 +step:9347 train loss:3.516294 +step:9348 train loss:3.542052 +step:9349 train loss:3.489607 +step:9350 train loss:3.562052 +step:9351 train loss:3.539112 +step:9352 train loss:3.527132 +step:9353 train loss:3.557573 +step:9354 train loss:3.526350 +step:9355 train loss:3.517619 +step:9356 train loss:3.563226 +step:9357 train loss:3.517519 +step:9358 train loss:3.553179 
+step:9359 train loss:3.489869 +step:9360 train loss:3.508359 +step:9361 train loss:3.506129 +step:9362 train loss:3.498046 +step:9363 train loss:3.559337 +step:9364 train loss:3.540226 +step:9365 train loss:3.540265 +step:9366 train loss:3.538277 +step:9367 train loss:3.553097 +step:9368 train loss:3.525687 +step:9369 train loss:3.525151 +step:9370 train loss:3.533846 +step:9371 train loss:3.552582 +step:9372 train loss:3.517399 +step:9373 train loss:3.503385 +step:9374 train loss:3.537446 +step:9375 train loss:3.551194 +step:9376 train loss:3.491375 +step:9377 train loss:3.561937 +step:9378 train loss:3.565973 +step:9379 train loss:3.592193 +step:9380 train loss:3.522780 +step:9381 train loss:3.533471 +step:9382 train loss:3.508723 +step:9383 train loss:3.504009 +step:9384 train loss:3.474260 +step:9385 train loss:3.547853 +step:9386 train loss:3.574739 +step:9387 train loss:3.551974 +step:9388 train loss:3.489356 +step:9389 train loss:3.504853 +step:9390 train loss:3.551060 +step:9391 train loss:3.558483 +step:9392 train loss:3.519309 +step:9393 train loss:3.512771 +step:9394 train loss:3.537303 +step:9395 train loss:3.533478 +step:9396 train loss:3.679212 +step:9397 train loss:3.568265 +step:9398 train loss:3.589352 +step:9399 train loss:3.544270 +step:9400 train loss:3.543913 +step:9401 train loss:3.537689 +step:9402 train loss:3.541627 +step:9403 train loss:3.472764 +step:9404 train loss:3.546715 +step:9405 train loss:3.506472 +step:9406 train loss:3.560997 +step:9407 train loss:3.504552 +step:9408 train loss:3.441506 +step:9409 train loss:3.505658 +step:9410 train loss:3.586680 +step:9411 train loss:3.548220 +step:9412 train loss:3.574856 +step:9413 train loss:3.597482 +step:9414 train loss:3.532167 +step:9415 train loss:3.524446 +step:9416 train loss:3.539676 +step:9417 train loss:3.491153 +step:9418 train loss:3.520302 +step:9419 train loss:3.491041 +step:9420 train loss:3.508131 +step:9421 train loss:3.556118 +step:9422 train loss:3.507100 +step:9423 train loss:3.568774 +step:9424 train loss:3.507139 +step:9425 train loss:3.554602 +step:9426 train loss:3.555567 +step:9427 train loss:3.530290 +step:9428 train loss:3.633398 +step:9429 train loss:3.525259 +step:9430 train loss:3.481034 +step:9431 train loss:3.573545 +step:9432 train loss:3.537031 +step:9433 train loss:3.574938 +step:9434 train loss:3.528106 +step:9435 train loss:3.554656 +step:9436 train loss:3.523978 +step:9437 train loss:3.533786 +step:9438 train loss:3.531375 +step:9439 train loss:3.527754 +step:9440 train loss:3.518566 +step:9441 train loss:3.535424 +step:9442 train loss:3.473252 +step:9443 train loss:3.525727 +step:9444 train loss:3.591972 +step:9445 train loss:3.523903 +step:9446 train loss:3.499872 +step:9447 train loss:3.567760 +step:9448 train loss:3.501896 +step:9449 train loss:3.525882 +step:9450 train loss:3.565685 +step:9451 train loss:3.483826 +step:9452 train loss:3.537892 +step:9453 train loss:3.515270 +step:9454 train loss:3.575462 +step:9455 train loss:3.556931 +step:9456 train loss:3.490918 +step:9457 train loss:3.532394 +step:9458 train loss:3.518612 +step:9459 train loss:3.509737 +step:9460 train loss:3.552597 +step:9461 train loss:3.581581 +step:9462 train loss:3.529396 +step:9463 train loss:3.556233 +step:9464 train loss:3.512774 +step:9465 train loss:3.604872 +step:9466 train loss:3.549617 +step:9467 train loss:3.576176 +step:9468 train loss:3.522626 +step:9469 train loss:3.511846 +step:9470 train loss:3.512611 +step:9471 train loss:3.549237 +step:9472 train loss:3.572987 +step:9473 train 
loss:3.565809 +step:9474 train loss:3.508441 +step:9475 train loss:3.498372 +step:9476 train loss:3.716769 +step:9477 train loss:3.589609 +step:9478 train loss:3.567068 +step:9479 train loss:3.663078 +step:9480 train loss:3.515672 +step:9481 train loss:3.546069 +step:9482 train loss:3.573604 +step:9483 train loss:3.530363 +step:9484 train loss:3.557533 +step:9485 train loss:3.483457 +step:9486 train loss:3.518677 +step:9487 train loss:3.549491 +step:9488 train loss:3.503217 +step:9489 train loss:3.551036 +step:9490 train loss:3.517482 +step:9491 train loss:3.559325 +step:9492 train loss:3.579027 +step:9493 train loss:3.550527 +step:9494 train loss:3.559858 +step:9495 train loss:3.515327 +step:9496 train loss:3.573406 +step:9497 train loss:3.586663 +step:9498 train loss:3.536644 +step:9499 train loss:3.583220 +step:9500 validation loss:3.474916 total_sharp:5.3864e-05 L1_sharp:1.3001e-05 L2_sharp:1.4577e-06 L3_sharp:3.0636e-06 L4_sharp:6.6308e-06 L5_sharp:5.6220e-06 L6_sharp:4.3530e-06 L7_sharp:6.6838e-06 L8_sharp:1.2544e-05 L9_sharp:1.4063e-05 L10_sharp:1.6461e-05 L11_sharp:1.4404e-05 L12_sharp:9.5558e-05 total_fnorm:2.1385e+01 total_l1_linf:1.8787e+05 total_spectral:2.1385e+01 L1_fnorm:4.8949e+00 L2_fnorm:4.8416e+00 L3_fnorm:4.8376e+00 L4_fnorm:4.7687e+00 L5_fnorm:5.0222e+00 L6_fnorm:5.0803e+00 L7_fnorm:5.1591e+00 L8_fnorm:5.1181e+00 L9_fnorm:5.1061e+00 L10_fnorm:5.0776e+00 L11_fnorm:5.1185e+00 L12_fnorm:4.8451e+00 L1_l1linf:4.6237e+00 L2_l1linf:4.8584e+00 L3_l1linf:4.7671e+00 L4_l1linf:4.6797e+00 L5_l1linf:5.0154e+00 L6_l1linf:4.8852e+00 L7_l1linf:5.2101e+00 L8_l1linf:5.5693e+00 L9_l1linf:6.2125e+00 L10_l1linf:6.0175e+00 L11_l1linf:5.7203e+00 L12_l1linf:7.4902e+00 L1_spectral:9.0598e-01 L2_spectral:6.1310e-01 L3_spectral:6.0435e-01 L4_spectral:6.5447e-01 L5_spectral:5.4787e-01 L6_spectral:4.8775e-01 L7_spectral:5.3989e-01 L8_spectral:6.7231e-01 L9_spectral:7.2668e-01 L10_spectral:7.6365e-01 L11_spectral:7.5914e-01 L12_spectral:8.7196e-01 ip_v_neg_g:1.1225e-02 cos_v_neg_g:1.4389e-03 v_norm:2.1385e+01 g_norm:3.6480e-01 hv_norm:2.1162e-01 cos_v_hv:5.4432e-03 hg_norm:2.9288e+00 cos_g_hg:5.4702e-01 v_par:5.0791e-03 v_perp:2.1385e+01 L1_cos_v_neg_g:8.0157e-03 L1_v_norm:4.8949e+00 L2_cos_v_neg_g:2.7098e-03 L2_v_norm:4.8416e+00 L3_cos_v_neg_g:2.8234e-03 L3_v_norm:4.8376e+00 L4_cos_v_neg_g:2.1930e-03 L4_v_norm:4.7687e+00 L5_cos_v_neg_g:3.2887e-03 L5_v_norm:5.0222e+00 L6_cos_v_neg_g:3.2495e-03 L6_v_norm:5.0803e+00 L7_cos_v_neg_g:4.2369e-03 L7_v_norm:5.1591e+00 L8_cos_v_neg_g:5.3451e-03 L8_v_norm:5.1181e+00 L9_cos_v_neg_g:7.0053e-03 L9_v_norm:5.1061e+00 L10_cos_v_neg_g:7.0210e-03 L10_v_norm:5.0776e+00 L11_cos_v_neg_g:8.7971e-03 L11_v_norm:5.1185e+00 L12_cos_v_neg_g:1.5904e-02 L12_v_norm:4.8451e+00 +step:9500 train loss:3.576355 +step:9501 train loss:3.553141 +step:9502 train loss:3.528951 +step:9503 train loss:3.545002 +step:9504 train loss:3.499321 +step:9505 train loss:3.522048 +step:9506 train loss:3.535721 +step:9507 train loss:3.524390 +step:9508 train loss:3.715963 +step:9509 train loss:3.534665 +step:9510 train loss:3.520407 +step:9511 train loss:3.549495 +step:9512 train loss:3.575597 +step:9513 train loss:3.571054 +step:9514 train loss:3.535107 +step:9515 train loss:3.435417 +step:9516 train loss:3.535307 +step:9517 train loss:3.572282 +step:9518 train loss:3.549000 +step:9519 train loss:3.557683 +step:9520 train loss:3.448655 +step:9521 train loss:3.440706 +step:9522 train loss:3.558321 +step:9523 train loss:3.555172 +step:9524 train loss:3.556036 +step:9525 train loss:3.601691 +step:9526 
train loss:3.616999 +step:9527 train loss:3.574419 +step:9528 train loss:3.507065 +step:9529 train loss:3.549468 +step:9530 train loss:3.599632 +step:9531 train loss:3.502355 +step:9532 train loss:3.553464 +step:9533 train loss:3.523691 +step:9534 train loss:3.606030 +step:9535 train loss:3.526685 +step:9536 train loss:3.506098 +step:9537 train loss:3.451849 +step:9538 train loss:3.471060 +step:9539 train loss:3.543168 +step:9540 train loss:3.464321 +step:9541 train loss:3.524207 +step:9542 train loss:3.646329 +step:9543 train loss:3.546463 +step:9544 train loss:3.584210 +step:9545 train loss:3.516917 +step:9546 train loss:3.542894 +step:9547 train loss:3.587294 +step:9548 train loss:3.527617 +step:9549 train loss:3.499294 +step:9550 train loss:3.529617 +step:9551 train loss:3.522501 +step:9552 train loss:3.544332 +step:9553 train loss:3.542194 +step:9554 train loss:3.586643 +step:9555 train loss:3.595757 +step:9556 train loss:3.500220 +step:9557 train loss:3.521805 +step:9558 train loss:3.585407 +step:9559 train loss:3.593409 +step:9560 train loss:3.504702 +step:9561 train loss:3.527424 +step:9562 train loss:3.567746 +step:9563 train loss:3.516892 +step:9564 train loss:3.548679 +step:9565 train loss:3.527630 +step:9566 train loss:3.504617 +step:9567 train loss:3.565861 +step:9568 train loss:3.541245 +step:9569 train loss:3.581331 +step:9570 train loss:3.474607 +step:9571 train loss:3.547255 +step:9572 train loss:3.491531 +step:9573 train loss:3.523511 +step:9574 train loss:3.500348 +step:9575 train loss:3.569072 +step:9576 train loss:3.462412 +step:9577 train loss:3.512951 +step:9578 train loss:3.515679 +step:9579 train loss:3.513905 +step:9580 train loss:3.579296 +step:9581 train loss:3.570356 +step:9582 train loss:3.543817 +step:9583 train loss:3.565704 +step:9584 train loss:3.503300 +step:9585 train loss:3.519973 +step:9586 train loss:3.571059 +step:9587 train loss:3.543604 +step:9588 train loss:3.528341 +step:9589 train loss:3.584749 +step:9590 train loss:3.552619 +step:9591 train loss:3.518579 +step:9592 train loss:3.537484 +step:9593 train loss:3.539284 +step:9594 train loss:3.553986 +step:9595 train loss:3.532851 +step:9596 train loss:3.617940 +step:9597 train loss:3.526472 +step:9598 train loss:3.487967 +step:9599 train loss:3.494006 +step:9600 train loss:3.581439 +step:9601 train loss:3.496352 +step:9602 train loss:3.578433 +step:9603 train loss:3.572380 +step:9604 train loss:3.455544 +step:9605 train loss:3.542069 +step:9606 train loss:3.596079 +step:9607 train loss:3.517639 +step:9608 train loss:3.525740 +step:9609 train loss:3.533788 +step:9610 train loss:3.576699 +step:9611 train loss:3.509164 +step:9612 train loss:3.516231 +step:9613 train loss:3.555965 +step:9614 train loss:3.529219 +step:9615 train loss:3.712533 +step:9616 train loss:3.525501 +step:9617 train loss:3.515913 +step:9618 train loss:3.475331 +step:9619 train loss:3.536875 +step:9620 train loss:3.595271 +step:9621 train loss:3.514692 +step:9622 train loss:3.525820 +step:9623 train loss:3.569897 +step:9624 train loss:3.556461 +step:9625 train loss:3.566242 +step:9626 train loss:3.537151 +step:9627 train loss:3.618549 +step:9628 train loss:3.583020 +step:9629 train loss:3.496831 +step:9630 train loss:3.555126 +step:9631 train loss:3.543294 +step:9632 train loss:3.509422 +step:9633 train loss:3.551633 +step:9634 train loss:3.619859 +step:9635 train loss:3.522314 +step:9636 train loss:3.467565 +step:9637 train loss:3.603433 +step:9638 train loss:3.488122 +step:9639 train loss:3.454156 +step:9640 train loss:3.580734 
+step:9641 train loss:3.549275 +step:9642 train loss:3.528092 +step:9643 train loss:3.530376 +step:9644 train loss:3.587195 +step:9645 train loss:3.515601 +step:9646 train loss:3.551716 +step:9647 train loss:3.562669 +step:9648 train loss:3.511488 +step:9649 train loss:3.484169 +step:9650 train loss:3.499618 +step:9651 train loss:3.591447 +step:9652 train loss:3.573175 +step:9653 train loss:3.514794 +step:9654 train loss:3.497014 +step:9655 train loss:3.494246 +step:9656 train loss:3.485713 +step:9657 train loss:3.515347 +step:9658 train loss:3.570366 +step:9659 train loss:3.679728 +step:9660 train loss:3.460498 +step:9661 train loss:3.484213 +step:9662 train loss:3.501218 +step:9663 train loss:3.542910 +step:9664 train loss:3.594584 +step:9665 train loss:3.436944 +step:9666 train loss:3.477825 +step:9667 train loss:3.615659 +step:9668 train loss:3.597683 +step:9669 train loss:3.611218 +step:9670 train loss:3.593372 +step:9671 train loss:3.594909 +step:9672 train loss:3.509415 +step:9673 train loss:3.528972 +step:9674 train loss:3.538882 +step:9675 train loss:3.535733 +step:9676 train loss:3.495116 +step:9677 train loss:3.504082 +step:9678 train loss:3.536553 +step:9679 train loss:3.531056 +step:9680 train loss:3.524551 +step:9681 train loss:3.514803 +step:9682 train loss:3.581469 +step:9683 train loss:3.554316 +step:9684 train loss:3.474010 +step:9685 train loss:3.555945 +step:9686 train loss:3.590594 +step:9687 train loss:3.495205 +step:9688 train loss:3.582082 +step:9689 train loss:3.682175 +step:9690 train loss:3.525578 +step:9691 train loss:3.515167 +step:9692 train loss:3.470478 +step:9693 train loss:3.474485 +step:9694 train loss:3.496778 +step:9695 train loss:3.600788 +step:9696 train loss:3.637150 +step:9697 train loss:3.545468 +step:9698 train loss:3.583151 +step:9699 train loss:3.539273 +step:9700 train loss:3.537573 +step:9701 train loss:3.596120 +step:9702 train loss:3.508180 +step:9703 train loss:3.530482 +step:9704 train loss:3.609874 +step:9705 train loss:3.509267 +step:9706 train loss:3.501948 +step:9707 train loss:3.552447 +step:9708 train loss:3.501104 +step:9709 train loss:3.526220 +step:9710 train loss:3.539098 +step:9711 train loss:3.516758 +step:9712 train loss:3.527269 +step:9713 train loss:3.577632 +step:9714 train loss:3.532130 +step:9715 train loss:3.551657 +step:9716 train loss:3.575132 +step:9717 train loss:3.496551 +step:9718 train loss:3.502658 +step:9719 train loss:3.584257 +step:9720 train loss:3.518174 +step:9721 train loss:3.507985 +step:9722 train loss:3.574867 +step:9723 train loss:3.518473 +step:9724 train loss:3.546486 +step:9725 train loss:3.601195 +step:9726 train loss:3.538867 +step:9727 train loss:3.520295 +step:9728 train loss:3.556478 +step:9729 train loss:3.584103 +step:9730 train loss:3.653201 +step:9731 train loss:3.574103 +step:9732 train loss:3.537079 +step:9733 train loss:3.575176 +step:9734 train loss:3.495811 +step:9735 train loss:3.606341 +step:9736 train loss:3.505136 +step:9737 train loss:3.565913 +step:9738 train loss:3.529386 +step:9739 train loss:3.604445 +step:9740 train loss:3.566674 +step:9741 train loss:3.508733 +step:9742 train loss:3.599218 +step:9743 train loss:3.475471 +step:9744 train loss:3.533580 +step:9745 train loss:3.492293 +step:9746 train loss:3.531028 +step:9747 train loss:3.520765 +step:9748 train loss:3.427178 +step:9749 train loss:3.518359 +step:9750 validation loss:3.467892 +step:9750 train loss:3.497617 +step:9751 train loss:3.640396 +step:9752 train loss:3.526351 +step:9753 train loss:3.484091 +step:9754 
train loss:3.517259 +step:9755 train loss:3.510635 +step:9756 train loss:3.513334 +step:9757 train loss:3.482846 +step:9758 train loss:3.471240 +step:9759 train loss:3.520399 +step:9760 train loss:3.464645 +step:9761 train loss:3.504867 +step:9762 train loss:3.499388 +step:9763 train loss:3.522183 +step:9764 train loss:3.504922 +step:9765 train loss:3.471461 +step:9766 train loss:3.559576 +step:9767 train loss:3.513987 +step:9768 train loss:3.529954 +step:9769 train loss:3.478511 +step:9770 train loss:3.482356 +step:9771 train loss:3.530119 +step:9772 train loss:3.542939 +step:9773 train loss:3.518873 +step:9774 train loss:3.489908 +step:9775 train loss:3.580092 +step:9776 train loss:3.577817 +step:9777 train loss:3.470378 +step:9778 train loss:3.475210 +step:9779 train loss:3.480414 +step:9780 train loss:3.478190 +step:9781 train loss:3.494994 +step:9782 train loss:3.574810 +step:9783 train loss:3.486433 +step:9784 train loss:3.517017 +step:9785 train loss:3.500103 +step:9786 train loss:3.539213 +step:9787 train loss:3.562957 +step:9788 train loss:3.490125 +step:9789 train loss:3.499793 +step:9790 train loss:3.462002 +step:9791 train loss:3.510197 +step:9792 train loss:3.526309 +step:9793 train loss:3.541563 +step:9794 train loss:3.518591 +step:9795 train loss:3.523435 +step:9796 train loss:3.507716 +step:9797 train loss:3.501575 +step:9798 train loss:3.519959 +step:9799 train loss:3.520774 +step:9800 train loss:3.590812 +step:9801 train loss:3.520179 +step:9802 train loss:3.572845 +step:9803 train loss:3.433284 +step:9804 train loss:3.527584 +step:9805 train loss:3.531836 +step:9806 train loss:3.507745 +step:9807 train loss:3.472967 +step:9808 train loss:3.390707 +step:9809 train loss:3.579322 +step:9810 train loss:3.531608 +step:9811 train loss:3.520855 +step:9812 train loss:3.495118 +step:9813 train loss:3.573961 +step:9814 train loss:3.565020 +step:9815 train loss:3.469035 +step:9816 train loss:3.469819 +step:9817 train loss:3.503178 +step:9818 train loss:3.529032 +step:9819 train loss:3.500127 +step:9820 train loss:3.570012 +step:9821 train loss:3.545608 +step:9822 train loss:3.523176 +step:9823 train loss:3.581951 +step:9824 train loss:3.487939 +step:9825 train loss:3.572579 +step:9826 train loss:3.564872 +step:9827 train loss:3.574024 +step:9828 train loss:3.490365 +step:9829 train loss:3.497602 +step:9830 train loss:3.484420 +step:9831 train loss:3.544056 +step:9832 train loss:3.556275 +step:9833 train loss:3.469953 +step:9834 train loss:3.520110 +step:9835 train loss:3.486595 +step:9836 train loss:3.547043 +step:9837 train loss:3.523073 +step:9838 train loss:3.561685 +step:9839 train loss:3.536206 +step:9840 train loss:3.501173 +step:9841 train loss:3.509150 +step:9842 train loss:3.570674 +step:9843 train loss:3.563039 +step:9844 train loss:3.515902 +step:9845 train loss:3.543126 +step:9846 train loss:3.479015 +step:9847 train loss:3.610008 +step:9848 train loss:3.533935 +step:9849 train loss:3.559206 +step:9850 train loss:3.474594 +step:9851 train loss:3.532767 +step:9852 train loss:3.492836 +step:9853 train loss:3.517390 +step:9854 train loss:3.527023 +step:9855 train loss:3.476991 +step:9856 train loss:3.480156 +step:9857 train loss:3.469374 +step:9858 train loss:3.532281 +step:9859 train loss:3.452484 +step:9860 train loss:3.689744 +step:9861 train loss:3.516408 +step:9862 train loss:3.483444 +step:9863 train loss:3.467031 +step:9864 train loss:3.587991 +step:9865 train loss:3.463341 +step:9866 train loss:3.505337 +step:9867 train loss:3.503489 +step:9868 train loss:3.561892 
+step:9869 train loss:3.520839 +step:9870 train loss:3.498138 +step:9871 train loss:3.538039 +step:9872 train loss:3.482282 +step:9873 train loss:3.531341 +step:9874 train loss:3.500678 +step:9875 train loss:3.502607 +step:9876 train loss:3.470900 +step:9877 train loss:3.520286 +step:9878 train loss:3.551758 +step:9879 train loss:3.549293 +step:9880 train loss:3.482121 +step:9881 train loss:3.533886 +step:9882 train loss:3.494893 +step:9883 train loss:3.503958 +step:9884 train loss:3.497770 +step:9885 train loss:3.558233 +step:9886 train loss:3.526531 +step:9887 train loss:3.531139 +step:9888 train loss:3.550791 +step:9889 train loss:3.584158 +step:9890 train loss:3.497458 +step:9891 train loss:3.501550 +step:9892 train loss:3.476593 +step:9893 train loss:3.594824 +step:9894 train loss:3.505802 +step:9895 train loss:3.438024 +step:9896 train loss:3.596677 +step:9897 train loss:3.471853 +step:9898 train loss:3.539773 +step:9899 train loss:3.518684 +step:9900 train loss:3.562070 +step:9901 train loss:3.484364 +step:9902 train loss:3.528469 +step:9903 train loss:3.499468 +step:9904 train loss:3.549732 +step:9905 train loss:3.456285 +step:9906 train loss:3.496213 +step:9907 train loss:3.501879 +step:9908 train loss:3.501565 +step:9909 train loss:3.516516 +step:9910 train loss:3.541476 +step:9911 train loss:3.623302 +step:9912 train loss:3.503420 +step:9913 train loss:3.502140 +step:9914 train loss:3.517457 +step:9915 train loss:3.514229 +step:9916 train loss:3.464421 +step:9917 train loss:3.498098 +step:9918 train loss:3.497120 +step:9919 train loss:3.657633 +step:9920 train loss:3.449628 +step:9921 train loss:3.538216 +step:9922 train loss:3.496393 +step:9923 train loss:3.554534 +step:9924 train loss:3.471818 +step:9925 train loss:3.530837 +step:9926 train loss:3.507755 +step:9927 train loss:3.549248 +step:9928 train loss:3.476899 +step:9929 train loss:3.514912 +step:9930 train loss:3.606174 +step:9931 train loss:3.568562 +step:9932 train loss:3.456192 +step:9933 train loss:3.548820 +step:9934 train loss:3.471595 +step:9935 train loss:3.584258 +step:9936 train loss:3.490826 +step:9937 train loss:3.518289 +step:9938 train loss:3.504150 +step:9939 train loss:3.569123 +step:9940 train loss:3.609056 +step:9941 train loss:3.481861 +step:9942 train loss:3.526657 +step:9943 train loss:3.659111 +step:9944 train loss:3.523112 +step:9945 train loss:3.543934 +step:9946 train loss:3.515889 +step:9947 train loss:3.465453 +step:9948 train loss:3.507146 +step:9949 train loss:3.405977 +step:9950 train loss:3.552057 +step:9951 train loss:3.474153 +step:9952 train loss:3.551276 +step:9953 train loss:3.511202 +step:9954 train loss:3.564837 +step:9955 train loss:3.539632 +step:9956 train loss:3.545441 +step:9957 train loss:3.518198 +step:9958 train loss:3.572476 +step:9959 train loss:3.470117 +step:9960 train loss:3.506127 +step:9961 train loss:3.515134 +step:9962 train loss:3.562094 +step:9963 train loss:3.453267 +step:9964 train loss:3.509073 +step:9965 train loss:3.507910 +step:9966 train loss:3.565860 +step:9967 train loss:3.479669 +step:9968 train loss:3.546814 +step:9969 train loss:3.459496 +step:9970 train loss:3.499770 +step:9971 train loss:3.550610 +step:9972 train loss:3.568011 +step:9973 train loss:3.544273 +step:9974 train loss:3.532856 +step:9975 train loss:3.501964 +step:9976 train loss:3.460019 +step:9977 train loss:3.509851 +step:9978 train loss:3.506436 +step:9979 train loss:3.517312 +step:9980 train loss:3.572849 +step:9981 train loss:3.482846 +step:9982 train loss:3.547430 +step:9983 train 
loss:3.463993 +step:9984 train loss:3.531247 +step:9985 train loss:3.468060 +step:9986 train loss:3.522359 +step:9987 train loss:3.588455 +step:9988 train loss:3.584528 +step:9989 train loss:3.475009 +step:9990 train loss:3.614057 +step:9991 train loss:3.457902 +step:9992 train loss:3.541081 +step:9993 train loss:3.527258 +step:9994 train loss:3.645186 +step:9995 train loss:3.580317 +step:9996 train loss:3.499771 +step:9997 train loss:3.538254 +step:9998 train loss:3.590880 +step:9999 train loss:3.556144 +step:10000 validation loss:3.464933 total_sharp:6.0705e-05 L1_sharp:1.5397e-05 L2_sharp:5.0665e-06 L3_sharp:1.3746e-05 L4_sharp:1.2275e-05 L5_sharp:6.4303e-06 L6_sharp:4.5827e-06 L7_sharp:5.2906e-06 L8_sharp:9.4668e-06 L9_sharp:1.7411e-05 L10_sharp:1.9749e-05 L11_sharp:1.3963e-05 L12_sharp:9.9868e-05 total_fnorm:2.1529e+01 total_l1_linf:1.8939e+05 total_spectral:2.1529e+01 L1_fnorm:5.0053e+00 L2_fnorm:4.8506e+00 L3_fnorm:4.9200e+00 L4_fnorm:4.8518e+00 L5_fnorm:5.0536e+00 L6_fnorm:5.1222e+00 L7_fnorm:5.1999e+00 L8_fnorm:5.1418e+00 L9_fnorm:5.1688e+00 L10_fnorm:5.1289e+00 L11_fnorm:5.1447e+00 L12_fnorm:4.8906e+00 L1_l1linf:4.8107e+00 L2_l1linf:4.8797e+00 L3_l1linf:5.0321e+00 L4_l1linf:5.0785e+00 L5_l1linf:5.0614e+00 L6_l1linf:5.0393e+00 L7_l1linf:5.3632e+00 L8_l1linf:5.3535e+00 L9_l1linf:6.0905e+00 L10_l1linf:5.9180e+00 L11_l1linf:6.1389e+00 L12_l1linf:5.8177e+00 L1_spectral:9.7515e-01 L2_spectral:6.3140e-01 L3_spectral:6.0798e-01 L4_spectral:6.7492e-01 L5_spectral:5.5480e-01 L6_spectral:5.4923e-01 L7_spectral:5.6961e-01 L8_spectral:7.0553e-01 L9_spectral:8.6402e-01 L10_spectral:9.0612e-01 L11_spectral:8.8044e-01 L12_spectral:9.6635e-01 ip_v_neg_g:1.0726e-02 cos_v_neg_g:1.3154e-03 v_norm:2.1529e+01 g_norm:3.7876e-01 hv_norm:1.8307e-01 cos_v_hv:7.1386e-03 hg_norm:4.4479e+00 cos_g_hg:5.4933e-01 v_par:6.2395e-03 v_perp:2.1529e+01 L1_cos_v_neg_g:-9.3464e-04 L1_v_norm:5.0053e+00 L2_cos_v_neg_g:7.0608e-04 L2_v_norm:4.8506e+00 L3_cos_v_neg_g:8.3618e-04 L3_v_norm:4.9200e+00 L4_cos_v_neg_g:1.7743e-03 L4_v_norm:4.8518e+00 L5_cos_v_neg_g:9.4469e-04 L5_v_norm:5.0536e+00 L6_cos_v_neg_g:1.1249e-03 L6_v_norm:5.1222e+00 L7_cos_v_neg_g:5.0202e-04 L7_v_norm:5.1999e+00 L8_cos_v_neg_g:4.3146e-03 L8_v_norm:5.1418e+00 L9_cos_v_neg_g:6.8411e-03 L9_v_norm:5.1688e+00 L10_cos_v_neg_g:8.5141e-03 L10_v_norm:5.1289e+00 L11_cos_v_neg_g:1.1067e-02 L11_v_norm:5.1447e+00 L12_cos_v_neg_g:1.3768e-02 L12_v_norm:4.8906e+00 diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/config.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..2c8e1b9c70c3d51068c75e4456c1a8cecffbc895 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/adam_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.02, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 1, + 
"flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "adam", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "0c42060b-21e4-4818-919c-4b42c475eac8", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..7d50c6d01ed55f73a2d883b43a8feadd52924723 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 37.2054328918457, + "total_l1_linf_norm": 315196.1875, + "total_spectral_norm": 37.20542907714844, + "layer_1_update_fnorm": 7.559410572052002, + "layer_1_max_l1_linf_norm": 12.811582565307617, + "layer_1_max_spectral_norm": 1.963255763053894, + "layer_2_update_fnorm": 6.016336441040039, + "layer_2_max_l1_linf_norm": 7.18044376373291, + "layer_2_max_spectral_norm": 1.2803298234939575, + "layer_3_update_fnorm": 6.904281139373779, + "layer_3_max_l1_linf_norm": 10.332624435424805, + "layer_3_max_spectral_norm": 1.4039359092712402, + "layer_4_update_fnorm": 7.647483825683594, + "layer_4_max_l1_linf_norm": 9.848817825317383, + "layer_4_max_spectral_norm": 1.432677984237671, + "layer_5_update_fnorm": 8.568597793579102, + "layer_5_max_l1_linf_norm": 9.977725982666016, + "layer_5_max_spectral_norm": 1.6009867191314697, + "layer_6_update_fnorm": 8.69577693939209, + "layer_6_max_l1_linf_norm": 10.194164276123047, + "layer_6_max_spectral_norm": 1.8201395273208618, + "layer_7_update_fnorm": 8.981522560119629, + "layer_7_max_l1_linf_norm": 10.879690170288086, + "layer_7_max_spectral_norm": 1.7327083349227905, + "layer_8_update_fnorm": 8.997591972351074, + "layer_8_max_l1_linf_norm": 12.531286239624023, + "layer_8_max_spectral_norm": 2.038402795791626, + "layer_9_update_fnorm": 8.69867992401123, + "layer_9_max_l1_linf_norm": 12.506532669067383, + "layer_9_max_spectral_norm": 1.7931327819824219, + "layer_10_update_fnorm": 8.228620529174805, + "layer_10_max_l1_linf_norm": 12.417999267578125, + "layer_10_max_spectral_norm": 1.7686116695404053, + "layer_11_update_fnorm": 8.168282508850098, + "layer_11_max_l1_linf_norm": 16.589946746826172, + "layer_11_max_spectral_norm": 1.8244233131408691, + "layer_12_update_fnorm": 7.472377300262451, + "layer_12_max_l1_linf_norm": 21.007972717285156, + "layer_12_max_spectral_norm": 2.0188252925872803, + "total_sharpness": 0.00018357898807153106, + "ip_v_neg_g": 0.1742679476737976, + "cos_v_neg_g": 0.008560831658542156, + "v_norm": 37.2054328918457, + "g_norm": 0.5471358299255371, + "hv_norm": 0.6201986074447632, + "cos_v_hv": 0.011012819595634937, + "hg_norm": 5.209978103637695, + "cos_g_hg": 0.7053093910217285, + "v_parallel_norm": 0.029655708000063896, + "v_perp_norm": 37.205421447753906, + "layer_1_v_norm": 7.559410572052002, + "layer_1_cos_v_neg_g": 0.10103581845760345, + "layer_2_v_norm": 6.016336441040039, + "layer_2_cos_v_neg_g": 0.034846387803554535, + "layer_3_v_norm": 6.904280662536621, + "layer_3_cos_v_neg_g": 0.03301406651735306, + "layer_4_v_norm": 7.647483825683594, + "layer_4_cos_v_neg_g": 0.040305159986019135, + "layer_5_v_norm": 
8.568597793579102, + "layer_5_cos_v_neg_g": 0.05149267241358757, + "layer_6_v_norm": 8.69577693939209, + "layer_6_cos_v_neg_g": 0.041987720876932144, + "layer_7_v_norm": 8.981522560119629, + "layer_7_cos_v_neg_g": 0.03762052580714226, + "layer_8_v_norm": 8.997591972351074, + "layer_8_cos_v_neg_g": 0.04239891469478607, + "layer_9_v_norm": 8.69867992401123, + "layer_9_cos_v_neg_g": 0.03878140076994896, + "layer_10_v_norm": 8.228620529174805, + "layer_10_cos_v_neg_g": 0.0449560321867466, + "layer_11_v_norm": 8.168283462524414, + "layer_11_cos_v_neg_g": 0.039646923542022705, + "layer_12_v_norm": 7.472377300262451, + "layer_12_cos_v_neg_g": 0.06036659702658653, + "layer_1_sharpness": 8.142812293954194e-05, + "layer_2_sharpness": 6.906684575369582e-06, + "layer_3_sharpness": 2.2003740014042705e-05, + "layer_4_sharpness": 2.7211051929043606e-05, + "layer_5_sharpness": 5.5771735787857324e-05, + "layer_6_sharpness": 4.77347057312727e-05, + "layer_7_sharpness": 2.4255814423668198e-05, + "layer_8_sharpness": 4.608402741723694e-05, + "layer_9_sharpness": 5.1181465096306056e-05, + "layer_10_sharpness": 6.234423926798627e-05, + "layer_11_sharpness": 4.594646816258319e-05, + "layer_12_sharpness": 0.0002823931281454861 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_10000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..6e4770b801c061b932cd978af0cd8f749fb06daf --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 40.482810974121094, + "total_l1_linf_norm": 348172.21875, + "total_spectral_norm": 40.48280715942383, + "layer_1_update_fnorm": 9.326584815979004, + "layer_1_max_l1_linf_norm": 8.79332160949707, + "layer_1_max_spectral_norm": 1.3226431608200073, + "layer_2_update_fnorm": 7.547158718109131, + "layer_2_max_l1_linf_norm": 5.6918206214904785, + "layer_2_max_spectral_norm": 0.9732520580291748, + "layer_3_update_fnorm": 8.294310569763184, + "layer_3_max_l1_linf_norm": 6.9760894775390625, + "layer_3_max_spectral_norm": 0.9420871734619141, + "layer_4_update_fnorm": 8.855731964111328, + "layer_4_max_l1_linf_norm": 7.986150741577148, + "layer_4_max_spectral_norm": 0.8885883092880249, + "layer_5_update_fnorm": 9.379857063293457, + "layer_5_max_l1_linf_norm": 9.144428253173828, + "layer_5_max_spectral_norm": 1.0105233192443848, + "layer_6_update_fnorm": 9.450042724609375, + "layer_6_max_l1_linf_norm": 9.198066711425781, + "layer_6_max_spectral_norm": 1.152870774269104, + "layer_7_update_fnorm": 10.022977828979492, + "layer_7_max_l1_linf_norm": 10.08755874633789, + "layer_7_max_spectral_norm": 1.124245524406433, + "layer_8_update_fnorm": 10.051915168762207, + "layer_8_max_l1_linf_norm": 10.522113800048828, + "layer_8_max_spectral_norm": 1.3762003183364868, + "layer_9_update_fnorm": 10.041543960571289, + "layer_9_max_l1_linf_norm": 12.274738311767578, + "layer_9_max_spectral_norm": 1.641737937927246, + "layer_10_update_fnorm": 9.943394660949707, + "layer_10_max_l1_linf_norm": 12.293069839477539, + "layer_10_max_spectral_norm": 1.747815728187561, + "layer_11_update_fnorm": 9.991775512695312, + "layer_11_max_l1_linf_norm": 12.632030487060547, + "layer_11_max_spectral_norm": 1.655057668685913, + "layer_12_update_fnorm": 9.33900260925293, + "layer_12_max_l1_linf_norm": 13.197196960449219, + 
"layer_12_max_spectral_norm": 1.9991954565048218, + "total_sharpness": 1.8096612620865926e-05, + "ip_v_neg_g": 0.017618602141737938, + "cos_v_neg_g": 0.0010727555491030216, + "v_norm": 40.482810974121094, + "g_norm": 0.4056953191757202, + "hv_norm": 0.275204062461853, + "cos_v_hv": 0.002662030979990959, + "hg_norm": 4.7705278396606445, + "cos_g_hg": 0.6478611826896667, + "v_parallel_norm": 0.013723584823310375, + "v_perp_norm": 40.48280715942383, + "layer_1_v_norm": 9.326584815979004, + "layer_1_cos_v_neg_g": 0.0028163667302578688, + "layer_2_v_norm": 7.547158718109131, + "layer_2_cos_v_neg_g": 0.00267163198441267, + "layer_3_v_norm": 8.294310569763184, + "layer_3_cos_v_neg_g": 0.0008380643557757139, + "layer_4_v_norm": 8.855731964111328, + "layer_4_cos_v_neg_g": 0.00142285309266299, + "layer_5_v_norm": 9.379857063293457, + "layer_5_cos_v_neg_g": 0.0011446095304563642, + "layer_6_v_norm": 9.450042724609375, + "layer_6_cos_v_neg_g": 0.0023395472671836615, + "layer_7_v_norm": 10.022977828979492, + "layer_7_cos_v_neg_g": 0.002783468458801508, + "layer_8_v_norm": 10.051915168762207, + "layer_8_cos_v_neg_g": 0.004303618334233761, + "layer_9_v_norm": 10.041543960571289, + "layer_9_cos_v_neg_g": 0.005995594430714846, + "layer_10_v_norm": 9.943394660949707, + "layer_10_cos_v_neg_g": 0.007781091146171093, + "layer_11_v_norm": 9.991775512695312, + "layer_11_cos_v_neg_g": 0.00832985620945692, + "layer_12_v_norm": 9.33900260925293, + "layer_12_cos_v_neg_g": 0.027617761865258217, + "layer_1_sharpness": -1.1919021289941156e-06, + "layer_2_sharpness": -1.1492442126836977e-06, + "layer_3_sharpness": -1.489505052632012e-07, + "layer_4_sharpness": 9.405339937984536e-07, + "layer_5_sharpness": 5.203920636631665e-07, + "layer_6_sharpness": 9.170484531750844e-07, + "layer_7_sharpness": 1.4183946177581674e-06, + "layer_8_sharpness": 2.6426571366755525e-06, + "layer_9_sharpness": 3.749394181795651e-06, + "layer_10_sharpness": 5.358328508009436e-06, + "layer_11_sharpness": 3.3529324809933314e-06, + "layer_12_sharpness": 5.396825144998729e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..08382a131c2521b08cc5ba4253eb6e799d984982 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 36.26450729370117, + "total_l1_linf_norm": 304817.5, + "total_spectral_norm": 36.264503479003906, + "layer_1_update_fnorm": 7.443183422088623, + "layer_1_max_l1_linf_norm": 7.964089393615723, + "layer_1_max_spectral_norm": 1.0434309244155884, + "layer_2_update_fnorm": 5.81013822555542, + "layer_2_max_l1_linf_norm": 4.80126953125, + "layer_2_max_spectral_norm": 0.8403534293174744, + "layer_3_update_fnorm": 6.3642168045043945, + "layer_3_max_l1_linf_norm": 5.630393981933594, + "layer_3_max_spectral_norm": 0.8955845236778259, + "layer_4_update_fnorm": 6.977889060974121, + "layer_4_max_l1_linf_norm": 6.382938861846924, + "layer_4_max_spectral_norm": 1.0578233003616333, + "layer_5_update_fnorm": 7.441190242767334, + "layer_5_max_l1_linf_norm": 7.306151866912842, + "layer_5_max_spectral_norm": 1.0234767198562622, + "layer_6_update_fnorm": 7.742275238037109, + "layer_6_max_l1_linf_norm": 7.860154151916504, + "layer_6_max_spectral_norm": 0.9965707659721375, + "layer_7_update_fnorm": 
8.308459281921387, + "layer_7_max_l1_linf_norm": 8.566605567932129, + "layer_7_max_spectral_norm": 1.0004246234893799, + "layer_8_update_fnorm": 8.371692657470703, + "layer_8_max_l1_linf_norm": 9.40206527709961, + "layer_8_max_spectral_norm": 1.1936837434768677, + "layer_9_update_fnorm": 8.351043701171875, + "layer_9_max_l1_linf_norm": 9.14887523651123, + "layer_9_max_spectral_norm": 1.3115237951278687, + "layer_10_update_fnorm": 8.218677520751953, + "layer_10_max_l1_linf_norm": 10.228684425354004, + "layer_10_max_spectral_norm": 1.4856748580932617, + "layer_11_update_fnorm": 8.43018627166748, + "layer_11_max_l1_linf_norm": 10.580953598022461, + "layer_11_max_spectral_norm": 1.5271497964859009, + "layer_12_update_fnorm": 7.504988670349121, + "layer_12_max_l1_linf_norm": 11.950798034667969, + "layer_12_max_spectral_norm": 1.659393310546875, + "total_sharpness": 7.039018237264827e-05, + "ip_v_neg_g": 0.04746589809656143, + "cos_v_neg_g": 0.0029628747142851353, + "v_norm": 36.26450729370117, + "g_norm": 0.44176024198532104, + "hv_norm": 0.43588998913764954, + "cos_v_hv": 0.00585621502250433, + "hg_norm": 3.6087794303894043, + "cos_g_hg": 0.634192943572998, + "v_parallel_norm": 0.018906857818365097, + "v_perp_norm": 36.264503479003906, + "layer_1_v_norm": 7.443183422088623, + "layer_1_cos_v_neg_g": 0.030283384025096893, + "layer_2_v_norm": 5.81013822555542, + "layer_2_cos_v_neg_g": 0.026676081120967865, + "layer_3_v_norm": 6.364216327667236, + "layer_3_cos_v_neg_g": 0.015128021128475666, + "layer_4_v_norm": 6.977889060974121, + "layer_4_cos_v_neg_g": 0.0138351209461689, + "layer_5_v_norm": 7.441190242767334, + "layer_5_cos_v_neg_g": 0.011469312012195587, + "layer_6_v_norm": 7.742275238037109, + "layer_6_cos_v_neg_g": 0.014149771071970463, + "layer_7_v_norm": 8.308459281921387, + "layer_7_cos_v_neg_g": 0.009952082298696041, + "layer_8_v_norm": 8.371692657470703, + "layer_8_cos_v_neg_g": 0.01256292499601841, + "layer_9_v_norm": 8.351043701171875, + "layer_9_cos_v_neg_g": 0.01571880653500557, + "layer_10_v_norm": 8.218677520751953, + "layer_10_cos_v_neg_g": 0.020011868327856064, + "layer_11_v_norm": 8.43018627166748, + "layer_11_cos_v_neg_g": 0.020159302279353142, + "layer_12_v_norm": 7.504988670349121, + "layer_12_cos_v_neg_g": 0.04048006236553192, + "layer_1_sharpness": 2.8477248633862473e-05, + "layer_2_sharpness": 3.148305768263526e-05, + "layer_3_sharpness": 1.1779757187468931e-05, + "layer_4_sharpness": 6.2942717704572715e-06, + "layer_5_sharpness": 5.605690148513531e-06, + "layer_6_sharpness": 8.422016435361002e-06, + "layer_7_sharpness": 5.441019311547279e-06, + "layer_8_sharpness": 8.64270896272501e-06, + "layer_9_sharpness": 1.5225075912894681e-05, + "layer_10_sharpness": 2.2431477191275917e-05, + "layer_11_sharpness": 2.071479320875369e-05, + "layer_12_sharpness": 0.00014815862232353538 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..139367c53723b4ce21a083179afe70d177da70f2 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 36.58609390258789, + "total_l1_linf_norm": 306824.9375, + "total_spectral_norm": 36.586097717285156, + "layer_1_update_fnorm": 7.636297225952148, + "layer_1_max_l1_linf_norm": 7.6518449783325195, + 
"layer_1_max_spectral_norm": 1.0658715963363647, + "layer_2_update_fnorm": 5.73710823059082, + "layer_2_max_l1_linf_norm": 4.937367916107178, + "layer_2_max_spectral_norm": 0.7448071241378784, + "layer_3_update_fnorm": 6.45156192779541, + "layer_3_max_l1_linf_norm": 6.016932487487793, + "layer_3_max_spectral_norm": 0.960382342338562, + "layer_4_update_fnorm": 7.119076251983643, + "layer_4_max_l1_linf_norm": 6.965081214904785, + "layer_4_max_spectral_norm": 0.9321104884147644, + "layer_5_update_fnorm": 7.541950702667236, + "layer_5_max_l1_linf_norm": 7.932252883911133, + "layer_5_max_spectral_norm": 1.0133867263793945, + "layer_6_update_fnorm": 7.812249183654785, + "layer_6_max_l1_linf_norm": 8.17151927947998, + "layer_6_max_spectral_norm": 1.0400032997131348, + "layer_7_update_fnorm": 8.449288368225098, + "layer_7_max_l1_linf_norm": 9.228771209716797, + "layer_7_max_spectral_norm": 0.9878788590431213, + "layer_8_update_fnorm": 8.52722454071045, + "layer_8_max_l1_linf_norm": 9.313207626342773, + "layer_8_max_spectral_norm": 1.164826512336731, + "layer_9_update_fnorm": 8.556187629699707, + "layer_9_max_l1_linf_norm": 9.953514099121094, + "layer_9_max_spectral_norm": 1.2963589429855347, + "layer_10_update_fnorm": 8.341843605041504, + "layer_10_max_l1_linf_norm": 9.336803436279297, + "layer_10_max_spectral_norm": 1.3034178018569946, + "layer_11_update_fnorm": 8.641449928283691, + "layer_11_max_l1_linf_norm": 9.958269119262695, + "layer_11_max_spectral_norm": 1.3618355989456177, + "layer_12_update_fnorm": 7.694579601287842, + "layer_12_max_l1_linf_norm": 11.52226448059082, + "layer_12_max_spectral_norm": 1.660151720046997, + "total_sharpness": 6.0231421230128035e-05, + "ip_v_neg_g": 0.037850067019462585, + "cos_v_neg_g": 0.0022681921254843473, + "v_norm": 36.58609390258789, + "g_norm": 0.45611125230789185, + "hv_norm": 0.42643362283706665, + "cos_v_hv": 0.005167586263269186, + "hg_norm": 5.057205677032471, + "cos_g_hg": 0.57455974817276, + "v_parallel_norm": 0.012487738393247128, + "v_perp_norm": 36.58609390258789, + "layer_1_v_norm": 7.636297225952148, + "layer_1_cos_v_neg_g": 0.02159830741584301, + "layer_2_v_norm": 5.73710823059082, + "layer_2_cos_v_neg_g": 0.01978371851146221, + "layer_3_v_norm": 6.45156192779541, + "layer_3_cos_v_neg_g": 0.01615268364548683, + "layer_4_v_norm": 7.119076251983643, + "layer_4_cos_v_neg_g": 0.014184611849486828, + "layer_5_v_norm": 7.541950702667236, + "layer_5_cos_v_neg_g": 0.01512705348432064, + "layer_6_v_norm": 7.812249183654785, + "layer_6_cos_v_neg_g": 0.013809241354465485, + "layer_7_v_norm": 8.449288368225098, + "layer_7_cos_v_neg_g": 0.010639461688697338, + "layer_8_v_norm": 8.52722454071045, + "layer_8_cos_v_neg_g": 0.01276568602770567, + "layer_9_v_norm": 8.556187629699707, + "layer_9_cos_v_neg_g": 0.011569908820092678, + "layer_10_v_norm": 8.341843605041504, + "layer_10_cos_v_neg_g": 0.014516828581690788, + "layer_11_v_norm": 8.641449928283691, + "layer_11_cos_v_neg_g": 0.014120519161224365, + "layer_12_v_norm": 7.694579601287842, + "layer_12_cos_v_neg_g": 0.02518909052014351, + "layer_1_sharpness": 1.8252892914460972e-05, + "layer_2_sharpness": 2.9797969546052627e-05, + "layer_3_sharpness": 1.8673259546631016e-05, + "layer_4_sharpness": 1.0069061318063177e-05, + "layer_5_sharpness": 7.731060577498283e-06, + "layer_6_sharpness": 8.717040145711508e-06, + "layer_7_sharpness": 4.163222456554649e-06, + "layer_8_sharpness": 9.221870641340502e-06, + "layer_9_sharpness": 9.996653716370929e-06, + "layer_10_sharpness": 1.5975094356690533e-05, + 
"layer_11_sharpness": 1.3356604540604167e-05, + "layer_12_sharpness": 0.00013802960165776312 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..9093c7b87e26e6a3b266adbba41d9e7323188026 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 38.58799362182617, + "total_l1_linf_norm": 328587.59375, + "total_spectral_norm": 38.58798599243164, + "layer_1_update_fnorm": 8.524430274963379, + "layer_1_max_l1_linf_norm": 8.339387893676758, + "layer_1_max_spectral_norm": 1.1458979845046997, + "layer_2_update_fnorm": 6.945677757263184, + "layer_2_max_l1_linf_norm": 5.5238471031188965, + "layer_2_max_spectral_norm": 0.9259862899780273, + "layer_3_update_fnorm": 7.4853973388671875, + "layer_3_max_l1_linf_norm": 6.656599998474121, + "layer_3_max_spectral_norm": 1.050270438194275, + "layer_4_update_fnorm": 8.020833969116211, + "layer_4_max_l1_linf_norm": 7.558034896850586, + "layer_4_max_spectral_norm": 1.0288457870483398, + "layer_5_update_fnorm": 8.509651184082031, + "layer_5_max_l1_linf_norm": 8.576900482177734, + "layer_5_max_spectral_norm": 1.1209317445755005, + "layer_6_update_fnorm": 8.712031364440918, + "layer_6_max_l1_linf_norm": 8.729303359985352, + "layer_6_max_spectral_norm": 1.0660223960876465, + "layer_7_update_fnorm": 9.276128768920898, + "layer_7_max_l1_linf_norm": 9.515707969665527, + "layer_7_max_spectral_norm": 1.2138088941574097, + "layer_8_update_fnorm": 9.244608879089355, + "layer_8_max_l1_linf_norm": 10.703598976135254, + "layer_8_max_spectral_norm": 1.4308395385742188, + "layer_9_update_fnorm": 9.234889030456543, + "layer_9_max_l1_linf_norm": 10.267088890075684, + "layer_9_max_spectral_norm": 1.557631015777588, + "layer_10_update_fnorm": 9.040935516357422, + "layer_10_max_l1_linf_norm": 10.69158935546875, + "layer_10_max_spectral_norm": 1.627031683921814, + "layer_11_update_fnorm": 9.220964431762695, + "layer_11_max_l1_linf_norm": 11.303665161132812, + "layer_11_max_spectral_norm": 1.5554765462875366, + "layer_12_update_fnorm": 8.217605590820312, + "layer_12_max_l1_linf_norm": 11.84725284576416, + "layer_12_max_spectral_norm": 1.7923645973205566, + "total_sharpness": 7.223093416541815e-05, + "ip_v_neg_g": 0.06590168178081512, + "cos_v_neg_g": 0.0036057860124856234, + "v_norm": 38.58799362182617, + "g_norm": 0.47363561391830444, + "hv_norm": 0.5226153135299683, + "cos_v_hv": 0.005333267152309418, + "hg_norm": 4.623476982116699, + "cos_g_hg": 0.6552150249481201, + "v_parallel_norm": 0.018821874633431435, + "v_perp_norm": 38.587989807128906, + "layer_1_v_norm": 8.524430274963379, + "layer_1_cos_v_neg_g": 0.035777390003204346, + "layer_2_v_norm": 6.945677757263184, + "layer_2_cos_v_neg_g": 0.016007347032427788, + "layer_3_v_norm": 7.4853973388671875, + "layer_3_cos_v_neg_g": 0.016744600608944893, + "layer_4_v_norm": 8.020833969116211, + "layer_4_cos_v_neg_g": 0.0162481851875782, + "layer_5_v_norm": 8.509651184082031, + "layer_5_cos_v_neg_g": 0.014779407531023026, + "layer_6_v_norm": 8.712031364440918, + "layer_6_cos_v_neg_g": 0.017315395176410675, + "layer_7_v_norm": 9.276128768920898, + "layer_7_cos_v_neg_g": 0.016147639602422714, + "layer_8_v_norm": 9.244608879089355, + "layer_8_cos_v_neg_g": 0.021408965811133385, + 
"layer_9_v_norm": 9.234889030456543, + "layer_9_cos_v_neg_g": 0.024659356102347374, + "layer_10_v_norm": 9.040935516357422, + "layer_10_cos_v_neg_g": 0.028630193322896957, + "layer_11_v_norm": 9.220964431762695, + "layer_11_cos_v_neg_g": 0.028650840744376183, + "layer_12_v_norm": 8.217605590820312, + "layer_12_cos_v_neg_g": 0.051832545548677444, + "layer_1_sharpness": 2.1114343326189555e-05, + "layer_2_sharpness": 1.0620955436024815e-05, + "layer_3_sharpness": 5.611848791886587e-06, + "layer_4_sharpness": 4.981574875273509e-06, + "layer_5_sharpness": 3.1852723623160273e-06, + "layer_6_sharpness": 3.862836820189841e-06, + "layer_7_sharpness": 4.380899554234929e-06, + "layer_8_sharpness": 8.275939762825146e-06, + "layer_9_sharpness": 1.3698107977688778e-05, + "layer_10_sharpness": 2.0253555703675374e-05, + "layer_11_sharpness": 1.3856037185178138e-05, + "layer_12_sharpness": 0.00019045265798922628 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..c2245009cfb21f49aaa70e29e5e29bea5ed44edf --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 40.06586837768555, + "total_l1_linf_norm": 342888.875, + "total_spectral_norm": 40.06587219238281, + "layer_1_update_fnorm": 8.993989944458008, + "layer_1_max_l1_linf_norm": 8.482364654541016, + "layer_1_max_spectral_norm": 1.1789177656173706, + "layer_2_update_fnorm": 7.459266185760498, + "layer_2_max_l1_linf_norm": 5.83816385269165, + "layer_2_max_spectral_norm": 0.933580756187439, + "layer_3_update_fnorm": 8.068607330322266, + "layer_3_max_l1_linf_norm": 6.942063331604004, + "layer_3_max_spectral_norm": 0.9547732472419739, + "layer_4_update_fnorm": 8.641511917114258, + "layer_4_max_l1_linf_norm": 7.865985870361328, + "layer_4_max_spectral_norm": 0.9614966511726379, + "layer_5_update_fnorm": 9.062685012817383, + "layer_5_max_l1_linf_norm": 8.87639045715332, + "layer_5_max_spectral_norm": 1.0792179107666016, + "layer_6_update_fnorm": 9.222550392150879, + "layer_6_max_l1_linf_norm": 9.034235000610352, + "layer_6_max_spectral_norm": 1.116355061531067, + "layer_7_update_fnorm": 9.70808219909668, + "layer_7_max_l1_linf_norm": 9.871833801269531, + "layer_7_max_spectral_norm": 1.1040894985198975, + "layer_8_update_fnorm": 9.746907234191895, + "layer_8_max_l1_linf_norm": 10.217761993408203, + "layer_8_max_spectral_norm": 1.3268150091171265, + "layer_9_update_fnorm": 9.672755241394043, + "layer_9_max_l1_linf_norm": 10.681896209716797, + "layer_9_max_spectral_norm": 1.4745436906814575, + "layer_10_update_fnorm": 9.540499687194824, + "layer_10_max_l1_linf_norm": 12.100341796875, + "layer_10_max_spectral_norm": 1.5684082508087158, + "layer_11_update_fnorm": 9.876668930053711, + "layer_11_max_l1_linf_norm": 11.042695045471191, + "layer_11_max_spectral_norm": 1.5630767345428467, + "layer_12_update_fnorm": 9.188922882080078, + "layer_12_max_l1_linf_norm": 12.202078819274902, + "layer_12_max_spectral_norm": 1.8727056980133057, + "total_sharpness": 4.197547605144791e-05, + "ip_v_neg_g": 0.04275396466255188, + "cos_v_neg_g": 0.0024950718507170677, + "v_norm": 40.06586837768555, + "g_norm": 0.4276798367500305, + "hv_norm": 0.30486249923706055, + "cos_v_hv": 0.005516532808542252, + "hg_norm": 4.185144901275635, + 
"cos_g_hg": 0.6074484586715698, + "v_parallel_norm": 0.018163364380598068, + "v_perp_norm": 40.06586456298828, + "layer_1_v_norm": 8.993989944458008, + "layer_1_cos_v_neg_g": 0.02177545614540577, + "layer_2_v_norm": 7.459266185760498, + "layer_2_cos_v_neg_g": 0.009480222128331661, + "layer_3_v_norm": 8.068607330322266, + "layer_3_cos_v_neg_g": 0.009042716585099697, + "layer_4_v_norm": 8.641511917114258, + "layer_4_cos_v_neg_g": 0.009201128035783768, + "layer_5_v_norm": 9.062685012817383, + "layer_5_cos_v_neg_g": 0.009577451273798943, + "layer_6_v_norm": 9.222549438476562, + "layer_6_cos_v_neg_g": 0.01082037203013897, + "layer_7_v_norm": 9.70808219909668, + "layer_7_cos_v_neg_g": 0.010305365547537804, + "layer_8_v_norm": 9.746907234191895, + "layer_8_cos_v_neg_g": 0.012738047167658806, + "layer_9_v_norm": 9.672755241394043, + "layer_9_cos_v_neg_g": 0.015302830375730991, + "layer_10_v_norm": 9.540499687194824, + "layer_10_cos_v_neg_g": 0.01867119036614895, + "layer_11_v_norm": 9.876668930053711, + "layer_11_cos_v_neg_g": 0.02011280134320259, + "layer_12_v_norm": 9.188922882080078, + "layer_12_cos_v_neg_g": 0.0395313985645771, + "layer_1_sharpness": 1.1188994903932326e-05, + "layer_2_sharpness": 2.4476889848301653e-06, + "layer_3_sharpness": 3.800188778768643e-06, + "layer_4_sharpness": 4.51164032710949e-06, + "layer_5_sharpness": 3.7647698718501488e-06, + "layer_6_sharpness": 3.295515853096731e-06, + "layer_7_sharpness": 2.6708223686000565e-06, + "layer_8_sharpness": 5.243939995125402e-06, + "layer_9_sharpness": 7.667662430321798e-06, + "layer_10_sharpness": 1.1419518159527797e-05, + "layer_11_sharpness": 9.48634169617435e-06, + "layer_12_sharpness": 0.00010681740968720987 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..478eaedaeedf1cce2581192bb28b96e629c3cb0e --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 39.31951141357422, + "total_l1_linf_norm": 336397.9375, + "total_spectral_norm": 39.31951141357422, + "layer_1_update_fnorm": 8.882332801818848, + "layer_1_max_l1_linf_norm": 8.467114448547363, + "layer_1_max_spectral_norm": 1.1508808135986328, + "layer_2_update_fnorm": 7.292483806610107, + "layer_2_max_l1_linf_norm": 5.43705940246582, + "layer_2_max_spectral_norm": 0.9165224432945251, + "layer_3_update_fnorm": 7.762342929840088, + "layer_3_max_l1_linf_norm": 6.545936584472656, + "layer_3_max_spectral_norm": 0.904224693775177, + "layer_4_update_fnorm": 8.44383430480957, + "layer_4_max_l1_linf_norm": 7.570967674255371, + "layer_4_max_spectral_norm": 1.0130575895309448, + "layer_5_update_fnorm": 8.86896800994873, + "layer_5_max_l1_linf_norm": 8.604289054870605, + "layer_5_max_spectral_norm": 1.0058298110961914, + "layer_6_update_fnorm": 9.019949913024902, + "layer_6_max_l1_linf_norm": 9.615230560302734, + "layer_6_max_spectral_norm": 1.026343822479248, + "layer_7_update_fnorm": 9.478460311889648, + "layer_7_max_l1_linf_norm": 9.493236541748047, + "layer_7_max_spectral_norm": 1.053117275238037, + "layer_8_update_fnorm": 9.470356941223145, + "layer_8_max_l1_linf_norm": 10.334726333618164, + "layer_8_max_spectral_norm": 1.2412559986114502, + "layer_9_update_fnorm": 9.448403358459473, + "layer_9_max_l1_linf_norm": 10.334763526916504, 
+ "layer_9_max_spectral_norm": 1.410980463027954, + "layer_10_update_fnorm": 9.237560272216797, + "layer_10_max_l1_linf_norm": 11.21613597869873, + "layer_10_max_spectral_norm": 1.508556842803955, + "layer_11_update_fnorm": 9.495529174804688, + "layer_11_max_l1_linf_norm": 11.095155715942383, + "layer_11_max_spectral_norm": 1.5352822542190552, + "layer_12_update_fnorm": 8.486523628234863, + "layer_12_max_l1_linf_norm": 11.219367980957031, + "layer_12_max_spectral_norm": 1.8061511516571045, + "total_sharpness": 4.606924994732253e-05, + "ip_v_neg_g": 0.0396069772541523, + "cos_v_neg_g": 0.0021468624472618103, + "v_norm": 39.31951141357422, + "g_norm": 0.4692014455795288, + "hv_norm": 0.4918631315231323, + "cos_v_hv": 0.0036827728617936373, + "hg_norm": 4.118252277374268, + "cos_g_hg": 0.6468997597694397, + "v_parallel_norm": 0.014312059618532658, + "v_perp_norm": 39.31950759887695, + "layer_1_v_norm": 8.882332801818848, + "layer_1_cos_v_neg_g": 0.02279982529580593, + "layer_2_v_norm": 7.292483806610107, + "layer_2_cos_v_neg_g": 0.01425323449075222, + "layer_3_v_norm": 7.762342929840088, + "layer_3_cos_v_neg_g": 0.009670152328908443, + "layer_4_v_norm": 8.44383430480957, + "layer_4_cos_v_neg_g": 0.0103302588686347, + "layer_5_v_norm": 8.86896800994873, + "layer_5_cos_v_neg_g": 0.008474624715745449, + "layer_6_v_norm": 9.019949913024902, + "layer_6_cos_v_neg_g": 0.011376021429896355, + "layer_7_v_norm": 9.478460311889648, + "layer_7_cos_v_neg_g": 0.008814450353384018, + "layer_8_v_norm": 9.470356941223145, + "layer_8_cos_v_neg_g": 0.010814360342919827, + "layer_9_v_norm": 9.448403358459473, + "layer_9_cos_v_neg_g": 0.010135386139154434, + "layer_10_v_norm": 9.237560272216797, + "layer_10_cos_v_neg_g": 0.013540894724428654, + "layer_11_v_norm": 9.495529174804688, + "layer_11_cos_v_neg_g": 0.017857348546385765, + "layer_12_v_norm": 8.486523628234863, + "layer_12_cos_v_neg_g": 0.03439328819513321, + "layer_1_sharpness": 6.690414920740295e-06, + "layer_2_sharpness": 3.754700230729213e-07, + "layer_3_sharpness": 2.569891194070806e-06, + "layer_4_sharpness": 5.489146133186296e-06, + "layer_5_sharpness": 2.838708496710751e-06, + "layer_6_sharpness": 5.393043466028757e-06, + "layer_7_sharpness": 3.7397394407889806e-06, + "layer_8_sharpness": 8.008811164472718e-06, + "layer_9_sharpness": 8.633553989056963e-06, + "layer_10_sharpness": 1.2935428458149545e-05, + "layer_11_sharpness": 9.596274139767047e-06, + "layer_12_sharpness": 0.0001361957547487691 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..35eb934ca68c0a7afafd3b12706d9e401cc9bbbc --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 39.01567459106445, + "total_l1_linf_norm": 332781.84375, + "total_spectral_norm": 39.01566696166992, + "layer_1_update_fnorm": 8.528254508972168, + "layer_1_max_l1_linf_norm": 7.953659534454346, + "layer_1_max_spectral_norm": 1.1682941913604736, + "layer_2_update_fnorm": 6.9176130294799805, + "layer_2_max_l1_linf_norm": 5.343991279602051, + "layer_2_max_spectral_norm": 0.7637581825256348, + "layer_3_update_fnorm": 7.521257400512695, + "layer_3_max_l1_linf_norm": 6.439504623413086, + "layer_3_max_spectral_norm": 0.9030463099479675, + "layer_4_update_fnorm": 
8.206535339355469, + "layer_4_max_l1_linf_norm": 7.498167991638184, + "layer_4_max_spectral_norm": 0.9810678362846375, + "layer_5_update_fnorm": 8.774606704711914, + "layer_5_max_l1_linf_norm": 8.7156400680542, + "layer_5_max_spectral_norm": 0.9755151271820068, + "layer_6_update_fnorm": 8.88702392578125, + "layer_6_max_l1_linf_norm": 8.633501052856445, + "layer_6_max_spectral_norm": 1.0139358043670654, + "layer_7_update_fnorm": 9.368619918823242, + "layer_7_max_l1_linf_norm": 9.486507415771484, + "layer_7_max_spectral_norm": 0.9953322410583496, + "layer_8_update_fnorm": 9.36822509765625, + "layer_8_max_l1_linf_norm": 10.148187637329102, + "layer_8_max_spectral_norm": 1.264366626739502, + "layer_9_update_fnorm": 9.433427810668945, + "layer_9_max_l1_linf_norm": 11.070154190063477, + "layer_9_max_spectral_norm": 1.4930251836776733, + "layer_10_update_fnorm": 9.325082778930664, + "layer_10_max_l1_linf_norm": 11.047697067260742, + "layer_10_max_spectral_norm": 1.5774873495101929, + "layer_11_update_fnorm": 9.56619644165039, + "layer_11_max_l1_linf_norm": 11.032780647277832, + "layer_11_max_spectral_norm": 1.5995228290557861, + "layer_12_update_fnorm": 8.655387878417969, + "layer_12_max_l1_linf_norm": 11.422429084777832, + "layer_12_max_spectral_norm": 1.8715720176696777, + "total_sharpness": 4.275619357940741e-05, + "ip_v_neg_g": 0.03568977490067482, + "cos_v_neg_g": 0.0020732556004077196, + "v_norm": 39.01567459106445, + "g_norm": 0.4412165880203247, + "hv_norm": 0.5043609738349915, + "cos_v_hv": 0.0033074759412556887, + "hg_norm": 4.798186779022217, + "cos_g_hg": 0.7581779360771179, + "v_parallel_norm": 0.016969988122582436, + "v_perp_norm": 39.01567077636719, + "layer_1_v_norm": 8.528254508972168, + "layer_1_cos_v_neg_g": 0.012907066382467747, + "layer_2_v_norm": 6.9176130294799805, + "layer_2_cos_v_neg_g": 0.007216689642518759, + "layer_3_v_norm": 7.5212578773498535, + "layer_3_cos_v_neg_g": 0.006553520914167166, + "layer_4_v_norm": 8.206535339355469, + "layer_4_cos_v_neg_g": 0.006591217592358589, + "layer_5_v_norm": 8.774606704711914, + "layer_5_cos_v_neg_g": 0.00552252447232604, + "layer_6_v_norm": 8.88702392578125, + "layer_6_cos_v_neg_g": 0.005909528583288193, + "layer_7_v_norm": 9.368619918823242, + "layer_7_cos_v_neg_g": 0.007430813275277615, + "layer_8_v_norm": 9.36822509765625, + "layer_8_cos_v_neg_g": 0.010228794068098068, + "layer_9_v_norm": 9.433427810668945, + "layer_9_cos_v_neg_g": 0.014620325528085232, + "layer_10_v_norm": 9.325082778930664, + "layer_10_cos_v_neg_g": 0.019306164234876633, + "layer_11_v_norm": 9.56619644165039, + "layer_11_cos_v_neg_g": 0.022425517439842224, + "layer_12_v_norm": 8.655387878417969, + "layer_12_cos_v_neg_g": 0.048237040638923645, + "layer_1_sharpness": 6.729901087965118e-06, + "layer_2_sharpness": 1.4518440138999722e-06, + "layer_3_sharpness": 2.751572992565343e-06, + "layer_4_sharpness": 2.731917675191653e-06, + "layer_5_sharpness": 2.0351644707261585e-06, + "layer_6_sharpness": 1.8154375993617577e-06, + "layer_7_sharpness": 2.3130337467591744e-06, + "layer_8_sharpness": 5.059353043179726e-06, + "layer_9_sharpness": 8.797622285783291e-06, + "layer_10_sharpness": 1.2860777133028023e-05, + "layer_11_sharpness": 9.089199011214077e-06, + "layer_12_sharpness": 0.00014932232443243265 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_4500.json new file mode 100644 index 
0000000000000000000000000000000000000000..b095c76802b2383e8e7ad9997aa45a692b01a95a --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 39.984806060791016, + "total_l1_linf_norm": 342240.5, + "total_spectral_norm": 39.984798431396484, + "layer_1_update_fnorm": 9.04567813873291, + "layer_1_max_l1_linf_norm": 8.664958000183105, + "layer_1_max_spectral_norm": 1.2288531064987183, + "layer_2_update_fnorm": 7.392186641693115, + "layer_2_max_l1_linf_norm": 5.734933853149414, + "layer_2_max_spectral_norm": 0.9699603915214539, + "layer_3_update_fnorm": 7.985634803771973, + "layer_3_max_l1_linf_norm": 6.700720310211182, + "layer_3_max_spectral_norm": 0.91496342420578, + "layer_4_update_fnorm": 8.580056190490723, + "layer_4_max_l1_linf_norm": 7.923667907714844, + "layer_4_max_spectral_norm": 0.983483076095581, + "layer_5_update_fnorm": 9.11280632019043, + "layer_5_max_l1_linf_norm": 8.951525688171387, + "layer_5_max_spectral_norm": 0.9880123138427734, + "layer_6_update_fnorm": 9.222453117370605, + "layer_6_max_l1_linf_norm": 9.00418472290039, + "layer_6_max_spectral_norm": 1.0388484001159668, + "layer_7_update_fnorm": 9.723886489868164, + "layer_7_max_l1_linf_norm": 9.621810913085938, + "layer_7_max_spectral_norm": 1.003995656967163, + "layer_8_update_fnorm": 9.640007972717285, + "layer_8_max_l1_linf_norm": 10.173225402832031, + "layer_8_max_spectral_norm": 1.1581169366836548, + "layer_9_update_fnorm": 9.608585357666016, + "layer_9_max_l1_linf_norm": 10.230344772338867, + "layer_9_max_spectral_norm": 1.312367558479309, + "layer_10_update_fnorm": 9.472434043884277, + "layer_10_max_l1_linf_norm": 13.210613250732422, + "layer_10_max_spectral_norm": 1.572539210319519, + "layer_11_update_fnorm": 9.754843711853027, + "layer_11_max_l1_linf_norm": 11.58830451965332, + "layer_11_max_spectral_norm": 1.6196503639221191, + "layer_12_update_fnorm": 8.998865127563477, + "layer_12_max_l1_linf_norm": 11.401966094970703, + "layer_12_max_spectral_norm": 1.976579189300537, + "total_sharpness": 3.033116809092462e-05, + "ip_v_neg_g": 0.030831189826130867, + "cos_v_neg_g": 0.0019430528627708554, + "v_norm": 39.984806060791016, + "g_norm": 0.39683565497398376, + "hv_norm": 0.2393348664045334, + "cos_v_hv": 0.005067317746579647, + "hg_norm": 4.2967529296875, + "cos_g_hg": 0.6166208386421204, + "v_parallel_norm": 0.013908668421208858, + "v_perp_norm": 39.98480224609375, + "layer_1_v_norm": 9.04567813873291, + "layer_1_cos_v_neg_g": 0.013962045311927795, + "layer_2_v_norm": 7.392186641693115, + "layer_2_cos_v_neg_g": 0.005941143725067377, + "layer_3_v_norm": 7.985635280609131, + "layer_3_cos_v_neg_g": 0.0062198578380048275, + "layer_4_v_norm": 8.580056190490723, + "layer_4_cos_v_neg_g": 0.006145539693534374, + "layer_5_v_norm": 9.11280632019043, + "layer_5_cos_v_neg_g": 0.006058147642761469, + "layer_6_v_norm": 9.222452163696289, + "layer_6_cos_v_neg_g": 0.006874010432511568, + "layer_7_v_norm": 9.723886489868164, + "layer_7_cos_v_neg_g": 0.008636552840471268, + "layer_8_v_norm": 9.640007972717285, + "layer_8_cos_v_neg_g": 0.010019593872129917, + "layer_9_v_norm": 9.608585357666016, + "layer_9_cos_v_neg_g": 0.010247860103845596, + "layer_10_v_norm": 9.472434043884277, + "layer_10_cos_v_neg_g": 0.015462780371308327, + "layer_11_v_norm": 9.754843711853027, + "layer_11_cos_v_neg_g": 0.01811087504029274, + "layer_12_v_norm": 8.998865127563477, + "layer_12_cos_v_neg_g": 0.04004911705851555, + "layer_1_sharpness": 
6.730834229529137e-06, + "layer_2_sharpness": 9.597098369340529e-07, + "layer_3_sharpness": 2.473178710715729e-06, + "layer_4_sharpness": 2.674185225259862e-06, + "layer_5_sharpness": 1.2730188245768659e-06, + "layer_6_sharpness": 1.5339035144279478e-06, + "layer_7_sharpness": 2.2377028017217526e-06, + "layer_8_sharpness": 4.013934812974185e-06, + "layer_9_sharpness": 5.077414698462235e-06, + "layer_10_sharpness": 8.941656233218964e-06, + "layer_11_sharpness": 6.6092256929550786e-06, + "layer_12_sharpness": 8.896528743207455e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..c1b648c4ae33e9974236540ac51985e3856ef5e2 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 24.99397087097168, + "total_l1_linf_norm": 210012.09375, + "total_spectral_norm": 24.99397087097168, + "layer_1_update_fnorm": 4.128811836242676, + "layer_1_max_l1_linf_norm": 11.814886093139648, + "layer_1_max_spectral_norm": 1.4975162744522095, + "layer_2_update_fnorm": 4.187743186950684, + "layer_2_max_l1_linf_norm": 5.2193450927734375, + "layer_2_max_spectral_norm": 0.8030408024787903, + "layer_3_update_fnorm": 4.6571736335754395, + "layer_3_max_l1_linf_norm": 6.036068916320801, + "layer_3_max_spectral_norm": 0.8828380703926086, + "layer_4_update_fnorm": 4.771152973175049, + "layer_4_max_l1_linf_norm": 6.455528736114502, + "layer_4_max_spectral_norm": 0.9285047650337219, + "layer_5_update_fnorm": 4.897753715515137, + "layer_5_max_l1_linf_norm": 6.98193883895874, + "layer_5_max_spectral_norm": 0.9678908586502075, + "layer_6_update_fnorm": 4.991569519042969, + "layer_6_max_l1_linf_norm": 7.329577445983887, + "layer_6_max_spectral_norm": 1.0106573104858398, + "layer_7_update_fnorm": 5.298153400421143, + "layer_7_max_l1_linf_norm": 7.50823974609375, + "layer_7_max_spectral_norm": 1.0065420866012573, + "layer_8_update_fnorm": 5.578037738800049, + "layer_8_max_l1_linf_norm": 8.38778305053711, + "layer_8_max_spectral_norm": 1.1699333190917969, + "layer_9_update_fnorm": 5.782295227050781, + "layer_9_max_l1_linf_norm": 9.001924514770508, + "layer_9_max_spectral_norm": 1.240782618522644, + "layer_10_update_fnorm": 5.9284892082214355, + "layer_10_max_l1_linf_norm": 12.43722152709961, + "layer_10_max_spectral_norm": 1.3674578666687012, + "layer_11_update_fnorm": 5.77846097946167, + "layer_11_max_l1_linf_norm": 18.254152297973633, + "layer_11_max_spectral_norm": 1.4955236911773682, + "layer_12_update_fnorm": 5.517392158508301, + "layer_12_max_l1_linf_norm": 20.434295654296875, + "layer_12_max_spectral_norm": 1.4831671714782715, + "total_sharpness": 0.00045187739306129515, + "ip_v_neg_g": 0.1882983297109604, + "cos_v_neg_g": 0.01253726426512003, + "v_norm": 24.99397087097168, + "g_norm": 0.600908637046814, + "hv_norm": 0.6526691913604736, + "cos_v_hv": 0.017304647713899612, + "hg_norm": 8.103447914123535, + "cos_g_hg": 0.616873025894165, + "v_parallel_norm": 0.03475893288850784, + "v_perp_norm": 24.993946075439453, + "layer_1_v_norm": 4.128811836242676, + "layer_1_cos_v_neg_g": 0.24940802156925201, + "layer_2_v_norm": 4.187743186950684, + "layer_2_cos_v_neg_g": 0.05300010368227959, + "layer_3_v_norm": 4.657173156738281, + "layer_3_cos_v_neg_g": 0.06429232656955719, + 
"layer_4_v_norm": 4.771152973175049, + "layer_4_cos_v_neg_g": 0.05329558998346329, + "layer_5_v_norm": 4.897753715515137, + "layer_5_cos_v_neg_g": 0.050454623997211456, + "layer_6_v_norm": 4.991569995880127, + "layer_6_cos_v_neg_g": 0.07011818885803223, + "layer_7_v_norm": 5.298153400421143, + "layer_7_cos_v_neg_g": 0.047691021114587784, + "layer_8_v_norm": 5.578037738800049, + "layer_8_cos_v_neg_g": 0.04664261266589165, + "layer_9_v_norm": 5.782295227050781, + "layer_9_cos_v_neg_g": 0.04584507644176483, + "layer_10_v_norm": 5.9284892082214355, + "layer_10_cos_v_neg_g": 0.04148855060338974, + "layer_11_v_norm": 5.77846097946167, + "layer_11_cos_v_neg_g": 0.04732219874858856, + "layer_12_v_norm": 5.517392158508301, + "layer_12_cos_v_neg_g": 0.05256984010338783, + "layer_1_sharpness": 0.0017018310027197003, + "layer_2_sharpness": 9.002024307847023e-05, + "layer_3_sharpness": 6.055400081095286e-05, + "layer_4_sharpness": 3.27162524627056e-05, + "layer_5_sharpness": 2.8708007448585704e-05, + "layer_6_sharpness": 8.276426524389535e-05, + "layer_7_sharpness": 4.298320709494874e-05, + "layer_8_sharpness": 4.8132409574463964e-05, + "layer_9_sharpness": 5.0089543947251514e-05, + "layer_10_sharpness": 6.123966159066185e-05, + "layer_11_sharpness": 9.041441808221862e-05, + "layer_12_sharpness": 0.00016488716937601566 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..657dcd7fe29805a602883e551d18952cd26d99a2 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 39.70651626586914, + "total_l1_linf_norm": 339903.6875, + "total_spectral_norm": 39.706512451171875, + "layer_1_update_fnorm": 8.962597846984863, + "layer_1_max_l1_linf_norm": 8.774433135986328, + "layer_1_max_spectral_norm": 1.2054929733276367, + "layer_2_update_fnorm": 7.34743070602417, + "layer_2_max_l1_linf_norm": 5.430080413818359, + "layer_2_max_spectral_norm": 0.9664145708084106, + "layer_3_update_fnorm": 7.964141845703125, + "layer_3_max_l1_linf_norm": 6.601642608642578, + "layer_3_max_spectral_norm": 0.9024654626846313, + "layer_4_update_fnorm": 8.552899360656738, + "layer_4_max_l1_linf_norm": 7.504551887512207, + "layer_4_max_spectral_norm": 0.9693707823753357, + "layer_5_update_fnorm": 9.141180992126465, + "layer_5_max_l1_linf_norm": 8.750895500183105, + "layer_5_max_spectral_norm": 1.0010223388671875, + "layer_6_update_fnorm": 9.207698822021484, + "layer_6_max_l1_linf_norm": 9.076723098754883, + "layer_6_max_spectral_norm": 1.1472439765930176, + "layer_7_update_fnorm": 9.668807983398438, + "layer_7_max_l1_linf_norm": 9.470308303833008, + "layer_7_max_spectral_norm": 0.9393583536148071, + "layer_8_update_fnorm": 9.630575180053711, + "layer_8_max_l1_linf_norm": 9.661456108093262, + "layer_8_max_spectral_norm": 1.0747931003570557, + "layer_9_update_fnorm": 9.605703353881836, + "layer_9_max_l1_linf_norm": 10.929694175720215, + "layer_9_max_spectral_norm": 1.3338087797164917, + "layer_10_update_fnorm": 9.467041969299316, + "layer_10_max_l1_linf_norm": 10.183415412902832, + "layer_10_max_spectral_norm": 1.3742538690567017, + "layer_11_update_fnorm": 9.658491134643555, + "layer_11_max_l1_linf_norm": 10.86756706237793, + "layer_11_max_spectral_norm": 1.3267675638198853, + 
"layer_12_update_fnorm": 8.751675605773926, + "layer_12_max_l1_linf_norm": 11.777677536010742, + "layer_12_max_spectral_norm": 1.5011454820632935, + "total_sharpness": 2.2893878849572502e-05, + "ip_v_neg_g": 0.015406167134642601, + "cos_v_neg_g": 0.0009309985325671732, + "v_norm": 39.70651626586914, + "g_norm": 0.4167579114437103, + "hv_norm": 0.2117585986852646, + "cos_v_hv": 0.004292794968932867, + "hg_norm": 3.6240499019622803, + "cos_g_hg": 0.5674128532409668, + "v_parallel_norm": 0.008681138046085835, + "v_perp_norm": 39.70651626586914, + "layer_1_v_norm": 8.962597846984863, + "layer_1_cos_v_neg_g": 0.005845873150974512, + "layer_2_v_norm": 7.34743070602417, + "layer_2_cos_v_neg_g": 0.0033688987605273724, + "layer_3_v_norm": 7.964142322540283, + "layer_3_cos_v_neg_g": 0.0030476844403892756, + "layer_4_v_norm": 8.552899360656738, + "layer_4_cos_v_neg_g": 0.002151666907593608, + "layer_5_v_norm": 9.141180992126465, + "layer_5_cos_v_neg_g": 0.0037523717619478703, + "layer_6_v_norm": 9.207698822021484, + "layer_6_cos_v_neg_g": 0.0045745414681732655, + "layer_7_v_norm": 9.668807983398438, + "layer_7_cos_v_neg_g": 0.00639310572296381, + "layer_8_v_norm": 9.630575180053711, + "layer_8_cos_v_neg_g": 0.004962562583386898, + "layer_9_v_norm": 9.605703353881836, + "layer_9_cos_v_neg_g": 0.006701644975692034, + "layer_10_v_norm": 9.467041969299316, + "layer_10_cos_v_neg_g": 0.00783179048448801, + "layer_11_v_norm": 9.658490180969238, + "layer_11_cos_v_neg_g": 0.007197342347353697, + "layer_12_v_norm": 8.751675605773926, + "layer_12_cos_v_neg_g": 0.008831281214952469, + "layer_1_sharpness": 4.57525129604619e-06, + "layer_2_sharpness": 3.686134277813835e-06, + "layer_3_sharpness": 3.177335656801006e-06, + "layer_4_sharpness": 3.7189761314948555e-06, + "layer_5_sharpness": 2.2487772639578907e-06, + "layer_6_sharpness": 1.8902048850577557e-06, + "layer_7_sharpness": 2.0820082227146486e-06, + "layer_8_sharpness": 3.7088650515215704e-06, + "layer_9_sharpness": 4.723046004073694e-06, + "layer_10_sharpness": 7.637473572685849e-06, + "layer_11_sharpness": 4.314429588703206e-06, + "layer_12_sharpness": 4.358427759143524e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..ad6d14f7d7239f7252ebe56c6cbb2381b3616c9a --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 39.93121337890625, + "total_l1_linf_norm": 342275.125, + "total_spectral_norm": 39.93121337890625, + "layer_1_update_fnorm": 9.126703262329102, + "layer_1_max_l1_linf_norm": 8.592493057250977, + "layer_1_max_spectral_norm": 1.2514021396636963, + "layer_2_update_fnorm": 7.38647985458374, + "layer_2_max_l1_linf_norm": 5.549566745758057, + "layer_2_max_spectral_norm": 0.9389943480491638, + "layer_3_update_fnorm": 7.998419284820557, + "layer_3_max_l1_linf_norm": 6.796835899353027, + "layer_3_max_spectral_norm": 0.970569908618927, + "layer_4_update_fnorm": 8.554113388061523, + "layer_4_max_l1_linf_norm": 7.65279483795166, + "layer_4_max_spectral_norm": 0.9065030813217163, + "layer_5_update_fnorm": 9.163482666015625, + "layer_5_max_l1_linf_norm": 8.91065502166748, + "layer_5_max_spectral_norm": 0.9733986854553223, + "layer_6_update_fnorm": 9.163917541503906, + "layer_6_max_l1_linf_norm": 
9.007157325744629, + "layer_6_max_spectral_norm": 1.011317253112793, + "layer_7_update_fnorm": 9.701115608215332, + "layer_7_max_l1_linf_norm": 9.518674850463867, + "layer_7_max_spectral_norm": 0.9353649616241455, + "layer_8_update_fnorm": 9.751436233520508, + "layer_8_max_l1_linf_norm": 9.97195053100586, + "layer_8_max_spectral_norm": 1.1432437896728516, + "layer_9_update_fnorm": 9.740861892700195, + "layer_9_max_l1_linf_norm": 10.692794799804688, + "layer_9_max_spectral_norm": 1.2623844146728516, + "layer_10_update_fnorm": 9.606926918029785, + "layer_10_max_l1_linf_norm": 11.127504348754883, + "layer_10_max_spectral_norm": 1.4585223197937012, + "layer_11_update_fnorm": 9.831249237060547, + "layer_11_max_l1_linf_norm": 10.885972023010254, + "layer_11_max_spectral_norm": 1.4147824048995972, + "layer_12_update_fnorm": 8.939737319946289, + "layer_12_max_l1_linf_norm": 11.221624374389648, + "layer_12_max_spectral_norm": 1.7225868701934814, + "total_sharpness": 2.1100899175507948e-05, + "ip_v_neg_g": 0.02378590777516365, + "cos_v_neg_g": 0.0007606486906297505, + "v_norm": 39.93121337890625, + "g_norm": 0.7831105589866638, + "hv_norm": 0.3029933571815491, + "cos_v_hv": 0.002780867973342538, + "hg_norm": 6.082784652709961, + "cos_g_hg": 0.6872897148132324, + "v_parallel_norm": 0.00862606056034565, + "v_perp_norm": 39.93121337890625, + "layer_1_v_norm": 9.126703262329102, + "layer_1_cos_v_neg_g": 0.0032831579446792603, + "layer_2_v_norm": 7.38647985458374, + "layer_2_cos_v_neg_g": -0.0009124381467700005, + "layer_3_v_norm": 7.998419284820557, + "layer_3_cos_v_neg_g": 0.0020357612520456314, + "layer_4_v_norm": 8.554113388061523, + "layer_4_cos_v_neg_g": 0.001513513969257474, + "layer_5_v_norm": 9.163482666015625, + "layer_5_cos_v_neg_g": 0.0022800054866820574, + "layer_6_v_norm": 9.163917541503906, + "layer_6_cos_v_neg_g": 0.0019894533324986696, + "layer_7_v_norm": 9.701115608215332, + "layer_7_cos_v_neg_g": 0.002393025206401944, + "layer_8_v_norm": 9.751436233520508, + "layer_8_cos_v_neg_g": 0.003831585170701146, + "layer_9_v_norm": 9.740861892700195, + "layer_9_cos_v_neg_g": 0.0046547409147024155, + "layer_10_v_norm": 9.606926918029785, + "layer_10_cos_v_neg_g": 0.00639543728902936, + "layer_11_v_norm": 9.831249237060547, + "layer_11_cos_v_neg_g": 0.007632361259311438, + "layer_12_v_norm": 8.939737319946289, + "layer_12_cos_v_neg_g": 0.014864413999021053, + "layer_1_sharpness": 4.179458301223349e-06, + "layer_2_sharpness": 1.70029431956209e-06, + "layer_3_sharpness": 2.111112962666084e-06, + "layer_4_sharpness": 1.7687146964817657e-06, + "layer_5_sharpness": 1.165811681858031e-06, + "layer_6_sharpness": 8.316937396557478e-07, + "layer_7_sharpness": 1.031426450026629e-06, + "layer_8_sharpness": 2.2009455733495997e-06, + "layer_9_sharpness": 3.2505283797945594e-06, + "layer_10_sharpness": 5.266595962893916e-06, + "layer_11_sharpness": 3.2899793040996883e-06, + "layer_12_sharpness": 5.6364322517765686e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..1e1e750686cf10efd272851b5fd64791863dcca0 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 39.904632568359375, + "total_l1_linf_norm": 342148.0625, + "total_spectral_norm": 
39.904640197753906, + "layer_1_update_fnorm": 9.168106079101562, + "layer_1_max_l1_linf_norm": 8.39889144897461, + "layer_1_max_spectral_norm": 1.2393697500228882, + "layer_2_update_fnorm": 7.475289344787598, + "layer_2_max_l1_linf_norm": 5.645477294921875, + "layer_2_max_spectral_norm": 0.9109386205673218, + "layer_3_update_fnorm": 8.042820930480957, + "layer_3_max_l1_linf_norm": 6.872808933258057, + "layer_3_max_spectral_norm": 0.9218751788139343, + "layer_4_update_fnorm": 8.550873756408691, + "layer_4_max_l1_linf_norm": 7.740034580230713, + "layer_4_max_spectral_norm": 0.9234880805015564, + "layer_5_update_fnorm": 9.146319389343262, + "layer_5_max_l1_linf_norm": 8.955154418945312, + "layer_5_max_spectral_norm": 0.9511651992797852, + "layer_6_update_fnorm": 9.242767333984375, + "layer_6_max_l1_linf_norm": 8.983022689819336, + "layer_6_max_spectral_norm": 1.0105403661727905, + "layer_7_update_fnorm": 9.776762008666992, + "layer_7_max_l1_linf_norm": 9.52405834197998, + "layer_7_max_spectral_norm": 0.9557464718818665, + "layer_8_update_fnorm": 9.768009185791016, + "layer_8_max_l1_linf_norm": 10.212188720703125, + "layer_8_max_spectral_norm": 1.1667418479919434, + "layer_9_update_fnorm": 9.794328689575195, + "layer_9_max_l1_linf_norm": 10.237468719482422, + "layer_9_max_spectral_norm": 1.2867729663848877, + "layer_10_update_fnorm": 9.573173522949219, + "layer_10_max_l1_linf_norm": 10.683208465576172, + "layer_10_max_spectral_norm": 1.373642921447754, + "layer_11_update_fnorm": 9.79654312133789, + "layer_11_max_l1_linf_norm": 10.492956161499023, + "layer_11_max_spectral_norm": 1.5025264024734497, + "layer_12_update_fnorm": 8.943037986755371, + "layer_12_max_l1_linf_norm": 11.266754150390625, + "layer_12_max_spectral_norm": 1.6859900951385498, + "total_sharpness": 2.3882914319983684e-05, + "ip_v_neg_g": 0.01733541116118431, + "cos_v_neg_g": 0.001251125242561102, + "v_norm": 39.904632568359375, + "g_norm": 0.34722423553466797, + "hv_norm": 0.29059967398643494, + "cos_v_hv": 0.0032795595470815897, + "hg_norm": 2.517029047012329, + "cos_g_hg": 0.5821248888969421, + "v_parallel_norm": 0.013539299368858337, + "v_perp_norm": 39.90462875366211, + "layer_1_v_norm": 9.168106079101562, + "layer_1_cos_v_neg_g": 0.009121916256844997, + "layer_2_v_norm": 7.475289344787598, + "layer_2_cos_v_neg_g": 0.002368123037740588, + "layer_3_v_norm": 8.042820930480957, + "layer_3_cos_v_neg_g": 0.0034415950067341328, + "layer_4_v_norm": 8.550873756408691, + "layer_4_cos_v_neg_g": 0.003948056139051914, + "layer_5_v_norm": 9.146319389343262, + "layer_5_cos_v_neg_g": 0.0027576570864766836, + "layer_6_v_norm": 9.242766380310059, + "layer_6_cos_v_neg_g": 0.004225816112011671, + "layer_7_v_norm": 9.776762008666992, + "layer_7_cos_v_neg_g": 0.005644889548420906, + "layer_8_v_norm": 9.768009185791016, + "layer_8_cos_v_neg_g": 0.00613441551104188, + "layer_9_v_norm": 9.794328689575195, + "layer_9_cos_v_neg_g": 0.007289385423064232, + "layer_10_v_norm": 9.573173522949219, + "layer_10_cos_v_neg_g": 0.008993572555482388, + "layer_11_v_norm": 9.796544075012207, + "layer_11_cos_v_neg_g": 0.009887337684631348, + "layer_12_v_norm": 8.943037986755371, + "layer_12_cos_v_neg_g": 0.021700648590922356, + "layer_1_sharpness": 5.846999556524679e-06, + "layer_2_sharpness": 2.5564629595464794e-06, + "layer_3_sharpness": 2.0303536985011306e-06, + "layer_4_sharpness": 1.5227470839818125e-06, + "layer_5_sharpness": 1.1289646408840781e-06, + "layer_6_sharpness": 1.33585319872509e-06, + "layer_7_sharpness": 1.680475634202594e-06, + 
"layer_8_sharpness": 4.031252501590643e-06, + "layer_9_sharpness": 4.904952220385894e-06, + "layer_10_sharpness": 6.707676220685244e-06, + "layer_11_sharpness": 4.915382760373177e-06, + "layer_12_sharpness": 5.4289717809297144e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..f91a5330d938b8b67b6a9994637c410433c370ca --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 40.752315521240234, + "total_l1_linf_norm": 350456.3125, + "total_spectral_norm": 40.7523307800293, + "layer_1_update_fnorm": 9.423094749450684, + "layer_1_max_l1_linf_norm": 9.078433990478516, + "layer_1_max_spectral_norm": 1.2559888362884521, + "layer_2_update_fnorm": 7.729144096374512, + "layer_2_max_l1_linf_norm": 5.989262580871582, + "layer_2_max_spectral_norm": 1.142878532409668, + "layer_3_update_fnorm": 8.29582405090332, + "layer_3_max_l1_linf_norm": 7.126275062561035, + "layer_3_max_spectral_norm": 1.0507540702819824, + "layer_4_update_fnorm": 8.954861640930176, + "layer_4_max_l1_linf_norm": 8.066944122314453, + "layer_4_max_spectral_norm": 1.0329006910324097, + "layer_5_update_fnorm": 9.471963882446289, + "layer_5_max_l1_linf_norm": 9.523595809936523, + "layer_5_max_spectral_norm": 1.0454469919204712, + "layer_6_update_fnorm": 9.497936248779297, + "layer_6_max_l1_linf_norm": 9.585409164428711, + "layer_6_max_spectral_norm": 1.077091932296753, + "layer_7_update_fnorm": 9.973444938659668, + "layer_7_max_l1_linf_norm": 10.961795806884766, + "layer_7_max_spectral_norm": 1.1956826448440552, + "layer_8_update_fnorm": 9.978357315063477, + "layer_8_max_l1_linf_norm": 10.523757934570312, + "layer_8_max_spectral_norm": 1.426872730255127, + "layer_9_update_fnorm": 9.909741401672363, + "layer_9_max_l1_linf_norm": 10.632070541381836, + "layer_9_max_spectral_norm": 1.5855176448822021, + "layer_10_update_fnorm": 9.813529014587402, + "layer_10_max_l1_linf_norm": 12.07620620727539, + "layer_10_max_spectral_norm": 1.7401849031448364, + "layer_11_update_fnorm": 10.030081748962402, + "layer_11_max_l1_linf_norm": 11.9555025100708, + "layer_11_max_spectral_norm": 1.5163475275039673, + "layer_12_update_fnorm": 9.313385009765625, + "layer_12_max_l1_linf_norm": 13.217183113098145, + "layer_12_max_spectral_norm": 1.8707897663116455, + "total_sharpness": 3.391493373783305e-05, + "ip_v_neg_g": 0.025756992399692535, + "cos_v_neg_g": 0.0015655045863240957, + "v_norm": 40.752315521240234, + "g_norm": 0.4037276804447174, + "hv_norm": 0.3656153082847595, + "cos_v_hv": 0.0037802362348884344, + "hg_norm": 3.6609177589416504, + "cos_g_hg": 0.6216955780982971, + "v_parallel_norm": 0.012282717041671276, + "v_perp_norm": 40.752315521240234, + "layer_1_v_norm": 9.423094749450684, + "layer_1_cos_v_neg_g": 0.011446421965956688, + "layer_2_v_norm": 7.729144096374512, + "layer_2_cos_v_neg_g": 0.004480860661715269, + "layer_3_v_norm": 8.29582405090332, + "layer_3_cos_v_neg_g": 0.00465095229446888, + "layer_4_v_norm": 8.954861640930176, + "layer_4_cos_v_neg_g": 0.00563493836671114, + "layer_5_v_norm": 9.471963882446289, + "layer_5_cos_v_neg_g": 0.006586190313100815, + "layer_6_v_norm": 9.497936248779297, + "layer_6_cos_v_neg_g": 0.005528495647013187, + "layer_7_v_norm": 9.973444938659668, + 
"layer_7_cos_v_neg_g": 0.007477192208170891, + "layer_8_v_norm": 9.978357315063477, + "layer_8_cos_v_neg_g": 0.009196942672133446, + "layer_9_v_norm": 9.909741401672363, + "layer_9_cos_v_neg_g": 0.012928396463394165, + "layer_10_v_norm": 9.813529014587402, + "layer_10_cos_v_neg_g": 0.01679077558219433, + "layer_11_v_norm": 10.030081748962402, + "layer_11_cos_v_neg_g": 0.015555795282125473, + "layer_12_v_norm": 9.313385009765625, + "layer_12_cos_v_neg_g": 0.031061304733157158, + "layer_1_sharpness": 5.659249836753588e-06, + "layer_2_sharpness": 6.269256687119196e-07, + "layer_3_sharpness": 2.0967725049558794e-06, + "layer_4_sharpness": 2.755390596576035e-06, + "layer_5_sharpness": 1.8864944877350354e-06, + "layer_6_sharpness": 1.7387380921718432e-06, + "layer_7_sharpness": 2.226247943326598e-06, + "layer_8_sharpness": 5.612921995634679e-06, + "layer_9_sharpness": 7.376287157967454e-06, + "layer_10_sharpness": 1.28616375150159e-05, + "layer_11_sharpness": 5.807076377095655e-06, + "layer_12_sharpness": 7.067775732139125e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..41d13207903f362fb35b1e75ea65d89f9c7657f0 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 40.87453079223633, + "total_l1_linf_norm": 352127.6875, + "total_spectral_norm": 40.87453079223633, + "layer_1_update_fnorm": 9.683377265930176, + "layer_1_max_l1_linf_norm": 9.04678726196289, + "layer_1_max_spectral_norm": 1.4470927715301514, + "layer_2_update_fnorm": 8.101709365844727, + "layer_2_max_l1_linf_norm": 5.9399871826171875, + "layer_2_max_spectral_norm": 1.563978672027588, + "layer_3_update_fnorm": 8.491204261779785, + "layer_3_max_l1_linf_norm": 7.065697193145752, + "layer_3_max_spectral_norm": 1.1201837062835693, + "layer_4_update_fnorm": 8.946401596069336, + "layer_4_max_l1_linf_norm": 7.779168605804443, + "layer_4_max_spectral_norm": 0.9874519109725952, + "layer_5_update_fnorm": 9.453832626342773, + "layer_5_max_l1_linf_norm": 9.104215621948242, + "layer_5_max_spectral_norm": 1.0247780084609985, + "layer_6_update_fnorm": 9.417121887207031, + "layer_6_max_l1_linf_norm": 8.824737548828125, + "layer_6_max_spectral_norm": 1.0381306409835815, + "layer_7_update_fnorm": 9.953195571899414, + "layer_7_max_l1_linf_norm": 9.916872024536133, + "layer_7_max_spectral_norm": 1.035469889640808, + "layer_8_update_fnorm": 9.996532440185547, + "layer_8_max_l1_linf_norm": 10.397004127502441, + "layer_8_max_spectral_norm": 1.271863579750061, + "layer_9_update_fnorm": 9.994914054870605, + "layer_9_max_l1_linf_norm": 10.853830337524414, + "layer_9_max_spectral_norm": 1.428473949432373, + "layer_10_update_fnorm": 9.83454704284668, + "layer_10_max_l1_linf_norm": 11.822672843933105, + "layer_10_max_spectral_norm": 1.596564531326294, + "layer_11_update_fnorm": 10.018287658691406, + "layer_11_max_l1_linf_norm": 12.248495101928711, + "layer_11_max_spectral_norm": 1.4945179224014282, + "layer_12_update_fnorm": 9.296751022338867, + "layer_12_max_l1_linf_norm": 13.950459480285645, + "layer_12_max_spectral_norm": 1.8858338594436646, + "total_sharpness": 2.9151178750908002e-05, + "ip_v_neg_g": 0.021263068541884422, + "cos_v_neg_g": 0.0014612112427130342, + "v_norm": 40.87453079223633, + 
"g_norm": 0.3560083210468292, + "hv_norm": 0.35957491397857666, + "cos_v_hv": 0.0033137481659650803, + "hg_norm": 2.9735183715820312, + "cos_g_hg": 0.5598527193069458, + "v_parallel_norm": 0.014415689744055271, + "v_perp_norm": 40.87452697753906, + "layer_1_v_norm": 9.683377265930176, + "layer_1_cos_v_neg_g": 0.010236364789307117, + "layer_2_v_norm": 8.101709365844727, + "layer_2_cos_v_neg_g": 0.005591629073023796, + "layer_3_v_norm": 8.491203308105469, + "layer_3_cos_v_neg_g": 0.0053858207538723946, + "layer_4_v_norm": 8.946401596069336, + "layer_4_cos_v_neg_g": 0.00558385718613863, + "layer_5_v_norm": 9.453832626342773, + "layer_5_cos_v_neg_g": 0.0047496915794909, + "layer_6_v_norm": 9.417121887207031, + "layer_6_cos_v_neg_g": 0.004774671979248524, + "layer_7_v_norm": 9.953195571899414, + "layer_7_cos_v_neg_g": 0.005010591354221106, + "layer_8_v_norm": 9.996532440185547, + "layer_8_cos_v_neg_g": 0.006908050272613764, + "layer_9_v_norm": 9.994914054870605, + "layer_9_cos_v_neg_g": 0.008412362076342106, + "layer_10_v_norm": 9.83454704284668, + "layer_10_cos_v_neg_g": 0.010721389204263687, + "layer_11_v_norm": 10.018287658691406, + "layer_11_cos_v_neg_g": 0.010861382819712162, + "layer_12_v_norm": 9.296751022338867, + "layer_12_cos_v_neg_g": 0.02774229645729065, + "layer_1_sharpness": 6.730220775352791e-06, + "layer_2_sharpness": 5.215833880356513e-06, + "layer_3_sharpness": 3.7005650028731907e-06, + "layer_4_sharpness": 3.5482335078995675e-06, + "layer_5_sharpness": 1.8090917137669749e-06, + "layer_6_sharpness": 1.4092437368162791e-06, + "layer_7_sharpness": 1.7429446188543807e-06, + "layer_8_sharpness": 3.6565181744663278e-06, + "layer_9_sharpness": 4.968564098817296e-06, + "layer_10_sharpness": 8.286749107355718e-06, + "layer_11_sharpness": 4.60075716546271e-06, + "layer_12_sharpness": 6.729001324856654e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..a5a34571417025d0f00465bbd2a2037cdb0cdc8b --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 40.464176177978516, + "total_l1_linf_norm": 347539.6875, + "total_spectral_norm": 40.46417236328125, + "layer_1_update_fnorm": 9.232202529907227, + "layer_1_max_l1_linf_norm": 8.608604431152344, + "layer_1_max_spectral_norm": 1.2333933115005493, + "layer_2_update_fnorm": 7.736862659454346, + "layer_2_max_l1_linf_norm": 5.684786796569824, + "layer_2_max_spectral_norm": 0.9310053586959839, + "layer_3_update_fnorm": 8.354101181030273, + "layer_3_max_l1_linf_norm": 7.008947372436523, + "layer_3_max_spectral_norm": 0.9542015790939331, + "layer_4_update_fnorm": 8.825254440307617, + "layer_4_max_l1_linf_norm": 7.781475067138672, + "layer_4_max_spectral_norm": 0.9311200976371765, + "layer_5_update_fnorm": 9.398971557617188, + "layer_5_max_l1_linf_norm": 8.884519577026367, + "layer_5_max_spectral_norm": 0.9384658932685852, + "layer_6_update_fnorm": 9.414356231689453, + "layer_6_max_l1_linf_norm": 8.890244483947754, + "layer_6_max_spectral_norm": 1.0361969470977783, + "layer_7_update_fnorm": 9.92100715637207, + "layer_7_max_l1_linf_norm": 9.467270851135254, + "layer_7_max_spectral_norm": 0.9689149856567383, + "layer_8_update_fnorm": 9.89516830444336, + "layer_8_max_l1_linf_norm": 10.02619457244873, 
+ "layer_8_max_spectral_norm": 1.1571505069732666, + "layer_9_update_fnorm": 9.894716262817383, + "layer_9_max_l1_linf_norm": 9.959714889526367, + "layer_9_max_spectral_norm": 1.292535424232483, + "layer_10_update_fnorm": 9.752320289611816, + "layer_10_max_l1_linf_norm": 10.700315475463867, + "layer_10_max_spectral_norm": 1.4295872449874878, + "layer_11_update_fnorm": 9.973231315612793, + "layer_11_max_l1_linf_norm": 11.352448463439941, + "layer_11_max_spectral_norm": 1.3557915687561035, + "layer_12_update_fnorm": 9.212299346923828, + "layer_12_max_l1_linf_norm": 10.946090698242188, + "layer_12_max_spectral_norm": 1.6502399444580078, + "total_sharpness": 1.9573186364141293e-05, + "ip_v_neg_g": 0.01608426310122013, + "cos_v_neg_g": 0.0010003821225836873, + "v_norm": 40.464176177978516, + "g_norm": 0.39734208583831787, + "hv_norm": 0.21493007242679596, + "cos_v_hv": 0.0036849791649729013, + "hg_norm": 3.419788360595703, + "cos_g_hg": 0.6045277714729309, + "v_parallel_norm": 0.011814462020993233, + "v_perp_norm": 40.46417236328125, + "layer_1_v_norm": 9.232202529907227, + "layer_1_cos_v_neg_g": 0.005634746048599482, + "layer_2_v_norm": 7.736862659454346, + "layer_2_cos_v_neg_g": 0.0038879041094332933, + "layer_3_v_norm": 8.354101181030273, + "layer_3_cos_v_neg_g": 0.0033140324521809816, + "layer_4_v_norm": 8.825254440307617, + "layer_4_cos_v_neg_g": 0.002223680494353175, + "layer_5_v_norm": 9.398971557617188, + "layer_5_cos_v_neg_g": 0.0025884327478706837, + "layer_6_v_norm": 9.414356231689453, + "layer_6_cos_v_neg_g": 0.0026209414936602116, + "layer_7_v_norm": 9.92100715637207, + "layer_7_cos_v_neg_g": 0.003492491552606225, + "layer_8_v_norm": 9.89516830444336, + "layer_8_cos_v_neg_g": 0.004423950798809528, + "layer_9_v_norm": 9.894716262817383, + "layer_9_cos_v_neg_g": 0.004894543439149857, + "layer_10_v_norm": 9.752320289611816, + "layer_10_cos_v_neg_g": 0.007390755228698254, + "layer_11_v_norm": 9.973231315612793, + "layer_11_cos_v_neg_g": 0.008054230362176895, + "layer_12_v_norm": 9.212299346923828, + "layer_12_cos_v_neg_g": 0.02080528251826763, + "layer_1_sharpness": 3.521720827848185e-06, + "layer_2_sharpness": 1.4657097153758514e-06, + "layer_3_sharpness": 1.7775661262930953e-06, + "layer_4_sharpness": 1.749462626321474e-06, + "layer_5_sharpness": 1.0829681968971272e-06, + "layer_6_sharpness": 9.004393746181449e-07, + "layer_7_sharpness": 1.602036604708701e-06, + "layer_8_sharpness": 2.8321937861619517e-06, + "layer_9_sharpness": 3.845229457510868e-06, + "layer_10_sharpness": 6.676506473013433e-06, + "layer_11_sharpness": 4.118155175092397e-06, + "layer_12_sharpness": 3.839169949060306e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_8000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..5fb0021f920f5bf3cac5110d7568e4840a3e1509 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 40.3611946105957, + "total_l1_linf_norm": 346667.65625, + "total_spectral_norm": 40.3611946105957, + "layer_1_update_fnorm": 9.343765258789062, + "layer_1_max_l1_linf_norm": 8.837433815002441, + "layer_1_max_spectral_norm": 1.2563060522079468, + "layer_2_update_fnorm": 7.615365505218506, + "layer_2_max_l1_linf_norm": 5.714166641235352, + "layer_2_max_spectral_norm": 0.9488774538040161, + 
"layer_3_update_fnorm": 8.322059631347656, + "layer_3_max_l1_linf_norm": 7.06938362121582, + "layer_3_max_spectral_norm": 0.9349108934402466, + "layer_4_update_fnorm": 8.828170776367188, + "layer_4_max_l1_linf_norm": 7.880620002746582, + "layer_4_max_spectral_norm": 0.9248313903808594, + "layer_5_update_fnorm": 9.358600616455078, + "layer_5_max_l1_linf_norm": 9.117734909057617, + "layer_5_max_spectral_norm": 0.9383724331855774, + "layer_6_update_fnorm": 9.393899917602539, + "layer_6_max_l1_linf_norm": 9.110616683959961, + "layer_6_max_spectral_norm": 1.069985270500183, + "layer_7_update_fnorm": 9.925341606140137, + "layer_7_max_l1_linf_norm": 10.19441032409668, + "layer_7_max_spectral_norm": 1.0529112815856934, + "layer_8_update_fnorm": 9.938718795776367, + "layer_8_max_l1_linf_norm": 10.510619163513184, + "layer_8_max_spectral_norm": 1.1980715990066528, + "layer_9_update_fnorm": 9.907567977905273, + "layer_9_max_l1_linf_norm": 10.821717262268066, + "layer_9_max_spectral_norm": 1.3448379039764404, + "layer_10_update_fnorm": 9.783611297607422, + "layer_10_max_l1_linf_norm": 11.975769996643066, + "layer_10_max_spectral_norm": 1.569806694984436, + "layer_11_update_fnorm": 9.948370933532715, + "layer_11_max_l1_linf_norm": 11.721810340881348, + "layer_11_max_spectral_norm": 1.4398243427276611, + "layer_12_update_fnorm": 9.189706802368164, + "layer_12_max_l1_linf_norm": 12.877513885498047, + "layer_12_max_spectral_norm": 1.678799033164978, + "total_sharpness": 1.9531440557329915e-05, + "ip_v_neg_g": 0.013575456105172634, + "cos_v_neg_g": 0.0009102625772356987, + "v_norm": 40.3611946105957, + "g_norm": 0.36950790882110596, + "hv_norm": 0.20418590307235718, + "cos_v_hv": 0.0038607579190284014, + "hg_norm": 4.780814170837402, + "cos_g_hg": 0.5793532133102417, + "v_parallel_norm": 0.012537004426121712, + "v_perp_norm": 40.3611946105957, + "layer_1_v_norm": 9.343765258789062, + "layer_1_cos_v_neg_g": 0.006065205670893192, + "layer_2_v_norm": 7.615365505218506, + "layer_2_cos_v_neg_g": 0.0029183304868638515, + "layer_3_v_norm": 8.322059631347656, + "layer_3_cos_v_neg_g": 0.003539893077686429, + "layer_4_v_norm": 8.828170776367188, + "layer_4_cos_v_neg_g": 0.00513228215277195, + "layer_5_v_norm": 9.358600616455078, + "layer_5_cos_v_neg_g": 0.002402626909315586, + "layer_6_v_norm": 9.393899917602539, + "layer_6_cos_v_neg_g": 0.003322647651657462, + "layer_7_v_norm": 9.925341606140137, + "layer_7_cos_v_neg_g": 0.002646086271852255, + "layer_8_v_norm": 9.938718795776367, + "layer_8_cos_v_neg_g": 0.003043828532099724, + "layer_9_v_norm": 9.907567977905273, + "layer_9_cos_v_neg_g": 0.003312106244266033, + "layer_10_v_norm": 9.783611297607422, + "layer_10_cos_v_neg_g": 0.006533580832183361, + "layer_11_v_norm": 9.948370933532715, + "layer_11_cos_v_neg_g": 0.00801120139658451, + "layer_12_v_norm": 9.189706802368164, + "layer_12_cos_v_neg_g": 0.01634136587381363, + "layer_1_sharpness": 3.022093551408034e-06, + "layer_2_sharpness": 9.468681128055323e-07, + "layer_3_sharpness": 1.7395186659996398e-06, + "layer_4_sharpness": 2.976866880999296e-06, + "layer_5_sharpness": 1.5658877146051964e-06, + "layer_6_sharpness": 1.6442005517092184e-06, + "layer_7_sharpness": 1.7127596265709144e-06, + "layer_8_sharpness": 2.580865157142398e-06, + "layer_9_sharpness": 3.0959342893766006e-06, + "layer_10_sharpness": 5.569358108914457e-06, + "layer_11_sharpness": 3.66007179763983e-06, + "layer_12_sharpness": 3.2956038921838626e-05 +} \ No newline at end of file diff --git 
a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..a302a734f693d7f2d72ac37873508666fad1a7bf --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 39.54240798950195, + "total_l1_linf_norm": 338392.625, + "total_spectral_norm": 39.54240798950195, + "layer_1_update_fnorm": 8.823978424072266, + "layer_1_max_l1_linf_norm": 8.370750427246094, + "layer_1_max_spectral_norm": 1.249878168106079, + "layer_2_update_fnorm": 7.18619441986084, + "layer_2_max_l1_linf_norm": 5.617455005645752, + "layer_2_max_spectral_norm": 0.8879894018173218, + "layer_3_update_fnorm": 8.008051872253418, + "layer_3_max_l1_linf_norm": 6.7145609855651855, + "layer_3_max_spectral_norm": 0.9075552225112915, + "layer_4_update_fnorm": 8.536892890930176, + "layer_4_max_l1_linf_norm": 7.815497398376465, + "layer_4_max_spectral_norm": 0.8707146644592285, + "layer_5_update_fnorm": 9.088170051574707, + "layer_5_max_l1_linf_norm": 8.672100067138672, + "layer_5_max_spectral_norm": 0.9232845306396484, + "layer_6_update_fnorm": 9.14474868774414, + "layer_6_max_l1_linf_norm": 8.9013671875, + "layer_6_max_spectral_norm": 1.0513001680374146, + "layer_7_update_fnorm": 9.685403823852539, + "layer_7_max_l1_linf_norm": 9.790291786193848, + "layer_7_max_spectral_norm": 0.9446995258331299, + "layer_8_update_fnorm": 9.761898040771484, + "layer_8_max_l1_linf_norm": 11.024972915649414, + "layer_8_max_spectral_norm": 1.167816400527954, + "layer_9_update_fnorm": 9.764986038208008, + "layer_9_max_l1_linf_norm": 11.494709014892578, + "layer_9_max_spectral_norm": 1.4444513320922852, + "layer_10_update_fnorm": 9.605520248413086, + "layer_10_max_l1_linf_norm": 11.85866928100586, + "layer_10_max_spectral_norm": 1.4566012620925903, + "layer_11_update_fnorm": 9.639771461486816, + "layer_11_max_l1_linf_norm": 12.391284942626953, + "layer_11_max_spectral_norm": 1.3902122974395752, + "layer_12_update_fnorm": 8.765214920043945, + "layer_12_max_l1_linf_norm": 12.705286026000977, + "layer_12_max_spectral_norm": 1.731773853302002, + "total_sharpness": 1.9319877537782304e-05, + "ip_v_neg_g": 0.01673826389014721, + "cos_v_neg_g": 0.0011873641051352024, + "v_norm": 39.54240798950195, + "g_norm": 0.3565031588077545, + "hv_norm": 0.22540369629859924, + "cos_v_hv": 0.003389272140339017, + "hg_norm": 3.04154372215271, + "cos_g_hg": 0.5497943758964539, + "v_parallel_norm": 0.01157395076006651, + "v_perp_norm": 39.54240417480469, + "layer_1_v_norm": 8.823978424072266, + "layer_1_cos_v_neg_g": 0.008599351160228252, + "layer_2_v_norm": 7.18619441986084, + "layer_2_cos_v_neg_g": 0.005452215671539307, + "layer_3_v_norm": 8.008050918579102, + "layer_3_cos_v_neg_g": 0.005936997011303902, + "layer_4_v_norm": 8.536892890930176, + "layer_4_cos_v_neg_g": 0.005611902102828026, + "layer_5_v_norm": 9.088170051574707, + "layer_5_cos_v_neg_g": 0.005067817401140928, + "layer_6_v_norm": 9.144749641418457, + "layer_6_cos_v_neg_g": 0.004523445852100849, + "layer_7_v_norm": 9.685403823852539, + "layer_7_cos_v_neg_g": 0.00455509452149272, + "layer_8_v_norm": 9.761898040771484, + "layer_8_cos_v_neg_g": 0.00556775089353323, + "layer_9_v_norm": 9.764986038208008, + "layer_9_cos_v_neg_g": 0.007571710739284754, + "layer_10_v_norm": 9.605520248413086, + "layer_10_cos_v_neg_g": 
0.009286121465265751, + "layer_11_v_norm": 9.639771461486816, + "layer_11_cos_v_neg_g": 0.009781742468476295, + "layer_12_v_norm": 8.765214920043945, + "layer_12_cos_v_neg_g": 0.021911589428782463, + "layer_1_sharpness": 4.1698667700984515e-06, + "layer_2_sharpness": 2.731675522227306e-06, + "layer_3_sharpness": 2.7901191970158834e-06, + "layer_4_sharpness": 1.8821801859303378e-06, + "layer_5_sharpness": 1.2956305681655067e-06, + "layer_6_sharpness": 1.0108298056366039e-06, + "layer_7_sharpness": 1.4704645536767202e-06, + "layer_8_sharpness": 2.689926304810797e-06, + "layer_9_sharpness": 4.387570243125083e-06, + "layer_10_sharpness": 5.558187240239931e-06, + "layer_11_sharpness": 2.851484396160231e-06, + "layer_12_sharpness": 3.756960722967051e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..964b074d9596e18a48d1dca04ade3a5dad8e976b --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 40.41664123535156, + "total_l1_linf_norm": 347552.34375, + "total_spectral_norm": 40.41664123535156, + "layer_1_update_fnorm": 9.220688819885254, + "layer_1_max_l1_linf_norm": 8.623477935791016, + "layer_1_max_spectral_norm": 1.2809021472930908, + "layer_2_update_fnorm": 7.5974555015563965, + "layer_2_max_l1_linf_norm": 5.615267276763916, + "layer_2_max_spectral_norm": 0.9513744711875916, + "layer_3_update_fnorm": 8.28309440612793, + "layer_3_max_l1_linf_norm": 6.875391960144043, + "layer_3_max_spectral_norm": 0.913554847240448, + "layer_4_update_fnorm": 8.82702350616455, + "layer_4_max_l1_linf_norm": 7.808625221252441, + "layer_4_max_spectral_norm": 0.8781855702400208, + "layer_5_update_fnorm": 9.366394996643066, + "layer_5_max_l1_linf_norm": 9.587162017822266, + "layer_5_max_spectral_norm": 0.9511785507202148, + "layer_6_update_fnorm": 9.36405086517334, + "layer_6_max_l1_linf_norm": 9.409137725830078, + "layer_6_max_spectral_norm": 1.043925404548645, + "layer_7_update_fnorm": 9.96487808227539, + "layer_7_max_l1_linf_norm": 10.305976867675781, + "layer_7_max_spectral_norm": 0.9701632857322693, + "layer_8_update_fnorm": 9.935327529907227, + "layer_8_max_l1_linf_norm": 10.556865692138672, + "layer_8_max_spectral_norm": 1.2152738571166992, + "layer_9_update_fnorm": 9.963525772094727, + "layer_9_max_l1_linf_norm": 11.150256156921387, + "layer_9_max_spectral_norm": 1.430230975151062, + "layer_10_update_fnorm": 9.864466667175293, + "layer_10_max_l1_linf_norm": 11.58477783203125, + "layer_10_max_spectral_norm": 1.6156882047653198, + "layer_11_update_fnorm": 10.013016700744629, + "layer_11_max_l1_linf_norm": 11.393983840942383, + "layer_11_max_spectral_norm": 1.5272514820098877, + "layer_12_update_fnorm": 9.298271179199219, + "layer_12_max_l1_linf_norm": 13.237105369567871, + "layer_12_max_spectral_norm": 1.8539681434631348, + "total_sharpness": 2.1196154193603434e-05, + "ip_v_neg_g": 0.021307319402694702, + "cos_v_neg_g": 0.0012616731692105532, + "v_norm": 40.41664123535156, + "g_norm": 0.4178512692451477, + "hv_norm": 0.26974788308143616, + "cos_v_hv": 0.003175844205543399, + "hg_norm": 4.969134330749512, + "cos_g_hg": 0.6066377758979797, + "v_parallel_norm": 0.011502368375658989, + "v_perp_norm": 40.4166374206543, + "layer_1_v_norm": 9.220688819885254, + 
"layer_1_cos_v_neg_g": 0.009905273094773293, + "layer_2_v_norm": 7.5974555015563965, + "layer_2_cos_v_neg_g": 0.0057935556396842, + "layer_3_v_norm": 8.28309440612793, + "layer_3_cos_v_neg_g": 0.005955010186880827, + "layer_4_v_norm": 8.82702350616455, + "layer_4_cos_v_neg_g": 0.00513196038082242, + "layer_5_v_norm": 9.366394996643066, + "layer_5_cos_v_neg_g": 0.004498309921473265, + "layer_6_v_norm": 9.36405086517334, + "layer_6_cos_v_neg_g": 0.006326110567897558, + "layer_7_v_norm": 9.96487808227539, + "layer_7_cos_v_neg_g": 0.00492429593577981, + "layer_8_v_norm": 9.935327529907227, + "layer_8_cos_v_neg_g": 0.007003300357609987, + "layer_9_v_norm": 9.963525772094727, + "layer_9_cos_v_neg_g": 0.009719735011458397, + "layer_10_v_norm": 9.864466667175293, + "layer_10_cos_v_neg_g": 0.012437303550541401, + "layer_11_v_norm": 10.013016700744629, + "layer_11_cos_v_neg_g": 0.01419440470635891, + "layer_12_v_norm": 9.298271179199219, + "layer_12_cos_v_neg_g": 0.024242931976914406, + "layer_1_sharpness": 2.7509136089065578e-06, + "layer_2_sharpness": 1.6234199762266144e-08, + "layer_3_sharpness": 2.0786967525054934e-06, + "layer_4_sharpness": 1.8514537032388034e-06, + "layer_5_sharpness": 1.2622023177755182e-06, + "layer_6_sharpness": 8.800950581644429e-07, + "layer_7_sharpness": 1.3077496987534687e-06, + "layer_8_sharpness": 2.7433818559075007e-06, + "layer_9_sharpness": 3.998486590717221e-06, + "layer_10_sharpness": 6.540689810208278e-06, + "layer_11_sharpness": 4.499553142522927e-06, + "layer_12_sharpness": 4.612018892657943e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..2e63ac9229587824f6beed1f57303e7148107222 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 40.39336013793945, + "total_l1_linf_norm": 347283.15625, + "total_spectral_norm": 40.39336013793945, + "layer_1_update_fnorm": 9.336296081542969, + "layer_1_max_l1_linf_norm": 8.81220817565918, + "layer_1_max_spectral_norm": 1.2958756685256958, + "layer_2_update_fnorm": 7.715377330780029, + "layer_2_max_l1_linf_norm": 5.799460411071777, + "layer_2_max_spectral_norm": 1.146816611289978, + "layer_3_update_fnorm": 8.359894752502441, + "layer_3_max_l1_linf_norm": 6.872855186462402, + "layer_3_max_spectral_norm": 0.9900081753730774, + "layer_4_update_fnorm": 8.825213432312012, + "layer_4_max_l1_linf_norm": 8.053994178771973, + "layer_4_max_spectral_norm": 0.8952928185462952, + "layer_5_update_fnorm": 9.388104438781738, + "layer_5_max_l1_linf_norm": 8.906160354614258, + "layer_5_max_spectral_norm": 0.9765334129333496, + "layer_6_update_fnorm": 9.440472602844238, + "layer_6_max_l1_linf_norm": 9.178078651428223, + "layer_6_max_spectral_norm": 1.1062037944793701, + "layer_7_update_fnorm": 9.96009635925293, + "layer_7_max_l1_linf_norm": 9.781715393066406, + "layer_7_max_spectral_norm": 1.088011384010315, + "layer_8_update_fnorm": 9.992109298706055, + "layer_8_max_l1_linf_norm": 11.03036880493164, + "layer_8_max_spectral_norm": 1.3708345890045166, + "layer_9_update_fnorm": 9.958930969238281, + "layer_9_max_l1_linf_norm": 11.035573959350586, + "layer_9_max_spectral_norm": 1.5297350883483887, + "layer_10_update_fnorm": 9.76728343963623, + "layer_10_max_l1_linf_norm": 12.811532974243164, 
+ "layer_10_max_spectral_norm": 1.647717833518982, + "layer_11_update_fnorm": 9.954828262329102, + "layer_11_max_l1_linf_norm": 12.241861343383789, + "layer_11_max_spectral_norm": 1.5632083415985107, + "layer_12_update_fnorm": 9.289305686950684, + "layer_12_max_l1_linf_norm": 11.13717269897461, + "layer_12_max_spectral_norm": 1.8196496963500977, + "total_sharpness": 2.1627583919325843e-05, + "ip_v_neg_g": 0.013777466490864754, + "cos_v_neg_g": 0.0009739890811033547, + "v_norm": 40.39336013793945, + "g_norm": 0.35019123554229736, + "hv_norm": 0.19468499720096588, + "cos_v_hv": 0.004487304016947746, + "hg_norm": 2.6529109477996826, + "cos_g_hg": 0.542375922203064, + "v_parallel_norm": 0.010562675073742867, + "v_perp_norm": 40.39335632324219, + "layer_1_v_norm": 9.336296081542969, + "layer_1_cos_v_neg_g": 0.005477922968566418, + "layer_2_v_norm": 7.715377330780029, + "layer_2_cos_v_neg_g": 0.0031117231119424105, + "layer_3_v_norm": 8.359894752502441, + "layer_3_cos_v_neg_g": 0.002997850300744176, + "layer_4_v_norm": 8.825213432312012, + "layer_4_cos_v_neg_g": 0.003450975753366947, + "layer_5_v_norm": 9.388104438781738, + "layer_5_cos_v_neg_g": 0.002878415398299694, + "layer_6_v_norm": 9.440472602844238, + "layer_6_cos_v_neg_g": 0.002294608624652028, + "layer_7_v_norm": 9.96009635925293, + "layer_7_cos_v_neg_g": 0.002814238891005516, + "layer_8_v_norm": 9.992109298706055, + "layer_8_cos_v_neg_g": 0.004660068545490503, + "layer_9_v_norm": 9.958930969238281, + "layer_9_cos_v_neg_g": 0.00703025609254837, + "layer_10_v_norm": 9.76728343963623, + "layer_10_cos_v_neg_g": 0.007998235523700714, + "layer_11_v_norm": 9.954827308654785, + "layer_11_cos_v_neg_g": 0.009748860262334347, + "layer_12_v_norm": 9.289305686950684, + "layer_12_cos_v_neg_g": 0.018355779349803925, + "layer_1_sharpness": 1.9082399376202375e-06, + "layer_2_sharpness": 6.614876042476681e-07, + "layer_3_sharpness": 1.461601982555294e-06, + "layer_4_sharpness": 1.2795428574463585e-06, + "layer_5_sharpness": 8.716009460840723e-07, + "layer_6_sharpness": 9.936569540514029e-07, + "layer_7_sharpness": 1.3893467212255928e-06, + "layer_8_sharpness": 3.6199896840116708e-06, + "layer_9_sharpness": 4.49863637186354e-06, + "layer_10_sharpness": 7.071751497278456e-06, + "layer_11_sharpness": 4.3400650611147285e-06, + "layer_12_sharpness": 4.304924004827626e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/training_log.txt b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..be402d8f8a629d2647fc15377b2fe3b11d769b85 --- /dev/null +++ b/logs_sharpness_pure/adam_lr_search/opt_adam_alr_0.02_mlr_0.01_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. 
+ +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" 
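# --- Editor's sketch (not part of the original script): writing a toy shard in the .bin
# format parsed above, useful for testing the loader. It assumes exactly the layout checked by
# _peek_data_shard/_load_data_shard: a 256-word int32 header with magic 20240520, version 1 and
# the token count, followed by the tokens as uint16. The helper name is hypothetical.
def write_toy_data_shard(filename, tokens):
    import numpy as np
    header = np.zeros(256, dtype=np.int32)
    header[0] = 20240520                 # magic number the loader asserts on
    header[1] = 1                        # version the loader asserts on
    header[2] = len(tokens)              # claimed number of tokens
    with open(filename, "wb") as f:
        f.write(header.tobytes())
        f.write(np.asarray(tokens, dtype=np.uint16).tobytes())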
+ return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in 
range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. 
+ # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. 
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
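# --- Editor's sketch (illustrative only, not the script's own code): the quantity assembled
# below is the directional sharpness v^T H v / ||v||^2 of the averaged micro-batch loss, where
# v is the optimizer update direction and Hv comes from double backward, i.e.
# Hv = grad(<grad(loss), v>). The same first-order grads are reused for alignment metrics such
# as cos(v, -g) = <v, -g> / (||v|| * ||g||). A self-contained toy version (all names hypothetical):
def _toy_directional_sharpness():
    import torch
    model = torch.nn.Linear(4, 1)
    x, y = torch.randn(8, 4), torch.randn(8, 1)
    loss = torch.nn.functional.mse_loss(model(x), y)
    params = list(model.parameters())
    grads = torch.autograd.grad(loss, params, create_graph=True)
    v = [torch.randn_like(p) for p in params]          # stand-in for an optimizer update direction
    v_dot_g = sum((g * vi).sum() for g, vi in zip(grads, v))
    hv = torch.autograd.grad(v_dot_g, params)           # Hessian-vector product H v
    num = sum((h * vi).sum() for h, vi in zip(hv, v))   # v^T H v
    den = sum((vi * vi).sum() for vi in v)               # ||v||^2
    return (num / den).item()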
+ loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = ; cos_v_neg_g = / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = 
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
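# --- Editor's sketch (illustrative; the code that actually writes these files is not shown in
# this excerpt): the per-step sharpness_step_<step>.json files added elsewhere in this diff carry
# the same keys as the analysis_results dict, so persisting them amounts to a plain json.dump.
# Helper and argument names below are hypothetical.
def _save_sharpness_json(results, run_dir, step):
    import json, os
    path = os.path.join(run_dir, f"sharpness_step_{step}.json")
    with open(path, "w") as f:
        json.dump(results, f, indent=2)
    return path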
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
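+ # note: the gradients left behind by this reference backward() are cleared by the optimizer zero_grad() calls at the top of the training loop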
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
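+ # for the GPT-2 style models here this puts token/position embeddings and all attention/MLP weight matrices in the decay group, and layernorm gains plus biases in the no-decay group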
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, 
+ device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
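+ # note: this keeps one full extra copy of the parameters in memory until the update is reconstructed after optimizer.step(), which is why it is gated on will_analyze_sharpness_next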
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026602 +step:0 train loss:11.019232 +step:1 train loss:10.262197 +step:2 train loss:9.883890 +step:3 train loss:9.680598 +step:4 train loss:9.572738 +step:5 train loss:9.429279 +step:6 train loss:9.310344 +step:7 train loss:9.075135 +step:8 train loss:8.952276 +step:9 train loss:8.768940 +step:10 train loss:8.602692 +step:11 train loss:8.418120 +step:12 train loss:8.210890 +step:13 train loss:8.009360 +step:14 train loss:7.867903 +step:15 train loss:7.737721 +step:16 train loss:7.644694 +step:17 train loss:7.582595 +step:18 train loss:7.558210 +step:19 train loss:7.399376 +step:20 train loss:7.418854 +step:21 train loss:7.456766 +step:22 train loss:7.373087 +step:23 train loss:7.399614 +step:24 train loss:7.518972 +step:25 train loss:7.365292 +step:26 train loss:7.345950 +step:27 train loss:7.194798 +step:28 train loss:7.269071 +step:29 train loss:7.217355 +step:30 train loss:7.196589 +step:31 train loss:7.078607 +step:32 train loss:7.087792 +step:33 train loss:7.053597 +step:34 train loss:7.099107 +step:35 train loss:7.032171 +step:36 train loss:7.001215 +step:37 train loss:6.975395 +step:38 train loss:6.989234 +step:39 
train loss:6.922529 +step:40 train loss:6.937351 +step:41 train loss:6.874026 +step:42 train loss:6.916184 +step:43 train loss:6.825056 +step:44 train loss:6.829992 +step:45 train loss:6.838895 +step:46 train loss:6.846595 +step:47 train loss:6.820647 +step:48 train loss:6.774126 +step:49 train loss:6.782145 +step:50 train loss:6.721800 +step:51 train loss:6.753635 +step:52 train loss:6.765537 +step:53 train loss:6.736302 +step:54 train loss:6.738609 +step:55 train loss:6.670534 +step:56 train loss:6.649166 +step:57 train loss:6.684681 +step:58 train loss:6.619549 +step:59 train loss:6.650199 +step:60 train loss:6.640178 +step:61 train loss:6.609201 +step:62 train loss:6.593523 +step:63 train loss:6.645837 +step:64 train loss:6.572286 +step:65 train loss:6.601536 +step:66 train loss:6.622273 +step:67 train loss:6.696466 +step:68 train loss:6.602819 +step:69 train loss:6.604776 +step:70 train loss:6.560157 +step:71 train loss:6.565208 +step:72 train loss:6.577686 +step:73 train loss:6.539422 +step:74 train loss:6.557817 +step:75 train loss:6.514439 +step:76 train loss:6.596413 +step:77 train loss:6.562307 +step:78 train loss:6.326801 +step:79 train loss:6.500293 +step:80 train loss:6.463450 +step:81 train loss:6.580273 +step:82 train loss:6.524117 +step:83 train loss:6.484874 +step:84 train loss:6.457470 +step:85 train loss:6.442281 +step:86 train loss:6.448334 +step:87 train loss:6.425854 +step:88 train loss:6.426332 +step:89 train loss:6.378015 +step:90 train loss:6.437371 +step:91 train loss:6.446893 +step:92 train loss:6.476047 +step:93 train loss:6.458909 +step:94 train loss:6.407063 +step:95 train loss:6.395261 +step:96 train loss:6.501438 +step:97 train loss:6.424862 +step:98 train loss:6.413359 +step:99 train loss:6.380699 +step:100 train loss:6.369257 +step:101 train loss:6.323749 +step:102 train loss:6.343311 +step:103 train loss:6.354489 +step:104 train loss:6.385768 +step:105 train loss:6.444134 +step:106 train loss:6.377913 +step:107 train loss:6.313987 +step:108 train loss:6.359491 +step:109 train loss:6.390475 +step:110 train loss:6.305174 +step:111 train loss:6.339041 +step:112 train loss:6.346416 +step:113 train loss:6.320485 +step:114 train loss:6.380286 +step:115 train loss:6.368113 +step:116 train loss:6.330380 +step:117 train loss:6.293413 +step:118 train loss:6.357119 +step:119 train loss:6.307263 +step:120 train loss:6.314321 +step:121 train loss:6.226207 +step:122 train loss:6.312073 +step:123 train loss:6.248245 +step:124 train loss:6.227797 +step:125 train loss:6.209261 +step:126 train loss:6.311746 +step:127 train loss:6.226877 +step:128 train loss:6.285388 +step:129 train loss:6.254271 +step:130 train loss:6.284084 +step:131 train loss:6.263585 +step:132 train loss:6.244184 +step:133 train loss:6.273227 +step:134 train loss:6.261516 +step:135 train loss:6.186836 +step:136 train loss:6.265521 +step:137 train loss:6.245782 +step:138 train loss:6.195601 +step:139 train loss:6.248702 +step:140 train loss:6.165659 +step:141 train loss:6.248057 +step:142 train loss:6.203583 +step:143 train loss:6.211365 +step:144 train loss:6.189474 +step:145 train loss:6.126559 +step:146 train loss:6.147139 +step:147 train loss:6.194614 +step:148 train loss:6.203751 +step:149 train loss:6.166272 +step:150 train loss:6.188138 +step:151 train loss:6.122904 +step:152 train loss:6.133907 +step:153 train loss:6.134755 +step:154 train loss:6.211151 +step:155 train loss:6.208389 +step:156 train loss:6.211515 +step:157 train loss:6.145726 +step:158 train loss:6.121163 +step:159 train 
loss:6.134738 +step:160 train loss:6.122595 +step:161 train loss:6.119669 +step:162 train loss:6.082475 +step:163 train loss:6.102244 +step:164 train loss:6.093690 +step:165 train loss:6.145002 +step:166 train loss:6.100401 +step:167 train loss:6.122712 +step:168 train loss:6.120940 +step:169 train loss:6.063736 +step:170 train loss:6.028121 +step:171 train loss:6.148515 +step:172 train loss:6.086465 +step:173 train loss:6.120578 +step:174 train loss:6.117090 +step:175 train loss:6.072948 +step:176 train loss:6.032326 +step:177 train loss:6.064406 +step:178 train loss:6.068447 +step:179 train loss:6.029622 +step:180 train loss:6.001992 +step:181 train loss:6.082910 +step:182 train loss:6.006044 +step:183 train loss:6.067796 +step:184 train loss:6.046349 +step:185 train loss:5.998614 +step:186 train loss:6.123744 +step:187 train loss:6.058893 +step:188 train loss:5.929901 +step:189 train loss:6.071844 +step:190 train loss:6.094115 +step:191 train loss:5.986784 +step:192 train loss:5.921395 +step:193 train loss:6.081446 +step:194 train loss:6.089950 +step:195 train loss:6.064222 +step:196 train loss:6.035866 +step:197 train loss:6.017637 +step:198 train loss:5.973491 +step:199 train loss:6.044774 +step:200 train loss:6.127149 +step:201 train loss:6.057530 +step:202 train loss:6.087876 +step:203 train loss:6.080516 +step:204 train loss:6.063376 +step:205 train loss:5.944362 +step:206 train loss:6.039369 +step:207 train loss:6.032463 +step:208 train loss:5.976419 +step:209 train loss:5.955841 +step:210 train loss:5.970156 +step:211 train loss:6.019923 +step:212 train loss:5.997328 +step:213 train loss:5.991566 +step:214 train loss:5.974970 +step:215 train loss:5.997898 +step:216 train loss:5.948830 +step:217 train loss:5.974053 +step:218 train loss:5.925058 +step:219 train loss:5.897423 +step:220 train loss:5.939091 +step:221 train loss:5.901371 +step:222 train loss:5.929759 +step:223 train loss:5.965445 +step:224 train loss:5.945044 +step:225 train loss:5.871783 +step:226 train loss:5.877690 +step:227 train loss:5.946115 +step:228 train loss:5.932792 +step:229 train loss:5.996984 +step:230 train loss:5.881590 +step:231 train loss:5.931814 +step:232 train loss:5.949701 +step:233 train loss:5.936660 +step:234 train loss:5.917721 +step:235 train loss:5.996335 +step:236 train loss:5.943882 +step:237 train loss:5.966166 +step:238 train loss:5.964002 +step:239 train loss:5.881055 +step:240 train loss:5.952883 +step:241 train loss:6.016924 +step:242 train loss:6.003256 +step:243 train loss:5.917362 +step:244 train loss:5.963347 +step:245 train loss:5.951026 +step:246 train loss:5.923478 +step:247 train loss:5.918870 +step:248 train loss:5.873212 +step:249 train loss:5.925638 +step:250 validation loss:5.906457 +step:250 train loss:5.881471 +step:251 train loss:5.914075 +step:252 train loss:5.861598 +step:253 train loss:5.863704 +step:254 train loss:5.837325 +step:255 train loss:5.903545 +step:256 train loss:5.934583 +step:257 train loss:5.958807 +step:258 train loss:5.862470 +step:259 train loss:5.867569 +step:260 train loss:5.835538 +step:261 train loss:5.865611 +step:262 train loss:5.924180 +step:263 train loss:5.875477 +step:264 train loss:5.858421 +step:265 train loss:5.870949 +step:266 train loss:5.849993 +step:267 train loss:5.895288 +step:268 train loss:5.832183 +step:269 train loss:5.856145 +step:270 train loss:5.882045 +step:271 train loss:5.882791 +step:272 train loss:5.831614 +step:273 train loss:5.908150 +step:274 train loss:5.820560 +step:275 train loss:5.863021 +step:276 train 
loss:5.810769 +step:277 train loss:5.808877 +step:278 train loss:5.777995 +step:279 train loss:5.752914 +step:280 train loss:5.822852 +step:281 train loss:5.905840 +step:282 train loss:5.800625 +step:283 train loss:5.850558 +step:284 train loss:5.809310 +step:285 train loss:5.864860 +step:286 train loss:5.832240 +step:287 train loss:5.833797 +step:288 train loss:5.838181 +step:289 train loss:5.862281 +step:290 train loss:5.892404 +step:291 train loss:5.792600 +step:292 train loss:5.850571 +step:293 train loss:5.766932 +step:294 train loss:5.870778 +step:295 train loss:5.770526 +step:296 train loss:5.826291 +step:297 train loss:5.843749 +step:298 train loss:5.730999 +step:299 train loss:5.791825 +step:300 train loss:5.727276 +step:301 train loss:5.768961 +step:302 train loss:5.750466 +step:303 train loss:5.771027 +step:304 train loss:5.783016 +step:305 train loss:5.724415 +step:306 train loss:5.762980 +step:307 train loss:5.791943 +step:308 train loss:5.705709 +step:309 train loss:5.853849 +step:310 train loss:5.808126 +step:311 train loss:5.781875 +step:312 train loss:5.765761 +step:313 train loss:5.788866 +step:314 train loss:5.776439 +step:315 train loss:5.754529 +step:316 train loss:5.731339 +step:317 train loss:5.685763 +step:318 train loss:5.681914 +step:319 train loss:5.774887 +step:320 train loss:5.686090 +step:321 train loss:5.758953 +step:322 train loss:5.733821 +step:323 train loss:5.795714 +step:324 train loss:5.737299 +step:325 train loss:5.767617 +step:326 train loss:5.784756 +step:327 train loss:5.768140 +step:328 train loss:5.752162 +step:329 train loss:5.759795 +step:330 train loss:5.682660 +step:331 train loss:5.707658 +step:332 train loss:5.687117 +step:333 train loss:5.637520 +step:334 train loss:5.746806 +step:335 train loss:5.801204 +step:336 train loss:6.008240 +step:337 train loss:5.851802 +step:338 train loss:5.804276 +step:339 train loss:5.737244 +step:340 train loss:5.752291 +step:341 train loss:5.726208 +step:342 train loss:5.775269 +step:343 train loss:5.749850 +step:344 train loss:5.711786 +step:345 train loss:5.670044 +step:346 train loss:5.719803 +step:347 train loss:5.650830 +step:348 train loss:5.659524 +step:349 train loss:5.601962 +step:350 train loss:5.638816 +step:351 train loss:5.713241 +step:352 train loss:5.685908 +step:353 train loss:5.706374 +step:354 train loss:5.661765 +step:355 train loss:5.699609 +step:356 train loss:5.665668 +step:357 train loss:5.727071 +step:358 train loss:5.751032 +step:359 train loss:5.596751 +step:360 train loss:5.738734 +step:361 train loss:5.727783 +step:362 train loss:5.698988 +step:363 train loss:5.642031 +step:364 train loss:5.754519 +step:365 train loss:5.689726 +step:366 train loss:5.668527 +step:367 train loss:5.689596 +step:368 train loss:5.665330 +step:369 train loss:5.653673 +step:370 train loss:5.713902 +step:371 train loss:5.648648 +step:372 train loss:5.701334 +step:373 train loss:5.652794 +step:374 train loss:5.632220 +step:375 train loss:5.652379 +step:376 train loss:5.629062 +step:377 train loss:5.545222 +step:378 train loss:5.600250 +step:379 train loss:5.676263 +step:380 train loss:5.605108 +step:381 train loss:5.663520 +step:382 train loss:5.683614 +step:383 train loss:5.653295 +step:384 train loss:5.642139 +step:385 train loss:5.634953 +step:386 train loss:5.656590 +step:387 train loss:5.655692 +step:388 train loss:5.615549 +step:389 train loss:5.612606 +step:390 train loss:5.614889 +step:391 train loss:5.631156 +step:392 train loss:5.613091 +step:393 train loss:5.610155 +step:394 train loss:5.671759 
+step:395 train loss:5.592556 +step:396 train loss:5.567814 +step:397 train loss:5.659273 +step:398 train loss:5.631057 +step:399 train loss:5.621472 +step:400 train loss:5.571403 +step:401 train loss:5.623012 +step:402 train loss:5.593936 +step:403 train loss:5.617518 +step:404 train loss:5.592631 +step:405 train loss:5.597247 +step:406 train loss:5.621090 +step:407 train loss:5.606481 +step:408 train loss:5.665270 +step:409 train loss:5.600883 +step:410 train loss:5.579499 +step:411 train loss:5.563488 +step:412 train loss:5.659955 +step:413 train loss:5.528200 +step:414 train loss:5.612179 +step:415 train loss:5.576325 +step:416 train loss:5.619970 +step:417 train loss:5.642439 +step:418 train loss:5.607337 +step:419 train loss:5.600011 +step:420 train loss:5.584411 +step:421 train loss:5.566530 +step:422 train loss:5.558205 +step:423 train loss:5.575625 +step:424 train loss:5.531298 +step:425 train loss:5.596567 +step:426 train loss:5.576696 +step:427 train loss:5.500562 +step:428 train loss:5.557589 +step:429 train loss:5.466033 +step:430 train loss:5.510432 +step:431 train loss:5.550414 +step:432 train loss:5.589244 +step:433 train loss:5.569630 +step:434 train loss:5.551292 +step:435 train loss:5.595928 +step:436 train loss:5.617223 +step:437 train loss:5.597698 +step:438 train loss:5.565333 +step:439 train loss:5.530140 +step:440 train loss:5.588330 +step:441 train loss:5.508769 +step:442 train loss:5.507324 +step:443 train loss:5.514508 +step:444 train loss:5.558990 +step:445 train loss:5.563852 +step:446 train loss:5.490791 +step:447 train loss:5.513350 +step:448 train loss:5.555700 +step:449 train loss:5.508681 +step:450 train loss:5.495892 +step:451 train loss:5.489540 +step:452 train loss:5.545886 +step:453 train loss:5.504696 +step:454 train loss:5.480165 +step:455 train loss:5.588614 +step:456 train loss:5.568533 +step:457 train loss:5.560375 +step:458 train loss:5.573949 +step:459 train loss:5.501729 +step:460 train loss:5.599521 +step:461 train loss:5.556108 +step:462 train loss:5.456617 +step:463 train loss:5.518648 +step:464 train loss:5.547424 +step:465 train loss:5.498776 +step:466 train loss:5.525860 +step:467 train loss:5.475702 +step:468 train loss:5.525779 +step:469 train loss:5.486067 +step:470 train loss:5.459072 +step:471 train loss:5.552713 +step:472 train loss:5.436612 +step:473 train loss:5.536324 +step:474 train loss:5.519776 +step:475 train loss:5.519368 +step:476 train loss:5.483761 +step:477 train loss:5.428074 +step:478 train loss:5.429042 +step:479 train loss:5.422229 +step:480 train loss:5.443073 +step:481 train loss:5.457161 +step:482 train loss:5.391933 +step:483 train loss:5.468773 +step:484 train loss:5.436654 +step:485 train loss:5.448071 +step:486 train loss:5.495040 +step:487 train loss:5.480927 +step:488 train loss:5.463958 +step:489 train loss:5.490932 +step:490 train loss:5.443464 +step:491 train loss:5.455446 +step:492 train loss:5.446711 +step:493 train loss:5.453059 +step:494 train loss:5.468650 +step:495 train loss:5.428507 +step:496 train loss:5.527874 +step:497 train loss:5.389560 +step:498 train loss:5.506931 +step:499 train loss:5.476352 +step:500 validation loss:5.461633 total_sharp:4.5188e-04 L1_sharp:1.7018e-03 L2_sharp:9.0020e-05 L3_sharp:6.0554e-05 L4_sharp:3.2716e-05 L5_sharp:2.8708e-05 L6_sharp:8.2764e-05 L7_sharp:4.2983e-05 L8_sharp:4.8132e-05 L9_sharp:5.0090e-05 L10_sharp:6.1240e-05 L11_sharp:9.0414e-05 L12_sharp:1.6489e-04 total_fnorm:2.4994e+01 total_l1_linf:2.1001e+05 total_spectral:2.4994e+01 L1_fnorm:4.1288e+00 
L2_fnorm:4.1877e+00 L3_fnorm:4.6572e+00 L4_fnorm:4.7712e+00 L5_fnorm:4.8978e+00 L6_fnorm:4.9916e+00 L7_fnorm:5.2982e+00 L8_fnorm:5.5780e+00 L9_fnorm:5.7823e+00 L10_fnorm:5.9285e+00 L11_fnorm:5.7785e+00 L12_fnorm:5.5174e+00 L1_l1linf:1.1815e+01 L2_l1linf:5.2193e+00 L3_l1linf:6.0361e+00 L4_l1linf:6.4555e+00 L5_l1linf:6.9819e+00 L6_l1linf:7.3296e+00 L7_l1linf:7.5082e+00 L8_l1linf:8.3878e+00 L9_l1linf:9.0019e+00 L10_l1linf:1.2437e+01 L11_l1linf:1.8254e+01 L12_l1linf:2.0434e+01 L1_spectral:1.4975e+00 L2_spectral:8.0304e-01 L3_spectral:8.8284e-01 L4_spectral:9.2850e-01 L5_spectral:9.6789e-01 L6_spectral:1.0107e+00 L7_spectral:1.0065e+00 L8_spectral:1.1699e+00 L9_spectral:1.2408e+00 L10_spectral:1.3675e+00 L11_spectral:1.4955e+00 L12_spectral:1.4832e+00 ip_v_neg_g:1.8830e-01 cos_v_neg_g:1.2537e-02 v_norm:2.4994e+01 g_norm:6.0091e-01 hv_norm:6.5267e-01 cos_v_hv:1.7305e-02 hg_norm:8.1034e+00 cos_g_hg:6.1687e-01 v_par:3.4759e-02 v_perp:2.4994e+01 L1_cos_v_neg_g:2.4941e-01 L1_v_norm:4.1288e+00 L2_cos_v_neg_g:5.3000e-02 L2_v_norm:4.1877e+00 L3_cos_v_neg_g:6.4292e-02 L3_v_norm:4.6572e+00 L4_cos_v_neg_g:5.3296e-02 L4_v_norm:4.7712e+00 L5_cos_v_neg_g:5.0455e-02 L5_v_norm:4.8978e+00 L6_cos_v_neg_g:7.0118e-02 L6_v_norm:4.9916e+00 L7_cos_v_neg_g:4.7691e-02 L7_v_norm:5.2982e+00 L8_cos_v_neg_g:4.6643e-02 L8_v_norm:5.5780e+00 L9_cos_v_neg_g:4.5845e-02 L9_v_norm:5.7823e+00 L10_cos_v_neg_g:4.1489e-02 L10_v_norm:5.9285e+00 L11_cos_v_neg_g:4.7322e-02 L11_v_norm:5.7785e+00 L12_cos_v_neg_g:5.2570e-02 L12_v_norm:5.5174e+00 +step:500 train loss:5.484288 +step:501 train loss:5.460588 +step:502 train loss:5.517597 +step:503 train loss:5.418507 +step:504 train loss:5.521737 +step:505 train loss:5.431964 +step:506 train loss:5.429262 +step:507 train loss:5.468658 +step:508 train loss:5.517918 +step:509 train loss:5.517987 +step:510 train loss:5.446995 +step:511 train loss:5.431629 +step:512 train loss:5.413242 +step:513 train loss:5.446895 +step:514 train loss:5.492924 +step:515 train loss:5.453114 +step:516 train loss:5.521325 +step:517 train loss:5.440238 +step:518 train loss:5.446882 +step:519 train loss:5.476668 +step:520 train loss:5.424411 +step:521 train loss:5.404578 +step:522 train loss:5.428686 +step:523 train loss:5.421286 +step:524 train loss:5.360823 +step:525 train loss:5.361694 +step:526 train loss:5.396910 +step:527 train loss:5.384010 +step:528 train loss:5.376782 +step:529 train loss:5.414977 +step:530 train loss:5.384015 +step:531 train loss:5.396319 +step:532 train loss:5.351964 +step:533 train loss:5.330538 +step:534 train loss:5.430773 +step:535 train loss:5.401488 +step:536 train loss:5.472424 +step:537 train loss:5.362540 +step:538 train loss:5.318638 +step:539 train loss:5.401194 +step:540 train loss:5.433339 +step:541 train loss:5.325716 +step:542 train loss:5.367262 +step:543 train loss:5.379780 +step:544 train loss:5.364557 +step:545 train loss:5.363291 +step:546 train loss:5.334310 +step:547 train loss:5.413866 +step:548 train loss:5.354052 +step:549 train loss:5.433948 +step:550 train loss:5.399041 +step:551 train loss:5.385912 +step:552 train loss:5.489621 +step:553 train loss:5.439522 +step:554 train loss:5.359559 +step:555 train loss:5.421223 +step:556 train loss:5.353863 +step:557 train loss:5.344109 +step:558 train loss:5.284520 +step:559 train loss:5.346112 +step:560 train loss:5.412963 +step:561 train loss:5.313959 +step:562 train loss:5.338991 +step:563 train loss:5.424865 +step:564 train loss:5.367190 +step:565 train loss:5.360116 +step:566 train loss:5.375133 +step:567 train 
loss:5.343355 +step:568 train loss:5.369513 +step:569 train loss:5.357941 +step:570 train loss:5.268944 +step:571 train loss:5.308466 +step:572 train loss:5.302091 +step:573 train loss:5.309958 +step:574 train loss:5.365327 +step:575 train loss:5.331015 +step:576 train loss:5.355843 +step:577 train loss:5.350386 +step:578 train loss:5.318229 +step:579 train loss:5.354886 +step:580 train loss:5.281853 +step:581 train loss:5.315876 +step:582 train loss:5.274175 +step:583 train loss:5.283835 +step:584 train loss:5.282383 +step:585 train loss:5.278788 +step:586 train loss:5.285645 +step:587 train loss:5.353399 +step:588 train loss:5.269736 +step:589 train loss:5.322457 +step:590 train loss:5.349444 +step:591 train loss:5.267084 +step:592 train loss:5.253104 +step:593 train loss:5.254071 +step:594 train loss:5.238709 +step:595 train loss:5.308102 +step:596 train loss:5.265142 +step:597 train loss:5.318436 +step:598 train loss:5.286298 +step:599 train loss:5.301871 +step:600 train loss:5.290759 +step:601 train loss:5.261467 +step:602 train loss:5.250475 +step:603 train loss:5.304134 +step:604 train loss:5.270670 +step:605 train loss:5.310947 +step:606 train loss:5.267604 +step:607 train loss:5.262364 +step:608 train loss:5.275912 +step:609 train loss:5.251816 +step:610 train loss:5.250536 +step:611 train loss:5.290695 +step:612 train loss:5.302171 +step:613 train loss:5.199290 +step:614 train loss:5.256089 +step:615 train loss:5.301620 +step:616 train loss:5.225059 +step:617 train loss:5.271589 +step:618 train loss:5.237872 +step:619 train loss:5.282207 +step:620 train loss:5.324791 +step:621 train loss:5.214686 +step:622 train loss:5.273263 +step:623 train loss:5.259678 +step:624 train loss:5.242819 +step:625 train loss:5.222305 +step:626 train loss:5.220965 +step:627 train loss:5.186801 +step:628 train loss:5.204622 +step:629 train loss:5.151253 +step:630 train loss:5.174109 +step:631 train loss:5.169332 +step:632 train loss:5.203972 +step:633 train loss:5.239342 +step:634 train loss:5.242793 +step:635 train loss:5.203196 +step:636 train loss:5.271615 +step:637 train loss:5.197489 +step:638 train loss:5.136413 +step:639 train loss:5.249235 +step:640 train loss:5.185091 +step:641 train loss:5.219554 +step:642 train loss:5.242911 +step:643 train loss:5.162054 +step:644 train loss:5.235196 +step:645 train loss:5.201666 +step:646 train loss:5.186224 +step:647 train loss:5.196740 +step:648 train loss:5.280211 +step:649 train loss:5.179102 +step:650 train loss:5.240204 +step:651 train loss:5.098940 +step:652 train loss:5.137276 +step:653 train loss:5.110636 +step:654 train loss:5.111721 +step:655 train loss:5.156189 +step:656 train loss:5.111069 +step:657 train loss:5.190972 +step:658 train loss:5.197222 +step:659 train loss:5.282795 +step:660 train loss:5.272213 +step:661 train loss:5.339263 +step:662 train loss:5.330999 +step:663 train loss:5.331100 +step:664 train loss:5.237134 +step:665 train loss:5.262524 +step:666 train loss:5.227177 +step:667 train loss:5.313331 +step:668 train loss:5.270304 +step:669 train loss:5.279059 +step:670 train loss:5.276312 +step:671 train loss:5.276658 +step:672 train loss:5.229369 +step:673 train loss:5.285988 +step:674 train loss:5.268158 +step:675 train loss:5.150522 +step:676 train loss:5.235246 +step:677 train loss:5.157374 +step:678 train loss:5.139112 +step:679 train loss:5.170978 +step:680 train loss:5.146693 +step:681 train loss:5.167905 +step:682 train loss:5.069697 +step:683 train loss:5.137027 +step:684 train loss:5.173369 +step:685 train loss:5.119655 
+step:686 train loss:5.227437 +step:687 train loss:5.144304 +step:688 train loss:5.079843 +step:689 train loss:5.119582 +step:690 train loss:5.105796 +step:691 train loss:5.122776 +step:692 train loss:5.176957 +step:693 train loss:5.155252 +step:694 train loss:5.155603 +step:695 train loss:5.103266 +step:696 train loss:5.047236 +step:697 train loss:5.170346 +step:698 train loss:5.088717 +step:699 train loss:5.079052 +step:700 train loss:5.167325 +step:701 train loss:5.056589 +step:702 train loss:5.130624 +step:703 train loss:5.051394 +step:704 train loss:5.017943 +step:705 train loss:5.069365 +step:706 train loss:4.938249 +step:707 train loss:5.006340 +step:708 train loss:5.110985 +step:709 train loss:5.071304 +step:710 train loss:5.042209 +step:711 train loss:5.105877 +step:712 train loss:5.060445 +step:713 train loss:5.046296 +step:714 train loss:5.124279 +step:715 train loss:5.017189 +step:716 train loss:5.153375 +step:717 train loss:5.025806 +step:718 train loss:5.114598 +step:719 train loss:5.070780 +step:720 train loss:5.033055 +step:721 train loss:5.044867 +step:722 train loss:5.058757 +step:723 train loss:5.104527 +step:724 train loss:5.070443 +step:725 train loss:5.038485 +step:726 train loss:5.012205 +step:727 train loss:5.073421 +step:728 train loss:5.055282 +step:729 train loss:4.990317 +step:730 train loss:5.090952 +step:731 train loss:5.098418 +step:732 train loss:5.057058 +step:733 train loss:5.028989 +step:734 train loss:5.013134 +step:735 train loss:5.083640 +step:736 train loss:5.023076 +step:737 train loss:5.014085 +step:738 train loss:5.055014 +step:739 train loss:4.997226 +step:740 train loss:5.027143 +step:741 train loss:5.107336 +step:742 train loss:5.030472 +step:743 train loss:5.035451 +step:744 train loss:5.059049 +step:745 train loss:5.021628 +step:746 train loss:5.064116 +step:747 train loss:5.092600 +step:748 train loss:5.032170 +step:749 train loss:5.088422 +step:750 validation loss:5.097679 +step:750 train loss:5.070681 +step:751 train loss:5.064076 +step:752 train loss:4.988230 +step:753 train loss:5.038028 +step:754 train loss:5.029663 +step:755 train loss:5.074189 +step:756 train loss:5.041966 +step:757 train loss:5.127094 +step:758 train loss:5.011490 +step:759 train loss:5.020823 +step:760 train loss:5.008605 +step:761 train loss:5.033652 +step:762 train loss:5.019964 +step:763 train loss:5.000381 +step:764 train loss:4.969543 +step:765 train loss:4.987235 +step:766 train loss:5.060469 +step:767 train loss:5.141818 +step:768 train loss:4.980318 +step:769 train loss:5.022173 +step:770 train loss:5.006932 +step:771 train loss:5.071179 +step:772 train loss:4.987123 +step:773 train loss:4.926804 +step:774 train loss:4.981206 +step:775 train loss:4.942606 +step:776 train loss:4.964974 +step:777 train loss:4.918425 +step:778 train loss:4.927115 +step:779 train loss:4.915862 +step:780 train loss:4.973120 +step:781 train loss:4.899448 +step:782 train loss:4.924245 +step:783 train loss:4.889819 +step:784 train loss:4.901979 +step:785 train loss:4.864860 +step:786 train loss:4.891422 +step:787 train loss:4.834236 +step:788 train loss:4.897487 +step:789 train loss:4.908491 +step:790 train loss:4.885191 +step:791 train loss:4.976032 +step:792 train loss:4.993186 +step:793 train loss:4.938928 +step:794 train loss:4.926248 +step:795 train loss:4.886410 +step:796 train loss:5.127180 +step:797 train loss:4.914150 +step:798 train loss:4.917712 +step:799 train loss:4.951704 +step:800 train loss:5.057451 +step:801 train loss:5.047522 +step:802 train loss:5.149087 +step:803 
train loss:5.045957 +step:804 train loss:5.031819 +step:805 train loss:5.056650 +step:806 train loss:4.976746 +step:807 train loss:5.012740 +step:808 train loss:4.998507 +step:809 train loss:4.941969 +step:810 train loss:4.908979 +step:811 train loss:5.004057 +step:812 train loss:4.961902 +step:813 train loss:4.969012 +step:814 train loss:5.015262 +step:815 train loss:5.002444 +step:816 train loss:4.942276 +step:817 train loss:4.987538 +step:818 train loss:4.964355 +step:819 train loss:4.950358 +step:820 train loss:4.970010 +step:821 train loss:4.890201 +step:822 train loss:4.862716 +step:823 train loss:4.937113 +step:824 train loss:4.834415 +step:825 train loss:4.828400 +step:826 train loss:4.854702 +step:827 train loss:4.787498 +step:828 train loss:4.855500 +step:829 train loss:4.849127 +step:830 train loss:4.851274 +step:831 train loss:4.870550 +step:832 train loss:4.925179 +step:833 train loss:4.867028 +step:834 train loss:4.852460 +step:835 train loss:4.829851 +step:836 train loss:4.808630 +step:837 train loss:4.795197 +step:838 train loss:4.786827 +step:839 train loss:4.791584 +step:840 train loss:4.836711 +step:841 train loss:4.796856 +step:842 train loss:4.791010 +step:843 train loss:4.782892 +step:844 train loss:4.747468 +step:845 train loss:4.735538 +step:846 train loss:4.822110 +step:847 train loss:4.784603 +step:848 train loss:4.746992 +step:849 train loss:4.814236 +step:850 train loss:4.835534 +step:851 train loss:4.833138 +step:852 train loss:4.957945 +step:853 train loss:4.805809 +step:854 train loss:4.888112 +step:855 train loss:4.893757 +step:856 train loss:4.851370 +step:857 train loss:4.896459 +step:858 train loss:4.908955 +step:859 train loss:4.838265 +step:860 train loss:4.838015 +step:861 train loss:4.873106 +step:862 train loss:4.813958 +step:863 train loss:4.821052 +step:864 train loss:4.793799 +step:865 train loss:4.814146 +step:866 train loss:4.813350 +step:867 train loss:4.900398 +step:868 train loss:4.773492 +step:869 train loss:4.781321 +step:870 train loss:4.724782 +step:871 train loss:4.720711 +step:872 train loss:4.747953 +step:873 train loss:4.740725 +step:874 train loss:4.752108 +step:875 train loss:4.668902 +step:876 train loss:4.770397 +step:877 train loss:4.684634 +step:878 train loss:4.781939 +step:879 train loss:4.698812 +step:880 train loss:4.780731 +step:881 train loss:4.731631 +step:882 train loss:4.682735 +step:883 train loss:4.713445 +step:884 train loss:4.720980 +step:885 train loss:4.671504 +step:886 train loss:4.658195 +step:887 train loss:4.691960 +step:888 train loss:4.813591 +step:889 train loss:4.764756 +step:890 train loss:4.721353 +step:891 train loss:4.669010 +step:892 train loss:4.647078 +step:893 train loss:4.700861 +step:894 train loss:4.673738 +step:895 train loss:4.656807 +step:896 train loss:4.734334 +step:897 train loss:4.655380 +step:898 train loss:4.663696 +step:899 train loss:4.679663 +step:900 train loss:4.723958 +step:901 train loss:4.635575 +step:902 train loss:4.692695 +step:903 train loss:4.780038 +step:904 train loss:4.780550 +step:905 train loss:4.676025 +step:906 train loss:4.695094 +step:907 train loss:4.722341 +step:908 train loss:4.716128 +step:909 train loss:4.673219 +step:910 train loss:4.695300 +step:911 train loss:4.807920 +step:912 train loss:4.630748 +step:913 train loss:4.684531 +step:914 train loss:4.674043 +step:915 train loss:4.695041 +step:916 train loss:4.749427 +step:917 train loss:4.683167 +step:918 train loss:4.756770 +step:919 train loss:4.823292 +step:920 train loss:4.573344 +step:921 train 
loss:4.696107 +step:922 train loss:4.677353 +step:923 train loss:4.580092 +step:924 train loss:4.633319 +step:925 train loss:4.577356 +step:926 train loss:4.674301 +step:927 train loss:4.583979 +step:928 train loss:4.664357 +step:929 train loss:4.621456 +step:930 train loss:4.620132 +step:931 train loss:4.659338 +step:932 train loss:4.602231 +step:933 train loss:4.640775 +step:934 train loss:4.676950 +step:935 train loss:4.663653 +step:936 train loss:4.644806 +step:937 train loss:4.645705 +step:938 train loss:4.635492 +step:939 train loss:4.521777 +step:940 train loss:4.639568 +step:941 train loss:4.572118 +step:942 train loss:4.556297 +step:943 train loss:4.660207 +step:944 train loss:4.616196 +step:945 train loss:4.611834 +step:946 train loss:4.651564 +step:947 train loss:4.773237 +step:948 train loss:4.579978 +step:949 train loss:4.630235 +step:950 train loss:4.564744 +step:951 train loss:4.596598 +step:952 train loss:4.676888 +step:953 train loss:4.627065 +step:954 train loss:4.629957 +step:955 train loss:4.581582 +step:956 train loss:4.592123 +step:957 train loss:4.593868 +step:958 train loss:4.664149 +step:959 train loss:4.599895 +step:960 train loss:4.689411 +step:961 train loss:4.629098 +step:962 train loss:4.582089 +step:963 train loss:4.571524 +step:964 train loss:4.602969 +step:965 train loss:4.522057 +step:966 train loss:4.529879 +step:967 train loss:4.589926 +step:968 train loss:4.600830 +step:969 train loss:4.549791 +step:970 train loss:4.604679 +step:971 train loss:4.573306 +step:972 train loss:4.496689 +step:973 train loss:4.584021 +step:974 train loss:4.528273 +step:975 train loss:4.606763 +step:976 train loss:4.556239 +step:977 train loss:4.551753 +step:978 train loss:4.560986 +step:979 train loss:4.548318 +step:980 train loss:4.551596 +step:981 train loss:4.527904 +step:982 train loss:4.548199 +step:983 train loss:4.567331 +step:984 train loss:4.589509 +step:985 train loss:4.554246 +step:986 train loss:4.579080 +step:987 train loss:4.593468 +step:988 train loss:4.583082 +step:989 train loss:4.534289 +step:990 train loss:4.532054 +step:991 train loss:4.466839 +step:992 train loss:4.532121 +step:993 train loss:4.539805 +step:994 train loss:4.482848 +step:995 train loss:4.502074 +step:996 train loss:4.542312 +step:997 train loss:4.512465 +step:998 train loss:4.547505 +step:999 train loss:4.585158 +step:1000 validation loss:4.531815 total_sharp:1.8358e-04 L1_sharp:8.1428e-05 L2_sharp:6.9067e-06 L3_sharp:2.2004e-05 L4_sharp:2.7211e-05 L5_sharp:5.5772e-05 L6_sharp:4.7735e-05 L7_sharp:2.4256e-05 L8_sharp:4.6084e-05 L9_sharp:5.1181e-05 L10_sharp:6.2344e-05 L11_sharp:4.5946e-05 L12_sharp:2.8239e-04 total_fnorm:3.7205e+01 total_l1_linf:3.1520e+05 total_spectral:3.7205e+01 L1_fnorm:7.5594e+00 L2_fnorm:6.0163e+00 L3_fnorm:6.9043e+00 L4_fnorm:7.6475e+00 L5_fnorm:8.5686e+00 L6_fnorm:8.6958e+00 L7_fnorm:8.9815e+00 L8_fnorm:8.9976e+00 L9_fnorm:8.6987e+00 L10_fnorm:8.2286e+00 L11_fnorm:8.1683e+00 L12_fnorm:7.4724e+00 L1_l1linf:1.2812e+01 L2_l1linf:7.1804e+00 L3_l1linf:1.0333e+01 L4_l1linf:9.8488e+00 L5_l1linf:9.9777e+00 L6_l1linf:1.0194e+01 L7_l1linf:1.0880e+01 L8_l1linf:1.2531e+01 L9_l1linf:1.2507e+01 L10_l1linf:1.2418e+01 L11_l1linf:1.6590e+01 L12_l1linf:2.1008e+01 L1_spectral:1.9633e+00 L2_spectral:1.2803e+00 L3_spectral:1.4039e+00 L4_spectral:1.4327e+00 L5_spectral:1.6010e+00 L6_spectral:1.8201e+00 L7_spectral:1.7327e+00 L8_spectral:2.0384e+00 L9_spectral:1.7931e+00 L10_spectral:1.7686e+00 L11_spectral:1.8244e+00 L12_spectral:2.0188e+00 ip_v_neg_g:1.7427e-01 cos_v_neg_g:8.5608e-03 
v_norm:3.7205e+01 g_norm:5.4714e-01 hv_norm:6.2020e-01 cos_v_hv:1.1013e-02 hg_norm:5.2100e+00 cos_g_hg:7.0531e-01 v_par:2.9656e-02 v_perp:3.7205e+01 L1_cos_v_neg_g:1.0104e-01 L1_v_norm:7.5594e+00 L2_cos_v_neg_g:3.4846e-02 L2_v_norm:6.0163e+00 L3_cos_v_neg_g:3.3014e-02 L3_v_norm:6.9043e+00 L4_cos_v_neg_g:4.0305e-02 L4_v_norm:7.6475e+00 L5_cos_v_neg_g:5.1493e-02 L5_v_norm:8.5686e+00 L6_cos_v_neg_g:4.1988e-02 L6_v_norm:8.6958e+00 L7_cos_v_neg_g:3.7621e-02 L7_v_norm:8.9815e+00 L8_cos_v_neg_g:4.2399e-02 L8_v_norm:8.9976e+00 L9_cos_v_neg_g:3.8781e-02 L9_v_norm:8.6987e+00 L10_cos_v_neg_g:4.4956e-02 L10_v_norm:8.2286e+00 L11_cos_v_neg_g:3.9647e-02 L11_v_norm:8.1683e+00 L12_cos_v_neg_g:6.0367e-02 L12_v_norm:7.4724e+00 +step:1000 train loss:4.600108 +step:1001 train loss:4.608701 +step:1002 train loss:4.617415 +step:1003 train loss:4.577649 +step:1004 train loss:4.550592 +step:1005 train loss:4.563908 +step:1006 train loss:4.655911 +step:1007 train loss:4.600163 +step:1008 train loss:4.560359 +step:1009 train loss:4.601050 +step:1010 train loss:4.574107 +step:1011 train loss:4.592715 +step:1012 train loss:4.533918 +step:1013 train loss:4.509965 +step:1014 train loss:4.513448 +step:1015 train loss:4.537177 +step:1016 train loss:4.543605 +step:1017 train loss:4.487352 +step:1018 train loss:4.542593 +step:1019 train loss:4.491366 +step:1020 train loss:4.490730 +step:1021 train loss:4.586003 +step:1022 train loss:4.486565 +step:1023 train loss:4.497402 +step:1024 train loss:4.575394 +step:1025 train loss:4.532087 +step:1026 train loss:4.475070 +step:1027 train loss:4.517083 +step:1028 train loss:4.521471 +step:1029 train loss:4.455510 +step:1030 train loss:4.537539 +step:1031 train loss:4.527000 +step:1032 train loss:4.484973 +step:1033 train loss:4.448410 +step:1034 train loss:4.510586 +step:1035 train loss:4.508856 +step:1036 train loss:4.423390 +step:1037 train loss:4.486414 +step:1038 train loss:4.509193 +step:1039 train loss:4.643597 +step:1040 train loss:4.479698 +step:1041 train loss:4.474220 +step:1042 train loss:4.484640 +step:1043 train loss:4.481375 +step:1044 train loss:4.471152 +step:1045 train loss:4.484870 +step:1046 train loss:4.419674 +step:1047 train loss:4.461238 +step:1048 train loss:4.451206 +step:1049 train loss:4.511435 +step:1050 train loss:4.476672 +step:1051 train loss:4.453197 +step:1052 train loss:4.536818 +step:1053 train loss:4.443153 +step:1054 train loss:4.438529 +step:1055 train loss:4.498712 +step:1056 train loss:4.448407 +step:1057 train loss:4.368114 +step:1058 train loss:4.468393 +step:1059 train loss:4.446001 +step:1060 train loss:4.447725 +step:1061 train loss:4.503625 +step:1062 train loss:4.458850 +step:1063 train loss:4.458900 +step:1064 train loss:4.441081 +step:1065 train loss:4.452857 +step:1066 train loss:4.426946 +step:1067 train loss:4.449445 +step:1068 train loss:4.411217 +step:1069 train loss:4.424982 +step:1070 train loss:4.445641 +step:1071 train loss:4.487302 +step:1072 train loss:4.497852 +step:1073 train loss:4.420098 +step:1074 train loss:4.438326 +step:1075 train loss:4.438023 +step:1076 train loss:4.513642 +step:1077 train loss:4.448564 +step:1078 train loss:4.498824 +step:1079 train loss:4.540112 +step:1080 train loss:4.401222 +step:1081 train loss:4.459754 +step:1082 train loss:4.464617 +step:1083 train loss:4.417789 +step:1084 train loss:4.388342 +step:1085 train loss:4.447238 +step:1086 train loss:4.441359 +step:1087 train loss:4.425293 +step:1088 train loss:4.427048 +step:1089 train loss:4.424569 +step:1090 train loss:4.367939 +step:1091 
train loss:4.360035 +step:1092 train loss:4.469221 +step:1093 train loss:4.363205 +step:1094 train loss:4.418132 +step:1095 train loss:4.463274 +step:1096 train loss:4.399139 +step:1097 train loss:4.395069 +step:1098 train loss:4.358950 +step:1099 train loss:4.414691 +step:1100 train loss:4.464791 +step:1101 train loss:4.462897 +step:1102 train loss:4.467544 +step:1103 train loss:4.389647 +step:1104 train loss:4.428396 +step:1105 train loss:4.479945 +step:1106 train loss:4.408393 +step:1107 train loss:4.522143 +step:1108 train loss:4.467190 +step:1109 train loss:4.423828 +step:1110 train loss:4.379270 +step:1111 train loss:4.437948 +step:1112 train loss:4.350439 +step:1113 train loss:4.329024 +step:1114 train loss:4.324561 +step:1115 train loss:4.372850 +step:1116 train loss:4.451643 +step:1117 train loss:4.474806 +step:1118 train loss:4.513147 +step:1119 train loss:4.425567 +step:1120 train loss:4.442146 +step:1121 train loss:4.436502 +step:1122 train loss:4.413444 +step:1123 train loss:4.504981 +step:1124 train loss:4.390745 +step:1125 train loss:4.400737 +step:1126 train loss:4.350154 +step:1127 train loss:4.373124 +step:1128 train loss:4.374168 +step:1129 train loss:4.428198 +step:1130 train loss:4.343029 +step:1131 train loss:4.436881 +step:1132 train loss:4.393048 +step:1133 train loss:4.407642 +step:1134 train loss:4.383883 +step:1135 train loss:4.424786 +step:1136 train loss:4.431680 +step:1137 train loss:4.351161 +step:1138 train loss:4.430342 +step:1139 train loss:4.385540 +step:1140 train loss:4.450245 +step:1141 train loss:4.414444 +step:1142 train loss:4.344268 +step:1143 train loss:4.417725 +step:1144 train loss:4.442429 +step:1145 train loss:4.386412 +step:1146 train loss:4.338362 +step:1147 train loss:4.355656 +step:1148 train loss:4.382805 +step:1149 train loss:4.430455 +step:1150 train loss:4.447398 +step:1151 train loss:4.455750 +step:1152 train loss:4.351183 +step:1153 train loss:4.346983 +step:1154 train loss:4.322815 +step:1155 train loss:4.422543 +step:1156 train loss:4.330950 +step:1157 train loss:4.355625 +step:1158 train loss:4.410796 +step:1159 train loss:4.410500 +step:1160 train loss:4.336302 +step:1161 train loss:4.428758 +step:1162 train loss:4.372250 +step:1163 train loss:4.352205 +step:1164 train loss:4.258534 +step:1165 train loss:4.395901 +step:1166 train loss:4.320628 +step:1167 train loss:4.322991 +step:1168 train loss:4.384335 +step:1169 train loss:4.336034 +step:1170 train loss:4.344507 +step:1171 train loss:4.367167 +step:1172 train loss:4.329410 +step:1173 train loss:4.360478 +step:1174 train loss:4.303492 +step:1175 train loss:4.336492 +step:1176 train loss:4.442851 +step:1177 train loss:4.301908 +step:1178 train loss:4.369206 +step:1179 train loss:4.322040 +step:1180 train loss:4.351956 +step:1181 train loss:4.330218 +step:1182 train loss:4.387781 +step:1183 train loss:4.367038 +step:1184 train loss:4.300913 +step:1185 train loss:4.339545 +step:1186 train loss:4.326200 +step:1187 train loss:4.293272 +step:1188 train loss:4.334487 +step:1189 train loss:4.277019 +step:1190 train loss:4.343595 +step:1191 train loss:4.406084 +step:1192 train loss:4.356436 +step:1193 train loss:4.359151 +step:1194 train loss:4.469218 +step:1195 train loss:4.456237 +step:1196 train loss:4.349692 +step:1197 train loss:4.356114 +step:1198 train loss:4.331785 +step:1199 train loss:4.336063 +step:1200 train loss:4.389285 +step:1201 train loss:4.363009 +step:1202 train loss:4.297871 +step:1203 train loss:4.287794 +step:1204 train loss:4.327515 +step:1205 train loss:4.325138 
+step:1206 train loss:4.287917 +step:1207 train loss:4.380043 +step:1208 train loss:4.354417 +step:1209 train loss:4.262492 +step:1210 train loss:4.363083 +step:1211 train loss:4.305956 +step:1212 train loss:4.329389 +step:1213 train loss:4.262552 +step:1214 train loss:4.349209 +step:1215 train loss:4.308612 +step:1216 train loss:4.331172 +step:1217 train loss:4.279056 +step:1218 train loss:4.345305 +step:1219 train loss:4.290677 +step:1220 train loss:4.317291 +step:1221 train loss:4.332074 +step:1222 train loss:4.378829 +step:1223 train loss:4.341804 +step:1224 train loss:4.309578 +step:1225 train loss:4.359389 +step:1226 train loss:4.292295 +step:1227 train loss:4.302127 +step:1228 train loss:4.306029 +step:1229 train loss:4.274014 +step:1230 train loss:4.267716 +step:1231 train loss:4.321998 +step:1232 train loss:4.275758 +step:1233 train loss:4.276983 +step:1234 train loss:4.354243 +step:1235 train loss:4.332675 +step:1236 train loss:4.243101 +step:1237 train loss:4.351831 +step:1238 train loss:4.308522 +step:1239 train loss:4.348311 +step:1240 train loss:4.242024 +step:1241 train loss:4.291128 +step:1242 train loss:4.304324 +step:1243 train loss:4.254083 +step:1244 train loss:4.359717 +step:1245 train loss:4.376813 +step:1246 train loss:4.303995 +step:1247 train loss:4.278207 +step:1248 train loss:4.307089 +step:1249 train loss:4.243028 +step:1250 validation loss:4.245025 +step:1250 train loss:4.253535 +step:1251 train loss:4.324215 +step:1252 train loss:4.284662 +step:1253 train loss:4.250475 +step:1254 train loss:4.268020 +step:1255 train loss:4.259705 +step:1256 train loss:4.301201 +step:1257 train loss:4.289615 +step:1258 train loss:4.342656 +step:1259 train loss:4.336534 +step:1260 train loss:4.241298 +step:1261 train loss:4.470624 +step:1262 train loss:4.331456 +step:1263 train loss:4.298207 +step:1264 train loss:4.303384 +step:1265 train loss:4.349263 +step:1266 train loss:4.283569 +step:1267 train loss:4.286423 +step:1268 train loss:4.297235 +step:1269 train loss:4.286328 +step:1270 train loss:4.211989 +step:1271 train loss:4.220182 +step:1272 train loss:4.247978 +step:1273 train loss:4.301874 +step:1274 train loss:4.270975 +step:1275 train loss:4.302045 +step:1276 train loss:4.297764 +step:1277 train loss:4.306336 +step:1278 train loss:4.240060 +step:1279 train loss:4.248903 +step:1280 train loss:4.262130 +step:1281 train loss:4.315067 +step:1282 train loss:4.234102 +step:1283 train loss:4.323567 +step:1284 train loss:4.278813 +step:1285 train loss:4.323916 +step:1286 train loss:4.214376 +step:1287 train loss:4.257903 +step:1288 train loss:4.282813 +step:1289 train loss:4.336512 +step:1290 train loss:4.285470 +step:1291 train loss:4.250374 +step:1292 train loss:4.233182 +step:1293 train loss:4.217087 +step:1294 train loss:4.277381 +step:1295 train loss:4.260622 +step:1296 train loss:4.304881 +step:1297 train loss:4.262705 +step:1298 train loss:4.281130 +step:1299 train loss:4.312452 +step:1300 train loss:4.236411 +step:1301 train loss:4.283749 +step:1302 train loss:4.253864 +step:1303 train loss:4.288542 +step:1304 train loss:4.319486 +step:1305 train loss:4.290139 +step:1306 train loss:4.277816 +step:1307 train loss:4.269898 +step:1308 train loss:4.215647 +step:1309 train loss:4.224740 +step:1310 train loss:4.212996 +step:1311 train loss:4.225235 +step:1312 train loss:4.303417 +step:1313 train loss:4.217519 +step:1314 train loss:4.217401 +step:1315 train loss:4.267038 +step:1316 train loss:4.238576 +step:1317 train loss:4.136587 +step:1318 train loss:4.306957 +step:1319 
train loss:4.373087 +step:1320 train loss:4.285507 +step:1321 train loss:4.247567 +step:1322 train loss:4.354103 +step:1323 train loss:4.297962 +step:1324 train loss:4.400211 +step:1325 train loss:4.287345 +step:1326 train loss:4.319758 +step:1327 train loss:4.328506 +step:1328 train loss:4.234327 +step:1329 train loss:4.253441 +step:1330 train loss:4.270324 +step:1331 train loss:4.157647 +step:1332 train loss:4.310158 +step:1333 train loss:4.277936 +step:1334 train loss:4.281716 +step:1335 train loss:4.296450 +step:1336 train loss:4.302573 +step:1337 train loss:4.269503 +step:1338 train loss:4.250766 +step:1339 train loss:4.331308 +step:1340 train loss:4.289155 +step:1341 train loss:4.271441 +step:1342 train loss:4.237019 +step:1343 train loss:4.226845 +step:1344 train loss:4.301643 +step:1345 train loss:4.258099 +step:1346 train loss:4.332116 +step:1347 train loss:4.250874 +step:1348 train loss:4.221075 +step:1349 train loss:4.165895 +step:1350 train loss:4.195245 +step:1351 train loss:4.264567 +step:1352 train loss:4.231770 +step:1353 train loss:4.208692 +step:1354 train loss:4.216801 +step:1355 train loss:4.292501 +step:1356 train loss:4.203367 +step:1357 train loss:4.235023 +step:1358 train loss:4.217381 +step:1359 train loss:4.218884 +step:1360 train loss:4.253904 +step:1361 train loss:4.368299 +step:1362 train loss:4.282825 +step:1363 train loss:4.164396 +step:1364 train loss:4.190622 +step:1365 train loss:4.181433 +step:1366 train loss:4.230764 +step:1367 train loss:4.167458 +step:1368 train loss:4.190564 +step:1369 train loss:4.221768 +step:1370 train loss:4.239366 +step:1371 train loss:4.194498 +step:1372 train loss:4.224140 +step:1373 train loss:4.261461 +step:1374 train loss:4.275777 +step:1375 train loss:4.227253 +step:1376 train loss:4.250737 +step:1377 train loss:4.255050 +step:1378 train loss:4.264198 +step:1379 train loss:4.259548 +step:1380 train loss:4.313683 +step:1381 train loss:4.256907 +step:1382 train loss:4.251876 +step:1383 train loss:4.228142 +step:1384 train loss:4.317079 +step:1385 train loss:4.197475 +step:1386 train loss:4.265365 +step:1387 train loss:4.271614 +step:1388 train loss:4.229123 +step:1389 train loss:4.206474 +step:1390 train loss:4.233179 +step:1391 train loss:4.261466 +step:1392 train loss:4.241189 +step:1393 train loss:4.288799 +step:1394 train loss:4.214681 +step:1395 train loss:4.256382 +step:1396 train loss:4.236933 +step:1397 train loss:4.252438 +step:1398 train loss:4.260577 +step:1399 train loss:4.223361 +step:1400 train loss:4.203110 +step:1401 train loss:4.195385 +step:1402 train loss:4.197222 +step:1403 train loss:4.156305 +step:1404 train loss:4.214321 +step:1405 train loss:4.176061 +step:1406 train loss:4.200089 +step:1407 train loss:4.197089 +step:1408 train loss:4.175679 +step:1409 train loss:4.161023 +step:1410 train loss:4.178554 +step:1411 train loss:4.208613 +step:1412 train loss:4.276369 +step:1413 train loss:4.194729 +step:1414 train loss:4.216272 +step:1415 train loss:4.175659 +step:1416 train loss:4.233404 +step:1417 train loss:4.212512 +step:1418 train loss:4.153460 +step:1419 train loss:4.154609 +step:1420 train loss:4.191139 +step:1421 train loss:4.222980 +step:1422 train loss:4.204586 +step:1423 train loss:4.296795 +step:1424 train loss:4.189243 +step:1425 train loss:4.158310 +step:1426 train loss:4.180450 +step:1427 train loss:4.164342 +step:1428 train loss:4.149857 +step:1429 train loss:4.175371 +step:1430 train loss:4.179509 +step:1431 train loss:4.205161 +step:1432 train loss:4.188858 +step:1433 train loss:4.171865 
+step:1434 train loss:4.146040 +step:1435 train loss:4.145967 +step:1436 train loss:4.222925 +step:1437 train loss:4.153427 +step:1438 train loss:4.158685 +step:1439 train loss:4.134963 +step:1440 train loss:4.169868 +step:1441 train loss:4.245715 +step:1442 train loss:4.214832 +step:1443 train loss:4.141141 +step:1444 train loss:4.152601 +step:1445 train loss:4.146417 +step:1446 train loss:4.178374 +step:1447 train loss:4.192267 +step:1448 train loss:4.157230 +step:1449 train loss:4.192859 +step:1450 train loss:4.203435 +step:1451 train loss:4.124403 +step:1452 train loss:4.176895 +step:1453 train loss:4.167385 +step:1454 train loss:4.164061 +step:1455 train loss:4.101786 +step:1456 train loss:4.179897 +step:1457 train loss:4.120744 +step:1458 train loss:4.254545 +step:1459 train loss:4.183643 +step:1460 train loss:4.145622 +step:1461 train loss:4.205034 +step:1462 train loss:4.217325 +step:1463 train loss:4.204958 +step:1464 train loss:4.219140 +step:1465 train loss:4.187579 +step:1466 train loss:4.146080 +step:1467 train loss:4.280265 +step:1468 train loss:4.167576 +step:1469 train loss:4.240148 +step:1470 train loss:4.167199 +step:1471 train loss:4.168464 +step:1472 train loss:4.163488 +step:1473 train loss:4.158849 +step:1474 train loss:4.098555 +step:1475 train loss:4.157428 +step:1476 train loss:4.239153 +step:1477 train loss:4.183156 +step:1478 train loss:4.117605 +step:1479 train loss:4.144828 +step:1480 train loss:4.141021 +step:1481 train loss:4.116847 +step:1482 train loss:4.178585 +step:1483 train loss:4.167325 +step:1484 train loss:4.192704 +step:1485 train loss:4.207029 +step:1486 train loss:4.152344 +step:1487 train loss:4.135345 +step:1488 train loss:4.145022 +step:1489 train loss:4.143052 +step:1490 train loss:4.187951 +step:1491 train loss:4.181131 +step:1492 train loss:4.185579 +step:1493 train loss:4.119476 +step:1494 train loss:4.156736 +step:1495 train loss:4.139850 +step:1496 train loss:4.111724 +step:1497 train loss:4.181513 +step:1498 train loss:4.087123 +step:1499 train loss:4.128263 +step:1500 validation loss:4.100855 total_sharp:7.0390e-05 L1_sharp:2.8477e-05 L2_sharp:3.1483e-05 L3_sharp:1.1780e-05 L4_sharp:6.2943e-06 L5_sharp:5.6057e-06 L6_sharp:8.4220e-06 L7_sharp:5.4410e-06 L8_sharp:8.6427e-06 L9_sharp:1.5225e-05 L10_sharp:2.2431e-05 L11_sharp:2.0715e-05 L12_sharp:1.4816e-04 total_fnorm:3.6265e+01 total_l1_linf:3.0482e+05 total_spectral:3.6265e+01 L1_fnorm:7.4432e+00 L2_fnorm:5.8101e+00 L3_fnorm:6.3642e+00 L4_fnorm:6.9779e+00 L5_fnorm:7.4412e+00 L6_fnorm:7.7423e+00 L7_fnorm:8.3085e+00 L8_fnorm:8.3717e+00 L9_fnorm:8.3510e+00 L10_fnorm:8.2187e+00 L11_fnorm:8.4302e+00 L12_fnorm:7.5050e+00 L1_l1linf:7.9641e+00 L2_l1linf:4.8013e+00 L3_l1linf:5.6304e+00 L4_l1linf:6.3829e+00 L5_l1linf:7.3062e+00 L6_l1linf:7.8602e+00 L7_l1linf:8.5666e+00 L8_l1linf:9.4021e+00 L9_l1linf:9.1489e+00 L10_l1linf:1.0229e+01 L11_l1linf:1.0581e+01 L12_l1linf:1.1951e+01 L1_spectral:1.0434e+00 L2_spectral:8.4035e-01 L3_spectral:8.9558e-01 L4_spectral:1.0578e+00 L5_spectral:1.0235e+00 L6_spectral:9.9657e-01 L7_spectral:1.0004e+00 L8_spectral:1.1937e+00 L9_spectral:1.3115e+00 L10_spectral:1.4857e+00 L11_spectral:1.5271e+00 L12_spectral:1.6594e+00 ip_v_neg_g:4.7466e-02 cos_v_neg_g:2.9629e-03 v_norm:3.6265e+01 g_norm:4.4176e-01 hv_norm:4.3589e-01 cos_v_hv:5.8562e-03 hg_norm:3.6088e+00 cos_g_hg:6.3419e-01 v_par:1.8907e-02 v_perp:3.6265e+01 L1_cos_v_neg_g:3.0283e-02 L1_v_norm:7.4432e+00 L2_cos_v_neg_g:2.6676e-02 L2_v_norm:5.8101e+00 L3_cos_v_neg_g:1.5128e-02 L3_v_norm:6.3642e+00 
L4_cos_v_neg_g:1.3835e-02 L4_v_norm:6.9779e+00 L5_cos_v_neg_g:1.1469e-02 L5_v_norm:7.4412e+00 L6_cos_v_neg_g:1.4150e-02 L6_v_norm:7.7423e+00 L7_cos_v_neg_g:9.9521e-03 L7_v_norm:8.3085e+00 L8_cos_v_neg_g:1.2563e-02 L8_v_norm:8.3717e+00 L9_cos_v_neg_g:1.5719e-02 L9_v_norm:8.3510e+00 L10_cos_v_neg_g:2.0012e-02 L10_v_norm:8.2187e+00 L11_cos_v_neg_g:2.0159e-02 L11_v_norm:8.4302e+00 L12_cos_v_neg_g:4.0480e-02 L12_v_norm:7.5050e+00 +step:1500 train loss:4.123913 +step:1501 train loss:4.143172 +step:1502 train loss:4.083292 +step:1503 train loss:4.134184 +step:1504 train loss:4.104380 +step:1505 train loss:4.076204 +step:1506 train loss:4.070781 +step:1507 train loss:4.100295 +step:1508 train loss:4.109133 +step:1509 train loss:4.154622 +step:1510 train loss:4.098804 +step:1511 train loss:4.126201 +step:1512 train loss:4.098990 +step:1513 train loss:4.170074 +step:1514 train loss:4.117521 +step:1515 train loss:4.176934 +step:1516 train loss:4.101514 +step:1517 train loss:4.112936 +step:1518 train loss:4.191677 +step:1519 train loss:4.156034 +step:1520 train loss:4.207891 +step:1521 train loss:4.110454 +step:1522 train loss:4.166419 +step:1523 train loss:4.173360 +step:1524 train loss:4.086452 +step:1525 train loss:4.171260 +step:1526 train loss:4.089593 +step:1527 train loss:4.149951 +step:1528 train loss:4.191376 +step:1529 train loss:4.150936 +step:1530 train loss:4.188011 +step:1531 train loss:4.114439 +step:1532 train loss:4.183053 +step:1533 train loss:4.156443 +step:1534 train loss:4.097942 +step:1535 train loss:4.154043 +step:1536 train loss:4.179098 +step:1537 train loss:4.132162 +step:1538 train loss:4.129932 +step:1539 train loss:4.136674 +step:1540 train loss:4.164104 +step:1541 train loss:4.118956 +step:1542 train loss:4.205465 +step:1543 train loss:4.226614 +step:1544 train loss:4.101639 +step:1545 train loss:4.081985 +step:1546 train loss:4.116107 +step:1547 train loss:4.105904 +step:1548 train loss:4.147250 +step:1549 train loss:4.071494 +step:1550 train loss:4.189847 +step:1551 train loss:4.121307 +step:1552 train loss:4.149181 +step:1553 train loss:4.149712 +step:1554 train loss:4.162080 +step:1555 train loss:4.114708 +step:1556 train loss:4.096341 +step:1557 train loss:4.111238 +step:1558 train loss:4.130863 +step:1559 train loss:4.097313 +step:1560 train loss:4.175789 +step:1561 train loss:4.149524 +step:1562 train loss:4.040725 +step:1563 train loss:4.006049 +step:1564 train loss:4.150926 +step:1565 train loss:4.130748 +step:1566 train loss:4.141848 +step:1567 train loss:4.145828 +step:1568 train loss:4.097035 +step:1569 train loss:4.094780 +step:1570 train loss:4.111538 +step:1571 train loss:4.097429 +step:1572 train loss:4.148531 +step:1573 train loss:4.192512 +step:1574 train loss:4.160728 +step:1575 train loss:4.159661 +step:1576 train loss:4.117144 +step:1577 train loss:4.133822 +step:1578 train loss:4.116092 +step:1579 train loss:4.183072 +step:1580 train loss:4.130528 +step:1581 train loss:4.163529 +step:1582 train loss:4.166093 +step:1583 train loss:4.146236 +step:1584 train loss:4.072617 +step:1585 train loss:4.152847 +step:1586 train loss:4.116317 +step:1587 train loss:4.124631 +step:1588 train loss:4.113968 +step:1589 train loss:4.150084 +step:1590 train loss:4.056950 +step:1591 train loss:4.114625 +step:1592 train loss:4.059270 +step:1593 train loss:4.094078 +step:1594 train loss:4.099757 +step:1595 train loss:4.080908 +step:1596 train loss:4.087758 +step:1597 train loss:4.022858 +step:1598 train loss:4.121401 +step:1599 train loss:4.126157 +step:1600 train 
loss:4.015049 +step:1601 train loss:4.084279 +step:1602 train loss:4.142685 +step:1603 train loss:4.141858 +step:1604 train loss:4.069029 +step:1605 train loss:4.112342 +step:1606 train loss:4.166363 +step:1607 train loss:4.057070 +step:1608 train loss:4.086508 +step:1609 train loss:4.095224 +step:1610 train loss:4.152582 +step:1611 train loss:4.079918 +step:1612 train loss:4.010831 +step:1613 train loss:4.079379 +step:1614 train loss:4.186898 +step:1615 train loss:4.121389 +step:1616 train loss:4.145514 +step:1617 train loss:4.115412 +step:1618 train loss:4.126886 +step:1619 train loss:4.292835 +step:1620 train loss:4.093922 +step:1621 train loss:4.151825 +step:1622 train loss:4.073518 +step:1623 train loss:4.130648 +step:1624 train loss:4.105234 +step:1625 train loss:4.176793 +step:1626 train loss:4.064436 +step:1627 train loss:4.068923 +step:1628 train loss:4.086448 +step:1629 train loss:4.118327 +step:1630 train loss:4.133436 +step:1631 train loss:4.080994 +step:1632 train loss:4.063005 +step:1633 train loss:4.075314 +step:1634 train loss:4.121607 +step:1635 train loss:4.066893 +step:1636 train loss:4.049591 +step:1637 train loss:4.118206 +step:1638 train loss:4.229332 +step:1639 train loss:4.031703 +step:1640 train loss:4.105463 +step:1641 train loss:4.069166 +step:1642 train loss:4.159544 +step:1643 train loss:4.062845 +step:1644 train loss:4.071966 +step:1645 train loss:4.051591 +step:1646 train loss:4.134045 +step:1647 train loss:4.025098 +step:1648 train loss:4.094260 +step:1649 train loss:4.062231 +step:1650 train loss:4.068904 +step:1651 train loss:4.089351 +step:1652 train loss:4.106667 +step:1653 train loss:4.118392 +step:1654 train loss:4.105000 +step:1655 train loss:4.075626 +step:1656 train loss:4.062330 +step:1657 train loss:4.072823 +step:1658 train loss:4.044173 +step:1659 train loss:4.118871 +step:1660 train loss:4.019964 +step:1661 train loss:4.147198 +step:1662 train loss:4.081489 +step:1663 train loss:4.072745 +step:1664 train loss:4.161276 +step:1665 train loss:4.080554 +step:1666 train loss:4.092417 +step:1667 train loss:4.105094 +step:1668 train loss:4.084179 +step:1669 train loss:4.044273 +step:1670 train loss:4.093049 +step:1671 train loss:4.092002 +step:1672 train loss:4.087934 +step:1673 train loss:4.047118 +step:1674 train loss:4.040113 +step:1675 train loss:4.083666 +step:1676 train loss:4.340928 +step:1677 train loss:4.099484 +step:1678 train loss:4.022846 +step:1679 train loss:4.146304 +step:1680 train loss:4.072566 +step:1681 train loss:4.122200 +step:1682 train loss:4.072421 +step:1683 train loss:4.068593 +step:1684 train loss:4.030452 +step:1685 train loss:4.087715 +step:1686 train loss:4.074977 +step:1687 train loss:4.089630 +step:1688 train loss:4.066356 +step:1689 train loss:4.056787 +step:1690 train loss:4.079345 +step:1691 train loss:4.073446 +step:1692 train loss:4.091464 +step:1693 train loss:4.053083 +step:1694 train loss:4.013275 +step:1695 train loss:4.032902 +step:1696 train loss:4.039921 +step:1697 train loss:4.080278 +step:1698 train loss:4.085957 +step:1699 train loss:4.035583 +step:1700 train loss:4.111057 +step:1701 train loss:4.046622 +step:1702 train loss:4.042524 +step:1703 train loss:4.063398 +step:1704 train loss:4.072682 +step:1705 train loss:4.078968 +step:1706 train loss:4.095523 +step:1707 train loss:4.103547 +step:1708 train loss:4.037882 +step:1709 train loss:4.134128 +step:1710 train loss:4.056253 +step:1711 train loss:4.049214 +step:1712 train loss:4.074840 +step:1713 train loss:4.031549 +step:1714 train loss:4.391747 
+step:1715 train loss:4.055046 +step:1716 train loss:4.038715 +step:1717 train loss:4.039737 +step:1718 train loss:4.114136 +step:1719 train loss:4.038545 +step:1720 train loss:4.106885 +step:1721 train loss:4.051355 +step:1722 train loss:4.015607 +step:1723 train loss:4.110616 +step:1724 train loss:4.061554 +step:1725 train loss:4.056219 +step:1726 train loss:4.053440 +step:1727 train loss:4.088171 +step:1728 train loss:4.093479 +step:1729 train loss:4.021945 +step:1730 train loss:4.096875 +step:1731 train loss:4.020121 +step:1732 train loss:4.031569 +step:1733 train loss:4.028495 +step:1734 train loss:4.073132 +step:1735 train loss:4.141666 +step:1736 train loss:4.056046 +step:1737 train loss:4.088393 +step:1738 train loss:4.059381 +step:1739 train loss:4.106137 +step:1740 train loss:4.099963 +step:1741 train loss:4.152977 +step:1742 train loss:4.142786 +step:1743 train loss:4.033703 +step:1744 train loss:4.045295 +step:1745 train loss:4.033863 +step:1746 train loss:4.013027 +step:1747 train loss:4.046079 +step:1748 train loss:3.986152 +step:1749 train loss:4.028164 +step:1750 validation loss:4.005017 +step:1750 train loss:4.057912 +step:1751 train loss:4.074269 +step:1752 train loss:4.037329 +step:1753 train loss:4.068038 +step:1754 train loss:4.065186 +step:1755 train loss:4.066824 +step:1756 train loss:4.084316 +step:1757 train loss:4.089655 +step:1758 train loss:4.009988 +step:1759 train loss:4.106881 +step:1760 train loss:4.050207 +step:1761 train loss:4.025421 +step:1762 train loss:4.026110 +step:1763 train loss:4.025795 +step:1764 train loss:4.321549 +step:1765 train loss:4.036439 +step:1766 train loss:4.124969 +step:1767 train loss:4.034688 +step:1768 train loss:4.008187 +step:1769 train loss:4.030680 +step:1770 train loss:4.045380 +step:1771 train loss:4.026410 +step:1772 train loss:4.129157 +step:1773 train loss:4.057673 +step:1774 train loss:4.060882 +step:1775 train loss:4.174417 +step:1776 train loss:4.059997 +step:1777 train loss:4.054290 +step:1778 train loss:4.110356 +step:1779 train loss:4.030597 +step:1780 train loss:4.081663 +step:1781 train loss:4.087661 +step:1782 train loss:4.109836 +step:1783 train loss:4.038449 +step:1784 train loss:4.129737 +step:1785 train loss:4.034540 +step:1786 train loss:4.035375 +step:1787 train loss:4.039336 +step:1788 train loss:4.066001 +step:1789 train loss:4.019187 +step:1790 train loss:4.023709 +step:1791 train loss:4.105804 +step:1792 train loss:4.102487 +step:1793 train loss:4.023979 +step:1794 train loss:4.063560 +step:1795 train loss:4.015179 +step:1796 train loss:3.991880 +step:1797 train loss:4.057063 +step:1798 train loss:4.000391 +step:1799 train loss:4.052885 +step:1800 train loss:4.074684 +step:1801 train loss:4.067678 +step:1802 train loss:4.073886 +step:1803 train loss:4.063863 +step:1804 train loss:4.066840 +step:1805 train loss:4.048282 +step:1806 train loss:4.064574 +step:1807 train loss:4.000227 +step:1808 train loss:4.065453 +step:1809 train loss:4.045473 +step:1810 train loss:4.037852 +step:1811 train loss:4.049407 +step:1812 train loss:4.034691 +step:1813 train loss:4.050114 +step:1814 train loss:4.116121 +step:1815 train loss:4.059842 +step:1816 train loss:4.008147 +step:1817 train loss:3.995474 +step:1818 train loss:4.052010 +step:1819 train loss:4.022246 +step:1820 train loss:4.063047 +step:1821 train loss:4.025774 +step:1822 train loss:4.005383 +step:1823 train loss:4.008583 +step:1824 train loss:4.086586 +step:1825 train loss:3.984171 +step:1826 train loss:4.031754 +step:1827 train loss:3.994793 +step:1828 
train loss:4.041720 +step:1829 train loss:4.005240 +step:1830 train loss:4.200598 +step:1831 train loss:3.966540 +step:1832 train loss:4.011965 +step:1833 train loss:4.065465 +step:1834 train loss:4.007078 +step:1835 train loss:4.028875 +step:1836 train loss:4.059824 +step:1837 train loss:3.985950 +step:1838 train loss:4.083608 +step:1839 train loss:4.062377 +step:1840 train loss:4.030379 +step:1841 train loss:4.050525 +step:1842 train loss:4.026737 +step:1843 train loss:3.969993 +step:1844 train loss:4.041975 +step:1845 train loss:4.005890 +step:1846 train loss:4.058205 +step:1847 train loss:4.109944 +step:1848 train loss:3.907512 +step:1849 train loss:4.008245 +step:1850 train loss:3.991245 +step:1851 train loss:4.029951 +step:1852 train loss:4.009077 +step:1853 train loss:4.062959 +step:1854 train loss:4.034191 +step:1855 train loss:4.013781 +step:1856 train loss:4.012322 +step:1857 train loss:4.027044 +step:1858 train loss:4.075138 +step:1859 train loss:4.024874 +step:1860 train loss:3.987281 +step:1861 train loss:4.001480 +step:1862 train loss:4.044977 +step:1863 train loss:4.081949 +step:1864 train loss:3.978688 +step:1865 train loss:4.002204 +step:1866 train loss:4.010765 +step:1867 train loss:4.044024 +step:1868 train loss:4.084769 +step:1869 train loss:4.006683 +step:1870 train loss:4.031693 +step:1871 train loss:3.963798 +step:1872 train loss:4.034150 +step:1873 train loss:4.099247 +step:1874 train loss:3.960210 +step:1875 train loss:4.044860 +step:1876 train loss:4.007404 +step:1877 train loss:4.049809 +step:1878 train loss:3.974622 +step:1879 train loss:4.034239 +step:1880 train loss:4.113406 +step:1881 train loss:4.033885 +step:1882 train loss:4.050686 +step:1883 train loss:4.077484 +step:1884 train loss:4.088892 +step:1885 train loss:4.043181 +step:1886 train loss:3.974838 +step:1887 train loss:4.000494 +step:1888 train loss:4.018990 +step:1889 train loss:4.028062 +step:1890 train loss:4.044834 +step:1891 train loss:3.972459 +step:1892 train loss:4.071357 +step:1893 train loss:3.991090 +step:1894 train loss:4.010733 +step:1895 train loss:4.045105 +step:1896 train loss:4.092310 +step:1897 train loss:3.977181 +step:1898 train loss:4.025755 +step:1899 train loss:4.040178 +step:1900 train loss:3.984623 +step:1901 train loss:4.054295 +step:1902 train loss:4.052005 +step:1903 train loss:3.993023 +step:1904 train loss:3.978518 +step:1905 train loss:3.980264 +step:1906 train loss:4.031891 +step:1907 train loss:3.976275 +step:1908 train loss:3.987508 +step:1909 train loss:4.084097 +step:1910 train loss:3.976448 +step:1911 train loss:3.979054 +step:1912 train loss:4.030447 +step:1913 train loss:3.972744 +step:1914 train loss:4.005621 +step:1915 train loss:3.972723 +step:1916 train loss:4.026078 +step:1917 train loss:4.002297 +step:1918 train loss:3.914861 +step:1919 train loss:4.064595 +step:1920 train loss:4.170308 +step:1921 train loss:3.948273 +step:1922 train loss:3.929930 +step:1923 train loss:4.028976 +step:1924 train loss:4.065074 +step:1925 train loss:4.012077 +step:1926 train loss:3.949162 +step:1927 train loss:4.029828 +step:1928 train loss:3.950913 +step:1929 train loss:3.976220 +step:1930 train loss:4.040955 +step:1931 train loss:3.954238 +step:1932 train loss:4.002823 +step:1933 train loss:4.002201 +step:1934 train loss:4.067220 +step:1935 train loss:4.023673 +step:1936 train loss:3.995195 +step:1937 train loss:3.941531 +step:1938 train loss:4.308750 +step:1939 train loss:4.114716 +step:1940 train loss:4.119553 +step:1941 train loss:4.109976 +step:1942 train loss:4.116302 
+step:1943 train loss:4.253256 +step:1944 train loss:4.104475 +step:1945 train loss:4.097667 +step:1946 train loss:4.115395 +step:1947 train loss:4.134674 +step:1948 train loss:4.021109 +step:1949 train loss:4.111810 +step:1950 train loss:4.048351 +step:1951 train loss:4.067334 +step:1952 train loss:4.102096 +step:1953 train loss:4.033262 +step:1954 train loss:4.051975 +step:1955 train loss:3.999986 +step:1956 train loss:4.080153 +step:1957 train loss:4.096758 +step:1958 train loss:4.112442 +step:1959 train loss:3.974391 +step:1960 train loss:4.014783 +step:1961 train loss:4.043329 +step:1962 train loss:4.036558 +step:1963 train loss:4.008351 +step:1964 train loss:4.038316 +step:1965 train loss:4.073494 +step:1966 train loss:3.978141 +step:1967 train loss:4.036621 +step:1968 train loss:3.974797 +step:1969 train loss:3.986296 +step:1970 train loss:4.047706 +step:1971 train loss:3.950186 +step:1972 train loss:4.055483 +step:1973 train loss:3.951107 +step:1974 train loss:4.001399 +step:1975 train loss:3.958335 +step:1976 train loss:3.979800 +step:1977 train loss:4.023633 +step:1978 train loss:3.965555 +step:1979 train loss:3.941020 +step:1980 train loss:3.980087 +step:1981 train loss:3.960557 +step:1982 train loss:4.044892 +step:1983 train loss:3.984041 +step:1984 train loss:4.025521 +step:1985 train loss:4.010844 +step:1986 train loss:4.000582 +step:1987 train loss:3.951819 +step:1988 train loss:3.983416 +step:1989 train loss:4.123087 +step:1990 train loss:3.967826 +step:1991 train loss:3.951407 +step:1992 train loss:3.960517 +step:1993 train loss:4.001196 +step:1994 train loss:4.001399 +step:1995 train loss:3.952718 +step:1996 train loss:3.999323 +step:1997 train loss:4.001190 +step:1998 train loss:3.953691 +step:1999 train loss:4.064363 +step:2000 validation loss:3.932800 total_sharp:6.0231e-05 L1_sharp:1.8253e-05 L2_sharp:2.9798e-05 L3_sharp:1.8673e-05 L4_sharp:1.0069e-05 L5_sharp:7.7311e-06 L6_sharp:8.7170e-06 L7_sharp:4.1632e-06 L8_sharp:9.2219e-06 L9_sharp:9.9967e-06 L10_sharp:1.5975e-05 L11_sharp:1.3357e-05 L12_sharp:1.3803e-04 total_fnorm:3.6586e+01 total_l1_linf:3.0682e+05 total_spectral:3.6586e+01 L1_fnorm:7.6363e+00 L2_fnorm:5.7371e+00 L3_fnorm:6.4516e+00 L4_fnorm:7.1191e+00 L5_fnorm:7.5420e+00 L6_fnorm:7.8122e+00 L7_fnorm:8.4493e+00 L8_fnorm:8.5272e+00 L9_fnorm:8.5562e+00 L10_fnorm:8.3418e+00 L11_fnorm:8.6414e+00 L12_fnorm:7.6946e+00 L1_l1linf:7.6518e+00 L2_l1linf:4.9374e+00 L3_l1linf:6.0169e+00 L4_l1linf:6.9651e+00 L5_l1linf:7.9323e+00 L6_l1linf:8.1715e+00 L7_l1linf:9.2288e+00 L8_l1linf:9.3132e+00 L9_l1linf:9.9535e+00 L10_l1linf:9.3368e+00 L11_l1linf:9.9583e+00 L12_l1linf:1.1522e+01 L1_spectral:1.0659e+00 L2_spectral:7.4481e-01 L3_spectral:9.6038e-01 L4_spectral:9.3211e-01 L5_spectral:1.0134e+00 L6_spectral:1.0400e+00 L7_spectral:9.8788e-01 L8_spectral:1.1648e+00 L9_spectral:1.2964e+00 L10_spectral:1.3034e+00 L11_spectral:1.3618e+00 L12_spectral:1.6602e+00 ip_v_neg_g:3.7850e-02 cos_v_neg_g:2.2682e-03 v_norm:3.6586e+01 g_norm:4.5611e-01 hv_norm:4.2643e-01 cos_v_hv:5.1676e-03 hg_norm:5.0572e+00 cos_g_hg:5.7456e-01 v_par:1.2488e-02 v_perp:3.6586e+01 L1_cos_v_neg_g:2.1598e-02 L1_v_norm:7.6363e+00 L2_cos_v_neg_g:1.9784e-02 L2_v_norm:5.7371e+00 L3_cos_v_neg_g:1.6153e-02 L3_v_norm:6.4516e+00 L4_cos_v_neg_g:1.4185e-02 L4_v_norm:7.1191e+00 L5_cos_v_neg_g:1.5127e-02 L5_v_norm:7.5420e+00 L6_cos_v_neg_g:1.3809e-02 L6_v_norm:7.8122e+00 L7_cos_v_neg_g:1.0639e-02 L7_v_norm:8.4493e+00 L8_cos_v_neg_g:1.2766e-02 L8_v_norm:8.5272e+00 L9_cos_v_neg_g:1.1570e-02 L9_v_norm:8.5562e+00 
L10_cos_v_neg_g:1.4517e-02 L10_v_norm:8.3418e+00 L11_cos_v_neg_g:1.4121e-02 L11_v_norm:8.6414e+00 L12_cos_v_neg_g:2.5189e-02 L12_v_norm:7.6946e+00 +step:2000 train loss:4.032716 +step:2001 train loss:3.951461 +step:2002 train loss:4.057542 +step:2003 train loss:4.099484 +step:2004 train loss:3.968886 +step:2005 train loss:4.071232 +step:2006 train loss:3.956870 +step:2007 train loss:4.031651 +step:2008 train loss:3.972715 +step:2009 train loss:3.977643 +step:2010 train loss:4.102341 +step:2011 train loss:3.956055 +step:2012 train loss:3.982192 +step:2013 train loss:3.993453 +step:2014 train loss:3.891575 +step:2015 train loss:4.014580 +step:2016 train loss:3.992723 +step:2017 train loss:3.996455 +step:2018 train loss:3.964836 +step:2019 train loss:3.993912 +step:2020 train loss:4.005055 +step:2021 train loss:3.968585 +step:2022 train loss:4.014949 +step:2023 train loss:3.991076 +step:2024 train loss:4.037030 +step:2025 train loss:3.980903 +step:2026 train loss:3.956622 +step:2027 train loss:3.992470 +step:2028 train loss:3.916606 +step:2029 train loss:3.948076 +step:2030 train loss:3.948299 +step:2031 train loss:3.918572 +step:2032 train loss:3.971730 +step:2033 train loss:3.974795 +step:2034 train loss:3.966773 +step:2035 train loss:4.005082 +step:2036 train loss:3.995315 +step:2037 train loss:3.981900 +step:2038 train loss:3.970173 +step:2039 train loss:3.970293 +step:2040 train loss:4.000555 +step:2041 train loss:4.005292 +step:2042 train loss:3.929920 +step:2043 train loss:4.083260 +step:2044 train loss:3.948392 +step:2045 train loss:3.967660 +step:2046 train loss:3.973730 +step:2047 train loss:3.946886 +step:2048 train loss:3.994233 +step:2049 train loss:3.953576 +step:2050 train loss:3.978335 +step:2051 train loss:3.946854 +step:2052 train loss:3.996471 +step:2053 train loss:3.997929 +step:2054 train loss:3.964797 +step:2055 train loss:3.971104 +step:2056 train loss:4.013419 +step:2057 train loss:4.014528 +step:2058 train loss:3.978768 +step:2059 train loss:4.056954 +step:2060 train loss:4.005874 +step:2061 train loss:3.949780 +step:2062 train loss:3.980827 +step:2063 train loss:3.883559 +step:2064 train loss:4.001843 +step:2065 train loss:4.007689 +step:2066 train loss:3.869949 +step:2067 train loss:3.915797 +step:2068 train loss:4.023735 +step:2069 train loss:3.960965 +step:2070 train loss:3.963123 +step:2071 train loss:4.006132 +step:2072 train loss:3.938555 +step:2073 train loss:3.991529 +step:2074 train loss:3.964901 +step:2075 train loss:4.054867 +step:2076 train loss:3.998285 +step:2077 train loss:4.012180 +step:2078 train loss:3.965827 +step:2079 train loss:4.115118 +step:2080 train loss:3.931849 +step:2081 train loss:4.041001 +step:2082 train loss:3.972646 +step:2083 train loss:3.958068 +step:2084 train loss:3.931703 +step:2085 train loss:3.979868 +step:2086 train loss:3.987471 +step:2087 train loss:4.029197 +step:2088 train loss:3.898975 +step:2089 train loss:3.930635 +step:2090 train loss:3.966290 +step:2091 train loss:3.981091 +step:2092 train loss:3.959092 +step:2093 train loss:3.950689 +step:2094 train loss:3.986259 +step:2095 train loss:3.939070 +step:2096 train loss:3.922325 +step:2097 train loss:3.958802 +step:2098 train loss:3.952751 +step:2099 train loss:3.929386 +step:2100 train loss:4.004871 +step:2101 train loss:3.995312 +step:2102 train loss:3.963735 +step:2103 train loss:3.981970 +step:2104 train loss:3.961556 +step:2105 train loss:3.969734 +step:2106 train loss:3.958036 +step:2107 train loss:4.024516 +step:2108 train loss:3.953605 +step:2109 train 
loss:3.905194 +step:2110 train loss:4.002610 +step:2111 train loss:3.955144 +step:2112 train loss:4.007874 +step:2113 train loss:3.950752 +step:2114 train loss:3.962010 +step:2115 train loss:4.007222 +step:2116 train loss:3.943177 +step:2117 train loss:3.963813 +step:2118 train loss:3.949067 +step:2119 train loss:3.885247 +step:2120 train loss:3.968348 +step:2121 train loss:3.959622 +step:2122 train loss:3.966321 +step:2123 train loss:4.024047 +step:2124 train loss:4.025231 +step:2125 train loss:3.932747 +step:2126 train loss:3.940509 +step:2127 train loss:3.927119 +step:2128 train loss:3.922933 +step:2129 train loss:3.947993 +step:2130 train loss:3.948339 +step:2131 train loss:3.974508 +step:2132 train loss:3.901042 +step:2133 train loss:4.012910 +step:2134 train loss:3.961531 +step:2135 train loss:3.927884 +step:2136 train loss:4.015078 +step:2137 train loss:3.975446 +step:2138 train loss:3.930867 +step:2139 train loss:3.934552 +step:2140 train loss:3.939907 +step:2141 train loss:3.987460 +step:2142 train loss:3.959166 +step:2143 train loss:3.881106 +step:2144 train loss:3.989803 +step:2145 train loss:3.957947 +step:2146 train loss:3.992806 +step:2147 train loss:4.096847 +step:2148 train loss:3.896262 +step:2149 train loss:3.907656 +step:2150 train loss:3.933526 +step:2151 train loss:3.966947 +step:2152 train loss:3.966707 +step:2153 train loss:4.012633 +step:2154 train loss:3.927067 +step:2155 train loss:4.009491 +step:2156 train loss:3.932787 +step:2157 train loss:4.008172 +step:2158 train loss:4.049911 +step:2159 train loss:3.972610 +step:2160 train loss:4.045082 +step:2161 train loss:3.940809 +step:2162 train loss:3.946019 +step:2163 train loss:3.924974 +step:2164 train loss:3.940627 +step:2165 train loss:3.917227 +step:2166 train loss:4.034395 +step:2167 train loss:3.942286 +step:2168 train loss:3.958037 +step:2169 train loss:3.906486 +step:2170 train loss:4.050616 +step:2171 train loss:4.012475 +step:2172 train loss:3.948819 +step:2173 train loss:3.937754 +step:2174 train loss:3.999551 +step:2175 train loss:3.936844 +step:2176 train loss:4.014166 +step:2177 train loss:3.981213 +step:2178 train loss:3.910279 +step:2179 train loss:3.975194 +step:2180 train loss:3.991974 +step:2181 train loss:3.924171 +step:2182 train loss:3.975664 +step:2183 train loss:3.967657 +step:2184 train loss:3.925177 +step:2185 train loss:3.903691 +step:2186 train loss:3.942906 +step:2187 train loss:3.953419 +step:2188 train loss:4.001626 +step:2189 train loss:3.893533 +step:2190 train loss:3.935996 +step:2191 train loss:3.996895 +step:2192 train loss:3.921644 +step:2193 train loss:3.892965 +step:2194 train loss:3.904063 +step:2195 train loss:3.916456 +step:2196 train loss:3.919401 +step:2197 train loss:3.904343 +step:2198 train loss:3.926732 +step:2199 train loss:3.997026 +step:2200 train loss:3.931063 +step:2201 train loss:3.932510 +step:2202 train loss:3.895905 +step:2203 train loss:3.920991 +step:2204 train loss:3.954688 +step:2205 train loss:3.931509 +step:2206 train loss:3.928434 +step:2207 train loss:3.929239 +step:2208 train loss:3.908404 +step:2209 train loss:4.188718 +step:2210 train loss:3.963722 +step:2211 train loss:3.975752 +step:2212 train loss:3.980358 +step:2213 train loss:4.060884 +step:2214 train loss:4.044990 +step:2215 train loss:3.959510 +step:2216 train loss:3.922823 +step:2217 train loss:3.952580 +step:2218 train loss:3.950021 +step:2219 train loss:3.976940 +step:2220 train loss:3.918804 +step:2221 train loss:3.947763 +step:2222 train loss:3.965165 +step:2223 train loss:3.999984 
+step:2224 train loss:3.969957 +step:2225 train loss:3.912272 +step:2226 train loss:3.974310 +step:2227 train loss:3.976550 +step:2228 train loss:3.974259 +step:2229 train loss:3.917247 +step:2230 train loss:4.038100 +step:2231 train loss:3.949839 +step:2232 train loss:3.948912 +step:2233 train loss:3.986992 +step:2234 train loss:3.889365 +step:2235 train loss:3.979267 +step:2236 train loss:3.919309 +step:2237 train loss:4.049906 +step:2238 train loss:3.853019 +step:2239 train loss:3.929866 +step:2240 train loss:3.942880 +step:2241 train loss:3.856581 +step:2242 train loss:3.996932 +step:2243 train loss:4.030749 +step:2244 train loss:3.912009 +step:2245 train loss:3.915787 +step:2246 train loss:3.883757 +step:2247 train loss:3.885495 +step:2248 train loss:3.941203 +step:2249 train loss:3.924923 +step:2250 validation loss:3.874176 +step:2250 train loss:3.932314 +step:2251 train loss:3.898547 +step:2252 train loss:3.898932 +step:2253 train loss:3.926825 +step:2254 train loss:3.925353 +step:2255 train loss:3.892367 +step:2256 train loss:3.940934 +step:2257 train loss:3.926953 +step:2258 train loss:3.918699 +step:2259 train loss:3.932700 +step:2260 train loss:3.890814 +step:2261 train loss:3.968115 +step:2262 train loss:3.983029 +step:2263 train loss:3.944252 +step:2264 train loss:4.056176 +step:2265 train loss:3.902004 +step:2266 train loss:3.943614 +step:2267 train loss:3.903949 +step:2268 train loss:3.910131 +step:2269 train loss:3.918349 +step:2270 train loss:3.905050 +step:2271 train loss:3.916858 +step:2272 train loss:3.961057 +step:2273 train loss:3.886395 +step:2274 train loss:3.908196 +step:2275 train loss:3.869543 +step:2276 train loss:3.938769 +step:2277 train loss:3.953573 +step:2278 train loss:3.932404 +step:2279 train loss:3.914319 +step:2280 train loss:3.833424 +step:2281 train loss:3.969387 +step:2282 train loss:3.906845 +step:2283 train loss:3.891910 +step:2284 train loss:3.905412 +step:2285 train loss:3.959469 +step:2286 train loss:3.914302 +step:2287 train loss:3.952669 +step:2288 train loss:3.921947 +step:2289 train loss:3.919633 +step:2290 train loss:3.921834 +step:2291 train loss:3.914591 +step:2292 train loss:3.955196 +step:2293 train loss:3.945563 +step:2294 train loss:3.934046 +step:2295 train loss:3.990790 +step:2296 train loss:3.923379 +step:2297 train loss:3.892073 +step:2298 train loss:3.952545 +step:2299 train loss:3.924656 +step:2300 train loss:3.840006 +step:2301 train loss:3.937795 +step:2302 train loss:3.951926 +step:2303 train loss:3.923701 +step:2304 train loss:3.910001 +step:2305 train loss:3.952907 +step:2306 train loss:3.944565 +step:2307 train loss:3.922133 +step:2308 train loss:3.941005 +step:2309 train loss:3.898854 +step:2310 train loss:3.882879 +step:2311 train loss:3.875074 +step:2312 train loss:3.940203 +step:2313 train loss:3.856363 +step:2314 train loss:3.926779 +step:2315 train loss:3.943886 +step:2316 train loss:3.983078 +step:2317 train loss:3.854958 +step:2318 train loss:3.894398 +step:2319 train loss:3.948812 +step:2320 train loss:3.914440 +step:2321 train loss:3.888052 +step:2322 train loss:3.899275 +step:2323 train loss:3.899211 +step:2324 train loss:3.925972 +step:2325 train loss:3.864048 +step:2326 train loss:3.890595 +step:2327 train loss:4.012816 +step:2328 train loss:3.961989 +step:2329 train loss:3.919632 +step:2330 train loss:3.874053 +step:2331 train loss:3.921980 +step:2332 train loss:3.840271 +step:2333 train loss:3.904122 +step:2334 train loss:3.882586 +step:2335 train loss:3.868314 +step:2336 train loss:4.121755 +step:2337 
train loss:3.896610 +step:2338 train loss:3.940081 +step:2339 train loss:3.940111 +step:2340 train loss:3.954364 +step:2341 train loss:3.945861 +step:2342 train loss:3.898105 +step:2343 train loss:3.911991 +step:2344 train loss:3.955387 +step:2345 train loss:3.911452 +step:2346 train loss:3.942633 +step:2347 train loss:3.869122 +step:2348 train loss:3.928621 +step:2349 train loss:3.875711 +step:2350 train loss:3.932889 +step:2351 train loss:3.940162 +step:2352 train loss:3.943951 +step:2353 train loss:3.900064 +step:2354 train loss:3.950074 +step:2355 train loss:3.943051 +step:2356 train loss:3.977855 +step:2357 train loss:3.880987 +step:2358 train loss:3.899304 +step:2359 train loss:3.917821 +step:2360 train loss:3.936585 +step:2361 train loss:3.974388 +step:2362 train loss:3.800385 +step:2363 train loss:3.995485 +step:2364 train loss:3.939746 +step:2365 train loss:3.911517 +step:2366 train loss:3.864992 +step:2367 train loss:3.927510 +step:2368 train loss:3.918309 +step:2369 train loss:3.906003 +step:2370 train loss:3.918279 +step:2371 train loss:3.975058 +step:2372 train loss:3.835847 +step:2373 train loss:3.974908 +step:2374 train loss:3.955546 +step:2375 train loss:3.946198 +step:2376 train loss:3.929746 +step:2377 train loss:3.878881 +step:2378 train loss:3.922328 +step:2379 train loss:3.909793 +step:2380 train loss:3.968536 +step:2381 train loss:4.062778 +step:2382 train loss:3.849782 +step:2383 train loss:3.900475 +step:2384 train loss:3.928338 +step:2385 train loss:3.832929 +step:2386 train loss:3.989485 +step:2387 train loss:3.868835 +step:2388 train loss:3.913600 +step:2389 train loss:3.936421 +step:2390 train loss:3.888555 +step:2391 train loss:3.907230 +step:2392 train loss:3.932658 +step:2393 train loss:3.888596 +step:2394 train loss:3.914781 +step:2395 train loss:3.902910 +step:2396 train loss:3.905762 +step:2397 train loss:3.887220 +step:2398 train loss:3.938059 +step:2399 train loss:3.899623 +step:2400 train loss:3.876714 +step:2401 train loss:3.918180 +step:2402 train loss:3.872483 +step:2403 train loss:3.932481 +step:2404 train loss:3.889932 +step:2405 train loss:3.888268 +step:2406 train loss:3.918963 +step:2407 train loss:3.860797 +step:2408 train loss:3.904634 +step:2409 train loss:3.892345 +step:2410 train loss:3.894551 +step:2411 train loss:3.966358 +step:2412 train loss:3.951068 +step:2413 train loss:3.989374 +step:2414 train loss:3.880594 +step:2415 train loss:3.871646 +step:2416 train loss:3.887979 +step:2417 train loss:3.922640 +step:2418 train loss:3.941376 +step:2419 train loss:3.867057 +step:2420 train loss:3.892062 +step:2421 train loss:3.919177 +step:2422 train loss:3.968909 +step:2423 train loss:3.905787 +step:2424 train loss:3.873713 +step:2425 train loss:3.931287 +step:2426 train loss:3.870993 +step:2427 train loss:3.897090 +step:2428 train loss:3.981586 +step:2429 train loss:3.937145 +step:2430 train loss:4.024402 +step:2431 train loss:3.937994 +step:2432 train loss:3.914534 +step:2433 train loss:3.883725 +step:2434 train loss:3.872408 +step:2435 train loss:3.928878 +step:2436 train loss:3.882416 +step:2437 train loss:3.910692 +step:2438 train loss:3.957754 +step:2439 train loss:3.939285 +step:2440 train loss:3.879591 +step:2441 train loss:3.914208 +step:2442 train loss:3.907087 +step:2443 train loss:3.866411 +step:2444 train loss:3.905275 +step:2445 train loss:3.903168 +step:2446 train loss:3.876856 +step:2447 train loss:3.852933 +step:2448 train loss:3.900573 +step:2449 train loss:3.932597 +step:2450 train loss:3.894957 +step:2451 train loss:3.818460 
+step:2452 train loss:3.918244 +step:2453 train loss:3.888218 +step:2454 train loss:3.886409 +step:2455 train loss:3.936228 +step:2456 train loss:3.892019 +step:2457 train loss:3.946450 +step:2458 train loss:3.923305 +step:2459 train loss:3.898614 +step:2460 train loss:3.902498 +step:2461 train loss:3.945371 +step:2462 train loss:3.911818 +step:2463 train loss:3.890067 +step:2464 train loss:3.908218 +step:2465 train loss:3.975305 +step:2466 train loss:4.069832 +step:2467 train loss:3.979591 +step:2468 train loss:3.867980 +step:2469 train loss:3.940807 +step:2470 train loss:3.986127 +step:2471 train loss:3.992532 +step:2472 train loss:3.974430 +step:2473 train loss:3.906583 +step:2474 train loss:3.874409 +step:2475 train loss:3.920032 +step:2476 train loss:3.994944 +step:2477 train loss:3.909140 +step:2478 train loss:3.864411 +step:2479 train loss:3.908023 +step:2480 train loss:3.894912 +step:2481 train loss:4.088917 +step:2482 train loss:3.900397 +step:2483 train loss:3.934832 +step:2484 train loss:3.882237 +step:2485 train loss:3.871628 +step:2486 train loss:3.906345 +step:2487 train loss:3.937875 +step:2488 train loss:3.854522 +step:2489 train loss:3.958884 +step:2490 train loss:3.881413 +step:2491 train loss:3.888047 +step:2492 train loss:3.933906 +step:2493 train loss:3.968001 +step:2494 train loss:3.889353 +step:2495 train loss:3.921668 +step:2496 train loss:3.896507 +step:2497 train loss:3.912672 +step:2498 train loss:3.915159 +step:2499 train loss:3.914922 +step:2500 validation loss:3.839753 total_sharp:7.2231e-05 L1_sharp:2.1114e-05 L2_sharp:1.0621e-05 L3_sharp:5.6118e-06 L4_sharp:4.9816e-06 L5_sharp:3.1853e-06 L6_sharp:3.8628e-06 L7_sharp:4.3809e-06 L8_sharp:8.2759e-06 L9_sharp:1.3698e-05 L10_sharp:2.0254e-05 L11_sharp:1.3856e-05 L12_sharp:1.9045e-04 total_fnorm:3.8588e+01 total_l1_linf:3.2859e+05 total_spectral:3.8588e+01 L1_fnorm:8.5244e+00 L2_fnorm:6.9457e+00 L3_fnorm:7.4854e+00 L4_fnorm:8.0208e+00 L5_fnorm:8.5097e+00 L6_fnorm:8.7120e+00 L7_fnorm:9.2761e+00 L8_fnorm:9.2446e+00 L9_fnorm:9.2349e+00 L10_fnorm:9.0409e+00 L11_fnorm:9.2210e+00 L12_fnorm:8.2176e+00 L1_l1linf:8.3394e+00 L2_l1linf:5.5238e+00 L3_l1linf:6.6566e+00 L4_l1linf:7.5580e+00 L5_l1linf:8.5769e+00 L6_l1linf:8.7293e+00 L7_l1linf:9.5157e+00 L8_l1linf:1.0704e+01 L9_l1linf:1.0267e+01 L10_l1linf:1.0692e+01 L11_l1linf:1.1304e+01 L12_l1linf:1.1847e+01 L1_spectral:1.1459e+00 L2_spectral:9.2599e-01 L3_spectral:1.0503e+00 L4_spectral:1.0288e+00 L5_spectral:1.1209e+00 L6_spectral:1.0660e+00 L7_spectral:1.2138e+00 L8_spectral:1.4308e+00 L9_spectral:1.5576e+00 L10_spectral:1.6270e+00 L11_spectral:1.5555e+00 L12_spectral:1.7924e+00 ip_v_neg_g:6.5902e-02 cos_v_neg_g:3.6058e-03 v_norm:3.8588e+01 g_norm:4.7364e-01 hv_norm:5.2262e-01 cos_v_hv:5.3333e-03 hg_norm:4.6235e+00 cos_g_hg:6.5522e-01 v_par:1.8822e-02 v_perp:3.8588e+01 L1_cos_v_neg_g:3.5777e-02 L1_v_norm:8.5244e+00 L2_cos_v_neg_g:1.6007e-02 L2_v_norm:6.9457e+00 L3_cos_v_neg_g:1.6745e-02 L3_v_norm:7.4854e+00 L4_cos_v_neg_g:1.6248e-02 L4_v_norm:8.0208e+00 L5_cos_v_neg_g:1.4779e-02 L5_v_norm:8.5097e+00 L6_cos_v_neg_g:1.7315e-02 L6_v_norm:8.7120e+00 L7_cos_v_neg_g:1.6148e-02 L7_v_norm:9.2761e+00 L8_cos_v_neg_g:2.1409e-02 L8_v_norm:9.2446e+00 L9_cos_v_neg_g:2.4659e-02 L9_v_norm:9.2349e+00 L10_cos_v_neg_g:2.8630e-02 L10_v_norm:9.0409e+00 L11_cos_v_neg_g:2.8651e-02 L11_v_norm:9.2210e+00 L12_cos_v_neg_g:5.1833e-02 L12_v_norm:8.2176e+00 +step:2500 train loss:3.864058 +step:2501 train loss:3.925757 +step:2502 train loss:3.916736 +step:2503 train loss:3.850993 +step:2504 train 
loss:3.878753 +step:2505 train loss:3.904098 +step:2506 train loss:3.865715 +step:2507 train loss:3.891199 +step:2508 train loss:3.844692 +step:2509 train loss:3.861632 +step:2510 train loss:3.864373 +step:2511 train loss:3.899250 +step:2512 train loss:3.946481 +step:2513 train loss:3.891450 +step:2514 train loss:3.876294 +step:2515 train loss:4.024110 +step:2516 train loss:3.918943 +step:2517 train loss:3.982079 +step:2518 train loss:3.947895 +step:2519 train loss:3.914329 +step:2520 train loss:3.921027 +step:2521 train loss:3.896745 +step:2522 train loss:3.937648 +step:2523 train loss:3.860790 +step:2524 train loss:3.921045 +step:2525 train loss:3.899534 +step:2526 train loss:3.950531 +step:2527 train loss:3.949775 +step:2528 train loss:3.924784 +step:2529 train loss:3.939115 +step:2530 train loss:3.915063 +step:2531 train loss:3.852073 +step:2532 train loss:3.950037 +step:2533 train loss:3.847754 +step:2534 train loss:3.938906 +step:2535 train loss:3.893583 +step:2536 train loss:3.815466 +step:2537 train loss:3.935375 +step:2538 train loss:3.909406 +step:2539 train loss:3.927256 +step:2540 train loss:3.862548 +step:2541 train loss:3.887551 +step:2542 train loss:3.899388 +step:2543 train loss:3.892927 +step:2544 train loss:3.872077 +step:2545 train loss:3.866735 +step:2546 train loss:3.831668 +step:2547 train loss:3.872698 +step:2548 train loss:3.898331 +step:2549 train loss:3.899616 +step:2550 train loss:4.027403 +step:2551 train loss:4.100525 +step:2552 train loss:3.839058 +step:2553 train loss:3.873417 +step:2554 train loss:4.016292 +step:2555 train loss:3.904232 +step:2556 train loss:3.829454 +step:2557 train loss:3.924759 +step:2558 train loss:3.915545 +step:2559 train loss:3.866549 +step:2560 train loss:3.851604 +step:2561 train loss:3.951253 +step:2562 train loss:3.900810 +step:2563 train loss:3.836619 +step:2564 train loss:3.907739 +step:2565 train loss:3.887712 +step:2566 train loss:3.863129 +step:2567 train loss:3.850233 +step:2568 train loss:3.906134 +step:2569 train loss:3.909470 +step:2570 train loss:3.862652 +step:2571 train loss:3.945959 +step:2572 train loss:3.904645 +step:2573 train loss:3.840787 +step:2574 train loss:3.883812 +step:2575 train loss:3.930861 +step:2576 train loss:3.879444 +step:2577 train loss:3.843158 +step:2578 train loss:3.886708 +step:2579 train loss:3.859881 +step:2580 train loss:3.830922 +step:2581 train loss:3.848456 +step:2582 train loss:3.855992 +step:2583 train loss:3.880190 +step:2584 train loss:3.895409 +step:2585 train loss:3.859206 +step:2586 train loss:3.882234 +step:2587 train loss:3.814141 +step:2588 train loss:3.846201 +step:2589 train loss:3.921781 +step:2590 train loss:3.845836 +step:2591 train loss:3.903861 +step:2592 train loss:3.954283 +step:2593 train loss:3.913809 +step:2594 train loss:3.870429 +step:2595 train loss:3.877123 +step:2596 train loss:3.920203 +step:2597 train loss:3.804301 +step:2598 train loss:3.959672 +step:2599 train loss:3.912585 +step:2600 train loss:3.941893 +step:2601 train loss:3.876765 +step:2602 train loss:3.911477 +step:2603 train loss:3.902082 +step:2604 train loss:3.827908 +step:2605 train loss:3.950407 +step:2606 train loss:3.902686 +step:2607 train loss:3.857839 +step:2608 train loss:3.830367 +step:2609 train loss:3.857471 +step:2610 train loss:3.881622 +step:2611 train loss:3.922735 +step:2612 train loss:3.884809 +step:2613 train loss:3.853710 +step:2614 train loss:3.845307 +step:2615 train loss:3.849066 +step:2616 train loss:3.920815 +step:2617 train loss:3.877164 +step:2618 train loss:3.838559 
+step:2619 train loss:3.859586 +step:2620 train loss:3.853850 +step:2621 train loss:3.865784 +step:2622 train loss:3.941982 +step:2623 train loss:3.811191 +step:2624 train loss:3.829549 +step:2625 train loss:3.904203 +step:2626 train loss:3.899139 +step:2627 train loss:3.875022 +step:2628 train loss:3.919637 +step:2629 train loss:3.874368 +step:2630 train loss:3.863976 +step:2631 train loss:3.897592 +step:2632 train loss:3.865023 +step:2633 train loss:3.849985 +step:2634 train loss:3.894073 +step:2635 train loss:3.878161 +step:2636 train loss:3.928995 +step:2637 train loss:3.878994 +step:2638 train loss:3.858114 +step:2639 train loss:3.915112 +step:2640 train loss:3.831892 +step:2641 train loss:3.889727 +step:2642 train loss:3.806468 +step:2643 train loss:3.808411 +step:2644 train loss:3.906833 +step:2645 train loss:3.842220 +step:2646 train loss:3.871000 +step:2647 train loss:3.893490 +step:2648 train loss:3.928735 +step:2649 train loss:3.837989 +step:2650 train loss:3.827096 +step:2651 train loss:3.868505 +step:2652 train loss:3.837932 +step:2653 train loss:3.905438 +step:2654 train loss:3.868041 +step:2655 train loss:3.859684 +step:2656 train loss:3.876285 +step:2657 train loss:3.905438 +step:2658 train loss:3.913164 +step:2659 train loss:3.896851 +step:2660 train loss:3.888279 +step:2661 train loss:3.933676 +step:2662 train loss:3.900517 +step:2663 train loss:3.877535 +step:2664 train loss:3.889087 +step:2665 train loss:3.833540 +step:2666 train loss:3.864381 +step:2667 train loss:3.872314 +step:2668 train loss:3.846606 +step:2669 train loss:3.863808 +step:2670 train loss:3.882263 +step:2671 train loss:3.855863 +step:2672 train loss:3.874986 +step:2673 train loss:3.811364 +step:2674 train loss:3.906414 +step:2675 train loss:3.874719 +step:2676 train loss:3.896873 +step:2677 train loss:3.877195 +step:2678 train loss:3.864657 +step:2679 train loss:3.845221 +step:2680 train loss:3.831206 +step:2681 train loss:3.810089 +step:2682 train loss:3.895781 +step:2683 train loss:3.868212 +step:2684 train loss:3.899155 +step:2685 train loss:3.812810 +step:2686 train loss:3.831425 +step:2687 train loss:3.913346 +step:2688 train loss:3.927092 +step:2689 train loss:3.828481 +step:2690 train loss:3.908033 +step:2691 train loss:3.876895 +step:2692 train loss:3.905705 +step:2693 train loss:3.956408 +step:2694 train loss:3.849114 +step:2695 train loss:3.870917 +step:2696 train loss:3.877629 +step:2697 train loss:3.867887 +step:2698 train loss:3.873809 +step:2699 train loss:3.894511 +step:2700 train loss:3.865178 +step:2701 train loss:3.938269 +step:2702 train loss:3.868742 +step:2703 train loss:3.825321 +step:2704 train loss:3.900060 +step:2705 train loss:3.901483 +step:2706 train loss:3.849353 +step:2707 train loss:3.815408 +step:2708 train loss:3.897741 +step:2709 train loss:3.880917 +step:2710 train loss:3.883790 +step:2711 train loss:3.849268 +step:2712 train loss:3.909802 +step:2713 train loss:3.914264 +step:2714 train loss:3.853568 +step:2715 train loss:3.848755 +step:2716 train loss:3.916521 +step:2717 train loss:3.878345 +step:2718 train loss:3.874476 +step:2719 train loss:3.874558 +step:2720 train loss:3.838438 +step:2721 train loss:3.918746 +step:2722 train loss:3.845987 +step:2723 train loss:3.836643 +step:2724 train loss:3.858962 +step:2725 train loss:3.855410 +step:2726 train loss:3.831613 +step:2727 train loss:3.887546 +step:2728 train loss:3.826921 +step:2729 train loss:3.958368 +step:2730 train loss:3.893916 +step:2731 train loss:3.936060 +step:2732 train loss:3.850263 +step:2733 train 
loss:3.844991 +step:2734 train loss:3.893338 +step:2735 train loss:3.897292 +step:2736 train loss:3.818201 +step:2737 train loss:3.870981 +step:2738 train loss:3.926623 +step:2739 train loss:3.843704 +step:2740 train loss:3.845372 +step:2741 train loss:3.833736 +step:2742 train loss:3.757146 +step:2743 train loss:3.876747 +step:2744 train loss:3.916187 +step:2745 train loss:3.861045 +step:2746 train loss:3.869013 +step:2747 train loss:3.852655 +step:2748 train loss:3.807614 +step:2749 train loss:3.874101 +step:2750 validation loss:3.801152 +step:2750 train loss:3.883237 +step:2751 train loss:3.901318 +step:2752 train loss:3.889019 +step:2753 train loss:3.878744 +step:2754 train loss:3.819455 +step:2755 train loss:3.889261 +step:2756 train loss:3.861107 +step:2757 train loss:3.846991 +step:2758 train loss:3.876093 +step:2759 train loss:3.885143 +step:2760 train loss:3.794699 +step:2761 train loss:3.805516 +step:2762 train loss:3.821208 +step:2763 train loss:3.846003 +step:2764 train loss:3.786386 +step:2765 train loss:3.835864 +step:2766 train loss:3.924319 +step:2767 train loss:3.795073 +step:2768 train loss:3.858800 +step:2769 train loss:3.836522 +step:2770 train loss:3.854597 +step:2771 train loss:3.878617 +step:2772 train loss:3.844104 +step:2773 train loss:3.844592 +step:2774 train loss:3.834292 +step:2775 train loss:3.854211 +step:2776 train loss:3.804739 +step:2777 train loss:3.840119 +step:2778 train loss:3.848046 +step:2779 train loss:3.875171 +step:2780 train loss:3.845585 +step:2781 train loss:3.834073 +step:2782 train loss:3.819645 +step:2783 train loss:3.851691 +step:2784 train loss:3.860016 +step:2785 train loss:3.929343 +step:2786 train loss:3.897099 +step:2787 train loss:3.864217 +step:2788 train loss:3.857625 +step:2789 train loss:3.846873 +step:2790 train loss:3.785871 +step:2791 train loss:3.892478 +step:2792 train loss:3.875959 +step:2793 train loss:3.842850 +step:2794 train loss:3.853255 +step:2795 train loss:3.864547 +step:2796 train loss:3.858156 +step:2797 train loss:3.904906 +step:2798 train loss:3.891624 +step:2799 train loss:3.796875 +step:2800 train loss:3.842082 +step:2801 train loss:3.875926 +step:2802 train loss:3.901860 +step:2803 train loss:3.872586 +step:2804 train loss:3.810479 +step:2805 train loss:3.850559 +step:2806 train loss:3.845398 +step:2807 train loss:3.878977 +step:2808 train loss:3.815772 +step:2809 train loss:3.887304 +step:2810 train loss:3.875774 +step:2811 train loss:3.864396 +step:2812 train loss:3.911207 +step:2813 train loss:3.883231 +step:2814 train loss:3.868535 +step:2815 train loss:3.880314 +step:2816 train loss:3.882528 +step:2817 train loss:3.819530 +step:2818 train loss:3.923488 +step:2819 train loss:3.850165 +step:2820 train loss:3.845258 +step:2821 train loss:3.826788 +step:2822 train loss:3.867318 +step:2823 train loss:3.818338 +step:2824 train loss:3.718735 +step:2825 train loss:3.886234 +step:2826 train loss:3.877063 +step:2827 train loss:3.899889 +step:2828 train loss:3.889866 +step:2829 train loss:3.878554 +step:2830 train loss:3.912949 +step:2831 train loss:3.840195 +step:2832 train loss:3.814167 +step:2833 train loss:3.872168 +step:2834 train loss:3.824934 +step:2835 train loss:3.855613 +step:2836 train loss:3.861130 +step:2837 train loss:3.860398 +step:2838 train loss:3.802012 +step:2839 train loss:3.902308 +step:2840 train loss:3.862525 +step:2841 train loss:3.937242 +step:2842 train loss:3.882535 +step:2843 train loss:3.876091 +step:2844 train loss:3.912776 +step:2845 train loss:3.858011 +step:2846 train loss:3.807511 
+step:2847 train loss:3.897253 +step:2848 train loss:3.853679 +step:2849 train loss:3.843200 +step:2850 train loss:3.905618 +step:2851 train loss:3.852086 +step:2852 train loss:3.934698 +step:2853 train loss:3.847214 +step:2854 train loss:3.791282 +step:2855 train loss:3.871138 +step:2856 train loss:3.795602 +step:2857 train loss:3.896809 +step:2858 train loss:3.848434 +step:2859 train loss:3.884250 +step:2860 train loss:3.836352 +step:2861 train loss:3.821812 +step:2862 train loss:3.848564 +step:2863 train loss:3.832271 +step:2864 train loss:3.841650 +step:2865 train loss:3.910337 +step:2866 train loss:3.926514 +step:2867 train loss:3.860699 +step:2868 train loss:3.863085 +step:2869 train loss:3.820340 +step:2870 train loss:3.903905 +step:2871 train loss:3.902058 +step:2872 train loss:3.863123 +step:2873 train loss:3.874048 +step:2874 train loss:3.850093 +step:2875 train loss:3.804760 +step:2876 train loss:3.850550 +step:2877 train loss:3.834192 +step:2878 train loss:3.853110 +step:2879 train loss:3.816385 +step:2880 train loss:3.834395 +step:2881 train loss:3.827793 +step:2882 train loss:3.759736 +step:2883 train loss:3.846985 +step:2884 train loss:3.916181 +step:2885 train loss:3.809206 +step:2886 train loss:3.861156 +step:2887 train loss:3.881774 +step:2888 train loss:3.855063 +step:2889 train loss:3.838762 +step:2890 train loss:3.805959 +step:2891 train loss:3.848656 +step:2892 train loss:3.859370 +step:2893 train loss:3.840952 +step:2894 train loss:3.811619 +step:2895 train loss:3.859394 +step:2896 train loss:3.905652 +step:2897 train loss:3.886448 +step:2898 train loss:4.021590 +step:2899 train loss:3.785702 +step:2900 train loss:3.855869 +step:2901 train loss:3.804794 +step:2902 train loss:3.802245 +step:2903 train loss:3.817417 +step:2904 train loss:3.841946 +step:2905 train loss:3.905755 +step:2906 train loss:3.873603 +step:2907 train loss:4.047888 +step:2908 train loss:3.800760 +step:2909 train loss:3.880762 +step:2910 train loss:3.859082 +step:2911 train loss:3.881276 +step:2912 train loss:3.843327 +step:2913 train loss:3.874951 +step:2914 train loss:3.896456 +step:2915 train loss:3.897272 +step:2916 train loss:3.848989 +step:2917 train loss:3.884854 +step:2918 train loss:3.872248 +step:2919 train loss:3.814898 +step:2920 train loss:3.867955 +step:2921 train loss:3.824934 +step:2922 train loss:3.847389 +step:2923 train loss:3.914729 +step:2924 train loss:3.847510 +step:2925 train loss:3.801635 +step:2926 train loss:3.897316 +step:2927 train loss:3.803640 +step:2928 train loss:3.773518 +step:2929 train loss:3.788669 +step:2930 train loss:3.806689 +step:2931 train loss:3.962633 +step:2932 train loss:3.881042 +step:2933 train loss:3.838983 +step:2934 train loss:3.836043 +step:2935 train loss:3.859389 +step:2936 train loss:3.813762 +step:2937 train loss:3.830528 +step:2938 train loss:3.846012 +step:2939 train loss:3.919898 +step:2940 train loss:3.817940 +step:2941 train loss:3.855581 +step:2942 train loss:3.815558 +step:2943 train loss:4.086528 +step:2944 train loss:3.923915 +step:2945 train loss:3.879665 +step:2946 train loss:3.895541 +step:2947 train loss:3.851855 +step:2948 train loss:3.812169 +step:2949 train loss:3.901084 +step:2950 train loss:3.853076 +step:2951 train loss:3.752055 +step:2952 train loss:3.820638 +step:2953 train loss:3.739183 +step:2954 train loss:3.824965 +step:2955 train loss:3.903554 +step:2956 train loss:3.841412 +step:2957 train loss:3.843985 +step:2958 train loss:3.799340 +step:2959 train loss:3.821511 +step:2960 train loss:3.917608 +step:2961 train 
loss:3.778029 +step:2962 train loss:3.852624 +step:2963 train loss:3.847631 +step:2964 train loss:3.825863 +step:2965 train loss:3.851408 +step:2966 train loss:3.828254 +step:2967 train loss:3.826598 +step:2968 train loss:3.798372 +step:2969 train loss:3.812850 +step:2970 train loss:3.886188 +step:2971 train loss:3.809989 +step:2972 train loss:3.794322 +step:2973 train loss:3.788595 +step:2974 train loss:3.829193 +step:2975 train loss:3.789646 +step:2976 train loss:3.833389 +step:2977 train loss:3.825321 +step:2978 train loss:3.903193 +step:2979 train loss:3.883857 +step:2980 train loss:3.894163 +step:2981 train loss:3.849275 +step:2982 train loss:3.838715 +step:2983 train loss:3.793283 +step:2984 train loss:3.765255 +step:2985 train loss:3.877779 +step:2986 train loss:3.772579 +step:2987 train loss:3.901680 +step:2988 train loss:3.822041 +step:2989 train loss:3.861136 +step:2990 train loss:3.811031 +step:2991 train loss:3.879048 +step:2992 train loss:3.870199 +step:2993 train loss:3.839226 +step:2994 train loss:3.828824 +step:2995 train loss:3.895739 +step:2996 train loss:3.819124 +step:2997 train loss:3.733633 +step:2998 train loss:3.844332 +step:2999 train loss:3.883277 +step:3000 validation loss:3.774163 total_sharp:4.1975e-05 L1_sharp:1.1189e-05 L2_sharp:2.4477e-06 L3_sharp:3.8002e-06 L4_sharp:4.5116e-06 L5_sharp:3.7648e-06 L6_sharp:3.2955e-06 L7_sharp:2.6708e-06 L8_sharp:5.2439e-06 L9_sharp:7.6677e-06 L10_sharp:1.1420e-05 L11_sharp:9.4863e-06 L12_sharp:1.0682e-04 total_fnorm:4.0066e+01 total_l1_linf:3.4289e+05 total_spectral:4.0066e+01 L1_fnorm:8.9940e+00 L2_fnorm:7.4593e+00 L3_fnorm:8.0686e+00 L4_fnorm:8.6415e+00 L5_fnorm:9.0627e+00 L6_fnorm:9.2226e+00 L7_fnorm:9.7081e+00 L8_fnorm:9.7469e+00 L9_fnorm:9.6728e+00 L10_fnorm:9.5405e+00 L11_fnorm:9.8767e+00 L12_fnorm:9.1889e+00 L1_l1linf:8.4824e+00 L2_l1linf:5.8382e+00 L3_l1linf:6.9421e+00 L4_l1linf:7.8660e+00 L5_l1linf:8.8764e+00 L6_l1linf:9.0342e+00 L7_l1linf:9.8718e+00 L8_l1linf:1.0218e+01 L9_l1linf:1.0682e+01 L10_l1linf:1.2100e+01 L11_l1linf:1.1043e+01 L12_l1linf:1.2202e+01 L1_spectral:1.1789e+00 L2_spectral:9.3358e-01 L3_spectral:9.5477e-01 L4_spectral:9.6150e-01 L5_spectral:1.0792e+00 L6_spectral:1.1164e+00 L7_spectral:1.1041e+00 L8_spectral:1.3268e+00 L9_spectral:1.4745e+00 L10_spectral:1.5684e+00 L11_spectral:1.5631e+00 L12_spectral:1.8727e+00 ip_v_neg_g:4.2754e-02 cos_v_neg_g:2.4951e-03 v_norm:4.0066e+01 g_norm:4.2768e-01 hv_norm:3.0486e-01 cos_v_hv:5.5165e-03 hg_norm:4.1851e+00 cos_g_hg:6.0745e-01 v_par:1.8163e-02 v_perp:4.0066e+01 L1_cos_v_neg_g:2.1775e-02 L1_v_norm:8.9940e+00 L2_cos_v_neg_g:9.4802e-03 L2_v_norm:7.4593e+00 L3_cos_v_neg_g:9.0427e-03 L3_v_norm:8.0686e+00 L4_cos_v_neg_g:9.2011e-03 L4_v_norm:8.6415e+00 L5_cos_v_neg_g:9.5775e-03 L5_v_norm:9.0627e+00 L6_cos_v_neg_g:1.0820e-02 L6_v_norm:9.2225e+00 L7_cos_v_neg_g:1.0305e-02 L7_v_norm:9.7081e+00 L8_cos_v_neg_g:1.2738e-02 L8_v_norm:9.7469e+00 L9_cos_v_neg_g:1.5303e-02 L9_v_norm:9.6728e+00 L10_cos_v_neg_g:1.8671e-02 L10_v_norm:9.5405e+00 L11_cos_v_neg_g:2.0113e-02 L11_v_norm:9.8767e+00 L12_cos_v_neg_g:3.9531e-02 L12_v_norm:9.1889e+00 +step:3000 train loss:3.787040 +step:3001 train loss:3.835696 +step:3002 train loss:3.829955 +step:3003 train loss:3.828076 +step:3004 train loss:3.857234 +step:3005 train loss:3.749703 +step:3006 train loss:3.805274 +step:3007 train loss:3.836425 +step:3008 train loss:3.879836 +step:3009 train loss:3.835304 +step:3010 train loss:3.852814 +step:3011 train loss:3.843829 +step:3012 train loss:3.821479 +step:3013 train loss:3.865689 +step:3014 
train loss:3.819330 +step:3015 train loss:3.816350 +step:3016 train loss:3.835707 +step:3017 train loss:3.859334 +step:3018 train loss:3.789884 +step:3019 train loss:3.822863 +step:3020 train loss:3.846395 +step:3021 train loss:3.809160 +step:3022 train loss:3.901069 +step:3023 train loss:3.843592 +step:3024 train loss:3.834577 +step:3025 train loss:3.843116 +step:3026 train loss:3.816417 +step:3027 train loss:3.794042 +step:3028 train loss:3.842649 +step:3029 train loss:3.834514 +step:3030 train loss:3.808201 +step:3031 train loss:3.789699 +step:3032 train loss:3.778267 +step:3033 train loss:3.806641 +step:3034 train loss:3.853972 +step:3035 train loss:3.833806 +step:3036 train loss:3.791704 +step:3037 train loss:3.756728 +step:3038 train loss:3.872368 +step:3039 train loss:3.750702 +step:3040 train loss:3.737769 +step:3041 train loss:3.871138 +step:3042 train loss:3.805151 +step:3043 train loss:3.858676 +step:3044 train loss:3.753680 +step:3045 train loss:3.796460 +step:3046 train loss:3.770458 +step:3047 train loss:3.800841 +step:3048 train loss:3.769285 +step:3049 train loss:3.849756 +step:3050 train loss:3.734018 +step:3051 train loss:3.752344 +step:3052 train loss:3.774106 +step:3053 train loss:3.843075 +step:3054 train loss:3.915786 +step:3055 train loss:3.756206 +step:3056 train loss:3.789553 +step:3057 train loss:3.825541 +step:3058 train loss:3.771429 +step:3059 train loss:3.801657 +step:3060 train loss:3.798379 +step:3061 train loss:3.785993 +step:3062 train loss:3.838528 +step:3063 train loss:3.820023 +step:3064 train loss:3.840631 +step:3065 train loss:3.861854 +step:3066 train loss:3.761713 +step:3067 train loss:3.815615 +step:3068 train loss:3.862241 +step:3069 train loss:3.877625 +step:3070 train loss:3.806498 +step:3071 train loss:3.819043 +step:3072 train loss:3.818557 +step:3073 train loss:3.855619 +step:3074 train loss:3.791663 +step:3075 train loss:3.825039 +step:3076 train loss:3.759335 +step:3077 train loss:3.759894 +step:3078 train loss:3.790863 +step:3079 train loss:3.835831 +step:3080 train loss:3.830663 +step:3081 train loss:3.876322 +step:3082 train loss:3.852384 +step:3083 train loss:3.781953 +step:3084 train loss:3.861459 +step:3085 train loss:3.793244 +step:3086 train loss:3.851967 +step:3087 train loss:3.817005 +step:3088 train loss:3.901408 +step:3089 train loss:3.774971 +step:3090 train loss:3.848441 +step:3091 train loss:3.769851 +step:3092 train loss:3.792217 +step:3093 train loss:3.819815 +step:3094 train loss:3.805709 +step:3095 train loss:3.884193 +step:3096 train loss:3.815254 +step:3097 train loss:3.823706 +step:3098 train loss:3.802534 +step:3099 train loss:3.811581 +step:3100 train loss:3.837561 +step:3101 train loss:3.926183 +step:3102 train loss:3.851363 +step:3103 train loss:3.777182 +step:3104 train loss:3.858082 +step:3105 train loss:3.840816 +step:3106 train loss:3.835193 +step:3107 train loss:3.814240 +step:3108 train loss:3.788974 +step:3109 train loss:3.844932 +step:3110 train loss:3.768786 +step:3111 train loss:3.806087 +step:3112 train loss:3.742750 +step:3113 train loss:3.859635 +step:3114 train loss:3.771002 +step:3115 train loss:3.815985 +step:3116 train loss:3.691201 +step:3117 train loss:3.712452 +step:3118 train loss:3.815763 +step:3119 train loss:3.826155 +step:3120 train loss:3.820315 +step:3121 train loss:3.771881 +step:3122 train loss:3.848739 +step:3123 train loss:3.764712 +step:3124 train loss:3.827460 +step:3125 train loss:3.838413 +step:3126 train loss:3.949062 +step:3127 train loss:3.792786 +step:3128 train loss:3.820281 
+step:3129 train loss:3.806527 +step:3130 train loss:3.783973 +step:3131 train loss:3.855662 +step:3132 train loss:3.845180 +step:3133 train loss:3.817741 +step:3134 train loss:3.710530 +step:3135 train loss:3.804610 +step:3136 train loss:3.780583 +step:3137 train loss:3.909713 +step:3138 train loss:3.808654 +step:3139 train loss:3.789393 +step:3140 train loss:3.809017 +step:3141 train loss:3.811176 +step:3142 train loss:3.751513 +step:3143 train loss:3.834799 +step:3144 train loss:3.783302 +step:3145 train loss:3.769424 +step:3146 train loss:3.780768 +step:3147 train loss:3.887124 +step:3148 train loss:3.795141 +step:3149 train loss:3.848513 +step:3150 train loss:3.833447 +step:3151 train loss:3.804703 +step:3152 train loss:3.797678 +step:3153 train loss:3.759276 +step:3154 train loss:3.841669 +step:3155 train loss:3.784287 +step:3156 train loss:3.832139 +step:3157 train loss:3.838658 +step:3158 train loss:3.812135 +step:3159 train loss:3.748255 +step:3160 train loss:3.797840 +step:3161 train loss:3.768106 +step:3162 train loss:3.827005 +step:3163 train loss:3.804410 +step:3164 train loss:3.785298 +step:3165 train loss:3.805518 +step:3166 train loss:3.837967 +step:3167 train loss:3.805085 +step:3168 train loss:3.886251 +step:3169 train loss:3.805124 +step:3170 train loss:3.787563 +step:3171 train loss:3.778994 +step:3172 train loss:3.782654 +step:3173 train loss:3.719573 +step:3174 train loss:3.844669 +step:3175 train loss:3.815235 +step:3176 train loss:3.822826 +step:3177 train loss:3.788050 +step:3178 train loss:3.767990 +step:3179 train loss:3.841647 +step:3180 train loss:3.776143 +step:3181 train loss:3.848145 +step:3182 train loss:3.854465 +step:3183 train loss:3.794907 +step:3184 train loss:3.794219 +step:3185 train loss:3.851927 +step:3186 train loss:3.811088 +step:3187 train loss:3.828503 +step:3188 train loss:3.867858 +step:3189 train loss:3.818786 +step:3190 train loss:3.768720 +step:3191 train loss:3.774926 +step:3192 train loss:3.738034 +step:3193 train loss:3.819885 +step:3194 train loss:3.779662 +step:3195 train loss:3.768060 +step:3196 train loss:3.812379 +step:3197 train loss:3.777553 +step:3198 train loss:3.812992 +step:3199 train loss:3.792268 +step:3200 train loss:3.797631 +step:3201 train loss:3.761329 +step:3202 train loss:3.822510 +step:3203 train loss:3.884070 +step:3204 train loss:3.848857 +step:3205 train loss:3.701141 +step:3206 train loss:3.984254 +step:3207 train loss:3.741731 +step:3208 train loss:3.809566 +step:3209 train loss:3.799555 +step:3210 train loss:3.778872 +step:3211 train loss:3.801349 +step:3212 train loss:3.819473 +step:3213 train loss:3.754868 +step:3214 train loss:3.865213 +step:3215 train loss:3.865399 +step:3216 train loss:3.733487 +step:3217 train loss:3.814622 +step:3218 train loss:3.854160 +step:3219 train loss:3.771539 +step:3220 train loss:3.842957 +step:3221 train loss:3.754217 +step:3222 train loss:3.795224 +step:3223 train loss:3.812226 +step:3224 train loss:3.823955 +step:3225 train loss:3.750765 +step:3226 train loss:3.779068 +step:3227 train loss:3.810318 +step:3228 train loss:3.805274 +step:3229 train loss:3.841084 +step:3230 train loss:3.852481 +step:3231 train loss:3.789840 +step:3232 train loss:3.801039 +step:3233 train loss:3.773593 +step:3234 train loss:3.760525 +step:3235 train loss:3.763832 +step:3236 train loss:3.783417 +step:3237 train loss:3.788240 +step:3238 train loss:3.797742 +step:3239 train loss:3.711230 +step:3240 train loss:3.824584 +step:3241 train loss:3.818019 +step:3242 train loss:3.871152 +step:3243 train 
loss:3.812088 +step:3244 train loss:3.832182 +step:3245 train loss:3.729615 +step:3246 train loss:3.857974 +step:3247 train loss:3.798462 +step:3248 train loss:3.822005 +step:3249 train loss:3.770792 +step:3250 validation loss:3.733484 +step:3250 train loss:3.768383 +step:3251 train loss:3.872627 +step:3252 train loss:3.804409 +step:3253 train loss:3.807474 +step:3254 train loss:3.872557 +step:3255 train loss:3.813032 +step:3256 train loss:3.810503 +step:3257 train loss:3.789045 +step:3258 train loss:3.720673 +step:3259 train loss:3.699585 +step:3260 train loss:3.816739 +step:3261 train loss:3.800156 +step:3262 train loss:3.786144 +step:3263 train loss:3.776451 +step:3264 train loss:3.886495 +step:3265 train loss:3.794412 +step:3266 train loss:3.821581 +step:3267 train loss:3.784273 +step:3268 train loss:3.788723 +step:3269 train loss:3.798643 +step:3270 train loss:3.830557 +step:3271 train loss:3.793591 +step:3272 train loss:3.764544 +step:3273 train loss:3.781499 +step:3274 train loss:3.912872 +step:3275 train loss:3.789958 +step:3276 train loss:3.852633 +step:3277 train loss:3.791395 +step:3278 train loss:3.771150 +step:3279 train loss:3.797403 +step:3280 train loss:3.821589 +step:3281 train loss:3.751191 +step:3282 train loss:3.817049 +step:3283 train loss:3.789445 +step:3284 train loss:3.752082 +step:3285 train loss:3.768339 +step:3286 train loss:3.800854 +step:3287 train loss:3.735386 +step:3288 train loss:3.817220 +step:3289 train loss:3.762557 +step:3290 train loss:3.797086 +step:3291 train loss:3.755967 +step:3292 train loss:3.781089 +step:3293 train loss:3.823152 +step:3294 train loss:3.832901 +step:3295 train loss:3.746172 +step:3296 train loss:3.802964 +step:3297 train loss:3.760517 +step:3298 train loss:3.763299 +step:3299 train loss:3.890201 +step:3300 train loss:3.733278 +step:3301 train loss:3.812847 +step:3302 train loss:3.778631 +step:3303 train loss:3.813050 +step:3304 train loss:3.775002 +step:3305 train loss:3.865912 +step:3306 train loss:3.793619 +step:3307 train loss:3.821250 +step:3308 train loss:3.772482 +step:3309 train loss:3.827930 +step:3310 train loss:3.747036 +step:3311 train loss:3.797710 +step:3312 train loss:3.763984 +step:3313 train loss:3.798864 +step:3314 train loss:3.795583 +step:3315 train loss:3.872429 +step:3316 train loss:3.731336 +step:3317 train loss:3.822347 +step:3318 train loss:3.835216 +step:3319 train loss:3.756209 +step:3320 train loss:3.908285 +step:3321 train loss:3.816327 +step:3322 train loss:3.816065 +step:3323 train loss:3.918835 +step:3324 train loss:3.838789 +step:3325 train loss:3.811103 +step:3326 train loss:3.801358 +step:3327 train loss:3.815785 +step:3328 train loss:3.792356 +step:3329 train loss:3.794263 +step:3330 train loss:3.783920 +step:3331 train loss:3.826829 +step:3332 train loss:3.846059 +step:3333 train loss:3.817813 +step:3334 train loss:3.751817 +step:3335 train loss:3.761254 +step:3336 train loss:3.802696 +step:3337 train loss:3.799390 +step:3338 train loss:3.786880 +step:3339 train loss:3.778446 +step:3340 train loss:3.816864 +step:3341 train loss:3.764688 +step:3342 train loss:3.816659 +step:3343 train loss:3.753032 +step:3344 train loss:3.809680 +step:3345 train loss:3.758657 +step:3346 train loss:3.773094 +step:3347 train loss:3.778639 +step:3348 train loss:3.791074 +step:3349 train loss:3.782046 +step:3350 train loss:3.806837 +step:3351 train loss:3.862937 +step:3352 train loss:3.803253 +step:3353 train loss:3.898687 +step:3354 train loss:3.745670 +step:3355 train loss:3.856657 +step:3356 train loss:3.805207 
+step:3357 train loss:3.811103 +step:3358 train loss:3.757282 +step:3359 train loss:3.786114 +step:3360 train loss:3.775500 +step:3361 train loss:3.778018 +step:3362 train loss:3.768425 +step:3363 train loss:3.768186 +step:3364 train loss:3.748141 +step:3365 train loss:3.791542 +step:3366 train loss:3.818924 +step:3367 train loss:3.773004 +step:3368 train loss:3.865074 +step:3369 train loss:3.780139 +step:3370 train loss:3.841908 +step:3371 train loss:3.836762 +step:3372 train loss:3.802895 +step:3373 train loss:3.810174 +step:3374 train loss:3.858362 +step:3375 train loss:3.791747 +step:3376 train loss:3.800699 +step:3377 train loss:3.787537 +step:3378 train loss:3.760125 +step:3379 train loss:3.837809 +step:3380 train loss:3.817649 +step:3381 train loss:3.803936 +step:3382 train loss:3.817336 +step:3383 train loss:3.826078 +step:3384 train loss:3.757837 +step:3385 train loss:3.805137 +step:3386 train loss:3.782166 +step:3387 train loss:3.859876 +step:3388 train loss:3.763658 +step:3389 train loss:3.955126 +step:3390 train loss:3.725157 +step:3391 train loss:3.813401 +step:3392 train loss:3.803672 +step:3393 train loss:3.818302 +step:3394 train loss:3.770733 +step:3395 train loss:3.836612 +step:3396 train loss:3.752924 +step:3397 train loss:3.826296 +step:3398 train loss:3.793734 +step:3399 train loss:3.808378 +step:3400 train loss:3.751139 +step:3401 train loss:3.787827 +step:3402 train loss:3.946882 +step:3403 train loss:3.831876 +step:3404 train loss:3.953232 +step:3405 train loss:3.802554 +step:3406 train loss:3.783040 +step:3407 train loss:3.785362 +step:3408 train loss:3.762917 +step:3409 train loss:3.728087 +step:3410 train loss:3.758353 +step:3411 train loss:3.830334 +step:3412 train loss:3.751369 +step:3413 train loss:3.745066 +step:3414 train loss:3.782257 +step:3415 train loss:3.756998 +step:3416 train loss:3.758097 +step:3417 train loss:3.840778 +step:3418 train loss:3.838850 +step:3419 train loss:3.798086 +step:3420 train loss:3.774304 +step:3421 train loss:3.804755 +step:3422 train loss:3.824114 +step:3423 train loss:3.839641 +step:3424 train loss:3.723773 +step:3425 train loss:3.739423 +step:3426 train loss:3.738551 +step:3427 train loss:3.798727 +step:3428 train loss:3.725368 +step:3429 train loss:3.786902 +step:3430 train loss:3.756033 +step:3431 train loss:3.812332 +step:3432 train loss:3.797990 +step:3433 train loss:3.760748 +step:3434 train loss:3.843803 +step:3435 train loss:3.783246 +step:3436 train loss:3.878507 +step:3437 train loss:3.702320 +step:3438 train loss:3.810386 +step:3439 train loss:3.778870 +step:3440 train loss:3.876244 +step:3441 train loss:3.772149 +step:3442 train loss:3.836007 +step:3443 train loss:3.773001 +step:3444 train loss:3.794073 +step:3445 train loss:3.837314 +step:3446 train loss:3.740510 +step:3447 train loss:3.812406 +step:3448 train loss:3.766100 +step:3449 train loss:3.805305 +step:3450 train loss:3.710832 +step:3451 train loss:3.829847 +step:3452 train loss:3.779350 +step:3453 train loss:3.831401 +step:3454 train loss:3.857448 +step:3455 train loss:3.921181 +step:3456 train loss:3.862739 +step:3457 train loss:3.857081 +step:3458 train loss:3.777729 +step:3459 train loss:3.794739 +step:3460 train loss:3.734752 +step:3461 train loss:3.799038 +step:3462 train loss:3.805303 +step:3463 train loss:3.770803 +step:3464 train loss:3.823876 +step:3465 train loss:3.755851 +step:3466 train loss:3.824013 +step:3467 train loss:3.782791 +step:3468 train loss:3.795377 +step:3469 train loss:3.804462 +step:3470 train loss:3.785768 +step:3471 train 
loss:3.822382 +step:3472 train loss:3.708191 +step:3473 train loss:3.829474 +step:3474 train loss:3.728137 +step:3475 train loss:3.806151 +step:3476 train loss:3.773752 +step:3477 train loss:3.795359 +step:3478 train loss:3.773821 +step:3479 train loss:3.798119 +step:3480 train loss:3.814554 +step:3481 train loss:3.795137 +step:3482 train loss:3.774768 +step:3483 train loss:3.922325 +step:3484 train loss:3.764638 +step:3485 train loss:3.749824 +step:3486 train loss:3.801329 +step:3487 train loss:3.842543 +step:3488 train loss:3.748022 +step:3489 train loss:3.801022 +step:3490 train loss:3.768098 +step:3491 train loss:3.803908 +step:3492 train loss:3.843339 +step:3493 train loss:3.811495 +step:3494 train loss:3.807053 +step:3495 train loss:3.782970 +step:3496 train loss:3.747627 +step:3497 train loss:3.862519 +step:3498 train loss:3.809145 +step:3499 train loss:3.746295 +step:3500 validation loss:3.712672 total_sharp:4.6069e-05 L1_sharp:6.6904e-06 L2_sharp:3.7547e-07 L3_sharp:2.5699e-06 L4_sharp:5.4891e-06 L5_sharp:2.8387e-06 L6_sharp:5.3930e-06 L7_sharp:3.7397e-06 L8_sharp:8.0088e-06 L9_sharp:8.6336e-06 L10_sharp:1.2935e-05 L11_sharp:9.5963e-06 L12_sharp:1.3620e-04 total_fnorm:3.9320e+01 total_l1_linf:3.3640e+05 total_spectral:3.9320e+01 L1_fnorm:8.8823e+00 L2_fnorm:7.2925e+00 L3_fnorm:7.7623e+00 L4_fnorm:8.4438e+00 L5_fnorm:8.8690e+00 L6_fnorm:9.0199e+00 L7_fnorm:9.4785e+00 L8_fnorm:9.4704e+00 L9_fnorm:9.4484e+00 L10_fnorm:9.2376e+00 L11_fnorm:9.4955e+00 L12_fnorm:8.4865e+00 L1_l1linf:8.4671e+00 L2_l1linf:5.4371e+00 L3_l1linf:6.5459e+00 L4_l1linf:7.5710e+00 L5_l1linf:8.6043e+00 L6_l1linf:9.6152e+00 L7_l1linf:9.4932e+00 L8_l1linf:1.0335e+01 L9_l1linf:1.0335e+01 L10_l1linf:1.1216e+01 L11_l1linf:1.1095e+01 L12_l1linf:1.1219e+01 L1_spectral:1.1509e+00 L2_spectral:9.1652e-01 L3_spectral:9.0422e-01 L4_spectral:1.0131e+00 L5_spectral:1.0058e+00 L6_spectral:1.0263e+00 L7_spectral:1.0531e+00 L8_spectral:1.2413e+00 L9_spectral:1.4110e+00 L10_spectral:1.5086e+00 L11_spectral:1.5353e+00 L12_spectral:1.8062e+00 ip_v_neg_g:3.9607e-02 cos_v_neg_g:2.1469e-03 v_norm:3.9320e+01 g_norm:4.6920e-01 hv_norm:4.9186e-01 cos_v_hv:3.6828e-03 hg_norm:4.1183e+00 cos_g_hg:6.4690e-01 v_par:1.4312e-02 v_perp:3.9320e+01 L1_cos_v_neg_g:2.2800e-02 L1_v_norm:8.8823e+00 L2_cos_v_neg_g:1.4253e-02 L2_v_norm:7.2925e+00 L3_cos_v_neg_g:9.6702e-03 L3_v_norm:7.7623e+00 L4_cos_v_neg_g:1.0330e-02 L4_v_norm:8.4438e+00 L5_cos_v_neg_g:8.4746e-03 L5_v_norm:8.8690e+00 L6_cos_v_neg_g:1.1376e-02 L6_v_norm:9.0199e+00 L7_cos_v_neg_g:8.8145e-03 L7_v_norm:9.4785e+00 L8_cos_v_neg_g:1.0814e-02 L8_v_norm:9.4704e+00 L9_cos_v_neg_g:1.0135e-02 L9_v_norm:9.4484e+00 L10_cos_v_neg_g:1.3541e-02 L10_v_norm:9.2376e+00 L11_cos_v_neg_g:1.7857e-02 L11_v_norm:9.4955e+00 L12_cos_v_neg_g:3.4393e-02 L12_v_norm:8.4865e+00 +step:3500 train loss:3.764729 +step:3501 train loss:3.890607 +step:3502 train loss:3.867240 +step:3503 train loss:3.822159 +step:3504 train loss:3.772005 +step:3505 train loss:3.784389 +step:3506 train loss:3.681449 +step:3507 train loss:3.802461 +step:3508 train loss:3.747161 +step:3509 train loss:3.815780 +step:3510 train loss:3.747669 +step:3511 train loss:3.785648 +step:3512 train loss:3.928213 +step:3513 train loss:3.745213 +step:3514 train loss:3.760955 +step:3515 train loss:4.015313 +step:3516 train loss:3.805887 +step:3517 train loss:3.767453 +step:3518 train loss:3.766552 +step:3519 train loss:3.762599 +step:3520 train loss:3.792388 +step:3521 train loss:3.784173 +step:3522 train loss:3.689928 +step:3523 train loss:3.795249 +step:3524 
train loss:3.773539 +step:3525 train loss:3.765830 +step:3526 train loss:3.791273 +step:3527 train loss:3.738773 +step:3528 train loss:3.789637 +step:3529 train loss:3.769643 +step:3530 train loss:3.759823 +step:3531 train loss:3.756917 +step:3532 train loss:3.942934 +step:3533 train loss:3.766573 +step:3534 train loss:3.781680 +step:3535 train loss:3.753435 +step:3536 train loss:3.751521 +step:3537 train loss:3.761437 +step:3538 train loss:3.797395 +step:3539 train loss:3.746743 +step:3540 train loss:3.810042 +step:3541 train loss:3.774849 +step:3542 train loss:3.786559 +step:3543 train loss:3.706921 +step:3544 train loss:3.743704 +step:3545 train loss:3.730562 +step:3546 train loss:3.800189 +step:3547 train loss:3.808428 +step:3548 train loss:3.779454 +step:3549 train loss:3.776649 +step:3550 train loss:3.762268 +step:3551 train loss:3.795689 +step:3552 train loss:3.693311 +step:3553 train loss:3.808365 +step:3554 train loss:3.799889 +step:3555 train loss:3.789758 +step:3556 train loss:3.816469 +step:3557 train loss:3.799076 +step:3558 train loss:3.777366 +step:3559 train loss:3.722690 +step:3560 train loss:3.818386 +step:3561 train loss:3.808310 +step:3562 train loss:3.983011 +step:3563 train loss:3.842764 +step:3564 train loss:3.804228 +step:3565 train loss:3.801742 +step:3566 train loss:3.775011 +step:3567 train loss:3.712494 +step:3568 train loss:3.740512 +step:3569 train loss:3.824774 +step:3570 train loss:3.846567 +step:3571 train loss:3.824182 +step:3572 train loss:3.817191 +step:3573 train loss:3.774835 +step:3574 train loss:3.772640 +step:3575 train loss:3.765684 +step:3576 train loss:3.745282 +step:3577 train loss:3.752043 +step:3578 train loss:3.839263 +step:3579 train loss:3.750408 +step:3580 train loss:3.830864 +step:3581 train loss:3.769532 +step:3582 train loss:3.821247 +step:3583 train loss:3.763181 +step:3584 train loss:3.733829 +step:3585 train loss:3.784920 +step:3586 train loss:3.735871 +step:3587 train loss:3.828095 +step:3588 train loss:3.958190 +step:3589 train loss:3.789873 +step:3590 train loss:3.777098 +step:3591 train loss:3.785666 +step:3592 train loss:3.744979 +step:3593 train loss:3.721042 +step:3594 train loss:3.771266 +step:3595 train loss:3.745767 +step:3596 train loss:3.825392 +step:3597 train loss:3.799572 +step:3598 train loss:3.754106 +step:3599 train loss:3.802125 +step:3600 train loss:3.744431 +step:3601 train loss:3.758377 +step:3602 train loss:3.746331 +step:3603 train loss:3.763020 +step:3604 train loss:3.788677 +step:3605 train loss:3.889443 +step:3606 train loss:3.795888 +step:3607 train loss:3.782527 +step:3608 train loss:3.795449 +step:3609 train loss:3.776262 +step:3610 train loss:3.748674 +step:3611 train loss:3.749939 +step:3612 train loss:3.814240 +step:3613 train loss:3.785852 +step:3614 train loss:3.724176 +step:3615 train loss:3.771401 +step:3616 train loss:3.761828 +step:3617 train loss:3.820084 +step:3618 train loss:3.775789 +step:3619 train loss:3.774864 +step:3620 train loss:3.806545 +step:3621 train loss:3.752829 +step:3622 train loss:3.857718 +step:3623 train loss:3.842752 +step:3624 train loss:3.809198 +step:3625 train loss:3.789722 +step:3626 train loss:3.789666 +step:3627 train loss:3.787269 +step:3628 train loss:3.771634 +step:3629 train loss:3.774725 +step:3630 train loss:3.860475 +step:3631 train loss:3.785492 +step:3632 train loss:3.814909 +step:3633 train loss:3.770918 +step:3634 train loss:3.771557 +step:3635 train loss:3.761800 +step:3636 train loss:3.830406 +step:3637 train loss:3.910506 +step:3638 train loss:3.819986 
+step:3639 train loss:3.806971 +step:3640 train loss:3.819142 +step:3641 train loss:3.854112 +step:3642 train loss:3.745032 +step:3643 train loss:3.916931 +step:3644 train loss:3.810171 +step:3645 train loss:3.776815 +step:3646 train loss:3.903277 +step:3647 train loss:3.793988 +step:3648 train loss:3.783625 +step:3649 train loss:3.734707 +step:3650 train loss:3.773514 +step:3651 train loss:3.768428 +step:3652 train loss:3.756250 +step:3653 train loss:3.685505 +step:3654 train loss:3.754722 +step:3655 train loss:3.748370 +step:3656 train loss:3.777495 +step:3657 train loss:3.798562 +step:3658 train loss:3.792086 +step:3659 train loss:3.774730 +step:3660 train loss:3.748915 +step:3661 train loss:3.771882 +step:3662 train loss:3.752010 +step:3663 train loss:3.782634 +step:3664 train loss:3.739016 +step:3665 train loss:3.789874 +step:3666 train loss:3.817019 +step:3667 train loss:3.906057 +step:3668 train loss:3.788693 +step:3669 train loss:3.747331 +step:3670 train loss:3.793972 +step:3671 train loss:3.752337 +step:3672 train loss:3.789838 +step:3673 train loss:3.771626 +step:3674 train loss:3.790109 +step:3675 train loss:3.795489 +step:3676 train loss:3.763934 +step:3677 train loss:3.725417 +step:3678 train loss:3.783491 +step:3679 train loss:3.686978 +step:3680 train loss:3.789483 +step:3681 train loss:3.822029 +step:3682 train loss:3.800080 +step:3683 train loss:3.745900 +step:3684 train loss:3.746082 +step:3685 train loss:3.773057 +step:3686 train loss:3.801095 +step:3687 train loss:3.751497 +step:3688 train loss:3.731598 +step:3689 train loss:3.768419 +step:3690 train loss:3.754631 +step:3691 train loss:3.736354 +step:3692 train loss:3.796837 +step:3693 train loss:3.923091 +step:3694 train loss:3.745317 +step:3695 train loss:3.805805 +step:3696 train loss:3.764354 +step:3697 train loss:3.755683 +step:3698 train loss:3.697936 +step:3699 train loss:3.721654 +step:3700 train loss:3.752617 +step:3701 train loss:3.772380 +step:3702 train loss:3.795331 +step:3703 train loss:3.749578 +step:3704 train loss:3.799441 +step:3705 train loss:3.779811 +step:3706 train loss:3.726346 +step:3707 train loss:3.781401 +step:3708 train loss:3.758109 +step:3709 train loss:3.677619 +step:3710 train loss:3.801664 +step:3711 train loss:3.750474 +step:3712 train loss:3.791152 +step:3713 train loss:3.741838 +step:3714 train loss:3.759380 +step:3715 train loss:3.879643 +step:3716 train loss:3.783175 +step:3717 train loss:3.756571 +step:3718 train loss:3.762448 +step:3719 train loss:3.759166 +step:3720 train loss:3.769013 +step:3721 train loss:3.828864 +step:3722 train loss:3.844086 +step:3723 train loss:3.728882 +step:3724 train loss:3.787500 +step:3725 train loss:3.764833 +step:3726 train loss:3.781515 +step:3727 train loss:3.853904 +step:3728 train loss:3.818856 +step:3729 train loss:3.720275 +step:3730 train loss:3.739721 +step:3731 train loss:3.761319 +step:3732 train loss:3.913904 +step:3733 train loss:3.772806 +step:3734 train loss:3.775083 +step:3735 train loss:3.714654 +step:3736 train loss:3.768398 +step:3737 train loss:3.822009 +step:3738 train loss:3.839476 +step:3739 train loss:3.758277 +step:3740 train loss:3.659233 +step:3741 train loss:3.866289 +step:3742 train loss:3.785658 +step:3743 train loss:3.755057 +step:3744 train loss:3.770260 +step:3745 train loss:3.775006 +step:3746 train loss:3.751425 +step:3747 train loss:3.759226 +step:3748 train loss:3.798121 +step:3749 train loss:3.783345 +step:3750 validation loss:3.695135 +step:3750 train loss:3.800502 +step:3751 train loss:3.882149 +step:3752 
train loss:3.811868 +step:3753 train loss:3.735349 +step:3754 train loss:3.785650 +step:3755 train loss:3.961518 +step:3756 train loss:3.741034 +step:3757 train loss:3.736890 +step:3758 train loss:3.770525 +step:3759 train loss:3.713002 +step:3760 train loss:3.708045 +step:3761 train loss:3.762398 +step:3762 train loss:3.754823 +step:3763 train loss:3.755247 +step:3764 train loss:3.749684 +step:3765 train loss:3.749830 +step:3766 train loss:3.719529 +step:3767 train loss:3.800092 +step:3768 train loss:3.741440 +step:3769 train loss:4.008294 +step:3770 train loss:3.800563 +step:3771 train loss:3.807058 +step:3772 train loss:3.760442 +step:3773 train loss:3.757161 +step:3774 train loss:3.764001 +step:3775 train loss:3.759465 +step:3776 train loss:3.758663 +step:3777 train loss:3.717236 +step:3778 train loss:3.734333 +step:3779 train loss:3.719364 +step:3780 train loss:3.799117 +step:3781 train loss:3.761909 +step:3782 train loss:3.682682 +step:3783 train loss:3.786154 +step:3784 train loss:3.799628 +step:3785 train loss:3.706895 +step:3786 train loss:3.815403 +step:3787 train loss:3.725656 +step:3788 train loss:3.737266 +step:3789 train loss:3.650866 +step:3790 train loss:3.767324 +step:3791 train loss:3.792221 +step:3792 train loss:3.758122 +step:3793 train loss:3.758592 +step:3794 train loss:3.784133 +step:3795 train loss:3.750533 +step:3796 train loss:3.770094 +step:3797 train loss:3.744553 +step:3798 train loss:3.752833 +step:3799 train loss:3.761164 +step:3800 train loss:3.669263 +step:3801 train loss:3.785413 +step:3802 train loss:3.712194 +step:3803 train loss:3.791125 +step:3804 train loss:3.807310 +step:3805 train loss:3.763225 +step:3806 train loss:3.780567 +step:3807 train loss:3.801628 +step:3808 train loss:3.756752 +step:3809 train loss:3.772150 +step:3810 train loss:3.775763 +step:3811 train loss:3.763730 +step:3812 train loss:3.764304 +step:3813 train loss:3.718247 +step:3814 train loss:3.761130 +step:3815 train loss:3.761668 +step:3816 train loss:3.781836 +step:3817 train loss:3.799850 +step:3818 train loss:3.771562 +step:3819 train loss:3.782279 +step:3820 train loss:3.782951 +step:3821 train loss:3.734727 +step:3822 train loss:3.821507 +step:3823 train loss:3.715140 +step:3824 train loss:3.726775 +step:3825 train loss:3.735093 +step:3826 train loss:3.803376 +step:3827 train loss:3.831461 +step:3828 train loss:3.720891 +step:3829 train loss:3.738757 +step:3830 train loss:3.796740 +step:3831 train loss:3.731056 +step:3832 train loss:3.791359 +step:3833 train loss:3.729894 +step:3834 train loss:3.695980 +step:3835 train loss:3.740283 +step:3836 train loss:3.714931 +step:3837 train loss:3.781746 +step:3838 train loss:3.734235 +step:3839 train loss:3.775802 +step:3840 train loss:3.790318 +step:3841 train loss:3.738473 +step:3842 train loss:3.773333 +step:3843 train loss:3.789123 +step:3844 train loss:3.760810 +step:3845 train loss:3.781233 +step:3846 train loss:3.824662 +step:3847 train loss:3.721521 +step:3848 train loss:3.728190 +step:3849 train loss:3.759282 +step:3850 train loss:3.770240 +step:3851 train loss:3.904923 +step:3852 train loss:3.884047 +step:3853 train loss:3.787304 +step:3854 train loss:3.766054 +step:3855 train loss:3.799521 +step:3856 train loss:3.720308 +step:3857 train loss:3.779572 +step:3858 train loss:3.696697 +step:3859 train loss:3.743416 +step:3860 train loss:3.813385 +step:3861 train loss:3.785893 +step:3862 train loss:3.719595 +step:3863 train loss:3.769747 +step:3864 train loss:3.739753 +step:3865 train loss:3.773884 +step:3866 train loss:3.793040 
+step:3867 train loss:3.790952 +step:3868 train loss:3.736935 +step:3869 train loss:3.741823 +step:3870 train loss:3.715148 +step:3871 train loss:3.716614 +step:3872 train loss:3.843381 +step:3873 train loss:3.765089 +step:3874 train loss:3.778586 +step:3875 train loss:3.885665 +step:3876 train loss:3.764728 +step:3877 train loss:3.791970 +step:3878 train loss:3.817655 +step:3879 train loss:3.804045 +step:3880 train loss:3.887181 +step:3881 train loss:3.706990 +step:3882 train loss:3.743340 +step:3883 train loss:3.754502 +step:3884 train loss:3.744394 +step:3885 train loss:3.760370 +step:3886 train loss:3.815186 +step:3887 train loss:3.796146 +step:3888 train loss:3.759795 +step:3889 train loss:3.730662 +step:3890 train loss:3.766220 +step:3891 train loss:3.781414 +step:3892 train loss:3.691297 +step:3893 train loss:3.794243 +step:3894 train loss:3.745593 +step:3895 train loss:3.768753 +step:3896 train loss:3.757641 +step:3897 train loss:3.723235 +step:3898 train loss:3.782084 +step:3899 train loss:3.824924 +step:3900 train loss:3.776821 +step:3901 train loss:3.795014 +step:3902 train loss:3.717045 +step:3903 train loss:3.734502 +step:3904 train loss:3.768083 +step:3905 train loss:3.702835 +step:3906 train loss:3.746933 +step:3907 train loss:3.773895 +step:3908 train loss:3.850542 +step:3909 train loss:3.740606 +step:3910 train loss:3.770063 +step:3911 train loss:3.781929 +step:3912 train loss:3.731506 +step:3913 train loss:3.746350 +step:3914 train loss:3.768565 +step:3915 train loss:3.735655 +step:3916 train loss:3.770575 +step:3917 train loss:3.819021 +step:3918 train loss:3.796354 +step:3919 train loss:3.772609 +step:3920 train loss:3.748884 +step:3921 train loss:3.789425 +step:3922 train loss:3.790082 +step:3923 train loss:3.777632 +step:3924 train loss:3.721515 +step:3925 train loss:3.916408 +step:3926 train loss:3.765195 +step:3927 train loss:3.744458 +step:3928 train loss:3.818255 +step:3929 train loss:3.889395 +step:3930 train loss:3.793704 +step:3931 train loss:3.736509 +step:3932 train loss:3.783873 +step:3933 train loss:3.798723 +step:3934 train loss:3.748026 +step:3935 train loss:3.722637 +step:3936 train loss:3.811761 +step:3937 train loss:3.770564 +step:3938 train loss:3.781874 +step:3939 train loss:3.808868 +step:3940 train loss:3.752193 +step:3941 train loss:3.838264 +step:3942 train loss:3.797303 +step:3943 train loss:3.780740 +step:3944 train loss:3.828605 +step:3945 train loss:3.739684 +step:3946 train loss:3.685351 +step:3947 train loss:3.810175 +step:3948 train loss:3.786656 +step:3949 train loss:3.947443 +step:3950 train loss:3.751957 +step:3951 train loss:3.683024 +step:3952 train loss:3.636560 +step:3953 train loss:3.715124 +step:3954 train loss:3.762932 +step:3955 train loss:3.790356 +step:3956 train loss:3.749261 +step:3957 train loss:3.801818 +step:3958 train loss:3.774869 +step:3959 train loss:3.816613 +step:3960 train loss:3.739286 +step:3961 train loss:3.764767 +step:3962 train loss:3.773440 +step:3963 train loss:3.747106 +step:3964 train loss:3.724649 +step:3965 train loss:3.783720 +step:3966 train loss:3.746670 +step:3967 train loss:3.777294 +step:3968 train loss:3.800088 +step:3969 train loss:3.705677 +step:3970 train loss:3.828676 +step:3971 train loss:3.738376 +step:3972 train loss:3.769899 +step:3973 train loss:3.726835 +step:3974 train loss:3.822050 +step:3975 train loss:3.779178 +step:3976 train loss:3.729492 +step:3977 train loss:3.783844 +step:3978 train loss:3.751871 +step:3979 train loss:3.738103 +step:3980 train loss:3.805770 +step:3981 train 
loss:3.737831 +step:3982 train loss:3.759524 +step:3983 train loss:3.742756 +step:3984 train loss:3.773834 +step:3985 train loss:3.749763 +step:3986 train loss:3.763507 +step:3987 train loss:3.772556 +step:3988 train loss:3.707257 +step:3989 train loss:3.779551 +step:3990 train loss:3.772748 +step:3991 train loss:3.784861 +step:3992 train loss:3.741395 +step:3993 train loss:3.781218 +step:3994 train loss:3.724832 +step:3995 train loss:3.779580 +step:3996 train loss:3.696239 +step:3997 train loss:3.772920 +step:3998 train loss:3.661409 +step:3999 train loss:3.817809 +step:4000 validation loss:3.673132 total_sharp:4.2756e-05 L1_sharp:6.7299e-06 L2_sharp:1.4518e-06 L3_sharp:2.7516e-06 L4_sharp:2.7319e-06 L5_sharp:2.0352e-06 L6_sharp:1.8154e-06 L7_sharp:2.3130e-06 L8_sharp:5.0594e-06 L9_sharp:8.7976e-06 L10_sharp:1.2861e-05 L11_sharp:9.0892e-06 L12_sharp:1.4932e-04 total_fnorm:3.9016e+01 total_l1_linf:3.3278e+05 total_spectral:3.9016e+01 L1_fnorm:8.5283e+00 L2_fnorm:6.9176e+00 L3_fnorm:7.5213e+00 L4_fnorm:8.2065e+00 L5_fnorm:8.7746e+00 L6_fnorm:8.8870e+00 L7_fnorm:9.3686e+00 L8_fnorm:9.3682e+00 L9_fnorm:9.4334e+00 L10_fnorm:9.3251e+00 L11_fnorm:9.5662e+00 L12_fnorm:8.6554e+00 L1_l1linf:7.9537e+00 L2_l1linf:5.3440e+00 L3_l1linf:6.4395e+00 L4_l1linf:7.4982e+00 L5_l1linf:8.7156e+00 L6_l1linf:8.6335e+00 L7_l1linf:9.4865e+00 L8_l1linf:1.0148e+01 L9_l1linf:1.1070e+01 L10_l1linf:1.1048e+01 L11_l1linf:1.1033e+01 L12_l1linf:1.1422e+01 L1_spectral:1.1683e+00 L2_spectral:7.6376e-01 L3_spectral:9.0305e-01 L4_spectral:9.8107e-01 L5_spectral:9.7552e-01 L6_spectral:1.0139e+00 L7_spectral:9.9533e-01 L8_spectral:1.2644e+00 L9_spectral:1.4930e+00 L10_spectral:1.5775e+00 L11_spectral:1.5995e+00 L12_spectral:1.8716e+00 ip_v_neg_g:3.5690e-02 cos_v_neg_g:2.0733e-03 v_norm:3.9016e+01 g_norm:4.4122e-01 hv_norm:5.0436e-01 cos_v_hv:3.3075e-03 hg_norm:4.7982e+00 cos_g_hg:7.5818e-01 v_par:1.6970e-02 v_perp:3.9016e+01 L1_cos_v_neg_g:1.2907e-02 L1_v_norm:8.5283e+00 L2_cos_v_neg_g:7.2167e-03 L2_v_norm:6.9176e+00 L3_cos_v_neg_g:6.5535e-03 L3_v_norm:7.5213e+00 L4_cos_v_neg_g:6.5912e-03 L4_v_norm:8.2065e+00 L5_cos_v_neg_g:5.5225e-03 L5_v_norm:8.7746e+00 L6_cos_v_neg_g:5.9095e-03 L6_v_norm:8.8870e+00 L7_cos_v_neg_g:7.4308e-03 L7_v_norm:9.3686e+00 L8_cos_v_neg_g:1.0229e-02 L8_v_norm:9.3682e+00 L9_cos_v_neg_g:1.4620e-02 L9_v_norm:9.4334e+00 L10_cos_v_neg_g:1.9306e-02 L10_v_norm:9.3251e+00 L11_cos_v_neg_g:2.2426e-02 L11_v_norm:9.5662e+00 L12_cos_v_neg_g:4.8237e-02 L12_v_norm:8.6554e+00 +step:4000 train loss:3.692747 +step:4001 train loss:3.768726 +step:4002 train loss:3.748407 +step:4003 train loss:3.784331 +step:4004 train loss:3.693646 +step:4005 train loss:3.788557 +step:4006 train loss:3.797848 +step:4007 train loss:3.718119 +step:4008 train loss:3.677266 +step:4009 train loss:3.762596 +step:4010 train loss:3.734490 +step:4011 train loss:3.740527 +step:4012 train loss:3.754762 +step:4013 train loss:3.731138 +step:4014 train loss:3.744133 +step:4015 train loss:3.738956 +step:4016 train loss:3.745876 +step:4017 train loss:3.709776 +step:4018 train loss:3.646747 +step:4019 train loss:3.706059 +step:4020 train loss:3.774787 +step:4021 train loss:3.724692 +step:4022 train loss:3.719946 +step:4023 train loss:3.740071 +step:4024 train loss:3.649137 +step:4025 train loss:3.768428 +step:4026 train loss:3.758418 +step:4027 train loss:3.765454 +step:4028 train loss:3.784382 +step:4029 train loss:3.813337 +step:4030 train loss:3.728759 +step:4031 train loss:3.771375 +step:4032 train loss:3.729482 +step:4033 train loss:3.763078 +step:4034 
train loss:3.779491 +step:4035 train loss:3.756731 +step:4036 train loss:3.750992 +step:4037 train loss:3.767936 +step:4038 train loss:3.690135 +step:4039 train loss:3.743329 +step:4040 train loss:3.723327 +step:4041 train loss:3.716523 +step:4042 train loss:3.742379 +step:4043 train loss:3.723159 +step:4044 train loss:3.758268 +step:4045 train loss:3.762062 +step:4046 train loss:3.716430 +step:4047 train loss:3.742443 +step:4048 train loss:3.756509 +step:4049 train loss:3.714933 +step:4050 train loss:3.818018 +step:4051 train loss:3.731602 +step:4052 train loss:3.750041 +step:4053 train loss:3.799042 +step:4054 train loss:3.773977 +step:4055 train loss:3.787570 +step:4056 train loss:3.784245 +step:4057 train loss:3.721775 +step:4058 train loss:3.704598 +step:4059 train loss:3.788653 +step:4060 train loss:3.729820 +step:4061 train loss:3.700646 +step:4062 train loss:3.813252 +step:4063 train loss:3.764078 +step:4064 train loss:3.732066 +step:4065 train loss:3.714227 +step:4066 train loss:3.741889 +step:4067 train loss:3.770258 +step:4068 train loss:3.734256 +step:4069 train loss:3.792372 +step:4070 train loss:3.709775 +step:4071 train loss:3.681882 +step:4072 train loss:3.756423 +step:4073 train loss:3.693606 +step:4074 train loss:3.750222 +step:4075 train loss:3.819874 +step:4076 train loss:3.672908 +step:4077 train loss:3.750490 +step:4078 train loss:3.854514 +step:4079 train loss:3.795081 +step:4080 train loss:3.743112 +step:4081 train loss:3.711289 +step:4082 train loss:3.764960 +step:4083 train loss:3.699625 +step:4084 train loss:3.719280 +step:4085 train loss:3.953136 +step:4086 train loss:3.717055 +step:4087 train loss:3.766002 +step:4088 train loss:3.743851 +step:4089 train loss:3.737595 +step:4090 train loss:3.753395 +step:4091 train loss:3.776571 +step:4092 train loss:3.705235 +step:4093 train loss:3.730974 +step:4094 train loss:3.753058 +step:4095 train loss:3.704642 +step:4096 train loss:3.737563 +step:4097 train loss:3.739377 +step:4098 train loss:3.721039 +step:4099 train loss:3.721055 +step:4100 train loss:3.771183 +step:4101 train loss:3.694862 +step:4102 train loss:3.728350 +step:4103 train loss:3.933676 +step:4104 train loss:3.750859 +step:4105 train loss:3.713780 +step:4106 train loss:3.790326 +step:4107 train loss:3.712186 +step:4108 train loss:3.713861 +step:4109 train loss:3.766751 +step:4110 train loss:3.778666 +step:4111 train loss:3.752098 +step:4112 train loss:3.768971 +step:4113 train loss:3.729964 +step:4114 train loss:3.679053 +step:4115 train loss:3.715288 +step:4116 train loss:3.702275 +step:4117 train loss:3.720252 +step:4118 train loss:3.772606 +step:4119 train loss:3.797661 +step:4120 train loss:3.716868 +step:4121 train loss:3.708939 +step:4122 train loss:3.774551 +step:4123 train loss:3.788423 +step:4124 train loss:3.764305 +step:4125 train loss:3.802464 +step:4126 train loss:3.738122 +step:4127 train loss:3.758027 +step:4128 train loss:3.748020 +step:4129 train loss:3.791694 +step:4130 train loss:3.723977 +step:4131 train loss:3.760105 +step:4132 train loss:3.773764 +step:4133 train loss:3.725381 +step:4134 train loss:3.781783 +step:4135 train loss:3.714056 +step:4136 train loss:3.733263 +step:4137 train loss:3.706926 +step:4138 train loss:3.715914 +step:4139 train loss:3.760313 +step:4140 train loss:3.723403 +step:4141 train loss:3.689375 +step:4142 train loss:3.729412 +step:4143 train loss:3.766628 +step:4144 train loss:3.717059 +step:4145 train loss:3.683691 +step:4146 train loss:3.749751 +step:4147 train loss:3.722202 +step:4148 train loss:3.718731 
+step:4149 train loss:3.798220 +step:4150 train loss:3.765504 +step:4151 train loss:3.743001 +step:4152 train loss:3.763761 +step:4153 train loss:3.773916 +step:4154 train loss:3.776745 +step:4155 train loss:3.801002 +step:4156 train loss:3.674991 +step:4157 train loss:3.702337 +step:4158 train loss:3.756744 +step:4159 train loss:3.657507 +step:4160 train loss:3.750257 +step:4161 train loss:3.752401 +step:4162 train loss:3.662028 +step:4163 train loss:3.742373 +step:4164 train loss:3.688224 +step:4165 train loss:3.691409 +step:4166 train loss:3.759036 +step:4167 train loss:3.754851 +step:4168 train loss:3.746951 +step:4169 train loss:3.773239 +step:4170 train loss:3.899911 +step:4171 train loss:3.756363 +step:4172 train loss:3.769509 +step:4173 train loss:3.763145 +step:4174 train loss:3.723974 +step:4175 train loss:3.819467 +step:4176 train loss:3.740346 +step:4177 train loss:3.764811 +step:4178 train loss:3.737848 +step:4179 train loss:3.692153 +step:4180 train loss:3.687998 +step:4181 train loss:3.738832 +step:4182 train loss:3.720245 +step:4183 train loss:3.658406 +step:4184 train loss:3.734096 +step:4185 train loss:3.796831 +step:4186 train loss:3.771647 +step:4187 train loss:3.781823 +step:4188 train loss:3.753188 +step:4189 train loss:3.715964 +step:4190 train loss:3.761871 +step:4191 train loss:3.707393 +step:4192 train loss:3.793661 +step:4193 train loss:3.700763 +step:4194 train loss:3.685769 +step:4195 train loss:3.682298 +step:4196 train loss:3.750144 +step:4197 train loss:3.767584 +step:4198 train loss:3.688767 +step:4199 train loss:3.771182 +step:4200 train loss:3.730620 +step:4201 train loss:3.713406 +step:4202 train loss:3.727477 +step:4203 train loss:3.735055 +step:4204 train loss:3.732177 +step:4205 train loss:3.747528 +step:4206 train loss:3.767530 +step:4207 train loss:3.772915 +step:4208 train loss:3.729654 +step:4209 train loss:3.795100 +step:4210 train loss:3.824019 +step:4211 train loss:3.704549 +step:4212 train loss:3.740934 +step:4213 train loss:3.697429 +step:4214 train loss:3.704418 +step:4215 train loss:3.721921 +step:4216 train loss:3.693840 +step:4217 train loss:3.717202 +step:4218 train loss:3.757733 +step:4219 train loss:3.750479 +step:4220 train loss:3.852431 +step:4221 train loss:3.745509 +step:4222 train loss:3.799005 +step:4223 train loss:3.714838 +step:4224 train loss:3.788979 +step:4225 train loss:3.712323 +step:4226 train loss:3.765609 +step:4227 train loss:3.741066 +step:4228 train loss:3.717709 +step:4229 train loss:3.728033 +step:4230 train loss:3.706439 +step:4231 train loss:3.692577 +step:4232 train loss:3.749264 +step:4233 train loss:3.649993 +step:4234 train loss:3.733817 +step:4235 train loss:3.808384 +step:4236 train loss:3.776592 +step:4237 train loss:3.761977 +step:4238 train loss:3.770525 +step:4239 train loss:3.817511 +step:4240 train loss:3.728763 +step:4241 train loss:3.654291 +step:4242 train loss:3.771384 +step:4243 train loss:3.773486 +step:4244 train loss:3.785092 +step:4245 train loss:3.843291 +step:4246 train loss:3.716641 +step:4247 train loss:3.773561 +step:4248 train loss:3.722808 +step:4249 train loss:3.730462 +step:4250 validation loss:3.655377 +step:4250 train loss:3.712117 +step:4251 train loss:3.806254 +step:4252 train loss:3.716238 +step:4253 train loss:3.710228 +step:4254 train loss:3.723403 +step:4255 train loss:3.702572 +step:4256 train loss:3.719026 +step:4257 train loss:3.777582 +step:4258 train loss:3.633976 +step:4259 train loss:3.700525 +step:4260 train loss:3.762974 +step:4261 train loss:3.753345 +step:4262 
train loss:3.889473 +step:4263 train loss:3.833656 +step:4264 train loss:3.773505 +step:4265 train loss:3.764578 +step:4266 train loss:3.758337 +step:4267 train loss:3.762003 +step:4268 train loss:3.700613 +step:4269 train loss:3.798526 +step:4270 train loss:3.775732 +step:4271 train loss:3.689272 +step:4272 train loss:3.742836 +step:4273 train loss:3.723450 +step:4274 train loss:3.705915 +step:4275 train loss:3.725854 +step:4276 train loss:3.689630 +step:4277 train loss:3.827834 +step:4278 train loss:3.674472 +step:4279 train loss:3.704027 +step:4280 train loss:3.789699 +step:4281 train loss:3.774200 +step:4282 train loss:3.838469 +step:4283 train loss:3.695946 +step:4284 train loss:3.722636 +step:4285 train loss:3.724108 +step:4286 train loss:3.788881 +step:4287 train loss:3.790144 +step:4288 train loss:3.771234 +step:4289 train loss:3.721929 +step:4290 train loss:3.730110 +step:4291 train loss:3.686806 +step:4292 train loss:3.731881 +step:4293 train loss:3.744434 +step:4294 train loss:3.727190 +step:4295 train loss:3.662226 +step:4296 train loss:3.736856 +step:4297 train loss:3.718152 +step:4298 train loss:3.728841 +step:4299 train loss:3.723769 +step:4300 train loss:3.848084 +step:4301 train loss:3.659050 +step:4302 train loss:3.797020 +step:4303 train loss:3.675593 +step:4304 train loss:3.688031 +step:4305 train loss:3.712240 +step:4306 train loss:3.780585 +step:4307 train loss:3.697042 +step:4308 train loss:3.695159 +step:4309 train loss:3.763128 +step:4310 train loss:3.707066 +step:4311 train loss:3.755254 +step:4312 train loss:3.752484 +step:4313 train loss:3.741310 +step:4314 train loss:3.688053 +step:4315 train loss:3.721412 +step:4316 train loss:3.667377 +step:4317 train loss:3.723281 +step:4318 train loss:3.760837 +step:4319 train loss:3.708430 +step:4320 train loss:3.772654 +step:4321 train loss:3.755272 +step:4322 train loss:3.708956 +step:4323 train loss:3.651998 +step:4324 train loss:3.738820 +step:4325 train loss:3.720901 +step:4326 train loss:3.714186 +step:4327 train loss:3.819736 +step:4328 train loss:3.727532 +step:4329 train loss:3.683336 +step:4330 train loss:3.728372 +step:4331 train loss:3.738839 +step:4332 train loss:3.770378 +step:4333 train loss:3.725393 +step:4334 train loss:3.748482 +step:4335 train loss:3.748029 +step:4336 train loss:3.757518 +step:4337 train loss:3.725269 +step:4338 train loss:3.845300 +step:4339 train loss:3.754858 +step:4340 train loss:3.758034 +step:4341 train loss:3.727691 +step:4342 train loss:3.739367 +step:4343 train loss:3.862439 +step:4344 train loss:3.752278 +step:4345 train loss:3.766767 +step:4346 train loss:3.776954 +step:4347 train loss:3.788629 +step:4348 train loss:3.699661 +step:4349 train loss:3.781936 +step:4350 train loss:3.724613 +step:4351 train loss:3.672134 +step:4352 train loss:3.753563 +step:4353 train loss:3.700586 +step:4354 train loss:3.758699 +step:4355 train loss:3.715889 +step:4356 train loss:3.736448 +step:4357 train loss:3.717531 +step:4358 train loss:3.809911 +step:4359 train loss:3.762254 +step:4360 train loss:3.677924 +step:4361 train loss:3.724167 +step:4362 train loss:3.746275 +step:4363 train loss:3.762851 +step:4364 train loss:3.731343 +step:4365 train loss:3.714538 +step:4366 train loss:3.758987 +step:4367 train loss:3.770808 +step:4368 train loss:3.750636 +step:4369 train loss:3.618504 +step:4370 train loss:3.752047 +step:4371 train loss:3.656609 +step:4372 train loss:3.808840 +step:4373 train loss:3.745330 +step:4374 train loss:3.715165 +step:4375 train loss:3.759220 +step:4376 train loss:3.768963 
+step:4377 train loss:3.704718 +step:4378 train loss:3.713579 +step:4379 train loss:3.794075 +step:4380 train loss:3.777451 +step:4381 train loss:3.675124 +step:4382 train loss:3.724045 +step:4383 train loss:3.753987 +step:4384 train loss:3.743311 +step:4385 train loss:3.672859 +step:4386 train loss:3.727854 +step:4387 train loss:3.700395 +step:4388 train loss:3.717597 +step:4389 train loss:3.750649 +step:4390 train loss:3.792198 +step:4391 train loss:3.717195 +step:4392 train loss:3.790097 +step:4393 train loss:3.752724 +step:4394 train loss:3.689069 +step:4395 train loss:3.744240 +step:4396 train loss:3.719541 +step:4397 train loss:3.760604 +step:4398 train loss:3.709071 +step:4399 train loss:3.700400 +step:4400 train loss:3.703446 +step:4401 train loss:3.768843 +step:4402 train loss:3.764793 +step:4403 train loss:3.717359 +step:4404 train loss:3.746634 +step:4405 train loss:3.665798 +step:4406 train loss:3.748344 +step:4407 train loss:3.677624 +step:4408 train loss:3.777913 +step:4409 train loss:3.736246 +step:4410 train loss:3.741381 +step:4411 train loss:3.701223 +step:4412 train loss:3.814860 +step:4413 train loss:3.712474 +step:4414 train loss:3.719784 +step:4415 train loss:3.705420 +step:4416 train loss:3.697198 +step:4417 train loss:3.690754 +step:4418 train loss:3.765389 +step:4419 train loss:3.731415 +step:4420 train loss:3.740942 +step:4421 train loss:3.766741 +step:4422 train loss:3.784745 +step:4423 train loss:3.744531 +step:4424 train loss:3.732717 +step:4425 train loss:3.690065 +step:4426 train loss:3.766332 +step:4427 train loss:3.726333 +step:4428 train loss:3.664458 +step:4429 train loss:3.722969 +step:4430 train loss:3.764500 +step:4431 train loss:3.756877 +step:4432 train loss:3.661729 +step:4433 train loss:3.717699 +step:4434 train loss:3.713632 +step:4435 train loss:3.742612 +step:4436 train loss:3.677876 +step:4437 train loss:3.760175 +step:4438 train loss:3.725434 +step:4439 train loss:3.731241 +step:4440 train loss:3.730020 +step:4441 train loss:3.732016 +step:4442 train loss:3.779463 +step:4443 train loss:3.717453 +step:4444 train loss:3.801996 +step:4445 train loss:3.767290 +step:4446 train loss:3.700769 +step:4447 train loss:3.747957 +step:4448 train loss:3.769329 +step:4449 train loss:3.705339 +step:4450 train loss:3.720726 +step:4451 train loss:3.776652 +step:4452 train loss:3.831512 +step:4453 train loss:3.763716 +step:4454 train loss:3.738822 +step:4455 train loss:3.786443 +step:4456 train loss:3.730539 +step:4457 train loss:3.729338 +step:4458 train loss:3.736801 +step:4459 train loss:3.772072 +step:4460 train loss:3.684067 +step:4461 train loss:3.652630 +step:4462 train loss:3.709935 +step:4463 train loss:3.734255 +step:4464 train loss:3.698481 +step:4465 train loss:3.732997 +step:4466 train loss:3.829499 +step:4467 train loss:3.709042 +step:4468 train loss:3.704078 +step:4469 train loss:3.694274 +step:4470 train loss:3.669606 +step:4471 train loss:3.732373 +step:4472 train loss:3.656680 +step:4473 train loss:3.748724 +step:4474 train loss:3.772329 +step:4475 train loss:3.731085 +step:4476 train loss:3.696703 +step:4477 train loss:3.684692 +step:4478 train loss:3.738225 +step:4479 train loss:3.840797 +step:4480 train loss:3.675749 +step:4481 train loss:3.745094 +step:4482 train loss:3.705761 +step:4483 train loss:3.703728 +step:4484 train loss:3.753401 +step:4485 train loss:3.709012 +step:4486 train loss:3.810257 +step:4487 train loss:3.707454 +step:4488 train loss:3.708714 +step:4489 train loss:3.661595 +step:4490 train loss:3.744449 +step:4491 train 
loss:3.696611 +step:4492 train loss:3.728706 +step:4493 train loss:3.712308 +step:4494 train loss:3.702328 +step:4495 train loss:3.775042 +step:4496 train loss:3.714923 +step:4497 train loss:3.797652 +step:4498 train loss:3.687550 +step:4499 train loss:3.741897 +step:4500 validation loss:3.640476 total_sharp:3.0331e-05 L1_sharp:6.7308e-06 L2_sharp:9.5971e-07 L3_sharp:2.4732e-06 L4_sharp:2.6742e-06 L5_sharp:1.2730e-06 L6_sharp:1.5339e-06 L7_sharp:2.2377e-06 L8_sharp:4.0139e-06 L9_sharp:5.0774e-06 L10_sharp:8.9417e-06 L11_sharp:6.6092e-06 L12_sharp:8.8965e-05 total_fnorm:3.9985e+01 total_l1_linf:3.4224e+05 total_spectral:3.9985e+01 L1_fnorm:9.0457e+00 L2_fnorm:7.3922e+00 L3_fnorm:7.9856e+00 L4_fnorm:8.5801e+00 L5_fnorm:9.1128e+00 L6_fnorm:9.2225e+00 L7_fnorm:9.7239e+00 L8_fnorm:9.6400e+00 L9_fnorm:9.6086e+00 L10_fnorm:9.4724e+00 L11_fnorm:9.7548e+00 L12_fnorm:8.9989e+00 L1_l1linf:8.6650e+00 L2_l1linf:5.7349e+00 L3_l1linf:6.7007e+00 L4_l1linf:7.9237e+00 L5_l1linf:8.9515e+00 L6_l1linf:9.0042e+00 L7_l1linf:9.6218e+00 L8_l1linf:1.0173e+01 L9_l1linf:1.0230e+01 L10_l1linf:1.3211e+01 L11_l1linf:1.1588e+01 L12_l1linf:1.1402e+01 L1_spectral:1.2289e+00 L2_spectral:9.6996e-01 L3_spectral:9.1496e-01 L4_spectral:9.8348e-01 L5_spectral:9.8801e-01 L6_spectral:1.0388e+00 L7_spectral:1.0040e+00 L8_spectral:1.1581e+00 L9_spectral:1.3124e+00 L10_spectral:1.5725e+00 L11_spectral:1.6197e+00 L12_spectral:1.9766e+00 ip_v_neg_g:3.0831e-02 cos_v_neg_g:1.9431e-03 v_norm:3.9985e+01 g_norm:3.9684e-01 hv_norm:2.3933e-01 cos_v_hv:5.0673e-03 hg_norm:4.2968e+00 cos_g_hg:6.1662e-01 v_par:1.3909e-02 v_perp:3.9985e+01 L1_cos_v_neg_g:1.3962e-02 L1_v_norm:9.0457e+00 L2_cos_v_neg_g:5.9411e-03 L2_v_norm:7.3922e+00 L3_cos_v_neg_g:6.2199e-03 L3_v_norm:7.9856e+00 L4_cos_v_neg_g:6.1455e-03 L4_v_norm:8.5801e+00 L5_cos_v_neg_g:6.0581e-03 L5_v_norm:9.1128e+00 L6_cos_v_neg_g:6.8740e-03 L6_v_norm:9.2225e+00 L7_cos_v_neg_g:8.6366e-03 L7_v_norm:9.7239e+00 L8_cos_v_neg_g:1.0020e-02 L8_v_norm:9.6400e+00 L9_cos_v_neg_g:1.0248e-02 L9_v_norm:9.6086e+00 L10_cos_v_neg_g:1.5463e-02 L10_v_norm:9.4724e+00 L11_cos_v_neg_g:1.8111e-02 L11_v_norm:9.7548e+00 L12_cos_v_neg_g:4.0049e-02 L12_v_norm:8.9989e+00 +step:4500 train loss:3.649293 +step:4501 train loss:3.713718 +step:4502 train loss:3.838678 +step:4503 train loss:3.735648 +step:4504 train loss:3.746655 +step:4505 train loss:3.732901 +step:4506 train loss:3.703331 +step:4507 train loss:3.774734 +step:4508 train loss:3.708915 +step:4509 train loss:3.709140 +step:4510 train loss:3.745991 +step:4511 train loss:3.693424 +step:4512 train loss:3.718488 +step:4513 train loss:3.774725 +step:4514 train loss:3.683740 +step:4515 train loss:3.794303 +step:4516 train loss:3.773870 +step:4517 train loss:3.724903 +step:4518 train loss:3.662651 +step:4519 train loss:3.697620 +step:4520 train loss:3.710673 +step:4521 train loss:3.653283 +step:4522 train loss:3.709807 +step:4523 train loss:3.756914 +step:4524 train loss:3.737362 +step:4525 train loss:3.659846 +step:4526 train loss:3.698284 +step:4527 train loss:3.685675 +step:4528 train loss:3.715663 +step:4529 train loss:3.714781 +step:4530 train loss:3.810658 +step:4531 train loss:3.698437 +step:4532 train loss:3.723726 +step:4533 train loss:3.697151 +step:4534 train loss:3.789585 +step:4535 train loss:3.689109 +step:4536 train loss:3.756005 +step:4537 train loss:3.741048 +step:4538 train loss:3.720344 +step:4539 train loss:3.741555 +step:4540 train loss:3.717635 +step:4541 train loss:3.683796 +step:4542 train loss:3.734976 +step:4543 train loss:3.819223 +step:4544 
train loss:3.763987 +step:4545 train loss:3.702759 +step:4546 train loss:3.799430 +step:4547 train loss:3.769125 +step:4548 train loss:3.762954 +step:4549 train loss:3.720790 +step:4550 train loss:3.688971 +step:4551 train loss:3.701828 +step:4552 train loss:3.705688 +step:4553 train loss:3.785844 +step:4554 train loss:3.683193 +step:4555 train loss:3.794360 +step:4556 train loss:3.733445 +step:4557 train loss:3.661232 +step:4558 train loss:3.745353 +step:4559 train loss:3.756574 +step:4560 train loss:3.694468 +step:4561 train loss:3.683992 +step:4562 train loss:3.721720 +step:4563 train loss:3.672171 +step:4564 train loss:3.697947 +step:4565 train loss:3.701137 +step:4566 train loss:3.674156 +step:4567 train loss:3.701146 +step:4568 train loss:3.697544 +step:4569 train loss:3.685028 +step:4570 train loss:3.738207 +step:4571 train loss:3.713084 +step:4572 train loss:3.706848 +step:4573 train loss:3.715223 +step:4574 train loss:3.861989 +step:4575 train loss:3.700518 +step:4576 train loss:3.683762 +step:4577 train loss:3.727538 +step:4578 train loss:3.768290 +step:4579 train loss:3.714229 +step:4580 train loss:3.775433 +step:4581 train loss:3.713952 +step:4582 train loss:3.705431 +step:4583 train loss:3.713877 +step:4584 train loss:3.685851 +step:4585 train loss:3.763699 +step:4586 train loss:3.752348 +step:4587 train loss:3.655281 +step:4588 train loss:3.693550 +step:4589 train loss:3.770166 +step:4590 train loss:3.742637 +step:4591 train loss:3.682405 +step:4592 train loss:3.763464 +step:4593 train loss:3.684052 +step:4594 train loss:3.712666 +step:4595 train loss:3.736248 +step:4596 train loss:3.672345 +step:4597 train loss:3.811594 +step:4598 train loss:3.729517 +step:4599 train loss:3.682734 +step:4600 train loss:3.689600 +step:4601 train loss:3.715643 +step:4602 train loss:3.664935 +step:4603 train loss:3.679179 +step:4604 train loss:3.783758 +step:4605 train loss:3.701781 +step:4606 train loss:3.729824 +step:4607 train loss:3.715071 +step:4608 train loss:3.745249 +step:4609 train loss:3.705116 +step:4610 train loss:3.750601 +step:4611 train loss:3.776099 +step:4612 train loss:3.777429 +step:4613 train loss:3.760011 +step:4614 train loss:3.754928 +step:4615 train loss:3.690377 +step:4616 train loss:3.676800 +step:4617 train loss:3.717184 +step:4618 train loss:3.734128 +step:4619 train loss:3.690705 +step:4620 train loss:3.705374 +step:4621 train loss:3.708858 +step:4622 train loss:3.641335 +step:4623 train loss:3.751433 +step:4624 train loss:3.735937 +step:4625 train loss:3.693437 +step:4626 train loss:3.744544 +step:4627 train loss:3.710317 +step:4628 train loss:3.697572 +step:4629 train loss:3.734258 +step:4630 train loss:3.795331 +step:4631 train loss:3.791578 +step:4632 train loss:3.688711 +step:4633 train loss:3.698431 +step:4634 train loss:3.774682 +step:4635 train loss:3.737537 +step:4636 train loss:3.752086 +step:4637 train loss:3.690539 +step:4638 train loss:3.697572 +step:4639 train loss:3.690548 +step:4640 train loss:3.701761 +step:4641 train loss:3.709269 +step:4642 train loss:3.741058 +step:4643 train loss:3.703402 +step:4644 train loss:3.730250 +step:4645 train loss:3.751344 +step:4646 train loss:3.698339 +step:4647 train loss:3.659225 +step:4648 train loss:3.767039 +step:4649 train loss:3.774880 +step:4650 train loss:3.722783 +step:4651 train loss:3.722473 +step:4652 train loss:3.712604 +step:4653 train loss:3.768102 +step:4654 train loss:3.766654 +step:4655 train loss:3.669193 +step:4656 train loss:3.700814 +step:4657 train loss:3.758766 +step:4658 train loss:3.709515 
+step:4659 train loss:3.726133 +step:4660 train loss:3.766557 +step:4661 train loss:3.685936 +step:4662 train loss:3.701295 +step:4663 train loss:3.705846 +step:4664 train loss:3.765036 +step:4665 train loss:3.756128 +step:4666 train loss:3.753330 +step:4667 train loss:3.747114 +step:4668 train loss:3.712169 +step:4669 train loss:3.722159 +step:4670 train loss:3.756884 +step:4671 train loss:3.738981 +step:4672 train loss:3.635691 +step:4673 train loss:3.662955 +step:4674 train loss:3.797560 +step:4675 train loss:3.698340 +step:4676 train loss:3.659687 +step:4677 train loss:3.665247 +step:4678 train loss:3.635543 +step:4679 train loss:3.738355 +step:4680 train loss:3.674071 +step:4681 train loss:3.728092 +step:4682 train loss:3.677984 +step:4683 train loss:3.650200 +step:4684 train loss:3.763899 +step:4685 train loss:3.697332 +step:4686 train loss:3.712973 +step:4687 train loss:3.746841 +step:4688 train loss:3.674108 +step:4689 train loss:3.749949 +step:4690 train loss:3.691716 +step:4691 train loss:3.728519 +step:4692 train loss:3.656965 +step:4693 train loss:3.694244 +step:4694 train loss:3.733186 +step:4695 train loss:3.752860 +step:4696 train loss:3.744935 +step:4697 train loss:3.652863 +step:4698 train loss:3.677933 +step:4699 train loss:3.721043 +step:4700 train loss:3.696339 +step:4701 train loss:3.702745 +step:4702 train loss:3.656016 +step:4703 train loss:3.736550 +step:4704 train loss:3.725661 +step:4705 train loss:3.667009 +step:4706 train loss:3.670809 +step:4707 train loss:3.660295 +step:4708 train loss:3.725586 +step:4709 train loss:3.673867 +step:4710 train loss:3.689203 +step:4711 train loss:3.750717 +step:4712 train loss:3.646072 +step:4713 train loss:3.750051 +step:4714 train loss:3.652044 +step:4715 train loss:3.742827 +step:4716 train loss:3.706821 +step:4717 train loss:3.642492 +step:4718 train loss:3.726140 +step:4719 train loss:3.656172 +step:4720 train loss:3.754813 +step:4721 train loss:3.708099 +step:4722 train loss:3.766722 +step:4723 train loss:3.663504 +step:4724 train loss:3.714359 +step:4725 train loss:3.651198 +step:4726 train loss:3.693846 +step:4727 train loss:3.698858 +step:4728 train loss:3.710277 +step:4729 train loss:3.735019 +step:4730 train loss:3.636313 +step:4731 train loss:3.695957 +step:4732 train loss:3.652847 +step:4733 train loss:3.590441 +step:4734 train loss:3.730600 +step:4735 train loss:3.681388 +step:4736 train loss:3.716282 +step:4737 train loss:3.598349 +step:4738 train loss:3.743814 +step:4739 train loss:3.620803 +step:4740 train loss:3.735795 +step:4741 train loss:3.702920 +step:4742 train loss:3.662696 +step:4743 train loss:3.658223 +step:4744 train loss:3.707906 +step:4745 train loss:3.722300 +step:4746 train loss:3.762768 +step:4747 train loss:3.727216 +step:4748 train loss:3.623281 +step:4749 train loss:3.688024 +step:4750 validation loss:3.623445 +step:4750 train loss:3.638586 +step:4751 train loss:3.731719 +step:4752 train loss:3.667070 +step:4753 train loss:3.773231 +step:4754 train loss:3.638965 +step:4755 train loss:3.680491 +step:4756 train loss:3.754405 +step:4757 train loss:3.682348 +step:4758 train loss:3.696859 +step:4759 train loss:3.702371 +step:4760 train loss:3.724672 +step:4761 train loss:3.643615 +step:4762 train loss:3.680301 +step:4763 train loss:3.704919 +step:4764 train loss:3.758470 +step:4765 train loss:3.649816 +step:4766 train loss:3.675946 +step:4767 train loss:3.626346 +step:4768 train loss:3.682042 +step:4769 train loss:3.708058 +step:4770 train loss:3.668680 +step:4771 train loss:3.680339 +step:4772 
train loss:3.651579 +step:4773 train loss:3.689387 +step:4774 train loss:3.629974 +step:4775 train loss:3.769164 +step:4776 train loss:3.632792 +step:4777 train loss:3.707756 +step:4778 train loss:3.644279 +step:4779 train loss:3.695481 +step:4780 train loss:3.634345 +step:4781 train loss:3.636174 +step:4782 train loss:3.744504 +step:4783 train loss:3.732341 +step:4784 train loss:3.694481 +step:4785 train loss:3.695329 +step:4786 train loss:3.801581 +step:4787 train loss:3.635519 +step:4788 train loss:3.659965 +step:4789 train loss:3.678392 +step:4790 train loss:3.734503 +step:4791 train loss:3.697752 +step:4792 train loss:3.738771 +step:4793 train loss:3.656715 +step:4794 train loss:3.734635 +step:4795 train loss:3.678739 +step:4796 train loss:3.670273 +step:4797 train loss:3.679614 +step:4798 train loss:3.688057 +step:4799 train loss:3.683007 +step:4800 train loss:3.717361 +step:4801 train loss:3.705667 +step:4802 train loss:3.745896 +step:4803 train loss:3.726070 +step:4804 train loss:3.685210 +step:4805 train loss:3.681984 +step:4806 train loss:3.659236 +step:4807 train loss:3.766848 +step:4808 train loss:3.637045 +step:4809 train loss:3.742393 +step:4810 train loss:3.679893 +step:4811 train loss:3.702826 +step:4812 train loss:3.674874 +step:4813 train loss:3.632520 +step:4814 train loss:3.625876 +step:4815 train loss:3.621240 +step:4816 train loss:3.683283 +step:4817 train loss:3.623868 +step:4818 train loss:3.688330 +step:4819 train loss:3.684265 +step:4820 train loss:3.933047 +step:4821 train loss:3.710638 +step:4822 train loss:3.722758 +step:4823 train loss:3.652661 +step:4824 train loss:3.662198 +step:4825 train loss:3.639567 +step:4826 train loss:3.727076 +step:4827 train loss:3.675563 +step:4828 train loss:3.614211 +step:4829 train loss:3.721347 +step:4830 train loss:3.663560 +step:4831 train loss:3.812400 +step:4832 train loss:3.684379 +step:4833 train loss:3.718788 +step:4834 train loss:3.619564 +step:4835 train loss:3.710872 +step:4836 train loss:3.689312 +step:4837 train loss:3.720695 +step:4838 train loss:3.660954 +step:4839 train loss:3.723485 +step:4840 train loss:3.629753 +step:4841 train loss:3.727320 +step:4842 train loss:3.640244 +step:4843 train loss:3.716699 +step:4844 train loss:3.717558 +step:4845 train loss:3.657158 +step:4846 train loss:3.670167 +step:4847 train loss:3.665089 +step:4848 train loss:3.683944 +step:4849 train loss:3.641749 +step:4850 train loss:3.651444 +step:4851 train loss:3.640552 +step:4852 train loss:3.717403 +step:4853 train loss:3.697493 +step:4854 train loss:3.671136 +step:4855 train loss:3.737360 +step:4856 train loss:3.702493 +step:4857 train loss:3.713205 +step:4858 train loss:3.793249 +step:4859 train loss:3.636431 +step:4860 train loss:3.714712 +step:4861 train loss:3.688188 +step:4862 train loss:3.721768 +step:4863 train loss:3.654181 +step:4864 train loss:3.665958 +step:4865 train loss:3.658349 +step:4866 train loss:3.704973 +step:4867 train loss:3.670950 +step:4868 train loss:3.690346 +step:4869 train loss:3.638087 +step:4870 train loss:3.675949 +step:4871 train loss:3.754206 +step:4872 train loss:3.698474 +step:4873 train loss:3.699996 +step:4874 train loss:3.671494 +step:4875 train loss:3.636359 +step:4876 train loss:3.649634 +step:4877 train loss:3.650148 +step:4878 train loss:3.689980 +step:4879 train loss:3.652782 +step:4880 train loss:3.678878 +step:4881 train loss:3.621364 +step:4882 train loss:3.823973 +step:4883 train loss:3.636763 +step:4884 train loss:3.666020 +step:4885 train loss:3.639492 +step:4886 train loss:3.715551 
+step:4887 train loss:3.667891 +step:4888 train loss:3.676967 +step:4889 train loss:3.671542 +step:4890 train loss:3.716466 +step:4891 train loss:3.646757 +step:4892 train loss:3.655544 +step:4893 train loss:3.701365 +step:4894 train loss:3.635627 +step:4895 train loss:3.667732 +step:4896 train loss:3.650548 +step:4897 train loss:3.720792 +step:4898 train loss:3.678688 +step:4899 train loss:3.656897 +step:4900 train loss:3.701375 +step:4901 train loss:3.654226 +step:4902 train loss:3.646417 +step:4903 train loss:3.667390 +step:4904 train loss:3.681958 +step:4905 train loss:3.679025 +step:4906 train loss:3.676373 +step:4907 train loss:3.750240 +step:4908 train loss:3.657120 +step:4909 train loss:3.662580 +step:4910 train loss:3.682756 +step:4911 train loss:3.736264 +step:4912 train loss:3.713962 +step:4913 train loss:3.686878 +step:4914 train loss:3.674978 +step:4915 train loss:3.660562 +step:4916 train loss:3.607951 +step:4917 train loss:3.632484 +step:4918 train loss:3.664597 +step:4919 train loss:3.653447 +step:4920 train loss:3.666483 +step:4921 train loss:3.818094 +step:4922 train loss:3.710012 +step:4923 train loss:3.731157 +step:4924 train loss:3.731626 +step:4925 train loss:3.658405 +step:4926 train loss:3.661428 +step:4927 train loss:3.685255 +step:4928 train loss:3.727934 +step:4929 train loss:3.684686 +step:4930 train loss:3.662186 +step:4931 train loss:3.659757 +step:4932 train loss:3.665010 +step:4933 train loss:3.654783 +step:4934 train loss:3.719384 +step:4935 train loss:3.707969 +step:4936 train loss:3.666992 +step:4937 train loss:3.778471 +step:4938 train loss:3.766984 +step:4939 train loss:3.632017 +step:4940 train loss:3.710567 +step:4941 train loss:3.611738 +step:4942 train loss:3.656951 +step:4943 train loss:3.657080 +step:4944 train loss:3.657234 +step:4945 train loss:3.702976 +step:4946 train loss:3.678961 +step:4947 train loss:3.662532 +step:4948 train loss:3.696267 +step:4949 train loss:3.604769 +step:4950 train loss:3.686572 +step:4951 train loss:3.738992 +step:4952 train loss:3.678494 +step:4953 train loss:3.714280 +step:4954 train loss:3.611735 +step:4955 train loss:3.692945 +step:4956 train loss:3.714324 +step:4957 train loss:3.710872 +step:4958 train loss:3.625375 +step:4959 train loss:3.743482 +step:4960 train loss:3.669557 +step:4961 train loss:3.688885 +step:4962 train loss:3.647321 +step:4963 train loss:3.694130 +step:4964 train loss:3.643629 +step:4965 train loss:3.800238 +step:4966 train loss:3.648190 +step:4967 train loss:3.757329 +step:4968 train loss:3.645385 +step:4969 train loss:3.686067 +step:4970 train loss:3.676443 +step:4971 train loss:3.629539 +step:4972 train loss:3.676537 +step:4973 train loss:3.684596 +step:4974 train loss:3.670314 +step:4975 train loss:3.754512 +step:4976 train loss:3.739395 +step:4977 train loss:3.680335 +step:4978 train loss:3.670473 +step:4979 train loss:3.668509 +step:4980 train loss:3.775625 +step:4981 train loss:3.615597 +step:4982 train loss:3.696985 +step:4983 train loss:3.619721 +step:4984 train loss:3.812061 +step:4985 train loss:3.707916 +step:4986 train loss:3.646998 +step:4987 train loss:3.667941 +step:4988 train loss:3.863423 +step:4989 train loss:3.669306 +step:4990 train loss:3.665046 +step:4991 train loss:3.678686 +step:4992 train loss:3.664353 +step:4993 train loss:3.641307 +step:4994 train loss:3.750327 +step:4995 train loss:3.676932 +step:4996 train loss:3.764568 +step:4997 train loss:3.660377 +step:4998 train loss:3.667751 +step:4999 train loss:3.645171 +step:5000 validation loss:3.613782 
total_sharp:2.2894e-05 L1_sharp:4.5753e-06 L2_sharp:3.6861e-06 L3_sharp:3.1773e-06 L4_sharp:3.7190e-06 L5_sharp:2.2488e-06 L6_sharp:1.8902e-06 L7_sharp:2.0820e-06 L8_sharp:3.7089e-06 L9_sharp:4.7230e-06 L10_sharp:7.6375e-06 L11_sharp:4.3144e-06 L12_sharp:4.3584e-05 total_fnorm:3.9707e+01 total_l1_linf:3.3990e+05 total_spectral:3.9707e+01 L1_fnorm:8.9626e+00 L2_fnorm:7.3474e+00 L3_fnorm:7.9641e+00 L4_fnorm:8.5529e+00 L5_fnorm:9.1412e+00 L6_fnorm:9.2077e+00 L7_fnorm:9.6688e+00 L8_fnorm:9.6306e+00 L9_fnorm:9.6057e+00 L10_fnorm:9.4670e+00 L11_fnorm:9.6585e+00 L12_fnorm:8.7517e+00 L1_l1linf:8.7744e+00 L2_l1linf:5.4301e+00 L3_l1linf:6.6016e+00 L4_l1linf:7.5046e+00 L5_l1linf:8.7509e+00 L6_l1linf:9.0767e+00 L7_l1linf:9.4703e+00 L8_l1linf:9.6615e+00 L9_l1linf:1.0930e+01 L10_l1linf:1.0183e+01 L11_l1linf:1.0868e+01 L12_l1linf:1.1778e+01 L1_spectral:1.2055e+00 L2_spectral:9.6641e-01 L3_spectral:9.0247e-01 L4_spectral:9.6937e-01 L5_spectral:1.0010e+00 L6_spectral:1.1472e+00 L7_spectral:9.3936e-01 L8_spectral:1.0748e+00 L9_spectral:1.3338e+00 L10_spectral:1.3743e+00 L11_spectral:1.3268e+00 L12_spectral:1.5011e+00 ip_v_neg_g:1.5406e-02 cos_v_neg_g:9.3100e-04 v_norm:3.9707e+01 g_norm:4.1676e-01 hv_norm:2.1176e-01 cos_v_hv:4.2928e-03 hg_norm:3.6240e+00 cos_g_hg:5.6741e-01 v_par:8.6811e-03 v_perp:3.9707e+01 L1_cos_v_neg_g:5.8459e-03 L1_v_norm:8.9626e+00 L2_cos_v_neg_g:3.3689e-03 L2_v_norm:7.3474e+00 L3_cos_v_neg_g:3.0477e-03 L3_v_norm:7.9641e+00 L4_cos_v_neg_g:2.1517e-03 L4_v_norm:8.5529e+00 L5_cos_v_neg_g:3.7524e-03 L5_v_norm:9.1412e+00 L6_cos_v_neg_g:4.5745e-03 L6_v_norm:9.2077e+00 L7_cos_v_neg_g:6.3931e-03 L7_v_norm:9.6688e+00 L8_cos_v_neg_g:4.9626e-03 L8_v_norm:9.6306e+00 L9_cos_v_neg_g:6.7016e-03 L9_v_norm:9.6057e+00 L10_cos_v_neg_g:7.8318e-03 L10_v_norm:9.4670e+00 L11_cos_v_neg_g:7.1973e-03 L11_v_norm:9.6585e+00 L12_cos_v_neg_g:8.8313e-03 L12_v_norm:8.7517e+00 +step:5000 train loss:3.760971 +step:5001 train loss:3.630464 +step:5002 train loss:3.683652 +step:5003 train loss:3.681720 +step:5004 train loss:3.670731 +step:5005 train loss:3.669646 +step:5006 train loss:3.711710 +step:5007 train loss:3.712876 +step:5008 train loss:3.649082 +step:5009 train loss:3.693696 +step:5010 train loss:3.644071 +step:5011 train loss:3.675486 +step:5012 train loss:3.647711 +step:5013 train loss:3.752925 +step:5014 train loss:3.666980 +step:5015 train loss:3.742676 +step:5016 train loss:3.670519 +step:5017 train loss:3.717998 +step:5018 train loss:3.635275 +step:5019 train loss:3.670029 +step:5020 train loss:3.662856 +step:5021 train loss:3.673385 +step:5022 train loss:3.710920 +step:5023 train loss:3.681645 +step:5024 train loss:3.733493 +step:5025 train loss:3.617783 +step:5026 train loss:3.741702 +step:5027 train loss:3.672476 +step:5028 train loss:3.743825 +step:5029 train loss:3.638283 +step:5030 train loss:3.677386 +step:5031 train loss:3.662916 +step:5032 train loss:3.691440 +step:5033 train loss:3.678973 +step:5034 train loss:3.673011 +step:5035 train loss:3.758718 +step:5036 train loss:3.707693 +step:5037 train loss:3.656872 +step:5038 train loss:3.708321 +step:5039 train loss:3.719143 +step:5040 train loss:3.681878 +step:5041 train loss:3.697769 +step:5042 train loss:3.602565 +step:5043 train loss:3.748891 +step:5044 train loss:3.661911 +step:5045 train loss:3.712523 +step:5046 train loss:3.634160 +step:5047 train loss:3.710056 +step:5048 train loss:3.628020 +step:5049 train loss:3.762268 +step:5050 train loss:3.649398 +step:5051 train loss:3.692679 +step:5052 train loss:3.592552 +step:5053 train loss:3.777151 
+step:5054 train loss:3.664204 +step:5055 train loss:3.684708 +step:5056 train loss:3.725243 +step:5057 train loss:3.647794 +step:5058 train loss:3.683986 +step:5059 train loss:3.644359 +step:5060 train loss:3.690557 +step:5061 train loss:3.685946 +step:5062 train loss:3.656821 +step:5063 train loss:3.649642 +step:5064 train loss:3.657797 +step:5065 train loss:3.642221 +step:5066 train loss:3.703454 +step:5067 train loss:3.684474 +step:5068 train loss:3.669768 +step:5069 train loss:3.645521 +step:5070 train loss:3.672723 +step:5071 train loss:3.742781 +step:5072 train loss:3.637036 +step:5073 train loss:3.641491 +step:5074 train loss:3.589583 +step:5075 train loss:3.661397 +step:5076 train loss:3.590875 +step:5077 train loss:3.656868 +step:5078 train loss:3.657227 +step:5079 train loss:3.696478 +step:5080 train loss:3.669995 +step:5081 train loss:3.683974 +step:5082 train loss:3.672117 +step:5083 train loss:3.743682 +step:5084 train loss:3.709713 +step:5085 train loss:3.671106 +step:5086 train loss:3.747228 +step:5087 train loss:3.730526 +step:5088 train loss:3.651700 +step:5089 train loss:3.713855 +step:5090 train loss:3.657589 +step:5091 train loss:3.663342 +step:5092 train loss:3.763005 +step:5093 train loss:3.647537 +step:5094 train loss:3.644083 +step:5095 train loss:3.697268 +step:5096 train loss:3.658762 +step:5097 train loss:3.672178 +step:5098 train loss:3.674898 +step:5099 train loss:3.634449 +step:5100 train loss:3.646752 +step:5101 train loss:3.839609 +step:5102 train loss:3.686124 +step:5103 train loss:3.695676 +step:5104 train loss:3.743010 +step:5105 train loss:3.689652 +step:5106 train loss:3.645801 +step:5107 train loss:3.668297 +step:5108 train loss:3.652266 +step:5109 train loss:3.741202 +step:5110 train loss:3.648246 +step:5111 train loss:3.735202 +step:5112 train loss:3.651184 +step:5113 train loss:3.628285 +step:5114 train loss:3.675631 +step:5115 train loss:3.633991 +step:5116 train loss:3.693746 +step:5117 train loss:3.636250 +step:5118 train loss:3.664356 +step:5119 train loss:3.648987 +step:5120 train loss:3.687109 +step:5121 train loss:3.631905 +step:5122 train loss:3.646846 +step:5123 train loss:3.662465 +step:5124 train loss:3.596309 +step:5125 train loss:3.705036 +step:5126 train loss:3.694164 +step:5127 train loss:3.693993 +step:5128 train loss:3.705247 +step:5129 train loss:3.637612 +step:5130 train loss:3.650121 +step:5131 train loss:3.583116 +step:5132 train loss:3.708043 +step:5133 train loss:3.676449 +step:5134 train loss:3.678283 +step:5135 train loss:3.626426 +step:5136 train loss:3.691053 +step:5137 train loss:3.694135 +step:5138 train loss:3.672867 +step:5139 train loss:3.707345 +step:5140 train loss:3.682481 +step:5141 train loss:3.713834 +step:5142 train loss:3.661330 +step:5143 train loss:3.692038 +step:5144 train loss:3.687347 +step:5145 train loss:3.630159 +step:5146 train loss:3.621416 +step:5147 train loss:3.703294 +step:5148 train loss:3.657427 +step:5149 train loss:3.702837 +step:5150 train loss:3.680879 +step:5151 train loss:3.646165 +step:5152 train loss:3.690481 +step:5153 train loss:3.663303 +step:5154 train loss:3.673443 +step:5155 train loss:3.684907 +step:5156 train loss:3.666698 +step:5157 train loss:3.661347 +step:5158 train loss:3.686249 +step:5159 train loss:3.723826 +step:5160 train loss:3.785702 +step:5161 train loss:3.713751 +step:5162 train loss:3.736471 +step:5163 train loss:3.648404 +step:5164 train loss:3.736167 +step:5165 train loss:3.729538 +step:5166 train loss:3.672066 +step:5167 train loss:3.766320 +step:5168 train 
loss:3.683263 +step:5169 train loss:3.710459 +step:5170 train loss:3.687449 +step:5171 train loss:3.732605 +step:5172 train loss:3.643489 +step:5173 train loss:3.712187 +step:5174 train loss:3.649445 +step:5175 train loss:3.678659 +step:5176 train loss:3.673818 +step:5177 train loss:3.667172 +step:5178 train loss:3.732032 +step:5179 train loss:3.644591 +step:5180 train loss:3.722719 +step:5181 train loss:3.668453 +step:5182 train loss:3.724122 +step:5183 train loss:3.655396 +step:5184 train loss:3.633286 +step:5185 train loss:3.659379 +step:5186 train loss:3.712199 +step:5187 train loss:3.710133 +step:5188 train loss:3.641124 +step:5189 train loss:3.685030 +step:5190 train loss:3.669365 +step:5191 train loss:3.650136 +step:5192 train loss:3.633037 +step:5193 train loss:3.717242 +step:5194 train loss:3.666164 +step:5195 train loss:3.638342 +step:5196 train loss:3.712249 +step:5197 train loss:3.777246 +step:5198 train loss:3.676795 +step:5199 train loss:3.664970 +step:5200 train loss:3.687071 +step:5201 train loss:3.682522 +step:5202 train loss:3.682292 +step:5203 train loss:3.687293 +step:5204 train loss:3.654394 +step:5205 train loss:3.702882 +step:5206 train loss:3.635041 +step:5207 train loss:3.639443 +step:5208 train loss:3.700187 +step:5209 train loss:3.718810 +step:5210 train loss:3.632413 +step:5211 train loss:3.676253 +step:5212 train loss:3.687843 +step:5213 train loss:3.661366 +step:5214 train loss:3.713577 +step:5215 train loss:3.819598 +step:5216 train loss:3.676194 +step:5217 train loss:3.650337 +step:5218 train loss:3.655886 +step:5219 train loss:3.718580 +step:5220 train loss:3.633035 +step:5221 train loss:3.636589 +step:5222 train loss:3.717650 +step:5223 train loss:3.712219 +step:5224 train loss:3.609160 +step:5225 train loss:3.755716 +step:5226 train loss:3.667685 +step:5227 train loss:3.743037 +step:5228 train loss:3.714077 +step:5229 train loss:3.655630 +step:5230 train loss:3.668663 +step:5231 train loss:3.612250 +step:5232 train loss:3.736061 +step:5233 train loss:3.698393 +step:5234 train loss:3.700667 +step:5235 train loss:3.648248 +step:5236 train loss:3.724423 +step:5237 train loss:3.778252 +step:5238 train loss:3.676953 +step:5239 train loss:3.739775 +step:5240 train loss:3.620625 +step:5241 train loss:3.682259 +step:5242 train loss:3.656831 +step:5243 train loss:3.658195 +step:5244 train loss:3.657258 +step:5245 train loss:3.699855 +step:5246 train loss:3.739362 +step:5247 train loss:3.672836 +step:5248 train loss:3.641569 +step:5249 train loss:3.699568 +step:5250 validation loss:3.596152 +step:5250 train loss:3.666408 +step:5251 train loss:3.733683 +step:5252 train loss:3.624060 +step:5253 train loss:3.773925 +step:5254 train loss:3.650601 +step:5255 train loss:3.719123 +step:5256 train loss:3.636091 +step:5257 train loss:3.687187 +step:5258 train loss:3.687531 +step:5259 train loss:3.676400 +step:5260 train loss:3.668857 +step:5261 train loss:3.656950 +step:5262 train loss:3.699811 +step:5263 train loss:3.689005 +step:5264 train loss:3.636949 +step:5265 train loss:3.718144 +step:5266 train loss:3.634514 +step:5267 train loss:3.647763 +step:5268 train loss:3.628350 +step:5269 train loss:3.630574 +step:5270 train loss:3.681926 +step:5271 train loss:3.607679 +step:5272 train loss:3.698475 +step:5273 train loss:3.603723 +step:5274 train loss:3.659537 +step:5275 train loss:3.675525 +step:5276 train loss:3.793792 +step:5277 train loss:3.696955 +step:5278 train loss:3.643316 +step:5279 train loss:3.689805 +step:5280 train loss:3.668289 +step:5281 train loss:3.660095 
+step:5282 train loss:3.632449 +step:5283 train loss:3.636776 +step:5284 train loss:3.640343 +step:5285 train loss:3.712175 +step:5286 train loss:3.615815 +step:5287 train loss:3.720824 +step:5288 train loss:3.693477 +step:5289 train loss:3.660647 +step:5290 train loss:3.716484 +step:5291 train loss:3.666190 +step:5292 train loss:3.688330 +step:5293 train loss:3.656419 +step:5294 train loss:3.643399 +step:5295 train loss:3.652512 +step:5296 train loss:3.641355 +step:5297 train loss:3.662255 +step:5298 train loss:3.611728 +step:5299 train loss:3.705144 +step:5300 train loss:3.653153 +step:5301 train loss:3.724011 +step:5302 train loss:3.729028 +step:5303 train loss:3.588809 +step:5304 train loss:3.620523 +step:5305 train loss:3.595665 +step:5306 train loss:3.633290 +step:5307 train loss:3.636966 +step:5308 train loss:3.730785 +step:5309 train loss:3.681435 +step:5310 train loss:3.665546 +step:5311 train loss:3.732737 +step:5312 train loss:3.619588 +step:5313 train loss:3.710253 +step:5314 train loss:3.699315 +step:5315 train loss:3.660599 +step:5316 train loss:3.693929 +step:5317 train loss:3.709524 +step:5318 train loss:3.663605 +step:5319 train loss:3.694229 +step:5320 train loss:3.640442 +step:5321 train loss:3.761633 +step:5322 train loss:3.671128 +step:5323 train loss:3.676162 +step:5324 train loss:3.619667 +step:5325 train loss:3.700766 +step:5326 train loss:3.695770 +step:5327 train loss:3.583250 +step:5328 train loss:3.720491 +step:5329 train loss:3.687329 +step:5330 train loss:3.683226 +step:5331 train loss:3.733173 +step:5332 train loss:3.656152 +step:5333 train loss:3.718750 +step:5334 train loss:3.696586 +step:5335 train loss:3.754373 +step:5336 train loss:3.788143 +step:5337 train loss:3.621324 +step:5338 train loss:3.630393 +step:5339 train loss:3.649204 +step:5340 train loss:3.676344 +step:5341 train loss:3.692173 +step:5342 train loss:3.592710 +step:5343 train loss:3.751980 +step:5344 train loss:3.634745 +step:5345 train loss:3.634244 +step:5346 train loss:3.638893 +step:5347 train loss:3.661685 +step:5348 train loss:3.701950 +step:5349 train loss:3.644326 +step:5350 train loss:3.681680 +step:5351 train loss:3.756009 +step:5352 train loss:3.793500 +step:5353 train loss:3.709446 +step:5354 train loss:3.677178 +step:5355 train loss:3.646387 +step:5356 train loss:3.665752 +step:5357 train loss:3.640958 +step:5358 train loss:3.663811 +step:5359 train loss:3.674364 +step:5360 train loss:3.648454 +step:5361 train loss:3.651542 +step:5362 train loss:3.636639 +step:5363 train loss:3.626149 +step:5364 train loss:3.633357 +step:5365 train loss:3.664506 +step:5366 train loss:3.697380 +step:5367 train loss:3.624649 +step:5368 train loss:3.695271 +step:5369 train loss:3.711216 +step:5370 train loss:3.614684 +step:5371 train loss:3.666176 +step:5372 train loss:3.679163 +step:5373 train loss:3.723778 +step:5374 train loss:3.608494 +step:5375 train loss:3.650113 +step:5376 train loss:3.721624 +step:5377 train loss:3.653364 +step:5378 train loss:3.631003 +step:5379 train loss:3.634390 +step:5380 train loss:3.681830 +step:5381 train loss:3.713503 +step:5382 train loss:3.640621 +step:5383 train loss:3.680594 +step:5384 train loss:3.700987 +step:5385 train loss:3.701531 +step:5386 train loss:3.679899 +step:5387 train loss:3.690197 +step:5388 train loss:3.705619 +step:5389 train loss:3.636174 +step:5390 train loss:3.661958 +step:5391 train loss:3.598610 +step:5392 train loss:3.662457 +step:5393 train loss:3.649005 +step:5394 train loss:3.648900 +step:5395 train loss:3.720692 +step:5396 train 
loss:3.682168 +step:5397 train loss:3.704080 +step:5398 train loss:3.700036 +step:5399 train loss:3.730147 +step:5400 train loss:3.738010 +step:5401 train loss:3.694676 +step:5402 train loss:3.799919 +step:5403 train loss:3.705894 +step:5404 train loss:3.681372 +step:5405 train loss:3.748569 +step:5406 train loss:3.709360 +step:5407 train loss:3.636196 +step:5408 train loss:3.784413 +step:5409 train loss:3.626379 +step:5410 train loss:3.686230 +step:5411 train loss:3.668180 +step:5412 train loss:3.646283 +step:5413 train loss:3.700069 +step:5414 train loss:3.674853 +step:5415 train loss:3.652442 +step:5416 train loss:3.645187 +step:5417 train loss:3.717640 +step:5418 train loss:3.736878 +step:5419 train loss:3.637552 +step:5420 train loss:3.699658 +step:5421 train loss:3.668576 +step:5422 train loss:3.708648 +step:5423 train loss:3.687606 +step:5424 train loss:3.592199 +step:5425 train loss:3.664593 +step:5426 train loss:3.742592 +step:5427 train loss:3.637460 +step:5428 train loss:3.675874 +step:5429 train loss:3.607915 +step:5430 train loss:3.640892 +step:5431 train loss:3.702113 +step:5432 train loss:3.679994 +step:5433 train loss:3.684679 +step:5434 train loss:3.633521 +step:5435 train loss:3.630724 +step:5436 train loss:3.632684 +step:5437 train loss:3.669675 +step:5438 train loss:3.651929 +step:5439 train loss:3.656506 +step:5440 train loss:3.698895 +step:5441 train loss:3.721555 +step:5442 train loss:3.652664 +step:5443 train loss:3.647831 +step:5444 train loss:3.594166 +step:5445 train loss:3.679482 +step:5446 train loss:3.651906 +step:5447 train loss:3.685472 +step:5448 train loss:3.742029 +step:5449 train loss:3.629294 +step:5450 train loss:3.664150 +step:5451 train loss:3.654234 +step:5452 train loss:3.670436 +step:5453 train loss:3.726300 +step:5454 train loss:3.654559 +step:5455 train loss:3.638267 +step:5456 train loss:3.776423 +step:5457 train loss:3.664209 +step:5458 train loss:3.694110 +step:5459 train loss:3.638427 +step:5460 train loss:3.656502 +step:5461 train loss:3.658250 +step:5462 train loss:3.665942 +step:5463 train loss:3.670671 +step:5464 train loss:3.670274 +step:5465 train loss:3.615282 +step:5466 train loss:3.688502 +step:5467 train loss:3.672419 +step:5468 train loss:3.674986 +step:5469 train loss:3.772568 +step:5470 train loss:3.666499 +step:5471 train loss:3.740447 +step:5472 train loss:3.688106 +step:5473 train loss:3.596257 +step:5474 train loss:3.925829 +step:5475 train loss:3.597924 +step:5476 train loss:3.678731 +step:5477 train loss:3.676836 +step:5478 train loss:3.675792 +step:5479 train loss:3.819175 +step:5480 train loss:3.662991 +step:5481 train loss:3.729299 +step:5482 train loss:3.641711 +step:5483 train loss:3.676819 +step:5484 train loss:3.714777 +step:5485 train loss:3.629250 +step:5486 train loss:3.677973 +step:5487 train loss:3.681543 +step:5488 train loss:3.593127 +step:5489 train loss:3.694886 +step:5490 train loss:3.643879 +step:5491 train loss:3.743433 +step:5492 train loss:3.673836 +step:5493 train loss:3.602028 +step:5494 train loss:3.655605 +step:5495 train loss:3.634950 +step:5496 train loss:3.631778 +step:5497 train loss:3.752248 +step:5498 train loss:3.620608 +step:5499 train loss:3.755330 +step:5500 validation loss:3.592129 total_sharp:2.1101e-05 L1_sharp:4.1795e-06 L2_sharp:1.7003e-06 L3_sharp:2.1111e-06 L4_sharp:1.7687e-06 L5_sharp:1.1658e-06 L6_sharp:8.3169e-07 L7_sharp:1.0314e-06 L8_sharp:2.2009e-06 L9_sharp:3.2505e-06 L10_sharp:5.2666e-06 L11_sharp:3.2900e-06 L12_sharp:5.6364e-05 total_fnorm:3.9931e+01 
total_l1_linf:3.4228e+05 total_spectral:3.9931e+01 L1_fnorm:9.1267e+00 L2_fnorm:7.3865e+00 L3_fnorm:7.9984e+00 L4_fnorm:8.5541e+00 L5_fnorm:9.1635e+00 L6_fnorm:9.1639e+00 L7_fnorm:9.7011e+00 L8_fnorm:9.7514e+00 L9_fnorm:9.7409e+00 L10_fnorm:9.6069e+00 L11_fnorm:9.8312e+00 L12_fnorm:8.9397e+00 L1_l1linf:8.5925e+00 L2_l1linf:5.5496e+00 L3_l1linf:6.7968e+00 L4_l1linf:7.6528e+00 L5_l1linf:8.9107e+00 L6_l1linf:9.0072e+00 L7_l1linf:9.5187e+00 L8_l1linf:9.9720e+00 L9_l1linf:1.0693e+01 L10_l1linf:1.1128e+01 L11_l1linf:1.0886e+01 L12_l1linf:1.1222e+01 L1_spectral:1.2514e+00 L2_spectral:9.3899e-01 L3_spectral:9.7057e-01 L4_spectral:9.0650e-01 L5_spectral:9.7340e-01 L6_spectral:1.0113e+00 L7_spectral:9.3536e-01 L8_spectral:1.1432e+00 L9_spectral:1.2624e+00 L10_spectral:1.4585e+00 L11_spectral:1.4148e+00 L12_spectral:1.7226e+00 ip_v_neg_g:2.3786e-02 cos_v_neg_g:7.6065e-04 v_norm:3.9931e+01 g_norm:7.8311e-01 hv_norm:3.0299e-01 cos_v_hv:2.7809e-03 hg_norm:6.0828e+00 cos_g_hg:6.8729e-01 v_par:8.6261e-03 v_perp:3.9931e+01 L1_cos_v_neg_g:3.2832e-03 L1_v_norm:9.1267e+00 L2_cos_v_neg_g:-9.1244e-04 L2_v_norm:7.3865e+00 L3_cos_v_neg_g:2.0358e-03 L3_v_norm:7.9984e+00 L4_cos_v_neg_g:1.5135e-03 L4_v_norm:8.5541e+00 L5_cos_v_neg_g:2.2800e-03 L5_v_norm:9.1635e+00 L6_cos_v_neg_g:1.9895e-03 L6_v_norm:9.1639e+00 L7_cos_v_neg_g:2.3930e-03 L7_v_norm:9.7011e+00 L8_cos_v_neg_g:3.8316e-03 L8_v_norm:9.7514e+00 L9_cos_v_neg_g:4.6547e-03 L9_v_norm:9.7409e+00 L10_cos_v_neg_g:6.3954e-03 L10_v_norm:9.6069e+00 L11_cos_v_neg_g:7.6324e-03 L11_v_norm:9.8312e+00 L12_cos_v_neg_g:1.4864e-02 L12_v_norm:8.9397e+00 +step:5500 train loss:3.672982 +step:5501 train loss:3.743465 +step:5502 train loss:3.693312 +step:5503 train loss:3.657536 +step:5504 train loss:3.701785 +step:5505 train loss:3.667737 +step:5506 train loss:3.707842 +step:5507 train loss:3.697108 +step:5508 train loss:3.723540 +step:5509 train loss:3.729213 +step:5510 train loss:3.698934 +step:5511 train loss:3.692976 +step:5512 train loss:3.817039 +step:5513 train loss:3.616773 +step:5514 train loss:3.681182 +step:5515 train loss:3.706431 +step:5516 train loss:3.727863 +step:5517 train loss:3.685999 +step:5518 train loss:3.713250 +step:5519 train loss:3.747399 +step:5520 train loss:3.659011 +step:5521 train loss:3.669641 +step:5522 train loss:3.635646 +step:5523 train loss:3.680217 +step:5524 train loss:3.729509 +step:5525 train loss:3.640571 +step:5526 train loss:3.647304 +step:5527 train loss:3.666625 +step:5528 train loss:3.774546 +step:5529 train loss:3.740017 +step:5530 train loss:3.707061 +step:5531 train loss:3.644446 +step:5532 train loss:3.667685 +step:5533 train loss:3.706509 +step:5534 train loss:3.614926 +step:5535 train loss:3.668530 +step:5536 train loss:3.605581 +step:5537 train loss:3.654827 +step:5538 train loss:3.644622 +step:5539 train loss:3.588995 +step:5540 train loss:3.812803 +step:5541 train loss:3.625313 +step:5542 train loss:3.675567 +step:5543 train loss:3.662543 +step:5544 train loss:3.657130 +step:5545 train loss:3.648978 +step:5546 train loss:3.688851 +step:5547 train loss:3.616059 +step:5548 train loss:3.656858 +step:5549 train loss:3.661662 +step:5550 train loss:3.687746 +step:5551 train loss:3.691672 +step:5552 train loss:3.644812 +step:5553 train loss:3.672412 +step:5554 train loss:3.643684 +step:5555 train loss:3.651443 +step:5556 train loss:3.666917 +step:5557 train loss:3.731939 +step:5558 train loss:3.654377 +step:5559 train loss:3.657864 +step:5560 train loss:3.652240 +step:5561 train loss:3.683018 +step:5562 train loss:3.640892 
+step:5563 train loss:3.622333 +step:5564 train loss:3.656482 +step:5565 train loss:3.721331 +step:5566 train loss:3.624866 +step:5567 train loss:3.741282 +step:5568 train loss:3.859592 +step:5569 train loss:3.653447 +step:5570 train loss:3.583395 +step:5571 train loss:3.676191 +step:5572 train loss:3.612054 +step:5573 train loss:3.602088 +step:5574 train loss:3.571629 +step:5575 train loss:3.669295 +step:5576 train loss:3.652007 +step:5577 train loss:3.657030 +step:5578 train loss:3.686040 +step:5579 train loss:3.641800 +step:5580 train loss:3.668845 +step:5581 train loss:3.687401 +step:5582 train loss:3.670427 +step:5583 train loss:3.676701 +step:5584 train loss:3.801609 +step:5585 train loss:3.706200 +step:5586 train loss:3.641061 +step:5587 train loss:3.674387 +step:5588 train loss:3.688451 +step:5589 train loss:3.685878 +step:5590 train loss:3.746362 +step:5591 train loss:3.609962 +step:5592 train loss:3.794113 +step:5593 train loss:3.675087 +step:5594 train loss:3.685830 +step:5595 train loss:3.675407 +step:5596 train loss:3.628870 +step:5597 train loss:3.637846 +step:5598 train loss:3.642960 +step:5599 train loss:3.654976 +step:5600 train loss:3.690540 +step:5601 train loss:3.718364 +step:5602 train loss:3.649849 +step:5603 train loss:3.685887 +step:5604 train loss:3.688265 +step:5605 train loss:3.657504 +step:5606 train loss:3.660648 +step:5607 train loss:3.692970 +step:5608 train loss:3.636809 +step:5609 train loss:3.685085 +step:5610 train loss:3.640257 +step:5611 train loss:3.681295 +step:5612 train loss:3.709175 +step:5613 train loss:3.673809 +step:5614 train loss:3.634463 +step:5615 train loss:3.735783 +step:5616 train loss:3.629344 +step:5617 train loss:3.721008 +step:5618 train loss:3.701735 +step:5619 train loss:3.659594 +step:5620 train loss:3.661226 +step:5621 train loss:3.735178 +step:5622 train loss:3.616462 +step:5623 train loss:3.654083 +step:5624 train loss:3.642762 +step:5625 train loss:3.674503 +step:5626 train loss:3.669420 +step:5627 train loss:3.641789 +step:5628 train loss:3.681644 +step:5629 train loss:3.664520 +step:5630 train loss:3.595693 +step:5631 train loss:3.633688 +step:5632 train loss:3.680249 +step:5633 train loss:3.673046 +step:5634 train loss:3.623345 +step:5635 train loss:3.663057 +step:5636 train loss:3.644254 +step:5637 train loss:3.783134 +step:5638 train loss:3.696428 +step:5639 train loss:3.671266 +step:5640 train loss:3.676175 +step:5641 train loss:3.711677 +step:5642 train loss:3.646592 +step:5643 train loss:3.663159 +step:5644 train loss:3.744679 +step:5645 train loss:3.701069 +step:5646 train loss:3.702142 +step:5647 train loss:3.690139 +step:5648 train loss:3.675023 +step:5649 train loss:3.592425 +step:5650 train loss:3.598461 +step:5651 train loss:3.672548 +step:5652 train loss:3.675083 +step:5653 train loss:3.639448 +step:5654 train loss:3.766492 +step:5655 train loss:3.629087 +step:5656 train loss:3.656176 +step:5657 train loss:3.720086 +step:5658 train loss:3.623348 +step:5659 train loss:3.661209 +step:5660 train loss:3.713489 +step:5661 train loss:3.650798 +step:5662 train loss:3.688251 +step:5663 train loss:3.587250 +step:5664 train loss:3.554214 +step:5665 train loss:3.677918 +step:5666 train loss:3.681030 +step:5667 train loss:3.709470 +step:5668 train loss:3.646745 +step:5669 train loss:3.662632 +step:5670 train loss:3.655995 +step:5671 train loss:3.647344 +step:5672 train loss:3.691817 +step:5673 train loss:3.662989 +step:5674 train loss:3.733479 +step:5675 train loss:3.646782 +step:5676 train loss:3.795112 +step:5677 train 
loss:3.698996 +step:5678 train loss:3.673072 +step:5679 train loss:3.665594 +step:5680 train loss:3.696706 +step:5681 train loss:3.660860 +step:5682 train loss:3.675452 +step:5683 train loss:3.632526 +step:5684 train loss:3.644788 +step:5685 train loss:3.686452 +step:5686 train loss:3.698287 +step:5687 train loss:3.646920 +step:5688 train loss:3.739448 +step:5689 train loss:3.644866 +step:5690 train loss:3.794038 +step:5691 train loss:3.626115 +step:5692 train loss:3.612720 +step:5693 train loss:3.619698 +step:5694 train loss:3.640579 +step:5695 train loss:3.659036 +step:5696 train loss:3.705129 +step:5697 train loss:3.630947 +step:5698 train loss:3.649412 +step:5699 train loss:3.662302 +step:5700 train loss:3.659507 +step:5701 train loss:3.655107 +step:5702 train loss:3.721243 +step:5703 train loss:3.619761 +step:5704 train loss:3.661629 +step:5705 train loss:3.673697 +step:5706 train loss:3.694954 +step:5707 train loss:3.609417 +step:5708 train loss:3.693727 +step:5709 train loss:3.699961 +step:5710 train loss:3.690757 +step:5711 train loss:3.714220 +step:5712 train loss:3.693454 +step:5713 train loss:3.617609 +step:5714 train loss:3.703574 +step:5715 train loss:3.661107 +step:5716 train loss:3.661668 +step:5717 train loss:3.693594 +step:5718 train loss:3.631410 +step:5719 train loss:3.705065 +step:5720 train loss:3.682784 +step:5721 train loss:3.608867 +step:5722 train loss:3.624615 +step:5723 train loss:3.707324 +step:5724 train loss:3.625749 +step:5725 train loss:3.690665 +step:5726 train loss:3.685768 +step:5727 train loss:3.645961 +step:5728 train loss:3.651714 +step:5729 train loss:3.649813 +step:5730 train loss:3.722395 +step:5731 train loss:3.594178 +step:5732 train loss:3.648058 +step:5733 train loss:3.645750 +step:5734 train loss:3.662856 +step:5735 train loss:3.647493 +step:5736 train loss:3.656138 +step:5737 train loss:3.672393 +step:5738 train loss:3.640298 +step:5739 train loss:3.651210 +step:5740 train loss:3.692211 +step:5741 train loss:3.675430 +step:5742 train loss:3.717206 +step:5743 train loss:3.680750 +step:5744 train loss:3.640230 +step:5745 train loss:3.647874 +step:5746 train loss:3.680160 +step:5747 train loss:3.659028 +step:5748 train loss:3.708529 +step:5749 train loss:3.664616 +step:5750 validation loss:3.584486 +step:5750 train loss:3.670957 +step:5751 train loss:3.684586 +step:5752 train loss:3.666889 +step:5753 train loss:3.634629 +step:5754 train loss:3.646953 +step:5755 train loss:3.662620 +step:5756 train loss:3.650485 +step:5757 train loss:3.715173 +step:5758 train loss:3.651013 +step:5759 train loss:3.612220 +step:5760 train loss:3.695202 +step:5761 train loss:3.693574 +step:5762 train loss:3.647043 +step:5763 train loss:3.675670 +step:5764 train loss:3.637714 +step:5765 train loss:3.755003 +step:5766 train loss:3.664021 +step:5767 train loss:3.697126 +step:5768 train loss:3.636055 +step:5769 train loss:3.758256 +step:5770 train loss:3.677800 +step:5771 train loss:3.706084 +step:5772 train loss:3.656643 +step:5773 train loss:3.633291 +step:5774 train loss:3.644754 +step:5775 train loss:3.713744 +step:5776 train loss:3.700297 +step:5777 train loss:3.618200 +step:5778 train loss:3.700614 +step:5779 train loss:3.668411 +step:5780 train loss:3.633111 +step:5781 train loss:3.701549 +step:5782 train loss:3.659276 +step:5783 train loss:3.621297 +step:5784 train loss:3.726462 +step:5785 train loss:3.715021 +step:5786 train loss:3.624345 +step:5787 train loss:3.671338 +step:5788 train loss:3.677459 +step:5789 train loss:3.623429 +step:5790 train loss:3.727325 
+step:5791 train loss:3.654800 +step:5792 train loss:3.925084 +step:5793 train loss:3.694511 +step:5794 train loss:3.717755 +step:5795 train loss:3.707867 +step:5796 train loss:3.695436 +step:5797 train loss:3.673042 +step:5798 train loss:3.672266 +step:5799 train loss:3.642220 +step:5800 train loss:3.805032 +step:5801 train loss:3.674347 +step:5802 train loss:3.664528 +step:5803 train loss:3.670031 +step:5804 train loss:3.692728 +step:5805 train loss:3.654662 +step:5806 train loss:3.696511 +step:5807 train loss:3.616892 +step:5808 train loss:3.647163 +step:5809 train loss:3.658956 +step:5810 train loss:3.628736 +step:5811 train loss:3.648464 +step:5812 train loss:3.627264 +step:5813 train loss:3.639544 +step:5814 train loss:3.633215 +step:5815 train loss:3.632342 +step:5816 train loss:3.696704 +step:5817 train loss:3.706621 +step:5818 train loss:3.683093 +step:5819 train loss:3.731076 +step:5820 train loss:3.675016 +step:5821 train loss:3.664832 +step:5822 train loss:3.685556 +step:5823 train loss:3.685745 +step:5824 train loss:3.635470 +step:5825 train loss:3.730045 +step:5826 train loss:3.644570 +step:5827 train loss:3.607316 +step:5828 train loss:3.596782 +step:5829 train loss:3.662169 +step:5830 train loss:3.634144 +step:5831 train loss:3.605656 +step:5832 train loss:3.721877 +step:5833 train loss:3.695660 +step:5834 train loss:3.681918 +step:5835 train loss:3.631896 +step:5836 train loss:3.594743 +step:5837 train loss:3.716806 +step:5838 train loss:3.697660 +step:5839 train loss:3.671944 +step:5840 train loss:3.756183 +step:5841 train loss:3.679350 +step:5842 train loss:3.692332 +step:5843 train loss:3.637880 +step:5844 train loss:3.706214 +step:5845 train loss:3.614208 +step:5846 train loss:3.660368 +step:5847 train loss:3.690513 +step:5848 train loss:3.755002 +step:5849 train loss:3.649129 +step:5850 train loss:3.681210 +step:5851 train loss:3.647651 +step:5852 train loss:3.734880 +step:5853 train loss:3.828149 +step:5854 train loss:3.615360 +step:5855 train loss:3.678899 +step:5856 train loss:3.651135 +step:5857 train loss:3.663690 +step:5858 train loss:3.634077 +step:5859 train loss:3.639614 +step:5860 train loss:3.741436 +step:5861 train loss:3.624769 +step:5862 train loss:3.738730 +step:5863 train loss:3.678733 +step:5864 train loss:3.663737 +step:5865 train loss:3.672361 +step:5866 train loss:3.655772 +step:5867 train loss:3.741728 +step:5868 train loss:3.663182 +step:5869 train loss:3.691223 +step:5870 train loss:3.664485 +step:5871 train loss:3.646262 +step:5872 train loss:3.672906 +step:5873 train loss:3.651053 +step:5874 train loss:3.735714 +step:5875 train loss:3.661914 +step:5876 train loss:3.640445 +step:5877 train loss:3.648121 +step:5878 train loss:3.649946 +step:5879 train loss:3.622187 +step:5880 train loss:3.819596 +step:5881 train loss:3.663084 +step:5882 train loss:3.637104 +step:5883 train loss:3.640001 +step:5884 train loss:3.654812 +step:5885 train loss:3.651884 +step:5886 train loss:3.667874 +step:5887 train loss:3.672786 +step:5888 train loss:3.644626 +step:5889 train loss:3.624609 +step:5890 train loss:3.672609 +step:5891 train loss:3.617881 +step:5892 train loss:3.700668 +step:5893 train loss:3.620802 +step:5894 train loss:3.614474 +step:5895 train loss:3.615242 +step:5896 train loss:3.635146 +step:5897 train loss:3.698236 +step:5898 train loss:3.918485 +step:5899 train loss:3.651759 +step:5900 train loss:3.699499 +step:5901 train loss:3.646670 +step:5902 train loss:3.672680 +step:5903 train loss:3.659426 +step:5904 train loss:3.684099 +step:5905 train 
loss:3.790839 +step:5906 train loss:3.731672 +step:5907 train loss:3.673016 +step:5908 train loss:3.651068 +step:5909 train loss:3.639887 +step:5910 train loss:3.632131 +step:5911 train loss:3.645289 +step:5912 train loss:3.677928 +step:5913 train loss:3.684074 +step:5914 train loss:3.671360 +step:5915 train loss:3.827615 +step:5916 train loss:3.703049 +step:5917 train loss:3.669212 +step:5918 train loss:3.660649 +step:5919 train loss:3.683816 +step:5920 train loss:3.681834 +step:5921 train loss:3.646906 +step:5922 train loss:3.704624 +step:5923 train loss:3.696082 +step:5924 train loss:3.647200 +step:5925 train loss:3.777093 +step:5926 train loss:3.661565 +step:5927 train loss:3.635725 +step:5928 train loss:3.666490 +step:5929 train loss:3.688993 +step:5930 train loss:3.639147 +step:5931 train loss:3.620296 +step:5932 train loss:3.658056 +step:5933 train loss:3.712760 +step:5934 train loss:3.630870 +step:5935 train loss:3.657643 +step:5936 train loss:3.644974 +step:5937 train loss:3.622106 +step:5938 train loss:3.638565 +step:5939 train loss:3.618877 +step:5940 train loss:3.701358 +step:5941 train loss:3.635023 +step:5942 train loss:3.648796 +step:5943 train loss:3.651706 +step:5944 train loss:3.708801 +step:5945 train loss:3.637954 +step:5946 train loss:3.614596 +step:5947 train loss:3.632125 +step:5948 train loss:3.666054 +step:5949 train loss:3.715243 +step:5950 train loss:3.671649 +step:5951 train loss:3.673220 +step:5952 train loss:3.598068 +step:5953 train loss:3.644719 +step:5954 train loss:3.649512 +step:5955 train loss:3.656584 +step:5956 train loss:3.631229 +step:5957 train loss:3.599847 +step:5958 train loss:3.672550 +step:5959 train loss:3.632767 +step:5960 train loss:3.610751 +step:5961 train loss:3.636915 +step:5962 train loss:3.662361 +step:5963 train loss:3.693658 +step:5964 train loss:3.654973 +step:5965 train loss:3.670385 +step:5966 train loss:3.666956 +step:5967 train loss:3.632850 +step:5968 train loss:3.710730 +step:5969 train loss:3.649864 +step:5970 train loss:3.668662 +step:5971 train loss:3.617361 +step:5972 train loss:3.643786 +step:5973 train loss:3.637017 +step:5974 train loss:3.658898 +step:5975 train loss:3.626573 +step:5976 train loss:3.667311 +step:5977 train loss:3.622415 +step:5978 train loss:3.614578 +step:5979 train loss:3.651646 +step:5980 train loss:3.720513 +step:5981 train loss:3.617359 +step:5982 train loss:3.621903 +step:5983 train loss:3.690130 +step:5984 train loss:3.636440 +step:5985 train loss:3.682216 +step:5986 train loss:3.649893 +step:5987 train loss:3.635276 +step:5988 train loss:3.644238 +step:5989 train loss:3.663475 +step:5990 train loss:3.593960 +step:5991 train loss:3.658791 +step:5992 train loss:3.692842 +step:5993 train loss:3.640161 +step:5994 train loss:3.660495 +step:5995 train loss:3.551280 +step:5996 train loss:3.717664 +step:5997 train loss:3.699940 +step:5998 train loss:3.578931 +step:5999 train loss:3.606737 +step:6000 validation loss:3.571214 total_sharp:2.3883e-05 L1_sharp:5.8470e-06 L2_sharp:2.5565e-06 L3_sharp:2.0304e-06 L4_sharp:1.5227e-06 L5_sharp:1.1290e-06 L6_sharp:1.3359e-06 L7_sharp:1.6805e-06 L8_sharp:4.0313e-06 L9_sharp:4.9050e-06 L10_sharp:6.7077e-06 L11_sharp:4.9154e-06 L12_sharp:5.4290e-05 total_fnorm:3.9905e+01 total_l1_linf:3.4215e+05 total_spectral:3.9905e+01 L1_fnorm:9.1681e+00 L2_fnorm:7.4753e+00 L3_fnorm:8.0428e+00 L4_fnorm:8.5509e+00 L5_fnorm:9.1463e+00 L6_fnorm:9.2428e+00 L7_fnorm:9.7768e+00 L8_fnorm:9.7680e+00 L9_fnorm:9.7943e+00 L10_fnorm:9.5732e+00 L11_fnorm:9.7965e+00 L12_fnorm:8.9430e+00 
L1_l1linf:8.3989e+00 L2_l1linf:5.6455e+00 L3_l1linf:6.8728e+00 L4_l1linf:7.7400e+00 L5_l1linf:8.9552e+00 L6_l1linf:8.9830e+00 L7_l1linf:9.5241e+00 L8_l1linf:1.0212e+01 L9_l1linf:1.0237e+01 L10_l1linf:1.0683e+01 L11_l1linf:1.0493e+01 L12_l1linf:1.1267e+01 L1_spectral:1.2394e+00 L2_spectral:9.1094e-01 L3_spectral:9.2188e-01 L4_spectral:9.2349e-01 L5_spectral:9.5117e-01 L6_spectral:1.0105e+00 L7_spectral:9.5575e-01 L8_spectral:1.1667e+00 L9_spectral:1.2868e+00 L10_spectral:1.3736e+00 L11_spectral:1.5025e+00 L12_spectral:1.6860e+00 ip_v_neg_g:1.7335e-02 cos_v_neg_g:1.2511e-03 v_norm:3.9905e+01 g_norm:3.4722e-01 hv_norm:2.9060e-01 cos_v_hv:3.2796e-03 hg_norm:2.5170e+00 cos_g_hg:5.8212e-01 v_par:1.3539e-02 v_perp:3.9905e+01 L1_cos_v_neg_g:9.1219e-03 L1_v_norm:9.1681e+00 L2_cos_v_neg_g:2.3681e-03 L2_v_norm:7.4753e+00 L3_cos_v_neg_g:3.4416e-03 L3_v_norm:8.0428e+00 L4_cos_v_neg_g:3.9481e-03 L4_v_norm:8.5509e+00 L5_cos_v_neg_g:2.7577e-03 L5_v_norm:9.1463e+00 L6_cos_v_neg_g:4.2258e-03 L6_v_norm:9.2428e+00 L7_cos_v_neg_g:5.6449e-03 L7_v_norm:9.7768e+00 L8_cos_v_neg_g:6.1344e-03 L8_v_norm:9.7680e+00 L9_cos_v_neg_g:7.2894e-03 L9_v_norm:9.7943e+00 L10_cos_v_neg_g:8.9936e-03 L10_v_norm:9.5732e+00 L11_cos_v_neg_g:9.8873e-03 L11_v_norm:9.7965e+00 L12_cos_v_neg_g:2.1701e-02 L12_v_norm:8.9430e+00 +step:6000 train loss:3.655612 +step:6001 train loss:3.614708 +step:6002 train loss:3.649459 +step:6003 train loss:3.671529 +step:6004 train loss:3.622823 +step:6005 train loss:3.693574 +step:6006 train loss:3.599841 +step:6007 train loss:3.619668 +step:6008 train loss:3.636155 +step:6009 train loss:3.675327 +step:6010 train loss:3.668450 +step:6011 train loss:3.661473 +step:6012 train loss:3.627347 +step:6013 train loss:3.684345 +step:6014 train loss:3.701604 +step:6015 train loss:3.700375 +step:6016 train loss:3.667137 +step:6017 train loss:3.676000 +step:6018 train loss:3.612244 +step:6019 train loss:3.648401 +step:6020 train loss:3.631148 +step:6021 train loss:3.560840 +step:6022 train loss:3.672555 +step:6023 train loss:3.609198 +step:6024 train loss:3.683490 +step:6025 train loss:3.649822 +step:6026 train loss:3.623410 +step:6027 train loss:3.666346 +step:6028 train loss:3.580366 +step:6029 train loss:3.695581 +step:6030 train loss:3.666502 +step:6031 train loss:3.635281 +step:6032 train loss:3.598686 +step:6033 train loss:3.659153 +step:6034 train loss:3.680861 +step:6035 train loss:3.603208 +step:6036 train loss:3.572325 +step:6037 train loss:3.690831 +step:6038 train loss:3.693869 +step:6039 train loss:3.677755 +step:6040 train loss:3.632441 +step:6041 train loss:3.618870 +step:6042 train loss:3.594666 +step:6043 train loss:3.650533 +step:6044 train loss:3.774833 +step:6045 train loss:3.613141 +step:6046 train loss:3.624887 +step:6047 train loss:3.663937 +step:6048 train loss:3.673210 +step:6049 train loss:3.644798 +step:6050 train loss:3.614443 +step:6051 train loss:3.664285 +step:6052 train loss:3.636866 +step:6053 train loss:3.759245 +step:6054 train loss:3.797014 +step:6055 train loss:3.608524 +step:6056 train loss:3.603117 +step:6057 train loss:3.637111 +step:6058 train loss:3.664925 +step:6059 train loss:3.667281 +step:6060 train loss:3.671925 +step:6061 train loss:3.689162 +step:6062 train loss:3.638600 +step:6063 train loss:3.658013 +step:6064 train loss:3.653215 +step:6065 train loss:3.653934 +step:6066 train loss:3.636899 +step:6067 train loss:3.679572 +step:6068 train loss:3.616717 +step:6069 train loss:3.575746 +step:6070 train loss:3.721375 +step:6071 train loss:3.672379 +step:6072 train 
loss:3.611193 +step:6073 train loss:3.648870 +step:6074 train loss:3.731687 +step:6075 train loss:3.652772 +step:6076 train loss:3.660749 +step:6077 train loss:3.666144 +step:6078 train loss:3.600924 +step:6079 train loss:3.628285 +step:6080 train loss:3.634846 +step:6081 train loss:3.672405 +step:6082 train loss:3.621754 +step:6083 train loss:3.632585 +step:6084 train loss:3.696739 +step:6085 train loss:3.693883 +step:6086 train loss:3.593937 +step:6087 train loss:3.641972 +step:6088 train loss:3.627606 +step:6089 train loss:3.685374 +step:6090 train loss:3.689962 +step:6091 train loss:3.639495 +step:6092 train loss:3.600301 +step:6093 train loss:3.661660 +step:6094 train loss:3.576795 +step:6095 train loss:3.740794 +step:6096 train loss:3.613747 +step:6097 train loss:3.690342 +step:6098 train loss:3.665669 +step:6099 train loss:3.722081 +step:6100 train loss:3.715713 +step:6101 train loss:3.647473 +step:6102 train loss:3.759587 +step:6103 train loss:3.647530 +step:6104 train loss:3.765401 +step:6105 train loss:3.699308 +step:6106 train loss:3.631350 +step:6107 train loss:3.701926 +step:6108 train loss:3.661476 +step:6109 train loss:3.730438 +step:6110 train loss:3.664012 +step:6111 train loss:3.693622 +step:6112 train loss:3.634120 +step:6113 train loss:3.663416 +step:6114 train loss:3.637025 +step:6115 train loss:3.696025 +step:6116 train loss:3.639962 +step:6117 train loss:3.690996 +step:6118 train loss:3.671845 +step:6119 train loss:3.681424 +step:6120 train loss:3.828796 +step:6121 train loss:3.658465 +step:6122 train loss:3.671009 +step:6123 train loss:3.647407 +step:6124 train loss:3.624880 +step:6125 train loss:3.617013 +step:6126 train loss:3.636227 +step:6127 train loss:3.625794 +step:6128 train loss:3.604397 +step:6129 train loss:3.830885 +step:6130 train loss:3.613768 +step:6131 train loss:3.591473 +step:6132 train loss:3.665627 +step:6133 train loss:3.633144 +step:6134 train loss:3.662273 +step:6135 train loss:3.744051 +step:6136 train loss:3.763997 +step:6137 train loss:3.624528 +step:6138 train loss:3.680414 +step:6139 train loss:3.662829 +step:6140 train loss:3.657491 +step:6141 train loss:3.614911 +step:6142 train loss:3.681253 +step:6143 train loss:3.645206 +step:6144 train loss:3.662498 +step:6145 train loss:3.911690 +step:6146 train loss:3.749649 +step:6147 train loss:3.829455 +step:6148 train loss:3.602410 +step:6149 train loss:3.724532 +step:6150 train loss:3.679919 +step:6151 train loss:3.630743 +step:6152 train loss:3.629390 +step:6153 train loss:3.696689 +step:6154 train loss:3.784330 +step:6155 train loss:3.647112 +step:6156 train loss:3.749826 +step:6157 train loss:3.675144 +step:6158 train loss:3.671382 +step:6159 train loss:3.635536 +step:6160 train loss:3.798441 +step:6161 train loss:3.647556 +step:6162 train loss:3.668794 +step:6163 train loss:3.699157 +step:6164 train loss:3.613050 +step:6165 train loss:3.680435 +step:6166 train loss:3.672144 +step:6167 train loss:3.693623 +step:6168 train loss:3.667119 +step:6169 train loss:3.660512 +step:6170 train loss:3.664026 +step:6171 train loss:3.632516 +step:6172 train loss:3.617363 +step:6173 train loss:3.671083 +step:6174 train loss:3.598476 +step:6175 train loss:3.609063 +step:6176 train loss:3.591519 +step:6177 train loss:3.687740 +step:6178 train loss:3.633026 +step:6179 train loss:3.643858 +step:6180 train loss:3.647858 +step:6181 train loss:3.679122 +step:6182 train loss:3.565968 +step:6183 train loss:3.572888 +step:6184 train loss:3.692727 +step:6185 train loss:3.648103 +step:6186 train loss:3.607341 
+step:6187 train loss:3.646898 +step:6188 train loss:3.616586 +step:6189 train loss:3.655317 +step:6190 train loss:3.613558 +step:6191 train loss:3.649618 +step:6192 train loss:3.615012 +step:6193 train loss:3.684537 +step:6194 train loss:3.673805 +step:6195 train loss:3.652095 +step:6196 train loss:3.668041 +step:6197 train loss:3.691679 +step:6198 train loss:3.607970 +step:6199 train loss:3.628686 +step:6200 train loss:3.669226 +step:6201 train loss:3.716673 +step:6202 train loss:3.717677 +step:6203 train loss:3.712096 +step:6204 train loss:3.696713 +step:6205 train loss:3.637551 +step:6206 train loss:3.623660 +step:6207 train loss:3.683924 +step:6208 train loss:3.708547 +step:6209 train loss:3.676816 +step:6210 train loss:3.707767 +step:6211 train loss:3.622878 +step:6212 train loss:3.620619 +step:6213 train loss:3.632326 +step:6214 train loss:3.605554 +step:6215 train loss:3.781242 +step:6216 train loss:3.653974 +step:6217 train loss:3.713564 +step:6218 train loss:3.687049 +step:6219 train loss:3.698355 +step:6220 train loss:3.656231 +step:6221 train loss:3.624261 +step:6222 train loss:3.859775 +step:6223 train loss:3.634501 +step:6224 train loss:3.677910 +step:6225 train loss:3.658571 +step:6226 train loss:3.666115 +step:6227 train loss:3.666534 +step:6228 train loss:3.657587 +step:6229 train loss:3.694380 +step:6230 train loss:3.647815 +step:6231 train loss:3.760416 +step:6232 train loss:3.603016 +step:6233 train loss:3.638464 +step:6234 train loss:3.647176 +step:6235 train loss:3.674837 +step:6236 train loss:3.609200 +step:6237 train loss:3.633072 +step:6238 train loss:3.655925 +step:6239 train loss:3.643735 +step:6240 train loss:3.666438 +step:6241 train loss:3.646605 +step:6242 train loss:3.643478 +step:6243 train loss:3.680033 +step:6244 train loss:3.835968 +step:6245 train loss:3.631511 +step:6246 train loss:3.614003 +step:6247 train loss:3.612515 +step:6248 train loss:3.615631 +step:6249 train loss:3.553455 +step:6250 validation loss:3.565775 +step:6250 train loss:3.593655 +step:6251 train loss:3.610549 +step:6252 train loss:3.655500 +step:6253 train loss:3.662625 +step:6254 train loss:3.652425 +step:6255 train loss:3.619309 +step:6256 train loss:3.672477 +step:6257 train loss:3.668747 +step:6258 train loss:3.647632 +step:6259 train loss:3.652372 +step:6260 train loss:3.680923 +step:6261 train loss:3.701991 +step:6262 train loss:3.597748 +step:6263 train loss:3.631301 +step:6264 train loss:3.640898 +step:6265 train loss:3.624161 +step:6266 train loss:3.829895 +step:6267 train loss:3.633988 +step:6268 train loss:3.723487 +step:6269 train loss:3.600108 +step:6270 train loss:3.609948 +step:6271 train loss:3.655822 +step:6272 train loss:3.649751 +step:6273 train loss:3.851948 +step:6274 train loss:3.629808 +step:6275 train loss:3.667509 +step:6276 train loss:3.637960 +step:6277 train loss:3.617664 +step:6278 train loss:3.603790 +step:6279 train loss:3.657937 +step:6280 train loss:3.662153 +step:6281 train loss:3.593322 +step:6282 train loss:3.610307 +step:6283 train loss:3.697458 +step:6284 train loss:3.665385 +step:6285 train loss:3.666588 +step:6286 train loss:3.613900 +step:6287 train loss:3.638556 +step:6288 train loss:3.734736 +step:6289 train loss:3.603219 +step:6290 train loss:3.589705 +step:6291 train loss:3.630042 +step:6292 train loss:3.651478 +step:6293 train loss:3.638665 +step:6294 train loss:3.621689 +step:6295 train loss:3.643803 +step:6296 train loss:3.610233 +step:6297 train loss:3.734254 +step:6298 train loss:3.680773 +step:6299 train loss:3.570953 +step:6300 
train loss:3.657254 +step:6301 train loss:3.684601 +step:6302 train loss:3.667599 +step:6303 train loss:3.636303 +step:6304 train loss:3.648498 +step:6305 train loss:3.619446 +step:6306 train loss:3.631437 +step:6307 train loss:3.643450 +step:6308 train loss:3.615266 +step:6309 train loss:3.612659 +step:6310 train loss:3.667401 +step:6311 train loss:3.621545 +step:6312 train loss:3.663827 +step:6313 train loss:3.592515 +step:6314 train loss:3.619463 +step:6315 train loss:3.672794 +step:6316 train loss:3.595090 +step:6317 train loss:3.592866 +step:6318 train loss:3.701490 +step:6319 train loss:3.634051 +step:6320 train loss:3.651055 +step:6321 train loss:3.634569 +step:6322 train loss:3.633730 +step:6323 train loss:3.570055 +step:6324 train loss:3.573907 +step:6325 train loss:3.677658 +step:6326 train loss:3.594051 +step:6327 train loss:3.669352 +step:6328 train loss:3.647385 +step:6329 train loss:3.567462 +step:6330 train loss:3.597239 +step:6331 train loss:3.612597 +step:6332 train loss:3.746584 +step:6333 train loss:3.623062 +step:6334 train loss:3.602096 +step:6335 train loss:3.572759 +step:6336 train loss:3.602891 +step:6337 train loss:3.627545 +step:6338 train loss:3.584273 +step:6339 train loss:3.628667 +step:6340 train loss:3.621077 +step:6341 train loss:3.622742 +step:6342 train loss:3.617228 +step:6343 train loss:3.718135 +step:6344 train loss:3.568735 +step:6345 train loss:3.587266 +step:6346 train loss:3.670957 +step:6347 train loss:3.536629 +step:6348 train loss:3.637446 +step:6349 train loss:3.610826 +step:6350 train loss:3.588088 +step:6351 train loss:3.584061 +step:6352 train loss:3.597155 +step:6353 train loss:3.622250 +step:6354 train loss:3.634846 +step:6355 train loss:3.640877 +step:6356 train loss:3.655121 +step:6357 train loss:3.507817 +step:6358 train loss:3.601471 +step:6359 train loss:3.657339 +step:6360 train loss:3.567763 +step:6361 train loss:3.573773 +step:6362 train loss:3.609814 +step:6363 train loss:3.592568 +step:6364 train loss:3.575410 +step:6365 train loss:3.647964 +step:6366 train loss:3.658970 +step:6367 train loss:3.581012 +step:6368 train loss:3.632522 +step:6369 train loss:3.597506 +step:6370 train loss:3.648523 +step:6371 train loss:3.563220 +step:6372 train loss:3.594565 +step:6373 train loss:3.619913 +step:6374 train loss:3.649871 +step:6375 train loss:3.605685 +step:6376 train loss:3.632777 +step:6377 train loss:3.626460 +step:6378 train loss:3.579673 +step:6379 train loss:3.620152 +step:6380 train loss:3.659922 +step:6381 train loss:3.629257 +step:6382 train loss:3.580118 +step:6383 train loss:3.648319 +step:6384 train loss:3.623018 +step:6385 train loss:3.600398 +step:6386 train loss:3.636417 +step:6387 train loss:3.612755 +step:6388 train loss:3.654980 +step:6389 train loss:3.663854 +step:6390 train loss:3.610997 +step:6391 train loss:3.596235 +step:6392 train loss:3.582322 +step:6393 train loss:3.638611 +step:6394 train loss:3.625783 +step:6395 train loss:3.807303 +step:6396 train loss:3.627884 +step:6397 train loss:3.574972 +step:6398 train loss:3.642044 +step:6399 train loss:3.584147 +step:6400 train loss:3.660919 +step:6401 train loss:3.690929 +step:6402 train loss:3.627048 +step:6403 train loss:3.622193 +step:6404 train loss:3.598296 +step:6405 train loss:3.624987 +step:6406 train loss:3.629029 +step:6407 train loss:3.689222 +step:6408 train loss:3.581607 +step:6409 train loss:3.567787 +step:6410 train loss:3.695443 +step:6411 train loss:3.626184 +step:6412 train loss:3.630860 +step:6413 train loss:3.635958 +step:6414 train loss:3.582879 
+step:6415 train loss:3.634954 +step:6416 train loss:3.612030 +step:6417 train loss:3.582621 +step:6418 train loss:3.575008 +step:6419 train loss:3.661207 +step:6420 train loss:3.586475 +step:6421 train loss:3.610543 +step:6422 train loss:3.596723 +step:6423 train loss:3.612816 +step:6424 train loss:3.631886 +step:6425 train loss:3.628306 +step:6426 train loss:3.666754 +step:6427 train loss:3.629809 +step:6428 train loss:3.668744 +step:6429 train loss:3.628172 +step:6430 train loss:3.609634 +step:6431 train loss:3.581206 +step:6432 train loss:3.615429 +step:6433 train loss:3.630408 +step:6434 train loss:3.512775 +step:6435 train loss:3.693729 +step:6436 train loss:3.629275 +step:6437 train loss:3.592917 +step:6438 train loss:3.621270 +step:6439 train loss:3.601326 +step:6440 train loss:3.612107 +step:6441 train loss:3.603372 +step:6442 train loss:3.549386 +step:6443 train loss:3.597180 +step:6444 train loss:3.745081 +step:6445 train loss:3.646054 +step:6446 train loss:3.652584 +step:6447 train loss:3.631840 +step:6448 train loss:3.577031 +step:6449 train loss:3.600587 +step:6450 train loss:3.581839 +step:6451 train loss:3.570727 +step:6452 train loss:3.573963 +step:6453 train loss:3.621275 +step:6454 train loss:3.639348 +step:6455 train loss:3.632308 +step:6456 train loss:3.645788 +step:6457 train loss:3.625803 +step:6458 train loss:3.597717 +step:6459 train loss:3.582322 +step:6460 train loss:3.589475 +step:6461 train loss:3.585748 +step:6462 train loss:3.578544 +step:6463 train loss:3.678628 +step:6464 train loss:3.586073 +step:6465 train loss:3.629511 +step:6466 train loss:3.644572 +step:6467 train loss:3.568438 +step:6468 train loss:3.647910 +step:6469 train loss:3.556891 +step:6470 train loss:3.675846 +step:6471 train loss:3.584362 +step:6472 train loss:3.741623 +step:6473 train loss:3.629361 +step:6474 train loss:3.658628 +step:6475 train loss:3.604473 +step:6476 train loss:3.675330 +step:6477 train loss:3.604094 +step:6478 train loss:3.734474 +step:6479 train loss:3.652555 +step:6480 train loss:3.590561 +step:6481 train loss:3.640149 +step:6482 train loss:3.588744 +step:6483 train loss:3.644347 +step:6484 train loss:3.602625 +step:6485 train loss:3.662300 +step:6486 train loss:3.592753 +step:6487 train loss:3.595478 +step:6488 train loss:3.588797 +step:6489 train loss:3.592437 +step:6490 train loss:3.618446 +step:6491 train loss:3.581582 +step:6492 train loss:3.690429 +step:6493 train loss:3.591150 +step:6494 train loss:3.596858 +step:6495 train loss:3.594059 +step:6496 train loss:3.631301 +step:6497 train loss:3.649812 +step:6498 train loss:3.757775 +step:6499 train loss:3.729644 +step:6500 validation loss:3.557725 total_sharp:3.3915e-05 L1_sharp:5.6592e-06 L2_sharp:6.2693e-07 L3_sharp:2.0968e-06 L4_sharp:2.7554e-06 L5_sharp:1.8865e-06 L6_sharp:1.7387e-06 L7_sharp:2.2262e-06 L8_sharp:5.6129e-06 L9_sharp:7.3763e-06 L10_sharp:1.2862e-05 L11_sharp:5.8071e-06 L12_sharp:7.0678e-05 total_fnorm:4.0752e+01 total_l1_linf:3.5046e+05 total_spectral:4.0752e+01 L1_fnorm:9.4231e+00 L2_fnorm:7.7291e+00 L3_fnorm:8.2958e+00 L4_fnorm:8.9549e+00 L5_fnorm:9.4720e+00 L6_fnorm:9.4979e+00 L7_fnorm:9.9734e+00 L8_fnorm:9.9784e+00 L9_fnorm:9.9097e+00 L10_fnorm:9.8135e+00 L11_fnorm:1.0030e+01 L12_fnorm:9.3134e+00 L1_l1linf:9.0784e+00 L2_l1linf:5.9893e+00 L3_l1linf:7.1263e+00 L4_l1linf:8.0669e+00 L5_l1linf:9.5236e+00 L6_l1linf:9.5854e+00 L7_l1linf:1.0962e+01 L8_l1linf:1.0524e+01 L9_l1linf:1.0632e+01 L10_l1linf:1.2076e+01 L11_l1linf:1.1956e+01 L12_l1linf:1.3217e+01 L1_spectral:1.2560e+00 L2_spectral:1.1429e+00 
L3_spectral:1.0508e+00 L4_spectral:1.0329e+00 L5_spectral:1.0454e+00 L6_spectral:1.0771e+00 L7_spectral:1.1957e+00 L8_spectral:1.4269e+00 L9_spectral:1.5855e+00 L10_spectral:1.7402e+00 L11_spectral:1.5163e+00 L12_spectral:1.8708e+00 ip_v_neg_g:2.5757e-02 cos_v_neg_g:1.5655e-03 v_norm:4.0752e+01 g_norm:4.0373e-01 hv_norm:3.6562e-01 cos_v_hv:3.7802e-03 hg_norm:3.6609e+00 cos_g_hg:6.2170e-01 v_par:1.2283e-02 v_perp:4.0752e+01 L1_cos_v_neg_g:1.1446e-02 L1_v_norm:9.4231e+00 L2_cos_v_neg_g:4.4809e-03 L2_v_norm:7.7291e+00 L3_cos_v_neg_g:4.6510e-03 L3_v_norm:8.2958e+00 L4_cos_v_neg_g:5.6349e-03 L4_v_norm:8.9549e+00 L5_cos_v_neg_g:6.5862e-03 L5_v_norm:9.4720e+00 L6_cos_v_neg_g:5.5285e-03 L6_v_norm:9.4979e+00 L7_cos_v_neg_g:7.4772e-03 L7_v_norm:9.9734e+00 L8_cos_v_neg_g:9.1969e-03 L8_v_norm:9.9784e+00 L9_cos_v_neg_g:1.2928e-02 L9_v_norm:9.9097e+00 L10_cos_v_neg_g:1.6791e-02 L10_v_norm:9.8135e+00 L11_cos_v_neg_g:1.5556e-02 L11_v_norm:1.0030e+01 L12_cos_v_neg_g:3.1061e-02 L12_v_norm:9.3134e+00 +step:6500 train loss:3.576529 +step:6501 train loss:3.589609 +step:6502 train loss:3.606670 +step:6503 train loss:3.666075 +step:6504 train loss:3.614594 +step:6505 train loss:3.627069 +step:6506 train loss:3.580556 +step:6507 train loss:3.651994 +step:6508 train loss:3.618496 +step:6509 train loss:3.599447 +step:6510 train loss:3.606001 +step:6511 train loss:3.625410 +step:6512 train loss:3.565722 +step:6513 train loss:3.634015 +step:6514 train loss:3.504482 +step:6515 train loss:3.601324 +step:6516 train loss:3.652560 +step:6517 train loss:3.562649 +step:6518 train loss:3.601795 +step:6519 train loss:3.592654 +step:6520 train loss:3.684619 +step:6521 train loss:3.659050 +step:6522 train loss:3.674170 +step:6523 train loss:3.562894 +step:6524 train loss:3.649957 +step:6525 train loss:3.633514 +step:6526 train loss:3.573784 +step:6527 train loss:3.622653 +step:6528 train loss:3.648313 +step:6529 train loss:3.672717 +step:6530 train loss:3.578724 +step:6531 train loss:3.657789 +step:6532 train loss:3.584999 +step:6533 train loss:3.625692 +step:6534 train loss:3.628933 +step:6535 train loss:3.606910 +step:6536 train loss:3.748728 +step:6537 train loss:3.609849 +step:6538 train loss:3.661765 +step:6539 train loss:3.589874 +step:6540 train loss:3.701162 +step:6541 train loss:3.684734 +step:6542 train loss:3.639510 +step:6543 train loss:3.588157 +step:6544 train loss:3.575674 +step:6545 train loss:3.564124 +step:6546 train loss:3.624107 +step:6547 train loss:3.675491 +step:6548 train loss:3.622554 +step:6549 train loss:3.638356 +step:6550 train loss:3.749938 +step:6551 train loss:3.621343 +step:6552 train loss:3.619817 +step:6553 train loss:3.656035 +step:6554 train loss:3.545249 +step:6555 train loss:3.631568 +step:6556 train loss:3.504610 +step:6557 train loss:3.846020 +step:6558 train loss:3.685838 +step:6559 train loss:3.596328 +step:6560 train loss:3.631590 +step:6561 train loss:3.607942 +step:6562 train loss:3.620995 +step:6563 train loss:3.514526 +step:6564 train loss:3.618721 +step:6565 train loss:3.529004 +step:6566 train loss:3.639974 +step:6567 train loss:3.610218 +step:6568 train loss:3.654260 +step:6569 train loss:3.599695 +step:6570 train loss:3.637645 +step:6571 train loss:3.566885 +step:6572 train loss:3.643017 +step:6573 train loss:3.659086 +step:6574 train loss:3.639308 +step:6575 train loss:3.584109 +step:6576 train loss:3.574506 +step:6577 train loss:3.644603 +step:6578 train loss:3.515795 +step:6579 train loss:3.617699 +step:6580 train loss:3.572864 +step:6581 train loss:3.583416 +step:6582 
train loss:3.564399 +step:6583 train loss:3.660874 +step:6584 train loss:3.593331 +step:6585 train loss:3.630504 +step:6586 train loss:3.634781 +step:6587 train loss:3.644041 +step:6588 train loss:3.609003 +step:6589 train loss:3.632942 +step:6590 train loss:3.581096 +step:6591 train loss:3.635866 +step:6592 train loss:3.575568 +step:6593 train loss:3.584102 +step:6594 train loss:3.608434 +step:6595 train loss:3.591992 +step:6596 train loss:3.589540 +step:6597 train loss:3.613634 +step:6598 train loss:3.653444 +step:6599 train loss:3.550613 +step:6600 train loss:3.602224 +step:6601 train loss:3.663934 +step:6602 train loss:3.583832 +step:6603 train loss:3.613741 +step:6604 train loss:3.626259 +step:6605 train loss:3.607407 +step:6606 train loss:3.665345 +step:6607 train loss:3.584138 +step:6608 train loss:3.597204 +step:6609 train loss:3.568098 +step:6610 train loss:3.675754 +step:6611 train loss:3.601341 +step:6612 train loss:3.643043 +step:6613 train loss:3.561742 +step:6614 train loss:3.592811 +step:6615 train loss:3.591148 +step:6616 train loss:3.570560 +step:6617 train loss:3.610701 +step:6618 train loss:3.597574 +step:6619 train loss:3.573306 +step:6620 train loss:3.676523 +step:6621 train loss:3.554318 +step:6622 train loss:3.626621 +step:6623 train loss:3.556410 +step:6624 train loss:3.629198 +step:6625 train loss:3.670920 +step:6626 train loss:3.639440 +step:6627 train loss:3.588017 +step:6628 train loss:3.646412 +step:6629 train loss:3.549531 +step:6630 train loss:3.587864 +step:6631 train loss:3.620094 +step:6632 train loss:3.659504 +step:6633 train loss:3.609464 +step:6634 train loss:3.673184 +step:6635 train loss:3.572503 +step:6636 train loss:3.610598 +step:6637 train loss:3.578762 +step:6638 train loss:3.581138 +step:6639 train loss:3.592038 +step:6640 train loss:3.577038 +step:6641 train loss:3.589143 +step:6642 train loss:3.593016 +step:6643 train loss:3.674344 +step:6644 train loss:3.680559 +step:6645 train loss:3.554771 +step:6646 train loss:3.642589 +step:6647 train loss:3.599869 +step:6648 train loss:3.701754 +step:6649 train loss:3.633201 +step:6650 train loss:3.587128 +step:6651 train loss:3.625988 +step:6652 train loss:3.641279 +step:6653 train loss:3.583472 +step:6654 train loss:3.581167 +step:6655 train loss:3.618272 +step:6656 train loss:3.592188 +step:6657 train loss:3.614178 +step:6658 train loss:3.597185 +step:6659 train loss:3.750310 +step:6660 train loss:3.645982 +step:6661 train loss:3.572386 +step:6662 train loss:3.608495 +step:6663 train loss:3.540896 +step:6664 train loss:3.621958 +step:6665 train loss:3.630531 +step:6666 train loss:3.650539 +step:6667 train loss:3.560656 +step:6668 train loss:3.693464 +step:6669 train loss:3.572201 +step:6670 train loss:3.584181 +step:6671 train loss:3.666175 +step:6672 train loss:3.620687 +step:6673 train loss:3.626039 +step:6674 train loss:3.600211 +step:6675 train loss:3.621117 +step:6676 train loss:3.627317 +step:6677 train loss:3.583794 +step:6678 train loss:3.657669 +step:6679 train loss:3.688212 +step:6680 train loss:3.690821 +step:6681 train loss:3.643798 +step:6682 train loss:3.583626 +step:6683 train loss:3.610492 +step:6684 train loss:3.621677 +step:6685 train loss:3.632349 +step:6686 train loss:3.567434 +step:6687 train loss:3.584412 +step:6688 train loss:3.629975 +step:6689 train loss:3.637527 +step:6690 train loss:3.611943 +step:6691 train loss:3.645132 +step:6692 train loss:3.652272 +step:6693 train loss:3.684845 +step:6694 train loss:3.638548 +step:6695 train loss:3.610714 +step:6696 train loss:3.549955 
+step:6697 train loss:3.763126 +step:6698 train loss:3.610878 +step:6699 train loss:3.607547 +step:6700 train loss:3.619117 +step:6701 train loss:3.678524 +step:6702 train loss:3.566913 +step:6703 train loss:3.616094 +step:6704 train loss:3.599932 +step:6705 train loss:3.608560 +step:6706 train loss:3.586773 +step:6707 train loss:3.662208 +step:6708 train loss:3.614675 +step:6709 train loss:3.645781 +step:6710 train loss:3.632514 +step:6711 train loss:3.586108 +step:6712 train loss:3.571323 +step:6713 train loss:3.596775 +step:6714 train loss:3.642131 +step:6715 train loss:3.583509 +step:6716 train loss:3.662194 +step:6717 train loss:3.609906 +step:6718 train loss:3.627588 +step:6719 train loss:3.662517 +step:6720 train loss:3.591920 +step:6721 train loss:3.608421 +step:6722 train loss:3.585703 +step:6723 train loss:3.714710 +step:6724 train loss:3.568676 +step:6725 train loss:3.631210 +step:6726 train loss:3.587011 +step:6727 train loss:3.650866 +step:6728 train loss:3.742312 +step:6729 train loss:3.607556 +step:6730 train loss:3.605194 +step:6731 train loss:3.646312 +step:6732 train loss:3.520863 +step:6733 train loss:3.658243 +step:6734 train loss:3.584358 +step:6735 train loss:3.612531 +step:6736 train loss:3.613715 +step:6737 train loss:3.606355 +step:6738 train loss:3.643111 +step:6739 train loss:3.594595 +step:6740 train loss:3.548211 +step:6741 train loss:3.657819 +step:6742 train loss:3.615479 +step:6743 train loss:3.619071 +step:6744 train loss:3.513913 +step:6745 train loss:3.669227 +step:6746 train loss:3.599738 +step:6747 train loss:3.591866 +step:6748 train loss:3.669414 +step:6749 train loss:3.645351 +step:6750 validation loss:3.545364 +step:6750 train loss:3.565451 +step:6751 train loss:3.603078 +step:6752 train loss:3.604266 +step:6753 train loss:3.640302 +step:6754 train loss:3.618819 +step:6755 train loss:3.629339 +step:6756 train loss:3.571401 +step:6757 train loss:3.543949 +step:6758 train loss:3.722808 +step:6759 train loss:3.609113 +step:6760 train loss:3.665028 +step:6761 train loss:3.599744 +step:6762 train loss:3.618565 +step:6763 train loss:3.519576 +step:6764 train loss:3.601418 +step:6765 train loss:3.599438 +step:6766 train loss:3.597169 +step:6767 train loss:3.550695 +step:6768 train loss:3.558025 +step:6769 train loss:3.519925 +step:6770 train loss:3.605080 +step:6771 train loss:3.605561 +step:6772 train loss:3.615898 +step:6773 train loss:3.594623 +step:6774 train loss:3.617105 +step:6775 train loss:3.653223 +step:6776 train loss:3.609607 +step:6777 train loss:3.688206 +step:6778 train loss:3.573131 +step:6779 train loss:3.624923 +step:6780 train loss:3.556894 +step:6781 train loss:3.623773 +step:6782 train loss:3.533895 +step:6783 train loss:3.569899 +step:6784 train loss:3.598138 +step:6785 train loss:3.587771 +step:6786 train loss:3.600536 +step:6787 train loss:3.674818 +step:6788 train loss:3.609107 +step:6789 train loss:3.620908 +step:6790 train loss:3.617676 +step:6791 train loss:3.628976 +step:6792 train loss:3.628342 +step:6793 train loss:3.625487 +step:6794 train loss:3.597810 +step:6795 train loss:3.595712 +step:6796 train loss:3.597173 +step:6797 train loss:3.700134 +step:6798 train loss:3.598592 +step:6799 train loss:3.595052 +step:6800 train loss:3.558231 +step:6801 train loss:3.692327 +step:6802 train loss:3.640260 +step:6803 train loss:3.630439 +step:6804 train loss:3.655653 +step:6805 train loss:3.618227 +step:6806 train loss:3.553153 +step:6807 train loss:3.609960 +step:6808 train loss:3.594918 +step:6809 train loss:3.621958 +step:6810 
train loss:3.747331 +step:6811 train loss:3.649077 +step:6812 train loss:3.623892 +step:6813 train loss:3.633946 +step:6814 train loss:3.640497 +step:6815 train loss:3.689421 +step:6816 train loss:3.604520 +step:6817 train loss:3.626333 +step:6818 train loss:3.608546 +step:6819 train loss:3.589801 +step:6820 train loss:3.618415 +step:6821 train loss:3.580532 +step:6822 train loss:3.685983 +step:6823 train loss:3.662823 +step:6824 train loss:3.647192 +step:6825 train loss:3.589603 +step:6826 train loss:3.632367 +step:6827 train loss:3.621037 +step:6828 train loss:3.639484 +step:6829 train loss:3.622701 +step:6830 train loss:3.591424 +step:6831 train loss:3.552046 +step:6832 train loss:3.538608 +step:6833 train loss:3.554245 +step:6834 train loss:3.638526 +step:6835 train loss:3.612153 +step:6836 train loss:3.530972 +step:6837 train loss:3.599329 +step:6838 train loss:3.656027 +step:6839 train loss:3.740633 +step:6840 train loss:3.613626 +step:6841 train loss:3.572466 +step:6842 train loss:3.618492 +step:6843 train loss:3.724828 +step:6844 train loss:3.605990 +step:6845 train loss:3.660072 +step:6846 train loss:3.719380 +step:6847 train loss:3.650947 +step:6848 train loss:3.642343 +step:6849 train loss:3.671394 +step:6850 train loss:3.642153 +step:6851 train loss:3.571342 +step:6852 train loss:3.558971 +step:6853 train loss:3.549080 +step:6854 train loss:3.629368 +step:6855 train loss:3.599019 +step:6856 train loss:3.581562 +step:6857 train loss:3.637342 +step:6858 train loss:3.663910 +step:6859 train loss:3.573736 +step:6860 train loss:3.687000 +step:6861 train loss:3.706491 +step:6862 train loss:3.620077 +step:6863 train loss:3.618269 +step:6864 train loss:3.562834 +step:6865 train loss:3.629652 +step:6866 train loss:3.556982 +step:6867 train loss:3.733462 +step:6868 train loss:3.612771 +step:6869 train loss:3.648350 +step:6870 train loss:3.676996 +step:6871 train loss:3.594461 +step:6872 train loss:3.597687 +step:6873 train loss:3.614600 +step:6874 train loss:3.570146 +step:6875 train loss:3.578222 +step:6876 train loss:3.606576 +step:6877 train loss:3.645824 +step:6878 train loss:3.557098 +step:6879 train loss:3.607791 +step:6880 train loss:3.614495 +step:6881 train loss:3.578885 +step:6882 train loss:3.645227 +step:6883 train loss:3.629330 +step:6884 train loss:3.860503 +step:6885 train loss:3.631035 +step:6886 train loss:3.616266 +step:6887 train loss:3.548216 +step:6888 train loss:3.651409 +step:6889 train loss:3.533762 +step:6890 train loss:3.644678 +step:6891 train loss:3.648544 +step:6892 train loss:3.750201 +step:6893 train loss:3.579555 +step:6894 train loss:3.643564 +step:6895 train loss:3.641765 +step:6896 train loss:3.615382 +step:6897 train loss:3.571337 +step:6898 train loss:3.571262 +step:6899 train loss:3.656046 +step:6900 train loss:3.629384 +step:6901 train loss:3.580129 +step:6902 train loss:3.513842 +step:6903 train loss:3.562576 +step:6904 train loss:3.672498 +step:6905 train loss:3.703476 +step:6906 train loss:3.627982 +step:6907 train loss:3.642909 +step:6908 train loss:3.678682 +step:6909 train loss:3.667989 +step:6910 train loss:3.550153 +step:6911 train loss:3.673383 +step:6912 train loss:3.568577 +step:6913 train loss:3.605567 +step:6914 train loss:3.562606 +step:6915 train loss:3.590344 +step:6916 train loss:3.569103 +step:6917 train loss:3.692324 +step:6918 train loss:3.636527 +step:6919 train loss:3.630233 +step:6920 train loss:3.616489 +step:6921 train loss:3.683853 +step:6922 train loss:3.676532 +step:6923 train loss:3.540038 +step:6924 train loss:3.617188 
+step:6925 train loss:3.595110 +step:6926 train loss:3.629424 +step:6927 train loss:3.681262 +step:6928 train loss:3.572641 +step:6929 train loss:3.586985 +step:6930 train loss:3.616666 +step:6931 train loss:3.616073 +step:6932 train loss:3.844652 +step:6933 train loss:3.681607 +step:6934 train loss:3.620660 +step:6935 train loss:3.603113 +step:6936 train loss:3.644086 +step:6937 train loss:3.585911 +step:6938 train loss:3.651232 +step:6939 train loss:3.584575 +step:6940 train loss:3.637857 +step:6941 train loss:3.560096 +step:6942 train loss:3.640676 +step:6943 train loss:3.532995 +step:6944 train loss:3.630258 +step:6945 train loss:3.569012 +step:6946 train loss:3.654884 +step:6947 train loss:3.582512 +step:6948 train loss:3.575449 +step:6949 train loss:3.652292 +step:6950 train loss:3.643652 +step:6951 train loss:3.647669 +step:6952 train loss:3.577347 +step:6953 train loss:3.622746 +step:6954 train loss:3.686589 +step:6955 train loss:3.596648 +step:6956 train loss:3.638398 +step:6957 train loss:3.625595 +step:6958 train loss:3.587991 +step:6959 train loss:3.626886 +step:6960 train loss:3.595343 +step:6961 train loss:3.604465 +step:6962 train loss:3.580443 +step:6963 train loss:3.552464 +step:6964 train loss:3.596846 +step:6965 train loss:3.587498 +step:6966 train loss:3.636678 +step:6967 train loss:3.571602 +step:6968 train loss:3.611648 +step:6969 train loss:3.628765 +step:6970 train loss:3.606130 +step:6971 train loss:3.665175 +step:6972 train loss:3.614621 +step:6973 train loss:3.577044 +step:6974 train loss:3.700499 +step:6975 train loss:3.606764 +step:6976 train loss:3.579900 +step:6977 train loss:3.615556 +step:6978 train loss:3.605214 +step:6979 train loss:3.617770 +step:6980 train loss:3.594213 +step:6981 train loss:3.657458 +step:6982 train loss:3.607687 +step:6983 train loss:3.596717 +step:6984 train loss:3.713862 +step:6985 train loss:3.564066 +step:6986 train loss:3.554885 +step:6987 train loss:3.604501 +step:6988 train loss:3.606487 +step:6989 train loss:3.754324 +step:6990 train loss:3.618208 +step:6991 train loss:3.577140 +step:6992 train loss:3.623729 +step:6993 train loss:3.691220 +step:6994 train loss:3.636605 +step:6995 train loss:3.587543 +step:6996 train loss:3.599736 +step:6997 train loss:3.671157 +step:6998 train loss:3.569828 +step:6999 train loss:3.622044 +step:7000 validation loss:3.539756 total_sharp:2.9151e-05 L1_sharp:6.7302e-06 L2_sharp:5.2158e-06 L3_sharp:3.7006e-06 L4_sharp:3.5482e-06 L5_sharp:1.8091e-06 L6_sharp:1.4092e-06 L7_sharp:1.7429e-06 L8_sharp:3.6565e-06 L9_sharp:4.9686e-06 L10_sharp:8.2867e-06 L11_sharp:4.6008e-06 L12_sharp:6.7290e-05 total_fnorm:4.0875e+01 total_l1_linf:3.5213e+05 total_spectral:4.0875e+01 L1_fnorm:9.6834e+00 L2_fnorm:8.1017e+00 L3_fnorm:8.4912e+00 L4_fnorm:8.9464e+00 L5_fnorm:9.4538e+00 L6_fnorm:9.4171e+00 L7_fnorm:9.9532e+00 L8_fnorm:9.9965e+00 L9_fnorm:9.9949e+00 L10_fnorm:9.8345e+00 L11_fnorm:1.0018e+01 L12_fnorm:9.2968e+00 L1_l1linf:9.0468e+00 L2_l1linf:5.9400e+00 L3_l1linf:7.0657e+00 L4_l1linf:7.7792e+00 L5_l1linf:9.1042e+00 L6_l1linf:8.8247e+00 L7_l1linf:9.9169e+00 L8_l1linf:1.0397e+01 L9_l1linf:1.0854e+01 L10_l1linf:1.1823e+01 L11_l1linf:1.2248e+01 L12_l1linf:1.3950e+01 L1_spectral:1.4471e+00 L2_spectral:1.5640e+00 L3_spectral:1.1202e+00 L4_spectral:9.8745e-01 L5_spectral:1.0248e+00 L6_spectral:1.0381e+00 L7_spectral:1.0355e+00 L8_spectral:1.2719e+00 L9_spectral:1.4285e+00 L10_spectral:1.5966e+00 L11_spectral:1.4945e+00 L12_spectral:1.8858e+00 ip_v_neg_g:2.1263e-02 cos_v_neg_g:1.4612e-03 v_norm:4.0875e+01 
g_norm:3.5601e-01 hv_norm:3.5957e-01 cos_v_hv:3.3137e-03 hg_norm:2.9735e+00 cos_g_hg:5.5985e-01 v_par:1.4416e-02 v_perp:4.0875e+01 L1_cos_v_neg_g:1.0236e-02 L1_v_norm:9.6834e+00 L2_cos_v_neg_g:5.5916e-03 L2_v_norm:8.1017e+00 L3_cos_v_neg_g:5.3858e-03 L3_v_norm:8.4912e+00 L4_cos_v_neg_g:5.5839e-03 L4_v_norm:8.9464e+00 L5_cos_v_neg_g:4.7497e-03 L5_v_norm:9.4538e+00 L6_cos_v_neg_g:4.7747e-03 L6_v_norm:9.4171e+00 L7_cos_v_neg_g:5.0106e-03 L7_v_norm:9.9532e+00 L8_cos_v_neg_g:6.9081e-03 L8_v_norm:9.9965e+00 L9_cos_v_neg_g:8.4124e-03 L9_v_norm:9.9949e+00 L10_cos_v_neg_g:1.0721e-02 L10_v_norm:9.8345e+00 L11_cos_v_neg_g:1.0861e-02 L11_v_norm:1.0018e+01 L12_cos_v_neg_g:2.7742e-02 L12_v_norm:9.2968e+00 +step:7000 train loss:3.700108 +step:7001 train loss:3.611327 +step:7002 train loss:3.588334 +step:7003 train loss:3.612869 +step:7004 train loss:3.608249 +step:7005 train loss:3.595581 +step:7006 train loss:3.598297 +step:7007 train loss:3.652880 +step:7008 train loss:3.596769 +step:7009 train loss:3.649125 +step:7010 train loss:3.565859 +step:7011 train loss:3.625463 +step:7012 train loss:3.599302 +step:7013 train loss:3.670012 +step:7014 train loss:3.575524 +step:7015 train loss:3.636562 +step:7016 train loss:3.626325 +step:7017 train loss:3.591723 +step:7018 train loss:3.667866 +step:7019 train loss:3.599503 +step:7020 train loss:3.642108 +step:7021 train loss:3.584770 +step:7022 train loss:3.597828 +step:7023 train loss:3.620316 +step:7024 train loss:3.581676 +step:7025 train loss:3.631349 +step:7026 train loss:3.586867 +step:7027 train loss:3.648698 +step:7028 train loss:3.571947 +step:7029 train loss:3.564998 +step:7030 train loss:3.569886 +step:7031 train loss:3.617676 +step:7032 train loss:3.625035 +step:7033 train loss:3.600832 +step:7034 train loss:3.623029 +step:7035 train loss:3.674809 +step:7036 train loss:3.593966 +step:7037 train loss:3.621650 +step:7038 train loss:3.579109 +step:7039 train loss:3.632486 +step:7040 train loss:3.550168 +step:7041 train loss:3.643351 +step:7042 train loss:3.578039 +step:7043 train loss:3.549541 +step:7044 train loss:3.595125 +step:7045 train loss:3.593152 +step:7046 train loss:3.585093 +step:7047 train loss:3.630407 +step:7048 train loss:3.577909 +step:7049 train loss:3.587279 +step:7050 train loss:3.607388 +step:7051 train loss:3.628865 +step:7052 train loss:3.626694 +step:7053 train loss:3.589052 +step:7054 train loss:3.571144 +step:7055 train loss:3.636492 +step:7056 train loss:3.639431 +step:7057 train loss:3.563097 +step:7058 train loss:3.681563 +step:7059 train loss:3.584669 +step:7060 train loss:3.600059 +step:7061 train loss:3.573831 +step:7062 train loss:3.593867 +step:7063 train loss:3.657187 +step:7064 train loss:3.578637 +step:7065 train loss:3.628404 +step:7066 train loss:3.587123 +step:7067 train loss:3.623308 +step:7068 train loss:3.598799 +step:7069 train loss:3.563537 +step:7070 train loss:3.591260 +step:7071 train loss:3.557389 +step:7072 train loss:3.563493 +step:7073 train loss:3.554848 +step:7074 train loss:3.548531 +step:7075 train loss:3.569999 +step:7076 train loss:3.578495 +step:7077 train loss:3.591323 +step:7078 train loss:3.634066 +step:7079 train loss:3.645958 +step:7080 train loss:3.594614 +step:7081 train loss:3.613718 +step:7082 train loss:3.581536 +step:7083 train loss:3.608493 +step:7084 train loss:3.603427 +step:7085 train loss:3.565212 +step:7086 train loss:3.601104 +step:7087 train loss:3.577109 +step:7088 train loss:3.697933 +step:7089 train loss:3.597769 +step:7090 train loss:3.559086 +step:7091 train loss:3.575595 
+step:7092 train loss:3.553856 +step:7093 train loss:3.648960 +step:7094 train loss:3.566494 +step:7095 train loss:3.583306 +step:7096 train loss:3.604538 +step:7097 train loss:3.589175 +step:7098 train loss:3.614342 +step:7099 train loss:3.572284 +step:7100 train loss:3.601792 +step:7101 train loss:3.673626 +step:7102 train loss:3.562143 +step:7103 train loss:3.589803 +step:7104 train loss:3.614947 +step:7105 train loss:3.597870 +step:7106 train loss:3.581628 +step:7107 train loss:3.614403 +step:7108 train loss:3.685781 +step:7109 train loss:3.614120 +step:7110 train loss:3.641073 +step:7111 train loss:3.620547 +step:7112 train loss:3.607684 +step:7113 train loss:3.605868 +step:7114 train loss:3.625382 +step:7115 train loss:3.663432 +step:7116 train loss:3.592241 +step:7117 train loss:3.630486 +step:7118 train loss:3.641792 +step:7119 train loss:3.602501 +step:7120 train loss:3.664965 +step:7121 train loss:3.577792 +step:7122 train loss:3.578630 +step:7123 train loss:3.519689 +step:7124 train loss:3.674076 +step:7125 train loss:3.529644 +step:7126 train loss:3.697455 +step:7127 train loss:3.658139 +step:7128 train loss:3.596029 +step:7129 train loss:3.605120 +step:7130 train loss:3.593131 +step:7131 train loss:3.535106 +step:7132 train loss:3.574271 +step:7133 train loss:3.620004 +step:7134 train loss:3.548494 +step:7135 train loss:3.607491 +step:7136 train loss:3.591594 +step:7137 train loss:3.569409 +step:7138 train loss:3.558012 +step:7139 train loss:3.567576 +step:7140 train loss:3.599105 +step:7141 train loss:3.598750 +step:7142 train loss:3.592153 +step:7143 train loss:3.627052 +step:7144 train loss:3.578412 +step:7145 train loss:3.593043 +step:7146 train loss:3.599115 +step:7147 train loss:3.625734 +step:7148 train loss:3.625541 +step:7149 train loss:3.630281 +step:7150 train loss:3.607998 +step:7151 train loss:3.572108 +step:7152 train loss:3.546578 +step:7153 train loss:3.578923 +step:7154 train loss:3.598113 +step:7155 train loss:3.617435 +step:7156 train loss:3.587424 +step:7157 train loss:3.605745 +step:7158 train loss:3.560579 +step:7159 train loss:3.613656 +step:7160 train loss:3.623968 +step:7161 train loss:3.573295 +step:7162 train loss:3.623437 +step:7163 train loss:3.557835 +step:7164 train loss:3.595699 +step:7165 train loss:3.597260 +step:7166 train loss:3.652255 +step:7167 train loss:3.631300 +step:7168 train loss:3.608272 +step:7169 train loss:3.587345 +step:7170 train loss:3.615984 +step:7171 train loss:3.564533 +step:7172 train loss:3.730119 +step:7173 train loss:3.573519 +step:7174 train loss:3.612437 +step:7175 train loss:3.593088 +step:7176 train loss:3.599566 +step:7177 train loss:3.613967 +step:7178 train loss:3.611690 +step:7179 train loss:3.599107 +step:7180 train loss:3.601205 +step:7181 train loss:3.628319 +step:7182 train loss:3.581878 +step:7183 train loss:3.652956 +step:7184 train loss:3.743174 +step:7185 train loss:3.657616 +step:7186 train loss:3.594006 +step:7187 train loss:3.604129 +step:7188 train loss:3.592901 +step:7189 train loss:3.592930 +step:7190 train loss:3.595867 +step:7191 train loss:3.586299 +step:7192 train loss:3.622550 +step:7193 train loss:3.537806 +step:7194 train loss:3.602236 +step:7195 train loss:3.577895 +step:7196 train loss:3.627895 +step:7197 train loss:3.605111 +step:7198 train loss:3.662280 +step:7199 train loss:3.617766 +step:7200 train loss:3.614406 +step:7201 train loss:3.624705 +step:7202 train loss:3.600969 +step:7203 train loss:3.615830 +step:7204 train loss:3.584182 +step:7205 train loss:3.541049 +step:7206 train 
loss:3.569866 +step:7207 train loss:3.746236 +step:7208 train loss:3.578456 +step:7209 train loss:3.662361 +step:7210 train loss:3.595809 +step:7211 train loss:3.632321 +step:7212 train loss:3.710484 +step:7213 train loss:3.560699 +step:7214 train loss:3.629159 +step:7215 train loss:3.593976 +step:7216 train loss:3.648631 +step:7217 train loss:3.603127 +step:7218 train loss:3.692428 +step:7219 train loss:3.602005 +step:7220 train loss:3.678529 +step:7221 train loss:3.557580 +step:7222 train loss:3.643048 +step:7223 train loss:3.558379 +step:7224 train loss:3.623863 +step:7225 train loss:3.599260 +step:7226 train loss:3.567332 +step:7227 train loss:3.588547 +step:7228 train loss:3.574587 +step:7229 train loss:3.578798 +step:7230 train loss:3.561996 +step:7231 train loss:3.695696 +step:7232 train loss:3.566932 +step:7233 train loss:3.637281 +step:7234 train loss:3.622325 +step:7235 train loss:3.597579 +step:7236 train loss:3.635295 +step:7237 train loss:3.585207 +step:7238 train loss:3.622964 +step:7239 train loss:3.579104 +step:7240 train loss:3.576146 +step:7241 train loss:3.589448 +step:7242 train loss:3.570921 +step:7243 train loss:3.616477 +step:7244 train loss:3.589468 +step:7245 train loss:3.590104 +step:7246 train loss:3.635029 +step:7247 train loss:3.590140 +step:7248 train loss:3.627111 +step:7249 train loss:3.577103 +step:7250 validation loss:3.528921 +step:7250 train loss:3.600862 +step:7251 train loss:3.645140 +step:7252 train loss:3.561598 +step:7253 train loss:3.649498 +step:7254 train loss:3.586102 +step:7255 train loss:3.553228 +step:7256 train loss:3.601221 +step:7257 train loss:3.646698 +step:7258 train loss:3.599188 +step:7259 train loss:3.582775 +step:7260 train loss:3.671435 +step:7261 train loss:3.625052 +step:7262 train loss:3.582899 +step:7263 train loss:3.616225 +step:7264 train loss:3.608505 +step:7265 train loss:3.509317 +step:7266 train loss:3.632034 +step:7267 train loss:3.551712 +step:7268 train loss:3.615750 +step:7269 train loss:3.620644 +step:7270 train loss:3.576403 +step:7271 train loss:3.593018 +step:7272 train loss:3.594682 +step:7273 train loss:3.595083 +step:7274 train loss:3.571655 +step:7275 train loss:3.643016 +step:7276 train loss:3.549789 +step:7277 train loss:3.598500 +step:7278 train loss:3.567660 +step:7279 train loss:3.550068 +step:7280 train loss:3.617916 +step:7281 train loss:3.639023 +step:7282 train loss:3.641135 +step:7283 train loss:3.531721 +step:7284 train loss:3.575361 +step:7285 train loss:3.600608 +step:7286 train loss:3.731975 +step:7287 train loss:3.641125 +step:7288 train loss:3.598471 +step:7289 train loss:3.600756 +step:7290 train loss:3.648244 +step:7291 train loss:3.609739 +step:7292 train loss:3.680376 +step:7293 train loss:3.576051 +step:7294 train loss:3.661289 +step:7295 train loss:3.554922 +step:7296 train loss:3.548377 +step:7297 train loss:3.594957 +step:7298 train loss:3.571745 +step:7299 train loss:3.612318 +step:7300 train loss:3.596650 +step:7301 train loss:3.547388 +step:7302 train loss:3.694082 +step:7303 train loss:3.583334 +step:7304 train loss:3.528780 +step:7305 train loss:3.605537 +step:7306 train loss:3.633780 +step:7307 train loss:3.640403 +step:7308 train loss:3.590329 +step:7309 train loss:3.553456 +step:7310 train loss:3.582936 +step:7311 train loss:3.571213 +step:7312 train loss:3.604030 +step:7313 train loss:3.647819 +step:7314 train loss:3.542968 +step:7315 train loss:3.534938 +step:7316 train loss:3.677384 +step:7317 train loss:3.615991 +step:7318 train loss:3.561099 +step:7319 train loss:3.584079 
+step:7320 train loss:3.618982 +step:7321 train loss:3.646465 +step:7322 train loss:3.524488 +step:7323 train loss:3.580314 +step:7324 train loss:3.605966 +step:7325 train loss:3.568509 +step:7326 train loss:3.599673 +step:7327 train loss:3.574651 +step:7328 train loss:3.696852 +step:7329 train loss:3.537663 +step:7330 train loss:3.595208 +step:7331 train loss:3.586594 +step:7332 train loss:3.628616 +step:7333 train loss:3.609364 +step:7334 train loss:3.579354 +step:7335 train loss:3.573759 +step:7336 train loss:3.826059 +step:7337 train loss:3.615315 +step:7338 train loss:3.609427 +step:7339 train loss:3.622802 +step:7340 train loss:3.606925 +step:7341 train loss:3.600427 +step:7342 train loss:3.590753 +step:7343 train loss:3.603244 +step:7344 train loss:3.678900 +step:7345 train loss:3.540849 +step:7346 train loss:3.577949 +step:7347 train loss:3.571864 +step:7348 train loss:3.572290 +step:7349 train loss:3.673863 +step:7350 train loss:3.658785 +step:7351 train loss:3.594817 +step:7352 train loss:3.624910 +step:7353 train loss:3.603076 +step:7354 train loss:3.552982 +step:7355 train loss:3.737942 +step:7356 train loss:3.710227 +step:7357 train loss:3.634213 +step:7358 train loss:3.614082 +step:7359 train loss:3.581431 +step:7360 train loss:3.591831 +step:7361 train loss:3.543797 +step:7362 train loss:3.591176 +step:7363 train loss:3.606970 +step:7364 train loss:3.636451 +step:7365 train loss:3.624670 +step:7366 train loss:3.585893 +step:7367 train loss:3.667060 +step:7368 train loss:3.641712 +step:7369 train loss:3.634043 +step:7370 train loss:3.602660 +step:7371 train loss:3.559377 +step:7372 train loss:3.615723 +step:7373 train loss:3.639345 +step:7374 train loss:3.733956 +step:7375 train loss:3.558946 +step:7376 train loss:3.579875 +step:7377 train loss:3.621963 +step:7378 train loss:3.576185 +step:7379 train loss:3.699092 +step:7380 train loss:3.660817 +step:7381 train loss:3.628036 +step:7382 train loss:3.591676 +step:7383 train loss:3.682662 +step:7384 train loss:3.624255 +step:7385 train loss:3.584048 +step:7386 train loss:3.589417 +step:7387 train loss:3.630077 +step:7388 train loss:3.660293 +step:7389 train loss:3.602317 +step:7390 train loss:3.551395 +step:7391 train loss:3.585689 +step:7392 train loss:3.641089 +step:7393 train loss:3.606901 +step:7394 train loss:3.650373 +step:7395 train loss:3.538155 +step:7396 train loss:3.636430 +step:7397 train loss:3.569239 +step:7398 train loss:3.587098 +step:7399 train loss:3.630595 +step:7400 train loss:3.631629 +step:7401 train loss:3.551680 +step:7402 train loss:3.668410 +step:7403 train loss:3.548862 +step:7404 train loss:3.618782 +step:7405 train loss:3.743732 +step:7406 train loss:3.566769 +step:7407 train loss:3.619798 +step:7408 train loss:3.615950 +step:7409 train loss:3.588366 +step:7410 train loss:3.754562 +step:7411 train loss:3.602432 +step:7412 train loss:3.606297 +step:7413 train loss:3.656905 +step:7414 train loss:3.564727 +step:7415 train loss:3.626168 +step:7416 train loss:3.508108 +step:7417 train loss:3.628957 +step:7418 train loss:3.616638 +step:7419 train loss:3.580121 +step:7420 train loss:3.568358 +step:7421 train loss:3.605592 +step:7422 train loss:3.563893 +step:7423 train loss:3.702191 +step:7424 train loss:3.765145 +step:7425 train loss:3.656124 +step:7426 train loss:3.619352 +step:7427 train loss:3.587356 +step:7428 train loss:3.623357 +step:7429 train loss:3.627745 +step:7430 train loss:3.557212 +step:7431 train loss:3.560749 +step:7432 train loss:3.569327 +step:7433 train loss:3.665821 +step:7434 train 
loss:3.580502 +step:7435 train loss:3.665336 +step:7436 train loss:3.705933 +step:7437 train loss:3.526392 +step:7438 train loss:3.590106 +step:7439 train loss:3.597833 +step:7440 train loss:3.573324 +step:7441 train loss:3.538203 +step:7442 train loss:3.769486 +step:7443 train loss:3.591620 +step:7444 train loss:3.636699 +step:7445 train loss:3.563130 +step:7446 train loss:3.585906 +step:7447 train loss:3.512122 +step:7448 train loss:3.565206 +step:7449 train loss:3.584210 +step:7450 train loss:3.616349 +step:7451 train loss:3.644716 +step:7452 train loss:3.576761 +step:7453 train loss:3.599859 +step:7454 train loss:3.587375 +step:7455 train loss:3.597688 +step:7456 train loss:3.568012 +step:7457 train loss:3.577036 +step:7458 train loss:3.616750 +step:7459 train loss:3.593796 +step:7460 train loss:3.602748 +step:7461 train loss:3.637527 +step:7462 train loss:3.574541 +step:7463 train loss:3.635968 +step:7464 train loss:3.559316 +step:7465 train loss:3.569674 +step:7466 train loss:3.570997 +step:7467 train loss:3.582378 +step:7468 train loss:3.630318 +step:7469 train loss:3.564334 +step:7470 train loss:3.593826 +step:7471 train loss:3.585244 +step:7472 train loss:3.619085 +step:7473 train loss:3.557163 +step:7474 train loss:3.540878 +step:7475 train loss:3.575367 +step:7476 train loss:3.610894 +step:7477 train loss:3.586151 +step:7478 train loss:3.581801 +step:7479 train loss:3.595675 +step:7480 train loss:3.880688 +step:7481 train loss:3.528242 +step:7482 train loss:3.598887 +step:7483 train loss:3.596182 +step:7484 train loss:3.615660 +step:7485 train loss:3.600322 +step:7486 train loss:3.626434 +step:7487 train loss:3.619139 +step:7488 train loss:3.637774 +step:7489 train loss:3.632192 +step:7490 train loss:3.580711 +step:7491 train loss:3.604248 +step:7492 train loss:3.710449 +step:7493 train loss:3.681985 +step:7494 train loss:3.707532 +step:7495 train loss:3.577575 +step:7496 train loss:3.567693 +step:7497 train loss:3.661159 +step:7498 train loss:3.596589 +step:7499 train loss:3.633892 +step:7500 validation loss:3.525299 total_sharp:1.9573e-05 L1_sharp:3.5217e-06 L2_sharp:1.4657e-06 L3_sharp:1.7776e-06 L4_sharp:1.7495e-06 L5_sharp:1.0830e-06 L6_sharp:9.0044e-07 L7_sharp:1.6020e-06 L8_sharp:2.8322e-06 L9_sharp:3.8452e-06 L10_sharp:6.6765e-06 L11_sharp:4.1182e-06 L12_sharp:3.8392e-05 total_fnorm:4.0464e+01 total_l1_linf:3.4754e+05 total_spectral:4.0464e+01 L1_fnorm:9.2322e+00 L2_fnorm:7.7369e+00 L3_fnorm:8.3541e+00 L4_fnorm:8.8253e+00 L5_fnorm:9.3990e+00 L6_fnorm:9.4144e+00 L7_fnorm:9.9210e+00 L8_fnorm:9.8952e+00 L9_fnorm:9.8947e+00 L10_fnorm:9.7523e+00 L11_fnorm:9.9732e+00 L12_fnorm:9.2123e+00 L1_l1linf:8.6086e+00 L2_l1linf:5.6848e+00 L3_l1linf:7.0089e+00 L4_l1linf:7.7815e+00 L5_l1linf:8.8845e+00 L6_l1linf:8.8902e+00 L7_l1linf:9.4673e+00 L8_l1linf:1.0026e+01 L9_l1linf:9.9597e+00 L10_l1linf:1.0700e+01 L11_l1linf:1.1352e+01 L12_l1linf:1.0946e+01 L1_spectral:1.2334e+00 L2_spectral:9.3101e-01 L3_spectral:9.5420e-01 L4_spectral:9.3112e-01 L5_spectral:9.3847e-01 L6_spectral:1.0362e+00 L7_spectral:9.6891e-01 L8_spectral:1.1572e+00 L9_spectral:1.2925e+00 L10_spectral:1.4296e+00 L11_spectral:1.3558e+00 L12_spectral:1.6502e+00 ip_v_neg_g:1.6084e-02 cos_v_neg_g:1.0004e-03 v_norm:4.0464e+01 g_norm:3.9734e-01 hv_norm:2.1493e-01 cos_v_hv:3.6850e-03 hg_norm:3.4198e+00 cos_g_hg:6.0453e-01 v_par:1.1814e-02 v_perp:4.0464e+01 L1_cos_v_neg_g:5.6347e-03 L1_v_norm:9.2322e+00 L2_cos_v_neg_g:3.8879e-03 L2_v_norm:7.7369e+00 L3_cos_v_neg_g:3.3140e-03 L3_v_norm:8.3541e+00 L4_cos_v_neg_g:2.2237e-03 
L4_v_norm:8.8253e+00 L5_cos_v_neg_g:2.5884e-03 L5_v_norm:9.3990e+00 L6_cos_v_neg_g:2.6209e-03 L6_v_norm:9.4144e+00 L7_cos_v_neg_g:3.4925e-03 L7_v_norm:9.9210e+00 L8_cos_v_neg_g:4.4240e-03 L8_v_norm:9.8952e+00 L9_cos_v_neg_g:4.8945e-03 L9_v_norm:9.8947e+00 L10_cos_v_neg_g:7.3908e-03 L10_v_norm:9.7523e+00 L11_cos_v_neg_g:8.0542e-03 L11_v_norm:9.9732e+00 L12_cos_v_neg_g:2.0805e-02 L12_v_norm:9.2123e+00 +step:7500 train loss:3.579555 +step:7501 train loss:3.570042 +step:7502 train loss:3.558270 +step:7503 train loss:3.539597 +step:7504 train loss:3.563919 +step:7505 train loss:3.550931 +step:7506 train loss:3.609999 +step:7507 train loss:3.528081 +step:7508 train loss:3.599380 +step:7509 train loss:3.570771 +step:7510 train loss:3.601573 +step:7511 train loss:3.612076 +step:7512 train loss:3.867187 +step:7513 train loss:3.564388 +step:7514 train loss:3.592741 +step:7515 train loss:3.561510 +step:7516 train loss:3.572661 +step:7517 train loss:3.601822 +step:7518 train loss:3.583222 +step:7519 train loss:3.592611 +step:7520 train loss:3.656605 +step:7521 train loss:3.544604 +step:7522 train loss:3.599749 +step:7523 train loss:3.632761 +step:7524 train loss:3.581184 +step:7525 train loss:3.584428 +step:7526 train loss:3.533550 +step:7527 train loss:3.540679 +step:7528 train loss:3.634850 +step:7529 train loss:3.614352 +step:7530 train loss:3.564382 +step:7531 train loss:3.634789 +step:7532 train loss:3.624446 +step:7533 train loss:3.551854 +step:7534 train loss:3.614489 +step:7535 train loss:3.618155 +step:7536 train loss:3.650143 +step:7537 train loss:3.675922 +step:7538 train loss:3.694573 +step:7539 train loss:3.597802 +step:7540 train loss:3.584857 +step:7541 train loss:3.635909 +step:7542 train loss:3.597915 +step:7543 train loss:3.556082 +step:7544 train loss:3.594482 +step:7545 train loss:3.584134 +step:7546 train loss:3.540941 +step:7547 train loss:3.585906 +step:7548 train loss:3.600837 +step:7549 train loss:3.581131 +step:7550 train loss:3.578963 +step:7551 train loss:3.677332 +step:7552 train loss:3.591688 +step:7553 train loss:3.627890 +step:7554 train loss:3.555755 +step:7555 train loss:3.645319 +step:7556 train loss:3.549552 +step:7557 train loss:3.645567 +step:7558 train loss:3.633842 +step:7559 train loss:3.592403 +step:7560 train loss:3.686180 +step:7561 train loss:3.656211 +step:7562 train loss:3.559248 +step:7563 train loss:3.553973 +step:7564 train loss:3.611835 +step:7565 train loss:3.630013 +step:7566 train loss:3.617459 +step:7567 train loss:3.637336 +step:7568 train loss:3.579073 +step:7569 train loss:3.640423 +step:7570 train loss:3.621293 +step:7571 train loss:3.703577 +step:7572 train loss:3.551948 +step:7573 train loss:3.623056 +step:7574 train loss:3.584599 +step:7575 train loss:3.576243 +step:7576 train loss:3.588216 +step:7577 train loss:3.603370 +step:7578 train loss:3.659395 +step:7579 train loss:3.594213 +step:7580 train loss:3.583037 +step:7581 train loss:3.569371 +step:7582 train loss:3.626063 +step:7583 train loss:3.562299 +step:7584 train loss:3.548716 +step:7585 train loss:3.520440 +step:7586 train loss:3.552903 +step:7587 train loss:3.615559 +step:7588 train loss:3.744189 +step:7589 train loss:3.562690 +step:7590 train loss:3.634223 +step:7591 train loss:3.634142 +step:7592 train loss:3.595624 +step:7593 train loss:3.617520 +step:7594 train loss:3.616073 +step:7595 train loss:3.587740 +step:7596 train loss:3.638637 +step:7597 train loss:3.546918 +step:7598 train loss:3.605167 +step:7599 train loss:3.598777 +step:7600 train loss:3.555568 +step:7601 train 
loss:3.668797 +step:7602 train loss:3.611515 +step:7603 train loss:3.567605 +step:7604 train loss:3.716085 +step:7605 train loss:3.605170 +step:7606 train loss:3.638104 +step:7607 train loss:3.593019 +step:7608 train loss:3.598029 +step:7609 train loss:3.636008 +step:7610 train loss:3.592492 +step:7611 train loss:3.573522 +step:7612 train loss:3.512737 +step:7613 train loss:3.562241 +step:7614 train loss:3.629162 +step:7615 train loss:3.593753 +step:7616 train loss:3.659785 +step:7617 train loss:3.556334 +step:7618 train loss:3.646386 +step:7619 train loss:3.587636 +step:7620 train loss:3.574792 +step:7621 train loss:3.521715 +step:7622 train loss:3.798701 +step:7623 train loss:3.815365 +step:7624 train loss:3.627853 +step:7625 train loss:3.666216 +step:7626 train loss:3.583316 +step:7627 train loss:3.654100 +step:7628 train loss:3.533257 +step:7629 train loss:3.596428 +step:7630 train loss:3.608638 +step:7631 train loss:3.588699 +step:7632 train loss:3.637266 +step:7633 train loss:3.706684 +step:7634 train loss:3.667865 +step:7635 train loss:3.570618 +step:7636 train loss:3.599147 +step:7637 train loss:3.546293 +step:7638 train loss:3.657549 +step:7639 train loss:3.584213 +step:7640 train loss:3.563228 +step:7641 train loss:3.598209 +step:7642 train loss:3.930165 +step:7643 train loss:3.685106 +step:7644 train loss:3.608578 +step:7645 train loss:3.600951 +step:7646 train loss:3.585114 +step:7647 train loss:3.576167 +step:7648 train loss:3.611239 +step:7649 train loss:3.570322 +step:7650 train loss:3.619707 +step:7651 train loss:3.642863 +step:7652 train loss:3.519174 +step:7653 train loss:3.713303 +step:7654 train loss:3.570427 +step:7655 train loss:3.591654 +step:7656 train loss:3.567696 +step:7657 train loss:3.583576 +step:7658 train loss:3.538028 +step:7659 train loss:3.600720 +step:7660 train loss:3.534440 +step:7661 train loss:3.549161 +step:7662 train loss:3.549290 +step:7663 train loss:3.599134 +step:7664 train loss:3.560466 +step:7665 train loss:3.533645 +step:7666 train loss:3.642358 +step:7667 train loss:3.552353 +step:7668 train loss:3.661366 +step:7669 train loss:3.598757 +step:7670 train loss:3.550452 +step:7671 train loss:3.610614 +step:7672 train loss:3.628750 +step:7673 train loss:3.590263 +step:7674 train loss:3.629384 +step:7675 train loss:3.682741 +step:7676 train loss:3.652992 +step:7677 train loss:3.683274 +step:7678 train loss:3.619260 +step:7679 train loss:3.639866 +step:7680 train loss:3.649836 +step:7681 train loss:3.619219 +step:7682 train loss:3.583058 +step:7683 train loss:3.588680 +step:7684 train loss:3.565152 +step:7685 train loss:3.538020 +step:7686 train loss:3.661041 +step:7687 train loss:3.574089 +step:7688 train loss:3.540885 +step:7689 train loss:3.591422 +step:7690 train loss:3.556943 +step:7691 train loss:3.582363 +step:7692 train loss:3.618083 +step:7693 train loss:3.622715 +step:7694 train loss:3.670908 +step:7695 train loss:3.602136 +step:7696 train loss:3.576056 +step:7697 train loss:3.565552 +step:7698 train loss:3.623950 +step:7699 train loss:3.617002 +step:7700 train loss:3.520179 +step:7701 train loss:3.634198 +step:7702 train loss:3.580124 +step:7703 train loss:3.578252 +step:7704 train loss:3.626927 +step:7705 train loss:3.589417 +step:7706 train loss:3.528904 +step:7707 train loss:3.645862 +step:7708 train loss:3.591131 +step:7709 train loss:3.603976 +step:7710 train loss:3.668265 +step:7711 train loss:3.629671 +step:7712 train loss:3.571406 +step:7713 train loss:3.652939 +step:7714 train loss:3.598205 +step:7715 train loss:3.548709 
+step:7716 train loss:3.585879 +step:7717 train loss:3.613370 +step:7718 train loss:3.616053 +step:7719 train loss:3.573067 +step:7720 train loss:3.588790 +step:7721 train loss:3.631882 +step:7722 train loss:3.557002 +step:7723 train loss:3.932356 +step:7724 train loss:3.598980 +step:7725 train loss:3.504441 +step:7726 train loss:3.585845 +step:7727 train loss:3.609845 +step:7728 train loss:3.575408 +step:7729 train loss:3.573562 +step:7730 train loss:3.594285 +step:7731 train loss:3.623652 +step:7732 train loss:3.645408 +step:7733 train loss:3.554568 +step:7734 train loss:3.582239 +step:7735 train loss:3.673499 +step:7736 train loss:3.614852 +step:7737 train loss:3.632806 +step:7738 train loss:3.533600 +step:7739 train loss:3.609381 +step:7740 train loss:3.557997 +step:7741 train loss:3.594308 +step:7742 train loss:3.603464 +step:7743 train loss:3.548095 +step:7744 train loss:3.669305 +step:7745 train loss:3.566250 +step:7746 train loss:3.545488 +step:7747 train loss:3.633429 +step:7748 train loss:3.615581 +step:7749 train loss:3.539145 +step:7750 validation loss:3.528307 +step:7750 train loss:3.699020 +step:7751 train loss:3.578282 +step:7752 train loss:3.571736 +step:7753 train loss:3.578047 +step:7754 train loss:3.544793 +step:7755 train loss:3.611657 +step:7756 train loss:3.641452 +step:7757 train loss:3.590685 +step:7758 train loss:3.560714 +step:7759 train loss:3.585422 +step:7760 train loss:3.617488 +step:7761 train loss:3.602229 +step:7762 train loss:3.592122 +step:7763 train loss:3.579851 +step:7764 train loss:3.579026 +step:7765 train loss:3.536464 +step:7766 train loss:3.602286 +step:7767 train loss:3.604414 +step:7768 train loss:3.561045 +step:7769 train loss:3.623360 +step:7770 train loss:3.643363 +step:7771 train loss:3.613412 +step:7772 train loss:3.588923 +step:7773 train loss:3.646394 +step:7774 train loss:3.545089 +step:7775 train loss:3.533163 +step:7776 train loss:3.637572 +step:7777 train loss:3.592540 +step:7778 train loss:3.548990 +step:7779 train loss:3.590791 +step:7780 train loss:3.586801 +step:7781 train loss:3.594426 +step:7782 train loss:3.579325 +step:7783 train loss:3.559004 +step:7784 train loss:3.557629 +step:7785 train loss:3.600872 +step:7786 train loss:3.556417 +step:7787 train loss:3.634082 +step:7788 train loss:3.588259 +step:7789 train loss:3.522615 +step:7790 train loss:3.577826 +step:7791 train loss:3.614231 +step:7792 train loss:3.574202 +step:7793 train loss:3.596639 +step:7794 train loss:3.583959 +step:7795 train loss:3.618345 +step:7796 train loss:3.576605 +step:7797 train loss:3.593855 +step:7798 train loss:3.592018 +step:7799 train loss:3.581238 +step:7800 train loss:3.533892 +step:7801 train loss:3.600353 +step:7802 train loss:3.580906 +step:7803 train loss:3.631299 +step:7804 train loss:3.595172 +step:7805 train loss:3.590420 +step:7806 train loss:3.604151 +step:7807 train loss:3.680600 +step:7808 train loss:3.546950 +step:7809 train loss:3.520104 +step:7810 train loss:3.611043 +step:7811 train loss:3.545259 +step:7812 train loss:3.563549 +step:7813 train loss:3.652856 +step:7814 train loss:3.727111 +step:7815 train loss:3.534387 +step:7816 train loss:3.624050 +step:7817 train loss:3.652460 +step:7818 train loss:3.550165 +step:7819 train loss:3.601846 +step:7820 train loss:3.643747 +step:7821 train loss:3.571303 +step:7822 train loss:3.530241 +step:7823 train loss:3.598633 +step:7824 train loss:3.590805 +step:7825 train loss:3.578291 +step:7826 train loss:3.579443 +step:7827 train loss:3.619840 +step:7828 train loss:3.610390 +step:7829 
train loss:3.559854 +step:7830 train loss:3.573618 +step:7831 train loss:3.584675 +step:7832 train loss:3.641616 +step:7833 train loss:3.619974 +step:7834 train loss:3.588298 +step:7835 train loss:3.605276 +step:7836 train loss:3.724879 +step:7837 train loss:3.604976 +step:7838 train loss:3.574951 +step:7839 train loss:3.536758 +step:7840 train loss:3.551332 +step:7841 train loss:3.643681 +step:7842 train loss:3.627525 +step:7843 train loss:3.692194 +step:7844 train loss:3.615613 +step:7845 train loss:3.593487 +step:7846 train loss:3.705776 +step:7847 train loss:3.593744 +step:7848 train loss:3.607907 +step:7849 train loss:3.617462 +step:7850 train loss:3.587138 +step:7851 train loss:3.612205 +step:7852 train loss:3.590375 +step:7853 train loss:3.559412 +step:7854 train loss:3.590062 +step:7855 train loss:3.590440 +step:7856 train loss:3.594489 +step:7857 train loss:3.579302 +step:7858 train loss:3.588134 +step:7859 train loss:3.595353 +step:7860 train loss:3.633760 +step:7861 train loss:3.615886 +step:7862 train loss:3.559515 +step:7863 train loss:3.667552 +step:7864 train loss:3.504271 +step:7865 train loss:3.580781 +step:7866 train loss:3.557681 +step:7867 train loss:3.599375 +step:7868 train loss:3.581985 +step:7869 train loss:3.582071 +step:7870 train loss:3.508906 +step:7871 train loss:3.572288 +step:7872 train loss:3.573720 +step:7873 train loss:3.646969 +step:7874 train loss:3.587859 +step:7875 train loss:3.593070 +step:7876 train loss:3.609472 +step:7877 train loss:3.565655 +step:7878 train loss:3.597925 +step:7879 train loss:3.936862 +step:7880 train loss:3.590152 +step:7881 train loss:3.619200 +step:7882 train loss:3.699358 +step:7883 train loss:3.512321 +step:7884 train loss:3.603096 +step:7885 train loss:3.584199 +step:7886 train loss:3.584682 +step:7887 train loss:3.577715 +step:7888 train loss:3.610233 +step:7889 train loss:3.659493 +step:7890 train loss:3.561823 +step:7891 train loss:3.613060 +step:7892 train loss:3.582997 +step:7893 train loss:3.559413 +step:7894 train loss:3.581249 +step:7895 train loss:3.559503 +step:7896 train loss:3.559706 +step:7897 train loss:3.581977 +step:7898 train loss:3.593980 +step:7899 train loss:3.582922 +step:7900 train loss:3.552626 +step:7901 train loss:3.540430 +step:7902 train loss:3.689303 +step:7903 train loss:3.536906 +step:7904 train loss:3.588734 +step:7905 train loss:3.651071 +step:7906 train loss:3.553167 +step:7907 train loss:3.577801 +step:7908 train loss:3.628673 +step:7909 train loss:3.680497 +step:7910 train loss:3.557981 +step:7911 train loss:3.580628 +step:7912 train loss:3.581497 +step:7913 train loss:3.558629 +step:7914 train loss:3.594403 +step:7915 train loss:3.696362 +step:7916 train loss:3.566640 +step:7917 train loss:3.626609 +step:7918 train loss:3.566250 +step:7919 train loss:3.559728 +step:7920 train loss:3.599960 +step:7921 train loss:3.600988 +step:7922 train loss:3.577726 +step:7923 train loss:3.624414 +step:7924 train loss:3.582351 +step:7925 train loss:3.606241 +step:7926 train loss:3.510240 +step:7927 train loss:3.791013 +step:7928 train loss:3.619523 +step:7929 train loss:3.581464 +step:7930 train loss:3.541027 +step:7931 train loss:3.564914 +step:7932 train loss:3.586541 +step:7933 train loss:3.603635 +step:7934 train loss:3.698499 +step:7935 train loss:3.617837 +step:7936 train loss:3.588940 +step:7937 train loss:3.538771 +step:7938 train loss:3.551607 +step:7939 train loss:3.602289 +step:7940 train loss:3.588260 +step:7941 train loss:3.615763 +step:7942 train loss:3.606193 +step:7943 train loss:3.616178 
+step:7944 train loss:3.536249 +step:7945 train loss:3.640280 +step:7946 train loss:3.586214 +step:7947 train loss:3.598743 +step:7948 train loss:3.558287 +step:7949 train loss:3.610367 +step:7950 train loss:3.668553 +step:7951 train loss:3.635889 +step:7952 train loss:3.781363 +step:7953 train loss:3.669113 +step:7954 train loss:3.573416 +step:7955 train loss:3.566004 +step:7956 train loss:3.569146 +step:7957 train loss:3.643504 +step:7958 train loss:3.650623 +step:7959 train loss:3.607965 +step:7960 train loss:3.671840 +step:7961 train loss:3.579450 +step:7962 train loss:3.548023 +step:7963 train loss:3.589843 +step:7964 train loss:3.586972 +step:7965 train loss:3.593476 +step:7966 train loss:3.566034 +step:7967 train loss:3.583148 +step:7968 train loss:3.597143 +step:7969 train loss:3.555088 +step:7970 train loss:3.520337 +step:7971 train loss:3.607668 +step:7972 train loss:3.585049 +step:7973 train loss:3.555611 +step:7974 train loss:3.595586 +step:7975 train loss:3.582485 +step:7976 train loss:3.601413 +step:7977 train loss:3.634643 +step:7978 train loss:3.655077 +step:7979 train loss:3.603742 +step:7980 train loss:3.508456 +step:7981 train loss:3.550585 +step:7982 train loss:3.596263 +step:7983 train loss:3.610065 +step:7984 train loss:3.651905 +step:7985 train loss:3.575937 +step:7986 train loss:3.602110 +step:7987 train loss:3.658385 +step:7988 train loss:3.628880 +step:7989 train loss:3.531119 +step:7990 train loss:3.548770 +step:7991 train loss:3.563957 +step:7992 train loss:3.587906 +step:7993 train loss:3.567572 +step:7994 train loss:3.619368 +step:7995 train loss:3.621663 +step:7996 train loss:3.592795 +step:7997 train loss:3.605527 +step:7998 train loss:3.632045 +step:7999 train loss:3.562788 +step:8000 validation loss:3.511844 total_sharp:1.9531e-05 L1_sharp:3.0221e-06 L2_sharp:9.4687e-07 L3_sharp:1.7395e-06 L4_sharp:2.9769e-06 L5_sharp:1.5659e-06 L6_sharp:1.6442e-06 L7_sharp:1.7128e-06 L8_sharp:2.5809e-06 L9_sharp:3.0959e-06 L10_sharp:5.5694e-06 L11_sharp:3.6601e-06 L12_sharp:3.2956e-05 total_fnorm:4.0361e+01 total_l1_linf:3.4667e+05 total_spectral:4.0361e+01 L1_fnorm:9.3438e+00 L2_fnorm:7.6154e+00 L3_fnorm:8.3221e+00 L4_fnorm:8.8282e+00 L5_fnorm:9.3586e+00 L6_fnorm:9.3939e+00 L7_fnorm:9.9253e+00 L8_fnorm:9.9387e+00 L9_fnorm:9.9076e+00 L10_fnorm:9.7836e+00 L11_fnorm:9.9484e+00 L12_fnorm:9.1897e+00 L1_l1linf:8.8374e+00 L2_l1linf:5.7142e+00 L3_l1linf:7.0694e+00 L4_l1linf:7.8806e+00 L5_l1linf:9.1177e+00 L6_l1linf:9.1106e+00 L7_l1linf:1.0194e+01 L8_l1linf:1.0511e+01 L9_l1linf:1.0822e+01 L10_l1linf:1.1976e+01 L11_l1linf:1.1722e+01 L12_l1linf:1.2878e+01 L1_spectral:1.2563e+00 L2_spectral:9.4888e-01 L3_spectral:9.3491e-01 L4_spectral:9.2483e-01 L5_spectral:9.3837e-01 L6_spectral:1.0700e+00 L7_spectral:1.0529e+00 L8_spectral:1.1981e+00 L9_spectral:1.3448e+00 L10_spectral:1.5698e+00 L11_spectral:1.4398e+00 L12_spectral:1.6788e+00 ip_v_neg_g:1.3575e-02 cos_v_neg_g:9.1026e-04 v_norm:4.0361e+01 g_norm:3.6951e-01 hv_norm:2.0419e-01 cos_v_hv:3.8608e-03 hg_norm:4.7808e+00 cos_g_hg:5.7935e-01 v_par:1.2537e-02 v_perp:4.0361e+01 L1_cos_v_neg_g:6.0652e-03 L1_v_norm:9.3438e+00 L2_cos_v_neg_g:2.9183e-03 L2_v_norm:7.6154e+00 L3_cos_v_neg_g:3.5399e-03 L3_v_norm:8.3221e+00 L4_cos_v_neg_g:5.1323e-03 L4_v_norm:8.8282e+00 L5_cos_v_neg_g:2.4026e-03 L5_v_norm:9.3586e+00 L6_cos_v_neg_g:3.3226e-03 L6_v_norm:9.3939e+00 L7_cos_v_neg_g:2.6461e-03 L7_v_norm:9.9253e+00 L8_cos_v_neg_g:3.0438e-03 L8_v_norm:9.9387e+00 L9_cos_v_neg_g:3.3121e-03 L9_v_norm:9.9076e+00 L10_cos_v_neg_g:6.5336e-03 L10_v_norm:9.7836e+00 
L11_cos_v_neg_g:8.0112e-03 L11_v_norm:9.9484e+00 L12_cos_v_neg_g:1.6341e-02 L12_v_norm:9.1897e+00 +step:8000 train loss:3.628017 +step:8001 train loss:3.590613 +step:8002 train loss:3.608083 +step:8003 train loss:3.627001 +step:8004 train loss:3.603240 +step:8005 train loss:3.523019 +step:8006 train loss:3.604841 +step:8007 train loss:3.573323 +step:8008 train loss:3.596283 +step:8009 train loss:3.672681 +step:8010 train loss:3.920121 +step:8011 train loss:3.568570 +step:8012 train loss:3.643060 +step:8013 train loss:3.599476 +step:8014 train loss:3.611196 +step:8015 train loss:3.606546 +step:8016 train loss:3.595825 +step:8017 train loss:3.616339 +step:8018 train loss:3.578843 +step:8019 train loss:3.549215 +step:8020 train loss:3.581752 +step:8021 train loss:3.657146 +step:8022 train loss:3.571395 +step:8023 train loss:3.603960 +step:8024 train loss:3.490178 +step:8025 train loss:3.587507 +step:8026 train loss:3.591860 +step:8027 train loss:3.609056 +step:8028 train loss:3.666902 +step:8029 train loss:3.588574 +step:8030 train loss:3.547561 +step:8031 train loss:3.604635 +step:8032 train loss:3.591319 +step:8033 train loss:3.541156 +step:8034 train loss:3.592949 +step:8035 train loss:3.573212 +step:8036 train loss:3.554615 +step:8037 train loss:3.523283 +step:8038 train loss:3.537002 +step:8039 train loss:3.635818 +step:8040 train loss:3.570010 +step:8041 train loss:3.566568 +step:8042 train loss:3.606378 +step:8043 train loss:3.545485 +step:8044 train loss:3.555856 +step:8045 train loss:3.621247 +step:8046 train loss:3.549953 +step:8047 train loss:3.557195 +step:8048 train loss:3.584942 +step:8049 train loss:3.634716 +step:8050 train loss:3.571846 +step:8051 train loss:3.550108 +step:8052 train loss:3.609530 +step:8053 train loss:3.558280 +step:8054 train loss:3.597526 +step:8055 train loss:3.624810 +step:8056 train loss:3.591813 +step:8057 train loss:3.671418 +step:8058 train loss:3.574593 +step:8059 train loss:3.635526 +step:8060 train loss:3.605608 +step:8061 train loss:3.490470 +step:8062 train loss:3.631999 +step:8063 train loss:3.592244 +step:8064 train loss:3.554883 +step:8065 train loss:3.617163 +step:8066 train loss:3.577785 +step:8067 train loss:3.639653 +step:8068 train loss:3.565777 +step:8069 train loss:3.592826 +step:8070 train loss:3.554700 +step:8071 train loss:3.564706 +step:8072 train loss:3.602330 +step:8073 train loss:3.557690 +step:8074 train loss:3.567637 +step:8075 train loss:3.565845 +step:8076 train loss:3.600936 +step:8077 train loss:3.606220 +step:8078 train loss:3.550608 +step:8079 train loss:3.573026 +step:8080 train loss:3.556470 +step:8081 train loss:3.575604 +step:8082 train loss:3.591571 +step:8083 train loss:3.498736 +step:8084 train loss:3.632106 +step:8085 train loss:3.503769 +step:8086 train loss:3.630888 +step:8087 train loss:3.522982 +step:8088 train loss:3.574816 +step:8089 train loss:3.607637 +step:8090 train loss:3.628628 +step:8091 train loss:3.575892 +step:8092 train loss:3.560228 +step:8093 train loss:3.563923 +step:8094 train loss:3.568383 +step:8095 train loss:3.593921 +step:8096 train loss:3.590955 +step:8097 train loss:3.520957 +step:8098 train loss:3.533847 +step:8099 train loss:3.531167 +step:8100 train loss:3.587761 +step:8101 train loss:3.655499 +step:8102 train loss:3.600546 +step:8103 train loss:3.547293 +step:8104 train loss:3.599933 +step:8105 train loss:3.592687 +step:8106 train loss:3.555512 +step:8107 train loss:3.537941 +step:8108 train loss:3.552355 +step:8109 train loss:3.550331 +step:8110 train loss:3.615891 +step:8111 
train loss:3.537027 +step:8112 train loss:3.555778 +step:8113 train loss:3.542923 +step:8114 train loss:3.489643 +step:8115 train loss:3.544731 +step:8116 train loss:3.578620 +step:8117 train loss:3.548486 +step:8118 train loss:3.543640 +step:8119 train loss:3.580817 +step:8120 train loss:3.529803 +step:8121 train loss:3.591501 +step:8122 train loss:3.570456 +step:8123 train loss:3.579302 +step:8124 train loss:3.541199 +step:8125 train loss:3.524253 +step:8126 train loss:3.516461 +step:8127 train loss:3.612247 +step:8128 train loss:3.614475 +step:8129 train loss:3.534261 +step:8130 train loss:3.564045 +step:8131 train loss:3.532845 +step:8132 train loss:3.603789 +step:8133 train loss:3.525230 +step:8134 train loss:3.564296 +step:8135 train loss:3.551445 +step:8136 train loss:3.564494 +step:8137 train loss:3.628445 +step:8138 train loss:3.539195 +step:8139 train loss:3.608126 +step:8140 train loss:3.537716 +step:8141 train loss:3.562831 +step:8142 train loss:3.541290 +step:8143 train loss:3.589511 +step:8144 train loss:3.570615 +step:8145 train loss:3.540175 +step:8146 train loss:3.545987 +step:8147 train loss:3.567670 +step:8148 train loss:3.657292 +step:8149 train loss:3.574744 +step:8150 train loss:3.551502 +step:8151 train loss:3.546997 +step:8152 train loss:3.644546 +step:8153 train loss:3.521631 +step:8154 train loss:3.537092 +step:8155 train loss:3.561882 +step:8156 train loss:3.547783 +step:8157 train loss:3.562502 +step:8158 train loss:3.580493 +step:8159 train loss:3.591578 +step:8160 train loss:3.543731 +step:8161 train loss:3.586434 +step:8162 train loss:3.517752 +step:8163 train loss:3.577853 +step:8164 train loss:3.564264 +step:8165 train loss:3.618499 +step:8166 train loss:3.617154 +step:8167 train loss:3.522431 +step:8168 train loss:3.505899 +step:8169 train loss:3.555628 +step:8170 train loss:3.506253 +step:8171 train loss:3.564446 +step:8172 train loss:3.562499 +step:8173 train loss:3.565003 +step:8174 train loss:3.572670 +step:8175 train loss:3.534371 +step:8176 train loss:3.529144 +step:8177 train loss:3.574690 +step:8178 train loss:3.662772 +step:8179 train loss:3.573318 +step:8180 train loss:3.590907 +step:8181 train loss:3.594497 +step:8182 train loss:3.556096 +step:8183 train loss:3.540686 +step:8184 train loss:3.537003 +step:8185 train loss:3.573653 +step:8186 train loss:3.575250 +step:8187 train loss:3.582824 +step:8188 train loss:3.513553 +step:8189 train loss:3.659019 +step:8190 train loss:3.595761 +step:8191 train loss:3.595256 +step:8192 train loss:3.704452 +step:8193 train loss:3.574679 +step:8194 train loss:3.508838 +step:8195 train loss:3.608244 +step:8196 train loss:3.527779 +step:8197 train loss:3.554816 +step:8198 train loss:3.560938 +step:8199 train loss:3.565377 +step:8200 train loss:3.539729 +step:8201 train loss:3.658782 +step:8202 train loss:3.573910 +step:8203 train loss:3.598296 +step:8204 train loss:3.503580 +step:8205 train loss:3.511549 +step:8206 train loss:3.634336 +step:8207 train loss:3.560891 +step:8208 train loss:3.578319 +step:8209 train loss:3.625547 +step:8210 train loss:3.604848 +step:8211 train loss:3.538178 +step:8212 train loss:3.597870 +step:8213 train loss:3.607976 +step:8214 train loss:3.642341 +step:8215 train loss:3.621929 +step:8216 train loss:3.600113 +step:8217 train loss:3.578024 +step:8218 train loss:3.589509 +step:8219 train loss:3.722023 +step:8220 train loss:3.551489 +step:8221 train loss:3.569504 +step:8222 train loss:3.525578 +step:8223 train loss:3.543773 +step:8224 train loss:3.559329 +step:8225 train loss:3.606583 
+step:8226 train loss:3.533063 +step:8227 train loss:3.604714 +step:8228 train loss:3.490760 +step:8229 train loss:3.530983 +step:8230 train loss:3.546896 +step:8231 train loss:3.574578 +step:8232 train loss:3.571224 +step:8233 train loss:3.619700 +step:8234 train loss:3.613922 +step:8235 train loss:3.586961 +step:8236 train loss:3.569419 +step:8237 train loss:3.520488 +step:8238 train loss:3.770107 +step:8239 train loss:3.603497 +step:8240 train loss:3.553687 +step:8241 train loss:3.525139 +step:8242 train loss:3.562296 +step:8243 train loss:3.549507 +step:8244 train loss:3.565951 +step:8245 train loss:3.548176 +step:8246 train loss:3.615348 +step:8247 train loss:3.646914 +step:8248 train loss:3.567023 +step:8249 train loss:3.561997 +step:8250 validation loss:3.503737 +step:8250 train loss:3.546335 +step:8251 train loss:3.639296 +step:8252 train loss:3.577987 +step:8253 train loss:3.547205 +step:8254 train loss:3.515325 +step:8255 train loss:3.552388 +step:8256 train loss:3.530748 +step:8257 train loss:3.642438 +step:8258 train loss:3.560348 +step:8259 train loss:3.547518 +step:8260 train loss:3.547631 +step:8261 train loss:3.547748 +step:8262 train loss:3.559551 +step:8263 train loss:3.573388 +step:8264 train loss:3.538511 +step:8265 train loss:3.528634 +step:8266 train loss:3.539513 +step:8267 train loss:3.475802 +step:8268 train loss:3.593662 +step:8269 train loss:3.526748 +step:8270 train loss:3.580842 +step:8271 train loss:3.606108 +step:8272 train loss:3.632165 +step:8273 train loss:3.510151 +step:8274 train loss:3.574872 +step:8275 train loss:3.537426 +step:8276 train loss:3.571398 +step:8277 train loss:3.642074 +step:8278 train loss:3.656597 +step:8279 train loss:3.565032 +step:8280 train loss:3.554818 +step:8281 train loss:3.519309 +step:8282 train loss:3.580524 +step:8283 train loss:3.563078 +step:8284 train loss:3.552422 +step:8285 train loss:3.540302 +step:8286 train loss:3.656488 +step:8287 train loss:3.590280 +step:8288 train loss:3.563766 +step:8289 train loss:3.573915 +step:8290 train loss:3.514735 +step:8291 train loss:3.553915 +step:8292 train loss:3.584206 +step:8293 train loss:3.557080 +step:8294 train loss:3.527509 +step:8295 train loss:3.563557 +step:8296 train loss:3.631475 +step:8297 train loss:3.711071 +step:8298 train loss:3.535209 +step:8299 train loss:3.569765 +step:8300 train loss:3.578590 +step:8301 train loss:3.551326 +step:8302 train loss:3.608851 +step:8303 train loss:3.740564 +step:8304 train loss:3.553972 +step:8305 train loss:3.599615 +step:8306 train loss:3.574280 +step:8307 train loss:3.590351 +step:8308 train loss:3.588421 +step:8309 train loss:3.611154 +step:8310 train loss:3.525785 +step:8311 train loss:3.616428 +step:8312 train loss:3.613826 +step:8313 train loss:3.671359 +step:8314 train loss:3.546646 +step:8315 train loss:3.491739 +step:8316 train loss:3.553069 +step:8317 train loss:3.571966 +step:8318 train loss:3.565063 +step:8319 train loss:3.600125 +step:8320 train loss:3.620041 +step:8321 train loss:3.527504 +step:8322 train loss:3.544155 +step:8323 train loss:3.579056 +step:8324 train loss:3.556355 +step:8325 train loss:3.612204 +step:8326 train loss:3.577565 +step:8327 train loss:3.567746 +step:8328 train loss:3.642298 +step:8329 train loss:3.547940 +step:8330 train loss:3.588069 +step:8331 train loss:3.515206 +step:8332 train loss:3.615351 +step:8333 train loss:3.635754 +step:8334 train loss:3.500525 +step:8335 train loss:3.568482 +step:8336 train loss:3.660462 +step:8337 train loss:3.589850 +step:8338 train loss:3.555572 +step:8339 
train loss:3.537867 +step:8340 train loss:3.625234 +step:8341 train loss:3.523497 +step:8342 train loss:3.599104 +step:8343 train loss:3.509074 +step:8344 train loss:3.555107 +step:8345 train loss:3.590158 +step:8346 train loss:3.669949 +step:8347 train loss:3.565362 +step:8348 train loss:3.592205 +step:8349 train loss:3.565107 +step:8350 train loss:3.580842 +step:8351 train loss:3.523881 +step:8352 train loss:3.609508 +step:8353 train loss:3.565485 +step:8354 train loss:3.543554 +step:8355 train loss:3.547913 +step:8356 train loss:3.539346 +step:8357 train loss:3.555903 +step:8358 train loss:3.529952 +step:8359 train loss:3.524730 +step:8360 train loss:3.572432 +step:8361 train loss:3.588471 +step:8362 train loss:3.603690 +step:8363 train loss:3.606518 +step:8364 train loss:3.567831 +step:8365 train loss:3.713161 +step:8366 train loss:3.556285 +step:8367 train loss:3.533860 +step:8368 train loss:3.499454 +step:8369 train loss:3.533925 +step:8370 train loss:3.612227 +step:8371 train loss:3.587519 +step:8372 train loss:3.562483 +step:8373 train loss:3.578878 +step:8374 train loss:3.506288 +step:8375 train loss:3.570838 +step:8376 train loss:3.612092 +step:8377 train loss:3.438248 +step:8378 train loss:3.649953 +step:8379 train loss:3.524649 +step:8380 train loss:3.526918 +step:8381 train loss:3.533744 +step:8382 train loss:3.562353 +step:8383 train loss:3.517501 +step:8384 train loss:3.559661 +step:8385 train loss:3.573150 +step:8386 train loss:3.550759 +step:8387 train loss:3.715627 +step:8388 train loss:3.627653 +step:8389 train loss:3.599795 +step:8390 train loss:3.603760 +step:8391 train loss:3.535921 +step:8392 train loss:3.547956 +step:8393 train loss:3.504486 +step:8394 train loss:3.592907 +step:8395 train loss:3.598983 +step:8396 train loss:3.622706 +step:8397 train loss:3.557982 +step:8398 train loss:3.574332 +step:8399 train loss:3.541220 +step:8400 train loss:3.546801 +step:8401 train loss:3.561291 +step:8402 train loss:3.541516 +step:8403 train loss:3.552218 +step:8404 train loss:3.559146 +step:8405 train loss:3.515856 +step:8406 train loss:3.559175 +step:8407 train loss:3.594891 +step:8408 train loss:3.568887 +step:8409 train loss:3.490635 +step:8410 train loss:3.557502 +step:8411 train loss:3.580644 +step:8412 train loss:3.634657 +step:8413 train loss:3.615012 +step:8414 train loss:3.613763 +step:8415 train loss:3.534471 +step:8416 train loss:3.583580 +step:8417 train loss:3.496816 +step:8418 train loss:3.602407 +step:8419 train loss:3.553491 +step:8420 train loss:3.632710 +step:8421 train loss:3.551616 +step:8422 train loss:3.565167 +step:8423 train loss:3.580448 +step:8424 train loss:3.586101 +step:8425 train loss:3.642791 +step:8426 train loss:3.610646 +step:8427 train loss:3.529402 +step:8428 train loss:3.544021 +step:8429 train loss:3.605675 +step:8430 train loss:3.544965 +step:8431 train loss:3.548515 +step:8432 train loss:3.553280 +step:8433 train loss:3.530291 +step:8434 train loss:3.560623 +step:8435 train loss:3.483457 +step:8436 train loss:3.565581 +step:8437 train loss:3.609742 +step:8438 train loss:3.586610 +step:8439 train loss:3.527667 +step:8440 train loss:3.497401 +step:8441 train loss:3.554714 +step:8442 train loss:3.577560 +step:8443 train loss:3.532296 +step:8444 train loss:3.568544 +step:8445 train loss:3.517564 +step:8446 train loss:3.566155 +step:8447 train loss:3.578348 +step:8448 train loss:3.562765 +step:8449 train loss:3.553221 +step:8450 train loss:3.540605 +step:8451 train loss:3.575399 +step:8452 train loss:3.549592 +step:8453 train loss:3.529227 
+step:8454 train loss:3.579604 +step:8455 train loss:3.659771 +step:8456 train loss:3.627962 +step:8457 train loss:3.684669 +step:8458 train loss:3.570796 +step:8459 train loss:3.578981 +step:8460 train loss:3.504451 +step:8461 train loss:3.662148 +step:8462 train loss:3.534317 +step:8463 train loss:3.577192 +step:8464 train loss:3.588892 +step:8465 train loss:3.592989 +step:8466 train loss:3.566405 +step:8467 train loss:3.569793 +step:8468 train loss:3.818434 +step:8469 train loss:3.534920 +step:8470 train loss:3.525409 +step:8471 train loss:3.570591 +step:8472 train loss:3.592539 +step:8473 train loss:3.550964 +step:8474 train loss:3.676916 +step:8475 train loss:3.633347 +step:8476 train loss:3.577248 +step:8477 train loss:3.572621 +step:8478 train loss:3.548018 +step:8479 train loss:3.551261 +step:8480 train loss:3.652281 +step:8481 train loss:3.560458 +step:8482 train loss:3.551180 +step:8483 train loss:3.707971 +step:8484 train loss:3.588092 +step:8485 train loss:3.630231 +step:8486 train loss:3.535055 +step:8487 train loss:3.595490 +step:8488 train loss:3.536500 +step:8489 train loss:3.612271 +step:8490 train loss:3.598624 +step:8491 train loss:3.620336 +step:8492 train loss:3.577369 +step:8493 train loss:3.640228 +step:8494 train loss:3.505396 +step:8495 train loss:3.604285 +step:8496 train loss:3.549268 +step:8497 train loss:3.582413 +step:8498 train loss:3.593695 +step:8499 train loss:3.570562 +step:8500 validation loss:3.506177 total_sharp:1.9320e-05 L1_sharp:4.1699e-06 L2_sharp:2.7317e-06 L3_sharp:2.7901e-06 L4_sharp:1.8822e-06 L5_sharp:1.2956e-06 L6_sharp:1.0108e-06 L7_sharp:1.4705e-06 L8_sharp:2.6899e-06 L9_sharp:4.3876e-06 L10_sharp:5.5582e-06 L11_sharp:2.8515e-06 L12_sharp:3.7570e-05 total_fnorm:3.9542e+01 total_l1_linf:3.3839e+05 total_spectral:3.9542e+01 L1_fnorm:8.8240e+00 L2_fnorm:7.1862e+00 L3_fnorm:8.0081e+00 L4_fnorm:8.5369e+00 L5_fnorm:9.0882e+00 L6_fnorm:9.1447e+00 L7_fnorm:9.6854e+00 L8_fnorm:9.7619e+00 L9_fnorm:9.7650e+00 L10_fnorm:9.6055e+00 L11_fnorm:9.6398e+00 L12_fnorm:8.7652e+00 L1_l1linf:8.3708e+00 L2_l1linf:5.6175e+00 L3_l1linf:6.7146e+00 L4_l1linf:7.8155e+00 L5_l1linf:8.6721e+00 L6_l1linf:8.9014e+00 L7_l1linf:9.7903e+00 L8_l1linf:1.1025e+01 L9_l1linf:1.1495e+01 L10_l1linf:1.1859e+01 L11_l1linf:1.2391e+01 L12_l1linf:1.2705e+01 L1_spectral:1.2499e+00 L2_spectral:8.8799e-01 L3_spectral:9.0756e-01 L4_spectral:8.7071e-01 L5_spectral:9.2328e-01 L6_spectral:1.0513e+00 L7_spectral:9.4470e-01 L8_spectral:1.1678e+00 L9_spectral:1.4445e+00 L10_spectral:1.4566e+00 L11_spectral:1.3902e+00 L12_spectral:1.7318e+00 ip_v_neg_g:1.6738e-02 cos_v_neg_g:1.1874e-03 v_norm:3.9542e+01 g_norm:3.5650e-01 hv_norm:2.2540e-01 cos_v_hv:3.3893e-03 hg_norm:3.0415e+00 cos_g_hg:5.4979e-01 v_par:1.1574e-02 v_perp:3.9542e+01 L1_cos_v_neg_g:8.5994e-03 L1_v_norm:8.8240e+00 L2_cos_v_neg_g:5.4522e-03 L2_v_norm:7.1862e+00 L3_cos_v_neg_g:5.9370e-03 L3_v_norm:8.0081e+00 L4_cos_v_neg_g:5.6119e-03 L4_v_norm:8.5369e+00 L5_cos_v_neg_g:5.0678e-03 L5_v_norm:9.0882e+00 L6_cos_v_neg_g:4.5234e-03 L6_v_norm:9.1447e+00 L7_cos_v_neg_g:4.5551e-03 L7_v_norm:9.6854e+00 L8_cos_v_neg_g:5.5678e-03 L8_v_norm:9.7619e+00 L9_cos_v_neg_g:7.5717e-03 L9_v_norm:9.7650e+00 L10_cos_v_neg_g:9.2861e-03 L10_v_norm:9.6055e+00 L11_cos_v_neg_g:9.7817e-03 L11_v_norm:9.6398e+00 L12_cos_v_neg_g:2.1912e-02 L12_v_norm:8.7652e+00 +step:8500 train loss:3.567915 +step:8501 train loss:3.786208 +step:8502 train loss:3.806428 +step:8503 train loss:3.562881 +step:8504 train loss:3.561963 +step:8505 train loss:3.535888 +step:8506 train 
loss:3.607084 +step:8507 train loss:3.543907 +step:8508 train loss:3.576580 +step:8509 train loss:3.514894 +step:8510 train loss:3.540015 +step:8511 train loss:3.496027 +step:8512 train loss:3.597218 +step:8513 train loss:3.599683 +step:8514 train loss:3.547421 +step:8515 train loss:3.641258 +step:8516 train loss:3.558446 +step:8517 train loss:3.578193 +step:8518 train loss:3.466704 +step:8519 train loss:3.561296 +step:8520 train loss:3.527183 +step:8521 train loss:3.570635 +step:8522 train loss:3.461509 +step:8523 train loss:3.558016 +step:8524 train loss:3.549560 +step:8525 train loss:3.615127 +step:8526 train loss:3.594838 +step:8527 train loss:3.538766 +step:8528 train loss:3.618564 +step:8529 train loss:3.573232 +step:8530 train loss:3.614778 +step:8531 train loss:3.599830 +step:8532 train loss:3.639497 +step:8533 train loss:3.587892 +step:8534 train loss:3.591764 +step:8535 train loss:3.561266 +step:8536 train loss:3.653955 +step:8537 train loss:3.565682 +step:8538 train loss:3.637356 +step:8539 train loss:3.558876 +step:8540 train loss:3.583437 +step:8541 train loss:3.524979 +step:8542 train loss:3.591187 +step:8543 train loss:3.507349 +step:8544 train loss:3.500425 +step:8545 train loss:3.558332 +step:8546 train loss:3.507097 +step:8547 train loss:3.555195 +step:8548 train loss:3.532021 +step:8549 train loss:3.574817 +step:8550 train loss:3.528474 +step:8551 train loss:3.576434 +step:8552 train loss:3.583295 +step:8553 train loss:3.582242 +step:8554 train loss:3.556671 +step:8555 train loss:3.566086 +step:8556 train loss:3.647367 +step:8557 train loss:3.543671 +step:8558 train loss:3.584254 +step:8559 train loss:3.573302 +step:8560 train loss:3.555901 +step:8561 train loss:3.509887 +step:8562 train loss:3.540887 +step:8563 train loss:3.534936 +step:8564 train loss:3.609545 +step:8565 train loss:3.583196 +step:8566 train loss:3.600842 +step:8567 train loss:3.549022 +step:8568 train loss:3.565384 +step:8569 train loss:3.575860 +step:8570 train loss:3.517734 +step:8571 train loss:3.560108 +step:8572 train loss:3.578464 +step:8573 train loss:3.653391 +step:8574 train loss:3.580634 +step:8575 train loss:3.581694 +step:8576 train loss:3.615782 +step:8577 train loss:3.700626 +step:8578 train loss:3.605904 +step:8579 train loss:3.588780 +step:8580 train loss:3.521762 +step:8581 train loss:3.566493 +step:8582 train loss:3.570670 +step:8583 train loss:3.572008 +step:8584 train loss:3.560637 +step:8585 train loss:3.637604 +step:8586 train loss:3.559137 +step:8587 train loss:3.569455 +step:8588 train loss:3.616654 +step:8589 train loss:3.561065 +step:8590 train loss:3.553790 +step:8591 train loss:3.556105 +step:8592 train loss:3.515999 +step:8593 train loss:3.595261 +step:8594 train loss:3.619111 +step:8595 train loss:3.543511 +step:8596 train loss:3.582372 +step:8597 train loss:3.548871 +step:8598 train loss:3.599350 +step:8599 train loss:3.567719 +step:8600 train loss:3.572921 +step:8601 train loss:3.559975 +step:8602 train loss:3.535226 +step:8603 train loss:3.592504 +step:8604 train loss:3.540479 +step:8605 train loss:3.548932 +step:8606 train loss:3.565558 +step:8607 train loss:3.572630 +step:8608 train loss:3.619451 +step:8609 train loss:3.510605 +step:8610 train loss:3.585697 +step:8611 train loss:3.515877 +step:8612 train loss:3.595205 +step:8613 train loss:3.528567 +step:8614 train loss:3.591446 +step:8615 train loss:3.633245 +step:8616 train loss:3.516008 +step:8617 train loss:3.585620 +step:8618 train loss:3.558878 +step:8619 train loss:3.510274 +step:8620 train loss:3.553994 
+step:8621 train loss:3.584541 +step:8622 train loss:3.542615 +step:8623 train loss:3.556205 +step:8624 train loss:3.631884 +step:8625 train loss:3.550132 +step:8626 train loss:3.564171 +step:8627 train loss:3.557155 +step:8628 train loss:3.591002 +step:8629 train loss:3.499180 +step:8630 train loss:3.601285 +step:8631 train loss:3.542617 +step:8632 train loss:3.606983 +step:8633 train loss:3.546744 +step:8634 train loss:3.776313 +step:8635 train loss:3.573840 +step:8636 train loss:3.621591 +step:8637 train loss:3.546514 +step:8638 train loss:3.542742 +step:8639 train loss:3.604756 +step:8640 train loss:3.513835 +step:8641 train loss:3.616243 +step:8642 train loss:3.565478 +step:8643 train loss:3.681786 +step:8644 train loss:3.519156 +step:8645 train loss:3.591581 +step:8646 train loss:3.552525 +step:8647 train loss:3.573975 +step:8648 train loss:3.525268 +step:8649 train loss:3.609710 +step:8650 train loss:3.563369 +step:8651 train loss:3.580146 +step:8652 train loss:3.545855 +step:8653 train loss:3.573934 +step:8654 train loss:3.621986 +step:8655 train loss:3.549521 +step:8656 train loss:3.590762 +step:8657 train loss:3.591362 +step:8658 train loss:3.567217 +step:8659 train loss:3.558609 +step:8660 train loss:3.501262 +step:8661 train loss:3.565454 +step:8662 train loss:3.500781 +step:8663 train loss:3.577634 +step:8664 train loss:3.493575 +step:8665 train loss:3.517707 +step:8666 train loss:3.594356 +step:8667 train loss:3.485927 +step:8668 train loss:3.593973 +step:8669 train loss:3.630163 +step:8670 train loss:3.534117 +step:8671 train loss:3.528598 +step:8672 train loss:3.748263 +step:8673 train loss:3.514583 +step:8674 train loss:3.579680 +step:8675 train loss:3.622252 +step:8676 train loss:3.565739 +step:8677 train loss:3.590332 +step:8678 train loss:3.538388 +step:8679 train loss:3.594823 +step:8680 train loss:3.576906 +step:8681 train loss:3.572332 +step:8682 train loss:3.533055 +step:8683 train loss:3.547497 +step:8684 train loss:3.623787 +step:8685 train loss:3.563186 +step:8686 train loss:3.556971 +step:8687 train loss:3.511021 +step:8688 train loss:3.529176 +step:8689 train loss:3.599038 +step:8690 train loss:3.535548 +step:8691 train loss:3.616056 +step:8692 train loss:3.499235 +step:8693 train loss:3.589885 +step:8694 train loss:3.591879 +step:8695 train loss:3.576050 +step:8696 train loss:3.602797 +step:8697 train loss:3.554343 +step:8698 train loss:3.596182 +step:8699 train loss:3.546093 +step:8700 train loss:3.573618 +step:8701 train loss:3.532393 +step:8702 train loss:3.518067 +step:8703 train loss:3.535827 +step:8704 train loss:3.488272 +step:8705 train loss:3.570231 +step:8706 train loss:3.592457 +step:8707 train loss:3.590152 +step:8708 train loss:3.535453 +step:8709 train loss:3.599784 +step:8710 train loss:3.524677 +step:8711 train loss:3.580467 +step:8712 train loss:3.482393 +step:8713 train loss:3.560829 +step:8714 train loss:3.665217 +step:8715 train loss:3.523456 +step:8716 train loss:3.583779 +step:8717 train loss:3.550592 +step:8718 train loss:3.588702 +step:8719 train loss:3.563199 +step:8720 train loss:3.670913 +step:8721 train loss:3.561334 +step:8722 train loss:3.657281 +step:8723 train loss:3.523858 +step:8724 train loss:3.531874 +step:8725 train loss:3.563052 +step:8726 train loss:3.519318 +step:8727 train loss:3.594209 +step:8728 train loss:3.554021 +step:8729 train loss:3.555712 +step:8730 train loss:3.536416 +step:8731 train loss:3.537231 +step:8732 train loss:3.639560 +step:8733 train loss:3.562296 +step:8734 train loss:3.598136 +step:8735 train 
loss:3.668395 +step:8736 train loss:3.527579 +step:8737 train loss:3.554838 +step:8738 train loss:3.534801 +step:8739 train loss:3.594462 +step:8740 train loss:3.517317 +step:8741 train loss:3.570322 +step:8742 train loss:3.527611 +step:8743 train loss:3.562399 +step:8744 train loss:3.587092 +step:8745 train loss:3.628028 +step:8746 train loss:3.526653 +step:8747 train loss:3.628952 +step:8748 train loss:3.538199 +step:8749 train loss:3.571138 +step:8750 validation loss:3.493731 +step:8750 train loss:3.588054 +step:8751 train loss:3.624912 +step:8752 train loss:3.483195 +step:8753 train loss:3.531736 +step:8754 train loss:3.587889 +step:8755 train loss:3.565186 +step:8756 train loss:3.611063 +step:8757 train loss:3.523222 +step:8758 train loss:3.677920 +step:8759 train loss:3.525550 +step:8760 train loss:3.556408 +step:8761 train loss:3.632963 +step:8762 train loss:3.532735 +step:8763 train loss:3.505037 +step:8764 train loss:3.575701 +step:8765 train loss:3.650308 +step:8766 train loss:3.577732 +step:8767 train loss:3.533809 +step:8768 train loss:3.573889 +step:8769 train loss:3.549114 +step:8770 train loss:3.594542 +step:8771 train loss:3.567310 +step:8772 train loss:3.587771 +step:8773 train loss:3.546600 +step:8774 train loss:3.578010 +step:8775 train loss:3.576614 +step:8776 train loss:3.522120 +step:8777 train loss:3.560601 +step:8778 train loss:3.566164 +step:8779 train loss:3.584875 +step:8780 train loss:3.556734 +step:8781 train loss:3.559462 +step:8782 train loss:3.581007 +step:8783 train loss:3.561460 +step:8784 train loss:3.586475 +step:8785 train loss:3.570947 +step:8786 train loss:3.644468 +step:8787 train loss:3.592389 +step:8788 train loss:3.490678 +step:8789 train loss:3.590307 +step:8790 train loss:3.518766 +step:8791 train loss:3.571243 +step:8792 train loss:3.508609 +step:8793 train loss:3.601123 +step:8794 train loss:3.526499 +step:8795 train loss:3.594742 +step:8796 train loss:3.743107 +step:8797 train loss:3.486283 +step:8798 train loss:3.648580 +step:8799 train loss:3.561624 +step:8800 train loss:3.553771 +step:8801 train loss:3.577016 +step:8802 train loss:3.633538 +step:8803 train loss:3.588114 +step:8804 train loss:3.569553 +step:8805 train loss:3.588050 +step:8806 train loss:3.558953 +step:8807 train loss:3.547800 +step:8808 train loss:3.502480 +step:8809 train loss:3.627957 +step:8810 train loss:3.532983 +step:8811 train loss:3.520695 +step:8812 train loss:3.566330 +step:8813 train loss:3.477703 +step:8814 train loss:3.665193 +step:8815 train loss:3.509396 +step:8816 train loss:3.624247 +step:8817 train loss:3.562091 +step:8818 train loss:3.492907 +step:8819 train loss:3.612324 +step:8820 train loss:3.543726 +step:8821 train loss:3.567438 +step:8822 train loss:3.546394 +step:8823 train loss:3.559864 +step:8824 train loss:3.621856 +step:8825 train loss:3.597091 +step:8826 train loss:3.571914 +step:8827 train loss:3.530151 +step:8828 train loss:3.567408 +step:8829 train loss:3.554330 +step:8830 train loss:3.526942 +step:8831 train loss:3.602145 +step:8832 train loss:3.541554 +step:8833 train loss:3.575042 +step:8834 train loss:3.545445 +step:8835 train loss:3.479022 +step:8836 train loss:3.606560 +step:8837 train loss:3.512787 +step:8838 train loss:3.556843 +step:8839 train loss:3.539611 +step:8840 train loss:3.543731 +step:8841 train loss:3.559322 +step:8842 train loss:3.569715 +step:8843 train loss:3.578498 +step:8844 train loss:3.549572 +step:8845 train loss:3.564094 +step:8846 train loss:3.531955 +step:8847 train loss:3.570715 +step:8848 train loss:3.619868 
+step:8849 train loss:3.593802 +step:8850 train loss:3.587641 +step:8851 train loss:3.472381 +step:8852 train loss:3.578257 +step:8853 train loss:3.558228 +step:8854 train loss:3.526596 +step:8855 train loss:3.602960 +step:8856 train loss:3.594667 +step:8857 train loss:3.660422 +step:8858 train loss:3.523234 +step:8859 train loss:3.592020 +step:8860 train loss:3.554175 +step:8861 train loss:3.533640 +step:8862 train loss:3.536131 +step:8863 train loss:3.519728 +step:8864 train loss:3.583534 +step:8865 train loss:3.580561 +step:8866 train loss:3.456126 +step:8867 train loss:3.567888 +step:8868 train loss:3.590326 +step:8869 train loss:3.672174 +step:8870 train loss:3.553720 +step:8871 train loss:3.578819 +step:8872 train loss:3.563432 +step:8873 train loss:3.562681 +step:8874 train loss:3.615051 +step:8875 train loss:3.548432 +step:8876 train loss:3.586433 +step:8877 train loss:3.569239 +step:8878 train loss:3.619558 +step:8879 train loss:3.577949 +step:8880 train loss:3.527240 +step:8881 train loss:3.490382 +step:8882 train loss:3.563980 +step:8883 train loss:3.549402 +step:8884 train loss:3.636799 +step:8885 train loss:3.572450 +step:8886 train loss:3.576147 +step:8887 train loss:3.601044 +step:8888 train loss:3.557422 +step:8889 train loss:3.564981 +step:8890 train loss:3.557636 +step:8891 train loss:3.529437 +step:8892 train loss:3.611406 +step:8893 train loss:3.552226 +step:8894 train loss:3.566132 +step:8895 train loss:3.599540 +step:8896 train loss:3.512822 +step:8897 train loss:3.604635 +step:8898 train loss:3.540049 +step:8899 train loss:3.565596 +step:8900 train loss:3.528365 +step:8901 train loss:3.543553 +step:8902 train loss:3.582279 +step:8903 train loss:3.525368 +step:8904 train loss:3.575919 +step:8905 train loss:3.548717 +step:8906 train loss:3.539765 +step:8907 train loss:3.556737 +step:8908 train loss:3.613135 +step:8909 train loss:3.560462 +step:8910 train loss:3.518120 +step:8911 train loss:3.621540 +step:8912 train loss:3.521490 +step:8913 train loss:3.530035 +step:8914 train loss:3.630777 +step:8915 train loss:3.564629 +step:8916 train loss:3.595952 +step:8917 train loss:3.551556 +step:8918 train loss:3.556686 +step:8919 train loss:3.545347 +step:8920 train loss:3.574534 +step:8921 train loss:3.567357 +step:8922 train loss:3.545964 +step:8923 train loss:3.736470 +step:8924 train loss:3.634173 +step:8925 train loss:3.562998 +step:8926 train loss:3.570760 +step:8927 train loss:3.603723 +step:8928 train loss:3.555416 +step:8929 train loss:3.549439 +step:8930 train loss:3.607707 +step:8931 train loss:3.518783 +step:8932 train loss:3.621450 +step:8933 train loss:3.527163 +step:8934 train loss:3.562247 +step:8935 train loss:3.578104 +step:8936 train loss:3.611008 +step:8937 train loss:3.614190 +step:8938 train loss:3.552404 +step:8939 train loss:3.616084 +step:8940 train loss:3.569863 +step:8941 train loss:3.515305 +step:8942 train loss:3.593966 +step:8943 train loss:3.525032 +step:8944 train loss:3.571842 +step:8945 train loss:3.588459 +step:8946 train loss:3.440184 +step:8947 train loss:3.627589 +step:8948 train loss:3.472827 +step:8949 train loss:3.478640 +step:8950 train loss:3.523293 +step:8951 train loss:3.559588 +step:8952 train loss:3.581416 +step:8953 train loss:3.534879 +step:8954 train loss:3.639975 +step:8955 train loss:3.553718 +step:8956 train loss:3.580465 +step:8957 train loss:3.573626 +step:8958 train loss:3.548581 +step:8959 train loss:3.538727 +step:8960 train loss:3.506844 +step:8961 train loss:3.529745 +step:8962 train loss:3.584718 +step:8963 train 
loss:3.559298 +step:8964 train loss:3.545442 +step:8965 train loss:3.586006 +step:8966 train loss:3.544909 +step:8967 train loss:3.520826 +step:8968 train loss:3.507926 +step:8969 train loss:3.494213 +step:8970 train loss:3.577080 +step:8971 train loss:3.525306 +step:8972 train loss:3.723642 +step:8973 train loss:3.609759 +step:8974 train loss:3.567041 +step:8975 train loss:3.572852 +step:8976 train loss:3.536381 +step:8977 train loss:3.626120 +step:8978 train loss:3.606283 +step:8979 train loss:3.524697 +step:8980 train loss:3.619544 +step:8981 train loss:3.568020 +step:8982 train loss:3.541832 +step:8983 train loss:3.487565 +step:8984 train loss:3.610350 +step:8985 train loss:3.529620 +step:8986 train loss:3.567799 +step:8987 train loss:3.538089 +step:8988 train loss:3.590099 +step:8989 train loss:3.498015 +step:8990 train loss:3.643405 +step:8991 train loss:3.491841 +step:8992 train loss:3.551769 +step:8993 train loss:3.640131 +step:8994 train loss:3.545439 +step:8995 train loss:3.572530 +step:8996 train loss:3.540626 +step:8997 train loss:3.488692 +step:8998 train loss:3.493289 +step:8999 train loss:3.515594 +step:9000 validation loss:3.494586 total_sharp:2.1196e-05 L1_sharp:2.7509e-06 L2_sharp:1.6234e-08 L3_sharp:2.0787e-06 L4_sharp:1.8515e-06 L5_sharp:1.2622e-06 L6_sharp:8.8010e-07 L7_sharp:1.3077e-06 L8_sharp:2.7434e-06 L9_sharp:3.9985e-06 L10_sharp:6.5407e-06 L11_sharp:4.4996e-06 L12_sharp:4.6120e-05 total_fnorm:4.0417e+01 total_l1_linf:3.4755e+05 total_spectral:4.0417e+01 L1_fnorm:9.2207e+00 L2_fnorm:7.5975e+00 L3_fnorm:8.2831e+00 L4_fnorm:8.8270e+00 L5_fnorm:9.3664e+00 L6_fnorm:9.3641e+00 L7_fnorm:9.9649e+00 L8_fnorm:9.9353e+00 L9_fnorm:9.9635e+00 L10_fnorm:9.8645e+00 L11_fnorm:1.0013e+01 L12_fnorm:9.2983e+00 L1_l1linf:8.6235e+00 L2_l1linf:5.6153e+00 L3_l1linf:6.8754e+00 L4_l1linf:7.8086e+00 L5_l1linf:9.5872e+00 L6_l1linf:9.4091e+00 L7_l1linf:1.0306e+01 L8_l1linf:1.0557e+01 L9_l1linf:1.1150e+01 L10_l1linf:1.1585e+01 L11_l1linf:1.1394e+01 L12_l1linf:1.3237e+01 L1_spectral:1.2809e+00 L2_spectral:9.5137e-01 L3_spectral:9.1355e-01 L4_spectral:8.7819e-01 L5_spectral:9.5118e-01 L6_spectral:1.0439e+00 L7_spectral:9.7016e-01 L8_spectral:1.2153e+00 L9_spectral:1.4302e+00 L10_spectral:1.6157e+00 L11_spectral:1.5273e+00 L12_spectral:1.8540e+00 ip_v_neg_g:2.1307e-02 cos_v_neg_g:1.2617e-03 v_norm:4.0417e+01 g_norm:4.1785e-01 hv_norm:2.6975e-01 cos_v_hv:3.1758e-03 hg_norm:4.9691e+00 cos_g_hg:6.0664e-01 v_par:1.1502e-02 v_perp:4.0417e+01 L1_cos_v_neg_g:9.9053e-03 L1_v_norm:9.2207e+00 L2_cos_v_neg_g:5.7936e-03 L2_v_norm:7.5975e+00 L3_cos_v_neg_g:5.9550e-03 L3_v_norm:8.2831e+00 L4_cos_v_neg_g:5.1320e-03 L4_v_norm:8.8270e+00 L5_cos_v_neg_g:4.4983e-03 L5_v_norm:9.3664e+00 L6_cos_v_neg_g:6.3261e-03 L6_v_norm:9.3641e+00 L7_cos_v_neg_g:4.9243e-03 L7_v_norm:9.9649e+00 L8_cos_v_neg_g:7.0033e-03 L8_v_norm:9.9353e+00 L9_cos_v_neg_g:9.7197e-03 L9_v_norm:9.9635e+00 L10_cos_v_neg_g:1.2437e-02 L10_v_norm:9.8645e+00 L11_cos_v_neg_g:1.4194e-02 L11_v_norm:1.0013e+01 L12_cos_v_neg_g:2.4243e-02 L12_v_norm:9.2983e+00 +step:9000 train loss:3.605608 +step:9001 train loss:3.570214 +step:9002 train loss:3.575432 +step:9003 train loss:3.519788 +step:9004 train loss:3.516936 +step:9005 train loss:3.529169 +step:9006 train loss:3.533454 +step:9007 train loss:3.551896 +step:9008 train loss:3.508771 +step:9009 train loss:3.503004 +step:9010 train loss:3.542220 +step:9011 train loss:3.536091 +step:9012 train loss:3.651884 +step:9013 train loss:3.476840 +step:9014 train loss:3.547527 +step:9015 train loss:3.551192 +step:9016 
train loss:3.625867 +step:9017 train loss:3.565490 +step:9018 train loss:3.491518 +step:9019 train loss:3.572962 +step:9020 train loss:3.581951 +step:9021 train loss:3.541767 +step:9022 train loss:3.553658 +step:9023 train loss:3.550145 +step:9024 train loss:3.568548 +step:9025 train loss:3.553346 +step:9026 train loss:3.514163 +step:9027 train loss:3.560462 +step:9028 train loss:3.577487 +step:9029 train loss:3.596636 +step:9030 train loss:3.595935 +step:9031 train loss:3.557283 +step:9032 train loss:3.568712 +step:9033 train loss:3.554187 +step:9034 train loss:3.564404 +step:9035 train loss:3.568146 +step:9036 train loss:3.517080 +step:9037 train loss:3.510174 +step:9038 train loss:3.636211 +step:9039 train loss:3.537884 +step:9040 train loss:3.555518 +step:9041 train loss:3.602977 +step:9042 train loss:3.458569 +step:9043 train loss:3.553077 +step:9044 train loss:3.571331 +step:9045 train loss:3.516133 +step:9046 train loss:3.558599 +step:9047 train loss:3.552727 +step:9048 train loss:3.532562 +step:9049 train loss:3.568333 +step:9050 train loss:3.518852 +step:9051 train loss:3.563175 +step:9052 train loss:3.488687 +step:9053 train loss:3.614117 +step:9054 train loss:3.624896 +step:9055 train loss:3.549120 +step:9056 train loss:3.615740 +step:9057 train loss:3.467359 +step:9058 train loss:3.549128 +step:9059 train loss:3.629852 +step:9060 train loss:3.560249 +step:9061 train loss:3.586971 +step:9062 train loss:3.517986 +step:9063 train loss:3.646203 +step:9064 train loss:3.537440 +step:9065 train loss:3.545579 +step:9066 train loss:3.562772 +step:9067 train loss:3.528593 +step:9068 train loss:3.602152 +step:9069 train loss:3.561310 +step:9070 train loss:3.608682 +step:9071 train loss:3.541066 +step:9072 train loss:3.565338 +step:9073 train loss:3.525669 +step:9074 train loss:3.603641 +step:9075 train loss:3.551402 +step:9076 train loss:3.517158 +step:9077 train loss:3.595744 +step:9078 train loss:3.533193 +step:9079 train loss:3.577760 +step:9080 train loss:3.510285 +step:9081 train loss:3.549054 +step:9082 train loss:3.574997 +step:9083 train loss:3.606252 +step:9084 train loss:3.495372 +step:9085 train loss:3.566947 +step:9086 train loss:3.553972 +step:9087 train loss:3.496948 +step:9088 train loss:3.558953 +step:9089 train loss:3.576414 +step:9090 train loss:3.509065 +step:9091 train loss:3.609326 +step:9092 train loss:3.539078 +step:9093 train loss:3.528850 +step:9094 train loss:3.660510 +step:9095 train loss:3.525147 +step:9096 train loss:3.542382 +step:9097 train loss:3.536145 +step:9098 train loss:3.519933 +step:9099 train loss:3.648772 +step:9100 train loss:3.679789 +step:9101 train loss:3.599618 +step:9102 train loss:3.540311 +step:9103 train loss:3.544196 +step:9104 train loss:3.631873 +step:9105 train loss:3.495423 +step:9106 train loss:3.620557 +step:9107 train loss:3.555574 +step:9108 train loss:3.537890 +step:9109 train loss:3.562335 +step:9110 train loss:3.567877 +step:9111 train loss:3.542439 +step:9112 train loss:3.547549 +step:9113 train loss:3.576274 +step:9114 train loss:3.525296 +step:9115 train loss:3.556927 +step:9116 train loss:3.578719 +step:9117 train loss:3.588506 +step:9118 train loss:3.560532 +step:9119 train loss:3.483685 +step:9120 train loss:3.578029 +step:9121 train loss:3.613104 +step:9122 train loss:3.556115 +step:9123 train loss:3.575849 +step:9124 train loss:3.606596 +step:9125 train loss:3.554495 +step:9126 train loss:3.535008 +step:9127 train loss:3.566779 +step:9128 train loss:3.621430 +step:9129 train loss:3.574868 +step:9130 train loss:3.587132 
+step:9131 train loss:3.566955 +step:9132 train loss:3.575298 +step:9133 train loss:3.563651 +step:9134 train loss:3.538136 +step:9135 train loss:3.563595 +step:9136 train loss:3.560047 +step:9137 train loss:3.616196 +step:9138 train loss:3.536122 +step:9139 train loss:3.607418 +step:9140 train loss:3.530542 +step:9141 train loss:3.510985 +step:9142 train loss:3.686079 +step:9143 train loss:3.516791 +step:9144 train loss:3.609920 +step:9145 train loss:3.617191 +step:9146 train loss:3.529926 +step:9147 train loss:3.601497 +step:9148 train loss:3.623446 +step:9149 train loss:3.533228 +step:9150 train loss:3.555931 +step:9151 train loss:3.617483 +step:9152 train loss:3.574454 +step:9153 train loss:3.538331 +step:9154 train loss:3.552496 +step:9155 train loss:3.518052 +step:9156 train loss:3.521858 +step:9157 train loss:3.538006 +step:9158 train loss:3.523423 +step:9159 train loss:3.607656 +step:9160 train loss:3.491791 +step:9161 train loss:3.519148 +step:9162 train loss:3.607077 +step:9163 train loss:3.552005 +step:9164 train loss:3.522436 +step:9165 train loss:3.520211 +step:9166 train loss:3.574095 +step:9167 train loss:3.516428 +step:9168 train loss:3.562752 +step:9169 train loss:3.498054 +step:9170 train loss:3.518846 +step:9171 train loss:3.584360 +step:9172 train loss:3.510550 +step:9173 train loss:3.625864 +step:9174 train loss:3.557861 +step:9175 train loss:3.535379 +step:9176 train loss:3.520212 +step:9177 train loss:3.565690 +step:9178 train loss:3.510399 +step:9179 train loss:3.469368 +step:9180 train loss:3.565629 +step:9181 train loss:3.573674 +step:9182 train loss:3.544484 +step:9183 train loss:3.552144 +step:9184 train loss:3.544159 +step:9185 train loss:3.564332 +step:9186 train loss:3.525871 +step:9187 train loss:3.596923 +step:9188 train loss:3.634636 +step:9189 train loss:3.554133 +step:9190 train loss:3.559249 +step:9191 train loss:3.552055 +step:9192 train loss:3.561386 +step:9193 train loss:3.566944 +step:9194 train loss:3.503543 +step:9195 train loss:3.494289 +step:9196 train loss:3.544564 +step:9197 train loss:3.502678 +step:9198 train loss:3.583731 +step:9199 train loss:3.530038 +step:9200 train loss:3.550847 +step:9201 train loss:3.588447 +step:9202 train loss:3.575673 +step:9203 train loss:3.531587 +step:9204 train loss:3.727958 +step:9205 train loss:3.641647 +step:9206 train loss:3.554969 +step:9207 train loss:3.607459 +step:9208 train loss:3.584589 +step:9209 train loss:3.607175 +step:9210 train loss:3.497205 +step:9211 train loss:3.527690 +step:9212 train loss:3.525179 +step:9213 train loss:3.588830 +step:9214 train loss:3.528750 +step:9215 train loss:3.594532 +step:9216 train loss:3.556424 +step:9217 train loss:3.498147 +step:9218 train loss:3.586725 +step:9219 train loss:3.550810 +step:9220 train loss:3.591722 +step:9221 train loss:3.646587 +step:9222 train loss:3.589533 +step:9223 train loss:3.758497 +step:9224 train loss:3.596085 +step:9225 train loss:3.524635 +step:9226 train loss:3.542604 +step:9227 train loss:3.563193 +step:9228 train loss:3.561209 +step:9229 train loss:3.521870 +step:9230 train loss:3.585842 +step:9231 train loss:3.466591 +step:9232 train loss:3.525129 +step:9233 train loss:3.550077 +step:9234 train loss:3.608888 +step:9235 train loss:3.608372 +step:9236 train loss:3.512741 +step:9237 train loss:3.577020 +step:9238 train loss:3.549139 +step:9239 train loss:3.541604 +step:9240 train loss:3.511804 +step:9241 train loss:3.542770 +step:9242 train loss:3.550591 +step:9243 train loss:3.549716 +step:9244 train loss:3.525803 +step:9245 train 
loss:3.530627 +step:9246 train loss:3.527676 +step:9247 train loss:3.540700 +step:9248 train loss:3.548243 +step:9249 train loss:3.547374 +step:9250 validation loss:3.487851 +step:9250 train loss:3.593195 +step:9251 train loss:3.534225 +step:9252 train loss:3.602329 +step:9253 train loss:3.595842 +step:9254 train loss:3.524086 +step:9255 train loss:3.638683 +step:9256 train loss:3.521825 +step:9257 train loss:3.462379 +step:9258 train loss:3.543214 +step:9259 train loss:3.548456 +step:9260 train loss:3.642631 +step:9261 train loss:3.521500 +step:9262 train loss:3.593828 +step:9263 train loss:3.491284 +step:9264 train loss:3.647004 +step:9265 train loss:3.666909 +step:9266 train loss:3.600577 +step:9267 train loss:3.547721 +step:9268 train loss:3.543466 +step:9269 train loss:3.567727 +step:9270 train loss:3.488502 +step:9271 train loss:3.602667 +step:9272 train loss:3.543273 +step:9273 train loss:3.559551 +step:9274 train loss:3.563315 +step:9275 train loss:3.560952 +step:9276 train loss:3.590858 +step:9277 train loss:3.563125 +step:9278 train loss:3.574653 +step:9279 train loss:3.568008 +step:9280 train loss:3.567711 +step:9281 train loss:3.545962 +step:9282 train loss:3.659457 +step:9283 train loss:3.550514 +step:9284 train loss:3.509672 +step:9285 train loss:3.531780 +step:9286 train loss:3.582791 +step:9287 train loss:3.558431 +step:9288 train loss:3.560257 +step:9289 train loss:3.530534 +step:9290 train loss:3.561078 +step:9291 train loss:3.541463 +step:9292 train loss:3.591971 +step:9293 train loss:3.634488 +step:9294 train loss:3.555755 +step:9295 train loss:3.540983 +step:9296 train loss:3.498124 +step:9297 train loss:3.564005 +step:9298 train loss:3.502765 +step:9299 train loss:3.484253 +step:9300 train loss:3.588516 +step:9301 train loss:3.617733 +step:9302 train loss:3.558805 +step:9303 train loss:3.607793 +step:9304 train loss:3.527425 +step:9305 train loss:3.521518 +step:9306 train loss:3.521059 +step:9307 train loss:3.521966 +step:9308 train loss:3.494643 +step:9309 train loss:3.481911 +step:9310 train loss:3.541647 +step:9311 train loss:3.601627 +step:9312 train loss:3.554075 +step:9313 train loss:3.495666 +step:9314 train loss:3.527788 +step:9315 train loss:3.560483 +step:9316 train loss:3.546535 +step:9317 train loss:3.520317 +step:9318 train loss:3.607555 +step:9319 train loss:3.517467 +step:9320 train loss:3.535263 +step:9321 train loss:3.551946 +step:9322 train loss:3.557811 +step:9323 train loss:3.632711 +step:9324 train loss:3.576781 +step:9325 train loss:3.519286 +step:9326 train loss:3.592981 +step:9327 train loss:3.591009 +step:9328 train loss:3.591657 +step:9329 train loss:3.480887 +step:9330 train loss:3.646314 +step:9331 train loss:3.579077 +step:9332 train loss:3.599048 +step:9333 train loss:3.619825 +step:9334 train loss:3.556388 +step:9335 train loss:3.647359 +step:9336 train loss:3.607658 +step:9337 train loss:3.563060 +step:9338 train loss:3.613103 +step:9339 train loss:3.593808 +step:9340 train loss:3.551692 +step:9341 train loss:3.643339 +step:9342 train loss:3.539291 +step:9343 train loss:3.531981 +step:9344 train loss:3.534641 +step:9345 train loss:3.679212 +step:9346 train loss:3.509472 +step:9347 train loss:3.531238 +step:9348 train loss:3.553549 +step:9349 train loss:3.499754 +step:9350 train loss:3.578317 +step:9351 train loss:3.552934 +step:9352 train loss:3.538681 +step:9353 train loss:3.570160 +step:9354 train loss:3.537201 +step:9355 train loss:3.530250 +step:9356 train loss:3.574414 +step:9357 train loss:3.531454 +step:9358 train loss:3.563083 
+step:9359 train loss:3.503724 +step:9360 train loss:3.525226 +step:9361 train loss:3.521193 +step:9362 train loss:3.512424 +step:9363 train loss:3.575516 +step:9364 train loss:3.551820 +step:9365 train loss:3.556155 +step:9366 train loss:3.553361 +step:9367 train loss:3.565285 +step:9368 train loss:3.540170 +step:9369 train loss:3.537239 +step:9370 train loss:3.549777 +step:9371 train loss:3.565085 +step:9372 train loss:3.533849 +step:9373 train loss:3.516613 +step:9374 train loss:3.550188 +step:9375 train loss:3.565129 +step:9376 train loss:3.504888 +step:9377 train loss:3.577142 +step:9378 train loss:3.581591 +step:9379 train loss:3.605957 +step:9380 train loss:3.536222 +step:9381 train loss:3.548732 +step:9382 train loss:3.522991 +step:9383 train loss:3.516479 +step:9384 train loss:3.487138 +step:9385 train loss:3.560760 +step:9386 train loss:3.588379 +step:9387 train loss:3.565680 +step:9388 train loss:3.502958 +step:9389 train loss:3.520033 +step:9390 train loss:3.566973 +step:9391 train loss:3.571364 +step:9392 train loss:3.532213 +step:9393 train loss:3.527715 +step:9394 train loss:3.552140 +step:9395 train loss:3.547194 +step:9396 train loss:3.694595 +step:9397 train loss:3.584965 +step:9398 train loss:3.604209 +step:9399 train loss:3.558611 +step:9400 train loss:3.561693 +step:9401 train loss:3.554135 +step:9402 train loss:3.553455 +step:9403 train loss:3.486618 +step:9404 train loss:3.560944 +step:9405 train loss:3.522087 +step:9406 train loss:3.573531 +step:9407 train loss:3.518719 +step:9408 train loss:3.457381 +step:9409 train loss:3.520562 +step:9410 train loss:3.601137 +step:9411 train loss:3.563237 +step:9412 train loss:3.591177 +step:9413 train loss:3.612601 +step:9414 train loss:3.545849 +step:9415 train loss:3.538394 +step:9416 train loss:3.553926 +step:9417 train loss:3.505745 +step:9418 train loss:3.535772 +step:9419 train loss:3.506899 +step:9420 train loss:3.523310 +step:9421 train loss:3.568892 +step:9422 train loss:3.523965 +step:9423 train loss:3.583688 +step:9424 train loss:3.523299 +step:9425 train loss:3.569842 +step:9426 train loss:3.569128 +step:9427 train loss:3.543365 +step:9428 train loss:3.647406 +step:9429 train loss:3.539217 +step:9430 train loss:3.496222 +step:9431 train loss:3.589283 +step:9432 train loss:3.548908 +step:9433 train loss:3.591942 +step:9434 train loss:3.540852 +step:9435 train loss:3.570096 +step:9436 train loss:3.536773 +step:9437 train loss:3.550024 +step:9438 train loss:3.547303 +step:9439 train loss:3.545892 +step:9440 train loss:3.534944 +step:9441 train loss:3.550198 +step:9442 train loss:3.485697 +step:9443 train loss:3.540719 +step:9444 train loss:3.604685 +step:9445 train loss:3.538074 +step:9446 train loss:3.509482 +step:9447 train loss:3.581812 +step:9448 train loss:3.514575 +step:9449 train loss:3.540826 +step:9450 train loss:3.579831 +step:9451 train loss:3.498043 +step:9452 train loss:3.550414 +step:9453 train loss:3.527113 +step:9454 train loss:3.590233 +step:9455 train loss:3.572188 +step:9456 train loss:3.498497 +step:9457 train loss:3.543789 +step:9458 train loss:3.534682 +step:9459 train loss:3.524632 +step:9460 train loss:3.566074 +step:9461 train loss:3.596060 +step:9462 train loss:3.542509 +step:9463 train loss:3.570916 +step:9464 train loss:3.528890 +step:9465 train loss:3.616184 +step:9466 train loss:3.567618 +step:9467 train loss:3.590619 +step:9468 train loss:3.537490 +step:9469 train loss:3.524733 +step:9470 train loss:3.525967 +step:9471 train loss:3.566439 +step:9472 train loss:3.588315 +step:9473 train 
loss:3.577919 +step:9474 train loss:3.524386 +step:9475 train loss:3.514145 +step:9476 train loss:3.731900 +step:9477 train loss:3.605714 +step:9478 train loss:3.581887 +step:9479 train loss:3.680076 +step:9480 train loss:3.530865 +step:9481 train loss:3.560168 +step:9482 train loss:3.589349 +step:9483 train loss:3.545414 +step:9484 train loss:3.573080 +step:9485 train loss:3.495675 +step:9486 train loss:3.533813 +step:9487 train loss:3.565703 +step:9488 train loss:3.517592 +step:9489 train loss:3.565458 +step:9490 train loss:3.529193 +step:9491 train loss:3.572893 +step:9492 train loss:3.592607 +step:9493 train loss:3.565778 +step:9494 train loss:3.576110 +step:9495 train loss:3.526634 +step:9496 train loss:3.589528 +step:9497 train loss:3.602567 +step:9498 train loss:3.549931 +step:9499 train loss:3.600581 +step:9500 validation loss:3.485147 total_sharp:2.1628e-05 L1_sharp:1.9082e-06 L2_sharp:6.6149e-07 L3_sharp:1.4616e-06 L4_sharp:1.2795e-06 L5_sharp:8.7160e-07 L6_sharp:9.9366e-07 L7_sharp:1.3893e-06 L8_sharp:3.6200e-06 L9_sharp:4.4986e-06 L10_sharp:7.0718e-06 L11_sharp:4.3401e-06 L12_sharp:4.3049e-05 total_fnorm:4.0393e+01 total_l1_linf:3.4728e+05 total_spectral:4.0393e+01 L1_fnorm:9.3363e+00 L2_fnorm:7.7154e+00 L3_fnorm:8.3599e+00 L4_fnorm:8.8252e+00 L5_fnorm:9.3881e+00 L6_fnorm:9.4405e+00 L7_fnorm:9.9601e+00 L8_fnorm:9.9921e+00 L9_fnorm:9.9589e+00 L10_fnorm:9.7673e+00 L11_fnorm:9.9548e+00 L12_fnorm:9.2893e+00 L1_l1linf:8.8122e+00 L2_l1linf:5.7995e+00 L3_l1linf:6.8729e+00 L4_l1linf:8.0540e+00 L5_l1linf:8.9062e+00 L6_l1linf:9.1781e+00 L7_l1linf:9.7817e+00 L8_l1linf:1.1030e+01 L9_l1linf:1.1036e+01 L10_l1linf:1.2812e+01 L11_l1linf:1.2242e+01 L12_l1linf:1.1137e+01 L1_spectral:1.2959e+00 L2_spectral:1.1468e+00 L3_spectral:9.9001e-01 L4_spectral:8.9529e-01 L5_spectral:9.7653e-01 L6_spectral:1.1062e+00 L7_spectral:1.0880e+00 L8_spectral:1.3708e+00 L9_spectral:1.5297e+00 L10_spectral:1.6477e+00 L11_spectral:1.5632e+00 L12_spectral:1.8196e+00 ip_v_neg_g:1.3777e-02 cos_v_neg_g:9.7399e-04 v_norm:4.0393e+01 g_norm:3.5019e-01 hv_norm:1.9468e-01 cos_v_hv:4.4873e-03 hg_norm:2.6529e+00 cos_g_hg:5.4238e-01 v_par:1.0563e-02 v_perp:4.0393e+01 L1_cos_v_neg_g:5.4779e-03 L1_v_norm:9.3363e+00 L2_cos_v_neg_g:3.1117e-03 L2_v_norm:7.7154e+00 L3_cos_v_neg_g:2.9979e-03 L3_v_norm:8.3599e+00 L4_cos_v_neg_g:3.4510e-03 L4_v_norm:8.8252e+00 L5_cos_v_neg_g:2.8784e-03 L5_v_norm:9.3881e+00 L6_cos_v_neg_g:2.2946e-03 L6_v_norm:9.4405e+00 L7_cos_v_neg_g:2.8142e-03 L7_v_norm:9.9601e+00 L8_cos_v_neg_g:4.6601e-03 L8_v_norm:9.9921e+00 L9_cos_v_neg_g:7.0303e-03 L9_v_norm:9.9589e+00 L10_cos_v_neg_g:7.9982e-03 L10_v_norm:9.7673e+00 L11_cos_v_neg_g:9.7489e-03 L11_v_norm:9.9548e+00 L12_cos_v_neg_g:1.8356e-02 L12_v_norm:9.2893e+00 +step:9500 train loss:3.591532 +step:9501 train loss:3.568080 +step:9502 train loss:3.544722 +step:9503 train loss:3.557899 +step:9504 train loss:3.512784 +step:9505 train loss:3.536639 +step:9506 train loss:3.549683 +step:9507 train loss:3.537605 +step:9508 train loss:3.730375 +step:9509 train loss:3.548288 +step:9510 train loss:3.536272 +step:9511 train loss:3.562036 +step:9512 train loss:3.592281 +step:9513 train loss:3.584801 +step:9514 train loss:3.548048 +step:9515 train loss:3.450850 +step:9516 train loss:3.549278 +step:9517 train loss:3.589185 +step:9518 train loss:3.562316 +step:9519 train loss:3.572406 +step:9520 train loss:3.462053 +step:9521 train loss:3.454496 +step:9522 train loss:3.573101 +step:9523 train loss:3.569617 +step:9524 train loss:3.569187 +step:9525 train loss:3.616365 +step:9526 
train loss:3.628995 +step:9527 train loss:3.590402 +step:9528 train loss:3.517740 +step:9529 train loss:3.564354 +step:9530 train loss:3.612974 +step:9531 train loss:3.513998 +step:9532 train loss:3.565100 +step:9533 train loss:3.540109 +step:9534 train loss:3.621434 +step:9535 train loss:3.539648 +step:9536 train loss:3.522300 +step:9537 train loss:3.469007 +step:9538 train loss:3.485134 +step:9539 train loss:3.556021 +step:9540 train loss:3.476655 +step:9541 train loss:3.535405 +step:9542 train loss:3.660272 +step:9543 train loss:3.560540 +step:9544 train loss:3.598455 +step:9545 train loss:3.533345 +step:9546 train loss:3.557228 +step:9547 train loss:3.601015 +step:9548 train loss:3.542128 +step:9549 train loss:3.512307 +step:9550 train loss:3.545789 +step:9551 train loss:3.536466 +step:9552 train loss:3.560791 +step:9553 train loss:3.556840 +step:9554 train loss:3.604067 +step:9555 train loss:3.615519 +step:9556 train loss:3.513155 +step:9557 train loss:3.535942 +step:9558 train loss:3.598495 +step:9559 train loss:3.606104 +step:9560 train loss:3.515602 +step:9561 train loss:3.545809 +step:9562 train loss:3.584738 +step:9563 train loss:3.530781 +step:9564 train loss:3.562175 +step:9565 train loss:3.538249 +step:9566 train loss:3.516680 +step:9567 train loss:3.579683 +step:9568 train loss:3.551972 +step:9569 train loss:3.598302 +step:9570 train loss:3.487697 +step:9571 train loss:3.563456 +step:9572 train loss:3.508217 +step:9573 train loss:3.536083 +step:9574 train loss:3.512343 +step:9575 train loss:3.586058 +step:9576 train loss:3.477823 +step:9577 train loss:3.526801 +step:9578 train loss:3.530760 +step:9579 train loss:3.527598 +step:9580 train loss:3.593294 +step:9581 train loss:3.583773 +step:9582 train loss:3.560317 +step:9583 train loss:3.579472 +step:9584 train loss:3.517959 +step:9585 train loss:3.532285 +step:9586 train loss:3.587042 +step:9587 train loss:3.562196 +step:9588 train loss:3.542142 +step:9589 train loss:3.599600 +step:9590 train loss:3.567030 +step:9591 train loss:3.532629 +step:9592 train loss:3.553386 +step:9593 train loss:3.552160 +step:9594 train loss:3.571762 +step:9595 train loss:3.546711 +step:9596 train loss:3.631395 +step:9597 train loss:3.543154 +step:9598 train loss:3.503741 +step:9599 train loss:3.509280 +step:9600 train loss:3.593697 +step:9601 train loss:3.511355 +step:9602 train loss:3.593901 +step:9603 train loss:3.588789 +step:9604 train loss:3.473583 +step:9605 train loss:3.557003 +step:9606 train loss:3.613559 +step:9607 train loss:3.534252 +step:9608 train loss:3.538722 +step:9609 train loss:3.548184 +step:9610 train loss:3.591068 +step:9611 train loss:3.523450 +step:9612 train loss:3.530692 +step:9613 train loss:3.571250 +step:9614 train loss:3.543628 +step:9615 train loss:3.729052 +step:9616 train loss:3.540750 +step:9617 train loss:3.532329 +step:9618 train loss:3.489394 +step:9619 train loss:3.552500 +step:9620 train loss:3.609593 +step:9621 train loss:3.530253 +step:9622 train loss:3.538979 +step:9623 train loss:3.585172 +step:9624 train loss:3.567450 +step:9625 train loss:3.585040 +step:9626 train loss:3.552084 +step:9627 train loss:3.632580 +step:9628 train loss:3.597917 +step:9629 train loss:3.512072 +step:9630 train loss:3.569437 +step:9631 train loss:3.557299 +step:9632 train loss:3.523074 +step:9633 train loss:3.564846 +step:9634 train loss:3.633864 +step:9635 train loss:3.537333 +step:9636 train loss:3.480267 +step:9637 train loss:3.616990 +step:9638 train loss:3.501615 +step:9639 train loss:3.470016 +step:9640 train loss:3.594271 
+step:9641 train loss:3.564914 +step:9642 train loss:3.540705 +step:9643 train loss:3.544616 +step:9644 train loss:3.602858 +step:9645 train loss:3.527441 +step:9646 train loss:3.567236 +step:9647 train loss:3.573073 +step:9648 train loss:3.524545 +step:9649 train loss:3.497437 +step:9650 train loss:3.513970 +step:9651 train loss:3.605566 +step:9652 train loss:3.586357 +step:9653 train loss:3.525551 +step:9654 train loss:3.510856 +step:9655 train loss:3.506915 +step:9656 train loss:3.500770 +step:9657 train loss:3.527194 +step:9658 train loss:3.586226 +step:9659 train loss:3.689448 +step:9660 train loss:3.473543 +step:9661 train loss:3.498936 +step:9662 train loss:3.516757 +step:9663 train loss:3.556763 +step:9664 train loss:3.607996 +step:9665 train loss:3.451475 +step:9666 train loss:3.493818 +step:9667 train loss:3.626631 +step:9668 train loss:3.608297 +step:9669 train loss:3.627534 +step:9670 train loss:3.607320 +step:9671 train loss:3.607718 +step:9672 train loss:3.522209 +step:9673 train loss:3.542124 +step:9674 train loss:3.553051 +step:9675 train loss:3.551031 +step:9676 train loss:3.509718 +step:9677 train loss:3.520505 +step:9678 train loss:3.553923 +step:9679 train loss:3.541928 +step:9680 train loss:3.541343 +step:9681 train loss:3.529095 +step:9682 train loss:3.594409 +step:9683 train loss:3.570075 +step:9684 train loss:3.486446 +step:9685 train loss:3.569228 +step:9686 train loss:3.604542 +step:9687 train loss:3.508479 +step:9688 train loss:3.597147 +step:9689 train loss:3.696975 +step:9690 train loss:3.539711 +step:9691 train loss:3.527606 +step:9692 train loss:3.491812 +step:9693 train loss:3.489616 +step:9694 train loss:3.515317 +step:9695 train loss:3.617708 +step:9696 train loss:3.651132 +step:9697 train loss:3.563202 +step:9698 train loss:3.595042 +step:9699 train loss:3.553040 +step:9700 train loss:3.552473 +step:9701 train loss:3.615294 +step:9702 train loss:3.525667 +step:9703 train loss:3.545169 +step:9704 train loss:3.626939 +step:9705 train loss:3.526261 +step:9706 train loss:3.518103 +step:9707 train loss:3.568343 +step:9708 train loss:3.515043 +step:9709 train loss:3.537602 +step:9710 train loss:3.553963 +step:9711 train loss:3.529033 +step:9712 train loss:3.539296 +step:9713 train loss:3.591860 +step:9714 train loss:3.545492 +step:9715 train loss:3.567970 +step:9716 train loss:3.588305 +step:9717 train loss:3.510044 +step:9718 train loss:3.516635 +step:9719 train loss:3.599510 +step:9720 train loss:3.534339 +step:9721 train loss:3.519013 +step:9722 train loss:3.590889 +step:9723 train loss:3.530359 +step:9724 train loss:3.562228 +step:9725 train loss:3.614323 +step:9726 train loss:3.556922 +step:9727 train loss:3.532862 +step:9728 train loss:3.570645 +step:9729 train loss:3.595417 +step:9730 train loss:3.667016 +step:9731 train loss:3.588747 +step:9732 train loss:3.548565 +step:9733 train loss:3.590602 +step:9734 train loss:3.511215 +step:9735 train loss:3.621256 +step:9736 train loss:3.520431 +step:9737 train loss:3.579749 +step:9738 train loss:3.544444 +step:9739 train loss:3.618668 +step:9740 train loss:3.582883 +step:9741 train loss:3.520276 +step:9742 train loss:3.616325 +step:9743 train loss:3.490268 +step:9744 train loss:3.548856 +step:9745 train loss:3.505961 +step:9746 train loss:3.543614 +step:9747 train loss:3.534521 +step:9748 train loss:3.442377 +step:9749 train loss:3.535642 +step:9750 validation loss:3.479465 +step:9750 train loss:3.512949 +step:9751 train loss:3.656320 +step:9752 train loss:3.540549 +step:9753 train loss:3.501894 +step:9754 
train loss:3.530671 +step:9755 train loss:3.526302 +step:9756 train loss:3.527323 +step:9757 train loss:3.496928 +step:9758 train loss:3.485706 +step:9759 train loss:3.532848 +step:9760 train loss:3.476621 +step:9761 train loss:3.519310 +step:9762 train loss:3.513660 +step:9763 train loss:3.536226 +step:9764 train loss:3.518420 +step:9765 train loss:3.484860 +step:9766 train loss:3.573966 +step:9767 train loss:3.529172 +step:9768 train loss:3.542264 +step:9769 train loss:3.489854 +step:9770 train loss:3.493562 +step:9771 train loss:3.545084 +step:9772 train loss:3.556182 +step:9773 train loss:3.534781 +step:9774 train loss:3.506349 +step:9775 train loss:3.591341 +step:9776 train loss:3.591090 +step:9777 train loss:3.484177 +step:9778 train loss:3.490547 +step:9779 train loss:3.496841 +step:9780 train loss:3.492061 +step:9781 train loss:3.510054 +step:9782 train loss:3.589521 +step:9783 train loss:3.500659 +step:9784 train loss:3.530800 +step:9785 train loss:3.518234 +step:9786 train loss:3.555913 +step:9787 train loss:3.576915 +step:9788 train loss:3.507207 +step:9789 train loss:3.514915 +step:9790 train loss:3.475717 +step:9791 train loss:3.521698 +step:9792 train loss:3.541055 +step:9793 train loss:3.554524 +step:9794 train loss:3.531699 +step:9795 train loss:3.535991 +step:9796 train loss:3.522833 +step:9797 train loss:3.517035 +step:9798 train loss:3.531425 +step:9799 train loss:3.536282 +step:9800 train loss:3.604411 +step:9801 train loss:3.529734 +step:9802 train loss:3.588084 +step:9803 train loss:3.443767 +step:9804 train loss:3.540794 +step:9805 train loss:3.543880 +step:9806 train loss:3.521175 +step:9807 train loss:3.491739 +step:9808 train loss:3.403112 +step:9809 train loss:3.595383 +step:9810 train loss:3.549481 +step:9811 train loss:3.534825 +step:9812 train loss:3.509873 +step:9813 train loss:3.591716 +step:9814 train loss:3.581376 +step:9815 train loss:3.484078 +step:9816 train loss:3.487857 +step:9817 train loss:3.517453 +step:9818 train loss:3.540855 +step:9819 train loss:3.518467 +step:9820 train loss:3.582240 +step:9821 train loss:3.559263 +step:9822 train loss:3.538732 +step:9823 train loss:3.596116 +step:9824 train loss:3.501960 +step:9825 train loss:3.583116 +step:9826 train loss:3.582601 +step:9827 train loss:3.586116 +step:9828 train loss:3.503762 +step:9829 train loss:3.513001 +step:9830 train loss:3.495929 +step:9831 train loss:3.560361 +step:9832 train loss:3.572535 +step:9833 train loss:3.482858 +step:9834 train loss:3.535336 +step:9835 train loss:3.500810 +step:9836 train loss:3.565011 +step:9837 train loss:3.536524 +step:9838 train loss:3.575065 +step:9839 train loss:3.550889 +step:9840 train loss:3.515040 +step:9841 train loss:3.522694 +step:9842 train loss:3.583407 +step:9843 train loss:3.579330 +step:9844 train loss:3.527595 +step:9845 train loss:3.558795 +step:9846 train loss:3.490285 +step:9847 train loss:3.625406 +step:9848 train loss:3.548744 +step:9849 train loss:3.571757 +step:9850 train loss:3.489954 +step:9851 train loss:3.545991 +step:9852 train loss:3.508229 +step:9853 train loss:3.529919 +step:9854 train loss:3.538223 +step:9855 train loss:3.490228 +step:9856 train loss:3.494775 +step:9857 train loss:3.487442 +step:9858 train loss:3.546850 +step:9859 train loss:3.467415 +step:9860 train loss:3.705313 +step:9861 train loss:3.530257 +step:9862 train loss:3.496624 +step:9863 train loss:3.481516 +step:9864 train loss:3.606889 +step:9865 train loss:3.477226 +step:9866 train loss:3.522649 +step:9867 train loss:3.517181 +step:9868 train loss:3.576493 
+step:9869 train loss:3.536842 +step:9870 train loss:3.510449 +step:9871 train loss:3.552565 +step:9872 train loss:3.498416 +step:9873 train loss:3.547167 +step:9874 train loss:3.515471 +step:9875 train loss:3.516931 +step:9876 train loss:3.484868 +step:9877 train loss:3.531514 +step:9878 train loss:3.568004 +step:9879 train loss:3.564364 +step:9880 train loss:3.496959 +step:9881 train loss:3.546845 +step:9882 train loss:3.508513 +step:9883 train loss:3.517261 +step:9884 train loss:3.509975 +step:9885 train loss:3.576333 +step:9886 train loss:3.538997 +step:9887 train loss:3.545145 +step:9888 train loss:3.565761 +step:9889 train loss:3.601534 +step:9890 train loss:3.512331 +step:9891 train loss:3.518635 +step:9892 train loss:3.489170 +step:9893 train loss:3.608896 +step:9894 train loss:3.516507 +step:9895 train loss:3.456311 +step:9896 train loss:3.608970 +step:9897 train loss:3.484873 +step:9898 train loss:3.553582 +step:9899 train loss:3.532169 +step:9900 train loss:3.576689 +step:9901 train loss:3.498170 +step:9902 train loss:3.543633 +step:9903 train loss:3.515132 +step:9904 train loss:3.564357 +step:9905 train loss:3.468548 +step:9906 train loss:3.510533 +step:9907 train loss:3.513545 +step:9908 train loss:3.514796 +step:9909 train loss:3.530263 +step:9910 train loss:3.555351 +step:9911 train loss:3.637008 +step:9912 train loss:3.516470 +step:9913 train loss:3.518550 +step:9914 train loss:3.528072 +step:9915 train loss:3.529607 +step:9916 train loss:3.478502 +step:9917 train loss:3.512628 +step:9918 train loss:3.509511 +step:9919 train loss:3.672195 +step:9920 train loss:3.458994 +step:9921 train loss:3.550726 +step:9922 train loss:3.511283 +step:9923 train loss:3.568944 +step:9924 train loss:3.482824 +step:9925 train loss:3.541345 +step:9926 train loss:3.520974 +step:9927 train loss:3.562281 +step:9928 train loss:3.492190 +step:9929 train loss:3.529052 +step:9930 train loss:3.619646 +step:9931 train loss:3.580950 +step:9932 train loss:3.471142 +step:9933 train loss:3.563511 +step:9934 train loss:3.485126 +step:9935 train loss:3.601017 +step:9936 train loss:3.506863 +step:9937 train loss:3.534038 +step:9938 train loss:3.519093 +step:9939 train loss:3.586527 +step:9940 train loss:3.623475 +step:9941 train loss:3.498039 +step:9942 train loss:3.541914 +step:9943 train loss:3.681877 +step:9944 train loss:3.538117 +step:9945 train loss:3.564122 +step:9946 train loss:3.530300 +step:9947 train loss:3.481386 +step:9948 train loss:3.524176 +step:9949 train loss:3.418663 +step:9950 train loss:3.566937 +step:9951 train loss:3.490135 +step:9952 train loss:3.566229 +step:9953 train loss:3.526100 +step:9954 train loss:3.582592 +step:9955 train loss:3.553868 +step:9956 train loss:3.561662 +step:9957 train loss:3.532894 +step:9958 train loss:3.589236 +step:9959 train loss:3.484899 +step:9960 train loss:3.524648 +step:9961 train loss:3.536699 +step:9962 train loss:3.576887 +step:9963 train loss:3.467061 +step:9964 train loss:3.523356 +step:9965 train loss:3.524224 +step:9966 train loss:3.580587 +step:9967 train loss:3.500329 +step:9968 train loss:3.562510 +step:9969 train loss:3.474198 +step:9970 train loss:3.514852 +step:9971 train loss:3.566319 +step:9972 train loss:3.584824 +step:9973 train loss:3.560657 +step:9974 train loss:3.546584 +step:9975 train loss:3.517857 +step:9976 train loss:3.471109 +step:9977 train loss:3.525152 +step:9978 train loss:3.522398 +step:9979 train loss:3.533240 +step:9980 train loss:3.588905 +step:9981 train loss:3.496936 +step:9982 train loss:3.562100 +step:9983 train 
loss:3.476934 +step:9984 train loss:3.541909 +step:9985 train loss:3.482604 +step:9986 train loss:3.537583 +step:9987 train loss:3.602072 +step:9988 train loss:3.594657 +step:9989 train loss:3.488184 +step:9990 train loss:3.629936 +step:9991 train loss:3.469255 +step:9992 train loss:3.553675 +step:9993 train loss:3.540786 +step:9994 train loss:3.657176 +step:9995 train loss:3.593468 +step:9996 train loss:3.516254 +step:9997 train loss:3.551239 +step:9998 train loss:3.610361 +step:9999 train loss:3.572046 +step:10000 validation loss:3.477410 total_sharp:1.8097e-05 L1_sharp:-1.1919e-06 L2_sharp:-1.1492e-06 L3_sharp:-1.4895e-07 L4_sharp:9.4053e-07 L5_sharp:5.2039e-07 L6_sharp:9.1705e-07 L7_sharp:1.4184e-06 L8_sharp:2.6427e-06 L9_sharp:3.7494e-06 L10_sharp:5.3583e-06 L11_sharp:3.3529e-06 L12_sharp:5.3968e-05 total_fnorm:4.0483e+01 total_l1_linf:3.4817e+05 total_spectral:4.0483e+01 L1_fnorm:9.3266e+00 L2_fnorm:7.5472e+00 L3_fnorm:8.2943e+00 L4_fnorm:8.8557e+00 L5_fnorm:9.3799e+00 L6_fnorm:9.4500e+00 L7_fnorm:1.0023e+01 L8_fnorm:1.0052e+01 L9_fnorm:1.0042e+01 L10_fnorm:9.9434e+00 L11_fnorm:9.9918e+00 L12_fnorm:9.3390e+00 L1_l1linf:8.7933e+00 L2_l1linf:5.6918e+00 L3_l1linf:6.9761e+00 L4_l1linf:7.9862e+00 L5_l1linf:9.1444e+00 L6_l1linf:9.1981e+00 L7_l1linf:1.0088e+01 L8_l1linf:1.0522e+01 L9_l1linf:1.2275e+01 L10_l1linf:1.2293e+01 L11_l1linf:1.2632e+01 L12_l1linf:1.3197e+01 L1_spectral:1.3226e+00 L2_spectral:9.7325e-01 L3_spectral:9.4209e-01 L4_spectral:8.8859e-01 L5_spectral:1.0105e+00 L6_spectral:1.1529e+00 L7_spectral:1.1242e+00 L8_spectral:1.3762e+00 L9_spectral:1.6417e+00 L10_spectral:1.7478e+00 L11_spectral:1.6551e+00 L12_spectral:1.9992e+00 ip_v_neg_g:1.7619e-02 cos_v_neg_g:1.0728e-03 v_norm:4.0483e+01 g_norm:4.0570e-01 hv_norm:2.7520e-01 cos_v_hv:2.6620e-03 hg_norm:4.7705e+00 cos_g_hg:6.4786e-01 v_par:1.3724e-02 v_perp:4.0483e+01 L1_cos_v_neg_g:2.8164e-03 L1_v_norm:9.3266e+00 L2_cos_v_neg_g:2.6716e-03 L2_v_norm:7.5472e+00 L3_cos_v_neg_g:8.3806e-04 L3_v_norm:8.2943e+00 L4_cos_v_neg_g:1.4229e-03 L4_v_norm:8.8557e+00 L5_cos_v_neg_g:1.1446e-03 L5_v_norm:9.3799e+00 L6_cos_v_neg_g:2.3395e-03 L6_v_norm:9.4500e+00 L7_cos_v_neg_g:2.7835e-03 L7_v_norm:1.0023e+01 L8_cos_v_neg_g:4.3036e-03 L8_v_norm:1.0052e+01 L9_cos_v_neg_g:5.9956e-03 L9_v_norm:1.0042e+01 L10_cos_v_neg_g:7.7811e-03 L10_v_norm:9.9434e+00 L11_cos_v_neg_g:8.3299e-03 L11_v_norm:9.9918e+00 L12_cos_v_neg_g:2.7618e-02 L12_v_norm:9.3390e+00 diff --git a/logs_sharpness_pure/muon_lr_search/avg_loss_log_vs_steps.png b/logs_sharpness_pure/muon_lr_search/avg_loss_log_vs_steps.png new file mode 100644 index 0000000000000000000000000000000000000000..2b20c2c9f4236adc6b40784d139939c43aa5df47 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/avg_loss_log_vs_steps.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:678c250fa946fd72591565edbb18e06bba11f55dafbbbf66cd714ab83f374348 +size 119592 diff --git a/logs_sharpness_pure/muon_lr_search/avg_loss_vs_steps.png b/logs_sharpness_pure/muon_lr_search/avg_loss_vs_steps.png new file mode 100644 index 0000000000000000000000000000000000000000..9f482e3ee9b8c7ab44857ead6119f36dfec2fc07 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/avg_loss_vs_steps.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4b391b94a04c280ee2c9b986862b6489fb0558be8c861d2b46ddafabadce332a +size 112556 diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/config.json 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..8c2ccf8ca9d5bd12bb65f7dd8e97afc151befc58 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/muon_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.0005, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "d1124352-5cfb-41e1-b8cc-d843163f2f8a", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..54822e801caa4901eddafa69c826dcfa9ff3ea14 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.2938636541366577, + "total_l1_linf_norm": 7129.70947265625, + "total_spectral_norm": 1.2938638925552368, + "layer_1_update_fnorm": 0.03138824924826622, + "layer_1_max_l1_linf_norm": 0.1890329122543335, + "layer_1_max_spectral_norm": 0.00427622115239501, + "layer_2_update_fnorm": 0.028722915798425674, + "layer_2_max_l1_linf_norm": 0.21833652257919312, + "layer_2_max_spectral_norm": 0.00495386216789484, + "layer_3_update_fnorm": 0.027931535616517067, + "layer_3_max_l1_linf_norm": 0.23473268747329712, + "layer_3_max_spectral_norm": 0.005295776762068272, + "layer_4_update_fnorm": 0.02911187708377838, + "layer_4_max_l1_linf_norm": 0.24135984480381012, + "layer_4_max_spectral_norm": 0.005396955646574497, + "layer_5_update_fnorm": 0.02963371016085148, + "layer_5_max_l1_linf_norm": 0.24630765616893768, + "layer_5_max_spectral_norm": 0.005577700678259134, + "layer_6_update_fnorm": 0.030568363144993782, + "layer_6_max_l1_linf_norm": 0.25037696957588196, + "layer_6_max_spectral_norm": 0.005624828860163689, + "layer_7_update_fnorm": 0.03109409101307392, + "layer_7_max_l1_linf_norm": 0.2451963722705841, + "layer_7_max_spectral_norm": 0.006210616324096918, + "layer_8_update_fnorm": 0.03137333318591118, + "layer_8_max_l1_linf_norm": 0.2544834613800049, + "layer_8_max_spectral_norm": 0.0066167744807899, + "layer_9_update_fnorm": 0.031838446855545044, + "layer_9_max_l1_linf_norm": 0.2518662214279175, + "layer_9_max_spectral_norm": 0.006785668898373842, + "layer_10_update_fnorm": 0.03190399333834648, + "layer_10_max_l1_linf_norm": 0.24672424793243408, + 
"layer_10_max_spectral_norm": 0.00672390591353178, + "layer_11_update_fnorm": 0.03169197216629982, + "layer_11_max_l1_linf_norm": 0.232727512717247, + "layer_11_max_spectral_norm": 0.006175899878144264, + "layer_12_update_fnorm": 0.03212735801935196, + "layer_12_max_l1_linf_norm": 0.22850686311721802, + "layer_12_max_spectral_norm": 0.007818570360541344, + "total_sharpness": 0.02359323762357235, + "ip_v_neg_g": 0.01979822665452957, + "cos_v_neg_g": 0.00141177698969841, + "v_norm": 1.2938636541366577, + "g_norm": 10.83856201171875, + "hv_norm": 8.241813659667969, + "cos_v_hv": 0.0037038486916571856, + "hg_norm": 17587.525390625, + "cos_g_hg": 0.6875536441802979, + "v_parallel_norm": 8.327062096213922e-05, + "v_perp_norm": 1.2938636541366577, + "layer_1_v_norm": 0.03138824924826622, + "layer_1_cos_v_neg_g": 0.024693505838513374, + "layer_2_v_norm": 0.028722915798425674, + "layer_2_cos_v_neg_g": 0.022861629724502563, + "layer_3_v_norm": 0.027931535616517067, + "layer_3_cos_v_neg_g": 0.022597622126340866, + "layer_4_v_norm": 0.02911187708377838, + "layer_4_cos_v_neg_g": 0.018778255209326744, + "layer_5_v_norm": 0.02963371016085148, + "layer_5_cos_v_neg_g": 0.01976364478468895, + "layer_6_v_norm": 0.030568363144993782, + "layer_6_cos_v_neg_g": 0.01737339235842228, + "layer_7_v_norm": 0.03109409101307392, + "layer_7_cos_v_neg_g": 0.015225578099489212, + "layer_8_v_norm": 0.03137333691120148, + "layer_8_cos_v_neg_g": 0.013096518814563751, + "layer_9_v_norm": 0.031838446855545044, + "layer_9_cos_v_neg_g": 0.011644478887319565, + "layer_10_v_norm": 0.03190399333834648, + "layer_10_cos_v_neg_g": 0.010108301416039467, + "layer_11_v_norm": 0.03169197216629982, + "layer_11_cos_v_neg_g": 0.008107539266347885, + "layer_12_v_norm": 0.03212735801935196, + "layer_12_cos_v_neg_g": 0.006424827966839075, + "layer_1_sharpness": 1.9012045860290527, + "layer_2_sharpness": 1.1229664087295532, + "layer_3_sharpness": 0.7996897101402283, + "layer_4_sharpness": 0.5328930020332336, + "layer_5_sharpness": 0.4712008833885193, + "layer_6_sharpness": 0.3246268630027771, + "layer_7_sharpness": 0.21098992228507996, + "layer_8_sharpness": 0.13880784809589386, + "layer_9_sharpness": 0.09845487028360367, + "layer_10_sharpness": 0.07071341574192047, + "layer_11_sharpness": 0.055379707366228104, + "layer_12_sharpness": 0.04064727574586868 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_10000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..8a1ac67c11813ae197469fdff337e914f7d6b154 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.313619613647461, + "total_l1_linf_norm": 7255.6982421875, + "total_spectral_norm": 1.3136197328567505, + "layer_1_update_fnorm": 0.031853314489126205, + "layer_1_max_l1_linf_norm": 0.3113507032394409, + "layer_1_max_spectral_norm": 0.0070070624351501465, + "layer_2_update_fnorm": 0.03109319694340229, + "layer_2_max_l1_linf_norm": 0.34170520305633545, + "layer_2_max_spectral_norm": 0.00768353370949626, + "layer_3_update_fnorm": 0.031098008155822754, + "layer_3_max_l1_linf_norm": 0.3512601852416992, + "layer_3_max_spectral_norm": 0.007898940704762936, + "layer_4_update_fnorm": 0.03175213932991028, + "layer_4_max_l1_linf_norm": 0.34534111618995667, + "layer_4_max_spectral_norm": 
0.007812676951289177, + "layer_5_update_fnorm": 0.03210337087512016, + "layer_5_max_l1_linf_norm": 0.32025277614593506, + "layer_5_max_spectral_norm": 0.007242392748594284, + "layer_6_update_fnorm": 0.0322604775428772, + "layer_6_max_l1_linf_norm": 0.3058958649635315, + "layer_6_max_spectral_norm": 0.006926347501575947, + "layer_7_update_fnorm": 0.03223155438899994, + "layer_7_max_l1_linf_norm": 0.27999889850616455, + "layer_7_max_spectral_norm": 0.00635412335395813, + "layer_8_update_fnorm": 0.03230877220630646, + "layer_8_max_l1_linf_norm": 0.3091828227043152, + "layer_8_max_spectral_norm": 0.0069052488543093204, + "layer_9_update_fnorm": 0.03218575194478035, + "layer_9_max_l1_linf_norm": 0.30344313383102417, + "layer_9_max_spectral_norm": 0.006852537859231234, + "layer_10_update_fnorm": 0.03216542303562164, + "layer_10_max_l1_linf_norm": 0.33265867829322815, + "layer_10_max_spectral_norm": 0.007458769250661135, + "layer_11_update_fnorm": 0.03264995291829109, + "layer_11_max_l1_linf_norm": 0.37840789556503296, + "layer_11_max_spectral_norm": 0.008479233831167221, + "layer_12_update_fnorm": 0.03254155442118645, + "layer_12_max_l1_linf_norm": 0.3599466383457184, + "layer_12_max_spectral_norm": 0.008126536384224892, + "total_sharpness": 0.004091247450560331, + "ip_v_neg_g": 0.0014047358417883515, + "cos_v_neg_g": 0.00018890321371145546, + "v_norm": 1.313619613647461, + "g_norm": 5.660902500152588, + "hv_norm": 1.0611616373062134, + "cos_v_hv": 0.005064585246145725, + "hg_norm": 1698.4239501953125, + "cos_g_hg": 0.47906431555747986, + "v_parallel_norm": 1.0158442819374613e-05, + "v_perp_norm": 1.313619613647461, + "layer_1_v_norm": 0.031853314489126205, + "layer_1_cos_v_neg_g": -0.00041465909453108907, + "layer_2_v_norm": 0.03109319694340229, + "layer_2_cos_v_neg_g": 0.0012790367472916842, + "layer_3_v_norm": 0.031098008155822754, + "layer_3_cos_v_neg_g": 0.00010967801790684462, + "layer_4_v_norm": 0.03175213932991028, + "layer_4_cos_v_neg_g": 0.004277684725821018, + "layer_5_v_norm": 0.03210337087512016, + "layer_5_cos_v_neg_g": 0.0022410873789340258, + "layer_6_v_norm": 0.0322604775428772, + "layer_6_cos_v_neg_g": 0.0017458003712818027, + "layer_7_v_norm": 0.03223155438899994, + "layer_7_cos_v_neg_g": 0.0020465340930968523, + "layer_8_v_norm": 0.03230877220630646, + "layer_8_cos_v_neg_g": 0.0013126290868967772, + "layer_9_v_norm": 0.03218575194478035, + "layer_9_cos_v_neg_g": 0.001497799763455987, + "layer_10_v_norm": 0.03216542303562164, + "layer_10_cos_v_neg_g": 0.002682275837287307, + "layer_11_v_norm": 0.03264995291829109, + "layer_11_cos_v_neg_g": 0.0026272214017808437, + "layer_12_v_norm": 0.03254155442118645, + "layer_12_cos_v_neg_g": 0.00365280918776989, + "layer_1_sharpness": 0.15359938144683838, + "layer_2_sharpness": 0.05400438979268074, + "layer_3_sharpness": 0.09052039682865143, + "layer_4_sharpness": 0.05858176201581955, + "layer_5_sharpness": 0.05764603242278099, + "layer_6_sharpness": 0.06997594237327576, + "layer_7_sharpness": 0.08760351687669754, + "layer_8_sharpness": 0.10069383680820465, + "layer_9_sharpness": 0.07778848707675934, + "layer_10_sharpness": 0.0594862662255764, + "layer_11_sharpness": 0.05596477538347244, + "layer_12_sharpness": 0.08978255093097687 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_1500.json new file mode 100644 index 
0000000000000000000000000000000000000000..3b2ea2c3d5c3f9b707a9441fd993eb53be89bd81 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.2943679094314575, + "total_l1_linf_norm": 7148.677734375, + "total_spectral_norm": 1.294368028640747, + "layer_1_update_fnorm": 0.03150644525885582, + "layer_1_max_l1_linf_norm": 0.20170584321022034, + "layer_1_max_spectral_norm": 0.0045296731404960155, + "layer_2_update_fnorm": 0.02855602093040943, + "layer_2_max_l1_linf_norm": 0.22076904773712158, + "layer_2_max_spectral_norm": 0.0050138975493609905, + "layer_3_update_fnorm": 0.02721875160932541, + "layer_3_max_l1_linf_norm": 0.22589847445487976, + "layer_3_max_spectral_norm": 0.0051023829728364944, + "layer_4_update_fnorm": 0.02923751436173916, + "layer_4_max_l1_linf_norm": 0.23482641577720642, + "layer_4_max_spectral_norm": 0.005297219846397638, + "layer_5_update_fnorm": 0.029768167063593864, + "layer_5_max_l1_linf_norm": 0.23337233066558838, + "layer_5_max_spectral_norm": 0.005309537518769503, + "layer_6_update_fnorm": 0.030788008123636246, + "layer_6_max_l1_linf_norm": 0.24425357580184937, + "layer_6_max_spectral_norm": 0.00544325914233923, + "layer_7_update_fnorm": 0.03135707601904869, + "layer_7_max_l1_linf_norm": 0.24067020416259766, + "layer_7_max_spectral_norm": 0.0054381671361625195, + "layer_8_update_fnorm": 0.03138554468750954, + "layer_8_max_l1_linf_norm": 0.2343558371067047, + "layer_8_max_spectral_norm": 0.005943209398537874, + "layer_9_update_fnorm": 0.031636953353881836, + "layer_9_max_l1_linf_norm": 0.23467771708965302, + "layer_9_max_spectral_norm": 0.005770915187895298, + "layer_10_update_fnorm": 0.03168347105383873, + "layer_10_max_l1_linf_norm": 0.22639119625091553, + "layer_10_max_spectral_norm": 0.005996566265821457, + "layer_11_update_fnorm": 0.03131507709622383, + "layer_11_max_l1_linf_norm": 0.22199761867523193, + "layer_11_max_spectral_norm": 0.0051209949888288975, + "layer_12_update_fnorm": 0.03155267611145973, + "layer_12_max_l1_linf_norm": 0.20753434300422668, + "layer_12_max_spectral_norm": 0.005607330705970526, + "total_sharpness": 0.019079051911830902, + "ip_v_neg_g": 0.012451625429093838, + "cos_v_neg_g": 0.0009018604177981615, + "v_norm": 1.2943679094314575, + "g_norm": 10.66667366027832, + "hv_norm": 6.524567127227783, + "cos_v_hv": 0.0037849731743335724, + "hg_norm": 18843.833984375, + "cos_g_hg": 0.6354480981826782, + "v_parallel_norm": 4.4046428229194134e-05, + "v_perp_norm": 1.2943679094314575, + "layer_1_v_norm": 0.03150644525885582, + "layer_1_cos_v_neg_g": 0.017682673409581184, + "layer_2_v_norm": 0.02855602093040943, + "layer_2_cos_v_neg_g": 0.015951313078403473, + "layer_3_v_norm": 0.02721875160932541, + "layer_3_cos_v_neg_g": 0.013353395275771618, + "layer_4_v_norm": 0.02923751436173916, + "layer_4_cos_v_neg_g": 0.011306880973279476, + "layer_5_v_norm": 0.029768167063593864, + "layer_5_cos_v_neg_g": 0.010722900740802288, + "layer_6_v_norm": 0.030788008123636246, + "layer_6_cos_v_neg_g": 0.008466304279863834, + "layer_7_v_norm": 0.03135707601904869, + "layer_7_cos_v_neg_g": 0.007480270694941282, + "layer_8_v_norm": 0.03138554468750954, + "layer_8_cos_v_neg_g": 0.006992811802774668, + "layer_9_v_norm": 0.031636953353881836, + "layer_9_cos_v_neg_g": 0.00703837163746357, + "layer_10_v_norm": 0.03168347105383873, + "layer_10_cos_v_neg_g": 0.006467321887612343, + "layer_11_v_norm": 0.03131507709622383, + "layer_11_cos_v_neg_g": 0.005759130232036114, + 
"layer_12_v_norm": 0.03155267611145973, + "layer_12_cos_v_neg_g": 0.00537268677726388, + "layer_1_sharpness": 1.2984009981155396, + "layer_2_sharpness": 0.9200637340545654, + "layer_3_sharpness": 0.605105459690094, + "layer_4_sharpness": 0.470859557390213, + "layer_5_sharpness": 0.38602739572525024, + "layer_6_sharpness": 0.3138681948184967, + "layer_7_sharpness": 0.2328100949525833, + "layer_8_sharpness": 0.13842131197452545, + "layer_9_sharpness": 0.10162759572267532, + "layer_10_sharpness": 0.07263388484716415, + "layer_11_sharpness": 0.05916563794016838, + "layer_12_sharpness": 0.05693178251385689 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..6975c069fab59f3e7fd82e0bcb500ef5badcc5a6 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3289060592651367, + "total_l1_linf_norm": 7341.671875, + "total_spectral_norm": 1.3289061784744263, + "layer_1_update_fnorm": 0.03150031715631485, + "layer_1_max_l1_linf_norm": 0.2051171362400055, + "layer_1_max_spectral_norm": 0.004661330953240395, + "layer_2_update_fnorm": 0.029421012848615646, + "layer_2_max_l1_linf_norm": 0.21498766541481018, + "layer_2_max_spectral_norm": 0.0048700845800340176, + "layer_3_update_fnorm": 0.028335090726614, + "layer_3_max_l1_linf_norm": 0.23844361305236816, + "layer_3_max_spectral_norm": 0.005411609075963497, + "layer_4_update_fnorm": 0.02956947311758995, + "layer_4_max_l1_linf_norm": 0.24448439478874207, + "layer_4_max_spectral_norm": 0.005515766330063343, + "layer_5_update_fnorm": 0.030259057879447937, + "layer_5_max_l1_linf_norm": 0.2481260448694229, + "layer_5_max_spectral_norm": 0.005627988837659359, + "layer_6_update_fnorm": 0.03099261038005352, + "layer_6_max_l1_linf_norm": 0.24434390664100647, + "layer_6_max_spectral_norm": 0.005474105477333069, + "layer_7_update_fnorm": 0.031552862375974655, + "layer_7_max_l1_linf_norm": 0.24588406085968018, + "layer_7_max_spectral_norm": 0.005513536743819714, + "layer_8_update_fnorm": 0.03143182396888733, + "layer_8_max_l1_linf_norm": 0.2422286570072174, + "layer_8_max_spectral_norm": 0.005485482979565859, + "layer_9_update_fnorm": 0.03166170418262482, + "layer_9_max_l1_linf_norm": 0.2505800724029541, + "layer_9_max_spectral_norm": 0.00560466293245554, + "layer_10_update_fnorm": 0.03173692151904106, + "layer_10_max_l1_linf_norm": 0.2624194622039795, + "layer_10_max_spectral_norm": 0.0058398013934493065, + "layer_11_update_fnorm": 0.03166036680340767, + "layer_11_max_l1_linf_norm": 0.25662797689437866, + "layer_11_max_spectral_norm": 0.005762449465692043, + "layer_12_update_fnorm": 0.03181622177362442, + "layer_12_max_l1_linf_norm": 0.2576752305030823, + "layer_12_max_spectral_norm": 0.005740850232541561, + "total_sharpness": 0.01355364453047514, + "ip_v_neg_g": 0.01037875097244978, + "cos_v_neg_g": 0.0008026782306842506, + "v_norm": 1.3289060592651367, + "g_norm": 9.729921340942383, + "hv_norm": 4.728088855743408, + "cos_v_hv": 0.003809471847489476, + "hg_norm": 8294.9580078125, + "cos_g_hg": 0.6586905121803284, + "v_parallel_norm": 3.334530629217625e-05, + "v_perp_norm": 1.3289060592651367, + "layer_1_v_norm": 0.03150031715631485, + "layer_1_cos_v_neg_g": 0.007696358487010002, + "layer_2_v_norm": 
0.029421012848615646, + "layer_2_cos_v_neg_g": 0.010076899081468582, + "layer_3_v_norm": 0.02833508886396885, + "layer_3_cos_v_neg_g": 0.012124798260629177, + "layer_4_v_norm": 0.02956947311758995, + "layer_4_cos_v_neg_g": 0.012940386310219765, + "layer_5_v_norm": 0.030259057879447937, + "layer_5_cos_v_neg_g": 0.012359770946204662, + "layer_6_v_norm": 0.03099260851740837, + "layer_6_cos_v_neg_g": 0.011084754019975662, + "layer_7_v_norm": 0.031552862375974655, + "layer_7_cos_v_neg_g": 0.010140233673155308, + "layer_8_v_norm": 0.03143182396888733, + "layer_8_cos_v_neg_g": 0.009695854969322681, + "layer_9_v_norm": 0.03166170418262482, + "layer_9_cos_v_neg_g": 0.009824683889746666, + "layer_10_v_norm": 0.03173692151904106, + "layer_10_cos_v_neg_g": 0.009490806609392166, + "layer_11_v_norm": 0.03166036680340767, + "layer_11_cos_v_neg_g": 0.008876423351466656, + "layer_12_v_norm": 0.03181622177362442, + "layer_12_cos_v_neg_g": 0.008055003359913826, + "layer_1_sharpness": 0.4184298813343048, + "layer_2_sharpness": 0.2906785309314728, + "layer_3_sharpness": 0.36563119292259216, + "layer_4_sharpness": 0.31165754795074463, + "layer_5_sharpness": 0.27699732780456543, + "layer_6_sharpness": 0.27186015248298645, + "layer_7_sharpness": 0.20620334148406982, + "layer_8_sharpness": 0.15111754834651947, + "layer_9_sharpness": 0.13486403226852417, + "layer_10_sharpness": 0.11562182009220123, + "layer_11_sharpness": 0.1065780445933342, + "layer_12_sharpness": 0.12439877539873123 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..5b3ad2a96e5b71e0b9e4241bb1b4e7e65f23a88e --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3252313137054443, + "total_l1_linf_norm": 7318.6884765625, + "total_spectral_norm": 1.3252311944961548, + "layer_1_update_fnorm": 0.031849443912506104, + "layer_1_max_l1_linf_norm": 0.2708996832370758, + "layer_1_max_spectral_norm": 0.005989706609398127, + "layer_2_update_fnorm": 0.029247041791677475, + "layer_2_max_l1_linf_norm": 0.25581324100494385, + "layer_2_max_spectral_norm": 0.005776043049991131, + "layer_3_update_fnorm": 0.028727108612656593, + "layer_3_max_l1_linf_norm": 0.25764453411102295, + "layer_3_max_spectral_norm": 0.00583333894610405, + "layer_4_update_fnorm": 0.0301557295024395, + "layer_4_max_l1_linf_norm": 0.26689982414245605, + "layer_4_max_spectral_norm": 0.00600824598222971, + "layer_5_update_fnorm": 0.030639048665761948, + "layer_5_max_l1_linf_norm": 0.2619856894016266, + "layer_5_max_spectral_norm": 0.005855632945895195, + "layer_6_update_fnorm": 0.031316809356212616, + "layer_6_max_l1_linf_norm": 0.2621566653251648, + "layer_6_max_spectral_norm": 0.005919995252043009, + "layer_7_update_fnorm": 0.03172493353486061, + "layer_7_max_l1_linf_norm": 0.2536488175392151, + "layer_7_max_spectral_norm": 0.005727849900722504, + "layer_8_update_fnorm": 0.03164084628224373, + "layer_8_max_l1_linf_norm": 0.2591298222541809, + "layer_8_max_spectral_norm": 0.005807045381516218, + "layer_9_update_fnorm": 0.03175141662359238, + "layer_9_max_l1_linf_norm": 0.26393815875053406, + "layer_9_max_spectral_norm": 0.005918179173022509, + "layer_10_update_fnorm": 0.0319083072245121, + "layer_10_max_l1_linf_norm": 
0.28182610869407654, + "layer_10_max_spectral_norm": 0.006299563217908144, + "layer_11_update_fnorm": 0.031607840210199356, + "layer_11_max_l1_linf_norm": 0.26839300990104675, + "layer_11_max_spectral_norm": 0.006008043419569731, + "layer_12_update_fnorm": 0.03156237676739693, + "layer_12_max_l1_linf_norm": 0.2447684407234192, + "layer_12_max_spectral_norm": 0.005611548665910959, + "total_sharpness": 0.010710162110626698, + "ip_v_neg_g": 0.007167407777160406, + "cos_v_neg_g": 0.0006797860842198133, + "v_norm": 1.3252313137054443, + "g_norm": 7.956061363220215, + "hv_norm": 3.4273910522460938, + "cos_v_hv": 0.004141179379075766, + "hg_norm": 3679.978515625, + "cos_g_hg": 0.6014571189880371, + "v_parallel_norm": 2.705634142330382e-05, + "v_perp_norm": 1.3252313137054443, + "layer_1_v_norm": 0.031849443912506104, + "layer_1_cos_v_neg_g": 0.012036068364977837, + "layer_2_v_norm": 0.029247041791677475, + "layer_2_cos_v_neg_g": 0.01146476436406374, + "layer_3_v_norm": 0.028727110475301743, + "layer_3_cos_v_neg_g": 0.011169010773301125, + "layer_4_v_norm": 0.0301557295024395, + "layer_4_cos_v_neg_g": 0.008699827827513218, + "layer_5_v_norm": 0.030639048665761948, + "layer_5_cos_v_neg_g": 0.00776830967515707, + "layer_6_v_norm": 0.031316809356212616, + "layer_6_cos_v_neg_g": 0.007636476773768663, + "layer_7_v_norm": 0.03172493353486061, + "layer_7_cos_v_neg_g": 0.007256804965436459, + "layer_8_v_norm": 0.03164084628224373, + "layer_8_cos_v_neg_g": 0.006702104117721319, + "layer_9_v_norm": 0.03175141662359238, + "layer_9_cos_v_neg_g": 0.0055061341263353825, + "layer_10_v_norm": 0.0319083072245121, + "layer_10_cos_v_neg_g": 0.00519094942137599, + "layer_11_v_norm": 0.03160783648490906, + "layer_11_cos_v_neg_g": 0.004581201355904341, + "layer_12_v_norm": 0.03156237676739693, + "layer_12_cos_v_neg_g": 0.004920690320432186, + "layer_1_sharpness": 0.794746994972229, + "layer_2_sharpness": 0.44221678376197815, + "layer_3_sharpness": 0.333304762840271, + "layer_4_sharpness": 0.19368445873260498, + "layer_5_sharpness": 0.15878981351852417, + "layer_6_sharpness": 0.16203917562961578, + "layer_7_sharpness": 0.1523021161556244, + "layer_8_sharpness": 0.13744406402111053, + "layer_9_sharpness": 0.10586467385292053, + "layer_10_sharpness": 0.0919923484325409, + "layer_11_sharpness": 0.06300010532140732, + "layer_12_sharpness": 0.07392672449350357 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..694688b7372d32993a0b9baeb00fb974078eda4d --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3275713920593262, + "total_l1_linf_norm": 7330.19580078125, + "total_spectral_norm": 1.3275716304779053, + "layer_1_update_fnorm": 0.03187941759824753, + "layer_1_max_l1_linf_norm": 0.2544288635253906, + "layer_1_max_spectral_norm": 0.0058241127990186214, + "layer_2_update_fnorm": 0.030171412974596024, + "layer_2_max_l1_linf_norm": 0.2919391393661499, + "layer_2_max_spectral_norm": 0.006608447525650263, + "layer_3_update_fnorm": 0.029403777793049812, + "layer_3_max_l1_linf_norm": 0.28770124912261963, + "layer_3_max_spectral_norm": 0.006569760385900736, + "layer_4_update_fnorm": 0.03056446649134159, + "layer_4_max_l1_linf_norm": 0.29620349407196045, + 
"layer_4_max_spectral_norm": 0.00668524531647563, + "layer_5_update_fnorm": 0.031086677685379982, + "layer_5_max_l1_linf_norm": 0.2770997881889343, + "layer_5_max_spectral_norm": 0.006276072468608618, + "layer_6_update_fnorm": 0.03179820626974106, + "layer_6_max_l1_linf_norm": 0.26999741792678833, + "layer_6_max_spectral_norm": 0.006122647784650326, + "layer_7_update_fnorm": 0.03182941675186157, + "layer_7_max_l1_linf_norm": 0.2556535303592682, + "layer_7_max_spectral_norm": 0.005754541605710983, + "layer_8_update_fnorm": 0.031829722225666046, + "layer_8_max_l1_linf_norm": 0.2657046914100647, + "layer_8_max_spectral_norm": 0.0060036457143723965, + "layer_9_update_fnorm": 0.031931549310684204, + "layer_9_max_l1_linf_norm": 0.27751410007476807, + "layer_9_max_spectral_norm": 0.006258576642721891, + "layer_10_update_fnorm": 0.03198793902993202, + "layer_10_max_l1_linf_norm": 0.2897551655769348, + "layer_10_max_spectral_norm": 0.00651689525693655, + "layer_11_update_fnorm": 0.031847033649683, + "layer_11_max_l1_linf_norm": 0.29484498500823975, + "layer_11_max_spectral_norm": 0.006689386907964945, + "layer_12_update_fnorm": 0.03201202303171158, + "layer_12_max_l1_linf_norm": 0.28192758560180664, + "layer_12_max_spectral_norm": 0.0064014168456196785, + "total_sharpness": 0.007675761356949806, + "ip_v_neg_g": 0.00924074649810791, + "cos_v_neg_g": 0.0009365859441459179, + "v_norm": 1.3275713920593262, + "g_norm": 7.431928634643555, + "hv_norm": 2.288844585418701, + "cos_v_hv": 0.004452080931514502, + "hg_norm": 2740.8525390625, + "cos_g_hg": 0.5873799920082092, + "v_parallel_norm": 3.328398452140391e-05, + "v_perp_norm": 1.3275713920593262, + "layer_1_v_norm": 0.03187941759824753, + "layer_1_cos_v_neg_g": 0.014162451028823853, + "layer_2_v_norm": 0.030171412974596024, + "layer_2_cos_v_neg_g": 0.01438065804541111, + "layer_3_v_norm": 0.02940378151834011, + "layer_3_cos_v_neg_g": 0.01441233605146408, + "layer_4_v_norm": 0.03056446649134159, + "layer_4_cos_v_neg_g": 0.013570244424045086, + "layer_5_v_norm": 0.031086677685379982, + "layer_5_cos_v_neg_g": 0.012033142149448395, + "layer_6_v_norm": 0.03179820999503136, + "layer_6_cos_v_neg_g": 0.011170039884746075, + "layer_7_v_norm": 0.03182941675186157, + "layer_7_cos_v_neg_g": 0.011854495853185654, + "layer_8_v_norm": 0.031829722225666046, + "layer_8_cos_v_neg_g": 0.010840240865945816, + "layer_9_v_norm": 0.031931549310684204, + "layer_9_cos_v_neg_g": 0.009554008953273296, + "layer_10_v_norm": 0.03198793902993202, + "layer_10_cos_v_neg_g": 0.008474275469779968, + "layer_11_v_norm": 0.0318470373749733, + "layer_11_cos_v_neg_g": 0.0058542476035654545, + "layer_12_v_norm": 0.03201202303171158, + "layer_12_cos_v_neg_g": 0.0054612006060779095, + "layer_1_sharpness": 0.21842600405216217, + "layer_2_sharpness": 0.1557587832212448, + "layer_3_sharpness": 0.20644867420196533, + "layer_4_sharpness": 0.13165728747844696, + "layer_5_sharpness": 0.12459768354892731, + "layer_6_sharpness": 0.1267918050289154, + "layer_7_sharpness": 0.14561572670936584, + "layer_8_sharpness": 0.12623931467533112, + "layer_9_sharpness": 0.10718975961208344, + "layer_10_sharpness": 0.08385694772005081, + "layer_11_sharpness": 0.06530995666980743, + "layer_12_sharpness": 0.07359202206134796 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_3500.json new file mode 100644 index 
0000000000000000000000000000000000000000..9d3c6b02ab7109e01f07cae66d0b297dabbd8937 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.332406759262085, + "total_l1_linf_norm": 7356.42919921875, + "total_spectral_norm": 1.3324068784713745, + "layer_1_update_fnorm": 0.031815990805625916, + "layer_1_max_l1_linf_norm": 0.2637697458267212, + "layer_1_max_spectral_norm": 0.005949466954916716, + "layer_2_update_fnorm": 0.030081070959568024, + "layer_2_max_l1_linf_norm": 0.2722265124320984, + "layer_2_max_spectral_norm": 0.006143772974610329, + "layer_3_update_fnorm": 0.029370663687586784, + "layer_3_max_l1_linf_norm": 0.28372395038604736, + "layer_3_max_spectral_norm": 0.006443570367991924, + "layer_4_update_fnorm": 0.030505115166306496, + "layer_4_max_l1_linf_norm": 0.28407710790634155, + "layer_4_max_spectral_norm": 0.006396885961294174, + "layer_5_update_fnorm": 0.031060243025422096, + "layer_5_max_l1_linf_norm": 0.2749485373497009, + "layer_5_max_spectral_norm": 0.006230655126273632, + "layer_6_update_fnorm": 0.03177008032798767, + "layer_6_max_l1_linf_norm": 0.2630055844783783, + "layer_6_max_spectral_norm": 0.005909794010221958, + "layer_7_update_fnorm": 0.03167145326733589, + "layer_7_max_l1_linf_norm": 0.24854537844657898, + "layer_7_max_spectral_norm": 0.005625184625387192, + "layer_8_update_fnorm": 0.03187907859683037, + "layer_8_max_l1_linf_norm": 0.2617706060409546, + "layer_8_max_spectral_norm": 0.005916638299822807, + "layer_9_update_fnorm": 0.031937502324581146, + "layer_9_max_l1_linf_norm": 0.28393882513046265, + "layer_9_max_spectral_norm": 0.006408532150089741, + "layer_10_update_fnorm": 0.03198106586933136, + "layer_10_max_l1_linf_norm": 0.29632532596588135, + "layer_10_max_spectral_norm": 0.00663305539637804, + "layer_11_update_fnorm": 0.0319344699382782, + "layer_11_max_l1_linf_norm": 0.3078666925430298, + "layer_11_max_spectral_norm": 0.006912060547620058, + "layer_12_update_fnorm": 0.03203536197543144, + "layer_12_max_l1_linf_norm": 0.31158527731895447, + "layer_12_max_spectral_norm": 0.006975082214921713, + "total_sharpness": 0.0077965292148292065, + "ip_v_neg_g": 0.008527117781341076, + "cos_v_neg_g": 0.0008285895455628633, + "v_norm": 1.332406759262085, + "g_norm": 7.723710536956787, + "hv_norm": 2.4659619331359863, + "cos_v_hv": 0.004212614614516497, + "hg_norm": 3464.09228515625, + "cos_g_hg": 0.612079381942749, + "v_parallel_norm": 3.0234908990678377e-05, + "v_perp_norm": 1.332406759262085, + "layer_1_v_norm": 0.031815990805625916, + "layer_1_cos_v_neg_g": 0.012241318821907043, + "layer_2_v_norm": 0.030081070959568024, + "layer_2_cos_v_neg_g": 0.01171494647860527, + "layer_3_v_norm": 0.029370663687586784, + "layer_3_cos_v_neg_g": 0.014800602570176125, + "layer_4_v_norm": 0.030505115166306496, + "layer_4_cos_v_neg_g": 0.010700900107622147, + "layer_5_v_norm": 0.031060243025422096, + "layer_5_cos_v_neg_g": 0.010014638304710388, + "layer_6_v_norm": 0.03177008032798767, + "layer_6_cos_v_neg_g": 0.009786803275346756, + "layer_7_v_norm": 0.03167145326733589, + "layer_7_cos_v_neg_g": 0.01007118634879589, + "layer_8_v_norm": 0.03187907859683037, + "layer_8_cos_v_neg_g": 0.00975863542407751, + "layer_9_v_norm": 0.031937502324581146, + "layer_9_cos_v_neg_g": 0.0077757444232702255, + "layer_10_v_norm": 0.03198106586933136, + "layer_10_cos_v_neg_g": 0.006123350467532873, + "layer_11_v_norm": 0.0319344699382782, + "layer_11_cos_v_neg_g": 0.0067821103148162365, + 
"layer_12_v_norm": 0.03203536197543144, + "layer_12_cos_v_neg_g": 0.007445107214152813, + "layer_1_sharpness": 0.2682854235172272, + "layer_2_sharpness": 0.16627205908298492, + "layer_3_sharpness": 0.21111667156219482, + "layer_4_sharpness": 0.12602351605892181, + "layer_5_sharpness": 0.10820559412240982, + "layer_6_sharpness": 0.11862588673830032, + "layer_7_sharpness": 0.13684777915477753, + "layer_8_sharpness": 0.12549850344657898, + "layer_9_sharpness": 0.110818050801754, + "layer_10_sharpness": 0.08837459981441498, + "layer_11_sharpness": 0.07617314159870148, + "layer_12_sharpness": 0.16685332357883453 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..31e9a9e8d6354e2c2310ff2714dab7a8d4e29b2a --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3222864866256714, + "total_l1_linf_norm": 7300.6494140625, + "total_spectral_norm": 1.3222862482070923, + "layer_1_update_fnorm": 0.03172951191663742, + "layer_1_max_l1_linf_norm": 0.26096174120903015, + "layer_1_max_spectral_norm": 0.005912737920880318, + "layer_2_update_fnorm": 0.030260616913437843, + "layer_2_max_l1_linf_norm": 0.28550946712493896, + "layer_2_max_spectral_norm": 0.006411950569599867, + "layer_3_update_fnorm": 0.02981860376894474, + "layer_3_max_l1_linf_norm": 0.28410211205482483, + "layer_3_max_spectral_norm": 0.006377110257744789, + "layer_4_update_fnorm": 0.030766064301133156, + "layer_4_max_l1_linf_norm": 0.3043246865272522, + "layer_4_max_spectral_norm": 0.006845887750387192, + "layer_5_update_fnorm": 0.031510572880506516, + "layer_5_max_l1_linf_norm": 0.2979578971862793, + "layer_5_max_spectral_norm": 0.006673021707683802, + "layer_6_update_fnorm": 0.03176293149590492, + "layer_6_max_l1_linf_norm": 0.2759193181991577, + "layer_6_max_spectral_norm": 0.006227533798664808, + "layer_7_update_fnorm": 0.03190257400274277, + "layer_7_max_l1_linf_norm": 0.2734281122684479, + "layer_7_max_spectral_norm": 0.006175663787871599, + "layer_8_update_fnorm": 0.03189645707607269, + "layer_8_max_l1_linf_norm": 0.27979734539985657, + "layer_8_max_spectral_norm": 0.006285527255386114, + "layer_9_update_fnorm": 0.0319061279296875, + "layer_9_max_l1_linf_norm": 0.28238070011138916, + "layer_9_max_spectral_norm": 0.006344693247228861, + "layer_10_update_fnorm": 0.03200066089630127, + "layer_10_max_l1_linf_norm": 0.28559720516204834, + "layer_10_max_spectral_norm": 0.0064696939662098885, + "layer_11_update_fnorm": 0.03202151879668236, + "layer_11_max_l1_linf_norm": 0.31022047996520996, + "layer_11_max_spectral_norm": 0.006951900199055672, + "layer_12_update_fnorm": 0.032129351049661636, + "layer_12_max_l1_linf_norm": 0.329193115234375, + "layer_12_max_spectral_norm": 0.007317133713513613, + "total_sharpness": 0.007966401055455208, + "ip_v_neg_g": 0.007552728988230228, + "cos_v_neg_g": 0.0008870260207913816, + "v_norm": 1.3222864866256714, + "g_norm": 6.43934965133667, + "hv_norm": 2.1934525966644287, + "cos_v_hv": 0.00480241235345602, + "hg_norm": 1383.212158203125, + "cos_g_hg": 0.5270813703536987, + "v_parallel_norm": 2.900809886341449e-05, + "v_perp_norm": 1.3222864866256714, + "layer_1_v_norm": 0.03172951191663742, + "layer_1_cos_v_neg_g": 0.01106266863644123, + 
"layer_2_v_norm": 0.030260616913437843, + "layer_2_cos_v_neg_g": 0.010791386477649212, + "layer_3_v_norm": 0.02981860376894474, + "layer_3_cos_v_neg_g": 0.011522997170686722, + "layer_4_v_norm": 0.030766064301133156, + "layer_4_cos_v_neg_g": 0.009950771927833557, + "layer_5_v_norm": 0.031510572880506516, + "layer_5_cos_v_neg_g": 0.009440477937459946, + "layer_6_v_norm": 0.03176293149590492, + "layer_6_cos_v_neg_g": 0.009946038015186787, + "layer_7_v_norm": 0.03190257400274277, + "layer_7_cos_v_neg_g": 0.011057978495955467, + "layer_8_v_norm": 0.03189645707607269, + "layer_8_cos_v_neg_g": 0.010916310362517834, + "layer_9_v_norm": 0.0319061279296875, + "layer_9_cos_v_neg_g": 0.00955349113792181, + "layer_10_v_norm": 0.03200066089630127, + "layer_10_cos_v_neg_g": 0.008941782638430595, + "layer_11_v_norm": 0.03202151879668236, + "layer_11_cos_v_neg_g": 0.008263765834271908, + "layer_12_v_norm": 0.032129351049661636, + "layer_12_cos_v_neg_g": 0.008951181545853615, + "layer_1_sharpness": 0.22896218299865723, + "layer_2_sharpness": 0.1301017552614212, + "layer_3_sharpness": 0.15446610748767853, + "layer_4_sharpness": 0.11679241806268692, + "layer_5_sharpness": 0.1237252727150917, + "layer_6_sharpness": 0.1280638575553894, + "layer_7_sharpness": 0.1608329862356186, + "layer_8_sharpness": 0.14603577554225922, + "layer_9_sharpness": 0.11295770108699799, + "layer_10_sharpness": 0.09207255393266678, + "layer_11_sharpness": 0.09031451493501663, + "layer_12_sharpness": 0.15928566455841064 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..0100e1a3562a8270462697f203ac4fd3bf8ff589 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3447480201721191, + "total_l1_linf_norm": 7424.46240234375, + "total_spectral_norm": 1.3447479009628296, + "layer_1_update_fnorm": 0.03209162503480911, + "layer_1_max_l1_linf_norm": 0.3021143972873688, + "layer_1_max_spectral_norm": 0.006798392161726952, + "layer_2_update_fnorm": 0.03055739961564541, + "layer_2_max_l1_linf_norm": 0.3163171410560608, + "layer_2_max_spectral_norm": 0.007166400086134672, + "layer_3_update_fnorm": 0.03000226803123951, + "layer_3_max_l1_linf_norm": 0.31355470418930054, + "layer_3_max_spectral_norm": 0.007086071651428938, + "layer_4_update_fnorm": 0.030903413891792297, + "layer_4_max_l1_linf_norm": 0.3122522830963135, + "layer_4_max_spectral_norm": 0.007015382871031761, + "layer_5_update_fnorm": 0.03167155012488365, + "layer_5_max_l1_linf_norm": 0.30849987268447876, + "layer_5_max_spectral_norm": 0.0068763927556574345, + "layer_6_update_fnorm": 0.03188672289252281, + "layer_6_max_l1_linf_norm": 0.2744848430156708, + "layer_6_max_spectral_norm": 0.006159032694995403, + "layer_7_update_fnorm": 0.03189454227685928, + "layer_7_max_l1_linf_norm": 0.2553561329841614, + "layer_7_max_spectral_norm": 0.005809458438307047, + "layer_8_update_fnorm": 0.03184043988585472, + "layer_8_max_l1_linf_norm": 0.2630992531776428, + "layer_8_max_spectral_norm": 0.005963347852230072, + "layer_9_update_fnorm": 0.03180497884750366, + "layer_9_max_l1_linf_norm": 0.2767210900783539, + "layer_9_max_spectral_norm": 0.006224128417670727, + "layer_10_update_fnorm": 0.03185344487428665, + "layer_10_max_l1_linf_norm": 
0.292750746011734, + "layer_10_max_spectral_norm": 0.0065492670983076096, + "layer_11_update_fnorm": 0.03193681687116623, + "layer_11_max_l1_linf_norm": 0.30534031987190247, + "layer_11_max_spectral_norm": 0.0068948580883443356, + "layer_12_update_fnorm": 0.03173762932419777, + "layer_12_max_l1_linf_norm": 0.27438199520111084, + "layer_12_max_spectral_norm": 0.006282474379986525, + "total_sharpness": 0.006167437881231308, + "ip_v_neg_g": 0.0064659444615244865, + "cos_v_neg_g": 0.0007366216159425676, + "v_norm": 1.3447480201721191, + "g_norm": 6.527495861053467, + "hv_norm": 1.8150639533996582, + "cos_v_hv": 0.004569342825561762, + "hg_norm": 3720.218017578125, + "cos_g_hg": 0.47119390964508057, + "v_parallel_norm": 2.6937446818919852e-05, + "v_perp_norm": 1.3447480201721191, + "layer_1_v_norm": 0.03209162503480911, + "layer_1_cos_v_neg_g": 0.0102988975122571, + "layer_2_v_norm": 0.03055739961564541, + "layer_2_cos_v_neg_g": 0.009958990849554539, + "layer_3_v_norm": 0.03000226616859436, + "layer_3_cos_v_neg_g": 0.010046247392892838, + "layer_4_v_norm": 0.030903413891792297, + "layer_4_cos_v_neg_g": 0.008220150135457516, + "layer_5_v_norm": 0.03167155012488365, + "layer_5_cos_v_neg_g": 0.008562530390918255, + "layer_6_v_norm": 0.03188671916723251, + "layer_6_cos_v_neg_g": 0.00825822539627552, + "layer_7_v_norm": 0.03189454227685928, + "layer_7_cos_v_neg_g": 0.008882282301783562, + "layer_8_v_norm": 0.03184043988585472, + "layer_8_cos_v_neg_g": 0.007758577819913626, + "layer_9_v_norm": 0.03180497884750366, + "layer_9_cos_v_neg_g": 0.007931155152618885, + "layer_10_v_norm": 0.03185344487428665, + "layer_10_cos_v_neg_g": 0.00739420996978879, + "layer_11_v_norm": 0.03193681314587593, + "layer_11_cos_v_neg_g": 0.007027223240584135, + "layer_12_v_norm": 0.03173762932419777, + "layer_12_cos_v_neg_g": 0.006130640394985676, + "layer_1_sharpness": 0.22385364770889282, + "layer_2_sharpness": 0.1348809450864792, + "layer_3_sharpness": 0.16120228171348572, + "layer_4_sharpness": 0.09478616714477539, + "layer_5_sharpness": 0.09896327555179596, + "layer_6_sharpness": 0.11954927444458008, + "layer_7_sharpness": 0.1402205228805542, + "layer_8_sharpness": 0.11629932373762131, + "layer_9_sharpness": 0.09585349261760712, + "layer_10_sharpness": 0.07639173418283463, + "layer_11_sharpness": 0.06114397943019867, + "layer_12_sharpness": 0.07909993827342987 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..fc07a3adc5f12d34463a121160eec22a37756b8e --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 0.9079322218894958, + "total_l1_linf_norm": 5007.96435546875, + "total_spectral_norm": 0.9079320430755615, + "layer_1_update_fnorm": 0.0229373537003994, + "layer_1_max_l1_linf_norm": 0.18168491125106812, + "layer_1_max_spectral_norm": 0.004046762362122536, + "layer_2_update_fnorm": 0.02244974672794342, + "layer_2_max_l1_linf_norm": 0.18166840076446533, + "layer_2_max_spectral_norm": 0.00494103878736496, + "layer_3_update_fnorm": 0.021819692105054855, + "layer_3_max_l1_linf_norm": 0.1865071952342987, + "layer_3_max_spectral_norm": 0.004833380691707134, + "layer_4_update_fnorm": 0.02275390923023224, + "layer_4_max_l1_linf_norm": 0.18455876410007477, + 
"layer_4_max_spectral_norm": 0.0075619458220899105, + "layer_5_update_fnorm": 0.023538906127214432, + "layer_5_max_l1_linf_norm": 0.20832666754722595, + "layer_5_max_spectral_norm": 0.0092774061486125, + "layer_6_update_fnorm": 0.023888083174824715, + "layer_6_max_l1_linf_norm": 0.22366370260715485, + "layer_6_max_spectral_norm": 0.009754610247910023, + "layer_7_update_fnorm": 0.0241988655179739, + "layer_7_max_l1_linf_norm": 0.2579193115234375, + "layer_7_max_spectral_norm": 0.010675322264432907, + "layer_8_update_fnorm": 0.023357918485999107, + "layer_8_max_l1_linf_norm": 0.19424249231815338, + "layer_8_max_spectral_norm": 0.008707886561751366, + "layer_9_update_fnorm": 0.023006541654467583, + "layer_9_max_l1_linf_norm": 0.18614593148231506, + "layer_9_max_spectral_norm": 0.008062097243964672, + "layer_10_update_fnorm": 0.022876368835568428, + "layer_10_max_l1_linf_norm": 0.17798858880996704, + "layer_10_max_spectral_norm": 0.00762177212163806, + "layer_11_update_fnorm": 0.022072868421673775, + "layer_11_max_l1_linf_norm": 0.1354534924030304, + "layer_11_max_spectral_norm": 0.004934447817504406, + "layer_12_update_fnorm": 0.022298714146018028, + "layer_12_max_l1_linf_norm": 0.13894957304000854, + "layer_12_max_spectral_norm": 0.005305348429828882, + "total_sharpness": 0.022081125527620316, + "ip_v_neg_g": 0.007561615668237209, + "cos_v_neg_g": 0.0010000505717471242, + "v_norm": 0.9079322218894958, + "g_norm": 8.327971458435059, + "hv_norm": 5.4809980392456055, + "cos_v_hv": 0.0036577582359313965, + "hg_norm": 11559.404296875, + "cos_g_hg": 0.6755183935165405, + "v_parallel_norm": 5.075359877082519e-05, + "v_perp_norm": 0.9079322218894958, + "layer_1_v_norm": 0.0229373537003994, + "layer_1_cos_v_neg_g": 0.017780084162950516, + "layer_2_v_norm": 0.02244974672794342, + "layer_2_cos_v_neg_g": 0.013726561330258846, + "layer_3_v_norm": 0.021819692105054855, + "layer_3_cos_v_neg_g": 0.013730419799685478, + "layer_4_v_norm": 0.02275390923023224, + "layer_4_cos_v_neg_g": 0.011264855042099953, + "layer_5_v_norm": 0.023538906127214432, + "layer_5_cos_v_neg_g": 0.010533900000154972, + "layer_6_v_norm": 0.023888085037469864, + "layer_6_cos_v_neg_g": 0.009511535987257957, + "layer_7_v_norm": 0.0241988655179739, + "layer_7_cos_v_neg_g": 0.009143998846411705, + "layer_8_v_norm": 0.023357916623353958, + "layer_8_cos_v_neg_g": 0.008863318711519241, + "layer_9_v_norm": 0.023006541654467583, + "layer_9_cos_v_neg_g": 0.00858089979737997, + "layer_10_v_norm": 0.022876368835568428, + "layer_10_cos_v_neg_g": 0.007724763359874487, + "layer_11_v_norm": 0.022072866559028625, + "layer_11_cos_v_neg_g": 0.00669619906693697, + "layer_12_v_norm": 0.022298714146018028, + "layer_12_cos_v_neg_g": 0.005557706579566002, + "layer_1_sharpness": 2.058406114578247, + "layer_2_sharpness": 0.6542129516601562, + "layer_3_sharpness": 0.4767957627773285, + "layer_4_sharpness": 0.2807578146457672, + "layer_5_sharpness": 0.20519307255744934, + "layer_6_sharpness": 0.14379355311393738, + "layer_7_sharpness": 0.10099968314170837, + "layer_8_sharpness": 0.08200377970933914, + "layer_9_sharpness": 0.052207253873348236, + "layer_10_sharpness": 0.0372043214738369, + "layer_11_sharpness": 0.02789955399930477, + "layer_12_sharpness": 0.021897681057453156 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_5000.json new file mode 100644 index 
0000000000000000000000000000000000000000..6da0eb1cdfd37b4e66f5884527b7e9c2f93b7e60 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3462022542953491, + "total_l1_linf_norm": 7423.81689453125, + "total_spectral_norm": 1.3462018966674805, + "layer_1_update_fnorm": 0.03216633200645447, + "layer_1_max_l1_linf_norm": 0.30943745374679565, + "layer_1_max_spectral_norm": 0.006962619721889496, + "layer_2_update_fnorm": 0.030665496364235878, + "layer_2_max_l1_linf_norm": 0.32703742384910583, + "layer_2_max_spectral_norm": 0.007371967658400536, + "layer_3_update_fnorm": 0.03001769259572029, + "layer_3_max_l1_linf_norm": 0.3257071077823639, + "layer_3_max_spectral_norm": 0.007238763850182295, + "layer_4_update_fnorm": 0.03130469098687172, + "layer_4_max_l1_linf_norm": 0.31951311230659485, + "layer_4_max_spectral_norm": 0.007134018931537867, + "layer_5_update_fnorm": 0.0317082405090332, + "layer_5_max_l1_linf_norm": 0.30657726526260376, + "layer_5_max_spectral_norm": 0.006884084548801184, + "layer_6_update_fnorm": 0.031928651034832, + "layer_6_max_l1_linf_norm": 0.2800524830818176, + "layer_6_max_spectral_norm": 0.006320275831967592, + "layer_7_update_fnorm": 0.0319284163415432, + "layer_7_max_l1_linf_norm": 0.26309502124786377, + "layer_7_max_spectral_norm": 0.005931466817855835, + "layer_8_update_fnorm": 0.03193790093064308, + "layer_8_max_l1_linf_norm": 0.27466344833374023, + "layer_8_max_spectral_norm": 0.006197578273713589, + "layer_9_update_fnorm": 0.03207291290163994, + "layer_9_max_l1_linf_norm": 0.30059608817100525, + "layer_9_max_spectral_norm": 0.006731483619660139, + "layer_10_update_fnorm": 0.032167620956897736, + "layer_10_max_l1_linf_norm": 0.3143994212150574, + "layer_10_max_spectral_norm": 0.007059060502797365, + "layer_11_update_fnorm": 0.03214006870985031, + "layer_11_max_l1_linf_norm": 0.3266122341156006, + "layer_11_max_spectral_norm": 0.007313843816518784, + "layer_12_update_fnorm": 0.03202240169048309, + "layer_12_max_l1_linf_norm": 0.31926149129867554, + "layer_12_max_spectral_norm": 0.007242546416819096, + "total_sharpness": 0.006058819591999054, + "ip_v_neg_g": 0.007715177722275257, + "cos_v_neg_g": 0.000879378174431622, + "v_norm": 1.3462022542953491, + "g_norm": 6.517183780670166, + "hv_norm": 1.7507152557373047, + "cos_v_hv": 0.004658893682062626, + "hg_norm": 3710.199462890625, + "cos_g_hg": 0.4655340313911438, + "v_parallel_norm": 3.530313915689476e-05, + "v_perp_norm": 1.3462022542953491, + "layer_1_v_norm": 0.03216633200645447, + "layer_1_cos_v_neg_g": 0.009094123728573322, + "layer_2_v_norm": 0.030665496364235878, + "layer_2_cos_v_neg_g": 0.008652711287140846, + "layer_3_v_norm": 0.03001769445836544, + "layer_3_cos_v_neg_g": 0.01128850132226944, + "layer_4_v_norm": 0.03130469098687172, + "layer_4_cos_v_neg_g": 0.010796003974974155, + "layer_5_v_norm": 0.0317082405090332, + "layer_5_cos_v_neg_g": 0.010558143258094788, + "layer_6_v_norm": 0.031928651034832, + "layer_6_cos_v_neg_g": 0.011026794090867043, + "layer_7_v_norm": 0.0319284163415432, + "layer_7_cos_v_neg_g": 0.010960856452584267, + "layer_8_v_norm": 0.03193789720535278, + "layer_8_cos_v_neg_g": 0.01046806387603283, + "layer_9_v_norm": 0.03207291290163994, + "layer_9_cos_v_neg_g": 0.009156630374491215, + "layer_10_v_norm": 0.032167620956897736, + "layer_10_cos_v_neg_g": 0.009339268319308758, + "layer_11_v_norm": 0.03214006870985031, + "layer_11_cos_v_neg_g": 0.008049027062952518, + "layer_12_v_norm": 
0.03202240169048309, + "layer_12_cos_v_neg_g": 0.008412552997469902, + "layer_1_sharpness": 0.18651415407657623, + "layer_2_sharpness": 0.1388835906982422, + "layer_3_sharpness": 0.18953253328800201, + "layer_4_sharpness": 0.08909636735916138, + "layer_5_sharpness": 0.09053011238574982, + "layer_6_sharpness": 0.11255335807800293, + "layer_7_sharpness": 0.13722671568393707, + "layer_8_sharpness": 0.11708112061023712, + "layer_9_sharpness": 0.10606914758682251, + "layer_10_sharpness": 0.08343514800071716, + "layer_11_sharpness": 0.062386371195316315, + "layer_12_sharpness": 0.07245694100856781 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..ab55302ef39c01addd5751741e4322af715b4ee9 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3216971158981323, + "total_l1_linf_norm": 7301.0224609375, + "total_spectral_norm": 1.3216973543167114, + "layer_1_update_fnorm": 0.0320393405854702, + "layer_1_max_l1_linf_norm": 0.32527798414230347, + "layer_1_max_spectral_norm": 0.007169150281697512, + "layer_2_update_fnorm": 0.030705301091074944, + "layer_2_max_l1_linf_norm": 0.34410691261291504, + "layer_2_max_spectral_norm": 0.007646150887012482, + "layer_3_update_fnorm": 0.030126843601465225, + "layer_3_max_l1_linf_norm": 0.328671395778656, + "layer_3_max_spectral_norm": 0.007300395052880049, + "layer_4_update_fnorm": 0.031216204166412354, + "layer_4_max_l1_linf_norm": 0.31270790100097656, + "layer_4_max_spectral_norm": 0.00707215815782547, + "layer_5_update_fnorm": 0.03188507258892059, + "layer_5_max_l1_linf_norm": 0.32023170590400696, + "layer_5_max_spectral_norm": 0.0071714818477630615, + "layer_6_update_fnorm": 0.03217049315571785, + "layer_6_max_l1_linf_norm": 0.31458181142807007, + "layer_6_max_spectral_norm": 0.006975091062486172, + "layer_7_update_fnorm": 0.03222833573818207, + "layer_7_max_l1_linf_norm": 0.295228511095047, + "layer_7_max_spectral_norm": 0.006620274391025305, + "layer_8_update_fnorm": 0.03199062496423721, + "layer_8_max_l1_linf_norm": 0.2698976993560791, + "layer_8_max_spectral_norm": 0.006103898398578167, + "layer_9_update_fnorm": 0.031911756843328476, + "layer_9_max_l1_linf_norm": 0.290330171585083, + "layer_9_max_spectral_norm": 0.006508197635412216, + "layer_10_update_fnorm": 0.032011352479457855, + "layer_10_max_l1_linf_norm": 0.3012782335281372, + "layer_10_max_spectral_norm": 0.006784841883927584, + "layer_11_update_fnorm": 0.032053716480731964, + "layer_11_max_l1_linf_norm": 0.3129427433013916, + "layer_11_max_spectral_norm": 0.00709269754588604, + "layer_12_update_fnorm": 0.03197717294096947, + "layer_12_max_l1_linf_norm": 0.31733477115631104, + "layer_12_max_spectral_norm": 0.007202415727078915, + "total_sharpness": 0.005692018661648035, + "ip_v_neg_g": 0.0008304743096232414, + "cos_v_neg_g": 6.0644644690910354e-05, + "v_norm": 1.3216971158981323, + "g_norm": 10.361002922058105, + "hv_norm": 2.001962423324585, + "cos_v_hv": 0.0037578751798719168, + "hg_norm": 7038.44775390625, + "cos_g_hg": 0.7175339460372925, + "v_parallel_norm": 6.850585123174824e-06, + "v_perp_norm": 1.3216971158981323, + "layer_1_v_norm": 0.0320393405854702, + "layer_1_cos_v_neg_g": 0.00048541484284214675, + "layer_2_v_norm": 
0.030705301091074944, + "layer_2_cos_v_neg_g": 0.0020652241073548794, + "layer_3_v_norm": 0.030126843601465225, + "layer_3_cos_v_neg_g": 0.002806904027238488, + "layer_4_v_norm": 0.031216204166412354, + "layer_4_cos_v_neg_g": 0.003041531192138791, + "layer_5_v_norm": 0.03188507258892059, + "layer_5_cos_v_neg_g": 0.003341138595715165, + "layer_6_v_norm": 0.03217049315571785, + "layer_6_cos_v_neg_g": 0.001994987018406391, + "layer_7_v_norm": 0.03222833573818207, + "layer_7_cos_v_neg_g": 0.001787626533769071, + "layer_8_v_norm": 0.03199062496423721, + "layer_8_cos_v_neg_g": -0.00084727379726246, + "layer_9_v_norm": 0.031911756843328476, + "layer_9_cos_v_neg_g": 4.6646364353364334e-05, + "layer_10_v_norm": 0.032011352479457855, + "layer_10_cos_v_neg_g": -0.0006002928712405264, + "layer_11_v_norm": 0.032053716480731964, + "layer_11_cos_v_neg_g": -0.0003294787893537432, + "layer_12_v_norm": 0.03197717294096947, + "layer_12_cos_v_neg_g": 0.00035463031963445246, + "layer_1_sharpness": 0.5738632678985596, + "layer_2_sharpness": 0.2573045492172241, + "layer_3_sharpness": 0.14745378494262695, + "layer_4_sharpness": 0.07457933574914932, + "layer_5_sharpness": 0.07070598751306534, + "layer_6_sharpness": 0.08329597115516663, + "layer_7_sharpness": 0.12156271934509277, + "layer_8_sharpness": 0.09859274327754974, + "layer_9_sharpness": 0.07851359248161316, + "layer_10_sharpness": 0.05540863424539566, + "layer_11_sharpness": 0.0397496372461319, + "layer_12_sharpness": 0.05455417186021805 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..2e3e77233b08769992e199ee59fb54124fde0eef --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.2958507537841797, + "total_l1_linf_norm": 7166.27880859375, + "total_spectral_norm": 1.2958506345748901, + "layer_1_update_fnorm": 0.03226179629564285, + "layer_1_max_l1_linf_norm": 0.33447226881980896, + "layer_1_max_spectral_norm": 0.0073249503038823605, + "layer_2_update_fnorm": 0.030633535236120224, + "layer_2_max_l1_linf_norm": 0.3163105547428131, + "layer_2_max_spectral_norm": 0.007154745515435934, + "layer_3_update_fnorm": 0.030201788991689682, + "layer_3_max_l1_linf_norm": 0.3167698085308075, + "layer_3_max_spectral_norm": 0.007093660067766905, + "layer_4_update_fnorm": 0.03125875070691109, + "layer_4_max_l1_linf_norm": 0.326046347618103, + "layer_4_max_spectral_norm": 0.0072947051376104355, + "layer_5_update_fnorm": 0.03168220818042755, + "layer_5_max_l1_linf_norm": 0.28621745109558105, + "layer_5_max_spectral_norm": 0.006480748299509287, + "layer_6_update_fnorm": 0.03159967437386513, + "layer_6_max_l1_linf_norm": 0.25106722116470337, + "layer_6_max_spectral_norm": 0.005674024112522602, + "layer_7_update_fnorm": 0.031749460846185684, + "layer_7_max_l1_linf_norm": 0.24201777577400208, + "layer_7_max_spectral_norm": 0.005477843806147575, + "layer_8_update_fnorm": 0.03172741085290909, + "layer_8_max_l1_linf_norm": 0.2453041970729828, + "layer_8_max_spectral_norm": 0.005545938387513161, + "layer_9_update_fnorm": 0.03172803297638893, + "layer_9_max_l1_linf_norm": 0.2650982737541199, + "layer_9_max_spectral_norm": 0.005966860335320234, + "layer_10_update_fnorm": 0.03177830949425697, + "layer_10_max_l1_linf_norm": 
0.2810858488082886, + "layer_10_max_spectral_norm": 0.006326545029878616, + "layer_11_update_fnorm": 0.03200102597475052, + "layer_11_max_l1_linf_norm": 0.32053929567337036, + "layer_11_max_spectral_norm": 0.007219329942017794, + "layer_12_update_fnorm": 0.03209501877427101, + "layer_12_max_l1_linf_norm": 0.3406028151512146, + "layer_12_max_spectral_norm": 0.007502139545977116, + "total_sharpness": 0.00669412175193429, + "ip_v_neg_g": 0.005076222121715546, + "cos_v_neg_g": 0.0006915825069881976, + "v_norm": 1.2958507537841797, + "g_norm": 5.664239883422852, + "hv_norm": 1.7372894287109375, + "cos_v_hv": 0.004993170499801636, + "hg_norm": 918.3568115234375, + "cos_g_hg": 0.4670152962207794, + "v_parallel_norm": 2.4393977582803927e-05, + "v_perp_norm": 1.2958507537841797, + "layer_1_v_norm": 0.03226179629564285, + "layer_1_cos_v_neg_g": 0.011829730123281479, + "layer_2_v_norm": 0.030633535236120224, + "layer_2_cos_v_neg_g": 0.013015814125537872, + "layer_3_v_norm": 0.030201787129044533, + "layer_3_cos_v_neg_g": 0.013150767423212528, + "layer_4_v_norm": 0.03125875070691109, + "layer_4_cos_v_neg_g": 0.00870969332754612, + "layer_5_v_norm": 0.03168220818042755, + "layer_5_cos_v_neg_g": 0.007702280767261982, + "layer_6_v_norm": 0.03159967437386513, + "layer_6_cos_v_neg_g": 0.007083766162395477, + "layer_7_v_norm": 0.031749460846185684, + "layer_7_cos_v_neg_g": 0.006297025829553604, + "layer_8_v_norm": 0.03172741085290909, + "layer_8_cos_v_neg_g": 0.005531476344913244, + "layer_9_v_norm": 0.03172803297638893, + "layer_9_cos_v_neg_g": 0.004932621959596872, + "layer_10_v_norm": 0.03177830949425697, + "layer_10_cos_v_neg_g": 0.005206839181482792, + "layer_11_v_norm": 0.03200102597475052, + "layer_11_cos_v_neg_g": 0.005215159151703119, + "layer_12_v_norm": 0.03209501877427101, + "layer_12_cos_v_neg_g": 0.006100038532167673, + "layer_1_sharpness": 0.6087839603424072, + "layer_2_sharpness": 0.19841651618480682, + "layer_3_sharpness": 0.21373942494392395, + "layer_4_sharpness": 0.0940336138010025, + "layer_5_sharpness": 0.0835515633225441, + "layer_6_sharpness": 0.07778066396713257, + "layer_7_sharpness": 0.09701173007488251, + "layer_8_sharpness": 0.10056757181882858, + "layer_9_sharpness": 0.09144672006368637, + "layer_10_sharpness": 0.07496189326047897, + "layer_11_sharpness": 0.08049863576889038, + "layer_12_sharpness": 0.19684667885303497 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..a26a9813ab579073d573fddfd9d4497ed385b90e --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3203805685043335, + "total_l1_linf_norm": 7303.455078125, + "total_spectral_norm": 1.3203803300857544, + "layer_1_update_fnorm": 0.031908076256513596, + "layer_1_max_l1_linf_norm": 0.2828688621520996, + "layer_1_max_spectral_norm": 0.006464095786213875, + "layer_2_update_fnorm": 0.030764197930693626, + "layer_2_max_l1_linf_norm": 0.30975908041000366, + "layer_2_max_spectral_norm": 0.007055120076984167, + "layer_3_update_fnorm": 0.030552847310900688, + "layer_3_max_l1_linf_norm": 0.3101462423801422, + "layer_3_max_spectral_norm": 0.006935528479516506, + "layer_4_update_fnorm": 0.03116718679666519, + "layer_4_max_l1_linf_norm": 0.29955950379371643, + 
"layer_4_max_spectral_norm": 0.006802237592637539, + "layer_5_update_fnorm": 0.03162005916237831, + "layer_5_max_l1_linf_norm": 0.3021238446235657, + "layer_5_max_spectral_norm": 0.006796340923756361, + "layer_6_update_fnorm": 0.03199222311377525, + "layer_6_max_l1_linf_norm": 0.29524654150009155, + "layer_6_max_spectral_norm": 0.00655430369079113, + "layer_7_update_fnorm": 0.032032087445259094, + "layer_7_max_l1_linf_norm": 0.2826528549194336, + "layer_7_max_spectral_norm": 0.0063586910255253315, + "layer_8_update_fnorm": 0.031934939324855804, + "layer_8_max_l1_linf_norm": 0.27391842007637024, + "layer_8_max_spectral_norm": 0.006137714721262455, + "layer_9_update_fnorm": 0.0319521427154541, + "layer_9_max_l1_linf_norm": 0.2848885953426361, + "layer_9_max_spectral_norm": 0.006420737598091364, + "layer_10_update_fnorm": 0.03199918568134308, + "layer_10_max_l1_linf_norm": 0.2979046702384949, + "layer_10_max_spectral_norm": 0.006720225792378187, + "layer_11_update_fnorm": 0.0320274718105793, + "layer_11_max_l1_linf_norm": 0.315233051776886, + "layer_11_max_spectral_norm": 0.007094106171280146, + "layer_12_update_fnorm": 0.03209942206740379, + "layer_12_max_l1_linf_norm": 0.32265526056289673, + "layer_12_max_spectral_norm": 0.007245729677379131, + "total_sharpness": 0.005219950806349516, + "ip_v_neg_g": 0.0034161785151809454, + "cos_v_neg_g": 0.0004685988533310592, + "v_norm": 1.3203805685043335, + "g_norm": 5.521285533905029, + "hv_norm": 1.411096215248108, + "cos_v_hv": 0.004884373862296343, + "hg_norm": 716.5479125976562, + "cos_g_hg": 0.4758009910583496, + "v_parallel_norm": 1.82179719558917e-05, + "v_perp_norm": 1.3203805685043335, + "layer_1_v_norm": 0.031908076256513596, + "layer_1_cos_v_neg_g": 0.005556792952120304, + "layer_2_v_norm": 0.030764197930693626, + "layer_2_cos_v_neg_g": 0.003993383143097162, + "layer_3_v_norm": 0.030552847310900688, + "layer_3_cos_v_neg_g": 0.0032395124435424805, + "layer_4_v_norm": 0.03116718679666519, + "layer_4_cos_v_neg_g": 0.003970706835389137, + "layer_5_v_norm": 0.03162005916237831, + "layer_5_cos_v_neg_g": 0.005369650200009346, + "layer_6_v_norm": 0.03199222311377525, + "layer_6_cos_v_neg_g": 0.00599325355142355, + "layer_7_v_norm": 0.032032087445259094, + "layer_7_cos_v_neg_g": 0.006058682221919298, + "layer_8_v_norm": 0.031934939324855804, + "layer_8_cos_v_neg_g": 0.006343727000057697, + "layer_9_v_norm": 0.0319521427154541, + "layer_9_cos_v_neg_g": 0.006335978861898184, + "layer_10_v_norm": 0.03199918568134308, + "layer_10_cos_v_neg_g": 0.0056200092658400536, + "layer_11_v_norm": 0.0320274718105793, + "layer_11_cos_v_neg_g": 0.004692356567829847, + "layer_12_v_norm": 0.03209942206740379, + "layer_12_cos_v_neg_g": 0.004089595749974251, + "layer_1_sharpness": 0.10446345806121826, + "layer_2_sharpness": 0.08411020785570145, + "layer_3_sharpness": 0.12199588865041733, + "layer_4_sharpness": 0.06575708836317062, + "layer_5_sharpness": 0.07418949156999588, + "layer_6_sharpness": 0.09848053008317947, + "layer_7_sharpness": 0.1357353776693344, + "layer_8_sharpness": 0.11353603005409241, + "layer_9_sharpness": 0.09935684502124786, + "layer_10_sharpness": 0.07478383183479309, + "layer_11_sharpness": 0.06430783122777939, + "layer_12_sharpness": 0.10982950031757355 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_7000.json new file mode 100644 index 
0000000000000000000000000000000000000000..9116dbd08691e69f57513be2c36ae30279067ecc --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3261388540267944, + "total_l1_linf_norm": 7325.33251953125, + "total_spectral_norm": 1.3261388540267944, + "layer_1_update_fnorm": 0.03187530115246773, + "layer_1_max_l1_linf_norm": 0.29837989807128906, + "layer_1_max_spectral_norm": 0.006695646792650223, + "layer_2_update_fnorm": 0.030819926410913467, + "layer_2_max_l1_linf_norm": 0.3184865117073059, + "layer_2_max_spectral_norm": 0.007220964413136244, + "layer_3_update_fnorm": 0.030853843316435814, + "layer_3_max_l1_linf_norm": 0.35087400674819946, + "layer_3_max_spectral_norm": 0.007827941328287125, + "layer_4_update_fnorm": 0.031417928636074066, + "layer_4_max_l1_linf_norm": 0.32891327142715454, + "layer_4_max_spectral_norm": 0.007389882579445839, + "layer_5_update_fnorm": 0.031940486282110214, + "layer_5_max_l1_linf_norm": 0.320659875869751, + "layer_5_max_spectral_norm": 0.00718405656516552, + "layer_6_update_fnorm": 0.032097332179546356, + "layer_6_max_l1_linf_norm": 0.2902339696884155, + "layer_6_max_spectral_norm": 0.006509676109999418, + "layer_7_update_fnorm": 0.03216260299086571, + "layer_7_max_l1_linf_norm": 0.292232871055603, + "layer_7_max_spectral_norm": 0.006564976181834936, + "layer_8_update_fnorm": 0.032184574753046036, + "layer_8_max_l1_linf_norm": 0.30168062448501587, + "layer_8_max_spectral_norm": 0.006767789833247662, + "layer_9_update_fnorm": 0.03228526934981346, + "layer_9_max_l1_linf_norm": 0.3145710825920105, + "layer_9_max_spectral_norm": 0.007049139589071274, + "layer_10_update_fnorm": 0.032314326614141464, + "layer_10_max_l1_linf_norm": 0.3327866196632385, + "layer_10_max_spectral_norm": 0.007409657351672649, + "layer_11_update_fnorm": 0.03236936032772064, + "layer_11_max_l1_linf_norm": 0.3457062244415283, + "layer_11_max_spectral_norm": 0.0077680121175944805, + "layer_12_update_fnorm": 0.03266726806759834, + "layer_12_max_l1_linf_norm": 0.37830984592437744, + "layer_12_max_spectral_norm": 0.008448715321719646, + "total_sharpness": 0.005456616636365652, + "ip_v_neg_g": 0.004435503855347633, + "cos_v_neg_g": 0.0005931712803430855, + "v_norm": 1.3261388540267944, + "g_norm": 5.6386332511901855, + "hv_norm": 1.36168372631073, + "cos_v_hv": 0.005314178764820099, + "hg_norm": 757.9012451171875, + "cos_g_hg": 0.47097039222717285, + "v_parallel_norm": 1.7484753698226996e-05, + "v_perp_norm": 1.3261388540267944, + "layer_1_v_norm": 0.03187530115246773, + "layer_1_cos_v_neg_g": 0.007776085287332535, + "layer_2_v_norm": 0.030819926410913467, + "layer_2_cos_v_neg_g": 0.008488371036946774, + "layer_3_v_norm": 0.030853843316435814, + "layer_3_cos_v_neg_g": 0.009286166168749332, + "layer_4_v_norm": 0.031417928636074066, + "layer_4_cos_v_neg_g": 0.007144410628825426, + "layer_5_v_norm": 0.031940486282110214, + "layer_5_cos_v_neg_g": 0.008181373588740826, + "layer_6_v_norm": 0.032097332179546356, + "layer_6_cos_v_neg_g": 0.00659154262393713, + "layer_7_v_norm": 0.03216260299086571, + "layer_7_cos_v_neg_g": 0.00588897243142128, + "layer_8_v_norm": 0.032184574753046036, + "layer_8_cos_v_neg_g": 0.00598450843244791, + "layer_9_v_norm": 0.03228526934981346, + "layer_9_cos_v_neg_g": 0.006385354325175285, + "layer_10_v_norm": 0.032314326614141464, + "layer_10_cos_v_neg_g": 0.005497562699019909, + "layer_11_v_norm": 0.03236936032772064, + "layer_11_cos_v_neg_g": 0.005017157644033432, + 
"layer_12_v_norm": 0.03266726806759834, + "layer_12_cos_v_neg_g": 0.005423249211162329, + "layer_1_sharpness": 0.19554781913757324, + "layer_2_sharpness": 0.10292839258909225, + "layer_3_sharpness": 0.16319982707500458, + "layer_4_sharpness": 0.07494111359119415, + "layer_5_sharpness": 0.07788334786891937, + "layer_6_sharpness": 0.08681853860616684, + "layer_7_sharpness": 0.10619786381721497, + "layer_8_sharpness": 0.1111760064959526, + "layer_9_sharpness": 0.1014963760972023, + "layer_10_sharpness": 0.0748339593410492, + "layer_11_sharpness": 0.05987529456615448, + "layer_12_sharpness": 0.10403762757778168 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..ceb0fb52b842fee1492ef9be86ba07645f8e77f1 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3597354888916016, + "total_l1_linf_norm": 7499.0849609375, + "total_spectral_norm": 1.359735369682312, + "layer_1_update_fnorm": 0.032283637672662735, + "layer_1_max_l1_linf_norm": 0.30894309282302856, + "layer_1_max_spectral_norm": 0.006961227860301733, + "layer_2_update_fnorm": 0.03116331435739994, + "layer_2_max_l1_linf_norm": 0.35043567419052124, + "layer_2_max_spectral_norm": 0.007787787821143866, + "layer_3_update_fnorm": 0.03088752180337906, + "layer_3_max_l1_linf_norm": 0.35326653718948364, + "layer_3_max_spectral_norm": 0.00792133342474699, + "layer_4_update_fnorm": 0.03165978193283081, + "layer_4_max_l1_linf_norm": 0.34404170513153076, + "layer_4_max_spectral_norm": 0.007743852213025093, + "layer_5_update_fnorm": 0.03223529830574989, + "layer_5_max_l1_linf_norm": 0.33084946870803833, + "layer_5_max_spectral_norm": 0.007454987149685621, + "layer_6_update_fnorm": 0.03249477595090866, + "layer_6_max_l1_linf_norm": 0.33295583724975586, + "layer_6_max_spectral_norm": 0.00746857188642025, + "layer_7_update_fnorm": 0.03233249485492706, + "layer_7_max_l1_linf_norm": 0.3240411877632141, + "layer_7_max_spectral_norm": 0.007117407862097025, + "layer_8_update_fnorm": 0.03224983811378479, + "layer_8_max_l1_linf_norm": 0.31052809953689575, + "layer_8_max_spectral_norm": 0.0069197118282318115, + "layer_9_update_fnorm": 0.032197777181863785, + "layer_9_max_l1_linf_norm": 0.3063289523124695, + "layer_9_max_spectral_norm": 0.006938084028661251, + "layer_10_update_fnorm": 0.032168198376894, + "layer_10_max_l1_linf_norm": 0.31881049275398254, + "layer_10_max_spectral_norm": 0.007188773714005947, + "layer_11_update_fnorm": 0.03224710747599602, + "layer_11_max_l1_linf_norm": 0.33020731806755066, + "layer_11_max_spectral_norm": 0.007510328199714422, + "layer_12_update_fnorm": 0.032178204506635666, + "layer_12_max_l1_linf_norm": 0.3323157727718353, + "layer_12_max_spectral_norm": 0.007536423392593861, + "total_sharpness": 0.005589582491666079, + "ip_v_neg_g": 0.00606638565659523, + "cos_v_neg_g": 0.0007372485706582665, + "v_norm": 1.3597354888916016, + "g_norm": 6.051480293273926, + "hv_norm": 1.6813287734985352, + "cos_v_hv": 0.004520445130765438, + "hg_norm": 1900.02392578125, + "cos_g_hg": 0.44552797079086304, + "v_parallel_norm": 2.3232529201777652e-05, + "v_perp_norm": 1.3597354888916016, + "layer_1_v_norm": 0.032283637672662735, + "layer_1_cos_v_neg_g": 0.00737233879044652, + 
"layer_2_v_norm": 0.03116331435739994, + "layer_2_cos_v_neg_g": 0.009766452945768833, + "layer_3_v_norm": 0.03088752180337906, + "layer_3_cos_v_neg_g": 0.011829342693090439, + "layer_4_v_norm": 0.03165978193283081, + "layer_4_cos_v_neg_g": 0.010791348293423653, + "layer_5_v_norm": 0.03223529830574989, + "layer_5_cos_v_neg_g": 0.010436533018946648, + "layer_6_v_norm": 0.03249477595090866, + "layer_6_cos_v_neg_g": 0.008446432650089264, + "layer_7_v_norm": 0.03233249485492706, + "layer_7_cos_v_neg_g": 0.008889421820640564, + "layer_8_v_norm": 0.03224983811378479, + "layer_8_cos_v_neg_g": 0.009544864296913147, + "layer_9_v_norm": 0.032197777181863785, + "layer_9_cos_v_neg_g": 0.008808666840195656, + "layer_10_v_norm": 0.032168198376894, + "layer_10_cos_v_neg_g": 0.007522475440055132, + "layer_11_v_norm": 0.03224710747599602, + "layer_11_cos_v_neg_g": 0.006212552543729544, + "layer_12_v_norm": 0.032178204506635666, + "layer_12_cos_v_neg_g": 0.005252732429653406, + "layer_1_sharpness": 0.1865810751914978, + "layer_2_sharpness": 0.07500918954610825, + "layer_3_sharpness": 0.15534529089927673, + "layer_4_sharpness": 0.08997531980276108, + "layer_5_sharpness": 0.09050479531288147, + "layer_6_sharpness": 0.11506108939647675, + "layer_7_sharpness": 0.17104440927505493, + "layer_8_sharpness": 0.1670108139514923, + "layer_9_sharpness": 0.10599181056022644, + "layer_10_sharpness": 0.07225503027439117, + "layer_11_sharpness": 0.056074656546115875, + "layer_12_sharpness": 0.10279343277215958 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_8000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..01b6d624350350a40d9a8428a4e41e1862a01e1e --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3484687805175781, + "total_l1_linf_norm": 7437.2080078125, + "total_spectral_norm": 1.3484686613082886, + "layer_1_update_fnorm": 0.0318412184715271, + "layer_1_max_l1_linf_norm": 0.29662781953811646, + "layer_1_max_spectral_norm": 0.006706243846565485, + "layer_2_update_fnorm": 0.031013058498501778, + "layer_2_max_l1_linf_norm": 0.31841814517974854, + "layer_2_max_spectral_norm": 0.0072043295949697495, + "layer_3_update_fnorm": 0.030979882925748825, + "layer_3_max_l1_linf_norm": 0.33004388213157654, + "layer_3_max_spectral_norm": 0.0074258954264223576, + "layer_4_update_fnorm": 0.031438104808330536, + "layer_4_max_l1_linf_norm": 0.32475215196609497, + "layer_4_max_spectral_norm": 0.007333115208894014, + "layer_5_update_fnorm": 0.032130103558301926, + "layer_5_max_l1_linf_norm": 0.32560646533966064, + "layer_5_max_spectral_norm": 0.007317130919545889, + "layer_6_update_fnorm": 0.0322086438536644, + "layer_6_max_l1_linf_norm": 0.3103569447994232, + "layer_6_max_spectral_norm": 0.0069671859964728355, + "layer_7_update_fnorm": 0.03223052993416786, + "layer_7_max_l1_linf_norm": 0.2918122410774231, + "layer_7_max_spectral_norm": 0.006571553647518158, + "layer_8_update_fnorm": 0.03216136619448662, + "layer_8_max_l1_linf_norm": 0.3042507767677307, + "layer_8_max_spectral_norm": 0.006846852600574493, + "layer_9_update_fnorm": 0.03228376433253288, + "layer_9_max_l1_linf_norm": 0.317777544260025, + "layer_9_max_spectral_norm": 0.00718745356425643, + "layer_10_update_fnorm": 0.03233669698238373, + 
"layer_10_max_l1_linf_norm": 0.32520371675491333, + "layer_10_max_spectral_norm": 0.0073796967044472694, + "layer_11_update_fnorm": 0.03251294046640396, + "layer_11_max_l1_linf_norm": 0.3557322025299072, + "layer_11_max_spectral_norm": 0.007927636615931988, + "layer_12_update_fnorm": 0.032465774565935135, + "layer_12_max_l1_linf_norm": 0.35022449493408203, + "layer_12_max_spectral_norm": 0.007856774143874645, + "total_sharpness": 0.0042290096171200275, + "ip_v_neg_g": 0.0039625647477805614, + "cos_v_neg_g": 0.0005419973167590797, + "v_norm": 1.3484687805175781, + "g_norm": 5.4217352867126465, + "hv_norm": 1.2015444040298462, + "cos_v_hv": 0.004746131133288145, + "hg_norm": 779.6546020507812, + "cos_g_hg": 0.48523586988449097, + "v_parallel_norm": 1.7662860045675188e-05, + "v_perp_norm": 1.3484687805175781, + "layer_1_v_norm": 0.0318412184715271, + "layer_1_cos_v_neg_g": 0.0045952876098454, + "layer_2_v_norm": 0.031013058498501778, + "layer_2_cos_v_neg_g": 0.004482142627239227, + "layer_3_v_norm": 0.030979882925748825, + "layer_3_cos_v_neg_g": 0.00461479090154171, + "layer_4_v_norm": 0.031438104808330536, + "layer_4_cos_v_neg_g": 0.004089283756911755, + "layer_5_v_norm": 0.032130103558301926, + "layer_5_cos_v_neg_g": 0.004789920523762703, + "layer_6_v_norm": 0.0322086475789547, + "layer_6_cos_v_neg_g": 0.005932888947427273, + "layer_7_v_norm": 0.03223052993416786, + "layer_7_cos_v_neg_g": 0.007367013953626156, + "layer_8_v_norm": 0.03216136619448662, + "layer_8_cos_v_neg_g": 0.007999429479241371, + "layer_9_v_norm": 0.03228376433253288, + "layer_9_cos_v_neg_g": 0.007371585816144943, + "layer_10_v_norm": 0.03233669698238373, + "layer_10_cos_v_neg_g": 0.005904595833271742, + "layer_11_v_norm": 0.03251294046640396, + "layer_11_cos_v_neg_g": 0.00526147847995162, + "layer_12_v_norm": 0.032465774565935135, + "layer_12_cos_v_neg_g": 0.004580747336149216, + "layer_1_sharpness": 0.07487425953149796, + "layer_2_sharpness": 0.04934367537498474, + "layer_3_sharpness": 0.07908590883016586, + "layer_4_sharpness": 0.0545966699719429, + "layer_5_sharpness": 0.0656876415014267, + "layer_6_sharpness": 0.07899202406406403, + "layer_7_sharpness": 0.10769510269165039, + "layer_8_sharpness": 0.11710026115179062, + "layer_9_sharpness": 0.10538604855537415, + "layer_10_sharpness": 0.08065818250179291, + "layer_11_sharpness": 0.0630689188838005, + "layer_12_sharpness": 0.07812057435512543 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..14c11cca32d11a7f6efd083437d6e56f0a01bf71 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3749970197677612, + "total_l1_linf_norm": 7575.513671875, + "total_spectral_norm": 1.3749970197677612, + "layer_1_update_fnorm": 0.0319686233997345, + "layer_1_max_l1_linf_norm": 0.3160783052444458, + "layer_1_max_spectral_norm": 0.007115854416042566, + "layer_2_update_fnorm": 0.03106447122991085, + "layer_2_max_l1_linf_norm": 0.35146987438201904, + "layer_2_max_spectral_norm": 0.007874013856053352, + "layer_3_update_fnorm": 0.031004969030618668, + "layer_3_max_l1_linf_norm": 0.34948819875717163, + "layer_3_max_spectral_norm": 0.007837352342903614, + "layer_4_update_fnorm": 0.031806960701942444, + "layer_4_max_l1_linf_norm": 
0.3475947976112366, + "layer_4_max_spectral_norm": 0.007833930663764477, + "layer_5_update_fnorm": 0.03205224499106407, + "layer_5_max_l1_linf_norm": 0.3257816731929779, + "layer_5_max_spectral_norm": 0.007361320778727531, + "layer_6_update_fnorm": 0.032220978289842606, + "layer_6_max_l1_linf_norm": 0.313170850276947, + "layer_6_max_spectral_norm": 0.0070586721412837505, + "layer_7_update_fnorm": 0.03231155499815941, + "layer_7_max_l1_linf_norm": 0.29774120450019836, + "layer_7_max_spectral_norm": 0.006640369538217783, + "layer_8_update_fnorm": 0.03221604973077774, + "layer_8_max_l1_linf_norm": 0.29827165603637695, + "layer_8_max_spectral_norm": 0.006693960167467594, + "layer_9_update_fnorm": 0.0321325920522213, + "layer_9_max_l1_linf_norm": 0.31510141491889954, + "layer_9_max_spectral_norm": 0.007044073194265366, + "layer_10_update_fnorm": 0.032247088849544525, + "layer_10_max_l1_linf_norm": 0.329075425863266, + "layer_10_max_spectral_norm": 0.007336932700127363, + "layer_11_update_fnorm": 0.03235691785812378, + "layer_11_max_l1_linf_norm": 0.3450992703437805, + "layer_11_max_spectral_norm": 0.007834495045244694, + "layer_12_update_fnorm": 0.03216085210442543, + "layer_12_max_l1_linf_norm": 0.33193910121917725, + "layer_12_max_spectral_norm": 0.00757605629041791, + "total_sharpness": 0.004589370917528868, + "ip_v_neg_g": 0.005043173208832741, + "cos_v_neg_g": 0.0006791871855966747, + "v_norm": 1.3749970197677612, + "g_norm": 5.400234699249268, + "hv_norm": 1.205273985862732, + "cos_v_hv": 0.005235631950199604, + "hg_norm": 909.6495971679688, + "cos_g_hg": 0.42320093512535095, + "v_parallel_norm": 2.0167690308880992e-05, + "v_perp_norm": 1.3749970197677612, + "layer_1_v_norm": 0.0319686233997345, + "layer_1_cos_v_neg_g": 0.009118380956351757, + "layer_2_v_norm": 0.03106447122991085, + "layer_2_cos_v_neg_g": 0.007005100604146719, + "layer_3_v_norm": 0.03100496716797352, + "layer_3_cos_v_neg_g": 0.006126410327851772, + "layer_4_v_norm": 0.031806960701942444, + "layer_4_cos_v_neg_g": 0.006232158746570349, + "layer_5_v_norm": 0.03205224499106407, + "layer_5_cos_v_neg_g": 0.010137204080820084, + "layer_6_v_norm": 0.032220978289842606, + "layer_6_cos_v_neg_g": 0.008211291395127773, + "layer_7_v_norm": 0.03231155499815941, + "layer_7_cos_v_neg_g": 0.008966444991528988, + "layer_8_v_norm": 0.03221604973077774, + "layer_8_cos_v_neg_g": 0.009117200039327145, + "layer_9_v_norm": 0.0321325920522213, + "layer_9_cos_v_neg_g": 0.008550960570573807, + "layer_10_v_norm": 0.032247088849544525, + "layer_10_cos_v_neg_g": 0.007227173540741205, + "layer_11_v_norm": 0.03235691785812378, + "layer_11_cos_v_neg_g": 0.005380959250032902, + "layer_12_v_norm": 0.03216085210442543, + "layer_12_cos_v_neg_g": 0.0052596344612538815, + "layer_1_sharpness": 0.16828395426273346, + "layer_2_sharpness": 0.06605949252843857, + "layer_3_sharpness": 0.10142284631729126, + "layer_4_sharpness": 0.06320121884346008, + "layer_5_sharpness": 0.08374049514532089, + "layer_6_sharpness": 0.09842861443758011, + "layer_7_sharpness": 0.13323991000652313, + "layer_8_sharpness": 0.1287829577922821, + "layer_9_sharpness": 0.10014689713716507, + "layer_10_sharpness": 0.06698910892009735, + "layer_11_sharpness": 0.0544852614402771, + "layer_12_sharpness": 0.08448927849531174 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_9000.json new file mode 100644 index 
0000000000000000000000000000000000000000..6c0bab518cb5b2738ad77ebcd434650676661e7e --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.366684079170227, + "total_l1_linf_norm": 7540.3994140625, + "total_spectral_norm": 1.3666843175888062, + "layer_1_update_fnorm": 0.03174702078104019, + "layer_1_max_l1_linf_norm": 0.2893277108669281, + "layer_1_max_spectral_norm": 0.006615078076720238, + "layer_2_update_fnorm": 0.030971432104706764, + "layer_2_max_l1_linf_norm": 0.3196715712547302, + "layer_2_max_spectral_norm": 0.007188989780843258, + "layer_3_update_fnorm": 0.03108866885304451, + "layer_3_max_l1_linf_norm": 0.3274044096469879, + "layer_3_max_spectral_norm": 0.007318228483200073, + "layer_4_update_fnorm": 0.031503476202487946, + "layer_4_max_l1_linf_norm": 0.3338027596473694, + "layer_4_max_spectral_norm": 0.007490584626793861, + "layer_5_update_fnorm": 0.032206278294324875, + "layer_5_max_l1_linf_norm": 0.32880309224128723, + "layer_5_max_spectral_norm": 0.007436299230903387, + "layer_6_update_fnorm": 0.03228973597288132, + "layer_6_max_l1_linf_norm": 0.31450527906417847, + "layer_6_max_spectral_norm": 0.007133157923817635, + "layer_7_update_fnorm": 0.03226558864116669, + "layer_7_max_l1_linf_norm": 0.30273377895355225, + "layer_7_max_spectral_norm": 0.006848873104900122, + "layer_8_update_fnorm": 0.032096974551677704, + "layer_8_max_l1_linf_norm": 0.3046645522117615, + "layer_8_max_spectral_norm": 0.0068410844542086124, + "layer_9_update_fnorm": 0.03216463699936867, + "layer_9_max_l1_linf_norm": 0.31049787998199463, + "layer_9_max_spectral_norm": 0.006921937223523855, + "layer_10_update_fnorm": 0.03217124193906784, + "layer_10_max_l1_linf_norm": 0.328074187040329, + "layer_10_max_spectral_norm": 0.007337662857025862, + "layer_11_update_fnorm": 0.03244709223508835, + "layer_11_max_l1_linf_norm": 0.35803571343421936, + "layer_11_max_spectral_norm": 0.007996095344424248, + "layer_12_update_fnorm": 0.03227302432060242, + "layer_12_max_l1_linf_norm": 0.34576597809791565, + "layer_12_max_spectral_norm": 0.007726185955107212, + "total_sharpness": 0.004157327115535736, + "ip_v_neg_g": 0.005236693192273378, + "cos_v_neg_g": 0.0006151679554022849, + "v_norm": 1.366684079170227, + "g_norm": 6.2286696434021, + "hv_norm": 1.2622783184051514, + "cos_v_hv": 0.004501188639551401, + "hg_norm": 5961.3193359375, + "cos_g_hg": 0.49993282556533813, + "v_parallel_norm": 2.1148964151507244e-05, + "v_perp_norm": 1.366684079170227, + "layer_1_v_norm": 0.03174702078104019, + "layer_1_cos_v_neg_g": 0.0032041824888437986, + "layer_2_v_norm": 0.030971432104706764, + "layer_2_cos_v_neg_g": 0.004122733138501644, + "layer_3_v_norm": 0.03108866699039936, + "layer_3_cos_v_neg_g": 0.0078048668801784515, + "layer_4_v_norm": 0.031503476202487946, + "layer_4_cos_v_neg_g": 0.008523700758814812, + "layer_5_v_norm": 0.032206278294324875, + "layer_5_cos_v_neg_g": 0.00718374690040946, + "layer_6_v_norm": 0.03228973597288132, + "layer_6_cos_v_neg_g": 0.008046547882258892, + "layer_7_v_norm": 0.03226558864116669, + "layer_7_cos_v_neg_g": 0.009061476215720177, + "layer_8_v_norm": 0.032096974551677704, + "layer_8_cos_v_neg_g": 0.009904209524393082, + "layer_9_v_norm": 0.03216463699936867, + "layer_9_cos_v_neg_g": 0.009534482844173908, + "layer_10_v_norm": 0.03217124193906784, + "layer_10_cos_v_neg_g": 0.007096940651535988, + "layer_11_v_norm": 0.03244708850979805, + "layer_11_cos_v_neg_g": 0.0060593197122216225, + 
"layer_12_v_norm": 0.03227302432060242, + "layer_12_cos_v_neg_g": 0.0018476782133802772, + "layer_1_sharpness": 0.05967143177986145, + "layer_2_sharpness": 0.044497277587652206, + "layer_3_sharpness": 0.06756199151277542, + "layer_4_sharpness": 0.0522412471473217, + "layer_5_sharpness": 0.06211297959089279, + "layer_6_sharpness": 0.08037236332893372, + "layer_7_sharpness": 0.11043667793273926, + "layer_8_sharpness": 0.10820307582616806, + "layer_9_sharpness": 0.09467986971139908, + "layer_10_sharpness": 0.06952881813049316, + "layer_11_sharpness": 0.06909587979316711, + "layer_12_sharpness": 0.13738220930099487 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..cee926b8fd0a2dad64c88235037dfff1f735c5d4 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.319138765335083, + "total_l1_linf_norm": 7276.298828125, + "total_spectral_norm": 1.3191386461257935, + "layer_1_update_fnorm": 0.0322347953915596, + "layer_1_max_l1_linf_norm": 0.32879430055618286, + "layer_1_max_spectral_norm": 0.0073655410669744015, + "layer_2_update_fnorm": 0.031047388911247253, + "layer_2_max_l1_linf_norm": 0.34248611330986023, + "layer_2_max_spectral_norm": 0.007683875039219856, + "layer_3_update_fnorm": 0.030998148024082184, + "layer_3_max_l1_linf_norm": 0.3341134786605835, + "layer_3_max_spectral_norm": 0.007517155725508928, + "layer_4_update_fnorm": 0.031509820371866226, + "layer_4_max_l1_linf_norm": 0.33155035972595215, + "layer_4_max_spectral_norm": 0.007437750231474638, + "layer_5_update_fnorm": 0.031956471502780914, + "layer_5_max_l1_linf_norm": 0.3158324956893921, + "layer_5_max_spectral_norm": 0.0071320259012281895, + "layer_6_update_fnorm": 0.032153163105249405, + "layer_6_max_l1_linf_norm": 0.29670143127441406, + "layer_6_max_spectral_norm": 0.006690302398055792, + "layer_7_update_fnorm": 0.032172176986932755, + "layer_7_max_l1_linf_norm": 0.3066243827342987, + "layer_7_max_spectral_norm": 0.006819047033786774, + "layer_8_update_fnorm": 0.03219606354832649, + "layer_8_max_l1_linf_norm": 0.3038346469402313, + "layer_8_max_spectral_norm": 0.006849884055554867, + "layer_9_update_fnorm": 0.03197639063000679, + "layer_9_max_l1_linf_norm": 0.29767906665802, + "layer_9_max_spectral_norm": 0.006695257965475321, + "layer_10_update_fnorm": 0.03213949874043465, + "layer_10_max_l1_linf_norm": 0.3125397562980652, + "layer_10_max_spectral_norm": 0.007007177919149399, + "layer_11_update_fnorm": 0.032182320952415466, + "layer_11_max_l1_linf_norm": 0.33596354722976685, + "layer_11_max_spectral_norm": 0.007537216413766146, + "layer_12_update_fnorm": 0.03225677087903023, + "layer_12_max_l1_linf_norm": 0.33018121123313904, + "layer_12_max_spectral_norm": 0.00749189592897892, + "total_sharpness": 0.0047675627283751965, + "ip_v_neg_g": 0.0031207907013595104, + "cos_v_neg_g": 0.00045972768566571176, + "v_norm": 1.319138765335083, + "g_norm": 5.1460442543029785, + "hv_norm": 1.1598918437957764, + "cos_v_hv": 0.005422123242169619, + "hg_norm": 652.0316772460938, + "cos_g_hg": 0.46930965781211853, + "v_parallel_norm": 1.2773986782121938e-05, + "v_perp_norm": 1.319138765335083, + "layer_1_v_norm": 0.0322347953915596, + "layer_1_cos_v_neg_g": 0.006642181891947985, + 
"layer_2_v_norm": 0.031047388911247253, + "layer_2_cos_v_neg_g": 0.007533764000982046, + "layer_3_v_norm": 0.030998148024082184, + "layer_3_cos_v_neg_g": 0.0061539276503026485, + "layer_4_v_norm": 0.031509820371866226, + "layer_4_cos_v_neg_g": 0.006744004786014557, + "layer_5_v_norm": 0.031956471502780914, + "layer_5_cos_v_neg_g": 0.005314326379448175, + "layer_6_v_norm": 0.032153163105249405, + "layer_6_cos_v_neg_g": 0.005759240128099918, + "layer_7_v_norm": 0.032172176986932755, + "layer_7_cos_v_neg_g": 0.006258346140384674, + "layer_8_v_norm": 0.03219606354832649, + "layer_8_cos_v_neg_g": 0.006660227198153734, + "layer_9_v_norm": 0.03197639063000679, + "layer_9_cos_v_neg_g": 0.005450243595987558, + "layer_10_v_norm": 0.03213949874043465, + "layer_10_cos_v_neg_g": 0.0045322333462536335, + "layer_11_v_norm": 0.032182324677705765, + "layer_11_cos_v_neg_g": 0.00391008285805583, + "layer_12_v_norm": 0.03225677087903023, + "layer_12_cos_v_neg_g": 0.0022432920522987843, + "layer_1_sharpness": 0.2476026713848114, + "layer_2_sharpness": 0.07144961506128311, + "layer_3_sharpness": 0.08485725522041321, + "layer_4_sharpness": 0.058519043028354645, + "layer_5_sharpness": 0.07272467017173767, + "layer_6_sharpness": 0.09036849439144135, + "layer_7_sharpness": 0.13401204347610474, + "layer_8_sharpness": 0.1254374384880066, + "layer_9_sharpness": 0.08628366887569427, + "layer_10_sharpness": 0.0660109743475914, + "layer_11_sharpness": 0.05392821133136749, + "layer_12_sharpness": 0.0714745968580246 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/training_log.txt b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..02d251d00f665115e61854478b675474ca9b72e0 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. 
+ +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" 
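+ # recap of the shard layout consumed above (descriptive only): a 1024-byte header of 256 int32s
+ # (header[0]=20240520 magic, header[1]=1 version, header[2]=ntok token count), followed by exactly ntok uint16 token ids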
+ return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in 
range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. 
+ # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack("<B", length)) # write the token length as a 1-byte unsigned integer + file.write(b) # write the raw token bytes + # Note: update_direction_v is identical on every rank (DDP has already synchronized the grads -> same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. 
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
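+ # A sketch of what the code below estimates (assuming the sampled micro-batches are
+ # representative of the training distribution):
+ #     L_hvp(theta)    ~= (1/N) * sum_{i=1..N} L(theta; B_i)
+ #     Hv               = d/dtheta <grad L_hvp(theta), v>      (computed via double backward)
+ #     total_sharpness  = <v, Hv> / <v, v>
+ # i.e. the Rayleigh quotient of the Hessian of L_hvp along the actual optimizer update v.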
+ loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = <v, -g> ; cos_v_neg_g = <v, -g> / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm =
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
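+ Example of the returned string (illustrative, made-up values):
+ "total_sharp:1.2345e-02 L1_sharp:9.8765e-01 ... total_fnorm:9.0000e-01 ... cos_v_neg_g:1.0000e-03 ... L12_v_norm:2.2000e-02"
+ i.e. space-separated name:value pairs in scientific notation, one pair per metric emitted below.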
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
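+ # For example, transformer.h.0.mlp.c_fc.weight (2-D) lands in the decay group below, while
+ # transformer.h.0.ln_2.bias and other 1-D tensors land in the no-decay group.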
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + 
device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
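+ # Note: combined with the post-step parameters further below, this snapshot yields
+ # last_training_update = theta_before_step - theta_after_step, the negative of the realized
+ # parameter step; since the Rayleigh quotient v^T H v / v^T v is sign-invariant, the sharpness
+ # probe measures curvature along the update the optimizer actually applied.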
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026603 +step:0 train loss:11.019232 +step:1 train loss:11.024825 +step:2 train loss:11.018543 +step:3 train loss:11.016764 +step:4 train loss:11.011629 +step:5 train loss:11.006146 +step:6 train loss:10.996649 +step:7 train loss:10.989814 +step:8 train loss:10.979502 +step:9 train loss:10.968818 +step:10 train loss:10.954552 +step:11 train loss:10.945137 +step:12 train loss:10.925699 +step:13 train loss:10.912355 +step:14 train loss:10.891305 +step:15 train loss:10.874607 +step:16 train loss:10.854689 +step:17 train loss:10.836140 +step:18 train loss:10.812090 +step:19 train loss:10.789518 +step:20 train loss:10.761378 +step:21 train loss:10.739790 +step:22 train loss:10.707527 +step:23 train loss:10.684309 +step:24 train loss:10.647940 +step:25 train loss:10.625850 +step:26 train loss:10.586981 +step:27 train loss:10.554930 +step:28 train loss:10.525646 +step:29 train loss:10.490961 +step:30 train loss:10.453738 +step:31 train loss:10.413128 +step:32 train loss:10.371183 +step:33 train loss:10.335737 +step:34 train loss:10.300290 +step:35 train loss:10.250204 +step:36 train loss:10.210146 +step:37 train loss:10.156153 
+step:38 train loss:10.120650 +step:39 train loss:10.068200 +step:40 train loss:10.025005 +step:41 train loss:9.969195 +step:42 train loss:9.934341 +step:43 train loss:9.863053 +step:44 train loss:9.825006 +step:45 train loss:9.763753 +step:46 train loss:9.725676 +step:47 train loss:9.665593 +step:48 train loss:9.611372 +step:49 train loss:9.546312 +step:50 train loss:9.487166 +step:51 train loss:9.421848 +step:52 train loss:9.381457 +step:53 train loss:9.317215 +step:54 train loss:9.263254 +step:55 train loss:9.192246 +step:56 train loss:9.134863 +step:57 train loss:9.085656 +step:58 train loss:9.007865 +step:59 train loss:8.954748 +step:60 train loss:8.893784 +step:61 train loss:8.838674 +step:62 train loss:8.777871 +step:63 train loss:8.752894 +step:64 train loss:8.645260 +step:65 train loss:8.592638 +step:66 train loss:8.549870 +step:67 train loss:8.495455 +step:68 train loss:8.437725 +step:69 train loss:8.370248 +step:70 train loss:8.315645 +step:71 train loss:8.239937 +step:72 train loss:8.218178 +step:73 train loss:8.149031 +step:74 train loss:8.122785 +step:75 train loss:8.043577 +step:76 train loss:8.057839 +step:77 train loss:7.968667 +step:78 train loss:7.837279 +step:79 train loss:7.864216 +step:80 train loss:7.829782 +step:81 train loss:7.833015 +step:82 train loss:7.798828 +step:83 train loss:7.742942 +step:84 train loss:7.706701 +step:85 train loss:7.669097 +step:86 train loss:7.641677 +step:87 train loss:7.612947 +step:88 train loss:7.609765 +step:89 train loss:7.572526 +step:90 train loss:7.605909 +step:91 train loss:7.607804 +step:92 train loss:7.601636 +step:93 train loss:7.557952 +step:94 train loss:7.542242 +step:95 train loss:7.504277 +step:96 train loss:7.581097 +step:97 train loss:7.538249 +step:98 train loss:7.532428 +step:99 train loss:7.502991 +step:100 train loss:7.572423 +step:101 train loss:7.458474 +step:102 train loss:7.450792 +step:103 train loss:7.437491 +step:104 train loss:7.475575 +step:105 train loss:7.525509 +step:106 train loss:7.465397 +step:107 train loss:7.430976 +step:108 train loss:7.435676 +step:109 train loss:7.464005 +step:110 train loss:7.394904 +step:111 train loss:7.399233 +step:112 train loss:7.386148 +step:113 train loss:7.346396 +step:114 train loss:7.402539 +step:115 train loss:7.342640 +step:116 train loss:7.318952 +step:117 train loss:7.255313 +step:118 train loss:7.314672 +step:119 train loss:7.254033 +step:120 train loss:7.259473 +step:121 train loss:7.184060 +step:122 train loss:7.253494 +step:123 train loss:7.178228 +step:124 train loss:7.160423 +step:125 train loss:7.128304 +step:126 train loss:7.202183 +step:127 train loss:7.120964 +step:128 train loss:7.142548 +step:129 train loss:7.113865 +step:130 train loss:7.158130 +step:131 train loss:7.087564 +step:132 train loss:6.996175 +step:133 train loss:7.059991 +step:134 train loss:7.012339 +step:135 train loss:6.926322 +step:136 train loss:6.950997 +step:137 train loss:6.946031 +step:138 train loss:6.872403 +step:139 train loss:6.946494 +step:140 train loss:6.850888 +step:141 train loss:6.940053 +step:142 train loss:6.882112 +step:143 train loss:6.886408 +step:144 train loss:6.847391 +step:145 train loss:6.780233 +step:146 train loss:6.781609 +step:147 train loss:6.833786 +step:148 train loss:6.830864 +step:149 train loss:6.769192 +step:150 train loss:6.776214 +step:151 train loss:6.682639 +step:152 train loss:6.710994 +step:153 train loss:6.679974 +step:154 train loss:6.758445 +step:155 train loss:6.723414 +step:156 train loss:6.754329 +step:157 train loss:6.644471 +step:158 
train loss:6.620999 +step:159 train loss:6.657868 +step:160 train loss:6.628436 +step:161 train loss:6.614309 +step:162 train loss:6.584826 +step:163 train loss:6.594982 +step:164 train loss:6.605965 +step:165 train loss:6.603947 +step:166 train loss:6.553249 +step:167 train loss:6.546154 +step:168 train loss:6.515152 +step:169 train loss:6.467292 +step:170 train loss:6.437780 +step:171 train loss:6.540364 +step:172 train loss:6.467502 +step:173 train loss:6.501244 +step:174 train loss:6.506748 +step:175 train loss:6.466750 +step:176 train loss:6.411767 +step:177 train loss:6.457988 +step:178 train loss:6.462580 +step:179 train loss:6.411141 +step:180 train loss:6.396192 +step:181 train loss:6.428019 +step:182 train loss:6.354662 +step:183 train loss:6.448737 +step:184 train loss:6.414942 +step:185 train loss:6.323452 +step:186 train loss:6.477324 +step:187 train loss:6.405507 +step:188 train loss:6.225690 +step:189 train loss:6.381421 +step:190 train loss:6.375444 +step:191 train loss:6.292893 +step:192 train loss:6.202995 +step:193 train loss:6.348746 +step:194 train loss:6.373962 +step:195 train loss:6.361987 +step:196 train loss:6.334363 +step:197 train loss:6.330614 +step:198 train loss:6.268936 +step:199 train loss:6.349833 +step:200 train loss:6.380816 +step:201 train loss:6.314129 +step:202 train loss:6.316584 +step:203 train loss:6.271176 +step:204 train loss:6.311976 +step:205 train loss:6.164431 +step:206 train loss:6.301778 +step:207 train loss:6.266685 +step:208 train loss:6.210750 +step:209 train loss:6.210294 +step:210 train loss:6.207557 +step:211 train loss:6.282218 +step:212 train loss:6.227678 +step:213 train loss:6.243990 +step:214 train loss:6.226063 +step:215 train loss:6.248434 +step:216 train loss:6.190670 +step:217 train loss:6.190471 +step:218 train loss:6.170547 +step:219 train loss:6.145341 +step:220 train loss:6.195947 +step:221 train loss:6.146482 +step:222 train loss:6.190102 +step:223 train loss:6.200585 +step:224 train loss:6.191587 +step:225 train loss:6.127360 +step:226 train loss:6.132199 +step:227 train loss:6.195309 +step:228 train loss:6.161170 +step:229 train loss:6.235919 +step:230 train loss:6.100665 +step:231 train loss:6.154298 +step:232 train loss:6.139546 +step:233 train loss:6.109879 +step:234 train loss:6.108045 +step:235 train loss:6.191554 +step:236 train loss:6.135577 +step:237 train loss:6.164660 +step:238 train loss:6.171755 +step:239 train loss:6.077308 +step:240 train loss:6.149664 +step:241 train loss:6.182304 +step:242 train loss:6.166064 +step:243 train loss:6.072791 +step:244 train loss:6.100821 +step:245 train loss:6.086636 +step:246 train loss:6.082650 +step:247 train loss:6.075496 +step:248 train loss:6.032743 +step:249 train loss:6.091828 +step:250 validation loss:6.088235 +step:250 train loss:6.058946 +step:251 train loss:6.093602 +step:252 train loss:6.052905 +step:253 train loss:6.053368 +step:254 train loss:6.015593 +step:255 train loss:6.054482 +step:256 train loss:6.055677 +step:257 train loss:6.101927 +step:258 train loss:5.996537 +step:259 train loss:6.027132 +step:260 train loss:6.000834 +step:261 train loss:5.995948 +step:262 train loss:6.068019 +step:263 train loss:6.027428 +step:264 train loss:5.993546 +step:265 train loss:6.024035 +step:266 train loss:5.983169 +step:267 train loss:6.021036 +step:268 train loss:5.970877 +step:269 train loss:5.998745 +step:270 train loss:6.013801 +step:271 train loss:6.000692 +step:272 train loss:5.957935 +step:273 train loss:6.032454 +step:274 train loss:5.946216 +step:275 train 
loss:5.980637 +step:276 train loss:5.956357 +step:277 train loss:5.955256 +step:278 train loss:5.933020 +step:279 train loss:5.905970 +step:280 train loss:5.976957 +step:281 train loss:6.057741 +step:282 train loss:5.939772 +step:283 train loss:5.948396 +step:284 train loss:5.914024 +step:285 train loss:5.968412 +step:286 train loss:5.941583 +step:287 train loss:5.920478 +step:288 train loss:5.897456 +step:289 train loss:5.922833 +step:290 train loss:5.981678 +step:291 train loss:5.906975 +step:292 train loss:5.962146 +step:293 train loss:5.893461 +step:294 train loss:6.000999 +step:295 train loss:5.896771 +step:296 train loss:5.962233 +step:297 train loss:5.994699 +step:298 train loss:5.883116 +step:299 train loss:5.959428 +step:300 train loss:5.871815 +step:301 train loss:5.910555 +step:302 train loss:5.883364 +step:303 train loss:5.900536 +step:304 train loss:5.929961 +step:305 train loss:5.850327 +step:306 train loss:5.872916 +step:307 train loss:5.907018 +step:308 train loss:5.812101 +step:309 train loss:5.951102 +step:310 train loss:5.913857 +step:311 train loss:5.892416 +step:312 train loss:5.881079 +step:313 train loss:5.905303 +step:314 train loss:5.883139 +step:315 train loss:5.850729 +step:316 train loss:5.843839 +step:317 train loss:5.810144 +step:318 train loss:5.812665 +step:319 train loss:5.884325 +step:320 train loss:5.805430 +step:321 train loss:5.863100 +step:322 train loss:5.854166 +step:323 train loss:5.921692 +step:324 train loss:5.862061 +step:325 train loss:5.884702 +step:326 train loss:5.893946 +step:327 train loss:5.863221 +step:328 train loss:5.837075 +step:329 train loss:5.854777 +step:330 train loss:5.786895 +step:331 train loss:5.814630 +step:332 train loss:5.801944 +step:333 train loss:5.739059 +step:334 train loss:5.837100 +step:335 train loss:5.876147 +step:336 train loss:5.998078 +step:337 train loss:5.899911 +step:338 train loss:5.812420 +step:339 train loss:5.767663 +step:340 train loss:5.777988 +step:341 train loss:5.771931 +step:342 train loss:5.837484 +step:343 train loss:5.813375 +step:344 train loss:5.766350 +step:345 train loss:5.741786 +step:346 train loss:5.788015 +step:347 train loss:5.726256 +step:348 train loss:5.734557 +step:349 train loss:5.674123 +step:350 train loss:5.711923 +step:351 train loss:5.780391 +step:352 train loss:5.739395 +step:353 train loss:5.764072 +step:354 train loss:5.720684 +step:355 train loss:5.770078 +step:356 train loss:5.739264 +step:357 train loss:5.811208 +step:358 train loss:5.839777 +step:359 train loss:5.678526 +step:360 train loss:5.793370 +step:361 train loss:5.775926 +step:362 train loss:5.758904 +step:363 train loss:5.714270 +step:364 train loss:5.833786 +step:365 train loss:5.762820 +step:366 train loss:5.736438 +step:367 train loss:5.757701 +step:368 train loss:5.733377 +step:369 train loss:5.708445 +step:370 train loss:5.768132 +step:371 train loss:5.701471 +step:372 train loss:5.771449 +step:373 train loss:5.722309 +step:374 train loss:5.707806 +step:375 train loss:5.740387 +step:376 train loss:5.723239 +step:377 train loss:5.614337 +step:378 train loss:5.699893 +step:379 train loss:5.756359 +step:380 train loss:5.681769 +step:381 train loss:5.744726 +step:382 train loss:5.728183 +step:383 train loss:5.709455 +step:384 train loss:5.684861 +step:385 train loss:5.680345 +step:386 train loss:5.712884 +step:387 train loss:5.709213 +step:388 train loss:5.680869 +step:389 train loss:5.697258 +step:390 train loss:5.680611 +step:391 train loss:5.682647 +step:392 train loss:5.676168 +step:393 train loss:5.668509 
+step:394 train loss:5.716265 +step:395 train loss:5.652151 +step:396 train loss:5.602420 +step:397 train loss:5.688679 +step:398 train loss:5.674205 +step:399 train loss:5.682143 +step:400 train loss:5.645227 +step:401 train loss:5.684782 +step:402 train loss:5.661582 +step:403 train loss:5.658851 +step:404 train loss:5.640436 +step:405 train loss:5.642966 +step:406 train loss:5.675797 +step:407 train loss:5.662288 +step:408 train loss:5.724688 +step:409 train loss:5.658146 +step:410 train loss:5.627518 +step:411 train loss:5.621667 +step:412 train loss:5.707539 +step:413 train loss:5.593047 +step:414 train loss:5.677133 +step:415 train loss:5.637039 +step:416 train loss:5.651855 +step:417 train loss:5.672925 +step:418 train loss:5.617121 +step:419 train loss:5.609737 +step:420 train loss:5.614498 +step:421 train loss:5.597876 +step:422 train loss:5.598217 +step:423 train loss:5.603327 +step:424 train loss:5.576071 +step:425 train loss:5.636347 +step:426 train loss:5.632251 +step:427 train loss:5.568370 +step:428 train loss:5.631018 +step:429 train loss:5.543085 +step:430 train loss:5.580627 +step:431 train loss:5.613297 +step:432 train loss:5.634187 +step:433 train loss:5.621994 +step:434 train loss:5.577873 +step:435 train loss:5.632451 +step:436 train loss:5.655277 +step:437 train loss:5.616379 +step:438 train loss:5.571058 +step:439 train loss:5.572816 +step:440 train loss:5.612773 +step:441 train loss:5.565683 +step:442 train loss:5.559600 +step:443 train loss:5.576349 +step:444 train loss:5.605354 +step:445 train loss:5.610791 +step:446 train loss:5.551951 +step:447 train loss:5.569211 +step:448 train loss:5.623027 +step:449 train loss:5.581697 +step:450 train loss:5.567130 +step:451 train loss:5.559046 +step:452 train loss:5.620650 +step:453 train loss:5.547507 +step:454 train loss:5.509979 +step:455 train loss:5.598897 +step:456 train loss:5.568450 +step:457 train loss:5.542089 +step:458 train loss:5.564532 +step:459 train loss:5.510345 +step:460 train loss:5.611956 +step:461 train loss:5.566876 +step:462 train loss:5.474958 +step:463 train loss:5.534950 +step:464 train loss:5.587283 +step:465 train loss:5.546238 +step:466 train loss:5.568057 +step:467 train loss:5.522832 +step:468 train loss:5.578807 +step:469 train loss:5.549773 +step:470 train loss:5.508662 +step:471 train loss:5.595138 +step:472 train loss:5.491778 +step:473 train loss:5.565990 +step:474 train loss:5.548342 +step:475 train loss:5.559409 +step:476 train loss:5.537655 +step:477 train loss:5.479773 +step:478 train loss:5.500024 +step:479 train loss:5.497449 +step:480 train loss:5.520535 +step:481 train loss:5.519566 +step:482 train loss:5.468838 +step:483 train loss:5.531477 +step:484 train loss:5.485972 +step:485 train loss:5.470396 +step:486 train loss:5.530114 +step:487 train loss:5.501548 +step:488 train loss:5.495358 +step:489 train loss:5.494007 +step:490 train loss:5.475191 +step:491 train loss:5.485887 +step:492 train loss:5.489033 +step:493 train loss:5.488382 +step:494 train loss:5.501585 +step:495 train loss:5.446997 +step:496 train loss:5.541506 +step:497 train loss:5.434038 +step:498 train loss:5.529777 +step:499 train loss:5.503922 +step:500 validation loss:5.481534 total_sharp:2.2081e-02 L1_sharp:2.0584e+00 L2_sharp:6.5421e-01 L3_sharp:4.7680e-01 L4_sharp:2.8076e-01 L5_sharp:2.0519e-01 L6_sharp:1.4379e-01 L7_sharp:1.0100e-01 L8_sharp:8.2004e-02 L9_sharp:5.2207e-02 L10_sharp:3.7204e-02 L11_sharp:2.7900e-02 L12_sharp:2.1898e-02 total_fnorm:9.0793e-01 total_l1_linf:5.0080e+03 total_spectral:9.0793e-01 
L1_fnorm:2.2937e-02 L2_fnorm:2.2450e-02 L3_fnorm:2.1820e-02 L4_fnorm:2.2754e-02 L5_fnorm:2.3539e-02 L6_fnorm:2.3888e-02 L7_fnorm:2.4199e-02 L8_fnorm:2.3358e-02 L9_fnorm:2.3007e-02 L10_fnorm:2.2876e-02 L11_fnorm:2.2073e-02 L12_fnorm:2.2299e-02 L1_l1linf:1.8168e-01 L2_l1linf:1.8167e-01 L3_l1linf:1.8651e-01 L4_l1linf:1.8456e-01 L5_l1linf:2.0833e-01 L6_l1linf:2.2366e-01 L7_l1linf:2.5792e-01 L8_l1linf:1.9424e-01 L9_l1linf:1.8615e-01 L10_l1linf:1.7799e-01 L11_l1linf:1.3545e-01 L12_l1linf:1.3895e-01 L1_spectral:4.0468e-03 L2_spectral:4.9410e-03 L3_spectral:4.8334e-03 L4_spectral:7.5619e-03 L5_spectral:9.2774e-03 L6_spectral:9.7546e-03 L7_spectral:1.0675e-02 L8_spectral:8.7079e-03 L9_spectral:8.0621e-03 L10_spectral:7.6218e-03 L11_spectral:4.9344e-03 L12_spectral:5.3053e-03 ip_v_neg_g:7.5616e-03 cos_v_neg_g:1.0001e-03 v_norm:9.0793e-01 g_norm:8.3280e+00 hv_norm:5.4810e+00 cos_v_hv:3.6578e-03 hg_norm:1.1559e+04 cos_g_hg:6.7552e-01 v_par:5.0754e-05 v_perp:9.0793e-01 L1_cos_v_neg_g:1.7780e-02 L1_v_norm:2.2937e-02 L2_cos_v_neg_g:1.3727e-02 L2_v_norm:2.2450e-02 L3_cos_v_neg_g:1.3730e-02 L3_v_norm:2.1820e-02 L4_cos_v_neg_g:1.1265e-02 L4_v_norm:2.2754e-02 L5_cos_v_neg_g:1.0534e-02 L5_v_norm:2.3539e-02 L6_cos_v_neg_g:9.5115e-03 L6_v_norm:2.3888e-02 L7_cos_v_neg_g:9.1440e-03 L7_v_norm:2.4199e-02 L8_cos_v_neg_g:8.8633e-03 L8_v_norm:2.3358e-02 L9_cos_v_neg_g:8.5809e-03 L9_v_norm:2.3007e-02 L10_cos_v_neg_g:7.7248e-03 L10_v_norm:2.2876e-02 L11_cos_v_neg_g:6.6962e-03 L11_v_norm:2.2073e-02 L12_cos_v_neg_g:5.5577e-03 L12_v_norm:2.2299e-02 +step:500 train loss:5.510560 +step:501 train loss:5.459540 +step:502 train loss:5.496431 +step:503 train loss:5.425019 +step:504 train loss:5.517599 +step:505 train loss:5.451464 +step:506 train loss:5.452368 +step:507 train loss:5.461338 +step:508 train loss:5.488193 +step:509 train loss:5.491801 +step:510 train loss:5.426357 +step:511 train loss:5.418507 +step:512 train loss:5.417910 +step:513 train loss:5.448526 +step:514 train loss:5.495140 +step:515 train loss:5.448186 +step:516 train loss:5.521945 +step:517 train loss:5.452628 +step:518 train loss:5.438991 +step:519 train loss:5.491666 +step:520 train loss:5.442743 +step:521 train loss:5.430885 +step:522 train loss:5.457220 +step:523 train loss:5.449684 +step:524 train loss:5.405991 +step:525 train loss:5.410340 +step:526 train loss:5.422581 +step:527 train loss:5.422792 +step:528 train loss:5.425768 +step:529 train loss:5.447952 +step:530 train loss:5.396482 +step:531 train loss:5.437216 +step:532 train loss:5.407064 +step:533 train loss:5.367082 +step:534 train loss:5.441654 +step:535 train loss:5.423666 +step:536 train loss:5.490209 +step:537 train loss:5.381043 +step:538 train loss:5.350383 +step:539 train loss:5.435452 +step:540 train loss:5.477865 +step:541 train loss:5.379981 +step:542 train loss:5.405349 +step:543 train loss:5.425739 +step:544 train loss:5.421991 +step:545 train loss:5.400854 +step:546 train loss:5.366534 +step:547 train loss:5.385666 +step:548 train loss:5.345896 +step:549 train loss:5.399393 +step:550 train loss:5.376951 +step:551 train loss:5.382968 +step:552 train loss:5.472686 +step:553 train loss:5.441160 +step:554 train loss:5.383674 +step:555 train loss:5.446706 +step:556 train loss:5.396025 +step:557 train loss:5.368637 +step:558 train loss:5.347966 +step:559 train loss:5.396477 +step:560 train loss:5.443008 +step:561 train loss:5.329577 +step:562 train loss:5.312661 +step:563 train loss:5.390649 +step:564 train loss:5.360604 +step:565 train loss:5.377581 +step:566 train loss:5.375168 
+step:567 train loss:5.370291 +step:568 train loss:5.395322 +step:569 train loss:5.379592 +step:570 train loss:5.318032 +step:571 train loss:5.343779 +step:572 train loss:5.342212 +step:573 train loss:5.338554 +step:574 train loss:5.376808 +step:575 train loss:5.339162 +step:576 train loss:5.348410 +step:577 train loss:5.368999 +step:578 train loss:5.347605 +step:579 train loss:5.391221 +step:580 train loss:5.329053 +step:581 train loss:5.382436 +step:582 train loss:5.347077 +step:583 train loss:5.359863 +step:584 train loss:5.343785 +step:585 train loss:5.334128 +step:586 train loss:5.324888 +step:587 train loss:5.397301 +step:588 train loss:5.320626 +step:589 train loss:5.373392 +step:590 train loss:5.378917 +step:591 train loss:5.319275 +step:592 train loss:5.297529 +step:593 train loss:5.329020 +step:594 train loss:5.299595 +step:595 train loss:5.340823 +step:596 train loss:5.314612 +step:597 train loss:5.348014 +step:598 train loss:5.318892 +step:599 train loss:5.319170 +step:600 train loss:5.299744 +step:601 train loss:5.273391 +step:602 train loss:5.282918 +step:603 train loss:5.337272 +step:604 train loss:5.312954 +step:605 train loss:5.343370 +step:606 train loss:5.295576 +step:607 train loss:5.284056 +step:608 train loss:5.280420 +step:609 train loss:5.256797 +step:610 train loss:5.275100 +step:611 train loss:5.283062 +step:612 train loss:5.323028 +step:613 train loss:5.244733 +step:614 train loss:5.288504 +step:615 train loss:5.340540 +step:616 train loss:5.264957 +step:617 train loss:5.297881 +step:618 train loss:5.268632 +step:619 train loss:5.301276 +step:620 train loss:5.320333 +step:621 train loss:5.256761 +step:622 train loss:5.312120 +step:623 train loss:5.323136 +step:624 train loss:5.305127 +step:625 train loss:5.304311 +step:626 train loss:5.308640 +step:627 train loss:5.275112 +step:628 train loss:5.283045 +step:629 train loss:5.229569 +step:630 train loss:5.254850 +step:631 train loss:5.249680 +step:632 train loss:5.264505 +step:633 train loss:5.287801 +step:634 train loss:5.283953 +step:635 train loss:5.222539 +step:636 train loss:5.307467 +step:637 train loss:5.226224 +step:638 train loss:5.159062 +step:639 train loss:5.286327 +step:640 train loss:5.232735 +step:641 train loss:5.261639 +step:642 train loss:5.299249 +step:643 train loss:5.209264 +step:644 train loss:5.290760 +step:645 train loss:5.249168 +step:646 train loss:5.242573 +step:647 train loss:5.255054 +step:648 train loss:5.350427 +step:649 train loss:5.247122 +step:650 train loss:5.307487 +step:651 train loss:5.194945 +step:652 train loss:5.224198 +step:653 train loss:5.222911 +step:654 train loss:5.215495 +step:655 train loss:5.257551 +step:656 train loss:5.204790 +step:657 train loss:5.257698 +step:658 train loss:5.187259 +step:659 train loss:5.267285 +step:660 train loss:5.232471 +step:661 train loss:5.269632 +step:662 train loss:5.265010 +step:663 train loss:5.262709 +step:664 train loss:5.176685 +step:665 train loss:5.191330 +step:666 train loss:5.192604 +step:667 train loss:5.250295 +step:668 train loss:5.220331 +step:669 train loss:5.207393 +step:670 train loss:5.223385 +step:671 train loss:5.197898 +step:672 train loss:5.169694 +step:673 train loss:5.255985 +step:674 train loss:5.259970 +step:675 train loss:5.165056 +step:676 train loss:5.246238 +step:677 train loss:5.185452 +step:678 train loss:5.176261 +step:679 train loss:5.220051 +step:680 train loss:5.174470 +step:681 train loss:5.229947 +step:682 train loss:5.143286 +step:683 train loss:5.198103 +step:684 train loss:5.237502 +step:685 
train loss:5.170680 +step:686 train loss:5.282296 +step:687 train loss:5.208554 +step:688 train loss:5.144469 +step:689 train loss:5.192046 +step:690 train loss:5.165019 +step:691 train loss:5.173645 +step:692 train loss:5.190300 +step:693 train loss:5.185139 +step:694 train loss:5.170012 +step:695 train loss:5.132434 +step:696 train loss:5.104039 +step:697 train loss:5.211482 +step:698 train loss:5.150162 +step:699 train loss:5.145527 +step:700 train loss:5.226503 +step:701 train loss:5.134436 +step:702 train loss:5.201411 +step:703 train loss:5.137857 +step:704 train loss:5.098177 +step:705 train loss:5.139252 +step:706 train loss:5.046726 +step:707 train loss:5.104747 +step:708 train loss:5.187959 +step:709 train loss:5.154410 +step:710 train loss:5.114442 +step:711 train loss:5.186990 +step:712 train loss:5.141800 +step:713 train loss:5.099168 +step:714 train loss:5.180414 +step:715 train loss:5.095927 +step:716 train loss:5.225855 +step:717 train loss:5.110654 +step:718 train loss:5.179852 +step:719 train loss:5.132275 +step:720 train loss:5.115681 +step:721 train loss:5.123363 +step:722 train loss:5.141971 +step:723 train loss:5.183815 +step:724 train loss:5.153983 +step:725 train loss:5.133611 +step:726 train loss:5.113517 +step:727 train loss:5.143743 +step:728 train loss:5.133236 +step:729 train loss:5.058684 +step:730 train loss:5.150895 +step:731 train loss:5.173351 +step:732 train loss:5.157667 +step:733 train loss:5.134070 +step:734 train loss:5.127776 +step:735 train loss:5.196612 +step:736 train loss:5.145604 +step:737 train loss:5.140380 +step:738 train loss:5.165165 +step:739 train loss:5.114821 +step:740 train loss:5.127045 +step:741 train loss:5.191968 +step:742 train loss:5.102244 +step:743 train loss:5.086533 +step:744 train loss:5.144226 +step:745 train loss:5.087076 +step:746 train loss:5.087481 +step:747 train loss:5.112844 +step:748 train loss:5.074629 +step:749 train loss:5.108752 +step:750 validation loss:5.089469 +step:750 train loss:5.068946 +step:751 train loss:5.082068 +step:752 train loss:5.028869 +step:753 train loss:5.083443 +step:754 train loss:5.090387 +step:755 train loss:5.129831 +step:756 train loss:5.118845 +step:757 train loss:5.208712 +step:758 train loss:5.084850 +step:759 train loss:5.091868 +step:760 train loss:5.059824 +step:761 train loss:5.102453 +step:762 train loss:5.084322 +step:763 train loss:5.072580 +step:764 train loss:5.044895 +step:765 train loss:5.056866 +step:766 train loss:5.131955 +step:767 train loss:5.223364 +step:768 train loss:5.065209 +step:769 train loss:5.100319 +step:770 train loss:5.121165 +step:771 train loss:5.166642 +step:772 train loss:5.108108 +step:773 train loss:5.053020 +step:774 train loss:5.101998 +step:775 train loss:5.077078 +step:776 train loss:5.088145 +step:777 train loss:5.047235 +step:778 train loss:5.059261 +step:779 train loss:5.033829 +step:780 train loss:5.086360 +step:781 train loss:5.022627 +step:782 train loss:5.046865 +step:783 train loss:5.030171 +step:784 train loss:5.041487 +step:785 train loss:5.006410 +step:786 train loss:5.041936 +step:787 train loss:4.995811 +step:788 train loss:5.050397 +step:789 train loss:5.055717 +step:790 train loss:5.001814 +step:791 train loss:5.087186 +step:792 train loss:5.097886 +step:793 train loss:5.061028 +step:794 train loss:5.059190 +step:795 train loss:5.018139 +step:796 train loss:5.251553 +step:797 train loss:5.045892 +step:798 train loss:5.033646 +step:799 train loss:5.032257 +step:800 train loss:5.108270 +step:801 train loss:5.041071 +step:802 train 
loss:5.140721 +step:803 train loss:5.056408 +step:804 train loss:4.996510 +step:805 train loss:5.059089 +step:806 train loss:4.964815 +step:807 train loss:5.027896 +step:808 train loss:5.036946 +step:809 train loss:5.003860 +step:810 train loss:4.981039 +step:811 train loss:5.075779 +step:812 train loss:5.034626 +step:813 train loss:5.037294 +step:814 train loss:5.088296 +step:815 train loss:5.058705 +step:816 train loss:4.991365 +step:817 train loss:5.021045 +step:818 train loss:4.996406 +step:819 train loss:4.993132 +step:820 train loss:5.008905 +step:821 train loss:4.949033 +step:822 train loss:4.945072 +step:823 train loss:5.023267 +step:824 train loss:4.939052 +step:825 train loss:4.926262 +step:826 train loss:4.971968 +step:827 train loss:4.921776 +step:828 train loss:4.982990 +step:829 train loss:4.984973 +step:830 train loss:4.994984 +step:831 train loss:5.009702 +step:832 train loss:5.062599 +step:833 train loss:5.020923 +step:834 train loss:5.007649 +step:835 train loss:4.980923 +step:836 train loss:4.971324 +step:837 train loss:4.940104 +step:838 train loss:4.947682 +step:839 train loss:4.949974 +step:840 train loss:4.986841 +step:841 train loss:4.958886 +step:842 train loss:4.969600 +step:843 train loss:4.964571 +step:844 train loss:4.946172 +step:845 train loss:4.933572 +step:846 train loss:5.003407 +step:847 train loss:4.970566 +step:848 train loss:4.937156 +step:849 train loss:4.990002 +step:850 train loss:4.986907 +step:851 train loss:4.957934 +step:852 train loss:5.010403 +step:853 train loss:4.912426 +step:854 train loss:4.961035 +step:855 train loss:4.940819 +step:856 train loss:4.902746 +step:857 train loss:4.948620 +step:858 train loss:4.976620 +step:859 train loss:4.942909 +step:860 train loss:4.945145 +step:861 train loss:4.983151 +step:862 train loss:4.933913 +step:863 train loss:4.953381 +step:864 train loss:4.925271 +step:865 train loss:4.951299 +step:866 train loss:4.959361 +step:867 train loss:5.025760 +step:868 train loss:4.927212 +step:869 train loss:4.942664 +step:870 train loss:4.909129 +step:871 train loss:4.916359 +step:872 train loss:4.919687 +step:873 train loss:4.917826 +step:874 train loss:4.920681 +step:875 train loss:4.835266 +step:876 train loss:4.953874 +step:877 train loss:4.845547 +step:878 train loss:4.956157 +step:879 train loss:4.901076 +step:880 train loss:4.968349 +step:881 train loss:4.927627 +step:882 train loss:4.893703 +step:883 train loss:4.921813 +step:884 train loss:4.933285 +step:885 train loss:4.886729 +step:886 train loss:4.861938 +step:887 train loss:4.898230 +step:888 train loss:4.996202 +step:889 train loss:4.934436 +step:890 train loss:4.890296 +step:891 train loss:4.846898 +step:892 train loss:4.812652 +step:893 train loss:4.892330 +step:894 train loss:4.874290 +step:895 train loss:4.860775 +step:896 train loss:4.937778 +step:897 train loss:4.874183 +step:898 train loss:4.881557 +step:899 train loss:4.883840 +step:900 train loss:4.943085 +step:901 train loss:4.853213 +step:902 train loss:4.897957 +step:903 train loss:4.960540 +step:904 train loss:4.981125 +step:905 train loss:4.869097 +step:906 train loss:4.885069 +step:907 train loss:4.883734 +step:908 train loss:4.906062 +step:909 train loss:4.864933 +step:910 train loss:4.886846 +step:911 train loss:5.005495 +step:912 train loss:4.832529 +step:913 train loss:4.879148 +step:914 train loss:4.862455 +step:915 train loss:4.870163 +step:916 train loss:4.945250 +step:917 train loss:4.874143 +step:918 train loss:4.939606 +step:919 train loss:5.013972 +step:920 train loss:4.804077 
+step:921 train loss:4.885702 +step:922 train loss:4.861758 +step:923 train loss:4.829902 +step:924 train loss:4.843183 +step:925 train loss:4.790546 +step:926 train loss:4.883202 +step:927 train loss:4.824970 +step:928 train loss:4.869554 +step:929 train loss:4.858485 +step:930 train loss:4.856582 +step:931 train loss:4.897058 +step:932 train loss:4.862243 +step:933 train loss:4.856697 +step:934 train loss:4.900496 +step:935 train loss:4.876846 +step:936 train loss:4.875985 +step:937 train loss:4.876819 +step:938 train loss:4.891641 +step:939 train loss:4.765230 +step:940 train loss:4.848891 +step:941 train loss:4.792732 +step:942 train loss:4.779119 +step:943 train loss:4.862143 +step:944 train loss:4.817987 +step:945 train loss:4.844821 +step:946 train loss:4.879301 +step:947 train loss:4.974441 +step:948 train loss:4.801548 +step:949 train loss:4.853721 +step:950 train loss:4.808156 +step:951 train loss:4.819395 +step:952 train loss:4.885029 +step:953 train loss:4.832437 +step:954 train loss:4.842817 +step:955 train loss:4.778732 +step:956 train loss:4.796246 +step:957 train loss:4.802027 +step:958 train loss:4.871116 +step:959 train loss:4.821418 +step:960 train loss:4.893832 +step:961 train loss:4.873267 +step:962 train loss:4.808402 +step:963 train loss:4.795718 +step:964 train loss:4.840719 +step:965 train loss:4.759786 +step:966 train loss:4.779394 +step:967 train loss:4.823452 +step:968 train loss:4.814087 +step:969 train loss:4.768302 +step:970 train loss:4.844064 +step:971 train loss:4.797721 +step:972 train loss:4.751060 +step:973 train loss:4.818155 +step:974 train loss:4.771364 +step:975 train loss:4.856790 +step:976 train loss:4.796063 +step:977 train loss:4.794492 +step:978 train loss:4.813941 +step:979 train loss:4.778695 +step:980 train loss:4.785769 +step:981 train loss:4.783474 +step:982 train loss:4.762449 +step:983 train loss:4.774089 +step:984 train loss:4.817963 +step:985 train loss:4.797585 +step:986 train loss:4.778666 +step:987 train loss:4.827130 +step:988 train loss:4.812787 +step:989 train loss:4.774431 +step:990 train loss:4.764550 +step:991 train loss:4.717196 +step:992 train loss:4.754219 +step:993 train loss:4.782048 +step:994 train loss:4.711175 +step:995 train loss:4.724757 +step:996 train loss:4.781253 +step:997 train loss:4.738209 +step:998 train loss:4.743975 +step:999 train loss:4.766911 +step:1000 validation loss:4.740698 total_sharp:2.3593e-02 L1_sharp:1.9012e+00 L2_sharp:1.1230e+00 L3_sharp:7.9969e-01 L4_sharp:5.3289e-01 L5_sharp:4.7120e-01 L6_sharp:3.2463e-01 L7_sharp:2.1099e-01 L8_sharp:1.3881e-01 L9_sharp:9.8455e-02 L10_sharp:7.0713e-02 L11_sharp:5.5380e-02 L12_sharp:4.0647e-02 total_fnorm:1.2939e+00 total_l1_linf:7.1297e+03 total_spectral:1.2939e+00 L1_fnorm:3.1388e-02 L2_fnorm:2.8723e-02 L3_fnorm:2.7932e-02 L4_fnorm:2.9112e-02 L5_fnorm:2.9634e-02 L6_fnorm:3.0568e-02 L7_fnorm:3.1094e-02 L8_fnorm:3.1373e-02 L9_fnorm:3.1838e-02 L10_fnorm:3.1904e-02 L11_fnorm:3.1692e-02 L12_fnorm:3.2127e-02 L1_l1linf:1.8903e-01 L2_l1linf:2.1834e-01 L3_l1linf:2.3473e-01 L4_l1linf:2.4136e-01 L5_l1linf:2.4631e-01 L6_l1linf:2.5038e-01 L7_l1linf:2.4520e-01 L8_l1linf:2.5448e-01 L9_l1linf:2.5187e-01 L10_l1linf:2.4672e-01 L11_l1linf:2.3273e-01 L12_l1linf:2.2851e-01 L1_spectral:4.2762e-03 L2_spectral:4.9539e-03 L3_spectral:5.2958e-03 L4_spectral:5.3970e-03 L5_spectral:5.5777e-03 L6_spectral:5.6248e-03 L7_spectral:6.2106e-03 L8_spectral:6.6168e-03 L9_spectral:6.7857e-03 L10_spectral:6.7239e-03 L11_spectral:6.1759e-03 L12_spectral:7.8186e-03 ip_v_neg_g:1.9798e-02 
cos_v_neg_g:1.4118e-03 v_norm:1.2939e+00 g_norm:1.0839e+01 hv_norm:8.2418e+00 cos_v_hv:3.7038e-03 hg_norm:1.7588e+04 cos_g_hg:6.8755e-01 v_par:8.3271e-05 v_perp:1.2939e+00 L1_cos_v_neg_g:2.4694e-02 L1_v_norm:3.1388e-02 L2_cos_v_neg_g:2.2862e-02 L2_v_norm:2.8723e-02 L3_cos_v_neg_g:2.2598e-02 L3_v_norm:2.7932e-02 L4_cos_v_neg_g:1.8778e-02 L4_v_norm:2.9112e-02 L5_cos_v_neg_g:1.9764e-02 L5_v_norm:2.9634e-02 L6_cos_v_neg_g:1.7373e-02 L6_v_norm:3.0568e-02 L7_cos_v_neg_g:1.5226e-02 L7_v_norm:3.1094e-02 L8_cos_v_neg_g:1.3097e-02 L8_v_norm:3.1373e-02 L9_cos_v_neg_g:1.1644e-02 L9_v_norm:3.1838e-02 L10_cos_v_neg_g:1.0108e-02 L10_v_norm:3.1904e-02 L11_cos_v_neg_g:8.1075e-03 L11_v_norm:3.1692e-02 L12_cos_v_neg_g:6.4248e-03 L12_v_norm:3.2127e-02 +step:1000 train loss:4.781721 +step:1001 train loss:4.778483 +step:1002 train loss:4.780629 +step:1003 train loss:4.767449 +step:1004 train loss:4.739871 +step:1005 train loss:4.751003 +step:1006 train loss:4.817443 +step:1007 train loss:4.804118 +step:1008 train loss:4.739747 +step:1009 train loss:4.795752 +step:1010 train loss:4.780897 +step:1011 train loss:4.795317 +step:1012 train loss:4.740567 +step:1013 train loss:4.711638 +step:1014 train loss:4.716733 +step:1015 train loss:4.746954 +step:1016 train loss:4.749060 +step:1017 train loss:4.709662 +step:1018 train loss:4.754784 +step:1019 train loss:4.751111 +step:1020 train loss:4.725083 +step:1021 train loss:4.791539 +step:1022 train loss:4.715373 +step:1023 train loss:4.719896 +step:1024 train loss:4.793774 +step:1025 train loss:4.740837 +step:1026 train loss:4.708515 +step:1027 train loss:4.746679 +step:1028 train loss:4.749950 +step:1029 train loss:4.699344 +step:1030 train loss:4.756475 +step:1031 train loss:4.768222 +step:1032 train loss:4.709281 +step:1033 train loss:4.684873 +step:1034 train loss:4.750063 +step:1035 train loss:4.755958 +step:1036 train loss:4.660042 +step:1037 train loss:4.722968 +step:1038 train loss:4.744347 +step:1039 train loss:4.865473 +step:1040 train loss:4.697247 +step:1041 train loss:4.710177 +step:1042 train loss:4.740977 +step:1043 train loss:4.722112 +step:1044 train loss:4.715471 +step:1045 train loss:4.728176 +step:1046 train loss:4.682497 +step:1047 train loss:4.694584 +step:1048 train loss:4.701416 +step:1049 train loss:4.749059 +step:1050 train loss:4.712306 +step:1051 train loss:4.685922 +step:1052 train loss:4.775752 +step:1053 train loss:4.682864 +step:1054 train loss:4.685375 +step:1055 train loss:4.745886 +step:1056 train loss:4.688210 +step:1057 train loss:4.583273 +step:1058 train loss:4.690602 +step:1059 train loss:4.691720 +step:1060 train loss:4.670792 +step:1061 train loss:4.745750 +step:1062 train loss:4.685503 +step:1063 train loss:4.690095 +step:1064 train loss:4.667612 +step:1065 train loss:4.692601 +step:1066 train loss:4.657332 +step:1067 train loss:4.692062 +step:1068 train loss:4.667703 +step:1069 train loss:4.675400 +step:1070 train loss:4.668138 +step:1071 train loss:4.699706 +step:1072 train loss:4.699507 +step:1073 train loss:4.620028 +step:1074 train loss:4.649780 +step:1075 train loss:4.674416 +step:1076 train loss:4.734714 +step:1077 train loss:4.663119 +step:1078 train loss:4.693560 +step:1079 train loss:4.758242 +step:1080 train loss:4.631853 +step:1081 train loss:4.688152 +step:1082 train loss:4.695423 +step:1083 train loss:4.652155 +step:1084 train loss:4.620356 +step:1085 train loss:4.675311 +step:1086 train loss:4.700644 +step:1087 train loss:4.650050 +step:1088 train loss:4.656959 +step:1089 train loss:4.660617 +step:1090 train 
loss:4.607039 +step:1091 train loss:4.590896 +step:1092 train loss:4.731807 +step:1093 train loss:4.619152 +step:1094 train loss:4.655964 +step:1095 train loss:4.695806 +step:1096 train loss:4.637353 +step:1097 train loss:4.624642 +step:1098 train loss:4.608109 +step:1099 train loss:4.646488 +step:1100 train loss:4.674515 +step:1101 train loss:4.691643 +step:1102 train loss:4.692891 +step:1103 train loss:4.642312 +step:1104 train loss:4.668955 +step:1105 train loss:4.709790 +step:1106 train loss:4.637519 +step:1107 train loss:4.747826 +step:1108 train loss:4.706172 +step:1109 train loss:4.656946 +step:1110 train loss:4.621842 +step:1111 train loss:4.681350 +step:1112 train loss:4.616926 +step:1113 train loss:4.570199 +step:1114 train loss:4.562161 +step:1115 train loss:4.607642 +step:1116 train loss:4.687799 +step:1117 train loss:4.682068 +step:1118 train loss:4.726704 +step:1119 train loss:4.682377 +step:1120 train loss:4.656640 +step:1121 train loss:4.633638 +step:1122 train loss:4.620707 +step:1123 train loss:4.714930 +step:1124 train loss:4.599739 +step:1125 train loss:4.632820 +step:1126 train loss:4.583009 +step:1127 train loss:4.606147 +step:1128 train loss:4.605939 +step:1129 train loss:4.660993 +step:1130 train loss:4.571560 +step:1131 train loss:4.666162 +step:1132 train loss:4.612086 +step:1133 train loss:4.612234 +step:1134 train loss:4.601153 +step:1135 train loss:4.643438 +step:1136 train loss:4.653358 +step:1137 train loss:4.572276 +step:1138 train loss:4.640101 +step:1139 train loss:4.612524 +step:1140 train loss:4.686922 +step:1141 train loss:4.630888 +step:1142 train loss:4.589440 +step:1143 train loss:4.641553 +step:1144 train loss:4.679585 +step:1145 train loss:4.629488 +step:1146 train loss:4.567483 +step:1147 train loss:4.586083 +step:1148 train loss:4.595627 +step:1149 train loss:4.645255 +step:1150 train loss:4.670544 +step:1151 train loss:4.674897 +step:1152 train loss:4.577959 +step:1153 train loss:4.587181 +step:1154 train loss:4.567481 +step:1155 train loss:4.671508 +step:1156 train loss:4.573555 +step:1157 train loss:4.595250 +step:1158 train loss:4.697334 +step:1159 train loss:4.635233 +step:1160 train loss:4.568259 +step:1161 train loss:4.669775 +step:1162 train loss:4.599416 +step:1163 train loss:4.589186 +step:1164 train loss:4.493212 +step:1165 train loss:4.631783 +step:1166 train loss:4.550643 +step:1167 train loss:4.545433 +step:1168 train loss:4.620875 +step:1169 train loss:4.583745 +step:1170 train loss:4.580925 +step:1171 train loss:4.604641 +step:1172 train loss:4.583630 +step:1173 train loss:4.600914 +step:1174 train loss:4.532331 +step:1175 train loss:4.559278 +step:1176 train loss:4.683689 +step:1177 train loss:4.515535 +step:1178 train loss:4.571469 +step:1179 train loss:4.548427 +step:1180 train loss:4.582608 +step:1181 train loss:4.549747 +step:1182 train loss:4.635206 +step:1183 train loss:4.603892 +step:1184 train loss:4.521457 +step:1185 train loss:4.578296 +step:1186 train loss:4.544869 +step:1187 train loss:4.502789 +step:1188 train loss:4.539479 +step:1189 train loss:4.515948 +step:1190 train loss:4.542963 +step:1191 train loss:4.594938 +step:1192 train loss:4.561544 +step:1193 train loss:4.575909 +step:1194 train loss:4.668323 +step:1195 train loss:4.647976 +step:1196 train loss:4.554154 +step:1197 train loss:4.566660 +step:1198 train loss:4.535852 +step:1199 train loss:4.550418 +step:1200 train loss:4.630962 +step:1201 train loss:4.573066 +step:1202 train loss:4.508646 +step:1203 train loss:4.510174 +step:1204 train loss:4.561148 
+step:1205 train loss:4.535049 +step:1206 train loss:4.515240 +step:1207 train loss:4.601471 +step:1208 train loss:4.570024 +step:1209 train loss:4.480931 +step:1210 train loss:4.578062 +step:1211 train loss:4.534946 +step:1212 train loss:4.544953 +step:1213 train loss:4.487789 +step:1214 train loss:4.590833 +step:1215 train loss:4.552088 +step:1216 train loss:4.548209 +step:1217 train loss:4.541428 +step:1218 train loss:4.554925 +step:1219 train loss:4.502587 +step:1220 train loss:4.512371 +step:1221 train loss:4.542200 +step:1222 train loss:4.602289 +step:1223 train loss:4.556023 +step:1224 train loss:4.521122 +step:1225 train loss:4.577579 +step:1226 train loss:4.511724 +step:1227 train loss:4.557189 +step:1228 train loss:4.514957 +step:1229 train loss:4.508502 +step:1230 train loss:4.495751 +step:1231 train loss:4.533072 +step:1232 train loss:4.492415 +step:1233 train loss:4.485879 +step:1234 train loss:4.581425 +step:1235 train loss:4.568754 +step:1236 train loss:4.458459 +step:1237 train loss:4.552979 +step:1238 train loss:4.509829 +step:1239 train loss:4.552840 +step:1240 train loss:4.440731 +step:1241 train loss:4.495377 +step:1242 train loss:4.508365 +step:1243 train loss:4.473100 +step:1244 train loss:4.574786 +step:1245 train loss:4.589560 +step:1246 train loss:4.521009 +step:1247 train loss:4.487935 +step:1248 train loss:4.518041 +step:1249 train loss:4.480684 +step:1250 validation loss:4.483734 +step:1250 train loss:4.476409 +step:1251 train loss:4.538817 +step:1252 train loss:4.490744 +step:1253 train loss:4.455922 +step:1254 train loss:4.476344 +step:1255 train loss:4.476120 +step:1256 train loss:4.514536 +step:1257 train loss:4.498934 +step:1258 train loss:4.546205 +step:1259 train loss:4.550404 +step:1260 train loss:4.434169 +step:1261 train loss:4.682859 +step:1262 train loss:4.517199 +step:1263 train loss:4.482502 +step:1264 train loss:4.476472 +step:1265 train loss:4.579437 +step:1266 train loss:4.480979 +step:1267 train loss:4.497759 +step:1268 train loss:4.513124 +step:1269 train loss:4.497028 +step:1270 train loss:4.429956 +step:1271 train loss:4.444099 +step:1272 train loss:4.459411 +step:1273 train loss:4.521420 +step:1274 train loss:4.482973 +step:1275 train loss:4.505415 +step:1276 train loss:4.510327 +step:1277 train loss:4.509324 +step:1278 train loss:4.444570 +step:1279 train loss:4.469210 +step:1280 train loss:4.475758 +step:1281 train loss:4.542825 +step:1282 train loss:4.436902 +step:1283 train loss:4.534114 +step:1284 train loss:4.482631 +step:1285 train loss:4.510583 +step:1286 train loss:4.418812 +step:1287 train loss:4.467269 +step:1288 train loss:4.494626 +step:1289 train loss:4.548704 +step:1290 train loss:4.482122 +step:1291 train loss:4.475976 +step:1292 train loss:4.460036 +step:1293 train loss:4.415993 +step:1294 train loss:4.480780 +step:1295 train loss:4.460371 +step:1296 train loss:4.512480 +step:1297 train loss:4.466188 +step:1298 train loss:4.485499 +step:1299 train loss:4.525314 +step:1300 train loss:4.440169 +step:1301 train loss:4.479897 +step:1302 train loss:4.440136 +step:1303 train loss:4.481108 +step:1304 train loss:4.511733 +step:1305 train loss:4.481912 +step:1306 train loss:4.483562 +step:1307 train loss:4.470499 +step:1308 train loss:4.420286 +step:1309 train loss:4.432277 +step:1310 train loss:4.406968 +step:1311 train loss:4.447772 +step:1312 train loss:4.494311 +step:1313 train loss:4.409356 +step:1314 train loss:4.415857 +step:1315 train loss:4.479331 +step:1316 train loss:4.427209 +step:1317 train loss:4.350889 +step:1318 
train loss:4.484336 +step:1319 train loss:4.523631 +step:1320 train loss:4.440430 +step:1321 train loss:4.396948 +step:1322 train loss:4.506697 +step:1323 train loss:4.464699 +step:1324 train loss:4.560047 +step:1325 train loss:4.445622 +step:1326 train loss:4.486883 +step:1327 train loss:4.488483 +step:1328 train loss:4.405121 +step:1329 train loss:4.431097 +step:1330 train loss:4.446343 +step:1331 train loss:4.289281 +step:1332 train loss:4.500436 +step:1333 train loss:4.435849 +step:1334 train loss:4.465895 +step:1335 train loss:4.493642 +step:1336 train loss:4.480176 +step:1337 train loss:4.452300 +step:1338 train loss:4.426254 +step:1339 train loss:4.519567 +step:1340 train loss:4.462416 +step:1341 train loss:4.451640 +step:1342 train loss:4.429274 +step:1343 train loss:4.404092 +step:1344 train loss:4.472599 +step:1345 train loss:4.431553 +step:1346 train loss:4.520395 +step:1347 train loss:4.438052 +step:1348 train loss:4.422344 +step:1349 train loss:4.362967 +step:1350 train loss:4.376368 +step:1351 train loss:4.453519 +step:1352 train loss:4.412771 +step:1353 train loss:4.390780 +step:1354 train loss:4.411431 +step:1355 train loss:4.484941 +step:1356 train loss:4.402304 +step:1357 train loss:4.411111 +step:1358 train loss:4.408031 +step:1359 train loss:4.409538 +step:1360 train loss:4.441556 +step:1361 train loss:4.550781 +step:1362 train loss:4.478463 +step:1363 train loss:4.362114 +step:1364 train loss:4.395341 +step:1365 train loss:4.382767 +step:1366 train loss:4.417261 +step:1367 train loss:4.347214 +step:1368 train loss:4.379532 +step:1369 train loss:4.408443 +step:1370 train loss:4.433870 +step:1371 train loss:4.388837 +step:1372 train loss:4.411605 +step:1373 train loss:4.440286 +step:1374 train loss:4.453794 +step:1375 train loss:4.431170 +step:1376 train loss:4.428282 +step:1377 train loss:4.460166 +step:1378 train loss:4.410233 +step:1379 train loss:4.390130 +step:1380 train loss:4.463928 +step:1381 train loss:4.409316 +step:1382 train loss:4.380211 +step:1383 train loss:4.370460 +step:1384 train loss:4.509631 +step:1385 train loss:4.346506 +step:1386 train loss:4.402847 +step:1387 train loss:4.411576 +step:1388 train loss:4.374454 +step:1389 train loss:4.368708 +step:1390 train loss:4.391671 +step:1391 train loss:4.421270 +step:1392 train loss:4.390249 +step:1393 train loss:4.459563 +step:1394 train loss:4.376670 +step:1395 train loss:4.419622 +step:1396 train loss:4.403217 +step:1397 train loss:4.418734 +step:1398 train loss:4.424665 +step:1399 train loss:4.392740 +step:1400 train loss:4.371512 +step:1401 train loss:4.371082 +step:1402 train loss:4.366741 +step:1403 train loss:4.337627 +step:1404 train loss:4.390602 +step:1405 train loss:4.353621 +step:1406 train loss:4.377102 +step:1407 train loss:4.370640 +step:1408 train loss:4.365679 +step:1409 train loss:4.344080 +step:1410 train loss:4.369585 +step:1411 train loss:4.387389 +step:1412 train loss:4.458957 +step:1413 train loss:4.374061 +step:1414 train loss:4.397396 +step:1415 train loss:4.355250 +step:1416 train loss:4.410419 +step:1417 train loss:4.380586 +step:1418 train loss:4.322022 +step:1419 train loss:4.310087 +step:1420 train loss:4.358481 +step:1421 train loss:4.399865 +step:1422 train loss:4.378034 +step:1423 train loss:4.473331 +step:1424 train loss:4.360991 +step:1425 train loss:4.331274 +step:1426 train loss:4.363477 +step:1427 train loss:4.362913 +step:1428 train loss:4.337975 +step:1429 train loss:4.357566 +step:1430 train loss:4.344587 +step:1431 train loss:4.378144 +step:1432 train loss:4.358158 
+step:1433 train loss:4.348210 +step:1434 train loss:4.315054 +step:1435 train loss:4.319654 +step:1436 train loss:4.377879 +step:1437 train loss:4.319627 +step:1438 train loss:4.325850 +step:1439 train loss:4.297712 +step:1440 train loss:4.335340 +step:1441 train loss:4.409352 +step:1442 train loss:4.372972 +step:1443 train loss:4.307307 +step:1444 train loss:4.323006 +step:1445 train loss:4.336211 +step:1446 train loss:4.336997 +step:1447 train loss:4.347882 +step:1448 train loss:4.322226 +step:1449 train loss:4.353861 +step:1450 train loss:4.367063 +step:1451 train loss:4.287600 +step:1452 train loss:4.345362 +step:1453 train loss:4.334005 +step:1454 train loss:4.335312 +step:1455 train loss:4.259027 +step:1456 train loss:4.348309 +step:1457 train loss:4.273040 +step:1458 train loss:4.416604 +step:1459 train loss:4.331710 +step:1460 train loss:4.305696 +step:1461 train loss:4.359253 +step:1462 train loss:4.355628 +step:1463 train loss:4.338976 +step:1464 train loss:4.306386 +step:1465 train loss:4.312681 +step:1466 train loss:4.269753 +step:1467 train loss:4.404551 +step:1468 train loss:4.285470 +step:1469 train loss:4.369538 +step:1470 train loss:4.297619 +step:1471 train loss:4.319232 +step:1472 train loss:4.296181 +step:1473 train loss:4.306421 +step:1474 train loss:4.252069 +step:1475 train loss:4.309361 +step:1476 train loss:4.383783 +step:1477 train loss:4.328040 +step:1478 train loss:4.270198 +step:1479 train loss:4.293217 +step:1480 train loss:4.296125 +step:1481 train loss:4.265891 +step:1482 train loss:4.326481 +step:1483 train loss:4.313789 +step:1484 train loss:4.353749 +step:1485 train loss:4.361514 +step:1486 train loss:4.308028 +step:1487 train loss:4.290634 +step:1488 train loss:4.297483 +step:1489 train loss:4.285507 +step:1490 train loss:4.344071 +step:1491 train loss:4.322529 +step:1492 train loss:4.334669 +step:1493 train loss:4.278550 +step:1494 train loss:4.311403 +step:1495 train loss:4.281331 +step:1496 train loss:4.260580 +step:1497 train loss:4.337080 +step:1498 train loss:4.242652 +step:1499 train loss:4.281255 +step:1500 validation loss:4.270107 total_sharp:1.9079e-02 L1_sharp:1.2984e+00 L2_sharp:9.2006e-01 L3_sharp:6.0511e-01 L4_sharp:4.7086e-01 L5_sharp:3.8603e-01 L6_sharp:3.1387e-01 L7_sharp:2.3281e-01 L8_sharp:1.3842e-01 L9_sharp:1.0163e-01 L10_sharp:7.2634e-02 L11_sharp:5.9166e-02 L12_sharp:5.6932e-02 total_fnorm:1.2944e+00 total_l1_linf:7.1487e+03 total_spectral:1.2944e+00 L1_fnorm:3.1506e-02 L2_fnorm:2.8556e-02 L3_fnorm:2.7219e-02 L4_fnorm:2.9238e-02 L5_fnorm:2.9768e-02 L6_fnorm:3.0788e-02 L7_fnorm:3.1357e-02 L8_fnorm:3.1386e-02 L9_fnorm:3.1637e-02 L10_fnorm:3.1683e-02 L11_fnorm:3.1315e-02 L12_fnorm:3.1553e-02 L1_l1linf:2.0171e-01 L2_l1linf:2.2077e-01 L3_l1linf:2.2590e-01 L4_l1linf:2.3483e-01 L5_l1linf:2.3337e-01 L6_l1linf:2.4425e-01 L7_l1linf:2.4067e-01 L8_l1linf:2.3436e-01 L9_l1linf:2.3468e-01 L10_l1linf:2.2639e-01 L11_l1linf:2.2200e-01 L12_l1linf:2.0753e-01 L1_spectral:4.5297e-03 L2_spectral:5.0139e-03 L3_spectral:5.1024e-03 L4_spectral:5.2972e-03 L5_spectral:5.3095e-03 L6_spectral:5.4433e-03 L7_spectral:5.4382e-03 L8_spectral:5.9432e-03 L9_spectral:5.7709e-03 L10_spectral:5.9966e-03 L11_spectral:5.1210e-03 L12_spectral:5.6073e-03 ip_v_neg_g:1.2452e-02 cos_v_neg_g:9.0186e-04 v_norm:1.2944e+00 g_norm:1.0667e+01 hv_norm:6.5246e+00 cos_v_hv:3.7850e-03 hg_norm:1.8844e+04 cos_g_hg:6.3545e-01 v_par:4.4046e-05 v_perp:1.2944e+00 L1_cos_v_neg_g:1.7683e-02 L1_v_norm:3.1506e-02 L2_cos_v_neg_g:1.5951e-02 L2_v_norm:2.8556e-02 L3_cos_v_neg_g:1.3353e-02 
L3_v_norm:2.7219e-02 L4_cos_v_neg_g:1.1307e-02 L4_v_norm:2.9238e-02 L5_cos_v_neg_g:1.0723e-02 L5_v_norm:2.9768e-02 L6_cos_v_neg_g:8.4663e-03 L6_v_norm:3.0788e-02 L7_cos_v_neg_g:7.4803e-03 L7_v_norm:3.1357e-02 L8_cos_v_neg_g:6.9928e-03 L8_v_norm:3.1386e-02 L9_cos_v_neg_g:7.0384e-03 L9_v_norm:3.1637e-02 L10_cos_v_neg_g:6.4673e-03 L10_v_norm:3.1683e-02 L11_cos_v_neg_g:5.7591e-03 L11_v_norm:3.1315e-02 L12_cos_v_neg_g:5.3727e-03 L12_v_norm:3.1553e-02 +step:1500 train loss:4.286199 +step:1501 train loss:4.294266 +step:1502 train loss:4.246873 +step:1503 train loss:4.288442 +step:1504 train loss:4.255232 +step:1505 train loss:4.235665 +step:1506 train loss:4.219096 +step:1507 train loss:4.253575 +step:1508 train loss:4.257694 +step:1509 train loss:4.310825 +step:1510 train loss:4.248439 +step:1511 train loss:4.281990 +step:1512 train loss:4.243057 +step:1513 train loss:4.323942 +step:1514 train loss:4.270464 +step:1515 train loss:4.334123 +step:1516 train loss:4.250065 +step:1517 train loss:4.278454 +step:1518 train loss:4.348046 +step:1519 train loss:4.304936 +step:1520 train loss:4.348533 +step:1521 train loss:4.271978 +step:1522 train loss:4.316206 +step:1523 train loss:4.323216 +step:1524 train loss:4.230512 +step:1525 train loss:4.308887 +step:1526 train loss:4.220408 +step:1527 train loss:4.294622 +step:1528 train loss:4.327136 +step:1529 train loss:4.295474 +step:1530 train loss:4.322155 +step:1531 train loss:4.249295 +step:1532 train loss:4.321711 +step:1533 train loss:4.294634 +step:1534 train loss:4.235633 +step:1535 train loss:4.298946 +step:1536 train loss:4.323081 +step:1537 train loss:4.280773 +step:1538 train loss:4.258687 +step:1539 train loss:4.273264 +step:1540 train loss:4.275482 +step:1541 train loss:4.254888 +step:1542 train loss:4.341274 +step:1543 train loss:4.356149 +step:1544 train loss:4.235693 +step:1545 train loss:4.217758 +step:1546 train loss:4.251959 +step:1547 train loss:4.249031 +step:1548 train loss:4.283383 +step:1549 train loss:4.225156 +step:1550 train loss:4.323797 +step:1551 train loss:4.260641 +step:1552 train loss:4.296274 +step:1553 train loss:4.297619 +step:1554 train loss:4.306689 +step:1555 train loss:4.263943 +step:1556 train loss:4.248187 +step:1557 train loss:4.252416 +step:1558 train loss:4.272365 +step:1559 train loss:4.243310 +step:1560 train loss:4.322099 +step:1561 train loss:4.291341 +step:1562 train loss:4.187516 +step:1563 train loss:4.174034 +step:1564 train loss:4.282514 +step:1565 train loss:4.273888 +step:1566 train loss:4.281940 +step:1567 train loss:4.293067 +step:1568 train loss:4.237390 +step:1569 train loss:4.235702 +step:1570 train loss:4.245131 +step:1571 train loss:4.229376 +step:1572 train loss:4.235916 +step:1573 train loss:4.272828 +step:1574 train loss:4.229341 +step:1575 train loss:4.255639 +step:1576 train loss:4.206136 +step:1577 train loss:4.231382 +step:1578 train loss:4.217195 +step:1579 train loss:4.291529 +step:1580 train loss:4.243060 +step:1581 train loss:4.278980 +step:1582 train loss:4.271908 +step:1583 train loss:4.254655 +step:1584 train loss:4.177817 +step:1585 train loss:4.272626 +step:1586 train loss:4.225049 +step:1587 train loss:4.243632 +step:1588 train loss:4.223929 +step:1589 train loss:4.270369 +step:1590 train loss:4.180116 +step:1591 train loss:4.247743 +step:1592 train loss:4.193494 +step:1593 train loss:4.231285 +step:1594 train loss:4.225971 +step:1595 train loss:4.222382 +step:1596 train loss:4.226688 +step:1597 train loss:4.160573 +step:1598 train loss:4.258392 +step:1599 train loss:4.271555 
+step:1600 train loss:4.162773 +step:1601 train loss:4.226172 +step:1602 train loss:4.279464 +step:1603 train loss:4.273656 +step:1604 train loss:4.205747 +step:1605 train loss:4.252262 +step:1606 train loss:4.300669 +step:1607 train loss:4.191023 +step:1608 train loss:4.215693 +step:1609 train loss:4.230195 +step:1610 train loss:4.293419 +step:1611 train loss:4.215814 +step:1612 train loss:4.147316 +step:1613 train loss:4.211092 +step:1614 train loss:4.317300 +step:1615 train loss:4.256971 +step:1616 train loss:4.263954 +step:1617 train loss:4.226749 +step:1618 train loss:4.236229 +step:1619 train loss:4.404196 +step:1620 train loss:4.194242 +step:1621 train loss:4.252632 +step:1622 train loss:4.182610 +step:1623 train loss:4.236932 +step:1624 train loss:4.215311 +step:1625 train loss:4.286108 +step:1626 train loss:4.186577 +step:1627 train loss:4.186993 +step:1628 train loss:4.207214 +step:1629 train loss:4.227684 +step:1630 train loss:4.252090 +step:1631 train loss:4.197649 +step:1632 train loss:4.178414 +step:1633 train loss:4.194186 +step:1634 train loss:4.240043 +step:1635 train loss:4.186560 +step:1636 train loss:4.178871 +step:1637 train loss:4.252622 +step:1638 train loss:4.342305 +step:1639 train loss:4.165070 +step:1640 train loss:4.234619 +step:1641 train loss:4.206603 +step:1642 train loss:4.283341 +step:1643 train loss:4.188251 +step:1644 train loss:4.213322 +step:1645 train loss:4.186069 +step:1646 train loss:4.261406 +step:1647 train loss:4.159516 +step:1648 train loss:4.221205 +step:1649 train loss:4.186567 +step:1650 train loss:4.199687 +step:1651 train loss:4.219934 +step:1652 train loss:4.229011 +step:1653 train loss:4.238538 +step:1654 train loss:4.223798 +step:1655 train loss:4.208817 +step:1656 train loss:4.199385 +step:1657 train loss:4.201957 +step:1658 train loss:4.171608 +step:1659 train loss:4.247311 +step:1660 train loss:4.149335 +step:1661 train loss:4.255438 +step:1662 train loss:4.198273 +step:1663 train loss:4.186817 +step:1664 train loss:4.281214 +step:1665 train loss:4.205315 +step:1666 train loss:4.217495 +step:1667 train loss:4.225050 +step:1668 train loss:4.207287 +step:1669 train loss:4.176472 +step:1670 train loss:4.212181 +step:1671 train loss:4.214508 +step:1672 train loss:4.210421 +step:1673 train loss:4.175471 +step:1674 train loss:4.165873 +step:1675 train loss:4.215711 +step:1676 train loss:4.462090 +step:1677 train loss:4.226847 +step:1678 train loss:4.135236 +step:1679 train loss:4.261145 +step:1680 train loss:4.190682 +step:1681 train loss:4.243267 +step:1682 train loss:4.195157 +step:1683 train loss:4.198267 +step:1684 train loss:4.156507 +step:1685 train loss:4.207935 +step:1686 train loss:4.197259 +step:1687 train loss:4.200365 +step:1688 train loss:4.191047 +step:1689 train loss:4.175291 +step:1690 train loss:4.207109 +step:1691 train loss:4.177198 +step:1692 train loss:4.197952 +step:1693 train loss:4.158432 +step:1694 train loss:4.123568 +step:1695 train loss:4.143939 +step:1696 train loss:4.157431 +step:1697 train loss:4.201687 +step:1698 train loss:4.188658 +step:1699 train loss:4.157960 +step:1700 train loss:4.232648 +step:1701 train loss:4.172920 +step:1702 train loss:4.161522 +step:1703 train loss:4.185869 +step:1704 train loss:4.190977 +step:1705 train loss:4.200953 +step:1706 train loss:4.217440 +step:1707 train loss:4.205511 +step:1708 train loss:4.138264 +step:1709 train loss:4.228564 +step:1710 train loss:4.155738 +step:1711 train loss:4.155425 +step:1712 train loss:4.178777 +step:1713 train loss:4.147508 +step:1714 train 
loss:4.503175 +step:1715 train loss:4.165727 +step:1716 train loss:4.144329 +step:1717 train loss:4.148381 +step:1718 train loss:4.220827 +step:1719 train loss:4.140357 +step:1720 train loss:4.215528 +step:1721 train loss:4.158906 +step:1722 train loss:4.133364 +step:1723 train loss:4.222727 +step:1724 train loss:4.179745 +step:1725 train loss:4.173290 +step:1726 train loss:4.177572 +step:1727 train loss:4.202208 +step:1728 train loss:4.210142 +step:1729 train loss:4.131031 +step:1730 train loss:4.219719 +step:1731 train loss:4.145058 +step:1732 train loss:4.153809 +step:1733 train loss:4.145204 +step:1734 train loss:4.186821 +step:1735 train loss:4.255721 +step:1736 train loss:4.157111 +step:1737 train loss:4.192713 +step:1738 train loss:4.160279 +step:1739 train loss:4.215348 +step:1740 train loss:4.208510 +step:1741 train loss:4.252960 +step:1742 train loss:4.250729 +step:1743 train loss:4.141164 +step:1744 train loss:4.158338 +step:1745 train loss:4.140403 +step:1746 train loss:4.127548 +step:1747 train loss:4.167675 +step:1748 train loss:4.101750 +step:1749 train loss:4.141512 +step:1750 validation loss:4.130858 +step:1750 train loss:4.177042 +step:1751 train loss:4.197586 +step:1752 train loss:4.148198 +step:1753 train loss:4.181842 +step:1754 train loss:4.171244 +step:1755 train loss:4.170590 +step:1756 train loss:4.198063 +step:1757 train loss:4.199608 +step:1758 train loss:4.118758 +step:1759 train loss:4.205136 +step:1760 train loss:4.160645 +step:1761 train loss:4.129449 +step:1762 train loss:4.134090 +step:1763 train loss:4.137821 +step:1764 train loss:4.415069 +step:1765 train loss:4.140801 +step:1766 train loss:4.229399 +step:1767 train loss:4.144813 +step:1768 train loss:4.118769 +step:1769 train loss:4.133554 +step:1770 train loss:4.158766 +step:1771 train loss:4.130758 +step:1772 train loss:4.237702 +step:1773 train loss:4.165577 +step:1774 train loss:4.172144 +step:1775 train loss:4.283296 +step:1776 train loss:4.162499 +step:1777 train loss:4.146680 +step:1778 train loss:4.208807 +step:1779 train loss:4.131804 +step:1780 train loss:4.190740 +step:1781 train loss:4.195787 +step:1782 train loss:4.220405 +step:1783 train loss:4.156423 +step:1784 train loss:4.238897 +step:1785 train loss:4.154057 +step:1786 train loss:4.145831 +step:1787 train loss:4.145967 +step:1788 train loss:4.167715 +step:1789 train loss:4.116938 +step:1790 train loss:4.131765 +step:1791 train loss:4.201458 +step:1792 train loss:4.207589 +step:1793 train loss:4.124157 +step:1794 train loss:4.168745 +step:1795 train loss:4.124052 +step:1796 train loss:4.105728 +step:1797 train loss:4.171601 +step:1798 train loss:4.105243 +step:1799 train loss:4.167063 +step:1800 train loss:4.186871 +step:1801 train loss:4.179361 +step:1802 train loss:4.187201 +step:1803 train loss:4.180346 +step:1804 train loss:4.178502 +step:1805 train loss:4.162346 +step:1806 train loss:4.180103 +step:1807 train loss:4.114238 +step:1808 train loss:4.180728 +step:1809 train loss:4.151335 +step:1810 train loss:4.153029 +step:1811 train loss:4.161556 +step:1812 train loss:4.147684 +step:1813 train loss:4.160058 +step:1814 train loss:4.215555 +step:1815 train loss:4.163197 +step:1816 train loss:4.118123 +step:1817 train loss:4.104142 +step:1818 train loss:4.165377 +step:1819 train loss:4.128892 +step:1820 train loss:4.174561 +step:1821 train loss:4.129529 +step:1822 train loss:4.113665 +step:1823 train loss:4.109528 +step:1824 train loss:4.180216 +step:1825 train loss:4.086883 +step:1826 train loss:4.136158 +step:1827 train loss:4.105727 
+step:1828 train loss:4.148369 +step:1829 train loss:4.112196 +step:1830 train loss:4.306390 +step:1831 train loss:4.073610 +step:1832 train loss:4.108284 +step:1833 train loss:4.171825 +step:1834 train loss:4.113296 +step:1835 train loss:4.125299 +step:1836 train loss:4.162754 +step:1837 train loss:4.087769 +step:1838 train loss:4.183481 +step:1839 train loss:4.156891 +step:1840 train loss:4.135745 +step:1841 train loss:4.152328 +step:1842 train loss:4.131267 +step:1843 train loss:4.077525 +step:1844 train loss:4.142034 +step:1845 train loss:4.112607 +step:1846 train loss:4.157973 +step:1847 train loss:4.218983 +step:1848 train loss:4.006926 +step:1849 train loss:4.110039 +step:1850 train loss:4.086573 +step:1851 train loss:4.132132 +step:1852 train loss:4.114355 +step:1853 train loss:4.167043 +step:1854 train loss:4.133603 +step:1855 train loss:4.112141 +step:1856 train loss:4.123753 +step:1857 train loss:4.120076 +step:1858 train loss:4.166603 +step:1859 train loss:4.125154 +step:1860 train loss:4.087040 +step:1861 train loss:4.110114 +step:1862 train loss:4.147338 +step:1863 train loss:4.185500 +step:1864 train loss:4.084254 +step:1865 train loss:4.108054 +step:1866 train loss:4.116601 +step:1867 train loss:4.138629 +step:1868 train loss:4.190248 +step:1869 train loss:4.103763 +step:1870 train loss:4.135799 +step:1871 train loss:4.076446 +step:1872 train loss:4.138991 +step:1873 train loss:4.206576 +step:1874 train loss:4.062007 +step:1875 train loss:4.145746 +step:1876 train loss:4.105696 +step:1877 train loss:4.148505 +step:1878 train loss:4.076471 +step:1879 train loss:4.129177 +step:1880 train loss:4.212151 +step:1881 train loss:4.139720 +step:1882 train loss:4.151838 +step:1883 train loss:4.179060 +step:1884 train loss:4.181460 +step:1885 train loss:4.151886 +step:1886 train loss:4.074273 +step:1887 train loss:4.091119 +step:1888 train loss:4.093952 +step:1889 train loss:4.102828 +step:1890 train loss:4.117561 +step:1891 train loss:4.042582 +step:1892 train loss:4.144625 +step:1893 train loss:4.065810 +step:1894 train loss:4.080526 +step:1895 train loss:4.120326 +step:1896 train loss:4.166233 +step:1897 train loss:4.071193 +step:1898 train loss:4.113925 +step:1899 train loss:4.127788 +step:1900 train loss:4.078421 +step:1901 train loss:4.146688 +step:1902 train loss:4.148757 +step:1903 train loss:4.088923 +step:1904 train loss:4.080473 +step:1905 train loss:4.079867 +step:1906 train loss:4.129742 +step:1907 train loss:4.082973 +step:1908 train loss:4.089455 +step:1909 train loss:4.184469 +step:1910 train loss:4.082271 +step:1911 train loss:4.079693 +step:1912 train loss:4.131624 +step:1913 train loss:4.073880 +step:1914 train loss:4.107069 +step:1915 train loss:4.076377 +step:1916 train loss:4.119310 +step:1917 train loss:4.108665 +step:1918 train loss:4.020667 +step:1919 train loss:4.170150 +step:1920 train loss:4.280347 +step:1921 train loss:4.051126 +step:1922 train loss:4.031922 +step:1923 train loss:4.134240 +step:1924 train loss:4.166040 +step:1925 train loss:4.116066 +step:1926 train loss:4.050539 +step:1927 train loss:4.133873 +step:1928 train loss:4.045803 +step:1929 train loss:4.076777 +step:1930 train loss:4.146208 +step:1931 train loss:4.055940 +step:1932 train loss:4.109447 +step:1933 train loss:4.104163 +step:1934 train loss:4.173333 +step:1935 train loss:4.128535 +step:1936 train loss:4.094238 +step:1937 train loss:4.038869 +step:1938 train loss:4.396461 +step:1939 train loss:4.146064 +step:1940 train loss:4.128594 +step:1941 train loss:4.121653 +step:1942 train 
loss:4.123807 +step:1943 train loss:4.112390 +step:1944 train loss:4.076715 +step:1945 train loss:4.076094 +step:1946 train loss:4.095755 +step:1947 train loss:4.122809 +step:1948 train loss:4.027325 +step:1949 train loss:4.138914 +step:1950 train loss:4.079932 +step:1951 train loss:4.098845 +step:1952 train loss:4.129687 +step:1953 train loss:4.067605 +step:1954 train loss:4.094036 +step:1955 train loss:4.053391 +step:1956 train loss:4.128854 +step:1957 train loss:4.152707 +step:1958 train loss:4.167965 +step:1959 train loss:4.038765 +step:1960 train loss:4.079689 +step:1961 train loss:4.107859 +step:1962 train loss:4.107187 +step:1963 train loss:4.079895 +step:1964 train loss:4.115076 +step:1965 train loss:4.158400 +step:1966 train loss:4.057134 +step:1967 train loss:4.121051 +step:1968 train loss:4.056818 +step:1969 train loss:4.072370 +step:1970 train loss:4.136157 +step:1971 train loss:4.040063 +step:1972 train loss:4.144226 +step:1973 train loss:4.044165 +step:1974 train loss:4.091864 +step:1975 train loss:4.058718 +step:1976 train loss:4.073669 +step:1977 train loss:4.118621 +step:1978 train loss:4.064595 +step:1979 train loss:4.038424 +step:1980 train loss:4.078950 +step:1981 train loss:4.054422 +step:1982 train loss:4.144412 +step:1983 train loss:4.083721 +step:1984 train loss:4.121433 +step:1985 train loss:4.110389 +step:1986 train loss:4.097879 +step:1987 train loss:4.053920 +step:1988 train loss:4.084192 +step:1989 train loss:4.212328 +step:1990 train loss:4.057102 +step:1991 train loss:4.050753 +step:1992 train loss:4.058823 +step:1993 train loss:4.097812 +step:1994 train loss:4.085167 +step:1995 train loss:4.045530 +step:1996 train loss:4.093139 +step:1997 train loss:4.095227 +step:1998 train loss:4.052759 +step:1999 train loss:4.158013 +step:2000 validation loss:4.034933 total_sharp:1.3554e-02 L1_sharp:4.1843e-01 L2_sharp:2.9068e-01 L3_sharp:3.6563e-01 L4_sharp:3.1166e-01 L5_sharp:2.7700e-01 L6_sharp:2.7186e-01 L7_sharp:2.0620e-01 L8_sharp:1.5112e-01 L9_sharp:1.3486e-01 L10_sharp:1.1562e-01 L11_sharp:1.0658e-01 L12_sharp:1.2440e-01 total_fnorm:1.3289e+00 total_l1_linf:7.3417e+03 total_spectral:1.3289e+00 L1_fnorm:3.1500e-02 L2_fnorm:2.9421e-02 L3_fnorm:2.8335e-02 L4_fnorm:2.9569e-02 L5_fnorm:3.0259e-02 L6_fnorm:3.0993e-02 L7_fnorm:3.1553e-02 L8_fnorm:3.1432e-02 L9_fnorm:3.1662e-02 L10_fnorm:3.1737e-02 L11_fnorm:3.1660e-02 L12_fnorm:3.1816e-02 L1_l1linf:2.0512e-01 L2_l1linf:2.1499e-01 L3_l1linf:2.3844e-01 L4_l1linf:2.4448e-01 L5_l1linf:2.4813e-01 L6_l1linf:2.4434e-01 L7_l1linf:2.4588e-01 L8_l1linf:2.4223e-01 L9_l1linf:2.5058e-01 L10_l1linf:2.6242e-01 L11_l1linf:2.5663e-01 L12_l1linf:2.5768e-01 L1_spectral:4.6613e-03 L2_spectral:4.8701e-03 L3_spectral:5.4116e-03 L4_spectral:5.5158e-03 L5_spectral:5.6280e-03 L6_spectral:5.4741e-03 L7_spectral:5.5135e-03 L8_spectral:5.4855e-03 L9_spectral:5.6047e-03 L10_spectral:5.8398e-03 L11_spectral:5.7624e-03 L12_spectral:5.7409e-03 ip_v_neg_g:1.0379e-02 cos_v_neg_g:8.0268e-04 v_norm:1.3289e+00 g_norm:9.7299e+00 hv_norm:4.7281e+00 cos_v_hv:3.8095e-03 hg_norm:8.2950e+03 cos_g_hg:6.5869e-01 v_par:3.3345e-05 v_perp:1.3289e+00 L1_cos_v_neg_g:7.6964e-03 L1_v_norm:3.1500e-02 L2_cos_v_neg_g:1.0077e-02 L2_v_norm:2.9421e-02 L3_cos_v_neg_g:1.2125e-02 L3_v_norm:2.8335e-02 L4_cos_v_neg_g:1.2940e-02 L4_v_norm:2.9569e-02 L5_cos_v_neg_g:1.2360e-02 L5_v_norm:3.0259e-02 L6_cos_v_neg_g:1.1085e-02 L6_v_norm:3.0993e-02 L7_cos_v_neg_g:1.0140e-02 L7_v_norm:3.1553e-02 L8_cos_v_neg_g:9.6959e-03 L8_v_norm:3.1432e-02 L9_cos_v_neg_g:9.8247e-03 L9_v_norm:3.1662e-02 
L10_cos_v_neg_g:9.4908e-03 L10_v_norm:3.1737e-02 L11_cos_v_neg_g:8.8764e-03 L11_v_norm:3.1660e-02 L12_cos_v_neg_g:8.0550e-03 L12_v_norm:3.1816e-02 +step:2000 train loss:4.133606 +step:2001 train loss:4.044916 +step:2002 train loss:4.147960 +step:2003 train loss:4.190829 +step:2004 train loss:4.061313 +step:2005 train loss:4.157583 +step:2006 train loss:4.053745 +step:2007 train loss:4.127655 +step:2008 train loss:4.068800 +step:2009 train loss:4.069975 +step:2010 train loss:4.196207 +step:2011 train loss:4.046829 +step:2012 train loss:4.069963 +step:2013 train loss:4.090787 +step:2014 train loss:3.972396 +step:2015 train loss:4.103035 +step:2016 train loss:4.084636 +step:2017 train loss:4.085264 +step:2018 train loss:4.053111 +step:2019 train loss:4.083617 +step:2020 train loss:4.091879 +step:2021 train loss:4.056062 +step:2022 train loss:4.095779 +step:2023 train loss:4.073202 +step:2024 train loss:4.121547 +step:2025 train loss:4.069775 +step:2026 train loss:4.048137 +step:2027 train loss:4.078045 +step:2028 train loss:4.011563 +step:2029 train loss:4.035568 +step:2030 train loss:4.043124 +step:2031 train loss:4.008777 +step:2032 train loss:4.053680 +step:2033 train loss:4.057286 +step:2034 train loss:4.044389 +step:2035 train loss:4.091250 +step:2036 train loss:4.080644 +step:2037 train loss:4.062916 +step:2038 train loss:4.066127 +step:2039 train loss:4.054865 +step:2040 train loss:4.083618 +step:2041 train loss:4.088906 +step:2042 train loss:4.017398 +step:2043 train loss:4.174195 +step:2044 train loss:4.033565 +step:2045 train loss:4.061559 +step:2046 train loss:4.063672 +step:2047 train loss:4.036659 +step:2048 train loss:4.086790 +step:2049 train loss:4.042757 +step:2050 train loss:4.068228 +step:2051 train loss:4.033423 +step:2052 train loss:4.077157 +step:2053 train loss:4.089630 +step:2054 train loss:4.039095 +step:2055 train loss:4.046702 +step:2056 train loss:4.092045 +step:2057 train loss:4.095860 +step:2058 train loss:4.066038 +step:2059 train loss:4.140516 +step:2060 train loss:4.094632 +step:2061 train loss:4.039257 +step:2062 train loss:4.063878 +step:2063 train loss:3.978581 +step:2064 train loss:4.089397 +step:2065 train loss:4.102301 +step:2066 train loss:3.965398 +step:2067 train loss:4.003424 +step:2068 train loss:4.113289 +step:2069 train loss:4.044762 +step:2070 train loss:4.048923 +step:2071 train loss:4.097365 +step:2072 train loss:4.017902 +step:2073 train loss:4.072580 +step:2074 train loss:4.047916 +step:2075 train loss:4.133935 +step:2076 train loss:4.079373 +step:2077 train loss:4.094294 +step:2078 train loss:4.045716 +step:2079 train loss:4.200897 +step:2080 train loss:4.017609 +step:2081 train loss:4.126366 +step:2082 train loss:4.059142 +step:2083 train loss:4.039556 +step:2084 train loss:4.024411 +step:2085 train loss:4.071684 +step:2086 train loss:4.080614 +step:2087 train loss:4.118080 +step:2088 train loss:3.984301 +step:2089 train loss:4.019217 +step:2090 train loss:4.051190 +step:2091 train loss:4.069018 +step:2092 train loss:4.049653 +step:2093 train loss:4.033046 +step:2094 train loss:4.072797 +step:2095 train loss:4.024433 +step:2096 train loss:4.007791 +step:2097 train loss:4.042693 +step:2098 train loss:4.041055 +step:2099 train loss:4.013503 +step:2100 train loss:4.094199 +step:2101 train loss:4.080468 +step:2102 train loss:4.048630 +step:2103 train loss:4.065918 +step:2104 train loss:4.039855 +step:2105 train loss:4.049444 +step:2106 train loss:4.043499 +step:2107 train loss:4.108359 +step:2108 train loss:4.038651 +step:2109 train 
loss:3.990327 +step:2110 train loss:4.089699 +step:2111 train loss:4.036677 +step:2112 train loss:4.086250 +step:2113 train loss:4.031519 +step:2114 train loss:4.036206 +step:2115 train loss:4.083027 +step:2116 train loss:4.018152 +step:2117 train loss:4.035255 +step:2118 train loss:4.034645 +step:2119 train loss:3.965644 +step:2120 train loss:4.052331 +step:2121 train loss:4.037841 +step:2122 train loss:4.045589 +step:2123 train loss:4.105410 +step:2124 train loss:4.100290 +step:2125 train loss:4.015159 +step:2126 train loss:4.021966 +step:2127 train loss:4.014811 +step:2128 train loss:4.004350 +step:2129 train loss:4.032825 +step:2130 train loss:4.032582 +step:2131 train loss:4.064152 +step:2132 train loss:3.990747 +step:2133 train loss:4.097937 +step:2134 train loss:4.046681 +step:2135 train loss:4.008571 +step:2136 train loss:4.094859 +step:2137 train loss:4.066338 +step:2138 train loss:4.017518 +step:2139 train loss:4.023746 +step:2140 train loss:4.029640 +step:2141 train loss:4.072351 +step:2142 train loss:4.045634 +step:2143 train loss:3.969154 +step:2144 train loss:4.075146 +step:2145 train loss:4.043462 +step:2146 train loss:4.080073 +step:2147 train loss:4.180390 +step:2148 train loss:3.987296 +step:2149 train loss:3.996269 +step:2150 train loss:4.026212 +step:2151 train loss:4.062348 +step:2152 train loss:4.050124 +step:2153 train loss:4.090925 +step:2154 train loss:4.006616 +step:2155 train loss:4.090510 +step:2156 train loss:4.006595 +step:2157 train loss:4.086236 +step:2158 train loss:4.119445 +step:2159 train loss:4.045784 +step:2160 train loss:4.124946 +step:2161 train loss:4.022104 +step:2162 train loss:4.027769 +step:2163 train loss:4.007365 +step:2164 train loss:4.028680 +step:2165 train loss:4.004462 +step:2166 train loss:4.124068 +step:2167 train loss:4.028472 +step:2168 train loss:4.047153 +step:2169 train loss:3.998410 +step:2170 train loss:4.134873 +step:2171 train loss:4.102839 +step:2172 train loss:4.039344 +step:2173 train loss:4.021954 +step:2174 train loss:4.085300 +step:2175 train loss:4.023988 +step:2176 train loss:4.097275 +step:2177 train loss:4.068312 +step:2178 train loss:3.994699 +step:2179 train loss:4.060275 +step:2180 train loss:4.071693 +step:2181 train loss:4.009369 +step:2182 train loss:4.057505 +step:2183 train loss:4.052964 +step:2184 train loss:4.005142 +step:2185 train loss:3.984838 +step:2186 train loss:4.024058 +step:2187 train loss:4.037013 +step:2188 train loss:4.086210 +step:2189 train loss:3.975567 +step:2190 train loss:4.021661 +step:2191 train loss:4.077071 +step:2192 train loss:4.003457 +step:2193 train loss:3.971759 +step:2194 train loss:3.987318 +step:2195 train loss:4.004284 +step:2196 train loss:4.011732 +step:2197 train loss:3.989983 +step:2198 train loss:4.008407 +step:2199 train loss:4.086157 +step:2200 train loss:4.014658 +step:2201 train loss:4.018058 +step:2202 train loss:3.986123 +step:2203 train loss:4.008483 +step:2204 train loss:4.037562 +step:2205 train loss:4.018993 +step:2206 train loss:4.022167 +step:2207 train loss:4.012270 +step:2208 train loss:3.995708 +step:2209 train loss:4.272098 +step:2210 train loss:4.045787 +step:2211 train loss:4.038648 +step:2212 train loss:4.006964 +step:2213 train loss:4.091672 +step:2214 train loss:4.085696 +step:2215 train loss:4.010097 +step:2216 train loss:3.980930 +step:2217 train loss:4.000788 +step:2218 train loss:4.002426 +step:2219 train loss:4.038126 +step:2220 train loss:3.983955 +step:2221 train loss:4.013862 +step:2222 train loss:4.034031 +step:2223 train loss:4.065226 
+step:2224 train loss:4.044744 +step:2225 train loss:3.980462 +step:2226 train loss:4.047282 +step:2227 train loss:4.049565 +step:2228 train loss:4.048407 +step:2229 train loss:3.989736 +step:2230 train loss:4.115548 +step:2231 train loss:4.027286 +step:2232 train loss:4.028139 +step:2233 train loss:4.067786 +step:2234 train loss:3.966597 +step:2235 train loss:4.057297 +step:2236 train loss:3.990435 +step:2237 train loss:4.123667 +step:2238 train loss:3.937134 +step:2239 train loss:4.007915 +step:2240 train loss:4.024785 +step:2241 train loss:3.943255 +step:2242 train loss:4.076803 +step:2243 train loss:4.118860 +step:2244 train loss:3.997498 +step:2245 train loss:3.991872 +step:2246 train loss:3.967902 +step:2247 train loss:3.967143 +step:2248 train loss:4.017618 +step:2249 train loss:4.008329 +step:2250 validation loss:3.967480 +step:2250 train loss:4.017200 +step:2251 train loss:3.982622 +step:2252 train loss:3.982550 +step:2253 train loss:4.006694 +step:2254 train loss:4.009606 +step:2255 train loss:3.971724 +step:2256 train loss:4.022854 +step:2257 train loss:4.013711 +step:2258 train loss:4.003363 +step:2259 train loss:4.014577 +step:2260 train loss:3.975806 +step:2261 train loss:4.050103 +step:2262 train loss:4.066371 +step:2263 train loss:4.028012 +step:2264 train loss:4.137739 +step:2265 train loss:3.988391 +step:2266 train loss:4.034566 +step:2267 train loss:3.992551 +step:2268 train loss:3.994569 +step:2269 train loss:3.997580 +step:2270 train loss:3.988357 +step:2271 train loss:3.999704 +step:2272 train loss:4.038745 +step:2273 train loss:3.960425 +step:2274 train loss:3.988762 +step:2275 train loss:3.952501 +step:2276 train loss:4.020019 +step:2277 train loss:4.032055 +step:2278 train loss:4.017729 +step:2279 train loss:3.993468 +step:2280 train loss:3.906143 +step:2281 train loss:4.047712 +step:2282 train loss:3.983544 +step:2283 train loss:3.965455 +step:2284 train loss:3.988656 +step:2285 train loss:4.035407 +step:2286 train loss:3.995848 +step:2287 train loss:4.036050 +step:2288 train loss:4.006082 +step:2289 train loss:4.002646 +step:2290 train loss:4.008092 +step:2291 train loss:3.998043 +step:2292 train loss:4.030202 +step:2293 train loss:4.014338 +step:2294 train loss:4.010106 +step:2295 train loss:4.066469 +step:2296 train loss:3.998828 +step:2297 train loss:3.971987 +step:2298 train loss:4.031719 +step:2299 train loss:4.005139 +step:2300 train loss:3.922382 +step:2301 train loss:4.016541 +step:2302 train loss:4.036147 +step:2303 train loss:3.998239 +step:2304 train loss:3.991399 +step:2305 train loss:4.035143 +step:2306 train loss:4.030420 +step:2307 train loss:3.997657 +step:2308 train loss:4.022518 +step:2309 train loss:3.981334 +step:2310 train loss:3.965102 +step:2311 train loss:3.957221 +step:2312 train loss:4.021618 +step:2313 train loss:3.937823 +step:2314 train loss:4.012117 +step:2315 train loss:4.028736 +step:2316 train loss:4.062446 +step:2317 train loss:3.932534 +step:2318 train loss:3.970571 +step:2319 train loss:4.027380 +step:2320 train loss:3.999464 +step:2321 train loss:3.970897 +step:2322 train loss:3.981290 +step:2323 train loss:3.979744 +step:2324 train loss:4.003517 +step:2325 train loss:3.953562 +step:2326 train loss:3.968802 +step:2327 train loss:4.088264 +step:2328 train loss:4.038059 +step:2329 train loss:3.989368 +step:2330 train loss:3.944402 +step:2331 train loss:3.990901 +step:2332 train loss:3.919740 +step:2333 train loss:3.984091 +step:2334 train loss:3.961209 +step:2335 train loss:3.945525 +step:2336 train loss:4.197713 +step:2337 
train loss:3.977082 +step:2338 train loss:4.013454 +step:2339 train loss:4.014202 +step:2340 train loss:4.022395 +step:2341 train loss:4.014392 +step:2342 train loss:3.966746 +step:2343 train loss:3.987253 +step:2344 train loss:4.026719 +step:2345 train loss:3.988301 +step:2346 train loss:4.016367 +step:2347 train loss:3.937031 +step:2348 train loss:3.998444 +step:2349 train loss:3.948227 +step:2350 train loss:4.001395 +step:2351 train loss:4.012118 +step:2352 train loss:4.011149 +step:2353 train loss:3.965743 +step:2354 train loss:4.021724 +step:2355 train loss:4.003264 +step:2356 train loss:4.046986 +step:2357 train loss:3.954913 +step:2358 train loss:3.969981 +step:2359 train loss:3.994120 +step:2360 train loss:4.013475 +step:2361 train loss:4.054379 +step:2362 train loss:3.875691 +step:2363 train loss:4.065930 +step:2364 train loss:4.017356 +step:2365 train loss:3.991886 +step:2366 train loss:3.948408 +step:2367 train loss:4.004558 +step:2368 train loss:4.003347 +step:2369 train loss:3.981260 +step:2370 train loss:4.001776 +step:2371 train loss:4.059346 +step:2372 train loss:3.915027 +step:2373 train loss:4.052799 +step:2374 train loss:4.033476 +step:2375 train loss:4.020675 +step:2376 train loss:4.008005 +step:2377 train loss:3.956972 +step:2378 train loss:4.000149 +step:2379 train loss:3.985569 +step:2380 train loss:4.046624 +step:2381 train loss:4.130961 +step:2382 train loss:3.926271 +step:2383 train loss:3.979936 +step:2384 train loss:4.005244 +step:2385 train loss:3.904983 +step:2386 train loss:4.064290 +step:2387 train loss:3.946418 +step:2388 train loss:3.993417 +step:2389 train loss:4.014458 +step:2390 train loss:3.966532 +step:2391 train loss:3.985472 +step:2392 train loss:4.012824 +step:2393 train loss:3.971954 +step:2394 train loss:3.991106 +step:2395 train loss:3.982823 +step:2396 train loss:3.989707 +step:2397 train loss:3.966063 +step:2398 train loss:4.016820 +step:2399 train loss:3.979717 +step:2400 train loss:3.962613 +step:2401 train loss:4.000452 +step:2402 train loss:3.951451 +step:2403 train loss:4.007280 +step:2404 train loss:3.958696 +step:2405 train loss:3.961882 +step:2406 train loss:3.990091 +step:2407 train loss:3.936802 +step:2408 train loss:3.981784 +step:2409 train loss:3.967498 +step:2410 train loss:3.970653 +step:2411 train loss:4.038283 +step:2412 train loss:4.030747 +step:2413 train loss:4.059294 +step:2414 train loss:3.955853 +step:2415 train loss:3.948107 +step:2416 train loss:3.963523 +step:2417 train loss:3.997352 +step:2418 train loss:4.018527 +step:2419 train loss:3.944903 +step:2420 train loss:3.968161 +step:2421 train loss:3.999628 +step:2422 train loss:4.049714 +step:2423 train loss:3.982036 +step:2424 train loss:3.948437 +step:2425 train loss:4.008044 +step:2426 train loss:3.948194 +step:2427 train loss:3.976514 +step:2428 train loss:4.053513 +step:2429 train loss:4.008330 +step:2430 train loss:4.097003 +step:2431 train loss:4.011701 +step:2432 train loss:3.978335 +step:2433 train loss:3.960270 +step:2434 train loss:3.944098 +step:2435 train loss:3.997032 +step:2436 train loss:3.959316 +step:2437 train loss:3.991140 +step:2438 train loss:4.031225 +step:2439 train loss:4.015289 +step:2440 train loss:3.961441 +step:2441 train loss:3.993318 +step:2442 train loss:3.985056 +step:2443 train loss:3.951964 +step:2444 train loss:3.987294 +step:2445 train loss:3.981042 +step:2446 train loss:3.956589 +step:2447 train loss:3.935802 +step:2448 train loss:3.982190 +step:2449 train loss:4.011384 +step:2450 train loss:3.972092 +step:2451 train loss:3.888026 
+step:2452 train loss:3.995036 +step:2453 train loss:3.964430 +step:2454 train loss:3.959852 +step:2455 train loss:4.012145 +step:2456 train loss:3.967737 +step:2457 train loss:4.024167 +step:2458 train loss:4.002266 +step:2459 train loss:3.980272 +step:2460 train loss:3.980606 +step:2461 train loss:4.016392 +step:2462 train loss:3.988613 +step:2463 train loss:3.958790 +step:2464 train loss:3.979700 +step:2465 train loss:4.053884 +step:2466 train loss:4.134882 +step:2467 train loss:4.043228 +step:2468 train loss:3.940455 +step:2469 train loss:4.002993 +step:2470 train loss:4.058254 +step:2471 train loss:4.060665 +step:2472 train loss:4.048046 +step:2473 train loss:3.974805 +step:2474 train loss:3.932898 +step:2475 train loss:3.987423 +step:2476 train loss:4.064891 +step:2477 train loss:3.976944 +step:2478 train loss:3.933310 +step:2479 train loss:3.977973 +step:2480 train loss:3.968421 +step:2481 train loss:4.155079 +step:2482 train loss:3.970257 +step:2483 train loss:3.998800 +step:2484 train loss:3.947384 +step:2485 train loss:3.931332 +step:2486 train loss:3.973644 +step:2487 train loss:4.005755 +step:2488 train loss:3.918324 +step:2489 train loss:4.029053 +step:2490 train loss:3.950933 +step:2491 train loss:3.960062 +step:2492 train loss:4.000964 +step:2493 train loss:4.038739 +step:2494 train loss:3.962061 +step:2495 train loss:3.994705 +step:2496 train loss:3.973191 +step:2497 train loss:3.988214 +step:2498 train loss:3.991309 +step:2499 train loss:3.989065 +step:2500 validation loss:3.916346 total_sharp:1.0710e-02 L1_sharp:7.9475e-01 L2_sharp:4.4222e-01 L3_sharp:3.3330e-01 L4_sharp:1.9368e-01 L5_sharp:1.5879e-01 L6_sharp:1.6204e-01 L7_sharp:1.5230e-01 L8_sharp:1.3744e-01 L9_sharp:1.0586e-01 L10_sharp:9.1992e-02 L11_sharp:6.3000e-02 L12_sharp:7.3927e-02 total_fnorm:1.3252e+00 total_l1_linf:7.3187e+03 total_spectral:1.3252e+00 L1_fnorm:3.1849e-02 L2_fnorm:2.9247e-02 L3_fnorm:2.8727e-02 L4_fnorm:3.0156e-02 L5_fnorm:3.0639e-02 L6_fnorm:3.1317e-02 L7_fnorm:3.1725e-02 L8_fnorm:3.1641e-02 L9_fnorm:3.1751e-02 L10_fnorm:3.1908e-02 L11_fnorm:3.1608e-02 L12_fnorm:3.1562e-02 L1_l1linf:2.7090e-01 L2_l1linf:2.5581e-01 L3_l1linf:2.5764e-01 L4_l1linf:2.6690e-01 L5_l1linf:2.6199e-01 L6_l1linf:2.6216e-01 L7_l1linf:2.5365e-01 L8_l1linf:2.5913e-01 L9_l1linf:2.6394e-01 L10_l1linf:2.8183e-01 L11_l1linf:2.6839e-01 L12_l1linf:2.4477e-01 L1_spectral:5.9897e-03 L2_spectral:5.7760e-03 L3_spectral:5.8333e-03 L4_spectral:6.0082e-03 L5_spectral:5.8556e-03 L6_spectral:5.9200e-03 L7_spectral:5.7278e-03 L8_spectral:5.8070e-03 L9_spectral:5.9182e-03 L10_spectral:6.2996e-03 L11_spectral:6.0080e-03 L12_spectral:5.6115e-03 ip_v_neg_g:7.1674e-03 cos_v_neg_g:6.7979e-04 v_norm:1.3252e+00 g_norm:7.9561e+00 hv_norm:3.4274e+00 cos_v_hv:4.1412e-03 hg_norm:3.6800e+03 cos_g_hg:6.0146e-01 v_par:2.7056e-05 v_perp:1.3252e+00 L1_cos_v_neg_g:1.2036e-02 L1_v_norm:3.1849e-02 L2_cos_v_neg_g:1.1465e-02 L2_v_norm:2.9247e-02 L3_cos_v_neg_g:1.1169e-02 L3_v_norm:2.8727e-02 L4_cos_v_neg_g:8.6998e-03 L4_v_norm:3.0156e-02 L5_cos_v_neg_g:7.7683e-03 L5_v_norm:3.0639e-02 L6_cos_v_neg_g:7.6365e-03 L6_v_norm:3.1317e-02 L7_cos_v_neg_g:7.2568e-03 L7_v_norm:3.1725e-02 L8_cos_v_neg_g:6.7021e-03 L8_v_norm:3.1641e-02 L9_cos_v_neg_g:5.5061e-03 L9_v_norm:3.1751e-02 L10_cos_v_neg_g:5.1909e-03 L10_v_norm:3.1908e-02 L11_cos_v_neg_g:4.5812e-03 L11_v_norm:3.1608e-02 L12_cos_v_neg_g:4.9207e-03 L12_v_norm:3.1562e-02 +step:2500 train loss:3.933105 +step:2501 train loss:3.992231 +step:2502 train loss:3.982245 +step:2503 train loss:3.915374 +step:2504 train 
loss:3.945709 +step:2505 train loss:3.979798 +step:2506 train loss:3.937145 +step:2507 train loss:3.964381 +step:2508 train loss:3.916936 +step:2509 train loss:3.931307 +step:2510 train loss:3.928633 +step:2511 train loss:3.972143 +step:2512 train loss:4.019204 +step:2513 train loss:3.963985 +step:2514 train loss:3.950584 +step:2515 train loss:4.088626 +step:2516 train loss:3.976192 +step:2517 train loss:4.034921 +step:2518 train loss:3.998242 +step:2519 train loss:3.973181 +step:2520 train loss:3.981552 +step:2521 train loss:3.947602 +step:2522 train loss:3.992804 +step:2523 train loss:3.910758 +step:2524 train loss:3.967904 +step:2525 train loss:3.959146 +step:2526 train loss:4.009179 +step:2527 train loss:3.997948 +step:2528 train loss:3.982023 +step:2529 train loss:4.008778 +step:2530 train loss:3.979973 +step:2531 train loss:3.921911 +step:2532 train loss:4.017158 +step:2533 train loss:3.908596 +step:2534 train loss:4.007425 +step:2535 train loss:3.963912 +step:2536 train loss:3.884757 +step:2537 train loss:4.001334 +step:2538 train loss:3.978613 +step:2539 train loss:3.997373 +step:2540 train loss:3.937515 +step:2541 train loss:3.961355 +step:2542 train loss:3.972586 +step:2543 train loss:3.963512 +step:2544 train loss:3.947853 +step:2545 train loss:3.938985 +step:2546 train loss:3.906646 +step:2547 train loss:3.948639 +step:2548 train loss:3.968739 +step:2549 train loss:3.969871 +step:2550 train loss:4.101000 +step:2551 train loss:4.177211 +step:2552 train loss:3.906902 +step:2553 train loss:3.941319 +step:2554 train loss:4.088420 +step:2555 train loss:3.979056 +step:2556 train loss:3.904466 +step:2557 train loss:3.995067 +step:2558 train loss:3.989516 +step:2559 train loss:3.938985 +step:2560 train loss:3.924287 +step:2561 train loss:4.025498 +step:2562 train loss:3.977522 +step:2563 train loss:3.910367 +step:2564 train loss:3.981867 +step:2565 train loss:3.962689 +step:2566 train loss:3.940890 +step:2567 train loss:3.919262 +step:2568 train loss:3.977667 +step:2569 train loss:3.982527 +step:2570 train loss:3.936786 +step:2571 train loss:4.017424 +step:2572 train loss:3.981454 +step:2573 train loss:3.913247 +step:2574 train loss:3.955029 +step:2575 train loss:4.003691 +step:2576 train loss:3.957081 +step:2577 train loss:3.916405 +step:2578 train loss:3.958641 +step:2579 train loss:3.938251 +step:2580 train loss:3.910523 +step:2581 train loss:3.922909 +step:2582 train loss:3.932309 +step:2583 train loss:3.955775 +step:2584 train loss:3.968828 +step:2585 train loss:3.931523 +step:2586 train loss:3.959505 +step:2587 train loss:3.888644 +step:2588 train loss:3.921063 +step:2589 train loss:3.999839 +step:2590 train loss:3.921714 +step:2591 train loss:3.977369 +step:2592 train loss:4.029133 +step:2593 train loss:3.986178 +step:2594 train loss:3.941957 +step:2595 train loss:3.953676 +step:2596 train loss:3.996483 +step:2597 train loss:3.879302 +step:2598 train loss:4.031497 +step:2599 train loss:3.979354 +step:2600 train loss:4.008945 +step:2601 train loss:3.943151 +step:2602 train loss:3.982238 +step:2603 train loss:3.972455 +step:2604 train loss:3.894525 +step:2605 train loss:4.018290 +step:2606 train loss:3.973810 +step:2607 train loss:3.931858 +step:2608 train loss:3.904190 +step:2609 train loss:3.932163 +step:2610 train loss:3.956663 +step:2611 train loss:3.991783 +step:2612 train loss:3.953556 +step:2613 train loss:3.928173 +step:2614 train loss:3.916408 +step:2615 train loss:3.915749 +step:2616 train loss:3.987816 +step:2617 train loss:3.947566 +step:2618 train loss:3.911218 
+step:2619 train loss:3.932173 +step:2620 train loss:3.921635 +step:2621 train loss:3.935116 +step:2622 train loss:4.011036 +step:2623 train loss:3.885195 +step:2624 train loss:3.897920 +step:2625 train loss:3.972953 +step:2626 train loss:3.964607 +step:2627 train loss:3.941062 +step:2628 train loss:3.994841 +step:2629 train loss:3.943199 +step:2630 train loss:3.937495 +step:2631 train loss:3.967452 +step:2632 train loss:3.938434 +step:2633 train loss:3.920182 +step:2634 train loss:3.967551 +step:2635 train loss:3.950974 +step:2636 train loss:4.000701 +step:2637 train loss:3.949034 +step:2638 train loss:3.927431 +step:2639 train loss:3.991020 +step:2640 train loss:3.906636 +step:2641 train loss:3.965043 +step:2642 train loss:3.882929 +step:2643 train loss:3.888871 +step:2644 train loss:3.982247 +step:2645 train loss:3.910964 +step:2646 train loss:3.947487 +step:2647 train loss:3.964449 +step:2648 train loss:3.997998 +step:2649 train loss:3.911592 +step:2650 train loss:3.900222 +step:2651 train loss:3.944429 +step:2652 train loss:3.913569 +step:2653 train loss:3.981307 +step:2654 train loss:3.941714 +step:2655 train loss:3.930778 +step:2656 train loss:3.949985 +step:2657 train loss:3.975622 +step:2658 train loss:3.980849 +step:2659 train loss:3.961217 +step:2660 train loss:3.950733 +step:2661 train loss:3.994855 +step:2662 train loss:3.970876 +step:2663 train loss:3.943181 +step:2664 train loss:3.959813 +step:2665 train loss:3.906251 +step:2666 train loss:3.936572 +step:2667 train loss:3.942462 +step:2668 train loss:3.920802 +step:2669 train loss:3.925318 +step:2670 train loss:3.953263 +step:2671 train loss:3.926721 +step:2672 train loss:3.949543 +step:2673 train loss:3.883864 +step:2674 train loss:3.974984 +step:2675 train loss:3.949738 +step:2676 train loss:3.970834 +step:2677 train loss:3.951321 +step:2678 train loss:3.932667 +step:2679 train loss:3.915901 +step:2680 train loss:3.901976 +step:2681 train loss:3.872483 +step:2682 train loss:3.960493 +step:2683 train loss:3.931459 +step:2684 train loss:3.961109 +step:2685 train loss:3.891183 +step:2686 train loss:3.897466 +step:2687 train loss:3.970188 +step:2688 train loss:3.986793 +step:2689 train loss:3.894187 +step:2690 train loss:3.976669 +step:2691 train loss:3.944373 +step:2692 train loss:3.970927 +step:2693 train loss:4.028665 +step:2694 train loss:3.924753 +step:2695 train loss:3.943194 +step:2696 train loss:3.948159 +step:2697 train loss:3.941149 +step:2698 train loss:3.943798 +step:2699 train loss:3.963934 +step:2700 train loss:3.941032 +step:2701 train loss:4.000162 +step:2702 train loss:3.939386 +step:2703 train loss:3.898432 +step:2704 train loss:3.970897 +step:2705 train loss:3.964894 +step:2706 train loss:3.896899 +step:2707 train loss:3.860458 +step:2708 train loss:3.954302 +step:2709 train loss:3.937477 +step:2710 train loss:3.947605 +step:2711 train loss:3.909846 +step:2712 train loss:3.968667 +step:2713 train loss:3.977760 +step:2714 train loss:3.919111 +step:2715 train loss:3.913631 +step:2716 train loss:3.978025 +step:2717 train loss:3.948208 +step:2718 train loss:3.940080 +step:2719 train loss:3.942568 +step:2720 train loss:3.904420 +step:2721 train loss:3.985350 +step:2722 train loss:3.915292 +step:2723 train loss:3.898568 +step:2724 train loss:3.924273 +step:2725 train loss:3.924559 +step:2726 train loss:3.896490 +step:2727 train loss:3.954612 +step:2728 train loss:3.895381 +step:2729 train loss:4.025099 +step:2730 train loss:3.961734 +step:2731 train loss:4.001758 +step:2732 train loss:3.920594 +step:2733 train 
loss:3.917348 +step:2734 train loss:3.961224 +step:2735 train loss:3.962009 +step:2736 train loss:3.884524 +step:2737 train loss:3.940842 +step:2738 train loss:3.996630 +step:2739 train loss:3.919830 +step:2740 train loss:3.919243 +step:2741 train loss:3.909540 +step:2742 train loss:3.821747 +step:2743 train loss:3.937214 +step:2744 train loss:3.955242 +step:2745 train loss:3.910066 +step:2746 train loss:3.931787 +step:2747 train loss:3.917990 +step:2748 train loss:3.874681 +step:2749 train loss:3.936036 +step:2750 validation loss:3.872425 +step:2750 train loss:3.946815 +step:2751 train loss:3.973144 +step:2752 train loss:3.953175 +step:2753 train loss:3.950585 +step:2754 train loss:3.883085 +step:2755 train loss:3.954556 +step:2756 train loss:3.924209 +step:2757 train loss:3.912415 +step:2758 train loss:3.938866 +step:2759 train loss:3.950105 +step:2760 train loss:3.862278 +step:2761 train loss:3.879878 +step:2762 train loss:3.894306 +step:2763 train loss:3.917051 +step:2764 train loss:3.861352 +step:2765 train loss:3.904266 +step:2766 train loss:3.996705 +step:2767 train loss:3.870267 +step:2768 train loss:3.931317 +step:2769 train loss:3.906045 +step:2770 train loss:3.928117 +step:2771 train loss:3.951943 +step:2772 train loss:3.914724 +step:2773 train loss:3.912884 +step:2774 train loss:3.906793 +step:2775 train loss:3.923690 +step:2776 train loss:3.879748 +step:2777 train loss:3.908387 +step:2778 train loss:3.919981 +step:2779 train loss:3.944172 +step:2780 train loss:3.911452 +step:2781 train loss:3.900872 +step:2782 train loss:3.889475 +step:2783 train loss:3.924199 +step:2784 train loss:3.930931 +step:2785 train loss:3.996300 +step:2786 train loss:3.968330 +step:2787 train loss:3.928401 +step:2788 train loss:3.921767 +step:2789 train loss:3.914376 +step:2790 train loss:3.857002 +step:2791 train loss:3.956369 +step:2792 train loss:3.944506 +step:2793 train loss:3.911507 +step:2794 train loss:3.921940 +step:2795 train loss:3.932582 +step:2796 train loss:3.927930 +step:2797 train loss:3.970479 +step:2798 train loss:3.961136 +step:2799 train loss:3.869229 +step:2800 train loss:3.916224 +step:2801 train loss:3.950865 +step:2802 train loss:3.975733 +step:2803 train loss:3.946075 +step:2804 train loss:3.882925 +step:2805 train loss:3.920484 +step:2806 train loss:3.918324 +step:2807 train loss:3.948390 +step:2808 train loss:3.884995 +step:2809 train loss:3.949488 +step:2810 train loss:3.941692 +step:2811 train loss:3.931947 +step:2812 train loss:3.976215 +step:2813 train loss:3.946932 +step:2814 train loss:3.936649 +step:2815 train loss:3.946372 +step:2816 train loss:3.950425 +step:2817 train loss:3.884272 +step:2818 train loss:3.989368 +step:2819 train loss:3.915862 +step:2820 train loss:3.910542 +step:2821 train loss:3.892009 +step:2822 train loss:3.937017 +step:2823 train loss:3.887722 +step:2824 train loss:3.785835 +step:2825 train loss:3.936013 +step:2826 train loss:3.927114 +step:2827 train loss:3.954037 +step:2828 train loss:3.938473 +step:2829 train loss:3.933659 +step:2830 train loss:3.959757 +step:2831 train loss:3.902773 +step:2832 train loss:3.873228 +step:2833 train loss:3.932625 +step:2834 train loss:3.885128 +step:2835 train loss:3.919391 +step:2836 train loss:3.922709 +step:2837 train loss:3.920280 +step:2838 train loss:3.859635 +step:2839 train loss:3.957010 +step:2840 train loss:3.920343 +step:2841 train loss:3.998995 +step:2842 train loss:3.946110 +step:2843 train loss:3.935846 +step:2844 train loss:3.960457 +step:2845 train loss:3.921536 +step:2846 train loss:3.871150 
+step:2847 train loss:3.960404 +step:2848 train loss:3.915193 +step:2849 train loss:3.909230 +step:2850 train loss:3.967545 +step:2851 train loss:3.920419 +step:2852 train loss:4.000383 +step:2853 train loss:3.915709 +step:2854 train loss:3.852037 +step:2855 train loss:3.935708 +step:2856 train loss:3.859995 +step:2857 train loss:3.960258 +step:2858 train loss:3.917261 +step:2859 train loss:3.900429 +step:2860 train loss:3.899940 +step:2861 train loss:3.883144 +step:2862 train loss:3.905097 +step:2863 train loss:3.889155 +step:2864 train loss:3.898479 +step:2865 train loss:3.972150 +step:2866 train loss:3.989031 +step:2867 train loss:3.926009 +step:2868 train loss:3.922407 +step:2869 train loss:3.884490 +step:2870 train loss:3.969231 +step:2871 train loss:3.965475 +step:2872 train loss:3.927361 +step:2873 train loss:3.940512 +step:2874 train loss:3.912583 +step:2875 train loss:3.866839 +step:2876 train loss:3.918266 +step:2877 train loss:3.896181 +step:2878 train loss:3.910842 +step:2879 train loss:3.877438 +step:2880 train loss:3.899206 +step:2881 train loss:3.890568 +step:2882 train loss:3.825014 +step:2883 train loss:3.913040 +step:2884 train loss:3.974861 +step:2885 train loss:3.875757 +step:2886 train loss:3.918704 +step:2887 train loss:3.942745 +step:2888 train loss:3.917902 +step:2889 train loss:3.903987 +step:2890 train loss:3.875412 +step:2891 train loss:3.913217 +step:2892 train loss:3.923990 +step:2893 train loss:3.906255 +step:2894 train loss:3.876263 +step:2895 train loss:3.923199 +step:2896 train loss:3.971973 +step:2897 train loss:3.950008 +step:2898 train loss:4.080214 +step:2899 train loss:3.841207 +step:2900 train loss:3.919153 +step:2901 train loss:3.865780 +step:2902 train loss:3.865463 +step:2903 train loss:3.881029 +step:2904 train loss:3.910027 +step:2905 train loss:3.968687 +step:2906 train loss:3.935796 +step:2907 train loss:4.106966 +step:2908 train loss:3.858482 +step:2909 train loss:3.933612 +step:2910 train loss:3.904578 +step:2911 train loss:3.933015 +step:2912 train loss:3.894477 +step:2913 train loss:3.925435 +step:2914 train loss:3.954802 +step:2915 train loss:3.952666 +step:2916 train loss:3.912275 +step:2917 train loss:3.939985 +step:2918 train loss:3.933226 +step:2919 train loss:3.875608 +step:2920 train loss:3.932421 +step:2921 train loss:3.889045 +step:2922 train loss:3.911852 +step:2923 train loss:3.980129 +step:2924 train loss:3.912848 +step:2925 train loss:3.867201 +step:2926 train loss:3.955792 +step:2927 train loss:3.865099 +step:2928 train loss:3.836420 +step:2929 train loss:3.851185 +step:2930 train loss:3.869390 +step:2931 train loss:4.022892 +step:2932 train loss:3.942407 +step:2933 train loss:3.910032 +step:2934 train loss:3.902552 +step:2935 train loss:3.925583 +step:2936 train loss:3.876450 +step:2937 train loss:3.889623 +step:2938 train loss:3.909441 +step:2939 train loss:3.983224 +step:2940 train loss:3.883507 +step:2941 train loss:3.919601 +step:2942 train loss:3.880542 +step:2943 train loss:4.149758 +step:2944 train loss:3.984786 +step:2945 train loss:3.941301 +step:2946 train loss:3.948133 +step:2947 train loss:3.912536 +step:2948 train loss:3.874830 +step:2949 train loss:3.963421 +step:2950 train loss:3.913071 +step:2951 train loss:3.812698 +step:2952 train loss:3.883067 +step:2953 train loss:3.798877 +step:2954 train loss:3.887674 +step:2955 train loss:3.955592 +step:2956 train loss:3.905156 +step:2957 train loss:3.907178 +step:2958 train loss:3.862880 +step:2959 train loss:3.884975 +step:2960 train loss:3.977630 +step:2961 train 
loss:3.844198 +step:2962 train loss:3.919259 +step:2963 train loss:3.908326 +step:2964 train loss:3.888619 +step:2965 train loss:3.917160 +step:2966 train loss:3.890327 +step:2967 train loss:3.890427 +step:2968 train loss:3.862456 +step:2969 train loss:3.875518 +step:2970 train loss:3.940028 +step:2971 train loss:3.874260 +step:2972 train loss:3.853475 +step:2973 train loss:3.850430 +step:2974 train loss:3.890663 +step:2975 train loss:3.855221 +step:2976 train loss:3.894280 +step:2977 train loss:3.886078 +step:2978 train loss:3.966754 +step:2979 train loss:3.949475 +step:2980 train loss:3.952939 +step:2981 train loss:3.917951 +step:2982 train loss:3.901122 +step:2983 train loss:3.856688 +step:2984 train loss:3.826499 +step:2985 train loss:3.939304 +step:2986 train loss:3.836969 +step:2987 train loss:3.962875 +step:2988 train loss:3.886640 +step:2989 train loss:3.920775 +step:2990 train loss:3.874077 +step:2991 train loss:3.943573 +step:2992 train loss:3.938708 +step:2993 train loss:3.906002 +step:2994 train loss:3.888292 +step:2995 train loss:3.958668 +step:2996 train loss:3.886080 +step:2997 train loss:3.796652 +step:2998 train loss:3.906226 +step:2999 train loss:3.943322 +step:3000 validation loss:3.838517 total_sharp:7.6758e-03 L1_sharp:2.1843e-01 L2_sharp:1.5576e-01 L3_sharp:2.0645e-01 L4_sharp:1.3166e-01 L5_sharp:1.2460e-01 L6_sharp:1.2679e-01 L7_sharp:1.4562e-01 L8_sharp:1.2624e-01 L9_sharp:1.0719e-01 L10_sharp:8.3857e-02 L11_sharp:6.5310e-02 L12_sharp:7.3592e-02 total_fnorm:1.3276e+00 total_l1_linf:7.3302e+03 total_spectral:1.3276e+00 L1_fnorm:3.1879e-02 L2_fnorm:3.0171e-02 L3_fnorm:2.9404e-02 L4_fnorm:3.0564e-02 L5_fnorm:3.1087e-02 L6_fnorm:3.1798e-02 L7_fnorm:3.1829e-02 L8_fnorm:3.1830e-02 L9_fnorm:3.1932e-02 L10_fnorm:3.1988e-02 L11_fnorm:3.1847e-02 L12_fnorm:3.2012e-02 L1_l1linf:2.5443e-01 L2_l1linf:2.9194e-01 L3_l1linf:2.8770e-01 L4_l1linf:2.9620e-01 L5_l1linf:2.7710e-01 L6_l1linf:2.7000e-01 L7_l1linf:2.5565e-01 L8_l1linf:2.6570e-01 L9_l1linf:2.7751e-01 L10_l1linf:2.8976e-01 L11_l1linf:2.9484e-01 L12_l1linf:2.8193e-01 L1_spectral:5.8241e-03 L2_spectral:6.6084e-03 L3_spectral:6.5698e-03 L4_spectral:6.6852e-03 L5_spectral:6.2761e-03 L6_spectral:6.1226e-03 L7_spectral:5.7545e-03 L8_spectral:6.0036e-03 L9_spectral:6.2586e-03 L10_spectral:6.5169e-03 L11_spectral:6.6894e-03 L12_spectral:6.4014e-03 ip_v_neg_g:9.2407e-03 cos_v_neg_g:9.3659e-04 v_norm:1.3276e+00 g_norm:7.4319e+00 hv_norm:2.2888e+00 cos_v_hv:4.4521e-03 hg_norm:2.7409e+03 cos_g_hg:5.8738e-01 v_par:3.3284e-05 v_perp:1.3276e+00 L1_cos_v_neg_g:1.4162e-02 L1_v_norm:3.1879e-02 L2_cos_v_neg_g:1.4381e-02 L2_v_norm:3.0171e-02 L3_cos_v_neg_g:1.4412e-02 L3_v_norm:2.9404e-02 L4_cos_v_neg_g:1.3570e-02 L4_v_norm:3.0564e-02 L5_cos_v_neg_g:1.2033e-02 L5_v_norm:3.1087e-02 L6_cos_v_neg_g:1.1170e-02 L6_v_norm:3.1798e-02 L7_cos_v_neg_g:1.1854e-02 L7_v_norm:3.1829e-02 L8_cos_v_neg_g:1.0840e-02 L8_v_norm:3.1830e-02 L9_cos_v_neg_g:9.5540e-03 L9_v_norm:3.1932e-02 L10_cos_v_neg_g:8.4743e-03 L10_v_norm:3.1988e-02 L11_cos_v_neg_g:5.8542e-03 L11_v_norm:3.1847e-02 L12_cos_v_neg_g:5.4612e-03 L12_v_norm:3.2012e-02 +step:3000 train loss:3.839044 +step:3001 train loss:3.894237 +step:3002 train loss:3.890721 +step:3003 train loss:3.888527 +step:3004 train loss:3.914154 +step:3005 train loss:3.813739 +step:3006 train loss:3.865188 +step:3007 train loss:3.894153 +step:3008 train loss:3.938366 +step:3009 train loss:3.899947 +step:3010 train loss:3.911727 +step:3011 train loss:3.903257 +step:3012 train loss:3.878124 +step:3013 train loss:3.920880 +step:3014 
train loss:3.879522 +step:3015 train loss:3.877226 +step:3016 train loss:3.904188 +step:3017 train loss:3.917226 +step:3018 train loss:3.849592 +step:3019 train loss:3.886933 +step:3020 train loss:3.903428 +step:3021 train loss:3.876156 +step:3022 train loss:3.963139 +step:3023 train loss:3.910037 +step:3024 train loss:3.899860 +step:3025 train loss:3.907191 +step:3026 train loss:3.883867 +step:3027 train loss:3.856590 +step:3028 train loss:3.909657 +step:3029 train loss:3.896854 +step:3030 train loss:3.870035 +step:3031 train loss:3.855115 +step:3032 train loss:3.839470 +step:3033 train loss:3.872425 +step:3034 train loss:3.914206 +step:3035 train loss:3.894234 +step:3036 train loss:3.853558 +step:3037 train loss:3.818565 +step:3038 train loss:3.936224 +step:3039 train loss:3.813582 +step:3040 train loss:3.805090 +step:3041 train loss:3.930304 +step:3042 train loss:3.869442 +step:3043 train loss:3.924208 +step:3044 train loss:3.819028 +step:3045 train loss:3.869834 +step:3046 train loss:3.838732 +step:3047 train loss:3.874560 +step:3048 train loss:3.832512 +step:3049 train loss:3.916825 +step:3050 train loss:3.800328 +step:3051 train loss:3.824423 +step:3052 train loss:3.839219 +step:3053 train loss:3.903968 +step:3054 train loss:3.978757 +step:3055 train loss:3.816823 +step:3056 train loss:3.854910 +step:3057 train loss:3.884262 +step:3058 train loss:3.834287 +step:3059 train loss:3.865628 +step:3060 train loss:3.857543 +step:3061 train loss:3.849571 +step:3062 train loss:3.894866 +step:3063 train loss:3.884254 +step:3064 train loss:3.903052 +step:3065 train loss:3.919572 +step:3066 train loss:3.825664 +step:3067 train loss:3.868175 +step:3068 train loss:3.922559 +step:3069 train loss:3.932084 +step:3070 train loss:3.866914 +step:3071 train loss:3.880415 +step:3072 train loss:3.883444 +step:3073 train loss:3.920362 +step:3074 train loss:3.857605 +step:3075 train loss:3.891959 +step:3076 train loss:3.826699 +step:3077 train loss:3.825426 +step:3078 train loss:3.850173 +step:3079 train loss:3.900003 +step:3080 train loss:3.891374 +step:3081 train loss:3.944179 +step:3082 train loss:3.910289 +step:3083 train loss:3.846874 +step:3084 train loss:3.924453 +step:3085 train loss:3.852727 +step:3086 train loss:3.913097 +step:3087 train loss:3.881597 +step:3088 train loss:3.964307 +step:3089 train loss:3.837315 +step:3090 train loss:3.908646 +step:3091 train loss:3.828019 +step:3092 train loss:3.860215 +step:3093 train loss:3.876992 +step:3094 train loss:3.865762 +step:3095 train loss:3.943849 +step:3096 train loss:3.875184 +step:3097 train loss:3.889857 +step:3098 train loss:3.861742 +step:3099 train loss:3.874415 +step:3100 train loss:3.895495 +step:3101 train loss:3.978626 +step:3102 train loss:3.905170 +step:3103 train loss:3.833187 +step:3104 train loss:3.914951 +step:3105 train loss:3.887157 +step:3106 train loss:3.879067 +step:3107 train loss:3.861701 +step:3108 train loss:3.842864 +step:3109 train loss:3.892627 +step:3110 train loss:3.826804 +step:3111 train loss:3.859091 +step:3112 train loss:3.796343 +step:3113 train loss:3.916325 +step:3114 train loss:3.826846 +step:3115 train loss:3.871125 +step:3116 train loss:3.746126 +step:3117 train loss:3.774149 +step:3118 train loss:3.867611 +step:3119 train loss:3.880784 +step:3120 train loss:3.877784 +step:3121 train loss:3.829760 +step:3122 train loss:3.904497 +step:3123 train loss:3.825709 +step:3124 train loss:3.883850 +step:3125 train loss:3.898165 +step:3126 train loss:3.999400 +step:3127 train loss:3.848328 +step:3128 train loss:3.877879 
+step:3129 train loss:3.858109 +step:3130 train loss:3.835838 +step:3131 train loss:3.914274 +step:3132 train loss:3.901260 +step:3133 train loss:3.870013 +step:3134 train loss:3.771293 +step:3135 train loss:3.858737 +step:3136 train loss:3.837569 +step:3137 train loss:3.966945 +step:3138 train loss:3.868651 +step:3139 train loss:3.849385 +step:3140 train loss:3.870127 +step:3141 train loss:3.876974 +step:3142 train loss:3.811760 +step:3143 train loss:3.900526 +step:3144 train loss:3.844166 +step:3145 train loss:3.830365 +step:3146 train loss:3.840241 +step:3147 train loss:3.953653 +step:3148 train loss:3.854475 +step:3149 train loss:3.912151 +step:3150 train loss:3.893594 +step:3151 train loss:3.867343 +step:3152 train loss:3.860794 +step:3153 train loss:3.827145 +step:3154 train loss:3.903247 +step:3155 train loss:3.851529 +step:3156 train loss:3.898294 +step:3157 train loss:3.905321 +step:3158 train loss:3.870313 +step:3159 train loss:3.810337 +step:3160 train loss:3.861128 +step:3161 train loss:3.823023 +step:3162 train loss:3.888953 +step:3163 train loss:3.868067 +step:3164 train loss:3.849506 +step:3165 train loss:3.863131 +step:3166 train loss:3.906012 +step:3167 train loss:3.866246 +step:3168 train loss:3.943865 +step:3169 train loss:3.857090 +step:3170 train loss:3.844457 +step:3171 train loss:3.824944 +step:3172 train loss:3.835735 +step:3173 train loss:3.778481 +step:3174 train loss:3.886474 +step:3175 train loss:3.861046 +step:3176 train loss:3.871818 +step:3177 train loss:3.834673 +step:3178 train loss:3.817302 +step:3179 train loss:3.888194 +step:3180 train loss:3.822753 +step:3181 train loss:3.903678 +step:3182 train loss:3.911630 +step:3183 train loss:3.852276 +step:3184 train loss:3.852923 +step:3185 train loss:3.912861 +step:3186 train loss:3.869563 +step:3187 train loss:3.885405 +step:3188 train loss:3.924407 +step:3189 train loss:3.880597 +step:3190 train loss:3.824947 +step:3191 train loss:3.835232 +step:3192 train loss:3.794195 +step:3193 train loss:3.879440 +step:3194 train loss:3.835596 +step:3195 train loss:3.828822 +step:3196 train loss:3.871494 +step:3197 train loss:3.837889 +step:3198 train loss:3.859751 +step:3199 train loss:3.855363 +step:3200 train loss:3.857255 +step:3201 train loss:3.828710 +step:3202 train loss:3.877205 +step:3203 train loss:3.948497 +step:3204 train loss:3.910079 +step:3205 train loss:3.757646 +step:3206 train loss:4.039175 +step:3207 train loss:3.802479 +step:3208 train loss:3.862770 +step:3209 train loss:3.859967 +step:3210 train loss:3.841258 +step:3211 train loss:3.865249 +step:3212 train loss:3.877858 +step:3213 train loss:3.816513 +step:3214 train loss:3.921452 +step:3215 train loss:3.922476 +step:3216 train loss:3.795716 +step:3217 train loss:3.871903 +step:3218 train loss:3.920877 +step:3219 train loss:3.831916 +step:3220 train loss:3.901137 +step:3221 train loss:3.815272 +step:3222 train loss:3.859285 +step:3223 train loss:3.872726 +step:3224 train loss:3.892088 +step:3225 train loss:3.809914 +step:3226 train loss:3.843429 +step:3227 train loss:3.872709 +step:3228 train loss:3.866476 +step:3229 train loss:3.902802 +step:3230 train loss:3.912225 +step:3231 train loss:3.850061 +step:3232 train loss:3.861280 +step:3233 train loss:3.830605 +step:3234 train loss:3.822902 +step:3235 train loss:3.823206 +step:3236 train loss:3.844181 +step:3237 train loss:3.840505 +step:3238 train loss:3.864831 +step:3239 train loss:3.763093 +step:3240 train loss:3.880737 +step:3241 train loss:3.875111 +step:3242 train loss:3.930708 +step:3243 train 
loss:3.876225 +step:3244 train loss:3.884967 +step:3245 train loss:3.794188 +step:3246 train loss:3.917472 +step:3247 train loss:3.858859 +step:3248 train loss:3.878009 +step:3249 train loss:3.825383 +step:3250 validation loss:3.799905 +step:3250 train loss:3.829821 +step:3251 train loss:3.935627 +step:3252 train loss:3.868517 +step:3253 train loss:3.867939 +step:3254 train loss:3.936110 +step:3255 train loss:3.875186 +step:3256 train loss:3.874316 +step:3257 train loss:3.849842 +step:3258 train loss:3.787523 +step:3259 train loss:3.761314 +step:3260 train loss:3.878553 +step:3261 train loss:3.861846 +step:3262 train loss:3.848522 +step:3263 train loss:3.830971 +step:3264 train loss:3.944834 +step:3265 train loss:3.854503 +step:3266 train loss:3.880885 +step:3267 train loss:3.842931 +step:3268 train loss:3.845182 +step:3269 train loss:3.856094 +step:3270 train loss:3.885814 +step:3271 train loss:3.853536 +step:3272 train loss:3.820310 +step:3273 train loss:3.838416 +step:3274 train loss:3.970965 +step:3275 train loss:3.846412 +step:3276 train loss:3.909185 +step:3277 train loss:3.852775 +step:3278 train loss:3.827909 +step:3279 train loss:3.855764 +step:3280 train loss:3.879844 +step:3281 train loss:3.808707 +step:3282 train loss:3.872896 +step:3283 train loss:3.852344 +step:3284 train loss:3.806752 +step:3285 train loss:3.828665 +step:3286 train loss:3.859743 +step:3287 train loss:3.796741 +step:3288 train loss:3.877512 +step:3289 train loss:3.824917 +step:3290 train loss:3.857789 +step:3291 train loss:3.818238 +step:3292 train loss:3.836187 +step:3293 train loss:3.879518 +step:3294 train loss:3.886262 +step:3295 train loss:3.804633 +step:3296 train loss:3.856604 +step:3297 train loss:3.823351 +step:3298 train loss:3.818949 +step:3299 train loss:3.948859 +step:3300 train loss:3.783517 +step:3301 train loss:3.868896 +step:3302 train loss:3.831941 +step:3303 train loss:3.856304 +step:3304 train loss:3.818940 +step:3305 train loss:3.907543 +step:3306 train loss:3.843887 +step:3307 train loss:3.868095 +step:3308 train loss:3.820992 +step:3309 train loss:3.877992 +step:3310 train loss:3.795958 +step:3311 train loss:3.848155 +step:3312 train loss:3.816988 +step:3313 train loss:3.851923 +step:3314 train loss:3.851480 +step:3315 train loss:3.925816 +step:3316 train loss:3.780339 +step:3317 train loss:3.876511 +step:3318 train loss:3.878632 +step:3319 train loss:3.813614 +step:3320 train loss:3.961874 +step:3321 train loss:3.874706 +step:3322 train loss:3.869945 +step:3323 train loss:3.975530 +step:3324 train loss:3.893034 +step:3325 train loss:3.870080 +step:3326 train loss:3.860498 +step:3327 train loss:3.873178 +step:3328 train loss:3.849694 +step:3329 train loss:3.847684 +step:3330 train loss:3.843139 +step:3331 train loss:3.890585 +step:3332 train loss:3.912838 +step:3333 train loss:3.873431 +step:3334 train loss:3.812861 +step:3335 train loss:3.815869 +step:3336 train loss:3.862046 +step:3337 train loss:3.853174 +step:3338 train loss:3.847205 +step:3339 train loss:3.834883 +step:3340 train loss:3.873828 +step:3341 train loss:3.824673 +step:3342 train loss:3.872219 +step:3343 train loss:3.807296 +step:3344 train loss:3.863620 +step:3345 train loss:3.819327 +step:3346 train loss:3.829710 +step:3347 train loss:3.840274 +step:3348 train loss:3.845525 +step:3349 train loss:3.845289 +step:3350 train loss:3.865512 +step:3351 train loss:3.921327 +step:3352 train loss:3.858283 +step:3353 train loss:3.955822 +step:3354 train loss:3.804729 +step:3355 train loss:3.912225 +step:3356 train loss:3.860526 
+step:3357 train loss:3.872183 +step:3358 train loss:3.814612 +step:3359 train loss:3.844268 +step:3360 train loss:3.835019 +step:3361 train loss:3.837184 +step:3362 train loss:3.827704 +step:3363 train loss:3.829115 +step:3364 train loss:3.814184 +step:3365 train loss:3.849316 +step:3366 train loss:3.882020 +step:3367 train loss:3.833129 +step:3368 train loss:3.926814 +step:3369 train loss:3.837532 +step:3370 train loss:3.915559 +step:3371 train loss:3.888086 +step:3372 train loss:3.857137 +step:3373 train loss:3.864099 +step:3374 train loss:3.911570 +step:3375 train loss:3.844904 +step:3376 train loss:3.849336 +step:3377 train loss:3.836451 +step:3378 train loss:3.815320 +step:3379 train loss:3.897010 +step:3380 train loss:3.872504 +step:3381 train loss:3.861891 +step:3382 train loss:3.872287 +step:3383 train loss:3.886870 +step:3384 train loss:3.812210 +step:3385 train loss:3.865502 +step:3386 train loss:3.843509 +step:3387 train loss:3.916497 +step:3388 train loss:3.821880 +step:3389 train loss:3.997945 +step:3390 train loss:3.755074 +step:3391 train loss:3.844261 +step:3392 train loss:3.826272 +step:3393 train loss:3.858354 +step:3394 train loss:3.814217 +step:3395 train loss:3.882560 +step:3396 train loss:3.797863 +step:3397 train loss:3.874459 +step:3398 train loss:3.842241 +step:3399 train loss:3.857908 +step:3400 train loss:3.807741 +step:3401 train loss:3.838645 +step:3402 train loss:3.999857 +step:3403 train loss:3.883376 +step:3404 train loss:4.001122 +step:3405 train loss:3.853585 +step:3406 train loss:3.841044 +step:3407 train loss:3.834167 +step:3408 train loss:3.816550 +step:3409 train loss:3.784225 +step:3410 train loss:3.816849 +step:3411 train loss:3.882747 +step:3412 train loss:3.812173 +step:3413 train loss:3.797019 +step:3414 train loss:3.837311 +step:3415 train loss:3.810641 +step:3416 train loss:3.813008 +step:3417 train loss:3.897276 +step:3418 train loss:3.894017 +step:3419 train loss:3.857685 +step:3420 train loss:3.827554 +step:3421 train loss:3.861849 +step:3422 train loss:3.876157 +step:3423 train loss:3.899835 +step:3424 train loss:3.781253 +step:3425 train loss:3.805584 +step:3426 train loss:3.798340 +step:3427 train loss:3.859178 +step:3428 train loss:3.788045 +step:3429 train loss:3.848292 +step:3430 train loss:3.810613 +step:3431 train loss:3.867051 +step:3432 train loss:3.849521 +step:3433 train loss:3.810737 +step:3434 train loss:3.900093 +step:3435 train loss:3.835191 +step:3436 train loss:3.930874 +step:3437 train loss:3.753841 +step:3438 train loss:3.860220 +step:3439 train loss:3.833928 +step:3440 train loss:3.929996 +step:3441 train loss:3.825896 +step:3442 train loss:3.889709 +step:3443 train loss:3.828681 +step:3444 train loss:3.843179 +step:3445 train loss:3.892678 +step:3446 train loss:3.796108 +step:3447 train loss:3.870977 +step:3448 train loss:3.826338 +step:3449 train loss:3.860183 +step:3450 train loss:3.765929 +step:3451 train loss:3.881144 +step:3452 train loss:3.835505 +step:3453 train loss:3.883306 +step:3454 train loss:3.911642 +step:3455 train loss:3.978475 +step:3456 train loss:3.913140 +step:3457 train loss:3.901952 +step:3458 train loss:3.829934 +step:3459 train loss:3.842246 +step:3460 train loss:3.786650 +step:3461 train loss:3.850524 +step:3462 train loss:3.849402 +step:3463 train loss:3.823334 +step:3464 train loss:3.871052 +step:3465 train loss:3.806628 +step:3466 train loss:3.872525 +step:3467 train loss:3.832684 +step:3468 train loss:3.840752 +step:3469 train loss:3.855567 +step:3470 train loss:3.837534 +step:3471 train 
loss:3.873531 +step:3472 train loss:3.761608 +step:3473 train loss:3.878638 +step:3474 train loss:3.784368 +step:3475 train loss:3.858895 +step:3476 train loss:3.831100 +step:3477 train loss:3.848013 +step:3478 train loss:3.829681 +step:3479 train loss:3.852711 +step:3480 train loss:3.873093 +step:3481 train loss:3.856778 +step:3482 train loss:3.832999 +step:3483 train loss:3.979249 +step:3484 train loss:3.820235 +step:3485 train loss:3.810508 +step:3486 train loss:3.853616 +step:3487 train loss:3.903226 +step:3488 train loss:3.805069 +step:3489 train loss:3.857333 +step:3490 train loss:3.831676 +step:3491 train loss:3.859461 +step:3492 train loss:3.900964 +step:3493 train loss:3.866185 +step:3494 train loss:3.864109 +step:3495 train loss:3.839862 +step:3496 train loss:3.804609 +step:3497 train loss:3.915196 +step:3498 train loss:3.865726 +step:3499 train loss:3.802871 +step:3500 validation loss:3.773662 total_sharp:7.7965e-03 L1_sharp:2.6829e-01 L2_sharp:1.6627e-01 L3_sharp:2.1112e-01 L4_sharp:1.2602e-01 L5_sharp:1.0821e-01 L6_sharp:1.1863e-01 L7_sharp:1.3685e-01 L8_sharp:1.2550e-01 L9_sharp:1.1082e-01 L10_sharp:8.8375e-02 L11_sharp:7.6173e-02 L12_sharp:1.6685e-01 total_fnorm:1.3324e+00 total_l1_linf:7.3564e+03 total_spectral:1.3324e+00 L1_fnorm:3.1816e-02 L2_fnorm:3.0081e-02 L3_fnorm:2.9371e-02 L4_fnorm:3.0505e-02 L5_fnorm:3.1060e-02 L6_fnorm:3.1770e-02 L7_fnorm:3.1671e-02 L8_fnorm:3.1879e-02 L9_fnorm:3.1938e-02 L10_fnorm:3.1981e-02 L11_fnorm:3.1934e-02 L12_fnorm:3.2035e-02 L1_l1linf:2.6377e-01 L2_l1linf:2.7223e-01 L3_l1linf:2.8372e-01 L4_l1linf:2.8408e-01 L5_l1linf:2.7495e-01 L6_l1linf:2.6301e-01 L7_l1linf:2.4855e-01 L8_l1linf:2.6177e-01 L9_l1linf:2.8394e-01 L10_l1linf:2.9633e-01 L11_l1linf:3.0787e-01 L12_l1linf:3.1159e-01 L1_spectral:5.9495e-03 L2_spectral:6.1438e-03 L3_spectral:6.4436e-03 L4_spectral:6.3969e-03 L5_spectral:6.2307e-03 L6_spectral:5.9098e-03 L7_spectral:5.6252e-03 L8_spectral:5.9166e-03 L9_spectral:6.4085e-03 L10_spectral:6.6331e-03 L11_spectral:6.9121e-03 L12_spectral:6.9751e-03 ip_v_neg_g:8.5271e-03 cos_v_neg_g:8.2859e-04 v_norm:1.3324e+00 g_norm:7.7237e+00 hv_norm:2.4660e+00 cos_v_hv:4.2126e-03 hg_norm:3.4641e+03 cos_g_hg:6.1208e-01 v_par:3.0235e-05 v_perp:1.3324e+00 L1_cos_v_neg_g:1.2241e-02 L1_v_norm:3.1816e-02 L2_cos_v_neg_g:1.1715e-02 L2_v_norm:3.0081e-02 L3_cos_v_neg_g:1.4801e-02 L3_v_norm:2.9371e-02 L4_cos_v_neg_g:1.0701e-02 L4_v_norm:3.0505e-02 L5_cos_v_neg_g:1.0015e-02 L5_v_norm:3.1060e-02 L6_cos_v_neg_g:9.7868e-03 L6_v_norm:3.1770e-02 L7_cos_v_neg_g:1.0071e-02 L7_v_norm:3.1671e-02 L8_cos_v_neg_g:9.7586e-03 L8_v_norm:3.1879e-02 L9_cos_v_neg_g:7.7757e-03 L9_v_norm:3.1938e-02 L10_cos_v_neg_g:6.1234e-03 L10_v_norm:3.1981e-02 L11_cos_v_neg_g:6.7821e-03 L11_v_norm:3.1934e-02 L12_cos_v_neg_g:7.4451e-03 L12_v_norm:3.2035e-02 +step:3500 train loss:3.817061 +step:3501 train loss:3.944206 +step:3502 train loss:3.924860 +step:3503 train loss:3.876147 +step:3504 train loss:3.827780 +step:3505 train loss:3.840940 +step:3506 train loss:3.736181 +step:3507 train loss:3.857540 +step:3508 train loss:3.807535 +step:3509 train loss:3.869949 +step:3510 train loss:3.804918 +step:3511 train loss:3.836155 +step:3512 train loss:3.979314 +step:3513 train loss:3.798722 +step:3514 train loss:3.814686 +step:3515 train loss:4.060763 +step:3516 train loss:3.858316 +step:3517 train loss:3.817809 +step:3518 train loss:3.820851 +step:3519 train loss:3.816101 +step:3520 train loss:3.848336 +step:3521 train loss:3.841221 +step:3522 train loss:3.744688 +step:3523 train loss:3.852723 +step:3524 
train loss:3.836391 +step:3525 train loss:3.822956 +step:3526 train loss:3.849359 +step:3527 train loss:3.802222 +step:3528 train loss:3.849164 +step:3529 train loss:3.825319 +step:3530 train loss:3.816777 +step:3531 train loss:3.817770 +step:3532 train loss:3.987844 +step:3533 train loss:3.820522 +step:3534 train loss:3.835078 +step:3535 train loss:3.814234 +step:3536 train loss:3.812005 +step:3537 train loss:3.818294 +step:3538 train loss:3.853152 +step:3539 train loss:3.801361 +step:3540 train loss:3.865522 +step:3541 train loss:3.829186 +step:3542 train loss:3.843857 +step:3543 train loss:3.762425 +step:3544 train loss:3.778593 +step:3545 train loss:3.786428 +step:3546 train loss:3.852644 +step:3547 train loss:3.861874 +step:3548 train loss:3.836126 +step:3549 train loss:3.831362 +step:3550 train loss:3.816060 +step:3551 train loss:3.852180 +step:3552 train loss:3.742766 +step:3553 train loss:3.866584 +step:3554 train loss:3.853309 +step:3555 train loss:3.845815 +step:3556 train loss:3.870336 +step:3557 train loss:3.852546 +step:3558 train loss:3.830541 +step:3559 train loss:3.770702 +step:3560 train loss:3.865924 +step:3561 train loss:3.857388 +step:3562 train loss:4.031476 +step:3563 train loss:3.887176 +step:3564 train loss:3.847513 +step:3565 train loss:3.849491 +step:3566 train loss:3.825810 +step:3567 train loss:3.766712 +step:3568 train loss:3.789755 +step:3569 train loss:3.877010 +step:3570 train loss:3.896076 +step:3571 train loss:3.883131 +step:3572 train loss:3.867226 +step:3573 train loss:3.827477 +step:3574 train loss:3.824702 +step:3575 train loss:3.814669 +step:3576 train loss:3.803463 +step:3577 train loss:3.805797 +step:3578 train loss:3.896175 +step:3579 train loss:3.802309 +step:3580 train loss:3.883332 +step:3581 train loss:3.823973 +step:3582 train loss:3.875998 +step:3583 train loss:3.822500 +step:3584 train loss:3.788156 +step:3585 train loss:3.843626 +step:3586 train loss:3.785806 +step:3587 train loss:3.887901 +step:3588 train loss:4.010141 +step:3589 train loss:3.846246 +step:3590 train loss:3.833397 +step:3591 train loss:3.839108 +step:3592 train loss:3.803179 +step:3593 train loss:3.773576 +step:3594 train loss:3.826092 +step:3595 train loss:3.800357 +step:3596 train loss:3.875063 +step:3597 train loss:3.854393 +step:3598 train loss:3.806202 +step:3599 train loss:3.857421 +step:3600 train loss:3.802309 +step:3601 train loss:3.809817 +step:3602 train loss:3.804017 +step:3603 train loss:3.815925 +step:3604 train loss:3.844238 +step:3605 train loss:3.942417 +step:3606 train loss:3.846018 +step:3607 train loss:3.832253 +step:3608 train loss:3.847608 +step:3609 train loss:3.833084 +step:3610 train loss:3.800046 +step:3611 train loss:3.805413 +step:3612 train loss:3.868013 +step:3613 train loss:3.844225 +step:3614 train loss:3.787979 +step:3615 train loss:3.825604 +step:3616 train loss:3.787899 +step:3617 train loss:3.853418 +step:3618 train loss:3.813002 +step:3619 train loss:3.800125 +step:3620 train loss:3.815709 +step:3621 train loss:3.780510 +step:3622 train loss:3.886177 +step:3623 train loss:3.873468 +step:3624 train loss:3.842692 +step:3625 train loss:3.826663 +step:3626 train loss:3.827521 +step:3627 train loss:3.823414 +step:3628 train loss:3.813722 +step:3629 train loss:3.817793 +step:3630 train loss:3.898370 +step:3631 train loss:3.824245 +step:3632 train loss:3.860268 +step:3633 train loss:3.816323 +step:3634 train loss:3.814329 +step:3635 train loss:3.806010 +step:3636 train loss:3.873301 +step:3637 train loss:3.953354 +step:3638 train loss:3.865562 
+step:3639 train loss:3.859829 +step:3640 train loss:3.864692 +step:3641 train loss:3.904840 +step:3642 train loss:3.800267 +step:3643 train loss:3.972395 +step:3644 train loss:3.863589 +step:3645 train loss:3.824354 +step:3646 train loss:3.951830 +step:3647 train loss:3.841626 +step:3648 train loss:3.835412 +step:3649 train loss:3.784823 +step:3650 train loss:3.824267 +step:3651 train loss:3.818436 +step:3652 train loss:3.809136 +step:3653 train loss:3.733562 +step:3654 train loss:3.803877 +step:3655 train loss:3.791913 +step:3656 train loss:3.825465 +step:3657 train loss:3.842511 +step:3658 train loss:3.838395 +step:3659 train loss:3.825865 +step:3660 train loss:3.791844 +step:3661 train loss:3.824830 +step:3662 train loss:3.796990 +step:3663 train loss:3.836090 +step:3664 train loss:3.788031 +step:3665 train loss:3.830191 +step:3666 train loss:3.872600 +step:3667 train loss:3.955695 +step:3668 train loss:3.842560 +step:3669 train loss:3.799033 +step:3670 train loss:3.845787 +step:3671 train loss:3.803763 +step:3672 train loss:3.840236 +step:3673 train loss:3.825331 +step:3674 train loss:3.840171 +step:3675 train loss:3.854658 +step:3676 train loss:3.821139 +step:3677 train loss:3.779662 +step:3678 train loss:3.844449 +step:3679 train loss:3.744817 +step:3680 train loss:3.845428 +step:3681 train loss:3.877219 +step:3682 train loss:3.855931 +step:3683 train loss:3.804948 +step:3684 train loss:3.797786 +step:3685 train loss:3.826606 +step:3686 train loss:3.858566 +step:3687 train loss:3.807072 +step:3688 train loss:3.786890 +step:3689 train loss:3.819269 +step:3690 train loss:3.809006 +step:3691 train loss:3.791645 +step:3692 train loss:3.845394 +step:3693 train loss:3.979919 +step:3694 train loss:3.798711 +step:3695 train loss:3.856131 +step:3696 train loss:3.819572 +step:3697 train loss:3.811616 +step:3698 train loss:3.749703 +step:3699 train loss:3.776718 +step:3700 train loss:3.811016 +step:3701 train loss:3.826075 +step:3702 train loss:3.847190 +step:3703 train loss:3.805008 +step:3704 train loss:3.846590 +step:3705 train loss:3.831247 +step:3706 train loss:3.780639 +step:3707 train loss:3.837203 +step:3708 train loss:3.812095 +step:3709 train loss:3.730866 +step:3710 train loss:3.862912 +step:3711 train loss:3.808162 +step:3712 train loss:3.850432 +step:3713 train loss:3.797854 +step:3714 train loss:3.815384 +step:3715 train loss:3.935511 +step:3716 train loss:3.836754 +step:3717 train loss:3.815439 +step:3718 train loss:3.818727 +step:3719 train loss:3.813847 +step:3720 train loss:3.823994 +step:3721 train loss:3.879772 +step:3722 train loss:3.889254 +step:3723 train loss:3.781241 +step:3724 train loss:3.834987 +step:3725 train loss:3.813835 +step:3726 train loss:3.833852 +step:3727 train loss:3.906192 +step:3728 train loss:3.866810 +step:3729 train loss:3.769515 +step:3730 train loss:3.789212 +step:3731 train loss:3.807513 +step:3732 train loss:3.959790 +step:3733 train loss:3.821880 +step:3734 train loss:3.825194 +step:3735 train loss:3.766585 +step:3736 train loss:3.820467 +step:3737 train loss:3.872608 +step:3738 train loss:3.890714 +step:3739 train loss:3.812769 +step:3740 train loss:3.712068 +step:3741 train loss:3.920073 +step:3742 train loss:3.831789 +step:3743 train loss:3.806459 +step:3744 train loss:3.813329 +step:3745 train loss:3.826009 +step:3746 train loss:3.790761 +step:3747 train loss:3.807638 +step:3748 train loss:3.845476 +step:3749 train loss:3.831982 +step:3750 validation loss:3.752159 +step:3750 train loss:3.838239 +step:3751 train loss:3.931221 +step:3752 
train loss:3.860609 +step:3753 train loss:3.777088 +step:3754 train loss:3.832661 +step:3755 train loss:4.001384 +step:3756 train loss:3.791893 +step:3757 train loss:3.784060 +step:3758 train loss:3.810101 +step:3759 train loss:3.758479 +step:3760 train loss:3.758176 +step:3761 train loss:3.809692 +step:3762 train loss:3.800029 +step:3763 train loss:3.801899 +step:3764 train loss:3.797140 +step:3765 train loss:3.791040 +step:3766 train loss:3.764820 +step:3767 train loss:3.844454 +step:3768 train loss:3.787253 +step:3769 train loss:4.042761 +step:3770 train loss:3.836545 +step:3771 train loss:3.852539 +step:3772 train loss:3.805035 +step:3773 train loss:3.800214 +step:3774 train loss:3.810764 +step:3775 train loss:3.800066 +step:3776 train loss:3.805671 +step:3777 train loss:3.764714 +step:3778 train loss:3.776358 +step:3779 train loss:3.766496 +step:3780 train loss:3.849123 +step:3781 train loss:3.817210 +step:3782 train loss:3.732807 +step:3783 train loss:3.837399 +step:3784 train loss:3.847374 +step:3785 train loss:3.759204 +step:3786 train loss:3.869300 +step:3787 train loss:3.779212 +step:3788 train loss:3.792390 +step:3789 train loss:3.694572 +step:3790 train loss:3.816360 +step:3791 train loss:3.838601 +step:3792 train loss:3.810169 +step:3793 train loss:3.806709 +step:3794 train loss:3.835543 +step:3795 train loss:3.800452 +step:3796 train loss:3.821295 +step:3797 train loss:3.795696 +step:3798 train loss:3.802403 +step:3799 train loss:3.813248 +step:3800 train loss:3.721062 +step:3801 train loss:3.837418 +step:3802 train loss:3.768446 +step:3803 train loss:3.844642 +step:3804 train loss:3.856959 +step:3805 train loss:3.815436 +step:3806 train loss:3.831679 +step:3807 train loss:3.852515 +step:3808 train loss:3.807165 +step:3809 train loss:3.825625 +step:3810 train loss:3.823771 +step:3811 train loss:3.809173 +step:3812 train loss:3.815848 +step:3813 train loss:3.767821 +step:3814 train loss:3.808391 +step:3815 train loss:3.813171 +step:3816 train loss:3.829644 +step:3817 train loss:3.847706 +step:3818 train loss:3.821380 +step:3819 train loss:3.833129 +step:3820 train loss:3.835170 +step:3821 train loss:3.789081 +step:3822 train loss:3.873070 +step:3823 train loss:3.771502 +step:3824 train loss:3.782128 +step:3825 train loss:3.791189 +step:3826 train loss:3.848599 +step:3827 train loss:3.877078 +step:3828 train loss:3.763390 +step:3829 train loss:3.785455 +step:3830 train loss:3.847053 +step:3831 train loss:3.779407 +step:3832 train loss:3.839948 +step:3833 train loss:3.781784 +step:3834 train loss:3.742980 +step:3835 train loss:3.788945 +step:3836 train loss:3.764615 +step:3837 train loss:3.835043 +step:3838 train loss:3.789190 +step:3839 train loss:3.828460 +step:3840 train loss:3.842514 +step:3841 train loss:3.785992 +step:3842 train loss:3.822226 +step:3843 train loss:3.838845 +step:3844 train loss:3.807965 +step:3845 train loss:3.824213 +step:3846 train loss:3.871966 +step:3847 train loss:3.766000 +step:3848 train loss:3.776147 +step:3849 train loss:3.778829 +step:3850 train loss:3.809180 +step:3851 train loss:3.937299 +step:3852 train loss:3.914057 +step:3853 train loss:3.818269 +step:3854 train loss:3.776549 +step:3855 train loss:3.835765 +step:3856 train loss:3.758254 +step:3857 train loss:3.817451 +step:3858 train loss:3.736860 +step:3859 train loss:3.780633 +step:3860 train loss:3.845233 +step:3861 train loss:3.821200 +step:3862 train loss:3.761378 +step:3863 train loss:3.810354 +step:3864 train loss:3.775445 +step:3865 train loss:3.820122 +step:3866 train loss:3.833907 
+step:3867 train loss:3.833447 +step:3868 train loss:3.782928 +step:3869 train loss:3.780365 +step:3870 train loss:3.760248 +step:3871 train loss:3.749229 +step:3872 train loss:3.885140 +step:3873 train loss:3.810708 +step:3874 train loss:3.824115 +step:3875 train loss:3.932544 +step:3876 train loss:3.806652 +step:3877 train loss:3.834640 +step:3878 train loss:3.860522 +step:3879 train loss:3.846688 +step:3880 train loss:3.929348 +step:3881 train loss:3.752411 +step:3882 train loss:3.787752 +step:3883 train loss:3.798165 +step:3884 train loss:3.792948 +step:3885 train loss:3.806390 +step:3886 train loss:3.868735 +step:3887 train loss:3.847250 +step:3888 train loss:3.810826 +step:3889 train loss:3.780563 +step:3890 train loss:3.813497 +step:3891 train loss:3.833685 +step:3892 train loss:3.742182 +step:3893 train loss:3.847778 +step:3894 train loss:3.798685 +step:3895 train loss:3.818359 +step:3896 train loss:3.808886 +step:3897 train loss:3.771848 +step:3898 train loss:3.833493 +step:3899 train loss:3.879133 +step:3900 train loss:3.829878 +step:3901 train loss:3.847043 +step:3902 train loss:3.779234 +step:3903 train loss:3.788171 +step:3904 train loss:3.821554 +step:3905 train loss:3.758358 +step:3906 train loss:3.791983 +step:3907 train loss:3.826186 +step:3908 train loss:3.902980 +step:3909 train loss:3.790872 +step:3910 train loss:3.821715 +step:3911 train loss:3.836069 +step:3912 train loss:3.782570 +step:3913 train loss:3.801480 +step:3914 train loss:3.818553 +step:3915 train loss:3.783809 +step:3916 train loss:3.822757 +step:3917 train loss:3.857277 +step:3918 train loss:3.842742 +step:3919 train loss:3.823930 +step:3920 train loss:3.796182 +step:3921 train loss:3.837357 +step:3922 train loss:3.839852 +step:3923 train loss:3.828235 +step:3924 train loss:3.765152 +step:3925 train loss:3.958813 +step:3926 train loss:3.810880 +step:3927 train loss:3.788485 +step:3928 train loss:3.867005 +step:3929 train loss:3.928970 +step:3930 train loss:3.821471 +step:3931 train loss:3.762557 +step:3932 train loss:3.809405 +step:3933 train loss:3.825684 +step:3934 train loss:3.781047 +step:3935 train loss:3.756874 +step:3936 train loss:3.851919 +step:3937 train loss:3.811707 +step:3938 train loss:3.816379 +step:3939 train loss:3.842167 +step:3940 train loss:3.793009 +step:3941 train loss:3.879722 +step:3942 train loss:3.837809 +step:3943 train loss:3.820549 +step:3944 train loss:3.872206 +step:3945 train loss:3.779887 +step:3946 train loss:3.726497 +step:3947 train loss:3.855005 +step:3948 train loss:3.821638 +step:3949 train loss:3.991575 +step:3950 train loss:3.790107 +step:3951 train loss:3.718295 +step:3952 train loss:3.684468 +step:3953 train loss:3.754794 +step:3954 train loss:3.809615 +step:3955 train loss:3.833695 +step:3956 train loss:3.788122 +step:3957 train loss:3.844482 +step:3958 train loss:3.819970 +step:3959 train loss:3.854869 +step:3960 train loss:3.777882 +step:3961 train loss:3.805094 +step:3962 train loss:3.812911 +step:3963 train loss:3.786850 +step:3964 train loss:3.766367 +step:3965 train loss:3.826585 +step:3966 train loss:3.775221 +step:3967 train loss:3.819998 +step:3968 train loss:3.839492 +step:3969 train loss:3.750175 +step:3970 train loss:3.865813 +step:3971 train loss:3.773883 +step:3972 train loss:3.806820 +step:3973 train loss:3.768708 +step:3974 train loss:3.855546 +step:3975 train loss:3.812228 +step:3976 train loss:3.765422 +step:3977 train loss:3.822207 +step:3978 train loss:3.793765 +step:3979 train loss:3.777669 +step:3980 train loss:3.846492 +step:3981 train 
loss:3.780988 +step:3982 train loss:3.797014 +step:3983 train loss:3.783634 +step:3984 train loss:3.817140 +step:3985 train loss:3.799279 +step:3986 train loss:3.806690 +step:3987 train loss:3.817212 +step:3988 train loss:3.753604 +step:3989 train loss:3.829645 +step:3990 train loss:3.817690 +step:3991 train loss:3.831078 +step:3992 train loss:3.793048 +step:3993 train loss:3.827279 +step:3994 train loss:3.769060 +step:3995 train loss:3.828893 +step:3996 train loss:3.746746 +step:3997 train loss:3.824601 +step:3998 train loss:3.708189 +step:3999 train loss:3.860792 +step:4000 validation loss:3.728641 total_sharp:7.9664e-03 L1_sharp:2.2896e-01 L2_sharp:1.3010e-01 L3_sharp:1.5447e-01 L4_sharp:1.1679e-01 L5_sharp:1.2373e-01 L6_sharp:1.2806e-01 L7_sharp:1.6083e-01 L8_sharp:1.4604e-01 L9_sharp:1.1296e-01 L10_sharp:9.2073e-02 L11_sharp:9.0315e-02 L12_sharp:1.5929e-01 total_fnorm:1.3223e+00 total_l1_linf:7.3006e+03 total_spectral:1.3223e+00 L1_fnorm:3.1730e-02 L2_fnorm:3.0261e-02 L3_fnorm:2.9819e-02 L4_fnorm:3.0766e-02 L5_fnorm:3.1511e-02 L6_fnorm:3.1763e-02 L7_fnorm:3.1903e-02 L8_fnorm:3.1896e-02 L9_fnorm:3.1906e-02 L10_fnorm:3.2001e-02 L11_fnorm:3.2022e-02 L12_fnorm:3.2129e-02 L1_l1linf:2.6096e-01 L2_l1linf:2.8551e-01 L3_l1linf:2.8410e-01 L4_l1linf:3.0432e-01 L5_l1linf:2.9796e-01 L6_l1linf:2.7592e-01 L7_l1linf:2.7343e-01 L8_l1linf:2.7980e-01 L9_l1linf:2.8238e-01 L10_l1linf:2.8560e-01 L11_l1linf:3.1022e-01 L12_l1linf:3.2919e-01 L1_spectral:5.9127e-03 L2_spectral:6.4120e-03 L3_spectral:6.3771e-03 L4_spectral:6.8459e-03 L5_spectral:6.6730e-03 L6_spectral:6.2275e-03 L7_spectral:6.1757e-03 L8_spectral:6.2855e-03 L9_spectral:6.3447e-03 L10_spectral:6.4697e-03 L11_spectral:6.9519e-03 L12_spectral:7.3171e-03 ip_v_neg_g:7.5527e-03 cos_v_neg_g:8.8703e-04 v_norm:1.3223e+00 g_norm:6.4393e+00 hv_norm:2.1935e+00 cos_v_hv:4.8024e-03 hg_norm:1.3832e+03 cos_g_hg:5.2708e-01 v_par:2.9008e-05 v_perp:1.3223e+00 L1_cos_v_neg_g:1.1063e-02 L1_v_norm:3.1730e-02 L2_cos_v_neg_g:1.0791e-02 L2_v_norm:3.0261e-02 L3_cos_v_neg_g:1.1523e-02 L3_v_norm:2.9819e-02 L4_cos_v_neg_g:9.9508e-03 L4_v_norm:3.0766e-02 L5_cos_v_neg_g:9.4405e-03 L5_v_norm:3.1511e-02 L6_cos_v_neg_g:9.9460e-03 L6_v_norm:3.1763e-02 L7_cos_v_neg_g:1.1058e-02 L7_v_norm:3.1903e-02 L8_cos_v_neg_g:1.0916e-02 L8_v_norm:3.1896e-02 L9_cos_v_neg_g:9.5535e-03 L9_v_norm:3.1906e-02 L10_cos_v_neg_g:8.9418e-03 L10_v_norm:3.2001e-02 L11_cos_v_neg_g:8.2638e-03 L11_v_norm:3.2022e-02 L12_cos_v_neg_g:8.9512e-03 L12_v_norm:3.2129e-02 +step:4000 train loss:3.739454 +step:4001 train loss:3.818379 +step:4002 train loss:3.795800 +step:4003 train loss:3.830436 +step:4004 train loss:3.741684 +step:4005 train loss:3.831798 +step:4006 train loss:3.839695 +step:4007 train loss:3.762360 +step:4008 train loss:3.717509 +step:4009 train loss:3.804122 +step:4010 train loss:3.780909 +step:4011 train loss:3.788481 +step:4012 train loss:3.800187 +step:4013 train loss:3.775093 +step:4014 train loss:3.787989 +step:4015 train loss:3.783943 +step:4016 train loss:3.790882 +step:4017 train loss:3.755990 +step:4018 train loss:3.692680 +step:4019 train loss:3.749967 +step:4020 train loss:3.817178 +step:4021 train loss:3.762761 +step:4022 train loss:3.768095 +step:4023 train loss:3.784694 +step:4024 train loss:3.685959 +step:4025 train loss:3.815514 +step:4026 train loss:3.803769 +step:4027 train loss:3.813020 +step:4028 train loss:3.829635 +step:4029 train loss:3.858619 +step:4030 train loss:3.774626 +step:4031 train loss:3.817958 +step:4032 train loss:3.775262 +step:4033 train loss:3.807076 +step:4034 
train loss:3.825705 +step:4035 train loss:3.803804 +step:4036 train loss:3.799198 +step:4037 train loss:3.815345 +step:4038 train loss:3.738471 +step:4039 train loss:3.788586 +step:4040 train loss:3.767708 +step:4041 train loss:3.766084 +step:4042 train loss:3.783242 +step:4043 train loss:3.769491 +step:4044 train loss:3.806383 +step:4045 train loss:3.810463 +step:4046 train loss:3.767388 +step:4047 train loss:3.796790 +step:4048 train loss:3.803074 +step:4049 train loss:3.765148 +step:4050 train loss:3.867507 +step:4051 train loss:3.782210 +step:4052 train loss:3.801027 +step:4053 train loss:3.847788 +step:4054 train loss:3.822221 +step:4055 train loss:3.838789 +step:4056 train loss:3.836095 +step:4057 train loss:3.774907 +step:4058 train loss:3.755971 +step:4059 train loss:3.838368 +step:4060 train loss:3.780450 +step:4061 train loss:3.750329 +step:4062 train loss:3.862638 +step:4063 train loss:3.813506 +step:4064 train loss:3.781795 +step:4065 train loss:3.768961 +step:4066 train loss:3.796288 +step:4067 train loss:3.816631 +step:4068 train loss:3.784462 +step:4069 train loss:3.843357 +step:4070 train loss:3.760592 +step:4071 train loss:3.731156 +step:4072 train loss:3.803879 +step:4073 train loss:3.739854 +step:4074 train loss:3.795689 +step:4075 train loss:3.864507 +step:4076 train loss:3.711580 +step:4077 train loss:3.794612 +step:4078 train loss:3.893851 +step:4079 train loss:3.839425 +step:4080 train loss:3.781021 +step:4081 train loss:3.754596 +step:4082 train loss:3.803859 +step:4083 train loss:3.743718 +step:4084 train loss:3.761942 +step:4085 train loss:3.994098 +step:4086 train loss:3.766758 +step:4087 train loss:3.807654 +step:4088 train loss:3.794435 +step:4089 train loss:3.784290 +step:4090 train loss:3.804088 +step:4091 train loss:3.828521 +step:4092 train loss:3.752683 +step:4093 train loss:3.778177 +step:4094 train loss:3.803656 +step:4095 train loss:3.755260 +step:4096 train loss:3.788794 +step:4097 train loss:3.787328 +step:4098 train loss:3.763648 +step:4099 train loss:3.761539 +step:4100 train loss:3.817209 +step:4101 train loss:3.740713 +step:4102 train loss:3.773174 +step:4103 train loss:3.979173 +step:4104 train loss:3.794825 +step:4105 train loss:3.757730 +step:4106 train loss:3.835727 +step:4107 train loss:3.752088 +step:4108 train loss:3.759522 +step:4109 train loss:3.812501 +step:4110 train loss:3.820184 +step:4111 train loss:3.800552 +step:4112 train loss:3.815605 +step:4113 train loss:3.772735 +step:4114 train loss:3.723916 +step:4115 train loss:3.762232 +step:4116 train loss:3.750137 +step:4117 train loss:3.763348 +step:4118 train loss:3.815168 +step:4119 train loss:3.841054 +step:4120 train loss:3.766212 +step:4121 train loss:3.756934 +step:4122 train loss:3.815869 +step:4123 train loss:3.833739 +step:4124 train loss:3.811154 +step:4125 train loss:3.843927 +step:4126 train loss:3.779273 +step:4127 train loss:3.802406 +step:4128 train loss:3.789511 +step:4129 train loss:3.838151 +step:4130 train loss:3.767832 +step:4131 train loss:3.806180 +step:4132 train loss:3.819603 +step:4133 train loss:3.772274 +step:4134 train loss:3.827517 +step:4135 train loss:3.758674 +step:4136 train loss:3.781312 +step:4137 train loss:3.758806 +step:4138 train loss:3.759566 +step:4139 train loss:3.802537 +step:4140 train loss:3.765616 +step:4141 train loss:3.730082 +step:4142 train loss:3.772984 +step:4143 train loss:3.815225 +step:4144 train loss:3.764714 +step:4145 train loss:3.731174 +step:4146 train loss:3.795530 +step:4147 train loss:3.774634 +step:4148 train loss:3.769559 
+step:4149 train loss:3.849686 +step:4150 train loss:3.812693 +step:4151 train loss:3.792886 +step:4152 train loss:3.818593 +step:4153 train loss:3.823466 +step:4154 train loss:3.829261 +step:4155 train loss:3.854949 +step:4156 train loss:3.725679 +step:4157 train loss:3.749735 +step:4158 train loss:3.808339 +step:4159 train loss:3.706044 +step:4160 train loss:3.802160 +step:4161 train loss:3.801808 +step:4162 train loss:3.706988 +step:4163 train loss:3.788362 +step:4164 train loss:3.739865 +step:4165 train loss:3.738212 +step:4166 train loss:3.804134 +step:4167 train loss:3.803305 +step:4168 train loss:3.793570 +step:4169 train loss:3.814594 +step:4170 train loss:3.938282 +step:4171 train loss:3.792667 +step:4172 train loss:3.808485 +step:4173 train loss:3.805548 +step:4174 train loss:3.767751 +step:4175 train loss:3.861414 +step:4176 train loss:3.781393 +step:4177 train loss:3.805292 +step:4178 train loss:3.784517 +step:4179 train loss:3.736777 +step:4180 train loss:3.732120 +step:4181 train loss:3.783354 +step:4182 train loss:3.767637 +step:4183 train loss:3.700979 +step:4184 train loss:3.777127 +step:4185 train loss:3.841859 +step:4186 train loss:3.816211 +step:4187 train loss:3.825201 +step:4188 train loss:3.796478 +step:4189 train loss:3.757718 +step:4190 train loss:3.804449 +step:4191 train loss:3.750762 +step:4192 train loss:3.836879 +step:4193 train loss:3.750893 +step:4194 train loss:3.730286 +step:4195 train loss:3.726913 +step:4196 train loss:3.800372 +step:4197 train loss:3.810965 +step:4198 train loss:3.733807 +step:4199 train loss:3.816617 +step:4200 train loss:3.777241 +step:4201 train loss:3.759151 +step:4202 train loss:3.777734 +step:4203 train loss:3.782925 +step:4204 train loss:3.779819 +step:4205 train loss:3.792615 +step:4206 train loss:3.812735 +step:4207 train loss:3.813104 +step:4208 train loss:3.775335 +step:4209 train loss:3.838199 +step:4210 train loss:3.867519 +step:4211 train loss:3.749514 +step:4212 train loss:3.794255 +step:4213 train loss:3.742841 +step:4214 train loss:3.750271 +step:4215 train loss:3.766248 +step:4216 train loss:3.742967 +step:4217 train loss:3.762849 +step:4218 train loss:3.805889 +step:4219 train loss:3.796926 +step:4220 train loss:3.882860 +step:4221 train loss:3.770199 +step:4222 train loss:3.828494 +step:4223 train loss:3.751785 +step:4224 train loss:3.825293 +step:4225 train loss:3.751034 +step:4226 train loss:3.807047 +step:4227 train loss:3.785176 +step:4228 train loss:3.755617 +step:4229 train loss:3.764333 +step:4230 train loss:3.752085 +step:4231 train loss:3.740446 +step:4232 train loss:3.785621 +step:4233 train loss:3.697793 +step:4234 train loss:3.777686 +step:4235 train loss:3.853918 +step:4236 train loss:3.821294 +step:4237 train loss:3.808735 +step:4238 train loss:3.815126 +step:4239 train loss:3.866959 +step:4240 train loss:3.776539 +step:4241 train loss:3.702596 +step:4242 train loss:3.821658 +step:4243 train loss:3.817855 +step:4244 train loss:3.833446 +step:4245 train loss:3.886263 +step:4246 train loss:3.760456 +step:4247 train loss:3.818361 +step:4248 train loss:3.769207 +step:4249 train loss:3.780424 +step:4250 validation loss:3.708467 +step:4250 train loss:3.758742 +step:4251 train loss:3.853828 +step:4252 train loss:3.768429 +step:4253 train loss:3.755035 +step:4254 train loss:3.769709 +step:4255 train loss:3.746581 +step:4256 train loss:3.762871 +step:4257 train loss:3.822069 +step:4258 train loss:3.682046 +step:4259 train loss:3.747676 +step:4260 train loss:3.810459 +step:4261 train loss:3.798347 +step:4262 
train loss:3.936681 +step:4263 train loss:3.868844 +step:4264 train loss:3.808344 +step:4265 train loss:3.799159 +step:4266 train loss:3.799292 +step:4267 train loss:3.799021 +step:4268 train loss:3.743321 +step:4269 train loss:3.839253 +step:4270 train loss:3.818923 +step:4271 train loss:3.732033 +step:4272 train loss:3.789233 +step:4273 train loss:3.769778 +step:4274 train loss:3.751630 +step:4275 train loss:3.770660 +step:4276 train loss:3.737434 +step:4277 train loss:3.870163 +step:4278 train loss:3.718538 +step:4279 train loss:3.751255 +step:4280 train loss:3.833561 +step:4281 train loss:3.815308 +step:4282 train loss:3.879922 +step:4283 train loss:3.738297 +step:4284 train loss:3.764702 +step:4285 train loss:3.769668 +step:4286 train loss:3.834522 +step:4287 train loss:3.832529 +step:4288 train loss:3.813361 +step:4289 train loss:3.765135 +step:4290 train loss:3.777041 +step:4291 train loss:3.736070 +step:4292 train loss:3.774963 +step:4293 train loss:3.791434 +step:4294 train loss:3.776768 +step:4295 train loss:3.710311 +step:4296 train loss:3.782567 +step:4297 train loss:3.763524 +step:4298 train loss:3.775079 +step:4299 train loss:3.772064 +step:4300 train loss:3.890709 +step:4301 train loss:3.706620 +step:4302 train loss:3.844562 +step:4303 train loss:3.723044 +step:4304 train loss:3.730756 +step:4305 train loss:3.751489 +step:4306 train loss:3.827700 +step:4307 train loss:3.745502 +step:4308 train loss:3.738399 +step:4309 train loss:3.809714 +step:4310 train loss:3.746080 +step:4311 train loss:3.802227 +step:4312 train loss:3.795173 +step:4313 train loss:3.789136 +step:4314 train loss:3.734084 +step:4315 train loss:3.767749 +step:4316 train loss:3.716063 +step:4317 train loss:3.772734 +step:4318 train loss:3.809474 +step:4319 train loss:3.762334 +step:4320 train loss:3.820644 +step:4321 train loss:3.803900 +step:4322 train loss:3.758275 +step:4323 train loss:3.698453 +step:4324 train loss:3.788179 +step:4325 train loss:3.763827 +step:4326 train loss:3.756612 +step:4327 train loss:3.861707 +step:4328 train loss:3.773288 +step:4329 train loss:3.729259 +step:4330 train loss:3.775239 +step:4331 train loss:3.789736 +step:4332 train loss:3.814815 +step:4333 train loss:3.771810 +step:4334 train loss:3.797239 +step:4335 train loss:3.794967 +step:4336 train loss:3.802652 +step:4337 train loss:3.771962 +step:4338 train loss:3.893022 +step:4339 train loss:3.793047 +step:4340 train loss:3.798938 +step:4341 train loss:3.767242 +step:4342 train loss:3.780530 +step:4343 train loss:3.898588 +step:4344 train loss:3.791085 +step:4345 train loss:3.809020 +step:4346 train loss:3.824486 +step:4347 train loss:3.831079 +step:4348 train loss:3.742915 +step:4349 train loss:3.828595 +step:4350 train loss:3.767135 +step:4351 train loss:3.720828 +step:4352 train loss:3.796977 +step:4353 train loss:3.743176 +step:4354 train loss:3.801020 +step:4355 train loss:3.759590 +step:4356 train loss:3.783535 +step:4357 train loss:3.761135 +step:4358 train loss:3.857657 +step:4359 train loss:3.810946 +step:4360 train loss:3.722602 +step:4361 train loss:3.768795 +step:4362 train loss:3.792301 +step:4363 train loss:3.807359 +step:4364 train loss:3.778620 +step:4365 train loss:3.759582 +step:4366 train loss:3.804517 +step:4367 train loss:3.818219 +step:4368 train loss:3.796801 +step:4369 train loss:3.661725 +step:4370 train loss:3.795027 +step:4371 train loss:3.706400 +step:4372 train loss:3.853372 +step:4373 train loss:3.792395 +step:4374 train loss:3.758527 +step:4375 train loss:3.805428 +step:4376 train loss:3.815716 
+step:4377 train loss:3.751220 +step:4378 train loss:3.758395 +step:4379 train loss:3.839965 +step:4380 train loss:3.824879 +step:4381 train loss:3.723628 +step:4382 train loss:3.772175 +step:4383 train loss:3.799284 +step:4384 train loss:3.796906 +step:4385 train loss:3.720813 +step:4386 train loss:3.779147 +step:4387 train loss:3.747535 +step:4388 train loss:3.767165 +step:4389 train loss:3.797038 +step:4390 train loss:3.835851 +step:4391 train loss:3.762124 +step:4392 train loss:3.835975 +step:4393 train loss:3.796368 +step:4394 train loss:3.733569 +step:4395 train loss:3.791854 +step:4396 train loss:3.764385 +step:4397 train loss:3.806004 +step:4398 train loss:3.756769 +step:4399 train loss:3.751547 +step:4400 train loss:3.752285 +step:4401 train loss:3.811512 +step:4402 train loss:3.812520 +step:4403 train loss:3.761240 +step:4404 train loss:3.794118 +step:4405 train loss:3.712579 +step:4406 train loss:3.791067 +step:4407 train loss:3.725724 +step:4408 train loss:3.824887 +step:4409 train loss:3.779155 +step:4410 train loss:3.785036 +step:4411 train loss:3.749373 +step:4412 train loss:3.857897 +step:4413 train loss:3.756352 +step:4414 train loss:3.763780 +step:4415 train loss:3.749004 +step:4416 train loss:3.745333 +step:4417 train loss:3.734116 +step:4418 train loss:3.808852 +step:4419 train loss:3.778274 +step:4420 train loss:3.783819 +step:4421 train loss:3.812382 +step:4422 train loss:3.826269 +step:4423 train loss:3.785457 +step:4424 train loss:3.773518 +step:4425 train loss:3.735485 +step:4426 train loss:3.809480 +step:4427 train loss:3.768708 +step:4428 train loss:3.708860 +step:4429 train loss:3.765567 +step:4430 train loss:3.808940 +step:4431 train loss:3.802694 +step:4432 train loss:3.706970 +step:4433 train loss:3.762390 +step:4434 train loss:3.761370 +step:4435 train loss:3.788354 +step:4436 train loss:3.726650 +step:4437 train loss:3.806255 +step:4438 train loss:3.772362 +step:4439 train loss:3.776940 +step:4440 train loss:3.772942 +step:4441 train loss:3.776893 +step:4442 train loss:3.827872 +step:4443 train loss:3.761195 +step:4444 train loss:3.841854 +step:4445 train loss:3.807124 +step:4446 train loss:3.739356 +step:4447 train loss:3.790868 +step:4448 train loss:3.808317 +step:4449 train loss:3.744184 +step:4450 train loss:3.762355 +step:4451 train loss:3.820071 +step:4452 train loss:3.871109 +step:4453 train loss:3.803554 +step:4454 train loss:3.773593 +step:4455 train loss:3.821183 +step:4456 train loss:3.767539 +step:4457 train loss:3.767059 +step:4458 train loss:3.776786 +step:4459 train loss:3.809741 +step:4460 train loss:3.722589 +step:4461 train loss:3.693347 +step:4462 train loss:3.748846 +step:4463 train loss:3.770865 +step:4464 train loss:3.738315 +step:4465 train loss:3.772646 +step:4466 train loss:3.871120 +step:4467 train loss:3.750707 +step:4468 train loss:3.745928 +step:4469 train loss:3.737091 +step:4470 train loss:3.711894 +step:4471 train loss:3.778149 +step:4472 train loss:3.699717 +step:4473 train loss:3.785537 +step:4474 train loss:3.812136 +step:4475 train loss:3.773691 +step:4476 train loss:3.735394 +step:4477 train loss:3.719459 +step:4478 train loss:3.780656 +step:4479 train loss:3.884149 +step:4480 train loss:3.719676 +step:4481 train loss:3.787051 +step:4482 train loss:3.748666 +step:4483 train loss:3.743973 +step:4484 train loss:3.786438 +step:4485 train loss:3.752172 +step:4486 train loss:3.850476 +step:4487 train loss:3.746981 +step:4488 train loss:3.746811 +step:4489 train loss:3.703806 +step:4490 train loss:3.786202 +step:4491 train 
loss:3.734081 +step:4492 train loss:3.769220 +step:4493 train loss:3.754045 +step:4494 train loss:3.745567 +step:4495 train loss:3.815213 +step:4496 train loss:3.759750 +step:4497 train loss:3.836400 +step:4498 train loss:3.729642 +step:4499 train loss:3.782844 +step:4500 validation loss:3.690533 total_sharp:6.1674e-03 L1_sharp:2.2385e-01 L2_sharp:1.3488e-01 L3_sharp:1.6120e-01 L4_sharp:9.4786e-02 L5_sharp:9.8963e-02 L6_sharp:1.1955e-01 L7_sharp:1.4022e-01 L8_sharp:1.1630e-01 L9_sharp:9.5853e-02 L10_sharp:7.6392e-02 L11_sharp:6.1144e-02 L12_sharp:7.9100e-02 total_fnorm:1.3447e+00 total_l1_linf:7.4245e+03 total_spectral:1.3447e+00 L1_fnorm:3.2092e-02 L2_fnorm:3.0557e-02 L3_fnorm:3.0002e-02 L4_fnorm:3.0903e-02 L5_fnorm:3.1672e-02 L6_fnorm:3.1887e-02 L7_fnorm:3.1895e-02 L8_fnorm:3.1840e-02 L9_fnorm:3.1805e-02 L10_fnorm:3.1853e-02 L11_fnorm:3.1937e-02 L12_fnorm:3.1738e-02 L1_l1linf:3.0211e-01 L2_l1linf:3.1632e-01 L3_l1linf:3.1355e-01 L4_l1linf:3.1225e-01 L5_l1linf:3.0850e-01 L6_l1linf:2.7448e-01 L7_l1linf:2.5536e-01 L8_l1linf:2.6310e-01 L9_l1linf:2.7672e-01 L10_l1linf:2.9275e-01 L11_l1linf:3.0534e-01 L12_l1linf:2.7438e-01 L1_spectral:6.7984e-03 L2_spectral:7.1664e-03 L3_spectral:7.0861e-03 L4_spectral:7.0154e-03 L5_spectral:6.8764e-03 L6_spectral:6.1590e-03 L7_spectral:5.8095e-03 L8_spectral:5.9633e-03 L9_spectral:6.2241e-03 L10_spectral:6.5493e-03 L11_spectral:6.8949e-03 L12_spectral:6.2825e-03 ip_v_neg_g:6.4659e-03 cos_v_neg_g:7.3662e-04 v_norm:1.3447e+00 g_norm:6.5275e+00 hv_norm:1.8151e+00 cos_v_hv:4.5693e-03 hg_norm:3.7202e+03 cos_g_hg:4.7119e-01 v_par:2.6937e-05 v_perp:1.3447e+00 L1_cos_v_neg_g:1.0299e-02 L1_v_norm:3.2092e-02 L2_cos_v_neg_g:9.9590e-03 L2_v_norm:3.0557e-02 L3_cos_v_neg_g:1.0046e-02 L3_v_norm:3.0002e-02 L4_cos_v_neg_g:8.2202e-03 L4_v_norm:3.0903e-02 L5_cos_v_neg_g:8.5625e-03 L5_v_norm:3.1672e-02 L6_cos_v_neg_g:8.2582e-03 L6_v_norm:3.1887e-02 L7_cos_v_neg_g:8.8823e-03 L7_v_norm:3.1895e-02 L8_cos_v_neg_g:7.7586e-03 L8_v_norm:3.1840e-02 L9_cos_v_neg_g:7.9312e-03 L9_v_norm:3.1805e-02 L10_cos_v_neg_g:7.3942e-03 L10_v_norm:3.1853e-02 L11_cos_v_neg_g:7.0272e-03 L11_v_norm:3.1937e-02 L12_cos_v_neg_g:6.1306e-03 L12_v_norm:3.1738e-02 +step:4500 train loss:3.689877 +step:4501 train loss:3.750805 +step:4502 train loss:3.875883 +step:4503 train loss:3.777239 +step:4504 train loss:3.786554 +step:4505 train loss:3.773568 +step:4506 train loss:3.744985 +step:4507 train loss:3.816596 +step:4508 train loss:3.750515 +step:4509 train loss:3.751534 +step:4510 train loss:3.786504 +step:4511 train loss:3.743357 +step:4512 train loss:3.760371 +step:4513 train loss:3.820735 +step:4514 train loss:3.728210 +step:4515 train loss:3.836883 +step:4516 train loss:3.816709 +step:4517 train loss:3.772556 +step:4518 train loss:3.708410 +step:4519 train loss:3.744764 +step:4520 train loss:3.757793 +step:4521 train loss:3.696120 +step:4522 train loss:3.754161 +step:4523 train loss:3.803339 +step:4524 train loss:3.783608 +step:4525 train loss:3.708383 +step:4526 train loss:3.749155 +step:4527 train loss:3.736873 +step:4528 train loss:3.763446 +step:4529 train loss:3.761338 +step:4530 train loss:3.856880 +step:4531 train loss:3.744166 +step:4532 train loss:3.769832 +step:4533 train loss:3.741234 +step:4534 train loss:3.835303 +step:4535 train loss:3.736140 +step:4536 train loss:3.805974 +step:4537 train loss:3.785861 +step:4538 train loss:3.767945 +step:4539 train loss:3.789697 +step:4540 train loss:3.764881 +step:4541 train loss:3.730760 +step:4542 train loss:3.784651 +step:4543 train loss:3.866889 +step:4544 
train loss:3.808836 +step:4545 train loss:3.753814 +step:4546 train loss:3.845127 +step:4547 train loss:3.803438 +step:4548 train loss:3.807155 +step:4549 train loss:3.761392 +step:4550 train loss:3.732406 +step:4551 train loss:3.746377 +step:4552 train loss:3.748223 +step:4553 train loss:3.829310 +step:4554 train loss:3.725139 +step:4555 train loss:3.837188 +step:4556 train loss:3.772303 +step:4557 train loss:3.702394 +step:4558 train loss:3.786746 +step:4559 train loss:3.800760 +step:4560 train loss:3.736834 +step:4561 train loss:3.723046 +step:4562 train loss:3.766021 +step:4563 train loss:3.715107 +step:4564 train loss:3.740652 +step:4565 train loss:3.742734 +step:4566 train loss:3.715950 +step:4567 train loss:3.743096 +step:4568 train loss:3.740443 +step:4569 train loss:3.727924 +step:4570 train loss:3.777443 +step:4571 train loss:3.753829 +step:4572 train loss:3.747238 +step:4573 train loss:3.758169 +step:4574 train loss:3.900077 +step:4575 train loss:3.735799 +step:4576 train loss:3.727053 +step:4577 train loss:3.766300 +step:4578 train loss:3.803988 +step:4579 train loss:3.755692 +step:4580 train loss:3.815943 +step:4581 train loss:3.754848 +step:4582 train loss:3.749221 +step:4583 train loss:3.754337 +step:4584 train loss:3.727434 +step:4585 train loss:3.802829 +step:4586 train loss:3.795172 +step:4587 train loss:3.694137 +step:4588 train loss:3.735914 +step:4589 train loss:3.813156 +step:4590 train loss:3.784278 +step:4591 train loss:3.724675 +step:4592 train loss:3.807255 +step:4593 train loss:3.727498 +step:4594 train loss:3.757977 +step:4595 train loss:3.778412 +step:4596 train loss:3.719730 +step:4597 train loss:3.856024 +step:4598 train loss:3.774808 +step:4599 train loss:3.724663 +step:4600 train loss:3.735316 +step:4601 train loss:3.757958 +step:4602 train loss:3.709112 +step:4603 train loss:3.724981 +step:4604 train loss:3.829214 +step:4605 train loss:3.749378 +step:4606 train loss:3.771238 +step:4607 train loss:3.755121 +step:4608 train loss:3.793504 +step:4609 train loss:3.747917 +step:4610 train loss:3.798265 +step:4611 train loss:3.815312 +step:4612 train loss:3.815884 +step:4613 train loss:3.795442 +step:4614 train loss:3.785828 +step:4615 train loss:3.731839 +step:4616 train loss:3.712906 +step:4617 train loss:3.758667 +step:4618 train loss:3.771815 +step:4619 train loss:3.738919 +step:4620 train loss:3.748169 +step:4621 train loss:3.752431 +step:4622 train loss:3.684444 +step:4623 train loss:3.795581 +step:4624 train loss:3.781254 +step:4625 train loss:3.736447 +step:4626 train loss:3.780678 +step:4627 train loss:3.755316 +step:4628 train loss:3.738521 +step:4629 train loss:3.776019 +step:4630 train loss:3.833211 +step:4631 train loss:3.832766 +step:4632 train loss:3.726929 +step:4633 train loss:3.739187 +step:4634 train loss:3.816627 +step:4635 train loss:3.781626 +step:4636 train loss:3.797763 +step:4637 train loss:3.733705 +step:4638 train loss:3.742010 +step:4639 train loss:3.735827 +step:4640 train loss:3.746127 +step:4641 train loss:3.753907 +step:4642 train loss:3.786468 +step:4643 train loss:3.749100 +step:4644 train loss:3.771084 +step:4645 train loss:3.782876 +step:4646 train loss:3.744986 +step:4647 train loss:3.697835 +step:4648 train loss:3.808116 +step:4649 train loss:3.818642 +step:4650 train loss:3.763702 +step:4651 train loss:3.762301 +step:4652 train loss:3.751766 +step:4653 train loss:3.814350 +step:4654 train loss:3.807085 +step:4655 train loss:3.709794 +step:4656 train loss:3.745237 +step:4657 train loss:3.798997 +step:4658 train loss:3.756795 
+step:4659 train loss:3.768022 +step:4660 train loss:3.812935 +step:4661 train loss:3.727459 +step:4662 train loss:3.741289 +step:4663 train loss:3.748393 +step:4664 train loss:3.811423 +step:4665 train loss:3.802302 +step:4666 train loss:3.797376 +step:4667 train loss:3.790626 +step:4668 train loss:3.753509 +step:4669 train loss:3.766211 +step:4670 train loss:3.796686 +step:4671 train loss:3.795794 +step:4672 train loss:3.667915 +step:4673 train loss:3.704299 +step:4674 train loss:3.832057 +step:4675 train loss:3.735349 +step:4676 train loss:3.701064 +step:4677 train loss:3.701024 +step:4678 train loss:3.673789 +step:4679 train loss:3.777622 +step:4680 train loss:3.714383 +step:4681 train loss:3.768458 +step:4682 train loss:3.716136 +step:4683 train loss:3.683700 +step:4684 train loss:3.798357 +step:4685 train loss:3.733471 +step:4686 train loss:3.750549 +step:4687 train loss:3.780983 +step:4688 train loss:3.713240 +step:4689 train loss:3.791636 +step:4690 train loss:3.730982 +step:4691 train loss:3.769516 +step:4692 train loss:3.692984 +step:4693 train loss:3.736037 +step:4694 train loss:3.778908 +step:4695 train loss:3.790734 +step:4696 train loss:3.783405 +step:4697 train loss:3.693222 +step:4698 train loss:3.716374 +step:4699 train loss:3.761387 +step:4700 train loss:3.729862 +step:4701 train loss:3.741736 +step:4702 train loss:3.692859 +step:4703 train loss:3.777139 +step:4704 train loss:3.763572 +step:4705 train loss:3.709504 +step:4706 train loss:3.716088 +step:4707 train loss:3.701094 +step:4708 train loss:3.774594 +step:4709 train loss:3.716255 +step:4710 train loss:3.736559 +step:4711 train loss:3.789959 +step:4712 train loss:3.693120 +step:4713 train loss:3.794130 +step:4714 train loss:3.694581 +step:4715 train loss:3.786277 +step:4716 train loss:3.748727 +step:4717 train loss:3.686229 +step:4718 train loss:3.771243 +step:4719 train loss:3.700566 +step:4720 train loss:3.794621 +step:4721 train loss:3.750697 +step:4722 train loss:3.808179 +step:4723 train loss:3.704703 +step:4724 train loss:3.756270 +step:4725 train loss:3.687397 +step:4726 train loss:3.739309 +step:4727 train loss:3.738479 +step:4728 train loss:3.751795 +step:4729 train loss:3.778952 +step:4730 train loss:3.678214 +step:4731 train loss:3.741631 +step:4732 train loss:3.688282 +step:4733 train loss:3.631356 +step:4734 train loss:3.763319 +step:4735 train loss:3.720577 +step:4736 train loss:3.756478 +step:4737 train loss:3.641773 +step:4738 train loss:3.782217 +step:4739 train loss:3.663194 +step:4740 train loss:3.777847 +step:4741 train loss:3.743224 +step:4742 train loss:3.706402 +step:4743 train loss:3.695579 +step:4744 train loss:3.747158 +step:4745 train loss:3.763977 +step:4746 train loss:3.805052 +step:4747 train loss:3.764733 +step:4748 train loss:3.665132 +step:4749 train loss:3.732427 +step:4750 validation loss:3.671663 +step:4750 train loss:3.678622 +step:4751 train loss:3.776326 +step:4752 train loss:3.705453 +step:4753 train loss:3.815497 +step:4754 train loss:3.678855 +step:4755 train loss:3.729661 +step:4756 train loss:3.792086 +step:4757 train loss:3.727542 +step:4758 train loss:3.734400 +step:4759 train loss:3.740781 +step:4760 train loss:3.763671 +step:4761 train loss:3.689287 +step:4762 train loss:3.720653 +step:4763 train loss:3.740440 +step:4764 train loss:3.803243 +step:4765 train loss:3.692101 +step:4766 train loss:3.723506 +step:4767 train loss:3.670868 +step:4768 train loss:3.729376 +step:4769 train loss:3.750306 +step:4770 train loss:3.715644 +step:4771 train loss:3.722470 +step:4772 
train loss:3.698779 +step:4773 train loss:3.729011 +step:4774 train loss:3.673064 +step:4775 train loss:3.810216 +step:4776 train loss:3.672609 +step:4777 train loss:3.750360 +step:4778 train loss:3.684603 +step:4779 train loss:3.734674 +step:4780 train loss:3.670335 +step:4781 train loss:3.681910 +step:4782 train loss:3.780226 +step:4783 train loss:3.778002 +step:4784 train loss:3.732842 +step:4785 train loss:3.736016 +step:4786 train loss:3.846034 +step:4787 train loss:3.679505 +step:4788 train loss:3.700342 +step:4789 train loss:3.727697 +step:4790 train loss:3.772875 +step:4791 train loss:3.740908 +step:4792 train loss:3.785836 +step:4793 train loss:3.701131 +step:4794 train loss:3.776360 +step:4795 train loss:3.723066 +step:4796 train loss:3.713371 +step:4797 train loss:3.719400 +step:4798 train loss:3.731624 +step:4799 train loss:3.722878 +step:4800 train loss:3.760468 +step:4801 train loss:3.743849 +step:4802 train loss:3.783448 +step:4803 train loss:3.768132 +step:4804 train loss:3.729507 +step:4805 train loss:3.718277 +step:4806 train loss:3.698081 +step:4807 train loss:3.809154 +step:4808 train loss:3.678037 +step:4809 train loss:3.784692 +step:4810 train loss:3.722828 +step:4811 train loss:3.747118 +step:4812 train loss:3.714045 +step:4813 train loss:3.674836 +step:4814 train loss:3.665241 +step:4815 train loss:3.666756 +step:4816 train loss:3.722020 +step:4817 train loss:3.671664 +step:4818 train loss:3.731394 +step:4819 train loss:3.730467 +step:4820 train loss:3.974125 +step:4821 train loss:3.754149 +step:4822 train loss:3.764485 +step:4823 train loss:3.690799 +step:4824 train loss:3.705864 +step:4825 train loss:3.682977 +step:4826 train loss:3.769877 +step:4827 train loss:3.716675 +step:4828 train loss:3.663181 +step:4829 train loss:3.758712 +step:4830 train loss:3.705177 +step:4831 train loss:3.849518 +step:4832 train loss:3.720173 +step:4833 train loss:3.758120 +step:4834 train loss:3.660927 +step:4835 train loss:3.750156 +step:4836 train loss:3.728624 +step:4837 train loss:3.759658 +step:4838 train loss:3.696598 +step:4839 train loss:3.764558 +step:4840 train loss:3.670210 +step:4841 train loss:3.767212 +step:4842 train loss:3.679652 +step:4843 train loss:3.761779 +step:4844 train loss:3.753880 +step:4845 train loss:3.701881 +step:4846 train loss:3.710317 +step:4847 train loss:3.700674 +step:4848 train loss:3.721023 +step:4849 train loss:3.680337 +step:4850 train loss:3.681110 +step:4851 train loss:3.683327 +step:4852 train loss:3.755133 +step:4853 train loss:3.736189 +step:4854 train loss:3.710749 +step:4855 train loss:3.777795 +step:4856 train loss:3.743730 +step:4857 train loss:3.756596 +step:4858 train loss:3.835932 +step:4859 train loss:3.684716 +step:4860 train loss:3.755593 +step:4861 train loss:3.729314 +step:4862 train loss:3.765713 +step:4863 train loss:3.700103 +step:4864 train loss:3.711025 +step:4865 train loss:3.702707 +step:4866 train loss:3.749618 +step:4867 train loss:3.713533 +step:4868 train loss:3.736523 +step:4869 train loss:3.675150 +step:4870 train loss:3.718030 +step:4871 train loss:3.797127 +step:4872 train loss:3.742534 +step:4873 train loss:3.735508 +step:4874 train loss:3.716615 +step:4875 train loss:3.675082 +step:4876 train loss:3.691933 +step:4877 train loss:3.689965 +step:4878 train loss:3.733607 +step:4879 train loss:3.688898 +step:4880 train loss:3.721991 +step:4881 train loss:3.661283 +step:4882 train loss:3.868533 +step:4883 train loss:3.672644 +step:4884 train loss:3.708653 +step:4885 train loss:3.679860 +step:4886 train loss:3.759551 
+step:4887 train loss:3.707558 +step:4888 train loss:3.719064 +step:4889 train loss:3.703891 +step:4890 train loss:3.756466 +step:4891 train loss:3.687449 +step:4892 train loss:3.697836 +step:4893 train loss:3.740674 +step:4894 train loss:3.679618 +step:4895 train loss:3.707401 +step:4896 train loss:3.694601 +step:4897 train loss:3.762221 +step:4898 train loss:3.717292 +step:4899 train loss:3.697353 +step:4900 train loss:3.747341 +step:4901 train loss:3.695656 +step:4902 train loss:3.692683 +step:4903 train loss:3.708853 +step:4904 train loss:3.727244 +step:4905 train loss:3.719243 +step:4906 train loss:3.721550 +step:4907 train loss:3.791449 +step:4908 train loss:3.701491 +step:4909 train loss:3.702799 +step:4910 train loss:3.730404 +step:4911 train loss:3.777640 +step:4912 train loss:3.754719 +step:4913 train loss:3.728120 +step:4914 train loss:3.723059 +step:4915 train loss:3.701618 +step:4916 train loss:3.645917 +step:4917 train loss:3.669638 +step:4918 train loss:3.704346 +step:4919 train loss:3.693647 +step:4920 train loss:3.706056 +step:4921 train loss:3.855084 +step:4922 train loss:3.758107 +step:4923 train loss:3.767275 +step:4924 train loss:3.771879 +step:4925 train loss:3.700387 +step:4926 train loss:3.702653 +step:4927 train loss:3.724422 +step:4928 train loss:3.764342 +step:4929 train loss:3.715609 +step:4930 train loss:3.702891 +step:4931 train loss:3.689087 +step:4932 train loss:3.702448 +step:4933 train loss:3.696939 +step:4934 train loss:3.764106 +step:4935 train loss:3.749493 +step:4936 train loss:3.709471 +step:4937 train loss:3.819592 +step:4938 train loss:3.802387 +step:4939 train loss:3.673218 +step:4940 train loss:3.745781 +step:4941 train loss:3.654039 +step:4942 train loss:3.692260 +step:4943 train loss:3.698426 +step:4944 train loss:3.692369 +step:4945 train loss:3.745556 +step:4946 train loss:3.713384 +step:4947 train loss:3.704901 +step:4948 train loss:3.734401 +step:4949 train loss:3.645035 +step:4950 train loss:3.724314 +step:4951 train loss:3.775671 +step:4952 train loss:3.714203 +step:4953 train loss:3.750060 +step:4954 train loss:3.650721 +step:4955 train loss:3.726777 +step:4956 train loss:3.754452 +step:4957 train loss:3.754052 +step:4958 train loss:3.667331 +step:4959 train loss:3.785081 +step:4960 train loss:3.708681 +step:4961 train loss:3.731572 +step:4962 train loss:3.690342 +step:4963 train loss:3.737061 +step:4964 train loss:3.687168 +step:4965 train loss:3.840358 +step:4966 train loss:3.687093 +step:4967 train loss:3.798606 +step:4968 train loss:3.686684 +step:4969 train loss:3.730577 +step:4970 train loss:3.722047 +step:4971 train loss:3.665952 +step:4972 train loss:3.719103 +step:4973 train loss:3.721525 +step:4974 train loss:3.718825 +step:4975 train loss:3.792096 +step:4976 train loss:3.780551 +step:4977 train loss:3.722526 +step:4978 train loss:3.711670 +step:4979 train loss:3.707912 +step:4980 train loss:3.818870 +step:4981 train loss:3.656354 +step:4982 train loss:3.737909 +step:4983 train loss:3.650969 +step:4984 train loss:3.845105 +step:4985 train loss:3.743877 +step:4986 train loss:3.686394 +step:4987 train loss:3.704516 +step:4988 train loss:3.902679 +step:4989 train loss:3.706180 +step:4990 train loss:3.704118 +step:4991 train loss:3.714272 +step:4992 train loss:3.701649 +step:4993 train loss:3.679645 +step:4994 train loss:3.784548 +step:4995 train loss:3.715493 +step:4996 train loss:3.802150 +step:4997 train loss:3.698292 +step:4998 train loss:3.706883 +step:4999 train loss:3.687145 +step:5000 validation loss:3.660331 
total_sharp:6.0588e-03 L1_sharp:1.8651e-01 L2_sharp:1.3888e-01 L3_sharp:1.8953e-01 L4_sharp:8.9096e-02 L5_sharp:9.0530e-02 L6_sharp:1.1255e-01 L7_sharp:1.3723e-01 L8_sharp:1.1708e-01 L9_sharp:1.0607e-01 L10_sharp:8.3435e-02 L11_sharp:6.2386e-02 L12_sharp:7.2457e-02 total_fnorm:1.3462e+00 total_l1_linf:7.4238e+03 total_spectral:1.3462e+00 L1_fnorm:3.2166e-02 L2_fnorm:3.0665e-02 L3_fnorm:3.0018e-02 L4_fnorm:3.1305e-02 L5_fnorm:3.1708e-02 L6_fnorm:3.1929e-02 L7_fnorm:3.1928e-02 L8_fnorm:3.1938e-02 L9_fnorm:3.2073e-02 L10_fnorm:3.2168e-02 L11_fnorm:3.2140e-02 L12_fnorm:3.2022e-02 L1_l1linf:3.0944e-01 L2_l1linf:3.2704e-01 L3_l1linf:3.2571e-01 L4_l1linf:3.1951e-01 L5_l1linf:3.0658e-01 L6_l1linf:2.8005e-01 L7_l1linf:2.6310e-01 L8_l1linf:2.7466e-01 L9_l1linf:3.0060e-01 L10_l1linf:3.1440e-01 L11_l1linf:3.2661e-01 L12_l1linf:3.1926e-01 L1_spectral:6.9626e-03 L2_spectral:7.3720e-03 L3_spectral:7.2388e-03 L4_spectral:7.1340e-03 L5_spectral:6.8841e-03 L6_spectral:6.3203e-03 L7_spectral:5.9315e-03 L8_spectral:6.1976e-03 L9_spectral:6.7315e-03 L10_spectral:7.0591e-03 L11_spectral:7.3138e-03 L12_spectral:7.2425e-03 ip_v_neg_g:7.7152e-03 cos_v_neg_g:8.7938e-04 v_norm:1.3462e+00 g_norm:6.5172e+00 hv_norm:1.7507e+00 cos_v_hv:4.6589e-03 hg_norm:3.7102e+03 cos_g_hg:4.6553e-01 v_par:3.5303e-05 v_perp:1.3462e+00 L1_cos_v_neg_g:9.0941e-03 L1_v_norm:3.2166e-02 L2_cos_v_neg_g:8.6527e-03 L2_v_norm:3.0665e-02 L3_cos_v_neg_g:1.1289e-02 L3_v_norm:3.0018e-02 L4_cos_v_neg_g:1.0796e-02 L4_v_norm:3.1305e-02 L5_cos_v_neg_g:1.0558e-02 L5_v_norm:3.1708e-02 L6_cos_v_neg_g:1.1027e-02 L6_v_norm:3.1929e-02 L7_cos_v_neg_g:1.0961e-02 L7_v_norm:3.1928e-02 L8_cos_v_neg_g:1.0468e-02 L8_v_norm:3.1938e-02 L9_cos_v_neg_g:9.1566e-03 L9_v_norm:3.2073e-02 L10_cos_v_neg_g:9.3393e-03 L10_v_norm:3.2168e-02 L11_cos_v_neg_g:8.0490e-03 L11_v_norm:3.2140e-02 L12_cos_v_neg_g:8.4126e-03 L12_v_norm:3.2022e-02 +step:5000 train loss:3.801285 +step:5001 train loss:3.671714 +step:5002 train loss:3.722679 +step:5003 train loss:3.723674 +step:5004 train loss:3.712498 +step:5005 train loss:3.711016 +step:5006 train loss:3.750940 +step:5007 train loss:3.757942 +step:5008 train loss:3.692796 +step:5009 train loss:3.740375 +step:5010 train loss:3.685498 +step:5011 train loss:3.719322 +step:5012 train loss:3.692982 +step:5013 train loss:3.793912 +step:5014 train loss:3.709160 +step:5015 train loss:3.784384 +step:5016 train loss:3.710383 +step:5017 train loss:3.756201 +step:5018 train loss:3.677541 +step:5019 train loss:3.708512 +step:5020 train loss:3.703436 +step:5021 train loss:3.714213 +step:5022 train loss:3.752050 +step:5023 train loss:3.717236 +step:5024 train loss:3.775542 +step:5025 train loss:3.654582 +step:5026 train loss:3.785082 +step:5027 train loss:3.714091 +step:5028 train loss:3.782961 +step:5029 train loss:3.676364 +step:5030 train loss:3.715942 +step:5031 train loss:3.704469 +step:5032 train loss:3.730051 +step:5033 train loss:3.718117 +step:5034 train loss:3.708627 +step:5035 train loss:3.799771 +step:5036 train loss:3.742385 +step:5037 train loss:3.698469 +step:5038 train loss:3.745409 +step:5039 train loss:3.759910 +step:5040 train loss:3.719982 +step:5041 train loss:3.736219 +step:5042 train loss:3.643682 +step:5043 train loss:3.787461 +step:5044 train loss:3.698335 +step:5045 train loss:3.752389 +step:5046 train loss:3.675905 +step:5047 train loss:3.751328 +step:5048 train loss:3.666373 +step:5049 train loss:3.799201 +step:5050 train loss:3.686392 +step:5051 train loss:3.729285 +step:5052 train loss:3.628473 +step:5053 train loss:3.808032 
+step:5054 train loss:3.703769 +step:5055 train loss:3.722620 +step:5056 train loss:3.765135 +step:5057 train loss:3.692497 +step:5058 train loss:3.725925 +step:5059 train loss:3.686657 +step:5060 train loss:3.732138 +step:5061 train loss:3.726274 +step:5062 train loss:3.695815 +step:5063 train loss:3.691383 +step:5064 train loss:3.696609 +step:5065 train loss:3.684587 +step:5066 train loss:3.740844 +step:5067 train loss:3.729926 +step:5068 train loss:3.709379 +step:5069 train loss:3.686002 +step:5070 train loss:3.711605 +step:5071 train loss:3.780838 +step:5072 train loss:3.674861 +step:5073 train loss:3.681364 +step:5074 train loss:3.626176 +step:5075 train loss:3.696846 +step:5076 train loss:3.628339 +step:5077 train loss:3.690091 +step:5078 train loss:3.704032 +step:5079 train loss:3.735031 +step:5080 train loss:3.709041 +step:5081 train loss:3.720747 +step:5082 train loss:3.709944 +step:5083 train loss:3.762655 +step:5084 train loss:3.744461 +step:5085 train loss:3.707163 +step:5086 train loss:3.785092 +step:5087 train loss:3.765014 +step:5088 train loss:3.684873 +step:5089 train loss:3.752049 +step:5090 train loss:3.694001 +step:5091 train loss:3.702104 +step:5092 train loss:3.797038 +step:5093 train loss:3.683149 +step:5094 train loss:3.678756 +step:5095 train loss:3.726576 +step:5096 train loss:3.693843 +step:5097 train loss:3.710996 +step:5098 train loss:3.710591 +step:5099 train loss:3.671143 +step:5100 train loss:3.682549 +step:5101 train loss:3.875057 +step:5102 train loss:3.719056 +step:5103 train loss:3.731982 +step:5104 train loss:3.775960 +step:5105 train loss:3.714005 +step:5106 train loss:3.676213 +step:5107 train loss:3.693697 +step:5108 train loss:3.686448 +step:5109 train loss:3.763969 +step:5110 train loss:3.683453 +step:5111 train loss:3.766578 +step:5112 train loss:3.682927 +step:5113 train loss:3.659438 +step:5114 train loss:3.709352 +step:5115 train loss:3.668271 +step:5116 train loss:3.727398 +step:5117 train loss:3.666715 +step:5118 train loss:3.699620 +step:5119 train loss:3.683940 +step:5120 train loss:3.724873 +step:5121 train loss:3.673659 +step:5122 train loss:3.681519 +step:5123 train loss:3.671834 +step:5124 train loss:3.633518 +step:5125 train loss:3.745323 +step:5126 train loss:3.727865 +step:5127 train loss:3.729330 +step:5128 train loss:3.747217 +step:5129 train loss:3.671789 +step:5130 train loss:3.687002 +step:5131 train loss:3.620375 +step:5132 train loss:3.745021 +step:5133 train loss:3.711260 +step:5134 train loss:3.714375 +step:5135 train loss:3.664337 +step:5136 train loss:3.731817 +step:5137 train loss:3.732705 +step:5138 train loss:3.708495 +step:5139 train loss:3.750082 +step:5140 train loss:3.719031 +step:5141 train loss:3.755474 +step:5142 train loss:3.695470 +step:5143 train loss:3.729186 +step:5144 train loss:3.722576 +step:5145 train loss:3.669660 +step:5146 train loss:3.659956 +step:5147 train loss:3.736097 +step:5148 train loss:3.665266 +step:5149 train loss:3.737505 +step:5150 train loss:3.713016 +step:5151 train loss:3.675043 +step:5152 train loss:3.722564 +step:5153 train loss:3.697320 +step:5154 train loss:3.704451 +step:5155 train loss:3.712653 +step:5156 train loss:3.696012 +step:5157 train loss:3.693325 +step:5158 train loss:3.721210 +step:5159 train loss:3.751439 +step:5160 train loss:3.821060 +step:5161 train loss:3.749913 +step:5162 train loss:3.767084 +step:5163 train loss:3.683881 +step:5164 train loss:3.742146 +step:5165 train loss:3.763459 +step:5166 train loss:3.697553 +step:5167 train loss:3.797484 +step:5168 train 
loss:3.706596 +step:5169 train loss:3.745066 +step:5170 train loss:3.720238 +step:5171 train loss:3.765769 +step:5172 train loss:3.685867 +step:5173 train loss:3.748756 +step:5174 train loss:3.684296 +step:5175 train loss:3.711483 +step:5176 train loss:3.707446 +step:5177 train loss:3.698910 +step:5178 train loss:3.767080 +step:5179 train loss:3.680273 +step:5180 train loss:3.757702 +step:5181 train loss:3.705453 +step:5182 train loss:3.760092 +step:5183 train loss:3.693487 +step:5184 train loss:3.671177 +step:5185 train loss:3.700766 +step:5186 train loss:3.750627 +step:5187 train loss:3.748261 +step:5188 train loss:3.681021 +step:5189 train loss:3.724643 +step:5190 train loss:3.709116 +step:5191 train loss:3.685537 +step:5192 train loss:3.671331 +step:5193 train loss:3.754863 +step:5194 train loss:3.710168 +step:5195 train loss:3.682260 +step:5196 train loss:3.750811 +step:5197 train loss:3.789186 +step:5198 train loss:3.713417 +step:5199 train loss:3.699770 +step:5200 train loss:3.720966 +step:5201 train loss:3.714108 +step:5202 train loss:3.711833 +step:5203 train loss:3.721089 +step:5204 train loss:3.689256 +step:5205 train loss:3.731268 +step:5206 train loss:3.668842 +step:5207 train loss:3.674954 +step:5208 train loss:3.738213 +step:5209 train loss:3.757041 +step:5210 train loss:3.661011 +step:5211 train loss:3.702163 +step:5212 train loss:3.721123 +step:5213 train loss:3.693155 +step:5214 train loss:3.745624 +step:5215 train loss:3.852722 +step:5216 train loss:3.705894 +step:5217 train loss:3.681063 +step:5218 train loss:3.688567 +step:5219 train loss:3.750603 +step:5220 train loss:3.668496 +step:5221 train loss:3.674763 +step:5222 train loss:3.752919 +step:5223 train loss:3.748585 +step:5224 train loss:3.645137 +step:5225 train loss:3.790977 +step:5226 train loss:3.705153 +step:5227 train loss:3.776276 +step:5228 train loss:3.754380 +step:5229 train loss:3.688887 +step:5230 train loss:3.707339 +step:5231 train loss:3.649106 +step:5232 train loss:3.772808 +step:5233 train loss:3.734910 +step:5234 train loss:3.734765 +step:5235 train loss:3.688546 +step:5236 train loss:3.760246 +step:5237 train loss:3.815438 +step:5238 train loss:3.712688 +step:5239 train loss:3.781022 +step:5240 train loss:3.662536 +step:5241 train loss:3.722147 +step:5242 train loss:3.696202 +step:5243 train loss:3.694827 +step:5244 train loss:3.696902 +step:5245 train loss:3.741768 +step:5246 train loss:3.781053 +step:5247 train loss:3.712776 +step:5248 train loss:3.677484 +step:5249 train loss:3.739273 +step:5250 validation loss:3.642559 +step:5250 train loss:3.706726 +step:5251 train loss:3.769660 +step:5252 train loss:3.663081 +step:5253 train loss:3.813116 +step:5254 train loss:3.685565 +step:5255 train loss:3.758785 +step:5256 train loss:3.676778 +step:5257 train loss:3.725400 +step:5258 train loss:3.725691 +step:5259 train loss:3.711724 +step:5260 train loss:3.704052 +step:5261 train loss:3.698745 +step:5262 train loss:3.733179 +step:5263 train loss:3.724689 +step:5264 train loss:3.672269 +step:5265 train loss:3.755419 +step:5266 train loss:3.673526 +step:5267 train loss:3.682648 +step:5268 train loss:3.667801 +step:5269 train loss:3.666554 +step:5270 train loss:3.722116 +step:5271 train loss:3.644319 +step:5272 train loss:3.735738 +step:5273 train loss:3.644084 +step:5274 train loss:3.691870 +step:5275 train loss:3.710507 +step:5276 train loss:3.831771 +step:5277 train loss:3.734526 +step:5278 train loss:3.682363 +step:5279 train loss:3.726633 +step:5280 train loss:3.707371 +step:5281 train loss:3.697980 
+step:5282 train loss:3.670973 +step:5283 train loss:3.671673 +step:5284 train loss:3.678558 +step:5285 train loss:3.750545 +step:5286 train loss:3.654355 +step:5287 train loss:3.757347 +step:5288 train loss:3.732163 +step:5289 train loss:3.702137 +step:5290 train loss:3.752844 +step:5291 train loss:3.704078 +step:5292 train loss:3.727310 +step:5293 train loss:3.694926 +step:5294 train loss:3.683511 +step:5295 train loss:3.691641 +step:5296 train loss:3.677409 +step:5297 train loss:3.701854 +step:5298 train loss:3.645614 +step:5299 train loss:3.738233 +step:5300 train loss:3.689870 +step:5301 train loss:3.758812 +step:5302 train loss:3.765113 +step:5303 train loss:3.626861 +step:5304 train loss:3.663526 +step:5305 train loss:3.639393 +step:5306 train loss:3.673757 +step:5307 train loss:3.675311 +step:5308 train loss:3.766737 +step:5309 train loss:3.720119 +step:5310 train loss:3.701569 +step:5311 train loss:3.778160 +step:5312 train loss:3.652705 +step:5313 train loss:3.742881 +step:5314 train loss:3.735428 +step:5315 train loss:3.697104 +step:5316 train loss:3.729024 +step:5317 train loss:3.743742 +step:5318 train loss:3.705019 +step:5319 train loss:3.728979 +step:5320 train loss:3.676604 +step:5321 train loss:3.801496 +step:5322 train loss:3.710149 +step:5323 train loss:3.711854 +step:5324 train loss:3.659019 +step:5325 train loss:3.733906 +step:5326 train loss:3.732609 +step:5327 train loss:3.621811 +step:5328 train loss:3.759248 +step:5329 train loss:3.724466 +step:5330 train loss:3.719523 +step:5331 train loss:3.773320 +step:5332 train loss:3.693695 +step:5333 train loss:3.755928 +step:5334 train loss:3.735226 +step:5335 train loss:3.785326 +step:5336 train loss:3.828186 +step:5337 train loss:3.661403 +step:5338 train loss:3.667408 +step:5339 train loss:3.689581 +step:5340 train loss:3.711581 +step:5341 train loss:3.733451 +step:5342 train loss:3.630279 +step:5343 train loss:3.789025 +step:5344 train loss:3.670231 +step:5345 train loss:3.668625 +step:5346 train loss:3.676982 +step:5347 train loss:3.694048 +step:5348 train loss:3.738459 +step:5349 train loss:3.678367 +step:5350 train loss:3.716738 +step:5351 train loss:3.794165 +step:5352 train loss:3.829256 +step:5353 train loss:3.743601 +step:5354 train loss:3.712793 +step:5355 train loss:3.678770 +step:5356 train loss:3.703624 +step:5357 train loss:3.677579 +step:5358 train loss:3.702382 +step:5359 train loss:3.716455 +step:5360 train loss:3.684995 +step:5361 train loss:3.688753 +step:5362 train loss:3.673290 +step:5363 train loss:3.670356 +step:5364 train loss:3.670649 +step:5365 train loss:3.701231 +step:5366 train loss:3.738801 +step:5367 train loss:3.658611 +step:5368 train loss:3.728149 +step:5369 train loss:3.746016 +step:5370 train loss:3.644524 +step:5371 train loss:3.703828 +step:5372 train loss:3.715135 +step:5373 train loss:3.763782 +step:5374 train loss:3.641666 +step:5375 train loss:3.687775 +step:5376 train loss:3.760491 +step:5377 train loss:3.690756 +step:5378 train loss:3.668041 +step:5379 train loss:3.672755 +step:5380 train loss:3.700644 +step:5381 train loss:3.747997 +step:5382 train loss:3.645520 +step:5383 train loss:3.715218 +step:5384 train loss:3.727401 +step:5385 train loss:3.726739 +step:5386 train loss:3.708096 +step:5387 train loss:3.711889 +step:5388 train loss:3.727053 +step:5389 train loss:3.655109 +step:5390 train loss:3.685570 +step:5391 train loss:3.628578 +step:5392 train loss:3.688651 +step:5393 train loss:3.685263 +step:5394 train loss:3.673885 +step:5395 train loss:3.750959 +step:5396 train 
loss:3.711036 +step:5397 train loss:3.732273 +step:5398 train loss:3.732848 +step:5399 train loss:3.762001 +step:5400 train loss:3.766188 +step:5401 train loss:3.728705 +step:5402 train loss:3.832411 +step:5403 train loss:3.743412 +step:5404 train loss:3.709715 +step:5405 train loss:3.782411 +step:5406 train loss:3.744859 +step:5407 train loss:3.673619 +step:5408 train loss:3.818453 +step:5409 train loss:3.655731 +step:5410 train loss:3.724019 +step:5411 train loss:3.710074 +step:5412 train loss:3.678980 +step:5413 train loss:3.736434 +step:5414 train loss:3.711647 +step:5415 train loss:3.694712 +step:5416 train loss:3.686543 +step:5417 train loss:3.750927 +step:5418 train loss:3.770944 +step:5419 train loss:3.670628 +step:5420 train loss:3.733101 +step:5421 train loss:3.701038 +step:5422 train loss:3.740651 +step:5423 train loss:3.725313 +step:5424 train loss:3.625956 +step:5425 train loss:3.690334 +step:5426 train loss:3.776598 +step:5427 train loss:3.667605 +step:5428 train loss:3.709240 +step:5429 train loss:3.644242 +step:5430 train loss:3.677006 +step:5431 train loss:3.738315 +step:5432 train loss:3.714249 +step:5433 train loss:3.721804 +step:5434 train loss:3.670159 +step:5435 train loss:3.669357 +step:5436 train loss:3.672702 +step:5437 train loss:3.707241 +step:5438 train loss:3.688785 +step:5439 train loss:3.693319 +step:5440 train loss:3.734162 +step:5441 train loss:3.753960 +step:5442 train loss:3.678510 +step:5443 train loss:3.681941 +step:5444 train loss:3.626714 +step:5445 train loss:3.709124 +step:5446 train loss:3.684023 +step:5447 train loss:3.716594 +step:5448 train loss:3.773704 +step:5449 train loss:3.660796 +step:5450 train loss:3.696959 +step:5451 train loss:3.691407 +step:5452 train loss:3.702370 +step:5453 train loss:3.761500 +step:5454 train loss:3.686738 +step:5455 train loss:3.669492 +step:5456 train loss:3.812453 +step:5457 train loss:3.688248 +step:5458 train loss:3.728624 +step:5459 train loss:3.670875 +step:5460 train loss:3.687835 +step:5461 train loss:3.693519 +step:5462 train loss:3.689478 +step:5463 train loss:3.699756 +step:5464 train loss:3.702272 +step:5465 train loss:3.644115 +step:5466 train loss:3.727526 +step:5467 train loss:3.711976 +step:5468 train loss:3.712776 +step:5469 train loss:3.806388 +step:5470 train loss:3.698270 +step:5471 train loss:3.771127 +step:5472 train loss:3.719779 +step:5473 train loss:3.621988 +step:5474 train loss:3.956117 +step:5475 train loss:3.633048 +step:5476 train loss:3.710262 +step:5477 train loss:3.715246 +step:5478 train loss:3.709743 +step:5479 train loss:3.853073 +step:5480 train loss:3.701092 +step:5481 train loss:3.764789 +step:5482 train loss:3.674109 +step:5483 train loss:3.709295 +step:5484 train loss:3.751870 +step:5485 train loss:3.667132 +step:5486 train loss:3.709825 +step:5487 train loss:3.713192 +step:5488 train loss:3.625057 +step:5489 train loss:3.731606 +step:5490 train loss:3.678536 +step:5491 train loss:3.779233 +step:5492 train loss:3.707040 +step:5493 train loss:3.634033 +step:5494 train loss:3.691738 +step:5495 train loss:3.671891 +step:5496 train loss:3.663336 +step:5497 train loss:3.785089 +step:5498 train loss:3.652835 +step:5499 train loss:3.789491 +step:5500 validation loss:3.635050 total_sharp:5.6920e-03 L1_sharp:5.7386e-01 L2_sharp:2.5730e-01 L3_sharp:1.4745e-01 L4_sharp:7.4579e-02 L5_sharp:7.0706e-02 L6_sharp:8.3296e-02 L7_sharp:1.2156e-01 L8_sharp:9.8593e-02 L9_sharp:7.8514e-02 L10_sharp:5.5409e-02 L11_sharp:3.9750e-02 L12_sharp:5.4554e-02 total_fnorm:1.3217e+00 
total_l1_linf:7.3010e+03 total_spectral:1.3217e+00 L1_fnorm:3.2039e-02 L2_fnorm:3.0705e-02 L3_fnorm:3.0127e-02 L4_fnorm:3.1216e-02 L5_fnorm:3.1885e-02 L6_fnorm:3.2170e-02 L7_fnorm:3.2228e-02 L8_fnorm:3.1991e-02 L9_fnorm:3.1912e-02 L10_fnorm:3.2011e-02 L11_fnorm:3.2054e-02 L12_fnorm:3.1977e-02 L1_l1linf:3.2528e-01 L2_l1linf:3.4411e-01 L3_l1linf:3.2867e-01 L4_l1linf:3.1271e-01 L5_l1linf:3.2023e-01 L6_l1linf:3.1458e-01 L7_l1linf:2.9523e-01 L8_l1linf:2.6990e-01 L9_l1linf:2.9033e-01 L10_l1linf:3.0128e-01 L11_l1linf:3.1294e-01 L12_l1linf:3.1733e-01 L1_spectral:7.1692e-03 L2_spectral:7.6462e-03 L3_spectral:7.3004e-03 L4_spectral:7.0722e-03 L5_spectral:7.1715e-03 L6_spectral:6.9751e-03 L7_spectral:6.6203e-03 L8_spectral:6.1039e-03 L9_spectral:6.5082e-03 L10_spectral:6.7848e-03 L11_spectral:7.0927e-03 L12_spectral:7.2024e-03 ip_v_neg_g:8.3047e-04 cos_v_neg_g:6.0645e-05 v_norm:1.3217e+00 g_norm:1.0361e+01 hv_norm:2.0020e+00 cos_v_hv:3.7579e-03 hg_norm:7.0384e+03 cos_g_hg:7.1753e-01 v_par:6.8506e-06 v_perp:1.3217e+00 L1_cos_v_neg_g:4.8541e-04 L1_v_norm:3.2039e-02 L2_cos_v_neg_g:2.0652e-03 L2_v_norm:3.0705e-02 L3_cos_v_neg_g:2.8069e-03 L3_v_norm:3.0127e-02 L4_cos_v_neg_g:3.0415e-03 L4_v_norm:3.1216e-02 L5_cos_v_neg_g:3.3411e-03 L5_v_norm:3.1885e-02 L6_cos_v_neg_g:1.9950e-03 L6_v_norm:3.2170e-02 L7_cos_v_neg_g:1.7876e-03 L7_v_norm:3.2228e-02 L8_cos_v_neg_g:-8.4727e-04 L8_v_norm:3.1991e-02 L9_cos_v_neg_g:4.6646e-05 L9_v_norm:3.1912e-02 L10_cos_v_neg_g:-6.0029e-04 L10_v_norm:3.2011e-02 L11_cos_v_neg_g:-3.2948e-04 L11_v_norm:3.2054e-02 L12_cos_v_neg_g:3.5463e-04 L12_v_norm:3.1977e-02 +step:5500 train loss:3.705537 +step:5501 train loss:3.773803 +step:5502 train loss:3.727433 +step:5503 train loss:3.690689 +step:5504 train loss:3.737574 +step:5505 train loss:3.702101 +step:5506 train loss:3.740198 +step:5507 train loss:3.734527 +step:5508 train loss:3.752446 +step:5509 train loss:3.760806 +step:5510 train loss:3.738355 +step:5511 train loss:3.727622 +step:5512 train loss:3.848364 +step:5513 train loss:3.655951 +step:5514 train loss:3.711176 +step:5515 train loss:3.740974 +step:5516 train loss:3.759477 +step:5517 train loss:3.721962 +step:5518 train loss:3.750916 +step:5519 train loss:3.780530 +step:5520 train loss:3.691858 +step:5521 train loss:3.704295 +step:5522 train loss:3.662436 +step:5523 train loss:3.716890 +step:5524 train loss:3.765816 +step:5525 train loss:3.671840 +step:5526 train loss:3.683604 +step:5527 train loss:3.699953 +step:5528 train loss:3.818335 +step:5529 train loss:3.768088 +step:5530 train loss:3.739572 +step:5531 train loss:3.679220 +step:5532 train loss:3.695484 +step:5533 train loss:3.737676 +step:5534 train loss:3.646983 +step:5535 train loss:3.699840 +step:5536 train loss:3.639991 +step:5537 train loss:3.683794 +step:5538 train loss:3.676929 +step:5539 train loss:3.624648 +step:5540 train loss:3.848148 +step:5541 train loss:3.659194 +step:5542 train loss:3.706794 +step:5543 train loss:3.693806 +step:5544 train loss:3.687331 +step:5545 train loss:3.676280 +step:5546 train loss:3.718387 +step:5547 train loss:3.645973 +step:5548 train loss:3.687849 +step:5549 train loss:3.691928 +step:5550 train loss:3.711794 +step:5551 train loss:3.725952 +step:5552 train loss:3.676903 +step:5553 train loss:3.703747 +step:5554 train loss:3.678295 +step:5555 train loss:3.688275 +step:5556 train loss:3.705045 +step:5557 train loss:3.770866 +step:5558 train loss:3.693046 +step:5559 train loss:3.693094 +step:5560 train loss:3.688066 +step:5561 train loss:3.720383 +step:5562 train loss:3.678577 
+step:5563 train loss:3.661297 +step:5564 train loss:3.695313 +step:5565 train loss:3.759085 +step:5566 train loss:3.662094 +step:5567 train loss:3.776806 +step:5568 train loss:3.900777 +step:5569 train loss:3.693429 +step:5570 train loss:3.617240 +step:5571 train loss:3.712440 +step:5572 train loss:3.649328 +step:5573 train loss:3.638440 +step:5574 train loss:3.610676 +step:5575 train loss:3.700082 +step:5576 train loss:3.688607 +step:5577 train loss:3.693214 +step:5578 train loss:3.721110 +step:5579 train loss:3.676684 +step:5580 train loss:3.704204 +step:5581 train loss:3.722140 +step:5582 train loss:3.703418 +step:5583 train loss:3.713834 +step:5584 train loss:3.829678 +step:5585 train loss:3.738855 +step:5586 train loss:3.668645 +step:5587 train loss:3.706990 +step:5588 train loss:3.725212 +step:5589 train loss:3.719978 +step:5590 train loss:3.785774 +step:5591 train loss:3.647089 +step:5592 train loss:3.824772 +step:5593 train loss:3.706642 +step:5594 train loss:3.709612 +step:5595 train loss:3.695982 +step:5596 train loss:3.650810 +step:5597 train loss:3.667638 +step:5598 train loss:3.670069 +step:5599 train loss:3.679780 +step:5600 train loss:3.720632 +step:5601 train loss:3.742379 +step:5602 train loss:3.675889 +step:5603 train loss:3.715904 +step:5604 train loss:3.717371 +step:5605 train loss:3.687160 +step:5606 train loss:3.692219 +step:5607 train loss:3.723999 +step:5608 train loss:3.668186 +step:5609 train loss:3.714865 +step:5610 train loss:3.673066 +step:5611 train loss:3.709460 +step:5612 train loss:3.741752 +step:5613 train loss:3.704643 +step:5614 train loss:3.666283 +step:5615 train loss:3.769317 +step:5616 train loss:3.666425 +step:5617 train loss:3.755235 +step:5618 train loss:3.738355 +step:5619 train loss:3.693930 +step:5620 train loss:3.689714 +step:5621 train loss:3.769149 +step:5622 train loss:3.651811 +step:5623 train loss:3.687668 +step:5624 train loss:3.677064 +step:5625 train loss:3.709557 +step:5626 train loss:3.702487 +step:5627 train loss:3.677217 +step:5628 train loss:3.716486 +step:5629 train loss:3.695992 +step:5630 train loss:3.628811 +step:5631 train loss:3.670720 +step:5632 train loss:3.712577 +step:5633 train loss:3.709089 +step:5634 train loss:3.658143 +step:5635 train loss:3.700275 +step:5636 train loss:3.676444 +step:5637 train loss:3.820386 +step:5638 train loss:3.724241 +step:5639 train loss:3.702185 +step:5640 train loss:3.708754 +step:5641 train loss:3.746703 +step:5642 train loss:3.676183 +step:5643 train loss:3.697151 +step:5644 train loss:3.779360 +step:5645 train loss:3.731223 +step:5646 train loss:3.736311 +step:5647 train loss:3.724742 +step:5648 train loss:3.712338 +step:5649 train loss:3.629390 +step:5650 train loss:3.630505 +step:5651 train loss:3.707480 +step:5652 train loss:3.707754 +step:5653 train loss:3.672350 +step:5654 train loss:3.804640 +step:5655 train loss:3.665767 +step:5656 train loss:3.689490 +step:5657 train loss:3.756522 +step:5658 train loss:3.657880 +step:5659 train loss:3.694073 +step:5660 train loss:3.747923 +step:5661 train loss:3.683545 +step:5662 train loss:3.725867 +step:5663 train loss:3.613503 +step:5664 train loss:3.589441 +step:5665 train loss:3.706562 +step:5666 train loss:3.715089 +step:5667 train loss:3.747579 +step:5668 train loss:3.678657 +step:5669 train loss:3.695094 +step:5670 train loss:3.692376 +step:5671 train loss:3.677682 +step:5672 train loss:3.724444 +step:5673 train loss:3.695509 +step:5674 train loss:3.768652 +step:5675 train loss:3.679660 +step:5676 train loss:3.827120 +step:5677 train 
loss:3.726635 +step:5678 train loss:3.699264 +step:5679 train loss:3.695535 +step:5680 train loss:3.723598 +step:5681 train loss:3.691805 +step:5682 train loss:3.710568 +step:5683 train loss:3.666905 +step:5684 train loss:3.669881 +step:5685 train loss:3.720449 +step:5686 train loss:3.732936 +step:5687 train loss:3.678993 +step:5688 train loss:3.771962 +step:5689 train loss:3.676401 +step:5690 train loss:3.820855 +step:5691 train loss:3.656852 +step:5692 train loss:3.640360 +step:5693 train loss:3.652345 +step:5694 train loss:3.670132 +step:5695 train loss:3.685011 +step:5696 train loss:3.738711 +step:5697 train loss:3.665819 +step:5698 train loss:3.683757 +step:5699 train loss:3.699518 +step:5700 train loss:3.695123 +step:5701 train loss:3.681877 +step:5702 train loss:3.756516 +step:5703 train loss:3.653301 +step:5704 train loss:3.696311 +step:5705 train loss:3.708143 +step:5706 train loss:3.729866 +step:5707 train loss:3.647002 +step:5708 train loss:3.733332 +step:5709 train loss:3.737310 +step:5710 train loss:3.723349 +step:5711 train loss:3.748381 +step:5712 train loss:3.729016 +step:5713 train loss:3.653995 +step:5714 train loss:3.740873 +step:5715 train loss:3.698749 +step:5716 train loss:3.699105 +step:5717 train loss:3.729099 +step:5718 train loss:3.672467 +step:5719 train loss:3.740543 +step:5720 train loss:3.719079 +step:5721 train loss:3.647965 +step:5722 train loss:3.655893 +step:5723 train loss:3.740972 +step:5724 train loss:3.655703 +step:5725 train loss:3.726048 +step:5726 train loss:3.723539 +step:5727 train loss:3.676622 +step:5728 train loss:3.680658 +step:5729 train loss:3.685884 +step:5730 train loss:3.751143 +step:5731 train loss:3.627755 +step:5732 train loss:3.683522 +step:5733 train loss:3.670669 +step:5734 train loss:3.688723 +step:5735 train loss:3.679910 +step:5736 train loss:3.680656 +step:5737 train loss:3.705969 +step:5738 train loss:3.672460 +step:5739 train loss:3.680792 +step:5740 train loss:3.722284 +step:5741 train loss:3.689780 +step:5742 train loss:3.747636 +step:5743 train loss:3.710985 +step:5744 train loss:3.667243 +step:5745 train loss:3.676802 +step:5746 train loss:3.707158 +step:5747 train loss:3.689341 +step:5748 train loss:3.740505 +step:5749 train loss:3.696736 +step:5750 validation loss:3.626064 +step:5750 train loss:3.700702 +step:5751 train loss:3.717066 +step:5752 train loss:3.704659 +step:5753 train loss:3.668846 +step:5754 train loss:3.682211 +step:5755 train loss:3.695091 +step:5756 train loss:3.679759 +step:5757 train loss:3.750463 +step:5758 train loss:3.680874 +step:5759 train loss:3.645276 +step:5760 train loss:3.723711 +step:5761 train loss:3.720477 +step:5762 train loss:3.679622 +step:5763 train loss:3.705528 +step:5764 train loss:3.667881 +step:5765 train loss:3.784628 +step:5766 train loss:3.692911 +step:5767 train loss:3.728920 +step:5768 train loss:3.667317 +step:5769 train loss:3.788771 +step:5770 train loss:3.710871 +step:5771 train loss:3.736855 +step:5772 train loss:3.693565 +step:5773 train loss:3.670047 +step:5774 train loss:3.678662 +step:5775 train loss:3.745747 +step:5776 train loss:3.736783 +step:5777 train loss:3.652632 +step:5778 train loss:3.734704 +step:5779 train loss:3.703747 +step:5780 train loss:3.668585 +step:5781 train loss:3.733371 +step:5782 train loss:3.695165 +step:5783 train loss:3.651732 +step:5784 train loss:3.757545 +step:5785 train loss:3.747721 +step:5786 train loss:3.657605 +step:5787 train loss:3.701105 +step:5788 train loss:3.713420 +step:5789 train loss:3.660168 +step:5790 train loss:3.757622 
+step:5791 train loss:3.687297 +step:5792 train loss:3.954066 +step:5793 train loss:3.729087 +step:5794 train loss:3.751903 +step:5795 train loss:3.740494 +step:5796 train loss:3.720341 +step:5797 train loss:3.704352 +step:5798 train loss:3.702664 +step:5799 train loss:3.668081 +step:5800 train loss:3.832865 +step:5801 train loss:3.703665 +step:5802 train loss:3.687798 +step:5803 train loss:3.704482 +step:5804 train loss:3.724613 +step:5805 train loss:3.682379 +step:5806 train loss:3.727818 +step:5807 train loss:3.648769 +step:5808 train loss:3.682097 +step:5809 train loss:3.691620 +step:5810 train loss:3.665121 +step:5811 train loss:3.678445 +step:5812 train loss:3.661877 +step:5813 train loss:3.671203 +step:5814 train loss:3.663080 +step:5815 train loss:3.664462 +step:5816 train loss:3.732111 +step:5817 train loss:3.739382 +step:5818 train loss:3.715570 +step:5819 train loss:3.767371 +step:5820 train loss:3.710389 +step:5821 train loss:3.697121 +step:5822 train loss:3.716008 +step:5823 train loss:3.717531 +step:5824 train loss:3.669202 +step:5825 train loss:3.759817 +step:5826 train loss:3.677264 +step:5827 train loss:3.641003 +step:5828 train loss:3.627281 +step:5829 train loss:3.691420 +step:5830 train loss:3.666507 +step:5831 train loss:3.633760 +step:5832 train loss:3.753101 +step:5833 train loss:3.730386 +step:5834 train loss:3.713417 +step:5835 train loss:3.665276 +step:5836 train loss:3.629726 +step:5837 train loss:3.748227 +step:5838 train loss:3.731331 +step:5839 train loss:3.709652 +step:5840 train loss:3.789026 +step:5841 train loss:3.715687 +step:5842 train loss:3.724275 +step:5843 train loss:3.672673 +step:5844 train loss:3.738524 +step:5845 train loss:3.650693 +step:5846 train loss:3.695346 +step:5847 train loss:3.723515 +step:5848 train loss:3.792380 +step:5849 train loss:3.683248 +step:5850 train loss:3.712159 +step:5851 train loss:3.682161 +step:5852 train loss:3.765037 +step:5853 train loss:3.856157 +step:5854 train loss:3.649454 +step:5855 train loss:3.708897 +step:5856 train loss:3.677793 +step:5857 train loss:3.692289 +step:5858 train loss:3.665206 +step:5859 train loss:3.666915 +step:5860 train loss:3.772712 +step:5861 train loss:3.653004 +step:5862 train loss:3.768223 +step:5863 train loss:3.705410 +step:5864 train loss:3.693934 +step:5865 train loss:3.701341 +step:5866 train loss:3.692483 +step:5867 train loss:3.769773 +step:5868 train loss:3.700003 +step:5869 train loss:3.720661 +step:5870 train loss:3.694832 +step:5871 train loss:3.680030 +step:5872 train loss:3.707930 +step:5873 train loss:3.684322 +step:5874 train loss:3.767561 +step:5875 train loss:3.698159 +step:5876 train loss:3.676518 +step:5877 train loss:3.684383 +step:5878 train loss:3.687002 +step:5879 train loss:3.657970 +step:5880 train loss:3.853949 +step:5881 train loss:3.696563 +step:5882 train loss:3.665228 +step:5883 train loss:3.664451 +step:5884 train loss:3.689167 +step:5885 train loss:3.680038 +step:5886 train loss:3.698272 +step:5887 train loss:3.700277 +step:5888 train loss:3.679595 +step:5889 train loss:3.659618 +step:5890 train loss:3.708622 +step:5891 train loss:3.650633 +step:5892 train loss:3.728681 +step:5893 train loss:3.655597 +step:5894 train loss:3.646350 +step:5895 train loss:3.647672 +step:5896 train loss:3.663758 +step:5897 train loss:3.729599 +step:5898 train loss:3.941962 +step:5899 train loss:3.688857 +step:5900 train loss:3.730197 +step:5901 train loss:3.677339 +step:5902 train loss:3.695623 +step:5903 train loss:3.685429 +step:5904 train loss:3.712158 +step:5905 train 
loss:3.816674 +step:5906 train loss:3.757077 +step:5907 train loss:3.700742 +step:5908 train loss:3.682257 +step:5909 train loss:3.667885 +step:5910 train loss:3.655378 +step:5911 train loss:3.675710 +step:5912 train loss:3.680718 +step:5913 train loss:3.712996 +step:5914 train loss:3.690832 +step:5915 train loss:3.814477 +step:5916 train loss:3.698007 +step:5917 train loss:3.668876 +step:5918 train loss:3.663656 +step:5919 train loss:3.690033 +step:5920 train loss:3.690190 +step:5921 train loss:3.663525 +step:5922 train loss:3.719110 +step:5923 train loss:3.713205 +step:5924 train loss:3.666959 +step:5925 train loss:3.791818 +step:5926 train loss:3.682884 +step:5927 train loss:3.655258 +step:5928 train loss:3.690373 +step:5929 train loss:3.709640 +step:5930 train loss:3.660158 +step:5931 train loss:3.644720 +step:5932 train loss:3.683054 +step:5933 train loss:3.736390 +step:5934 train loss:3.654668 +step:5935 train loss:3.678320 +step:5936 train loss:3.671209 +step:5937 train loss:3.652339 +step:5938 train loss:3.664363 +step:5939 train loss:3.645386 +step:5940 train loss:3.728170 +step:5941 train loss:3.662659 +step:5942 train loss:3.678365 +step:5943 train loss:3.681876 +step:5944 train loss:3.736474 +step:5945 train loss:3.667408 +step:5946 train loss:3.650124 +step:5947 train loss:3.662217 +step:5948 train loss:3.696603 +step:5949 train loss:3.744941 +step:5950 train loss:3.705914 +step:5951 train loss:3.704577 +step:5952 train loss:3.632305 +step:5953 train loss:3.675966 +step:5954 train loss:3.681412 +step:5955 train loss:3.690158 +step:5956 train loss:3.666743 +step:5957 train loss:3.634509 +step:5958 train loss:3.706852 +step:5959 train loss:3.667876 +step:5960 train loss:3.642534 +step:5961 train loss:3.665027 +step:5962 train loss:3.698306 +step:5963 train loss:3.734696 +step:5964 train loss:3.690085 +step:5965 train loss:3.705120 +step:5966 train loss:3.700500 +step:5967 train loss:3.665508 +step:5968 train loss:3.737023 +step:5969 train loss:3.682440 +step:5970 train loss:3.699610 +step:5971 train loss:3.649864 +step:5972 train loss:3.674623 +step:5973 train loss:3.670308 +step:5974 train loss:3.689588 +step:5975 train loss:3.659181 +step:5976 train loss:3.697093 +step:5977 train loss:3.656954 +step:5978 train loss:3.643677 +step:5979 train loss:3.679271 +step:5980 train loss:3.746451 +step:5981 train loss:3.646169 +step:5982 train loss:3.655121 +step:5983 train loss:3.722412 +step:5984 train loss:3.666114 +step:5985 train loss:3.707004 +step:5986 train loss:3.683779 +step:5987 train loss:3.668659 +step:5988 train loss:3.676983 +step:5989 train loss:3.694645 +step:5990 train loss:3.625367 +step:5991 train loss:3.689009 +step:5992 train loss:3.723636 +step:5993 train loss:3.676538 +step:5994 train loss:3.694403 +step:5995 train loss:3.583689 +step:5996 train loss:3.751237 +step:5997 train loss:3.733205 +step:5998 train loss:3.609838 +step:5999 train loss:3.637449 +step:6000 validation loss:3.613995 total_sharp:6.6941e-03 L1_sharp:6.0878e-01 L2_sharp:1.9842e-01 L3_sharp:2.1374e-01 L4_sharp:9.4034e-02 L5_sharp:8.3552e-02 L6_sharp:7.7781e-02 L7_sharp:9.7012e-02 L8_sharp:1.0057e-01 L9_sharp:9.1447e-02 L10_sharp:7.4962e-02 L11_sharp:8.0499e-02 L12_sharp:1.9685e-01 total_fnorm:1.2959e+00 total_l1_linf:7.1663e+03 total_spectral:1.2959e+00 L1_fnorm:3.2262e-02 L2_fnorm:3.0634e-02 L3_fnorm:3.0202e-02 L4_fnorm:3.1259e-02 L5_fnorm:3.1682e-02 L6_fnorm:3.1600e-02 L7_fnorm:3.1749e-02 L8_fnorm:3.1727e-02 L9_fnorm:3.1728e-02 L10_fnorm:3.1778e-02 L11_fnorm:3.2001e-02 L12_fnorm:3.2095e-02 
L1_l1linf:3.3447e-01 L2_l1linf:3.1631e-01 L3_l1linf:3.1677e-01 L4_l1linf:3.2605e-01 L5_l1linf:2.8622e-01 L6_l1linf:2.5107e-01 L7_l1linf:2.4202e-01 L8_l1linf:2.4530e-01 L9_l1linf:2.6510e-01 L10_l1linf:2.8109e-01 L11_l1linf:3.2054e-01 L12_l1linf:3.4060e-01 L1_spectral:7.3250e-03 L2_spectral:7.1547e-03 L3_spectral:7.0937e-03 L4_spectral:7.2947e-03 L5_spectral:6.4807e-03 L6_spectral:5.6740e-03 L7_spectral:5.4778e-03 L8_spectral:5.5459e-03 L9_spectral:5.9669e-03 L10_spectral:6.3265e-03 L11_spectral:7.2193e-03 L12_spectral:7.5021e-03 ip_v_neg_g:5.0762e-03 cos_v_neg_g:6.9158e-04 v_norm:1.2959e+00 g_norm:5.6642e+00 hv_norm:1.7373e+00 cos_v_hv:4.9932e-03 hg_norm:9.1836e+02 cos_g_hg:4.6702e-01 v_par:2.4394e-05 v_perp:1.2959e+00 L1_cos_v_neg_g:1.1830e-02 L1_v_norm:3.2262e-02 L2_cos_v_neg_g:1.3016e-02 L2_v_norm:3.0634e-02 L3_cos_v_neg_g:1.3151e-02 L3_v_norm:3.0202e-02 L4_cos_v_neg_g:8.7097e-03 L4_v_norm:3.1259e-02 L5_cos_v_neg_g:7.7023e-03 L5_v_norm:3.1682e-02 L6_cos_v_neg_g:7.0838e-03 L6_v_norm:3.1600e-02 L7_cos_v_neg_g:6.2970e-03 L7_v_norm:3.1749e-02 L8_cos_v_neg_g:5.5315e-03 L8_v_norm:3.1727e-02 L9_cos_v_neg_g:4.9326e-03 L9_v_norm:3.1728e-02 L10_cos_v_neg_g:5.2068e-03 L10_v_norm:3.1778e-02 L11_cos_v_neg_g:5.2152e-03 L11_v_norm:3.2001e-02 L12_cos_v_neg_g:6.1000e-03 L12_v_norm:3.2095e-02 +step:6000 train loss:3.691605 +step:6001 train loss:3.651860 +step:6002 train loss:3.679554 +step:6003 train loss:3.708105 +step:6004 train loss:3.653219 +step:6005 train loss:3.726037 +step:6006 train loss:3.629236 +step:6007 train loss:3.652852 +step:6008 train loss:3.667189 +step:6009 train loss:3.705217 +step:6010 train loss:3.695708 +step:6011 train loss:3.681584 +step:6012 train loss:3.650480 +step:6013 train loss:3.713091 +step:6014 train loss:3.731603 +step:6015 train loss:3.727259 +step:6016 train loss:3.696256 +step:6017 train loss:3.701403 +step:6018 train loss:3.641568 +step:6019 train loss:3.677470 +step:6020 train loss:3.662938 +step:6021 train loss:3.593435 +step:6022 train loss:3.703064 +step:6023 train loss:3.641625 +step:6024 train loss:3.717837 +step:6025 train loss:3.686523 +step:6026 train loss:3.656074 +step:6027 train loss:3.698937 +step:6028 train loss:3.616357 +step:6029 train loss:3.724695 +step:6030 train loss:3.698447 +step:6031 train loss:3.669544 +step:6032 train loss:3.626826 +step:6033 train loss:3.683645 +step:6034 train loss:3.711575 +step:6035 train loss:3.629226 +step:6036 train loss:3.602763 +step:6037 train loss:3.713346 +step:6038 train loss:3.722815 +step:6039 train loss:3.703420 +step:6040 train loss:3.662743 +step:6041 train loss:3.647939 +step:6042 train loss:3.621305 +step:6043 train loss:3.681546 +step:6044 train loss:3.805850 +step:6045 train loss:3.645962 +step:6046 train loss:3.654031 +step:6047 train loss:3.691772 +step:6048 train loss:3.700022 +step:6049 train loss:3.677270 +step:6050 train loss:3.644078 +step:6051 train loss:3.697984 +step:6052 train loss:3.673072 +step:6053 train loss:3.784837 +step:6054 train loss:3.823133 +step:6055 train loss:3.640627 +step:6056 train loss:3.631219 +step:6057 train loss:3.666906 +step:6058 train loss:3.697660 +step:6059 train loss:3.698215 +step:6060 train loss:3.702733 +step:6061 train loss:3.722711 +step:6062 train loss:3.668217 +step:6063 train loss:3.687093 +step:6064 train loss:3.684516 +step:6065 train loss:3.680118 +step:6066 train loss:3.671460 +step:6067 train loss:3.710016 +step:6068 train loss:3.648874 +step:6069 train loss:3.606373 +step:6070 train loss:3.749938 +step:6071 train loss:3.698529 +step:6072 train 
loss:3.640739 +step:6073 train loss:3.677789 +step:6074 train loss:3.766976 +step:6075 train loss:3.686723 +step:6076 train loss:3.693381 +step:6077 train loss:3.700325 +step:6078 train loss:3.634339 +step:6079 train loss:3.662143 +step:6080 train loss:3.669611 +step:6081 train loss:3.706137 +step:6082 train loss:3.658625 +step:6083 train loss:3.668701 +step:6084 train loss:3.730746 +step:6085 train loss:3.729591 +step:6086 train loss:3.629356 +step:6087 train loss:3.674193 +step:6088 train loss:3.659734 +step:6089 train loss:3.719491 +step:6090 train loss:3.721488 +step:6091 train loss:3.667305 +step:6092 train loss:3.634058 +step:6093 train loss:3.692290 +step:6094 train loss:3.606050 +step:6095 train loss:3.771325 +step:6096 train loss:3.640792 +step:6097 train loss:3.719512 +step:6098 train loss:3.697267 +step:6099 train loss:3.751081 +step:6100 train loss:3.744464 +step:6101 train loss:3.680852 +step:6102 train loss:3.791651 +step:6103 train loss:3.684695 +step:6104 train loss:3.791078 +step:6105 train loss:3.722251 +step:6106 train loss:3.660153 +step:6107 train loss:3.726383 +step:6108 train loss:3.688952 +step:6109 train loss:3.761917 +step:6110 train loss:3.691655 +step:6111 train loss:3.726777 +step:6112 train loss:3.663912 +step:6113 train loss:3.692111 +step:6114 train loss:3.665060 +step:6115 train loss:3.731867 +step:6116 train loss:3.671273 +step:6117 train loss:3.721199 +step:6118 train loss:3.701885 +step:6119 train loss:3.714688 +step:6120 train loss:3.855360 +step:6121 train loss:3.690318 +step:6122 train loss:3.702182 +step:6123 train loss:3.687081 +step:6124 train loss:3.657805 +step:6125 train loss:3.649348 +step:6126 train loss:3.672150 +step:6127 train loss:3.660667 +step:6128 train loss:3.626904 +step:6129 train loss:3.860215 +step:6130 train loss:3.644515 +step:6131 train loss:3.618107 +step:6132 train loss:3.690726 +step:6133 train loss:3.656650 +step:6134 train loss:3.685110 +step:6135 train loss:3.766402 +step:6136 train loss:3.791236 +step:6137 train loss:3.649971 +step:6138 train loss:3.708009 +step:6139 train loss:3.686091 +step:6140 train loss:3.684850 +step:6141 train loss:3.642592 +step:6142 train loss:3.705056 +step:6143 train loss:3.671969 +step:6144 train loss:3.693437 +step:6145 train loss:3.932896 +step:6146 train loss:3.775170 +step:6147 train loss:3.858135 +step:6148 train loss:3.628784 +step:6149 train loss:3.752560 +step:6150 train loss:3.709991 +step:6151 train loss:3.660986 +step:6152 train loss:3.655972 +step:6153 train loss:3.725890 +step:6154 train loss:3.811868 +step:6155 train loss:3.677280 +step:6156 train loss:3.770848 +step:6157 train loss:3.701613 +step:6158 train loss:3.694539 +step:6159 train loss:3.663499 +step:6160 train loss:3.829556 +step:6161 train loss:3.679708 +step:6162 train loss:3.698859 +step:6163 train loss:3.730289 +step:6164 train loss:3.642948 +step:6165 train loss:3.712135 +step:6166 train loss:3.701427 +step:6167 train loss:3.717630 +step:6168 train loss:3.700048 +step:6169 train loss:3.692616 +step:6170 train loss:3.693066 +step:6171 train loss:3.662579 +step:6172 train loss:3.647631 +step:6173 train loss:3.702708 +step:6174 train loss:3.633462 +step:6175 train loss:3.639801 +step:6176 train loss:3.624998 +step:6177 train loss:3.722659 +step:6178 train loss:3.664795 +step:6179 train loss:3.670126 +step:6180 train loss:3.680277 +step:6181 train loss:3.712703 +step:6182 train loss:3.595943 +step:6183 train loss:3.601052 +step:6184 train loss:3.724223 +step:6185 train loss:3.680475 +step:6186 train loss:3.639504 
+step:6187 train loss:3.680898 +step:6188 train loss:3.646624 +step:6189 train loss:3.688937 +step:6190 train loss:3.648260 +step:6191 train loss:3.681632 +step:6192 train loss:3.647973 +step:6193 train loss:3.714034 +step:6194 train loss:3.702082 +step:6195 train loss:3.682901 +step:6196 train loss:3.696829 +step:6197 train loss:3.723447 +step:6198 train loss:3.637127 +step:6199 train loss:3.659914 +step:6200 train loss:3.701478 +step:6201 train loss:3.746904 +step:6202 train loss:3.745077 +step:6203 train loss:3.744818 +step:6204 train loss:3.728912 +step:6205 train loss:3.668712 +step:6206 train loss:3.657585 +step:6207 train loss:3.713793 +step:6208 train loss:3.736258 +step:6209 train loss:3.710163 +step:6210 train loss:3.736712 +step:6211 train loss:3.656136 +step:6212 train loss:3.653324 +step:6213 train loss:3.664809 +step:6214 train loss:3.639411 +step:6215 train loss:3.812839 +step:6216 train loss:3.682518 +step:6217 train loss:3.742292 +step:6218 train loss:3.715529 +step:6219 train loss:3.726538 +step:6220 train loss:3.685957 +step:6221 train loss:3.652718 +step:6222 train loss:3.890615 +step:6223 train loss:3.654037 +step:6224 train loss:3.689250 +step:6225 train loss:3.662241 +step:6226 train loss:3.675189 +step:6227 train loss:3.677258 +step:6228 train loss:3.671684 +step:6229 train loss:3.712109 +step:6230 train loss:3.668225 +step:6231 train loss:3.780984 +step:6232 train loss:3.620651 +step:6233 train loss:3.664510 +step:6234 train loss:3.672019 +step:6235 train loss:3.699443 +step:6236 train loss:3.636315 +step:6237 train loss:3.660312 +step:6238 train loss:3.683013 +step:6239 train loss:3.667092 +step:6240 train loss:3.688540 +step:6241 train loss:3.674533 +step:6242 train loss:3.672348 +step:6243 train loss:3.706179 +step:6244 train loss:3.862468 +step:6245 train loss:3.661996 +step:6246 train loss:3.643823 +step:6247 train loss:3.642504 +step:6248 train loss:3.646873 +step:6249 train loss:3.584969 +step:6250 validation loss:3.601656 +step:6250 train loss:3.621507 +step:6251 train loss:3.637347 +step:6252 train loss:3.685966 +step:6253 train loss:3.695602 +step:6254 train loss:3.682005 +step:6255 train loss:3.649286 +step:6256 train loss:3.703452 +step:6257 train loss:3.700148 +step:6258 train loss:3.680323 +step:6259 train loss:3.682213 +step:6260 train loss:3.709706 +step:6261 train loss:3.735409 +step:6262 train loss:3.630834 +step:6263 train loss:3.661345 +step:6264 train loss:3.672047 +step:6265 train loss:3.655425 +step:6266 train loss:3.856381 +step:6267 train loss:3.666373 +step:6268 train loss:3.758101 +step:6269 train loss:3.626944 +step:6270 train loss:3.638363 +step:6271 train loss:3.690063 +step:6272 train loss:3.681064 +step:6273 train loss:3.875025 +step:6274 train loss:3.657684 +step:6275 train loss:3.687925 +step:6276 train loss:3.665839 +step:6277 train loss:3.647207 +step:6278 train loss:3.630946 +step:6279 train loss:3.685544 +step:6280 train loss:3.690093 +step:6281 train loss:3.624926 +step:6282 train loss:3.636075 +step:6283 train loss:3.723028 +step:6284 train loss:3.696769 +step:6285 train loss:3.695881 +step:6286 train loss:3.641934 +step:6287 train loss:3.665714 +step:6288 train loss:3.765999 +step:6289 train loss:3.633457 +step:6290 train loss:3.623609 +step:6291 train loss:3.658581 +step:6292 train loss:3.675374 +step:6293 train loss:3.665370 +step:6294 train loss:3.650990 +step:6295 train loss:3.672647 +step:6296 train loss:3.636120 +step:6297 train loss:3.762628 +step:6298 train loss:3.710985 +step:6299 train loss:3.605213 +step:6300 
train loss:3.683128 +step:6301 train loss:3.713919 +step:6302 train loss:3.697767 +step:6303 train loss:3.664103 +step:6304 train loss:3.676866 +step:6305 train loss:3.648901 +step:6306 train loss:3.666176 +step:6307 train loss:3.675996 +step:6308 train loss:3.648475 +step:6309 train loss:3.641335 +step:6310 train loss:3.698250 +step:6311 train loss:3.652740 +step:6312 train loss:3.695446 +step:6313 train loss:3.624884 +step:6314 train loss:3.649089 +step:6315 train loss:3.704586 +step:6316 train loss:3.622881 +step:6317 train loss:3.625138 +step:6318 train loss:3.730859 +step:6319 train loss:3.660805 +step:6320 train loss:3.682874 +step:6321 train loss:3.661949 +step:6322 train loss:3.660650 +step:6323 train loss:3.602590 +step:6324 train loss:3.603861 +step:6325 train loss:3.705026 +step:6326 train loss:3.624015 +step:6327 train loss:3.696335 +step:6328 train loss:3.675353 +step:6329 train loss:3.594752 +step:6330 train loss:3.624133 +step:6331 train loss:3.640057 +step:6332 train loss:3.772196 +step:6333 train loss:3.656387 +step:6334 train loss:3.628860 +step:6335 train loss:3.601668 +step:6336 train loss:3.633789 +step:6337 train loss:3.652951 +step:6338 train loss:3.616516 +step:6339 train loss:3.659042 +step:6340 train loss:3.637683 +step:6341 train loss:3.653199 +step:6342 train loss:3.646806 +step:6343 train loss:3.747342 +step:6344 train loss:3.594672 +step:6345 train loss:3.614035 +step:6346 train loss:3.698199 +step:6347 train loss:3.565229 +step:6348 train loss:3.665601 +step:6349 train loss:3.638430 +step:6350 train loss:3.614768 +step:6351 train loss:3.614125 +step:6352 train loss:3.625127 +step:6353 train loss:3.651721 +step:6354 train loss:3.662402 +step:6355 train loss:3.668169 +step:6356 train loss:3.682984 +step:6357 train loss:3.532426 +step:6358 train loss:3.630095 +step:6359 train loss:3.684704 +step:6360 train loss:3.595824 +step:6361 train loss:3.605749 +step:6362 train loss:3.635725 +step:6363 train loss:3.620031 +step:6364 train loss:3.603934 +step:6365 train loss:3.670270 +step:6366 train loss:3.688474 +step:6367 train loss:3.606695 +step:6368 train loss:3.661208 +step:6369 train loss:3.624642 +step:6370 train loss:3.674899 +step:6371 train loss:3.594777 +step:6372 train loss:3.621953 +step:6373 train loss:3.654505 +step:6374 train loss:3.678425 +step:6375 train loss:3.637836 +step:6376 train loss:3.664510 +step:6377 train loss:3.656519 +step:6378 train loss:3.612104 +step:6379 train loss:3.652638 +step:6380 train loss:3.690527 +step:6381 train loss:3.661835 +step:6382 train loss:3.609375 +step:6383 train loss:3.684919 +step:6384 train loss:3.644771 +step:6385 train loss:3.631372 +step:6386 train loss:3.667231 +step:6387 train loss:3.640150 +step:6388 train loss:3.688387 +step:6389 train loss:3.687062 +step:6390 train loss:3.643077 +step:6391 train loss:3.627174 +step:6392 train loss:3.613288 +step:6393 train loss:3.669791 +step:6394 train loss:3.654016 +step:6395 train loss:3.831121 +step:6396 train loss:3.659385 +step:6397 train loss:3.600584 +step:6398 train loss:3.669889 +step:6399 train loss:3.611406 +step:6400 train loss:3.690757 +step:6401 train loss:3.716279 +step:6402 train loss:3.658475 +step:6403 train loss:3.646068 +step:6404 train loss:3.621807 +step:6405 train loss:3.651363 +step:6406 train loss:3.656144 +step:6407 train loss:3.718524 +step:6408 train loss:3.604872 +step:6409 train loss:3.596981 +step:6410 train loss:3.719146 +step:6411 train loss:3.654696 +step:6412 train loss:3.661119 +step:6413 train loss:3.658911 +step:6414 train loss:3.611650 
+step:6415 train loss:3.665578 +step:6416 train loss:3.642063 +step:6417 train loss:3.611179 +step:6418 train loss:3.605690 +step:6419 train loss:3.692316 +step:6420 train loss:3.614801 +step:6421 train loss:3.644484 +step:6422 train loss:3.625291 +step:6423 train loss:3.643604 +step:6424 train loss:3.662681 +step:6425 train loss:3.659476 +step:6426 train loss:3.699400 +step:6427 train loss:3.662465 +step:6428 train loss:3.704905 +step:6429 train loss:3.659254 +step:6430 train loss:3.641384 +step:6431 train loss:3.611027 +step:6432 train loss:3.647908 +step:6433 train loss:3.661933 +step:6434 train loss:3.549430 +step:6435 train loss:3.722238 +step:6436 train loss:3.654500 +step:6437 train loss:3.620984 +step:6438 train loss:3.646484 +step:6439 train loss:3.624629 +step:6440 train loss:3.636378 +step:6441 train loss:3.634383 +step:6442 train loss:3.573529 +step:6443 train loss:3.624323 +step:6444 train loss:3.770526 +step:6445 train loss:3.668086 +step:6446 train loss:3.676928 +step:6447 train loss:3.654707 +step:6448 train loss:3.605324 +step:6449 train loss:3.631032 +step:6450 train loss:3.608721 +step:6451 train loss:3.602527 +step:6452 train loss:3.601266 +step:6453 train loss:3.650873 +step:6454 train loss:3.668222 +step:6455 train loss:3.660377 +step:6456 train loss:3.674992 +step:6457 train loss:3.658263 +step:6458 train loss:3.631432 +step:6459 train loss:3.610030 +step:6460 train loss:3.620301 +step:6461 train loss:3.616713 +step:6462 train loss:3.613207 +step:6463 train loss:3.710252 +step:6464 train loss:3.613716 +step:6465 train loss:3.659543 +step:6466 train loss:3.671313 +step:6467 train loss:3.599974 +step:6468 train loss:3.674566 +step:6469 train loss:3.589829 +step:6470 train loss:3.704890 +step:6471 train loss:3.621227 +step:6472 train loss:3.774096 +step:6473 train loss:3.657434 +step:6474 train loss:3.687815 +step:6475 train loss:3.626657 +step:6476 train loss:3.705417 +step:6477 train loss:3.629863 +step:6478 train loss:3.764041 +step:6479 train loss:3.677413 +step:6480 train loss:3.618120 +step:6481 train loss:3.671191 +step:6482 train loss:3.612067 +step:6483 train loss:3.675202 +step:6484 train loss:3.627300 +step:6485 train loss:3.691901 +step:6486 train loss:3.620752 +step:6487 train loss:3.627664 +step:6488 train loss:3.616917 +step:6489 train loss:3.621452 +step:6490 train loss:3.641271 +step:6491 train loss:3.610317 +step:6492 train loss:3.718649 +step:6493 train loss:3.621775 +step:6494 train loss:3.629136 +step:6495 train loss:3.621482 +step:6496 train loss:3.661537 +step:6497 train loss:3.676677 +step:6498 train loss:3.786535 +step:6499 train loss:3.751632 +step:6500 validation loss:3.591309 total_sharp:5.2200e-03 L1_sharp:1.0446e-01 L2_sharp:8.4110e-02 L3_sharp:1.2200e-01 L4_sharp:6.5757e-02 L5_sharp:7.4189e-02 L6_sharp:9.8481e-02 L7_sharp:1.3574e-01 L8_sharp:1.1354e-01 L9_sharp:9.9357e-02 L10_sharp:7.4784e-02 L11_sharp:6.4308e-02 L12_sharp:1.0983e-01 total_fnorm:1.3204e+00 total_l1_linf:7.3035e+03 total_spectral:1.3204e+00 L1_fnorm:3.1908e-02 L2_fnorm:3.0764e-02 L3_fnorm:3.0553e-02 L4_fnorm:3.1167e-02 L5_fnorm:3.1620e-02 L6_fnorm:3.1992e-02 L7_fnorm:3.2032e-02 L8_fnorm:3.1935e-02 L9_fnorm:3.1952e-02 L10_fnorm:3.1999e-02 L11_fnorm:3.2027e-02 L12_fnorm:3.2099e-02 L1_l1linf:2.8287e-01 L2_l1linf:3.0976e-01 L3_l1linf:3.1015e-01 L4_l1linf:2.9956e-01 L5_l1linf:3.0212e-01 L6_l1linf:2.9525e-01 L7_l1linf:2.8265e-01 L8_l1linf:2.7392e-01 L9_l1linf:2.8489e-01 L10_l1linf:2.9790e-01 L11_l1linf:3.1523e-01 L12_l1linf:3.2266e-01 L1_spectral:6.4641e-03 L2_spectral:7.0551e-03 
L3_spectral:6.9355e-03 L4_spectral:6.8022e-03 L5_spectral:6.7963e-03 L6_spectral:6.5543e-03 L7_spectral:6.3587e-03 L8_spectral:6.1377e-03 L9_spectral:6.4207e-03 L10_spectral:6.7202e-03 L11_spectral:7.0941e-03 L12_spectral:7.2457e-03 ip_v_neg_g:3.4162e-03 cos_v_neg_g:4.6860e-04 v_norm:1.3204e+00 g_norm:5.5213e+00 hv_norm:1.4111e+00 cos_v_hv:4.8844e-03 hg_norm:7.1655e+02 cos_g_hg:4.7580e-01 v_par:1.8218e-05 v_perp:1.3204e+00 L1_cos_v_neg_g:5.5568e-03 L1_v_norm:3.1908e-02 L2_cos_v_neg_g:3.9934e-03 L2_v_norm:3.0764e-02 L3_cos_v_neg_g:3.2395e-03 L3_v_norm:3.0553e-02 L4_cos_v_neg_g:3.9707e-03 L4_v_norm:3.1167e-02 L5_cos_v_neg_g:5.3697e-03 L5_v_norm:3.1620e-02 L6_cos_v_neg_g:5.9933e-03 L6_v_norm:3.1992e-02 L7_cos_v_neg_g:6.0587e-03 L7_v_norm:3.2032e-02 L8_cos_v_neg_g:6.3437e-03 L8_v_norm:3.1935e-02 L9_cos_v_neg_g:6.3360e-03 L9_v_norm:3.1952e-02 L10_cos_v_neg_g:5.6200e-03 L10_v_norm:3.1999e-02 L11_cos_v_neg_g:4.6924e-03 L11_v_norm:3.2027e-02 L12_cos_v_neg_g:4.0896e-03 L12_v_norm:3.2099e-02 +step:6500 train loss:3.603999 +step:6501 train loss:3.618521 +step:6502 train loss:3.635647 +step:6503 train loss:3.698009 +step:6504 train loss:3.645465 +step:6505 train loss:3.655280 +step:6506 train loss:3.611849 +step:6507 train loss:3.682019 +step:6508 train loss:3.642217 +step:6509 train loss:3.633244 +step:6510 train loss:3.635223 +step:6511 train loss:3.657513 +step:6512 train loss:3.595219 +step:6513 train loss:3.664039 +step:6514 train loss:3.534644 +step:6515 train loss:3.633580 +step:6516 train loss:3.679369 +step:6517 train loss:3.592181 +step:6518 train loss:3.632125 +step:6519 train loss:3.619485 +step:6520 train loss:3.713214 +step:6521 train loss:3.684883 +step:6522 train loss:3.698858 +step:6523 train loss:3.589551 +step:6524 train loss:3.677094 +step:6525 train loss:3.657859 +step:6526 train loss:3.607954 +step:6527 train loss:3.650705 +step:6528 train loss:3.673168 +step:6529 train loss:3.702054 +step:6530 train loss:3.604959 +step:6531 train loss:3.691644 +step:6532 train loss:3.611981 +step:6533 train loss:3.652357 +step:6534 train loss:3.657310 +step:6535 train loss:3.639915 +step:6536 train loss:3.765047 +step:6537 train loss:3.573423 +step:6538 train loss:3.682675 +step:6539 train loss:3.610723 +step:6540 train loss:3.720686 +step:6541 train loss:3.699620 +step:6542 train loss:3.659223 +step:6543 train loss:3.611151 +step:6544 train loss:3.597007 +step:6545 train loss:3.583830 +step:6546 train loss:3.645896 +step:6547 train loss:3.697591 +step:6548 train loss:3.647559 +step:6549 train loss:3.656608 +step:6550 train loss:3.772336 +step:6551 train loss:3.645031 +step:6552 train loss:3.645296 +step:6553 train loss:3.681007 +step:6554 train loss:3.572475 +step:6555 train loss:3.654721 +step:6556 train loss:3.529530 +step:6557 train loss:3.876630 +step:6558 train loss:3.706946 +step:6559 train loss:3.623380 +step:6560 train loss:3.658304 +step:6561 train loss:3.635099 +step:6562 train loss:3.649971 +step:6563 train loss:3.546720 +step:6564 train loss:3.644594 +step:6565 train loss:3.553997 +step:6566 train loss:3.668103 +step:6567 train loss:3.638652 +step:6568 train loss:3.684178 +step:6569 train loss:3.628975 +step:6570 train loss:3.667217 +step:6571 train loss:3.597673 +step:6572 train loss:3.670589 +step:6573 train loss:3.678102 +step:6574 train loss:3.668400 +step:6575 train loss:3.611967 +step:6576 train loss:3.606469 +step:6577 train loss:3.670680 +step:6578 train loss:3.544033 +step:6579 train loss:3.643057 +step:6580 train loss:3.604353 +step:6581 train loss:3.612872 +step:6582 
train loss:3.593965 +step:6583 train loss:3.688781 +step:6584 train loss:3.620225 +step:6585 train loss:3.659330 +step:6586 train loss:3.665207 +step:6587 train loss:3.676156 +step:6588 train loss:3.639463 +step:6589 train loss:3.672326 +step:6590 train loss:3.610480 +step:6591 train loss:3.667290 +step:6592 train loss:3.601584 +step:6593 train loss:3.615999 +step:6594 train loss:3.638488 +step:6595 train loss:3.623578 +step:6596 train loss:3.622204 +step:6597 train loss:3.646297 +step:6598 train loss:3.681860 +step:6599 train loss:3.579345 +step:6600 train loss:3.630291 +step:6601 train loss:3.692939 +step:6602 train loss:3.613008 +step:6603 train loss:3.644676 +step:6604 train loss:3.653594 +step:6605 train loss:3.635604 +step:6606 train loss:3.696777 +step:6607 train loss:3.616639 +step:6608 train loss:3.633004 +step:6609 train loss:3.599541 +step:6610 train loss:3.708418 +step:6611 train loss:3.631516 +step:6612 train loss:3.675835 +step:6613 train loss:3.595609 +step:6614 train loss:3.621154 +step:6615 train loss:3.621744 +step:6616 train loss:3.600154 +step:6617 train loss:3.645013 +step:6618 train loss:3.626635 +step:6619 train loss:3.597189 +step:6620 train loss:3.706146 +step:6621 train loss:3.584874 +step:6622 train loss:3.656410 +step:6623 train loss:3.588146 +step:6624 train loss:3.661050 +step:6625 train loss:3.699909 +step:6626 train loss:3.663133 +step:6627 train loss:3.618974 +step:6628 train loss:3.672103 +step:6629 train loss:3.576486 +step:6630 train loss:3.612649 +step:6631 train loss:3.649831 +step:6632 train loss:3.687716 +step:6633 train loss:3.638876 +step:6634 train loss:3.699998 +step:6635 train loss:3.602930 +step:6636 train loss:3.643776 +step:6637 train loss:3.607455 +step:6638 train loss:3.609109 +step:6639 train loss:3.619195 +step:6640 train loss:3.606965 +step:6641 train loss:3.624222 +step:6642 train loss:3.626577 +step:6643 train loss:3.700161 +step:6644 train loss:3.708651 +step:6645 train loss:3.580931 +step:6646 train loss:3.673182 +step:6647 train loss:3.628239 +step:6648 train loss:3.732122 +step:6649 train loss:3.659674 +step:6650 train loss:3.612520 +step:6651 train loss:3.653562 +step:6652 train loss:3.672095 +step:6653 train loss:3.608572 +step:6654 train loss:3.612931 +step:6655 train loss:3.648511 +step:6656 train loss:3.622934 +step:6657 train loss:3.644327 +step:6658 train loss:3.630459 +step:6659 train loss:3.780872 +step:6660 train loss:3.679465 +step:6661 train loss:3.605880 +step:6662 train loss:3.635684 +step:6663 train loss:3.569648 +step:6664 train loss:3.653138 +step:6665 train loss:3.658793 +step:6666 train loss:3.670620 +step:6667 train loss:3.587177 +step:6668 train loss:3.722557 +step:6669 train loss:3.596654 +step:6670 train loss:3.610312 +step:6671 train loss:3.689679 +step:6672 train loss:3.645951 +step:6673 train loss:3.650540 +step:6674 train loss:3.628851 +step:6675 train loss:3.638964 +step:6676 train loss:3.654277 +step:6677 train loss:3.608405 +step:6678 train loss:3.682644 +step:6679 train loss:3.715020 +step:6680 train loss:3.714548 +step:6681 train loss:3.668907 +step:6682 train loss:3.612005 +step:6683 train loss:3.637981 +step:6684 train loss:3.644787 +step:6685 train loss:3.656820 +step:6686 train loss:3.598183 +step:6687 train loss:3.612340 +step:6688 train loss:3.657709 +step:6689 train loss:3.662197 +step:6690 train loss:3.645290 +step:6691 train loss:3.674133 +step:6692 train loss:3.679210 +step:6693 train loss:3.709659 +step:6694 train loss:3.668782 +step:6695 train loss:3.638348 +step:6696 train loss:3.580886 
+step:6697 train loss:3.787962 +step:6698 train loss:3.637963 +step:6699 train loss:3.635168 +step:6700 train loss:3.647229 +step:6701 train loss:3.704969 +step:6702 train loss:3.597176 +step:6703 train loss:3.643287 +step:6704 train loss:3.626032 +step:6705 train loss:3.635265 +step:6706 train loss:3.616117 +step:6707 train loss:3.687076 +step:6708 train loss:3.642630 +step:6709 train loss:3.673701 +step:6710 train loss:3.660543 +step:6711 train loss:3.613251 +step:6712 train loss:3.601893 +step:6713 train loss:3.627835 +step:6714 train loss:3.668670 +step:6715 train loss:3.612735 +step:6716 train loss:3.690464 +step:6717 train loss:3.634993 +step:6718 train loss:3.661194 +step:6719 train loss:3.692084 +step:6720 train loss:3.620805 +step:6721 train loss:3.637747 +step:6722 train loss:3.614426 +step:6723 train loss:3.742334 +step:6724 train loss:3.600300 +step:6725 train loss:3.664777 +step:6726 train loss:3.614959 +step:6727 train loss:3.681347 +step:6728 train loss:3.773961 +step:6729 train loss:3.636554 +step:6730 train loss:3.634198 +step:6731 train loss:3.673907 +step:6732 train loss:3.551347 +step:6733 train loss:3.687345 +step:6734 train loss:3.615611 +step:6735 train loss:3.638102 +step:6736 train loss:3.643130 +step:6737 train loss:3.637659 +step:6738 train loss:3.670430 +step:6739 train loss:3.623174 +step:6740 train loss:3.575223 +step:6741 train loss:3.686758 +step:6742 train loss:3.648164 +step:6743 train loss:3.650888 +step:6744 train loss:3.545404 +step:6745 train loss:3.694005 +step:6746 train loss:3.630684 +step:6747 train loss:3.618795 +step:6748 train loss:3.694996 +step:6749 train loss:3.673166 +step:6750 validation loss:3.582914 +step:6750 train loss:3.598010 +step:6751 train loss:3.630539 +step:6752 train loss:3.636264 +step:6753 train loss:3.668265 +step:6754 train loss:3.648942 +step:6755 train loss:3.656962 +step:6756 train loss:3.604878 +step:6757 train loss:3.570290 +step:6758 train loss:3.751529 +step:6759 train loss:3.636595 +step:6760 train loss:3.695318 +step:6761 train loss:3.624495 +step:6762 train loss:3.652546 +step:6763 train loss:3.549496 +step:6764 train loss:3.630218 +step:6765 train loss:3.628034 +step:6766 train loss:3.625148 +step:6767 train loss:3.579494 +step:6768 train loss:3.584766 +step:6769 train loss:3.548698 +step:6770 train loss:3.636112 +step:6771 train loss:3.635624 +step:6772 train loss:3.642565 +step:6773 train loss:3.625766 +step:6774 train loss:3.637846 +step:6775 train loss:3.682474 +step:6776 train loss:3.634385 +step:6777 train loss:3.713185 +step:6778 train loss:3.594327 +step:6779 train loss:3.655140 +step:6780 train loss:3.580631 +step:6781 train loss:3.648582 +step:6782 train loss:3.556007 +step:6783 train loss:3.599967 +step:6784 train loss:3.621318 +step:6785 train loss:3.609611 +step:6786 train loss:3.622185 +step:6787 train loss:3.698375 +step:6788 train loss:3.635719 +step:6789 train loss:3.645980 +step:6790 train loss:3.644136 +step:6791 train loss:3.653701 +step:6792 train loss:3.653425 +step:6793 train loss:3.653126 +step:6794 train loss:3.621523 +step:6795 train loss:3.624211 +step:6796 train loss:3.626527 +step:6797 train loss:3.725228 +step:6798 train loss:3.629596 +step:6799 train loss:3.622500 +step:6800 train loss:3.588505 +step:6801 train loss:3.718248 +step:6802 train loss:3.667746 +step:6803 train loss:3.655655 +step:6804 train loss:3.687290 +step:6805 train loss:3.643339 +step:6806 train loss:3.581383 +step:6807 train loss:3.639029 +step:6808 train loss:3.622721 +step:6809 train loss:3.650431 +step:6810 
train loss:3.771367 +step:6811 train loss:3.671589 +step:6812 train loss:3.648963 +step:6813 train loss:3.658252 +step:6814 train loss:3.666584 +step:6815 train loss:3.709468 +step:6816 train loss:3.632190 +step:6817 train loss:3.653515 +step:6818 train loss:3.636491 +step:6819 train loss:3.613041 +step:6820 train loss:3.645362 +step:6821 train loss:3.608112 +step:6822 train loss:3.710565 +step:6823 train loss:3.691296 +step:6824 train loss:3.671999 +step:6825 train loss:3.614658 +step:6826 train loss:3.662353 +step:6827 train loss:3.643420 +step:6828 train loss:3.663821 +step:6829 train loss:3.650384 +step:6830 train loss:3.617707 +step:6831 train loss:3.577976 +step:6832 train loss:3.564507 +step:6833 train loss:3.583628 +step:6834 train loss:3.665586 +step:6835 train loss:3.642210 +step:6836 train loss:3.557548 +step:6837 train loss:3.626902 +step:6838 train loss:3.681971 +step:6839 train loss:3.767372 +step:6840 train loss:3.637823 +step:6841 train loss:3.603119 +step:6842 train loss:3.644047 +step:6843 train loss:3.750382 +step:6844 train loss:3.629289 +step:6845 train loss:3.687348 +step:6846 train loss:3.746659 +step:6847 train loss:3.676154 +step:6848 train loss:3.670108 +step:6849 train loss:3.691260 +step:6850 train loss:3.665913 +step:6851 train loss:3.590533 +step:6852 train loss:3.585907 +step:6853 train loss:3.569768 +step:6854 train loss:3.656743 +step:6855 train loss:3.623915 +step:6856 train loss:3.612656 +step:6857 train loss:3.657920 +step:6858 train loss:3.694968 +step:6859 train loss:3.597358 +step:6860 train loss:3.708322 +step:6861 train loss:3.730501 +step:6862 train loss:3.645271 +step:6863 train loss:3.641554 +step:6864 train loss:3.582346 +step:6865 train loss:3.660178 +step:6866 train loss:3.581760 +step:6867 train loss:3.760932 +step:6868 train loss:3.634916 +step:6869 train loss:3.673526 +step:6870 train loss:3.703255 +step:6871 train loss:3.618205 +step:6872 train loss:3.619216 +step:6873 train loss:3.637378 +step:6874 train loss:3.599350 +step:6875 train loss:3.604916 +step:6876 train loss:3.636629 +step:6877 train loss:3.673889 +step:6878 train loss:3.583910 +step:6879 train loss:3.635372 +step:6880 train loss:3.642314 +step:6881 train loss:3.604283 +step:6882 train loss:3.662470 +step:6883 train loss:3.653819 +step:6884 train loss:3.882098 +step:6885 train loss:3.651751 +step:6886 train loss:3.635620 +step:6887 train loss:3.569286 +step:6888 train loss:3.678977 +step:6889 train loss:3.556351 +step:6890 train loss:3.664527 +step:6891 train loss:3.673890 +step:6892 train loss:3.774560 +step:6893 train loss:3.606094 +step:6894 train loss:3.665048 +step:6895 train loss:3.669985 +step:6896 train loss:3.638391 +step:6897 train loss:3.598101 +step:6898 train loss:3.594467 +step:6899 train loss:3.683402 +step:6900 train loss:3.654312 +step:6901 train loss:3.605766 +step:6902 train loss:3.538840 +step:6903 train loss:3.589186 +step:6904 train loss:3.698357 +step:6905 train loss:3.726039 +step:6906 train loss:3.650107 +step:6907 train loss:3.671210 +step:6908 train loss:3.707623 +step:6909 train loss:3.698557 +step:6910 train loss:3.576083 +step:6911 train loss:3.701014 +step:6912 train loss:3.598015 +step:6913 train loss:3.628981 +step:6914 train loss:3.590798 +step:6915 train loss:3.615994 +step:6916 train loss:3.595712 +step:6917 train loss:3.714136 +step:6918 train loss:3.665213 +step:6919 train loss:3.652905 +step:6920 train loss:3.641540 +step:6921 train loss:3.706547 +step:6922 train loss:3.696985 +step:6923 train loss:3.562768 +step:6924 train loss:3.644059 
+step:6925 train loss:3.619159 +step:6926 train loss:3.654138 +step:6927 train loss:3.707985 +step:6928 train loss:3.593970 +step:6929 train loss:3.610844 +step:6930 train loss:3.639283 +step:6931 train loss:3.642856 +step:6932 train loss:3.869691 +step:6933 train loss:3.708302 +step:6934 train loss:3.648258 +step:6935 train loss:3.631002 +step:6936 train loss:3.672832 +step:6937 train loss:3.610622 +step:6938 train loss:3.682639 +step:6939 train loss:3.611775 +step:6940 train loss:3.666713 +step:6941 train loss:3.582125 +step:6942 train loss:3.671175 +step:6943 train loss:3.560026 +step:6944 train loss:3.662523 +step:6945 train loss:3.588084 +step:6946 train loss:3.682408 +step:6947 train loss:3.612012 +step:6948 train loss:3.606032 +step:6949 train loss:3.681614 +step:6950 train loss:3.668846 +step:6951 train loss:3.677839 +step:6952 train loss:3.607756 +step:6953 train loss:3.652212 +step:6954 train loss:3.711928 +step:6955 train loss:3.623294 +step:6956 train loss:3.663257 +step:6957 train loss:3.653697 +step:6958 train loss:3.617223 +step:6959 train loss:3.652729 +step:6960 train loss:3.622355 +step:6961 train loss:3.624840 +step:6962 train loss:3.610377 +step:6963 train loss:3.575022 +step:6964 train loss:3.626267 +step:6965 train loss:3.611345 +step:6966 train loss:3.663062 +step:6967 train loss:3.596797 +step:6968 train loss:3.634623 +step:6969 train loss:3.653452 +step:6970 train loss:3.627237 +step:6971 train loss:3.692651 +step:6972 train loss:3.637362 +step:6973 train loss:3.601074 +step:6974 train loss:3.723986 +step:6975 train loss:3.632900 +step:6976 train loss:3.604804 +step:6977 train loss:3.641075 +step:6978 train loss:3.636420 +step:6979 train loss:3.641275 +step:6980 train loss:3.623458 +step:6981 train loss:3.676181 +step:6982 train loss:3.634261 +step:6983 train loss:3.623031 +step:6984 train loss:3.739984 +step:6985 train loss:3.591279 +step:6986 train loss:3.577665 +step:6987 train loss:3.626573 +step:6988 train loss:3.632199 +step:6989 train loss:3.780840 +step:6990 train loss:3.640045 +step:6991 train loss:3.601586 +step:6992 train loss:3.645572 +step:6993 train loss:3.713335 +step:6994 train loss:3.662248 +step:6995 train loss:3.609518 +step:6996 train loss:3.613034 +step:6997 train loss:3.696437 +step:6998 train loss:3.596335 +step:6999 train loss:3.641026 +step:7000 validation loss:3.573135 total_sharp:5.4566e-03 L1_sharp:1.9555e-01 L2_sharp:1.0293e-01 L3_sharp:1.6320e-01 L4_sharp:7.4941e-02 L5_sharp:7.7883e-02 L6_sharp:8.6819e-02 L7_sharp:1.0620e-01 L8_sharp:1.1118e-01 L9_sharp:1.0150e-01 L10_sharp:7.4834e-02 L11_sharp:5.9875e-02 L12_sharp:1.0404e-01 total_fnorm:1.3261e+00 total_l1_linf:7.3253e+03 total_spectral:1.3261e+00 L1_fnorm:3.1875e-02 L2_fnorm:3.0820e-02 L3_fnorm:3.0854e-02 L4_fnorm:3.1418e-02 L5_fnorm:3.1940e-02 L6_fnorm:3.2097e-02 L7_fnorm:3.2163e-02 L8_fnorm:3.2185e-02 L9_fnorm:3.2285e-02 L10_fnorm:3.2314e-02 L11_fnorm:3.2369e-02 L12_fnorm:3.2667e-02 L1_l1linf:2.9838e-01 L2_l1linf:3.1849e-01 L3_l1linf:3.5087e-01 L4_l1linf:3.2891e-01 L5_l1linf:3.2066e-01 L6_l1linf:2.9023e-01 L7_l1linf:2.9223e-01 L8_l1linf:3.0168e-01 L9_l1linf:3.1457e-01 L10_l1linf:3.3279e-01 L11_l1linf:3.4571e-01 L12_l1linf:3.7831e-01 L1_spectral:6.6956e-03 L2_spectral:7.2210e-03 L3_spectral:7.8279e-03 L4_spectral:7.3899e-03 L5_spectral:7.1841e-03 L6_spectral:6.5097e-03 L7_spectral:6.5650e-03 L8_spectral:6.7678e-03 L9_spectral:7.0491e-03 L10_spectral:7.4097e-03 L11_spectral:7.7680e-03 L12_spectral:8.4487e-03 ip_v_neg_g:4.4355e-03 cos_v_neg_g:5.9317e-04 v_norm:1.3261e+00 
g_norm:5.6386e+00 hv_norm:1.3617e+00 cos_v_hv:5.3142e-03 hg_norm:7.5790e+02 cos_g_hg:4.7097e-01 v_par:1.7485e-05 v_perp:1.3261e+00 L1_cos_v_neg_g:7.7761e-03 L1_v_norm:3.1875e-02 L2_cos_v_neg_g:8.4884e-03 L2_v_norm:3.0820e-02 L3_cos_v_neg_g:9.2862e-03 L3_v_norm:3.0854e-02 L4_cos_v_neg_g:7.1444e-03 L4_v_norm:3.1418e-02 L5_cos_v_neg_g:8.1814e-03 L5_v_norm:3.1940e-02 L6_cos_v_neg_g:6.5915e-03 L6_v_norm:3.2097e-02 L7_cos_v_neg_g:5.8890e-03 L7_v_norm:3.2163e-02 L8_cos_v_neg_g:5.9845e-03 L8_v_norm:3.2185e-02 L9_cos_v_neg_g:6.3854e-03 L9_v_norm:3.2285e-02 L10_cos_v_neg_g:5.4976e-03 L10_v_norm:3.2314e-02 L11_cos_v_neg_g:5.0172e-03 L11_v_norm:3.2369e-02 L12_cos_v_neg_g:5.4232e-03 L12_v_norm:3.2667e-02 +step:7000 train loss:3.721523 +step:7001 train loss:3.619962 +step:7002 train loss:3.613769 +step:7003 train loss:3.636220 +step:7004 train loss:3.630556 +step:7005 train loss:3.617922 +step:7006 train loss:3.619671 +step:7007 train loss:3.671630 +step:7008 train loss:3.611404 +step:7009 train loss:3.654184 +step:7010 train loss:3.587601 +step:7011 train loss:3.647626 +step:7012 train loss:3.618268 +step:7013 train loss:3.691522 +step:7014 train loss:3.600438 +step:7015 train loss:3.655471 +step:7016 train loss:3.648594 +step:7017 train loss:3.614729 +step:7018 train loss:3.692474 +step:7019 train loss:3.621684 +step:7020 train loss:3.666391 +step:7021 train loss:3.613778 +step:7022 train loss:3.623070 +step:7023 train loss:3.646574 +step:7024 train loss:3.605979 +step:7025 train loss:3.656116 +step:7026 train loss:3.613031 +step:7027 train loss:3.676224 +step:7028 train loss:3.599939 +step:7029 train loss:3.587461 +step:7030 train loss:3.590709 +step:7031 train loss:3.643983 +step:7032 train loss:3.651558 +step:7033 train loss:3.627043 +step:7034 train loss:3.648283 +step:7035 train loss:3.699292 +step:7036 train loss:3.617482 +step:7037 train loss:3.646952 +step:7038 train loss:3.604509 +step:7039 train loss:3.663084 +step:7040 train loss:3.581570 +step:7041 train loss:3.670743 +step:7042 train loss:3.607353 +step:7043 train loss:3.579951 +step:7044 train loss:3.625012 +step:7045 train loss:3.626268 +step:7046 train loss:3.612633 +step:7047 train loss:3.655563 +step:7048 train loss:3.601118 +step:7049 train loss:3.615155 +step:7050 train loss:3.634036 +step:7051 train loss:3.653102 +step:7052 train loss:3.655470 +step:7053 train loss:3.616733 +step:7054 train loss:3.599615 +step:7055 train loss:3.661726 +step:7056 train loss:3.667304 +step:7057 train loss:3.587768 +step:7058 train loss:3.705733 +step:7059 train loss:3.617469 +step:7060 train loss:3.626050 +step:7061 train loss:3.603647 +step:7062 train loss:3.620512 +step:7063 train loss:3.680539 +step:7064 train loss:3.605740 +step:7065 train loss:3.654419 +step:7066 train loss:3.613079 +step:7067 train loss:3.649528 +step:7068 train loss:3.630172 +step:7069 train loss:3.586579 +step:7070 train loss:3.617275 +step:7071 train loss:3.580599 +step:7072 train loss:3.590911 +step:7073 train loss:3.579325 +step:7074 train loss:3.574007 +step:7075 train loss:3.596250 +step:7076 train loss:3.602016 +step:7077 train loss:3.618098 +step:7078 train loss:3.657718 +step:7079 train loss:3.671423 +step:7080 train loss:3.610347 +step:7081 train loss:3.638018 +step:7082 train loss:3.607547 +step:7083 train loss:3.633905 +step:7084 train loss:3.628733 +step:7085 train loss:3.584807 +step:7086 train loss:3.630742 +step:7087 train loss:3.603724 +step:7088 train loss:3.722375 +step:7089 train loss:3.623312 +step:7090 train loss:3.583469 +step:7091 train loss:3.601102 
+step:7092 train loss:3.577211 +step:7093 train loss:3.670892 +step:7094 train loss:3.592296 +step:7095 train loss:3.603225 +step:7096 train loss:3.626101 +step:7097 train loss:3.612812 +step:7098 train loss:3.638094 +step:7099 train loss:3.594199 +step:7100 train loss:3.622600 +step:7101 train loss:3.696545 +step:7102 train loss:3.586723 +step:7103 train loss:3.613163 +step:7104 train loss:3.642541 +step:7105 train loss:3.617735 +step:7106 train loss:3.606787 +step:7107 train loss:3.640822 +step:7108 train loss:3.710599 +step:7109 train loss:3.638062 +step:7110 train loss:3.667100 +step:7111 train loss:3.643327 +step:7112 train loss:3.631156 +step:7113 train loss:3.633910 +step:7114 train loss:3.650553 +step:7115 train loss:3.691788 +step:7116 train loss:3.620257 +step:7117 train loss:3.651637 +step:7118 train loss:3.668350 +step:7119 train loss:3.624856 +step:7120 train loss:3.687556 +step:7121 train loss:3.597849 +step:7122 train loss:3.600871 +step:7123 train loss:3.544502 +step:7124 train loss:3.695536 +step:7125 train loss:3.554085 +step:7126 train loss:3.716087 +step:7127 train loss:3.677662 +step:7128 train loss:3.616298 +step:7129 train loss:3.629701 +step:7130 train loss:3.616860 +step:7131 train loss:3.554225 +step:7132 train loss:3.600268 +step:7133 train loss:3.644519 +step:7134 train loss:3.575001 +step:7135 train loss:3.628493 +step:7136 train loss:3.615447 +step:7137 train loss:3.592822 +step:7138 train loss:3.582489 +step:7139 train loss:3.586720 +step:7140 train loss:3.621208 +step:7141 train loss:3.616883 +step:7142 train loss:3.612766 +step:7143 train loss:3.650345 +step:7144 train loss:3.603405 +step:7145 train loss:3.619645 +step:7146 train loss:3.626801 +step:7147 train loss:3.648002 +step:7148 train loss:3.656914 +step:7149 train loss:3.654452 +step:7150 train loss:3.635504 +step:7151 train loss:3.597432 +step:7152 train loss:3.571239 +step:7153 train loss:3.607409 +step:7154 train loss:3.621667 +step:7155 train loss:3.642845 +step:7156 train loss:3.608300 +step:7157 train loss:3.626371 +step:7158 train loss:3.587757 +step:7159 train loss:3.636100 +step:7160 train loss:3.648462 +step:7161 train loss:3.599014 +step:7162 train loss:3.649203 +step:7163 train loss:3.583046 +step:7164 train loss:3.617692 +step:7165 train loss:3.624827 +step:7166 train loss:3.679886 +step:7167 train loss:3.660019 +step:7168 train loss:3.633717 +step:7169 train loss:3.611921 +step:7170 train loss:3.645271 +step:7171 train loss:3.586604 +step:7172 train loss:3.757414 +step:7173 train loss:3.597423 +step:7174 train loss:3.639521 +step:7175 train loss:3.618684 +step:7176 train loss:3.622443 +step:7177 train loss:3.641127 +step:7178 train loss:3.637297 +step:7179 train loss:3.623101 +step:7180 train loss:3.627026 +step:7181 train loss:3.651475 +step:7182 train loss:3.604823 +step:7183 train loss:3.679670 +step:7184 train loss:3.767325 +step:7185 train loss:3.682326 +step:7186 train loss:3.621181 +step:7187 train loss:3.632186 +step:7188 train loss:3.623175 +step:7189 train loss:3.619435 +step:7190 train loss:3.621928 +step:7191 train loss:3.612300 +step:7192 train loss:3.651832 +step:7193 train loss:3.565014 +step:7194 train loss:3.630198 +step:7195 train loss:3.604506 +step:7196 train loss:3.651821 +step:7197 train loss:3.629459 +step:7198 train loss:3.682669 +step:7199 train loss:3.643265 +step:7200 train loss:3.636637 +step:7201 train loss:3.643250 +step:7202 train loss:3.624054 +step:7203 train loss:3.634444 +step:7204 train loss:3.606418 +step:7205 train loss:3.561575 +step:7206 train 
loss:3.593267 +step:7207 train loss:3.769557 +step:7208 train loss:3.598748 +step:7209 train loss:3.685161 +step:7210 train loss:3.618107 +step:7211 train loss:3.650657 +step:7212 train loss:3.728374 +step:7213 train loss:3.580084 +step:7214 train loss:3.648597 +step:7215 train loss:3.618274 +step:7216 train loss:3.668171 +step:7217 train loss:3.627629 +step:7218 train loss:3.717183 +step:7219 train loss:3.626014 +step:7220 train loss:3.700869 +step:7221 train loss:3.584803 +step:7222 train loss:3.665039 +step:7223 train loss:3.584618 +step:7224 train loss:3.648033 +step:7225 train loss:3.622312 +step:7226 train loss:3.594504 +step:7227 train loss:3.613923 +step:7228 train loss:3.601245 +step:7229 train loss:3.606307 +step:7230 train loss:3.584548 +step:7231 train loss:3.720255 +step:7232 train loss:3.591265 +step:7233 train loss:3.661505 +step:7234 train loss:3.651153 +step:7235 train loss:3.621579 +step:7236 train loss:3.664378 +step:7237 train loss:3.612621 +step:7238 train loss:3.647094 +step:7239 train loss:3.603492 +step:7240 train loss:3.601430 +step:7241 train loss:3.616301 +step:7242 train loss:3.593957 +step:7243 train loss:3.636927 +step:7244 train loss:3.618530 +step:7245 train loss:3.616826 +step:7246 train loss:3.662784 +step:7247 train loss:3.617060 +step:7248 train loss:3.652884 +step:7249 train loss:3.605486 +step:7250 validation loss:3.564997 +step:7250 train loss:3.628438 +step:7251 train loss:3.671118 +step:7252 train loss:3.585847 +step:7253 train loss:3.673466 +step:7254 train loss:3.613981 +step:7255 train loss:3.580861 +step:7256 train loss:3.623549 +step:7257 train loss:3.669876 +step:7258 train loss:3.625296 +step:7259 train loss:3.609261 +step:7260 train loss:3.687812 +step:7261 train loss:3.650717 +step:7262 train loss:3.603901 +step:7263 train loss:3.644110 +step:7264 train loss:3.634752 +step:7265 train loss:3.535981 +step:7266 train loss:3.656993 +step:7267 train loss:3.579979 +step:7268 train loss:3.643420 +step:7269 train loss:3.647287 +step:7270 train loss:3.600586 +step:7271 train loss:3.619445 +step:7272 train loss:3.623539 +step:7273 train loss:3.618311 +step:7274 train loss:3.600470 +step:7275 train loss:3.671112 +step:7276 train loss:3.575585 +step:7277 train loss:3.626880 +step:7278 train loss:3.594440 +step:7279 train loss:3.574988 +step:7280 train loss:3.645336 +step:7281 train loss:3.666534 +step:7282 train loss:3.666747 +step:7283 train loss:3.557337 +step:7284 train loss:3.600611 +step:7285 train loss:3.629924 +step:7286 train loss:3.757971 +step:7287 train loss:3.668325 +step:7288 train loss:3.625071 +step:7289 train loss:3.622652 +step:7290 train loss:3.676728 +step:7291 train loss:3.635411 +step:7292 train loss:3.704700 +step:7293 train loss:3.603178 +step:7294 train loss:3.684616 +step:7295 train loss:3.577215 +step:7296 train loss:3.573138 +step:7297 train loss:3.618943 +step:7298 train loss:3.597117 +step:7299 train loss:3.634854 +step:7300 train loss:3.624053 +step:7301 train loss:3.572818 +step:7302 train loss:3.720428 +step:7303 train loss:3.610208 +step:7304 train loss:3.549905 +step:7305 train loss:3.631754 +step:7306 train loss:3.658282 +step:7307 train loss:3.665396 +step:7308 train loss:3.612932 +step:7309 train loss:3.577298 +step:7310 train loss:3.607848 +step:7311 train loss:3.593178 +step:7312 train loss:3.627820 +step:7313 train loss:3.671684 +step:7314 train loss:3.565173 +step:7315 train loss:3.559517 +step:7316 train loss:3.702976 +step:7317 train loss:3.639792 +step:7318 train loss:3.581134 +step:7319 train loss:3.606669 
+step:7320 train loss:3.640069 +step:7321 train loss:3.671063 +step:7322 train loss:3.548252 +step:7323 train loss:3.604026 +step:7324 train loss:3.628580 +step:7325 train loss:3.592918 +step:7326 train loss:3.623403 +step:7327 train loss:3.597047 +step:7328 train loss:3.719909 +step:7329 train loss:3.560297 +step:7330 train loss:3.613997 +step:7331 train loss:3.612830 +step:7332 train loss:3.650021 +step:7333 train loss:3.632570 +step:7334 train loss:3.602975 +step:7335 train loss:3.597881 +step:7336 train loss:3.851848 +step:7337 train loss:3.637002 +step:7338 train loss:3.633772 +step:7339 train loss:3.646188 +step:7340 train loss:3.631853 +step:7341 train loss:3.622317 +step:7342 train loss:3.616951 +step:7343 train loss:3.627346 +step:7344 train loss:3.706246 +step:7345 train loss:3.564564 +step:7346 train loss:3.601940 +step:7347 train loss:3.596607 +step:7348 train loss:3.599817 +step:7349 train loss:3.700779 +step:7350 train loss:3.684289 +step:7351 train loss:3.616223 +step:7352 train loss:3.646974 +step:7353 train loss:3.627866 +step:7354 train loss:3.577977 +step:7355 train loss:3.761435 +step:7356 train loss:3.733690 +step:7357 train loss:3.655138 +step:7358 train loss:3.638784 +step:7359 train loss:3.603828 +step:7360 train loss:3.614687 +step:7361 train loss:3.565807 +step:7362 train loss:3.613110 +step:7363 train loss:3.630477 +step:7364 train loss:3.662484 +step:7365 train loss:3.647910 +step:7366 train loss:3.612432 +step:7367 train loss:3.683557 +step:7368 train loss:3.670290 +step:7369 train loss:3.657509 +step:7370 train loss:3.623438 +step:7371 train loss:3.582710 +step:7372 train loss:3.639024 +step:7373 train loss:3.662697 +step:7374 train loss:3.752695 +step:7375 train loss:3.575899 +step:7376 train loss:3.600821 +step:7377 train loss:3.645601 +step:7378 train loss:3.599107 +step:7379 train loss:3.723692 +step:7380 train loss:3.684195 +step:7381 train loss:3.650708 +step:7382 train loss:3.617250 +step:7383 train loss:3.701758 +step:7384 train loss:3.650752 +step:7385 train loss:3.605383 +step:7386 train loss:3.614332 +step:7387 train loss:3.653373 +step:7388 train loss:3.686316 +step:7389 train loss:3.629919 +step:7390 train loss:3.571399 +step:7391 train loss:3.609616 +step:7392 train loss:3.665896 +step:7393 train loss:3.633440 +step:7394 train loss:3.675947 +step:7395 train loss:3.562498 +step:7396 train loss:3.660896 +step:7397 train loss:3.590963 +step:7398 train loss:3.602035 +step:7399 train loss:3.654713 +step:7400 train loss:3.653096 +step:7401 train loss:3.567304 +step:7402 train loss:3.691720 +step:7403 train loss:3.572639 +step:7404 train loss:3.645397 +step:7405 train loss:3.763125 +step:7406 train loss:3.591179 +step:7407 train loss:3.642747 +step:7408 train loss:3.634072 +step:7409 train loss:3.610985 +step:7410 train loss:3.781132 +step:7411 train loss:3.621422 +step:7412 train loss:3.631866 +step:7413 train loss:3.677943 +step:7414 train loss:3.587274 +step:7415 train loss:3.649442 +step:7416 train loss:3.531825 +step:7417 train loss:3.647881 +step:7418 train loss:3.632456 +step:7419 train loss:3.604284 +step:7420 train loss:3.595165 +step:7421 train loss:3.628707 +step:7422 train loss:3.587979 +step:7423 train loss:3.723208 +step:7424 train loss:3.789633 +step:7425 train loss:3.677023 +step:7426 train loss:3.643894 +step:7427 train loss:3.612518 +step:7428 train loss:3.644593 +step:7429 train loss:3.653708 +step:7430 train loss:3.576254 +step:7431 train loss:3.583209 +step:7432 train loss:3.592124 +step:7433 train loss:3.685822 +step:7434 train 
loss:3.605020 +step:7435 train loss:3.685410 +step:7436 train loss:3.728745 +step:7437 train loss:3.548359 +step:7438 train loss:3.610067 +step:7439 train loss:3.622604 +step:7440 train loss:3.594913 +step:7441 train loss:3.562010 +step:7442 train loss:3.792801 +step:7443 train loss:3.614352 +step:7444 train loss:3.661290 +step:7445 train loss:3.584442 +step:7446 train loss:3.608209 +step:7447 train loss:3.537006 +step:7448 train loss:3.589122 +step:7449 train loss:3.605061 +step:7450 train loss:3.638274 +step:7451 train loss:3.666388 +step:7452 train loss:3.602813 +step:7453 train loss:3.626076 +step:7454 train loss:3.608138 +step:7455 train loss:3.623400 +step:7456 train loss:3.594791 +step:7457 train loss:3.597541 +step:7458 train loss:3.643219 +step:7459 train loss:3.616954 +step:7460 train loss:3.623545 +step:7461 train loss:3.663644 +step:7462 train loss:3.596172 +step:7463 train loss:3.660537 +step:7464 train loss:3.583942 +step:7465 train loss:3.594685 +step:7466 train loss:3.596816 +step:7467 train loss:3.603959 +step:7468 train loss:3.653851 +step:7469 train loss:3.587630 +step:7470 train loss:3.618044 +step:7471 train loss:3.607498 +step:7472 train loss:3.643276 +step:7473 train loss:3.584602 +step:7474 train loss:3.566803 +step:7475 train loss:3.600513 +step:7476 train loss:3.640193 +step:7477 train loss:3.610135 +step:7478 train loss:3.608934 +step:7479 train loss:3.622159 +step:7480 train loss:3.899002 +step:7481 train loss:3.552266 +step:7482 train loss:3.620740 +step:7483 train loss:3.615233 +step:7484 train loss:3.637228 +step:7485 train loss:3.621669 +step:7486 train loss:3.649657 +step:7487 train loss:3.644852 +step:7488 train loss:3.661055 +step:7489 train loss:3.658354 +step:7490 train loss:3.602708 +step:7491 train loss:3.622861 +step:7492 train loss:3.729154 +step:7493 train loss:3.706578 +step:7494 train loss:3.728510 +step:7495 train loss:3.603872 +step:7496 train loss:3.589702 +step:7497 train loss:3.685187 +step:7498 train loss:3.624565 +step:7499 train loss:3.658796 +step:7500 validation loss:3.558907 total_sharp:5.5896e-03 L1_sharp:1.8658e-01 L2_sharp:7.5009e-02 L3_sharp:1.5535e-01 L4_sharp:8.9975e-02 L5_sharp:9.0505e-02 L6_sharp:1.1506e-01 L7_sharp:1.7104e-01 L8_sharp:1.6701e-01 L9_sharp:1.0599e-01 L10_sharp:7.2255e-02 L11_sharp:5.6075e-02 L12_sharp:1.0279e-01 total_fnorm:1.3597e+00 total_l1_linf:7.4991e+03 total_spectral:1.3597e+00 L1_fnorm:3.2284e-02 L2_fnorm:3.1163e-02 L3_fnorm:3.0888e-02 L4_fnorm:3.1660e-02 L5_fnorm:3.2235e-02 L6_fnorm:3.2495e-02 L7_fnorm:3.2332e-02 L8_fnorm:3.2250e-02 L9_fnorm:3.2198e-02 L10_fnorm:3.2168e-02 L11_fnorm:3.2247e-02 L12_fnorm:3.2178e-02 L1_l1linf:3.0894e-01 L2_l1linf:3.5044e-01 L3_l1linf:3.5327e-01 L4_l1linf:3.4404e-01 L5_l1linf:3.3085e-01 L6_l1linf:3.3296e-01 L7_l1linf:3.2404e-01 L8_l1linf:3.1053e-01 L9_l1linf:3.0633e-01 L10_l1linf:3.1881e-01 L11_l1linf:3.3021e-01 L12_l1linf:3.3232e-01 L1_spectral:6.9612e-03 L2_spectral:7.7878e-03 L3_spectral:7.9213e-03 L4_spectral:7.7439e-03 L5_spectral:7.4550e-03 L6_spectral:7.4686e-03 L7_spectral:7.1174e-03 L8_spectral:6.9197e-03 L9_spectral:6.9381e-03 L10_spectral:7.1888e-03 L11_spectral:7.5103e-03 L12_spectral:7.5364e-03 ip_v_neg_g:6.0664e-03 cos_v_neg_g:7.3725e-04 v_norm:1.3597e+00 g_norm:6.0515e+00 hv_norm:1.6813e+00 cos_v_hv:4.5204e-03 hg_norm:1.9000e+03 cos_g_hg:4.4553e-01 v_par:2.3233e-05 v_perp:1.3597e+00 L1_cos_v_neg_g:7.3723e-03 L1_v_norm:3.2284e-02 L2_cos_v_neg_g:9.7665e-03 L2_v_norm:3.1163e-02 L3_cos_v_neg_g:1.1829e-02 L3_v_norm:3.0888e-02 L4_cos_v_neg_g:1.0791e-02 
L4_v_norm:3.1660e-02 L5_cos_v_neg_g:1.0437e-02 L5_v_norm:3.2235e-02 L6_cos_v_neg_g:8.4464e-03 L6_v_norm:3.2495e-02 L7_cos_v_neg_g:8.8894e-03 L7_v_norm:3.2332e-02 L8_cos_v_neg_g:9.5449e-03 L8_v_norm:3.2250e-02 L9_cos_v_neg_g:8.8087e-03 L9_v_norm:3.2198e-02 L10_cos_v_neg_g:7.5225e-03 L10_v_norm:3.2168e-02 L11_cos_v_neg_g:6.2126e-03 L11_v_norm:3.2247e-02 L12_cos_v_neg_g:5.2527e-03 L12_v_norm:3.2178e-02 +step:7500 train loss:3.606083 +step:7501 train loss:3.597410 +step:7502 train loss:3.585861 +step:7503 train loss:3.563770 +step:7504 train loss:3.590689 +step:7505 train loss:3.576963 +step:7506 train loss:3.638007 +step:7507 train loss:3.558108 +step:7508 train loss:3.625889 +step:7509 train loss:3.597398 +step:7510 train loss:3.627448 +step:7511 train loss:3.633270 +step:7512 train loss:3.893401 +step:7513 train loss:3.586692 +step:7514 train loss:3.609716 +step:7515 train loss:3.586421 +step:7516 train loss:3.595075 +step:7517 train loss:3.624483 +step:7518 train loss:3.607740 +step:7519 train loss:3.616551 +step:7520 train loss:3.681265 +step:7521 train loss:3.569452 +step:7522 train loss:3.622979 +step:7523 train loss:3.653937 +step:7524 train loss:3.601567 +step:7525 train loss:3.603866 +step:7526 train loss:3.556664 +step:7527 train loss:3.564243 +step:7528 train loss:3.658864 +step:7529 train loss:3.637755 +step:7530 train loss:3.585738 +step:7531 train loss:3.657174 +step:7532 train loss:3.647174 +step:7533 train loss:3.574210 +step:7534 train loss:3.635095 +step:7535 train loss:3.641707 +step:7536 train loss:3.672230 +step:7537 train loss:3.691940 +step:7538 train loss:3.719587 +step:7539 train loss:3.619955 +step:7540 train loss:3.608558 +step:7541 train loss:3.660537 +step:7542 train loss:3.620780 +step:7543 train loss:3.577357 +step:7544 train loss:3.620981 +step:7545 train loss:3.606990 +step:7546 train loss:3.563379 +step:7547 train loss:3.609215 +step:7548 train loss:3.621937 +step:7549 train loss:3.604297 +step:7550 train loss:3.603790 +step:7551 train loss:3.701262 +step:7552 train loss:3.615669 +step:7553 train loss:3.654920 +step:7554 train loss:3.581942 +step:7555 train loss:3.668436 +step:7556 train loss:3.571515 +step:7557 train loss:3.669353 +step:7558 train loss:3.658959 +step:7559 train loss:3.612860 +step:7560 train loss:3.707161 +step:7561 train loss:3.681109 +step:7562 train loss:3.583181 +step:7563 train loss:3.578640 +step:7564 train loss:3.636206 +step:7565 train loss:3.651948 +step:7566 train loss:3.642685 +step:7567 train loss:3.662012 +step:7568 train loss:3.602028 +step:7569 train loss:3.660605 +step:7570 train loss:3.645315 +step:7571 train loss:3.726854 +step:7572 train loss:3.573411 +step:7573 train loss:3.645843 +step:7574 train loss:3.612526 +step:7575 train loss:3.601235 +step:7576 train loss:3.611360 +step:7577 train loss:3.628387 +step:7578 train loss:3.684715 +step:7579 train loss:3.618439 +step:7580 train loss:3.606852 +step:7581 train loss:3.591748 +step:7582 train loss:3.649616 +step:7583 train loss:3.581944 +step:7584 train loss:3.566652 +step:7585 train loss:3.538976 +step:7586 train loss:3.575593 +step:7587 train loss:3.638052 +step:7588 train loss:3.764650 +step:7589 train loss:3.584806 +step:7590 train loss:3.656910 +step:7591 train loss:3.662439 +step:7592 train loss:3.617791 +step:7593 train loss:3.640270 +step:7594 train loss:3.638223 +step:7595 train loss:3.606343 +step:7596 train loss:3.661143 +step:7597 train loss:3.565554 +step:7598 train loss:3.626127 +step:7599 train loss:3.621206 +step:7600 train loss:3.575353 +step:7601 train 
loss:3.687973 +step:7602 train loss:3.632094 +step:7603 train loss:3.590240 +step:7604 train loss:3.734470 +step:7605 train loss:3.628677 +step:7606 train loss:3.663284 +step:7607 train loss:3.611065 +step:7608 train loss:3.622829 +step:7609 train loss:3.655397 +step:7610 train loss:3.614739 +step:7611 train loss:3.593569 +step:7612 train loss:3.536000 +step:7613 train loss:3.583741 +step:7614 train loss:3.654756 +step:7615 train loss:3.610498 +step:7616 train loss:3.681155 +step:7617 train loss:3.579734 +step:7618 train loss:3.667439 +step:7619 train loss:3.613116 +step:7620 train loss:3.597688 +step:7621 train loss:3.545266 +step:7622 train loss:3.818636 +step:7623 train loss:3.834250 +step:7624 train loss:3.649441 +step:7625 train loss:3.683141 +step:7626 train loss:3.601772 +step:7627 train loss:3.676217 +step:7628 train loss:3.559209 +step:7629 train loss:3.617341 +step:7630 train loss:3.632427 +step:7631 train loss:3.611696 +step:7632 train loss:3.660295 +step:7633 train loss:3.729295 +step:7634 train loss:3.691279 +step:7635 train loss:3.592958 +step:7636 train loss:3.624427 +step:7637 train loss:3.573738 +step:7638 train loss:3.679682 +step:7639 train loss:3.608790 +step:7640 train loss:3.588406 +step:7641 train loss:3.619173 +step:7642 train loss:3.954059 +step:7643 train loss:3.708457 +step:7644 train loss:3.631663 +step:7645 train loss:3.624519 +step:7646 train loss:3.608276 +step:7647 train loss:3.596718 +step:7648 train loss:3.638927 +step:7649 train loss:3.594540 +step:7650 train loss:3.639995 +step:7651 train loss:3.666227 +step:7652 train loss:3.542794 +step:7653 train loss:3.734333 +step:7654 train loss:3.597745 +step:7655 train loss:3.614484 +step:7656 train loss:3.589542 +step:7657 train loss:3.605157 +step:7658 train loss:3.557056 +step:7659 train loss:3.623163 +step:7660 train loss:3.559293 +step:7661 train loss:3.572359 +step:7662 train loss:3.569494 +step:7663 train loss:3.621380 +step:7664 train loss:3.577952 +step:7665 train loss:3.555642 +step:7666 train loss:3.665243 +step:7667 train loss:3.573787 +step:7668 train loss:3.682842 +step:7669 train loss:3.619506 +step:7670 train loss:3.574153 +step:7671 train loss:3.629171 +step:7672 train loss:3.648630 +step:7673 train loss:3.614067 +step:7674 train loss:3.653750 +step:7675 train loss:3.703302 +step:7676 train loss:3.673951 +step:7677 train loss:3.704072 +step:7678 train loss:3.636048 +step:7679 train loss:3.661138 +step:7680 train loss:3.669859 +step:7681 train loss:3.639385 +step:7682 train loss:3.604696 +step:7683 train loss:3.609245 +step:7684 train loss:3.581447 +step:7685 train loss:3.559492 +step:7686 train loss:3.682292 +step:7687 train loss:3.597847 +step:7688 train loss:3.564078 +step:7689 train loss:3.612522 +step:7690 train loss:3.583389 +step:7691 train loss:3.606462 +step:7692 train loss:3.638767 +step:7693 train loss:3.643916 +step:7694 train loss:3.692822 +step:7695 train loss:3.622155 +step:7696 train loss:3.597617 +step:7697 train loss:3.581964 +step:7698 train loss:3.641705 +step:7699 train loss:3.639436 +step:7700 train loss:3.540433 +step:7701 train loss:3.655635 +step:7702 train loss:3.600033 +step:7703 train loss:3.602683 +step:7704 train loss:3.651873 +step:7705 train loss:3.614123 +step:7706 train loss:3.547887 +step:7707 train loss:3.664059 +step:7708 train loss:3.605731 +step:7709 train loss:3.624584 +step:7710 train loss:3.686622 +step:7711 train loss:3.648900 +step:7712 train loss:3.592225 +step:7713 train loss:3.674492 +step:7714 train loss:3.618303 +step:7715 train loss:3.569694 
+step:7716 train loss:3.607972 +step:7717 train loss:3.639192 +step:7718 train loss:3.641401 +step:7719 train loss:3.597429 +step:7720 train loss:3.613375 +step:7721 train loss:3.652061 +step:7722 train loss:3.581870 +step:7723 train loss:3.948498 +step:7724 train loss:3.616744 +step:7725 train loss:3.523756 +step:7726 train loss:3.607477 +step:7727 train loss:3.631706 +step:7728 train loss:3.581370 +step:7729 train loss:3.595863 +step:7730 train loss:3.617105 +step:7731 train loss:3.646466 +step:7732 train loss:3.664826 +step:7733 train loss:3.575635 +step:7734 train loss:3.602410 +step:7735 train loss:3.690060 +step:7736 train loss:3.637915 +step:7737 train loss:3.655080 +step:7738 train loss:3.560170 +step:7739 train loss:3.630739 +step:7740 train loss:3.581078 +step:7741 train loss:3.618134 +step:7742 train loss:3.614458 +step:7743 train loss:3.569619 +step:7744 train loss:3.693170 +step:7745 train loss:3.586305 +step:7746 train loss:3.561673 +step:7747 train loss:3.653083 +step:7748 train loss:3.634265 +step:7749 train loss:3.562371 +step:7750 validation loss:3.550189 +step:7750 train loss:3.718006 +step:7751 train loss:3.599231 +step:7752 train loss:3.591074 +step:7753 train loss:3.595611 +step:7754 train loss:3.567361 +step:7755 train loss:3.632328 +step:7756 train loss:3.664488 +step:7757 train loss:3.614182 +step:7758 train loss:3.581770 +step:7759 train loss:3.606462 +step:7760 train loss:3.638798 +step:7761 train loss:3.627896 +step:7762 train loss:3.615643 +step:7763 train loss:3.599422 +step:7764 train loss:3.603010 +step:7765 train loss:3.561022 +step:7766 train loss:3.625039 +step:7767 train loss:3.630116 +step:7768 train loss:3.582697 +step:7769 train loss:3.645378 +step:7770 train loss:3.664304 +step:7771 train loss:3.639693 +step:7772 train loss:3.612007 +step:7773 train loss:3.667300 +step:7774 train loss:3.568171 +step:7775 train loss:3.555318 +step:7776 train loss:3.656016 +step:7777 train loss:3.615193 +step:7778 train loss:3.571568 +step:7779 train loss:3.613224 +step:7780 train loss:3.611571 +step:7781 train loss:3.619591 +step:7782 train loss:3.595948 +step:7783 train loss:3.580405 +step:7784 train loss:3.580968 +step:7785 train loss:3.622917 +step:7786 train loss:3.579959 +step:7787 train loss:3.659894 +step:7788 train loss:3.606907 +step:7789 train loss:3.546953 +step:7790 train loss:3.602424 +step:7791 train loss:3.637790 +step:7792 train loss:3.596122 +step:7793 train loss:3.620508 +step:7794 train loss:3.609026 +step:7795 train loss:3.641048 +step:7796 train loss:3.600703 +step:7797 train loss:3.621098 +step:7798 train loss:3.615530 +step:7799 train loss:3.604387 +step:7800 train loss:3.556303 +step:7801 train loss:3.624709 +step:7802 train loss:3.602798 +step:7803 train loss:3.654599 +step:7804 train loss:3.616262 +step:7805 train loss:3.613423 +step:7806 train loss:3.629138 +step:7807 train loss:3.703804 +step:7808 train loss:3.563186 +step:7809 train loss:3.538674 +step:7810 train loss:3.630867 +step:7811 train loss:3.564539 +step:7812 train loss:3.582455 +step:7813 train loss:3.669526 +step:7814 train loss:3.735882 +step:7815 train loss:3.552804 +step:7816 train loss:3.635506 +step:7817 train loss:3.669642 +step:7818 train loss:3.566076 +step:7819 train loss:3.616918 +step:7820 train loss:3.663241 +step:7821 train loss:3.589953 +step:7822 train loss:3.548376 +step:7823 train loss:3.602132 +step:7824 train loss:3.607511 +step:7825 train loss:3.592798 +step:7826 train loss:3.589887 +step:7827 train loss:3.630031 +step:7828 train loss:3.623382 +step:7829 
train loss:3.574887 +step:7830 train loss:3.586406 +step:7831 train loss:3.595326 +step:7832 train loss:3.655183 +step:7833 train loss:3.635053 +step:7834 train loss:3.596269 +step:7835 train loss:3.621571 +step:7836 train loss:3.734050 +step:7837 train loss:3.618843 +step:7838 train loss:3.587657 +step:7839 train loss:3.547054 +step:7840 train loss:3.562762 +step:7841 train loss:3.659283 +step:7842 train loss:3.646455 +step:7843 train loss:3.696014 +step:7844 train loss:3.630573 +step:7845 train loss:3.607209 +step:7846 train loss:3.717700 +step:7847 train loss:3.605449 +step:7848 train loss:3.618437 +step:7849 train loss:3.634902 +step:7850 train loss:3.602956 +step:7851 train loss:3.629942 +step:7852 train loss:3.603321 +step:7853 train loss:3.578796 +step:7854 train loss:3.608391 +step:7855 train loss:3.608756 +step:7856 train loss:3.612967 +step:7857 train loss:3.598398 +step:7858 train loss:3.607516 +step:7859 train loss:3.618004 +step:7860 train loss:3.653458 +step:7861 train loss:3.636927 +step:7862 train loss:3.582131 +step:7863 train loss:3.684655 +step:7864 train loss:3.526968 +step:7865 train loss:3.599674 +step:7866 train loss:3.578702 +step:7867 train loss:3.622932 +step:7868 train loss:3.601110 +step:7869 train loss:3.602596 +step:7870 train loss:3.529296 +step:7871 train loss:3.588888 +step:7872 train loss:3.583501 +step:7873 train loss:3.661505 +step:7874 train loss:3.604044 +step:7875 train loss:3.607494 +step:7876 train loss:3.627576 +step:7877 train loss:3.581137 +step:7878 train loss:3.617897 +step:7879 train loss:3.954251 +step:7880 train loss:3.609981 +step:7881 train loss:3.633497 +step:7882 train loss:3.716372 +step:7883 train loss:3.530802 +step:7884 train loss:3.619012 +step:7885 train loss:3.603194 +step:7886 train loss:3.600852 +step:7887 train loss:3.596016 +step:7888 train loss:3.628832 +step:7889 train loss:3.677040 +step:7890 train loss:3.579125 +step:7891 train loss:3.632234 +step:7892 train loss:3.600687 +step:7893 train loss:3.577375 +step:7894 train loss:3.597281 +step:7895 train loss:3.583136 +step:7896 train loss:3.581669 +step:7897 train loss:3.604587 +step:7898 train loss:3.616671 +step:7899 train loss:3.603642 +step:7900 train loss:3.572893 +step:7901 train loss:3.561852 +step:7902 train loss:3.711266 +step:7903 train loss:3.556275 +step:7904 train loss:3.606126 +step:7905 train loss:3.673485 +step:7906 train loss:3.572705 +step:7907 train loss:3.601152 +step:7908 train loss:3.649325 +step:7909 train loss:3.699019 +step:7910 train loss:3.579816 +step:7911 train loss:3.600953 +step:7912 train loss:3.606109 +step:7913 train loss:3.575664 +step:7914 train loss:3.613344 +step:7915 train loss:3.718688 +step:7916 train loss:3.587605 +step:7917 train loss:3.647449 +step:7918 train loss:3.587946 +step:7919 train loss:3.580810 +step:7920 train loss:3.620884 +step:7921 train loss:3.623408 +step:7922 train loss:3.600527 +step:7923 train loss:3.648960 +step:7924 train loss:3.607681 +step:7925 train loss:3.630431 +step:7926 train loss:3.535754 +step:7927 train loss:3.809271 +step:7928 train loss:3.640234 +step:7929 train loss:3.605539 +step:7930 train loss:3.564788 +step:7931 train loss:3.588963 +step:7932 train loss:3.611347 +step:7933 train loss:3.628058 +step:7934 train loss:3.716330 +step:7935 train loss:3.639016 +step:7936 train loss:3.613403 +step:7937 train loss:3.562542 +step:7938 train loss:3.579673 +step:7939 train loss:3.625385 +step:7940 train loss:3.610517 +step:7941 train loss:3.635089 +step:7942 train loss:3.624530 +step:7943 train loss:3.637948 
+step:7944 train loss:3.560337 +step:7945 train loss:3.662112 +step:7946 train loss:3.607324 +step:7947 train loss:3.621014 +step:7948 train loss:3.580512 +step:7949 train loss:3.628044 +step:7950 train loss:3.688111 +step:7951 train loss:3.648995 +step:7952 train loss:3.796470 +step:7953 train loss:3.689800 +step:7954 train loss:3.594192 +step:7955 train loss:3.586063 +step:7956 train loss:3.585694 +step:7957 train loss:3.662094 +step:7958 train loss:3.671628 +step:7959 train loss:3.624725 +step:7960 train loss:3.688383 +step:7961 train loss:3.595160 +step:7962 train loss:3.568024 +step:7963 train loss:3.606358 +step:7964 train loss:3.604030 +step:7965 train loss:3.611488 +step:7966 train loss:3.582922 +step:7967 train loss:3.607148 +step:7968 train loss:3.620567 +step:7969 train loss:3.574716 +step:7970 train loss:3.544026 +step:7971 train loss:3.629552 +step:7972 train loss:3.604066 +step:7973 train loss:3.575959 +step:7974 train loss:3.618432 +step:7975 train loss:3.602595 +step:7976 train loss:3.621534 +step:7977 train loss:3.654588 +step:7978 train loss:3.674707 +step:7979 train loss:3.624124 +step:7980 train loss:3.530251 +step:7981 train loss:3.567906 +step:7982 train loss:3.616291 +step:7983 train loss:3.631658 +step:7984 train loss:3.674637 +step:7985 train loss:3.598434 +step:7986 train loss:3.621934 +step:7987 train loss:3.675241 +step:7988 train loss:3.649926 +step:7989 train loss:3.552536 +step:7990 train loss:3.567820 +step:7991 train loss:3.584470 +step:7992 train loss:3.610806 +step:7993 train loss:3.586502 +step:7994 train loss:3.640697 +step:7995 train loss:3.643399 +step:7996 train loss:3.609697 +step:7997 train loss:3.626245 +step:7998 train loss:3.657092 +step:7999 train loss:3.583950 +step:8000 validation loss:3.542000 total_sharp:4.2290e-03 L1_sharp:7.4874e-02 L2_sharp:4.9344e-02 L3_sharp:7.9086e-02 L4_sharp:5.4597e-02 L5_sharp:6.5688e-02 L6_sharp:7.8992e-02 L7_sharp:1.0770e-01 L8_sharp:1.1710e-01 L9_sharp:1.0539e-01 L10_sharp:8.0658e-02 L11_sharp:6.3069e-02 L12_sharp:7.8121e-02 total_fnorm:1.3485e+00 total_l1_linf:7.4372e+03 total_spectral:1.3485e+00 L1_fnorm:3.1841e-02 L2_fnorm:3.1013e-02 L3_fnorm:3.0980e-02 L4_fnorm:3.1438e-02 L5_fnorm:3.2130e-02 L6_fnorm:3.2209e-02 L7_fnorm:3.2231e-02 L8_fnorm:3.2161e-02 L9_fnorm:3.2284e-02 L10_fnorm:3.2337e-02 L11_fnorm:3.2513e-02 L12_fnorm:3.2466e-02 L1_l1linf:2.9663e-01 L2_l1linf:3.1842e-01 L3_l1linf:3.3004e-01 L4_l1linf:3.2475e-01 L5_l1linf:3.2561e-01 L6_l1linf:3.1036e-01 L7_l1linf:2.9181e-01 L8_l1linf:3.0425e-01 L9_l1linf:3.1778e-01 L10_l1linf:3.2520e-01 L11_l1linf:3.5573e-01 L12_l1linf:3.5022e-01 L1_spectral:6.7062e-03 L2_spectral:7.2043e-03 L3_spectral:7.4259e-03 L4_spectral:7.3331e-03 L5_spectral:7.3171e-03 L6_spectral:6.9672e-03 L7_spectral:6.5716e-03 L8_spectral:6.8469e-03 L9_spectral:7.1875e-03 L10_spectral:7.3797e-03 L11_spectral:7.9276e-03 L12_spectral:7.8568e-03 ip_v_neg_g:3.9626e-03 cos_v_neg_g:5.4200e-04 v_norm:1.3485e+00 g_norm:5.4217e+00 hv_norm:1.2015e+00 cos_v_hv:4.7461e-03 hg_norm:7.7965e+02 cos_g_hg:4.8524e-01 v_par:1.7663e-05 v_perp:1.3485e+00 L1_cos_v_neg_g:4.5953e-03 L1_v_norm:3.1841e-02 L2_cos_v_neg_g:4.4821e-03 L2_v_norm:3.1013e-02 L3_cos_v_neg_g:4.6148e-03 L3_v_norm:3.0980e-02 L4_cos_v_neg_g:4.0893e-03 L4_v_norm:3.1438e-02 L5_cos_v_neg_g:4.7899e-03 L5_v_norm:3.2130e-02 L6_cos_v_neg_g:5.9329e-03 L6_v_norm:3.2209e-02 L7_cos_v_neg_g:7.3670e-03 L7_v_norm:3.2231e-02 L8_cos_v_neg_g:7.9994e-03 L8_v_norm:3.2161e-02 L9_cos_v_neg_g:7.3716e-03 L9_v_norm:3.2284e-02 L10_cos_v_neg_g:5.9046e-03 L10_v_norm:3.2337e-02 
L11_cos_v_neg_g:5.2615e-03 L11_v_norm:3.2513e-02 L12_cos_v_neg_g:4.5807e-03 L12_v_norm:3.2466e-02 +step:8000 train loss:3.652906 +step:8001 train loss:3.610812 +step:8002 train loss:3.633871 +step:8003 train loss:3.651695 +step:8004 train loss:3.628587 +step:8005 train loss:3.544766 +step:8006 train loss:3.627041 +step:8007 train loss:3.594594 +step:8008 train loss:3.617174 +step:8009 train loss:3.691571 +step:8010 train loss:3.906748 +step:8011 train loss:3.578576 +step:8012 train loss:3.650724 +step:8013 train loss:3.609302 +step:8014 train loss:3.622751 +step:8015 train loss:3.619524 +step:8016 train loss:3.605537 +step:8017 train loss:3.628768 +step:8018 train loss:3.590660 +step:8019 train loss:3.558247 +step:8020 train loss:3.594357 +step:8021 train loss:3.671810 +step:8022 train loss:3.585555 +step:8023 train loss:3.621098 +step:8024 train loss:3.465591 +step:8025 train loss:3.600862 +step:8026 train loss:3.605537 +step:8027 train loss:3.612273 +step:8028 train loss:3.668769 +step:8029 train loss:3.599585 +step:8030 train loss:3.556611 +step:8031 train loss:3.614876 +step:8032 train loss:3.603180 +step:8033 train loss:3.554240 +step:8034 train loss:3.591106 +step:8035 train loss:3.571897 +step:8036 train loss:3.572471 +step:8037 train loss:3.539984 +step:8038 train loss:3.554082 +step:8039 train loss:3.650205 +step:8040 train loss:3.583127 +step:8041 train loss:3.580440 +step:8042 train loss:3.616212 +step:8043 train loss:3.557931 +step:8044 train loss:3.576568 +step:8045 train loss:3.639017 +step:8046 train loss:3.567191 +step:8047 train loss:3.572705 +step:8048 train loss:3.596342 +step:8049 train loss:3.650272 +step:8050 train loss:3.589701 +step:8051 train loss:3.566743 +step:8052 train loss:3.628676 +step:8053 train loss:3.576961 +step:8054 train loss:3.616088 +step:8055 train loss:3.642389 +step:8056 train loss:3.613365 +step:8057 train loss:3.687546 +step:8058 train loss:3.594208 +step:8059 train loss:3.655450 +step:8060 train loss:3.620207 +step:8061 train loss:3.514106 +step:8062 train loss:3.647523 +step:8063 train loss:3.607081 +step:8064 train loss:3.571296 +step:8065 train loss:3.629678 +step:8066 train loss:3.593246 +step:8067 train loss:3.652842 +step:8068 train loss:3.578917 +step:8069 train loss:3.606158 +step:8070 train loss:3.569978 +step:8071 train loss:3.581454 +step:8072 train loss:3.619720 +step:8073 train loss:3.575288 +step:8074 train loss:3.587210 +step:8075 train loss:3.566552 +step:8076 train loss:3.623258 +step:8077 train loss:3.625221 +step:8078 train loss:3.569624 +step:8079 train loss:3.591852 +step:8080 train loss:3.576169 +step:8081 train loss:3.598398 +step:8082 train loss:3.611257 +step:8083 train loss:3.514344 +step:8084 train loss:3.650649 +step:8085 train loss:3.523212 +step:8086 train loss:3.652143 +step:8087 train loss:3.544184 +step:8088 train loss:3.598048 +step:8089 train loss:3.625935 +step:8090 train loss:3.652982 +step:8091 train loss:3.591694 +step:8092 train loss:3.577884 +step:8093 train loss:3.584345 +step:8094 train loss:3.580282 +step:8095 train loss:3.611399 +step:8096 train loss:3.611480 +step:8097 train loss:3.540504 +step:8098 train loss:3.556048 +step:8099 train loss:3.538232 +step:8100 train loss:3.599985 +step:8101 train loss:3.668982 +step:8102 train loss:3.613608 +step:8103 train loss:3.562439 +step:8104 train loss:3.613596 +step:8105 train loss:3.613242 +step:8106 train loss:3.570480 +step:8107 train loss:3.556229 +step:8108 train loss:3.569306 +step:8109 train loss:3.569974 +step:8110 train loss:3.631906 +step:8111 
train loss:3.555206 +step:8112 train loss:3.574213 +step:8113 train loss:3.563372 +step:8114 train loss:3.508381 +step:8115 train loss:3.562263 +step:8116 train loss:3.598783 +step:8117 train loss:3.566542 +step:8118 train loss:3.562875 +step:8119 train loss:3.602482 +step:8120 train loss:3.550071 +step:8121 train loss:3.606045 +step:8122 train loss:3.587825 +step:8123 train loss:3.600558 +step:8124 train loss:3.553364 +step:8125 train loss:3.546538 +step:8126 train loss:3.531078 +step:8127 train loss:3.630820 +step:8128 train loss:3.635594 +step:8129 train loss:3.554094 +step:8130 train loss:3.585595 +step:8131 train loss:3.552919 +step:8132 train loss:3.622349 +step:8133 train loss:3.545354 +step:8134 train loss:3.583523 +step:8135 train loss:3.570413 +step:8136 train loss:3.584435 +step:8137 train loss:3.646669 +step:8138 train loss:3.558853 +step:8139 train loss:3.628404 +step:8140 train loss:3.560945 +step:8141 train loss:3.581263 +step:8142 train loss:3.557328 +step:8143 train loss:3.610539 +step:8144 train loss:3.589462 +step:8145 train loss:3.561897 +step:8146 train loss:3.563162 +step:8147 train loss:3.590173 +step:8148 train loss:3.677856 +step:8149 train loss:3.591830 +step:8150 train loss:3.577072 +step:8151 train loss:3.565742 +step:8152 train loss:3.665351 +step:8153 train loss:3.537866 +step:8154 train loss:3.559702 +step:8155 train loss:3.579046 +step:8156 train loss:3.570317 +step:8157 train loss:3.586027 +step:8158 train loss:3.599313 +step:8159 train loss:3.611268 +step:8160 train loss:3.564679 +step:8161 train loss:3.608873 +step:8162 train loss:3.537182 +step:8163 train loss:3.600608 +step:8164 train loss:3.582607 +step:8165 train loss:3.638307 +step:8166 train loss:3.639790 +step:8167 train loss:3.546847 +step:8168 train loss:3.525397 +step:8169 train loss:3.572864 +step:8170 train loss:3.519773 +step:8171 train loss:3.584146 +step:8172 train loss:3.582103 +step:8173 train loss:3.581024 +step:8174 train loss:3.593374 +step:8175 train loss:3.551017 +step:8176 train loss:3.549014 +step:8177 train loss:3.593471 +step:8178 train loss:3.681955 +step:8179 train loss:3.586258 +step:8180 train loss:3.608506 +step:8181 train loss:3.609058 +step:8182 train loss:3.572340 +step:8183 train loss:3.556039 +step:8184 train loss:3.548404 +step:8185 train loss:3.590919 +step:8186 train loss:3.594875 +step:8187 train loss:3.602384 +step:8188 train loss:3.534362 +step:8189 train loss:3.680552 +step:8190 train loss:3.611911 +step:8191 train loss:3.618204 +step:8192 train loss:3.722844 +step:8193 train loss:3.596357 +step:8194 train loss:3.528497 +step:8195 train loss:3.628665 +step:8196 train loss:3.542006 +step:8197 train loss:3.578764 +step:8198 train loss:3.580048 +step:8199 train loss:3.586032 +step:8200 train loss:3.558628 +step:8201 train loss:3.677674 +step:8202 train loss:3.592550 +step:8203 train loss:3.616500 +step:8204 train loss:3.519542 +step:8205 train loss:3.533626 +step:8206 train loss:3.652106 +step:8207 train loss:3.579381 +step:8208 train loss:3.596073 +step:8209 train loss:3.644742 +step:8210 train loss:3.627372 +step:8211 train loss:3.562267 +step:8212 train loss:3.617626 +step:8213 train loss:3.626313 +step:8214 train loss:3.666927 +step:8215 train loss:3.636338 +step:8216 train loss:3.623880 +step:8217 train loss:3.598257 +step:8218 train loss:3.608060 +step:8219 train loss:3.740537 +step:8220 train loss:3.572680 +step:8221 train loss:3.593285 +step:8222 train loss:3.545880 +step:8223 train loss:3.562601 +step:8224 train loss:3.574316 +step:8225 train loss:3.625901 
+step:8226 train loss:3.552675 +step:8227 train loss:3.624805 +step:8228 train loss:3.507942 +step:8229 train loss:3.554016 +step:8230 train loss:3.565780 +step:8231 train loss:3.594788 +step:8232 train loss:3.590883 +step:8233 train loss:3.640609 +step:8234 train loss:3.634164 +step:8235 train loss:3.605412 +step:8236 train loss:3.591091 +step:8237 train loss:3.541707 +step:8238 train loss:3.792340 +step:8239 train loss:3.623217 +step:8240 train loss:3.575487 +step:8241 train loss:3.542050 +step:8242 train loss:3.581801 +step:8243 train loss:3.569439 +step:8244 train loss:3.589010 +step:8245 train loss:3.568593 +step:8246 train loss:3.636855 +step:8247 train loss:3.665187 +step:8248 train loss:3.586013 +step:8249 train loss:3.581758 +step:8250 validation loss:3.532202 +step:8250 train loss:3.568547 +step:8251 train loss:3.662004 +step:8252 train loss:3.597895 +step:8253 train loss:3.572912 +step:8254 train loss:3.535853 +step:8255 train loss:3.575229 +step:8256 train loss:3.552337 +step:8257 train loss:3.663553 +step:8258 train loss:3.583321 +step:8259 train loss:3.569830 +step:8260 train loss:3.563032 +step:8261 train loss:3.566868 +step:8262 train loss:3.578560 +step:8263 train loss:3.596784 +step:8264 train loss:3.557601 +step:8265 train loss:3.550836 +step:8266 train loss:3.556068 +step:8267 train loss:3.492923 +step:8268 train loss:3.614060 +step:8269 train loss:3.546071 +step:8270 train loss:3.598778 +step:8271 train loss:3.625700 +step:8272 train loss:3.650132 +step:8273 train loss:3.530892 +step:8274 train loss:3.591348 +step:8275 train loss:3.549721 +step:8276 train loss:3.586944 +step:8277 train loss:3.656091 +step:8278 train loss:3.672585 +step:8279 train loss:3.583765 +step:8280 train loss:3.574173 +step:8281 train loss:3.539735 +step:8282 train loss:3.600961 +step:8283 train loss:3.583961 +step:8284 train loss:3.571486 +step:8285 train loss:3.560807 +step:8286 train loss:3.671731 +step:8287 train loss:3.606135 +step:8288 train loss:3.584225 +step:8289 train loss:3.591473 +step:8290 train loss:3.534465 +step:8291 train loss:3.573452 +step:8292 train loss:3.603142 +step:8293 train loss:3.570344 +step:8294 train loss:3.546933 +step:8295 train loss:3.580894 +step:8296 train loss:3.651705 +step:8297 train loss:3.728405 +step:8298 train loss:3.554087 +step:8299 train loss:3.588009 +step:8300 train loss:3.599852 +step:8301 train loss:3.568598 +step:8302 train loss:3.627353 +step:8303 train loss:3.756093 +step:8304 train loss:3.569975 +step:8305 train loss:3.614980 +step:8306 train loss:3.591440 +step:8307 train loss:3.606299 +step:8308 train loss:3.602568 +step:8309 train loss:3.628714 +step:8310 train loss:3.544954 +step:8311 train loss:3.634392 +step:8312 train loss:3.628120 +step:8313 train loss:3.694143 +step:8314 train loss:3.564255 +step:8315 train loss:3.510861 +step:8316 train loss:3.570971 +step:8317 train loss:3.594411 +step:8318 train loss:3.583998 +step:8319 train loss:3.619153 +step:8320 train loss:3.641154 +step:8321 train loss:3.549926 +step:8322 train loss:3.565477 +step:8323 train loss:3.603651 +step:8324 train loss:3.575855 +step:8325 train loss:3.632894 +step:8326 train loss:3.598549 +step:8327 train loss:3.590683 +step:8328 train loss:3.658716 +step:8329 train loss:3.571723 +step:8330 train loss:3.608525 +step:8331 train loss:3.539724 +step:8332 train loss:3.635078 +step:8333 train loss:3.655862 +step:8334 train loss:3.519822 +step:8335 train loss:3.586519 +step:8336 train loss:3.674843 +step:8337 train loss:3.610062 +step:8338 train loss:3.575373 +step:8339 
train loss:3.554906 +step:8340 train loss:3.644629 +step:8341 train loss:3.546529 +step:8342 train loss:3.619382 +step:8343 train loss:3.531147 +step:8344 train loss:3.575594 +step:8345 train loss:3.611793 +step:8346 train loss:3.688801 +step:8347 train loss:3.582150 +step:8348 train loss:3.607885 +step:8349 train loss:3.586209 +step:8350 train loss:3.605458 +step:8351 train loss:3.546921 +step:8352 train loss:3.629933 +step:8353 train loss:3.590836 +step:8354 train loss:3.567214 +step:8355 train loss:3.572166 +step:8356 train loss:3.561400 +step:8357 train loss:3.579870 +step:8358 train loss:3.552032 +step:8359 train loss:3.550779 +step:8360 train loss:3.592470 +step:8361 train loss:3.612435 +step:8362 train loss:3.628793 +step:8363 train loss:3.628993 +step:8364 train loss:3.589237 +step:8365 train loss:3.736711 +step:8366 train loss:3.573452 +step:8367 train loss:3.554369 +step:8368 train loss:3.517721 +step:8369 train loss:3.553763 +step:8370 train loss:3.632061 +step:8371 train loss:3.609617 +step:8372 train loss:3.580611 +step:8373 train loss:3.598071 +step:8374 train loss:3.526017 +step:8375 train loss:3.591789 +step:8376 train loss:3.624832 +step:8377 train loss:3.450363 +step:8378 train loss:3.667612 +step:8379 train loss:3.529096 +step:8380 train loss:3.544427 +step:8381 train loss:3.546632 +step:8382 train loss:3.575721 +step:8383 train loss:3.533631 +step:8384 train loss:3.578255 +step:8385 train loss:3.589803 +step:8386 train loss:3.566338 +step:8387 train loss:3.732823 +step:8388 train loss:3.639565 +step:8389 train loss:3.619852 +step:8390 train loss:3.617360 +step:8391 train loss:3.554455 +step:8392 train loss:3.562736 +step:8393 train loss:3.517042 +step:8394 train loss:3.612390 +step:8395 train loss:3.615234 +step:8396 train loss:3.643528 +step:8397 train loss:3.574577 +step:8398 train loss:3.593835 +step:8399 train loss:3.558673 +step:8400 train loss:3.566439 +step:8401 train loss:3.566007 +step:8402 train loss:3.561782 +step:8403 train loss:3.568668 +step:8404 train loss:3.577835 +step:8405 train loss:3.529373 +step:8406 train loss:3.573516 +step:8407 train loss:3.614094 +step:8408 train loss:3.587054 +step:8409 train loss:3.509026 +step:8410 train loss:3.572290 +step:8411 train loss:3.598570 +step:8412 train loss:3.653566 +step:8413 train loss:3.629625 +step:8414 train loss:3.630072 +step:8415 train loss:3.549356 +step:8416 train loss:3.597489 +step:8417 train loss:3.516926 +step:8418 train loss:3.616121 +step:8419 train loss:3.574668 +step:8420 train loss:3.648422 +step:8421 train loss:3.564096 +step:8422 train loss:3.583786 +step:8423 train loss:3.597537 +step:8424 train loss:3.603942 +step:8425 train loss:3.659809 +step:8426 train loss:3.631457 +step:8427 train loss:3.549338 +step:8428 train loss:3.562646 +step:8429 train loss:3.624916 +step:8430 train loss:3.563741 +step:8431 train loss:3.569306 +step:8432 train loss:3.571135 +step:8433 train loss:3.544960 +step:8434 train loss:3.578615 +step:8435 train loss:3.503504 +step:8436 train loss:3.583586 +step:8437 train loss:3.626780 +step:8438 train loss:3.604193 +step:8439 train loss:3.544944 +step:8440 train loss:3.513266 +step:8441 train loss:3.574338 +step:8442 train loss:3.594328 +step:8443 train loss:3.550316 +step:8444 train loss:3.584142 +step:8445 train loss:3.537983 +step:8446 train loss:3.586053 +step:8447 train loss:3.598405 +step:8448 train loss:3.578758 +step:8449 train loss:3.572665 +step:8450 train loss:3.561053 +step:8451 train loss:3.595291 +step:8452 train loss:3.566975 +step:8453 train loss:3.547704 
+step:8454 train loss:3.597277 +step:8455 train loss:3.669576 +step:8456 train loss:3.644464 +step:8457 train loss:3.702438 +step:8458 train loss:3.590322 +step:8459 train loss:3.596449 +step:8460 train loss:3.520088 +step:8461 train loss:3.680129 +step:8462 train loss:3.549512 +step:8463 train loss:3.593871 +step:8464 train loss:3.603887 +step:8465 train loss:3.612387 +step:8466 train loss:3.584426 +step:8467 train loss:3.588711 +step:8468 train loss:3.837224 +step:8469 train loss:3.547779 +step:8470 train loss:3.545765 +step:8471 train loss:3.590950 +step:8472 train loss:3.606780 +step:8473 train loss:3.568925 +step:8474 train loss:3.690513 +step:8475 train loss:3.647983 +step:8476 train loss:3.593856 +step:8477 train loss:3.586546 +step:8478 train loss:3.566411 +step:8479 train loss:3.567639 +step:8480 train loss:3.647558 +step:8481 train loss:3.569014 +step:8482 train loss:3.562705 +step:8483 train loss:3.704218 +step:8484 train loss:3.589207 +step:8485 train loss:3.636407 +step:8486 train loss:3.546604 +step:8487 train loss:3.600544 +step:8488 train loss:3.546402 +step:8489 train loss:3.619815 +step:8490 train loss:3.610783 +step:8491 train loss:3.627498 +step:8492 train loss:3.583721 +step:8493 train loss:3.652489 +step:8494 train loss:3.520556 +step:8495 train loss:3.615166 +step:8496 train loss:3.563047 +step:8497 train loss:3.594697 +step:8498 train loss:3.609608 +step:8499 train loss:3.585398 +step:8500 validation loss:3.525799 total_sharp:4.5894e-03 L1_sharp:1.6828e-01 L2_sharp:6.6059e-02 L3_sharp:1.0142e-01 L4_sharp:6.3201e-02 L5_sharp:8.3740e-02 L6_sharp:9.8429e-02 L7_sharp:1.3324e-01 L8_sharp:1.2878e-01 L9_sharp:1.0015e-01 L10_sharp:6.6989e-02 L11_sharp:5.4485e-02 L12_sharp:8.4489e-02 total_fnorm:1.3750e+00 total_l1_linf:7.5755e+03 total_spectral:1.3750e+00 L1_fnorm:3.1969e-02 L2_fnorm:3.1064e-02 L3_fnorm:3.1005e-02 L4_fnorm:3.1807e-02 L5_fnorm:3.2052e-02 L6_fnorm:3.2221e-02 L7_fnorm:3.2312e-02 L8_fnorm:3.2216e-02 L9_fnorm:3.2133e-02 L10_fnorm:3.2247e-02 L11_fnorm:3.2357e-02 L12_fnorm:3.2161e-02 L1_l1linf:3.1608e-01 L2_l1linf:3.5147e-01 L3_l1linf:3.4949e-01 L4_l1linf:3.4759e-01 L5_l1linf:3.2578e-01 L6_l1linf:3.1317e-01 L7_l1linf:2.9774e-01 L8_l1linf:2.9827e-01 L9_l1linf:3.1510e-01 L10_l1linf:3.2908e-01 L11_l1linf:3.4510e-01 L12_l1linf:3.3194e-01 L1_spectral:7.1159e-03 L2_spectral:7.8740e-03 L3_spectral:7.8374e-03 L4_spectral:7.8339e-03 L5_spectral:7.3613e-03 L6_spectral:7.0587e-03 L7_spectral:6.6404e-03 L8_spectral:6.6940e-03 L9_spectral:7.0441e-03 L10_spectral:7.3369e-03 L11_spectral:7.8345e-03 L12_spectral:7.5761e-03 ip_v_neg_g:5.0432e-03 cos_v_neg_g:6.7919e-04 v_norm:1.3750e+00 g_norm:5.4002e+00 hv_norm:1.2053e+00 cos_v_hv:5.2356e-03 hg_norm:9.0965e+02 cos_g_hg:4.2320e-01 v_par:2.0168e-05 v_perp:1.3750e+00 L1_cos_v_neg_g:9.1184e-03 L1_v_norm:3.1969e-02 L2_cos_v_neg_g:7.0051e-03 L2_v_norm:3.1064e-02 L3_cos_v_neg_g:6.1264e-03 L3_v_norm:3.1005e-02 L4_cos_v_neg_g:6.2322e-03 L4_v_norm:3.1807e-02 L5_cos_v_neg_g:1.0137e-02 L5_v_norm:3.2052e-02 L6_cos_v_neg_g:8.2113e-03 L6_v_norm:3.2221e-02 L7_cos_v_neg_g:8.9664e-03 L7_v_norm:3.2312e-02 L8_cos_v_neg_g:9.1172e-03 L8_v_norm:3.2216e-02 L9_cos_v_neg_g:8.5510e-03 L9_v_norm:3.2133e-02 L10_cos_v_neg_g:7.2272e-03 L10_v_norm:3.2247e-02 L11_cos_v_neg_g:5.3810e-03 L11_v_norm:3.2357e-02 L12_cos_v_neg_g:5.2596e-03 L12_v_norm:3.2161e-02 +step:8500 train loss:3.585205 +step:8501 train loss:3.796471 +step:8502 train loss:3.817077 +step:8503 train loss:3.574378 +step:8504 train loss:3.574203 +step:8505 train loss:3.550733 +step:8506 train 
loss:3.622724 +step:8507 train loss:3.557650 +step:8508 train loss:3.593301 +step:8509 train loss:3.536445 +step:8510 train loss:3.555614 +step:8511 train loss:3.515487 +step:8512 train loss:3.611086 +step:8513 train loss:3.619478 +step:8514 train loss:3.558579 +step:8515 train loss:3.657210 +step:8516 train loss:3.574586 +step:8517 train loss:3.599818 +step:8518 train loss:3.486924 +step:8519 train loss:3.582559 +step:8520 train loss:3.546865 +step:8521 train loss:3.589931 +step:8522 train loss:3.485734 +step:8523 train loss:3.575870 +step:8524 train loss:3.569250 +step:8525 train loss:3.634119 +step:8526 train loss:3.619987 +step:8527 train loss:3.559530 +step:8528 train loss:3.642621 +step:8529 train loss:3.595365 +step:8530 train loss:3.635203 +step:8531 train loss:3.620482 +step:8532 train loss:3.659645 +step:8533 train loss:3.608483 +step:8534 train loss:3.612191 +step:8535 train loss:3.582947 +step:8536 train loss:3.672730 +step:8537 train loss:3.586899 +step:8538 train loss:3.654884 +step:8539 train loss:3.579315 +step:8540 train loss:3.603184 +step:8541 train loss:3.545571 +step:8542 train loss:3.608950 +step:8543 train loss:3.527149 +step:8544 train loss:3.520888 +step:8545 train loss:3.575252 +step:8546 train loss:3.525891 +step:8547 train loss:3.578788 +step:8548 train loss:3.548781 +step:8549 train loss:3.592633 +step:8550 train loss:3.544556 +step:8551 train loss:3.594185 +step:8552 train loss:3.602437 +step:8553 train loss:3.598748 +step:8554 train loss:3.575499 +step:8555 train loss:3.585677 +step:8556 train loss:3.666169 +step:8557 train loss:3.559602 +step:8558 train loss:3.602823 +step:8559 train loss:3.591676 +step:8560 train loss:3.572099 +step:8561 train loss:3.529023 +step:8562 train loss:3.558157 +step:8563 train loss:3.554502 +step:8564 train loss:3.622800 +step:8565 train loss:3.602642 +step:8566 train loss:3.617377 +step:8567 train loss:3.567739 +step:8568 train loss:3.578795 +step:8569 train loss:3.594476 +step:8570 train loss:3.535030 +step:8571 train loss:3.576397 +step:8572 train loss:3.590429 +step:8573 train loss:3.670306 +step:8574 train loss:3.597547 +step:8575 train loss:3.598209 +step:8576 train loss:3.630910 +step:8577 train loss:3.713219 +step:8578 train loss:3.619920 +step:8579 train loss:3.609958 +step:8580 train loss:3.543680 +step:8581 train loss:3.583203 +step:8582 train loss:3.587629 +step:8583 train loss:3.588451 +step:8584 train loss:3.578601 +step:8585 train loss:3.660414 +step:8586 train loss:3.574811 +step:8587 train loss:3.583493 +step:8588 train loss:3.633059 +step:8589 train loss:3.575792 +step:8590 train loss:3.570620 +step:8591 train loss:3.575178 +step:8592 train loss:3.532622 +step:8593 train loss:3.614185 +step:8594 train loss:3.634354 +step:8595 train loss:3.560819 +step:8596 train loss:3.597957 +step:8597 train loss:3.564919 +step:8598 train loss:3.614951 +step:8599 train loss:3.589015 +step:8600 train loss:3.593283 +step:8601 train loss:3.580440 +step:8602 train loss:3.557482 +step:8603 train loss:3.612164 +step:8604 train loss:3.558689 +step:8605 train loss:3.569616 +step:8606 train loss:3.584660 +step:8607 train loss:3.588710 +step:8608 train loss:3.635832 +step:8609 train loss:3.529263 +step:8610 train loss:3.610425 +step:8611 train loss:3.535807 +step:8612 train loss:3.614686 +step:8613 train loss:3.551828 +step:8614 train loss:3.610534 +step:8615 train loss:3.656335 +step:8616 train loss:3.534631 +step:8617 train loss:3.601999 +step:8618 train loss:3.578691 +step:8619 train loss:3.531691 +step:8620 train loss:3.574128 
+step:8621 train loss:3.604933 +step:8622 train loss:3.566159 +step:8623 train loss:3.574589 +step:8624 train loss:3.653230 +step:8625 train loss:3.572908 +step:8626 train loss:3.584800 +step:8627 train loss:3.579962 +step:8628 train loss:3.611070 +step:8629 train loss:3.518592 +step:8630 train loss:3.618414 +step:8631 train loss:3.562484 +step:8632 train loss:3.620797 +step:8633 train loss:3.565916 +step:8634 train loss:3.793083 +step:8635 train loss:3.590169 +step:8636 train loss:3.637332 +step:8637 train loss:3.561194 +step:8638 train loss:3.558154 +step:8639 train loss:3.621069 +step:8640 train loss:3.530583 +step:8641 train loss:3.633788 +step:8642 train loss:3.581436 +step:8643 train loss:3.689611 +step:8644 train loss:3.536397 +step:8645 train loss:3.604427 +step:8646 train loss:3.572553 +step:8647 train loss:3.590655 +step:8648 train loss:3.544844 +step:8649 train loss:3.625923 +step:8650 train loss:3.582115 +step:8651 train loss:3.596208 +step:8652 train loss:3.561335 +step:8653 train loss:3.594467 +step:8654 train loss:3.638304 +step:8655 train loss:3.568477 +step:8656 train loss:3.605598 +step:8657 train loss:3.612061 +step:8658 train loss:3.581238 +step:8659 train loss:3.573076 +step:8660 train loss:3.519933 +step:8661 train loss:3.579267 +step:8662 train loss:3.522110 +step:8663 train loss:3.593697 +step:8664 train loss:3.512164 +step:8665 train loss:3.528240 +step:8666 train loss:3.607492 +step:8667 train loss:3.501163 +step:8668 train loss:3.612213 +step:8669 train loss:3.643104 +step:8670 train loss:3.548621 +step:8671 train loss:3.544162 +step:8672 train loss:3.762568 +step:8673 train loss:3.531677 +step:8674 train loss:3.595025 +step:8675 train loss:3.641941 +step:8676 train loss:3.581623 +step:8677 train loss:3.607195 +step:8678 train loss:3.554609 +step:8679 train loss:3.611930 +step:8680 train loss:3.592933 +step:8681 train loss:3.593453 +step:8682 train loss:3.551364 +step:8683 train loss:3.564107 +step:8684 train loss:3.641908 +step:8685 train loss:3.584119 +step:8686 train loss:3.576974 +step:8687 train loss:3.528767 +step:8688 train loss:3.544323 +step:8689 train loss:3.617861 +step:8690 train loss:3.553074 +step:8691 train loss:3.633667 +step:8692 train loss:3.518803 +step:8693 train loss:3.610041 +step:8694 train loss:3.610112 +step:8695 train loss:3.595141 +step:8696 train loss:3.622307 +step:8697 train loss:3.571954 +step:8698 train loss:3.617041 +step:8699 train loss:3.563584 +step:8700 train loss:3.591501 +step:8701 train loss:3.555052 +step:8702 train loss:3.535242 +step:8703 train loss:3.556401 +step:8704 train loss:3.504214 +step:8705 train loss:3.592355 +step:8706 train loss:3.608274 +step:8707 train loss:3.606527 +step:8708 train loss:3.552883 +step:8709 train loss:3.616579 +step:8710 train loss:3.541126 +step:8711 train loss:3.598053 +step:8712 train loss:3.503187 +step:8713 train loss:3.576863 +step:8714 train loss:3.684560 +step:8715 train loss:3.540073 +step:8716 train loss:3.599744 +step:8717 train loss:3.565144 +step:8718 train loss:3.609491 +step:8719 train loss:3.574103 +step:8720 train loss:3.684479 +step:8721 train loss:3.580214 +step:8722 train loss:3.666044 +step:8723 train loss:3.540344 +step:8724 train loss:3.549778 +step:8725 train loss:3.582019 +step:8726 train loss:3.535596 +step:8727 train loss:3.611683 +step:8728 train loss:3.572639 +step:8729 train loss:3.573404 +step:8730 train loss:3.555331 +step:8731 train loss:3.555334 +step:8732 train loss:3.656540 +step:8733 train loss:3.580827 +step:8734 train loss:3.617465 +step:8735 train 
loss:3.688335 +step:8736 train loss:3.543011 +step:8737 train loss:3.574126 +step:8738 train loss:3.551522 +step:8739 train loss:3.612463 +step:8740 train loss:3.535110 +step:8741 train loss:3.589155 +step:8742 train loss:3.548531 +step:8743 train loss:3.581387 +step:8744 train loss:3.607644 +step:8745 train loss:3.648285 +step:8746 train loss:3.544468 +step:8747 train loss:3.646940 +step:8748 train loss:3.556348 +step:8749 train loss:3.595802 +step:8750 validation loss:3.520943 +step:8750 train loss:3.608530 +step:8751 train loss:3.644270 +step:8752 train loss:3.504944 +step:8753 train loss:3.548466 +step:8754 train loss:3.607641 +step:8755 train loss:3.585348 +step:8756 train loss:3.631450 +step:8757 train loss:3.542551 +step:8758 train loss:3.694720 +step:8759 train loss:3.545941 +step:8760 train loss:3.573425 +step:8761 train loss:3.653530 +step:8762 train loss:3.548916 +step:8763 train loss:3.523200 +step:8764 train loss:3.594545 +step:8765 train loss:3.663774 +step:8766 train loss:3.597258 +step:8767 train loss:3.550801 +step:8768 train loss:3.592588 +step:8769 train loss:3.564209 +step:8770 train loss:3.610425 +step:8771 train loss:3.580958 +step:8772 train loss:3.603390 +step:8773 train loss:3.565129 +step:8774 train loss:3.595479 +step:8775 train loss:3.596963 +step:8776 train loss:3.538156 +step:8777 train loss:3.578769 +step:8778 train loss:3.585359 +step:8779 train loss:3.604540 +step:8780 train loss:3.573076 +step:8781 train loss:3.573897 +step:8782 train loss:3.599745 +step:8783 train loss:3.578855 +step:8784 train loss:3.605083 +step:8785 train loss:3.590891 +step:8786 train loss:3.662724 +step:8787 train loss:3.607868 +step:8788 train loss:3.509040 +step:8789 train loss:3.609163 +step:8790 train loss:3.536596 +step:8791 train loss:3.586463 +step:8792 train loss:3.526384 +step:8793 train loss:3.612801 +step:8794 train loss:3.541764 +step:8795 train loss:3.609695 +step:8796 train loss:3.754621 +step:8797 train loss:3.495315 +step:8798 train loss:3.661165 +step:8799 train loss:3.573081 +step:8800 train loss:3.569139 +step:8801 train loss:3.587465 +step:8802 train loss:3.643588 +step:8803 train loss:3.602474 +step:8804 train loss:3.582582 +step:8805 train loss:3.604169 +step:8806 train loss:3.573242 +step:8807 train loss:3.563049 +step:8808 train loss:3.520368 +step:8809 train loss:3.640525 +step:8810 train loss:3.551415 +step:8811 train loss:3.535006 +step:8812 train loss:3.582902 +step:8813 train loss:3.490445 +step:8814 train loss:3.679453 +step:8815 train loss:3.527941 +step:8816 train loss:3.639700 +step:8817 train loss:3.580500 +step:8818 train loss:3.513069 +step:8819 train loss:3.630957 +step:8820 train loss:3.559874 +step:8821 train loss:3.584410 +step:8822 train loss:3.568158 +step:8823 train loss:3.579180 +step:8824 train loss:3.640008 +step:8825 train loss:3.614832 +step:8826 train loss:3.589744 +step:8827 train loss:3.550313 +step:8828 train loss:3.586552 +step:8829 train loss:3.570188 +step:8830 train loss:3.549120 +step:8831 train loss:3.621138 +step:8832 train loss:3.562443 +step:8833 train loss:3.590632 +step:8834 train loss:3.562220 +step:8835 train loss:3.497334 +step:8836 train loss:3.625257 +step:8837 train loss:3.526522 +step:8838 train loss:3.570067 +step:8839 train loss:3.559071 +step:8840 train loss:3.554651 +step:8841 train loss:3.575274 +step:8842 train loss:3.578882 +step:8843 train loss:3.594128 +step:8844 train loss:3.561901 +step:8845 train loss:3.580570 +step:8846 train loss:3.548813 +step:8847 train loss:3.582052 +step:8848 train loss:3.632710 
+step:8849 train loss:3.610770 +step:8850 train loss:3.606182 +step:8851 train loss:3.483416 +step:8852 train loss:3.590868 +step:8853 train loss:3.573970 +step:8854 train loss:3.542754 +step:8855 train loss:3.612504 +step:8856 train loss:3.604018 +step:8857 train loss:3.672505 +step:8858 train loss:3.536207 +step:8859 train loss:3.606364 +step:8860 train loss:3.569440 +step:8861 train loss:3.547519 +step:8862 train loss:3.553318 +step:8863 train loss:3.533865 +step:8864 train loss:3.601629 +step:8865 train loss:3.596509 +step:8866 train loss:3.474222 +step:8867 train loss:3.581296 +step:8868 train loss:3.610801 +step:8869 train loss:3.690760 +step:8870 train loss:3.573408 +step:8871 train loss:3.593018 +step:8872 train loss:3.580588 +step:8873 train loss:3.579252 +step:8874 train loss:3.629272 +step:8875 train loss:3.567083 +step:8876 train loss:3.604156 +step:8877 train loss:3.586168 +step:8878 train loss:3.636081 +step:8879 train loss:3.598301 +step:8880 train loss:3.545383 +step:8881 train loss:3.508163 +step:8882 train loss:3.581952 +step:8883 train loss:3.567465 +step:8884 train loss:3.654074 +step:8885 train loss:3.589308 +step:8886 train loss:3.592137 +step:8887 train loss:3.619637 +step:8888 train loss:3.580353 +step:8889 train loss:3.582440 +step:8890 train loss:3.578251 +step:8891 train loss:3.546424 +step:8892 train loss:3.629892 +step:8893 train loss:3.570509 +step:8894 train loss:3.584143 +step:8895 train loss:3.614762 +step:8896 train loss:3.533164 +step:8897 train loss:3.623742 +step:8898 train loss:3.556588 +step:8899 train loss:3.579947 +step:8900 train loss:3.546651 +step:8901 train loss:3.559641 +step:8902 train loss:3.603935 +step:8903 train loss:3.541900 +step:8904 train loss:3.590784 +step:8905 train loss:3.564634 +step:8906 train loss:3.553559 +step:8907 train loss:3.571618 +step:8908 train loss:3.633220 +step:8909 train loss:3.580439 +step:8910 train loss:3.541677 +step:8911 train loss:3.638294 +step:8912 train loss:3.536026 +step:8913 train loss:3.548437 +step:8914 train loss:3.644706 +step:8915 train loss:3.577989 +step:8916 train loss:3.612753 +step:8917 train loss:3.570271 +step:8918 train loss:3.574624 +step:8919 train loss:3.558425 +step:8920 train loss:3.583557 +step:8921 train loss:3.582935 +step:8922 train loss:3.561623 +step:8923 train loss:3.747364 +step:8924 train loss:3.644699 +step:8925 train loss:3.575348 +step:8926 train loss:3.587312 +step:8927 train loss:3.611196 +step:8928 train loss:3.568191 +step:8929 train loss:3.559412 +step:8930 train loss:3.621155 +step:8931 train loss:3.530142 +step:8932 train loss:3.631277 +step:8933 train loss:3.540438 +step:8934 train loss:3.575602 +step:8935 train loss:3.591470 +step:8936 train loss:3.623401 +step:8937 train loss:3.624562 +step:8938 train loss:3.567428 +step:8939 train loss:3.629503 +step:8940 train loss:3.586668 +step:8941 train loss:3.527847 +step:8942 train loss:3.606507 +step:8943 train loss:3.539247 +step:8944 train loss:3.588108 +step:8945 train loss:3.605405 +step:8946 train loss:3.448897 +step:8947 train loss:3.647521 +step:8948 train loss:3.489296 +step:8949 train loss:3.493249 +step:8950 train loss:3.540208 +step:8951 train loss:3.573201 +step:8952 train loss:3.597515 +step:8953 train loss:3.551668 +step:8954 train loss:3.657014 +step:8955 train loss:3.570460 +step:8956 train loss:3.594203 +step:8957 train loss:3.587918 +step:8958 train loss:3.565101 +step:8959 train loss:3.553939 +step:8960 train loss:3.522444 +step:8961 train loss:3.542814 +step:8962 train loss:3.600053 +step:8963 train 
loss:3.577528 +step:8964 train loss:3.561285 +step:8965 train loss:3.602344 +step:8966 train loss:3.558677 +step:8967 train loss:3.539050 +step:8968 train loss:3.525501 +step:8969 train loss:3.514885 +step:8970 train loss:3.593054 +step:8971 train loss:3.544474 +step:8972 train loss:3.739405 +step:8973 train loss:3.629427 +step:8974 train loss:3.585395 +step:8975 train loss:3.589207 +step:8976 train loss:3.555342 +step:8977 train loss:3.641481 +step:8978 train loss:3.622275 +step:8979 train loss:3.541693 +step:8980 train loss:3.633885 +step:8981 train loss:3.586483 +step:8982 train loss:3.561181 +step:8983 train loss:3.505059 +step:8984 train loss:3.627335 +step:8985 train loss:3.548606 +step:8986 train loss:3.583373 +step:8987 train loss:3.556828 +step:8988 train loss:3.606488 +step:8989 train loss:3.515702 +step:8990 train loss:3.654222 +step:8991 train loss:3.506289 +step:8992 train loss:3.565124 +step:8993 train loss:3.657304 +step:8994 train loss:3.557654 +step:8995 train loss:3.588211 +step:8996 train loss:3.558825 +step:8997 train loss:3.501400 +step:8998 train loss:3.510582 +step:8999 train loss:3.533182 +step:9000 validation loss:3.513237 total_sharp:4.1573e-03 L1_sharp:5.9671e-02 L2_sharp:4.4497e-02 L3_sharp:6.7562e-02 L4_sharp:5.2241e-02 L5_sharp:6.2113e-02 L6_sharp:8.0372e-02 L7_sharp:1.1044e-01 L8_sharp:1.0820e-01 L9_sharp:9.4680e-02 L10_sharp:6.9529e-02 L11_sharp:6.9096e-02 L12_sharp:1.3738e-01 total_fnorm:1.3667e+00 total_l1_linf:7.5404e+03 total_spectral:1.3667e+00 L1_fnorm:3.1747e-02 L2_fnorm:3.0971e-02 L3_fnorm:3.1089e-02 L4_fnorm:3.1503e-02 L5_fnorm:3.2206e-02 L6_fnorm:3.2290e-02 L7_fnorm:3.2266e-02 L8_fnorm:3.2097e-02 L9_fnorm:3.2165e-02 L10_fnorm:3.2171e-02 L11_fnorm:3.2447e-02 L12_fnorm:3.2273e-02 L1_l1linf:2.8933e-01 L2_l1linf:3.1967e-01 L3_l1linf:3.2740e-01 L4_l1linf:3.3380e-01 L5_l1linf:3.2880e-01 L6_l1linf:3.1451e-01 L7_l1linf:3.0273e-01 L8_l1linf:3.0466e-01 L9_l1linf:3.1050e-01 L10_l1linf:3.2807e-01 L11_l1linf:3.5804e-01 L12_l1linf:3.4577e-01 L1_spectral:6.6151e-03 L2_spectral:7.1890e-03 L3_spectral:7.3182e-03 L4_spectral:7.4906e-03 L5_spectral:7.4363e-03 L6_spectral:7.1332e-03 L7_spectral:6.8489e-03 L8_spectral:6.8411e-03 L9_spectral:6.9219e-03 L10_spectral:7.3377e-03 L11_spectral:7.9961e-03 L12_spectral:7.7262e-03 ip_v_neg_g:5.2367e-03 cos_v_neg_g:6.1517e-04 v_norm:1.3667e+00 g_norm:6.2287e+00 hv_norm:1.2623e+00 cos_v_hv:4.5012e-03 hg_norm:5.9613e+03 cos_g_hg:4.9993e-01 v_par:2.1149e-05 v_perp:1.3667e+00 L1_cos_v_neg_g:3.2042e-03 L1_v_norm:3.1747e-02 L2_cos_v_neg_g:4.1227e-03 L2_v_norm:3.0971e-02 L3_cos_v_neg_g:7.8049e-03 L3_v_norm:3.1089e-02 L4_cos_v_neg_g:8.5237e-03 L4_v_norm:3.1503e-02 L5_cos_v_neg_g:7.1837e-03 L5_v_norm:3.2206e-02 L6_cos_v_neg_g:8.0465e-03 L6_v_norm:3.2290e-02 L7_cos_v_neg_g:9.0615e-03 L7_v_norm:3.2266e-02 L8_cos_v_neg_g:9.9042e-03 L8_v_norm:3.2097e-02 L9_cos_v_neg_g:9.5345e-03 L9_v_norm:3.2165e-02 L10_cos_v_neg_g:7.0969e-03 L10_v_norm:3.2171e-02 L11_cos_v_neg_g:6.0593e-03 L11_v_norm:3.2447e-02 L12_cos_v_neg_g:1.8477e-03 L12_v_norm:3.2273e-02 +step:9000 train loss:3.620855 +step:9001 train loss:3.589530 +step:9002 train loss:3.589614 +step:9003 train loss:3.534463 +step:9004 train loss:3.532724 +step:9005 train loss:3.545903 +step:9006 train loss:3.550897 +step:9007 train loss:3.565622 +step:9008 train loss:3.527795 +step:9009 train loss:3.521792 +step:9010 train loss:3.556343 +step:9011 train loss:3.552363 +step:9012 train loss:3.661448 +step:9013 train loss:3.491074 +step:9014 train loss:3.561847 +step:9015 train loss:3.569127 +step:9016 
train loss:3.639135 +step:9017 train loss:3.583456 +step:9018 train loss:3.507570 +step:9019 train loss:3.587904 +step:9020 train loss:3.599563 +step:9021 train loss:3.556149 +step:9022 train loss:3.568555 +step:9023 train loss:3.567713 +step:9024 train loss:3.585394 +step:9025 train loss:3.571672 +step:9026 train loss:3.528584 +step:9027 train loss:3.575534 +step:9028 train loss:3.597533 +step:9029 train loss:3.615114 +step:9030 train loss:3.609144 +step:9031 train loss:3.574487 +step:9032 train loss:3.583157 +step:9033 train loss:3.572039 +step:9034 train loss:3.581466 +step:9035 train loss:3.582818 +step:9036 train loss:3.531059 +step:9037 train loss:3.527297 +step:9038 train loss:3.648416 +step:9039 train loss:3.553418 +step:9040 train loss:3.567400 +step:9041 train loss:3.617604 +step:9042 train loss:3.474158 +step:9043 train loss:3.566275 +step:9044 train loss:3.585757 +step:9045 train loss:3.530678 +step:9046 train loss:3.574259 +step:9047 train loss:3.570557 +step:9048 train loss:3.548444 +step:9049 train loss:3.582624 +step:9050 train loss:3.536422 +step:9051 train loss:3.579322 +step:9052 train loss:3.508209 +step:9053 train loss:3.627575 +step:9054 train loss:3.643630 +step:9055 train loss:3.565290 +step:9056 train loss:3.630160 +step:9057 train loss:3.484987 +step:9058 train loss:3.567001 +step:9059 train loss:3.646747 +step:9060 train loss:3.575733 +step:9061 train loss:3.603049 +step:9062 train loss:3.532619 +step:9063 train loss:3.664423 +step:9064 train loss:3.552490 +step:9065 train loss:3.563485 +step:9066 train loss:3.580758 +step:9067 train loss:3.544977 +step:9068 train loss:3.614957 +step:9069 train loss:3.573825 +step:9070 train loss:3.625771 +step:9071 train loss:3.556554 +step:9072 train loss:3.580274 +step:9073 train loss:3.542863 +step:9074 train loss:3.622133 +step:9075 train loss:3.569088 +step:9076 train loss:3.535204 +step:9077 train loss:3.618008 +step:9078 train loss:3.550764 +step:9079 train loss:3.594509 +step:9080 train loss:3.529886 +step:9081 train loss:3.564966 +step:9082 train loss:3.591126 +step:9083 train loss:3.625094 +step:9084 train loss:3.514201 +step:9085 train loss:3.581067 +step:9086 train loss:3.571771 +step:9087 train loss:3.515981 +step:9088 train loss:3.578248 +step:9089 train loss:3.593386 +step:9090 train loss:3.525316 +step:9091 train loss:3.629982 +step:9092 train loss:3.555677 +step:9093 train loss:3.549340 +step:9094 train loss:3.680917 +step:9095 train loss:3.544111 +step:9096 train loss:3.559022 +step:9097 train loss:3.541070 +step:9098 train loss:3.536583 +step:9099 train loss:3.664136 +step:9100 train loss:3.694383 +step:9101 train loss:3.612290 +step:9102 train loss:3.557129 +step:9103 train loss:3.561028 +step:9104 train loss:3.647714 +step:9105 train loss:3.513566 +step:9106 train loss:3.633426 +step:9107 train loss:3.573389 +step:9108 train loss:3.552318 +step:9109 train loss:3.577062 +step:9110 train loss:3.585239 +step:9111 train loss:3.560881 +step:9112 train loss:3.564017 +step:9113 train loss:3.592940 +step:9114 train loss:3.541996 +step:9115 train loss:3.564860 +step:9116 train loss:3.594774 +step:9117 train loss:3.597857 +step:9118 train loss:3.575717 +step:9119 train loss:3.496678 +step:9120 train loss:3.590150 +step:9121 train loss:3.626061 +step:9122 train loss:3.568754 +step:9123 train loss:3.586570 +step:9124 train loss:3.619725 +step:9125 train loss:3.566097 +step:9126 train loss:3.546744 +step:9127 train loss:3.579363 +step:9128 train loss:3.633934 +step:9129 train loss:3.589449 +step:9130 train loss:3.602200 
+step:9131 train loss:3.580639 +step:9132 train loss:3.590789 +step:9133 train loss:3.575931 +step:9134 train loss:3.550944 +step:9135 train loss:3.580171 +step:9136 train loss:3.577179 +step:9137 train loss:3.630998 +step:9138 train loss:3.549505 +step:9139 train loss:3.623333 +step:9140 train loss:3.549431 +step:9141 train loss:3.524533 +step:9142 train loss:3.702570 +step:9143 train loss:3.530557 +step:9144 train loss:3.625906 +step:9145 train loss:3.634196 +step:9146 train loss:3.549017 +step:9147 train loss:3.616996 +step:9148 train loss:3.643173 +step:9149 train loss:3.551774 +step:9150 train loss:3.569706 +step:9151 train loss:3.636877 +step:9152 train loss:3.592240 +step:9153 train loss:3.553844 +step:9154 train loss:3.570877 +step:9155 train loss:3.538374 +step:9156 train loss:3.537824 +step:9157 train loss:3.557212 +step:9158 train loss:3.540961 +step:9159 train loss:3.622493 +step:9160 train loss:3.509490 +step:9161 train loss:3.537306 +step:9162 train loss:3.625623 +step:9163 train loss:3.569138 +step:9164 train loss:3.542258 +step:9165 train loss:3.537417 +step:9166 train loss:3.594941 +step:9167 train loss:3.536591 +step:9168 train loss:3.580382 +step:9169 train loss:3.514075 +step:9170 train loss:3.536958 +step:9171 train loss:3.603052 +step:9172 train loss:3.524582 +step:9173 train loss:3.648291 +step:9174 train loss:3.579073 +step:9175 train loss:3.552616 +step:9176 train loss:3.536394 +step:9177 train loss:3.581021 +step:9178 train loss:3.527001 +step:9179 train loss:3.487082 +step:9180 train loss:3.582242 +step:9181 train loss:3.590027 +step:9182 train loss:3.561039 +step:9183 train loss:3.566236 +step:9184 train loss:3.561413 +step:9185 train loss:3.580235 +step:9186 train loss:3.537875 +step:9187 train loss:3.613124 +step:9188 train loss:3.650897 +step:9189 train loss:3.570406 +step:9190 train loss:3.577633 +step:9191 train loss:3.569503 +step:9192 train loss:3.579378 +step:9193 train loss:3.581115 +step:9194 train loss:3.522060 +step:9195 train loss:3.509073 +step:9196 train loss:3.564729 +step:9197 train loss:3.517056 +step:9198 train loss:3.594102 +step:9199 train loss:3.542930 +step:9200 train loss:3.565487 +step:9201 train loss:3.604663 +step:9202 train loss:3.590335 +step:9203 train loss:3.546597 +step:9204 train loss:3.743251 +step:9205 train loss:3.659474 +step:9206 train loss:3.570362 +step:9207 train loss:3.624758 +step:9208 train loss:3.599586 +step:9209 train loss:3.622725 +step:9210 train loss:3.515712 +step:9211 train loss:3.540712 +step:9212 train loss:3.539080 +step:9213 train loss:3.605272 +step:9214 train loss:3.544067 +step:9215 train loss:3.611835 +step:9216 train loss:3.579997 +step:9217 train loss:3.517312 +step:9218 train loss:3.603340 +step:9219 train loss:3.567119 +step:9220 train loss:3.612093 +step:9221 train loss:3.661460 +step:9222 train loss:3.607972 +step:9223 train loss:3.774946 +step:9224 train loss:3.611716 +step:9225 train loss:3.543572 +step:9226 train loss:3.558605 +step:9227 train loss:3.578271 +step:9228 train loss:3.579732 +step:9229 train loss:3.540188 +step:9230 train loss:3.601245 +step:9231 train loss:3.484891 +step:9232 train loss:3.542639 +step:9233 train loss:3.566312 +step:9234 train loss:3.622974 +step:9235 train loss:3.623364 +step:9236 train loss:3.530965 +step:9237 train loss:3.592382 +step:9238 train loss:3.566835 +step:9239 train loss:3.560078 +step:9240 train loss:3.528827 +step:9241 train loss:3.558807 +step:9242 train loss:3.567566 +step:9243 train loss:3.564698 +step:9244 train loss:3.542684 +step:9245 train 
loss:3.550221 +step:9246 train loss:3.544825 +step:9247 train loss:3.560190 +step:9248 train loss:3.566651 +step:9249 train loss:3.565618 +step:9250 validation loss:3.509389 +step:9250 train loss:3.607306 +step:9251 train loss:3.549130 +step:9252 train loss:3.614095 +step:9253 train loss:3.612388 +step:9254 train loss:3.537026 +step:9255 train loss:3.656591 +step:9256 train loss:3.536945 +step:9257 train loss:3.477998 +step:9258 train loss:3.555856 +step:9259 train loss:3.562284 +step:9260 train loss:3.659443 +step:9261 train loss:3.539448 +step:9262 train loss:3.609325 +step:9263 train loss:3.508992 +step:9264 train loss:3.664835 +step:9265 train loss:3.683223 +step:9266 train loss:3.614891 +step:9267 train loss:3.563277 +step:9268 train loss:3.552893 +step:9269 train loss:3.582549 +step:9270 train loss:3.501473 +step:9271 train loss:3.615458 +step:9272 train loss:3.557472 +step:9273 train loss:3.574198 +step:9274 train loss:3.573744 +step:9275 train loss:3.576019 +step:9276 train loss:3.601989 +step:9277 train loss:3.576356 +step:9278 train loss:3.591084 +step:9279 train loss:3.583456 +step:9280 train loss:3.582519 +step:9281 train loss:3.555293 +step:9282 train loss:3.675874 +step:9283 train loss:3.566222 +step:9284 train loss:3.527772 +step:9285 train loss:3.542970 +step:9286 train loss:3.600055 +step:9287 train loss:3.580153 +step:9288 train loss:3.576145 +step:9289 train loss:3.548179 +step:9290 train loss:3.577625 +step:9291 train loss:3.553914 +step:9292 train loss:3.593553 +step:9293 train loss:3.652802 +step:9294 train loss:3.570212 +step:9295 train loss:3.558308 +step:9296 train loss:3.507523 +step:9297 train loss:3.578055 +step:9298 train loss:3.520574 +step:9299 train loss:3.497734 +step:9300 train loss:3.605370 +step:9301 train loss:3.633396 +step:9302 train loss:3.569850 +step:9303 train loss:3.620323 +step:9304 train loss:3.542390 +step:9305 train loss:3.533215 +step:9306 train loss:3.538494 +step:9307 train loss:3.536801 +step:9308 train loss:3.508723 +step:9309 train loss:3.499534 +step:9310 train loss:3.559095 +step:9311 train loss:3.612427 +step:9312 train loss:3.567269 +step:9313 train loss:3.512441 +step:9314 train loss:3.542406 +step:9315 train loss:3.576265 +step:9316 train loss:3.562135 +step:9317 train loss:3.535245 +step:9318 train loss:3.620980 +step:9319 train loss:3.532527 +step:9320 train loss:3.551086 +step:9321 train loss:3.566685 +step:9322 train loss:3.572978 +step:9323 train loss:3.646119 +step:9324 train loss:3.594722 +step:9325 train loss:3.531777 +step:9326 train loss:3.608660 +step:9327 train loss:3.604627 +step:9328 train loss:3.607773 +step:9329 train loss:3.496862 +step:9330 train loss:3.661369 +step:9331 train loss:3.591813 +step:9332 train loss:3.613958 +step:9333 train loss:3.634492 +step:9334 train loss:3.570371 +step:9335 train loss:3.663013 +step:9336 train loss:3.624557 +step:9337 train loss:3.578937 +step:9338 train loss:3.631200 +step:9339 train loss:3.607977 +step:9340 train loss:3.569012 +step:9341 train loss:3.654702 +step:9342 train loss:3.555010 +step:9343 train loss:3.549231 +step:9344 train loss:3.550354 +step:9345 train loss:3.689797 +step:9346 train loss:3.526248 +step:9347 train loss:3.544767 +step:9348 train loss:3.571635 +step:9349 train loss:3.517638 +step:9350 train loss:3.590152 +step:9351 train loss:3.569315 +step:9352 train loss:3.557305 +step:9353 train loss:3.585750 +step:9354 train loss:3.553361 +step:9355 train loss:3.549020 +step:9356 train loss:3.589222 +step:9357 train loss:3.549978 +step:9358 train loss:3.579972 
+step:9359 train loss:3.518192 +step:9360 train loss:3.541286 +step:9361 train loss:3.535791 +step:9362 train loss:3.527379 +step:9363 train loss:3.592747 +step:9364 train loss:3.569711 +step:9365 train loss:3.574279 +step:9366 train loss:3.569397 +step:9367 train loss:3.582939 +step:9368 train loss:3.557182 +step:9369 train loss:3.557480 +step:9370 train loss:3.567503 +step:9371 train loss:3.578950 +step:9372 train loss:3.552468 +step:9373 train loss:3.534222 +step:9374 train loss:3.569628 +step:9375 train loss:3.583373 +step:9376 train loss:3.525874 +step:9377 train loss:3.593399 +step:9378 train loss:3.598689 +step:9379 train loss:3.622321 +step:9380 train loss:3.556802 +step:9381 train loss:3.567085 +step:9382 train loss:3.541461 +step:9383 train loss:3.539051 +step:9384 train loss:3.507933 +step:9385 train loss:3.580337 +step:9386 train loss:3.604815 +step:9387 train loss:3.582674 +step:9388 train loss:3.518872 +step:9389 train loss:3.537232 +step:9390 train loss:3.584967 +step:9391 train loss:3.587872 +step:9392 train loss:3.548086 +step:9393 train loss:3.540108 +step:9394 train loss:3.566783 +step:9395 train loss:3.562105 +step:9396 train loss:3.709110 +step:9397 train loss:3.597578 +step:9398 train loss:3.619380 +step:9399 train loss:3.571107 +step:9400 train loss:3.570545 +step:9401 train loss:3.566817 +step:9402 train loss:3.568247 +step:9403 train loss:3.500789 +step:9404 train loss:3.575664 +step:9405 train loss:3.535188 +step:9406 train loss:3.588247 +step:9407 train loss:3.533752 +step:9408 train loss:3.469616 +step:9409 train loss:3.532345 +step:9410 train loss:3.613786 +step:9411 train loss:3.577108 +step:9412 train loss:3.604955 +step:9413 train loss:3.624338 +step:9414 train loss:3.560496 +step:9415 train loss:3.551260 +step:9416 train loss:3.569182 +step:9417 train loss:3.520624 +step:9418 train loss:3.550147 +step:9419 train loss:3.521474 +step:9420 train loss:3.538923 +step:9421 train loss:3.585400 +step:9422 train loss:3.539468 +step:9423 train loss:3.601569 +step:9424 train loss:3.537941 +step:9425 train loss:3.583590 +step:9426 train loss:3.585463 +step:9427 train loss:3.557295 +step:9428 train loss:3.667052 +step:9429 train loss:3.554116 +step:9430 train loss:3.512150 +step:9431 train loss:3.603185 +step:9432 train loss:3.568195 +step:9433 train loss:3.604318 +step:9434 train loss:3.558672 +step:9435 train loss:3.584989 +step:9436 train loss:3.554805 +step:9437 train loss:3.566024 +step:9438 train loss:3.561764 +step:9439 train loss:3.559407 +step:9440 train loss:3.545365 +step:9441 train loss:3.565119 +step:9442 train loss:3.501433 +step:9443 train loss:3.555125 +step:9444 train loss:3.622875 +step:9445 train loss:3.554272 +step:9446 train loss:3.533227 +step:9447 train loss:3.598402 +step:9448 train loss:3.533837 +step:9449 train loss:3.557583 +step:9450 train loss:3.595040 +step:9451 train loss:3.514099 +step:9452 train loss:3.567972 +step:9453 train loss:3.544449 +step:9454 train loss:3.610244 +step:9455 train loss:3.588683 +step:9456 train loss:3.511035 +step:9457 train loss:3.560982 +step:9458 train loss:3.549572 +step:9459 train loss:3.538968 +step:9460 train loss:3.584690 +step:9461 train loss:3.610889 +step:9462 train loss:3.557650 +step:9463 train loss:3.588624 +step:9464 train loss:3.545679 +step:9465 train loss:3.629651 +step:9466 train loss:3.581098 +step:9467 train loss:3.606486 +step:9468 train loss:3.550416 +step:9469 train loss:3.540125 +step:9470 train loss:3.537910 +step:9471 train loss:3.577694 +step:9472 train loss:3.599777 +step:9473 train 
loss:3.591904 +step:9474 train loss:3.535211 +step:9475 train loss:3.528615 +step:9476 train loss:3.748887 +step:9477 train loss:3.615766 +step:9478 train loss:3.595044 +step:9479 train loss:3.694005 +step:9480 train loss:3.542504 +step:9481 train loss:3.573952 +step:9482 train loss:3.600647 +step:9483 train loss:3.557276 +step:9484 train loss:3.586993 +step:9485 train loss:3.507122 +step:9486 train loss:3.546172 +step:9487 train loss:3.578325 +step:9488 train loss:3.529337 +step:9489 train loss:3.580255 +step:9490 train loss:3.543572 +step:9491 train loss:3.588233 +step:9492 train loss:3.608753 +step:9493 train loss:3.576109 +step:9494 train loss:3.589662 +step:9495 train loss:3.541009 +step:9496 train loss:3.602857 +step:9497 train loss:3.616570 +step:9498 train loss:3.563527 +step:9499 train loss:3.611090 +step:9500 validation loss:3.509398 total_sharp:4.7676e-03 L1_sharp:2.4760e-01 L2_sharp:7.1450e-02 L3_sharp:8.4857e-02 L4_sharp:5.8519e-02 L5_sharp:7.2725e-02 L6_sharp:9.0368e-02 L7_sharp:1.3401e-01 L8_sharp:1.2544e-01 L9_sharp:8.6284e-02 L10_sharp:6.6011e-02 L11_sharp:5.3928e-02 L12_sharp:7.1475e-02 total_fnorm:1.3191e+00 total_l1_linf:7.2763e+03 total_spectral:1.3191e+00 L1_fnorm:3.2235e-02 L2_fnorm:3.1047e-02 L3_fnorm:3.0998e-02 L4_fnorm:3.1510e-02 L5_fnorm:3.1956e-02 L6_fnorm:3.2153e-02 L7_fnorm:3.2172e-02 L8_fnorm:3.2196e-02 L9_fnorm:3.1976e-02 L10_fnorm:3.2139e-02 L11_fnorm:3.2182e-02 L12_fnorm:3.2257e-02 L1_l1linf:3.2879e-01 L2_l1linf:3.4249e-01 L3_l1linf:3.3411e-01 L4_l1linf:3.3155e-01 L5_l1linf:3.1583e-01 L6_l1linf:2.9670e-01 L7_l1linf:3.0662e-01 L8_l1linf:3.0383e-01 L9_l1linf:2.9768e-01 L10_l1linf:3.1254e-01 L11_l1linf:3.3596e-01 L12_l1linf:3.3018e-01 L1_spectral:7.3655e-03 L2_spectral:7.6839e-03 L3_spectral:7.5172e-03 L4_spectral:7.4378e-03 L5_spectral:7.1320e-03 L6_spectral:6.6903e-03 L7_spectral:6.8190e-03 L8_spectral:6.8499e-03 L9_spectral:6.6953e-03 L10_spectral:7.0072e-03 L11_spectral:7.5372e-03 L12_spectral:7.4919e-03 ip_v_neg_g:3.1208e-03 cos_v_neg_g:4.5973e-04 v_norm:1.3191e+00 g_norm:5.1460e+00 hv_norm:1.1599e+00 cos_v_hv:5.4221e-03 hg_norm:6.5203e+02 cos_g_hg:4.6931e-01 v_par:1.2774e-05 v_perp:1.3191e+00 L1_cos_v_neg_g:6.6422e-03 L1_v_norm:3.2235e-02 L2_cos_v_neg_g:7.5338e-03 L2_v_norm:3.1047e-02 L3_cos_v_neg_g:6.1539e-03 L3_v_norm:3.0998e-02 L4_cos_v_neg_g:6.7440e-03 L4_v_norm:3.1510e-02 L5_cos_v_neg_g:5.3143e-03 L5_v_norm:3.1956e-02 L6_cos_v_neg_g:5.7592e-03 L6_v_norm:3.2153e-02 L7_cos_v_neg_g:6.2583e-03 L7_v_norm:3.2172e-02 L8_cos_v_neg_g:6.6602e-03 L8_v_norm:3.2196e-02 L9_cos_v_neg_g:5.4502e-03 L9_v_norm:3.1976e-02 L10_cos_v_neg_g:4.5322e-03 L10_v_norm:3.2139e-02 L11_cos_v_neg_g:3.9101e-03 L11_v_norm:3.2182e-02 L12_cos_v_neg_g:2.2433e-03 L12_v_norm:3.2257e-02 +step:9500 train loss:3.606571 +step:9501 train loss:3.583027 +step:9502 train loss:3.557197 +step:9503 train loss:3.571636 +step:9504 train loss:3.527903 +step:9505 train loss:3.550067 +step:9506 train loss:3.564350 +step:9507 train loss:3.554729 +step:9508 train loss:3.745806 +step:9509 train loss:3.560517 +step:9510 train loss:3.552054 +step:9511 train loss:3.575207 +step:9512 train loss:3.605554 +step:9513 train loss:3.596098 +step:9514 train loss:3.563415 +step:9515 train loss:3.465054 +step:9516 train loss:3.564663 +step:9517 train loss:3.601990 +step:9518 train loss:3.576075 +step:9519 train loss:3.589162 +step:9520 train loss:3.477273 +step:9521 train loss:3.468234 +step:9522 train loss:3.588052 +step:9523 train loss:3.586429 +step:9524 train loss:3.586378 +step:9525 train loss:3.628899 +step:9526 
train loss:3.647185 +step:9527 train loss:3.604666 +step:9528 train loss:3.536047 +step:9529 train loss:3.578026 +step:9530 train loss:3.625590 +step:9531 train loss:3.532473 +step:9532 train loss:3.579130 +step:9533 train loss:3.552987 +step:9534 train loss:3.635123 +step:9535 train loss:3.554489 +step:9536 train loss:3.536317 +step:9537 train loss:3.485618 +step:9538 train loss:3.501574 +step:9539 train loss:3.576623 +step:9540 train loss:3.494153 +step:9541 train loss:3.552637 +step:9542 train loss:3.679116 +step:9543 train loss:3.576711 +step:9544 train loss:3.615781 +step:9545 train loss:3.548191 +step:9546 train loss:3.573748 +step:9547 train loss:3.620196 +step:9548 train loss:3.555511 +step:9549 train loss:3.526702 +step:9550 train loss:3.560415 +step:9551 train loss:3.550841 +step:9552 train loss:3.574230 +step:9553 train loss:3.571959 +step:9554 train loss:3.615706 +step:9555 train loss:3.617850 +step:9556 train loss:3.529536 +step:9557 train loss:3.547718 +step:9558 train loss:3.612221 +step:9559 train loss:3.624034 +step:9560 train loss:3.530625 +step:9561 train loss:3.557937 +step:9562 train loss:3.598794 +step:9563 train loss:3.544124 +step:9564 train loss:3.578547 +step:9565 train loss:3.558308 +step:9566 train loss:3.531308 +step:9567 train loss:3.593419 +step:9568 train loss:3.568930 +step:9569 train loss:3.609902 +step:9570 train loss:3.504681 +step:9571 train loss:3.577558 +step:9572 train loss:3.521540 +step:9573 train loss:3.554572 +step:9574 train loss:3.528384 +step:9575 train loss:3.600179 +step:9576 train loss:3.489042 +step:9577 train loss:3.540942 +step:9578 train loss:3.545685 +step:9579 train loss:3.542472 +step:9580 train loss:3.609604 +step:9581 train loss:3.599307 +step:9582 train loss:3.561772 +step:9583 train loss:3.594869 +step:9584 train loss:3.533906 +step:9585 train loss:3.551600 +step:9586 train loss:3.600142 +step:9587 train loss:3.571609 +step:9588 train loss:3.556672 +step:9589 train loss:3.615808 +step:9590 train loss:3.579593 +step:9591 train loss:3.546421 +step:9592 train loss:3.565626 +step:9593 train loss:3.566027 +step:9594 train loss:3.582055 +step:9595 train loss:3.559463 +step:9596 train loss:3.646675 +step:9597 train loss:3.553142 +step:9598 train loss:3.516963 +step:9599 train loss:3.523826 +step:9600 train loss:3.606688 +step:9601 train loss:3.524912 +step:9602 train loss:3.608229 +step:9603 train loss:3.602467 +step:9604 train loss:3.485238 +step:9605 train loss:3.571326 +step:9606 train loss:3.622862 +step:9607 train loss:3.548891 +step:9608 train loss:3.552004 +step:9609 train loss:3.563913 +step:9610 train loss:3.607719 +step:9611 train loss:3.541459 +step:9612 train loss:3.546237 +step:9613 train loss:3.585797 +step:9614 train loss:3.558557 +step:9615 train loss:3.743625 +step:9616 train loss:3.556072 +step:9617 train loss:3.553718 +step:9618 train loss:3.497864 +step:9619 train loss:3.562289 +step:9620 train loss:3.618675 +step:9621 train loss:3.540614 +step:9622 train loss:3.553008 +step:9623 train loss:3.595142 +step:9624 train loss:3.579156 +step:9625 train loss:3.594410 +step:9626 train loss:3.566685 +step:9627 train loss:3.646538 +step:9628 train loss:3.612377 +step:9629 train loss:3.524344 +step:9630 train loss:3.583648 +step:9631 train loss:3.570011 +step:9632 train loss:3.539647 +step:9633 train loss:3.580152 +step:9634 train loss:3.649314 +step:9635 train loss:3.553028 +step:9636 train loss:3.497585 +step:9637 train loss:3.631806 +step:9638 train loss:3.516767 +step:9639 train loss:3.487477 +step:9640 train loss:3.607697 
+step:9641 train loss:3.577481 +step:9642 train loss:3.553369 +step:9643 train loss:3.558423 +step:9644 train loss:3.613611 +step:9645 train loss:3.540663 +step:9646 train loss:3.577878 +step:9647 train loss:3.587540 +step:9648 train loss:3.538963 +step:9649 train loss:3.513699 +step:9650 train loss:3.530128 +step:9651 train loss:3.621852 +step:9652 train loss:3.601620 +step:9653 train loss:3.541917 +step:9654 train loss:3.523164 +step:9655 train loss:3.522700 +step:9656 train loss:3.510849 +step:9657 train loss:3.543525 +step:9658 train loss:3.603387 +step:9659 train loss:3.705532 +step:9660 train loss:3.486219 +step:9661 train loss:3.510561 +step:9662 train loss:3.531724 +step:9663 train loss:3.568207 +step:9664 train loss:3.621611 +step:9665 train loss:3.464490 +step:9666 train loss:3.506684 +step:9667 train loss:3.643559 +step:9668 train loss:3.625715 +step:9669 train loss:3.638763 +step:9670 train loss:3.620000 +step:9671 train loss:3.622153 +step:9672 train loss:3.534762 +step:9673 train loss:3.556875 +step:9674 train loss:3.567623 +step:9675 train loss:3.563081 +step:9676 train loss:3.526497 +step:9677 train loss:3.531242 +step:9678 train loss:3.569462 +step:9679 train loss:3.557778 +step:9680 train loss:3.554564 +step:9681 train loss:3.543525 +step:9682 train loss:3.609996 +step:9683 train loss:3.585298 +step:9684 train loss:3.502068 +step:9685 train loss:3.584967 +step:9686 train loss:3.619304 +step:9687 train loss:3.525045 +step:9688 train loss:3.611237 +step:9689 train loss:3.713180 +step:9690 train loss:3.557252 +step:9691 train loss:3.543985 +step:9692 train loss:3.500208 +step:9693 train loss:3.503341 +step:9694 train loss:3.523298 +step:9695 train loss:3.627471 +step:9696 train loss:3.662232 +step:9697 train loss:3.569137 +step:9698 train loss:3.607919 +step:9699 train loss:3.565965 +step:9700 train loss:3.565298 +step:9701 train loss:3.616498 +step:9702 train loss:3.533420 +step:9703 train loss:3.554049 +step:9704 train loss:3.639294 +step:9705 train loss:3.538564 +step:9706 train loss:3.529368 +step:9707 train loss:3.580328 +step:9708 train loss:3.531764 +step:9709 train loss:3.553358 +step:9710 train loss:3.568954 +step:9711 train loss:3.544222 +step:9712 train loss:3.554424 +step:9713 train loss:3.603975 +step:9714 train loss:3.559777 +step:9715 train loss:3.583657 +step:9716 train loss:3.606632 +step:9717 train loss:3.522175 +step:9718 train loss:3.530384 +step:9719 train loss:3.612555 +step:9720 train loss:3.544887 +step:9721 train loss:3.532915 +step:9722 train loss:3.598636 +step:9723 train loss:3.544538 +step:9724 train loss:3.571848 +step:9725 train loss:3.626498 +step:9726 train loss:3.567196 +step:9727 train loss:3.541797 +step:9728 train loss:3.581981 +step:9729 train loss:3.610238 +step:9730 train loss:3.678902 +step:9731 train loss:3.601906 +step:9732 train loss:3.564759 +step:9733 train loss:3.603692 +step:9734 train loss:3.525485 +step:9735 train loss:3.632371 +step:9736 train loss:3.533403 +step:9737 train loss:3.592479 +step:9738 train loss:3.555772 +step:9739 train loss:3.629138 +step:9740 train loss:3.593905 +step:9741 train loss:3.534262 +step:9742 train loss:3.624659 +step:9743 train loss:3.504217 +step:9744 train loss:3.562402 +step:9745 train loss:3.522051 +step:9746 train loss:3.557354 +step:9747 train loss:3.547569 +step:9748 train loss:3.451932 +step:9749 train loss:3.545397 +step:9750 validation loss:3.499088 +step:9750 train loss:3.526529 +step:9751 train loss:3.668644 +step:9752 train loss:3.551361 +step:9753 train loss:3.513966 +step:9754 
train loss:3.543277 +step:9755 train loss:3.538484 +step:9756 train loss:3.540449 +step:9757 train loss:3.502289 +step:9758 train loss:3.503990 +step:9759 train loss:3.545254 +step:9760 train loss:3.494086 +step:9761 train loss:3.534049 +step:9762 train loss:3.529800 +step:9763 train loss:3.552321 +step:9764 train loss:3.535965 +step:9765 train loss:3.500826 +step:9766 train loss:3.586838 +step:9767 train loss:3.544542 +step:9768 train loss:3.556300 +step:9769 train loss:3.507921 +step:9770 train loss:3.510048 +step:9771 train loss:3.560158 +step:9772 train loss:3.575617 +step:9773 train loss:3.545629 +step:9774 train loss:3.518848 +step:9775 train loss:3.608341 +step:9776 train loss:3.603087 +step:9777 train loss:3.500310 +step:9778 train loss:3.500719 +step:9779 train loss:3.512348 +step:9780 train loss:3.506299 +step:9781 train loss:3.526287 +step:9782 train loss:3.605140 +step:9783 train loss:3.513932 +step:9784 train loss:3.543445 +step:9785 train loss:3.529624 +step:9786 train loss:3.569729 +step:9787 train loss:3.591799 +step:9788 train loss:3.521293 +step:9789 train loss:3.528467 +step:9790 train loss:3.490922 +step:9791 train loss:3.540572 +step:9792 train loss:3.556703 +step:9793 train loss:3.570917 +step:9794 train loss:3.549342 +step:9795 train loss:3.553357 +step:9796 train loss:3.536281 +step:9797 train loss:3.533552 +step:9798 train loss:3.545234 +step:9799 train loss:3.551224 +step:9800 train loss:3.619666 +step:9801 train loss:3.546911 +step:9802 train loss:3.602708 +step:9803 train loss:3.463604 +step:9804 train loss:3.558223 +step:9805 train loss:3.561425 +step:9806 train loss:3.538609 +step:9807 train loss:3.506119 +step:9808 train loss:3.418248 +step:9809 train loss:3.607828 +step:9810 train loss:3.560202 +step:9811 train loss:3.550464 +step:9812 train loss:3.520976 +step:9813 train loss:3.604702 +step:9814 train loss:3.592162 +step:9815 train loss:3.496699 +step:9816 train loss:3.498999 +step:9817 train loss:3.532405 +step:9818 train loss:3.557535 +step:9819 train loss:3.527056 +step:9820 train loss:3.600419 +step:9821 train loss:3.572438 +step:9822 train loss:3.551482 +step:9823 train loss:3.608341 +step:9824 train loss:3.516399 +step:9825 train loss:3.599944 +step:9826 train loss:3.595159 +step:9827 train loss:3.601220 +step:9828 train loss:3.517120 +step:9829 train loss:3.527398 +step:9830 train loss:3.511307 +step:9831 train loss:3.574509 +step:9832 train loss:3.580961 +step:9833 train loss:3.498010 +step:9834 train loss:3.545552 +step:9835 train loss:3.514511 +step:9836 train loss:3.573648 +step:9837 train loss:3.548728 +step:9838 train loss:3.588121 +step:9839 train loss:3.560698 +step:9840 train loss:3.528936 +step:9841 train loss:3.535405 +step:9842 train loss:3.596916 +step:9843 train loss:3.589836 +step:9844 train loss:3.543686 +step:9845 train loss:3.567443 +step:9846 train loss:3.507873 +step:9847 train loss:3.635946 +step:9848 train loss:3.559577 +step:9849 train loss:3.586435 +step:9850 train loss:3.502398 +step:9851 train loss:3.555737 +step:9852 train loss:3.521712 +step:9853 train loss:3.545027 +step:9854 train loss:3.551862 +step:9855 train loss:3.502225 +step:9856 train loss:3.506170 +step:9857 train loss:3.497115 +step:9858 train loss:3.561285 +step:9859 train loss:3.479227 +step:9860 train loss:3.716903 +step:9861 train loss:3.540068 +step:9862 train loss:3.509844 +step:9863 train loss:3.492411 +step:9864 train loss:3.615087 +step:9865 train loss:3.490949 +step:9866 train loss:3.534306 +step:9867 train loss:3.530101 +step:9868 train loss:3.592033 
+step:9869 train loss:3.554641 +step:9870 train loss:3.526005 +step:9871 train loss:3.565118 +step:9872 train loss:3.513229 +step:9873 train loss:3.559574 +step:9874 train loss:3.528189 +step:9875 train loss:3.529182 +step:9876 train loss:3.495650 +step:9877 train loss:3.544051 +step:9878 train loss:3.580057 +step:9879 train loss:3.577647 +step:9880 train loss:3.509737 +step:9881 train loss:3.562623 +step:9882 train loss:3.524171 +step:9883 train loss:3.531779 +step:9884 train loss:3.524952 +step:9885 train loss:3.589146 +step:9886 train loss:3.555023 +step:9887 train loss:3.557615 +step:9888 train loss:3.575378 +step:9889 train loss:3.611072 +step:9890 train loss:3.525876 +step:9891 train loss:3.530978 +step:9892 train loss:3.501387 +step:9893 train loss:3.621663 +step:9894 train loss:3.530717 +step:9895 train loss:3.467373 +step:9896 train loss:3.621787 +step:9897 train loss:3.499059 +step:9898 train loss:3.566648 +step:9899 train loss:3.546447 +step:9900 train loss:3.590304 +step:9901 train loss:3.513347 +step:9902 train loss:3.558466 +step:9903 train loss:3.530476 +step:9904 train loss:3.578263 +step:9905 train loss:3.483263 +step:9906 train loss:3.523945 +step:9907 train loss:3.529139 +step:9908 train loss:3.530997 +step:9909 train loss:3.545103 +step:9910 train loss:3.570193 +step:9911 train loss:3.648199 +step:9912 train loss:3.531266 +step:9913 train loss:3.532711 +step:9914 train loss:3.541659 +step:9915 train loss:3.542272 +step:9916 train loss:3.491023 +step:9917 train loss:3.526701 +step:9918 train loss:3.525713 +step:9919 train loss:3.686569 +step:9920 train loss:3.473271 +step:9921 train loss:3.567076 +step:9922 train loss:3.525838 +step:9923 train loss:3.582057 +step:9924 train loss:3.496854 +step:9925 train loss:3.555866 +step:9926 train loss:3.535214 +step:9927 train loss:3.576558 +step:9928 train loss:3.505020 +step:9929 train loss:3.541974 +step:9930 train loss:3.633073 +step:9931 train loss:3.595379 +step:9932 train loss:3.484408 +step:9933 train loss:3.577954 +step:9934 train loss:3.494450 +step:9935 train loss:3.613921 +step:9936 train loss:3.518881 +step:9937 train loss:3.546463 +step:9938 train loss:3.529032 +step:9939 train loss:3.599004 +step:9940 train loss:3.632964 +step:9941 train loss:3.509047 +step:9942 train loss:3.548533 +step:9943 train loss:3.676902 +step:9944 train loss:3.547816 +step:9945 train loss:3.567351 +step:9946 train loss:3.538605 +step:9947 train loss:3.490248 +step:9948 train loss:3.535011 +step:9949 train loss:3.432833 +step:9950 train loss:3.580348 +step:9951 train loss:3.501468 +step:9952 train loss:3.571025 +step:9953 train loss:3.532166 +step:9954 train loss:3.590169 +step:9955 train loss:3.560895 +step:9956 train loss:3.570192 +step:9957 train loss:3.543264 +step:9958 train loss:3.598548 +step:9959 train loss:3.497767 +step:9960 train loss:3.533599 +step:9961 train loss:3.538276 +step:9962 train loss:3.589397 +step:9963 train loss:3.482857 +step:9964 train loss:3.535089 +step:9965 train loss:3.536289 +step:9966 train loss:3.593069 +step:9967 train loss:3.510145 +step:9968 train loss:3.574652 +step:9969 train loss:3.490247 +step:9970 train loss:3.529124 +step:9971 train loss:3.571476 +step:9972 train loss:3.596327 +step:9973 train loss:3.571344 +step:9974 train loss:3.557200 +step:9975 train loss:3.526891 +step:9976 train loss:3.486831 +step:9977 train loss:3.540104 +step:9978 train loss:3.535494 +step:9979 train loss:3.547995 +step:9980 train loss:3.601737 +step:9981 train loss:3.506221 +step:9982 train loss:3.573641 +step:9983 train 
loss:3.491419 +step:9984 train loss:3.554958 +step:9985 train loss:3.497331 +step:9986 train loss:3.550181 +step:9987 train loss:3.588074 +step:9988 train loss:3.609198 +step:9989 train loss:3.503066 +step:9990 train loss:3.639752 +step:9991 train loss:3.481236 +step:9992 train loss:3.562998 +step:9993 train loss:3.550582 +step:9994 train loss:3.666936 +step:9995 train loss:3.603613 +step:9996 train loss:3.521629 +step:9997 train loss:3.560597 +step:9998 train loss:3.615991 +step:9999 train loss:3.581844 +step:10000 validation loss:3.492885 total_sharp:4.0912e-03 L1_sharp:1.5360e-01 L2_sharp:5.4004e-02 L3_sharp:9.0520e-02 L4_sharp:5.8582e-02 L5_sharp:5.7646e-02 L6_sharp:6.9976e-02 L7_sharp:8.7604e-02 L8_sharp:1.0069e-01 L9_sharp:7.7788e-02 L10_sharp:5.9486e-02 L11_sharp:5.5965e-02 L12_sharp:8.9783e-02 total_fnorm:1.3136e+00 total_l1_linf:7.2557e+03 total_spectral:1.3136e+00 L1_fnorm:3.1853e-02 L2_fnorm:3.1093e-02 L3_fnorm:3.1098e-02 L4_fnorm:3.1752e-02 L5_fnorm:3.2103e-02 L6_fnorm:3.2260e-02 L7_fnorm:3.2232e-02 L8_fnorm:3.2309e-02 L9_fnorm:3.2186e-02 L10_fnorm:3.2165e-02 L11_fnorm:3.2650e-02 L12_fnorm:3.2542e-02 L1_l1linf:3.1135e-01 L2_l1linf:3.4171e-01 L3_l1linf:3.5126e-01 L4_l1linf:3.4534e-01 L5_l1linf:3.2025e-01 L6_l1linf:3.0590e-01 L7_l1linf:2.8000e-01 L8_l1linf:3.0918e-01 L9_l1linf:3.0344e-01 L10_l1linf:3.3266e-01 L11_l1linf:3.7841e-01 L12_l1linf:3.5995e-01 L1_spectral:7.0071e-03 L2_spectral:7.6835e-03 L3_spectral:7.8989e-03 L4_spectral:7.8127e-03 L5_spectral:7.2424e-03 L6_spectral:6.9263e-03 L7_spectral:6.3541e-03 L8_spectral:6.9052e-03 L9_spectral:6.8525e-03 L10_spectral:7.4588e-03 L11_spectral:8.4792e-03 L12_spectral:8.1265e-03 ip_v_neg_g:1.4047e-03 cos_v_neg_g:1.8890e-04 v_norm:1.3136e+00 g_norm:5.6609e+00 hv_norm:1.0612e+00 cos_v_hv:5.0646e-03 hg_norm:1.6984e+03 cos_g_hg:4.7906e-01 v_par:1.0158e-05 v_perp:1.3136e+00 L1_cos_v_neg_g:-4.1466e-04 L1_v_norm:3.1853e-02 L2_cos_v_neg_g:1.2790e-03 L2_v_norm:3.1093e-02 L3_cos_v_neg_g:1.0968e-04 L3_v_norm:3.1098e-02 L4_cos_v_neg_g:4.2777e-03 L4_v_norm:3.1752e-02 L5_cos_v_neg_g:2.2411e-03 L5_v_norm:3.2103e-02 L6_cos_v_neg_g:1.7458e-03 L6_v_norm:3.2260e-02 L7_cos_v_neg_g:2.0465e-03 L7_v_norm:3.2232e-02 L8_cos_v_neg_g:1.3126e-03 L8_v_norm:3.2309e-02 L9_cos_v_neg_g:1.4978e-03 L9_v_norm:3.2186e-02 L10_cos_v_neg_g:2.6823e-03 L10_v_norm:3.2165e-02 L11_cos_v_neg_g:2.6272e-03 L11_v_norm:3.2650e-02 L12_cos_v_neg_g:3.6528e-03 L12_v_norm:3.2542e-02 diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/config.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..5205f564ae464b9c9043bf63b4381aa7df910e81 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/muon_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + 
"compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.0005, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 43, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "48cecf7e-13be-4a31-b018-07b3aee1fec4", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_1000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..036f2ac1df44f097094b4b40fe6ceaea6d5a7d9b --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3393642902374268, + "total_l1_linf_norm": 7372.5302734375, + "total_spectral_norm": 1.3393644094467163, + "layer_1_update_fnorm": 0.03131446614861488, + "layer_1_max_l1_linf_norm": 0.1772906482219696, + "layer_1_max_spectral_norm": 0.0040565780363976955, + "layer_2_update_fnorm": 0.028965208679437637, + "layer_2_max_l1_linf_norm": 0.20070844888687134, + "layer_2_max_spectral_norm": 0.0045843711122870445, + "layer_3_update_fnorm": 0.028254369273781776, + "layer_3_max_l1_linf_norm": 0.20670157670974731, + "layer_3_max_spectral_norm": 0.00475255586206913, + "layer_4_update_fnorm": 0.02915477566421032, + "layer_4_max_l1_linf_norm": 0.2146884649991989, + "layer_4_max_spectral_norm": 0.00490031810477376, + "layer_5_update_fnorm": 0.029661402106285095, + "layer_5_max_l1_linf_norm": 0.219647616147995, + "layer_5_max_spectral_norm": 0.004983220249414444, + "layer_6_update_fnorm": 0.030420878902077675, + "layer_6_max_l1_linf_norm": 0.22540460526943207, + "layer_6_max_spectral_norm": 0.005474958568811417, + "layer_7_update_fnorm": 0.031011028215289116, + "layer_7_max_l1_linf_norm": 0.22413870692253113, + "layer_7_max_spectral_norm": 0.006427688989788294, + "layer_8_update_fnorm": 0.031256161630153656, + "layer_8_max_l1_linf_norm": 0.22738876938819885, + "layer_8_max_spectral_norm": 0.0068135252222418785, + "layer_9_update_fnorm": 0.031679026782512665, + "layer_9_max_l1_linf_norm": 0.22729584574699402, + "layer_9_max_spectral_norm": 0.006666531786322594, + "layer_10_update_fnorm": 0.03181736171245575, + "layer_10_max_l1_linf_norm": 0.22524559497833252, + "layer_10_max_spectral_norm": 0.006842938717454672, + "layer_11_update_fnorm": 0.03160172328352928, + "layer_11_max_l1_linf_norm": 0.22077211737632751, + "layer_11_max_spectral_norm": 0.006156697403639555, + "layer_12_update_fnorm": 0.03208776190876961, + "layer_12_max_l1_linf_norm": 0.20763349533081055, + "layer_12_max_spectral_norm": 0.008225458674132824, + "total_sharpness": 0.020199671387672424, + "ip_v_neg_g": 0.018232502043247223, + "cos_v_neg_g": 0.0012659271014854312, + "v_norm": 1.3393642902374268, + "g_norm": 10.753228187561035, + "hv_norm": 8.154483795166016, + "cos_v_hv": 0.0033177719451487064, + "hg_norm": 16663.19140625, + "cos_g_hg": 0.6923304796218872, + "v_parallel_norm": 8.029315358726308e-05, + "v_perp_norm": 1.3393642902374268, + "layer_1_v_norm": 0.03131446614861488, + "layer_1_cos_v_neg_g": 0.02349923923611641, + "layer_2_v_norm": 0.028965208679437637, + "layer_2_cos_v_neg_g": 0.02056829258799553, + "layer_3_v_norm": 0.028254367411136627, + "layer_3_cos_v_neg_g": 
0.020160427317023277, + "layer_4_v_norm": 0.02915477566421032, + "layer_4_cos_v_neg_g": 0.0184998270124197, + "layer_5_v_norm": 0.029661402106285095, + "layer_5_cos_v_neg_g": 0.01810275949537754, + "layer_6_v_norm": 0.030420880764722824, + "layer_6_cos_v_neg_g": 0.017168981954455376, + "layer_7_v_norm": 0.031011028215289116, + "layer_7_cos_v_neg_g": 0.01370988693088293, + "layer_8_v_norm": 0.031256165355443954, + "layer_8_cos_v_neg_g": 0.01108513306826353, + "layer_9_v_norm": 0.031679026782512665, + "layer_9_cos_v_neg_g": 0.010638831183314323, + "layer_10_v_norm": 0.03181736171245575, + "layer_10_cos_v_neg_g": 0.009423688985407352, + "layer_11_v_norm": 0.03160172700881958, + "layer_11_cos_v_neg_g": 0.007977433502674103, + "layer_12_v_norm": 0.03208776190876961, + "layer_12_cos_v_neg_g": 0.005978530738502741, + "layer_1_sharpness": 1.6953718662261963, + "layer_2_sharpness": 0.892375648021698, + "layer_3_sharpness": 0.711284339427948, + "layer_4_sharpness": 0.5608697533607483, + "layer_5_sharpness": 0.42976340651512146, + "layer_6_sharpness": 0.3001166880130768, + "layer_7_sharpness": 0.18075712025165558, + "layer_8_sharpness": 0.12004082649946213, + "layer_9_sharpness": 0.09490561485290527, + "layer_10_sharpness": 0.07274656742811203, + "layer_11_sharpness": 0.05741115286946297, + "layer_12_sharpness": 0.04079093039035797 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_10000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..087a9f677e22ca895db83ce4f073dbdfc025f267 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.2882522344589233, + "total_l1_linf_norm": 7119.45068359375, + "total_spectral_norm": 1.288252353668213, + "layer_1_update_fnorm": 0.03194776549935341, + "layer_1_max_l1_linf_norm": 0.3128506541252136, + "layer_1_max_spectral_norm": 0.007077273912727833, + "layer_2_update_fnorm": 0.031124312430620193, + "layer_2_max_l1_linf_norm": 0.34515371918678284, + "layer_2_max_spectral_norm": 0.007754483725875616, + "layer_3_update_fnorm": 0.031001035124063492, + "layer_3_max_l1_linf_norm": 0.33556145429611206, + "layer_3_max_spectral_norm": 0.007587241008877754, + "layer_4_update_fnorm": 0.03165758401155472, + "layer_4_max_l1_linf_norm": 0.34195438027381897, + "layer_4_max_spectral_norm": 0.007723015733063221, + "layer_5_update_fnorm": 0.03192354738712311, + "layer_5_max_l1_linf_norm": 0.31118977069854736, + "layer_5_max_spectral_norm": 0.006979031953960657, + "layer_6_update_fnorm": 0.03221642225980759, + "layer_6_max_l1_linf_norm": 0.310306578874588, + "layer_6_max_spectral_norm": 0.007067713886499405, + "layer_7_update_fnorm": 0.032197799533605576, + "layer_7_max_l1_linf_norm": 0.28482943773269653, + "layer_7_max_spectral_norm": 0.006418393459171057, + "layer_8_update_fnorm": 0.03224934637546539, + "layer_8_max_l1_linf_norm": 0.3008924424648285, + "layer_8_max_spectral_norm": 0.006769997533410788, + "layer_9_update_fnorm": 0.03205880522727966, + "layer_9_max_l1_linf_norm": 0.2933202087879181, + "layer_9_max_spectral_norm": 0.006625790614634752, + "layer_10_update_fnorm": 0.03209714964032173, + "layer_10_max_l1_linf_norm": 0.3226809501647949, + "layer_10_max_spectral_norm": 0.0072688027285039425, + "layer_11_update_fnorm": 0.03249620646238327, + 
"layer_11_max_l1_linf_norm": 0.3732023239135742, + "layer_11_max_spectral_norm": 0.008284374140202999, + "layer_12_update_fnorm": 0.03239143639802933, + "layer_12_max_l1_linf_norm": 0.34244024753570557, + "layer_12_max_spectral_norm": 0.007749435491859913, + "total_sharpness": 0.004280897788703442, + "ip_v_neg_g": 0.0015391422202810645, + "cos_v_neg_g": 0.00020475426572375, + "v_norm": 1.2882522344589233, + "g_norm": 5.835054397583008, + "hv_norm": 1.1101595163345337, + "cos_v_hv": 0.0049676429480314255, + "hg_norm": 1917.348876953125, + "cos_g_hg": 0.5294162631034851, + "v_parallel_norm": 9.790974218049087e-06, + "v_perp_norm": 1.2882522344589233, + "layer_1_v_norm": 0.03194776549935341, + "layer_1_cos_v_neg_g": 0.0004389836103655398, + "layer_2_v_norm": 0.031124312430620193, + "layer_2_cos_v_neg_g": 0.0001652186765568331, + "layer_3_v_norm": 0.03100103698670864, + "layer_3_cos_v_neg_g": 0.00019472588610369712, + "layer_4_v_norm": 0.03165758401155472, + "layer_4_cos_v_neg_g": 0.004251560196280479, + "layer_5_v_norm": 0.03192354738712311, + "layer_5_cos_v_neg_g": 0.002848409116268158, + "layer_6_v_norm": 0.03221642225980759, + "layer_6_cos_v_neg_g": 0.002680150792002678, + "layer_7_v_norm": 0.032197799533605576, + "layer_7_cos_v_neg_g": 0.002897953148931265, + "layer_8_v_norm": 0.03224934637546539, + "layer_8_cos_v_neg_g": 0.001899398979730904, + "layer_9_v_norm": 0.03205880522727966, + "layer_9_cos_v_neg_g": 0.001670782221481204, + "layer_10_v_norm": 0.03209714964032173, + "layer_10_cos_v_neg_g": 0.0031485697254538536, + "layer_11_v_norm": 0.03249620646238327, + "layer_11_cos_v_neg_g": 0.0022085336968302727, + "layer_12_v_norm": 0.03239143639802933, + "layer_12_cos_v_neg_g": 0.0023348131217062473, + "layer_1_sharpness": 0.26123443245887756, + "layer_2_sharpness": 0.05780445784330368, + "layer_3_sharpness": 0.08108628541231155, + "layer_4_sharpness": 0.057025689631700516, + "layer_5_sharpness": 0.055819302797317505, + "layer_6_sharpness": 0.07291987538337708, + "layer_7_sharpness": 0.08989494293928146, + "layer_8_sharpness": 0.09235631674528122, + "layer_9_sharpness": 0.07311675697565079, + "layer_10_sharpness": 0.060300856828689575, + "layer_11_sharpness": 0.05119657516479492, + "layer_12_sharpness": 0.07011820375919342 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_1500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..56173e19dd468b797ee9c7523937020ae5fa629b --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3021628856658936, + "total_l1_linf_norm": 7187.08837890625, + "total_spectral_norm": 1.3021626472473145, + "layer_1_update_fnorm": 0.031676776707172394, + "layer_1_max_l1_linf_norm": 0.22583559155464172, + "layer_1_max_spectral_norm": 0.005086785182356834, + "layer_2_update_fnorm": 0.028952397406101227, + "layer_2_max_l1_linf_norm": 0.2334977388381958, + "layer_2_max_spectral_norm": 0.005294593516737223, + "layer_3_update_fnorm": 0.02795465476810932, + "layer_3_max_l1_linf_norm": 0.2499549388885498, + "layer_3_max_spectral_norm": 0.005597645416855812, + "layer_4_update_fnorm": 0.02921832725405693, + "layer_4_max_l1_linf_norm": 0.24584145843982697, + "layer_4_max_spectral_norm": 0.005532673094421625, + "layer_5_update_fnorm": 0.029837986454367638, + 
"layer_5_max_l1_linf_norm": 0.25449225306510925, + "layer_5_max_spectral_norm": 0.0056791165843605995, + "layer_6_update_fnorm": 0.030823249369859695, + "layer_6_max_l1_linf_norm": 0.25873008370399475, + "layer_6_max_spectral_norm": 0.005795336794108152, + "layer_7_update_fnorm": 0.031505484133958817, + "layer_7_max_l1_linf_norm": 0.2658623456954956, + "layer_7_max_spectral_norm": 0.005949967075139284, + "layer_8_update_fnorm": 0.03156110271811485, + "layer_8_max_l1_linf_norm": 0.26553845405578613, + "layer_8_max_spectral_norm": 0.005953098181635141, + "layer_9_update_fnorm": 0.03185790032148361, + "layer_9_max_l1_linf_norm": 0.2767946422100067, + "layer_9_max_spectral_norm": 0.006144500337541103, + "layer_10_update_fnorm": 0.03197947517037392, + "layer_10_max_l1_linf_norm": 0.2717246115207672, + "layer_10_max_spectral_norm": 0.006115986034274101, + "layer_11_update_fnorm": 0.031475573778152466, + "layer_11_max_l1_linf_norm": 0.25634467601776123, + "layer_11_max_spectral_norm": 0.0057932669296860695, + "layer_12_update_fnorm": 0.03182969614863396, + "layer_12_max_l1_linf_norm": 0.2415553331375122, + "layer_12_max_spectral_norm": 0.005625730846077204, + "total_sharpness": 0.018695196136832237, + "ip_v_neg_g": 0.013550284318625927, + "cos_v_neg_g": 0.0009995328728109598, + "v_norm": 1.3021628856658936, + "g_norm": 10.410845756530762, + "hv_norm": 6.419321060180664, + "cos_v_hv": 0.003792331088334322, + "hg_norm": 16391.638671875, + "cos_g_hg": 0.6375614404678345, + "v_parallel_norm": 4.825579890166409e-05, + "v_perp_norm": 1.3021628856658936, + "layer_1_v_norm": 0.031676776707172394, + "layer_1_cos_v_neg_g": 0.017015349119901657, + "layer_2_v_norm": 0.028952397406101227, + "layer_2_cos_v_neg_g": 0.015975221991539, + "layer_3_v_norm": 0.02795465476810932, + "layer_3_cos_v_neg_g": 0.015282834880053997, + "layer_4_v_norm": 0.02921832725405693, + "layer_4_cos_v_neg_g": 0.013006944209337234, + "layer_5_v_norm": 0.029837986454367638, + "layer_5_cos_v_neg_g": 0.012984032742679119, + "layer_6_v_norm": 0.030823251232504845, + "layer_6_cos_v_neg_g": 0.01239297166466713, + "layer_7_v_norm": 0.031505484133958817, + "layer_7_cos_v_neg_g": 0.010370075702667236, + "layer_8_v_norm": 0.03156110271811485, + "layer_8_cos_v_neg_g": 0.008701791986823082, + "layer_9_v_norm": 0.03185790032148361, + "layer_9_cos_v_neg_g": 0.008357801474630833, + "layer_10_v_norm": 0.03197947517037392, + "layer_10_cos_v_neg_g": 0.008198034949600697, + "layer_11_v_norm": 0.031475573778152466, + "layer_11_cos_v_neg_g": 0.007918644696474075, + "layer_12_v_norm": 0.03182969614863396, + "layer_12_cos_v_neg_g": 0.006962868385016918, + "layer_1_sharpness": 1.0426822900772095, + "layer_2_sharpness": 0.6678247451782227, + "layer_3_sharpness": 0.5605916380882263, + "layer_4_sharpness": 0.4178398549556732, + "layer_5_sharpness": 0.3908917009830475, + "layer_6_sharpness": 0.33658334612846375, + "layer_7_sharpness": 0.2607572376728058, + "layer_8_sharpness": 0.15230262279510498, + "layer_9_sharpness": 0.1155579686164856, + "layer_10_sharpness": 0.08836796879768372, + "layer_11_sharpness": 0.06693302094936371, + "layer_12_sharpness": 0.06134595721960068 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_2000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..511f550fef878bc582d0250fec8f61c408c6a246 --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3586710691452026, + "total_l1_linf_norm": 7500.7509765625, + "total_spectral_norm": 1.3586710691452026, + "layer_1_update_fnorm": 0.03155208006501198, + "layer_1_max_l1_linf_norm": 0.20226989686489105, + "layer_1_max_spectral_norm": 0.004641259089112282, + "layer_2_update_fnorm": 0.02924257144331932, + "layer_2_max_l1_linf_norm": 0.22429051995277405, + "layer_2_max_spectral_norm": 0.005105805117636919, + "layer_3_update_fnorm": 0.027822664007544518, + "layer_3_max_l1_linf_norm": 0.24142104387283325, + "layer_3_max_spectral_norm": 0.005438980646431446, + "layer_4_update_fnorm": 0.029725130647420883, + "layer_4_max_l1_linf_norm": 0.24559852480888367, + "layer_4_max_spectral_norm": 0.005500786937773228, + "layer_5_update_fnorm": 0.030294738709926605, + "layer_5_max_l1_linf_norm": 0.2494777888059616, + "layer_5_max_spectral_norm": 0.005624736193567514, + "layer_6_update_fnorm": 0.031061984598636627, + "layer_6_max_l1_linf_norm": 0.24245184659957886, + "layer_6_max_spectral_norm": 0.0054326727986335754, + "layer_7_update_fnorm": 0.03151985630393028, + "layer_7_max_l1_linf_norm": 0.24113664031028748, + "layer_7_max_spectral_norm": 0.00539387809112668, + "layer_8_update_fnorm": 0.031489964574575424, + "layer_8_max_l1_linf_norm": 0.237906813621521, + "layer_8_max_spectral_norm": 0.005520610138773918, + "layer_9_update_fnorm": 0.031603455543518066, + "layer_9_max_l1_linf_norm": 0.24208340048789978, + "layer_9_max_spectral_norm": 0.005459034815430641, + "layer_10_update_fnorm": 0.03173896670341492, + "layer_10_max_l1_linf_norm": 0.2557234764099121, + "layer_10_max_spectral_norm": 0.005777007434517145, + "layer_11_update_fnorm": 0.03157871589064598, + "layer_11_max_l1_linf_norm": 0.2524685263633728, + "layer_11_max_spectral_norm": 0.005674852058291435, + "layer_12_update_fnorm": 0.03173253312706947, + "layer_12_max_l1_linf_norm": 0.2460915893316269, + "layer_12_max_spectral_norm": 0.0055337888188660145, + "total_sharpness": 0.011826591566205025, + "ip_v_neg_g": 0.00749988853931427, + "cos_v_neg_g": 0.000586890964768827, + "v_norm": 1.3586710691452026, + "g_norm": 9.405525207519531, + "hv_norm": 4.473392009735107, + "cos_v_hv": 0.003592005232349038, + "hg_norm": 7238.59228515625, + "cos_g_hg": 0.6491408944129944, + "v_parallel_norm": 2.8075521186110564e-05, + "v_perp_norm": 1.3586710691452026, + "layer_1_v_norm": 0.03155208006501198, + "layer_1_cos_v_neg_g": 0.007728943135589361, + "layer_2_v_norm": 0.02924257144331932, + "layer_2_cos_v_neg_g": 0.00814929511398077, + "layer_3_v_norm": 0.027822664007544518, + "layer_3_cos_v_neg_g": 0.010522350668907166, + "layer_4_v_norm": 0.029725130647420883, + "layer_4_cos_v_neg_g": 0.009779922664165497, + "layer_5_v_norm": 0.030294738709926605, + "layer_5_cos_v_neg_g": 0.008668435737490654, + "layer_6_v_norm": 0.031061982735991478, + "layer_6_cos_v_neg_g": 0.008910421282052994, + "layer_7_v_norm": 0.03151985630393028, + "layer_7_cos_v_neg_g": 0.008044815622270107, + "layer_8_v_norm": 0.031489964574575424, + "layer_8_cos_v_neg_g": 0.007435020059347153, + "layer_9_v_norm": 0.031603455543518066, + "layer_9_cos_v_neg_g": 0.006871317978948355, + "layer_10_v_norm": 0.03173896670341492, + "layer_10_cos_v_neg_g": 0.006319734733551741, + "layer_11_v_norm": 0.03157871589064598, + "layer_11_cos_v_neg_g": 0.0057434202171862125, + "layer_12_v_norm": 0.03173253312706947, + "layer_12_cos_v_neg_g": 0.004628140013664961, + 
"layer_1_sharpness": 0.6096833944320679, + "layer_2_sharpness": 0.5884570479393005, + "layer_3_sharpness": 0.4943864345550537, + "layer_4_sharpness": 0.2860434055328369, + "layer_5_sharpness": 0.2525658905506134, + "layer_6_sharpness": 0.23850053548812866, + "layer_7_sharpness": 0.18737171590328217, + "layer_8_sharpness": 0.14328065514564514, + "layer_9_sharpness": 0.11140904575586319, + "layer_10_sharpness": 0.08920501917600632, + "layer_11_sharpness": 0.08233457058668137, + "layer_12_sharpness": 0.09830684214830399 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_2500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..b90e97103f20dfe4bfbddc735c91910f70c68e74 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3521429300308228, + "total_l1_linf_norm": 7466.376953125, + "total_spectral_norm": 1.3521430492401123, + "layer_1_update_fnorm": 0.031732361763715744, + "layer_1_max_l1_linf_norm": 0.24532507359981537, + "layer_1_max_spectral_norm": 0.005513252690434456, + "layer_2_update_fnorm": 0.029719360172748566, + "layer_2_max_l1_linf_norm": 0.2552482485771179, + "layer_2_max_spectral_norm": 0.005839054472744465, + "layer_3_update_fnorm": 0.02905474416911602, + "layer_3_max_l1_linf_norm": 0.2664616107940674, + "layer_3_max_spectral_norm": 0.00603755796328187, + "layer_4_update_fnorm": 0.0302574522793293, + "layer_4_max_l1_linf_norm": 0.2714109420776367, + "layer_4_max_spectral_norm": 0.006127423141151667, + "layer_5_update_fnorm": 0.030732233077287674, + "layer_5_max_l1_linf_norm": 0.2639893889427185, + "layer_5_max_spectral_norm": 0.005939382594078779, + "layer_6_update_fnorm": 0.03139416128396988, + "layer_6_max_l1_linf_norm": 0.259108304977417, + "layer_6_max_spectral_norm": 0.0058572362177073956, + "layer_7_update_fnorm": 0.031664516776800156, + "layer_7_max_l1_linf_norm": 0.24251003563404083, + "layer_7_max_spectral_norm": 0.005496441852301359, + "layer_8_update_fnorm": 0.03160799294710159, + "layer_8_max_l1_linf_norm": 0.2511013150215149, + "layer_8_max_spectral_norm": 0.005661096423864365, + "layer_9_update_fnorm": 0.031698841601610184, + "layer_9_max_l1_linf_norm": 0.25657427310943604, + "layer_9_max_spectral_norm": 0.005796144250780344, + "layer_10_update_fnorm": 0.031916968524456024, + "layer_10_max_l1_linf_norm": 0.2745698094367981, + "layer_10_max_spectral_norm": 0.006239432841539383, + "layer_11_update_fnorm": 0.03160993009805679, + "layer_11_max_l1_linf_norm": 0.2628287672996521, + "layer_11_max_spectral_norm": 0.0059592281468212605, + "layer_12_update_fnorm": 0.03161861374974251, + "layer_12_max_l1_linf_norm": 0.25499799847602844, + "layer_12_max_spectral_norm": 0.005811036564409733, + "total_sharpness": 0.009306021966040134, + "ip_v_neg_g": 0.007184024900197983, + "cos_v_neg_g": 0.0006687837885692716, + "v_norm": 1.3521429300308228, + "g_norm": 7.944370746612549, + "hv_norm": 3.219987392425537, + "cos_v_hv": 0.003907801117748022, + "hg_norm": 3740.61279296875, + "cos_g_hg": 0.5943974852561951, + "v_parallel_norm": 2.7151108952239156e-05, + "v_perp_norm": 1.3521429300308228, + "layer_1_v_norm": 0.031732361763715744, + "layer_1_cos_v_neg_g": 0.008929288946092129, + "layer_2_v_norm": 0.029719360172748566, + "layer_2_cos_v_neg_g": 0.009606880135834217, + 
"layer_3_v_norm": 0.02905474416911602, + "layer_3_cos_v_neg_g": 0.011129971593618393, + "layer_4_v_norm": 0.0302574522793293, + "layer_4_cos_v_neg_g": 0.009703211486339569, + "layer_5_v_norm": 0.030732233077287674, + "layer_5_cos_v_neg_g": 0.008537469431757927, + "layer_6_v_norm": 0.03139416128396988, + "layer_6_cos_v_neg_g": 0.008293639868497849, + "layer_7_v_norm": 0.031664516776800156, + "layer_7_cos_v_neg_g": 0.007266811560839415, + "layer_8_v_norm": 0.03160799294710159, + "layer_8_cos_v_neg_g": 0.007287175860255957, + "layer_9_v_norm": 0.031698841601610184, + "layer_9_cos_v_neg_g": 0.006503998767584562, + "layer_10_v_norm": 0.031916968524456024, + "layer_10_cos_v_neg_g": 0.005858882796019316, + "layer_11_v_norm": 0.03160993009805679, + "layer_11_cos_v_neg_g": 0.00495147705078125, + "layer_12_v_norm": 0.03161861374974251, + "layer_12_cos_v_neg_g": 0.004296146798878908, + "layer_1_sharpness": 0.40012651681900024, + "layer_2_sharpness": 0.2994821071624756, + "layer_3_sharpness": 0.3019193708896637, + "layer_4_sharpness": 0.20392684638500214, + "layer_5_sharpness": 0.15890167653560638, + "layer_6_sharpness": 0.1601811945438385, + "layer_7_sharpness": 0.15550950169563293, + "layer_8_sharpness": 0.13545455038547516, + "layer_9_sharpness": 0.10335486382246017, + "layer_10_sharpness": 0.08662839978933334, + "layer_11_sharpness": 0.0677727535367012, + "layer_12_sharpness": 0.08422213792800903 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_3000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..7eceba171fd9fa7a63492edf9e9272fffde89b6e --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3315917253494263, + "total_l1_linf_norm": 7352.294921875, + "total_spectral_norm": 1.3315917253494263, + "layer_1_update_fnorm": 0.03201310336589813, + "layer_1_max_l1_linf_norm": 0.2714522182941437, + "layer_1_max_spectral_norm": 0.0061529241502285, + "layer_2_update_fnorm": 0.030116725713014603, + "layer_2_max_l1_linf_norm": 0.29267367720603943, + "layer_2_max_spectral_norm": 0.006629658862948418, + "layer_3_update_fnorm": 0.029333610087633133, + "layer_3_max_l1_linf_norm": 0.2972063422203064, + "layer_3_max_spectral_norm": 0.006712683476507664, + "layer_4_update_fnorm": 0.030624719336628914, + "layer_4_max_l1_linf_norm": 0.3013583719730377, + "layer_4_max_spectral_norm": 0.006818752270191908, + "layer_5_update_fnorm": 0.03111582063138485, + "layer_5_max_l1_linf_norm": 0.28108590841293335, + "layer_5_max_spectral_norm": 0.006310727912932634, + "layer_6_update_fnorm": 0.03178191930055618, + "layer_6_max_l1_linf_norm": 0.27890944480895996, + "layer_6_max_spectral_norm": 0.006257181987166405, + "layer_7_update_fnorm": 0.031880415976047516, + "layer_7_max_l1_linf_norm": 0.2604685127735138, + "layer_7_max_spectral_norm": 0.005884754937142134, + "layer_8_update_fnorm": 0.03188357129693031, + "layer_8_max_l1_linf_norm": 0.2704402208328247, + "layer_8_max_spectral_norm": 0.006129585672169924, + "layer_9_update_fnorm": 0.0319838747382164, + "layer_9_max_l1_linf_norm": 0.2809058427810669, + "layer_9_max_spectral_norm": 0.006275429390370846, + "layer_10_update_fnorm": 0.032102786004543304, + "layer_10_max_l1_linf_norm": 0.2918998599052429, + "layer_10_max_spectral_norm": 0.0065893735736608505, + 
"layer_11_update_fnorm": 0.03188667446374893, + "layer_11_max_l1_linf_norm": 0.2990298271179199, + "layer_11_max_spectral_norm": 0.00674686161801219, + "layer_12_update_fnorm": 0.0320889949798584, + "layer_12_max_l1_linf_norm": 0.28449827432632446, + "layer_12_max_spectral_norm": 0.0064691524021327496, + "total_sharpness": 0.0080016003921628, + "ip_v_neg_g": 0.010398846119642258, + "cos_v_neg_g": 0.0010704982560127974, + "v_norm": 1.3315917253494263, + "g_norm": 7.295047283172607, + "hv_norm": 2.3495140075683594, + "cos_v_hv": 0.004534923005849123, + "hg_norm": 2510.2509765625, + "cos_g_hg": 0.5731265544891357, + "v_parallel_norm": 3.748527160496451e-05, + "v_perp_norm": 1.3315917253494263, + "layer_1_v_norm": 0.03201310336589813, + "layer_1_cos_v_neg_g": 0.017286622896790504, + "layer_2_v_norm": 0.030116725713014603, + "layer_2_cos_v_neg_g": 0.017303485423326492, + "layer_3_v_norm": 0.029333611950278282, + "layer_3_cos_v_neg_g": 0.018224691972136497, + "layer_4_v_norm": 0.030624719336628914, + "layer_4_cos_v_neg_g": 0.015116933733224869, + "layer_5_v_norm": 0.03111582063138485, + "layer_5_cos_v_neg_g": 0.013213836587965488, + "layer_6_v_norm": 0.03178191930055618, + "layer_6_cos_v_neg_g": 0.012132090516388416, + "layer_7_v_norm": 0.031880415976047516, + "layer_7_cos_v_neg_g": 0.011688596569001675, + "layer_8_v_norm": 0.031883567571640015, + "layer_8_cos_v_neg_g": 0.012560760602355003, + "layer_9_v_norm": 0.0319838747382164, + "layer_9_cos_v_neg_g": 0.011372610926628113, + "layer_10_v_norm": 0.032102786004543304, + "layer_10_cos_v_neg_g": 0.0099007124081254, + "layer_11_v_norm": 0.03188667446374893, + "layer_11_cos_v_neg_g": 0.006735941860824823, + "layer_12_v_norm": 0.0320889949798584, + "layer_12_cos_v_neg_g": 0.00556756928563118, + "layer_1_sharpness": 0.2707841098308563, + "layer_2_sharpness": 0.18982616066932678, + "layer_3_sharpness": 0.22669538855552673, + "layer_4_sharpness": 0.14433713257312775, + "layer_5_sharpness": 0.13037392497062683, + "layer_6_sharpness": 0.13695891201496124, + "layer_7_sharpness": 0.13792996108531952, + "layer_8_sharpness": 0.12594985961914062, + "layer_9_sharpness": 0.11097018420696259, + "layer_10_sharpness": 0.088711678981781, + "layer_11_sharpness": 0.07269313186407089, + "layer_12_sharpness": 0.10442043840885162 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_3500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..a43f3109d7a9536fd576d17290a9bbe29916e9c1 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3372265100479126, + "total_l1_linf_norm": 7380.20556640625, + "total_spectral_norm": 1.3372267484664917, + "layer_1_update_fnorm": 0.031925372779369354, + "layer_1_max_l1_linf_norm": 0.2659066319465637, + "layer_1_max_spectral_norm": 0.0059778401628136635, + "layer_2_update_fnorm": 0.030235672369599342, + "layer_2_max_l1_linf_norm": 0.27248626947402954, + "layer_2_max_spectral_norm": 0.0061510480009019375, + "layer_3_update_fnorm": 0.029508043080568314, + "layer_3_max_l1_linf_norm": 0.28521105647087097, + "layer_3_max_spectral_norm": 0.006452774628996849, + "layer_4_update_fnorm": 0.03060579113662243, + "layer_4_max_l1_linf_norm": 0.29435330629348755, + "layer_4_max_spectral_norm": 0.0066137127578258514, + "layer_5_update_fnorm": 
0.03108413703739643, + "layer_5_max_l1_linf_norm": 0.2758272886276245, + "layer_5_max_spectral_norm": 0.006204860284924507, + "layer_6_update_fnorm": 0.031799424439668655, + "layer_6_max_l1_linf_norm": 0.26321375370025635, + "layer_6_max_spectral_norm": 0.005883119069039822, + "layer_7_update_fnorm": 0.03168031945824623, + "layer_7_max_l1_linf_norm": 0.24632909893989563, + "layer_7_max_spectral_norm": 0.005577911157160997, + "layer_8_update_fnorm": 0.03189719468355179, + "layer_8_max_l1_linf_norm": 0.2624983489513397, + "layer_8_max_spectral_norm": 0.005931780207902193, + "layer_9_update_fnorm": 0.031892988830804825, + "layer_9_max_l1_linf_norm": 0.2825421094894409, + "layer_9_max_spectral_norm": 0.006356092635542154, + "layer_10_update_fnorm": 0.03195643797516823, + "layer_10_max_l1_linf_norm": 0.2959824502468109, + "layer_10_max_spectral_norm": 0.006672500632703304, + "layer_11_update_fnorm": 0.031978148967027664, + "layer_11_max_l1_linf_norm": 0.3106009364128113, + "layer_11_max_spectral_norm": 0.0069882492534816265, + "layer_12_update_fnorm": 0.031966954469680786, + "layer_12_max_l1_linf_norm": 0.29735758900642395, + "layer_12_max_spectral_norm": 0.006779466290026903, + "total_sharpness": 0.007654602639377117, + "ip_v_neg_g": 0.009008309803903103, + "cos_v_neg_g": 0.0008820748771540821, + "v_norm": 1.3372265100479126, + "g_norm": 7.637176990509033, + "hv_norm": 2.3687856197357178, + "cos_v_hv": 0.0043211751617491245, + "hg_norm": 3359.40966796875, + "cos_g_hg": 0.603976845741272, + "v_parallel_norm": 3.305646532680839e-05, + "v_perp_norm": 1.3372265100479126, + "layer_1_v_norm": 0.031925372779369354, + "layer_1_cos_v_neg_g": 0.012139442376792431, + "layer_2_v_norm": 0.030235672369599342, + "layer_2_cos_v_neg_g": 0.01128925010561943, + "layer_3_v_norm": 0.029508043080568314, + "layer_3_cos_v_neg_g": 0.014453842304646969, + "layer_4_v_norm": 0.03060579113662243, + "layer_4_cos_v_neg_g": 0.0114756990224123, + "layer_5_v_norm": 0.03108413703739643, + "layer_5_cos_v_neg_g": 0.010909777134656906, + "layer_6_v_norm": 0.031799424439668655, + "layer_6_cos_v_neg_g": 0.010094254277646542, + "layer_7_v_norm": 0.03168031945824623, + "layer_7_cos_v_neg_g": 0.011265327222645283, + "layer_8_v_norm": 0.03189719095826149, + "layer_8_cos_v_neg_g": 0.011000169441103935, + "layer_9_v_norm": 0.031892988830804825, + "layer_9_cos_v_neg_g": 0.009645992890000343, + "layer_10_v_norm": 0.03195643797516823, + "layer_10_cos_v_neg_g": 0.008150397799909115, + "layer_11_v_norm": 0.031978148967027664, + "layer_11_cos_v_neg_g": 0.007963176816701889, + "layer_12_v_norm": 0.031966954469680786, + "layer_12_cos_v_neg_g": 0.007409804966300726, + "layer_1_sharpness": 0.22327092289924622, + "layer_2_sharpness": 0.1214844360947609, + "layer_3_sharpness": 0.20687517523765564, + "layer_4_sharpness": 0.1353912502527237, + "layer_5_sharpness": 0.11263735592365265, + "layer_6_sharpness": 0.12084256112575531, + "layer_7_sharpness": 0.1505231261253357, + "layer_8_sharpness": 0.12890805304050446, + "layer_9_sharpness": 0.11314798891544342, + "layer_10_sharpness": 0.0935022234916687, + "layer_11_sharpness": 0.0703844502568245, + "layer_12_sharpness": 0.08662442862987518 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_4000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..62dc03b9459a5cb6af12adbaa1606bc8e9bb585a --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.315078616142273, + "total_l1_linf_norm": 7262.49853515625, + "total_spectral_norm": 1.3150787353515625, + "layer_1_update_fnorm": 0.031692732125520706, + "layer_1_max_l1_linf_norm": 0.26468101143836975, + "layer_1_max_spectral_norm": 0.005943936295807362, + "layer_2_update_fnorm": 0.030320707708597183, + "layer_2_max_l1_linf_norm": 0.2903958559036255, + "layer_2_max_spectral_norm": 0.006573659833520651, + "layer_3_update_fnorm": 0.029712380841374397, + "layer_3_max_l1_linf_norm": 0.291145384311676, + "layer_3_max_spectral_norm": 0.006542320363223553, + "layer_4_update_fnorm": 0.030714649707078934, + "layer_4_max_l1_linf_norm": 0.30143290758132935, + "layer_4_max_spectral_norm": 0.006751201581209898, + "layer_5_update_fnorm": 0.031395602971315384, + "layer_5_max_l1_linf_norm": 0.29853689670562744, + "layer_5_max_spectral_norm": 0.006687506102025509, + "layer_6_update_fnorm": 0.03174322098493576, + "layer_6_max_l1_linf_norm": 0.2769516110420227, + "layer_6_max_spectral_norm": 0.006202587857842445, + "layer_7_update_fnorm": 0.03188197687268257, + "layer_7_max_l1_linf_norm": 0.26472294330596924, + "layer_7_max_spectral_norm": 0.005986131727695465, + "layer_8_update_fnorm": 0.031953178346157074, + "layer_8_max_l1_linf_norm": 0.28147971630096436, + "layer_8_max_spectral_norm": 0.006329540628939867, + "layer_9_update_fnorm": 0.031884968280792236, + "layer_9_max_l1_linf_norm": 0.28182968497276306, + "layer_9_max_spectral_norm": 0.006355863995850086, + "layer_10_update_fnorm": 0.03201355040073395, + "layer_10_max_l1_linf_norm": 0.2950592637062073, + "layer_10_max_spectral_norm": 0.006623759865760803, + "layer_11_update_fnorm": 0.03207700699567795, + "layer_11_max_l1_linf_norm": 0.321172833442688, + "layer_11_max_spectral_norm": 0.007176673039793968, + "layer_12_update_fnorm": 0.03216155990958214, + "layer_12_max_l1_linf_norm": 0.323731929063797, + "layer_12_max_spectral_norm": 0.007264507468789816, + "total_sharpness": 0.007648215163499117, + "ip_v_neg_g": 0.007060045842081308, + "cos_v_neg_g": 0.0008342767250724137, + "v_norm": 1.315078616142273, + "g_norm": 6.434957504272461, + "hv_norm": 2.105947256088257, + "cos_v_hv": 0.004775999579578638, + "hg_norm": 1365.3155517578125, + "cos_g_hg": 0.5216759443283081, + "v_parallel_norm": 2.802556446113158e-05, + "v_perp_norm": 1.315078616142273, + "layer_1_v_norm": 0.031692732125520706, + "layer_1_cos_v_neg_g": 0.009778246283531189, + "layer_2_v_norm": 0.030320707708597183, + "layer_2_cos_v_neg_g": 0.011706785298883915, + "layer_3_v_norm": 0.029712378978729248, + "layer_3_cos_v_neg_g": 0.012095736339688301, + "layer_4_v_norm": 0.030714649707078934, + "layer_4_cos_v_neg_g": 0.008499299176037312, + "layer_5_v_norm": 0.031395602971315384, + "layer_5_cos_v_neg_g": 0.008125122636556625, + "layer_6_v_norm": 0.03174322098493576, + "layer_6_cos_v_neg_g": 0.008589648641645908, + "layer_7_v_norm": 0.03188197687268257, + "layer_7_cos_v_neg_g": 0.009508445858955383, + "layer_8_v_norm": 0.031953178346157074, + "layer_8_cos_v_neg_g": 0.010200363583862782, + "layer_9_v_norm": 0.031884968280792236, + "layer_9_cos_v_neg_g": 0.008938825689256191, + "layer_10_v_norm": 0.03201355040073395, + "layer_10_cos_v_neg_g": 0.008407936431467533, + "layer_11_v_norm": 0.03207700699567795, + "layer_11_cos_v_neg_g": 0.007952377200126648, + "layer_12_v_norm": 0.03216155990958214, + "layer_12_cos_v_neg_g": 0.00849071517586708, + 
"layer_1_sharpness": 0.19714535772800446, + "layer_2_sharpness": 0.16922158002853394, + "layer_3_sharpness": 0.17967957258224487, + "layer_4_sharpness": 0.10486120730638504, + "layer_5_sharpness": 0.10892690718173981, + "layer_6_sharpness": 0.12198325991630554, + "layer_7_sharpness": 0.16001082956790924, + "layer_8_sharpness": 0.14506161212921143, + "layer_9_sharpness": 0.11280224472284317, + "layer_10_sharpness": 0.0905231460928917, + "layer_11_sharpness": 0.08629649877548218, + "layer_12_sharpness": 0.12050748616456985 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_4500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..d57b7cc08a5ee50b6d5dee9cd81b64f04af71bd1 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3412656784057617, + "total_l1_linf_norm": 7405.32470703125, + "total_spectral_norm": 1.3412660360336304, + "layer_1_update_fnorm": 0.03218439593911171, + "layer_1_max_l1_linf_norm": 0.31671154499053955, + "layer_1_max_spectral_norm": 0.007109431084245443, + "layer_2_update_fnorm": 0.030504906550049782, + "layer_2_max_l1_linf_norm": 0.3316202163696289, + "layer_2_max_spectral_norm": 0.007396203000098467, + "layer_3_update_fnorm": 0.029976502060890198, + "layer_3_max_l1_linf_norm": 0.324831485748291, + "layer_3_max_spectral_norm": 0.007355235051363707, + "layer_4_update_fnorm": 0.03087414614856243, + "layer_4_max_l1_linf_norm": 0.3046529293060303, + "layer_4_max_spectral_norm": 0.006887160707265139, + "layer_5_update_fnorm": 0.03166953846812248, + "layer_5_max_l1_linf_norm": 0.29988163709640503, + "layer_5_max_spectral_norm": 0.006751172710210085, + "layer_6_update_fnorm": 0.031815484166145325, + "layer_6_max_l1_linf_norm": 0.2656909227371216, + "layer_6_max_spectral_norm": 0.005996895954012871, + "layer_7_update_fnorm": 0.03175129368901253, + "layer_7_max_l1_linf_norm": 0.2405981719493866, + "layer_7_max_spectral_norm": 0.005489100702106953, + "layer_8_update_fnorm": 0.03180135041475296, + "layer_8_max_l1_linf_norm": 0.2567112445831299, + "layer_8_max_spectral_norm": 0.005842946004122496, + "layer_9_update_fnorm": 0.03171563521027565, + "layer_9_max_l1_linf_norm": 0.26812368631362915, + "layer_9_max_spectral_norm": 0.006035438273102045, + "layer_10_update_fnorm": 0.03178005293011665, + "layer_10_max_l1_linf_norm": 0.2812041640281677, + "layer_10_max_spectral_norm": 0.006319098640233278, + "layer_11_update_fnorm": 0.03190520778298378, + "layer_11_max_l1_linf_norm": 0.29689842462539673, + "layer_11_max_spectral_norm": 0.00669463025406003, + "layer_12_update_fnorm": 0.03173850476741791, + "layer_12_max_l1_linf_norm": 0.27914702892303467, + "layer_12_max_spectral_norm": 0.006334403995424509, + "total_sharpness": 0.006604056805372238, + "ip_v_neg_g": 0.0067371586337685585, + "cos_v_neg_g": 0.0007823661435395479, + "v_norm": 1.3412656784057617, + "g_norm": 6.4202494621276855, + "hv_norm": 1.9876585006713867, + "cos_v_hv": 0.00445639668032527, + "hg_norm": 2802.09326171875, + "cos_g_hg": 0.45578789710998535, + "v_parallel_norm": 2.8400310839060694e-05, + "v_perp_norm": 1.3412656784057617, + "layer_1_v_norm": 0.03218439593911171, + "layer_1_cos_v_neg_g": 0.01204723585397005, + "layer_2_v_norm": 0.030504906550049782, + "layer_2_cos_v_neg_g": 0.011777715757489204, + 
"layer_3_v_norm": 0.02997650019824505, + "layer_3_cos_v_neg_g": 0.010980039834976196, + "layer_4_v_norm": 0.03087414614856243, + "layer_4_cos_v_neg_g": 0.008892822079360485, + "layer_5_v_norm": 0.03166953846812248, + "layer_5_cos_v_neg_g": 0.00834697112441063, + "layer_6_v_norm": 0.031815484166145325, + "layer_6_cos_v_neg_g": 0.008379069156944752, + "layer_7_v_norm": 0.03175129368901253, + "layer_7_cos_v_neg_g": 0.008706294000148773, + "layer_8_v_norm": 0.03180135041475296, + "layer_8_cos_v_neg_g": 0.008121028542518616, + "layer_9_v_norm": 0.03171563521027565, + "layer_9_cos_v_neg_g": 0.007974532432854176, + "layer_10_v_norm": 0.03178005293011665, + "layer_10_cos_v_neg_g": 0.007386158220469952, + "layer_11_v_norm": 0.03190520405769348, + "layer_11_cos_v_neg_g": 0.007325544487684965, + "layer_12_v_norm": 0.03173850476741791, + "layer_12_cos_v_neg_g": 0.006564713083207607, + "layer_1_sharpness": 0.32113903760910034, + "layer_2_sharpness": 0.1902429461479187, + "layer_3_sharpness": 0.19510957598686218, + "layer_4_sharpness": 0.09844855964183807, + "layer_5_sharpness": 0.09397700428962708, + "layer_6_sharpness": 0.11507357656955719, + "layer_7_sharpness": 0.1301228106021881, + "layer_8_sharpness": 0.11913073062896729, + "layer_9_sharpness": 0.0985918641090393, + "layer_10_sharpness": 0.07452528178691864, + "layer_11_sharpness": 0.0626080259680748, + "layer_12_sharpness": 0.10168802738189697 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..e634bb124a6647a6b22de337793255ee361d3813 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 0.878676176071167, + "total_l1_linf_norm": 4854.095703125, + "total_spectral_norm": 0.8786762952804565, + "layer_1_update_fnorm": 0.022945299744606018, + "layer_1_max_l1_linf_norm": 0.17840155959129333, + "layer_1_max_spectral_norm": 0.004000478889793158, + "layer_2_update_fnorm": 0.02235325612127781, + "layer_2_max_l1_linf_norm": 0.17950044572353363, + "layer_2_max_spectral_norm": 0.0044233729131519794, + "layer_3_update_fnorm": 0.021812040358781815, + "layer_3_max_l1_linf_norm": 0.18064062297344208, + "layer_3_max_spectral_norm": 0.0048252795822918415, + "layer_4_update_fnorm": 0.02277541719377041, + "layer_4_max_l1_linf_norm": 0.17426475882530212, + "layer_4_max_spectral_norm": 0.007560159545391798, + "layer_5_update_fnorm": 0.023412490263581276, + "layer_5_max_l1_linf_norm": 0.20730093121528625, + "layer_5_max_spectral_norm": 0.00906592421233654, + "layer_6_update_fnorm": 0.0237002931535244, + "layer_6_max_l1_linf_norm": 0.2139643132686615, + "layer_6_max_spectral_norm": 0.009368370287120342, + "layer_7_update_fnorm": 0.02406246028840542, + "layer_7_max_l1_linf_norm": 0.249477356672287, + "layer_7_max_spectral_norm": 0.01031552255153656, + "layer_8_update_fnorm": 0.023261141031980515, + "layer_8_max_l1_linf_norm": 0.19345837831497192, + "layer_8_max_spectral_norm": 0.008432798087596893, + "layer_9_update_fnorm": 0.0229355338960886, + "layer_9_max_l1_linf_norm": 0.1791715919971466, + "layer_9_max_spectral_norm": 0.007762755732983351, + "layer_10_update_fnorm": 0.022948505356907845, + "layer_10_max_l1_linf_norm": 0.17987987399101257, + "layer_10_max_spectral_norm": 0.007676983717828989, + 
"layer_11_update_fnorm": 0.022146349772810936, + "layer_11_max_l1_linf_norm": 0.14963605999946594, + "layer_11_max_spectral_norm": 0.004964128602296114, + "layer_12_update_fnorm": 0.022326720878481865, + "layer_12_max_l1_linf_norm": 0.15289613604545593, + "layer_12_max_spectral_norm": 0.0051561202853918076, + "total_sharpness": 0.02302301488816738, + "ip_v_neg_g": 0.008631651289761066, + "cos_v_neg_g": 0.0011438551591709256, + "v_norm": 0.878676176071167, + "g_norm": 8.58803939819336, + "hv_norm": 5.3084588050842285, + "cos_v_hv": 0.0038108567241579294, + "hg_norm": 12317.654296875, + "cos_g_hg": 0.685223400592804, + "v_parallel_norm": 5.415383930085227e-05, + "v_perp_norm": 0.878676176071167, + "layer_1_v_norm": 0.022945299744606018, + "layer_1_cos_v_neg_g": 0.016215236857533455, + "layer_2_v_norm": 0.02235325612127781, + "layer_2_cos_v_neg_g": 0.015228299424052238, + "layer_3_v_norm": 0.021812040358781815, + "layer_3_cos_v_neg_g": 0.015768475830554962, + "layer_4_v_norm": 0.02277541719377041, + "layer_4_cos_v_neg_g": 0.013245013542473316, + "layer_5_v_norm": 0.023412490263581276, + "layer_5_cos_v_neg_g": 0.012372331693768501, + "layer_6_v_norm": 0.0237002931535244, + "layer_6_cos_v_neg_g": 0.011956071481108665, + "layer_7_v_norm": 0.02406246028840542, + "layer_7_cos_v_neg_g": 0.011211119592189789, + "layer_8_v_norm": 0.023261141031980515, + "layer_8_cos_v_neg_g": 0.011049523949623108, + "layer_9_v_norm": 0.0229355338960886, + "layer_9_cos_v_neg_g": 0.011074306443333626, + "layer_10_v_norm": 0.022948505356907845, + "layer_10_cos_v_neg_g": 0.009842638857662678, + "layer_11_v_norm": 0.022146347910165787, + "layer_11_cos_v_neg_g": 0.008682521991431713, + "layer_12_v_norm": 0.022326720878481865, + "layer_12_cos_v_neg_g": 0.0076227025128901005, + "layer_1_sharpness": 1.6882978677749634, + "layer_2_sharpness": 0.635952889919281, + "layer_3_sharpness": 0.42042210698127747, + "layer_4_sharpness": 0.2499295324087143, + "layer_5_sharpness": 0.18224871158599854, + "layer_6_sharpness": 0.1383322775363922, + "layer_7_sharpness": 0.10665469616651535, + "layer_8_sharpness": 0.10756635665893555, + "layer_9_sharpness": 0.0858919769525528, + "layer_10_sharpness": 0.07003207504749298, + "layer_11_sharpness": 0.05650107562541962, + "layer_12_sharpness": 0.050558947026729584 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_5000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..5c8dbb545c3fddda5ddec654e929b03ec88d3f2f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.367124080657959, + "total_l1_linf_norm": 7537.35009765625, + "total_spectral_norm": 1.367124080657959, + "layer_1_update_fnorm": 0.03228780999779701, + "layer_1_max_l1_linf_norm": 0.31571584939956665, + "layer_1_max_spectral_norm": 0.0071367318741977215, + "layer_2_update_fnorm": 0.030784811824560165, + "layer_2_max_l1_linf_norm": 0.32635343074798584, + "layer_2_max_spectral_norm": 0.007382108364254236, + "layer_3_update_fnorm": 0.03030610829591751, + "layer_3_max_l1_linf_norm": 0.3275468349456787, + "layer_3_max_spectral_norm": 0.007294152397662401, + "layer_4_update_fnorm": 0.031342461705207825, + "layer_4_max_l1_linf_norm": 0.31646373867988586, + "layer_4_max_spectral_norm": 0.007134869694709778, + "layer_5_update_fnorm": 
0.031817276030778885, + "layer_5_max_l1_linf_norm": 0.3118666410446167, + "layer_5_max_spectral_norm": 0.007023719139397144, + "layer_6_update_fnorm": 0.032044291496276855, + "layer_6_max_l1_linf_norm": 0.29337242245674133, + "layer_6_max_spectral_norm": 0.006622136104851961, + "layer_7_update_fnorm": 0.03203035145998001, + "layer_7_max_l1_linf_norm": 0.2736368179321289, + "layer_7_max_spectral_norm": 0.006152179092168808, + "layer_8_update_fnorm": 0.03193320333957672, + "layer_8_max_l1_linf_norm": 0.2699336111545563, + "layer_8_max_spectral_norm": 0.00608788849785924, + "layer_9_update_fnorm": 0.03206183388829231, + "layer_9_max_l1_linf_norm": 0.3003687560558319, + "layer_9_max_spectral_norm": 0.006714593153446913, + "layer_10_update_fnorm": 0.032171670347452164, + "layer_10_max_l1_linf_norm": 0.31263819336891174, + "layer_10_max_spectral_norm": 0.007029733154922724, + "layer_11_update_fnorm": 0.03217334300279617, + "layer_11_max_l1_linf_norm": 0.3291582763195038, + "layer_11_max_spectral_norm": 0.007413392420858145, + "layer_12_update_fnorm": 0.03201955556869507, + "layer_12_max_l1_linf_norm": 0.3103291392326355, + "layer_12_max_spectral_norm": 0.007041838485747576, + "total_sharpness": 0.005629512947052717, + "ip_v_neg_g": 0.008605284616351128, + "cos_v_neg_g": 0.0009436191758140922, + "v_norm": 1.367124080657959, + "g_norm": 6.670533180236816, + "hv_norm": 1.6980761289596558, + "cos_v_hv": 0.004532331135123968, + "hg_norm": 4186.484375, + "cos_g_hg": 0.48923155665397644, + "v_parallel_norm": 3.615700552472845e-05, + "v_perp_norm": 1.367124080657959, + "layer_1_v_norm": 0.03228780999779701, + "layer_1_cos_v_neg_g": 0.01161454152315855, + "layer_2_v_norm": 0.030784811824560165, + "layer_2_cos_v_neg_g": 0.01075734756886959, + "layer_3_v_norm": 0.03030610829591751, + "layer_3_cos_v_neg_g": 0.011635752394795418, + "layer_4_v_norm": 0.031342461705207825, + "layer_4_cos_v_neg_g": 0.0119385477155447, + "layer_5_v_norm": 0.031817276030778885, + "layer_5_cos_v_neg_g": 0.011887501925230026, + "layer_6_v_norm": 0.032044291496276855, + "layer_6_cos_v_neg_g": 0.011485929600894451, + "layer_7_v_norm": 0.03203035145998001, + "layer_7_cos_v_neg_g": 0.011612663976848125, + "layer_8_v_norm": 0.03193319961428642, + "layer_8_cos_v_neg_g": 0.011023323982954025, + "layer_9_v_norm": 0.03206183388829231, + "layer_9_cos_v_neg_g": 0.009864049963653088, + "layer_10_v_norm": 0.032171670347452164, + "layer_10_cos_v_neg_g": 0.00990858394652605, + "layer_11_v_norm": 0.03217334672808647, + "layer_11_cos_v_neg_g": 0.008239240385591984, + "layer_12_v_norm": 0.03201955556869507, + "layer_12_cos_v_neg_g": 0.00841504242271185, + "layer_1_sharpness": 0.257558673620224, + "layer_2_sharpness": 0.10701955854892731, + "layer_3_sharpness": 0.16087082028388977, + "layer_4_sharpness": 0.08547703176736832, + "layer_5_sharpness": 0.09153814613819122, + "layer_6_sharpness": 0.10405433177947998, + "layer_7_sharpness": 0.13310091197490692, + "layer_8_sharpness": 0.10715573281049728, + "layer_9_sharpness": 0.09658835828304291, + "layer_10_sharpness": 0.07528430968523026, + "layer_11_sharpness": 0.05875036120414734, + "layer_12_sharpness": 0.07179398089647293 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_5500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..bbd3db833126aebd985e1739bad9ec12e37dc64d --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3485594987869263, + "total_l1_linf_norm": 7445.7236328125, + "total_spectral_norm": 1.3485596179962158, + "layer_1_update_fnorm": 0.03215458616614342, + "layer_1_max_l1_linf_norm": 0.3276292681694031, + "layer_1_max_spectral_norm": 0.007211978547275066, + "layer_2_update_fnorm": 0.030846377834677696, + "layer_2_max_l1_linf_norm": 0.34614306688308716, + "layer_2_max_spectral_norm": 0.007730837445706129, + "layer_3_update_fnorm": 0.030205270275473595, + "layer_3_max_l1_linf_norm": 0.3302832245826721, + "layer_3_max_spectral_norm": 0.0073545859195292, + "layer_4_update_fnorm": 0.031367234885692596, + "layer_4_max_l1_linf_norm": 0.3261333107948303, + "layer_4_max_spectral_norm": 0.007363158278167248, + "layer_5_update_fnorm": 0.03201606124639511, + "layer_5_max_l1_linf_norm": 0.3268347978591919, + "layer_5_max_spectral_norm": 0.0073339990340173244, + "layer_6_update_fnorm": 0.032198503613471985, + "layer_6_max_l1_linf_norm": 0.3100111484527588, + "layer_6_max_spectral_norm": 0.006940788589417934, + "layer_7_update_fnorm": 0.03227733448147774, + "layer_7_max_l1_linf_norm": 0.3019576668739319, + "layer_7_max_spectral_norm": 0.006724612321704626, + "layer_8_update_fnorm": 0.03202912583947182, + "layer_8_max_l1_linf_norm": 0.2802039384841919, + "layer_8_max_spectral_norm": 0.00631097424775362, + "layer_9_update_fnorm": 0.0320061631500721, + "layer_9_max_l1_linf_norm": 0.3067694306373596, + "layer_9_max_spectral_norm": 0.0068173641338944435, + "layer_10_update_fnorm": 0.032171063125133514, + "layer_10_max_l1_linf_norm": 0.31927600502967834, + "layer_10_max_spectral_norm": 0.007100198417901993, + "layer_11_update_fnorm": 0.03219041973352432, + "layer_11_max_l1_linf_norm": 0.32883262634277344, + "layer_11_max_spectral_norm": 0.007400347851216793, + "layer_12_update_fnorm": 0.03215750679373741, + "layer_12_max_l1_linf_norm": 0.3357856869697571, + "layer_12_max_spectral_norm": 0.007560382597148418, + "total_sharpness": 0.004895223304629326, + "ip_v_neg_g": 0.0017387926345691085, + "cos_v_neg_g": 0.000124872982269153, + "v_norm": 1.3485594987869263, + "g_norm": 10.325455665588379, + "hv_norm": 1.7424228191375732, + "cos_v_hv": 0.0037886900827288628, + "hg_norm": 7562.80419921875, + "cos_g_hg": 0.7144138216972351, + "v_parallel_norm": 6.915739959367784e-06, + "v_perp_norm": 1.3485594987869263, + "layer_1_v_norm": 0.03215458616614342, + "layer_1_cos_v_neg_g": 0.0016815006965771317, + "layer_2_v_norm": 0.030846377834677696, + "layer_2_cos_v_neg_g": 0.0009812029311433434, + "layer_3_v_norm": 0.030205268412828445, + "layer_3_cos_v_neg_g": 0.0028117899782955647, + "layer_4_v_norm": 0.031367234885692596, + "layer_4_cos_v_neg_g": 0.002200594637542963, + "layer_5_v_norm": 0.03201606124639511, + "layer_5_cos_v_neg_g": 0.0019754848908632994, + "layer_6_v_norm": 0.032198499888181686, + "layer_6_cos_v_neg_g": 0.00254300469532609, + "layer_7_v_norm": 0.03227733448147774, + "layer_7_cos_v_neg_g": 0.0029456366319209337, + "layer_8_v_norm": 0.03202912583947182, + "layer_8_cos_v_neg_g": 0.0009398266556672752, + "layer_9_v_norm": 0.0320061631500721, + "layer_9_cos_v_neg_g": 0.0014856274938210845, + "layer_10_v_norm": 0.032171063125133514, + "layer_10_cos_v_neg_g": 0.0017791703576222062, + "layer_11_v_norm": 0.032190416008234024, + "layer_11_cos_v_neg_g": 9.046127524925396e-05, + "layer_12_v_norm": 0.03215750679373741, + "layer_12_cos_v_neg_g": 0.0013690656051039696, + 
"layer_1_sharpness": 0.48037368059158325, + "layer_2_sharpness": 0.21638667583465576, + "layer_3_sharpness": 0.11982816457748413, + "layer_4_sharpness": 0.07108316570520401, + "layer_5_sharpness": 0.06950299441814423, + "layer_6_sharpness": 0.07945967465639114, + "layer_7_sharpness": 0.11294197291135788, + "layer_8_sharpness": 0.09374687075614929, + "layer_9_sharpness": 0.0745803564786911, + "layer_10_sharpness": 0.05181198939681053, + "layer_11_sharpness": 0.03867805376648903, + "layer_12_sharpness": 0.0589340515434742 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_6000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..898c2cb4d6322c24265c957ce76f6cfeb48c9087 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.2850730419158936, + "total_l1_linf_norm": 7111.2177734375, + "total_spectral_norm": 1.2850730419158936, + "layer_1_update_fnorm": 0.032030560076236725, + "layer_1_max_l1_linf_norm": 0.30994951725006104, + "layer_1_max_spectral_norm": 0.006865084171295166, + "layer_2_update_fnorm": 0.03070778213441372, + "layer_2_max_l1_linf_norm": 0.31704068183898926, + "layer_2_max_spectral_norm": 0.007138795685023069, + "layer_3_update_fnorm": 0.03036513924598694, + "layer_3_max_l1_linf_norm": 0.31296753883361816, + "layer_3_max_spectral_norm": 0.007046314422041178, + "layer_4_update_fnorm": 0.03118838556110859, + "layer_4_max_l1_linf_norm": 0.32550087571144104, + "layer_4_max_spectral_norm": 0.007266683503985405, + "layer_5_update_fnorm": 0.03189966082572937, + "layer_5_max_l1_linf_norm": 0.31411051750183105, + "layer_5_max_spectral_norm": 0.007052943110466003, + "layer_6_update_fnorm": 0.0318608358502388, + "layer_6_max_l1_linf_norm": 0.28360387682914734, + "layer_6_max_spectral_norm": 0.006348953116685152, + "layer_7_update_fnorm": 0.03194769471883774, + "layer_7_max_l1_linf_norm": 0.2701312303543091, + "layer_7_max_spectral_norm": 0.006093492731451988, + "layer_8_update_fnorm": 0.031976114958524704, + "layer_8_max_l1_linf_norm": 0.27564871311187744, + "layer_8_max_spectral_norm": 0.00619188928976655, + "layer_9_update_fnorm": 0.03192821517586708, + "layer_9_max_l1_linf_norm": 0.29796499013900757, + "layer_9_max_spectral_norm": 0.0066626425832509995, + "layer_10_update_fnorm": 0.03192475065588951, + "layer_10_max_l1_linf_norm": 0.2986147999763489, + "layer_10_max_spectral_norm": 0.006729199551045895, + "layer_11_update_fnorm": 0.031939975917339325, + "layer_11_max_l1_linf_norm": 0.315964937210083, + "layer_11_max_spectral_norm": 0.007104119285941124, + "layer_12_update_fnorm": 0.031839195638895035, + "layer_12_max_l1_linf_norm": 0.3052749037742615, + "layer_12_max_spectral_norm": 0.006837103515863419, + "total_sharpness": 0.007304459344595671, + "ip_v_neg_g": 0.004779913928359747, + "cos_v_neg_g": 0.0006553569692187011, + "v_norm": 1.2850730419158936, + "g_norm": 5.675633430480957, + "hv_norm": 1.746756911277771, + "cos_v_hv": 0.005373823456466198, + "hg_norm": 980.8017578125, + "cos_g_hg": 0.45690038800239563, + "v_parallel_norm": 2.497648893040605e-05, + "v_perp_norm": 1.2850730419158936, + "layer_1_v_norm": 0.032030560076236725, + "layer_1_cos_v_neg_g": 0.009588121436536312, + "layer_2_v_norm": 0.03070778213441372, + "layer_2_cos_v_neg_g": 0.009754961356520653, + 
"layer_3_v_norm": 0.030365141108632088, + "layer_3_cos_v_neg_g": 0.008871829137206078, + "layer_4_v_norm": 0.03118838556110859, + "layer_4_cos_v_neg_g": 0.008130950853228569, + "layer_5_v_norm": 0.03189966082572937, + "layer_5_cos_v_neg_g": 0.008989027701318264, + "layer_6_v_norm": 0.0318608358502388, + "layer_6_cos_v_neg_g": 0.008503253571689129, + "layer_7_v_norm": 0.03194769471883774, + "layer_7_cos_v_neg_g": 0.007354082074016333, + "layer_8_v_norm": 0.031976114958524704, + "layer_8_cos_v_neg_g": 0.006255696527659893, + "layer_9_v_norm": 0.03192821517586708, + "layer_9_cos_v_neg_g": 0.0055895233526825905, + "layer_10_v_norm": 0.03192475065588951, + "layer_10_cos_v_neg_g": 0.005923598073422909, + "layer_11_v_norm": 0.031939975917339325, + "layer_11_cos_v_neg_g": 0.005027194507420063, + "layer_12_v_norm": 0.031839195638895035, + "layer_12_cos_v_neg_g": 0.0038493019528687, + "layer_1_sharpness": 0.36483827233314514, + "layer_2_sharpness": 0.12860995531082153, + "layer_3_sharpness": 0.16026785969734192, + "layer_4_sharpness": 0.09402598440647125, + "layer_5_sharpness": 0.10746053606271744, + "layer_6_sharpness": 0.10871002823114395, + "layer_7_sharpness": 0.1434924155473709, + "layer_8_sharpness": 0.13454671204090118, + "layer_9_sharpness": 0.12068099528551102, + "layer_10_sharpness": 0.08659335970878601, + "layer_11_sharpness": 0.07370519638061523, + "layer_12_sharpness": 0.13499325513839722 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_6500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..c01ada21a105662cc0ccf1f3dd5f9ee98a16eddb --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.2957874536514282, + "total_l1_linf_norm": 7170.8427734375, + "total_spectral_norm": 1.2957870960235596, + "layer_1_update_fnorm": 0.031648408621549606, + "layer_1_max_l1_linf_norm": 0.29115530848503113, + "layer_1_max_spectral_norm": 0.0065327961929142475, + "layer_2_update_fnorm": 0.030380213633179665, + "layer_2_max_l1_linf_norm": 0.30976587533950806, + "layer_2_max_spectral_norm": 0.006980407517403364, + "layer_3_update_fnorm": 0.030077379196882248, + "layer_3_max_l1_linf_norm": 0.2926555275917053, + "layer_3_max_spectral_norm": 0.0065587242133915424, + "layer_4_update_fnorm": 0.031005864962935448, + "layer_4_max_l1_linf_norm": 0.28435686230659485, + "layer_4_max_spectral_norm": 0.00641924561932683, + "layer_5_update_fnorm": 0.0316394567489624, + "layer_5_max_l1_linf_norm": 0.2889416217803955, + "layer_5_max_spectral_norm": 0.006505896337330341, + "layer_6_update_fnorm": 0.031943611800670624, + "layer_6_max_l1_linf_norm": 0.2905117869377136, + "layer_6_max_spectral_norm": 0.006484511774033308, + "layer_7_update_fnorm": 0.03192933276295662, + "layer_7_max_l1_linf_norm": 0.26628804206848145, + "layer_7_max_spectral_norm": 0.006038139574229717, + "layer_8_update_fnorm": 0.031815603375434875, + "layer_8_max_l1_linf_norm": 0.25989681482315063, + "layer_8_max_spectral_norm": 0.0058950576931238174, + "layer_9_update_fnorm": 0.03176380321383476, + "layer_9_max_l1_linf_norm": 0.27058714628219604, + "layer_9_max_spectral_norm": 0.006136247888207436, + "layer_10_update_fnorm": 0.03178469464182854, + "layer_10_max_l1_linf_norm": 0.2819928824901581, + "layer_10_max_spectral_norm": 0.006368787959218025, + 
"layer_11_update_fnorm": 0.031888339668512344, + "layer_11_max_l1_linf_norm": 0.3061732053756714, + "layer_11_max_spectral_norm": 0.006865742616355419, + "layer_12_update_fnorm": 0.032017309218645096, + "layer_12_max_l1_linf_norm": 0.31750303506851196, + "layer_12_max_spectral_norm": 0.007091558538377285, + "total_sharpness": 0.005420721136033535, + "ip_v_neg_g": 0.003677276661619544, + "cos_v_neg_g": 0.000511554884724319, + "v_norm": 1.2957874536514282, + "g_norm": 5.5475382804870605, + "hv_norm": 1.3818681240081787, + "cos_v_hv": 0.0050830477848649025, + "hg_norm": 756.3644409179688, + "cos_g_hg": 0.4784407615661621, + "v_parallel_norm": 1.728445931803435e-05, + "v_perp_norm": 1.2957874536514282, + "layer_1_v_norm": 0.031648408621549606, + "layer_1_cos_v_neg_g": 0.007783017121255398, + "layer_2_v_norm": 0.030380213633179665, + "layer_2_cos_v_neg_g": 0.0060163214802742004, + "layer_3_v_norm": 0.03007737547159195, + "layer_3_cos_v_neg_g": 0.004484736826270819, + "layer_4_v_norm": 0.031005864962935448, + "layer_4_cos_v_neg_g": 0.00450861593708396, + "layer_5_v_norm": 0.0316394567489624, + "layer_5_cos_v_neg_g": 0.004252983722835779, + "layer_6_v_norm": 0.03194361552596092, + "layer_6_cos_v_neg_g": 0.005037779454141855, + "layer_7_v_norm": 0.03192933276295662, + "layer_7_cos_v_neg_g": 0.005191306583583355, + "layer_8_v_norm": 0.031815603375434875, + "layer_8_cos_v_neg_g": 0.006687017623335123, + "layer_9_v_norm": 0.03176380321383476, + "layer_9_cos_v_neg_g": 0.006706772837787867, + "layer_10_v_norm": 0.03178469464182854, + "layer_10_cos_v_neg_g": 0.00577516620978713, + "layer_11_v_norm": 0.031888339668512344, + "layer_11_cos_v_neg_g": 0.0048697274178266525, + "layer_12_v_norm": 0.032017309218645096, + "layer_12_cos_v_neg_g": 0.003473993157967925, + "layer_1_sharpness": 0.4428521990776062, + "layer_2_sharpness": 0.14394253492355347, + "layer_3_sharpness": 0.12037260830402374, + "layer_4_sharpness": 0.05788769945502281, + "layer_5_sharpness": 0.07033078372478485, + "layer_6_sharpness": 0.09776431322097778, + "layer_7_sharpness": 0.11868312209844589, + "layer_8_sharpness": 0.09990210086107254, + "layer_9_sharpness": 0.07915886491537094, + "layer_10_sharpness": 0.05946574732661247, + "layer_11_sharpness": 0.054664965718984604, + "layer_12_sharpness": 0.12203837931156158 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_7000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..c1f7e423e5e0612ae17ac94abaca459cf596422a --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3220158815383911, + "total_l1_linf_norm": 7302.875, + "total_spectral_norm": 1.322015643119812, + "layer_1_update_fnorm": 0.031902700662612915, + "layer_1_max_l1_linf_norm": 0.305957555770874, + "layer_1_max_spectral_norm": 0.006815414410084486, + "layer_2_update_fnorm": 0.030841436237096786, + "layer_2_max_l1_linf_norm": 0.3131793737411499, + "layer_2_max_spectral_norm": 0.007122900802642107, + "layer_3_update_fnorm": 0.03077506646513939, + "layer_3_max_l1_linf_norm": 0.34829601645469666, + "layer_3_max_spectral_norm": 0.0077958907932043076, + "layer_4_update_fnorm": 0.03135938569903374, + "layer_4_max_l1_linf_norm": 0.33226773142814636, + "layer_4_max_spectral_norm": 0.007414400111883879, + "layer_5_update_fnorm": 
0.03196157142519951, + "layer_5_max_l1_linf_norm": 0.3204951882362366, + "layer_5_max_spectral_norm": 0.007148980163037777, + "layer_6_update_fnorm": 0.03211549296975136, + "layer_6_max_l1_linf_norm": 0.29722118377685547, + "layer_6_max_spectral_norm": 0.006702893879264593, + "layer_7_update_fnorm": 0.0321061909198761, + "layer_7_max_l1_linf_norm": 0.2750595808029175, + "layer_7_max_spectral_norm": 0.0061745489947497845, + "layer_8_update_fnorm": 0.03210560604929924, + "layer_8_max_l1_linf_norm": 0.28736358880996704, + "layer_8_max_spectral_norm": 0.006461364682763815, + "layer_9_update_fnorm": 0.032132863998413086, + "layer_9_max_l1_linf_norm": 0.3018050193786621, + "layer_9_max_spectral_norm": 0.006754494737833738, + "layer_10_update_fnorm": 0.03227286785840988, + "layer_10_max_l1_linf_norm": 0.3279187083244324, + "layer_10_max_spectral_norm": 0.007366911042481661, + "layer_11_update_fnorm": 0.03225534409284592, + "layer_11_max_l1_linf_norm": 0.3384442925453186, + "layer_11_max_spectral_norm": 0.00756995752453804, + "layer_12_update_fnorm": 0.032338425517082214, + "layer_12_max_l1_linf_norm": 0.3434680700302124, + "layer_12_max_spectral_norm": 0.007740416564047337, + "total_sharpness": 0.00512226577848196, + "ip_v_neg_g": 0.0037751561030745506, + "cos_v_neg_g": 0.0005095475353300571, + "v_norm": 1.3220158815383911, + "g_norm": 5.604198932647705, + "hv_norm": 1.256949543952942, + "cos_v_hv": 0.005387420300394297, + "hg_norm": 755.5093383789062, + "cos_g_hg": 0.4652685821056366, + "v_parallel_norm": 1.601280746399425e-05, + "v_perp_norm": 1.3220158815383911, + "layer_1_v_norm": 0.031902700662612915, + "layer_1_cos_v_neg_g": 0.007087672129273415, + "layer_2_v_norm": 0.030841436237096786, + "layer_2_cos_v_neg_g": 0.00789807178080082, + "layer_3_v_norm": 0.03077506646513939, + "layer_3_cos_v_neg_g": 0.007515931501984596, + "layer_4_v_norm": 0.03135938569903374, + "layer_4_cos_v_neg_g": 0.007446703035384417, + "layer_5_v_norm": 0.03196157142519951, + "layer_5_cos_v_neg_g": 0.008134612813591957, + "layer_6_v_norm": 0.03211549296975136, + "layer_6_cos_v_neg_g": 0.006191106978803873, + "layer_7_v_norm": 0.0321061909198761, + "layer_7_cos_v_neg_g": 0.005194280296564102, + "layer_8_v_norm": 0.03210560604929924, + "layer_8_cos_v_neg_g": 0.004677748307585716, + "layer_9_v_norm": 0.032132863998413086, + "layer_9_cos_v_neg_g": 0.005393757950514555, + "layer_10_v_norm": 0.03227286785840988, + "layer_10_cos_v_neg_g": 0.004633751697838306, + "layer_11_v_norm": 0.03225534409284592, + "layer_11_cos_v_neg_g": 0.003739372594282031, + "layer_12_v_norm": 0.032338425517082214, + "layer_12_cos_v_neg_g": 0.003963463939726353, + "layer_1_sharpness": 0.3220503628253937, + "layer_2_sharpness": 0.09429436922073364, + "layer_3_sharpness": 0.13612985610961914, + "layer_4_sharpness": 0.07065481692552567, + "layer_5_sharpness": 0.07545355707406998, + "layer_6_sharpness": 0.08055420219898224, + "layer_7_sharpness": 0.09334384649991989, + "layer_8_sharpness": 0.09940604120492935, + "layer_9_sharpness": 0.09429782629013062, + "layer_10_sharpness": 0.07334479689598083, + "layer_11_sharpness": 0.05930204689502716, + "layer_12_sharpness": 0.09262320399284363 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_7500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..62cfe64d33af7b5c84d5e6a69f97b801809cbc9b --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3684186935424805, + "total_l1_linf_norm": 7544.53515625, + "total_spectral_norm": 1.3684186935424805, + "layer_1_update_fnorm": 0.03239545598626137, + "layer_1_max_l1_linf_norm": 0.3349401354789734, + "layer_1_max_spectral_norm": 0.007428016979247332, + "layer_2_update_fnorm": 0.03112509474158287, + "layer_2_max_l1_linf_norm": 0.3575012683868408, + "layer_2_max_spectral_norm": 0.00797773152589798, + "layer_3_update_fnorm": 0.03090997040271759, + "layer_3_max_l1_linf_norm": 0.3492845892906189, + "layer_3_max_spectral_norm": 0.007834327407181263, + "layer_4_update_fnorm": 0.03165088966488838, + "layer_4_max_l1_linf_norm": 0.3423241674900055, + "layer_4_max_spectral_norm": 0.007704815827310085, + "layer_5_update_fnorm": 0.03214295953512192, + "layer_5_max_l1_linf_norm": 0.3257330060005188, + "layer_5_max_spectral_norm": 0.007335492875427008, + "layer_6_update_fnorm": 0.03243729844689369, + "layer_6_max_l1_linf_norm": 0.3213009834289551, + "layer_6_max_spectral_norm": 0.007200582884252071, + "layer_7_update_fnorm": 0.03224257379770279, + "layer_7_max_l1_linf_norm": 0.3118193447589874, + "layer_7_max_spectral_norm": 0.00695606367662549, + "layer_8_update_fnorm": 0.0322522297501564, + "layer_8_max_l1_linf_norm": 0.3112791180610657, + "layer_8_max_spectral_norm": 0.006908111274242401, + "layer_9_update_fnorm": 0.032154783606529236, + "layer_9_max_l1_linf_norm": 0.30452513694763184, + "layer_9_max_spectral_norm": 0.006851170212030411, + "layer_10_update_fnorm": 0.032237883657217026, + "layer_10_max_l1_linf_norm": 0.32016509771347046, + "layer_10_max_spectral_norm": 0.00720837851986289, + "layer_11_update_fnorm": 0.03232353925704956, + "layer_11_max_l1_linf_norm": 0.34253841638565063, + "layer_11_max_spectral_norm": 0.007711593993008137, + "layer_12_update_fnorm": 0.03219503164291382, + "layer_12_max_l1_linf_norm": 0.3387695252895355, + "layer_12_max_spectral_norm": 0.007652857340872288, + "total_sharpness": 0.005237973295152187, + "ip_v_neg_g": 0.005966432858258486, + "cos_v_neg_g": 0.0007188823656179011, + "v_norm": 1.3684186935424805, + "g_norm": 6.065099716186523, + "hv_norm": 1.5440925359725952, + "cos_v_hv": 0.004642040468752384, + "hg_norm": 2317.919921875, + "cos_g_hg": 0.4256332814693451, + "v_parallel_norm": 2.31546819122741e-05, + "v_perp_norm": 1.3684186935424805, + "layer_1_v_norm": 0.03239545598626137, + "layer_1_cos_v_neg_g": 0.01145151536911726, + "layer_2_v_norm": 0.03112509474158287, + "layer_2_cos_v_neg_g": 0.0103427330031991, + "layer_3_v_norm": 0.03090997226536274, + "layer_3_cos_v_neg_g": 0.010273941792547703, + "layer_4_v_norm": 0.03165088966488838, + "layer_4_cos_v_neg_g": 0.009493510238826275, + "layer_5_v_norm": 0.03214295953512192, + "layer_5_cos_v_neg_g": 0.009128117933869362, + "layer_6_v_norm": 0.032437294721603394, + "layer_6_cos_v_neg_g": 0.008354765363037586, + "layer_7_v_norm": 0.03224257379770279, + "layer_7_cos_v_neg_g": 0.008373972959816456, + "layer_8_v_norm": 0.0322522297501564, + "layer_8_cos_v_neg_g": 0.008083876222372055, + "layer_9_v_norm": 0.032154783606529236, + "layer_9_cos_v_neg_g": 0.008231937885284424, + "layer_10_v_norm": 0.032237883657217026, + "layer_10_cos_v_neg_g": 0.007194381207227707, + "layer_11_v_norm": 0.03232353925704956, + "layer_11_cos_v_neg_g": 0.005708526819944382, + "layer_12_v_norm": 0.03219503164291382, + "layer_12_cos_v_neg_g": 0.005240767262876034, + "layer_1_sharpness": 
0.34656617045402527, + "layer_2_sharpness": 0.09089794009923935, + "layer_3_sharpness": 0.12212740629911423, + "layer_4_sharpness": 0.07578997313976288, + "layer_5_sharpness": 0.07650047540664673, + "layer_6_sharpness": 0.09541147202253342, + "layer_7_sharpness": 0.15173079073429108, + "layer_8_sharpness": 0.146283820271492, + "layer_9_sharpness": 0.10599543154239655, + "layer_10_sharpness": 0.07377146929502487, + "layer_11_sharpness": 0.06041943281888962, + "layer_12_sharpness": 0.0916629359126091 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_8000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..099ffc7a1afecc4186ee134f2f5635e1346205a1 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3484487533569336, + "total_l1_linf_norm": 7437.76123046875, + "total_spectral_norm": 1.3484488725662231, + "layer_1_update_fnorm": 0.031822532415390015, + "layer_1_max_l1_linf_norm": 0.29647916555404663, + "layer_1_max_spectral_norm": 0.006708033382892609, + "layer_2_update_fnorm": 0.03099307045340538, + "layer_2_max_l1_linf_norm": 0.32831645011901855, + "layer_2_max_spectral_norm": 0.0074262903071939945, + "layer_3_update_fnorm": 0.030875667929649353, + "layer_3_max_l1_linf_norm": 0.3244660496711731, + "layer_3_max_spectral_norm": 0.0073217055760324, + "layer_4_update_fnorm": 0.03141264617443085, + "layer_4_max_l1_linf_norm": 0.33153438568115234, + "layer_4_max_spectral_norm": 0.007503609172999859, + "layer_5_update_fnorm": 0.03207209333777428, + "layer_5_max_l1_linf_norm": 0.32576972246170044, + "layer_5_max_spectral_norm": 0.00734896631911397, + "layer_6_update_fnorm": 0.03213826194405556, + "layer_6_max_l1_linf_norm": 0.29931044578552246, + "layer_6_max_spectral_norm": 0.006756844464689493, + "layer_7_update_fnorm": 0.03216017037630081, + "layer_7_max_l1_linf_norm": 0.2855186462402344, + "layer_7_max_spectral_norm": 0.006417801138013601, + "layer_8_update_fnorm": 0.032091304659843445, + "layer_8_max_l1_linf_norm": 0.2974209785461426, + "layer_8_max_spectral_norm": 0.00668242946267128, + "layer_9_update_fnorm": 0.032024264335632324, + "layer_9_max_l1_linf_norm": 0.29807591438293457, + "layer_9_max_spectral_norm": 0.006714808288961649, + "layer_10_update_fnorm": 0.032361894845962524, + "layer_10_max_l1_linf_norm": 0.3302420973777771, + "layer_10_max_spectral_norm": 0.0073817335069179535, + "layer_11_update_fnorm": 0.032430075109004974, + "layer_11_max_l1_linf_norm": 0.3568692207336426, + "layer_11_max_spectral_norm": 0.007938059978187084, + "layer_12_update_fnorm": 0.03235208988189697, + "layer_12_max_l1_linf_norm": 0.35416358709335327, + "layer_12_max_spectral_norm": 0.007964521646499634, + "total_sharpness": 0.0040713706985116005, + "ip_v_neg_g": 0.004074800293892622, + "cos_v_neg_g": 0.0005568794440478086, + "v_norm": 1.3484487533569336, + "g_norm": 5.42638635635376, + "hv_norm": 1.1643662452697754, + "cos_v_hv": 0.004715041257441044, + "hg_norm": 840.7766723632812, + "cos_g_hg": 0.47673264145851135, + "v_parallel_norm": 1.8908540369011462e-05, + "v_perp_norm": 1.3484487533569336, + "layer_1_v_norm": 0.031822532415390015, + "layer_1_cos_v_neg_g": 0.00427309051156044, + "layer_2_v_norm": 0.03099307045340538, + "layer_2_cos_v_neg_g": 0.0037499612662941217, + "layer_3_v_norm": 
0.030875667929649353, + "layer_3_cos_v_neg_g": 0.004240144044160843, + "layer_4_v_norm": 0.03141264617443085, + "layer_4_cos_v_neg_g": 0.0044494448229670525, + "layer_5_v_norm": 0.03207209333777428, + "layer_5_cos_v_neg_g": 0.0049659921787679195, + "layer_6_v_norm": 0.03213826194405556, + "layer_6_cos_v_neg_g": 0.0063124955631792545, + "layer_7_v_norm": 0.03216017037630081, + "layer_7_cos_v_neg_g": 0.007495174650102854, + "layer_8_v_norm": 0.032091304659843445, + "layer_8_cos_v_neg_g": 0.007980500347912312, + "layer_9_v_norm": 0.032024264335632324, + "layer_9_cos_v_neg_g": 0.007565814536064863, + "layer_10_v_norm": 0.032361894845962524, + "layer_10_cos_v_neg_g": 0.006221290212124586, + "layer_11_v_norm": 0.032430071383714676, + "layer_11_cos_v_neg_g": 0.0061705270782113075, + "layer_12_v_norm": 0.03235208988189697, + "layer_12_cos_v_neg_g": 0.004815276246517897, + "layer_1_sharpness": 0.07520472258329391, + "layer_2_sharpness": 0.05208519101142883, + "layer_3_sharpness": 0.08227167278528214, + "layer_4_sharpness": 0.054121505469083786, + "layer_5_sharpness": 0.06721940636634827, + "layer_6_sharpness": 0.07463084161281586, + "layer_7_sharpness": 0.10177015513181686, + "layer_8_sharpness": 0.10768024623394012, + "layer_9_sharpness": 0.08699148893356323, + "layer_10_sharpness": 0.07819150388240814, + "layer_11_sharpness": 0.062247686088085175, + "layer_12_sharpness": 0.07690060883760452 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_8500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..b25d5902ba71ee73ccf4d7b1a8962fae72d46ecc --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3524348735809326, + "total_l1_linf_norm": 7456.68115234375, + "total_spectral_norm": 1.3524349927902222, + "layer_1_update_fnorm": 0.03205686807632446, + "layer_1_max_l1_linf_norm": 0.3055720925331116, + "layer_1_max_spectral_norm": 0.006925566121935844, + "layer_2_update_fnorm": 0.030925150960683823, + "layer_2_max_l1_linf_norm": 0.33301424980163574, + "layer_2_max_spectral_norm": 0.007535703480243683, + "layer_3_update_fnorm": 0.030981868505477905, + "layer_3_max_l1_linf_norm": 0.3425089120864868, + "layer_3_max_spectral_norm": 0.007668131962418556, + "layer_4_update_fnorm": 0.03168976306915283, + "layer_4_max_l1_linf_norm": 0.3546174168586731, + "layer_4_max_spectral_norm": 0.008001256734132767, + "layer_5_update_fnorm": 0.03202243894338608, + "layer_5_max_l1_linf_norm": 0.3281223177909851, + "layer_5_max_spectral_norm": 0.007361091207712889, + "layer_6_update_fnorm": 0.032188691198825836, + "layer_6_max_l1_linf_norm": 0.30892685055732727, + "layer_6_max_spectral_norm": 0.006928896531462669, + "layer_7_update_fnorm": 0.03221697732806206, + "layer_7_max_l1_linf_norm": 0.28218749165534973, + "layer_7_max_spectral_norm": 0.006375922821462154, + "layer_8_update_fnorm": 0.03220022842288017, + "layer_8_max_l1_linf_norm": 0.29957497119903564, + "layer_8_max_spectral_norm": 0.006719145458191633, + "layer_9_update_fnorm": 0.03210828825831413, + "layer_9_max_l1_linf_norm": 0.31679829955101013, + "layer_9_max_spectral_norm": 0.0070493584498763084, + "layer_10_update_fnorm": 0.03219682350754738, + "layer_10_max_l1_linf_norm": 0.32108327746391296, + "layer_10_max_spectral_norm": 0.0071503035724163055, + 
"layer_11_update_fnorm": 0.03230191022157669, + "layer_11_max_l1_linf_norm": 0.34281665086746216, + "layer_11_max_spectral_norm": 0.0077057830058038235, + "layer_12_update_fnorm": 0.03216838836669922, + "layer_12_max_l1_linf_norm": 0.33638182282447815, + "layer_12_max_spectral_norm": 0.007573832292109728, + "total_sharpness": 0.00485767237842083, + "ip_v_neg_g": 0.0049509815871715546, + "cos_v_neg_g": 0.0006700058584101498, + "v_norm": 1.3524348735809326, + "g_norm": 5.46381950378418, + "hv_norm": 1.2824872732162476, + "cos_v_hv": 0.005122613161802292, + "hg_norm": 1380.805419921875, + "cos_g_hg": 0.41168996691703796, + "v_parallel_norm": 2.0902545657008886e-05, + "v_perp_norm": 1.3524348735809326, + "layer_1_v_norm": 0.03205686807632446, + "layer_1_cos_v_neg_g": 0.0064786989241838455, + "layer_2_v_norm": 0.030925150960683823, + "layer_2_cos_v_neg_g": 0.006342337932437658, + "layer_3_v_norm": 0.030981868505477905, + "layer_3_cos_v_neg_g": 0.0060220989398658276, + "layer_4_v_norm": 0.03168976306915283, + "layer_4_cos_v_neg_g": 0.007066321559250355, + "layer_5_v_norm": 0.03202243894338608, + "layer_5_cos_v_neg_g": 0.009767084382474422, + "layer_6_v_norm": 0.032188694924116135, + "layer_6_cos_v_neg_g": 0.007155647501349449, + "layer_7_v_norm": 0.03221697732806206, + "layer_7_cos_v_neg_g": 0.0076646627858281136, + "layer_8_v_norm": 0.03220022842288017, + "layer_8_cos_v_neg_g": 0.009141530841588974, + "layer_9_v_norm": 0.03210828825831413, + "layer_9_cos_v_neg_g": 0.009137732908129692, + "layer_10_v_norm": 0.03219682350754738, + "layer_10_cos_v_neg_g": 0.008135221898555756, + "layer_11_v_norm": 0.03230190649628639, + "layer_11_cos_v_neg_g": 0.006222831085324287, + "layer_12_v_norm": 0.03216838836669922, + "layer_12_cos_v_neg_g": 0.006548409350216389, + "layer_1_sharpness": 0.09306029975414276, + "layer_2_sharpness": 0.06149197369813919, + "layer_3_sharpness": 0.09547539800405502, + "layer_4_sharpness": 0.06838739663362503, + "layer_5_sharpness": 0.08565360307693481, + "layer_6_sharpness": 0.09570232033729553, + "layer_7_sharpness": 0.12478575855493546, + "layer_8_sharpness": 0.13254697620868683, + "layer_9_sharpness": 0.1127278283238411, + "layer_10_sharpness": 0.07193901389837265, + "layer_11_sharpness": 0.06414277106523514, + "layer_12_sharpness": 0.13605330884456635 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_9000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..1c2051c43445817caf6b891f9881fa9eeab1d1a4 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3568220138549805, + "total_l1_linf_norm": 7488.814453125, + "total_spectral_norm": 1.35682213306427, + "layer_1_update_fnorm": 0.0318589024245739, + "layer_1_max_l1_linf_norm": 0.3034909963607788, + "layer_1_max_spectral_norm": 0.006883311551064253, + "layer_2_update_fnorm": 0.031090661883354187, + "layer_2_max_l1_linf_norm": 0.33794841170310974, + "layer_2_max_spectral_norm": 0.007580443751066923, + "layer_3_update_fnorm": 0.0313723050057888, + "layer_3_max_l1_linf_norm": 0.3478580713272095, + "layer_3_max_spectral_norm": 0.007833986543118954, + "layer_4_update_fnorm": 0.0315425805747509, + "layer_4_max_l1_linf_norm": 0.33944976329803467, + "layer_4_max_spectral_norm": 0.007624705322086811, + "layer_5_update_fnorm": 
0.03211202099919319, + "layer_5_max_l1_linf_norm": 0.32291683554649353, + "layer_5_max_spectral_norm": 0.007266978733241558, + "layer_6_update_fnorm": 0.032233256846666336, + "layer_6_max_l1_linf_norm": 0.31616440415382385, + "layer_6_max_spectral_norm": 0.00706414645537734, + "layer_7_update_fnorm": 0.032197799533605576, + "layer_7_max_l1_linf_norm": 0.29117920994758606, + "layer_7_max_spectral_norm": 0.006568664684891701, + "layer_8_update_fnorm": 0.03209361806511879, + "layer_8_max_l1_linf_norm": 0.2961253523826599, + "layer_8_max_spectral_norm": 0.006661997642368078, + "layer_9_update_fnorm": 0.03214234486222267, + "layer_9_max_l1_linf_norm": 0.30863404273986816, + "layer_9_max_spectral_norm": 0.0069499812088906765, + "layer_10_update_fnorm": 0.03224089741706848, + "layer_10_max_l1_linf_norm": 0.32970502972602844, + "layer_10_max_spectral_norm": 0.007411877159029245, + "layer_11_update_fnorm": 0.032518450170755386, + "layer_11_max_l1_linf_norm": 0.35920849442481995, + "layer_11_max_spectral_norm": 0.008075306192040443, + "layer_12_update_fnorm": 0.032397717237472534, + "layer_12_max_l1_linf_norm": 0.36038702726364136, + "layer_12_max_spectral_norm": 0.008053378202021122, + "total_sharpness": 0.0037629175931215286, + "ip_v_neg_g": 0.003513805102556944, + "cos_v_neg_g": 0.000444937904831022, + "v_norm": 1.3568220138549805, + "g_norm": 5.820434093475342, + "hv_norm": 1.1781774759292603, + "cos_v_hv": 0.0043334802612662315, + "hg_norm": 3638.7158203125, + "cos_g_hg": 0.43197309970855713, + "v_parallel_norm": 1.6756295735831372e-05, + "v_perp_norm": 1.3568220138549805, + "layer_1_v_norm": 0.0318589024245739, + "layer_1_cos_v_neg_g": 0.0038300291635096073, + "layer_2_v_norm": 0.031090661883354187, + "layer_2_cos_v_neg_g": 0.0028117732144892216, + "layer_3_v_norm": 0.0313723050057888, + "layer_3_cos_v_neg_g": 0.005093933083117008, + "layer_4_v_norm": 0.0315425805747509, + "layer_4_cos_v_neg_g": 0.005457296501845121, + "layer_5_v_norm": 0.03211202099919319, + "layer_5_cos_v_neg_g": 0.006174921058118343, + "layer_6_v_norm": 0.03223325312137604, + "layer_6_cos_v_neg_g": 0.00597814517095685, + "layer_7_v_norm": 0.032197799533605576, + "layer_7_cos_v_neg_g": 0.005545341409742832, + "layer_8_v_norm": 0.03209361806511879, + "layer_8_cos_v_neg_g": 0.0056046429090201855, + "layer_9_v_norm": 0.03214234486222267, + "layer_9_cos_v_neg_g": 0.00590258464217186, + "layer_10_v_norm": 0.03224089741706848, + "layer_10_cos_v_neg_g": 0.005061168223619461, + "layer_11_v_norm": 0.032518450170755386, + "layer_11_cos_v_neg_g": 0.004579465836286545, + "layer_12_v_norm": 0.032397717237472534, + "layer_12_cos_v_neg_g": 0.0008240483584813774, + "layer_1_sharpness": 0.06452512741088867, + "layer_2_sharpness": 0.050300806760787964, + "layer_3_sharpness": 0.07646196335554123, + "layer_4_sharpness": 0.051079414784908295, + "layer_5_sharpness": 0.05302538350224495, + "layer_6_sharpness": 0.06782770156860352, + "layer_7_sharpness": 0.08397141844034195, + "layer_8_sharpness": 0.08700569719076157, + "layer_9_sharpness": 0.0815083459019661, + "layer_10_sharpness": 0.06727629154920578, + "layer_11_sharpness": 0.06908166408538818, + "layer_12_sharpness": 0.14099547266960144 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_9500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..7de5efa9a2b15d98fa92312515478c001aa8b4d4 --- 
/dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3185651302337646, + "total_l1_linf_norm": 7274.7890625, + "total_spectral_norm": 1.3185652494430542, + "layer_1_update_fnorm": 0.03249432146549225, + "layer_1_max_l1_linf_norm": 0.34944799542427063, + "layer_1_max_spectral_norm": 0.007813765667378902, + "layer_2_update_fnorm": 0.0310442503541708, + "layer_2_max_l1_linf_norm": 0.3442763090133667, + "layer_2_max_spectral_norm": 0.007708603050559759, + "layer_3_update_fnorm": 0.030833764001727104, + "layer_3_max_l1_linf_norm": 0.33535200357437134, + "layer_3_max_spectral_norm": 0.007535491604357958, + "layer_4_update_fnorm": 0.031521644443273544, + "layer_4_max_l1_linf_norm": 0.33098915219306946, + "layer_4_max_spectral_norm": 0.007402980700135231, + "layer_5_update_fnorm": 0.03196493908762932, + "layer_5_max_l1_linf_norm": 0.3223070502281189, + "layer_5_max_spectral_norm": 0.007204820401966572, + "layer_6_update_fnorm": 0.03225790336728096, + "layer_6_max_l1_linf_norm": 0.31621211767196655, + "layer_6_max_spectral_norm": 0.007020111195743084, + "layer_7_update_fnorm": 0.03220435604453087, + "layer_7_max_l1_linf_norm": 0.30762407183647156, + "layer_7_max_spectral_norm": 0.006901965942233801, + "layer_8_update_fnorm": 0.03221989423036575, + "layer_8_max_l1_linf_norm": 0.30979013442993164, + "layer_8_max_spectral_norm": 0.006916491314768791, + "layer_9_update_fnorm": 0.03203829750418663, + "layer_9_max_l1_linf_norm": 0.3068729043006897, + "layer_9_max_spectral_norm": 0.0068860603496432304, + "layer_10_update_fnorm": 0.03224841505289078, + "layer_10_max_l1_linf_norm": 0.32158076763153076, + "layer_10_max_spectral_norm": 0.00722096860408783, + "layer_11_update_fnorm": 0.032374367117881775, + "layer_11_max_l1_linf_norm": 0.35285794734954834, + "layer_11_max_spectral_norm": 0.007907967083156109, + "layer_12_update_fnorm": 0.032442934811115265, + "layer_12_max_l1_linf_norm": 0.3511849641799927, + "layer_12_max_spectral_norm": 0.007963240146636963, + "total_sharpness": 0.005548775661736727, + "ip_v_neg_g": 0.004141804296523333, + "cos_v_neg_g": 0.000611643074080348, + "v_norm": 1.3185651302337646, + "g_norm": 5.135585308074951, + "hv_norm": 1.4298511743545532, + "cos_v_hv": 0.005116911605000496, + "hg_norm": 671.41796875, + "cos_g_hg": 0.45769691467285156, + "v_parallel_norm": 1.6868247257662006e-05, + "v_perp_norm": 1.3185651302337646, + "layer_1_v_norm": 0.03249432146549225, + "layer_1_cos_v_neg_g": 0.011521188542246819, + "layer_2_v_norm": 0.0310442503541708, + "layer_2_cos_v_neg_g": 0.009866553358733654, + "layer_3_v_norm": 0.030833765864372253, + "layer_3_cos_v_neg_g": 0.00869158748537302, + "layer_4_v_norm": 0.031521644443273544, + "layer_4_cos_v_neg_g": 0.008271361701190472, + "layer_5_v_norm": 0.03196493908762932, + "layer_5_cos_v_neg_g": 0.0069441404193639755, + "layer_6_v_norm": 0.03225790336728096, + "layer_6_cos_v_neg_g": 0.007722064852714539, + "layer_7_v_norm": 0.03220435604453087, + "layer_7_cos_v_neg_g": 0.007391707971692085, + "layer_8_v_norm": 0.03221989423036575, + "layer_8_cos_v_neg_g": 0.007411431986838579, + "layer_9_v_norm": 0.03203829750418663, + "layer_9_cos_v_neg_g": 0.005526332650333643, + "layer_10_v_norm": 0.03224841505289078, + "layer_10_cos_v_neg_g": 0.005403388291597366, + "layer_11_v_norm": 0.032374367117881775, + "layer_11_cos_v_neg_g": 0.0046570696868002415, + "layer_12_v_norm": 0.032442934811115265, + "layer_12_cos_v_neg_g": 0.0035585451405495405, + 
"layer_1_sharpness": 0.47239527106285095, + "layer_2_sharpness": 0.11277256160974503, + "layer_3_sharpness": 0.12219064682722092, + "layer_4_sharpness": 0.06319982558488846, + "layer_5_sharpness": 0.07626785337924957, + "layer_6_sharpness": 0.09904951602220535, + "layer_7_sharpness": 0.13744089007377625, + "layer_8_sharpness": 0.12985673546791077, + "layer_9_sharpness": 0.08854552358388901, + "layer_10_sharpness": 0.07362205535173416, + "layer_11_sharpness": 0.06351825594902039, + "layer_12_sharpness": 0.10213372856378555 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/training_log.txt b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..a3be51c9d75fdad689934d6630e91af5f42c8dbf --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_43/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, 
re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + 
write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. 
+ """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. + # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? 
+ grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. 
Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. + loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. 
Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = <v, -g>; cos_v_neg_g = <v, -g> / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for 
group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
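+ # (master process only) run a single forward/backward on one training batch, then serialize the weights with
+ # write_model (fp32 and bf16) and the x/y/logits/loss/grad debug state with write_state, and reset the loader.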
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
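+ # e.g. any p with p.dim() >= 2 (weight matrices, embeddings) goes to the decayed group, while p.dim() < 2
+ # (biases, norm gains) gets weight_decay=0.0; with the default --weight_decay 0.0 both groups behave the same.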
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + 
device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
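+ # After the optimizer step below, the realized update is reconstructed as p_before - p_after and stored in
+ # last_training_update, which the next sharpness analysis consumes as the update direction v.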
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026602 +step:0 train loss:11.019236 +step:1 train loss:11.024820 +step:2 train loss:11.018538 +step:3 train loss:11.016748 +step:4 train loss:11.011624 +step:5 train loss:11.006145 +step:6 train loss:10.996647 +step:7 train loss:10.989803 +step:8 train loss:10.979490 +step:9 train loss:10.968807 +step:10 train loss:10.954557 +step:11 train loss:10.945145 +step:12 train loss:10.925711 +step:13 train loss:10.912363 +step:14 train loss:10.891300 +step:15 train loss:10.874619 +step:16 train loss:10.854683 +step:17 train loss:10.836144 +step:18 train loss:10.812094 +step:19 train loss:10.789505 +step:20 train loss:10.761374 +step:21 train loss:10.739804 +step:22 train loss:10.707506 +step:23 train loss:10.684285 +step:24 train loss:10.647919 +step:25 train loss:10.625851 +step:26 train loss:10.586983 +step:27 train loss:10.554924 +step:28 train loss:10.525660 +step:29 train loss:10.490969 +step:30 train loss:10.453743 +step:31 train loss:10.413169 +step:32 train loss:10.371225 +step:33 train loss:10.335739 +step:34 train loss:10.300295 +step:35 train loss:10.250214 +step:36 train loss:10.210142 +step:37 train loss:10.156153 
+step:38 train loss:10.120667 +step:39 train loss:10.068211 +step:40 train loss:10.024977 +step:41 train loss:9.969172 +step:42 train loss:9.934351 +step:43 train loss:9.863052 +step:44 train loss:9.825000 +step:45 train loss:9.763763 +step:46 train loss:9.725681 +step:47 train loss:9.665627 +step:48 train loss:9.611405 +step:49 train loss:9.546303 +step:50 train loss:9.487167 +step:51 train loss:9.421865 +step:52 train loss:9.381462 +step:53 train loss:9.317208 +step:54 train loss:9.263264 +step:55 train loss:9.192257 +step:56 train loss:9.134899 +step:57 train loss:9.085657 +step:58 train loss:9.007849 +step:59 train loss:8.954746 +step:60 train loss:8.893790 +step:61 train loss:8.838655 +step:62 train loss:8.777858 +step:63 train loss:8.752916 +step:64 train loss:8.645276 +step:65 train loss:8.592652 +step:66 train loss:8.549864 +step:67 train loss:8.495429 +step:68 train loss:8.437706 +step:69 train loss:8.370280 +step:70 train loss:8.315664 +step:71 train loss:8.239923 +step:72 train loss:8.218147 +step:73 train loss:8.149014 +step:74 train loss:8.122777 +step:75 train loss:8.043582 +step:76 train loss:8.057850 +step:77 train loss:7.968685 +step:78 train loss:7.837281 +step:79 train loss:7.864217 +step:80 train loss:7.829777 +step:81 train loss:7.833018 +step:82 train loss:7.798831 +step:83 train loss:7.742934 +step:84 train loss:7.706695 +step:85 train loss:7.669087 +step:86 train loss:7.641686 +step:87 train loss:7.612958 +step:88 train loss:7.609761 +step:89 train loss:7.572513 +step:90 train loss:7.605917 +step:91 train loss:7.607817 +step:92 train loss:7.601631 +step:93 train loss:7.557944 +step:94 train loss:7.542251 +step:95 train loss:7.504274 +step:96 train loss:7.581106 +step:97 train loss:7.538252 +step:98 train loss:7.532405 +step:99 train loss:7.502985 +step:100 train loss:7.572433 +step:101 train loss:7.458469 +step:102 train loss:7.450791 +step:103 train loss:7.437501 +step:104 train loss:7.475556 +step:105 train loss:7.525498 +step:106 train loss:7.465392 +step:107 train loss:7.430956 +step:108 train loss:7.435651 +step:109 train loss:7.463996 +step:110 train loss:7.394883 +step:111 train loss:7.399209 +step:112 train loss:7.386163 +step:113 train loss:7.346397 +step:114 train loss:7.402504 +step:115 train loss:7.342630 +step:116 train loss:7.318933 +step:117 train loss:7.255287 +step:118 train loss:7.314672 +step:119 train loss:7.254005 +step:120 train loss:7.259476 +step:121 train loss:7.184059 +step:122 train loss:7.253476 +step:123 train loss:7.178230 +step:124 train loss:7.160430 +step:125 train loss:7.128272 +step:126 train loss:7.202172 +step:127 train loss:7.120960 +step:128 train loss:7.142542 +step:129 train loss:7.113830 +step:130 train loss:7.158092 +step:131 train loss:7.087535 +step:132 train loss:6.996132 +step:133 train loss:7.059981 +step:134 train loss:7.012331 +step:135 train loss:6.926310 +step:136 train loss:6.950982 +step:137 train loss:6.946041 +step:138 train loss:6.872418 +step:139 train loss:6.946517 +step:140 train loss:6.850881 +step:141 train loss:6.940014 +step:142 train loss:6.882080 +step:143 train loss:6.886386 +step:144 train loss:6.847346 +step:145 train loss:6.780148 +step:146 train loss:6.781556 +step:147 train loss:6.833702 +step:148 train loss:6.830763 +step:149 train loss:6.769066 +step:150 train loss:6.776070 +step:151 train loss:6.682482 +step:152 train loss:6.710789 +step:153 train loss:6.679802 +step:154 train loss:6.758279 +step:155 train loss:6.723269 +step:156 train loss:6.754190 +step:157 train loss:6.644296 +step:158 
train loss:6.620853 +step:159 train loss:6.657717 +step:160 train loss:6.628277 +step:161 train loss:6.614147 +step:162 train loss:6.584664 +step:163 train loss:6.594821 +step:164 train loss:6.605763 +step:165 train loss:6.603760 +step:166 train loss:6.553030 +step:167 train loss:6.545910 +step:168 train loss:6.514955 +step:169 train loss:6.467065 +step:170 train loss:6.437603 +step:171 train loss:6.540133 +step:172 train loss:6.467330 +step:173 train loss:6.501070 +step:174 train loss:6.506555 +step:175 train loss:6.466532 +step:176 train loss:6.411587 +step:177 train loss:6.457833 +step:178 train loss:6.462443 +step:179 train loss:6.411017 +step:180 train loss:6.396076 +step:181 train loss:6.427889 +step:182 train loss:6.354548 +step:183 train loss:6.448623 +step:184 train loss:6.414826 +step:185 train loss:6.323317 +step:186 train loss:6.477183 +step:187 train loss:6.405360 +step:188 train loss:6.225581 +step:189 train loss:6.381281 +step:190 train loss:6.375341 +step:191 train loss:6.292823 +step:192 train loss:6.202884 +step:193 train loss:6.348667 +step:194 train loss:6.373857 +step:195 train loss:6.361891 +step:196 train loss:6.334280 +step:197 train loss:6.330492 +step:198 train loss:6.268880 +step:199 train loss:6.349686 +step:200 train loss:6.380844 +step:201 train loss:6.314031 +step:202 train loss:6.316543 +step:203 train loss:6.271128 +step:204 train loss:6.312051 +step:205 train loss:6.164704 +step:206 train loss:6.301930 +step:207 train loss:6.267086 +step:208 train loss:6.210859 +step:209 train loss:6.210509 +step:210 train loss:6.207448 +step:211 train loss:6.282187 +step:212 train loss:6.227789 +step:213 train loss:6.243891 +step:214 train loss:6.226130 +step:215 train loss:6.248057 +step:216 train loss:6.190903 +step:217 train loss:6.190117 +step:218 train loss:6.170602 +step:219 train loss:6.145400 +step:220 train loss:6.195515 +step:221 train loss:6.146484 +step:222 train loss:6.189777 +step:223 train loss:6.200299 +step:224 train loss:6.191254 +step:225 train loss:6.127002 +step:226 train loss:6.131976 +step:227 train loss:6.195228 +step:228 train loss:6.160684 +step:229 train loss:6.235519 +step:230 train loss:6.100174 +step:231 train loss:6.153705 +step:232 train loss:6.138903 +step:233 train loss:6.109329 +step:234 train loss:6.106914 +step:235 train loss:6.190744 +step:236 train loss:6.134586 +step:237 train loss:6.163826 +step:238 train loss:6.170588 +step:239 train loss:6.076916 +step:240 train loss:6.149206 +step:241 train loss:6.181974 +step:242 train loss:6.165941 +step:243 train loss:6.072680 +step:244 train loss:6.099966 +step:245 train loss:6.086198 +step:246 train loss:6.082518 +step:247 train loss:6.074821 +step:248 train loss:6.032528 +step:249 train loss:6.091398 +step:250 validation loss:6.087726 +step:250 train loss:6.058904 +step:251 train loss:6.093236 +step:252 train loss:6.052453 +step:253 train loss:6.053420 +step:254 train loss:6.014860 +step:255 train loss:6.053874 +step:256 train loss:6.055407 +step:257 train loss:6.101603 +step:258 train loss:5.995887 +step:259 train loss:6.026617 +step:260 train loss:6.000416 +step:261 train loss:5.995317 +step:262 train loss:6.067343 +step:263 train loss:6.026751 +step:264 train loss:5.992643 +step:265 train loss:6.023514 +step:266 train loss:5.983213 +step:267 train loss:6.020650 +step:268 train loss:5.970526 +step:269 train loss:5.998539 +step:270 train loss:6.013252 +step:271 train loss:6.000285 +step:272 train loss:5.957730 +step:273 train loss:6.032380 +step:274 train loss:5.946148 +step:275 train 
loss:5.980667 +step:276 train loss:5.956410 +step:277 train loss:5.955048 +step:278 train loss:5.933028 +step:279 train loss:5.905724 +step:280 train loss:5.976803 +step:281 train loss:6.057300 +step:282 train loss:5.939680 +step:283 train loss:5.947910 +step:284 train loss:5.913592 +step:285 train loss:5.968172 +step:286 train loss:5.941197 +step:287 train loss:5.920200 +step:288 train loss:5.896909 +step:289 train loss:5.923046 +step:290 train loss:5.981662 +step:291 train loss:5.907031 +step:292 train loss:5.962171 +step:293 train loss:5.893057 +step:294 train loss:6.000897 +step:295 train loss:5.896628 +step:296 train loss:5.961669 +step:297 train loss:5.994625 +step:298 train loss:5.882879 +step:299 train loss:5.959548 +step:300 train loss:5.871684 +step:301 train loss:5.910428 +step:302 train loss:5.883589 +step:303 train loss:5.900464 +step:304 train loss:5.929874 +step:305 train loss:5.850090 +step:306 train loss:5.872800 +step:307 train loss:5.906543 +step:308 train loss:5.811713 +step:309 train loss:5.951141 +step:310 train loss:5.913425 +step:311 train loss:5.892672 +step:312 train loss:5.881144 +step:313 train loss:5.905288 +step:314 train loss:5.883495 +step:315 train loss:5.850505 +step:316 train loss:5.844098 +step:317 train loss:5.809805 +step:318 train loss:5.812398 +step:319 train loss:5.884298 +step:320 train loss:5.804958 +step:321 train loss:5.862946 +step:322 train loss:5.853942 +step:323 train loss:5.921990 +step:324 train loss:5.862423 +step:325 train loss:5.884600 +step:326 train loss:5.894038 +step:327 train loss:5.862972 +step:328 train loss:5.836709 +step:329 train loss:5.854135 +step:330 train loss:5.786363 +step:331 train loss:5.813599 +step:332 train loss:5.801610 +step:333 train loss:5.738280 +step:334 train loss:5.836211 +step:335 train loss:5.875575 +step:336 train loss:5.997015 +step:337 train loss:5.899853 +step:338 train loss:5.811625 +step:339 train loss:5.767604 +step:340 train loss:5.777786 +step:341 train loss:5.772289 +step:342 train loss:5.838051 +step:343 train loss:5.813386 +step:344 train loss:5.766735 +step:345 train loss:5.741741 +step:346 train loss:5.787808 +step:347 train loss:5.726124 +step:348 train loss:5.734906 +step:349 train loss:5.674009 +step:350 train loss:5.711970 +step:351 train loss:5.780691 +step:352 train loss:5.739470 +step:353 train loss:5.764291 +step:354 train loss:5.720796 +step:355 train loss:5.770038 +step:356 train loss:5.739818 +step:357 train loss:5.811916 +step:358 train loss:5.839615 +step:359 train loss:5.679845 +step:360 train loss:5.793852 +step:361 train loss:5.775720 +step:362 train loss:5.758906 +step:363 train loss:5.713901 +step:364 train loss:5.833417 +step:365 train loss:5.762146 +step:366 train loss:5.736120 +step:367 train loss:5.757363 +step:368 train loss:5.733321 +step:369 train loss:5.708396 +step:370 train loss:5.769059 +step:371 train loss:5.701379 +step:372 train loss:5.772073 +step:373 train loss:5.722718 +step:374 train loss:5.708589 +step:375 train loss:5.741940 +step:376 train loss:5.723173 +step:377 train loss:5.614687 +step:378 train loss:5.701839 +step:379 train loss:5.757628 +step:380 train loss:5.682991 +step:381 train loss:5.746643 +step:382 train loss:5.729675 +step:383 train loss:5.711365 +step:384 train loss:5.685683 +step:385 train loss:5.680695 +step:386 train loss:5.713582 +step:387 train loss:5.709635 +step:388 train loss:5.682219 +step:389 train loss:5.697582 +step:390 train loss:5.680685 +step:391 train loss:5.682762 +step:392 train loss:5.677380 +step:393 train loss:5.669175 
+step:394 train loss:5.717703 +step:395 train loss:5.653789 +step:396 train loss:5.602952 +step:397 train loss:5.687992 +step:398 train loss:5.674887 +step:399 train loss:5.683991 +step:400 train loss:5.645498 +step:401 train loss:5.684655 +step:402 train loss:5.662866 +step:403 train loss:5.661287 +step:404 train loss:5.641544 +step:405 train loss:5.645406 +step:406 train loss:5.678968 +step:407 train loss:5.663589 +step:408 train loss:5.727178 +step:409 train loss:5.660334 +step:410 train loss:5.629224 +step:411 train loss:5.624513 +step:412 train loss:5.707109 +step:413 train loss:5.593202 +step:414 train loss:5.676672 +step:415 train loss:5.635676 +step:416 train loss:5.652994 +step:417 train loss:5.670106 +step:418 train loss:5.615672 +step:419 train loss:5.608921 +step:420 train loss:5.612298 +step:421 train loss:5.597491 +step:422 train loss:5.596928 +step:423 train loss:5.601892 +step:424 train loss:5.575609 +step:425 train loss:5.634246 +step:426 train loss:5.632973 +step:427 train loss:5.568680 +step:428 train loss:5.632288 +step:429 train loss:5.544153 +step:430 train loss:5.581417 +step:431 train loss:5.612739 +step:432 train loss:5.634912 +step:433 train loss:5.623765 +step:434 train loss:5.579500 +step:435 train loss:5.633638 +step:436 train loss:5.656065 +step:437 train loss:5.617447 +step:438 train loss:5.570630 +step:439 train loss:5.573080 +step:440 train loss:5.613216 +step:441 train loss:5.564670 +step:442 train loss:5.560471 +step:443 train loss:5.575841 +step:444 train loss:5.604869 +step:445 train loss:5.610636 +step:446 train loss:5.550403 +step:447 train loss:5.569002 +step:448 train loss:5.623719 +step:449 train loss:5.580368 +step:450 train loss:5.567215 +step:451 train loss:5.560559 +step:452 train loss:5.622245 +step:453 train loss:5.548179 +step:454 train loss:5.511929 +step:455 train loss:5.601069 +step:456 train loss:5.570458 +step:457 train loss:5.544863 +step:458 train loss:5.566459 +step:459 train loss:5.512668 +step:460 train loss:5.613383 +step:461 train loss:5.568202 +step:462 train loss:5.475419 +step:463 train loss:5.535856 +step:464 train loss:5.589581 +step:465 train loss:5.546945 +step:466 train loss:5.569265 +step:467 train loss:5.524724 +step:468 train loss:5.580719 +step:469 train loss:5.549489 +step:470 train loss:5.510756 +step:471 train loss:5.595496 +step:472 train loss:5.492426 +step:473 train loss:5.568437 +step:474 train loss:5.548503 +step:475 train loss:5.559548 +step:476 train loss:5.538486 +step:477 train loss:5.479250 +step:478 train loss:5.499723 +step:479 train loss:5.496137 +step:480 train loss:5.520062 +step:481 train loss:5.518063 +step:482 train loss:5.467823 +step:483 train loss:5.528840 +step:484 train loss:5.483767 +step:485 train loss:5.468928 +step:486 train loss:5.528485 +step:487 train loss:5.498985 +step:488 train loss:5.496615 +step:489 train loss:5.493024 +step:490 train loss:5.475606 +step:491 train loss:5.486436 +step:492 train loss:5.490199 +step:493 train loss:5.491030 +step:494 train loss:5.504107 +step:495 train loss:5.447834 +step:496 train loss:5.547110 +step:497 train loss:5.437623 +step:498 train loss:5.531814 +step:499 train loss:5.507328 +step:500 validation loss:5.486014 total_sharp:2.3023e-02 L1_sharp:1.6883e+00 L2_sharp:6.3595e-01 L3_sharp:4.2042e-01 L4_sharp:2.4993e-01 L5_sharp:1.8225e-01 L6_sharp:1.3833e-01 L7_sharp:1.0665e-01 L8_sharp:1.0757e-01 L9_sharp:8.5892e-02 L10_sharp:7.0032e-02 L11_sharp:5.6501e-02 L12_sharp:5.0559e-02 total_fnorm:8.7868e-01 total_l1_linf:4.8541e+03 total_spectral:8.7868e-01 
L1_fnorm:2.2945e-02 L2_fnorm:2.2353e-02 L3_fnorm:2.1812e-02 L4_fnorm:2.2775e-02 L5_fnorm:2.3412e-02 L6_fnorm:2.3700e-02 L7_fnorm:2.4062e-02 L8_fnorm:2.3261e-02 L9_fnorm:2.2936e-02 L10_fnorm:2.2949e-02 L11_fnorm:2.2146e-02 L12_fnorm:2.2327e-02 L1_l1linf:1.7840e-01 L2_l1linf:1.7950e-01 L3_l1linf:1.8064e-01 L4_l1linf:1.7426e-01 L5_l1linf:2.0730e-01 L6_l1linf:2.1396e-01 L7_l1linf:2.4948e-01 L8_l1linf:1.9346e-01 L9_l1linf:1.7917e-01 L10_l1linf:1.7988e-01 L11_l1linf:1.4964e-01 L12_l1linf:1.5290e-01 L1_spectral:4.0005e-03 L2_spectral:4.4234e-03 L3_spectral:4.8253e-03 L4_spectral:7.5602e-03 L5_spectral:9.0659e-03 L6_spectral:9.3684e-03 L7_spectral:1.0316e-02 L8_spectral:8.4328e-03 L9_spectral:7.7628e-03 L10_spectral:7.6770e-03 L11_spectral:4.9641e-03 L12_spectral:5.1561e-03 ip_v_neg_g:8.6317e-03 cos_v_neg_g:1.1439e-03 v_norm:8.7868e-01 g_norm:8.5880e+00 hv_norm:5.3085e+00 cos_v_hv:3.8109e-03 hg_norm:1.2318e+04 cos_g_hg:6.8522e-01 v_par:5.4154e-05 v_perp:8.7868e-01 L1_cos_v_neg_g:1.6215e-02 L1_v_norm:2.2945e-02 L2_cos_v_neg_g:1.5228e-02 L2_v_norm:2.2353e-02 L3_cos_v_neg_g:1.5768e-02 L3_v_norm:2.1812e-02 L4_cos_v_neg_g:1.3245e-02 L4_v_norm:2.2775e-02 L5_cos_v_neg_g:1.2372e-02 L5_v_norm:2.3412e-02 L6_cos_v_neg_g:1.1956e-02 L6_v_norm:2.3700e-02 L7_cos_v_neg_g:1.1211e-02 L7_v_norm:2.4062e-02 L8_cos_v_neg_g:1.1050e-02 L8_v_norm:2.3261e-02 L9_cos_v_neg_g:1.1074e-02 L9_v_norm:2.2936e-02 L10_cos_v_neg_g:9.8426e-03 L10_v_norm:2.2949e-02 L11_cos_v_neg_g:8.6825e-03 L11_v_norm:2.2146e-02 L12_cos_v_neg_g:7.6227e-03 L12_v_norm:2.2327e-02 +step:500 train loss:5.515120 +step:501 train loss:5.464902 +step:502 train loss:5.499174 +step:503 train loss:5.428135 +step:504 train loss:5.521428 +step:505 train loss:5.452517 +step:506 train loss:5.455106 +step:507 train loss:5.463804 +step:508 train loss:5.489970 +step:509 train loss:5.494362 +step:510 train loss:5.428480 +step:511 train loss:5.419279 +step:512 train loss:5.420152 +step:513 train loss:5.451924 +step:514 train loss:5.497528 +step:515 train loss:5.453403 +step:516 train loss:5.523374 +step:517 train loss:5.452750 +step:518 train loss:5.438495 +step:519 train loss:5.491759 +step:520 train loss:5.442068 +step:521 train loss:5.430947 +step:522 train loss:5.455858 +step:523 train loss:5.449640 +step:524 train loss:5.406596 +step:525 train loss:5.409458 +step:526 train loss:5.422556 +step:527 train loss:5.421213 +step:528 train loss:5.424097 +step:529 train loss:5.447238 +step:530 train loss:5.396040 +step:531 train loss:5.437580 +step:532 train loss:5.406708 +step:533 train loss:5.365296 +step:534 train loss:5.442627 +step:535 train loss:5.424433 +step:536 train loss:5.490941 +step:537 train loss:5.379785 +step:538 train loss:5.349187 +step:539 train loss:5.434563 +step:540 train loss:5.476488 +step:541 train loss:5.378809 +step:542 train loss:5.406199 +step:543 train loss:5.425973 +step:544 train loss:5.422701 +step:545 train loss:5.401631 +step:546 train loss:5.364675 +step:547 train loss:5.385184 +step:548 train loss:5.347383 +step:549 train loss:5.400047 +step:550 train loss:5.374421 +step:551 train loss:5.381855 +step:552 train loss:5.472974 +step:553 train loss:5.438520 +step:554 train loss:5.381993 +step:555 train loss:5.447443 +step:556 train loss:5.395258 +step:557 train loss:5.367885 +step:558 train loss:5.348173 +step:559 train loss:5.398091 +step:560 train loss:5.445027 +step:561 train loss:5.332231 +step:562 train loss:5.317428 +step:563 train loss:5.394511 +step:564 train loss:5.362767 +step:565 train loss:5.382337 +step:566 train loss:5.378155 
+step:567 train loss:5.374923 +step:568 train loss:5.397583 +step:569 train loss:5.382043 +step:570 train loss:5.319849 +step:571 train loss:5.345800 +step:572 train loss:5.343832 +step:573 train loss:5.340374 +step:574 train loss:5.377396 +step:575 train loss:5.340138 +step:576 train loss:5.350128 +step:577 train loss:5.371696 +step:578 train loss:5.349856 +step:579 train loss:5.394511 +step:580 train loss:5.329596 +step:581 train loss:5.384657 +step:582 train loss:5.347878 +step:583 train loss:5.360913 +step:584 train loss:5.345037 +step:585 train loss:5.335515 +step:586 train loss:5.326087 +step:587 train loss:5.396906 +step:588 train loss:5.320796 +step:589 train loss:5.374247 +step:590 train loss:5.377645 +step:591 train loss:5.320601 +step:592 train loss:5.297757 +step:593 train loss:5.328372 +step:594 train loss:5.300580 +step:595 train loss:5.342526 +step:596 train loss:5.315557 +step:597 train loss:5.347631 +step:598 train loss:5.320886 +step:599 train loss:5.321556 +step:600 train loss:5.301130 +step:601 train loss:5.274878 +step:602 train loss:5.284756 +step:603 train loss:5.339789 +step:604 train loss:5.313535 +step:605 train loss:5.346140 +step:606 train loss:5.295278 +step:607 train loss:5.288331 +step:608 train loss:5.282929 +step:609 train loss:5.260303 +step:610 train loss:5.280859 +step:611 train loss:5.284160 +step:612 train loss:5.328105 +step:613 train loss:5.245900 +step:614 train loss:5.291087 +step:615 train loss:5.341628 +step:616 train loss:5.263482 +step:617 train loss:5.297125 +step:618 train loss:5.267756 +step:619 train loss:5.301217 +step:620 train loss:5.320027 +step:621 train loss:5.256067 +step:622 train loss:5.310332 +step:623 train loss:5.322647 +step:624 train loss:5.308738 +step:625 train loss:5.305723 +step:626 train loss:5.309605 +step:627 train loss:5.274732 +step:628 train loss:5.284942 +step:629 train loss:5.228903 +step:630 train loss:5.254957 +step:631 train loss:5.248785 +step:632 train loss:5.263102 +step:633 train loss:5.284943 +step:634 train loss:5.280683 +step:635 train loss:5.220467 +step:636 train loss:5.309331 +step:637 train loss:5.223325 +step:638 train loss:5.159319 +step:639 train loss:5.285398 +step:640 train loss:5.232812 +step:641 train loss:5.259900 +step:642 train loss:5.298277 +step:643 train loss:5.212463 +step:644 train loss:5.294077 +step:645 train loss:5.250688 +step:646 train loss:5.243366 +step:647 train loss:5.257941 +step:648 train loss:5.350239 +step:649 train loss:5.251699 +step:650 train loss:5.314044 +step:651 train loss:5.201642 +step:652 train loss:5.228614 +step:653 train loss:5.229150 +step:654 train loss:5.221777 +step:655 train loss:5.258493 +step:656 train loss:5.208817 +step:657 train loss:5.261229 +step:658 train loss:5.188377 +step:659 train loss:5.268253 +step:660 train loss:5.234446 +step:661 train loss:5.272169 +step:662 train loss:5.265681 +step:663 train loss:5.265289 +step:664 train loss:5.176969 +step:665 train loss:5.193365 +step:666 train loss:5.193920 +step:667 train loss:5.250420 +step:668 train loss:5.224458 +step:669 train loss:5.209648 +step:670 train loss:5.223824 +step:671 train loss:5.198032 +step:672 train loss:5.169402 +step:673 train loss:5.258640 +step:674 train loss:5.260842 +step:675 train loss:5.164092 +step:676 train loss:5.247519 +step:677 train loss:5.185142 +step:678 train loss:5.176739 +step:679 train loss:5.219419 +step:680 train loss:5.176793 +step:681 train loss:5.228249 +step:682 train loss:5.145365 +step:683 train loss:5.206566 +step:684 train loss:5.240175 +step:685 
train loss:5.175136 +step:686 train loss:5.282818 +step:687 train loss:5.211001 +step:688 train loss:5.144383 +step:689 train loss:5.192041 +step:690 train loss:5.166395 +step:691 train loss:5.176399 +step:692 train loss:5.192518 +step:693 train loss:5.188744 +step:694 train loss:5.173505 +step:695 train loss:5.135778 +step:696 train loss:5.108634 +step:697 train loss:5.215345 +step:698 train loss:5.152416 +step:699 train loss:5.148560 +step:700 train loss:5.229794 +step:701 train loss:5.137061 +step:702 train loss:5.205002 +step:703 train loss:5.139877 +step:704 train loss:5.101964 +step:705 train loss:5.140410 +step:706 train loss:5.046469 +step:707 train loss:5.104668 +step:708 train loss:5.194818 +step:709 train loss:5.160943 +step:710 train loss:5.119175 +step:711 train loss:5.188935 +step:712 train loss:5.142684 +step:713 train loss:5.102816 +step:714 train loss:5.185700 +step:715 train loss:5.103114 +step:716 train loss:5.228694 +step:717 train loss:5.114582 +step:718 train loss:5.176330 +step:719 train loss:5.130004 +step:720 train loss:5.111850 +step:721 train loss:5.125285 +step:722 train loss:5.142690 +step:723 train loss:5.182835 +step:724 train loss:5.152414 +step:725 train loss:5.136756 +step:726 train loss:5.119654 +step:727 train loss:5.146945 +step:728 train loss:5.141286 +step:729 train loss:5.064143 +step:730 train loss:5.158881 +step:731 train loss:5.180663 +step:732 train loss:5.159000 +step:733 train loss:5.134844 +step:734 train loss:5.126112 +step:735 train loss:5.194641 +step:736 train loss:5.145073 +step:737 train loss:5.142710 +step:738 train loss:5.163569 +step:739 train loss:5.117912 +step:740 train loss:5.131006 +step:741 train loss:5.192879 +step:742 train loss:5.105588 +step:743 train loss:5.091501 +step:744 train loss:5.145185 +step:745 train loss:5.086875 +step:746 train loss:5.083509 +step:747 train loss:5.113972 +step:748 train loss:5.079102 +step:749 train loss:5.106984 +step:750 validation loss:5.088912 +step:750 train loss:5.067680 +step:751 train loss:5.079494 +step:752 train loss:5.030542 +step:753 train loss:5.085689 +step:754 train loss:5.094675 +step:755 train loss:5.132880 +step:756 train loss:5.118469 +step:757 train loss:5.211729 +step:758 train loss:5.089532 +step:759 train loss:5.096170 +step:760 train loss:5.065161 +step:761 train loss:5.104937 +step:762 train loss:5.086042 +step:763 train loss:5.076351 +step:764 train loss:5.052977 +step:765 train loss:5.059137 +step:766 train loss:5.135285 +step:767 train loss:5.223762 +step:768 train loss:5.066378 +step:769 train loss:5.101766 +step:770 train loss:5.120878 +step:771 train loss:5.172019 +step:772 train loss:5.109576 +step:773 train loss:5.056027 +step:774 train loss:5.103031 +step:775 train loss:5.077925 +step:776 train loss:5.089233 +step:777 train loss:5.052014 +step:778 train loss:5.060132 +step:779 train loss:5.035071 +step:780 train loss:5.092088 +step:781 train loss:5.024894 +step:782 train loss:5.049457 +step:783 train loss:5.033615 +step:784 train loss:5.044374 +step:785 train loss:5.006170 +step:786 train loss:5.040774 +step:787 train loss:4.996360 +step:788 train loss:5.054239 +step:789 train loss:5.056309 +step:790 train loss:5.005644 +step:791 train loss:5.089715 +step:792 train loss:5.102493 +step:793 train loss:5.063741 +step:794 train loss:5.060420 +step:795 train loss:5.019919 +step:796 train loss:5.251751 +step:797 train loss:5.050251 +step:798 train loss:5.038939 +step:799 train loss:5.037716 +step:800 train loss:5.113738 +step:801 train loss:5.043753 +step:802 train 
loss:5.144437 +step:803 train loss:5.058618 +step:804 train loss:5.001003 +step:805 train loss:5.061463 +step:806 train loss:4.968914 +step:807 train loss:5.028934 +step:808 train loss:5.038471 +step:809 train loss:5.004542 +step:810 train loss:4.978859 +step:811 train loss:5.077056 +step:812 train loss:5.036779 +step:813 train loss:5.042594 +step:814 train loss:5.095709 +step:815 train loss:5.060119 +step:816 train loss:4.993980 +step:817 train loss:5.022778 +step:818 train loss:4.998801 +step:819 train loss:4.993351 +step:820 train loss:5.007503 +step:821 train loss:4.952059 +step:822 train loss:4.948261 +step:823 train loss:5.027889 +step:824 train loss:4.941556 +step:825 train loss:4.928466 +step:826 train loss:4.974422 +step:827 train loss:4.919996 +step:828 train loss:4.984548 +step:829 train loss:4.985350 +step:830 train loss:4.994318 +step:831 train loss:5.011336 +step:832 train loss:5.063588 +step:833 train loss:5.021809 +step:834 train loss:5.006918 +step:835 train loss:4.981190 +step:836 train loss:4.970581 +step:837 train loss:4.939776 +step:838 train loss:4.950400 +step:839 train loss:4.949722 +step:840 train loss:4.986082 +step:841 train loss:4.959949 +step:842 train loss:4.972362 +step:843 train loss:4.967377 +step:844 train loss:4.948615 +step:845 train loss:4.936622 +step:846 train loss:5.003810 +step:847 train loss:4.972507 +step:848 train loss:4.938545 +step:849 train loss:4.991305 +step:850 train loss:4.988869 +step:851 train loss:4.960386 +step:852 train loss:5.009833 +step:853 train loss:4.914479 +step:854 train loss:4.965318 +step:855 train loss:4.946701 +step:856 train loss:4.903700 +step:857 train loss:4.948923 +step:858 train loss:4.979462 +step:859 train loss:4.948623 +step:860 train loss:4.947095 +step:861 train loss:4.989956 +step:862 train loss:4.941568 +step:863 train loss:4.957645 +step:864 train loss:4.933704 +step:865 train loss:4.961010 +step:866 train loss:4.962379 +step:867 train loss:5.033309 +step:868 train loss:4.933004 +step:869 train loss:4.946924 +step:870 train loss:4.909838 +step:871 train loss:4.916762 +step:872 train loss:4.920425 +step:873 train loss:4.922965 +step:874 train loss:4.925664 +step:875 train loss:4.835019 +step:876 train loss:4.957243 +step:877 train loss:4.850942 +step:878 train loss:4.960468 +step:879 train loss:4.904939 +step:880 train loss:4.973439 +step:881 train loss:4.932489 +step:882 train loss:4.898701 +step:883 train loss:4.920778 +step:884 train loss:4.931704 +step:885 train loss:4.887321 +step:886 train loss:4.864223 +step:887 train loss:4.903595 +step:888 train loss:4.998461 +step:889 train loss:4.932853 +step:890 train loss:4.889206 +step:891 train loss:4.849154 +step:892 train loss:4.814942 +step:893 train loss:4.895290 +step:894 train loss:4.873490 +step:895 train loss:4.863466 +step:896 train loss:4.939806 +step:897 train loss:4.873122 +step:898 train loss:4.884160 +step:899 train loss:4.887963 +step:900 train loss:4.943261 +step:901 train loss:4.854372 +step:902 train loss:4.897046 +step:903 train loss:4.960833 +step:904 train loss:4.978137 +step:905 train loss:4.869991 +step:906 train loss:4.882933 +step:907 train loss:4.884127 +step:908 train loss:4.908264 +step:909 train loss:4.868270 +step:910 train loss:4.890257 +step:911 train loss:5.008717 +step:912 train loss:4.834321 +step:913 train loss:4.881123 +step:914 train loss:4.867873 +step:915 train loss:4.873475 +step:916 train loss:4.950249 +step:917 train loss:4.879741 +step:918 train loss:4.946760 +step:919 train loss:5.018569 +step:920 train loss:4.807218 
+step:921 train loss:4.887489 +step:922 train loss:4.865897 +step:923 train loss:4.833348 +step:924 train loss:4.846091 +step:925 train loss:4.792702 +step:926 train loss:4.887023 +step:927 train loss:4.824148 +step:928 train loss:4.870120 +step:929 train loss:4.858540 +step:930 train loss:4.860363 +step:931 train loss:4.900378 +step:932 train loss:4.862271 +step:933 train loss:4.860460 +step:934 train loss:4.905416 +step:935 train loss:4.884167 +step:936 train loss:4.879413 +step:937 train loss:4.878789 +step:938 train loss:4.892112 +step:939 train loss:4.760146 +step:940 train loss:4.852571 +step:941 train loss:4.796729 +step:942 train loss:4.776054 +step:943 train loss:4.861497 +step:944 train loss:4.816465 +step:945 train loss:4.845798 +step:946 train loss:4.878750 +step:947 train loss:4.977460 +step:948 train loss:4.802748 +step:949 train loss:4.858556 +step:950 train loss:4.809588 +step:951 train loss:4.821597 +step:952 train loss:4.885684 +step:953 train loss:4.835635 +step:954 train loss:4.846850 +step:955 train loss:4.781991 +step:956 train loss:4.797352 +step:957 train loss:4.804968 +step:958 train loss:4.872922 +step:959 train loss:4.819162 +step:960 train loss:4.891383 +step:961 train loss:4.876000 +step:962 train loss:4.812667 +step:963 train loss:4.795002 +step:964 train loss:4.839099 +step:965 train loss:4.760829 +step:966 train loss:4.781249 +step:967 train loss:4.825670 +step:968 train loss:4.815334 +step:969 train loss:4.766695 +step:970 train loss:4.842966 +step:971 train loss:4.799690 +step:972 train loss:4.752763 +step:973 train loss:4.821552 +step:974 train loss:4.773530 +step:975 train loss:4.860886 +step:976 train loss:4.801862 +step:977 train loss:4.798653 +step:978 train loss:4.819761 +step:979 train loss:4.786428 +step:980 train loss:4.789491 +step:981 train loss:4.792079 +step:982 train loss:4.772369 +step:983 train loss:4.778430 +step:984 train loss:4.824072 +step:985 train loss:4.803084 +step:986 train loss:4.784952 +step:987 train loss:4.831448 +step:988 train loss:4.818472 +step:989 train loss:4.779059 +step:990 train loss:4.767663 +step:991 train loss:4.720283 +step:992 train loss:4.756966 +step:993 train loss:4.784591 +step:994 train loss:4.713325 +step:995 train loss:4.721501 +step:996 train loss:4.781347 +step:997 train loss:4.742424 +step:998 train loss:4.740778 +step:999 train loss:4.766528 +step:1000 validation loss:4.744947 total_sharp:2.0200e-02 L1_sharp:1.6954e+00 L2_sharp:8.9238e-01 L3_sharp:7.1128e-01 L4_sharp:5.6087e-01 L5_sharp:4.2976e-01 L6_sharp:3.0012e-01 L7_sharp:1.8076e-01 L8_sharp:1.2004e-01 L9_sharp:9.4906e-02 L10_sharp:7.2747e-02 L11_sharp:5.7411e-02 L12_sharp:4.0791e-02 total_fnorm:1.3394e+00 total_l1_linf:7.3725e+03 total_spectral:1.3394e+00 L1_fnorm:3.1314e-02 L2_fnorm:2.8965e-02 L3_fnorm:2.8254e-02 L4_fnorm:2.9155e-02 L5_fnorm:2.9661e-02 L6_fnorm:3.0421e-02 L7_fnorm:3.1011e-02 L8_fnorm:3.1256e-02 L9_fnorm:3.1679e-02 L10_fnorm:3.1817e-02 L11_fnorm:3.1602e-02 L12_fnorm:3.2088e-02 L1_l1linf:1.7729e-01 L2_l1linf:2.0071e-01 L3_l1linf:2.0670e-01 L4_l1linf:2.1469e-01 L5_l1linf:2.1965e-01 L6_l1linf:2.2540e-01 L7_l1linf:2.2414e-01 L8_l1linf:2.2739e-01 L9_l1linf:2.2730e-01 L10_l1linf:2.2525e-01 L11_l1linf:2.2077e-01 L12_l1linf:2.0763e-01 L1_spectral:4.0566e-03 L2_spectral:4.5844e-03 L3_spectral:4.7526e-03 L4_spectral:4.9003e-03 L5_spectral:4.9832e-03 L6_spectral:5.4750e-03 L7_spectral:6.4277e-03 L8_spectral:6.8135e-03 L9_spectral:6.6665e-03 L10_spectral:6.8429e-03 L11_spectral:6.1567e-03 L12_spectral:8.2255e-03 ip_v_neg_g:1.8233e-02 
cos_v_neg_g:1.2659e-03 v_norm:1.3394e+00 g_norm:1.0753e+01 hv_norm:8.1545e+00 cos_v_hv:3.3178e-03 hg_norm:1.6663e+04 cos_g_hg:6.9233e-01 v_par:8.0293e-05 v_perp:1.3394e+00 L1_cos_v_neg_g:2.3499e-02 L1_v_norm:3.1314e-02 L2_cos_v_neg_g:2.0568e-02 L2_v_norm:2.8965e-02 L3_cos_v_neg_g:2.0160e-02 L3_v_norm:2.8254e-02 L4_cos_v_neg_g:1.8500e-02 L4_v_norm:2.9155e-02 L5_cos_v_neg_g:1.8103e-02 L5_v_norm:2.9661e-02 L6_cos_v_neg_g:1.7169e-02 L6_v_norm:3.0421e-02 L7_cos_v_neg_g:1.3710e-02 L7_v_norm:3.1011e-02 L8_cos_v_neg_g:1.1085e-02 L8_v_norm:3.1256e-02 L9_cos_v_neg_g:1.0639e-02 L9_v_norm:3.1679e-02 L10_cos_v_neg_g:9.4237e-03 L10_v_norm:3.1817e-02 L11_cos_v_neg_g:7.9774e-03 L11_v_norm:3.1602e-02 L12_cos_v_neg_g:5.9785e-03 L12_v_norm:3.2088e-02 +step:1000 train loss:4.782729 +step:1001 train loss:4.779561 +step:1002 train loss:4.786784 +step:1003 train loss:4.769096 +step:1004 train loss:4.746501 +step:1005 train loss:4.752676 +step:1006 train loss:4.823177 +step:1007 train loss:4.807422 +step:1008 train loss:4.738091 +step:1009 train loss:4.795284 +step:1010 train loss:4.779908 +step:1011 train loss:4.791256 +step:1012 train loss:4.739243 +step:1013 train loss:4.709194 +step:1014 train loss:4.713921 +step:1015 train loss:4.745907 +step:1016 train loss:4.748470 +step:1017 train loss:4.706727 +step:1018 train loss:4.754086 +step:1019 train loss:4.752945 +step:1020 train loss:4.723352 +step:1021 train loss:4.790658 +step:1022 train loss:4.713920 +step:1023 train loss:4.716290 +step:1024 train loss:4.794858 +step:1025 train loss:4.742326 +step:1026 train loss:4.705258 +step:1027 train loss:4.745568 +step:1028 train loss:4.746192 +step:1029 train loss:4.699187 +step:1030 train loss:4.754663 +step:1031 train loss:4.768018 +step:1032 train loss:4.714465 +step:1033 train loss:4.690492 +step:1034 train loss:4.752758 +step:1035 train loss:4.761146 +step:1036 train loss:4.666296 +step:1037 train loss:4.730310 +step:1038 train loss:4.749453 +step:1039 train loss:4.872340 +step:1040 train loss:4.704083 +step:1041 train loss:4.712787 +step:1042 train loss:4.746624 +step:1043 train loss:4.722106 +step:1044 train loss:4.717686 +step:1045 train loss:4.731087 +step:1046 train loss:4.683805 +step:1047 train loss:4.694884 +step:1048 train loss:4.704788 +step:1049 train loss:4.753256 +step:1050 train loss:4.715629 +step:1051 train loss:4.687561 +step:1052 train loss:4.777302 +step:1053 train loss:4.686286 +step:1054 train loss:4.686990 +step:1055 train loss:4.749718 +step:1056 train loss:4.691823 +step:1057 train loss:4.591530 +step:1058 train loss:4.697785 +step:1059 train loss:4.695586 +step:1060 train loss:4.675845 +step:1061 train loss:4.751754 +step:1062 train loss:4.690426 +step:1063 train loss:4.694293 +step:1064 train loss:4.673053 +step:1065 train loss:4.696389 +step:1066 train loss:4.660717 +step:1067 train loss:4.691260 +step:1068 train loss:4.670934 +step:1069 train loss:4.678832 +step:1070 train loss:4.669830 +step:1071 train loss:4.702375 +step:1072 train loss:4.703037 +step:1073 train loss:4.620229 +step:1074 train loss:4.651034 +step:1075 train loss:4.672374 +step:1076 train loss:4.731952 +step:1077 train loss:4.664669 +step:1078 train loss:4.692401 +step:1079 train loss:4.754692 +step:1080 train loss:4.633839 +step:1081 train loss:4.687489 +step:1082 train loss:4.691610 +step:1083 train loss:4.650620 +step:1084 train loss:4.618744 +step:1085 train loss:4.673366 +step:1086 train loss:4.698185 +step:1087 train loss:4.648952 +step:1088 train loss:4.655047 +step:1089 train loss:4.660530 +step:1090 train 
loss:4.607187 +step:1091 train loss:4.592589 +step:1092 train loss:4.734132 +step:1093 train loss:4.619574 +step:1094 train loss:4.654340 +step:1095 train loss:4.695491 +step:1096 train loss:4.634516 +step:1097 train loss:4.625624 +step:1098 train loss:4.604753 +step:1099 train loss:4.645819 +step:1100 train loss:4.671995 +step:1101 train loss:4.689508 +step:1102 train loss:4.689344 +step:1103 train loss:4.640387 +step:1104 train loss:4.669446 +step:1105 train loss:4.709600 +step:1106 train loss:4.639773 +step:1107 train loss:4.750159 +step:1108 train loss:4.708533 +step:1109 train loss:4.658828 +step:1110 train loss:4.623623 +step:1111 train loss:4.683705 +step:1112 train loss:4.618013 +step:1113 train loss:4.571123 +step:1114 train loss:4.563946 +step:1115 train loss:4.606650 +step:1116 train loss:4.685431 +step:1117 train loss:4.675902 +step:1118 train loss:4.730424 +step:1119 train loss:4.683766 +step:1120 train loss:4.654143 +step:1121 train loss:4.635113 +step:1122 train loss:4.622909 +step:1123 train loss:4.714663 +step:1124 train loss:4.600377 +step:1125 train loss:4.635417 +step:1126 train loss:4.582108 +step:1127 train loss:4.606611 +step:1128 train loss:4.608830 +step:1129 train loss:4.661420 +step:1130 train loss:4.574042 +step:1131 train loss:4.668718 +step:1132 train loss:4.615955 +step:1133 train loss:4.616753 +step:1134 train loss:4.604116 +step:1135 train loss:4.642588 +step:1136 train loss:4.655264 +step:1137 train loss:4.571994 +step:1138 train loss:4.640109 +step:1139 train loss:4.613605 +step:1140 train loss:4.694173 +step:1141 train loss:4.632359 +step:1142 train loss:4.594110 +step:1143 train loss:4.645321 +step:1144 train loss:4.681787 +step:1145 train loss:4.634284 +step:1146 train loss:4.572051 +step:1147 train loss:4.588215 +step:1148 train loss:4.599182 +step:1149 train loss:4.648911 +step:1150 train loss:4.669852 +step:1151 train loss:4.672716 +step:1152 train loss:4.577791 +step:1153 train loss:4.582253 +step:1154 train loss:4.565241 +step:1155 train loss:4.665594 +step:1156 train loss:4.570879 +step:1157 train loss:4.592513 +step:1158 train loss:4.694195 +step:1159 train loss:4.630268 +step:1160 train loss:4.565680 +step:1161 train loss:4.664589 +step:1162 train loss:4.595148 +step:1163 train loss:4.584116 +step:1164 train loss:4.489580 +step:1165 train loss:4.631968 +step:1166 train loss:4.549624 +step:1167 train loss:4.545902 +step:1168 train loss:4.620957 +step:1169 train loss:4.584095 +step:1170 train loss:4.581671 +step:1171 train loss:4.603195 +step:1172 train loss:4.585263 +step:1173 train loss:4.602644 +step:1174 train loss:4.532550 +step:1175 train loss:4.560531 +step:1176 train loss:4.685429 +step:1177 train loss:4.517167 +step:1178 train loss:4.574461 +step:1179 train loss:4.551866 +step:1180 train loss:4.583763 +step:1181 train loss:4.554067 +step:1182 train loss:4.641462 +step:1183 train loss:4.610312 +step:1184 train loss:4.528476 +step:1185 train loss:4.584293 +step:1186 train loss:4.549030 +step:1187 train loss:4.506912 +step:1188 train loss:4.544993 +step:1189 train loss:4.518861 +step:1190 train loss:4.548970 +step:1191 train loss:4.600399 +step:1192 train loss:4.565750 +step:1193 train loss:4.580885 +step:1194 train loss:4.672749 +step:1195 train loss:4.651505 +step:1196 train loss:4.557474 +step:1197 train loss:4.567807 +step:1198 train loss:4.537685 +step:1199 train loss:4.550403 +step:1200 train loss:4.632113 +step:1201 train loss:4.574590 +step:1202 train loss:4.510911 +step:1203 train loss:4.511312 +step:1204 train loss:4.558969 
+step:1205 train loss:4.534605 +step:1206 train loss:4.515951 +step:1207 train loss:4.602554 +step:1208 train loss:4.568771 +step:1209 train loss:4.481173 +step:1210 train loss:4.577038 +step:1211 train loss:4.534589 +step:1212 train loss:4.542852 +step:1213 train loss:4.484147 +step:1214 train loss:4.591440 +step:1215 train loss:4.553973 +step:1216 train loss:4.546019 +step:1217 train loss:4.541440 +step:1218 train loss:4.555755 +step:1219 train loss:4.502971 +step:1220 train loss:4.514469 +step:1221 train loss:4.541247 +step:1222 train loss:4.601884 +step:1223 train loss:4.557262 +step:1224 train loss:4.521070 +step:1225 train loss:4.578506 +step:1226 train loss:4.514783 +step:1227 train loss:4.558204 +step:1228 train loss:4.515358 +step:1229 train loss:4.509046 +step:1230 train loss:4.497476 +step:1231 train loss:4.533952 +step:1232 train loss:4.493688 +step:1233 train loss:4.487446 +step:1234 train loss:4.581018 +step:1235 train loss:4.568475 +step:1236 train loss:4.457197 +step:1237 train loss:4.552104 +step:1238 train loss:4.507337 +step:1239 train loss:4.551123 +step:1240 train loss:4.440598 +step:1241 train loss:4.495778 +step:1242 train loss:4.508360 +step:1243 train loss:4.473697 +step:1244 train loss:4.578673 +step:1245 train loss:4.589490 +step:1246 train loss:4.521949 +step:1247 train loss:4.490268 +step:1248 train loss:4.521475 +step:1249 train loss:4.483459 +step:1250 validation loss:4.485438 +step:1250 train loss:4.480042 +step:1251 train loss:4.540815 +step:1252 train loss:4.493019 +step:1253 train loss:4.458723 +step:1254 train loss:4.479537 +step:1255 train loss:4.475554 +step:1256 train loss:4.515676 +step:1257 train loss:4.495842 +step:1258 train loss:4.543056 +step:1259 train loss:4.547230 +step:1260 train loss:4.430736 +step:1261 train loss:4.681939 +step:1262 train loss:4.514940 +step:1263 train loss:4.481484 +step:1264 train loss:4.476409 +step:1265 train loss:4.578998 +step:1266 train loss:4.482284 +step:1267 train loss:4.499681 +step:1268 train loss:4.511680 +step:1269 train loss:4.498703 +step:1270 train loss:4.432856 +step:1271 train loss:4.444408 +step:1272 train loss:4.459740 +step:1273 train loss:4.521975 +step:1274 train loss:4.484734 +step:1275 train loss:4.507421 +step:1276 train loss:4.508759 +step:1277 train loss:4.509355 +step:1278 train loss:4.443645 +step:1279 train loss:4.468760 +step:1280 train loss:4.476200 +step:1281 train loss:4.543497 +step:1282 train loss:4.439613 +step:1283 train loss:4.535117 +step:1284 train loss:4.484794 +step:1285 train loss:4.513460 +step:1286 train loss:4.420220 +step:1287 train loss:4.469188 +step:1288 train loss:4.496902 +step:1289 train loss:4.549102 +step:1290 train loss:4.484063 +step:1291 train loss:4.479853 +step:1292 train loss:4.464856 +step:1293 train loss:4.423647 +step:1294 train loss:4.483959 +step:1295 train loss:4.463995 +step:1296 train loss:4.516809 +step:1297 train loss:4.469217 +step:1298 train loss:4.490663 +step:1299 train loss:4.527306 +step:1300 train loss:4.443404 +step:1301 train loss:4.482529 +step:1302 train loss:4.444882 +step:1303 train loss:4.482295 +step:1304 train loss:4.514639 +step:1305 train loss:4.485291 +step:1306 train loss:4.484165 +step:1307 train loss:4.470612 +step:1308 train loss:4.421800 +step:1309 train loss:4.432506 +step:1310 train loss:4.405882 +step:1311 train loss:4.447723 +step:1312 train loss:4.493042 +step:1313 train loss:4.410143 +step:1314 train loss:4.414977 +step:1315 train loss:4.480455 +step:1316 train loss:4.430518 +step:1317 train loss:4.352906 +step:1318 
train loss:4.487082 +step:1319 train loss:4.525304 +step:1320 train loss:4.441613 +step:1321 train loss:4.400812 +step:1322 train loss:4.505905 +step:1323 train loss:4.464788 +step:1324 train loss:4.558193 +step:1325 train loss:4.448701 +step:1326 train loss:4.486050 +step:1327 train loss:4.487877 +step:1328 train loss:4.407172 +step:1329 train loss:4.431147 +step:1330 train loss:4.449454 +step:1331 train loss:4.292083 +step:1332 train loss:4.502284 +step:1333 train loss:4.436532 +step:1334 train loss:4.467488 +step:1335 train loss:4.494412 +step:1336 train loss:4.481600 +step:1337 train loss:4.452773 +step:1338 train loss:4.427765 +step:1339 train loss:4.519847 +step:1340 train loss:4.462564 +step:1341 train loss:4.452420 +step:1342 train loss:4.427756 +step:1343 train loss:4.404072 +step:1344 train loss:4.471019 +step:1345 train loss:4.434248 +step:1346 train loss:4.521086 +step:1347 train loss:4.440742 +step:1348 train loss:4.423544 +step:1349 train loss:4.365993 +step:1350 train loss:4.379140 +step:1351 train loss:4.454050 +step:1352 train loss:4.413923 +step:1353 train loss:4.394116 +step:1354 train loss:4.411974 +step:1355 train loss:4.487576 +step:1356 train loss:4.406510 +step:1357 train loss:4.412140 +step:1358 train loss:4.411273 +step:1359 train loss:4.415049 +step:1360 train loss:4.445492 +step:1361 train loss:4.554395 +step:1362 train loss:4.481745 +step:1363 train loss:4.365651 +step:1364 train loss:4.396992 +step:1365 train loss:4.384787 +step:1366 train loss:4.416731 +step:1367 train loss:4.347104 +step:1368 train loss:4.382843 +step:1369 train loss:4.408370 +step:1370 train loss:4.435524 +step:1371 train loss:4.390428 +step:1372 train loss:4.416169 +step:1373 train loss:4.442775 +step:1374 train loss:4.456589 +step:1375 train loss:4.434894 +step:1376 train loss:4.429605 +step:1377 train loss:4.457632 +step:1378 train loss:4.411095 +step:1379 train loss:4.387179 +step:1380 train loss:4.460712 +step:1381 train loss:4.409074 +step:1382 train loss:4.379399 +step:1383 train loss:4.366213 +step:1384 train loss:4.507835 +step:1385 train loss:4.344491 +step:1386 train loss:4.402597 +step:1387 train loss:4.409471 +step:1388 train loss:4.372815 +step:1389 train loss:4.368233 +step:1390 train loss:4.392616 +step:1391 train loss:4.423008 +step:1392 train loss:4.390870 +step:1393 train loss:4.459464 +step:1394 train loss:4.375409 +step:1395 train loss:4.420435 +step:1396 train loss:4.404090 +step:1397 train loss:4.416670 +step:1398 train loss:4.424227 +step:1399 train loss:4.390065 +step:1400 train loss:4.369398 +step:1401 train loss:4.368912 +step:1402 train loss:4.365794 +step:1403 train loss:4.336202 +step:1404 train loss:4.389183 +step:1405 train loss:4.352587 +step:1406 train loss:4.375879 +step:1407 train loss:4.372677 +step:1408 train loss:4.366194 +step:1409 train loss:4.346274 +step:1410 train loss:4.370409 +step:1411 train loss:4.390693 +step:1412 train loss:4.463321 +step:1413 train loss:4.375161 +step:1414 train loss:4.399604 +step:1415 train loss:4.355600 +step:1416 train loss:4.411300 +step:1417 train loss:4.381807 +step:1418 train loss:4.322997 +step:1419 train loss:4.311829 +step:1420 train loss:4.357289 +step:1421 train loss:4.400675 +step:1422 train loss:4.379615 +step:1423 train loss:4.471763 +step:1424 train loss:4.360514 +step:1425 train loss:4.332667 +step:1426 train loss:4.360911 +step:1427 train loss:4.361157 +step:1428 train loss:4.337611 +step:1429 train loss:4.356831 +step:1430 train loss:4.345422 +step:1431 train loss:4.378580 +step:1432 train loss:4.359050 
+step:1433 train loss:4.347731 +step:1434 train loss:4.313472 +step:1435 train loss:4.320523 +step:1436 train loss:4.375974 +step:1437 train loss:4.321710 +step:1438 train loss:4.327535 +step:1439 train loss:4.294909 +step:1440 train loss:4.334150 +step:1441 train loss:4.407515 +step:1442 train loss:4.372044 +step:1443 train loss:4.306231 +step:1444 train loss:4.324336 +step:1445 train loss:4.334899 +step:1446 train loss:4.339157 +step:1447 train loss:4.346647 +step:1448 train loss:4.319571 +step:1449 train loss:4.354728 +step:1450 train loss:4.366192 +step:1451 train loss:4.282675 +step:1452 train loss:4.342375 +step:1453 train loss:4.336907 +step:1454 train loss:4.334017 +step:1455 train loss:4.263096 +step:1456 train loss:4.351785 +step:1457 train loss:4.275981 +step:1458 train loss:4.420687 +step:1459 train loss:4.334806 +step:1460 train loss:4.305222 +step:1461 train loss:4.356681 +step:1462 train loss:4.355443 +step:1463 train loss:4.337486 +step:1464 train loss:4.302549 +step:1465 train loss:4.310848 +step:1466 train loss:4.268432 +step:1467 train loss:4.400427 +step:1468 train loss:4.286622 +step:1469 train loss:4.365343 +step:1470 train loss:4.296548 +step:1471 train loss:4.316741 +step:1472 train loss:4.298221 +step:1473 train loss:4.306866 +step:1474 train loss:4.252831 +step:1475 train loss:4.309330 +step:1476 train loss:4.383591 +step:1477 train loss:4.329459 +step:1478 train loss:4.270062 +step:1479 train loss:4.292857 +step:1480 train loss:4.297443 +step:1481 train loss:4.269216 +step:1482 train loss:4.329227 +step:1483 train loss:4.315011 +step:1484 train loss:4.357556 +step:1485 train loss:4.365232 +step:1486 train loss:4.310657 +step:1487 train loss:4.294101 +step:1488 train loss:4.302317 +step:1489 train loss:4.288398 +step:1490 train loss:4.346934 +step:1491 train loss:4.327858 +step:1492 train loss:4.336471 +step:1493 train loss:4.279606 +step:1494 train loss:4.311468 +step:1495 train loss:4.281535 +step:1496 train loss:4.261047 +step:1497 train loss:4.338520 +step:1498 train loss:4.243093 +step:1499 train loss:4.279352 +step:1500 validation loss:4.268830 total_sharp:1.8695e-02 L1_sharp:1.0427e+00 L2_sharp:6.6782e-01 L3_sharp:5.6059e-01 L4_sharp:4.1784e-01 L5_sharp:3.9089e-01 L6_sharp:3.3658e-01 L7_sharp:2.6076e-01 L8_sharp:1.5230e-01 L9_sharp:1.1556e-01 L10_sharp:8.8368e-02 L11_sharp:6.6933e-02 L12_sharp:6.1346e-02 total_fnorm:1.3022e+00 total_l1_linf:7.1871e+03 total_spectral:1.3022e+00 L1_fnorm:3.1677e-02 L2_fnorm:2.8952e-02 L3_fnorm:2.7955e-02 L4_fnorm:2.9218e-02 L5_fnorm:2.9838e-02 L6_fnorm:3.0823e-02 L7_fnorm:3.1505e-02 L8_fnorm:3.1561e-02 L9_fnorm:3.1858e-02 L10_fnorm:3.1979e-02 L11_fnorm:3.1476e-02 L12_fnorm:3.1830e-02 L1_l1linf:2.2584e-01 L2_l1linf:2.3350e-01 L3_l1linf:2.4995e-01 L4_l1linf:2.4584e-01 L5_l1linf:2.5449e-01 L6_l1linf:2.5873e-01 L7_l1linf:2.6586e-01 L8_l1linf:2.6554e-01 L9_l1linf:2.7679e-01 L10_l1linf:2.7172e-01 L11_l1linf:2.5634e-01 L12_l1linf:2.4156e-01 L1_spectral:5.0868e-03 L2_spectral:5.2946e-03 L3_spectral:5.5976e-03 L4_spectral:5.5327e-03 L5_spectral:5.6791e-03 L6_spectral:5.7953e-03 L7_spectral:5.9500e-03 L8_spectral:5.9531e-03 L9_spectral:6.1445e-03 L10_spectral:6.1160e-03 L11_spectral:5.7933e-03 L12_spectral:5.6257e-03 ip_v_neg_g:1.3550e-02 cos_v_neg_g:9.9953e-04 v_norm:1.3022e+00 g_norm:1.0411e+01 hv_norm:6.4193e+00 cos_v_hv:3.7923e-03 hg_norm:1.6392e+04 cos_g_hg:6.3756e-01 v_par:4.8256e-05 v_perp:1.3022e+00 L1_cos_v_neg_g:1.7015e-02 L1_v_norm:3.1677e-02 L2_cos_v_neg_g:1.5975e-02 L2_v_norm:2.8952e-02 L3_cos_v_neg_g:1.5283e-02 
L3_v_norm:2.7955e-02 L4_cos_v_neg_g:1.3007e-02 L4_v_norm:2.9218e-02 L5_cos_v_neg_g:1.2984e-02 L5_v_norm:2.9838e-02 L6_cos_v_neg_g:1.2393e-02 L6_v_norm:3.0823e-02 L7_cos_v_neg_g:1.0370e-02 L7_v_norm:3.1505e-02 L8_cos_v_neg_g:8.7018e-03 L8_v_norm:3.1561e-02 L9_cos_v_neg_g:8.3578e-03 L9_v_norm:3.1858e-02 L10_cos_v_neg_g:8.1980e-03 L10_v_norm:3.1979e-02 L11_cos_v_neg_g:7.9186e-03 L11_v_norm:3.1476e-02 L12_cos_v_neg_g:6.9629e-03 L12_v_norm:3.1830e-02 +step:1500 train loss:4.285050 +step:1501 train loss:4.293611 +step:1502 train loss:4.246912 +step:1503 train loss:4.289672 +step:1504 train loss:4.255052 +step:1505 train loss:4.234891 +step:1506 train loss:4.217686 +step:1507 train loss:4.254500 +step:1508 train loss:4.257905 +step:1509 train loss:4.310562 +step:1510 train loss:4.247723 +step:1511 train loss:4.282061 +step:1512 train loss:4.242723 +step:1513 train loss:4.325418 +step:1514 train loss:4.269767 +step:1515 train loss:4.333052 +step:1516 train loss:4.250906 +step:1517 train loss:4.277511 +step:1518 train loss:4.350183 +step:1519 train loss:4.306649 +step:1520 train loss:4.349506 +step:1521 train loss:4.273820 +step:1522 train loss:4.318645 +step:1523 train loss:4.326254 +step:1524 train loss:4.231038 +step:1525 train loss:4.310868 +step:1526 train loss:4.222366 +step:1527 train loss:4.295815 +step:1528 train loss:4.327336 +step:1529 train loss:4.296926 +step:1530 train loss:4.320959 +step:1531 train loss:4.250436 +step:1532 train loss:4.321320 +step:1533 train loss:4.294597 +step:1534 train loss:4.236423 +step:1535 train loss:4.297517 +step:1536 train loss:4.320981 +step:1537 train loss:4.280488 +step:1538 train loss:4.259129 +step:1539 train loss:4.273090 +step:1540 train loss:4.278054 +step:1541 train loss:4.257839 +step:1542 train loss:4.342193 +step:1543 train loss:4.356486 +step:1544 train loss:4.234364 +step:1545 train loss:4.217998 +step:1546 train loss:4.251172 +step:1547 train loss:4.249466 +step:1548 train loss:4.283232 +step:1549 train loss:4.224707 +step:1550 train loss:4.324211 +step:1551 train loss:4.261718 +step:1552 train loss:4.295737 +step:1553 train loss:4.296705 +step:1554 train loss:4.305716 +step:1555 train loss:4.264153 +step:1556 train loss:4.247352 +step:1557 train loss:4.250814 +step:1558 train loss:4.271400 +step:1559 train loss:4.241924 +step:1560 train loss:4.322660 +step:1561 train loss:4.289062 +step:1562 train loss:4.186680 +step:1563 train loss:4.173539 +step:1564 train loss:4.282786 +step:1565 train loss:4.273796 +step:1566 train loss:4.281879 +step:1567 train loss:4.293174 +step:1568 train loss:4.236506 +step:1569 train loss:4.235178 +step:1570 train loss:4.245428 +step:1571 train loss:4.231023 +step:1572 train loss:4.234336 +step:1573 train loss:4.273724 +step:1574 train loss:4.230861 +step:1575 train loss:4.255492 +step:1576 train loss:4.205875 +step:1577 train loss:4.231386 +step:1578 train loss:4.216543 +step:1579 train loss:4.291427 +step:1580 train loss:4.241937 +step:1581 train loss:4.278551 +step:1582 train loss:4.273221 +step:1583 train loss:4.255324 +step:1584 train loss:4.181002 +step:1585 train loss:4.273071 +step:1586 train loss:4.226064 +step:1587 train loss:4.244417 +step:1588 train loss:4.224391 +step:1589 train loss:4.269763 +step:1590 train loss:4.180446 +step:1591 train loss:4.248691 +step:1592 train loss:4.194215 +step:1593 train loss:4.231499 +step:1594 train loss:4.226143 +step:1595 train loss:4.223455 +step:1596 train loss:4.227346 +step:1597 train loss:4.158921 +step:1598 train loss:4.258878 +step:1599 train loss:4.270692 
+step:1600 train loss:4.161976 +step:1601 train loss:4.225705 +step:1602 train loss:4.278213 +step:1603 train loss:4.273337 +step:1604 train loss:4.205746 +step:1605 train loss:4.251596 +step:1606 train loss:4.300265 +step:1607 train loss:4.188148 +step:1608 train loss:4.215289 +step:1609 train loss:4.229231 +step:1610 train loss:4.291404 +step:1611 train loss:4.216073 +step:1612 train loss:4.146492 +step:1613 train loss:4.208015 +step:1614 train loss:4.316667 +step:1615 train loss:4.258685 +step:1616 train loss:4.260049 +step:1617 train loss:4.223163 +step:1618 train loss:4.233825 +step:1619 train loss:4.401815 +step:1620 train loss:4.193797 +step:1621 train loss:4.252693 +step:1622 train loss:4.180800 +step:1623 train loss:4.235922 +step:1624 train loss:4.215880 +step:1625 train loss:4.286224 +step:1626 train loss:4.186812 +step:1627 train loss:4.189195 +step:1628 train loss:4.209499 +step:1629 train loss:4.229400 +step:1630 train loss:4.253545 +step:1631 train loss:4.198441 +step:1632 train loss:4.180894 +step:1633 train loss:4.194922 +step:1634 train loss:4.240845 +step:1635 train loss:4.188256 +step:1636 train loss:4.178143 +step:1637 train loss:4.251163 +step:1638 train loss:4.342329 +step:1639 train loss:4.165832 +step:1640 train loss:4.235608 +step:1641 train loss:4.206279 +step:1642 train loss:4.282599 +step:1643 train loss:4.188290 +step:1644 train loss:4.210133 +step:1645 train loss:4.185216 +step:1646 train loss:4.261240 +step:1647 train loss:4.161167 +step:1648 train loss:4.220448 +step:1649 train loss:4.187357 +step:1650 train loss:4.202170 +step:1651 train loss:4.220984 +step:1652 train loss:4.232536 +step:1653 train loss:4.239296 +step:1654 train loss:4.227108 +step:1655 train loss:4.213235 +step:1656 train loss:4.200882 +step:1657 train loss:4.202380 +step:1658 train loss:4.174587 +step:1659 train loss:4.246687 +step:1660 train loss:4.150136 +step:1661 train loss:4.256396 +step:1662 train loss:4.198868 +step:1663 train loss:4.187914 +step:1664 train loss:4.282597 +step:1665 train loss:4.203528 +step:1666 train loss:4.215447 +step:1667 train loss:4.225443 +step:1668 train loss:4.206230 +step:1669 train loss:4.174977 +step:1670 train loss:4.213544 +step:1671 train loss:4.214349 +step:1672 train loss:4.211707 +step:1673 train loss:4.176382 +step:1674 train loss:4.166964 +step:1675 train loss:4.216705 +step:1676 train loss:4.464949 +step:1677 train loss:4.226937 +step:1678 train loss:4.135530 +step:1679 train loss:4.261523 +step:1680 train loss:4.190335 +step:1681 train loss:4.243625 +step:1682 train loss:4.195504 +step:1683 train loss:4.197234 +step:1684 train loss:4.155561 +step:1685 train loss:4.206913 +step:1686 train loss:4.195871 +step:1687 train loss:4.199411 +step:1688 train loss:4.189337 +step:1689 train loss:4.172291 +step:1690 train loss:4.204412 +step:1691 train loss:4.173937 +step:1692 train loss:4.195222 +step:1693 train loss:4.157047 +step:1694 train loss:4.123002 +step:1695 train loss:4.142435 +step:1696 train loss:4.156322 +step:1697 train loss:4.200411 +step:1698 train loss:4.187552 +step:1699 train loss:4.157159 +step:1700 train loss:4.230780 +step:1701 train loss:4.170504 +step:1702 train loss:4.160778 +step:1703 train loss:4.183829 +step:1704 train loss:4.191139 +step:1705 train loss:4.200886 +step:1706 train loss:4.218097 +step:1707 train loss:4.207068 +step:1708 train loss:4.140404 +step:1709 train loss:4.229591 +step:1710 train loss:4.156423 +step:1711 train loss:4.156484 +step:1712 train loss:4.180846 +step:1713 train loss:4.147748 +step:1714 train 
loss:4.503839 +step:1715 train loss:4.166836 +step:1716 train loss:4.146078 +step:1717 train loss:4.150464 +step:1718 train loss:4.222229 +step:1719 train loss:4.142993 +step:1720 train loss:4.219237 +step:1721 train loss:4.160506 +step:1722 train loss:4.133442 +step:1723 train loss:4.225852 +step:1724 train loss:4.181924 +step:1725 train loss:4.173142 +step:1726 train loss:4.177306 +step:1727 train loss:4.202137 +step:1728 train loss:4.210217 +step:1729 train loss:4.131092 +step:1730 train loss:4.215911 +step:1731 train loss:4.144684 +step:1732 train loss:4.152563 +step:1733 train loss:4.142063 +step:1734 train loss:4.186495 +step:1735 train loss:4.253431 +step:1736 train loss:4.156137 +step:1737 train loss:4.191021 +step:1738 train loss:4.160728 +step:1739 train loss:4.215675 +step:1740 train loss:4.208417 +step:1741 train loss:4.253570 +step:1742 train loss:4.249531 +step:1743 train loss:4.142248 +step:1744 train loss:4.157562 +step:1745 train loss:4.141752 +step:1746 train loss:4.126719 +step:1747 train loss:4.167851 +step:1748 train loss:4.104171 +step:1749 train loss:4.143106 +step:1750 validation loss:4.128505 +step:1750 train loss:4.175002 +step:1751 train loss:4.196615 +step:1752 train loss:4.147871 +step:1753 train loss:4.181614 +step:1754 train loss:4.171660 +step:1755 train loss:4.171741 +step:1756 train loss:4.199067 +step:1757 train loss:4.197915 +step:1758 train loss:4.119585 +step:1759 train loss:4.205362 +step:1760 train loss:4.161434 +step:1761 train loss:4.130306 +step:1762 train loss:4.134111 +step:1763 train loss:4.138638 +step:1764 train loss:4.416228 +step:1765 train loss:4.141455 +step:1766 train loss:4.229919 +step:1767 train loss:4.146034 +step:1768 train loss:4.120461 +step:1769 train loss:4.137589 +step:1770 train loss:4.158454 +step:1771 train loss:4.131245 +step:1772 train loss:4.238512 +step:1773 train loss:4.166683 +step:1774 train loss:4.173084 +step:1775 train loss:4.283069 +step:1776 train loss:4.161036 +step:1777 train loss:4.145743 +step:1778 train loss:4.205300 +step:1779 train loss:4.128421 +step:1780 train loss:4.187215 +step:1781 train loss:4.191899 +step:1782 train loss:4.217548 +step:1783 train loss:4.153444 +step:1784 train loss:4.236643 +step:1785 train loss:4.148329 +step:1786 train loss:4.142491 +step:1787 train loss:4.141012 +step:1788 train loss:4.163925 +step:1789 train loss:4.115160 +step:1790 train loss:4.129304 +step:1791 train loss:4.200494 +step:1792 train loss:4.206065 +step:1793 train loss:4.123436 +step:1794 train loss:4.168144 +step:1795 train loss:4.122517 +step:1796 train loss:4.106177 +step:1797 train loss:4.171889 +step:1798 train loss:4.105389 +step:1799 train loss:4.168229 +step:1800 train loss:4.187572 +step:1801 train loss:4.179741 +step:1802 train loss:4.186948 +step:1803 train loss:4.180439 +step:1804 train loss:4.178944 +step:1805 train loss:4.161747 +step:1806 train loss:4.178766 +step:1807 train loss:4.113653 +step:1808 train loss:4.178073 +step:1809 train loss:4.148943 +step:1810 train loss:4.150528 +step:1811 train loss:4.160157 +step:1812 train loss:4.146441 +step:1813 train loss:4.159719 +step:1814 train loss:4.214392 +step:1815 train loss:4.161470 +step:1816 train loss:4.118670 +step:1817 train loss:4.104895 +step:1818 train loss:4.164061 +step:1819 train loss:4.130167 +step:1820 train loss:4.175159 +step:1821 train loss:4.131413 +step:1822 train loss:4.115792 +step:1823 train loss:4.108569 +step:1824 train loss:4.181382 +step:1825 train loss:4.092370 +step:1826 train loss:4.137882 +step:1827 train loss:4.107954 
+step:1828 train loss:4.152000 +step:1829 train loss:4.114440 +step:1830 train loss:4.306675 +step:1831 train loss:4.076629 +step:1832 train loss:4.109146 +step:1833 train loss:4.170231 +step:1834 train loss:4.114717 +step:1835 train loss:4.124175 +step:1836 train loss:4.160539 +step:1837 train loss:4.087560 +step:1838 train loss:4.181201 +step:1839 train loss:4.155763 +step:1840 train loss:4.134300 +step:1841 train loss:4.152356 +step:1842 train loss:4.131247 +step:1843 train loss:4.077405 +step:1844 train loss:4.142922 +step:1845 train loss:4.112175 +step:1846 train loss:4.157480 +step:1847 train loss:4.219950 +step:1848 train loss:4.006171 +step:1849 train loss:4.109627 +step:1850 train loss:4.085749 +step:1851 train loss:4.132295 +step:1852 train loss:4.113903 +step:1853 train loss:4.167830 +step:1854 train loss:4.133634 +step:1855 train loss:4.114519 +step:1856 train loss:4.125479 +step:1857 train loss:4.121815 +step:1858 train loss:4.169234 +step:1859 train loss:4.126908 +step:1860 train loss:4.091316 +step:1861 train loss:4.113751 +step:1862 train loss:4.152575 +step:1863 train loss:4.189065 +step:1864 train loss:4.087834 +step:1865 train loss:4.110500 +step:1866 train loss:4.120354 +step:1867 train loss:4.140113 +step:1868 train loss:4.192027 +step:1869 train loss:4.106657 +step:1870 train loss:4.138612 +step:1871 train loss:4.076529 +step:1872 train loss:4.139367 +step:1873 train loss:4.206521 +step:1874 train loss:4.062534 +step:1875 train loss:4.146020 +step:1876 train loss:4.107246 +step:1877 train loss:4.148798 +step:1878 train loss:4.075831 +step:1879 train loss:4.128905 +step:1880 train loss:4.212783 +step:1881 train loss:4.138931 +step:1882 train loss:4.151484 +step:1883 train loss:4.178380 +step:1884 train loss:4.180762 +step:1885 train loss:4.151299 +step:1886 train loss:4.072637 +step:1887 train loss:4.089489 +step:1888 train loss:4.093607 +step:1889 train loss:4.101905 +step:1890 train loss:4.117669 +step:1891 train loss:4.042488 +step:1892 train loss:4.144317 +step:1893 train loss:4.065254 +step:1894 train loss:4.080684 +step:1895 train loss:4.120057 +step:1896 train loss:4.166414 +step:1897 train loss:4.071814 +step:1898 train loss:4.113935 +step:1899 train loss:4.126856 +step:1900 train loss:4.080061 +step:1901 train loss:4.146874 +step:1902 train loss:4.148651 +step:1903 train loss:4.089279 +step:1904 train loss:4.078928 +step:1905 train loss:4.080169 +step:1906 train loss:4.129573 +step:1907 train loss:4.081609 +step:1908 train loss:4.087313 +step:1909 train loss:4.184093 +step:1910 train loss:4.081099 +step:1911 train loss:4.078823 +step:1912 train loss:4.131218 +step:1913 train loss:4.072686 +step:1914 train loss:4.104743 +step:1915 train loss:4.075163 +step:1916 train loss:4.119074 +step:1917 train loss:4.107047 +step:1918 train loss:4.021146 +step:1919 train loss:4.167579 +step:1920 train loss:4.276198 +step:1921 train loss:4.048921 +step:1922 train loss:4.028872 +step:1923 train loss:4.130775 +step:1924 train loss:4.162990 +step:1925 train loss:4.110341 +step:1926 train loss:4.048222 +step:1927 train loss:4.132499 +step:1928 train loss:4.044540 +step:1929 train loss:4.074267 +step:1930 train loss:4.145882 +step:1931 train loss:4.056223 +step:1932 train loss:4.109907 +step:1933 train loss:4.105441 +step:1934 train loss:4.173881 +step:1935 train loss:4.129938 +step:1936 train loss:4.094616 +step:1937 train loss:4.038851 +step:1938 train loss:4.395445 +step:1939 train loss:4.146496 +step:1940 train loss:4.128096 +step:1941 train loss:4.123026 +step:1942 train 
loss:4.125719 +step:1943 train loss:4.112124 +step:1944 train loss:4.077596 +step:1945 train loss:4.077559 +step:1946 train loss:4.096191 +step:1947 train loss:4.124096 +step:1948 train loss:4.028908 +step:1949 train loss:4.139904 +step:1950 train loss:4.081693 +step:1951 train loss:4.101315 +step:1952 train loss:4.129236 +step:1953 train loss:4.069121 +step:1954 train loss:4.096201 +step:1955 train loss:4.054013 +step:1956 train loss:4.131741 +step:1957 train loss:4.154905 +step:1958 train loss:4.169419 +step:1959 train loss:4.039256 +step:1960 train loss:4.079694 +step:1961 train loss:4.108357 +step:1962 train loss:4.107739 +step:1963 train loss:4.078230 +step:1964 train loss:4.114102 +step:1965 train loss:4.156694 +step:1966 train loss:4.055572 +step:1967 train loss:4.120161 +step:1968 train loss:4.056945 +step:1969 train loss:4.069531 +step:1970 train loss:4.135820 +step:1971 train loss:4.038164 +step:1972 train loss:4.143164 +step:1973 train loss:4.041320 +step:1974 train loss:4.091067 +step:1975 train loss:4.055732 +step:1976 train loss:4.071623 +step:1977 train loss:4.115793 +step:1978 train loss:4.062600 +step:1979 train loss:4.038008 +step:1980 train loss:4.076673 +step:1981 train loss:4.052545 +step:1982 train loss:4.143120 +step:1983 train loss:4.082772 +step:1984 train loss:4.120881 +step:1985 train loss:4.110193 +step:1986 train loss:4.097762 +step:1987 train loss:4.054227 +step:1988 train loss:4.084542 +step:1989 train loss:4.213674 +step:1990 train loss:4.059528 +step:1991 train loss:4.050160 +step:1992 train loss:4.059600 +step:1993 train loss:4.096225 +step:1994 train loss:4.083701 +step:1995 train loss:4.043546 +step:1996 train loss:4.089996 +step:1997 train loss:4.093136 +step:1998 train loss:4.050425 +step:1999 train loss:4.156619 +step:2000 validation loss:4.032078 total_sharp:1.1827e-02 L1_sharp:6.0968e-01 L2_sharp:5.8846e-01 L3_sharp:4.9439e-01 L4_sharp:2.8604e-01 L5_sharp:2.5257e-01 L6_sharp:2.3850e-01 L7_sharp:1.8737e-01 L8_sharp:1.4328e-01 L9_sharp:1.1141e-01 L10_sharp:8.9205e-02 L11_sharp:8.2335e-02 L12_sharp:9.8307e-02 total_fnorm:1.3587e+00 total_l1_linf:7.5008e+03 total_spectral:1.3587e+00 L1_fnorm:3.1552e-02 L2_fnorm:2.9243e-02 L3_fnorm:2.7823e-02 L4_fnorm:2.9725e-02 L5_fnorm:3.0295e-02 L6_fnorm:3.1062e-02 L7_fnorm:3.1520e-02 L8_fnorm:3.1490e-02 L9_fnorm:3.1603e-02 L10_fnorm:3.1739e-02 L11_fnorm:3.1579e-02 L12_fnorm:3.1733e-02 L1_l1linf:2.0227e-01 L2_l1linf:2.2429e-01 L3_l1linf:2.4142e-01 L4_l1linf:2.4560e-01 L5_l1linf:2.4948e-01 L6_l1linf:2.4245e-01 L7_l1linf:2.4114e-01 L8_l1linf:2.3791e-01 L9_l1linf:2.4208e-01 L10_l1linf:2.5572e-01 L11_l1linf:2.5247e-01 L12_l1linf:2.4609e-01 L1_spectral:4.6413e-03 L2_spectral:5.1058e-03 L3_spectral:5.4390e-03 L4_spectral:5.5008e-03 L5_spectral:5.6247e-03 L6_spectral:5.4327e-03 L7_spectral:5.3939e-03 L8_spectral:5.5206e-03 L9_spectral:5.4590e-03 L10_spectral:5.7770e-03 L11_spectral:5.6749e-03 L12_spectral:5.5338e-03 ip_v_neg_g:7.4999e-03 cos_v_neg_g:5.8689e-04 v_norm:1.3587e+00 g_norm:9.4055e+00 hv_norm:4.4734e+00 cos_v_hv:3.5920e-03 hg_norm:7.2386e+03 cos_g_hg:6.4914e-01 v_par:2.8076e-05 v_perp:1.3587e+00 L1_cos_v_neg_g:7.7289e-03 L1_v_norm:3.1552e-02 L2_cos_v_neg_g:8.1493e-03 L2_v_norm:2.9243e-02 L3_cos_v_neg_g:1.0522e-02 L3_v_norm:2.7823e-02 L4_cos_v_neg_g:9.7799e-03 L4_v_norm:2.9725e-02 L5_cos_v_neg_g:8.6684e-03 L5_v_norm:3.0295e-02 L6_cos_v_neg_g:8.9104e-03 L6_v_norm:3.1062e-02 L7_cos_v_neg_g:8.0448e-03 L7_v_norm:3.1520e-02 L8_cos_v_neg_g:7.4350e-03 L8_v_norm:3.1490e-02 L9_cos_v_neg_g:6.8713e-03 L9_v_norm:3.1603e-02 
L10_cos_v_neg_g:6.3197e-03 L10_v_norm:3.1739e-02 L11_cos_v_neg_g:5.7434e-03 L11_v_norm:3.1579e-02 L12_cos_v_neg_g:4.6281e-03 L12_v_norm:3.1733e-02 +step:2000 train loss:4.130129 +step:2001 train loss:4.046278 +step:2002 train loss:4.149527 +step:2003 train loss:4.193459 +step:2004 train loss:4.062917 +step:2005 train loss:4.163380 +step:2006 train loss:4.058438 +step:2007 train loss:4.130416 +step:2008 train loss:4.072026 +step:2009 train loss:4.072666 +step:2010 train loss:4.198705 +step:2011 train loss:4.048242 +step:2012 train loss:4.071270 +step:2013 train loss:4.090225 +step:2014 train loss:3.972678 +step:2015 train loss:4.104584 +step:2016 train loss:4.085209 +step:2017 train loss:4.086041 +step:2018 train loss:4.054011 +step:2019 train loss:4.084935 +step:2020 train loss:4.093527 +step:2021 train loss:4.056936 +step:2022 train loss:4.099421 +step:2023 train loss:4.072699 +step:2024 train loss:4.122638 +step:2025 train loss:4.071757 +step:2026 train loss:4.047273 +step:2027 train loss:4.078753 +step:2028 train loss:4.012779 +step:2029 train loss:4.035430 +step:2030 train loss:4.042694 +step:2031 train loss:4.010284 +step:2032 train loss:4.054130 +step:2033 train loss:4.058237 +step:2034 train loss:4.045216 +step:2035 train loss:4.092008 +step:2036 train loss:4.080846 +step:2037 train loss:4.064262 +step:2038 train loss:4.067344 +step:2039 train loss:4.054885 +step:2040 train loss:4.083282 +step:2041 train loss:4.087800 +step:2042 train loss:4.018222 +step:2043 train loss:4.174678 +step:2044 train loss:4.033420 +step:2045 train loss:4.063003 +step:2046 train loss:4.065524 +step:2047 train loss:4.037728 +step:2048 train loss:4.088602 +step:2049 train loss:4.044034 +step:2050 train loss:4.069690 +step:2051 train loss:4.036460 +step:2052 train loss:4.077031 +step:2053 train loss:4.092666 +step:2054 train loss:4.041821 +step:2055 train loss:4.051162 +step:2056 train loss:4.095011 +step:2057 train loss:4.098610 +step:2058 train loss:4.068482 +step:2059 train loss:4.141307 +step:2060 train loss:4.095979 +step:2061 train loss:4.040016 +step:2062 train loss:4.064922 +step:2063 train loss:3.979720 +step:2064 train loss:4.090629 +step:2065 train loss:4.103543 +step:2066 train loss:3.965118 +step:2067 train loss:4.004007 +step:2068 train loss:4.114318 +step:2069 train loss:4.044768 +step:2070 train loss:4.049066 +step:2071 train loss:4.098050 +step:2072 train loss:4.018381 +step:2073 train loss:4.072066 +step:2074 train loss:4.048224 +step:2075 train loss:4.134926 +step:2076 train loss:4.079944 +step:2077 train loss:4.095484 +step:2078 train loss:4.048238 +step:2079 train loss:4.201839 +step:2080 train loss:4.018826 +step:2081 train loss:4.126868 +step:2082 train loss:4.060297 +step:2083 train loss:4.040572 +step:2084 train loss:4.024004 +step:2085 train loss:4.070900 +step:2086 train loss:4.080234 +step:2087 train loss:4.115894 +step:2088 train loss:3.981998 +step:2089 train loss:4.015634 +step:2090 train loss:4.049837 +step:2091 train loss:4.066668 +step:2092 train loss:4.046918 +step:2093 train loss:4.032282 +step:2094 train loss:4.072553 +step:2095 train loss:4.023991 +step:2096 train loss:4.008124 +step:2097 train loss:4.043422 +step:2098 train loss:4.041509 +step:2099 train loss:4.013944 +step:2100 train loss:4.093402 +step:2101 train loss:4.080195 +step:2102 train loss:4.047574 +step:2103 train loss:4.063944 +step:2104 train loss:4.040785 +step:2105 train loss:4.048208 +step:2106 train loss:4.041925 +step:2107 train loss:4.108093 +step:2108 train loss:4.036418 +step:2109 train 
loss:3.991540 +step:2110 train loss:4.091071 +step:2111 train loss:4.037711 +step:2112 train loss:4.087476 +step:2113 train loss:4.032605 +step:2114 train loss:4.036930 +step:2115 train loss:4.084013 +step:2116 train loss:4.018372 +step:2117 train loss:4.035738 +step:2118 train loss:4.034261 +step:2119 train loss:3.965946 +step:2120 train loss:4.053099 +step:2121 train loss:4.038254 +step:2122 train loss:4.045704 +step:2123 train loss:4.104333 +step:2124 train loss:4.101596 +step:2125 train loss:4.014937 +step:2126 train loss:4.022072 +step:2127 train loss:4.015836 +step:2128 train loss:4.004400 +step:2129 train loss:4.033601 +step:2130 train loss:4.032695 +step:2131 train loss:4.062603 +step:2132 train loss:3.989810 +step:2133 train loss:4.096351 +step:2134 train loss:4.047337 +step:2135 train loss:4.008296 +step:2136 train loss:4.095155 +step:2137 train loss:4.064441 +step:2138 train loss:4.017994 +step:2139 train loss:4.023423 +step:2140 train loss:4.029872 +step:2141 train loss:4.071269 +step:2142 train loss:4.046658 +step:2143 train loss:3.969739 +step:2144 train loss:4.076046 +step:2145 train loss:4.044450 +step:2146 train loss:4.080735 +step:2147 train loss:4.181462 +step:2148 train loss:3.988117 +step:2149 train loss:3.996472 +step:2150 train loss:4.025734 +step:2151 train loss:4.061178 +step:2152 train loss:4.049709 +step:2153 train loss:4.090557 +step:2154 train loss:4.007254 +step:2155 train loss:4.089794 +step:2156 train loss:4.006879 +step:2157 train loss:4.087189 +step:2158 train loss:4.119818 +step:2159 train loss:4.045901 +step:2160 train loss:4.125494 +step:2161 train loss:4.022735 +step:2162 train loss:4.028668 +step:2163 train loss:4.008465 +step:2164 train loss:4.029410 +step:2165 train loss:4.005940 +step:2166 train loss:4.125505 +step:2167 train loss:4.029609 +step:2168 train loss:4.048885 +step:2169 train loss:3.999708 +step:2170 train loss:4.136607 +step:2171 train loss:4.102326 +step:2172 train loss:4.039227 +step:2173 train loss:4.021802 +step:2174 train loss:4.085559 +step:2175 train loss:4.023419 +step:2176 train loss:4.097119 +step:2177 train loss:4.067506 +step:2178 train loss:3.993769 +step:2179 train loss:4.058996 +step:2180 train loss:4.070836 +step:2181 train loss:4.008495 +step:2182 train loss:4.057950 +step:2183 train loss:4.052668 +step:2184 train loss:4.004173 +step:2185 train loss:3.982934 +step:2186 train loss:4.023921 +step:2187 train loss:4.036071 +step:2188 train loss:4.084209 +step:2189 train loss:3.974414 +step:2190 train loss:4.021950 +step:2191 train loss:4.075773 +step:2192 train loss:4.003154 +step:2193 train loss:3.971974 +step:2194 train loss:3.986894 +step:2195 train loss:4.002648 +step:2196 train loss:4.012221 +step:2197 train loss:3.989777 +step:2198 train loss:4.008197 +step:2199 train loss:4.085430 +step:2200 train loss:4.015076 +step:2201 train loss:4.018459 +step:2202 train loss:3.985804 +step:2203 train loss:4.009658 +step:2204 train loss:4.038155 +step:2205 train loss:4.019443 +step:2206 train loss:4.021001 +step:2207 train loss:4.012165 +step:2208 train loss:3.994905 +step:2209 train loss:4.271049 +step:2210 train loss:4.044158 +step:2211 train loss:4.037246 +step:2212 train loss:4.005225 +step:2213 train loss:4.090766 +step:2214 train loss:4.084470 +step:2215 train loss:4.009466 +step:2216 train loss:3.979842 +step:2217 train loss:3.999746 +step:2218 train loss:4.002972 +step:2219 train loss:4.036846 +step:2220 train loss:3.983881 +step:2221 train loss:4.015493 +step:2222 train loss:4.034486 +step:2223 train loss:4.066830 
+step:2224 train loss:4.046378 +step:2225 train loss:3.982612 +step:2226 train loss:4.050129 +step:2227 train loss:4.050788 +step:2228 train loss:4.048338 +step:2229 train loss:3.989987 +step:2230 train loss:4.115405 +step:2231 train loss:4.026771 +step:2232 train loss:4.027659 +step:2233 train loss:4.066012 +step:2234 train loss:3.965974 +step:2235 train loss:4.055595 +step:2236 train loss:3.990145 +step:2237 train loss:4.123505 +step:2238 train loss:3.937038 +step:2239 train loss:4.007508 +step:2240 train loss:4.024668 +step:2241 train loss:3.942626 +step:2242 train loss:4.075792 +step:2243 train loss:4.118680 +step:2244 train loss:3.996458 +step:2245 train loss:3.991475 +step:2246 train loss:3.967554 +step:2247 train loss:3.967254 +step:2248 train loss:4.018782 +step:2249 train loss:4.007513 +step:2250 validation loss:3.966651 +step:2250 train loss:4.017008 +step:2251 train loss:3.982692 +step:2252 train loss:3.983543 +step:2253 train loss:4.006990 +step:2254 train loss:4.009095 +step:2255 train loss:3.972366 +step:2256 train loss:4.021618 +step:2257 train loss:4.012413 +step:2258 train loss:4.002938 +step:2259 train loss:4.012750 +step:2260 train loss:3.975239 +step:2261 train loss:4.049602 +step:2262 train loss:4.066649 +step:2263 train loss:4.028707 +step:2264 train loss:4.137660 +step:2265 train loss:3.989114 +step:2266 train loss:4.034754 +step:2267 train loss:3.992483 +step:2268 train loss:3.993841 +step:2269 train loss:3.998220 +step:2270 train loss:3.987530 +step:2271 train loss:3.997962 +step:2272 train loss:4.038091 +step:2273 train loss:3.960165 +step:2274 train loss:3.986938 +step:2275 train loss:3.952289 +step:2276 train loss:4.019343 +step:2277 train loss:4.031255 +step:2278 train loss:4.017216 +step:2279 train loss:3.993159 +step:2280 train loss:3.905122 +step:2281 train loss:4.046676 +step:2282 train loss:3.981568 +step:2283 train loss:3.964062 +step:2284 train loss:3.987792 +step:2285 train loss:4.033834 +step:2286 train loss:3.996582 +step:2287 train loss:4.036287 +step:2288 train loss:4.006589 +step:2289 train loss:4.004035 +step:2290 train loss:4.008610 +step:2291 train loss:3.998405 +step:2292 train loss:4.030739 +step:2293 train loss:4.015536 +step:2294 train loss:4.010636 +step:2295 train loss:4.068001 +step:2296 train loss:4.000518 +step:2297 train loss:3.972318 +step:2298 train loss:4.033026 +step:2299 train loss:4.005750 +step:2300 train loss:3.923070 +step:2301 train loss:4.016637 +step:2302 train loss:4.035115 +step:2303 train loss:3.998034 +step:2304 train loss:3.991337 +step:2305 train loss:4.034296 +step:2306 train loss:4.030543 +step:2307 train loss:3.998098 +step:2308 train loss:4.023510 +step:2309 train loss:3.982893 +step:2310 train loss:3.966231 +step:2311 train loss:3.957230 +step:2312 train loss:4.023423 +step:2313 train loss:3.938438 +step:2314 train loss:4.014063 +step:2315 train loss:4.029374 +step:2316 train loss:4.063113 +step:2317 train loss:3.932313 +step:2318 train loss:3.970317 +step:2319 train loss:4.026665 +step:2320 train loss:3.998444 +step:2321 train loss:3.969974 +step:2322 train loss:3.979444 +step:2323 train loss:3.979849 +step:2324 train loss:4.002635 +step:2325 train loss:3.951653 +step:2326 train loss:3.966009 +step:2327 train loss:4.086338 +step:2328 train loss:4.037149 +step:2329 train loss:3.989321 +step:2330 train loss:3.943897 +step:2331 train loss:3.989887 +step:2332 train loss:3.920934 +step:2333 train loss:3.984402 +step:2334 train loss:3.961887 +step:2335 train loss:3.945298 +step:2336 train loss:4.197941 +step:2337 
train loss:3.977285 +step:2338 train loss:4.017140 +step:2339 train loss:4.015031 +step:2340 train loss:4.023718 +step:2341 train loss:4.015698 +step:2342 train loss:3.969359 +step:2343 train loss:3.989459 +step:2344 train loss:4.029622 +step:2345 train loss:3.989961 +step:2346 train loss:4.019449 +step:2347 train loss:3.938338 +step:2348 train loss:4.000678 +step:2349 train loss:3.949761 +step:2350 train loss:4.002882 +step:2351 train loss:4.013305 +step:2352 train loss:4.013126 +step:2353 train loss:3.966191 +step:2354 train loss:4.022390 +step:2355 train loss:4.003857 +step:2356 train loss:4.045080 +step:2357 train loss:3.953758 +step:2358 train loss:3.968818 +step:2359 train loss:3.992959 +step:2360 train loss:4.012789 +step:2361 train loss:4.052432 +step:2362 train loss:3.875993 +step:2363 train loss:4.065259 +step:2364 train loss:4.018015 +step:2365 train loss:3.990373 +step:2366 train loss:3.947867 +step:2367 train loss:4.003051 +step:2368 train loss:4.002273 +step:2369 train loss:3.979765 +step:2370 train loss:4.000676 +step:2371 train loss:4.058473 +step:2372 train loss:3.915093 +step:2373 train loss:4.051205 +step:2374 train loss:4.031745 +step:2375 train loss:4.019115 +step:2376 train loss:4.006883 +step:2377 train loss:3.955900 +step:2378 train loss:3.998482 +step:2379 train loss:3.985743 +step:2380 train loss:4.044206 +step:2381 train loss:4.131274 +step:2382 train loss:3.925771 +step:2383 train loss:3.979930 +step:2384 train loss:4.004324 +step:2385 train loss:3.906792 +step:2386 train loss:4.064863 +step:2387 train loss:3.946925 +step:2388 train loss:3.993735 +step:2389 train loss:4.015144 +step:2390 train loss:3.966112 +step:2391 train loss:3.984401 +step:2392 train loss:4.011781 +step:2393 train loss:3.970729 +step:2394 train loss:3.989420 +step:2395 train loss:3.980831 +step:2396 train loss:3.988860 +step:2397 train loss:3.965619 +step:2398 train loss:4.015667 +step:2399 train loss:3.978088 +step:2400 train loss:3.962023 +step:2401 train loss:3.999312 +step:2402 train loss:3.949670 +step:2403 train loss:4.006132 +step:2404 train loss:3.958029 +step:2405 train loss:3.960780 +step:2406 train loss:3.990035 +step:2407 train loss:3.936558 +step:2408 train loss:3.982515 +step:2409 train loss:3.967137 +step:2410 train loss:3.970832 +step:2411 train loss:4.039159 +step:2412 train loss:4.032288 +step:2413 train loss:4.060390 +step:2414 train loss:3.955949 +step:2415 train loss:3.949637 +step:2416 train loss:3.965057 +step:2417 train loss:3.999122 +step:2418 train loss:4.019575 +step:2419 train loss:3.947026 +step:2420 train loss:3.969976 +step:2421 train loss:4.000157 +step:2422 train loss:4.051852 +step:2423 train loss:3.984149 +step:2424 train loss:3.949641 +step:2425 train loss:4.009223 +step:2426 train loss:3.949412 +step:2427 train loss:3.975080 +step:2428 train loss:4.053279 +step:2429 train loss:4.008538 +step:2430 train loss:4.095907 +step:2431 train loss:4.010369 +step:2432 train loss:3.977856 +step:2433 train loss:3.958925 +step:2434 train loss:3.941876 +step:2435 train loss:3.995357 +step:2436 train loss:3.958763 +step:2437 train loss:3.989954 +step:2438 train loss:4.029477 +step:2439 train loss:4.013939 +step:2440 train loss:3.961283 +step:2441 train loss:3.992896 +step:2442 train loss:3.984927 +step:2443 train loss:3.951946 +step:2444 train loss:3.986522 +step:2445 train loss:3.981676 +step:2446 train loss:3.956130 +step:2447 train loss:3.935676 +step:2448 train loss:3.982726 +step:2449 train loss:4.011064 +step:2450 train loss:3.972032 +step:2451 train loss:3.888195 
+step:2452 train loss:3.994720 +step:2453 train loss:3.965068 +step:2454 train loss:3.959662 +step:2455 train loss:4.012415 +step:2456 train loss:3.967653 +step:2457 train loss:4.024231 +step:2458 train loss:4.002321 +step:2459 train loss:3.980348 +step:2460 train loss:3.980844 +step:2461 train loss:4.016960 +step:2462 train loss:3.988212 +step:2463 train loss:3.958183 +step:2464 train loss:3.978981 +step:2465 train loss:4.053281 +step:2466 train loss:4.133283 +step:2467 train loss:4.042882 +step:2468 train loss:3.939380 +step:2469 train loss:4.002606 +step:2470 train loss:4.056359 +step:2471 train loss:4.059359 +step:2472 train loss:4.046817 +step:2473 train loss:3.974016 +step:2474 train loss:3.932527 +step:2475 train loss:3.988217 +step:2476 train loss:4.064087 +step:2477 train loss:3.977904 +step:2478 train loss:3.933517 +step:2479 train loss:3.979313 +step:2480 train loss:3.970449 +step:2481 train loss:4.157466 +step:2482 train loss:3.972041 +step:2483 train loss:4.000278 +step:2484 train loss:3.947512 +step:2485 train loss:3.931074 +step:2486 train loss:3.974130 +step:2487 train loss:4.005764 +step:2488 train loss:3.917796 +step:2489 train loss:4.028778 +step:2490 train loss:3.949585 +step:2491 train loss:3.958616 +step:2492 train loss:3.998966 +step:2493 train loss:4.037464 +step:2494 train loss:3.959742 +step:2495 train loss:3.993362 +step:2496 train loss:3.972319 +step:2497 train loss:3.985930 +step:2498 train loss:3.989556 +step:2499 train loss:3.987823 +step:2500 validation loss:3.915343 total_sharp:9.3060e-03 L1_sharp:4.0013e-01 L2_sharp:2.9948e-01 L3_sharp:3.0192e-01 L4_sharp:2.0393e-01 L5_sharp:1.5890e-01 L6_sharp:1.6018e-01 L7_sharp:1.5551e-01 L8_sharp:1.3545e-01 L9_sharp:1.0335e-01 L10_sharp:8.6628e-02 L11_sharp:6.7773e-02 L12_sharp:8.4222e-02 total_fnorm:1.3521e+00 total_l1_linf:7.4664e+03 total_spectral:1.3521e+00 L1_fnorm:3.1732e-02 L2_fnorm:2.9719e-02 L3_fnorm:2.9055e-02 L4_fnorm:3.0257e-02 L5_fnorm:3.0732e-02 L6_fnorm:3.1394e-02 L7_fnorm:3.1665e-02 L8_fnorm:3.1608e-02 L9_fnorm:3.1699e-02 L10_fnorm:3.1917e-02 L11_fnorm:3.1610e-02 L12_fnorm:3.1619e-02 L1_l1linf:2.4533e-01 L2_l1linf:2.5525e-01 L3_l1linf:2.6646e-01 L4_l1linf:2.7141e-01 L5_l1linf:2.6399e-01 L6_l1linf:2.5911e-01 L7_l1linf:2.4251e-01 L8_l1linf:2.5110e-01 L9_l1linf:2.5657e-01 L10_l1linf:2.7457e-01 L11_l1linf:2.6283e-01 L12_l1linf:2.5500e-01 L1_spectral:5.5133e-03 L2_spectral:5.8391e-03 L3_spectral:6.0376e-03 L4_spectral:6.1274e-03 L5_spectral:5.9394e-03 L6_spectral:5.8572e-03 L7_spectral:5.4964e-03 L8_spectral:5.6611e-03 L9_spectral:5.7961e-03 L10_spectral:6.2394e-03 L11_spectral:5.9592e-03 L12_spectral:5.8110e-03 ip_v_neg_g:7.1840e-03 cos_v_neg_g:6.6878e-04 v_norm:1.3521e+00 g_norm:7.9444e+00 hv_norm:3.2200e+00 cos_v_hv:3.9078e-03 hg_norm:3.7406e+03 cos_g_hg:5.9440e-01 v_par:2.7151e-05 v_perp:1.3521e+00 L1_cos_v_neg_g:8.9293e-03 L1_v_norm:3.1732e-02 L2_cos_v_neg_g:9.6069e-03 L2_v_norm:2.9719e-02 L3_cos_v_neg_g:1.1130e-02 L3_v_norm:2.9055e-02 L4_cos_v_neg_g:9.7032e-03 L4_v_norm:3.0257e-02 L5_cos_v_neg_g:8.5375e-03 L5_v_norm:3.0732e-02 L6_cos_v_neg_g:8.2936e-03 L6_v_norm:3.1394e-02 L7_cos_v_neg_g:7.2668e-03 L7_v_norm:3.1665e-02 L8_cos_v_neg_g:7.2872e-03 L8_v_norm:3.1608e-02 L9_cos_v_neg_g:6.5040e-03 L9_v_norm:3.1699e-02 L10_cos_v_neg_g:5.8589e-03 L10_v_norm:3.1917e-02 L11_cos_v_neg_g:4.9515e-03 L11_v_norm:3.1610e-02 L12_cos_v_neg_g:4.2961e-03 L12_v_norm:3.1619e-02 +step:2500 train loss:3.933038 +step:2501 train loss:3.991408 +step:2502 train loss:3.981609 +step:2503 train loss:3.914546 +step:2504 train 
loss:3.944571 +step:2505 train loss:3.978446 +step:2506 train loss:3.936289 +step:2507 train loss:3.963283 +step:2508 train loss:3.915583 +step:2509 train loss:3.929208 +step:2510 train loss:3.927787 +step:2511 train loss:3.971981 +step:2512 train loss:4.018983 +step:2513 train loss:3.963397 +step:2514 train loss:3.950580 +step:2515 train loss:4.087260 +step:2516 train loss:3.978308 +step:2517 train loss:4.036717 +step:2518 train loss:4.001966 +step:2519 train loss:3.975111 +step:2520 train loss:3.985745 +step:2521 train loss:3.954420 +step:2522 train loss:4.001058 +step:2523 train loss:3.916742 +step:2524 train loss:3.975502 +step:2525 train loss:3.963603 +step:2526 train loss:4.013034 +step:2527 train loss:4.001369 +step:2528 train loss:3.984820 +step:2529 train loss:4.009988 +step:2530 train loss:3.981928 +step:2531 train loss:3.923030 +step:2532 train loss:4.017212 +step:2533 train loss:3.909835 +step:2534 train loss:4.008819 +step:2535 train loss:3.965746 +step:2536 train loss:3.885708 +step:2537 train loss:4.003222 +step:2538 train loss:3.979828 +step:2539 train loss:3.997684 +step:2540 train loss:3.937920 +step:2541 train loss:3.961584 +step:2542 train loss:3.972661 +step:2543 train loss:3.962332 +step:2544 train loss:3.946530 +step:2545 train loss:3.937106 +step:2546 train loss:3.905020 +step:2547 train loss:3.947320 +step:2548 train loss:3.968265 +step:2549 train loss:3.970488 +step:2550 train loss:4.096053 +step:2551 train loss:4.174975 +step:2552 train loss:3.908007 +step:2553 train loss:3.942664 +step:2554 train loss:4.089109 +step:2555 train loss:3.979812 +step:2556 train loss:3.905331 +step:2557 train loss:3.995153 +step:2558 train loss:3.991576 +step:2559 train loss:3.938965 +step:2560 train loss:3.927413 +step:2561 train loss:4.026496 +step:2562 train loss:3.977862 +step:2563 train loss:3.910924 +step:2564 train loss:3.983424 +step:2565 train loss:3.964918 +step:2566 train loss:3.943018 +step:2567 train loss:3.921055 +step:2568 train loss:3.979437 +step:2569 train loss:3.983691 +step:2570 train loss:3.938669 +step:2571 train loss:4.018044 +step:2572 train loss:3.982158 +step:2573 train loss:3.914054 +step:2574 train loss:3.954487 +step:2575 train loss:4.003479 +step:2576 train loss:3.958376 +step:2577 train loss:3.916097 +step:2578 train loss:3.958047 +step:2579 train loss:3.938273 +step:2580 train loss:3.910734 +step:2581 train loss:3.923409 +step:2582 train loss:3.932533 +step:2583 train loss:3.955601 +step:2584 train loss:3.969052 +step:2585 train loss:3.930732 +step:2586 train loss:3.959286 +step:2587 train loss:3.888040 +step:2588 train loss:3.921121 +step:2589 train loss:3.999266 +step:2590 train loss:3.922222 +step:2591 train loss:3.977462 +step:2592 train loss:4.029431 +step:2593 train loss:3.985959 +step:2594 train loss:3.942890 +step:2595 train loss:3.952582 +step:2596 train loss:3.996008 +step:2597 train loss:3.878528 +step:2598 train loss:4.030627 +step:2599 train loss:3.979615 +step:2600 train loss:4.008781 +step:2601 train loss:3.943122 +step:2602 train loss:3.980477 +step:2603 train loss:3.971236 +step:2604 train loss:3.892987 +step:2605 train loss:4.018718 +step:2606 train loss:3.971983 +step:2607 train loss:3.929969 +step:2608 train loss:3.902812 +step:2609 train loss:3.930939 +step:2610 train loss:3.957285 +step:2611 train loss:3.991598 +step:2612 train loss:3.953052 +step:2613 train loss:3.929641 +step:2614 train loss:3.915772 +step:2615 train loss:3.915980 +step:2616 train loss:3.988685 +step:2617 train loss:3.946837 +step:2618 train loss:3.911529 
+step:2619 train loss:3.932106 +step:2620 train loss:3.921581 +step:2621 train loss:3.935826 +step:2622 train loss:4.011383 +step:2623 train loss:3.885113 +step:2624 train loss:3.897690 +step:2625 train loss:3.973127 +step:2626 train loss:3.964312 +step:2627 train loss:3.941056 +step:2628 train loss:3.987514 +step:2629 train loss:3.941773 +step:2630 train loss:3.936512 +step:2631 train loss:3.966794 +step:2632 train loss:3.937824 +step:2633 train loss:3.920702 +step:2634 train loss:3.967044 +step:2635 train loss:3.950901 +step:2636 train loss:4.001195 +step:2637 train loss:3.949996 +step:2638 train loss:3.927771 +step:2639 train loss:3.990454 +step:2640 train loss:3.906541 +step:2641 train loss:3.965420 +step:2642 train loss:3.882357 +step:2643 train loss:3.888286 +step:2644 train loss:3.980639 +step:2645 train loss:3.908945 +step:2646 train loss:3.946084 +step:2647 train loss:3.963399 +step:2648 train loss:3.997447 +step:2649 train loss:3.910150 +step:2650 train loss:3.899047 +step:2651 train loss:3.944583 +step:2652 train loss:3.913560 +step:2653 train loss:3.980822 +step:2654 train loss:3.940586 +step:2655 train loss:3.930589 +step:2656 train loss:3.950591 +step:2657 train loss:3.975169 +step:2658 train loss:3.981034 +step:2659 train loss:3.959776 +step:2660 train loss:3.950225 +step:2661 train loss:3.995777 +step:2662 train loss:3.970769 +step:2663 train loss:3.943618 +step:2664 train loss:3.959060 +step:2665 train loss:3.906108 +step:2666 train loss:3.936267 +step:2667 train loss:3.942526 +step:2668 train loss:3.919874 +step:2669 train loss:3.923625 +step:2670 train loss:3.952256 +step:2671 train loss:3.924571 +step:2672 train loss:3.947781 +step:2673 train loss:3.882823 +step:2674 train loss:3.973289 +step:2675 train loss:3.950160 +step:2676 train loss:3.970138 +step:2677 train loss:3.951332 +step:2678 train loss:3.933133 +step:2679 train loss:3.916750 +step:2680 train loss:3.902739 +step:2681 train loss:3.872982 +step:2682 train loss:3.960856 +step:2683 train loss:3.932051 +step:2684 train loss:3.960924 +step:2685 train loss:3.890183 +step:2686 train loss:3.897727 +step:2687 train loss:3.969671 +step:2688 train loss:3.987281 +step:2689 train loss:3.894886 +step:2690 train loss:3.975996 +step:2691 train loss:3.944412 +step:2692 train loss:3.971101 +step:2693 train loss:4.028863 +step:2694 train loss:3.923207 +step:2695 train loss:3.943179 +step:2696 train loss:3.947742 +step:2697 train loss:3.940759 +step:2698 train loss:3.943044 +step:2699 train loss:3.964458 +step:2700 train loss:3.940746 +step:2701 train loss:4.000741 +step:2702 train loss:3.939878 +step:2703 train loss:3.898471 +step:2704 train loss:3.970356 +step:2705 train loss:3.964582 +step:2706 train loss:3.896724 +step:2707 train loss:3.861091 +step:2708 train loss:3.954834 +step:2709 train loss:3.938162 +step:2710 train loss:3.948453 +step:2711 train loss:3.909880 +step:2712 train loss:3.969055 +step:2713 train loss:3.977828 +step:2714 train loss:3.917924 +step:2715 train loss:3.912734 +step:2716 train loss:3.977904 +step:2717 train loss:3.946684 +step:2718 train loss:3.939242 +step:2719 train loss:3.942303 +step:2720 train loss:3.904156 +step:2721 train loss:3.984059 +step:2722 train loss:3.915214 +step:2723 train loss:3.898118 +step:2724 train loss:3.924726 +step:2725 train loss:3.925616 +step:2726 train loss:3.896270 +step:2727 train loss:3.955858 +step:2728 train loss:3.895518 +step:2729 train loss:4.025255 +step:2730 train loss:3.962019 +step:2731 train loss:4.001855 +step:2732 train loss:3.919122 +step:2733 train 
loss:3.915596 +step:2734 train loss:3.959077 +step:2735 train loss:3.960646 +step:2736 train loss:3.883034 +step:2737 train loss:3.940093 +step:2738 train loss:3.995032 +step:2739 train loss:3.919631 +step:2740 train loss:3.918365 +step:2741 train loss:3.910174 +step:2742 train loss:3.822587 +step:2743 train loss:3.937335 +step:2744 train loss:3.957328 +step:2745 train loss:3.910460 +step:2746 train loss:3.931509 +step:2747 train loss:3.917764 +step:2748 train loss:3.875898 +step:2749 train loss:3.936276 +step:2750 validation loss:3.872268 +step:2750 train loss:3.946962 +step:2751 train loss:3.973355 +step:2752 train loss:3.952446 +step:2753 train loss:3.949930 +step:2754 train loss:3.883554 +step:2755 train loss:3.953885 +step:2756 train loss:3.923349 +step:2757 train loss:3.912691 +step:2758 train loss:3.937820 +step:2759 train loss:3.949893 +step:2760 train loss:3.860832 +step:2761 train loss:3.878706 +step:2762 train loss:3.893710 +step:2763 train loss:3.916387 +step:2764 train loss:3.860286 +step:2765 train loss:3.904546 +step:2766 train loss:3.996249 +step:2767 train loss:3.869828 +step:2768 train loss:3.930746 +step:2769 train loss:3.906449 +step:2770 train loss:3.928742 +step:2771 train loss:3.951537 +step:2772 train loss:3.915094 +step:2773 train loss:3.912893 +step:2774 train loss:3.908124 +step:2775 train loss:3.924961 +step:2776 train loss:3.880060 +step:2777 train loss:3.908782 +step:2778 train loss:3.919839 +step:2779 train loss:3.944138 +step:2780 train loss:3.911914 +step:2781 train loss:3.900327 +step:2782 train loss:3.889649 +step:2783 train loss:3.925618 +step:2784 train loss:3.931134 +step:2785 train loss:3.996247 +step:2786 train loss:3.967451 +step:2787 train loss:3.927575 +step:2788 train loss:3.921481 +step:2789 train loss:3.914074 +step:2790 train loss:3.857204 +step:2791 train loss:3.957041 +step:2792 train loss:3.944295 +step:2793 train loss:3.911717 +step:2794 train loss:3.921790 +step:2795 train loss:3.933018 +step:2796 train loss:3.927354 +step:2797 train loss:3.970632 +step:2798 train loss:3.960793 +step:2799 train loss:3.870424 +step:2800 train loss:3.915616 +step:2801 train loss:3.950998 +step:2802 train loss:3.975739 +step:2803 train loss:3.946160 +step:2804 train loss:3.882631 +step:2805 train loss:3.920331 +step:2806 train loss:3.917097 +step:2807 train loss:3.947593 +step:2808 train loss:3.884027 +step:2809 train loss:3.949712 +step:2810 train loss:3.941883 +step:2811 train loss:3.932070 +step:2812 train loss:3.976195 +step:2813 train loss:3.947496 +step:2814 train loss:3.937313 +step:2815 train loss:3.947638 +step:2816 train loss:3.951744 +step:2817 train loss:3.885871 +step:2818 train loss:3.991137 +step:2819 train loss:3.916801 +step:2820 train loss:3.911938 +step:2821 train loss:3.892159 +step:2822 train loss:3.936675 +step:2823 train loss:3.887871 +step:2824 train loss:3.784298 +step:2825 train loss:3.936972 +step:2826 train loss:3.926665 +step:2827 train loss:3.953626 +step:2828 train loss:3.941187 +step:2829 train loss:3.933035 +step:2830 train loss:3.959283 +step:2831 train loss:3.900665 +step:2832 train loss:3.870884 +step:2833 train loss:3.930419 +step:2834 train loss:3.883370 +step:2835 train loss:3.917715 +step:2836 train loss:3.921601 +step:2837 train loss:3.918869 +step:2838 train loss:3.859019 +step:2839 train loss:3.955850 +step:2840 train loss:3.920022 +step:2841 train loss:3.998190 +step:2842 train loss:3.945545 +step:2843 train loss:3.936408 +step:2844 train loss:3.958751 +step:2845 train loss:3.921606 +step:2846 train loss:3.870566 
+step:2847 train loss:3.960153 +step:2848 train loss:3.914336 +step:2849 train loss:3.908975 +step:2850 train loss:3.967488 +step:2851 train loss:3.920017 +step:2852 train loss:3.999881 +step:2853 train loss:3.915200 +step:2854 train loss:3.851751 +step:2855 train loss:3.936231 +step:2856 train loss:3.860351 +step:2857 train loss:3.962379 +step:2858 train loss:3.918772 +step:2859 train loss:3.904667 +step:2860 train loss:3.901629 +step:2861 train loss:3.883893 +step:2862 train loss:3.906805 +step:2863 train loss:3.891760 +step:2864 train loss:3.899252 +step:2865 train loss:3.972663 +step:2866 train loss:3.989326 +step:2867 train loss:3.924873 +step:2868 train loss:3.922269 +step:2869 train loss:3.885749 +step:2870 train loss:3.968475 +step:2871 train loss:3.965492 +step:2872 train loss:3.927556 +step:2873 train loss:3.940236 +step:2874 train loss:3.912306 +step:2875 train loss:3.868028 +step:2876 train loss:3.919090 +step:2877 train loss:3.897081 +step:2878 train loss:3.910398 +step:2879 train loss:3.877439 +step:2880 train loss:3.897963 +step:2881 train loss:3.889411 +step:2882 train loss:3.822729 +step:2883 train loss:3.911133 +step:2884 train loss:3.973580 +step:2885 train loss:3.874258 +step:2886 train loss:3.916952 +step:2887 train loss:3.942191 +step:2888 train loss:3.916604 +step:2889 train loss:3.902591 +step:2890 train loss:3.875247 +step:2891 train loss:3.913821 +step:2892 train loss:3.924191 +step:2893 train loss:3.907284 +step:2894 train loss:3.875742 +step:2895 train loss:3.923890 +step:2896 train loss:3.971760 +step:2897 train loss:3.950105 +step:2898 train loss:4.080506 +step:2899 train loss:3.841202 +step:2900 train loss:3.917972 +step:2901 train loss:3.864808 +step:2902 train loss:3.864564 +step:2903 train loss:3.880821 +step:2904 train loss:3.909580 +step:2905 train loss:3.968425 +step:2906 train loss:3.936423 +step:2907 train loss:4.106506 +step:2908 train loss:3.859221 +step:2909 train loss:3.934814 +step:2910 train loss:3.904789 +step:2911 train loss:3.934093 +step:2912 train loss:3.895797 +step:2913 train loss:3.926491 +step:2914 train loss:3.955284 +step:2915 train loss:3.954229 +step:2916 train loss:3.914103 +step:2917 train loss:3.941184 +step:2918 train loss:3.935040 +step:2919 train loss:3.876797 +step:2920 train loss:3.934184 +step:2921 train loss:3.891019 +step:2922 train loss:3.912573 +step:2923 train loss:3.980839 +step:2924 train loss:3.914407 +step:2925 train loss:3.867840 +step:2926 train loss:3.957119 +step:2927 train loss:3.865277 +step:2928 train loss:3.836994 +step:2929 train loss:3.851224 +step:2930 train loss:3.869419 +step:2931 train loss:4.023112 +step:2932 train loss:3.941700 +step:2933 train loss:3.909139 +step:2934 train loss:3.902318 +step:2935 train loss:3.926034 +step:2936 train loss:3.875535 +step:2937 train loss:3.889976 +step:2938 train loss:3.908494 +step:2939 train loss:3.982125 +step:2940 train loss:3.883652 +step:2941 train loss:3.918031 +step:2942 train loss:3.880400 +step:2943 train loss:4.149985 +step:2944 train loss:3.983624 +step:2945 train loss:3.940062 +step:2946 train loss:3.948428 +step:2947 train loss:3.912446 +step:2948 train loss:3.874118 +step:2949 train loss:3.963025 +step:2950 train loss:3.912744 +step:2951 train loss:3.812635 +step:2952 train loss:3.883460 +step:2953 train loss:3.798560 +step:2954 train loss:3.887518 +step:2955 train loss:3.955244 +step:2956 train loss:3.906619 +step:2957 train loss:3.907422 +step:2958 train loss:3.862932 +step:2959 train loss:3.885858 +step:2960 train loss:3.977982 +step:2961 train 
loss:3.843016 +step:2962 train loss:3.919371 +step:2963 train loss:3.908514 +step:2964 train loss:3.887935 +step:2965 train loss:3.915679 +step:2966 train loss:3.890243 +step:2967 train loss:3.889869 +step:2968 train loss:3.861102 +step:2969 train loss:3.874489 +step:2970 train loss:3.938893 +step:2971 train loss:3.873705 +step:2972 train loss:3.852682 +step:2973 train loss:3.848612 +step:2974 train loss:3.890065 +step:2975 train loss:3.855290 +step:2976 train loss:3.892585 +step:2977 train loss:3.884788 +step:2978 train loss:3.967398 +step:2979 train loss:3.949051 +step:2980 train loss:3.952496 +step:2981 train loss:3.917485 +step:2982 train loss:3.901567 +step:2983 train loss:3.858268 +step:2984 train loss:3.826820 +step:2985 train loss:3.940056 +step:2986 train loss:3.838008 +step:2987 train loss:3.963654 +step:2988 train loss:3.886721 +step:2989 train loss:3.920526 +step:2990 train loss:3.874095 +step:2991 train loss:3.944049 +step:2992 train loss:3.938028 +step:2993 train loss:3.904788 +step:2994 train loss:3.888778 +step:2995 train loss:3.958330 +step:2996 train loss:3.885525 +step:2997 train loss:3.796050 +step:2998 train loss:3.905795 +step:2999 train loss:3.942981 +step:3000 validation loss:3.838786 total_sharp:8.0016e-03 L1_sharp:2.7078e-01 L2_sharp:1.8983e-01 L3_sharp:2.2670e-01 L4_sharp:1.4434e-01 L5_sharp:1.3037e-01 L6_sharp:1.3696e-01 L7_sharp:1.3793e-01 L8_sharp:1.2595e-01 L9_sharp:1.1097e-01 L10_sharp:8.8712e-02 L11_sharp:7.2693e-02 L12_sharp:1.0442e-01 total_fnorm:1.3316e+00 total_l1_linf:7.3523e+03 total_spectral:1.3316e+00 L1_fnorm:3.2013e-02 L2_fnorm:3.0117e-02 L3_fnorm:2.9334e-02 L4_fnorm:3.0625e-02 L5_fnorm:3.1116e-02 L6_fnorm:3.1782e-02 L7_fnorm:3.1880e-02 L8_fnorm:3.1884e-02 L9_fnorm:3.1984e-02 L10_fnorm:3.2103e-02 L11_fnorm:3.1887e-02 L12_fnorm:3.2089e-02 L1_l1linf:2.7145e-01 L2_l1linf:2.9267e-01 L3_l1linf:2.9721e-01 L4_l1linf:3.0136e-01 L5_l1linf:2.8109e-01 L6_l1linf:2.7891e-01 L7_l1linf:2.6047e-01 L8_l1linf:2.7044e-01 L9_l1linf:2.8091e-01 L10_l1linf:2.9190e-01 L11_l1linf:2.9903e-01 L12_l1linf:2.8450e-01 L1_spectral:6.1529e-03 L2_spectral:6.6297e-03 L3_spectral:6.7127e-03 L4_spectral:6.8188e-03 L5_spectral:6.3107e-03 L6_spectral:6.2572e-03 L7_spectral:5.8848e-03 L8_spectral:6.1296e-03 L9_spectral:6.2754e-03 L10_spectral:6.5894e-03 L11_spectral:6.7469e-03 L12_spectral:6.4692e-03 ip_v_neg_g:1.0399e-02 cos_v_neg_g:1.0705e-03 v_norm:1.3316e+00 g_norm:7.2950e+00 hv_norm:2.3495e+00 cos_v_hv:4.5349e-03 hg_norm:2.5103e+03 cos_g_hg:5.7313e-01 v_par:3.7485e-05 v_perp:1.3316e+00 L1_cos_v_neg_g:1.7287e-02 L1_v_norm:3.2013e-02 L2_cos_v_neg_g:1.7303e-02 L2_v_norm:3.0117e-02 L3_cos_v_neg_g:1.8225e-02 L3_v_norm:2.9334e-02 L4_cos_v_neg_g:1.5117e-02 L4_v_norm:3.0625e-02 L5_cos_v_neg_g:1.3214e-02 L5_v_norm:3.1116e-02 L6_cos_v_neg_g:1.2132e-02 L6_v_norm:3.1782e-02 L7_cos_v_neg_g:1.1689e-02 L7_v_norm:3.1880e-02 L8_cos_v_neg_g:1.2561e-02 L8_v_norm:3.1884e-02 L9_cos_v_neg_g:1.1373e-02 L9_v_norm:3.1984e-02 L10_cos_v_neg_g:9.9007e-03 L10_v_norm:3.2103e-02 L11_cos_v_neg_g:6.7359e-03 L11_v_norm:3.1887e-02 L12_cos_v_neg_g:5.5676e-03 L12_v_norm:3.2089e-02 +step:3000 train loss:3.838709 +step:3001 train loss:3.894858 +step:3002 train loss:3.891200 +step:3003 train loss:3.888407 +step:3004 train loss:3.914948 +step:3005 train loss:3.814214 +step:3006 train loss:3.865234 +step:3007 train loss:3.895723 +step:3008 train loss:3.937999 +step:3009 train loss:3.899346 +step:3010 train loss:3.912236 +step:3011 train loss:3.903879 +step:3012 train loss:3.878251 +step:3013 train loss:3.920250 +step:3014 
train loss:3.879097 +step:3015 train loss:3.876565 +step:3016 train loss:3.904281 +step:3017 train loss:3.917356 +step:3018 train loss:3.849155 +step:3019 train loss:3.886281 +step:3020 train loss:3.903943 +step:3021 train loss:3.876334 +step:3022 train loss:3.963617 +step:3023 train loss:3.910050 +step:3024 train loss:3.899797 +step:3025 train loss:3.907658 +step:3026 train loss:3.884443 +step:3027 train loss:3.856713 +step:3028 train loss:3.909462 +step:3029 train loss:3.897163 +step:3030 train loss:3.869885 +step:3031 train loss:3.855211 +step:3032 train loss:3.839941 +step:3033 train loss:3.872812 +step:3034 train loss:3.914155 +step:3035 train loss:3.894074 +step:3036 train loss:3.852612 +step:3037 train loss:3.818290 +step:3038 train loss:3.935209 +step:3039 train loss:3.813221 +step:3040 train loss:3.804969 +step:3041 train loss:3.929751 +step:3042 train loss:3.869457 +step:3043 train loss:3.923790 +step:3044 train loss:3.818384 +step:3045 train loss:3.869311 +step:3046 train loss:3.838743 +step:3047 train loss:3.875371 +step:3048 train loss:3.832186 +step:3049 train loss:3.917787 +step:3050 train loss:3.801963 +step:3051 train loss:3.825734 +step:3052 train loss:3.840201 +step:3053 train loss:3.905579 +step:3054 train loss:3.979469 +step:3055 train loss:3.818048 +step:3056 train loss:3.854273 +step:3057 train loss:3.886126 +step:3058 train loss:3.834658 +step:3059 train loss:3.866929 +step:3060 train loss:3.858399 +step:3061 train loss:3.849758 +step:3062 train loss:3.895842 +step:3063 train loss:3.885053 +step:3064 train loss:3.903150 +step:3065 train loss:3.919195 +step:3066 train loss:3.825076 +step:3067 train loss:3.867580 +step:3068 train loss:3.922557 +step:3069 train loss:3.932460 +step:3070 train loss:3.867257 +step:3071 train loss:3.879564 +step:3072 train loss:3.882203 +step:3073 train loss:3.919658 +step:3074 train loss:3.856069 +step:3075 train loss:3.890124 +step:3076 train loss:3.825415 +step:3077 train loss:3.824068 +step:3078 train loss:3.849221 +step:3079 train loss:3.899411 +step:3080 train loss:3.891411 +step:3081 train loss:3.941755 +step:3082 train loss:3.908991 +step:3083 train loss:3.844002 +step:3084 train loss:3.922925 +step:3085 train loss:3.851274 +step:3086 train loss:3.910935 +step:3087 train loss:3.880720 +step:3088 train loss:3.963877 +step:3089 train loss:3.835409 +step:3090 train loss:3.908668 +step:3091 train loss:3.826900 +step:3092 train loss:3.860365 +step:3093 train loss:3.877272 +step:3094 train loss:3.866259 +step:3095 train loss:3.943609 +step:3096 train loss:3.875935 +step:3097 train loss:3.890562 +step:3098 train loss:3.863118 +step:3099 train loss:3.874744 +step:3100 train loss:3.897492 +step:3101 train loss:3.980751 +step:3102 train loss:3.906737 +step:3103 train loss:3.835464 +step:3104 train loss:3.916502 +step:3105 train loss:3.888783 +step:3106 train loss:3.881102 +step:3107 train loss:3.863192 +step:3108 train loss:3.843735 +step:3109 train loss:3.893864 +step:3110 train loss:3.828503 +step:3111 train loss:3.860550 +step:3112 train loss:3.797844 +step:3113 train loss:3.917098 +step:3114 train loss:3.827219 +step:3115 train loss:3.872113 +step:3116 train loss:3.746310 +step:3117 train loss:3.772947 +step:3118 train loss:3.868149 +step:3119 train loss:3.881580 +step:3120 train loss:3.879828 +step:3121 train loss:3.831269 +step:3122 train loss:3.906387 +step:3123 train loss:3.826603 +step:3124 train loss:3.886019 +step:3125 train loss:3.900569 +step:3126 train loss:4.002383 +step:3127 train loss:3.849920 +step:3128 train loss:3.880393 
+step:3129 train loss:3.859648 +step:3130 train loss:3.834194 +step:3131 train loss:3.914038 +step:3132 train loss:3.902191 +step:3133 train loss:3.869695 +step:3134 train loss:3.770428 +step:3135 train loss:3.857708 +step:3136 train loss:3.836880 +step:3137 train loss:3.966716 +step:3138 train loss:3.868460 +step:3139 train loss:3.849106 +step:3140 train loss:3.870499 +step:3141 train loss:3.876619 +step:3142 train loss:3.811831 +step:3143 train loss:3.900900 +step:3144 train loss:3.843729 +step:3145 train loss:3.830878 +step:3146 train loss:3.839616 +step:3147 train loss:3.953363 +step:3148 train loss:3.854987 +step:3149 train loss:3.910838 +step:3150 train loss:3.892880 +step:3151 train loss:3.867559 +step:3152 train loss:3.860538 +step:3153 train loss:3.826491 +step:3154 train loss:3.902510 +step:3155 train loss:3.851470 +step:3156 train loss:3.896975 +step:3157 train loss:3.903907 +step:3158 train loss:3.868815 +step:3159 train loss:3.809612 +step:3160 train loss:3.861322 +step:3161 train loss:3.823291 +step:3162 train loss:3.888796 +step:3163 train loss:3.867820 +step:3164 train loss:3.848691 +step:3165 train loss:3.862855 +step:3166 train loss:3.903711 +step:3167 train loss:3.865675 +step:3168 train loss:3.940008 +step:3169 train loss:3.856053 +step:3170 train loss:3.843336 +step:3171 train loss:3.824335 +step:3172 train loss:3.836542 +step:3173 train loss:3.778125 +step:3174 train loss:3.886959 +step:3175 train loss:3.861527 +step:3176 train loss:3.871219 +step:3177 train loss:3.835046 +step:3178 train loss:3.815530 +step:3179 train loss:3.887236 +step:3180 train loss:3.821485 +step:3181 train loss:3.902532 +step:3182 train loss:3.909884 +step:3183 train loss:3.851279 +step:3184 train loss:3.850773 +step:3185 train loss:3.911318 +step:3186 train loss:3.866277 +step:3187 train loss:3.885571 +step:3188 train loss:3.923347 +step:3189 train loss:3.881714 +step:3190 train loss:3.825131 +step:3191 train loss:3.835566 +step:3192 train loss:3.795619 +step:3193 train loss:3.879894 +step:3194 train loss:3.836815 +step:3195 train loss:3.828968 +step:3196 train loss:3.872304 +step:3197 train loss:3.838544 +step:3198 train loss:3.858965 +step:3199 train loss:3.856378 +step:3200 train loss:3.856321 +step:3201 train loss:3.829197 +step:3202 train loss:3.877332 +step:3203 train loss:3.949185 +step:3204 train loss:3.910223 +step:3205 train loss:3.758389 +step:3206 train loss:4.039385 +step:3207 train loss:3.802885 +step:3208 train loss:3.863837 +step:3209 train loss:3.859193 +step:3210 train loss:3.840052 +step:3211 train loss:3.865164 +step:3212 train loss:3.878239 +step:3213 train loss:3.815797 +step:3214 train loss:3.921590 +step:3215 train loss:3.922708 +step:3216 train loss:3.797174 +step:3217 train loss:3.872616 +step:3218 train loss:3.921429 +step:3219 train loss:3.833178 +step:3220 train loss:3.901630 +step:3221 train loss:3.815378 +step:3222 train loss:3.858837 +step:3223 train loss:3.872162 +step:3224 train loss:3.892232 +step:3225 train loss:3.810122 +step:3226 train loss:3.842410 +step:3227 train loss:3.871710 +step:3228 train loss:3.866353 +step:3229 train loss:3.902907 +step:3230 train loss:3.912224 +step:3231 train loss:3.850250 +step:3232 train loss:3.860725 +step:3233 train loss:3.830052 +step:3234 train loss:3.821889 +step:3235 train loss:3.822746 +step:3236 train loss:3.843291 +step:3237 train loss:3.840090 +step:3238 train loss:3.864803 +step:3239 train loss:3.762485 +step:3240 train loss:3.881145 +step:3241 train loss:3.874606 +step:3242 train loss:3.930747 +step:3243 train 
loss:3.877191 +step:3244 train loss:3.885442 +step:3245 train loss:3.794473 +step:3246 train loss:3.918589 +step:3247 train loss:3.859380 +step:3248 train loss:3.878588 +step:3249 train loss:3.824895 +step:3250 validation loss:3.800628 +step:3250 train loss:3.830139 +step:3251 train loss:3.934880 +step:3252 train loss:3.867898 +step:3253 train loss:3.867428 +step:3254 train loss:3.936064 +step:3255 train loss:3.873754 +step:3256 train loss:3.873674 +step:3257 train loss:3.849584 +step:3258 train loss:3.786221 +step:3259 train loss:3.760729 +step:3260 train loss:3.877063 +step:3261 train loss:3.860760 +step:3262 train loss:3.848022 +step:3263 train loss:3.829705 +step:3264 train loss:3.944446 +step:3265 train loss:3.852952 +step:3266 train loss:3.879779 +step:3267 train loss:3.842673 +step:3268 train loss:3.845607 +step:3269 train loss:3.855464 +step:3270 train loss:3.885191 +step:3271 train loss:3.853985 +step:3272 train loss:3.820474 +step:3273 train loss:3.837909 +step:3274 train loss:3.971374 +step:3275 train loss:3.846428 +step:3276 train loss:3.909357 +step:3277 train loss:3.852582 +step:3278 train loss:3.828300 +step:3279 train loss:3.855482 +step:3280 train loss:3.878904 +step:3281 train loss:3.808356 +step:3282 train loss:3.873141 +step:3283 train loss:3.852313 +step:3284 train loss:3.807184 +step:3285 train loss:3.827931 +step:3286 train loss:3.859782 +step:3287 train loss:3.796693 +step:3288 train loss:3.877072 +step:3289 train loss:3.824986 +step:3290 train loss:3.857134 +step:3291 train loss:3.817326 +step:3292 train loss:3.836182 +step:3293 train loss:3.879970 +step:3294 train loss:3.887527 +step:3295 train loss:3.804020 +step:3296 train loss:3.856241 +step:3297 train loss:3.822334 +step:3298 train loss:3.818569 +step:3299 train loss:3.947717 +step:3300 train loss:3.783203 +step:3301 train loss:3.867363 +step:3302 train loss:3.831053 +step:3303 train loss:3.854970 +step:3304 train loss:3.818313 +step:3305 train loss:3.907685 +step:3306 train loss:3.842172 +step:3307 train loss:3.867381 +step:3308 train loss:3.820303 +step:3309 train loss:3.877128 +step:3310 train loss:3.796646 +step:3311 train loss:3.848189 +step:3312 train loss:3.817521 +step:3313 train loss:3.852021 +step:3314 train loss:3.851027 +step:3315 train loss:3.927054 +step:3316 train loss:3.781418 +step:3317 train loss:3.876339 +step:3318 train loss:3.879175 +step:3319 train loss:3.814654 +step:3320 train loss:3.961967 +step:3321 train loss:3.875659 +step:3322 train loss:3.870100 +step:3323 train loss:3.976533 +step:3324 train loss:3.893861 +step:3325 train loss:3.869849 +step:3326 train loss:3.861205 +step:3327 train loss:3.873978 +step:3328 train loss:3.850428 +step:3329 train loss:3.848735 +step:3330 train loss:3.844939 +step:3331 train loss:3.892287 +step:3332 train loss:3.914191 +step:3333 train loss:3.874722 +step:3334 train loss:3.815031 +step:3335 train loss:3.817961 +step:3336 train loss:3.864132 +step:3337 train loss:3.854152 +step:3338 train loss:3.848143 +step:3339 train loss:3.836045 +step:3340 train loss:3.874732 +step:3341 train loss:3.826808 +step:3342 train loss:3.874058 +step:3343 train loss:3.808496 +step:3344 train loss:3.863490 +step:3345 train loss:3.820463 +step:3346 train loss:3.830084 +step:3347 train loss:3.840678 +step:3348 train loss:3.846573 +step:3349 train loss:3.845075 +step:3350 train loss:3.865075 +step:3351 train loss:3.920872 +step:3352 train loss:3.857104 +step:3353 train loss:3.955831 +step:3354 train loss:3.804066 +step:3355 train loss:3.912317 +step:3356 train loss:3.859526 
+step:3357 train loss:3.872077 +step:3358 train loss:3.813452 +step:3359 train loss:3.844044 +step:3360 train loss:3.833663 +step:3361 train loss:3.837448 +step:3362 train loss:3.827315 +step:3363 train loss:3.827754 +step:3364 train loss:3.813645 +step:3365 train loss:3.848790 +step:3366 train loss:3.880823 +step:3367 train loss:3.831601 +step:3368 train loss:3.925803 +step:3369 train loss:3.836904 +step:3370 train loss:3.924565 +step:3371 train loss:3.886474 +step:3372 train loss:3.856041 +step:3373 train loss:3.862812 +step:3374 train loss:3.910769 +step:3375 train loss:3.843743 +step:3376 train loss:3.847869 +step:3377 train loss:3.835755 +step:3378 train loss:3.814384 +step:3379 train loss:3.896465 +step:3380 train loss:3.872102 +step:3381 train loss:3.861223 +step:3382 train loss:3.873052 +step:3383 train loss:3.887117 +step:3384 train loss:3.812026 +step:3385 train loss:3.865658 +step:3386 train loss:3.844548 +step:3387 train loss:3.917009 +step:3388 train loss:3.822067 +step:3389 train loss:3.999862 +step:3390 train loss:3.756331 +step:3391 train loss:3.844956 +step:3392 train loss:3.826743 +step:3393 train loss:3.858444 +step:3394 train loss:3.814235 +step:3395 train loss:3.882254 +step:3396 train loss:3.799368 +step:3397 train loss:3.875075 +step:3398 train loss:3.842151 +step:3399 train loss:3.859670 +step:3400 train loss:3.807977 +step:3401 train loss:3.840095 +step:3402 train loss:3.999879 +step:3403 train loss:3.884202 +step:3404 train loss:4.000657 +step:3405 train loss:3.853695 +step:3406 train loss:3.840990 +step:3407 train loss:3.833884 +step:3408 train loss:3.816756 +step:3409 train loss:3.785274 +step:3410 train loss:3.817898 +step:3411 train loss:3.882020 +step:3412 train loss:3.812048 +step:3413 train loss:3.796609 +step:3414 train loss:3.836582 +step:3415 train loss:3.809710 +step:3416 train loss:3.812746 +step:3417 train loss:3.897443 +step:3418 train loss:3.893438 +step:3419 train loss:3.857464 +step:3420 train loss:3.827672 +step:3421 train loss:3.862011 +step:3422 train loss:3.876090 +step:3423 train loss:3.899317 +step:3424 train loss:3.782254 +step:3425 train loss:3.805011 +step:3426 train loss:3.798324 +step:3427 train loss:3.859369 +step:3428 train loss:3.788733 +step:3429 train loss:3.848242 +step:3430 train loss:3.810863 +step:3431 train loss:3.867856 +step:3432 train loss:3.850800 +step:3433 train loss:3.811826 +step:3434 train loss:3.900743 +step:3435 train loss:3.834869 +step:3436 train loss:3.930551 +step:3437 train loss:3.754711 +step:3438 train loss:3.860267 +step:3439 train loss:3.833767 +step:3440 train loss:3.929527 +step:3441 train loss:3.825956 +step:3442 train loss:3.889002 +step:3443 train loss:3.828540 +step:3444 train loss:3.843353 +step:3445 train loss:3.892284 +step:3446 train loss:3.796547 +step:3447 train loss:3.870158 +step:3448 train loss:3.826317 +step:3449 train loss:3.861220 +step:3450 train loss:3.768192 +step:3451 train loss:3.882268 +step:3452 train loss:3.835003 +step:3453 train loss:3.883007 +step:3454 train loss:3.911964 +step:3455 train loss:3.979536 +step:3456 train loss:3.913060 +step:3457 train loss:3.902123 +step:3458 train loss:3.830546 +step:3459 train loss:3.842965 +step:3460 train loss:3.786588 +step:3461 train loss:3.850575 +step:3462 train loss:3.849408 +step:3463 train loss:3.823580 +step:3464 train loss:3.870943 +step:3465 train loss:3.806729 +step:3466 train loss:3.871683 +step:3467 train loss:3.832075 +step:3468 train loss:3.839731 +step:3469 train loss:3.855027 +step:3470 train loss:3.836915 +step:3471 train 
loss:3.873045 +step:3472 train loss:3.761814 +step:3473 train loss:3.878851 +step:3474 train loss:3.783191 +step:3475 train loss:3.858520 +step:3476 train loss:3.830235 +step:3477 train loss:3.848382 +step:3478 train loss:3.829938 +step:3479 train loss:3.852367 +step:3480 train loss:3.872426 +step:3481 train loss:3.856771 +step:3482 train loss:3.833039 +step:3483 train loss:3.978898 +step:3484 train loss:3.819982 +step:3485 train loss:3.810019 +step:3486 train loss:3.854180 +step:3487 train loss:3.903745 +step:3488 train loss:3.804451 +step:3489 train loss:3.856685 +step:3490 train loss:3.831270 +step:3491 train loss:3.859119 +step:3492 train loss:3.900685 +step:3493 train loss:3.866018 +step:3494 train loss:3.864405 +step:3495 train loss:3.840280 +step:3496 train loss:3.805254 +step:3497 train loss:3.914932 +step:3498 train loss:3.865434 +step:3499 train loss:3.801961 +step:3500 validation loss:3.772571 total_sharp:7.6546e-03 L1_sharp:2.2327e-01 L2_sharp:1.2148e-01 L3_sharp:2.0688e-01 L4_sharp:1.3539e-01 L5_sharp:1.1264e-01 L6_sharp:1.2084e-01 L7_sharp:1.5052e-01 L8_sharp:1.2891e-01 L9_sharp:1.1315e-01 L10_sharp:9.3502e-02 L11_sharp:7.0384e-02 L12_sharp:8.6624e-02 total_fnorm:1.3372e+00 total_l1_linf:7.3802e+03 total_spectral:1.3372e+00 L1_fnorm:3.1925e-02 L2_fnorm:3.0236e-02 L3_fnorm:2.9508e-02 L4_fnorm:3.0606e-02 L5_fnorm:3.1084e-02 L6_fnorm:3.1799e-02 L7_fnorm:3.1680e-02 L8_fnorm:3.1897e-02 L9_fnorm:3.1893e-02 L10_fnorm:3.1956e-02 L11_fnorm:3.1978e-02 L12_fnorm:3.1967e-02 L1_l1linf:2.6591e-01 L2_l1linf:2.7249e-01 L3_l1linf:2.8521e-01 L4_l1linf:2.9435e-01 L5_l1linf:2.7583e-01 L6_l1linf:2.6321e-01 L7_l1linf:2.4633e-01 L8_l1linf:2.6250e-01 L9_l1linf:2.8254e-01 L10_l1linf:2.9598e-01 L11_l1linf:3.1060e-01 L12_l1linf:2.9736e-01 L1_spectral:5.9778e-03 L2_spectral:6.1510e-03 L3_spectral:6.4528e-03 L4_spectral:6.6137e-03 L5_spectral:6.2049e-03 L6_spectral:5.8831e-03 L7_spectral:5.5779e-03 L8_spectral:5.9318e-03 L9_spectral:6.3561e-03 L10_spectral:6.6725e-03 L11_spectral:6.9882e-03 L12_spectral:6.7795e-03 ip_v_neg_g:9.0083e-03 cos_v_neg_g:8.8207e-04 v_norm:1.3372e+00 g_norm:7.6372e+00 hv_norm:2.3688e+00 cos_v_hv:4.3212e-03 hg_norm:3.3594e+03 cos_g_hg:6.0398e-01 v_par:3.3056e-05 v_perp:1.3372e+00 L1_cos_v_neg_g:1.2139e-02 L1_v_norm:3.1925e-02 L2_cos_v_neg_g:1.1289e-02 L2_v_norm:3.0236e-02 L3_cos_v_neg_g:1.4454e-02 L3_v_norm:2.9508e-02 L4_cos_v_neg_g:1.1476e-02 L4_v_norm:3.0606e-02 L5_cos_v_neg_g:1.0910e-02 L5_v_norm:3.1084e-02 L6_cos_v_neg_g:1.0094e-02 L6_v_norm:3.1799e-02 L7_cos_v_neg_g:1.1265e-02 L7_v_norm:3.1680e-02 L8_cos_v_neg_g:1.1000e-02 L8_v_norm:3.1897e-02 L9_cos_v_neg_g:9.6460e-03 L9_v_norm:3.1893e-02 L10_cos_v_neg_g:8.1504e-03 L10_v_norm:3.1956e-02 L11_cos_v_neg_g:7.9632e-03 L11_v_norm:3.1978e-02 L12_cos_v_neg_g:7.4098e-03 L12_v_norm:3.1967e-02 +step:3500 train loss:3.816271 +step:3501 train loss:3.943182 +step:3502 train loss:3.923365 +step:3503 train loss:3.875827 +step:3504 train loss:3.827188 +step:3505 train loss:3.840868 +step:3506 train loss:3.735180 +step:3507 train loss:3.857527 +step:3508 train loss:3.808099 +step:3509 train loss:3.869323 +step:3510 train loss:3.805050 +step:3511 train loss:3.836314 +step:3512 train loss:3.980098 +step:3513 train loss:3.799998 +step:3514 train loss:3.815463 +step:3515 train loss:4.062299 +step:3516 train loss:3.859424 +step:3517 train loss:3.818502 +step:3518 train loss:3.821730 +step:3519 train loss:3.816589 +step:3520 train loss:3.848526 +step:3521 train loss:3.841214 +step:3522 train loss:3.744947 +step:3523 train loss:3.853626 +step:3524 
train loss:3.836673 +step:3525 train loss:3.823320 +step:3526 train loss:3.848917 +step:3527 train loss:3.801989 +step:3528 train loss:3.849099 +step:3529 train loss:3.825572 +step:3530 train loss:3.816959 +step:3531 train loss:3.817332 +step:3532 train loss:3.988208 +step:3533 train loss:3.819230 +step:3534 train loss:3.834565 +step:3535 train loss:3.812984 +step:3536 train loss:3.810960 +step:3537 train loss:3.816445 +step:3538 train loss:3.852281 +step:3539 train loss:3.800012 +step:3540 train loss:3.865021 +step:3541 train loss:3.828261 +step:3542 train loss:3.843685 +step:3543 train loss:3.761535 +step:3544 train loss:3.777741 +step:3545 train loss:3.785899 +step:3546 train loss:3.851603 +step:3547 train loss:3.862150 +step:3548 train loss:3.835513 +step:3549 train loss:3.831609 +step:3550 train loss:3.815429 +step:3551 train loss:3.852531 +step:3552 train loss:3.744461 +step:3553 train loss:3.866284 +step:3554 train loss:3.852297 +step:3555 train loss:3.845195 +step:3556 train loss:3.869799 +step:3557 train loss:3.852612 +step:3558 train loss:3.830002 +step:3559 train loss:3.771276 +step:3560 train loss:3.866302 +step:3561 train loss:3.857307 +step:3562 train loss:4.032327 +step:3563 train loss:3.886191 +step:3564 train loss:3.847543 +step:3565 train loss:3.850024 +step:3566 train loss:3.825576 +step:3567 train loss:3.766979 +step:3568 train loss:3.790434 +step:3569 train loss:3.877107 +step:3570 train loss:3.896524 +step:3571 train loss:3.883655 +step:3572 train loss:3.867189 +step:3573 train loss:3.827264 +step:3574 train loss:3.824631 +step:3575 train loss:3.814836 +step:3576 train loss:3.803691 +step:3577 train loss:3.806029 +step:3578 train loss:3.895360 +step:3579 train loss:3.801838 +step:3580 train loss:3.882514 +step:3581 train loss:3.823334 +step:3582 train loss:3.876122 +step:3583 train loss:3.821381 +step:3584 train loss:3.787087 +step:3585 train loss:3.843302 +step:3586 train loss:3.786066 +step:3587 train loss:3.887724 +step:3588 train loss:4.011168 +step:3589 train loss:3.845011 +step:3590 train loss:3.833666 +step:3591 train loss:3.839507 +step:3592 train loss:3.802513 +step:3593 train loss:3.773065 +step:3594 train loss:3.826113 +step:3595 train loss:3.800768 +step:3596 train loss:3.875060 +step:3597 train loss:3.854541 +step:3598 train loss:3.806022 +step:3599 train loss:3.857708 +step:3600 train loss:3.802421 +step:3601 train loss:3.811002 +step:3602 train loss:3.803975 +step:3603 train loss:3.816790 +step:3604 train loss:3.845161 +step:3605 train loss:3.943251 +step:3606 train loss:3.846849 +step:3607 train loss:3.832885 +step:3608 train loss:3.848532 +step:3609 train loss:3.832659 +step:3610 train loss:3.801879 +step:3611 train loss:3.806159 +step:3612 train loss:3.870438 +step:3613 train loss:3.845803 +step:3614 train loss:3.789360 +step:3615 train loss:3.826420 +step:3616 train loss:3.788233 +step:3617 train loss:3.854538 +step:3618 train loss:3.813115 +step:3619 train loss:3.799754 +step:3620 train loss:3.814194 +step:3621 train loss:3.780360 +step:3622 train loss:3.885810 +step:3623 train loss:3.871476 +step:3624 train loss:3.841984 +step:3625 train loss:3.825264 +step:3626 train loss:3.826340 +step:3627 train loss:3.822625 +step:3628 train loss:3.813143 +step:3629 train loss:3.817155 +step:3630 train loss:3.897322 +step:3631 train loss:3.824003 +step:3632 train loss:3.859407 +step:3633 train loss:3.815495 +step:3634 train loss:3.814139 +step:3635 train loss:3.805282 +step:3636 train loss:3.872061 +step:3637 train loss:3.953490 +step:3638 train loss:3.865366 
+step:3639 train loss:3.859368 +step:3640 train loss:3.863912 +step:3641 train loss:3.903387 +step:3642 train loss:3.798457 +step:3643 train loss:3.970695 +step:3644 train loss:3.864468 +step:3645 train loss:3.823154 +step:3646 train loss:3.950948 +step:3647 train loss:3.840731 +step:3648 train loss:3.835760 +step:3649 train loss:3.783962 +step:3650 train loss:3.823756 +step:3651 train loss:3.818857 +step:3652 train loss:3.809292 +step:3653 train loss:3.734085 +step:3654 train loss:3.803846 +step:3655 train loss:3.792569 +step:3656 train loss:3.825350 +step:3657 train loss:3.842208 +step:3658 train loss:3.839047 +step:3659 train loss:3.827007 +step:3660 train loss:3.792791 +step:3661 train loss:3.825154 +step:3662 train loss:3.797289 +step:3663 train loss:3.836431 +step:3664 train loss:3.788116 +step:3665 train loss:3.829494 +step:3666 train loss:3.872803 +step:3667 train loss:3.956020 +step:3668 train loss:3.843007 +step:3669 train loss:3.799443 +step:3670 train loss:3.846387 +step:3671 train loss:3.803817 +step:3672 train loss:3.839911 +step:3673 train loss:3.825945 +step:3674 train loss:3.841335 +step:3675 train loss:3.854737 +step:3676 train loss:3.822001 +step:3677 train loss:3.779343 +step:3678 train loss:3.845028 +step:3679 train loss:3.744712 +step:3680 train loss:3.845376 +step:3681 train loss:3.876845 +step:3682 train loss:3.855442 +step:3683 train loss:3.804575 +step:3684 train loss:3.796875 +step:3685 train loss:3.826324 +step:3686 train loss:3.857258 +step:3687 train loss:3.806380 +step:3688 train loss:3.785837 +step:3689 train loss:3.818827 +step:3690 train loss:3.808636 +step:3691 train loss:3.790541 +step:3692 train loss:3.844721 +step:3693 train loss:3.979658 +step:3694 train loss:3.798057 +step:3695 train loss:3.856073 +step:3696 train loss:3.819858 +step:3697 train loss:3.811487 +step:3698 train loss:3.749451 +step:3699 train loss:3.776338 +step:3700 train loss:3.811201 +step:3701 train loss:3.825330 +step:3702 train loss:3.847570 +step:3703 train loss:3.804749 +step:3704 train loss:3.846799 +step:3705 train loss:3.831558 +step:3706 train loss:3.780962 +step:3707 train loss:3.837032 +step:3708 train loss:3.812043 +step:3709 train loss:3.729148 +step:3710 train loss:3.861942 +step:3711 train loss:3.807458 +step:3712 train loss:3.849596 +step:3713 train loss:3.797587 +step:3714 train loss:3.814035 +step:3715 train loss:3.935597 +step:3716 train loss:3.835853 +step:3717 train loss:3.814850 +step:3718 train loss:3.817522 +step:3719 train loss:3.813629 +step:3720 train loss:3.822703 +step:3721 train loss:3.877889 +step:3722 train loss:3.888835 +step:3723 train loss:3.780085 +step:3724 train loss:3.833678 +step:3725 train loss:3.813076 +step:3726 train loss:3.831909 +step:3727 train loss:3.904439 +step:3728 train loss:3.866482 +step:3729 train loss:3.769821 +step:3730 train loss:3.788278 +step:3731 train loss:3.807120 +step:3732 train loss:3.959510 +step:3733 train loss:3.820784 +step:3734 train loss:3.825068 +step:3735 train loss:3.765927 +step:3736 train loss:3.820233 +step:3737 train loss:3.873425 +step:3738 train loss:3.890651 +step:3739 train loss:3.812042 +step:3740 train loss:3.711337 +step:3741 train loss:3.919125 +step:3742 train loss:3.830724 +step:3743 train loss:3.805500 +step:3744 train loss:3.815123 +step:3745 train loss:3.824628 +step:3746 train loss:3.791322 +step:3747 train loss:3.807206 +step:3748 train loss:3.845257 +step:3749 train loss:3.831884 +step:3750 validation loss:3.752796 +step:3750 train loss:3.838192 +step:3751 train loss:3.931526 +step:3752 
train loss:3.861440 +step:3753 train loss:3.777561 +step:3754 train loss:3.833255 +step:3755 train loss:4.002767 +step:3756 train loss:3.791660 +step:3757 train loss:3.783969 +step:3758 train loss:3.810617 +step:3759 train loss:3.759603 +step:3760 train loss:3.759643 +step:3761 train loss:3.809878 +step:3762 train loss:3.800766 +step:3763 train loss:3.802874 +step:3764 train loss:3.797917 +step:3765 train loss:3.790971 +step:3766 train loss:3.764349 +step:3767 train loss:3.845411 +step:3768 train loss:3.787278 +step:3769 train loss:4.042989 +step:3770 train loss:3.836547 +step:3771 train loss:3.852726 +step:3772 train loss:3.805464 +step:3773 train loss:3.800023 +step:3774 train loss:3.810784 +step:3775 train loss:3.800435 +step:3776 train loss:3.805568 +step:3777 train loss:3.764106 +step:3778 train loss:3.776742 +step:3779 train loss:3.766600 +step:3780 train loss:3.848587 +step:3781 train loss:3.817075 +step:3782 train loss:3.731659 +step:3783 train loss:3.836235 +step:3784 train loss:3.846129 +step:3785 train loss:3.757932 +step:3786 train loss:3.866733 +step:3787 train loss:3.778337 +step:3788 train loss:3.791249 +step:3789 train loss:3.704335 +step:3790 train loss:3.817225 +step:3791 train loss:3.839329 +step:3792 train loss:3.810122 +step:3793 train loss:3.806688 +step:3794 train loss:3.836499 +step:3795 train loss:3.801262 +step:3796 train loss:3.822265 +step:3797 train loss:3.797182 +step:3798 train loss:3.804356 +step:3799 train loss:3.814424 +step:3800 train loss:3.721933 +step:3801 train loss:3.838427 +step:3802 train loss:3.770481 +step:3803 train loss:3.845348 +step:3804 train loss:3.857462 +step:3805 train loss:3.817166 +step:3806 train loss:3.832371 +step:3807 train loss:3.853877 +step:3808 train loss:3.804960 +step:3809 train loss:3.826062 +step:3810 train loss:3.824465 +step:3811 train loss:3.810396 +step:3812 train loss:3.815452 +step:3813 train loss:3.768846 +step:3814 train loss:3.808975 +step:3815 train loss:3.813868 +step:3816 train loss:3.829503 +step:3817 train loss:3.848256 +step:3818 train loss:3.821397 +step:3819 train loss:3.833140 +step:3820 train loss:3.835592 +step:3821 train loss:3.789459 +step:3822 train loss:3.872445 +step:3823 train loss:3.771703 +step:3824 train loss:3.781691 +step:3825 train loss:3.790652 +step:3826 train loss:3.846232 +step:3827 train loss:3.878294 +step:3828 train loss:3.764122 +step:3829 train loss:3.785505 +step:3830 train loss:3.847918 +step:3831 train loss:3.780717 +step:3832 train loss:3.839889 +step:3833 train loss:3.782429 +step:3834 train loss:3.744103 +step:3835 train loss:3.788582 +step:3836 train loss:3.764318 +step:3837 train loss:3.834116 +step:3838 train loss:3.788638 +step:3839 train loss:3.826640 +step:3840 train loss:3.840748 +step:3841 train loss:3.784920 +step:3842 train loss:3.820983 +step:3843 train loss:3.837235 +step:3844 train loss:3.806114 +step:3845 train loss:3.823037 +step:3846 train loss:3.870370 +step:3847 train loss:3.764970 +step:3848 train loss:3.774942 +step:3849 train loss:3.777111 +step:3850 train loss:3.808249 +step:3851 train loss:3.936114 +step:3852 train loss:3.913450 +step:3853 train loss:3.818232 +step:3854 train loss:3.774677 +step:3855 train loss:3.835126 +step:3856 train loss:3.757653 +step:3857 train loss:3.817698 +step:3858 train loss:3.737328 +step:3859 train loss:3.781586 +step:3860 train loss:3.845781 +step:3861 train loss:3.822564 +step:3862 train loss:3.762044 +step:3863 train loss:3.810434 +step:3864 train loss:3.775085 +step:3865 train loss:3.820315 +step:3866 train loss:3.833764 
+step:3867 train loss:3.833556 +step:3868 train loss:3.784372 +step:3869 train loss:3.781110 +step:3870 train loss:3.761029 +step:3871 train loss:3.751158 +step:3872 train loss:3.886149 +step:3873 train loss:3.810709 +step:3874 train loss:3.823054 +step:3875 train loss:3.934390 +step:3876 train loss:3.806338 +step:3877 train loss:3.834148 +step:3878 train loss:3.860158 +step:3879 train loss:3.846759 +step:3880 train loss:3.928465 +step:3881 train loss:3.752894 +step:3882 train loss:3.788195 +step:3883 train loss:3.798012 +step:3884 train loss:3.792917 +step:3885 train loss:3.806657 +step:3886 train loss:3.868053 +step:3887 train loss:3.846503 +step:3888 train loss:3.810748 +step:3889 train loss:3.780287 +step:3890 train loss:3.812998 +step:3891 train loss:3.834135 +step:3892 train loss:3.741503 +step:3893 train loss:3.846988 +step:3894 train loss:3.797993 +step:3895 train loss:3.817176 +step:3896 train loss:3.808661 +step:3897 train loss:3.771533 +step:3898 train loss:3.832977 +step:3899 train loss:3.878954 +step:3900 train loss:3.829880 +step:3901 train loss:3.845896 +step:3902 train loss:3.778960 +step:3903 train loss:3.788788 +step:3904 train loss:3.821362 +step:3905 train loss:3.757716 +step:3906 train loss:3.793261 +step:3907 train loss:3.827087 +step:3908 train loss:3.903306 +step:3909 train loss:3.790759 +step:3910 train loss:3.820674 +step:3911 train loss:3.836662 +step:3912 train loss:3.781731 +step:3913 train loss:3.801656 +step:3914 train loss:3.818395 +step:3915 train loss:3.784733 +step:3916 train loss:3.822555 +step:3917 train loss:3.857366 +step:3918 train loss:3.843435 +step:3919 train loss:3.822525 +step:3920 train loss:3.796098 +step:3921 train loss:3.837667 +step:3922 train loss:3.838522 +step:3923 train loss:3.828984 +step:3924 train loss:3.764865 +step:3925 train loss:3.958919 +step:3926 train loss:3.810527 +step:3927 train loss:3.788224 +step:3928 train loss:3.866031 +step:3929 train loss:3.928037 +step:3930 train loss:3.821345 +step:3931 train loss:3.762415 +step:3932 train loss:3.808297 +step:3933 train loss:3.824922 +step:3934 train loss:3.780123 +step:3935 train loss:3.756202 +step:3936 train loss:3.851038 +step:3937 train loss:3.810755 +step:3938 train loss:3.815669 +step:3939 train loss:3.841479 +step:3940 train loss:3.792419 +step:3941 train loss:3.879693 +step:3942 train loss:3.837409 +step:3943 train loss:3.820443 +step:3944 train loss:3.871493 +step:3945 train loss:3.780228 +step:3946 train loss:3.727999 +step:3947 train loss:3.855353 +step:3948 train loss:3.821229 +step:3949 train loss:3.991014 +step:3950 train loss:3.790741 +step:3951 train loss:3.718344 +step:3952 train loss:3.682972 +step:3953 train loss:3.755082 +step:3954 train loss:3.809409 +step:3955 train loss:3.833239 +step:3956 train loss:3.787886 +step:3957 train loss:3.844002 +step:3958 train loss:3.820118 +step:3959 train loss:3.854519 +step:3960 train loss:3.777543 +step:3961 train loss:3.804767 +step:3962 train loss:3.812325 +step:3963 train loss:3.785923 +step:3964 train loss:3.765190 +step:3965 train loss:3.824613 +step:3966 train loss:3.771202 +step:3967 train loss:3.819366 +step:3968 train loss:3.838412 +step:3969 train loss:3.748882 +step:3970 train loss:3.864184 +step:3971 train loss:3.773194 +step:3972 train loss:3.806400 +step:3973 train loss:3.762575 +step:3974 train loss:3.854446 +step:3975 train loss:3.812000 +step:3976 train loss:3.764125 +step:3977 train loss:3.822015 +step:3978 train loss:3.793109 +step:3979 train loss:3.776985 +step:3980 train loss:3.844115 +step:3981 train 
loss:3.780519 +step:3982 train loss:3.797405 +step:3983 train loss:3.783474 +step:3984 train loss:3.816989 +step:3985 train loss:3.798439 +step:3986 train loss:3.805574 +step:3987 train loss:3.816810 +step:3988 train loss:3.761125 +step:3989 train loss:3.830996 +step:3990 train loss:3.817231 +step:3991 train loss:3.831542 +step:3992 train loss:3.793308 +step:3993 train loss:3.826906 +step:3994 train loss:3.768657 +step:3995 train loss:3.828493 +step:3996 train loss:3.746669 +step:3997 train loss:3.824916 +step:3998 train loss:3.707763 +step:3999 train loss:3.860663 +step:4000 validation loss:3.728146 total_sharp:7.6482e-03 L1_sharp:1.9715e-01 L2_sharp:1.6922e-01 L3_sharp:1.7968e-01 L4_sharp:1.0486e-01 L5_sharp:1.0893e-01 L6_sharp:1.2198e-01 L7_sharp:1.6001e-01 L8_sharp:1.4506e-01 L9_sharp:1.1280e-01 L10_sharp:9.0523e-02 L11_sharp:8.6296e-02 L12_sharp:1.2051e-01 total_fnorm:1.3151e+00 total_l1_linf:7.2625e+03 total_spectral:1.3151e+00 L1_fnorm:3.1693e-02 L2_fnorm:3.0321e-02 L3_fnorm:2.9712e-02 L4_fnorm:3.0715e-02 L5_fnorm:3.1396e-02 L6_fnorm:3.1743e-02 L7_fnorm:3.1882e-02 L8_fnorm:3.1953e-02 L9_fnorm:3.1885e-02 L10_fnorm:3.2014e-02 L11_fnorm:3.2077e-02 L12_fnorm:3.2162e-02 L1_l1linf:2.6468e-01 L2_l1linf:2.9040e-01 L3_l1linf:2.9115e-01 L4_l1linf:3.0143e-01 L5_l1linf:2.9854e-01 L6_l1linf:2.7695e-01 L7_l1linf:2.6472e-01 L8_l1linf:2.8148e-01 L9_l1linf:2.8183e-01 L10_l1linf:2.9506e-01 L11_l1linf:3.2117e-01 L12_l1linf:3.2373e-01 L1_spectral:5.9439e-03 L2_spectral:6.5737e-03 L3_spectral:6.5423e-03 L4_spectral:6.7512e-03 L5_spectral:6.6875e-03 L6_spectral:6.2026e-03 L7_spectral:5.9861e-03 L8_spectral:6.3295e-03 L9_spectral:6.3559e-03 L10_spectral:6.6238e-03 L11_spectral:7.1767e-03 L12_spectral:7.2645e-03 ip_v_neg_g:7.0600e-03 cos_v_neg_g:8.3428e-04 v_norm:1.3151e+00 g_norm:6.4350e+00 hv_norm:2.1059e+00 cos_v_hv:4.7760e-03 hg_norm:1.3653e+03 cos_g_hg:5.2168e-01 v_par:2.8026e-05 v_perp:1.3151e+00 L1_cos_v_neg_g:9.7782e-03 L1_v_norm:3.1693e-02 L2_cos_v_neg_g:1.1707e-02 L2_v_norm:3.0321e-02 L3_cos_v_neg_g:1.2096e-02 L3_v_norm:2.9712e-02 L4_cos_v_neg_g:8.4993e-03 L4_v_norm:3.0715e-02 L5_cos_v_neg_g:8.1251e-03 L5_v_norm:3.1396e-02 L6_cos_v_neg_g:8.5896e-03 L6_v_norm:3.1743e-02 L7_cos_v_neg_g:9.5084e-03 L7_v_norm:3.1882e-02 L8_cos_v_neg_g:1.0200e-02 L8_v_norm:3.1953e-02 L9_cos_v_neg_g:8.9388e-03 L9_v_norm:3.1885e-02 L10_cos_v_neg_g:8.4079e-03 L10_v_norm:3.2014e-02 L11_cos_v_neg_g:7.9524e-03 L11_v_norm:3.2077e-02 L12_cos_v_neg_g:8.4907e-03 L12_v_norm:3.2162e-02 +step:4000 train loss:3.739360 +step:4001 train loss:3.818534 +step:4002 train loss:3.796114 +step:4003 train loss:3.829743 +step:4004 train loss:3.741014 +step:4005 train loss:3.831908 +step:4006 train loss:3.839222 +step:4007 train loss:3.762557 +step:4008 train loss:3.717164 +step:4009 train loss:3.804869 +step:4010 train loss:3.781856 +step:4011 train loss:3.788407 +step:4012 train loss:3.800820 +step:4013 train loss:3.775443 +step:4014 train loss:3.788676 +step:4015 train loss:3.784192 +step:4016 train loss:3.790213 +step:4017 train loss:3.756139 +step:4018 train loss:3.693440 +step:4019 train loss:3.750834 +step:4020 train loss:3.818067 +step:4021 train loss:3.763515 +step:4022 train loss:3.768657 +step:4023 train loss:3.786092 +step:4024 train loss:3.687247 +step:4025 train loss:3.817044 +step:4026 train loss:3.804629 +step:4027 train loss:3.813683 +step:4028 train loss:3.829669 +step:4029 train loss:3.859365 +step:4030 train loss:3.774603 +step:4031 train loss:3.818335 +step:4032 train loss:3.774708 +step:4033 train loss:3.806431 +step:4034 
train loss:3.824740 +step:4035 train loss:3.803204 +step:4036 train loss:3.798301 +step:4037 train loss:3.814234 +step:4038 train loss:3.737352 +step:4039 train loss:3.787865 +step:4040 train loss:3.767060 +step:4041 train loss:3.764827 +step:4042 train loss:3.783087 +step:4043 train loss:3.768878 +step:4044 train loss:3.805800 +step:4045 train loss:3.809319 +step:4046 train loss:3.766301 +step:4047 train loss:3.795882 +step:4048 train loss:3.802830 +step:4049 train loss:3.765111 +step:4050 train loss:3.866897 +step:4051 train loss:3.782269 +step:4052 train loss:3.799689 +step:4053 train loss:3.847755 +step:4054 train loss:3.821905 +step:4055 train loss:3.837762 +step:4056 train loss:3.835306 +step:4057 train loss:3.774074 +step:4058 train loss:3.755195 +step:4059 train loss:3.837777 +step:4060 train loss:3.779308 +step:4061 train loss:3.750189 +step:4062 train loss:3.861760 +step:4063 train loss:3.813145 +step:4064 train loss:3.780364 +step:4065 train loss:3.768179 +step:4066 train loss:3.794481 +step:4067 train loss:3.816023 +step:4068 train loss:3.783524 +step:4069 train loss:3.843767 +step:4070 train loss:3.760086 +step:4071 train loss:3.731469 +step:4072 train loss:3.805645 +step:4073 train loss:3.739935 +step:4074 train loss:3.796798 +step:4075 train loss:3.865022 +step:4076 train loss:3.711983 +step:4077 train loss:3.795682 +step:4078 train loss:3.894236 +step:4079 train loss:3.841162 +step:4080 train loss:3.781124 +step:4081 train loss:3.755973 +step:4082 train loss:3.805418 +step:4083 train loss:3.745127 +step:4084 train loss:3.762972 +step:4085 train loss:3.993908 +step:4086 train loss:3.767251 +step:4087 train loss:3.807945 +step:4088 train loss:3.795040 +step:4089 train loss:3.784759 +step:4090 train loss:3.804308 +step:4091 train loss:3.828836 +step:4092 train loss:3.752377 +step:4093 train loss:3.777895 +step:4094 train loss:3.802427 +step:4095 train loss:3.755170 +step:4096 train loss:3.786982 +step:4097 train loss:3.786860 +step:4098 train loss:3.762950 +step:4099 train loss:3.761011 +step:4100 train loss:3.816946 +step:4101 train loss:3.740918 +step:4102 train loss:3.773973 +step:4103 train loss:3.978993 +step:4104 train loss:3.794589 +step:4105 train loss:3.758603 +step:4106 train loss:3.835053 +step:4107 train loss:3.752653 +step:4108 train loss:3.759565 +step:4109 train loss:3.812094 +step:4110 train loss:3.820584 +step:4111 train loss:3.799845 +step:4112 train loss:3.816575 +step:4113 train loss:3.773249 +step:4114 train loss:3.724426 +step:4115 train loss:3.762629 +step:4116 train loss:3.750220 +step:4117 train loss:3.763272 +step:4118 train loss:3.815239 +step:4119 train loss:3.840305 +step:4120 train loss:3.765867 +step:4121 train loss:3.757606 +step:4122 train loss:3.815515 +step:4123 train loss:3.833551 +step:4124 train loss:3.810562 +step:4125 train loss:3.843367 +step:4126 train loss:3.779623 +step:4127 train loss:3.801953 +step:4128 train loss:3.790175 +step:4129 train loss:3.839620 +step:4130 train loss:3.768513 +step:4131 train loss:3.806593 +step:4132 train loss:3.819197 +step:4133 train loss:3.772594 +step:4134 train loss:3.827268 +step:4135 train loss:3.758637 +step:4136 train loss:3.780918 +step:4137 train loss:3.757758 +step:4138 train loss:3.759771 +step:4139 train loss:3.802492 +step:4140 train loss:3.765825 +step:4141 train loss:3.730265 +step:4142 train loss:3.772781 +step:4143 train loss:3.815447 +step:4144 train loss:3.764986 +step:4145 train loss:3.731232 +step:4146 train loss:3.796202 +step:4147 train loss:3.774669 +step:4148 train loss:3.769200 
+step:4149 train loss:3.849169 +step:4150 train loss:3.812591 +step:4151 train loss:3.793577 +step:4152 train loss:3.818155 +step:4153 train loss:3.823430 +step:4154 train loss:3.829611 +step:4155 train loss:3.854487 +step:4156 train loss:3.724808 +step:4157 train loss:3.748344 +step:4158 train loss:3.808017 +step:4159 train loss:3.705360 +step:4160 train loss:3.801575 +step:4161 train loss:3.801195 +step:4162 train loss:3.705832 +step:4163 train loss:3.790671 +step:4164 train loss:3.738507 +step:4165 train loss:3.737448 +step:4166 train loss:3.802641 +step:4167 train loss:3.802413 +step:4168 train loss:3.792784 +step:4169 train loss:3.813779 +step:4170 train loss:3.936817 +step:4171 train loss:3.791430 +step:4172 train loss:3.807380 +step:4173 train loss:3.804339 +step:4174 train loss:3.766971 +step:4175 train loss:3.860391 +step:4176 train loss:3.780933 +step:4177 train loss:3.805021 +step:4178 train loss:3.783865 +step:4179 train loss:3.736281 +step:4180 train loss:3.732604 +step:4181 train loss:3.782499 +step:4182 train loss:3.767508 +step:4183 train loss:3.700773 +step:4184 train loss:3.776263 +step:4185 train loss:3.841132 +step:4186 train loss:3.816714 +step:4187 train loss:3.825698 +step:4188 train loss:3.796627 +step:4189 train loss:3.757823 +step:4190 train loss:3.804644 +step:4191 train loss:3.750994 +step:4192 train loss:3.836881 +step:4193 train loss:3.750482 +step:4194 train loss:3.729930 +step:4195 train loss:3.726222 +step:4196 train loss:3.799907 +step:4197 train loss:3.810501 +step:4198 train loss:3.732299 +step:4199 train loss:3.816188 +step:4200 train loss:3.776134 +step:4201 train loss:3.758049 +step:4202 train loss:3.776623 +step:4203 train loss:3.781894 +step:4204 train loss:3.777792 +step:4205 train loss:3.792140 +step:4206 train loss:3.811880 +step:4207 train loss:3.811831 +step:4208 train loss:3.774104 +step:4209 train loss:3.837534 +step:4210 train loss:3.867279 +step:4211 train loss:3.748620 +step:4212 train loss:3.792292 +step:4213 train loss:3.742489 +step:4214 train loss:3.749825 +step:4215 train loss:3.766014 +step:4216 train loss:3.742800 +step:4217 train loss:3.762543 +step:4218 train loss:3.805938 +step:4219 train loss:3.797600 +step:4220 train loss:3.883508 +step:4221 train loss:3.770691 +step:4222 train loss:3.830531 +step:4223 train loss:3.753679 +step:4224 train loss:3.827257 +step:4225 train loss:3.752977 +step:4226 train loss:3.809092 +step:4227 train loss:3.787758 +step:4228 train loss:3.757954 +step:4229 train loss:3.766733 +step:4230 train loss:3.754043 +step:4231 train loss:3.742140 +step:4232 train loss:3.785430 +step:4233 train loss:3.699595 +step:4234 train loss:3.777742 +step:4235 train loss:3.854257 +step:4236 train loss:3.821263 +step:4237 train loss:3.807860 +step:4238 train loss:3.814943 +step:4239 train loss:3.866851 +step:4240 train loss:3.776178 +step:4241 train loss:3.702142 +step:4242 train loss:3.821232 +step:4243 train loss:3.817744 +step:4244 train loss:3.833663 +step:4245 train loss:3.886212 +step:4246 train loss:3.761290 +step:4247 train loss:3.818758 +step:4248 train loss:3.769625 +step:4249 train loss:3.780627 +step:4250 validation loss:3.708977 +step:4250 train loss:3.759234 +step:4251 train loss:3.854076 +step:4252 train loss:3.768639 +step:4253 train loss:3.755602 +step:4254 train loss:3.769205 +step:4255 train loss:3.745692 +step:4256 train loss:3.762922 +step:4257 train loss:3.820958 +step:4258 train loss:3.680866 +step:4259 train loss:3.746725 +step:4260 train loss:3.808949 +step:4261 train loss:3.797497 +step:4262 
train loss:3.935429 +step:4263 train loss:3.868223 +step:4264 train loss:3.808248 +step:4265 train loss:3.798817 +step:4266 train loss:3.799408 +step:4267 train loss:3.798979 +step:4268 train loss:3.743591 +step:4269 train loss:3.839299 +step:4270 train loss:3.819119 +step:4271 train loss:3.732848 +step:4272 train loss:3.789917 +step:4273 train loss:3.770036 +step:4274 train loss:3.751926 +step:4275 train loss:3.771869 +step:4276 train loss:3.739574 +step:4277 train loss:3.870413 +step:4278 train loss:3.718407 +step:4279 train loss:3.750836 +step:4280 train loss:3.833881 +step:4281 train loss:3.815610 +step:4282 train loss:3.879722 +step:4283 train loss:3.737582 +step:4284 train loss:3.764048 +step:4285 train loss:3.769593 +step:4286 train loss:3.834457 +step:4287 train loss:3.832291 +step:4288 train loss:3.811948 +step:4289 train loss:3.763979 +step:4290 train loss:3.776272 +step:4291 train loss:3.735499 +step:4292 train loss:3.774826 +step:4293 train loss:3.790673 +step:4294 train loss:3.776245 +step:4295 train loss:3.709658 +step:4296 train loss:3.782595 +step:4297 train loss:3.762990 +step:4298 train loss:3.774332 +step:4299 train loss:3.771023 +step:4300 train loss:3.889272 +step:4301 train loss:3.705735 +step:4302 train loss:3.842992 +step:4303 train loss:3.721177 +step:4304 train loss:3.730031 +step:4305 train loss:3.750281 +step:4306 train loss:3.826622 +step:4307 train loss:3.745830 +step:4308 train loss:3.738345 +step:4309 train loss:3.809923 +step:4310 train loss:3.745794 +step:4311 train loss:3.801993 +step:4312 train loss:3.795446 +step:4313 train loss:3.789341 +step:4314 train loss:3.734675 +step:4315 train loss:3.768100 +step:4316 train loss:3.715525 +step:4317 train loss:3.772734 +step:4318 train loss:3.809435 +step:4319 train loss:3.762682 +step:4320 train loss:3.820526 +step:4321 train loss:3.804211 +step:4322 train loss:3.758534 +step:4323 train loss:3.699162 +step:4324 train loss:3.788512 +step:4325 train loss:3.764374 +step:4326 train loss:3.757021 +step:4327 train loss:3.861506 +step:4328 train loss:3.774828 +step:4329 train loss:3.729226 +step:4330 train loss:3.775790 +step:4331 train loss:3.790070 +step:4332 train loss:3.816323 +step:4333 train loss:3.772001 +step:4334 train loss:3.798267 +step:4335 train loss:3.794408 +step:4336 train loss:3.803355 +step:4337 train loss:3.772197 +step:4338 train loss:3.894314 +step:4339 train loss:3.793208 +step:4340 train loss:3.799364 +step:4341 train loss:3.767910 +step:4342 train loss:3.780215 +step:4343 train loss:3.899135 +step:4344 train loss:3.791151 +step:4345 train loss:3.808114 +step:4346 train loss:3.824555 +step:4347 train loss:3.830812 +step:4348 train loss:3.742298 +step:4349 train loss:3.829185 +step:4350 train loss:3.765708 +step:4351 train loss:3.720285 +step:4352 train loss:3.796246 +step:4353 train loss:3.742167 +step:4354 train loss:3.798738 +step:4355 train loss:3.759705 +step:4356 train loss:3.781879 +step:4357 train loss:3.760437 +step:4358 train loss:3.856122 +step:4359 train loss:3.810385 +step:4360 train loss:3.721941 +step:4361 train loss:3.768240 +step:4362 train loss:3.791343 +step:4363 train loss:3.807309 +step:4364 train loss:3.778288 +step:4365 train loss:3.759096 +step:4366 train loss:3.803800 +step:4367 train loss:3.818022 +step:4368 train loss:3.795663 +step:4369 train loss:3.661772 +step:4370 train loss:3.794760 +step:4371 train loss:3.706677 +step:4372 train loss:3.853913 +step:4373 train loss:3.792534 +step:4374 train loss:3.759576 +step:4375 train loss:3.807138 +step:4376 train loss:3.816108 
+step:4377 train loss:3.751959 +step:4378 train loss:3.759118 +step:4379 train loss:3.840461 +step:4380 train loss:3.825349 +step:4381 train loss:3.724072 +step:4382 train loss:3.772179 +step:4383 train loss:3.799793 +step:4384 train loss:3.797440 +step:4385 train loss:3.720247 +step:4386 train loss:3.778739 +step:4387 train loss:3.747882 +step:4388 train loss:3.766796 +step:4389 train loss:3.796861 +step:4390 train loss:3.836230 +step:4391 train loss:3.762058 +step:4392 train loss:3.835405 +step:4393 train loss:3.796587 +step:4394 train loss:3.733650 +step:4395 train loss:3.791681 +step:4396 train loss:3.763781 +step:4397 train loss:3.806111 +step:4398 train loss:3.756657 +step:4399 train loss:3.752136 +step:4400 train loss:3.752454 +step:4401 train loss:3.811304 +step:4402 train loss:3.812523 +step:4403 train loss:3.760807 +step:4404 train loss:3.793618 +step:4405 train loss:3.712677 +step:4406 train loss:3.790567 +step:4407 train loss:3.725888 +step:4408 train loss:3.825241 +step:4409 train loss:3.779413 +step:4410 train loss:3.785789 +step:4411 train loss:3.749358 +step:4412 train loss:3.857702 +step:4413 train loss:3.756788 +step:4414 train loss:3.763810 +step:4415 train loss:3.749243 +step:4416 train loss:3.745053 +step:4417 train loss:3.734725 +step:4418 train loss:3.808054 +step:4419 train loss:3.776907 +step:4420 train loss:3.783465 +step:4421 train loss:3.811876 +step:4422 train loss:3.825529 +step:4423 train loss:3.785034 +step:4424 train loss:3.772313 +step:4425 train loss:3.735364 +step:4426 train loss:3.808984 +step:4427 train loss:3.769333 +step:4428 train loss:3.708886 +step:4429 train loss:3.765880 +step:4430 train loss:3.808908 +step:4431 train loss:3.803445 +step:4432 train loss:3.707650 +step:4433 train loss:3.762604 +step:4434 train loss:3.762499 +step:4435 train loss:3.787431 +step:4436 train loss:3.726475 +step:4437 train loss:3.805835 +step:4438 train loss:3.771361 +step:4439 train loss:3.775567 +step:4440 train loss:3.771760 +step:4441 train loss:3.776108 +step:4442 train loss:3.826370 +step:4443 train loss:3.759868 +step:4444 train loss:3.842078 +step:4445 train loss:3.806449 +step:4446 train loss:3.738615 +step:4447 train loss:3.789646 +step:4448 train loss:3.807781 +step:4449 train loss:3.743482 +step:4450 train loss:3.760965 +step:4451 train loss:3.818990 +step:4452 train loss:3.870327 +step:4453 train loss:3.802284 +step:4454 train loss:3.773043 +step:4455 train loss:3.820143 +step:4456 train loss:3.766535 +step:4457 train loss:3.766226 +step:4458 train loss:3.776740 +step:4459 train loss:3.810462 +step:4460 train loss:3.722441 +step:4461 train loss:3.693348 +step:4462 train loss:3.748976 +step:4463 train loss:3.771251 +step:4464 train loss:3.738859 +step:4465 train loss:3.772674 +step:4466 train loss:3.871477 +step:4467 train loss:3.751194 +step:4468 train loss:3.746112 +step:4469 train loss:3.737775 +step:4470 train loss:3.712422 +step:4471 train loss:3.778781 +step:4472 train loss:3.699906 +step:4473 train loss:3.785602 +step:4474 train loss:3.811629 +step:4475 train loss:3.772946 +step:4476 train loss:3.735664 +step:4477 train loss:3.719810 +step:4478 train loss:3.779913 +step:4479 train loss:3.884104 +step:4480 train loss:3.719857 +step:4481 train loss:3.786484 +step:4482 train loss:3.749674 +step:4483 train loss:3.744178 +step:4484 train loss:3.786170 +step:4485 train loss:3.752027 +step:4486 train loss:3.850978 +step:4487 train loss:3.748087 +step:4488 train loss:3.748092 +step:4489 train loss:3.703630 +step:4490 train loss:3.786514 +step:4491 train 
loss:3.733437 +step:4492 train loss:3.769885 +step:4493 train loss:3.754104 +step:4494 train loss:3.746070 +step:4495 train loss:3.814801 +step:4496 train loss:3.759940 +step:4497 train loss:3.835732 +step:4498 train loss:3.729407 +step:4499 train loss:3.782547 +step:4500 validation loss:3.691041 total_sharp:6.6041e-03 L1_sharp:3.2114e-01 L2_sharp:1.9024e-01 L3_sharp:1.9511e-01 L4_sharp:9.8449e-02 L5_sharp:9.3977e-02 L6_sharp:1.1507e-01 L7_sharp:1.3012e-01 L8_sharp:1.1913e-01 L9_sharp:9.8592e-02 L10_sharp:7.4525e-02 L11_sharp:6.2608e-02 L12_sharp:1.0169e-01 total_fnorm:1.3413e+00 total_l1_linf:7.4053e+03 total_spectral:1.3413e+00 L1_fnorm:3.2184e-02 L2_fnorm:3.0505e-02 L3_fnorm:2.9977e-02 L4_fnorm:3.0874e-02 L5_fnorm:3.1670e-02 L6_fnorm:3.1815e-02 L7_fnorm:3.1751e-02 L8_fnorm:3.1801e-02 L9_fnorm:3.1716e-02 L10_fnorm:3.1780e-02 L11_fnorm:3.1905e-02 L12_fnorm:3.1739e-02 L1_l1linf:3.1671e-01 L2_l1linf:3.3162e-01 L3_l1linf:3.2483e-01 L4_l1linf:3.0465e-01 L5_l1linf:2.9988e-01 L6_l1linf:2.6569e-01 L7_l1linf:2.4060e-01 L8_l1linf:2.5671e-01 L9_l1linf:2.6812e-01 L10_l1linf:2.8120e-01 L11_l1linf:2.9690e-01 L12_l1linf:2.7915e-01 L1_spectral:7.1094e-03 L2_spectral:7.3962e-03 L3_spectral:7.3552e-03 L4_spectral:6.8872e-03 L5_spectral:6.7512e-03 L6_spectral:5.9969e-03 L7_spectral:5.4891e-03 L8_spectral:5.8429e-03 L9_spectral:6.0354e-03 L10_spectral:6.3191e-03 L11_spectral:6.6946e-03 L12_spectral:6.3344e-03 ip_v_neg_g:6.7372e-03 cos_v_neg_g:7.8237e-04 v_norm:1.3413e+00 g_norm:6.4202e+00 hv_norm:1.9877e+00 cos_v_hv:4.4564e-03 hg_norm:2.8021e+03 cos_g_hg:4.5579e-01 v_par:2.8400e-05 v_perp:1.3413e+00 L1_cos_v_neg_g:1.2047e-02 L1_v_norm:3.2184e-02 L2_cos_v_neg_g:1.1778e-02 L2_v_norm:3.0505e-02 L3_cos_v_neg_g:1.0980e-02 L3_v_norm:2.9977e-02 L4_cos_v_neg_g:8.8928e-03 L4_v_norm:3.0874e-02 L5_cos_v_neg_g:8.3470e-03 L5_v_norm:3.1670e-02 L6_cos_v_neg_g:8.3791e-03 L6_v_norm:3.1815e-02 L7_cos_v_neg_g:8.7063e-03 L7_v_norm:3.1751e-02 L8_cos_v_neg_g:8.1210e-03 L8_v_norm:3.1801e-02 L9_cos_v_neg_g:7.9745e-03 L9_v_norm:3.1716e-02 L10_cos_v_neg_g:7.3862e-03 L10_v_norm:3.1780e-02 L11_cos_v_neg_g:7.3255e-03 L11_v_norm:3.1905e-02 L12_cos_v_neg_g:6.5647e-03 L12_v_norm:3.1739e-02 +step:4500 train loss:3.690632 +step:4501 train loss:3.750697 +step:4502 train loss:3.875941 +step:4503 train loss:3.777030 +step:4504 train loss:3.786577 +step:4505 train loss:3.773518 +step:4506 train loss:3.744852 +step:4507 train loss:3.816679 +step:4508 train loss:3.750499 +step:4509 train loss:3.751597 +step:4510 train loss:3.785923 +step:4511 train loss:3.742804 +step:4512 train loss:3.760781 +step:4513 train loss:3.819789 +step:4514 train loss:3.727568 +step:4515 train loss:3.835550 +step:4516 train loss:3.816464 +step:4517 train loss:3.772445 +step:4518 train loss:3.707225 +step:4519 train loss:3.744549 +step:4520 train loss:3.757369 +step:4521 train loss:3.695635 +step:4522 train loss:3.753106 +step:4523 train loss:3.802633 +step:4524 train loss:3.782991 +step:4525 train loss:3.707826 +step:4526 train loss:3.748816 +step:4527 train loss:3.736325 +step:4528 train loss:3.764466 +step:4529 train loss:3.761407 +step:4530 train loss:3.856561 +step:4531 train loss:3.744832 +step:4532 train loss:3.769838 +step:4533 train loss:3.741649 +step:4534 train loss:3.835053 +step:4535 train loss:3.736710 +step:4536 train loss:3.806612 +step:4537 train loss:3.786181 +step:4538 train loss:3.768182 +step:4539 train loss:3.789880 +step:4540 train loss:3.765013 +step:4541 train loss:3.730517 +step:4542 train loss:3.784824 +step:4543 train loss:3.866572 +step:4544 
train loss:3.808365 +step:4545 train loss:3.753595 +step:4546 train loss:3.845587 +step:4547 train loss:3.803535 +step:4548 train loss:3.808263 +step:4549 train loss:3.761895 +step:4550 train loss:3.732652 +step:4551 train loss:3.747185 +step:4552 train loss:3.749205 +step:4553 train loss:3.830230 +step:4554 train loss:3.725615 +step:4555 train loss:3.839002 +step:4556 train loss:3.773861 +step:4557 train loss:3.702783 +step:4558 train loss:3.788313 +step:4559 train loss:3.802584 +step:4560 train loss:3.737643 +step:4561 train loss:3.723747 +step:4562 train loss:3.767373 +step:4563 train loss:3.716049 +step:4564 train loss:3.740544 +step:4565 train loss:3.742838 +step:4566 train loss:3.716447 +step:4567 train loss:3.743177 +step:4568 train loss:3.740469 +step:4569 train loss:3.728426 +step:4570 train loss:3.777723 +step:4571 train loss:3.754234 +step:4572 train loss:3.746556 +step:4573 train loss:3.758153 +step:4574 train loss:3.900669 +step:4575 train loss:3.733835 +step:4576 train loss:3.727047 +step:4577 train loss:3.764870 +step:4578 train loss:3.804262 +step:4579 train loss:3.754554 +step:4580 train loss:3.815325 +step:4581 train loss:3.753773 +step:4582 train loss:3.748790 +step:4583 train loss:3.755249 +step:4584 train loss:3.727137 +step:4585 train loss:3.802207 +step:4586 train loss:3.793827 +step:4587 train loss:3.693758 +step:4588 train loss:3.735603 +step:4589 train loss:3.812685 +step:4590 train loss:3.783603 +step:4591 train loss:3.723370 +step:4592 train loss:3.806805 +step:4593 train loss:3.726499 +step:4594 train loss:3.757849 +step:4595 train loss:3.777671 +step:4596 train loss:3.719117 +step:4597 train loss:3.855902 +step:4598 train loss:3.774883 +step:4599 train loss:3.724666 +step:4600 train loss:3.735048 +step:4601 train loss:3.757895 +step:4602 train loss:3.708547 +step:4603 train loss:3.724937 +step:4604 train loss:3.829482 +step:4605 train loss:3.749578 +step:4606 train loss:3.771004 +step:4607 train loss:3.755717 +step:4608 train loss:3.793459 +step:4609 train loss:3.748142 +step:4610 train loss:3.798228 +step:4611 train loss:3.815354 +step:4612 train loss:3.816257 +step:4613 train loss:3.795580 +step:4614 train loss:3.786617 +step:4615 train loss:3.732426 +step:4616 train loss:3.713083 +step:4617 train loss:3.758849 +step:4618 train loss:3.771364 +step:4619 train loss:3.739351 +step:4620 train loss:3.748688 +step:4621 train loss:3.751861 +step:4622 train loss:3.684968 +step:4623 train loss:3.796810 +step:4624 train loss:3.781430 +step:4625 train loss:3.736408 +step:4626 train loss:3.781465 +step:4627 train loss:3.754742 +step:4628 train loss:3.738500 +step:4629 train loss:3.775291 +step:4630 train loss:3.833431 +step:4631 train loss:3.832385 +step:4632 train loss:3.726670 +step:4633 train loss:3.738451 +step:4634 train loss:3.816680 +step:4635 train loss:3.781760 +step:4636 train loss:3.797721 +step:4637 train loss:3.733425 +step:4638 train loss:3.740080 +step:4639 train loss:3.735652 +step:4640 train loss:3.745937 +step:4641 train loss:3.753554 +step:4642 train loss:3.786430 +step:4643 train loss:3.749343 +step:4644 train loss:3.771138 +step:4645 train loss:3.782873 +step:4646 train loss:3.745403 +step:4647 train loss:3.697652 +step:4648 train loss:3.809271 +step:4649 train loss:3.819650 +step:4650 train loss:3.764321 +step:4651 train loss:3.763170 +step:4652 train loss:3.752155 +step:4653 train loss:3.815655 +step:4654 train loss:3.808143 +step:4655 train loss:3.710997 +step:4656 train loss:3.744861 +step:4657 train loss:3.799453 +step:4658 train loss:3.755704 
+step:4659 train loss:3.767591 +step:4660 train loss:3.812207 +step:4661 train loss:3.728082 +step:4662 train loss:3.740787 +step:4663 train loss:3.748241 +step:4664 train loss:3.811693 +step:4665 train loss:3.802365 +step:4666 train loss:3.797990 +step:4667 train loss:3.791036 +step:4668 train loss:3.754221 +step:4669 train loss:3.765525 +step:4670 train loss:3.796125 +step:4671 train loss:3.795661 +step:4672 train loss:3.667965 +step:4673 train loss:3.704684 +step:4674 train loss:3.831761 +step:4675 train loss:3.735011 +step:4676 train loss:3.700513 +step:4677 train loss:3.701003 +step:4678 train loss:3.672849 +step:4679 train loss:3.776822 +step:4680 train loss:3.713343 +step:4681 train loss:3.768335 +step:4682 train loss:3.714116 +step:4683 train loss:3.683735 +step:4684 train loss:3.797850 +step:4685 train loss:3.733495 +step:4686 train loss:3.750026 +step:4687 train loss:3.779946 +step:4688 train loss:3.711885 +step:4689 train loss:3.791052 +step:4690 train loss:3.729504 +step:4691 train loss:3.769236 +step:4692 train loss:3.692051 +step:4693 train loss:3.734725 +step:4694 train loss:3.777655 +step:4695 train loss:3.790191 +step:4696 train loss:3.783783 +step:4697 train loss:3.692871 +step:4698 train loss:3.714912 +step:4699 train loss:3.760820 +step:4700 train loss:3.727387 +step:4701 train loss:3.741469 +step:4702 train loss:3.692519 +step:4703 train loss:3.776003 +step:4704 train loss:3.761464 +step:4705 train loss:3.708399 +step:4706 train loss:3.715537 +step:4707 train loss:3.700665 +step:4708 train loss:3.773644 +step:4709 train loss:3.716286 +step:4710 train loss:3.735109 +step:4711 train loss:3.789139 +step:4712 train loss:3.692205 +step:4713 train loss:3.793147 +step:4714 train loss:3.694343 +step:4715 train loss:3.785831 +step:4716 train loss:3.748813 +step:4717 train loss:3.685727 +step:4718 train loss:3.770096 +step:4719 train loss:3.700300 +step:4720 train loss:3.793682 +step:4721 train loss:3.750114 +step:4722 train loss:3.807581 +step:4723 train loss:3.703829 +step:4724 train loss:3.756123 +step:4725 train loss:3.686656 +step:4726 train loss:3.739161 +step:4727 train loss:3.737922 +step:4728 train loss:3.750543 +step:4729 train loss:3.778575 +step:4730 train loss:3.677912 +step:4731 train loss:3.741364 +step:4732 train loss:3.686737 +step:4733 train loss:3.631411 +step:4734 train loss:3.762966 +step:4735 train loss:3.720203 +step:4736 train loss:3.756265 +step:4737 train loss:3.641299 +step:4738 train loss:3.781929 +step:4739 train loss:3.662534 +step:4740 train loss:3.777753 +step:4741 train loss:3.742887 +step:4742 train loss:3.706490 +step:4743 train loss:3.696221 +step:4744 train loss:3.746996 +step:4745 train loss:3.764062 +step:4746 train loss:3.805496 +step:4747 train loss:3.764960 +step:4748 train loss:3.665610 +step:4749 train loss:3.732480 +step:4750 validation loss:3.672727 +step:4750 train loss:3.679295 +step:4751 train loss:3.776722 +step:4752 train loss:3.705571 +step:4753 train loss:3.815663 +step:4754 train loss:3.679106 +step:4755 train loss:3.730251 +step:4756 train loss:3.792362 +step:4757 train loss:3.727991 +step:4758 train loss:3.734763 +step:4759 train loss:3.742036 +step:4760 train loss:3.764551 +step:4761 train loss:3.690052 +step:4762 train loss:3.721650 +step:4763 train loss:3.741068 +step:4764 train loss:3.802947 +step:4765 train loss:3.692815 +step:4766 train loss:3.723511 +step:4767 train loss:3.671187 +step:4768 train loss:3.729733 +step:4769 train loss:3.750039 +step:4770 train loss:3.715468 +step:4771 train loss:3.721960 +step:4772 
train loss:3.699221 +step:4773 train loss:3.729517 +step:4774 train loss:3.672670 +step:4775 train loss:3.810179 +step:4776 train loss:3.672511 +step:4777 train loss:3.750551 +step:4778 train loss:3.684019 +step:4779 train loss:3.735243 +step:4780 train loss:3.670348 +step:4781 train loss:3.682072 +step:4782 train loss:3.780010 +step:4783 train loss:3.778391 +step:4784 train loss:3.733063 +step:4785 train loss:3.736586 +step:4786 train loss:3.846041 +step:4787 train loss:3.680928 +step:4788 train loss:3.701091 +step:4789 train loss:3.727715 +step:4790 train loss:3.773434 +step:4791 train loss:3.741650 +step:4792 train loss:3.786303 +step:4793 train loss:3.701212 +step:4794 train loss:3.777139 +step:4795 train loss:3.723221 +step:4796 train loss:3.713602 +step:4797 train loss:3.718835 +step:4798 train loss:3.731180 +step:4799 train loss:3.723252 +step:4800 train loss:3.760140 +step:4801 train loss:3.744053 +step:4802 train loss:3.783530 +step:4803 train loss:3.767340 +step:4804 train loss:3.729008 +step:4805 train loss:3.718510 +step:4806 train loss:3.698215 +step:4807 train loss:3.809515 +step:4808 train loss:3.678722 +step:4809 train loss:3.784194 +step:4810 train loss:3.722452 +step:4811 train loss:3.746447 +step:4812 train loss:3.714290 +step:4813 train loss:3.674540 +step:4814 train loss:3.664068 +step:4815 train loss:3.666095 +step:4816 train loss:3.720814 +step:4817 train loss:3.670555 +step:4818 train loss:3.731077 +step:4819 train loss:3.729589 +step:4820 train loss:3.974553 +step:4821 train loss:3.751919 +step:4822 train loss:3.763554 +step:4823 train loss:3.690199 +step:4824 train loss:3.705368 +step:4825 train loss:3.680712 +step:4826 train loss:3.769496 +step:4827 train loss:3.716039 +step:4828 train loss:3.661717 +step:4829 train loss:3.757847 +step:4830 train loss:3.705886 +step:4831 train loss:3.850082 +step:4832 train loss:3.720616 +step:4833 train loss:3.757896 +step:4834 train loss:3.661039 +step:4835 train loss:3.750640 +step:4836 train loss:3.729383 +step:4837 train loss:3.760052 +step:4838 train loss:3.697040 +step:4839 train loss:3.765454 +step:4840 train loss:3.670710 +step:4841 train loss:3.767520 +step:4842 train loss:3.679384 +step:4843 train loss:3.761948 +step:4844 train loss:3.754007 +step:4845 train loss:3.701434 +step:4846 train loss:3.710222 +step:4847 train loss:3.700234 +step:4848 train loss:3.720421 +step:4849 train loss:3.680027 +step:4850 train loss:3.680623 +step:4851 train loss:3.683389 +step:4852 train loss:3.755136 +step:4853 train loss:3.736626 +step:4854 train loss:3.710316 +step:4855 train loss:3.778346 +step:4856 train loss:3.743410 +step:4857 train loss:3.757853 +step:4858 train loss:3.836859 +step:4859 train loss:3.686006 +step:4860 train loss:3.756096 +step:4861 train loss:3.730070 +step:4862 train loss:3.766739 +step:4863 train loss:3.701061 +step:4864 train loss:3.712367 +step:4865 train loss:3.704057 +step:4866 train loss:3.750505 +step:4867 train loss:3.715327 +step:4868 train loss:3.738022 +step:4869 train loss:3.677839 +step:4870 train loss:3.719231 +step:4871 train loss:3.799178 +step:4872 train loss:3.744177 +step:4873 train loss:3.737402 +step:4874 train loss:3.719247 +step:4875 train loss:3.676586 +step:4876 train loss:3.693666 +step:4877 train loss:3.690766 +step:4878 train loss:3.735027 +step:4879 train loss:3.688458 +step:4880 train loss:3.723000 +step:4881 train loss:3.661806 +step:4882 train loss:3.868871 +step:4883 train loss:3.671567 +step:4884 train loss:3.706658 +step:4885 train loss:3.679362 +step:4886 train loss:3.758961 
+step:4887 train loss:3.705637 +step:4888 train loss:3.719203 +step:4889 train loss:3.707427 +step:4890 train loss:3.757276 +step:4891 train loss:3.687404 +step:4892 train loss:3.698395 +step:4893 train loss:3.740567 +step:4894 train loss:3.680029 +step:4895 train loss:3.707894 +step:4896 train loss:3.695728 +step:4897 train loss:3.762621 +step:4898 train loss:3.716777 +step:4899 train loss:3.696992 +step:4900 train loss:3.747202 +step:4901 train loss:3.698616 +step:4902 train loss:3.693807 +step:4903 train loss:3.709151 +step:4904 train loss:3.727839 +step:4905 train loss:3.718946 +step:4906 train loss:3.721816 +step:4907 train loss:3.792729 +step:4908 train loss:3.701077 +step:4909 train loss:3.703141 +step:4910 train loss:3.729438 +step:4911 train loss:3.776352 +step:4912 train loss:3.753570 +step:4913 train loss:3.727446 +step:4914 train loss:3.722125 +step:4915 train loss:3.700847 +step:4916 train loss:3.645070 +step:4917 train loss:3.667431 +step:4918 train loss:3.702702 +step:4919 train loss:3.690832 +step:4920 train loss:3.689368 +step:4921 train loss:3.848990 +step:4922 train loss:3.752202 +step:4923 train loss:3.763113 +step:4924 train loss:3.767807 +step:4925 train loss:3.696352 +step:4926 train loss:3.699402 +step:4927 train loss:3.720904 +step:4928 train loss:3.761392 +step:4929 train loss:3.713433 +step:4930 train loss:3.700885 +step:4931 train loss:3.687598 +step:4932 train loss:3.701047 +step:4933 train loss:3.694080 +step:4934 train loss:3.762593 +step:4935 train loss:3.748991 +step:4936 train loss:3.708650 +step:4937 train loss:3.819850 +step:4938 train loss:3.802545 +step:4939 train loss:3.674489 +step:4940 train loss:3.746907 +step:4941 train loss:3.655174 +step:4942 train loss:3.693295 +step:4943 train loss:3.699586 +step:4944 train loss:3.694151 +step:4945 train loss:3.745820 +step:4946 train loss:3.714386 +step:4947 train loss:3.705506 +step:4948 train loss:3.734941 +step:4949 train loss:3.645246 +step:4950 train loss:3.725560 +step:4951 train loss:3.776243 +step:4952 train loss:3.715207 +step:4953 train loss:3.751125 +step:4954 train loss:3.651869 +step:4955 train loss:3.728585 +step:4956 train loss:3.754641 +step:4957 train loss:3.753896 +step:4958 train loss:3.666911 +step:4959 train loss:3.786018 +step:4960 train loss:3.708665 +step:4961 train loss:3.731834 +step:4962 train loss:3.690056 +step:4963 train loss:3.736613 +step:4964 train loss:3.687565 +step:4965 train loss:3.841100 +step:4966 train loss:3.686831 +step:4967 train loss:3.798740 +step:4968 train loss:3.687116 +step:4969 train loss:3.729989 +step:4970 train loss:3.721320 +step:4971 train loss:3.665601 +step:4972 train loss:3.718877 +step:4973 train loss:3.721249 +step:4974 train loss:3.717270 +step:4975 train loss:3.791441 +step:4976 train loss:3.780020 +step:4977 train loss:3.721523 +step:4978 train loss:3.711790 +step:4979 train loss:3.707807 +step:4980 train loss:3.817225 +step:4981 train loss:3.656284 +step:4982 train loss:3.737866 +step:4983 train loss:3.650670 +step:4984 train loss:3.844054 +step:4985 train loss:3.742446 +step:4986 train loss:3.685206 +step:4987 train loss:3.704286 +step:4988 train loss:3.900936 +step:4989 train loss:3.705275 +step:4990 train loss:3.703387 +step:4991 train loss:3.712670 +step:4992 train loss:3.700434 +step:4993 train loss:3.678903 +step:4994 train loss:3.783802 +step:4995 train loss:3.714234 +step:4996 train loss:3.800962 +step:4997 train loss:3.697445 +step:4998 train loss:3.706129 +step:4999 train loss:3.686333 +step:5000 validation loss:3.659501 
total_sharp:5.6295e-03 L1_sharp:2.5756e-01 L2_sharp:1.0702e-01 L3_sharp:1.6087e-01 L4_sharp:8.5477e-02 L5_sharp:9.1538e-02 L6_sharp:1.0405e-01 L7_sharp:1.3310e-01 L8_sharp:1.0716e-01 L9_sharp:9.6588e-02 L10_sharp:7.5284e-02 L11_sharp:5.8750e-02 L12_sharp:7.1794e-02 total_fnorm:1.3671e+00 total_l1_linf:7.5374e+03 total_spectral:1.3671e+00 L1_fnorm:3.2288e-02 L2_fnorm:3.0785e-02 L3_fnorm:3.0306e-02 L4_fnorm:3.1342e-02 L5_fnorm:3.1817e-02 L6_fnorm:3.2044e-02 L7_fnorm:3.2030e-02 L8_fnorm:3.1933e-02 L9_fnorm:3.2062e-02 L10_fnorm:3.2172e-02 L11_fnorm:3.2173e-02 L12_fnorm:3.2020e-02 L1_l1linf:3.1572e-01 L2_l1linf:3.2635e-01 L3_l1linf:3.2755e-01 L4_l1linf:3.1646e-01 L5_l1linf:3.1187e-01 L6_l1linf:2.9337e-01 L7_l1linf:2.7364e-01 L8_l1linf:2.6993e-01 L9_l1linf:3.0037e-01 L10_l1linf:3.1264e-01 L11_l1linf:3.2916e-01 L12_l1linf:3.1033e-01 L1_spectral:7.1367e-03 L2_spectral:7.3821e-03 L3_spectral:7.2942e-03 L4_spectral:7.1349e-03 L5_spectral:7.0237e-03 L6_spectral:6.6221e-03 L7_spectral:6.1522e-03 L8_spectral:6.0879e-03 L9_spectral:6.7146e-03 L10_spectral:7.0297e-03 L11_spectral:7.4134e-03 L12_spectral:7.0418e-03 ip_v_neg_g:8.6053e-03 cos_v_neg_g:9.4362e-04 v_norm:1.3671e+00 g_norm:6.6705e+00 hv_norm:1.6981e+00 cos_v_hv:4.5323e-03 hg_norm:4.1865e+03 cos_g_hg:4.8923e-01 v_par:3.6157e-05 v_perp:1.3671e+00 L1_cos_v_neg_g:1.1615e-02 L1_v_norm:3.2288e-02 L2_cos_v_neg_g:1.0757e-02 L2_v_norm:3.0785e-02 L3_cos_v_neg_g:1.1636e-02 L3_v_norm:3.0306e-02 L4_cos_v_neg_g:1.1939e-02 L4_v_norm:3.1342e-02 L5_cos_v_neg_g:1.1888e-02 L5_v_norm:3.1817e-02 L6_cos_v_neg_g:1.1486e-02 L6_v_norm:3.2044e-02 L7_cos_v_neg_g:1.1613e-02 L7_v_norm:3.2030e-02 L8_cos_v_neg_g:1.1023e-02 L8_v_norm:3.1933e-02 L9_cos_v_neg_g:9.8640e-03 L9_v_norm:3.2062e-02 L10_cos_v_neg_g:9.9086e-03 L10_v_norm:3.2172e-02 L11_cos_v_neg_g:8.2392e-03 L11_v_norm:3.2173e-02 L12_cos_v_neg_g:8.4150e-03 L12_v_norm:3.2020e-02 +step:5000 train loss:3.800504 +step:5001 train loss:3.670596 +step:5002 train loss:3.722161 +step:5003 train loss:3.723852 +step:5004 train loss:3.711533 +step:5005 train loss:3.710256 +step:5006 train loss:3.750645 +step:5007 train loss:3.757820 +step:5008 train loss:3.692267 +step:5009 train loss:3.739720 +step:5010 train loss:3.684998 +step:5011 train loss:3.719528 +step:5012 train loss:3.692623 +step:5013 train loss:3.793806 +step:5014 train loss:3.707915 +step:5015 train loss:3.784455 +step:5016 train loss:3.711109 +step:5017 train loss:3.755104 +step:5018 train loss:3.676618 +step:5019 train loss:3.708559 +step:5020 train loss:3.702834 +step:5021 train loss:3.713952 +step:5022 train loss:3.751290 +step:5023 train loss:3.716612 +step:5024 train loss:3.775624 +step:5025 train loss:3.654213 +step:5026 train loss:3.785054 +step:5027 train loss:3.713822 +step:5028 train loss:3.782842 +step:5029 train loss:3.676601 +step:5030 train loss:3.716009 +step:5031 train loss:3.704193 +step:5032 train loss:3.729784 +step:5033 train loss:3.718616 +step:5034 train loss:3.707586 +step:5035 train loss:3.800254 +step:5036 train loss:3.743347 +step:5037 train loss:3.698624 +step:5038 train loss:3.745564 +step:5039 train loss:3.760315 +step:5040 train loss:3.719405 +step:5041 train loss:3.736424 +step:5042 train loss:3.643795 +step:5043 train loss:3.787292 +step:5044 train loss:3.698340 +step:5045 train loss:3.751885 +step:5046 train loss:3.675887 +step:5047 train loss:3.751654 +step:5048 train loss:3.666147 +step:5049 train loss:3.797532 +step:5050 train loss:3.685708 +step:5051 train loss:3.729534 +step:5052 train loss:3.628565 +step:5053 train loss:3.807850 
+step:5054 train loss:3.703461 +step:5055 train loss:3.722062 +step:5056 train loss:3.764904 +step:5057 train loss:3.691100 +step:5058 train loss:3.725416 +step:5059 train loss:3.684997 +step:5060 train loss:3.730915 +step:5061 train loss:3.725295 +step:5062 train loss:3.694909 +step:5063 train loss:3.690671 +step:5064 train loss:3.696191 +step:5065 train loss:3.684236 +step:5066 train loss:3.739495 +step:5067 train loss:3.729488 +step:5068 train loss:3.709786 +step:5069 train loss:3.685584 +step:5070 train loss:3.711467 +step:5071 train loss:3.779881 +step:5072 train loss:3.675584 +step:5073 train loss:3.681703 +step:5074 train loss:3.627281 +step:5075 train loss:3.697245 +step:5076 train loss:3.629087 +step:5077 train loss:3.690236 +step:5078 train loss:3.711803 +step:5079 train loss:3.736700 +step:5080 train loss:3.710003 +step:5081 train loss:3.721300 +step:5082 train loss:3.710671 +step:5083 train loss:3.764419 +step:5084 train loss:3.745166 +step:5085 train loss:3.706760 +step:5086 train loss:3.785606 +step:5087 train loss:3.764722 +step:5088 train loss:3.684481 +step:5089 train loss:3.752477 +step:5090 train loss:3.694005 +step:5091 train loss:3.701375 +step:5092 train loss:3.796750 +step:5093 train loss:3.682989 +step:5094 train loss:3.678322 +step:5095 train loss:3.725906 +step:5096 train loss:3.695221 +step:5097 train loss:3.711185 +step:5098 train loss:3.710731 +step:5099 train loss:3.672132 +step:5100 train loss:3.683003 +step:5101 train loss:3.876235 +step:5102 train loss:3.719554 +step:5103 train loss:3.732171 +step:5104 train loss:3.775726 +step:5105 train loss:3.714682 +step:5106 train loss:3.676745 +step:5107 train loss:3.693777 +step:5108 train loss:3.686414 +step:5109 train loss:3.763434 +step:5110 train loss:3.683618 +step:5111 train loss:3.767334 +step:5112 train loss:3.683900 +step:5113 train loss:3.659530 +step:5114 train loss:3.708857 +step:5115 train loss:3.668213 +step:5116 train loss:3.727720 +step:5117 train loss:3.668458 +step:5118 train loss:3.699387 +step:5119 train loss:3.684650 +step:5120 train loss:3.724169 +step:5121 train loss:3.673194 +step:5122 train loss:3.680012 +step:5123 train loss:3.670855 +step:5124 train loss:3.632865 +step:5125 train loss:3.744850 +step:5126 train loss:3.727032 +step:5127 train loss:3.728213 +step:5128 train loss:3.745893 +step:5129 train loss:3.670722 +step:5130 train loss:3.686428 +step:5131 train loss:3.618904 +step:5132 train loss:3.743673 +step:5133 train loss:3.710444 +step:5134 train loss:3.713897 +step:5135 train loss:3.663572 +step:5136 train loss:3.730538 +step:5137 train loss:3.732546 +step:5138 train loss:3.708263 +step:5139 train loss:3.748405 +step:5140 train loss:3.718208 +step:5141 train loss:3.754261 +step:5142 train loss:3.694751 +step:5143 train loss:3.728210 +step:5144 train loss:3.721097 +step:5145 train loss:3.669132 +step:5146 train loss:3.659372 +step:5147 train loss:3.735684 +step:5148 train loss:3.664592 +step:5149 train loss:3.736880 +step:5150 train loss:3.713819 +step:5151 train loss:3.675570 +step:5152 train loss:3.723109 +step:5153 train loss:3.697285 +step:5154 train loss:3.704269 +step:5155 train loss:3.712786 +step:5156 train loss:3.697026 +step:5157 train loss:3.694357 +step:5158 train loss:3.721648 +step:5159 train loss:3.751300 +step:5160 train loss:3.821761 +step:5161 train loss:3.750927 +step:5162 train loss:3.767556 +step:5163 train loss:3.683519 +step:5164 train loss:3.743424 +step:5165 train loss:3.765019 +step:5166 train loss:3.698702 +step:5167 train loss:3.798259 +step:5168 train 
loss:3.707003 +step:5169 train loss:3.745483 +step:5170 train loss:3.720924 +step:5171 train loss:3.766448 +step:5172 train loss:3.687070 +step:5173 train loss:3.748561 +step:5174 train loss:3.684382 +step:5175 train loss:3.711474 +step:5176 train loss:3.707842 +step:5177 train loss:3.699326 +step:5178 train loss:3.766508 +step:5179 train loss:3.680604 +step:5180 train loss:3.757667 +step:5181 train loss:3.704455 +step:5182 train loss:3.760407 +step:5183 train loss:3.693762 +step:5184 train loss:3.671270 +step:5185 train loss:3.700279 +step:5186 train loss:3.750161 +step:5187 train loss:3.748574 +step:5188 train loss:3.680985 +step:5189 train loss:3.723632 +step:5190 train loss:3.709080 +step:5191 train loss:3.684786 +step:5192 train loss:3.670541 +step:5193 train loss:3.753670 +step:5194 train loss:3.709614 +step:5195 train loss:3.681598 +step:5196 train loss:3.749767 +step:5197 train loss:3.793154 +step:5198 train loss:3.712467 +step:5199 train loss:3.699509 +step:5200 train loss:3.720693 +step:5201 train loss:3.713501 +step:5202 train loss:3.710962 +step:5203 train loss:3.720841 +step:5204 train loss:3.688898 +step:5205 train loss:3.730390 +step:5206 train loss:3.668424 +step:5207 train loss:3.674592 +step:5208 train loss:3.737915 +step:5209 train loss:3.757203 +step:5210 train loss:3.660448 +step:5211 train loss:3.702437 +step:5212 train loss:3.721266 +step:5213 train loss:3.693925 +step:5214 train loss:3.746343 +step:5215 train loss:3.852690 +step:5216 train loss:3.705989 +step:5217 train loss:3.681873 +step:5218 train loss:3.689819 +step:5219 train loss:3.751045 +step:5220 train loss:3.668433 +step:5221 train loss:3.674266 +step:5222 train loss:3.752614 +step:5223 train loss:3.748557 +step:5224 train loss:3.645098 +step:5225 train loss:3.790796 +step:5226 train loss:3.704917 +step:5227 train loss:3.778161 +step:5228 train loss:3.754730 +step:5229 train loss:3.688750 +step:5230 train loss:3.708194 +step:5231 train loss:3.649352 +step:5232 train loss:3.773067 +step:5233 train loss:3.735496 +step:5234 train loss:3.735633 +step:5235 train loss:3.688189 +step:5236 train loss:3.760818 +step:5237 train loss:3.815971 +step:5238 train loss:3.713694 +step:5239 train loss:3.781702 +step:5240 train loss:3.663129 +step:5241 train loss:3.721882 +step:5242 train loss:3.696186 +step:5243 train loss:3.695358 +step:5244 train loss:3.697805 +step:5245 train loss:3.741771 +step:5246 train loss:3.781285 +step:5247 train loss:3.712186 +step:5248 train loss:3.677083 +step:5249 train loss:3.740154 +step:5250 validation loss:3.644344 +step:5250 train loss:3.706832 +step:5251 train loss:3.770075 +step:5252 train loss:3.663305 +step:5253 train loss:3.813726 +step:5254 train loss:3.687689 +step:5255 train loss:3.759189 +step:5256 train loss:3.679005 +step:5257 train loss:3.725275 +step:5258 train loss:3.727065 +step:5259 train loss:3.712740 +step:5260 train loss:3.705624 +step:5261 train loss:3.699350 +step:5262 train loss:3.734990 +step:5263 train loss:3.725474 +step:5264 train loss:3.673229 +step:5265 train loss:3.755907 +step:5266 train loss:3.675206 +step:5267 train loss:3.683291 +step:5268 train loss:3.668486 +step:5269 train loss:3.667253 +step:5270 train loss:3.722735 +step:5271 train loss:3.644423 +step:5272 train loss:3.735892 +step:5273 train loss:3.644481 +step:5274 train loss:3.692093 +step:5275 train loss:3.709579 +step:5276 train loss:3.830628 +step:5277 train loss:3.733200 +step:5278 train loss:3.682203 +step:5279 train loss:3.725284 +step:5280 train loss:3.706308 +step:5281 train loss:3.696565 
+step:5282 train loss:3.669464 +step:5283 train loss:3.669877 +step:5284 train loss:3.676717 +step:5285 train loss:3.749217 +step:5286 train loss:3.653527 +step:5287 train loss:3.757126 +step:5288 train loss:3.730342 +step:5289 train loss:3.700610 +step:5290 train loss:3.751751 +step:5291 train loss:3.703183 +step:5292 train loss:3.726051 +step:5293 train loss:3.694075 +step:5294 train loss:3.682201 +step:5295 train loss:3.691854 +step:5296 train loss:3.677558 +step:5297 train loss:3.701903 +step:5298 train loss:3.645709 +step:5299 train loss:3.738690 +step:5300 train loss:3.689626 +step:5301 train loss:3.758834 +step:5302 train loss:3.765329 +step:5303 train loss:3.626261 +step:5304 train loss:3.663270 +step:5305 train loss:3.638309 +step:5306 train loss:3.673731 +step:5307 train loss:3.674944 +step:5308 train loss:3.766533 +step:5309 train loss:3.720545 +step:5310 train loss:3.701823 +step:5311 train loss:3.776918 +step:5312 train loss:3.651860 +step:5313 train loss:3.743345 +step:5314 train loss:3.735571 +step:5315 train loss:3.697100 +step:5316 train loss:3.729279 +step:5317 train loss:3.744295 +step:5318 train loss:3.705531 +step:5319 train loss:3.728430 +step:5320 train loss:3.677144 +step:5321 train loss:3.801589 +step:5322 train loss:3.709633 +step:5323 train loss:3.711645 +step:5324 train loss:3.659654 +step:5325 train loss:3.733213 +step:5326 train loss:3.732137 +step:5327 train loss:3.621334 +step:5328 train loss:3.759421 +step:5329 train loss:3.723679 +step:5330 train loss:3.719172 +step:5331 train loss:3.772891 +step:5332 train loss:3.693325 +step:5333 train loss:3.756697 +step:5334 train loss:3.734560 +step:5335 train loss:3.785418 +step:5336 train loss:3.827459 +step:5337 train loss:3.661002 +step:5338 train loss:3.667148 +step:5339 train loss:3.689551 +step:5340 train loss:3.711528 +step:5341 train loss:3.732805 +step:5342 train loss:3.629946 +step:5343 train loss:3.788183 +step:5344 train loss:3.670329 +step:5345 train loss:3.667970 +step:5346 train loss:3.676596 +step:5347 train loss:3.694566 +step:5348 train loss:3.738594 +step:5349 train loss:3.678802 +step:5350 train loss:3.717317 +step:5351 train loss:3.793789 +step:5352 train loss:3.828596 +step:5353 train loss:3.743954 +step:5354 train loss:3.713550 +step:5355 train loss:3.678815 +step:5356 train loss:3.703909 +step:5357 train loss:3.678339 +step:5358 train loss:3.703524 +step:5359 train loss:3.717217 +step:5360 train loss:3.687057 +step:5361 train loss:3.690135 +step:5362 train loss:3.674030 +step:5363 train loss:3.670308 +step:5364 train loss:3.671432 +step:5365 train loss:3.702025 +step:5366 train loss:3.739020 +step:5367 train loss:3.658076 +step:5368 train loss:3.728876 +step:5369 train loss:3.746296 +step:5370 train loss:3.644183 +step:5371 train loss:3.701912 +step:5372 train loss:3.714703 +step:5373 train loss:3.762953 +step:5374 train loss:3.640266 +step:5375 train loss:3.686683 +step:5376 train loss:3.759847 +step:5377 train loss:3.690311 +step:5378 train loss:3.666793 +step:5379 train loss:3.672345 +step:5380 train loss:3.700907 +step:5381 train loss:3.747221 +step:5382 train loss:3.645679 +step:5383 train loss:3.715777 +step:5384 train loss:3.727892 +step:5385 train loss:3.727500 +step:5386 train loss:3.708900 +step:5387 train loss:3.712322 +step:5388 train loss:3.729068 +step:5389 train loss:3.656066 +step:5390 train loss:3.687371 +step:5391 train loss:3.629582 +step:5392 train loss:3.690134 +step:5393 train loss:3.683408 +step:5394 train loss:3.675330 +step:5395 train loss:3.751406 +step:5396 train 
loss:3.711287 +step:5397 train loss:3.732515 +step:5398 train loss:3.732365 +step:5399 train loss:3.761932 +step:5400 train loss:3.766822 +step:5401 train loss:3.728335 +step:5402 train loss:3.832454 +step:5403 train loss:3.742169 +step:5404 train loss:3.708979 +step:5405 train loss:3.781393 +step:5406 train loss:3.743860 +step:5407 train loss:3.671587 +step:5408 train loss:3.817284 +step:5409 train loss:3.655204 +step:5410 train loss:3.721950 +step:5411 train loss:3.709182 +step:5412 train loss:3.677882 +step:5413 train loss:3.735530 +step:5414 train loss:3.709689 +step:5415 train loss:3.693686 +step:5416 train loss:3.685563 +step:5417 train loss:3.749873 +step:5418 train loss:3.769341 +step:5419 train loss:3.669960 +step:5420 train loss:3.733218 +step:5421 train loss:3.700319 +step:5422 train loss:3.740589 +step:5423 train loss:3.725898 +step:5424 train loss:3.626094 +step:5425 train loss:3.690128 +step:5426 train loss:3.777141 +step:5427 train loss:3.667787 +step:5428 train loss:3.709330 +step:5429 train loss:3.642279 +step:5430 train loss:3.677445 +step:5431 train loss:3.738840 +step:5432 train loss:3.715179 +step:5433 train loss:3.722656 +step:5434 train loss:3.671468 +step:5435 train loss:3.670569 +step:5436 train loss:3.673043 +step:5437 train loss:3.708354 +step:5438 train loss:3.689674 +step:5439 train loss:3.694067 +step:5440 train loss:3.734508 +step:5441 train loss:3.753286 +step:5442 train loss:3.678704 +step:5443 train loss:3.681065 +step:5444 train loss:3.627384 +step:5445 train loss:3.708181 +step:5446 train loss:3.682854 +step:5447 train loss:3.716959 +step:5448 train loss:3.772973 +step:5449 train loss:3.659565 +step:5450 train loss:3.695940 +step:5451 train loss:3.690652 +step:5452 train loss:3.701831 +step:5453 train loss:3.760928 +step:5454 train loss:3.685763 +step:5455 train loss:3.669128 +step:5456 train loss:3.812101 +step:5457 train loss:3.687570 +step:5458 train loss:3.728471 +step:5459 train loss:3.670936 +step:5460 train loss:3.688313 +step:5461 train loss:3.694198 +step:5462 train loss:3.689985 +step:5463 train loss:3.699123 +step:5464 train loss:3.701926 +step:5465 train loss:3.644122 +step:5466 train loss:3.727518 +step:5467 train loss:3.711174 +step:5468 train loss:3.712283 +step:5469 train loss:3.805935 +step:5470 train loss:3.698571 +step:5471 train loss:3.771321 +step:5472 train loss:3.719858 +step:5473 train loss:3.621487 +step:5474 train loss:3.955826 +step:5475 train loss:3.632192 +step:5476 train loss:3.709971 +step:5477 train loss:3.714908 +step:5478 train loss:3.708500 +step:5479 train loss:3.852109 +step:5480 train loss:3.701032 +step:5481 train loss:3.763900 +step:5482 train loss:3.673668 +step:5483 train loss:3.708736 +step:5484 train loss:3.752248 +step:5485 train loss:3.666848 +step:5486 train loss:3.709105 +step:5487 train loss:3.712364 +step:5488 train loss:3.624636 +step:5489 train loss:3.730474 +step:5490 train loss:3.677346 +step:5491 train loss:3.778483 +step:5492 train loss:3.706120 +step:5493 train loss:3.633444 +step:5494 train loss:3.691726 +step:5495 train loss:3.671263 +step:5496 train loss:3.663240 +step:5497 train loss:3.784666 +step:5498 train loss:3.652647 +step:5499 train loss:3.788686 +step:5500 validation loss:3.635213 total_sharp:4.8952e-03 L1_sharp:4.8037e-01 L2_sharp:2.1639e-01 L3_sharp:1.1983e-01 L4_sharp:7.1083e-02 L5_sharp:6.9503e-02 L6_sharp:7.9460e-02 L7_sharp:1.1294e-01 L8_sharp:9.3747e-02 L9_sharp:7.4580e-02 L10_sharp:5.1812e-02 L11_sharp:3.8678e-02 L12_sharp:5.8934e-02 total_fnorm:1.3486e+00 
total_l1_linf:7.4457e+03 total_spectral:1.3486e+00 L1_fnorm:3.2155e-02 L2_fnorm:3.0846e-02 L3_fnorm:3.0205e-02 L4_fnorm:3.1367e-02 L5_fnorm:3.2016e-02 L6_fnorm:3.2199e-02 L7_fnorm:3.2277e-02 L8_fnorm:3.2029e-02 L9_fnorm:3.2006e-02 L10_fnorm:3.2171e-02 L11_fnorm:3.2190e-02 L12_fnorm:3.2158e-02 L1_l1linf:3.2763e-01 L2_l1linf:3.4614e-01 L3_l1linf:3.3028e-01 L4_l1linf:3.2613e-01 L5_l1linf:3.2683e-01 L6_l1linf:3.1001e-01 L7_l1linf:3.0196e-01 L8_l1linf:2.8020e-01 L9_l1linf:3.0677e-01 L10_l1linf:3.1928e-01 L11_l1linf:3.2883e-01 L12_l1linf:3.3579e-01 L1_spectral:7.2120e-03 L2_spectral:7.7308e-03 L3_spectral:7.3546e-03 L4_spectral:7.3632e-03 L5_spectral:7.3340e-03 L6_spectral:6.9408e-03 L7_spectral:6.7246e-03 L8_spectral:6.3110e-03 L9_spectral:6.8174e-03 L10_spectral:7.1002e-03 L11_spectral:7.4003e-03 L12_spectral:7.5604e-03 ip_v_neg_g:1.7388e-03 cos_v_neg_g:1.2487e-04 v_norm:1.3486e+00 g_norm:1.0325e+01 hv_norm:1.7424e+00 cos_v_hv:3.7887e-03 hg_norm:7.5628e+03 cos_g_hg:7.1441e-01 v_par:6.9157e-06 v_perp:1.3486e+00 L1_cos_v_neg_g:1.6815e-03 L1_v_norm:3.2155e-02 L2_cos_v_neg_g:9.8120e-04 L2_v_norm:3.0846e-02 L3_cos_v_neg_g:2.8118e-03 L3_v_norm:3.0205e-02 L4_cos_v_neg_g:2.2006e-03 L4_v_norm:3.1367e-02 L5_cos_v_neg_g:1.9755e-03 L5_v_norm:3.2016e-02 L6_cos_v_neg_g:2.5430e-03 L6_v_norm:3.2198e-02 L7_cos_v_neg_g:2.9456e-03 L7_v_norm:3.2277e-02 L8_cos_v_neg_g:9.3983e-04 L8_v_norm:3.2029e-02 L9_cos_v_neg_g:1.4856e-03 L9_v_norm:3.2006e-02 L10_cos_v_neg_g:1.7792e-03 L10_v_norm:3.2171e-02 L11_cos_v_neg_g:9.0461e-05 L11_v_norm:3.2190e-02 L12_cos_v_neg_g:1.3691e-03 L12_v_norm:3.2158e-02 +step:5500 train loss:3.704227 +step:5501 train loss:3.772572 +step:5502 train loss:3.727463 +step:5503 train loss:3.688935 +step:5504 train loss:3.736995 +step:5505 train loss:3.700511 +step:5506 train loss:3.740091 +step:5507 train loss:3.733022 +step:5508 train loss:3.752290 +step:5509 train loss:3.759814 +step:5510 train loss:3.738860 +step:5511 train loss:3.727685 +step:5512 train loss:3.848132 +step:5513 train loss:3.656107 +step:5514 train loss:3.712243 +step:5515 train loss:3.741462 +step:5516 train loss:3.759158 +step:5517 train loss:3.722267 +step:5518 train loss:3.750628 +step:5519 train loss:3.780097 +step:5520 train loss:3.691569 +step:5521 train loss:3.704123 +step:5522 train loss:3.662974 +step:5523 train loss:3.716164 +step:5524 train loss:3.764719 +step:5525 train loss:3.671842 +step:5526 train loss:3.683184 +step:5527 train loss:3.699887 +step:5528 train loss:3.817885 +step:5529 train loss:3.768232 +step:5530 train loss:3.738988 +step:5531 train loss:3.678652 +step:5532 train loss:3.695122 +step:5533 train loss:3.737919 +step:5534 train loss:3.646559 +step:5535 train loss:3.699667 +step:5536 train loss:3.639111 +step:5537 train loss:3.683481 +step:5538 train loss:3.676560 +step:5539 train loss:3.624855 +step:5540 train loss:3.848523 +step:5541 train loss:3.659772 +step:5542 train loss:3.707869 +step:5543 train loss:3.694766 +step:5544 train loss:3.687926 +step:5545 train loss:3.676990 +step:5546 train loss:3.718570 +step:5547 train loss:3.647659 +step:5548 train loss:3.688231 +step:5549 train loss:3.692826 +step:5550 train loss:3.712342 +step:5551 train loss:3.726541 +step:5552 train loss:3.677428 +step:5553 train loss:3.704146 +step:5554 train loss:3.678481 +step:5555 train loss:3.688810 +step:5556 train loss:3.705286 +step:5557 train loss:3.771001 +step:5558 train loss:3.693386 +step:5559 train loss:3.692875 +step:5560 train loss:3.687140 +step:5561 train loss:3.720871 +step:5562 train loss:3.678446 
+step:5563 train loss:3.660545 +step:5564 train loss:3.694149 +step:5565 train loss:3.759062 +step:5566 train loss:3.662096 +step:5567 train loss:3.777068 +step:5568 train loss:3.900888 +step:5569 train loss:3.693565 +step:5570 train loss:3.616980 +step:5571 train loss:3.712541 +step:5572 train loss:3.649090 +step:5573 train loss:3.639071 +step:5574 train loss:3.610982 +step:5575 train loss:3.699832 +step:5576 train loss:3.688411 +step:5577 train loss:3.692950 +step:5578 train loss:3.721045 +step:5579 train loss:3.677161 +step:5580 train loss:3.703444 +step:5581 train loss:3.722586 +step:5582 train loss:3.703364 +step:5583 train loss:3.713626 +step:5584 train loss:3.829950 +step:5585 train loss:3.739391 +step:5586 train loss:3.668282 +step:5587 train loss:3.706666 +step:5588 train loss:3.724788 +step:5589 train loss:3.720355 +step:5590 train loss:3.785674 +step:5591 train loss:3.647462 +step:5592 train loss:3.822328 +step:5593 train loss:3.705556 +step:5594 train loss:3.708709 +step:5595 train loss:3.696261 +step:5596 train loss:3.650164 +step:5597 train loss:3.666912 +step:5598 train loss:3.669947 +step:5599 train loss:3.679543 +step:5600 train loss:3.720083 +step:5601 train loss:3.743499 +step:5602 train loss:3.676111 +step:5603 train loss:3.716656 +step:5604 train loss:3.718874 +step:5605 train loss:3.687475 +step:5606 train loss:3.692517 +step:5607 train loss:3.724422 +step:5608 train loss:3.668208 +step:5609 train loss:3.714255 +step:5610 train loss:3.672382 +step:5611 train loss:3.711087 +step:5612 train loss:3.742496 +step:5613 train loss:3.704101 +step:5614 train loss:3.665928 +step:5615 train loss:3.768292 +step:5616 train loss:3.666364 +step:5617 train loss:3.755209 +step:5618 train loss:3.738357 +step:5619 train loss:3.693236 +step:5620 train loss:3.689239 +step:5621 train loss:3.768857 +step:5622 train loss:3.650949 +step:5623 train loss:3.686579 +step:5624 train loss:3.675617 +step:5625 train loss:3.709417 +step:5626 train loss:3.701902 +step:5627 train loss:3.677235 +step:5628 train loss:3.716371 +step:5629 train loss:3.695570 +step:5630 train loss:3.627963 +step:5631 train loss:3.669967 +step:5632 train loss:3.711810 +step:5633 train loss:3.708412 +step:5634 train loss:3.657377 +step:5635 train loss:3.700121 +step:5636 train loss:3.676395 +step:5637 train loss:3.819268 +step:5638 train loss:3.723719 +step:5639 train loss:3.701458 +step:5640 train loss:3.707392 +step:5641 train loss:3.745981 +step:5642 train loss:3.676286 +step:5643 train loss:3.696815 +step:5644 train loss:3.779482 +step:5645 train loss:3.731550 +step:5646 train loss:3.737129 +step:5647 train loss:3.725387 +step:5648 train loss:3.713138 +step:5649 train loss:3.629352 +step:5650 train loss:3.629641 +step:5651 train loss:3.707302 +step:5652 train loss:3.707572 +step:5653 train loss:3.672668 +step:5654 train loss:3.804614 +step:5655 train loss:3.665879 +step:5656 train loss:3.690656 +step:5657 train loss:3.756660 +step:5658 train loss:3.657883 +step:5659 train loss:3.694190 +step:5660 train loss:3.747872 +step:5661 train loss:3.683559 +step:5662 train loss:3.725668 +step:5663 train loss:3.613240 +step:5664 train loss:3.589566 +step:5665 train loss:3.706331 +step:5666 train loss:3.715233 +step:5667 train loss:3.747884 +step:5668 train loss:3.678617 +step:5669 train loss:3.694005 +step:5670 train loss:3.691504 +step:5671 train loss:3.677350 +step:5672 train loss:3.724220 +step:5673 train loss:3.694904 +step:5674 train loss:3.768651 +step:5675 train loss:3.679112 +step:5676 train loss:3.826158 +step:5677 train 
loss:3.726459 +step:5678 train loss:3.699020 +step:5679 train loss:3.695624 +step:5680 train loss:3.722177 +step:5681 train loss:3.691418 +step:5682 train loss:3.709988 +step:5683 train loss:3.665924 +step:5684 train loss:3.668828 +step:5685 train loss:3.720524 +step:5686 train loss:3.733159 +step:5687 train loss:3.691614 +step:5688 train loss:3.773327 +step:5689 train loss:3.677792 +step:5690 train loss:3.821181 +step:5691 train loss:3.656706 +step:5692 train loss:3.640953 +step:5693 train loss:3.653271 +step:5694 train loss:3.670142 +step:5695 train loss:3.685172 +step:5696 train loss:3.738286 +step:5697 train loss:3.665880 +step:5698 train loss:3.682724 +step:5699 train loss:3.698947 +step:5700 train loss:3.693480 +step:5701 train loss:3.680870 +step:5702 train loss:3.755100 +step:5703 train loss:3.650907 +step:5704 train loss:3.694658 +step:5705 train loss:3.705894 +step:5706 train loss:3.728325 +step:5707 train loss:3.645271 +step:5708 train loss:3.732843 +step:5709 train loss:3.736207 +step:5710 train loss:3.722277 +step:5711 train loss:3.747609 +step:5712 train loss:3.728190 +step:5713 train loss:3.652610 +step:5714 train loss:3.739666 +step:5715 train loss:3.697414 +step:5716 train loss:3.698316 +step:5717 train loss:3.729017 +step:5718 train loss:3.670875 +step:5719 train loss:3.739482 +step:5720 train loss:3.717726 +step:5721 train loss:3.647593 +step:5722 train loss:3.655811 +step:5723 train loss:3.740273 +step:5724 train loss:3.655404 +step:5725 train loss:3.724502 +step:5726 train loss:3.724423 +step:5727 train loss:3.677290 +step:5728 train loss:3.682111 +step:5729 train loss:3.686227 +step:5730 train loss:3.749727 +step:5731 train loss:3.628968 +step:5732 train loss:3.683612 +step:5733 train loss:3.670435 +step:5734 train loss:3.689692 +step:5735 train loss:3.680166 +step:5736 train loss:3.681751 +step:5737 train loss:3.705699 +step:5738 train loss:3.672538 +step:5739 train loss:3.681265 +step:5740 train loss:3.722604 +step:5741 train loss:3.689809 +step:5742 train loss:3.747481 +step:5743 train loss:3.709984 +step:5744 train loss:3.667181 +step:5745 train loss:3.676740 +step:5746 train loss:3.707407 +step:5747 train loss:3.688440 +step:5748 train loss:3.740185 +step:5749 train loss:3.696370 +step:5750 validation loss:3.626689 +step:5750 train loss:3.700256 +step:5751 train loss:3.717109 +step:5752 train loss:3.703940 +step:5753 train loss:3.668579 +step:5754 train loss:3.681669 +step:5755 train loss:3.694911 +step:5756 train loss:3.680217 +step:5757 train loss:3.750980 +step:5758 train loss:3.680832 +step:5759 train loss:3.646207 +step:5760 train loss:3.723853 +step:5761 train loss:3.721611 +step:5762 train loss:3.679790 +step:5763 train loss:3.705837 +step:5764 train loss:3.668433 +step:5765 train loss:3.785520 +step:5766 train loss:3.693978 +step:5767 train loss:3.729132 +step:5768 train loss:3.667742 +step:5769 train loss:3.789973 +step:5770 train loss:3.710625 +step:5771 train loss:3.736706 +step:5772 train loss:3.694380 +step:5773 train loss:3.670349 +step:5774 train loss:3.678621 +step:5775 train loss:3.745659 +step:5776 train loss:3.736594 +step:5777 train loss:3.652299 +step:5778 train loss:3.739586 +step:5779 train loss:3.704898 +step:5780 train loss:3.668752 +step:5781 train loss:3.732617 +step:5782 train loss:3.694502 +step:5783 train loss:3.651576 +step:5784 train loss:3.757232 +step:5785 train loss:3.747185 +step:5786 train loss:3.656918 +step:5787 train loss:3.701606 +step:5788 train loss:3.712963 +step:5789 train loss:3.659661 +step:5790 train loss:3.756777 
+step:5791 train loss:3.687611 +step:5792 train loss:3.953020 +step:5793 train loss:3.729172 +step:5794 train loss:3.751306 +step:5795 train loss:3.739587 +step:5796 train loss:3.720090 +step:5797 train loss:3.704528 +step:5798 train loss:3.702950 +step:5799 train loss:3.667280 +step:5800 train loss:3.832856 +step:5801 train loss:3.703341 +step:5802 train loss:3.688238 +step:5803 train loss:3.704337 +step:5804 train loss:3.724485 +step:5805 train loss:3.682324 +step:5806 train loss:3.729272 +step:5807 train loss:3.649621 +step:5808 train loss:3.682716 +step:5809 train loss:3.692331 +step:5810 train loss:3.666031 +step:5811 train loss:3.678836 +step:5812 train loss:3.662672 +step:5813 train loss:3.672235 +step:5814 train loss:3.664081 +step:5815 train loss:3.665373 +step:5816 train loss:3.734234 +step:5817 train loss:3.739595 +step:5818 train loss:3.716629 +step:5819 train loss:3.767885 +step:5820 train loss:3.711080 +step:5821 train loss:3.697408 +step:5822 train loss:3.716827 +step:5823 train loss:3.717177 +step:5824 train loss:3.668554 +step:5825 train loss:3.759698 +step:5826 train loss:3.676632 +step:5827 train loss:3.641127 +step:5828 train loss:3.627127 +step:5829 train loss:3.691385 +step:5830 train loss:3.666889 +step:5831 train loss:3.634074 +step:5832 train loss:3.753047 +step:5833 train loss:3.731152 +step:5834 train loss:3.711987 +step:5835 train loss:3.664631 +step:5836 train loss:3.628521 +step:5837 train loss:3.746608 +step:5838 train loss:3.730489 +step:5839 train loss:3.708964 +step:5840 train loss:3.786836 +step:5841 train loss:3.714782 +step:5842 train loss:3.723265 +step:5843 train loss:3.670299 +step:5844 train loss:3.739481 +step:5845 train loss:3.650123 +step:5846 train loss:3.694311 +step:5847 train loss:3.723321 +step:5848 train loss:3.792303 +step:5849 train loss:3.681865 +step:5850 train loss:3.711867 +step:5851 train loss:3.681354 +step:5852 train loss:3.765778 +step:5853 train loss:3.856213 +step:5854 train loss:3.649581 +step:5855 train loss:3.709030 +step:5856 train loss:3.677817 +step:5857 train loss:3.690966 +step:5858 train loss:3.663589 +step:5859 train loss:3.666840 +step:5860 train loss:3.772669 +step:5861 train loss:3.653447 +step:5862 train loss:3.767589 +step:5863 train loss:3.704921 +step:5864 train loss:3.694189 +step:5865 train loss:3.700695 +step:5866 train loss:3.691393 +step:5867 train loss:3.769722 +step:5868 train loss:3.699694 +step:5869 train loss:3.720060 +step:5870 train loss:3.695147 +step:5871 train loss:3.679459 +step:5872 train loss:3.706950 +step:5873 train loss:3.683861 +step:5874 train loss:3.768473 +step:5875 train loss:3.697658 +step:5876 train loss:3.676055 +step:5877 train loss:3.684217 +step:5878 train loss:3.686954 +step:5879 train loss:3.657992 +step:5880 train loss:3.853399 +step:5881 train loss:3.696198 +step:5882 train loss:3.666083 +step:5883 train loss:3.664111 +step:5884 train loss:3.689001 +step:5885 train loss:3.679350 +step:5886 train loss:3.698076 +step:5887 train loss:3.700063 +step:5888 train loss:3.677816 +step:5889 train loss:3.658957 +step:5890 train loss:3.707961 +step:5891 train loss:3.650145 +step:5892 train loss:3.728058 +step:5893 train loss:3.653968 +step:5894 train loss:3.645179 +step:5895 train loss:3.647647 +step:5896 train loss:3.664235 +step:5897 train loss:3.728783 +step:5898 train loss:3.941938 +step:5899 train loss:3.688927 +step:5900 train loss:3.729907 +step:5901 train loss:3.678154 +step:5902 train loss:3.696647 +step:5903 train loss:3.686064 +step:5904 train loss:3.712387 +step:5905 train 
loss:3.816628 +step:5906 train loss:3.758278 +step:5907 train loss:3.700447 +step:5908 train loss:3.682724 +step:5909 train loss:3.669049 +step:5910 train loss:3.653353 +step:5911 train loss:3.675476 +step:5912 train loss:3.681056 +step:5913 train loss:3.712682 +step:5914 train loss:3.689958 +step:5915 train loss:3.814622 +step:5916 train loss:3.698153 +step:5917 train loss:3.668447 +step:5918 train loss:3.663863 +step:5919 train loss:3.689569 +step:5920 train loss:3.690511 +step:5921 train loss:3.664359 +step:5922 train loss:3.718073 +step:5923 train loss:3.713379 +step:5924 train loss:3.667458 +step:5925 train loss:3.791110 +step:5926 train loss:3.682353 +step:5927 train loss:3.654863 +step:5928 train loss:3.690916 +step:5929 train loss:3.709346 +step:5930 train loss:3.660439 +step:5931 train loss:3.645296 +step:5932 train loss:3.683004 +step:5933 train loss:3.736527 +step:5934 train loss:3.655035 +step:5935 train loss:3.678580 +step:5936 train loss:3.670725 +step:5937 train loss:3.652061 +step:5938 train loss:3.664935 +step:5939 train loss:3.645401 +step:5940 train loss:3.727366 +step:5941 train loss:3.662348 +step:5942 train loss:3.678417 +step:5943 train loss:3.681079 +step:5944 train loss:3.735634 +step:5945 train loss:3.666330 +step:5946 train loss:3.647940 +step:5947 train loss:3.661286 +step:5948 train loss:3.696510 +step:5949 train loss:3.744233 +step:5950 train loss:3.705488 +step:5951 train loss:3.704470 +step:5952 train loss:3.632618 +step:5953 train loss:3.675862 +step:5954 train loss:3.681607 +step:5955 train loss:3.690348 +step:5956 train loss:3.666435 +step:5957 train loss:3.633809 +step:5958 train loss:3.706417 +step:5959 train loss:3.668097 +step:5960 train loss:3.642395 +step:5961 train loss:3.665747 +step:5962 train loss:3.697840 +step:5963 train loss:3.734272 +step:5964 train loss:3.689694 +step:5965 train loss:3.704998 +step:5966 train loss:3.700322 +step:5967 train loss:3.665069 +step:5968 train loss:3.736099 +step:5969 train loss:3.681915 +step:5970 train loss:3.699844 +step:5971 train loss:3.649494 +step:5972 train loss:3.673692 +step:5973 train loss:3.669484 +step:5974 train loss:3.688845 +step:5975 train loss:3.659168 +step:5976 train loss:3.697267 +step:5977 train loss:3.656566 +step:5978 train loss:3.643731 +step:5979 train loss:3.679110 +step:5980 train loss:3.746996 +step:5981 train loss:3.646157 +step:5982 train loss:3.655168 +step:5983 train loss:3.723007 +step:5984 train loss:3.666613 +step:5985 train loss:3.707663 +step:5986 train loss:3.685163 +step:5987 train loss:3.668214 +step:5988 train loss:3.677549 +step:5989 train loss:3.694507 +step:5990 train loss:3.625595 +step:5991 train loss:3.689079 +step:5992 train loss:3.723414 +step:5993 train loss:3.676189 +step:5994 train loss:3.694316 +step:5995 train loss:3.584258 +step:5996 train loss:3.750341 +step:5997 train loss:3.733100 +step:5998 train loss:3.610860 +step:5999 train loss:3.637386 +step:6000 validation loss:3.613990 total_sharp:7.3045e-03 L1_sharp:3.6484e-01 L2_sharp:1.2861e-01 L3_sharp:1.6027e-01 L4_sharp:9.4026e-02 L5_sharp:1.0746e-01 L6_sharp:1.0871e-01 L7_sharp:1.4349e-01 L8_sharp:1.3455e-01 L9_sharp:1.2068e-01 L10_sharp:8.6593e-02 L11_sharp:7.3705e-02 L12_sharp:1.3499e-01 total_fnorm:1.2851e+00 total_l1_linf:7.1112e+03 total_spectral:1.2851e+00 L1_fnorm:3.2031e-02 L2_fnorm:3.0708e-02 L3_fnorm:3.0365e-02 L4_fnorm:3.1188e-02 L5_fnorm:3.1900e-02 L6_fnorm:3.1861e-02 L7_fnorm:3.1948e-02 L8_fnorm:3.1976e-02 L9_fnorm:3.1928e-02 L10_fnorm:3.1925e-02 L11_fnorm:3.1940e-02 L12_fnorm:3.1839e-02 
L1_l1linf:3.0995e-01 L2_l1linf:3.1704e-01 L3_l1linf:3.1297e-01 L4_l1linf:3.2550e-01 L5_l1linf:3.1411e-01 L6_l1linf:2.8360e-01 L7_l1linf:2.7013e-01 L8_l1linf:2.7565e-01 L9_l1linf:2.9796e-01 L10_l1linf:2.9861e-01 L11_l1linf:3.1596e-01 L12_l1linf:3.0527e-01 L1_spectral:6.8651e-03 L2_spectral:7.1388e-03 L3_spectral:7.0463e-03 L4_spectral:7.2667e-03 L5_spectral:7.0529e-03 L6_spectral:6.3490e-03 L7_spectral:6.0935e-03 L8_spectral:6.1919e-03 L9_spectral:6.6626e-03 L10_spectral:6.7292e-03 L11_spectral:7.1041e-03 L12_spectral:6.8371e-03 ip_v_neg_g:4.7799e-03 cos_v_neg_g:6.5536e-04 v_norm:1.2851e+00 g_norm:5.6756e+00 hv_norm:1.7468e+00 cos_v_hv:5.3738e-03 hg_norm:9.8080e+02 cos_g_hg:4.5690e-01 v_par:2.4976e-05 v_perp:1.2851e+00 L1_cos_v_neg_g:9.5881e-03 L1_v_norm:3.2031e-02 L2_cos_v_neg_g:9.7550e-03 L2_v_norm:3.0708e-02 L3_cos_v_neg_g:8.8718e-03 L3_v_norm:3.0365e-02 L4_cos_v_neg_g:8.1310e-03 L4_v_norm:3.1188e-02 L5_cos_v_neg_g:8.9890e-03 L5_v_norm:3.1900e-02 L6_cos_v_neg_g:8.5033e-03 L6_v_norm:3.1861e-02 L7_cos_v_neg_g:7.3541e-03 L7_v_norm:3.1948e-02 L8_cos_v_neg_g:6.2557e-03 L8_v_norm:3.1976e-02 L9_cos_v_neg_g:5.5895e-03 L9_v_norm:3.1928e-02 L10_cos_v_neg_g:5.9236e-03 L10_v_norm:3.1925e-02 L11_cos_v_neg_g:5.0272e-03 L11_v_norm:3.1940e-02 L12_cos_v_neg_g:3.8493e-03 L12_v_norm:3.1839e-02 +step:6000 train loss:3.691975 +step:6001 train loss:3.652635 +step:6002 train loss:3.679765 +step:6003 train loss:3.708049 +step:6004 train loss:3.654022 +step:6005 train loss:3.726117 +step:6006 train loss:3.630302 +step:6007 train loss:3.652640 +step:6008 train loss:3.667249 +step:6009 train loss:3.704797 +step:6010 train loss:3.696220 +step:6011 train loss:3.681362 +step:6012 train loss:3.650751 +step:6013 train loss:3.713790 +step:6014 train loss:3.732222 +step:6015 train loss:3.727511 +step:6016 train loss:3.697230 +step:6017 train loss:3.701160 +step:6018 train loss:3.641191 +step:6019 train loss:3.677906 +step:6020 train loss:3.662927 +step:6021 train loss:3.592139 +step:6022 train loss:3.702508 +step:6023 train loss:3.640251 +step:6024 train loss:3.716497 +step:6025 train loss:3.685392 +step:6026 train loss:3.654309 +step:6027 train loss:3.697423 +step:6028 train loss:3.614597 +step:6029 train loss:3.724102 +step:6030 train loss:3.696839 +step:6031 train loss:3.668667 +step:6032 train loss:3.625957 +step:6033 train loss:3.682770 +step:6034 train loss:3.711281 +step:6035 train loss:3.628968 +step:6036 train loss:3.602076 +step:6037 train loss:3.712296 +step:6038 train loss:3.722368 +step:6039 train loss:3.702353 +step:6040 train loss:3.662862 +step:6041 train loss:3.646692 +step:6042 train loss:3.621294 +step:6043 train loss:3.680416 +step:6044 train loss:3.804614 +step:6045 train loss:3.645402 +step:6046 train loss:3.654245 +step:6047 train loss:3.692966 +step:6048 train loss:3.698955 +step:6049 train loss:3.676877 +step:6050 train loss:3.645240 +step:6051 train loss:3.698267 +step:6052 train loss:3.673447 +step:6053 train loss:3.784663 +step:6054 train loss:3.823047 +step:6055 train loss:3.641678 +step:6056 train loss:3.630688 +step:6057 train loss:3.667234 +step:6058 train loss:3.697859 +step:6059 train loss:3.698310 +step:6060 train loss:3.703115 +step:6061 train loss:3.723428 +step:6062 train loss:3.668949 +step:6063 train loss:3.688162 +step:6064 train loss:3.684831 +step:6065 train loss:3.680015 +step:6066 train loss:3.671338 +step:6067 train loss:3.710375 +step:6068 train loss:3.649018 +step:6069 train loss:3.606580 +step:6070 train loss:3.749711 +step:6071 train loss:3.698921 +step:6072 train 
loss:3.640845 +step:6073 train loss:3.677816 +step:6074 train loss:3.766977 +step:6075 train loss:3.686822 +step:6076 train loss:3.697634 +step:6077 train loss:3.700537 +step:6078 train loss:3.634724 +step:6079 train loss:3.662360 +step:6080 train loss:3.669476 +step:6081 train loss:3.705164 +step:6082 train loss:3.657803 +step:6083 train loss:3.668172 +step:6084 train loss:3.730608 +step:6085 train loss:3.729414 +step:6086 train loss:3.628936 +step:6087 train loss:3.674583 +step:6088 train loss:3.660345 +step:6089 train loss:3.718740 +step:6090 train loss:3.721571 +step:6091 train loss:3.666820 +step:6092 train loss:3.633580 +step:6093 train loss:3.691552 +step:6094 train loss:3.605406 +step:6095 train loss:3.771569 +step:6096 train loss:3.640770 +step:6097 train loss:3.719174 +step:6098 train loss:3.696874 +step:6099 train loss:3.750328 +step:6100 train loss:3.744589 +step:6101 train loss:3.679959 +step:6102 train loss:3.789803 +step:6103 train loss:3.685622 +step:6104 train loss:3.791833 +step:6105 train loss:3.722215 +step:6106 train loss:3.661297 +step:6107 train loss:3.727698 +step:6108 train loss:3.689199 +step:6109 train loss:3.762382 +step:6110 train loss:3.692943 +step:6111 train loss:3.727254 +step:6112 train loss:3.664331 +step:6113 train loss:3.691990 +step:6114 train loss:3.664812 +step:6115 train loss:3.731785 +step:6116 train loss:3.671841 +step:6117 train loss:3.722281 +step:6118 train loss:3.701725 +step:6119 train loss:3.715774 +step:6120 train loss:3.856267 +step:6121 train loss:3.691354 +step:6122 train loss:3.702989 +step:6123 train loss:3.688943 +step:6124 train loss:3.659684 +step:6125 train loss:3.650056 +step:6126 train loss:3.673481 +step:6127 train loss:3.661243 +step:6128 train loss:3.626060 +step:6129 train loss:3.858397 +step:6130 train loss:3.643891 +step:6131 train loss:3.618145 +step:6132 train loss:3.689742 +step:6133 train loss:3.656164 +step:6134 train loss:3.685449 +step:6135 train loss:3.765486 +step:6136 train loss:3.791344 +step:6137 train loss:3.649957 +step:6138 train loss:3.707410 +step:6139 train loss:3.685067 +step:6140 train loss:3.684495 +step:6141 train loss:3.642030 +step:6142 train loss:3.705336 +step:6143 train loss:3.672899 +step:6144 train loss:3.692873 +step:6145 train loss:3.931489 +step:6146 train loss:3.774893 +step:6147 train loss:3.858087 +step:6148 train loss:3.629234 +step:6149 train loss:3.752789 +step:6150 train loss:3.709873 +step:6151 train loss:3.661132 +step:6152 train loss:3.655155 +step:6153 train loss:3.725525 +step:6154 train loss:3.811057 +step:6155 train loss:3.676484 +step:6156 train loss:3.767992 +step:6157 train loss:3.700179 +step:6158 train loss:3.693197 +step:6159 train loss:3.662276 +step:6160 train loss:3.828667 +step:6161 train loss:3.678864 +step:6162 train loss:3.697718 +step:6163 train loss:3.728853 +step:6164 train loss:3.642476 +step:6165 train loss:3.710290 +step:6166 train loss:3.700029 +step:6167 train loss:3.716049 +step:6168 train loss:3.693956 +step:6169 train loss:3.691671 +step:6170 train loss:3.691143 +step:6171 train loss:3.661250 +step:6172 train loss:3.647474 +step:6173 train loss:3.701712 +step:6174 train loss:3.632036 +step:6175 train loss:3.638799 +step:6176 train loss:3.624394 +step:6177 train loss:3.722267 +step:6178 train loss:3.664129 +step:6179 train loss:3.669359 +step:6180 train loss:3.679702 +step:6181 train loss:3.712923 +step:6182 train loss:3.595212 +step:6183 train loss:3.600506 +step:6184 train loss:3.724393 +step:6185 train loss:3.679610 +step:6186 train loss:3.639899 
+step:6187 train loss:3.680097 +step:6188 train loss:3.646448 +step:6189 train loss:3.688185 +step:6190 train loss:3.647879 +step:6191 train loss:3.680799 +step:6192 train loss:3.646685 +step:6193 train loss:3.713006 +step:6194 train loss:3.701731 +step:6195 train loss:3.682645 +step:6196 train loss:3.696718 +step:6197 train loss:3.723148 +step:6198 train loss:3.636918 +step:6199 train loss:3.660407 +step:6200 train loss:3.700484 +step:6201 train loss:3.745690 +step:6202 train loss:3.744851 +step:6203 train loss:3.743915 +step:6204 train loss:3.728359 +step:6205 train loss:3.668067 +step:6206 train loss:3.656514 +step:6207 train loss:3.712911 +step:6208 train loss:3.734898 +step:6209 train loss:3.709187 +step:6210 train loss:3.736163 +step:6211 train loss:3.655460 +step:6212 train loss:3.651979 +step:6213 train loss:3.663572 +step:6214 train loss:3.638148 +step:6215 train loss:3.811604 +step:6216 train loss:3.681361 +step:6217 train loss:3.741749 +step:6218 train loss:3.715647 +step:6219 train loss:3.726398 +step:6220 train loss:3.686198 +step:6221 train loss:3.653615 +step:6222 train loss:3.889426 +step:6223 train loss:3.653854 +step:6224 train loss:3.690388 +step:6225 train loss:3.663876 +step:6226 train loss:3.675484 +step:6227 train loss:3.678258 +step:6228 train loss:3.672521 +step:6229 train loss:3.712610 +step:6230 train loss:3.669955 +step:6231 train loss:3.781026 +step:6232 train loss:3.621731 +step:6233 train loss:3.664296 +step:6234 train loss:3.672613 +step:6235 train loss:3.699941 +step:6236 train loss:3.636427 +step:6237 train loss:3.660730 +step:6238 train loss:3.682383 +step:6239 train loss:3.667861 +step:6240 train loss:3.689672 +step:6241 train loss:3.674808 +step:6242 train loss:3.672929 +step:6243 train loss:3.705949 +step:6244 train loss:3.861666 +step:6245 train loss:3.661069 +step:6246 train loss:3.644071 +step:6247 train loss:3.643410 +step:6248 train loss:3.646595 +step:6249 train loss:3.584742 +step:6250 validation loss:3.601735 +step:6250 train loss:3.622502 +step:6251 train loss:3.637132 +step:6252 train loss:3.687445 +step:6253 train loss:3.695449 +step:6254 train loss:3.682122 +step:6255 train loss:3.649362 +step:6256 train loss:3.703351 +step:6257 train loss:3.699411 +step:6258 train loss:3.679565 +step:6259 train loss:3.682847 +step:6260 train loss:3.709880 +step:6261 train loss:3.735106 +step:6262 train loss:3.631041 +step:6263 train loss:3.660631 +step:6264 train loss:3.672130 +step:6265 train loss:3.656026 +step:6266 train loss:3.856046 +step:6267 train loss:3.665878 +step:6268 train loss:3.756810 +step:6269 train loss:3.626317 +step:6270 train loss:3.637705 +step:6271 train loss:3.689856 +step:6272 train loss:3.680906 +step:6273 train loss:3.873631 +step:6274 train loss:3.657452 +step:6275 train loss:3.688125 +step:6276 train loss:3.665656 +step:6277 train loss:3.646637 +step:6278 train loss:3.630997 +step:6279 train loss:3.685453 +step:6280 train loss:3.690249 +step:6281 train loss:3.625365 +step:6282 train loss:3.636035 +step:6283 train loss:3.722617 +step:6284 train loss:3.696850 +step:6285 train loss:3.695640 +step:6286 train loss:3.641221 +step:6287 train loss:3.666337 +step:6288 train loss:3.766011 +step:6289 train loss:3.632909 +step:6290 train loss:3.623352 +step:6291 train loss:3.658492 +step:6292 train loss:3.675511 +step:6293 train loss:3.664897 +step:6294 train loss:3.651557 +step:6295 train loss:3.672723 +step:6296 train loss:3.636279 +step:6297 train loss:3.762873 +step:6298 train loss:3.711326 +step:6299 train loss:3.604841 +step:6300 
train loss:3.684013 +step:6301 train loss:3.715468 +step:6302 train loss:3.697628 +step:6303 train loss:3.663036 +step:6304 train loss:3.676868 +step:6305 train loss:3.649063 +step:6306 train loss:3.665490 +step:6307 train loss:3.676435 +step:6308 train loss:3.649519 +step:6309 train loss:3.641758 +step:6310 train loss:3.698979 +step:6311 train loss:3.652633 +step:6312 train loss:3.695860 +step:6313 train loss:3.625364 +step:6314 train loss:3.649287 +step:6315 train loss:3.704993 +step:6316 train loss:3.622340 +step:6317 train loss:3.624369 +step:6318 train loss:3.730562 +step:6319 train loss:3.660460 +step:6320 train loss:3.683021 +step:6321 train loss:3.661416 +step:6322 train loss:3.660319 +step:6323 train loss:3.602249 +step:6324 train loss:3.603589 +step:6325 train loss:3.705180 +step:6326 train loss:3.624794 +step:6327 train loss:3.696009 +step:6328 train loss:3.675507 +step:6329 train loss:3.595317 +step:6330 train loss:3.624460 +step:6331 train loss:3.640388 +step:6332 train loss:3.771727 +step:6333 train loss:3.655796 +step:6334 train loss:3.628101 +step:6335 train loss:3.601085 +step:6336 train loss:3.633134 +step:6337 train loss:3.652812 +step:6338 train loss:3.615499 +step:6339 train loss:3.659076 +step:6340 train loss:3.635787 +step:6341 train loss:3.652158 +step:6342 train loss:3.646951 +step:6343 train loss:3.746774 +step:6344 train loss:3.594535 +step:6345 train loss:3.613577 +step:6346 train loss:3.698233 +step:6347 train loss:3.565186 +step:6348 train loss:3.665956 +step:6349 train loss:3.638607 +step:6350 train loss:3.615126 +step:6351 train loss:3.615237 +step:6352 train loss:3.626155 +step:6353 train loss:3.651912 +step:6354 train loss:3.662525 +step:6355 train loss:3.668168 +step:6356 train loss:3.682821 +step:6357 train loss:3.531912 +step:6358 train loss:3.629426 +step:6359 train loss:3.683757 +step:6360 train loss:3.594919 +step:6361 train loss:3.605000 +step:6362 train loss:3.634808 +step:6363 train loss:3.619120 +step:6364 train loss:3.603841 +step:6365 train loss:3.670247 +step:6366 train loss:3.686854 +step:6367 train loss:3.606660 +step:6368 train loss:3.661061 +step:6369 train loss:3.624625 +step:6370 train loss:3.673874 +step:6371 train loss:3.594251 +step:6372 train loss:3.622143 +step:6373 train loss:3.654272 +step:6374 train loss:3.678418 +step:6375 train loss:3.637841 +step:6376 train loss:3.664300 +step:6377 train loss:3.656336 +step:6378 train loss:3.612190 +step:6379 train loss:3.651930 +step:6380 train loss:3.690581 +step:6381 train loss:3.661468 +step:6382 train loss:3.609417 +step:6383 train loss:3.684387 +step:6384 train loss:3.645198 +step:6385 train loss:3.630475 +step:6386 train loss:3.666625 +step:6387 train loss:3.640192 +step:6388 train loss:3.688234 +step:6389 train loss:3.687475 +step:6390 train loss:3.642396 +step:6391 train loss:3.626592 +step:6392 train loss:3.612195 +step:6393 train loss:3.669660 +step:6394 train loss:3.654066 +step:6395 train loss:3.831153 +step:6396 train loss:3.658446 +step:6397 train loss:3.600441 +step:6398 train loss:3.669317 +step:6399 train loss:3.611240 +step:6400 train loss:3.691258 +step:6401 train loss:3.715995 +step:6402 train loss:3.657922 +step:6403 train loss:3.645583 +step:6404 train loss:3.621880 +step:6405 train loss:3.651162 +step:6406 train loss:3.656245 +step:6407 train loss:3.719517 +step:6408 train loss:3.604220 +step:6409 train loss:3.597284 +step:6410 train loss:3.719174 +step:6411 train loss:3.653916 +step:6412 train loss:3.660710 +step:6413 train loss:3.657825 +step:6414 train loss:3.611430 
+step:6415 train loss:3.665420 +step:6416 train loss:3.641997 +step:6417 train loss:3.611209 +step:6418 train loss:3.605166 +step:6419 train loss:3.692109 +step:6420 train loss:3.614540 +step:6421 train loss:3.645225 +step:6422 train loss:3.625227 +step:6423 train loss:3.643535 +step:6424 train loss:3.661741 +step:6425 train loss:3.659355 +step:6426 train loss:3.699613 +step:6427 train loss:3.663170 +step:6428 train loss:3.704876 +step:6429 train loss:3.659723 +step:6430 train loss:3.641465 +step:6431 train loss:3.610494 +step:6432 train loss:3.648159 +step:6433 train loss:3.661779 +step:6434 train loss:3.542332 +step:6435 train loss:3.721311 +step:6436 train loss:3.653923 +step:6437 train loss:3.621440 +step:6438 train loss:3.646923 +step:6439 train loss:3.622770 +step:6440 train loss:3.636608 +step:6441 train loss:3.633013 +step:6442 train loss:3.573774 +step:6443 train loss:3.623324 +step:6444 train loss:3.769710 +step:6445 train loss:3.668355 +step:6446 train loss:3.677393 +step:6447 train loss:3.654847 +step:6448 train loss:3.604651 +step:6449 train loss:3.631098 +step:6450 train loss:3.608775 +step:6451 train loss:3.602862 +step:6452 train loss:3.600539 +step:6453 train loss:3.650882 +step:6454 train loss:3.668146 +step:6455 train loss:3.658816 +step:6456 train loss:3.674769 +step:6457 train loss:3.658445 +step:6458 train loss:3.631853 +step:6459 train loss:3.609919 +step:6460 train loss:3.620668 +step:6461 train loss:3.617362 +step:6462 train loss:3.612681 +step:6463 train loss:3.709976 +step:6464 train loss:3.613021 +step:6465 train loss:3.659070 +step:6466 train loss:3.670353 +step:6467 train loss:3.598854 +step:6468 train loss:3.673647 +step:6469 train loss:3.589090 +step:6470 train loss:3.704376 +step:6471 train loss:3.619953 +step:6472 train loss:3.773697 +step:6473 train loss:3.657176 +step:6474 train loss:3.688021 +step:6475 train loss:3.627591 +step:6476 train loss:3.706722 +step:6477 train loss:3.630860 +step:6478 train loss:3.763680 +step:6479 train loss:3.678438 +step:6480 train loss:3.618243 +step:6481 train loss:3.670966 +step:6482 train loss:3.611609 +step:6483 train loss:3.675563 +step:6484 train loss:3.628211 +step:6485 train loss:3.693875 +step:6486 train loss:3.621243 +step:6487 train loss:3.628194 +step:6488 train loss:3.617887 +step:6489 train loss:3.622232 +step:6490 train loss:3.642018 +step:6491 train loss:3.611958 +step:6492 train loss:3.719882 +step:6493 train loss:3.622083 +step:6494 train loss:3.630249 +step:6495 train loss:3.622527 +step:6496 train loss:3.661573 +step:6497 train loss:3.676712 +step:6498 train loss:3.786154 +step:6499 train loss:3.751850 +step:6500 validation loss:3.591365 total_sharp:5.4207e-03 L1_sharp:4.4285e-01 L2_sharp:1.4394e-01 L3_sharp:1.2037e-01 L4_sharp:5.7888e-02 L5_sharp:7.0331e-02 L6_sharp:9.7764e-02 L7_sharp:1.1868e-01 L8_sharp:9.9902e-02 L9_sharp:7.9159e-02 L10_sharp:5.9466e-02 L11_sharp:5.4665e-02 L12_sharp:1.2204e-01 total_fnorm:1.2958e+00 total_l1_linf:7.1708e+03 total_spectral:1.2958e+00 L1_fnorm:3.1648e-02 L2_fnorm:3.0380e-02 L3_fnorm:3.0077e-02 L4_fnorm:3.1006e-02 L5_fnorm:3.1639e-02 L6_fnorm:3.1944e-02 L7_fnorm:3.1929e-02 L8_fnorm:3.1816e-02 L9_fnorm:3.1764e-02 L10_fnorm:3.1785e-02 L11_fnorm:3.1888e-02 L12_fnorm:3.2017e-02 L1_l1linf:2.9116e-01 L2_l1linf:3.0977e-01 L3_l1linf:2.9266e-01 L4_l1linf:2.8436e-01 L5_l1linf:2.8894e-01 L6_l1linf:2.9051e-01 L7_l1linf:2.6629e-01 L8_l1linf:2.5990e-01 L9_l1linf:2.7059e-01 L10_l1linf:2.8199e-01 L11_l1linf:3.0617e-01 L12_l1linf:3.1750e-01 L1_spectral:6.5328e-03 L2_spectral:6.9804e-03 
L3_spectral:6.5587e-03 L4_spectral:6.4192e-03 L5_spectral:6.5059e-03 L6_spectral:6.4845e-03 L7_spectral:6.0381e-03 L8_spectral:5.8951e-03 L9_spectral:6.1362e-03 L10_spectral:6.3688e-03 L11_spectral:6.8657e-03 L12_spectral:7.0916e-03 ip_v_neg_g:3.6773e-03 cos_v_neg_g:5.1155e-04 v_norm:1.2958e+00 g_norm:5.5475e+00 hv_norm:1.3819e+00 cos_v_hv:5.0830e-03 hg_norm:7.5636e+02 cos_g_hg:4.7844e-01 v_par:1.7284e-05 v_perp:1.2958e+00 L1_cos_v_neg_g:7.7830e-03 L1_v_norm:3.1648e-02 L2_cos_v_neg_g:6.0163e-03 L2_v_norm:3.0380e-02 L3_cos_v_neg_g:4.4847e-03 L3_v_norm:3.0077e-02 L4_cos_v_neg_g:4.5086e-03 L4_v_norm:3.1006e-02 L5_cos_v_neg_g:4.2530e-03 L5_v_norm:3.1639e-02 L6_cos_v_neg_g:5.0378e-03 L6_v_norm:3.1944e-02 L7_cos_v_neg_g:5.1913e-03 L7_v_norm:3.1929e-02 L8_cos_v_neg_g:6.6870e-03 L8_v_norm:3.1816e-02 L9_cos_v_neg_g:6.7068e-03 L9_v_norm:3.1764e-02 L10_cos_v_neg_g:5.7752e-03 L10_v_norm:3.1785e-02 L11_cos_v_neg_g:4.8697e-03 L11_v_norm:3.1888e-02 L12_cos_v_neg_g:3.4740e-03 L12_v_norm:3.2017e-02 +step:6500 train loss:3.603309 +step:6501 train loss:3.618885 +step:6502 train loss:3.635415 +step:6503 train loss:3.698492 +step:6504 train loss:3.645526 +step:6505 train loss:3.655359 +step:6506 train loss:3.611942 +step:6507 train loss:3.682345 +step:6508 train loss:3.643150 +step:6509 train loss:3.633612 +step:6510 train loss:3.635543 +step:6511 train loss:3.657639 +step:6512 train loss:3.595687 +step:6513 train loss:3.665648 +step:6514 train loss:3.535223 +step:6515 train loss:3.634292 +step:6516 train loss:3.680319 +step:6517 train loss:3.593135 +step:6518 train loss:3.632587 +step:6519 train loss:3.619966 +step:6520 train loss:3.713059 +step:6521 train loss:3.685687 +step:6522 train loss:3.698180 +step:6523 train loss:3.589095 +step:6524 train loss:3.676991 +step:6525 train loss:3.657077 +step:6526 train loss:3.607378 +step:6527 train loss:3.649502 +step:6528 train loss:3.671953 +step:6529 train loss:3.701276 +step:6530 train loss:3.602333 +step:6531 train loss:3.691273 +step:6532 train loss:3.610389 +step:6533 train loss:3.651950 +step:6534 train loss:3.656966 +step:6535 train loss:3.640911 +step:6536 train loss:3.764868 +step:6537 train loss:3.573213 +step:6538 train loss:3.682630 +step:6539 train loss:3.610697 +step:6540 train loss:3.720438 +step:6541 train loss:3.699539 +step:6542 train loss:3.659299 +step:6543 train loss:3.611145 +step:6544 train loss:3.596681 +step:6545 train loss:3.582370 +step:6546 train loss:3.644393 +step:6547 train loss:3.696936 +step:6548 train loss:3.646686 +step:6549 train loss:3.655979 +step:6550 train loss:3.770646 +step:6551 train loss:3.643338 +step:6552 train loss:3.643289 +step:6553 train loss:3.680123 +step:6554 train loss:3.571556 +step:6555 train loss:3.653705 +step:6556 train loss:3.529064 +step:6557 train loss:3.875190 +step:6558 train loss:3.706053 +step:6559 train loss:3.621871 +step:6560 train loss:3.657474 +step:6561 train loss:3.634166 +step:6562 train loss:3.649027 +step:6563 train loss:3.546979 +step:6564 train loss:3.643646 +step:6565 train loss:3.552619 +step:6566 train loss:3.667032 +step:6567 train loss:3.637878 +step:6568 train loss:3.683082 +step:6569 train loss:3.628243 +step:6570 train loss:3.666524 +step:6571 train loss:3.597436 +step:6572 train loss:3.670309 +step:6573 train loss:3.677896 +step:6574 train loss:3.667873 +step:6575 train loss:3.611777 +step:6576 train loss:3.605756 +step:6577 train loss:3.670179 +step:6578 train loss:3.543209 +step:6579 train loss:3.642740 +step:6580 train loss:3.604777 +step:6581 train loss:3.613528 +step:6582 
train loss:3.594214 +step:6583 train loss:3.689540 +step:6584 train loss:3.621284 +step:6585 train loss:3.659295 +step:6586 train loss:3.665322 +step:6587 train loss:3.676667 +step:6588 train loss:3.639942 +step:6589 train loss:3.672603 +step:6590 train loss:3.611259 +step:6591 train loss:3.667107 +step:6592 train loss:3.601635 +step:6593 train loss:3.616524 +step:6594 train loss:3.638039 +step:6595 train loss:3.623611 +step:6596 train loss:3.621589 +step:6597 train loss:3.646172 +step:6598 train loss:3.682048 +step:6599 train loss:3.583488 +step:6600 train loss:3.632082 +step:6601 train loss:3.693511 +step:6602 train loss:3.613809 +step:6603 train loss:3.645315 +step:6604 train loss:3.654276 +step:6605 train loss:3.634972 +step:6606 train loss:3.696743 +step:6607 train loss:3.617048 +step:6608 train loss:3.632574 +step:6609 train loss:3.599935 +step:6610 train loss:3.707340 +step:6611 train loss:3.631519 +step:6612 train loss:3.675027 +step:6613 train loss:3.594767 +step:6614 train loss:3.621190 +step:6615 train loss:3.621440 +step:6616 train loss:3.600126 +step:6617 train loss:3.644827 +step:6618 train loss:3.626266 +step:6619 train loss:3.598359 +step:6620 train loss:3.705823 +step:6621 train loss:3.584382 +step:6622 train loss:3.656481 +step:6623 train loss:3.588706 +step:6624 train loss:3.661201 +step:6625 train loss:3.699278 +step:6626 train loss:3.663751 +step:6627 train loss:3.620024 +step:6628 train loss:3.672512 +step:6629 train loss:3.576424 +step:6630 train loss:3.613624 +step:6631 train loss:3.650647 +step:6632 train loss:3.688339 +step:6633 train loss:3.639869 +step:6634 train loss:3.700608 +step:6635 train loss:3.603314 +step:6636 train loss:3.644820 +step:6637 train loss:3.607810 +step:6638 train loss:3.609749 +step:6639 train loss:3.620421 +step:6640 train loss:3.607115 +step:6641 train loss:3.625345 +step:6642 train loss:3.626543 +step:6643 train loss:3.699658 +step:6644 train loss:3.708067 +step:6645 train loss:3.580417 +step:6646 train loss:3.673280 +step:6647 train loss:3.627887 +step:6648 train loss:3.731784 +step:6649 train loss:3.658273 +step:6650 train loss:3.612042 +step:6651 train loss:3.652658 +step:6652 train loss:3.671545 +step:6653 train loss:3.608144 +step:6654 train loss:3.612218 +step:6655 train loss:3.647950 +step:6656 train loss:3.621618 +step:6657 train loss:3.643781 +step:6658 train loss:3.630497 +step:6659 train loss:3.779471 +step:6660 train loss:3.678641 +step:6661 train loss:3.604913 +step:6662 train loss:3.635588 +step:6663 train loss:3.568326 +step:6664 train loss:3.652589 +step:6665 train loss:3.658252 +step:6666 train loss:3.672075 +step:6667 train loss:3.587572 +step:6668 train loss:3.722893 +step:6669 train loss:3.597779 +step:6670 train loss:3.610888 +step:6671 train loss:3.689840 +step:6672 train loss:3.646268 +step:6673 train loss:3.650478 +step:6674 train loss:3.629050 +step:6675 train loss:3.639271 +step:6676 train loss:3.655159 +step:6677 train loss:3.609020 +step:6678 train loss:3.682783 +step:6679 train loss:3.715472 +step:6680 train loss:3.714220 +step:6681 train loss:3.669570 +step:6682 train loss:3.612937 +step:6683 train loss:3.637657 +step:6684 train loss:3.645381 +step:6685 train loss:3.657328 +step:6686 train loss:3.598521 +step:6687 train loss:3.612303 +step:6688 train loss:3.657847 +step:6689 train loss:3.661561 +step:6690 train loss:3.644980 +step:6691 train loss:3.674155 +step:6692 train loss:3.678640 +step:6693 train loss:3.708939 +step:6694 train loss:3.668639 +step:6695 train loss:3.637477 +step:6696 train loss:3.581287 
+step:6697 train loss:3.787981 +step:6698 train loss:3.638030 +step:6699 train loss:3.634441 +step:6700 train loss:3.647482 +step:6701 train loss:3.704552 +step:6702 train loss:3.597876 +step:6703 train loss:3.643016 +step:6704 train loss:3.626194 +step:6705 train loss:3.635653 +step:6706 train loss:3.616359 +step:6707 train loss:3.686665 +step:6708 train loss:3.641986 +step:6709 train loss:3.672830 +step:6710 train loss:3.660403 +step:6711 train loss:3.613863 +step:6712 train loss:3.601776 +step:6713 train loss:3.626914 +step:6714 train loss:3.668071 +step:6715 train loss:3.612040 +step:6716 train loss:3.690267 +step:6717 train loss:3.635316 +step:6718 train loss:3.660913 +step:6719 train loss:3.691242 +step:6720 train loss:3.620760 +step:6721 train loss:3.637641 +step:6722 train loss:3.614605 +step:6723 train loss:3.741342 +step:6724 train loss:3.599683 +step:6725 train loss:3.664244 +step:6726 train loss:3.614142 +step:6727 train loss:3.680973 +step:6728 train loss:3.774255 +step:6729 train loss:3.636561 +step:6730 train loss:3.633710 +step:6731 train loss:3.673747 +step:6732 train loss:3.551067 +step:6733 train loss:3.686812 +step:6734 train loss:3.610199 +step:6735 train loss:3.637980 +step:6736 train loss:3.642551 +step:6737 train loss:3.636716 +step:6738 train loss:3.670475 +step:6739 train loss:3.622117 +step:6740 train loss:3.574760 +step:6741 train loss:3.686530 +step:6742 train loss:3.648107 +step:6743 train loss:3.650694 +step:6744 train loss:3.544641 +step:6745 train loss:3.693771 +step:6746 train loss:3.630149 +step:6747 train loss:3.619459 +step:6748 train loss:3.694361 +step:6749 train loss:3.673515 +step:6750 validation loss:3.582575 +step:6750 train loss:3.598689 +step:6751 train loss:3.631438 +step:6752 train loss:3.636420 +step:6753 train loss:3.668371 +step:6754 train loss:3.649403 +step:6755 train loss:3.657578 +step:6756 train loss:3.605377 +step:6757 train loss:3.569902 +step:6758 train loss:3.751799 +step:6759 train loss:3.637058 +step:6760 train loss:3.695755 +step:6761 train loss:3.624739 +step:6762 train loss:3.653498 +step:6763 train loss:3.549499 +step:6764 train loss:3.630079 +step:6765 train loss:3.627642 +step:6766 train loss:3.623214 +step:6767 train loss:3.579590 +step:6768 train loss:3.584374 +step:6769 train loss:3.548725 +step:6770 train loss:3.634993 +step:6771 train loss:3.635159 +step:6772 train loss:3.642521 +step:6773 train loss:3.625047 +step:6774 train loss:3.634869 +step:6775 train loss:3.681728 +step:6776 train loss:3.634111 +step:6777 train loss:3.712708 +step:6778 train loss:3.595122 +step:6779 train loss:3.653999 +step:6780 train loss:3.578953 +step:6781 train loss:3.648236 +step:6782 train loss:3.555130 +step:6783 train loss:3.599872 +step:6784 train loss:3.620798 +step:6785 train loss:3.609673 +step:6786 train loss:3.623088 +step:6787 train loss:3.697952 +step:6788 train loss:3.635647 +step:6789 train loss:3.645716 +step:6790 train loss:3.643640 +step:6791 train loss:3.653244 +step:6792 train loss:3.653575 +step:6793 train loss:3.653351 +step:6794 train loss:3.620658 +step:6795 train loss:3.622937 +step:6796 train loss:3.627002 +step:6797 train loss:3.724172 +step:6798 train loss:3.628675 +step:6799 train loss:3.622368 +step:6800 train loss:3.588665 +step:6801 train loss:3.719421 +step:6802 train loss:3.667951 +step:6803 train loss:3.655235 +step:6804 train loss:3.686865 +step:6805 train loss:3.643562 +step:6806 train loss:3.582378 +step:6807 train loss:3.638452 +step:6808 train loss:3.623680 +step:6809 train loss:3.649651 +step:6810 
train loss:3.771769 +step:6811 train loss:3.671666 +step:6812 train loss:3.649604 +step:6813 train loss:3.658375 +step:6814 train loss:3.666290 +step:6815 train loss:3.708843 +step:6816 train loss:3.632887 +step:6817 train loss:3.653722 +step:6818 train loss:3.637146 +step:6819 train loss:3.613754 +step:6820 train loss:3.646213 +step:6821 train loss:3.608903 +step:6822 train loss:3.710519 +step:6823 train loss:3.692199 +step:6824 train loss:3.672701 +step:6825 train loss:3.615384 +step:6826 train loss:3.663535 +step:6827 train loss:3.643844 +step:6828 train loss:3.664738 +step:6829 train loss:3.650771 +step:6830 train loss:3.617754 +step:6831 train loss:3.577854 +step:6832 train loss:3.564077 +step:6833 train loss:3.584533 +step:6834 train loss:3.665448 +step:6835 train loss:3.642133 +step:6836 train loss:3.556570 +step:6837 train loss:3.626052 +step:6838 train loss:3.681346 +step:6839 train loss:3.767367 +step:6840 train loss:3.636314 +step:6841 train loss:3.599438 +step:6842 train loss:3.644414 +step:6843 train loss:3.749789 +step:6844 train loss:3.628127 +step:6845 train loss:3.686312 +step:6846 train loss:3.746007 +step:6847 train loss:3.674101 +step:6848 train loss:3.668794 +step:6849 train loss:3.691369 +step:6850 train loss:3.663997 +step:6851 train loss:3.589304 +step:6852 train loss:3.585525 +step:6853 train loss:3.568836 +step:6854 train loss:3.656183 +step:6855 train loss:3.622675 +step:6856 train loss:3.611302 +step:6857 train loss:3.657529 +step:6858 train loss:3.693225 +step:6859 train loss:3.596336 +step:6860 train loss:3.707182 +step:6861 train loss:3.730547 +step:6862 train loss:3.644948 +step:6863 train loss:3.641413 +step:6864 train loss:3.582536 +step:6865 train loss:3.660744 +step:6866 train loss:3.582705 +step:6867 train loss:3.760677 +step:6868 train loss:3.634637 +step:6869 train loss:3.672968 +step:6870 train loss:3.702450 +step:6871 train loss:3.618333 +step:6872 train loss:3.619160 +step:6873 train loss:3.637002 +step:6874 train loss:3.599247 +step:6875 train loss:3.605270 +step:6876 train loss:3.636437 +step:6877 train loss:3.673739 +step:6878 train loss:3.583507 +step:6879 train loss:3.635798 +step:6880 train loss:3.642178 +step:6881 train loss:3.604161 +step:6882 train loss:3.662215 +step:6883 train loss:3.655596 +step:6884 train loss:3.883672 +step:6885 train loss:3.652442 +step:6886 train loss:3.636349 +step:6887 train loss:3.569798 +step:6888 train loss:3.679574 +step:6889 train loss:3.557499 +step:6890 train loss:3.666523 +step:6891 train loss:3.674877 +step:6892 train loss:3.775758 +step:6893 train loss:3.607636 +step:6894 train loss:3.665241 +step:6895 train loss:3.669911 +step:6896 train loss:3.638835 +step:6897 train loss:3.598758 +step:6898 train loss:3.594073 +step:6899 train loss:3.683215 +step:6900 train loss:3.654192 +step:6901 train loss:3.606583 +step:6902 train loss:3.539316 +step:6903 train loss:3.589497 +step:6904 train loss:3.698761 +step:6905 train loss:3.726364 +step:6906 train loss:3.649865 +step:6907 train loss:3.670499 +step:6908 train loss:3.707595 +step:6909 train loss:3.698139 +step:6910 train loss:3.575662 +step:6911 train loss:3.700683 +step:6912 train loss:3.596891 +step:6913 train loss:3.627853 +step:6914 train loss:3.590151 +step:6915 train loss:3.615770 +step:6916 train loss:3.594469 +step:6917 train loss:3.712872 +step:6918 train loss:3.663985 +step:6919 train loss:3.653048 +step:6920 train loss:3.641015 +step:6921 train loss:3.705766 +step:6922 train loss:3.696105 +step:6923 train loss:3.561697 +step:6924 train loss:3.643154 
+step:6925 train loss:3.618591 +step:6926 train loss:3.653480 +step:6927 train loss:3.707772 +step:6928 train loss:3.592709 +step:6929 train loss:3.610305 +step:6930 train loss:3.638624 +step:6931 train loss:3.642742 +step:6932 train loss:3.866465 +step:6933 train loss:3.706597 +step:6934 train loss:3.646692 +step:6935 train loss:3.630090 +step:6936 train loss:3.672099 +step:6937 train loss:3.609782 +step:6938 train loss:3.681147 +step:6939 train loss:3.610645 +step:6940 train loss:3.666061 +step:6941 train loss:3.580700 +step:6942 train loss:3.669397 +step:6943 train loss:3.559959 +step:6944 train loss:3.661414 +step:6945 train loss:3.596646 +step:6946 train loss:3.682985 +step:6947 train loss:3.612212 +step:6948 train loss:3.607303 +step:6949 train loss:3.682470 +step:6950 train loss:3.668881 +step:6951 train loss:3.679028 +step:6952 train loss:3.608073 +step:6953 train loss:3.652874 +step:6954 train loss:3.712818 +step:6955 train loss:3.623201 +step:6956 train loss:3.663403 +step:6957 train loss:3.654013 +step:6958 train loss:3.616865 +step:6959 train loss:3.653666 +step:6960 train loss:3.621776 +step:6961 train loss:3.625053 +step:6962 train loss:3.610551 +step:6963 train loss:3.574760 +step:6964 train loss:3.625226 +step:6965 train loss:3.612414 +step:6966 train loss:3.663346 +step:6967 train loss:3.595901 +step:6968 train loss:3.634644 +step:6969 train loss:3.653139 +step:6970 train loss:3.627296 +step:6971 train loss:3.693364 +step:6972 train loss:3.636784 +step:6973 train loss:3.601258 +step:6974 train loss:3.724653 +step:6975 train loss:3.633071 +step:6976 train loss:3.605011 +step:6977 train loss:3.643031 +step:6978 train loss:3.636719 +step:6979 train loss:3.641001 +step:6980 train loss:3.622896 +step:6981 train loss:3.677061 +step:6982 train loss:3.634766 +step:6983 train loss:3.622872 +step:6984 train loss:3.740466 +step:6985 train loss:3.590621 +step:6986 train loss:3.578018 +step:6987 train loss:3.627171 +step:6988 train loss:3.631692 +step:6989 train loss:3.781588 +step:6990 train loss:3.640885 +step:6991 train loss:3.601735 +step:6992 train loss:3.645961 +step:6993 train loss:3.714101 +step:6994 train loss:3.661746 +step:6995 train loss:3.610444 +step:6996 train loss:3.614584 +step:6997 train loss:3.695342 +step:6998 train loss:3.595810 +step:6999 train loss:3.641107 +step:7000 validation loss:3.573143 total_sharp:5.1223e-03 L1_sharp:3.2205e-01 L2_sharp:9.4294e-02 L3_sharp:1.3613e-01 L4_sharp:7.0655e-02 L5_sharp:7.5454e-02 L6_sharp:8.0554e-02 L7_sharp:9.3344e-02 L8_sharp:9.9406e-02 L9_sharp:9.4298e-02 L10_sharp:7.3345e-02 L11_sharp:5.9302e-02 L12_sharp:9.2623e-02 total_fnorm:1.3220e+00 total_l1_linf:7.3029e+03 total_spectral:1.3220e+00 L1_fnorm:3.1903e-02 L2_fnorm:3.0841e-02 L3_fnorm:3.0775e-02 L4_fnorm:3.1359e-02 L5_fnorm:3.1962e-02 L6_fnorm:3.2115e-02 L7_fnorm:3.2106e-02 L8_fnorm:3.2106e-02 L9_fnorm:3.2133e-02 L10_fnorm:3.2273e-02 L11_fnorm:3.2255e-02 L12_fnorm:3.2338e-02 L1_l1linf:3.0596e-01 L2_l1linf:3.1318e-01 L3_l1linf:3.4830e-01 L4_l1linf:3.3227e-01 L5_l1linf:3.2050e-01 L6_l1linf:2.9722e-01 L7_l1linf:2.7506e-01 L8_l1linf:2.8736e-01 L9_l1linf:3.0181e-01 L10_l1linf:3.2792e-01 L11_l1linf:3.3844e-01 L12_l1linf:3.4347e-01 L1_spectral:6.8154e-03 L2_spectral:7.1229e-03 L3_spectral:7.7959e-03 L4_spectral:7.4144e-03 L5_spectral:7.1490e-03 L6_spectral:6.7029e-03 L7_spectral:6.1745e-03 L8_spectral:6.4614e-03 L9_spectral:6.7545e-03 L10_spectral:7.3669e-03 L11_spectral:7.5700e-03 L12_spectral:7.7404e-03 ip_v_neg_g:3.7752e-03 cos_v_neg_g:5.0955e-04 v_norm:1.3220e+00 
g_norm:5.6042e+00 hv_norm:1.2569e+00 cos_v_hv:5.3874e-03 hg_norm:7.5551e+02 cos_g_hg:4.6527e-01 v_par:1.6013e-05 v_perp:1.3220e+00 L1_cos_v_neg_g:7.0877e-03 L1_v_norm:3.1903e-02 L2_cos_v_neg_g:7.8981e-03 L2_v_norm:3.0841e-02 L3_cos_v_neg_g:7.5159e-03 L3_v_norm:3.0775e-02 L4_cos_v_neg_g:7.4467e-03 L4_v_norm:3.1359e-02 L5_cos_v_neg_g:8.1346e-03 L5_v_norm:3.1962e-02 L6_cos_v_neg_g:6.1911e-03 L6_v_norm:3.2115e-02 L7_cos_v_neg_g:5.1943e-03 L7_v_norm:3.2106e-02 L8_cos_v_neg_g:4.6777e-03 L8_v_norm:3.2106e-02 L9_cos_v_neg_g:5.3938e-03 L9_v_norm:3.2133e-02 L10_cos_v_neg_g:4.6338e-03 L10_v_norm:3.2273e-02 L11_cos_v_neg_g:3.7394e-03 L11_v_norm:3.2255e-02 L12_cos_v_neg_g:3.9635e-03 L12_v_norm:3.2338e-02 +step:7000 train loss:3.720832 +step:7001 train loss:3.626200 +step:7002 train loss:3.613226 +step:7003 train loss:3.636511 +step:7004 train loss:3.631423 +step:7005 train loss:3.617881 +step:7006 train loss:3.619262 +step:7007 train loss:3.671029 +step:7008 train loss:3.611234 +step:7009 train loss:3.653855 +step:7010 train loss:3.587132 +step:7011 train loss:3.647695 +step:7012 train loss:3.618606 +step:7013 train loss:3.691958 +step:7014 train loss:3.600699 +step:7015 train loss:3.656230 +step:7016 train loss:3.648541 +step:7017 train loss:3.615336 +step:7018 train loss:3.692247 +step:7019 train loss:3.621969 +step:7020 train loss:3.665677 +step:7021 train loss:3.614335 +step:7022 train loss:3.623755 +step:7023 train loss:3.646834 +step:7024 train loss:3.606694 +step:7025 train loss:3.657357 +step:7026 train loss:3.613219 +step:7027 train loss:3.675852 +step:7028 train loss:3.600407 +step:7029 train loss:3.587236 +step:7030 train loss:3.589499 +step:7031 train loss:3.644165 +step:7032 train loss:3.652747 +step:7033 train loss:3.626709 +step:7034 train loss:3.648645 +step:7035 train loss:3.699370 +step:7036 train loss:3.617571 +step:7037 train loss:3.647558 +step:7038 train loss:3.604909 +step:7039 train loss:3.663679 +step:7040 train loss:3.582396 +step:7041 train loss:3.671766 +step:7042 train loss:3.608208 +step:7043 train loss:3.580840 +step:7044 train loss:3.625079 +step:7045 train loss:3.626347 +step:7046 train loss:3.613974 +step:7047 train loss:3.655360 +step:7048 train loss:3.600273 +step:7049 train loss:3.615639 +step:7050 train loss:3.633996 +step:7051 train loss:3.654129 +step:7052 train loss:3.655674 +step:7053 train loss:3.616390 +step:7054 train loss:3.599204 +step:7055 train loss:3.660575 +step:7056 train loss:3.666966 +step:7057 train loss:3.587564 +step:7058 train loss:3.705354 +step:7059 train loss:3.619186 +step:7060 train loss:3.624734 +step:7061 train loss:3.603604 +step:7062 train loss:3.620565 +step:7063 train loss:3.680518 +step:7064 train loss:3.605903 +step:7065 train loss:3.655179 +step:7066 train loss:3.613494 +step:7067 train loss:3.650104 +step:7068 train loss:3.630448 +step:7069 train loss:3.585413 +step:7070 train loss:3.617037 +step:7071 train loss:3.580209 +step:7072 train loss:3.590952 +step:7073 train loss:3.578951 +step:7074 train loss:3.573783 +step:7075 train loss:3.597170 +step:7076 train loss:3.602117 +step:7077 train loss:3.617169 +step:7078 train loss:3.657826 +step:7079 train loss:3.672167 +step:7080 train loss:3.610912 +step:7081 train loss:3.638928 +step:7082 train loss:3.608379 +step:7083 train loss:3.634439 +step:7084 train loss:3.628549 +step:7085 train loss:3.584674 +step:7086 train loss:3.630639 +step:7087 train loss:3.603811 +step:7088 train loss:3.722925 +step:7089 train loss:3.622380 +step:7090 train loss:3.583253 +step:7091 train loss:3.599703 
+step:7092 train loss:3.577141 +step:7093 train loss:3.670724 +step:7094 train loss:3.592571 +step:7095 train loss:3.603996 +step:7096 train loss:3.625729 +step:7097 train loss:3.612366 +step:7098 train loss:3.637978 +step:7099 train loss:3.593692 +step:7100 train loss:3.622177 +step:7101 train loss:3.696337 +step:7102 train loss:3.586227 +step:7103 train loss:3.613255 +step:7104 train loss:3.642494 +step:7105 train loss:3.616779 +step:7106 train loss:3.607406 +step:7107 train loss:3.640423 +step:7108 train loss:3.711038 +step:7109 train loss:3.637399 +step:7110 train loss:3.666771 +step:7111 train loss:3.643186 +step:7112 train loss:3.630115 +step:7113 train loss:3.634198 +step:7114 train loss:3.649293 +step:7115 train loss:3.691395 +step:7116 train loss:3.619679 +step:7117 train loss:3.651894 +step:7118 train loss:3.667801 +step:7119 train loss:3.625187 +step:7120 train loss:3.687463 +step:7121 train loss:3.598720 +step:7122 train loss:3.600966 +step:7123 train loss:3.544146 +step:7124 train loss:3.695671 +step:7125 train loss:3.554506 +step:7126 train loss:3.716309 +step:7127 train loss:3.677575 +step:7128 train loss:3.617826 +step:7129 train loss:3.629402 +step:7130 train loss:3.615938 +step:7131 train loss:3.554040 +step:7132 train loss:3.600358 +step:7133 train loss:3.644645 +step:7134 train loss:3.576710 +step:7135 train loss:3.628422 +step:7136 train loss:3.615210 +step:7137 train loss:3.593142 +step:7138 train loss:3.582250 +step:7139 train loss:3.587320 +step:7140 train loss:3.621149 +step:7141 train loss:3.617292 +step:7142 train loss:3.613662 +step:7143 train loss:3.650792 +step:7144 train loss:3.602233 +step:7145 train loss:3.619891 +step:7146 train loss:3.626617 +step:7147 train loss:3.647164 +step:7148 train loss:3.656121 +step:7149 train loss:3.653977 +step:7150 train loss:3.634861 +step:7151 train loss:3.596705 +step:7152 train loss:3.570312 +step:7153 train loss:3.606591 +step:7154 train loss:3.621002 +step:7155 train loss:3.641201 +step:7156 train loss:3.607884 +step:7157 train loss:3.626008 +step:7158 train loss:3.587657 +step:7159 train loss:3.635219 +step:7160 train loss:3.648176 +step:7161 train loss:3.598340 +step:7162 train loss:3.649175 +step:7163 train loss:3.582283 +step:7164 train loss:3.616874 +step:7165 train loss:3.624650 +step:7166 train loss:3.679733 +step:7167 train loss:3.660174 +step:7168 train loss:3.634860 +step:7169 train loss:3.612803 +step:7170 train loss:3.644994 +step:7171 train loss:3.586308 +step:7172 train loss:3.757838 +step:7173 train loss:3.597343 +step:7174 train loss:3.639807 +step:7175 train loss:3.617962 +step:7176 train loss:3.622121 +step:7177 train loss:3.640472 +step:7178 train loss:3.635965 +step:7179 train loss:3.622358 +step:7180 train loss:3.627236 +step:7181 train loss:3.651007 +step:7182 train loss:3.604296 +step:7183 train loss:3.679546 +step:7184 train loss:3.767025 +step:7185 train loss:3.681862 +step:7186 train loss:3.620251 +step:7187 train loss:3.631034 +step:7188 train loss:3.622918 +step:7189 train loss:3.618715 +step:7190 train loss:3.621818 +step:7191 train loss:3.612160 +step:7192 train loss:3.651838 +step:7193 train loss:3.564996 +step:7194 train loss:3.631317 +step:7195 train loss:3.604468 +step:7196 train loss:3.653747 +step:7197 train loss:3.629341 +step:7198 train loss:3.684486 +step:7199 train loss:3.644998 +step:7200 train loss:3.636853 +step:7201 train loss:3.644139 +step:7202 train loss:3.626078 +step:7203 train loss:3.635698 +step:7204 train loss:3.608146 +step:7205 train loss:3.562381 +step:7206 train 
loss:3.594684 +step:7207 train loss:3.771087 +step:7208 train loss:3.599023 +step:7209 train loss:3.684854 +step:7210 train loss:3.618679 +step:7211 train loss:3.651404 +step:7212 train loss:3.728831 +step:7213 train loss:3.581223 +step:7214 train loss:3.650249 +step:7215 train loss:3.617778 +step:7216 train loss:3.668651 +step:7217 train loss:3.627706 +step:7218 train loss:3.717279 +step:7219 train loss:3.624783 +step:7220 train loss:3.700450 +step:7221 train loss:3.584708 +step:7222 train loss:3.664389 +step:7223 train loss:3.583653 +step:7224 train loss:3.648094 +step:7225 train loss:3.621268 +step:7226 train loss:3.594085 +step:7227 train loss:3.612701 +step:7228 train loss:3.600137 +step:7229 train loss:3.605050 +step:7230 train loss:3.584871 +step:7231 train loss:3.719512 +step:7232 train loss:3.590202 +step:7233 train loss:3.661619 +step:7234 train loss:3.651171 +step:7235 train loss:3.621535 +step:7236 train loss:3.664030 +step:7237 train loss:3.612161 +step:7238 train loss:3.647420 +step:7239 train loss:3.603125 +step:7240 train loss:3.600785 +step:7241 train loss:3.617192 +step:7242 train loss:3.593983 +step:7243 train loss:3.637858 +step:7244 train loss:3.618175 +step:7245 train loss:3.616731 +step:7246 train loss:3.662746 +step:7247 train loss:3.616887 +step:7248 train loss:3.653294 +step:7249 train loss:3.605934 +step:7250 validation loss:3.564748 +step:7250 train loss:3.628188 +step:7251 train loss:3.670727 +step:7252 train loss:3.585394 +step:7253 train loss:3.673469 +step:7254 train loss:3.613865 +step:7255 train loss:3.579974 +step:7256 train loss:3.624341 +step:7257 train loss:3.670649 +step:7258 train loss:3.626251 +step:7259 train loss:3.609202 +step:7260 train loss:3.687913 +step:7261 train loss:3.650071 +step:7262 train loss:3.603393 +step:7263 train loss:3.643180 +step:7264 train loss:3.634894 +step:7265 train loss:3.536225 +step:7266 train loss:3.656508 +step:7267 train loss:3.579932 +step:7268 train loss:3.642951 +step:7269 train loss:3.647952 +step:7270 train loss:3.599865 +step:7271 train loss:3.619095 +step:7272 train loss:3.621811 +step:7273 train loss:3.617684 +step:7274 train loss:3.599211 +step:7275 train loss:3.670726 +step:7276 train loss:3.574672 +step:7277 train loss:3.626070 +step:7278 train loss:3.594412 +step:7279 train loss:3.575477 +step:7280 train loss:3.645134 +step:7281 train loss:3.666270 +step:7282 train loss:3.666678 +step:7283 train loss:3.556793 +step:7284 train loss:3.600823 +step:7285 train loss:3.629809 +step:7286 train loss:3.757715 +step:7287 train loss:3.669020 +step:7288 train loss:3.624858 +step:7289 train loss:3.623472 +step:7290 train loss:3.678131 +step:7291 train loss:3.636025 +step:7292 train loss:3.705746 +step:7293 train loss:3.604486 +step:7294 train loss:3.685897 +step:7295 train loss:3.578393 +step:7296 train loss:3.574095 +step:7297 train loss:3.619319 +step:7298 train loss:3.597958 +step:7299 train loss:3.635793 +step:7300 train loss:3.624311 +step:7301 train loss:3.572747 +step:7302 train loss:3.720665 +step:7303 train loss:3.611156 +step:7304 train loss:3.550206 +step:7305 train loss:3.630788 +step:7306 train loss:3.657844 +step:7307 train loss:3.664260 +step:7308 train loss:3.613027 +step:7309 train loss:3.577178 +step:7310 train loss:3.607870 +step:7311 train loss:3.593184 +step:7312 train loss:3.626888 +step:7313 train loss:3.671587 +step:7314 train loss:3.565000 +step:7315 train loss:3.559311 +step:7316 train loss:3.703369 +step:7317 train loss:3.640262 +step:7318 train loss:3.581223 +step:7319 train loss:3.606041 
+step:7320 train loss:3.640588 +step:7321 train loss:3.670537 +step:7322 train loss:3.548210 +step:7323 train loss:3.604115 +step:7324 train loss:3.628944 +step:7325 train loss:3.592412 +step:7326 train loss:3.623450 +step:7327 train loss:3.597670 +step:7328 train loss:3.719228 +step:7329 train loss:3.560286 +step:7330 train loss:3.614425 +step:7331 train loss:3.612718 +step:7332 train loss:3.650200 +step:7333 train loss:3.633148 +step:7334 train loss:3.602379 +step:7335 train loss:3.597883 +step:7336 train loss:3.852646 +step:7337 train loss:3.636868 +step:7338 train loss:3.633637 +step:7339 train loss:3.646891 +step:7340 train loss:3.630414 +step:7341 train loss:3.622629 +step:7342 train loss:3.617177 +step:7343 train loss:3.627209 +step:7344 train loss:3.706292 +step:7345 train loss:3.565125 +step:7346 train loss:3.601724 +step:7347 train loss:3.596918 +step:7348 train loss:3.599686 +step:7349 train loss:3.700773 +step:7350 train loss:3.684505 +step:7351 train loss:3.616363 +step:7352 train loss:3.646877 +step:7353 train loss:3.627170 +step:7354 train loss:3.577459 +step:7355 train loss:3.761536 +step:7356 train loss:3.732751 +step:7357 train loss:3.654906 +step:7358 train loss:3.638702 +step:7359 train loss:3.602937 +step:7360 train loss:3.614225 +step:7361 train loss:3.566055 +step:7362 train loss:3.613414 +step:7363 train loss:3.630518 +step:7364 train loss:3.661891 +step:7365 train loss:3.647786 +step:7366 train loss:3.611803 +step:7367 train loss:3.682504 +step:7368 train loss:3.669780 +step:7369 train loss:3.656950 +step:7370 train loss:3.623059 +step:7371 train loss:3.582788 +step:7372 train loss:3.639213 +step:7373 train loss:3.661977 +step:7374 train loss:3.752315 +step:7375 train loss:3.576024 +step:7376 train loss:3.600500 +step:7377 train loss:3.645766 +step:7378 train loss:3.599362 +step:7379 train loss:3.723775 +step:7380 train loss:3.683098 +step:7381 train loss:3.651112 +step:7382 train loss:3.617470 +step:7383 train loss:3.702207 +step:7384 train loss:3.650589 +step:7385 train loss:3.604507 +step:7386 train loss:3.614320 +step:7387 train loss:3.653431 +step:7388 train loss:3.685559 +step:7389 train loss:3.629295 +step:7390 train loss:3.570688 +step:7391 train loss:3.609058 +step:7392 train loss:3.666794 +step:7393 train loss:3.632921 +step:7394 train loss:3.675930 +step:7395 train loss:3.562370 +step:7396 train loss:3.660386 +step:7397 train loss:3.590035 +step:7398 train loss:3.601793 +step:7399 train loss:3.655165 +step:7400 train loss:3.653653 +step:7401 train loss:3.567676 +step:7402 train loss:3.691478 +step:7403 train loss:3.572598 +step:7404 train loss:3.644597 +step:7405 train loss:3.762870 +step:7406 train loss:3.590817 +step:7407 train loss:3.642625 +step:7408 train loss:3.632723 +step:7409 train loss:3.611143 +step:7410 train loss:3.781559 +step:7411 train loss:3.621011 +step:7412 train loss:3.631880 +step:7413 train loss:3.677796 +step:7414 train loss:3.587397 +step:7415 train loss:3.648073 +step:7416 train loss:3.531352 +step:7417 train loss:3.648113 +step:7418 train loss:3.631927 +step:7419 train loss:3.603925 +step:7420 train loss:3.595386 +step:7421 train loss:3.628878 +step:7422 train loss:3.587870 +step:7423 train loss:3.724202 +step:7424 train loss:3.789590 +step:7425 train loss:3.676844 +step:7426 train loss:3.644411 +step:7427 train loss:3.613182 +step:7428 train loss:3.629086 +step:7429 train loss:3.650487 +step:7430 train loss:3.575079 +step:7431 train loss:3.581786 +step:7432 train loss:3.590009 +step:7433 train loss:3.684173 +step:7434 train 
loss:3.604453 +step:7435 train loss:3.684344 +step:7436 train loss:3.727643 +step:7437 train loss:3.547193 +step:7438 train loss:3.608471 +step:7439 train loss:3.621867 +step:7440 train loss:3.594672 +step:7441 train loss:3.561947 +step:7442 train loss:3.791971 +step:7443 train loss:3.613321 +step:7444 train loss:3.661097 +step:7445 train loss:3.583920 +step:7446 train loss:3.608567 +step:7447 train loss:3.536051 +step:7448 train loss:3.589342 +step:7449 train loss:3.604842 +step:7450 train loss:3.638097 +step:7451 train loss:3.666417 +step:7452 train loss:3.602637 +step:7453 train loss:3.625310 +step:7454 train loss:3.608506 +step:7455 train loss:3.622797 +step:7456 train loss:3.594293 +step:7457 train loss:3.597422 +step:7458 train loss:3.642261 +step:7459 train loss:3.616452 +step:7460 train loss:3.623295 +step:7461 train loss:3.662992 +step:7462 train loss:3.596267 +step:7463 train loss:3.660475 +step:7464 train loss:3.583443 +step:7465 train loss:3.594005 +step:7466 train loss:3.597203 +step:7467 train loss:3.604094 +step:7468 train loss:3.653264 +step:7469 train loss:3.588087 +step:7470 train loss:3.618031 +step:7471 train loss:3.607725 +step:7472 train loss:3.642513 +step:7473 train loss:3.585194 +step:7474 train loss:3.567138 +step:7475 train loss:3.600507 +step:7476 train loss:3.639789 +step:7477 train loss:3.610341 +step:7478 train loss:3.609222 +step:7479 train loss:3.622415 +step:7480 train loss:3.899055 +step:7481 train loss:3.551752 +step:7482 train loss:3.620077 +step:7483 train loss:3.616154 +step:7484 train loss:3.636675 +step:7485 train loss:3.621874 +step:7486 train loss:3.649903 +step:7487 train loss:3.644882 +step:7488 train loss:3.661700 +step:7489 train loss:3.657958 +step:7490 train loss:3.602937 +step:7491 train loss:3.622417 +step:7492 train loss:3.728640 +step:7493 train loss:3.706012 +step:7494 train loss:3.728434 +step:7495 train loss:3.603853 +step:7496 train loss:3.589893 +step:7497 train loss:3.684588 +step:7498 train loss:3.623389 +step:7499 train loss:3.658084 +step:7500 validation loss:3.558255 total_sharp:5.2380e-03 L1_sharp:3.4657e-01 L2_sharp:9.0898e-02 L3_sharp:1.2213e-01 L4_sharp:7.5790e-02 L5_sharp:7.6500e-02 L6_sharp:9.5411e-02 L7_sharp:1.5173e-01 L8_sharp:1.4628e-01 L9_sharp:1.0600e-01 L10_sharp:7.3771e-02 L11_sharp:6.0419e-02 L12_sharp:9.1663e-02 total_fnorm:1.3684e+00 total_l1_linf:7.5445e+03 total_spectral:1.3684e+00 L1_fnorm:3.2395e-02 L2_fnorm:3.1125e-02 L3_fnorm:3.0910e-02 L4_fnorm:3.1651e-02 L5_fnorm:3.2143e-02 L6_fnorm:3.2437e-02 L7_fnorm:3.2243e-02 L8_fnorm:3.2252e-02 L9_fnorm:3.2155e-02 L10_fnorm:3.2238e-02 L11_fnorm:3.2324e-02 L12_fnorm:3.2195e-02 L1_l1linf:3.3494e-01 L2_l1linf:3.5750e-01 L3_l1linf:3.4928e-01 L4_l1linf:3.4232e-01 L5_l1linf:3.2573e-01 L6_l1linf:3.2130e-01 L7_l1linf:3.1182e-01 L8_l1linf:3.1128e-01 L9_l1linf:3.0453e-01 L10_l1linf:3.2017e-01 L11_l1linf:3.4254e-01 L12_l1linf:3.3877e-01 L1_spectral:7.4280e-03 L2_spectral:7.9777e-03 L3_spectral:7.8343e-03 L4_spectral:7.7048e-03 L5_spectral:7.3355e-03 L6_spectral:7.2006e-03 L7_spectral:6.9561e-03 L8_spectral:6.9081e-03 L9_spectral:6.8512e-03 L10_spectral:7.2084e-03 L11_spectral:7.7116e-03 L12_spectral:7.6529e-03 ip_v_neg_g:5.9664e-03 cos_v_neg_g:7.1888e-04 v_norm:1.3684e+00 g_norm:6.0651e+00 hv_norm:1.5441e+00 cos_v_hv:4.6420e-03 hg_norm:2.3179e+03 cos_g_hg:4.2563e-01 v_par:2.3155e-05 v_perp:1.3684e+00 L1_cos_v_neg_g:1.1452e-02 L1_v_norm:3.2395e-02 L2_cos_v_neg_g:1.0343e-02 L2_v_norm:3.1125e-02 L3_cos_v_neg_g:1.0274e-02 L3_v_norm:3.0910e-02 L4_cos_v_neg_g:9.4935e-03 
L4_v_norm:3.1651e-02 L5_cos_v_neg_g:9.1281e-03 L5_v_norm:3.2143e-02 L6_cos_v_neg_g:8.3548e-03 L6_v_norm:3.2437e-02 L7_cos_v_neg_g:8.3740e-03 L7_v_norm:3.2243e-02 L8_cos_v_neg_g:8.0839e-03 L8_v_norm:3.2252e-02 L9_cos_v_neg_g:8.2319e-03 L9_v_norm:3.2155e-02 L10_cos_v_neg_g:7.1944e-03 L10_v_norm:3.2238e-02 L11_cos_v_neg_g:5.7085e-03 L11_v_norm:3.2324e-02 L12_cos_v_neg_g:5.2408e-03 L12_v_norm:3.2195e-02 +step:7500 train loss:3.604993 +step:7501 train loss:3.596366 +step:7502 train loss:3.585598 +step:7503 train loss:3.563662 +step:7504 train loss:3.589803 +step:7505 train loss:3.576286 +step:7506 train loss:3.636990 +step:7507 train loss:3.556416 +step:7508 train loss:3.625709 +step:7509 train loss:3.596478 +step:7510 train loss:3.626824 +step:7511 train loss:3.633365 +step:7512 train loss:3.893814 +step:7513 train loss:3.587712 +step:7514 train loss:3.610726 +step:7515 train loss:3.586437 +step:7516 train loss:3.594968 +step:7517 train loss:3.625754 +step:7518 train loss:3.608671 +step:7519 train loss:3.616196 +step:7520 train loss:3.682102 +step:7521 train loss:3.570006 +step:7522 train loss:3.622586 +step:7523 train loss:3.654355 +step:7524 train loss:3.601999 +step:7525 train loss:3.603834 +step:7526 train loss:3.556419 +step:7527 train loss:3.563346 +step:7528 train loss:3.658786 +step:7529 train loss:3.638035 +step:7530 train loss:3.585424 +step:7531 train loss:3.657031 +step:7532 train loss:3.646255 +step:7533 train loss:3.573556 +step:7534 train loss:3.635443 +step:7535 train loss:3.640901 +step:7536 train loss:3.672577 +step:7537 train loss:3.691238 +step:7538 train loss:3.718680 +step:7539 train loss:3.620265 +step:7540 train loss:3.608576 +step:7541 train loss:3.661354 +step:7542 train loss:3.621561 +step:7543 train loss:3.577765 +step:7544 train loss:3.621440 +step:7545 train loss:3.607903 +step:7546 train loss:3.564293 +step:7547 train loss:3.610411 +step:7548 train loss:3.622488 +step:7549 train loss:3.605797 +step:7550 train loss:3.604684 +step:7551 train loss:3.702662 +step:7552 train loss:3.615729 +step:7553 train loss:3.654400 +step:7554 train loss:3.583103 +step:7555 train loss:3.669296 +step:7556 train loss:3.571275 +step:7557 train loss:3.669757 +step:7558 train loss:3.658273 +step:7559 train loss:3.613165 +step:7560 train loss:3.705671 +step:7561 train loss:3.680583 +step:7562 train loss:3.583170 +step:7563 train loss:3.578085 +step:7564 train loss:3.635268 +step:7565 train loss:3.650609 +step:7566 train loss:3.641376 +step:7567 train loss:3.661009 +step:7568 train loss:3.601353 +step:7569 train loss:3.659331 +step:7570 train loss:3.644089 +step:7571 train loss:3.726805 +step:7572 train loss:3.572987 +step:7573 train loss:3.645432 +step:7574 train loss:3.611549 +step:7575 train loss:3.600230 +step:7576 train loss:3.610654 +step:7577 train loss:3.627213 +step:7578 train loss:3.683528 +step:7579 train loss:3.618203 +step:7580 train loss:3.605572 +step:7581 train loss:3.590391 +step:7582 train loss:3.649389 +step:7583 train loss:3.581646 +step:7584 train loss:3.567068 +step:7585 train loss:3.538821 +step:7586 train loss:3.575796 +step:7587 train loss:3.638593 +step:7588 train loss:3.764375 +step:7589 train loss:3.585237 +step:7590 train loss:3.656936 +step:7591 train loss:3.662912 +step:7592 train loss:3.619146 +step:7593 train loss:3.640671 +step:7594 train loss:3.638807 +step:7595 train loss:3.606923 +step:7596 train loss:3.662020 +step:7597 train loss:3.565948 +step:7598 train loss:3.625990 +step:7599 train loss:3.622283 +step:7600 train loss:3.576154 +step:7601 train 
loss:3.688428 +step:7602 train loss:3.632879 +step:7603 train loss:3.590564 +step:7604 train loss:3.735950 +step:7605 train loss:3.629800 +step:7606 train loss:3.663938 +step:7607 train loss:3.611763 +step:7608 train loss:3.623530 +step:7609 train loss:3.655216 +step:7610 train loss:3.614832 +step:7611 train loss:3.593035 +step:7612 train loss:3.535810 +step:7613 train loss:3.583567 +step:7614 train loss:3.654513 +step:7615 train loss:3.610013 +step:7616 train loss:3.680975 +step:7617 train loss:3.579195 +step:7618 train loss:3.667692 +step:7619 train loss:3.611828 +step:7620 train loss:3.597819 +step:7621 train loss:3.543890 +step:7622 train loss:3.818018 +step:7623 train loss:3.832504 +step:7624 train loss:3.649917 +step:7625 train loss:3.682254 +step:7626 train loss:3.601405 +step:7627 train loss:3.675368 +step:7628 train loss:3.559011 +step:7629 train loss:3.617611 +step:7630 train loss:3.632217 +step:7631 train loss:3.610996 +step:7632 train loss:3.660032 +step:7633 train loss:3.729575 +step:7634 train loss:3.691002 +step:7635 train loss:3.593538 +step:7636 train loss:3.624590 +step:7637 train loss:3.574348 +step:7638 train loss:3.679731 +step:7639 train loss:3.608466 +step:7640 train loss:3.588403 +step:7641 train loss:3.620114 +step:7642 train loss:3.953810 +step:7643 train loss:3.706890 +step:7644 train loss:3.631771 +step:7645 train loss:3.623557 +step:7646 train loss:3.608387 +step:7647 train loss:3.596736 +step:7648 train loss:3.639522 +step:7649 train loss:3.594286 +step:7650 train loss:3.639971 +step:7651 train loss:3.665597 +step:7652 train loss:3.542365 +step:7653 train loss:3.734743 +step:7654 train loss:3.598031 +step:7655 train loss:3.614081 +step:7656 train loss:3.588977 +step:7657 train loss:3.605113 +step:7658 train loss:3.556714 +step:7659 train loss:3.621965 +step:7660 train loss:3.558911 +step:7661 train loss:3.571269 +step:7662 train loss:3.569431 +step:7663 train loss:3.621545 +step:7664 train loss:3.577643 +step:7665 train loss:3.555496 +step:7666 train loss:3.665165 +step:7667 train loss:3.573175 +step:7668 train loss:3.682478 +step:7669 train loss:3.619287 +step:7670 train loss:3.573732 +step:7671 train loss:3.629577 +step:7672 train loss:3.648422 +step:7673 train loss:3.613669 +step:7674 train loss:3.653357 +step:7675 train loss:3.703678 +step:7676 train loss:3.673593 +step:7677 train loss:3.703911 +step:7678 train loss:3.636754 +step:7679 train loss:3.660497 +step:7680 train loss:3.669878 +step:7681 train loss:3.639775 +step:7682 train loss:3.603574 +step:7683 train loss:3.609579 +step:7684 train loss:3.581618 +step:7685 train loss:3.559428 +step:7686 train loss:3.682241 +step:7687 train loss:3.597646 +step:7688 train loss:3.565197 +step:7689 train loss:3.612590 +step:7690 train loss:3.583237 +step:7691 train loss:3.605750 +step:7692 train loss:3.638277 +step:7693 train loss:3.643774 +step:7694 train loss:3.692336 +step:7695 train loss:3.621426 +step:7696 train loss:3.596964 +step:7697 train loss:3.581693 +step:7698 train loss:3.642080 +step:7699 train loss:3.639183 +step:7700 train loss:3.540715 +step:7701 train loss:3.655765 +step:7702 train loss:3.599623 +step:7703 train loss:3.601810 +step:7704 train loss:3.651797 +step:7705 train loss:3.613803 +step:7706 train loss:3.547097 +step:7707 train loss:3.663428 +step:7708 train loss:3.605145 +step:7709 train loss:3.624366 +step:7710 train loss:3.687542 +step:7711 train loss:3.648539 +step:7712 train loss:3.592531 +step:7713 train loss:3.674195 +step:7714 train loss:3.617876 +step:7715 train loss:3.569208 
+step:7716 train loss:3.608121 +step:7717 train loss:3.637884 +step:7718 train loss:3.640868 +step:7719 train loss:3.596722 +step:7720 train loss:3.613421 +step:7721 train loss:3.652405 +step:7722 train loss:3.580717 +step:7723 train loss:3.948801 +step:7724 train loss:3.616284 +step:7725 train loss:3.522423 +step:7726 train loss:3.609242 +step:7727 train loss:3.632793 +step:7728 train loss:3.582134 +step:7729 train loss:3.596910 +step:7730 train loss:3.618515 +step:7731 train loss:3.647616 +step:7732 train loss:3.666280 +step:7733 train loss:3.577198 +step:7734 train loss:3.602441 +step:7735 train loss:3.690504 +step:7736 train loss:3.637544 +step:7737 train loss:3.653720 +step:7738 train loss:3.560047 +step:7739 train loss:3.629664 +step:7740 train loss:3.580804 +step:7741 train loss:3.617552 +step:7742 train loss:3.614933 +step:7743 train loss:3.569753 +step:7744 train loss:3.691844 +step:7745 train loss:3.585632 +step:7746 train loss:3.560213 +step:7747 train loss:3.653501 +step:7748 train loss:3.634512 +step:7749 train loss:3.561635 +step:7750 validation loss:3.550037 +step:7750 train loss:3.717565 +step:7751 train loss:3.599882 +step:7752 train loss:3.591306 +step:7753 train loss:3.595577 +step:7754 train loss:3.567737 +step:7755 train loss:3.631866 +step:7756 train loss:3.665169 +step:7757 train loss:3.614410 +step:7758 train loss:3.582533 +step:7759 train loss:3.606526 +step:7760 train loss:3.638572 +step:7761 train loss:3.628671 +step:7762 train loss:3.615884 +step:7763 train loss:3.599290 +step:7764 train loss:3.603353 +step:7765 train loss:3.560100 +step:7766 train loss:3.624339 +step:7767 train loss:3.629358 +step:7768 train loss:3.582772 +step:7769 train loss:3.644993 +step:7770 train loss:3.663532 +step:7771 train loss:3.638491 +step:7772 train loss:3.611846 +step:7773 train loss:3.666831 +step:7774 train loss:3.567732 +step:7775 train loss:3.554527 +step:7776 train loss:3.655572 +step:7777 train loss:3.613955 +step:7778 train loss:3.571339 +step:7779 train loss:3.612027 +step:7780 train loss:3.610853 +step:7781 train loss:3.619672 +step:7782 train loss:3.596654 +step:7783 train loss:3.579893 +step:7784 train loss:3.581452 +step:7785 train loss:3.622489 +step:7786 train loss:3.579966 +step:7787 train loss:3.660279 +step:7788 train loss:3.607682 +step:7789 train loss:3.546413 +step:7790 train loss:3.602393 +step:7791 train loss:3.637101 +step:7792 train loss:3.596536 +step:7793 train loss:3.620507 +step:7794 train loss:3.608284 +step:7795 train loss:3.641023 +step:7796 train loss:3.600259 +step:7797 train loss:3.621155 +step:7798 train loss:3.616051 +step:7799 train loss:3.604989 +step:7800 train loss:3.556264 +step:7801 train loss:3.624838 +step:7802 train loss:3.603049 +step:7803 train loss:3.654695 +step:7804 train loss:3.615956 +step:7805 train loss:3.613861 +step:7806 train loss:3.628115 +step:7807 train loss:3.703260 +step:7808 train loss:3.561571 +step:7809 train loss:3.539042 +step:7810 train loss:3.631221 +step:7811 train loss:3.565203 +step:7812 train loss:3.582817 +step:7813 train loss:3.670407 +step:7814 train loss:3.736053 +step:7815 train loss:3.553284 +step:7816 train loss:3.635875 +step:7817 train loss:3.670859 +step:7818 train loss:3.567009 +step:7819 train loss:3.617419 +step:7820 train loss:3.663871 +step:7821 train loss:3.590242 +step:7822 train loss:3.549851 +step:7823 train loss:3.608230 +step:7824 train loss:3.608135 +step:7825 train loss:3.592494 +step:7826 train loss:3.590755 +step:7827 train loss:3.631433 +step:7828 train loss:3.623171 +step:7829 
train loss:3.575123 +step:7830 train loss:3.587713 +step:7831 train loss:3.594641 +step:7832 train loss:3.655522 +step:7833 train loss:3.633883 +step:7834 train loss:3.595707 +step:7835 train loss:3.621053 +step:7836 train loss:3.733821 +step:7837 train loss:3.619085 +step:7838 train loss:3.587791 +step:7839 train loss:3.546719 +step:7840 train loss:3.563598 +step:7841 train loss:3.658858 +step:7842 train loss:3.645761 +step:7843 train loss:3.695745 +step:7844 train loss:3.630371 +step:7845 train loss:3.606394 +step:7846 train loss:3.717883 +step:7847 train loss:3.605485 +step:7848 train loss:3.618697 +step:7849 train loss:3.634157 +step:7850 train loss:3.602561 +step:7851 train loss:3.629286 +step:7852 train loss:3.603502 +step:7853 train loss:3.577719 +step:7854 train loss:3.607628 +step:7855 train loss:3.607951 +step:7856 train loss:3.612660 +step:7857 train loss:3.597983 +step:7858 train loss:3.606619 +step:7859 train loss:3.617183 +step:7860 train loss:3.653286 +step:7861 train loss:3.636701 +step:7862 train loss:3.581274 +step:7863 train loss:3.684727 +step:7864 train loss:3.527020 +step:7865 train loss:3.599651 +step:7866 train loss:3.578696 +step:7867 train loss:3.623531 +step:7868 train loss:3.601359 +step:7869 train loss:3.603050 +step:7870 train loss:3.529076 +step:7871 train loss:3.589023 +step:7872 train loss:3.583843 +step:7873 train loss:3.660607 +step:7874 train loss:3.602609 +step:7875 train loss:3.607430 +step:7876 train loss:3.626644 +step:7877 train loss:3.581344 +step:7878 train loss:3.618292 +step:7879 train loss:3.953897 +step:7880 train loss:3.609466 +step:7881 train loss:3.632943 +step:7882 train loss:3.716751 +step:7883 train loss:3.530328 +step:7884 train loss:3.619350 +step:7885 train loss:3.603748 +step:7886 train loss:3.599812 +step:7887 train loss:3.595762 +step:7888 train loss:3.629242 +step:7889 train loss:3.676773 +step:7890 train loss:3.580293 +step:7891 train loss:3.631341 +step:7892 train loss:3.600271 +step:7893 train loss:3.576390 +step:7894 train loss:3.597537 +step:7895 train loss:3.582464 +step:7896 train loss:3.581544 +step:7897 train loss:3.604325 +step:7898 train loss:3.617311 +step:7899 train loss:3.603649 +step:7900 train loss:3.572560 +step:7901 train loss:3.561367 +step:7902 train loss:3.710967 +step:7903 train loss:3.555576 +step:7904 train loss:3.606359 +step:7905 train loss:3.673499 +step:7906 train loss:3.572678 +step:7907 train loss:3.599767 +step:7908 train loss:3.649404 +step:7909 train loss:3.699479 +step:7910 train loss:3.579272 +step:7911 train loss:3.600697 +step:7912 train loss:3.605834 +step:7913 train loss:3.576217 +step:7914 train loss:3.613218 +step:7915 train loss:3.718282 +step:7916 train loss:3.587882 +step:7917 train loss:3.647453 +step:7918 train loss:3.588348 +step:7919 train loss:3.581058 +step:7920 train loss:3.621609 +step:7921 train loss:3.624089 +step:7922 train loss:3.600531 +step:7923 train loss:3.649613 +step:7924 train loss:3.608412 +step:7925 train loss:3.631413 +step:7926 train loss:3.536113 +step:7927 train loss:3.810130 +step:7928 train loss:3.640551 +step:7929 train loss:3.605530 +step:7930 train loss:3.565810 +step:7931 train loss:3.588813 +step:7932 train loss:3.610484 +step:7933 train loss:3.627993 +step:7934 train loss:3.715950 +step:7935 train loss:3.639500 +step:7936 train loss:3.613842 +step:7937 train loss:3.562564 +step:7938 train loss:3.580327 +step:7939 train loss:3.626145 +step:7940 train loss:3.610847 +step:7941 train loss:3.635704 +step:7942 train loss:3.624569 +step:7943 train loss:3.637273 
+step:7944 train loss:3.559914 +step:7945 train loss:3.662424 +step:7946 train loss:3.607731 +step:7947 train loss:3.621700 +step:7948 train loss:3.580394 +step:7949 train loss:3.626726 +step:7950 train loss:3.688479 +step:7951 train loss:3.649583 +step:7952 train loss:3.796095 +step:7953 train loss:3.688900 +step:7954 train loss:3.593334 +step:7955 train loss:3.585977 +step:7956 train loss:3.585050 +step:7957 train loss:3.661443 +step:7958 train loss:3.670745 +step:7959 train loss:3.623972 +step:7960 train loss:3.688068 +step:7961 train loss:3.595383 +step:7962 train loss:3.567988 +step:7963 train loss:3.606023 +step:7964 train loss:3.603031 +step:7965 train loss:3.611705 +step:7966 train loss:3.583239 +step:7967 train loss:3.606856 +step:7968 train loss:3.620300 +step:7969 train loss:3.575001 +step:7970 train loss:3.545026 +step:7971 train loss:3.628700 +step:7972 train loss:3.604311 +step:7973 train loss:3.575570 +step:7974 train loss:3.617958 +step:7975 train loss:3.602376 +step:7976 train loss:3.622227 +step:7977 train loss:3.654536 +step:7978 train loss:3.674374 +step:7979 train loss:3.623221 +step:7980 train loss:3.530622 +step:7981 train loss:3.567994 +step:7982 train loss:3.615957 +step:7983 train loss:3.631263 +step:7984 train loss:3.673722 +step:7985 train loss:3.597406 +step:7986 train loss:3.620775 +step:7987 train loss:3.675017 +step:7988 train loss:3.649370 +step:7989 train loss:3.552823 +step:7990 train loss:3.567841 +step:7991 train loss:3.584141 +step:7992 train loss:3.610305 +step:7993 train loss:3.586380 +step:7994 train loss:3.640331 +step:7995 train loss:3.643326 +step:7996 train loss:3.609875 +step:7997 train loss:3.625532 +step:7998 train loss:3.655577 +step:7999 train loss:3.584494 +step:8000 validation loss:3.543480 total_sharp:4.0714e-03 L1_sharp:7.5205e-02 L2_sharp:5.2085e-02 L3_sharp:8.2272e-02 L4_sharp:5.4122e-02 L5_sharp:6.7219e-02 L6_sharp:7.4631e-02 L7_sharp:1.0177e-01 L8_sharp:1.0768e-01 L9_sharp:8.6991e-02 L10_sharp:7.8192e-02 L11_sharp:6.2248e-02 L12_sharp:7.6901e-02 total_fnorm:1.3484e+00 total_l1_linf:7.4378e+03 total_spectral:1.3484e+00 L1_fnorm:3.1823e-02 L2_fnorm:3.0993e-02 L3_fnorm:3.0876e-02 L4_fnorm:3.1413e-02 L5_fnorm:3.2072e-02 L6_fnorm:3.2138e-02 L7_fnorm:3.2160e-02 L8_fnorm:3.2091e-02 L9_fnorm:3.2024e-02 L10_fnorm:3.2362e-02 L11_fnorm:3.2430e-02 L12_fnorm:3.2352e-02 L1_l1linf:2.9648e-01 L2_l1linf:3.2832e-01 L3_l1linf:3.2447e-01 L4_l1linf:3.3153e-01 L5_l1linf:3.2577e-01 L6_l1linf:2.9931e-01 L7_l1linf:2.8552e-01 L8_l1linf:2.9742e-01 L9_l1linf:2.9808e-01 L10_l1linf:3.3024e-01 L11_l1linf:3.5687e-01 L12_l1linf:3.5416e-01 L1_spectral:6.7080e-03 L2_spectral:7.4263e-03 L3_spectral:7.3217e-03 L4_spectral:7.5036e-03 L5_spectral:7.3490e-03 L6_spectral:6.7568e-03 L7_spectral:6.4178e-03 L8_spectral:6.6824e-03 L9_spectral:6.7148e-03 L10_spectral:7.3817e-03 L11_spectral:7.9381e-03 L12_spectral:7.9645e-03 ip_v_neg_g:4.0748e-03 cos_v_neg_g:5.5688e-04 v_norm:1.3484e+00 g_norm:5.4264e+00 hv_norm:1.1644e+00 cos_v_hv:4.7150e-03 hg_norm:8.4078e+02 cos_g_hg:4.7673e-01 v_par:1.8909e-05 v_perp:1.3484e+00 L1_cos_v_neg_g:4.2731e-03 L1_v_norm:3.1823e-02 L2_cos_v_neg_g:3.7500e-03 L2_v_norm:3.0993e-02 L3_cos_v_neg_g:4.2401e-03 L3_v_norm:3.0876e-02 L4_cos_v_neg_g:4.4494e-03 L4_v_norm:3.1413e-02 L5_cos_v_neg_g:4.9660e-03 L5_v_norm:3.2072e-02 L6_cos_v_neg_g:6.3125e-03 L6_v_norm:3.2138e-02 L7_cos_v_neg_g:7.4952e-03 L7_v_norm:3.2160e-02 L8_cos_v_neg_g:7.9805e-03 L8_v_norm:3.2091e-02 L9_cos_v_neg_g:7.5658e-03 L9_v_norm:3.2024e-02 L10_cos_v_neg_g:6.2213e-03 L10_v_norm:3.2362e-02 
L11_cos_v_neg_g:6.1705e-03 L11_v_norm:3.2430e-02 L12_cos_v_neg_g:4.8153e-03 L12_v_norm:3.2352e-02 +step:8000 train loss:3.652430 +step:8001 train loss:3.610955 +step:8002 train loss:3.634523 +step:8003 train loss:3.651867 +step:8004 train loss:3.628424 +step:8005 train loss:3.544609 +step:8006 train loss:3.626575 +step:8007 train loss:3.595603 +step:8008 train loss:3.617282 +step:8009 train loss:3.691419 +step:8010 train loss:3.903257 +step:8011 train loss:3.578448 +step:8012 train loss:3.650596 +step:8013 train loss:3.608971 +step:8014 train loss:3.621905 +step:8015 train loss:3.618447 +step:8016 train loss:3.604980 +step:8017 train loss:3.628747 +step:8018 train loss:3.590472 +step:8019 train loss:3.557901 +step:8020 train loss:3.593199 +step:8021 train loss:3.671080 +step:8022 train loss:3.585659 +step:8023 train loss:3.619375 +step:8024 train loss:3.464723 +step:8025 train loss:3.599517 +step:8026 train loss:3.604805 +step:8027 train loss:3.612453 +step:8028 train loss:3.667862 +step:8029 train loss:3.599320 +step:8030 train loss:3.556113 +step:8031 train loss:3.614456 +step:8032 train loss:3.603270 +step:8033 train loss:3.555159 +step:8034 train loss:3.590690 +step:8035 train loss:3.572921 +step:8036 train loss:3.573200 +step:8037 train loss:3.540371 +step:8038 train loss:3.554890 +step:8039 train loss:3.650629 +step:8040 train loss:3.583540 +step:8041 train loss:3.581035 +step:8042 train loss:3.616912 +step:8043 train loss:3.558252 +step:8044 train loss:3.575624 +step:8045 train loss:3.638700 +step:8046 train loss:3.567306 +step:8047 train loss:3.572266 +step:8048 train loss:3.596069 +step:8049 train loss:3.649614 +step:8050 train loss:3.588560 +step:8051 train loss:3.566979 +step:8052 train loss:3.628862 +step:8053 train loss:3.576011 +step:8054 train loss:3.616291 +step:8055 train loss:3.643024 +step:8056 train loss:3.612855 +step:8057 train loss:3.687393 +step:8058 train loss:3.593623 +step:8059 train loss:3.654901 +step:8060 train loss:3.619749 +step:8061 train loss:3.512521 +step:8062 train loss:3.647417 +step:8063 train loss:3.607083 +step:8064 train loss:3.571640 +step:8065 train loss:3.629061 +step:8066 train loss:3.593494 +step:8067 train loss:3.653851 +step:8068 train loss:3.580006 +step:8069 train loss:3.607059 +step:8070 train loss:3.570565 +step:8071 train loss:3.581276 +step:8072 train loss:3.620127 +step:8073 train loss:3.574987 +step:8074 train loss:3.588292 +step:8075 train loss:3.566406 +step:8076 train loss:3.623710 +step:8077 train loss:3.625481 +step:8078 train loss:3.569288 +step:8079 train loss:3.592292 +step:8080 train loss:3.576447 +step:8081 train loss:3.598248 +step:8082 train loss:3.611588 +step:8083 train loss:3.513098 +step:8084 train loss:3.649757 +step:8085 train loss:3.522798 +step:8086 train loss:3.651617 +step:8087 train loss:3.543458 +step:8088 train loss:3.597469 +step:8089 train loss:3.625879 +step:8090 train loss:3.652174 +step:8091 train loss:3.591203 +step:8092 train loss:3.577823 +step:8093 train loss:3.583198 +step:8094 train loss:3.580069 +step:8095 train loss:3.611185 +step:8096 train loss:3.610933 +step:8097 train loss:3.539915 +step:8098 train loss:3.554823 +step:8099 train loss:3.535151 +step:8100 train loss:3.599399 +step:8101 train loss:3.668835 +step:8102 train loss:3.612740 +step:8103 train loss:3.562201 +step:8104 train loss:3.613168 +step:8105 train loss:3.611968 +step:8106 train loss:3.569577 +step:8107 train loss:3.555485 +step:8108 train loss:3.569195 +step:8109 train loss:3.569502 +step:8110 train loss:3.631836 +step:8111 
train loss:3.555530 +step:8112 train loss:3.573758 +step:8113 train loss:3.562781 +step:8114 train loss:3.508683 +step:8115 train loss:3.562776 +step:8116 train loss:3.599366 +step:8117 train loss:3.566705 +step:8118 train loss:3.562362 +step:8119 train loss:3.601342 +step:8120 train loss:3.550667 +step:8121 train loss:3.606147 +step:8122 train loss:3.588182 +step:8123 train loss:3.600623 +step:8124 train loss:3.553900 +step:8125 train loss:3.546265 +step:8126 train loss:3.530527 +step:8127 train loss:3.632048 +step:8128 train loss:3.636073 +step:8129 train loss:3.555559 +step:8130 train loss:3.585865 +step:8131 train loss:3.553114 +step:8132 train loss:3.621427 +step:8133 train loss:3.544980 +step:8134 train loss:3.583163 +step:8135 train loss:3.569706 +step:8136 train loss:3.584610 +step:8137 train loss:3.645856 +step:8138 train loss:3.559168 +step:8139 train loss:3.628323 +step:8140 train loss:3.559835 +step:8141 train loss:3.581189 +step:8142 train loss:3.556705 +step:8143 train loss:3.610180 +step:8144 train loss:3.589039 +step:8145 train loss:3.562412 +step:8146 train loss:3.563271 +step:8147 train loss:3.589274 +step:8148 train loss:3.678261 +step:8149 train loss:3.591713 +step:8150 train loss:3.577367 +step:8151 train loss:3.565621 +step:8152 train loss:3.665174 +step:8153 train loss:3.538142 +step:8154 train loss:3.559246 +step:8155 train loss:3.578620 +step:8156 train loss:3.570951 +step:8157 train loss:3.586397 +step:8158 train loss:3.599422 +step:8159 train loss:3.610511 +step:8160 train loss:3.565141 +step:8161 train loss:3.609179 +step:8162 train loss:3.537169 +step:8163 train loss:3.601384 +step:8164 train loss:3.582909 +step:8165 train loss:3.637692 +step:8166 train loss:3.638731 +step:8167 train loss:3.546272 +step:8168 train loss:3.525037 +step:8169 train loss:3.573472 +step:8170 train loss:3.519073 +step:8171 train loss:3.583210 +step:8172 train loss:3.581442 +step:8173 train loss:3.580204 +step:8174 train loss:3.592572 +step:8175 train loss:3.549795 +step:8176 train loss:3.548144 +step:8177 train loss:3.593674 +step:8178 train loss:3.681147 +step:8179 train loss:3.585872 +step:8180 train loss:3.607339 +step:8181 train loss:3.608710 +step:8182 train loss:3.571428 +step:8183 train loss:3.555107 +step:8184 train loss:3.547572 +step:8185 train loss:3.589684 +step:8186 train loss:3.592447 +step:8187 train loss:3.601392 +step:8188 train loss:3.533008 +step:8189 train loss:3.679640 +step:8190 train loss:3.611087 +step:8191 train loss:3.616617 +step:8192 train loss:3.722954 +step:8193 train loss:3.596109 +step:8194 train loss:3.529384 +step:8195 train loss:3.628241 +step:8196 train loss:3.542603 +step:8197 train loss:3.577208 +step:8198 train loss:3.581042 +step:8199 train loss:3.585294 +step:8200 train loss:3.558320 +step:8201 train loss:3.677778 +step:8202 train loss:3.593106 +step:8203 train loss:3.616146 +step:8204 train loss:3.520654 +step:8205 train loss:3.533098 +step:8206 train loss:3.652283 +step:8207 train loss:3.579819 +step:8208 train loss:3.596051 +step:8209 train loss:3.644833 +step:8210 train loss:3.628629 +step:8211 train loss:3.563277 +step:8212 train loss:3.617957 +step:8213 train loss:3.627300 +step:8214 train loss:3.666491 +step:8215 train loss:3.638924 +step:8216 train loss:3.623652 +step:8217 train loss:3.599512 +step:8218 train loss:3.608812 +step:8219 train loss:3.740435 +step:8220 train loss:3.572448 +step:8221 train loss:3.593154 +step:8222 train loss:3.544153 +step:8223 train loss:3.562541 +step:8224 train loss:3.573498 +step:8225 train loss:3.625386 
+step:8226 train loss:3.552082 +step:8227 train loss:3.624599 +step:8228 train loss:3.507558 +step:8229 train loss:3.552834 +step:8230 train loss:3.565592 +step:8231 train loss:3.594808 +step:8232 train loss:3.591020 +step:8233 train loss:3.640692 +step:8234 train loss:3.633772 +step:8235 train loss:3.605078 +step:8236 train loss:3.590550 +step:8237 train loss:3.541940 +step:8238 train loss:3.791590 +step:8239 train loss:3.621980 +step:8240 train loss:3.574241 +step:8241 train loss:3.541821 +step:8242 train loss:3.581898 +step:8243 train loss:3.569409 +step:8244 train loss:3.588593 +step:8245 train loss:3.568164 +step:8246 train loss:3.636812 +step:8247 train loss:3.664910 +step:8248 train loss:3.585393 +step:8249 train loss:3.580666 +step:8250 validation loss:3.532854 +step:8250 train loss:3.567911 +step:8251 train loss:3.661832 +step:8252 train loss:3.598375 +step:8253 train loss:3.573306 +step:8254 train loss:3.535894 +step:8255 train loss:3.575816 +step:8256 train loss:3.553314 +step:8257 train loss:3.664243 +step:8258 train loss:3.584314 +step:8259 train loss:3.569268 +step:8260 train loss:3.563428 +step:8261 train loss:3.566315 +step:8262 train loss:3.577712 +step:8263 train loss:3.595896 +step:8264 train loss:3.557821 +step:8265 train loss:3.551614 +step:8266 train loss:3.555470 +step:8267 train loss:3.491940 +step:8268 train loss:3.614335 +step:8269 train loss:3.546618 +step:8270 train loss:3.598971 +step:8271 train loss:3.624589 +step:8272 train loss:3.650164 +step:8273 train loss:3.529999 +step:8274 train loss:3.591022 +step:8275 train loss:3.548820 +step:8276 train loss:3.586591 +step:8277 train loss:3.655982 +step:8278 train loss:3.672735 +step:8279 train loss:3.584185 +step:8280 train loss:3.573429 +step:8281 train loss:3.539959 +step:8282 train loss:3.601500 +step:8283 train loss:3.582823 +step:8284 train loss:3.570616 +step:8285 train loss:3.560616 +step:8286 train loss:3.669645 +step:8287 train loss:3.606186 +step:8288 train loss:3.583717 +step:8289 train loss:3.591446 +step:8290 train loss:3.533824 +step:8291 train loss:3.574244 +step:8292 train loss:3.602909 +step:8293 train loss:3.571866 +step:8294 train loss:3.547964 +step:8295 train loss:3.581110 +step:8296 train loss:3.651233 +step:8297 train loss:3.727263 +step:8298 train loss:3.553766 +step:8299 train loss:3.587562 +step:8300 train loss:3.598907 +step:8301 train loss:3.567186 +step:8302 train loss:3.627012 +step:8303 train loss:3.755744 +step:8304 train loss:3.569391 +step:8305 train loss:3.615256 +step:8306 train loss:3.590377 +step:8307 train loss:3.605690 +step:8308 train loss:3.601743 +step:8309 train loss:3.628540 +step:8310 train loss:3.544826 +step:8311 train loss:3.634501 +step:8312 train loss:3.627514 +step:8313 train loss:3.693957 +step:8314 train loss:3.563636 +step:8315 train loss:3.510798 +step:8316 train loss:3.570278 +step:8317 train loss:3.594227 +step:8318 train loss:3.584771 +step:8319 train loss:3.618642 +step:8320 train loss:3.640925 +step:8321 train loss:3.549421 +step:8322 train loss:3.565625 +step:8323 train loss:3.603059 +step:8324 train loss:3.576218 +step:8325 train loss:3.632482 +step:8326 train loss:3.599060 +step:8327 train loss:3.590525 +step:8328 train loss:3.658581 +step:8329 train loss:3.571973 +step:8330 train loss:3.608578 +step:8331 train loss:3.539486 +step:8332 train loss:3.633941 +step:8333 train loss:3.655169 +step:8334 train loss:3.519276 +step:8335 train loss:3.585644 +step:8336 train loss:3.674364 +step:8337 train loss:3.609732 +step:8338 train loss:3.574374 +step:8339 
train loss:3.554154 +step:8340 train loss:3.644086 +step:8341 train loss:3.545064 +step:8342 train loss:3.619395 +step:8343 train loss:3.530197 +step:8344 train loss:3.576279 +step:8345 train loss:3.610789 +step:8346 train loss:3.688429 +step:8347 train loss:3.582751 +step:8348 train loss:3.608560 +step:8349 train loss:3.586917 +step:8350 train loss:3.605469 +step:8351 train loss:3.547680 +step:8352 train loss:3.630481 +step:8353 train loss:3.589582 +step:8354 train loss:3.567539 +step:8355 train loss:3.572468 +step:8356 train loss:3.560765 +step:8357 train loss:3.579933 +step:8358 train loss:3.551903 +step:8359 train loss:3.551716 +step:8360 train loss:3.592111 +step:8361 train loss:3.612990 +step:8362 train loss:3.628379 +step:8363 train loss:3.628517 +step:8364 train loss:3.588539 +step:8365 train loss:3.736373 +step:8366 train loss:3.573681 +step:8367 train loss:3.554376 +step:8368 train loss:3.518987 +step:8369 train loss:3.554579 +step:8370 train loss:3.632463 +step:8371 train loss:3.609285 +step:8372 train loss:3.580600 +step:8373 train loss:3.598313 +step:8374 train loss:3.525583 +step:8375 train loss:3.591474 +step:8376 train loss:3.625597 +step:8377 train loss:3.451717 +step:8378 train loss:3.667096 +step:8379 train loss:3.530345 +step:8380 train loss:3.544441 +step:8381 train loss:3.546409 +step:8382 train loss:3.576398 +step:8383 train loss:3.534891 +step:8384 train loss:3.578856 +step:8385 train loss:3.590335 +step:8386 train loss:3.566674 +step:8387 train loss:3.733203 +step:8388 train loss:3.639922 +step:8389 train loss:3.621225 +step:8390 train loss:3.618600 +step:8391 train loss:3.555529 +step:8392 train loss:3.563380 +step:8393 train loss:3.516353 +step:8394 train loss:3.612679 +step:8395 train loss:3.614321 +step:8396 train loss:3.642208 +step:8397 train loss:3.574218 +step:8398 train loss:3.593693 +step:8399 train loss:3.558712 +step:8400 train loss:3.566306 +step:8401 train loss:3.567320 +step:8402 train loss:3.560986 +step:8403 train loss:3.567814 +step:8404 train loss:3.577412 +step:8405 train loss:3.529196 +step:8406 train loss:3.572949 +step:8407 train loss:3.613838 +step:8408 train loss:3.587043 +step:8409 train loss:3.508869 +step:8410 train loss:3.572290 +step:8411 train loss:3.598105 +step:8412 train loss:3.653296 +step:8413 train loss:3.629351 +step:8414 train loss:3.630285 +step:8415 train loss:3.550158 +step:8416 train loss:3.597589 +step:8417 train loss:3.516083 +step:8418 train loss:3.616119 +step:8419 train loss:3.575245 +step:8420 train loss:3.648257 +step:8421 train loss:3.564802 +step:8422 train loss:3.584056 +step:8423 train loss:3.597367 +step:8424 train loss:3.603233 +step:8425 train loss:3.660035 +step:8426 train loss:3.631560 +step:8427 train loss:3.548511 +step:8428 train loss:3.562254 +step:8429 train loss:3.624261 +step:8430 train loss:3.563132 +step:8431 train loss:3.569467 +step:8432 train loss:3.570875 +step:8433 train loss:3.542622 +step:8434 train loss:3.578557 +step:8435 train loss:3.502314 +step:8436 train loss:3.582705 +step:8437 train loss:3.626757 +step:8438 train loss:3.603865 +step:8439 train loss:3.545659 +step:8440 train loss:3.513314 +step:8441 train loss:3.574729 +step:8442 train loss:3.594586 +step:8443 train loss:3.550255 +step:8444 train loss:3.584605 +step:8445 train loss:3.537493 +step:8446 train loss:3.585945 +step:8447 train loss:3.598500 +step:8448 train loss:3.580019 +step:8449 train loss:3.573878 +step:8450 train loss:3.561587 +step:8451 train loss:3.596698 +step:8452 train loss:3.566499 +step:8453 train loss:3.549599 
+step:8454 train loss:3.597751 +step:8455 train loss:3.670875 +step:8456 train loss:3.645303 +step:8457 train loss:3.703127 +step:8458 train loss:3.590984 +step:8459 train loss:3.598095 +step:8460 train loss:3.522094 +step:8461 train loss:3.681884 +step:8462 train loss:3.550266 +step:8463 train loss:3.594819 +step:8464 train loss:3.605171 +step:8465 train loss:3.613242 +step:8466 train loss:3.585432 +step:8467 train loss:3.589481 +step:8468 train loss:3.835717 +step:8469 train loss:3.548070 +step:8470 train loss:3.545741 +step:8471 train loss:3.590627 +step:8472 train loss:3.607301 +step:8473 train loss:3.568048 +step:8474 train loss:3.690395 +step:8475 train loss:3.647745 +step:8476 train loss:3.593607 +step:8477 train loss:3.586123 +step:8478 train loss:3.565635 +step:8479 train loss:3.565897 +step:8480 train loss:3.644471 +step:8481 train loss:3.566616 +step:8482 train loss:3.561171 +step:8483 train loss:3.701488 +step:8484 train loss:3.586443 +step:8485 train loss:3.633951 +step:8486 train loss:3.543829 +step:8487 train loss:3.598094 +step:8488 train loss:3.544588 +step:8489 train loss:3.618452 +step:8490 train loss:3.608706 +step:8491 train loss:3.626323 +step:8492 train loss:3.582463 +step:8493 train loss:3.651851 +step:8494 train loss:3.519726 +step:8495 train loss:3.613975 +step:8496 train loss:3.562475 +step:8497 train loss:3.595029 +step:8498 train loss:3.610584 +step:8499 train loss:3.586319 +step:8500 validation loss:3.526098 total_sharp:4.8577e-03 L1_sharp:9.3060e-02 L2_sharp:6.1492e-02 L3_sharp:9.5475e-02 L4_sharp:6.8387e-02 L5_sharp:8.5654e-02 L6_sharp:9.5702e-02 L7_sharp:1.2479e-01 L8_sharp:1.3255e-01 L9_sharp:1.1273e-01 L10_sharp:7.1939e-02 L11_sharp:6.4143e-02 L12_sharp:1.3605e-01 total_fnorm:1.3524e+00 total_l1_linf:7.4567e+03 total_spectral:1.3524e+00 L1_fnorm:3.2057e-02 L2_fnorm:3.0925e-02 L3_fnorm:3.0982e-02 L4_fnorm:3.1690e-02 L5_fnorm:3.2022e-02 L6_fnorm:3.2189e-02 L7_fnorm:3.2217e-02 L8_fnorm:3.2200e-02 L9_fnorm:3.2108e-02 L10_fnorm:3.2197e-02 L11_fnorm:3.2302e-02 L12_fnorm:3.2168e-02 L1_l1linf:3.0557e-01 L2_l1linf:3.3301e-01 L3_l1linf:3.4251e-01 L4_l1linf:3.5462e-01 L5_l1linf:3.2812e-01 L6_l1linf:3.0893e-01 L7_l1linf:2.8219e-01 L8_l1linf:2.9957e-01 L9_l1linf:3.1680e-01 L10_l1linf:3.2108e-01 L11_l1linf:3.4282e-01 L12_l1linf:3.3638e-01 L1_spectral:6.9256e-03 L2_spectral:7.5357e-03 L3_spectral:7.6681e-03 L4_spectral:8.0013e-03 L5_spectral:7.3611e-03 L6_spectral:6.9289e-03 L7_spectral:6.3759e-03 L8_spectral:6.7191e-03 L9_spectral:7.0494e-03 L10_spectral:7.1503e-03 L11_spectral:7.7058e-03 L12_spectral:7.5738e-03 ip_v_neg_g:4.9510e-03 cos_v_neg_g:6.7001e-04 v_norm:1.3524e+00 g_norm:5.4638e+00 hv_norm:1.2825e+00 cos_v_hv:5.1226e-03 hg_norm:1.3808e+03 cos_g_hg:4.1169e-01 v_par:2.0903e-05 v_perp:1.3524e+00 L1_cos_v_neg_g:6.4787e-03 L1_v_norm:3.2057e-02 L2_cos_v_neg_g:6.3423e-03 L2_v_norm:3.0925e-02 L3_cos_v_neg_g:6.0221e-03 L3_v_norm:3.0982e-02 L4_cos_v_neg_g:7.0663e-03 L4_v_norm:3.1690e-02 L5_cos_v_neg_g:9.7671e-03 L5_v_norm:3.2022e-02 L6_cos_v_neg_g:7.1556e-03 L6_v_norm:3.2189e-02 L7_cos_v_neg_g:7.6647e-03 L7_v_norm:3.2217e-02 L8_cos_v_neg_g:9.1415e-03 L8_v_norm:3.2200e-02 L9_cos_v_neg_g:9.1377e-03 L9_v_norm:3.2108e-02 L10_cos_v_neg_g:8.1352e-03 L10_v_norm:3.2197e-02 L11_cos_v_neg_g:6.2228e-03 L11_v_norm:3.2302e-02 L12_cos_v_neg_g:6.5484e-03 L12_v_norm:3.2168e-02 +step:8500 train loss:3.585296 +step:8501 train loss:3.797678 +step:8502 train loss:3.817264 +step:8503 train loss:3.574769 +step:8504 train loss:3.574193 +step:8505 train loss:3.550218 +step:8506 train 
loss:3.622271 +step:8507 train loss:3.557326 +step:8508 train loss:3.592644 +step:8509 train loss:3.535079 +step:8510 train loss:3.555211 +step:8511 train loss:3.514814 +step:8512 train loss:3.610609 +step:8513 train loss:3.618922 +step:8514 train loss:3.558024 +step:8515 train loss:3.656727 +step:8516 train loss:3.574233 +step:8517 train loss:3.598679 +step:8518 train loss:3.486697 +step:8519 train loss:3.581383 +step:8520 train loss:3.546416 +step:8521 train loss:3.589964 +step:8522 train loss:3.485153 +step:8523 train loss:3.575990 +step:8524 train loss:3.569639 +step:8525 train loss:3.633892 +step:8526 train loss:3.619580 +step:8527 train loss:3.559288 +step:8528 train loss:3.642912 +step:8529 train loss:3.595438 +step:8530 train loss:3.635123 +step:8531 train loss:3.620524 +step:8532 train loss:3.659738 +step:8533 train loss:3.608681 +step:8534 train loss:3.612950 +step:8535 train loss:3.582986 +step:8536 train loss:3.672772 +step:8537 train loss:3.587041 +step:8538 train loss:3.655044 +step:8539 train loss:3.579330 +step:8540 train loss:3.603620 +step:8541 train loss:3.545547 +step:8542 train loss:3.608396 +step:8543 train loss:3.526760 +step:8544 train loss:3.519962 +step:8545 train loss:3.575609 +step:8546 train loss:3.525866 +step:8547 train loss:3.580552 +step:8548 train loss:3.548705 +step:8549 train loss:3.592003 +step:8550 train loss:3.545089 +step:8551 train loss:3.594369 +step:8552 train loss:3.602461 +step:8553 train loss:3.597874 +step:8554 train loss:3.575681 +step:8555 train loss:3.584234 +step:8556 train loss:3.666043 +step:8557 train loss:3.558781 +step:8558 train loss:3.601901 +step:8559 train loss:3.591589 +step:8560 train loss:3.571875 +step:8561 train loss:3.528783 +step:8562 train loss:3.558276 +step:8563 train loss:3.554978 +step:8564 train loss:3.622063 +step:8565 train loss:3.602344 +step:8566 train loss:3.617811 +step:8567 train loss:3.567582 +step:8568 train loss:3.579618 +step:8569 train loss:3.594458 +step:8570 train loss:3.534086 +step:8571 train loss:3.575924 +step:8572 train loss:3.590732 +step:8573 train loss:3.669916 +step:8574 train loss:3.597820 +step:8575 train loss:3.597907 +step:8576 train loss:3.629972 +step:8577 train loss:3.712723 +step:8578 train loss:3.620092 +step:8579 train loss:3.609321 +step:8580 train loss:3.543026 +step:8581 train loss:3.583398 +step:8582 train loss:3.588189 +step:8583 train loss:3.588231 +step:8584 train loss:3.578187 +step:8585 train loss:3.660466 +step:8586 train loss:3.575503 +step:8587 train loss:3.583323 +step:8588 train loss:3.634063 +step:8589 train loss:3.575186 +step:8590 train loss:3.570541 +step:8591 train loss:3.574813 +step:8592 train loss:3.533463 +step:8593 train loss:3.614422 +step:8594 train loss:3.634489 +step:8595 train loss:3.561100 +step:8596 train loss:3.598343 +step:8597 train loss:3.564991 +step:8598 train loss:3.614997 +step:8599 train loss:3.590369 +step:8600 train loss:3.593146 +step:8601 train loss:3.580199 +step:8602 train loss:3.557282 +step:8603 train loss:3.612757 +step:8604 train loss:3.558649 +step:8605 train loss:3.568497 +step:8606 train loss:3.584354 +step:8607 train loss:3.588753 +step:8608 train loss:3.635371 +step:8609 train loss:3.528901 +step:8610 train loss:3.613753 +step:8611 train loss:3.535614 +step:8612 train loss:3.614061 +step:8613 train loss:3.551975 +step:8614 train loss:3.610203 +step:8615 train loss:3.655631 +step:8616 train loss:3.534461 +step:8617 train loss:3.602389 +step:8618 train loss:3.578854 +step:8619 train loss:3.531652 +step:8620 train loss:3.572997 
+step:8621 train loss:3.605135 +step:8622 train loss:3.565395 +step:8623 train loss:3.575209 +step:8624 train loss:3.653145 +step:8625 train loss:3.572550 +step:8626 train loss:3.584216 +step:8627 train loss:3.578655 +step:8628 train loss:3.610872 +step:8629 train loss:3.518479 +step:8630 train loss:3.618754 +step:8631 train loss:3.562035 +step:8632 train loss:3.620258 +step:8633 train loss:3.564689 +step:8634 train loss:3.792856 +step:8635 train loss:3.590210 +step:8636 train loss:3.636595 +step:8637 train loss:3.561517 +step:8638 train loss:3.559036 +step:8639 train loss:3.622111 +step:8640 train loss:3.530720 +step:8641 train loss:3.633698 +step:8642 train loss:3.581997 +step:8643 train loss:3.689207 +step:8644 train loss:3.536689 +step:8645 train loss:3.604679 +step:8646 train loss:3.572930 +step:8647 train loss:3.589772 +step:8648 train loss:3.543960 +step:8649 train loss:3.625265 +step:8650 train loss:3.581333 +step:8651 train loss:3.594630 +step:8652 train loss:3.561249 +step:8653 train loss:3.594422 +step:8654 train loss:3.638089 +step:8655 train loss:3.568709 +step:8656 train loss:3.605209 +step:8657 train loss:3.612165 +step:8658 train loss:3.581831 +step:8659 train loss:3.572648 +step:8660 train loss:3.520423 +step:8661 train loss:3.579026 +step:8662 train loss:3.522404 +step:8663 train loss:3.592645 +step:8664 train loss:3.512558 +step:8665 train loss:3.528736 +step:8666 train loss:3.606543 +step:8667 train loss:3.501228 +step:8668 train loss:3.611991 +step:8669 train loss:3.644254 +step:8670 train loss:3.547922 +step:8671 train loss:3.544040 +step:8672 train loss:3.762599 +step:8673 train loss:3.531458 +step:8674 train loss:3.595339 +step:8675 train loss:3.641905 +step:8676 train loss:3.581277 +step:8677 train loss:3.607387 +step:8678 train loss:3.554608 +step:8679 train loss:3.612179 +step:8680 train loss:3.592493 +step:8681 train loss:3.593288 +step:8682 train loss:3.551697 +step:8683 train loss:3.564445 +step:8684 train loss:3.643263 +step:8685 train loss:3.582790 +step:8686 train loss:3.575935 +step:8687 train loss:3.529290 +step:8688 train loss:3.544114 +step:8689 train loss:3.616629 +step:8690 train loss:3.553062 +step:8691 train loss:3.633350 +step:8692 train loss:3.518948 +step:8693 train loss:3.610477 +step:8694 train loss:3.609973 +step:8695 train loss:3.593926 +step:8696 train loss:3.621836 +step:8697 train loss:3.571004 +step:8698 train loss:3.616260 +step:8699 train loss:3.564016 +step:8700 train loss:3.592261 +step:8701 train loss:3.554028 +step:8702 train loss:3.534866 +step:8703 train loss:3.556652 +step:8704 train loss:3.503784 +step:8705 train loss:3.592104 +step:8706 train loss:3.608232 +step:8707 train loss:3.606012 +step:8708 train loss:3.551126 +step:8709 train loss:3.615801 +step:8710 train loss:3.540401 +step:8711 train loss:3.597432 +step:8712 train loss:3.502110 +step:8713 train loss:3.576356 +step:8714 train loss:3.684456 +step:8715 train loss:3.539791 +step:8716 train loss:3.598504 +step:8717 train loss:3.565042 +step:8718 train loss:3.609196 +step:8719 train loss:3.574071 +step:8720 train loss:3.685176 +step:8721 train loss:3.579337 +step:8722 train loss:3.666342 +step:8723 train loss:3.540476 +step:8724 train loss:3.549276 +step:8725 train loss:3.581692 +step:8726 train loss:3.534674 +step:8727 train loss:3.611858 +step:8728 train loss:3.572723 +step:8729 train loss:3.573918 +step:8730 train loss:3.554286 +step:8731 train loss:3.555645 +step:8732 train loss:3.656877 +step:8733 train loss:3.580939 +step:8734 train loss:3.617903 +step:8735 train 
loss:3.689018 +step:8736 train loss:3.543241 +step:8737 train loss:3.573922 +step:8738 train loss:3.551574 +step:8739 train loss:3.612292 +step:8740 train loss:3.535100 +step:8741 train loss:3.588542 +step:8742 train loss:3.548144 +step:8743 train loss:3.581637 +step:8744 train loss:3.607836 +step:8745 train loss:3.647066 +step:8746 train loss:3.543819 +step:8747 train loss:3.646745 +step:8748 train loss:3.555766 +step:8749 train loss:3.595638 +step:8750 validation loss:3.520520 +step:8750 train loss:3.607950 +step:8751 train loss:3.642720 +step:8752 train loss:3.505088 +step:8753 train loss:3.548113 +step:8754 train loss:3.607177 +step:8755 train loss:3.584908 +step:8756 train loss:3.631482 +step:8757 train loss:3.542100 +step:8758 train loss:3.694621 +step:8759 train loss:3.545265 +step:8760 train loss:3.572986 +step:8761 train loss:3.653798 +step:8762 train loss:3.548401 +step:8763 train loss:3.522952 +step:8764 train loss:3.593345 +step:8765 train loss:3.663726 +step:8766 train loss:3.597250 +step:8767 train loss:3.551371 +step:8768 train loss:3.592124 +step:8769 train loss:3.563675 +step:8770 train loss:3.609995 +step:8771 train loss:3.580636 +step:8772 train loss:3.602931 +step:8773 train loss:3.564046 +step:8774 train loss:3.595373 +step:8775 train loss:3.595976 +step:8776 train loss:3.538057 +step:8777 train loss:3.577412 +step:8778 train loss:3.585228 +step:8779 train loss:3.604548 +step:8780 train loss:3.571901 +step:8781 train loss:3.573683 +step:8782 train loss:3.599159 +step:8783 train loss:3.578492 +step:8784 train loss:3.604437 +step:8785 train loss:3.589606 +step:8786 train loss:3.661694 +step:8787 train loss:3.607349 +step:8788 train loss:3.509124 +step:8789 train loss:3.609390 +step:8790 train loss:3.536099 +step:8791 train loss:3.587850 +step:8792 train loss:3.527735 +step:8793 train loss:3.614249 +step:8794 train loss:3.542137 +step:8795 train loss:3.610704 +step:8796 train loss:3.755730 +step:8797 train loss:3.496920 +step:8798 train loss:3.662320 +step:8799 train loss:3.574550 +step:8800 train loss:3.569458 +step:8801 train loss:3.588220 +step:8802 train loss:3.644726 +step:8803 train loss:3.602680 +step:8804 train loss:3.583049 +step:8805 train loss:3.603899 +step:8806 train loss:3.573594 +step:8807 train loss:3.563139 +step:8808 train loss:3.520340 +step:8809 train loss:3.641070 +step:8810 train loss:3.551794 +step:8811 train loss:3.535044 +step:8812 train loss:3.582111 +step:8813 train loss:3.490433 +step:8814 train loss:3.679929 +step:8815 train loss:3.527736 +step:8816 train loss:3.639716 +step:8817 train loss:3.579705 +step:8818 train loss:3.511953 +step:8819 train loss:3.629730 +step:8820 train loss:3.558568 +step:8821 train loss:3.583453 +step:8822 train loss:3.567178 +step:8823 train loss:3.579082 +step:8824 train loss:3.640737 +step:8825 train loss:3.614429 +step:8826 train loss:3.589525 +step:8827 train loss:3.550267 +step:8828 train loss:3.586552 +step:8829 train loss:3.568632 +step:8830 train loss:3.548656 +step:8831 train loss:3.621960 +step:8832 train loss:3.561592 +step:8833 train loss:3.590780 +step:8834 train loss:3.562611 +step:8835 train loss:3.497190 +step:8836 train loss:3.624027 +step:8837 train loss:3.526117 +step:8838 train loss:3.569546 +step:8839 train loss:3.558072 +step:8840 train loss:3.554237 +step:8841 train loss:3.574632 +step:8842 train loss:3.579104 +step:8843 train loss:3.593511 +step:8844 train loss:3.561697 +step:8845 train loss:3.579494 +step:8846 train loss:3.548226 +step:8847 train loss:3.582430 +step:8848 train loss:3.633008 
+step:8849 train loss:3.611003 +step:8850 train loss:3.606387 +step:8851 train loss:3.488264 +step:8852 train loss:3.592314 +step:8853 train loss:3.574281 +step:8854 train loss:3.542879 +step:8855 train loss:3.613064 +step:8856 train loss:3.604892 +step:8857 train loss:3.672520 +step:8858 train loss:3.537324 +step:8859 train loss:3.606478 +step:8860 train loss:3.569826 +step:8861 train loss:3.547226 +step:8862 train loss:3.553498 +step:8863 train loss:3.534081 +step:8864 train loss:3.602153 +step:8865 train loss:3.596067 +step:8866 train loss:3.474038 +step:8867 train loss:3.581297 +step:8868 train loss:3.609292 +step:8869 train loss:3.690779 +step:8870 train loss:3.573688 +step:8871 train loss:3.592423 +step:8872 train loss:3.581130 +step:8873 train loss:3.578834 +step:8874 train loss:3.629658 +step:8875 train loss:3.566830 +step:8876 train loss:3.603377 +step:8877 train loss:3.585933 +step:8878 train loss:3.635493 +step:8879 train loss:3.598336 +step:8880 train loss:3.544878 +step:8881 train loss:3.507275 +step:8882 train loss:3.581107 +step:8883 train loss:3.567592 +step:8884 train loss:3.654240 +step:8885 train loss:3.588913 +step:8886 train loss:3.592190 +step:8887 train loss:3.619842 +step:8888 train loss:3.580406 +step:8889 train loss:3.582086 +step:8890 train loss:3.578235 +step:8891 train loss:3.545758 +step:8892 train loss:3.629536 +step:8893 train loss:3.569955 +step:8894 train loss:3.584068 +step:8895 train loss:3.614815 +step:8896 train loss:3.533723 +step:8897 train loss:3.623704 +step:8898 train loss:3.557050 +step:8899 train loss:3.579090 +step:8900 train loss:3.547187 +step:8901 train loss:3.559716 +step:8902 train loss:3.604206 +step:8903 train loss:3.542253 +step:8904 train loss:3.591089 +step:8905 train loss:3.564405 +step:8906 train loss:3.553435 +step:8907 train loss:3.572107 +step:8908 train loss:3.634014 +step:8909 train loss:3.579643 +step:8910 train loss:3.540894 +step:8911 train loss:3.638669 +step:8912 train loss:3.535935 +step:8913 train loss:3.549127 +step:8914 train loss:3.644811 +step:8915 train loss:3.577915 +step:8916 train loss:3.612648 +step:8917 train loss:3.570412 +step:8918 train loss:3.574334 +step:8919 train loss:3.558167 +step:8920 train loss:3.583524 +step:8921 train loss:3.582666 +step:8922 train loss:3.561851 +step:8923 train loss:3.751537 +step:8924 train loss:3.646546 +step:8925 train loss:3.576079 +step:8926 train loss:3.587676 +step:8927 train loss:3.611235 +step:8928 train loss:3.567926 +step:8929 train loss:3.559595 +step:8930 train loss:3.620774 +step:8931 train loss:3.529721 +step:8932 train loss:3.631059 +step:8933 train loss:3.539807 +step:8934 train loss:3.575107 +step:8935 train loss:3.590564 +step:8936 train loss:3.622886 +step:8937 train loss:3.627467 +step:8938 train loss:3.567638 +step:8939 train loss:3.629535 +step:8940 train loss:3.587077 +step:8941 train loss:3.528506 +step:8942 train loss:3.605407 +step:8943 train loss:3.538694 +step:8944 train loss:3.587772 +step:8945 train loss:3.604767 +step:8946 train loss:3.447922 +step:8947 train loss:3.646685 +step:8948 train loss:3.489041 +step:8949 train loss:3.493043 +step:8950 train loss:3.539428 +step:8951 train loss:3.572388 +step:8952 train loss:3.597303 +step:8953 train loss:3.551527 +step:8954 train loss:3.656238 +step:8955 train loss:3.569921 +step:8956 train loss:3.594464 +step:8957 train loss:3.586862 +step:8958 train loss:3.565034 +step:8959 train loss:3.553315 +step:8960 train loss:3.521769 +step:8961 train loss:3.543426 +step:8962 train loss:3.600315 +step:8963 train 
loss:3.577039 +step:8964 train loss:3.561155 +step:8965 train loss:3.602131 +step:8966 train loss:3.558736 +step:8967 train loss:3.538355 +step:8968 train loss:3.525657 +step:8969 train loss:3.515482 +step:8970 train loss:3.592805 +step:8971 train loss:3.545062 +step:8972 train loss:3.739268 +step:8973 train loss:3.630045 +step:8974 train loss:3.585896 +step:8975 train loss:3.588919 +step:8976 train loss:3.555618 +step:8977 train loss:3.640944 +step:8978 train loss:3.621531 +step:8979 train loss:3.542072 +step:8980 train loss:3.633483 +step:8981 train loss:3.586023 +step:8982 train loss:3.561363 +step:8983 train loss:3.504498 +step:8984 train loss:3.627197 +step:8985 train loss:3.547373 +step:8986 train loss:3.583045 +step:8987 train loss:3.556098 +step:8988 train loss:3.605570 +step:8989 train loss:3.514369 +step:8990 train loss:3.653124 +step:8991 train loss:3.504847 +step:8992 train loss:3.564533 +step:8993 train loss:3.656648 +step:8994 train loss:3.557613 +step:8995 train loss:3.588207 +step:8996 train loss:3.558503 +step:8997 train loss:3.501966 +step:8998 train loss:3.510758 +step:8999 train loss:3.533686 +step:9000 validation loss:3.513465 total_sharp:3.7629e-03 L1_sharp:6.4525e-02 L2_sharp:5.0301e-02 L3_sharp:7.6462e-02 L4_sharp:5.1079e-02 L5_sharp:5.3025e-02 L6_sharp:6.7828e-02 L7_sharp:8.3971e-02 L8_sharp:8.7006e-02 L9_sharp:8.1508e-02 L10_sharp:6.7276e-02 L11_sharp:6.9082e-02 L12_sharp:1.4100e-01 total_fnorm:1.3568e+00 total_l1_linf:7.4888e+03 total_spectral:1.3568e+00 L1_fnorm:3.1859e-02 L2_fnorm:3.1091e-02 L3_fnorm:3.1372e-02 L4_fnorm:3.1543e-02 L5_fnorm:3.2112e-02 L6_fnorm:3.2233e-02 L7_fnorm:3.2198e-02 L8_fnorm:3.2094e-02 L9_fnorm:3.2142e-02 L10_fnorm:3.2241e-02 L11_fnorm:3.2518e-02 L12_fnorm:3.2398e-02 L1_l1linf:3.0349e-01 L2_l1linf:3.3795e-01 L3_l1linf:3.4786e-01 L4_l1linf:3.3945e-01 L5_l1linf:3.2292e-01 L6_l1linf:3.1616e-01 L7_l1linf:2.9118e-01 L8_l1linf:2.9613e-01 L9_l1linf:3.0863e-01 L10_l1linf:3.2971e-01 L11_l1linf:3.5921e-01 L12_l1linf:3.6039e-01 L1_spectral:6.8833e-03 L2_spectral:7.5804e-03 L3_spectral:7.8340e-03 L4_spectral:7.6247e-03 L5_spectral:7.2670e-03 L6_spectral:7.0641e-03 L7_spectral:6.5687e-03 L8_spectral:6.6620e-03 L9_spectral:6.9500e-03 L10_spectral:7.4119e-03 L11_spectral:8.0753e-03 L12_spectral:8.0534e-03 ip_v_neg_g:3.5138e-03 cos_v_neg_g:4.4494e-04 v_norm:1.3568e+00 g_norm:5.8204e+00 hv_norm:1.1782e+00 cos_v_hv:4.3335e-03 hg_norm:3.6387e+03 cos_g_hg:4.3197e-01 v_par:1.6756e-05 v_perp:1.3568e+00 L1_cos_v_neg_g:3.8300e-03 L1_v_norm:3.1859e-02 L2_cos_v_neg_g:2.8118e-03 L2_v_norm:3.1091e-02 L3_cos_v_neg_g:5.0939e-03 L3_v_norm:3.1372e-02 L4_cos_v_neg_g:5.4573e-03 L4_v_norm:3.1543e-02 L5_cos_v_neg_g:6.1749e-03 L5_v_norm:3.2112e-02 L6_cos_v_neg_g:5.9781e-03 L6_v_norm:3.2233e-02 L7_cos_v_neg_g:5.5453e-03 L7_v_norm:3.2198e-02 L8_cos_v_neg_g:5.6046e-03 L8_v_norm:3.2094e-02 L9_cos_v_neg_g:5.9026e-03 L9_v_norm:3.2142e-02 L10_cos_v_neg_g:5.0612e-03 L10_v_norm:3.2241e-02 L11_cos_v_neg_g:4.5795e-03 L11_v_norm:3.2518e-02 L12_cos_v_neg_g:8.2405e-04 L12_v_norm:3.2398e-02 +step:9000 train loss:3.620410 +step:9001 train loss:3.590867 +step:9002 train loss:3.590169 +step:9003 train loss:3.535095 +step:9004 train loss:3.532728 +step:9005 train loss:3.546524 +step:9006 train loss:3.550931 +step:9007 train loss:3.566292 +step:9008 train loss:3.528100 +step:9009 train loss:3.522495 +step:9010 train loss:3.555460 +step:9011 train loss:3.552495 +step:9012 train loss:3.662848 +step:9013 train loss:3.490049 +step:9014 train loss:3.562003 +step:9015 train loss:3.567979 +step:9016 
train loss:3.640051 +step:9017 train loss:3.583082 +step:9018 train loss:3.507308 +step:9019 train loss:3.586735 +step:9020 train loss:3.599023 +step:9021 train loss:3.555398 +step:9022 train loss:3.568515 +step:9023 train loss:3.567484 +step:9024 train loss:3.585981 +step:9025 train loss:3.570140 +step:9026 train loss:3.528885 +step:9027 train loss:3.574694 +step:9028 train loss:3.596751 +step:9029 train loss:3.614590 +step:9030 train loss:3.608725 +step:9031 train loss:3.573719 +step:9032 train loss:3.581949 +step:9033 train loss:3.570864 +step:9034 train loss:3.581012 +step:9035 train loss:3.582483 +step:9036 train loss:3.531219 +step:9037 train loss:3.526898 +step:9038 train loss:3.648204 +step:9039 train loss:3.553129 +step:9040 train loss:3.567697 +step:9041 train loss:3.617325 +step:9042 train loss:3.475143 +step:9043 train loss:3.567192 +step:9044 train loss:3.586305 +step:9045 train loss:3.530696 +step:9046 train loss:3.574111 +step:9047 train loss:3.569912 +step:9048 train loss:3.548945 +step:9049 train loss:3.582853 +step:9050 train loss:3.536568 +step:9051 train loss:3.579496 +step:9052 train loss:3.508448 +step:9053 train loss:3.628030 +step:9054 train loss:3.643048 +step:9055 train loss:3.565253 +step:9056 train loss:3.631350 +step:9057 train loss:3.485591 +step:9058 train loss:3.566187 +step:9059 train loss:3.645558 +step:9060 train loss:3.575496 +step:9061 train loss:3.603312 +step:9062 train loss:3.532551 +step:9063 train loss:3.663908 +step:9064 train loss:3.551651 +step:9065 train loss:3.562229 +step:9066 train loss:3.580220 +step:9067 train loss:3.545109 +step:9068 train loss:3.614888 +step:9069 train loss:3.574734 +step:9070 train loss:3.625675 +step:9071 train loss:3.555755 +step:9072 train loss:3.580032 +step:9073 train loss:3.542968 +step:9074 train loss:3.621096 +step:9075 train loss:3.569082 +step:9076 train loss:3.534351 +step:9077 train loss:3.617422 +step:9078 train loss:3.549819 +step:9079 train loss:3.594026 +step:9080 train loss:3.530634 +step:9081 train loss:3.565532 +step:9082 train loss:3.591320 +step:9083 train loss:3.624434 +step:9084 train loss:3.514120 +step:9085 train loss:3.581491 +step:9086 train loss:3.572899 +step:9087 train loss:3.515471 +step:9088 train loss:3.577898 +step:9089 train loss:3.593284 +step:9090 train loss:3.525497 +step:9091 train loss:3.630010 +step:9092 train loss:3.555750 +step:9093 train loss:3.549037 +step:9094 train loss:3.681253 +step:9095 train loss:3.544514 +step:9096 train loss:3.559489 +step:9097 train loss:3.541014 +step:9098 train loss:3.536482 +step:9099 train loss:3.663664 +step:9100 train loss:3.693451 +step:9101 train loss:3.612551 +step:9102 train loss:3.557804 +step:9103 train loss:3.561169 +step:9104 train loss:3.646465 +step:9105 train loss:3.513468 +step:9106 train loss:3.633173 +step:9107 train loss:3.572750 +step:9108 train loss:3.552013 +step:9109 train loss:3.576018 +step:9110 train loss:3.585098 +step:9111 train loss:3.559643 +step:9112 train loss:3.562874 +step:9113 train loss:3.591132 +step:9114 train loss:3.541040 +step:9115 train loss:3.565487 +step:9116 train loss:3.594649 +step:9117 train loss:3.597977 +step:9118 train loss:3.574678 +step:9119 train loss:3.495789 +step:9120 train loss:3.589726 +step:9121 train loss:3.626209 +step:9122 train loss:3.568695 +step:9123 train loss:3.586038 +step:9124 train loss:3.619063 +step:9125 train loss:3.566168 +step:9126 train loss:3.546941 +step:9127 train loss:3.579590 +step:9128 train loss:3.634250 +step:9129 train loss:3.589221 +step:9130 train loss:3.602882 
+step:9131 train loss:3.580689 +step:9132 train loss:3.591225 +step:9133 train loss:3.575237 +step:9134 train loss:3.550222 +step:9135 train loss:3.580215 +step:9136 train loss:3.576912 +step:9137 train loss:3.630485 +step:9138 train loss:3.548758 +step:9139 train loss:3.622723 +step:9140 train loss:3.549008 +step:9141 train loss:3.525347 +step:9142 train loss:3.703200 +step:9143 train loss:3.529842 +step:9144 train loss:3.626197 +step:9145 train loss:3.633769 +step:9146 train loss:3.548771 +step:9147 train loss:3.617260 +step:9148 train loss:3.643023 +step:9149 train loss:3.552179 +step:9150 train loss:3.569415 +step:9151 train loss:3.635426 +step:9152 train loss:3.591577 +step:9153 train loss:3.553072 +step:9154 train loss:3.570027 +step:9155 train loss:3.537371 +step:9156 train loss:3.538881 +step:9157 train loss:3.556303 +step:9158 train loss:3.541069 +step:9159 train loss:3.621598 +step:9160 train loss:3.509001 +step:9161 train loss:3.537538 +step:9162 train loss:3.625446 +step:9163 train loss:3.568904 +step:9164 train loss:3.541648 +step:9165 train loss:3.536790 +step:9166 train loss:3.594183 +step:9167 train loss:3.535266 +step:9168 train loss:3.580120 +step:9169 train loss:3.512446 +step:9170 train loss:3.535741 +step:9171 train loss:3.602488 +step:9172 train loss:3.525003 +step:9173 train loss:3.648486 +step:9174 train loss:3.578449 +step:9175 train loss:3.551764 +step:9176 train loss:3.537078 +step:9177 train loss:3.581780 +step:9178 train loss:3.525339 +step:9179 train loss:3.487766 +step:9180 train loss:3.581768 +step:9181 train loss:3.589579 +step:9182 train loss:3.560813 +step:9183 train loss:3.566411 +step:9184 train loss:3.561352 +step:9185 train loss:3.579249 +step:9186 train loss:3.538347 +step:9187 train loss:3.613283 +step:9188 train loss:3.650824 +step:9189 train loss:3.569480 +step:9190 train loss:3.577186 +step:9191 train loss:3.568856 +step:9192 train loss:3.578322 +step:9193 train loss:3.580211 +step:9194 train loss:3.520972 +step:9195 train loss:3.508740 +step:9196 train loss:3.563962 +step:9197 train loss:3.516877 +step:9198 train loss:3.592742 +step:9199 train loss:3.542297 +step:9200 train loss:3.564742 +step:9201 train loss:3.603461 +step:9202 train loss:3.590432 +step:9203 train loss:3.546502 +step:9204 train loss:3.741950 +step:9205 train loss:3.660402 +step:9206 train loss:3.570264 +step:9207 train loss:3.625890 +step:9208 train loss:3.599139 +step:9209 train loss:3.622489 +step:9210 train loss:3.515477 +step:9211 train loss:3.540855 +step:9212 train loss:3.540558 +step:9213 train loss:3.606542 +step:9214 train loss:3.544656 +step:9215 train loss:3.611703 +step:9216 train loss:3.579802 +step:9217 train loss:3.518064 +step:9218 train loss:3.603462 +step:9219 train loss:3.567223 +step:9220 train loss:3.611970 +step:9221 train loss:3.661085 +step:9222 train loss:3.606887 +step:9223 train loss:3.773591 +step:9224 train loss:3.611542 +step:9225 train loss:3.542444 +step:9226 train loss:3.557767 +step:9227 train loss:3.577709 +step:9228 train loss:3.579131 +step:9229 train loss:3.540054 +step:9230 train loss:3.600907 +step:9231 train loss:3.485847 +step:9232 train loss:3.541850 +step:9233 train loss:3.564870 +step:9234 train loss:3.621870 +step:9235 train loss:3.623163 +step:9236 train loss:3.530439 +step:9237 train loss:3.591723 +step:9238 train loss:3.566580 +step:9239 train loss:3.560017 +step:9240 train loss:3.528286 +step:9241 train loss:3.558339 +step:9242 train loss:3.566876 +step:9243 train loss:3.564857 +step:9244 train loss:3.542076 +step:9245 train 
loss:3.550128 +step:9246 train loss:3.544443 +step:9247 train loss:3.559579 +step:9248 train loss:3.566561 +step:9249 train loss:3.564453 +step:9250 validation loss:3.510799 +step:9250 train loss:3.606739 +step:9251 train loss:3.548923 +step:9252 train loss:3.613920 +step:9253 train loss:3.612259 +step:9254 train loss:3.536718 +step:9255 train loss:3.656267 +step:9256 train loss:3.536787 +step:9257 train loss:3.478364 +step:9258 train loss:3.555729 +step:9259 train loss:3.563190 +step:9260 train loss:3.659691 +step:9261 train loss:3.540053 +step:9262 train loss:3.609702 +step:9263 train loss:3.509022 +step:9264 train loss:3.664173 +step:9265 train loss:3.682507 +step:9266 train loss:3.614329 +step:9267 train loss:3.562275 +step:9268 train loss:3.552464 +step:9269 train loss:3.581933 +step:9270 train loss:3.500866 +step:9271 train loss:3.614971 +step:9272 train loss:3.556191 +step:9273 train loss:3.573072 +step:9274 train loss:3.573146 +step:9275 train loss:3.575389 +step:9276 train loss:3.601842 +step:9277 train loss:3.576319 +step:9278 train loss:3.591124 +step:9279 train loss:3.583201 +step:9280 train loss:3.581863 +step:9281 train loss:3.554471 +step:9282 train loss:3.675282 +step:9283 train loss:3.567058 +step:9284 train loss:3.527830 +step:9285 train loss:3.542376 +step:9286 train loss:3.599310 +step:9287 train loss:3.572965 +step:9288 train loss:3.576584 +step:9289 train loss:3.549015 +step:9290 train loss:3.577658 +step:9291 train loss:3.554411 +step:9292 train loss:3.589203 +step:9293 train loss:3.652233 +step:9294 train loss:3.570652 +step:9295 train loss:3.558718 +step:9296 train loss:3.506765 +step:9297 train loss:3.577415 +step:9298 train loss:3.520055 +step:9299 train loss:3.498439 +step:9300 train loss:3.605199 +step:9301 train loss:3.632950 +step:9302 train loss:3.569998 +step:9303 train loss:3.620161 +step:9304 train loss:3.542412 +step:9305 train loss:3.532250 +step:9306 train loss:3.538162 +step:9307 train loss:3.536296 +step:9308 train loss:3.508885 +step:9309 train loss:3.498443 +step:9310 train loss:3.558571 +step:9311 train loss:3.612009 +step:9312 train loss:3.567849 +step:9313 train loss:3.513127 +step:9314 train loss:3.542199 +step:9315 train loss:3.576336 +step:9316 train loss:3.562240 +step:9317 train loss:3.534983 +step:9318 train loss:3.621791 +step:9319 train loss:3.532890 +step:9320 train loss:3.552484 +step:9321 train loss:3.567141 +step:9322 train loss:3.572875 +step:9323 train loss:3.646117 +step:9324 train loss:3.595088 +step:9325 train loss:3.531999 +step:9326 train loss:3.607759 +step:9327 train loss:3.604254 +step:9328 train loss:3.607268 +step:9329 train loss:3.496596 +step:9330 train loss:3.662086 +step:9331 train loss:3.591806 +step:9332 train loss:3.613038 +step:9333 train loss:3.634823 +step:9334 train loss:3.568640 +step:9335 train loss:3.663808 +step:9336 train loss:3.624434 +step:9337 train loss:3.578694 +step:9338 train loss:3.631396 +step:9339 train loss:3.608518 +step:9340 train loss:3.569048 +step:9341 train loss:3.654142 +step:9342 train loss:3.555078 +step:9343 train loss:3.548704 +step:9344 train loss:3.550436 +step:9345 train loss:3.689379 +step:9346 train loss:3.525525 +step:9347 train loss:3.544011 +step:9348 train loss:3.570904 +step:9349 train loss:3.515898 +step:9350 train loss:3.589567 +step:9351 train loss:3.568109 +step:9352 train loss:3.556653 +step:9353 train loss:3.585641 +step:9354 train loss:3.553918 +step:9355 train loss:3.548943 +step:9356 train loss:3.589864 +step:9357 train loss:3.549979 +step:9358 train loss:3.580120 
+step:9359 train loss:3.517754 +step:9360 train loss:3.540231 +step:9361 train loss:3.536345 +step:9362 train loss:3.526804 +step:9363 train loss:3.593135 +step:9364 train loss:3.569526 +step:9365 train loss:3.574482 +step:9366 train loss:3.569857 +step:9367 train loss:3.583207 +step:9368 train loss:3.556539 +step:9369 train loss:3.557274 +step:9370 train loss:3.567743 +step:9371 train loss:3.579091 +step:9372 train loss:3.552001 +step:9373 train loss:3.533997 +step:9374 train loss:3.569685 +step:9375 train loss:3.582718 +step:9376 train loss:3.525199 +step:9377 train loss:3.592540 +step:9378 train loss:3.598475 +step:9379 train loss:3.621481 +step:9380 train loss:3.555549 +step:9381 train loss:3.565786 +step:9382 train loss:3.540643 +step:9383 train loss:3.537546 +step:9384 train loss:3.505954 +step:9385 train loss:3.578918 +step:9386 train loss:3.603677 +step:9387 train loss:3.581370 +step:9388 train loss:3.518564 +step:9389 train loss:3.535999 +step:9390 train loss:3.584666 +step:9391 train loss:3.587194 +step:9392 train loss:3.547841 +step:9393 train loss:3.540688 +step:9394 train loss:3.566721 +step:9395 train loss:3.562534 +step:9396 train loss:3.709430 +step:9397 train loss:3.597765 +step:9398 train loss:3.619762 +step:9399 train loss:3.571631 +step:9400 train loss:3.570231 +step:9401 train loss:3.567119 +step:9402 train loss:3.569001 +step:9403 train loss:3.500779 +step:9404 train loss:3.575878 +step:9405 train loss:3.534647 +step:9406 train loss:3.588308 +step:9407 train loss:3.534268 +step:9408 train loss:3.469184 +step:9409 train loss:3.532261 +step:9410 train loss:3.613551 +step:9411 train loss:3.576928 +step:9412 train loss:3.604900 +step:9413 train loss:3.623743 +step:9414 train loss:3.559919 +step:9415 train loss:3.550605 +step:9416 train loss:3.569204 +step:9417 train loss:3.520921 +step:9418 train loss:3.550145 +step:9419 train loss:3.520949 +step:9420 train loss:3.539710 +step:9421 train loss:3.585547 +step:9422 train loss:3.539378 +step:9423 train loss:3.600959 +step:9424 train loss:3.537670 +step:9425 train loss:3.583687 +step:9426 train loss:3.585576 +step:9427 train loss:3.558190 +step:9428 train loss:3.666177 +step:9429 train loss:3.553467 +step:9430 train loss:3.511629 +step:9431 train loss:3.603212 +step:9432 train loss:3.566624 +step:9433 train loss:3.604626 +step:9434 train loss:3.558322 +step:9435 train loss:3.585357 +step:9436 train loss:3.554773 +step:9437 train loss:3.566315 +step:9438 train loss:3.561654 +step:9439 train loss:3.559282 +step:9440 train loss:3.544653 +step:9441 train loss:3.565932 +step:9442 train loss:3.500746 +step:9443 train loss:3.554551 +step:9444 train loss:3.622346 +step:9445 train loss:3.553188 +step:9446 train loss:3.527295 +step:9447 train loss:3.597151 +step:9448 train loss:3.532752 +step:9449 train loss:3.556956 +step:9450 train loss:3.594121 +step:9451 train loss:3.513485 +step:9452 train loss:3.567434 +step:9453 train loss:3.543587 +step:9454 train loss:3.609113 +step:9455 train loss:3.588346 +step:9456 train loss:3.509578 +step:9457 train loss:3.561320 +step:9458 train loss:3.549976 +step:9459 train loss:3.538631 +step:9460 train loss:3.584409 +step:9461 train loss:3.611164 +step:9462 train loss:3.557497 +step:9463 train loss:3.589029 +step:9464 train loss:3.544964 +step:9465 train loss:3.629210 +step:9466 train loss:3.580589 +step:9467 train loss:3.606195 +step:9468 train loss:3.550128 +step:9469 train loss:3.539555 +step:9470 train loss:3.537783 +step:9471 train loss:3.576287 +step:9472 train loss:3.600425 +step:9473 train 
loss:3.591361 +step:9474 train loss:3.534624 +step:9475 train loss:3.527790 +step:9476 train loss:3.748426 +step:9477 train loss:3.615769 +step:9478 train loss:3.594906 +step:9479 train loss:3.694178 +step:9480 train loss:3.541896 +step:9481 train loss:3.573414 +step:9482 train loss:3.600069 +step:9483 train loss:3.557087 +step:9484 train loss:3.587440 +step:9485 train loss:3.505871 +step:9486 train loss:3.545808 +step:9487 train loss:3.578610 +step:9488 train loss:3.528474 +step:9489 train loss:3.579230 +step:9490 train loss:3.543199 +step:9491 train loss:3.587870 +step:9492 train loss:3.607750 +step:9493 train loss:3.575491 +step:9494 train loss:3.588976 +step:9495 train loss:3.540128 +step:9496 train loss:3.602585 +step:9497 train loss:3.615613 +step:9498 train loss:3.562718 +step:9499 train loss:3.611945 +step:9500 validation loss:3.508835 total_sharp:5.5488e-03 L1_sharp:4.7240e-01 L2_sharp:1.1277e-01 L3_sharp:1.2219e-01 L4_sharp:6.3200e-02 L5_sharp:7.6268e-02 L6_sharp:9.9050e-02 L7_sharp:1.3744e-01 L8_sharp:1.2986e-01 L9_sharp:8.8546e-02 L10_sharp:7.3622e-02 L11_sharp:6.3518e-02 L12_sharp:1.0213e-01 total_fnorm:1.3186e+00 total_l1_linf:7.2748e+03 total_spectral:1.3186e+00 L1_fnorm:3.2494e-02 L2_fnorm:3.1044e-02 L3_fnorm:3.0834e-02 L4_fnorm:3.1522e-02 L5_fnorm:3.1965e-02 L6_fnorm:3.2258e-02 L7_fnorm:3.2204e-02 L8_fnorm:3.2220e-02 L9_fnorm:3.2038e-02 L10_fnorm:3.2248e-02 L11_fnorm:3.2374e-02 L12_fnorm:3.2443e-02 L1_l1linf:3.4945e-01 L2_l1linf:3.4428e-01 L3_l1linf:3.3535e-01 L4_l1linf:3.3099e-01 L5_l1linf:3.2231e-01 L6_l1linf:3.1621e-01 L7_l1linf:3.0762e-01 L8_l1linf:3.0979e-01 L9_l1linf:3.0687e-01 L10_l1linf:3.2158e-01 L11_l1linf:3.5286e-01 L12_l1linf:3.5118e-01 L1_spectral:7.8138e-03 L2_spectral:7.7086e-03 L3_spectral:7.5355e-03 L4_spectral:7.4030e-03 L5_spectral:7.2048e-03 L6_spectral:7.0201e-03 L7_spectral:6.9020e-03 L8_spectral:6.9165e-03 L9_spectral:6.8861e-03 L10_spectral:7.2210e-03 L11_spectral:7.9080e-03 L12_spectral:7.9632e-03 ip_v_neg_g:4.1418e-03 cos_v_neg_g:6.1164e-04 v_norm:1.3186e+00 g_norm:5.1356e+00 hv_norm:1.4299e+00 cos_v_hv:5.1169e-03 hg_norm:6.7142e+02 cos_g_hg:4.5770e-01 v_par:1.6868e-05 v_perp:1.3186e+00 L1_cos_v_neg_g:1.1521e-02 L1_v_norm:3.2494e-02 L2_cos_v_neg_g:9.8666e-03 L2_v_norm:3.1044e-02 L3_cos_v_neg_g:8.6916e-03 L3_v_norm:3.0834e-02 L4_cos_v_neg_g:8.2714e-03 L4_v_norm:3.1522e-02 L5_cos_v_neg_g:6.9441e-03 L5_v_norm:3.1965e-02 L6_cos_v_neg_g:7.7221e-03 L6_v_norm:3.2258e-02 L7_cos_v_neg_g:7.3917e-03 L7_v_norm:3.2204e-02 L8_cos_v_neg_g:7.4114e-03 L8_v_norm:3.2220e-02 L9_cos_v_neg_g:5.5263e-03 L9_v_norm:3.2038e-02 L10_cos_v_neg_g:5.4034e-03 L10_v_norm:3.2248e-02 L11_cos_v_neg_g:4.6571e-03 L11_v_norm:3.2374e-02 L12_cos_v_neg_g:3.5585e-03 L12_v_norm:3.2443e-02 +step:9500 train loss:3.606474 +step:9501 train loss:3.583459 +step:9502 train loss:3.556185 +step:9503 train loss:3.571774 +step:9504 train loss:3.527207 +step:9505 train loss:3.549036 +step:9506 train loss:3.563231 +step:9507 train loss:3.554947 +step:9508 train loss:3.744673 +step:9509 train loss:3.560826 +step:9510 train loss:3.550779 +step:9511 train loss:3.574389 +step:9512 train loss:3.605481 +step:9513 train loss:3.595856 +step:9514 train loss:3.563233 +step:9515 train loss:3.464326 +step:9516 train loss:3.564334 +step:9517 train loss:3.601663 +step:9518 train loss:3.575845 +step:9519 train loss:3.588526 +step:9520 train loss:3.477007 +step:9521 train loss:3.467803 +step:9522 train loss:3.588110 +step:9523 train loss:3.585912 +step:9524 train loss:3.586149 +step:9525 train loss:3.628031 +step:9526 
train loss:3.646371 +step:9527 train loss:3.604911 +step:9528 train loss:3.536222 +step:9529 train loss:3.576725 +step:9530 train loss:3.625301 +step:9531 train loss:3.531428 +step:9532 train loss:3.579527 +step:9533 train loss:3.552446 +step:9534 train loss:3.634978 +step:9535 train loss:3.553662 +step:9536 train loss:3.535893 +step:9537 train loss:3.485022 +step:9538 train loss:3.500384 +step:9539 train loss:3.575425 +step:9540 train loss:3.492865 +step:9541 train loss:3.551858 +step:9542 train loss:3.679085 +step:9543 train loss:3.575846 +step:9544 train loss:3.614790 +step:9545 train loss:3.548223 +step:9546 train loss:3.571510 +step:9547 train loss:3.620111 +step:9548 train loss:3.555651 +step:9549 train loss:3.526421 +step:9550 train loss:3.559892 +step:9551 train loss:3.549493 +step:9552 train loss:3.574190 +step:9553 train loss:3.571975 +step:9554 train loss:3.615512 +step:9555 train loss:3.616560 +step:9556 train loss:3.528425 +step:9557 train loss:3.546964 +step:9558 train loss:3.611662 +step:9559 train loss:3.622504 +step:9560 train loss:3.530364 +step:9561 train loss:3.557688 +step:9562 train loss:3.598474 +step:9563 train loss:3.543997 +step:9564 train loss:3.579138 +step:9565 train loss:3.558877 +step:9566 train loss:3.531521 +step:9567 train loss:3.592778 +step:9568 train loss:3.569494 +step:9569 train loss:3.609912 +step:9570 train loss:3.505159 +step:9571 train loss:3.578808 +step:9572 train loss:3.521029 +step:9573 train loss:3.555046 +step:9574 train loss:3.529071 +step:9575 train loss:3.599966 +step:9576 train loss:3.490403 +step:9577 train loss:3.541784 +step:9578 train loss:3.545734 +step:9579 train loss:3.542519 +step:9580 train loss:3.610309 +step:9581 train loss:3.598877 +step:9582 train loss:3.560955 +step:9583 train loss:3.595880 +step:9584 train loss:3.533320 +step:9585 train loss:3.551316 +step:9586 train loss:3.600437 +step:9587 train loss:3.570534 +step:9588 train loss:3.555950 +step:9589 train loss:3.615027 +step:9590 train loss:3.580798 +step:9591 train loss:3.546332 +step:9592 train loss:3.565524 +step:9593 train loss:3.565014 +step:9594 train loss:3.581534 +step:9595 train loss:3.559110 +step:9596 train loss:3.645849 +step:9597 train loss:3.551861 +step:9598 train loss:3.516332 +step:9599 train loss:3.522471 +step:9600 train loss:3.605429 +step:9601 train loss:3.523658 +step:9602 train loss:3.606942 +step:9603 train loss:3.600855 +step:9604 train loss:3.484150 +step:9605 train loss:3.571460 +step:9606 train loss:3.622607 +step:9607 train loss:3.547814 +step:9608 train loss:3.551138 +step:9609 train loss:3.563450 +step:9610 train loss:3.606829 +step:9611 train loss:3.541003 +step:9612 train loss:3.546066 +step:9613 train loss:3.585388 +step:9614 train loss:3.558335 +step:9615 train loss:3.742617 +step:9616 train loss:3.555125 +step:9617 train loss:3.554232 +step:9618 train loss:3.498252 +step:9619 train loss:3.561654 +step:9620 train loss:3.618309 +step:9621 train loss:3.540761 +step:9622 train loss:3.554004 +step:9623 train loss:3.595656 +step:9624 train loss:3.580028 +step:9625 train loss:3.594752 +step:9626 train loss:3.566655 +step:9627 train loss:3.647248 +step:9628 train loss:3.612204 +step:9629 train loss:3.524185 +step:9630 train loss:3.583027 +step:9631 train loss:3.570572 +step:9632 train loss:3.539793 +step:9633 train loss:3.580691 +step:9634 train loss:3.648522 +step:9635 train loss:3.553717 +step:9636 train loss:3.496808 +step:9637 train loss:3.632241 +step:9638 train loss:3.517161 +step:9639 train loss:3.487194 +step:9640 train loss:3.607572 
+step:9641 train loss:3.577393 +step:9642 train loss:3.552276 +step:9643 train loss:3.557715 +step:9644 train loss:3.613293 +step:9645 train loss:3.541151 +step:9646 train loss:3.577480 +step:9647 train loss:3.587047 +step:9648 train loss:3.538422 +step:9649 train loss:3.513497 +step:9650 train loss:3.529664 +step:9651 train loss:3.622284 +step:9652 train loss:3.600681 +step:9653 train loss:3.540810 +step:9654 train loss:3.523711 +step:9655 train loss:3.523148 +step:9656 train loss:3.510428 +step:9657 train loss:3.543623 +step:9658 train loss:3.603401 +step:9659 train loss:3.705687 +step:9660 train loss:3.486732 +step:9661 train loss:3.510681 +step:9662 train loss:3.531538 +step:9663 train loss:3.567787 +step:9664 train loss:3.621511 +step:9665 train loss:3.464363 +step:9666 train loss:3.507339 +step:9667 train loss:3.643449 +step:9668 train loss:3.626173 +step:9669 train loss:3.638233 +step:9670 train loss:3.619860 +step:9671 train loss:3.622548 +step:9672 train loss:3.534503 +step:9673 train loss:3.556685 +step:9674 train loss:3.566863 +step:9675 train loss:3.562814 +step:9676 train loss:3.525834 +step:9677 train loss:3.531703 +step:9678 train loss:3.569433 +step:9679 train loss:3.558956 +step:9680 train loss:3.554208 +step:9681 train loss:3.543007 +step:9682 train loss:3.609532 +step:9683 train loss:3.584196 +step:9684 train loss:3.502046 +step:9685 train loss:3.584807 +step:9686 train loss:3.619609 +step:9687 train loss:3.525211 +step:9688 train loss:3.610547 +step:9689 train loss:3.712596 +step:9690 train loss:3.557523 +step:9691 train loss:3.543692 +step:9692 train loss:3.497777 +step:9693 train loss:3.502982 +step:9694 train loss:3.523202 +step:9695 train loss:3.627331 +step:9696 train loss:3.661081 +step:9697 train loss:3.569730 +step:9698 train loss:3.607284 +step:9699 train loss:3.565857 +step:9700 train loss:3.564661 +step:9701 train loss:3.614948 +step:9702 train loss:3.533073 +step:9703 train loss:3.553668 +step:9704 train loss:3.638537 +step:9705 train loss:3.538489 +step:9706 train loss:3.529594 +step:9707 train loss:3.579311 +step:9708 train loss:3.530789 +step:9709 train loss:3.551987 +step:9710 train loss:3.568736 +step:9711 train loss:3.543602 +step:9712 train loss:3.553764 +step:9713 train loss:3.602711 +step:9714 train loss:3.560069 +step:9715 train loss:3.583596 +step:9716 train loss:3.606529 +step:9717 train loss:3.522274 +step:9718 train loss:3.529578 +step:9719 train loss:3.614146 +step:9720 train loss:3.544181 +step:9721 train loss:3.532879 +step:9722 train loss:3.599226 +step:9723 train loss:3.544355 +step:9724 train loss:3.571510 +step:9725 train loss:3.625460 +step:9726 train loss:3.567202 +step:9727 train loss:3.541249 +step:9728 train loss:3.581933 +step:9729 train loss:3.609453 +step:9730 train loss:3.678954 +step:9731 train loss:3.601436 +step:9732 train loss:3.564763 +step:9733 train loss:3.603255 +step:9734 train loss:3.525380 +step:9735 train loss:3.633213 +step:9736 train loss:3.533029 +step:9737 train loss:3.592500 +step:9738 train loss:3.555252 +step:9739 train loss:3.628720 +step:9740 train loss:3.594372 +step:9741 train loss:3.533882 +step:9742 train loss:3.623558 +step:9743 train loss:3.502845 +step:9744 train loss:3.561516 +step:9745 train loss:3.521048 +step:9746 train loss:3.556633 +step:9747 train loss:3.547996 +step:9748 train loss:3.452166 +step:9749 train loss:3.545062 +step:9750 validation loss:3.500524 +step:9750 train loss:3.526679 +step:9751 train loss:3.667977 +step:9752 train loss:3.551547 +step:9753 train loss:3.513910 +step:9754 
train loss:3.543049 +step:9755 train loss:3.539018 +step:9756 train loss:3.541117 +step:9757 train loss:3.502469 +step:9758 train loss:3.503630 +step:9759 train loss:3.544535 +step:9760 train loss:3.494085 +step:9761 train loss:3.533829 +step:9762 train loss:3.529531 +step:9763 train loss:3.552832 +step:9764 train loss:3.535300 +step:9765 train loss:3.499838 +step:9766 train loss:3.586963 +step:9767 train loss:3.543629 +step:9768 train loss:3.556883 +step:9769 train loss:3.507112 +step:9770 train loss:3.509630 +step:9771 train loss:3.559304 +step:9772 train loss:3.576107 +step:9773 train loss:3.545479 +step:9774 train loss:3.519458 +step:9775 train loss:3.608889 +step:9776 train loss:3.603120 +step:9777 train loss:3.500246 +step:9778 train loss:3.501016 +step:9779 train loss:3.512739 +step:9780 train loss:3.506358 +step:9781 train loss:3.525885 +step:9782 train loss:3.604327 +step:9783 train loss:3.513040 +step:9784 train loss:3.543920 +step:9785 train loss:3.529720 +step:9786 train loss:3.570154 +step:9787 train loss:3.591780 +step:9788 train loss:3.521096 +step:9789 train loss:3.528555 +step:9790 train loss:3.490177 +step:9791 train loss:3.540402 +step:9792 train loss:3.555876 +step:9793 train loss:3.570279 +step:9794 train loss:3.550129 +step:9795 train loss:3.552587 +step:9796 train loss:3.537087 +step:9797 train loss:3.533099 +step:9798 train loss:3.545184 +step:9799 train loss:3.552120 +step:9800 train loss:3.620250 +step:9801 train loss:3.546590 +step:9802 train loss:3.602480 +step:9803 train loss:3.463675 +step:9804 train loss:3.558113 +step:9805 train loss:3.561856 +step:9806 train loss:3.539125 +step:9807 train loss:3.505939 +step:9808 train loss:3.418842 +step:9809 train loss:3.608414 +step:9810 train loss:3.560337 +step:9811 train loss:3.550623 +step:9812 train loss:3.520787 +step:9813 train loss:3.604454 +step:9814 train loss:3.591725 +step:9815 train loss:3.496517 +step:9816 train loss:3.498896 +step:9817 train loss:3.531109 +step:9818 train loss:3.557078 +step:9819 train loss:3.526013 +step:9820 train loss:3.599477 +step:9821 train loss:3.571642 +step:9822 train loss:3.550398 +step:9823 train loss:3.607908 +step:9824 train loss:3.515333 +step:9825 train loss:3.599703 +step:9826 train loss:3.594171 +step:9827 train loss:3.600031 +step:9828 train loss:3.517344 +step:9829 train loss:3.527823 +step:9830 train loss:3.510822 +step:9831 train loss:3.574549 +step:9832 train loss:3.580202 +step:9833 train loss:3.498624 +step:9834 train loss:3.546218 +step:9835 train loss:3.514410 +step:9836 train loss:3.573215 +step:9837 train loss:3.548829 +step:9838 train loss:3.587156 +step:9839 train loss:3.560334 +step:9840 train loss:3.529042 +step:9841 train loss:3.535625 +step:9842 train loss:3.597204 +step:9843 train loss:3.590215 +step:9844 train loss:3.542778 +step:9845 train loss:3.566688 +step:9846 train loss:3.508150 +step:9847 train loss:3.636112 +step:9848 train loss:3.559128 +step:9849 train loss:3.586207 +step:9850 train loss:3.502919 +step:9851 train loss:3.556630 +step:9852 train loss:3.520724 +step:9853 train loss:3.544311 +step:9854 train loss:3.551773 +step:9855 train loss:3.501476 +step:9856 train loss:3.505783 +step:9857 train loss:3.497041 +step:9858 train loss:3.561316 +step:9859 train loss:3.478930 +step:9860 train loss:3.716207 +step:9861 train loss:3.539755 +step:9862 train loss:3.509142 +step:9863 train loss:3.492701 +step:9864 train loss:3.615512 +step:9865 train loss:3.491551 +step:9866 train loss:3.535269 +step:9867 train loss:3.530767 +step:9868 train loss:3.592099 
+step:9869 train loss:3.554375 +step:9870 train loss:3.526434 +step:9871 train loss:3.564840 +step:9872 train loss:3.514430 +step:9873 train loss:3.559256 +step:9874 train loss:3.528580 +step:9875 train loss:3.529675 +step:9876 train loss:3.496171 +step:9877 train loss:3.543936 +step:9878 train loss:3.580711 +step:9879 train loss:3.578335 +step:9880 train loss:3.511447 +step:9881 train loss:3.562370 +step:9882 train loss:3.524329 +step:9883 train loss:3.531473 +step:9884 train loss:3.525236 +step:9885 train loss:3.589452 +step:9886 train loss:3.554808 +step:9887 train loss:3.557549 +step:9888 train loss:3.574881 +step:9889 train loss:3.610549 +step:9890 train loss:3.524436 +step:9891 train loss:3.529820 +step:9892 train loss:3.499621 +step:9893 train loss:3.619907 +step:9894 train loss:3.529155 +step:9895 train loss:3.465907 +step:9896 train loss:3.620811 +step:9897 train loss:3.498004 +step:9898 train loss:3.566169 +step:9899 train loss:3.544285 +step:9900 train loss:3.589637 +step:9901 train loss:3.514919 +step:9902 train loss:3.558419 +step:9903 train loss:3.530497 +step:9904 train loss:3.579144 +step:9905 train loss:3.484028 +step:9906 train loss:3.524019 +step:9907 train loss:3.529982 +step:9908 train loss:3.532008 +step:9909 train loss:3.545430 +step:9910 train loss:3.569953 +step:9911 train loss:3.648929 +step:9912 train loss:3.529757 +step:9913 train loss:3.531111 +step:9914 train loss:3.541418 +step:9915 train loss:3.542002 +step:9916 train loss:3.490600 +step:9917 train loss:3.526123 +step:9918 train loss:3.524568 +step:9919 train loss:3.686673 +step:9920 train loss:3.472738 +step:9921 train loss:3.566474 +step:9922 train loss:3.524799 +step:9923 train loss:3.580860 +step:9924 train loss:3.496203 +step:9925 train loss:3.554839 +step:9926 train loss:3.534071 +step:9927 train loss:3.575825 +step:9928 train loss:3.503873 +step:9929 train loss:3.540206 +step:9930 train loss:3.633450 +step:9931 train loss:3.594882 +step:9932 train loss:3.483745 +step:9933 train loss:3.577491 +step:9934 train loss:3.493161 +step:9935 train loss:3.613105 +step:9936 train loss:3.519524 +step:9937 train loss:3.546981 +step:9938 train loss:3.530077 +step:9939 train loss:3.599847 +step:9940 train loss:3.633170 +step:9941 train loss:3.509544 +step:9942 train loss:3.548409 +step:9943 train loss:3.676405 +step:9944 train loss:3.547261 +step:9945 train loss:3.566927 +step:9946 train loss:3.538877 +step:9947 train loss:3.490504 +step:9948 train loss:3.534994 +step:9949 train loss:3.432675 +step:9950 train loss:3.580785 +step:9951 train loss:3.501209 +step:9952 train loss:3.570541 +step:9953 train loss:3.531229 +step:9954 train loss:3.591971 +step:9955 train loss:3.560588 +step:9956 train loss:3.570398 +step:9957 train loss:3.542673 +step:9958 train loss:3.599047 +step:9959 train loss:3.496903 +step:9960 train loss:3.532912 +step:9961 train loss:3.539590 +step:9962 train loss:3.588809 +step:9963 train loss:3.481881 +step:9964 train loss:3.534359 +step:9965 train loss:3.536432 +step:9966 train loss:3.592329 +step:9967 train loss:3.509990 +step:9968 train loss:3.574793 +step:9969 train loss:3.490800 +step:9970 train loss:3.528161 +step:9971 train loss:3.570648 +step:9972 train loss:3.595566 +step:9973 train loss:3.570467 +step:9974 train loss:3.556756 +step:9975 train loss:3.526155 +step:9976 train loss:3.486856 +step:9977 train loss:3.540343 +step:9978 train loss:3.535123 +step:9979 train loss:3.546875 +step:9980 train loss:3.601071 +step:9981 train loss:3.506673 +step:9982 train loss:3.573553 +step:9983 train 
loss:3.492273 +step:9984 train loss:3.554245 +step:9985 train loss:3.497883 +step:9986 train loss:3.550786 +step:9987 train loss:3.587545 +step:9988 train loss:3.609019 +step:9989 train loss:3.503195 +step:9990 train loss:3.639886 +step:9991 train loss:3.480715 +step:9992 train loss:3.563216 +step:9993 train loss:3.550660 +step:9994 train loss:3.665786 +step:9995 train loss:3.604089 +step:9996 train loss:3.521888 +step:9997 train loss:3.561216 +step:9998 train loss:3.616819 +step:9999 train loss:3.581740 +step:10000 validation loss:3.494128 total_sharp:4.2809e-03 L1_sharp:2.6123e-01 L2_sharp:5.7804e-02 L3_sharp:8.1086e-02 L4_sharp:5.7026e-02 L5_sharp:5.5819e-02 L6_sharp:7.2920e-02 L7_sharp:8.9895e-02 L8_sharp:9.2356e-02 L9_sharp:7.3117e-02 L10_sharp:6.0301e-02 L11_sharp:5.1197e-02 L12_sharp:7.0118e-02 total_fnorm:1.2883e+00 total_l1_linf:7.1195e+03 total_spectral:1.2883e+00 L1_fnorm:3.1948e-02 L2_fnorm:3.1124e-02 L3_fnorm:3.1001e-02 L4_fnorm:3.1658e-02 L5_fnorm:3.1924e-02 L6_fnorm:3.2216e-02 L7_fnorm:3.2198e-02 L8_fnorm:3.2249e-02 L9_fnorm:3.2059e-02 L10_fnorm:3.2097e-02 L11_fnorm:3.2496e-02 L12_fnorm:3.2391e-02 L1_l1linf:3.1285e-01 L2_l1linf:3.4515e-01 L3_l1linf:3.3556e-01 L4_l1linf:3.4195e-01 L5_l1linf:3.1119e-01 L6_l1linf:3.1031e-01 L7_l1linf:2.8483e-01 L8_l1linf:3.0089e-01 L9_l1linf:2.9332e-01 L10_l1linf:3.2268e-01 L11_l1linf:3.7320e-01 L12_l1linf:3.4244e-01 L1_spectral:7.0773e-03 L2_spectral:7.7545e-03 L3_spectral:7.5872e-03 L4_spectral:7.7230e-03 L5_spectral:6.9790e-03 L6_spectral:7.0677e-03 L7_spectral:6.4184e-03 L8_spectral:6.7700e-03 L9_spectral:6.6258e-03 L10_spectral:7.2688e-03 L11_spectral:8.2844e-03 L12_spectral:7.7494e-03 ip_v_neg_g:1.5391e-03 cos_v_neg_g:2.0475e-04 v_norm:1.2883e+00 g_norm:5.8351e+00 hv_norm:1.1102e+00 cos_v_hv:4.9676e-03 hg_norm:1.9173e+03 cos_g_hg:5.2942e-01 v_par:9.7910e-06 v_perp:1.2883e+00 L1_cos_v_neg_g:4.3898e-04 L1_v_norm:3.1948e-02 L2_cos_v_neg_g:1.6522e-04 L2_v_norm:3.1124e-02 L3_cos_v_neg_g:1.9473e-04 L3_v_norm:3.1001e-02 L4_cos_v_neg_g:4.2516e-03 L4_v_norm:3.1658e-02 L5_cos_v_neg_g:2.8484e-03 L5_v_norm:3.1924e-02 L6_cos_v_neg_g:2.6802e-03 L6_v_norm:3.2216e-02 L7_cos_v_neg_g:2.8980e-03 L7_v_norm:3.2198e-02 L8_cos_v_neg_g:1.8994e-03 L8_v_norm:3.2249e-02 L9_cos_v_neg_g:1.6708e-03 L9_v_norm:3.2059e-02 L10_cos_v_neg_g:3.1486e-03 L10_v_norm:3.2097e-02 L11_cos_v_neg_g:2.2085e-03 L11_v_norm:3.2496e-02 L12_cos_v_neg_g:2.3348e-03 L12_v_norm:3.2391e-02 diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/config.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b6e16438d2186530f26ea9fd96a0d4a281ab936b --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/muon_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + 
"compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.0005, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 44, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "3413f689-2dc9-401f-8e18-48d5ccd9c284", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_1000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..188c9e0f5da509f9e804dcb176c32eb3152fcf59 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.2497953176498413, + "total_l1_linf_norm": 6897.74853515625, + "total_spectral_norm": 1.2497951984405518, + "layer_1_update_fnorm": 0.031134309247136116, + "layer_1_max_l1_linf_norm": 0.2214067131280899, + "layer_1_max_spectral_norm": 0.0047994074411690235, + "layer_2_update_fnorm": 0.0272360946983099, + "layer_2_max_l1_linf_norm": 0.20898659527301788, + "layer_2_max_spectral_norm": 0.0047026569955050945, + "layer_3_update_fnorm": 0.026406418532133102, + "layer_3_max_l1_linf_norm": 0.20318609476089478, + "layer_3_max_spectral_norm": 0.004670191090553999, + "layer_4_update_fnorm": 0.029058294370770454, + "layer_4_max_l1_linf_norm": 0.21591982245445251, + "layer_4_max_spectral_norm": 0.004874308127909899, + "layer_5_update_fnorm": 0.0295464638620615, + "layer_5_max_l1_linf_norm": 0.2299264371395111, + "layer_5_max_spectral_norm": 0.005166463553905487, + "layer_6_update_fnorm": 0.03032008372247219, + "layer_6_max_l1_linf_norm": 0.2271023690700531, + "layer_6_max_spectral_norm": 0.005113020073622465, + "layer_7_update_fnorm": 0.030809057876467705, + "layer_7_max_l1_linf_norm": 0.2283601462841034, + "layer_7_max_spectral_norm": 0.0056606074795126915, + "layer_8_update_fnorm": 0.031036047264933586, + "layer_8_max_l1_linf_norm": 0.22513136267662048, + "layer_8_max_spectral_norm": 0.0062605272978544235, + "layer_9_update_fnorm": 0.03151016682386398, + "layer_9_max_l1_linf_norm": 0.22552886605262756, + "layer_9_max_spectral_norm": 0.006371337920427322, + "layer_10_update_fnorm": 0.03174998238682747, + "layer_10_max_l1_linf_norm": 0.22781744599342346, + "layer_10_max_spectral_norm": 0.006783612072467804, + "layer_11_update_fnorm": 0.03160751238465309, + "layer_11_max_l1_linf_norm": 0.21319270133972168, + "layer_11_max_spectral_norm": 0.006418115925043821, + "layer_12_update_fnorm": 0.03212588280439377, + "layer_12_max_l1_linf_norm": 0.21392349898815155, + "layer_12_max_spectral_norm": 0.008311891928315163, + "total_sharpness": 0.029197225347161293, + "ip_v_neg_g": 0.02287415787577629, + "cos_v_neg_g": 0.001685126218944788, + "v_norm": 1.2497953176498413, + "g_norm": 10.861099243164062, + "hv_norm": 9.148089408874512, + "cos_v_hv": 0.003988871816545725, + "hg_norm": 16254.28515625, + "cos_g_hg": 0.6936747431755066, + "v_parallel_norm": 0.00010081403888761997, + "v_perp_norm": 1.2497953176498413, + "layer_1_v_norm": 0.031134309247136116, + "layer_1_cos_v_neg_g": 0.03466032072901726, + "layer_2_v_norm": 0.0272360946983099, + "layer_2_cos_v_neg_g": 0.027814583852887154, + "layer_3_v_norm": 0.026406418532133102, + "layer_3_cos_v_neg_g": 0.023214196786284447, 
+ "layer_4_v_norm": 0.029058294370770454, + "layer_4_cos_v_neg_g": 0.02074304223060608, + "layer_5_v_norm": 0.0295464638620615, + "layer_5_cos_v_neg_g": 0.020112084224820137, + "layer_6_v_norm": 0.03032008372247219, + "layer_6_cos_v_neg_g": 0.01836700551211834, + "layer_7_v_norm": 0.030809057876467705, + "layer_7_cos_v_neg_g": 0.01489335298538208, + "layer_8_v_norm": 0.031036045402288437, + "layer_8_cos_v_neg_g": 0.01185961626470089, + "layer_9_v_norm": 0.03151016682386398, + "layer_9_cos_v_neg_g": 0.011072631925344467, + "layer_10_v_norm": 0.03174998238682747, + "layer_10_cos_v_neg_g": 0.010572608560323715, + "layer_11_v_norm": 0.03160751238465309, + "layer_11_cos_v_neg_g": 0.009196407161653042, + "layer_12_v_norm": 0.03212588280439377, + "layer_12_cos_v_neg_g": 0.007309848442673683, + "layer_1_sharpness": 4.649795055389404, + "layer_2_sharpness": 1.7444565296173096, + "layer_3_sharpness": 0.8025549054145813, + "layer_4_sharpness": 0.5433351993560791, + "layer_5_sharpness": 0.4268454909324646, + "layer_6_sharpness": 0.2946578860282898, + "layer_7_sharpness": 0.1721954494714737, + "layer_8_sharpness": 0.10111598670482635, + "layer_9_sharpness": 0.07747790962457657, + "layer_10_sharpness": 0.06258402019739151, + "layer_11_sharpness": 0.05358270928263664, + "layer_12_sharpness": 0.04261442646384239 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_1500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..be68979b92f2ed26b68d86bc84e23b149be226c1 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3151967525482178, + "total_l1_linf_norm": 7254.1982421875, + "total_spectral_norm": 1.3151966333389282, + "layer_1_update_fnorm": 0.03173510730266571, + "layer_1_max_l1_linf_norm": 0.23565673828125, + "layer_1_max_spectral_norm": 0.005233236588537693, + "layer_2_update_fnorm": 0.028139619156718254, + "layer_2_max_l1_linf_norm": 0.23442494869232178, + "layer_2_max_spectral_norm": 0.005239923484623432, + "layer_3_update_fnorm": 0.02683684602379799, + "layer_3_max_l1_linf_norm": 0.24952450394630432, + "layer_3_max_spectral_norm": 0.005587048828601837, + "layer_4_update_fnorm": 0.02917381562292576, + "layer_4_max_l1_linf_norm": 0.24837373197078705, + "layer_4_max_spectral_norm": 0.00555040268227458, + "layer_5_update_fnorm": 0.029843980446457863, + "layer_5_max_l1_linf_norm": 0.25968700647354126, + "layer_5_max_spectral_norm": 0.005816956050693989, + "layer_6_update_fnorm": 0.030823074281215668, + "layer_6_max_l1_linf_norm": 0.2645050287246704, + "layer_6_max_spectral_norm": 0.005898747127503157, + "layer_7_update_fnorm": 0.031465720385313034, + "layer_7_max_l1_linf_norm": 0.2658010721206665, + "layer_7_max_spectral_norm": 0.005972776096314192, + "layer_8_update_fnorm": 0.031606949865818024, + "layer_8_max_l1_linf_norm": 0.26953598856925964, + "layer_8_max_spectral_norm": 0.006001221481710672, + "layer_9_update_fnorm": 0.031894199550151825, + "layer_9_max_l1_linf_norm": 0.26393038034439087, + "layer_9_max_spectral_norm": 0.0058965119533240795, + "layer_10_update_fnorm": 0.03198491781949997, + "layer_10_max_l1_linf_norm": 0.2659461498260498, + "layer_10_max_spectral_norm": 0.006112014874815941, + "layer_11_update_fnorm": 0.03159160166978836, + "layer_11_max_l1_linf_norm": 0.2681523561477661, + 
"layer_11_max_spectral_norm": 0.0059954095631837845, + "layer_12_update_fnorm": 0.03211161494255066, + "layer_12_max_l1_linf_norm": 0.28746461868286133, + "layer_12_max_spectral_norm": 0.006294894497841597, + "total_sharpness": 0.022447550669312477, + "ip_v_neg_g": 0.017225563526153564, + "cos_v_neg_g": 0.001253162045031786, + "v_norm": 1.3151967525482178, + "g_norm": 10.451424598693848, + "hv_norm": 7.319130897521973, + "cos_v_hv": 0.004033668432384729, + "hg_norm": 15586.7861328125, + "cos_g_hg": 0.6553531289100647, + "v_parallel_norm": 6.694739568047225e-05, + "v_perp_norm": 1.3151967525482178, + "layer_1_v_norm": 0.03173510730266571, + "layer_1_cos_v_neg_g": 0.02704157866537571, + "layer_2_v_norm": 0.028139619156718254, + "layer_2_cos_v_neg_g": 0.024509428068995476, + "layer_3_v_norm": 0.02683684416115284, + "layer_3_cos_v_neg_g": 0.02136324904859066, + "layer_4_v_norm": 0.02917381562292576, + "layer_4_cos_v_neg_g": 0.016065310686826706, + "layer_5_v_norm": 0.029843980446457863, + "layer_5_cos_v_neg_g": 0.015472866594791412, + "layer_6_v_norm": 0.03082307241857052, + "layer_6_cos_v_neg_g": 0.013303668238222599, + "layer_7_v_norm": 0.031465720385313034, + "layer_7_cos_v_neg_g": 0.011687584221363068, + "layer_8_v_norm": 0.031606949865818024, + "layer_8_cos_v_neg_g": 0.010229485109448433, + "layer_9_v_norm": 0.031894199550151825, + "layer_9_cos_v_neg_g": 0.008229545317590237, + "layer_10_v_norm": 0.03198491781949997, + "layer_10_cos_v_neg_g": 0.006549790967255831, + "layer_11_v_norm": 0.03159160166978836, + "layer_11_cos_v_neg_g": 0.005082378629595041, + "layer_12_v_norm": 0.03211161494255066, + "layer_12_cos_v_neg_g": 0.0039139739237725735, + "layer_1_sharpness": 1.6988714933395386, + "layer_2_sharpness": 1.0696545839309692, + "layer_3_sharpness": 0.7441144585609436, + "layer_4_sharpness": 0.4908061623573303, + "layer_5_sharpness": 0.4278990626335144, + "layer_6_sharpness": 0.3750459551811218, + "layer_7_sharpness": 0.2964353561401367, + "layer_8_sharpness": 0.17455700039863586, + "layer_9_sharpness": 0.12595896422863007, + "layer_10_sharpness": 0.09929011017084122, + "layer_11_sharpness": 0.11108856648206711, + "layer_12_sharpness": 0.21870961785316467 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_2000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..4f38a2ed0398bb8d0bb6c59da4171c686adbe25d --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3131535053253174, + "total_l1_linf_norm": 7260.4443359375, + "total_spectral_norm": 1.313153624534607, + "layer_1_update_fnorm": 0.03149265795946121, + "layer_1_max_l1_linf_norm": 0.19291949272155762, + "layer_1_max_spectral_norm": 0.004412845242768526, + "layer_2_update_fnorm": 0.029272347688674927, + "layer_2_max_l1_linf_norm": 0.2207791656255722, + "layer_2_max_spectral_norm": 0.005034022033214569, + "layer_3_update_fnorm": 0.027832714840769768, + "layer_3_max_l1_linf_norm": 0.22867104411125183, + "layer_3_max_spectral_norm": 0.0051942989230155945, + "layer_4_update_fnorm": 0.02958112210035324, + "layer_4_max_l1_linf_norm": 0.22772392630577087, + "layer_4_max_spectral_norm": 0.005168382544070482, + "layer_5_update_fnorm": 0.030176397413015366, + "layer_5_max_l1_linf_norm": 0.22998562455177307, + "layer_5_max_spectral_norm": 
0.005205140914767981, + "layer_6_update_fnorm": 0.0309869647026062, + "layer_6_max_l1_linf_norm": 0.23499131202697754, + "layer_6_max_spectral_norm": 0.005341915413737297, + "layer_7_update_fnorm": 0.03142860159277916, + "layer_7_max_l1_linf_norm": 0.23245544731616974, + "layer_7_max_spectral_norm": 0.005242680199444294, + "layer_8_update_fnorm": 0.03148205205798149, + "layer_8_max_l1_linf_norm": 0.24751518666744232, + "layer_8_max_spectral_norm": 0.005561273079365492, + "layer_9_update_fnorm": 0.031579144299030304, + "layer_9_max_l1_linf_norm": 0.24543076753616333, + "layer_9_max_spectral_norm": 0.005534414201974869, + "layer_10_update_fnorm": 0.03173594921827316, + "layer_10_max_l1_linf_norm": 0.2596050500869751, + "layer_10_max_spectral_norm": 0.005781584419310093, + "layer_11_update_fnorm": 0.031536005437374115, + "layer_11_max_l1_linf_norm": 0.2509411573410034, + "layer_11_max_spectral_norm": 0.005605346988886595, + "layer_12_update_fnorm": 0.031807154417037964, + "layer_12_max_l1_linf_norm": 0.27049505710601807, + "layer_12_max_spectral_norm": 0.0058924672193825245, + "total_sharpness": 0.014216816984117031, + "ip_v_neg_g": 0.011285219341516495, + "cos_v_neg_g": 0.0009002669830806553, + "v_norm": 1.3131535053253174, + "g_norm": 9.546038627624512, + "hv_norm": 4.897772789001465, + "cos_v_hv": 0.0038117049261927605, + "hg_norm": 7877.35205078125, + "cos_g_hg": 0.6456900835037231, + "v_parallel_norm": 3.696505518746562e-05, + "v_perp_norm": 1.3131535053253174, + "layer_1_v_norm": 0.03149265795946121, + "layer_1_cos_v_neg_g": 0.010106140747666359, + "layer_2_v_norm": 0.029272347688674927, + "layer_2_cos_v_neg_g": 0.010186096653342247, + "layer_3_v_norm": 0.027832714840769768, + "layer_3_cos_v_neg_g": 0.012135791592299938, + "layer_4_v_norm": 0.02958112210035324, + "layer_4_cos_v_neg_g": 0.012406393885612488, + "layer_5_v_norm": 0.030176397413015366, + "layer_5_cos_v_neg_g": 0.011905447579920292, + "layer_6_v_norm": 0.0309869647026062, + "layer_6_cos_v_neg_g": 0.01249791868031025, + "layer_7_v_norm": 0.03142860159277916, + "layer_7_cos_v_neg_g": 0.011243309825658798, + "layer_8_v_norm": 0.03148205205798149, + "layer_8_cos_v_neg_g": 0.011176821775734425, + "layer_9_v_norm": 0.031579144299030304, + "layer_9_cos_v_neg_g": 0.011289839632809162, + "layer_10_v_norm": 0.03173594921827316, + "layer_10_cos_v_neg_g": 0.011137941852211952, + "layer_11_v_norm": 0.031536005437374115, + "layer_11_cos_v_neg_g": 0.010923274792730808, + "layer_12_v_norm": 0.031807154417037964, + "layer_12_cos_v_neg_g": 0.009865792468190193, + "layer_1_sharpness": 0.4675735831260681, + "layer_2_sharpness": 0.4275911748409271, + "layer_3_sharpness": 0.5341774821281433, + "layer_4_sharpness": 0.29356032609939575, + "layer_5_sharpness": 0.24708491563796997, + "layer_6_sharpness": 0.24562661349773407, + "layer_7_sharpness": 0.18244421482086182, + "layer_8_sharpness": 0.15241779386997223, + "layer_9_sharpness": 0.14082801342010498, + "layer_10_sharpness": 0.12719760835170746, + "layer_11_sharpness": 0.1218533143401146, + "layer_12_sharpness": 0.16669656336307526 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_2500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..318d59290c06aff2dbb3a24d91a59ae687e613c8 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_2500.json @@ 
-0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3214240074157715, + "total_l1_linf_norm": 7300.9033203125, + "total_spectral_norm": 1.321424126625061, + "layer_1_update_fnorm": 0.0316227525472641, + "layer_1_max_l1_linf_norm": 0.22072985768318176, + "layer_1_max_spectral_norm": 0.005032250192016363, + "layer_2_update_fnorm": 0.029851149767637253, + "layer_2_max_l1_linf_norm": 0.2431216537952423, + "layer_2_max_spectral_norm": 0.005554588045924902, + "layer_3_update_fnorm": 0.029024876654148102, + "layer_3_max_l1_linf_norm": 0.2618820369243622, + "layer_3_max_spectral_norm": 0.005925255827605724, + "layer_4_update_fnorm": 0.030193839222192764, + "layer_4_max_l1_linf_norm": 0.26658231019973755, + "layer_4_max_spectral_norm": 0.005972638726234436, + "layer_5_update_fnorm": 0.030726106837391853, + "layer_5_max_l1_linf_norm": 0.26599496603012085, + "layer_5_max_spectral_norm": 0.005968790035694838, + "layer_6_update_fnorm": 0.031314678490161896, + "layer_6_max_l1_linf_norm": 0.25711706280708313, + "layer_6_max_spectral_norm": 0.005791475996375084, + "layer_7_update_fnorm": 0.03167850896716118, + "layer_7_max_l1_linf_norm": 0.24824422597885132, + "layer_7_max_spectral_norm": 0.005597457755357027, + "layer_8_update_fnorm": 0.031642504036426544, + "layer_8_max_l1_linf_norm": 0.2561874985694885, + "layer_8_max_spectral_norm": 0.005774273071438074, + "layer_9_update_fnorm": 0.031693585216999054, + "layer_9_max_l1_linf_norm": 0.2579702138900757, + "layer_9_max_spectral_norm": 0.005782971158623695, + "layer_10_update_fnorm": 0.03181234374642372, + "layer_10_max_l1_linf_norm": 0.2740251421928406, + "layer_10_max_spectral_norm": 0.0061792610213160515, + "layer_11_update_fnorm": 0.03154008835554123, + "layer_11_max_l1_linf_norm": 0.2690846621990204, + "layer_11_max_spectral_norm": 0.006036446429789066, + "layer_12_update_fnorm": 0.03164812922477722, + "layer_12_max_l1_linf_norm": 0.26149123907089233, + "layer_12_max_spectral_norm": 0.005868560168892145, + "total_sharpness": 0.00969542097300291, + "ip_v_neg_g": 0.006957915611565113, + "cos_v_neg_g": 0.0006666648550890386, + "v_norm": 1.3214240074157715, + "g_norm": 7.898223400115967, + "hv_norm": 3.1080236434936523, + "cos_v_hv": 0.0041221571154892445, + "hg_norm": 3651.52001953125, + "cos_g_hg": 0.5945519804954529, + "v_parallel_norm": 2.7737538403016515e-05, + "v_perp_norm": 1.3214240074157715, + "layer_1_v_norm": 0.0316227525472641, + "layer_1_cos_v_neg_g": 0.007955827750265598, + "layer_2_v_norm": 0.029851149767637253, + "layer_2_cos_v_neg_g": 0.008642264641821384, + "layer_3_v_norm": 0.02902487851679325, + "layer_3_cos_v_neg_g": 0.010509015992283821, + "layer_4_v_norm": 0.030193839222192764, + "layer_4_cos_v_neg_g": 0.00933198630809784, + "layer_5_v_norm": 0.030726106837391853, + "layer_5_cos_v_neg_g": 0.00776266772300005, + "layer_6_v_norm": 0.031314682215452194, + "layer_6_cos_v_neg_g": 0.008197024464607239, + "layer_7_v_norm": 0.03167850896716118, + "layer_7_cos_v_neg_g": 0.007035915739834309, + "layer_8_v_norm": 0.031642504036426544, + "layer_8_cos_v_neg_g": 0.007489445619285107, + "layer_9_v_norm": 0.031693585216999054, + "layer_9_cos_v_neg_g": 0.005982275120913982, + "layer_10_v_norm": 0.03181234374642372, + "layer_10_cos_v_neg_g": 0.005788352340459824, + "layer_11_v_norm": 0.03154008835554123, + "layer_11_cos_v_neg_g": 0.006221403833478689, + "layer_12_v_norm": 0.03164812922477722, + "layer_12_cos_v_neg_g": 0.007244279142469168, + "layer_1_sharpness": 0.25596511363983154, + "layer_2_sharpness": 0.18736588954925537, + "layer_3_sharpness": 
0.3137011229991913, + "layer_4_sharpness": 0.2081325501203537, + "layer_5_sharpness": 0.1623082160949707, + "layer_6_sharpness": 0.1657344549894333, + "layer_7_sharpness": 0.1521085798740387, + "layer_8_sharpness": 0.13982069492340088, + "layer_9_sharpness": 0.1081327572464943, + "layer_10_sharpness": 0.09350928664207458, + "layer_11_sharpness": 0.0803496241569519, + "layer_12_sharpness": 0.11911299079656601 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_3000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..e27f5cb1a51d92239d0fcaa5ae407b9080bc70c8 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3138370513916016, + "total_l1_linf_norm": 7256.7958984375, + "total_spectral_norm": 1.3138371706008911, + "layer_1_update_fnorm": 0.0318656824529171, + "layer_1_max_l1_linf_norm": 0.2525402903556824, + "layer_1_max_spectral_norm": 0.0057976427488029, + "layer_2_update_fnorm": 0.02998819574713707, + "layer_2_max_l1_linf_norm": 0.2821335792541504, + "layer_2_max_spectral_norm": 0.006420612800866365, + "layer_3_update_fnorm": 0.029357314109802246, + "layer_3_max_l1_linf_norm": 0.2896122336387634, + "layer_3_max_spectral_norm": 0.006579801440238953, + "layer_4_update_fnorm": 0.03055662102997303, + "layer_4_max_l1_linf_norm": 0.2997276186943054, + "layer_4_max_spectral_norm": 0.006768530700355768, + "layer_5_update_fnorm": 0.03104800544679165, + "layer_5_max_l1_linf_norm": 0.27936798334121704, + "layer_5_max_spectral_norm": 0.006316914223134518, + "layer_6_update_fnorm": 0.031715162098407745, + "layer_6_max_l1_linf_norm": 0.2699367105960846, + "layer_6_max_spectral_norm": 0.006079826969653368, + "layer_7_update_fnorm": 0.031831011176109314, + "layer_7_max_l1_linf_norm": 0.2576196789741516, + "layer_7_max_spectral_norm": 0.005857175216078758, + "layer_8_update_fnorm": 0.03187495470046997, + "layer_8_max_l1_linf_norm": 0.26788538694381714, + "layer_8_max_spectral_norm": 0.006090942304581404, + "layer_9_update_fnorm": 0.03199592977762222, + "layer_9_max_l1_linf_norm": 0.28205472230911255, + "layer_9_max_spectral_norm": 0.006313696037977934, + "layer_10_update_fnorm": 0.03213706240057945, + "layer_10_max_l1_linf_norm": 0.29520583152770996, + "layer_10_max_spectral_norm": 0.006584062706679106, + "layer_11_update_fnorm": 0.03198780119419098, + "layer_11_max_l1_linf_norm": 0.2997336685657501, + "layer_11_max_spectral_norm": 0.006760901305824518, + "layer_12_update_fnorm": 0.032266926020383835, + "layer_12_max_l1_linf_norm": 0.3017088770866394, + "layer_12_max_spectral_norm": 0.0068010082468390465, + "total_sharpness": 0.008490457199513912, + "ip_v_neg_g": 0.009306777268648148, + "cos_v_neg_g": 0.0009625406237319112, + "v_norm": 1.3138370513916016, + "g_norm": 7.359338283538818, + "hv_norm": 2.3924660682678223, + "cos_v_hv": 0.004662585444748402, + "hg_norm": 2550.683349609375, + "cos_g_hg": 0.5783512592315674, + "v_parallel_norm": 3.3516782423248515e-05, + "v_perp_norm": 1.3138370513916016, + "layer_1_v_norm": 0.0318656824529171, + "layer_1_cos_v_neg_g": 0.013451766222715378, + "layer_2_v_norm": 0.02998819574713707, + "layer_2_cos_v_neg_g": 0.014559918083250523, + "layer_3_v_norm": 0.029357314109802246, + "layer_3_cos_v_neg_g": 0.01523544080555439, + "layer_4_v_norm": 0.03055662102997303, + 
"layer_4_cos_v_neg_g": 0.013188700191676617, + "layer_5_v_norm": 0.03104800544679165, + "layer_5_cos_v_neg_g": 0.012182565405964851, + "layer_6_v_norm": 0.031715162098407745, + "layer_6_cos_v_neg_g": 0.011546021327376366, + "layer_7_v_norm": 0.031831011176109314, + "layer_7_cos_v_neg_g": 0.011024055071175098, + "layer_8_v_norm": 0.03187495470046997, + "layer_8_cos_v_neg_g": 0.011380501091480255, + "layer_9_v_norm": 0.03199592977762222, + "layer_9_cos_v_neg_g": 0.010481053963303566, + "layer_10_v_norm": 0.03213706240057945, + "layer_10_cos_v_neg_g": 0.009295870549976826, + "layer_11_v_norm": 0.03198780491948128, + "layer_11_cos_v_neg_g": 0.00637495843693614, + "layer_12_v_norm": 0.032266926020383835, + "layer_12_cos_v_neg_g": 0.005248110741376877, + "layer_1_sharpness": 0.24043598771095276, + "layer_2_sharpness": 0.17196165025234222, + "layer_3_sharpness": 0.19492848217487335, + "layer_4_sharpness": 0.14236310124397278, + "layer_5_sharpness": 0.1268496960401535, + "layer_6_sharpness": 0.12943977117538452, + "layer_7_sharpness": 0.13581086695194244, + "layer_8_sharpness": 0.12894633412361145, + "layer_9_sharpness": 0.1212194487452507, + "layer_10_sharpness": 0.097262442111969, + "layer_11_sharpness": 0.07920720428228378, + "layer_12_sharpness": 0.0880901962518692 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..e9d1509df8e116a2be05fad43afe96861d9027a7 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 0.9598575234413147, + "total_l1_linf_norm": 5286.609375, + "total_spectral_norm": 0.9598575234413147, + "layer_1_update_fnorm": 0.023092979565262794, + "layer_1_max_l1_linf_norm": 0.19821569323539734, + "layer_1_max_spectral_norm": 0.004412943497300148, + "layer_2_update_fnorm": 0.022575661540031433, + "layer_2_max_l1_linf_norm": 0.19852367043495178, + "layer_2_max_spectral_norm": 0.00514519726857543, + "layer_3_update_fnorm": 0.022040922194719315, + "layer_3_max_l1_linf_norm": 0.2041057050228119, + "layer_3_max_spectral_norm": 0.005102537572383881, + "layer_4_update_fnorm": 0.023249033838510513, + "layer_4_max_l1_linf_norm": 0.19943100214004517, + "layer_4_max_spectral_norm": 0.008288225159049034, + "layer_5_update_fnorm": 0.023811018094420433, + "layer_5_max_l1_linf_norm": 0.21378207206726074, + "layer_5_max_spectral_norm": 0.009496588259935379, + "layer_6_update_fnorm": 0.02399706467986107, + "layer_6_max_l1_linf_norm": 0.22266674041748047, + "layer_6_max_spectral_norm": 0.009679376147687435, + "layer_7_update_fnorm": 0.024613428860902786, + "layer_7_max_l1_linf_norm": 0.2691362500190735, + "layer_7_max_spectral_norm": 0.011131592094898224, + "layer_8_update_fnorm": 0.023686125874519348, + "layer_8_max_l1_linf_norm": 0.20641294121742249, + "layer_8_max_spectral_norm": 0.009105992503464222, + "layer_9_update_fnorm": 0.023402594029903412, + "layer_9_max_l1_linf_norm": 0.20043782889842987, + "layer_9_max_spectral_norm": 0.008600311353802681, + "layer_10_update_fnorm": 0.023189257830381393, + "layer_10_max_l1_linf_norm": 0.1878197193145752, + "layer_10_max_spectral_norm": 0.008027292788028717, + "layer_11_update_fnorm": 0.022334307432174683, + "layer_11_max_l1_linf_norm": 0.16140379011631012, + "layer_11_max_spectral_norm": 
0.005236592143774033, + "layer_12_update_fnorm": 0.022497382014989853, + "layer_12_max_l1_linf_norm": 0.16626819968223572, + "layer_12_max_spectral_norm": 0.005407613702118397, + "total_sharpness": 0.020220668986439705, + "ip_v_neg_g": 0.009131000377237797, + "cos_v_neg_g": 0.0011563157895579934, + "v_norm": 0.9598575234413147, + "g_norm": 8.226879119873047, + "hv_norm": 5.192538261413574, + "cos_v_hv": 0.0037378561682999134, + "hg_norm": 10322.4990234375, + "cos_g_hg": 0.6656765937805176, + "v_parallel_norm": 5.873971167602576e-05, + "v_perp_norm": 0.9598575234413147, + "layer_1_v_norm": 0.023092979565262794, + "layer_1_cos_v_neg_g": 0.021441126242280006, + "layer_2_v_norm": 0.022575661540031433, + "layer_2_cos_v_neg_g": 0.01569044031202793, + "layer_3_v_norm": 0.022040924057364464, + "layer_3_cos_v_neg_g": 0.015446730889379978, + "layer_4_v_norm": 0.023249033838510513, + "layer_4_cos_v_neg_g": 0.012712011113762856, + "layer_5_v_norm": 0.023811018094420433, + "layer_5_cos_v_neg_g": 0.012454129755496979, + "layer_6_v_norm": 0.02399706467986107, + "layer_6_cos_v_neg_g": 0.012118998914957047, + "layer_7_v_norm": 0.024613428860902786, + "layer_7_cos_v_neg_g": 0.011202441528439522, + "layer_8_v_norm": 0.023686125874519348, + "layer_8_cos_v_neg_g": 0.011218304745852947, + "layer_9_v_norm": 0.023402594029903412, + "layer_9_cos_v_neg_g": 0.011049249209463596, + "layer_10_v_norm": 0.023189257830381393, + "layer_10_cos_v_neg_g": 0.010678277350962162, + "layer_11_v_norm": 0.022334307432174683, + "layer_11_cos_v_neg_g": 0.009470431134104729, + "layer_12_v_norm": 0.022497382014989853, + "layer_12_cos_v_neg_g": 0.00862051360309124, + "layer_1_sharpness": 1.9686822891235352, + "layer_2_sharpness": 0.5962660908699036, + "layer_3_sharpness": 0.43450579047203064, + "layer_4_sharpness": 0.24432486295700073, + "layer_5_sharpness": 0.1746724545955658, + "layer_6_sharpness": 0.13350215554237366, + "layer_7_sharpness": 0.09681929647922516, + "layer_8_sharpness": 0.0888759195804596, + "layer_9_sharpness": 0.07221187651157379, + "layer_10_sharpness": 0.057763680815696716, + "layer_11_sharpness": 0.04500338435173035, + "layer_12_sharpness": 0.03694257140159607 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/training_log.txt b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..d660ad0f0f27fa34f98b14b5f0ec1d0c81817dfc --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.0005_seed_44/training_log.txt @@ -0,0 +1,4627 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. 
+ +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" 
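+ # (descriptive note) data shard layout read here: a 256*int32 header (magic 20240520, version 1, claimed token count ntok), followed by ntok uint16 tokens; _peek_data_shard reads only the header, _load_data_shard reads the whole file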
+ return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in 
range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. 
+ # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack("<B", length)) # write the length as a 1-byte unsigned integer + file.write(b) # write the actual bytes + # Note: update_direction_v is identical across DDP ranks (same gradients -> same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3.
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
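+ # For reference: the quantity produced below is the directional sharpness along the captured update v, + # S(v) = v^T H v / (v^T v), where H is the Hessian of the averaged micro-batch loss at the current weights. + # H is never materialized: double backward gives the Hessian-vector product Hv = d/dtheta <grad L(theta), v>, + # and the alignment metrics (cos_v_neg_g, cos_v_hv, cos_g_hg) are ordinary cosines between v, -g, Hv and Hg.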
+ loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = <v, -g>; cos_v_neg_g = <v, -g> / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = 
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
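+ # (note) combined with the wte.weight special case above, the token embedding and every remaining 1D parameter are optimized by AdamW, while Muon only orthogonalizes the other 2D weight matrices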
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + 
device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers (get_wsd_lr: linear warmup, then constant LR; no decay phase) + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logfile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nicely + if ddp: + destroy_process_group() +step:0 validation loss:11.026602 +step:0 train loss:11.019229 +step:1 train loss:11.024827 +step:2 train loss:11.018542 +step:3 train loss:11.016748 +step:4 train loss:11.011626 +step:5 train loss:11.006143 +step:6 train loss:10.996655 +step:7 train loss:10.989809 +step:8 train loss:10.979504 +step:9 train loss:10.968819 +step:10 train loss:10.954546 +step:11 train loss:10.945156 +step:12 train loss:10.925696 +step:13 train loss:10.912360 +step:14 train loss:10.891317 +step:15 train loss:10.874607 +step:16 train loss:10.854692 +step:17 train loss:10.836142 +step:18 train loss:10.812106 +step:19 train loss:10.789518 +step:20 train loss:10.761364 +step:21 train loss:10.739789 +step:22 train loss:10.707508 +step:23 train loss:10.684323 +step:24 train loss:10.647914 +step:25 train loss:10.625839 +step:26 train loss:10.586967 +step:27 train loss:10.554930 +step:28 train loss:10.525645 +step:29 train loss:10.490974 +step:30 train loss:10.453732 +step:31 train loss:10.413126 +step:32 train loss:10.371224 +step:33 train loss:10.335753 +step:34 train loss:10.300281 +step:35 train loss:10.250202 +step:36 train loss:10.210125 +step:37 train loss:10.156146
+step:38 train loss:10.120653 +step:39 train loss:10.068191 +step:40 train loss:10.024967 +step:41 train loss:9.969199 +step:42 train loss:9.934319 +step:43 train loss:9.863068 +step:44 train loss:9.825024 +step:45 train loss:9.763737 +step:46 train loss:9.725693 +step:47 train loss:9.665586 +step:48 train loss:9.611370 +step:49 train loss:9.546313 +step:50 train loss:9.487144 +step:51 train loss:9.421849 +step:52 train loss:9.381465 +step:53 train loss:9.317215 +step:54 train loss:9.263241 +step:55 train loss:9.192213 +step:56 train loss:9.134851 +step:57 train loss:9.085662 +step:58 train loss:9.007856 +step:59 train loss:8.954728 +step:60 train loss:8.893785 +step:61 train loss:8.838638 +step:62 train loss:8.777884 +step:63 train loss:8.752914 +step:64 train loss:8.645283 +step:65 train loss:8.592642 +step:66 train loss:8.549850 +step:67 train loss:8.495442 +step:68 train loss:8.437700 +step:69 train loss:8.370267 +step:70 train loss:8.315669 +step:71 train loss:8.239910 +step:72 train loss:8.218147 +step:73 train loss:8.149012 +step:74 train loss:8.122774 +step:75 train loss:8.043599 +step:76 train loss:8.057857 +step:77 train loss:7.968671 +step:78 train loss:7.837293 +step:79 train loss:7.864219 +step:80 train loss:7.829791 +step:81 train loss:7.833024 +step:82 train loss:7.798836 +step:83 train loss:7.742925 +step:84 train loss:7.706704 +step:85 train loss:7.669102 +step:86 train loss:7.641675 +step:87 train loss:7.612950 +step:88 train loss:7.609769 +step:89 train loss:7.572516 +step:90 train loss:7.605941 +step:91 train loss:7.607821 +step:92 train loss:7.601645 +step:93 train loss:7.557958 +step:94 train loss:7.542241 +step:95 train loss:7.504281 +step:96 train loss:7.581092 +step:97 train loss:7.538258 +step:98 train loss:7.532385 +step:99 train loss:7.502990 +step:100 train loss:7.572430 +step:101 train loss:7.458474 +step:102 train loss:7.450771 +step:103 train loss:7.437508 +step:104 train loss:7.475565 +step:105 train loss:7.525504 +step:106 train loss:7.465404 +step:107 train loss:7.430991 +step:108 train loss:7.435665 +step:109 train loss:7.463999 +step:110 train loss:7.394886 +step:111 train loss:7.399216 +step:112 train loss:7.386154 +step:113 train loss:7.346418 +step:114 train loss:7.402537 +step:115 train loss:7.342649 +step:116 train loss:7.318941 +step:117 train loss:7.255330 +step:118 train loss:7.314700 +step:119 train loss:7.254051 +step:120 train loss:7.259509 +step:121 train loss:7.184083 +step:122 train loss:7.253496 +step:123 train loss:7.178263 +step:124 train loss:7.160453 +step:125 train loss:7.128301 +step:126 train loss:7.202198 +step:127 train loss:7.120981 +step:128 train loss:7.142568 +step:129 train loss:7.113857 +step:130 train loss:7.158103 +step:131 train loss:7.087557 +step:132 train loss:6.996164 +step:133 train loss:7.059984 +step:134 train loss:7.012320 +step:135 train loss:6.926334 +step:136 train loss:6.950998 +step:137 train loss:6.946042 +step:138 train loss:6.872418 +step:139 train loss:6.946507 +step:140 train loss:6.850882 +step:141 train loss:6.940044 +step:142 train loss:6.882080 +step:143 train loss:6.886381 +step:144 train loss:6.847345 +step:145 train loss:6.780165 +step:146 train loss:6.781538 +step:147 train loss:6.833672 +step:148 train loss:6.830758 +step:149 train loss:6.769063 +step:150 train loss:6.776088 +step:151 train loss:6.682497 +step:152 train loss:6.710812 +step:153 train loss:6.679836 +step:154 train loss:6.758295 +step:155 train loss:6.723249 +step:156 train loss:6.754174 +step:157 train loss:6.644311 +step:158 
train loss:6.620854 +step:159 train loss:6.657728 +step:160 train loss:6.628288 +step:161 train loss:6.614220 +step:162 train loss:6.584734 +step:163 train loss:6.594893 +step:164 train loss:6.605915 +step:165 train loss:6.603882 +step:166 train loss:6.553262 +step:167 train loss:6.546195 +step:168 train loss:6.515182 +step:169 train loss:6.467369 +step:170 train loss:6.437724 +step:171 train loss:6.540490 +step:172 train loss:6.467538 +step:173 train loss:6.501427 +step:174 train loss:6.506869 +step:175 train loss:6.466839 +step:176 train loss:6.411885 +step:177 train loss:6.458126 +step:178 train loss:6.462728 +step:179 train loss:6.411221 +step:180 train loss:6.396375 +step:181 train loss:6.428082 +step:182 train loss:6.354795 +step:183 train loss:6.448862 +step:184 train loss:6.414970 +step:185 train loss:6.323520 +step:186 train loss:6.477350 +step:187 train loss:6.405530 +step:188 train loss:6.225616 +step:189 train loss:6.381461 +step:190 train loss:6.375388 +step:191 train loss:6.292840 +step:192 train loss:6.202914 +step:193 train loss:6.348739 +step:194 train loss:6.373912 +step:195 train loss:6.361906 +step:196 train loss:6.334313 +step:197 train loss:6.330456 +step:198 train loss:6.268870 +step:199 train loss:6.349711 +step:200 train loss:6.380829 +step:201 train loss:6.314105 +step:202 train loss:6.316624 +step:203 train loss:6.271178 +step:204 train loss:6.312148 +step:205 train loss:6.164818 +step:206 train loss:6.302042 +step:207 train loss:6.267249 +step:208 train loss:6.210991 +step:209 train loss:6.210631 +step:210 train loss:6.207538 +step:211 train loss:6.282272 +step:212 train loss:6.227883 +step:213 train loss:6.243983 +step:214 train loss:6.226309 +step:215 train loss:6.248139 +step:216 train loss:6.191108 +step:217 train loss:6.190213 +step:218 train loss:6.170765 +step:219 train loss:6.145512 +step:220 train loss:6.195614 +step:221 train loss:6.146642 +step:222 train loss:6.189955 +step:223 train loss:6.200472 +step:224 train loss:6.191278 +step:225 train loss:6.127294 +step:226 train loss:6.132176 +step:227 train loss:6.195418 +step:228 train loss:6.160979 +step:229 train loss:6.235656 +step:230 train loss:6.100383 +step:231 train loss:6.153905 +step:232 train loss:6.139009 +step:233 train loss:6.109482 +step:234 train loss:6.107184 +step:235 train loss:6.190907 +step:236 train loss:6.134802 +step:237 train loss:6.164093 +step:238 train loss:6.170871 +step:239 train loss:6.077047 +step:240 train loss:6.149475 +step:241 train loss:6.182043 +step:242 train loss:6.165952 +step:243 train loss:6.072749 +step:244 train loss:6.100163 +step:245 train loss:6.086206 +step:246 train loss:6.082532 +step:247 train loss:6.074871 +step:248 train loss:6.032598 +step:249 train loss:6.091414 +step:250 validation loss:6.087651 +step:250 train loss:6.058851 +step:251 train loss:6.093233 +step:252 train loss:6.052349 +step:253 train loss:6.053158 +step:254 train loss:6.014877 +step:255 train loss:6.053869 +step:256 train loss:6.055378 +step:257 train loss:6.101521 +step:258 train loss:5.995920 +step:259 train loss:6.026568 +step:260 train loss:6.000401 +step:261 train loss:5.995425 +step:262 train loss:6.067326 +step:263 train loss:6.026738 +step:264 train loss:5.992797 +step:265 train loss:6.023511 +step:266 train loss:5.982904 +step:267 train loss:6.020341 +step:268 train loss:5.970379 +step:269 train loss:5.998469 +step:270 train loss:6.013130 +step:271 train loss:6.000080 +step:272 train loss:5.957611 +step:273 train loss:6.032084 +step:274 train loss:5.945892 +step:275 train 
loss:5.980421 +step:276 train loss:5.956283 +step:277 train loss:5.954937 +step:278 train loss:5.932760 +step:279 train loss:5.905635 +step:280 train loss:5.976632 +step:281 train loss:6.056978 +step:282 train loss:5.939535 +step:283 train loss:5.948024 +step:284 train loss:5.913712 +step:285 train loss:5.968010 +step:286 train loss:5.941287 +step:287 train loss:5.920202 +step:288 train loss:5.897108 +step:289 train loss:5.922467 +step:290 train loss:5.981249 +step:291 train loss:5.907040 +step:292 train loss:5.961900 +step:293 train loss:5.893188 +step:294 train loss:6.000935 +step:295 train loss:5.896590 +step:296 train loss:5.962238 +step:297 train loss:5.994241 +step:298 train loss:5.882687 +step:299 train loss:5.959131 +step:300 train loss:5.871490 +step:301 train loss:5.910331 +step:302 train loss:5.883237 +step:303 train loss:5.900589 +step:304 train loss:5.929808 +step:305 train loss:5.850082 +step:306 train loss:5.872778 +step:307 train loss:5.906905 +step:308 train loss:5.811996 +step:309 train loss:5.951226 +step:310 train loss:5.914088 +step:311 train loss:5.892192 +step:312 train loss:5.880865 +step:313 train loss:5.905352 +step:314 train loss:5.882987 +step:315 train loss:5.850868 +step:316 train loss:5.843720 +step:317 train loss:5.810218 +step:318 train loss:5.812917 +step:319 train loss:5.884202 +step:320 train loss:5.805412 +step:321 train loss:5.862805 +step:322 train loss:5.854207 +step:323 train loss:5.921351 +step:324 train loss:5.861889 +step:325 train loss:5.884412 +step:326 train loss:5.893154 +step:327 train loss:5.863178 +step:328 train loss:5.835917 +step:329 train loss:5.855209 +step:330 train loss:5.786417 +step:331 train loss:5.815044 +step:332 train loss:5.802115 +step:333 train loss:5.740041 +step:334 train loss:5.837810 +step:335 train loss:5.876613 +step:336 train loss:5.999293 +step:337 train loss:5.899937 +step:338 train loss:5.813226 +step:339 train loss:5.767658 +step:340 train loss:5.778140 +step:341 train loss:5.772314 +step:342 train loss:5.837625 +step:343 train loss:5.813503 +step:344 train loss:5.766348 +step:345 train loss:5.742236 +step:346 train loss:5.787469 +step:347 train loss:5.726390 +step:348 train loss:5.734647 +step:349 train loss:5.674202 +step:350 train loss:5.711979 +step:351 train loss:5.780760 +step:352 train loss:5.739626 +step:353 train loss:5.764412 +step:354 train loss:5.721587 +step:355 train loss:5.770528 +step:356 train loss:5.740188 +step:357 train loss:5.812945 +step:358 train loss:5.840405 +step:359 train loss:5.679321 +step:360 train loss:5.794280 +step:361 train loss:5.776030 +step:362 train loss:5.759215 +step:363 train loss:5.713886 +step:364 train loss:5.833621 +step:365 train loss:5.762697 +step:366 train loss:5.736254 +step:367 train loss:5.757778 +step:368 train loss:5.733380 +step:369 train loss:5.708629 +step:370 train loss:5.768206 +step:371 train loss:5.700957 +step:372 train loss:5.770258 +step:373 train loss:5.720962 +step:374 train loss:5.707410 +step:375 train loss:5.739349 +step:376 train loss:5.721657 +step:377 train loss:5.615117 +step:378 train loss:5.699454 +step:379 train loss:5.756068 +step:380 train loss:5.682985 +step:381 train loss:5.745278 +step:382 train loss:5.729787 +step:383 train loss:5.709769 +step:384 train loss:5.685821 +step:385 train loss:5.679996 +step:386 train loss:5.713454 +step:387 train loss:5.710883 +step:388 train loss:5.680829 +step:389 train loss:5.699056 +step:390 train loss:5.681082 +step:391 train loss:5.684888 +step:392 train loss:5.679329 +step:393 train loss:5.671312 
+step:394 train loss:5.719734 +step:395 train loss:5.655837 +step:396 train loss:5.605073 +step:397 train loss:5.688819 +step:398 train loss:5.674297 +step:399 train loss:5.682730 +step:400 train loss:5.643605 +step:401 train loss:5.683395 +step:402 train loss:5.662094 +step:403 train loss:5.658284 +step:404 train loss:5.639948 +step:405 train loss:5.643041 +step:406 train loss:5.675351 +step:407 train loss:5.660959 +step:408 train loss:5.724403 +step:409 train loss:5.656850 +step:410 train loss:5.627966 +step:411 train loss:5.622069 +step:412 train loss:5.706977 +step:413 train loss:5.593025 +step:414 train loss:5.676815 +step:415 train loss:5.636518 +step:416 train loss:5.652085 +step:417 train loss:5.670761 +step:418 train loss:5.617374 +step:419 train loss:5.608505 +step:420 train loss:5.613526 +step:421 train loss:5.598932 +step:422 train loss:5.598065 +step:423 train loss:5.603180 +step:424 train loss:5.576582 +step:425 train loss:5.635759 +step:426 train loss:5.632866 +step:427 train loss:5.569490 +step:428 train loss:5.632885 +step:429 train loss:5.544620 +step:430 train loss:5.581846 +step:431 train loss:5.614752 +step:432 train loss:5.635098 +step:433 train loss:5.622914 +step:434 train loss:5.578851 +step:435 train loss:5.634642 +step:436 train loss:5.655640 +step:437 train loss:5.616739 +step:438 train loss:5.572254 +step:439 train loss:5.572240 +step:440 train loss:5.612805 +step:441 train loss:5.565928 +step:442 train loss:5.559774 +step:443 train loss:5.575337 +step:444 train loss:5.604707 +step:445 train loss:5.609832 +step:446 train loss:5.550813 +step:447 train loss:5.567812 +step:448 train loss:5.621777 +step:449 train loss:5.579304 +step:450 train loss:5.565860 +step:451 train loss:5.559008 +step:452 train loss:5.620384 +step:453 train loss:5.547112 +step:454 train loss:5.510738 +step:455 train loss:5.600578 +step:456 train loss:5.570130 +step:457 train loss:5.543658 +step:458 train loss:5.565875 +step:459 train loss:5.512213 +step:460 train loss:5.612301 +step:461 train loss:5.567973 +step:462 train loss:5.474926 +step:463 train loss:5.535673 +step:464 train loss:5.588606 +step:465 train loss:5.546242 +step:466 train loss:5.568152 +step:467 train loss:5.524268 +step:468 train loss:5.580377 +step:469 train loss:5.549517 +step:470 train loss:5.509352 +step:471 train loss:5.596972 +step:472 train loss:5.493509 +step:473 train loss:5.569108 +step:474 train loss:5.550447 +step:475 train loss:5.561392 +step:476 train loss:5.540117 +step:477 train loss:5.482453 +step:478 train loss:5.501509 +step:479 train loss:5.498559 +step:480 train loss:5.520012 +step:481 train loss:5.519747 +step:482 train loss:5.469912 +step:483 train loss:5.532095 +step:484 train loss:5.485777 +step:485 train loss:5.469367 +step:486 train loss:5.531628 +step:487 train loss:5.499993 +step:488 train loss:5.495762 +step:489 train loss:5.493203 +step:490 train loss:5.474801 +step:491 train loss:5.484398 +step:492 train loss:5.486768 +step:493 train loss:5.487791 +step:494 train loss:5.500406 +step:495 train loss:5.445471 +step:496 train loss:5.541395 +step:497 train loss:5.434326 +step:498 train loss:5.528761 +step:499 train loss:5.502591 +step:500 validation loss:5.481455 total_sharp:2.0221e-02 L1_sharp:1.9687e+00 L2_sharp:5.9627e-01 L3_sharp:4.3451e-01 L4_sharp:2.4432e-01 L5_sharp:1.7467e-01 L6_sharp:1.3350e-01 L7_sharp:9.6819e-02 L8_sharp:8.8876e-02 L9_sharp:7.2212e-02 L10_sharp:5.7764e-02 L11_sharp:4.5003e-02 L12_sharp:3.6943e-02 total_fnorm:9.5986e-01 total_l1_linf:5.2866e+03 total_spectral:9.5986e-01 
L1_fnorm:2.3093e-02 L2_fnorm:2.2576e-02 L3_fnorm:2.2041e-02 L4_fnorm:2.3249e-02 L5_fnorm:2.3811e-02 L6_fnorm:2.3997e-02 L7_fnorm:2.4613e-02 L8_fnorm:2.3686e-02 L9_fnorm:2.3403e-02 L10_fnorm:2.3189e-02 L11_fnorm:2.2334e-02 L12_fnorm:2.2497e-02 L1_l1linf:1.9822e-01 L2_l1linf:1.9852e-01 L3_l1linf:2.0411e-01 L4_l1linf:1.9943e-01 L5_l1linf:2.1378e-01 L6_l1linf:2.2267e-01 L7_l1linf:2.6914e-01 L8_l1linf:2.0641e-01 L9_l1linf:2.0044e-01 L10_l1linf:1.8782e-01 L11_l1linf:1.6140e-01 L12_l1linf:1.6627e-01 L1_spectral:4.4129e-03 L2_spectral:5.1452e-03 L3_spectral:5.1025e-03 L4_spectral:8.2882e-03 L5_spectral:9.4966e-03 L6_spectral:9.6794e-03 L7_spectral:1.1132e-02 L8_spectral:9.1060e-03 L9_spectral:8.6003e-03 L10_spectral:8.0273e-03 L11_spectral:5.2366e-03 L12_spectral:5.4076e-03 ip_v_neg_g:9.1310e-03 cos_v_neg_g:1.1563e-03 v_norm:9.5986e-01 g_norm:8.2269e+00 hv_norm:5.1925e+00 cos_v_hv:3.7379e-03 hg_norm:1.0322e+04 cos_g_hg:6.6568e-01 v_par:5.8740e-05 v_perp:9.5986e-01 L1_cos_v_neg_g:2.1441e-02 L1_v_norm:2.3093e-02 L2_cos_v_neg_g:1.5690e-02 L2_v_norm:2.2576e-02 L3_cos_v_neg_g:1.5447e-02 L3_v_norm:2.2041e-02 L4_cos_v_neg_g:1.2712e-02 L4_v_norm:2.3249e-02 L5_cos_v_neg_g:1.2454e-02 L5_v_norm:2.3811e-02 L6_cos_v_neg_g:1.2119e-02 L6_v_norm:2.3997e-02 L7_cos_v_neg_g:1.1202e-02 L7_v_norm:2.4613e-02 L8_cos_v_neg_g:1.1218e-02 L8_v_norm:2.3686e-02 L9_cos_v_neg_g:1.1049e-02 L9_v_norm:2.3403e-02 L10_cos_v_neg_g:1.0678e-02 L10_v_norm:2.3189e-02 L11_cos_v_neg_g:9.4704e-03 L11_v_norm:2.2334e-02 L12_cos_v_neg_g:8.6205e-03 L12_v_norm:2.2497e-02 +step:500 train loss:5.509717 +step:501 train loss:5.458357 +step:502 train loss:5.495142 +step:503 train loss:5.424843 +step:504 train loss:5.518159 +step:505 train loss:5.450738 +step:506 train loss:5.452104 +step:507 train loss:5.462586 +step:508 train loss:5.489522 +step:509 train loss:5.493245 +step:510 train loss:5.428030 +step:511 train loss:5.420807 +step:512 train loss:5.419785 +step:513 train loss:5.452312 +step:514 train loss:5.497103 +step:515 train loss:5.453397 +step:516 train loss:5.523837 +step:517 train loss:5.453297 +step:518 train loss:5.439026 +step:519 train loss:5.491440 +step:520 train loss:5.442827 +step:521 train loss:5.431069 +step:522 train loss:5.456910 +step:523 train loss:5.448702 +step:524 train loss:5.406628 +step:525 train loss:5.410704 +step:526 train loss:5.422269 +step:527 train loss:5.420391 +step:528 train loss:5.423961 +step:529 train loss:5.447380 +step:530 train loss:5.396304 +step:531 train loss:5.438320 +step:532 train loss:5.408146 +step:533 train loss:5.365936 +step:534 train loss:5.442350 +step:535 train loss:5.424713 +step:536 train loss:5.492113 +step:537 train loss:5.381610 +step:538 train loss:5.351770 +step:539 train loss:5.435900 +step:540 train loss:5.476554 +step:541 train loss:5.379563 +step:542 train loss:5.405907 +step:543 train loss:5.424403 +step:544 train loss:5.421081 +step:545 train loss:5.401268 +step:546 train loss:5.364749 +step:547 train loss:5.384089 +step:548 train loss:5.346565 +step:549 train loss:5.400135 +step:550 train loss:5.374920 +step:551 train loss:5.382468 +step:552 train loss:5.473078 +step:553 train loss:5.437962 +step:554 train loss:5.383218 +step:555 train loss:5.449056 +step:556 train loss:5.395676 +step:557 train loss:5.371341 +step:558 train loss:5.349878 +step:559 train loss:5.398371 +step:560 train loss:5.446342 +step:561 train loss:5.332805 +step:562 train loss:5.316638 +step:563 train loss:5.394822 +step:564 train loss:5.362306 +step:565 train loss:5.380811 +step:566 train loss:5.376024 
+step:567 train loss:5.372060 +step:568 train loss:5.397413 +step:569 train loss:5.380841 +step:570 train loss:5.317643 +step:571 train loss:5.343729 +step:572 train loss:5.341852 +step:573 train loss:5.337595 +step:574 train loss:5.375998 +step:575 train loss:5.339292 +step:576 train loss:5.349059 +step:577 train loss:5.369866 +step:578 train loss:5.348329 +step:579 train loss:5.393645 +step:580 train loss:5.329852 +step:581 train loss:5.382896 +step:582 train loss:5.347053 +step:583 train loss:5.361005 +step:584 train loss:5.343436 +step:585 train loss:5.333322 +step:586 train loss:5.322878 +step:587 train loss:5.396381 +step:588 train loss:5.319215 +step:589 train loss:5.370811 +step:590 train loss:5.378085 +step:591 train loss:5.320820 +step:592 train loss:5.298115 +step:593 train loss:5.329118 +step:594 train loss:5.301976 +step:595 train loss:5.343505 +step:596 train loss:5.318056 +step:597 train loss:5.349179 +step:598 train loss:5.321503 +step:599 train loss:5.321688 +step:600 train loss:5.300582 +step:601 train loss:5.275290 +step:602 train loss:5.283869 +step:603 train loss:5.340168 +step:604 train loss:5.315493 +step:605 train loss:5.345907 +step:606 train loss:5.296544 +step:607 train loss:5.284705 +step:608 train loss:5.282835 +step:609 train loss:5.257351 +step:610 train loss:5.277846 +step:611 train loss:5.283385 +step:612 train loss:5.324704 +step:613 train loss:5.247185 +step:614 train loss:5.290421 +step:615 train loss:5.342435 +step:616 train loss:5.264231 +step:617 train loss:5.298332 +step:618 train loss:5.268307 +step:619 train loss:5.301476 +step:620 train loss:5.319244 +step:621 train loss:5.257213 +step:622 train loss:5.310450 +step:623 train loss:5.321398 +step:624 train loss:5.304283 +step:625 train loss:5.303987 +step:626 train loss:5.306552 +step:627 train loss:5.271640 +step:628 train loss:5.282497 +step:629 train loss:5.228108 +step:630 train loss:5.252496 +step:631 train loss:5.246790 +step:632 train loss:5.263834 +step:633 train loss:5.285255 +step:634 train loss:5.281126 +step:635 train loss:5.219789 +step:636 train loss:5.306316 +step:637 train loss:5.223473 +step:638 train loss:5.159377 +step:639 train loss:5.285928 +step:640 train loss:5.231542 +step:641 train loss:5.260509 +step:642 train loss:5.301241 +step:643 train loss:5.207471 +step:644 train loss:5.292409 +step:645 train loss:5.248256 +step:646 train loss:5.242417 +step:647 train loss:5.255337 +step:648 train loss:5.350827 +step:649 train loss:5.246309 +step:650 train loss:5.310280 +step:651 train loss:5.196416 +step:652 train loss:5.223616 +step:653 train loss:5.221153 +step:654 train loss:5.217742 +step:655 train loss:5.254688 +step:656 train loss:5.204926 +step:657 train loss:5.258186 +step:658 train loss:5.184786 +step:659 train loss:5.266767 +step:660 train loss:5.230802 +step:661 train loss:5.267886 +step:662 train loss:5.262456 +step:663 train loss:5.261679 +step:664 train loss:5.177318 +step:665 train loss:5.190550 +step:666 train loss:5.191633 +step:667 train loss:5.248369 +step:668 train loss:5.223039 +step:669 train loss:5.208858 +step:670 train loss:5.224150 +step:671 train loss:5.197343 +step:672 train loss:5.170190 +step:673 train loss:5.257362 +step:674 train loss:5.261967 +step:675 train loss:5.166551 +step:676 train loss:5.248463 +step:677 train loss:5.185245 +step:678 train loss:5.176378 +step:679 train loss:5.220280 +step:680 train loss:5.177485 +step:681 train loss:5.228929 +step:682 train loss:5.143278 +step:683 train loss:5.201834 +step:684 train loss:5.238756 +step:685 
train loss:5.171396 +step:686 train loss:5.280657 +step:687 train loss:5.207306 +step:688 train loss:5.140630 +step:689 train loss:5.190566 +step:690 train loss:5.160721 +step:691 train loss:5.172097 +step:692 train loss:5.192080 +step:693 train loss:5.185839 +step:694 train loss:5.170249 +step:695 train loss:5.133260 +step:696 train loss:5.105547 +step:697 train loss:5.213844 +step:698 train loss:5.151490 +step:699 train loss:5.147967 +step:700 train loss:5.226518 +step:701 train loss:5.136439 +step:702 train loss:5.204592 +step:703 train loss:5.137943 +step:704 train loss:5.100112 +step:705 train loss:5.139538 +step:706 train loss:5.044613 +step:707 train loss:5.106004 +step:708 train loss:5.191577 +step:709 train loss:5.157246 +step:710 train loss:5.117933 +step:711 train loss:5.190926 +step:712 train loss:5.142365 +step:713 train loss:5.100933 +step:714 train loss:5.182814 +step:715 train loss:5.097470 +step:716 train loss:5.226411 +step:717 train loss:5.113189 +step:718 train loss:5.178229 +step:719 train loss:5.133706 +step:720 train loss:5.117203 +step:721 train loss:5.123270 +step:722 train loss:5.144483 +step:723 train loss:5.183796 +step:724 train loss:5.154038 +step:725 train loss:5.136439 +step:726 train loss:5.114745 +step:727 train loss:5.143756 +step:728 train loss:5.134932 +step:729 train loss:5.064770 +step:730 train loss:5.153311 +step:731 train loss:5.175152 +step:732 train loss:5.155308 +step:733 train loss:5.134805 +step:734 train loss:5.126070 +step:735 train loss:5.196419 +step:736 train loss:5.145822 +step:737 train loss:5.138604 +step:738 train loss:5.163072 +step:739 train loss:5.115755 +step:740 train loss:5.127515 +step:741 train loss:5.189760 +step:742 train loss:5.102368 +step:743 train loss:5.083990 +step:744 train loss:5.143181 +step:745 train loss:5.086720 +step:746 train loss:5.090744 +step:747 train loss:5.115229 +step:748 train loss:5.076126 +step:749 train loss:5.114274 +step:750 validation loss:5.098406 +step:750 train loss:5.077173 +step:751 train loss:5.085661 +step:752 train loss:5.035908 +step:753 train loss:5.090799 +step:754 train loss:5.099434 +step:755 train loss:5.136775 +step:756 train loss:5.120171 +step:757 train loss:5.212214 +step:758 train loss:5.087015 +step:759 train loss:5.092171 +step:760 train loss:5.063445 +step:761 train loss:5.103160 +step:762 train loss:5.082809 +step:763 train loss:5.071861 +step:764 train loss:5.047186 +step:765 train loss:5.057633 +step:766 train loss:5.133574 +step:767 train loss:5.222212 +step:768 train loss:5.063642 +step:769 train loss:5.099552 +step:770 train loss:5.120415 +step:771 train loss:5.170652 +step:772 train loss:5.110246 +step:773 train loss:5.052591 +step:774 train loss:5.104881 +step:775 train loss:5.079012 +step:776 train loss:5.084428 +step:777 train loss:5.047469 +step:778 train loss:5.062182 +step:779 train loss:5.037153 +step:780 train loss:5.086985 +step:781 train loss:5.017600 +step:782 train loss:5.044055 +step:783 train loss:5.030413 +step:784 train loss:5.043571 +step:785 train loss:5.007130 +step:786 train loss:5.045348 +step:787 train loss:4.995906 +step:788 train loss:5.051730 +step:789 train loss:5.052012 +step:790 train loss:5.004940 +step:791 train loss:5.092849 +step:792 train loss:5.096661 +step:793 train loss:5.059546 +step:794 train loss:5.059903 +step:795 train loss:5.017348 +step:796 train loss:5.250653 +step:797 train loss:5.049243 +step:798 train loss:5.035783 +step:799 train loss:5.035134 +step:800 train loss:5.110557 +step:801 train loss:5.040326 +step:802 train 
loss:5.143456 +step:803 train loss:5.059903 +step:804 train loss:5.001726 +step:805 train loss:5.061815 +step:806 train loss:4.967371 +step:807 train loss:5.029794 +step:808 train loss:5.039186 +step:809 train loss:5.005479 +step:810 train loss:4.979836 +step:811 train loss:5.076469 +step:812 train loss:5.037545 +step:813 train loss:5.040041 +step:814 train loss:5.091078 +step:815 train loss:5.058013 +step:816 train loss:4.994814 +step:817 train loss:5.023875 +step:818 train loss:5.000263 +step:819 train loss:4.993357 +step:820 train loss:5.007509 +step:821 train loss:4.951258 +step:822 train loss:4.948978 +step:823 train loss:5.027268 +step:824 train loss:4.943841 +step:825 train loss:4.924222 +step:826 train loss:4.972141 +step:827 train loss:4.916810 +step:828 train loss:4.982953 +step:829 train loss:4.983109 +step:830 train loss:4.990963 +step:831 train loss:5.007052 +step:832 train loss:5.060124 +step:833 train loss:5.020980 +step:834 train loss:5.009864 +step:835 train loss:4.982224 +step:836 train loss:4.970876 +step:837 train loss:4.939987 +step:838 train loss:4.951174 +step:839 train loss:4.948450 +step:840 train loss:4.984429 +step:841 train loss:4.957067 +step:842 train loss:4.970508 +step:843 train loss:4.964041 +step:844 train loss:4.947639 +step:845 train loss:4.932706 +step:846 train loss:5.003390 +step:847 train loss:4.973156 +step:848 train loss:4.939895 +step:849 train loss:4.994151 +step:850 train loss:4.989120 +step:851 train loss:4.958657 +step:852 train loss:5.009649 +step:853 train loss:4.914958 +step:854 train loss:4.965494 +step:855 train loss:4.945763 +step:856 train loss:4.903899 +step:857 train loss:4.946123 +step:858 train loss:4.976900 +step:859 train loss:4.942878 +step:860 train loss:4.944510 +step:861 train loss:4.983411 +step:862 train loss:4.932161 +step:863 train loss:4.953798 +step:864 train loss:4.928095 +step:865 train loss:4.950428 +step:866 train loss:4.956879 +step:867 train loss:5.026975 +step:868 train loss:4.926127 +step:869 train loss:4.946701 +step:870 train loss:4.913817 +step:871 train loss:4.917247 +step:872 train loss:4.922287 +step:873 train loss:4.923660 +step:874 train loss:4.922142 +step:875 train loss:4.831594 +step:876 train loss:4.956837 +step:877 train loss:4.849495 +step:878 train loss:4.957534 +step:879 train loss:4.906827 +step:880 train loss:4.972218 +step:881 train loss:4.930245 +step:882 train loss:4.894958 +step:883 train loss:4.920415 +step:884 train loss:4.935636 +step:885 train loss:4.889336 +step:886 train loss:4.864975 +step:887 train loss:4.901562 +step:888 train loss:4.998676 +step:889 train loss:4.935493 +step:890 train loss:4.890441 +step:891 train loss:4.850176 +step:892 train loss:4.817071 +step:893 train loss:4.895861 +step:894 train loss:4.874730 +step:895 train loss:4.860663 +step:896 train loss:4.937190 +step:897 train loss:4.870861 +step:898 train loss:4.883227 +step:899 train loss:4.886551 +step:900 train loss:4.941650 +step:901 train loss:4.850204 +step:902 train loss:4.892175 +step:903 train loss:4.954428 +step:904 train loss:4.977251 +step:905 train loss:4.869802 +step:906 train loss:4.882493 +step:907 train loss:4.883384 +step:908 train loss:4.910227 +step:909 train loss:4.873565 +step:910 train loss:4.891009 +step:911 train loss:5.008814 +step:912 train loss:4.836914 +step:913 train loss:4.880666 +step:914 train loss:4.871774 +step:915 train loss:4.878485 +step:916 train loss:4.950629 +step:917 train loss:4.880791 +step:918 train loss:4.945194 +step:919 train loss:5.015298 +step:920 train loss:4.802628 
+step:921 train loss:4.889709 +step:922 train loss:4.862563 +step:923 train loss:4.825710 +step:924 train loss:4.842693 +step:925 train loss:4.790578 +step:926 train loss:4.884814 +step:927 train loss:4.823787 +step:928 train loss:4.867198 +step:929 train loss:4.856044 +step:930 train loss:4.853297 +step:931 train loss:4.898053 +step:932 train loss:4.860642 +step:933 train loss:4.855263 +step:934 train loss:4.898829 +step:935 train loss:4.877196 +step:936 train loss:4.876708 +step:937 train loss:4.876424 +step:938 train loss:4.890691 +step:939 train loss:4.760968 +step:940 train loss:4.847416 +step:941 train loss:4.791131 +step:942 train loss:4.773578 +step:943 train loss:4.861287 +step:944 train loss:4.816781 +step:945 train loss:4.841676 +step:946 train loss:4.877318 +step:947 train loss:4.973272 +step:948 train loss:4.801242 +step:949 train loss:4.856507 +step:950 train loss:4.807925 +step:951 train loss:4.817366 +step:952 train loss:4.882266 +step:953 train loss:4.832271 +step:954 train loss:4.841856 +step:955 train loss:4.778567 +step:956 train loss:4.798418 +step:957 train loss:4.802984 +step:958 train loss:4.876308 +step:959 train loss:4.822022 +step:960 train loss:4.894289 +step:961 train loss:4.879452 +step:962 train loss:4.812576 +step:963 train loss:4.797565 +step:964 train loss:4.842830 +step:965 train loss:4.764549 +step:966 train loss:4.780210 +step:967 train loss:4.828715 +step:968 train loss:4.817765 +step:969 train loss:4.769454 +step:970 train loss:4.846008 +step:971 train loss:4.800325 +step:972 train loss:4.751282 +step:973 train loss:4.818839 +step:974 train loss:4.774348 +step:975 train loss:4.857599 +step:976 train loss:4.801561 +step:977 train loss:4.797898 +step:978 train loss:4.820722 +step:979 train loss:4.783973 +step:980 train loss:4.788622 +step:981 train loss:4.787744 +step:982 train loss:4.768169 +step:983 train loss:4.773517 +step:984 train loss:4.819588 +step:985 train loss:4.795962 +step:986 train loss:4.781471 +step:987 train loss:4.825596 +step:988 train loss:4.809739 +step:989 train loss:4.773796 +step:990 train loss:4.762762 +step:991 train loss:4.718925 +step:992 train loss:4.755651 +step:993 train loss:4.782290 +step:994 train loss:4.717810 +step:995 train loss:4.725930 +step:996 train loss:4.782775 +step:997 train loss:4.745508 +step:998 train loss:4.744831 +step:999 train loss:4.772534 +step:1000 validation loss:4.751392 total_sharp:2.9197e-02 L1_sharp:4.6498e+00 L2_sharp:1.7445e+00 L3_sharp:8.0255e-01 L4_sharp:5.4334e-01 L5_sharp:4.2685e-01 L6_sharp:2.9466e-01 L7_sharp:1.7220e-01 L8_sharp:1.0112e-01 L9_sharp:7.7478e-02 L10_sharp:6.2584e-02 L11_sharp:5.3583e-02 L12_sharp:4.2614e-02 total_fnorm:1.2498e+00 total_l1_linf:6.8977e+03 total_spectral:1.2498e+00 L1_fnorm:3.1134e-02 L2_fnorm:2.7236e-02 L3_fnorm:2.6406e-02 L4_fnorm:2.9058e-02 L5_fnorm:2.9546e-02 L6_fnorm:3.0320e-02 L7_fnorm:3.0809e-02 L8_fnorm:3.1036e-02 L9_fnorm:3.1510e-02 L10_fnorm:3.1750e-02 L11_fnorm:3.1608e-02 L12_fnorm:3.2126e-02 L1_l1linf:2.2141e-01 L2_l1linf:2.0899e-01 L3_l1linf:2.0319e-01 L4_l1linf:2.1592e-01 L5_l1linf:2.2993e-01 L6_l1linf:2.2710e-01 L7_l1linf:2.2836e-01 L8_l1linf:2.2513e-01 L9_l1linf:2.2553e-01 L10_l1linf:2.2782e-01 L11_l1linf:2.1319e-01 L12_l1linf:2.1392e-01 L1_spectral:4.7994e-03 L2_spectral:4.7027e-03 L3_spectral:4.6702e-03 L4_spectral:4.8743e-03 L5_spectral:5.1665e-03 L6_spectral:5.1130e-03 L7_spectral:5.6606e-03 L8_spectral:6.2605e-03 L9_spectral:6.3713e-03 L10_spectral:6.7836e-03 L11_spectral:6.4181e-03 L12_spectral:8.3119e-03 ip_v_neg_g:2.2874e-02 
cos_v_neg_g:1.6851e-03 v_norm:1.2498e+00 g_norm:1.0861e+01 hv_norm:9.1481e+00 cos_v_hv:3.9889e-03 hg_norm:1.6254e+04 cos_g_hg:6.9367e-01 v_par:1.0081e-04 v_perp:1.2498e+00 L1_cos_v_neg_g:3.4660e-02 L1_v_norm:3.1134e-02 L2_cos_v_neg_g:2.7815e-02 L2_v_norm:2.7236e-02 L3_cos_v_neg_g:2.3214e-02 L3_v_norm:2.6406e-02 L4_cos_v_neg_g:2.0743e-02 L4_v_norm:2.9058e-02 L5_cos_v_neg_g:2.0112e-02 L5_v_norm:2.9546e-02 L6_cos_v_neg_g:1.8367e-02 L6_v_norm:3.0320e-02 L7_cos_v_neg_g:1.4893e-02 L7_v_norm:3.0809e-02 L8_cos_v_neg_g:1.1860e-02 L8_v_norm:3.1036e-02 L9_cos_v_neg_g:1.1073e-02 L9_v_norm:3.1510e-02 L10_cos_v_neg_g:1.0573e-02 L10_v_norm:3.1750e-02 L11_cos_v_neg_g:9.1964e-03 L11_v_norm:3.1608e-02 L12_cos_v_neg_g:7.3098e-03 L12_v_norm:3.2126e-02 +step:1000 train loss:4.789534 +step:1001 train loss:4.781324 +step:1002 train loss:4.784975 +step:1003 train loss:4.769719 +step:1004 train loss:4.740696 +step:1005 train loss:4.747326 +step:1006 train loss:4.817421 +step:1007 train loss:4.801091 +step:1008 train loss:4.733952 +step:1009 train loss:4.795108 +step:1010 train loss:4.775178 +step:1011 train loss:4.790092 +step:1012 train loss:4.738381 +step:1013 train loss:4.709949 +step:1014 train loss:4.712282 +step:1015 train loss:4.747246 +step:1016 train loss:4.747314 +step:1017 train loss:4.711575 +step:1018 train loss:4.757606 +step:1019 train loss:4.752942 +step:1020 train loss:4.725232 +step:1021 train loss:4.792965 +step:1022 train loss:4.718423 +step:1023 train loss:4.719986 +step:1024 train loss:4.796193 +step:1025 train loss:4.741906 +step:1026 train loss:4.706951 +step:1027 train loss:4.745187 +step:1028 train loss:4.745886 +step:1029 train loss:4.694141 +step:1030 train loss:4.753648 +step:1031 train loss:4.764527 +step:1032 train loss:4.707627 +step:1033 train loss:4.684446 +step:1034 train loss:4.746589 +step:1035 train loss:4.750571 +step:1036 train loss:4.661676 +step:1037 train loss:4.722009 +step:1038 train loss:4.745321 +step:1039 train loss:4.868698 +step:1040 train loss:4.697172 +step:1041 train loss:4.707714 +step:1042 train loss:4.741730 +step:1043 train loss:4.722268 +step:1044 train loss:4.720145 +step:1045 train loss:4.728565 +step:1046 train loss:4.686992 +step:1047 train loss:4.701265 +step:1048 train loss:4.707217 +step:1049 train loss:4.754626 +step:1050 train loss:4.719038 +step:1051 train loss:4.688035 +step:1052 train loss:4.778455 +step:1053 train loss:4.685508 +step:1054 train loss:4.687535 +step:1055 train loss:4.746002 +step:1056 train loss:4.688107 +step:1057 train loss:4.586381 +step:1058 train loss:4.694298 +step:1059 train loss:4.692369 +step:1060 train loss:4.671046 +step:1061 train loss:4.746517 +step:1062 train loss:4.685595 +step:1063 train loss:4.692908 +step:1064 train loss:4.669658 +step:1065 train loss:4.693683 +step:1066 train loss:4.659912 +step:1067 train loss:4.691042 +step:1068 train loss:4.669646 +step:1069 train loss:4.677773 +step:1070 train loss:4.668822 +step:1071 train loss:4.701565 +step:1072 train loss:4.699486 +step:1073 train loss:4.615841 +step:1074 train loss:4.650339 +step:1075 train loss:4.672989 +step:1076 train loss:4.729897 +step:1077 train loss:4.660528 +step:1078 train loss:4.689057 +step:1079 train loss:4.754323 +step:1080 train loss:4.629430 +step:1081 train loss:4.683141 +step:1082 train loss:4.689528 +step:1083 train loss:4.648127 +step:1084 train loss:4.614920 +step:1085 train loss:4.670679 +step:1086 train loss:4.696550 +step:1087 train loss:4.647837 +step:1088 train loss:4.655899 +step:1089 train loss:4.659695 +step:1090 train 
loss:4.606402 +step:1091 train loss:4.592926 +step:1092 train loss:4.733532 +step:1093 train loss:4.622684 +step:1094 train loss:4.655145 +step:1095 train loss:4.696987 +step:1096 train loss:4.638495 +step:1097 train loss:4.626081 +step:1098 train loss:4.603747 +step:1099 train loss:4.645736 +step:1100 train loss:4.669889 +step:1101 train loss:4.683996 +step:1102 train loss:4.687062 +step:1103 train loss:4.638610 +step:1104 train loss:4.661931 +step:1105 train loss:4.706330 +step:1106 train loss:4.638330 +step:1107 train loss:4.748894 +step:1108 train loss:4.705567 +step:1109 train loss:4.660490 +step:1110 train loss:4.622129 +step:1111 train loss:4.684193 +step:1112 train loss:4.618419 +step:1113 train loss:4.573580 +step:1114 train loss:4.568773 +step:1115 train loss:4.607697 +step:1116 train loss:4.686794 +step:1117 train loss:4.679179 +step:1118 train loss:4.726847 +step:1119 train loss:4.681493 +step:1120 train loss:4.652667 +step:1121 train loss:4.632239 +step:1122 train loss:4.620355 +step:1123 train loss:4.713553 +step:1124 train loss:4.594381 +step:1125 train loss:4.633731 +step:1126 train loss:4.577982 +step:1127 train loss:4.602400 +step:1128 train loss:4.605913 +step:1129 train loss:4.658302 +step:1130 train loss:4.570210 +step:1131 train loss:4.663069 +step:1132 train loss:4.609488 +step:1133 train loss:4.613149 +step:1134 train loss:4.601697 +step:1135 train loss:4.641838 +step:1136 train loss:4.651671 +step:1137 train loss:4.568167 +step:1138 train loss:4.637459 +step:1139 train loss:4.611429 +step:1140 train loss:4.688076 +step:1141 train loss:4.631613 +step:1142 train loss:4.591497 +step:1143 train loss:4.643495 +step:1144 train loss:4.682156 +step:1145 train loss:4.633897 +step:1146 train loss:4.573114 +step:1147 train loss:4.589611 +step:1148 train loss:4.600463 +step:1149 train loss:4.649630 +step:1150 train loss:4.670251 +step:1151 train loss:4.675146 +step:1152 train loss:4.579769 +step:1153 train loss:4.584498 +step:1154 train loss:4.569383 +step:1155 train loss:4.669037 +step:1156 train loss:4.572190 +step:1157 train loss:4.592238 +step:1158 train loss:4.696913 +step:1159 train loss:4.630641 +step:1160 train loss:4.563989 +step:1161 train loss:4.662614 +step:1162 train loss:4.594445 +step:1163 train loss:4.584687 +step:1164 train loss:4.488555 +step:1165 train loss:4.631964 +step:1166 train loss:4.547971 +step:1167 train loss:4.543452 +step:1168 train loss:4.619944 +step:1169 train loss:4.582091 +step:1170 train loss:4.580628 +step:1171 train loss:4.601415 +step:1172 train loss:4.585045 +step:1173 train loss:4.599675 +step:1174 train loss:4.531169 +step:1175 train loss:4.557698 +step:1176 train loss:4.684295 +step:1177 train loss:4.515759 +step:1178 train loss:4.570763 +step:1179 train loss:4.547927 +step:1180 train loss:4.580887 +step:1181 train loss:4.548092 +step:1182 train loss:4.636505 +step:1183 train loss:4.605092 +step:1184 train loss:4.521856 +step:1185 train loss:4.580453 +step:1186 train loss:4.545683 +step:1187 train loss:4.505641 +step:1188 train loss:4.540081 +step:1189 train loss:4.519679 +step:1190 train loss:4.545757 +step:1191 train loss:4.600023 +step:1192 train loss:4.566471 +step:1193 train loss:4.579903 +step:1194 train loss:4.671207 +step:1195 train loss:4.650539 +step:1196 train loss:4.558432 +step:1197 train loss:4.571089 +step:1198 train loss:4.536395 +step:1199 train loss:4.551085 +step:1200 train loss:4.628578 +step:1201 train loss:4.573347 +step:1202 train loss:4.509559 +step:1203 train loss:4.509097 +step:1204 train loss:4.558484 
+step:1205 train loss:4.533391 +step:1206 train loss:4.511638 +step:1207 train loss:4.599296 +step:1208 train loss:4.573358 +step:1209 train loss:4.483409 +step:1210 train loss:4.578060 +step:1211 train loss:4.535210 +step:1212 train loss:4.545751 +step:1213 train loss:4.485648 +step:1214 train loss:4.590622 +step:1215 train loss:4.557121 +step:1216 train loss:4.548944 +step:1217 train loss:4.544991 +step:1218 train loss:4.559666 +step:1219 train loss:4.505245 +step:1220 train loss:4.515806 +step:1221 train loss:4.546000 +step:1222 train loss:4.602859 +step:1223 train loss:4.556944 +step:1224 train loss:4.524358 +step:1225 train loss:4.577866 +step:1226 train loss:4.513829 +step:1227 train loss:4.557326 +step:1228 train loss:4.513960 +step:1229 train loss:4.510077 +step:1230 train loss:4.496742 +step:1231 train loss:4.533429 +step:1232 train loss:4.491482 +step:1233 train loss:4.486475 +step:1234 train loss:4.579751 +step:1235 train loss:4.567892 +step:1236 train loss:4.457187 +step:1237 train loss:4.549883 +step:1238 train loss:4.507257 +step:1239 train loss:4.550811 +step:1240 train loss:4.436846 +step:1241 train loss:4.491832 +step:1242 train loss:4.505518 +step:1243 train loss:4.469746 +step:1244 train loss:4.572762 +step:1245 train loss:4.585180 +step:1246 train loss:4.516731 +step:1247 train loss:4.485537 +step:1248 train loss:4.515997 +step:1249 train loss:4.476745 +step:1250 validation loss:4.479740 +step:1250 train loss:4.474624 +step:1251 train loss:4.536600 +step:1252 train loss:4.487288 +step:1253 train loss:4.454149 +step:1254 train loss:4.476094 +step:1255 train loss:4.473726 +step:1256 train loss:4.514005 +step:1257 train loss:4.496377 +step:1258 train loss:4.544106 +step:1259 train loss:4.547778 +step:1260 train loss:4.430227 +step:1261 train loss:4.682065 +step:1262 train loss:4.513402 +step:1263 train loss:4.480283 +step:1264 train loss:4.474767 +step:1265 train loss:4.579306 +step:1266 train loss:4.480596 +step:1267 train loss:4.494557 +step:1268 train loss:4.510295 +step:1269 train loss:4.496248 +step:1270 train loss:4.429606 +step:1271 train loss:4.440870 +step:1272 train loss:4.456287 +step:1273 train loss:4.519547 +step:1274 train loss:4.482869 +step:1275 train loss:4.503808 +step:1276 train loss:4.507536 +step:1277 train loss:4.508317 +step:1278 train loss:4.443506 +step:1279 train loss:4.468202 +step:1280 train loss:4.475267 +step:1281 train loss:4.542564 +step:1282 train loss:4.438602 +step:1283 train loss:4.534873 +step:1284 train loss:4.485564 +step:1285 train loss:4.512835 +step:1286 train loss:4.419249 +step:1287 train loss:4.468435 +step:1288 train loss:4.498653 +step:1289 train loss:4.548960 +step:1290 train loss:4.482002 +step:1291 train loss:4.482852 +step:1292 train loss:4.464247 +step:1293 train loss:4.420082 +step:1294 train loss:4.483273 +step:1295 train loss:4.461308 +step:1296 train loss:4.518147 +step:1297 train loss:4.469436 +step:1298 train loss:4.488440 +step:1299 train loss:4.526464 +step:1300 train loss:4.442106 +step:1301 train loss:4.479500 +step:1302 train loss:4.440655 +step:1303 train loss:4.479793 +step:1304 train loss:4.511842 +step:1305 train loss:4.482598 +step:1306 train loss:4.482229 +step:1307 train loss:4.469268 +step:1308 train loss:4.419921 +step:1309 train loss:4.432071 +step:1310 train loss:4.404895 +step:1311 train loss:4.447430 +step:1312 train loss:4.490811 +step:1313 train loss:4.408852 +step:1314 train loss:4.415348 +step:1315 train loss:4.474955 +step:1316 train loss:4.425449 +step:1317 train loss:4.348661 +step:1318 
train loss:4.482227 +step:1319 train loss:4.522759 +step:1320 train loss:4.438686 +step:1321 train loss:4.395136 +step:1322 train loss:4.503263 +step:1323 train loss:4.463943 +step:1324 train loss:4.555681 +step:1325 train loss:4.448079 +step:1326 train loss:4.486387 +step:1327 train loss:4.488910 +step:1328 train loss:4.405052 +step:1329 train loss:4.431450 +step:1330 train loss:4.448790 +step:1331 train loss:4.284258 +step:1332 train loss:4.498492 +step:1333 train loss:4.434628 +step:1334 train loss:4.464543 +step:1335 train loss:4.494043 +step:1336 train loss:4.479979 +step:1337 train loss:4.450953 +step:1338 train loss:4.426285 +step:1339 train loss:4.521622 +step:1340 train loss:4.461511 +step:1341 train loss:4.452038 +step:1342 train loss:4.429807 +step:1343 train loss:4.403484 +step:1344 train loss:4.473963 +step:1345 train loss:4.435202 +step:1346 train loss:4.523343 +step:1347 train loss:4.441270 +step:1348 train loss:4.422372 +step:1349 train loss:4.365468 +step:1350 train loss:4.378334 +step:1351 train loss:4.456658 +step:1352 train loss:4.414970 +step:1353 train loss:4.392101 +step:1354 train loss:4.412324 +step:1355 train loss:4.486021 +step:1356 train loss:4.403955 +step:1357 train loss:4.412049 +step:1358 train loss:4.408309 +step:1359 train loss:4.409665 +step:1360 train loss:4.443441 +step:1361 train loss:4.552739 +step:1362 train loss:4.479043 +step:1363 train loss:4.361055 +step:1364 train loss:4.397487 +step:1365 train loss:4.382727 +step:1366 train loss:4.415121 +step:1367 train loss:4.348864 +step:1368 train loss:4.379649 +step:1369 train loss:4.407832 +step:1370 train loss:4.434698 +step:1371 train loss:4.389376 +step:1372 train loss:4.411788 +step:1373 train loss:4.438819 +step:1374 train loss:4.451545 +step:1375 train loss:4.432693 +step:1376 train loss:4.424998 +step:1377 train loss:4.453062 +step:1378 train loss:4.411446 +step:1379 train loss:4.388373 +step:1380 train loss:4.460677 +step:1381 train loss:4.410910 +step:1382 train loss:4.378739 +step:1383 train loss:4.369179 +step:1384 train loss:4.510098 +step:1385 train loss:4.347894 +step:1386 train loss:4.404848 +step:1387 train loss:4.412907 +step:1388 train loss:4.375492 +step:1389 train loss:4.370128 +step:1390 train loss:4.394846 +step:1391 train loss:4.424310 +step:1392 train loss:4.391291 +step:1393 train loss:4.462507 +step:1394 train loss:4.376567 +step:1395 train loss:4.420954 +step:1396 train loss:4.404816 +step:1397 train loss:4.418354 +step:1398 train loss:4.422255 +step:1399 train loss:4.390929 +step:1400 train loss:4.371225 +step:1401 train loss:4.369589 +step:1402 train loss:4.366095 +step:1403 train loss:4.339993 +step:1404 train loss:4.390567 +step:1405 train loss:4.354899 +step:1406 train loss:4.379502 +step:1407 train loss:4.372546 +step:1408 train loss:4.367689 +step:1409 train loss:4.347985 +step:1410 train loss:4.369073 +step:1411 train loss:4.389955 +step:1412 train loss:4.461426 +step:1413 train loss:4.374913 +step:1414 train loss:4.399679 +step:1415 train loss:4.356143 +step:1416 train loss:4.411476 +step:1417 train loss:4.382691 +step:1418 train loss:4.322553 +step:1419 train loss:4.309372 +step:1420 train loss:4.359182 +step:1421 train loss:4.399870 +step:1422 train loss:4.378467 +step:1423 train loss:4.467927 +step:1424 train loss:4.358694 +step:1425 train loss:4.329499 +step:1426 train loss:4.357762 +step:1427 train loss:4.358712 +step:1428 train loss:4.335649 +step:1429 train loss:4.354837 +step:1430 train loss:4.344174 +step:1431 train loss:4.377088 +step:1432 train loss:4.356701 
+step:1433 train loss:4.345761 +step:1434 train loss:4.313746 +step:1435 train loss:4.318250 +step:1436 train loss:4.372600 +step:1437 train loss:4.317882 +step:1438 train loss:4.328093 +step:1439 train loss:4.295891 +step:1440 train loss:4.334808 +step:1441 train loss:4.407922 +step:1442 train loss:4.372640 +step:1443 train loss:4.307623 +step:1444 train loss:4.325076 +step:1445 train loss:4.337389 +step:1446 train loss:4.340475 +step:1447 train loss:4.347108 +step:1448 train loss:4.321146 +step:1449 train loss:4.354616 +step:1450 train loss:4.364108 +step:1451 train loss:4.283395 +step:1452 train loss:4.343142 +step:1453 train loss:4.332347 +step:1454 train loss:4.332511 +step:1455 train loss:4.258604 +step:1456 train loss:4.351982 +step:1457 train loss:4.273708 +step:1458 train loss:4.419784 +step:1459 train loss:4.335208 +step:1460 train loss:4.305483 +step:1461 train loss:4.357545 +step:1462 train loss:4.354705 +step:1463 train loss:4.338242 +step:1464 train loss:4.302088 +step:1465 train loss:4.310353 +step:1466 train loss:4.268057 +step:1467 train loss:4.400190 +step:1468 train loss:4.287282 +step:1469 train loss:4.365482 +step:1470 train loss:4.296739 +step:1471 train loss:4.317526 +step:1472 train loss:4.297153 +step:1473 train loss:4.308241 +step:1474 train loss:4.254769 +step:1475 train loss:4.309501 +step:1476 train loss:4.386251 +step:1477 train loss:4.331413 +step:1478 train loss:4.271866 +step:1479 train loss:4.295698 +step:1480 train loss:4.297142 +step:1481 train loss:4.268233 +step:1482 train loss:4.327935 +step:1483 train loss:4.315246 +step:1484 train loss:4.356039 +step:1485 train loss:4.363972 +step:1486 train loss:4.309916 +step:1487 train loss:4.292642 +step:1488 train loss:4.300761 +step:1489 train loss:4.287539 +step:1490 train loss:4.347871 +step:1491 train loss:4.326632 +step:1492 train loss:4.333361 +step:1493 train loss:4.279402 +step:1494 train loss:4.309176 +step:1495 train loss:4.279863 +step:1496 train loss:4.261222 +step:1497 train loss:4.337221 +step:1498 train loss:4.242151 +step:1499 train loss:4.279726 +step:1500 validation loss:4.270115 total_sharp:2.2448e-02 L1_sharp:1.6989e+00 L2_sharp:1.0697e+00 L3_sharp:7.4411e-01 L4_sharp:4.9081e-01 L5_sharp:4.2790e-01 L6_sharp:3.7505e-01 L7_sharp:2.9644e-01 L8_sharp:1.7456e-01 L9_sharp:1.2596e-01 L10_sharp:9.9290e-02 L11_sharp:1.1109e-01 L12_sharp:2.1871e-01 total_fnorm:1.3152e+00 total_l1_linf:7.2542e+03 total_spectral:1.3152e+00 L1_fnorm:3.1735e-02 L2_fnorm:2.8140e-02 L3_fnorm:2.6837e-02 L4_fnorm:2.9174e-02 L5_fnorm:2.9844e-02 L6_fnorm:3.0823e-02 L7_fnorm:3.1466e-02 L8_fnorm:3.1607e-02 L9_fnorm:3.1894e-02 L10_fnorm:3.1985e-02 L11_fnorm:3.1592e-02 L12_fnorm:3.2112e-02 L1_l1linf:2.3566e-01 L2_l1linf:2.3442e-01 L3_l1linf:2.4952e-01 L4_l1linf:2.4837e-01 L5_l1linf:2.5969e-01 L6_l1linf:2.6451e-01 L7_l1linf:2.6580e-01 L8_l1linf:2.6954e-01 L9_l1linf:2.6393e-01 L10_l1linf:2.6595e-01 L11_l1linf:2.6815e-01 L12_l1linf:2.8746e-01 L1_spectral:5.2332e-03 L2_spectral:5.2399e-03 L3_spectral:5.5870e-03 L4_spectral:5.5504e-03 L5_spectral:5.8170e-03 L6_spectral:5.8987e-03 L7_spectral:5.9728e-03 L8_spectral:6.0012e-03 L9_spectral:5.8965e-03 L10_spectral:6.1120e-03 L11_spectral:5.9954e-03 L12_spectral:6.2949e-03 ip_v_neg_g:1.7226e-02 cos_v_neg_g:1.2532e-03 v_norm:1.3152e+00 g_norm:1.0451e+01 hv_norm:7.3191e+00 cos_v_hv:4.0337e-03 hg_norm:1.5587e+04 cos_g_hg:6.5535e-01 v_par:6.6947e-05 v_perp:1.3152e+00 L1_cos_v_neg_g:2.7042e-02 L1_v_norm:3.1735e-02 L2_cos_v_neg_g:2.4509e-02 L2_v_norm:2.8140e-02 L3_cos_v_neg_g:2.1363e-02 
L3_v_norm:2.6837e-02 L4_cos_v_neg_g:1.6065e-02 L4_v_norm:2.9174e-02 L5_cos_v_neg_g:1.5473e-02 L5_v_norm:2.9844e-02 L6_cos_v_neg_g:1.3304e-02 L6_v_norm:3.0823e-02 L7_cos_v_neg_g:1.1688e-02 L7_v_norm:3.1466e-02 L8_cos_v_neg_g:1.0229e-02 L8_v_norm:3.1607e-02 L9_cos_v_neg_g:8.2295e-03 L9_v_norm:3.1894e-02 L10_cos_v_neg_g:6.5498e-03 L10_v_norm:3.1985e-02 L11_cos_v_neg_g:5.0824e-03 L11_v_norm:3.1592e-02 L12_cos_v_neg_g:3.9140e-03 L12_v_norm:3.2112e-02 +step:1500 train loss:4.286651 +step:1501 train loss:4.295067 +step:1502 train loss:4.246434 +step:1503 train loss:4.290878 +step:1504 train loss:4.255891 +step:1505 train loss:4.233915 +step:1506 train loss:4.217245 +step:1507 train loss:4.251380 +step:1508 train loss:4.257171 +step:1509 train loss:4.309532 +step:1510 train loss:4.243934 +step:1511 train loss:4.282294 +step:1512 train loss:4.241887 +step:1513 train loss:4.322577 +step:1514 train loss:4.270034 +step:1515 train loss:4.331491 +step:1516 train loss:4.249623 +step:1517 train loss:4.277683 +step:1518 train loss:4.346778 +step:1519 train loss:4.305123 +step:1520 train loss:4.348304 +step:1521 train loss:4.272876 +step:1522 train loss:4.317239 +step:1523 train loss:4.324676 +step:1524 train loss:4.229871 +step:1525 train loss:4.308757 +step:1526 train loss:4.224067 +step:1527 train loss:4.295174 +step:1528 train loss:4.327380 +step:1529 train loss:4.299811 +step:1530 train loss:4.324311 +step:1531 train loss:4.250576 +step:1532 train loss:4.322657 +step:1533 train loss:4.298191 +step:1534 train loss:4.240330 +step:1535 train loss:4.300096 +step:1536 train loss:4.323311 +step:1537 train loss:4.282315 +step:1538 train loss:4.262020 +step:1539 train loss:4.273775 +step:1540 train loss:4.279491 +step:1541 train loss:4.256307 +step:1542 train loss:4.341640 +step:1543 train loss:4.354483 +step:1544 train loss:4.233097 +step:1545 train loss:4.217335 +step:1546 train loss:4.250809 +step:1547 train loss:4.247050 +step:1548 train loss:4.282395 +step:1549 train loss:4.222939 +step:1550 train loss:4.320235 +step:1551 train loss:4.257460 +step:1552 train loss:4.292617 +step:1553 train loss:4.292732 +step:1554 train loss:4.302559 +step:1555 train loss:4.260978 +step:1556 train loss:4.244890 +step:1557 train loss:4.250273 +step:1558 train loss:4.272011 +step:1559 train loss:4.243732 +step:1560 train loss:4.322037 +step:1561 train loss:4.290198 +step:1562 train loss:4.188282 +step:1563 train loss:4.172483 +step:1564 train loss:4.281270 +step:1565 train loss:4.275046 +step:1566 train loss:4.281542 +step:1567 train loss:4.293190 +step:1568 train loss:4.237508 +step:1569 train loss:4.234231 +step:1570 train loss:4.243496 +step:1571 train loss:4.230311 +step:1572 train loss:4.236264 +step:1573 train loss:4.272228 +step:1574 train loss:4.230179 +step:1575 train loss:4.257163 +step:1576 train loss:4.207924 +step:1577 train loss:4.233107 +step:1578 train loss:4.217816 +step:1579 train loss:4.293964 +step:1580 train loss:4.244674 +step:1581 train loss:4.279337 +step:1582 train loss:4.273156 +step:1583 train loss:4.256394 +step:1584 train loss:4.181200 +step:1585 train loss:4.274447 +step:1586 train loss:4.227259 +step:1587 train loss:4.245940 +step:1588 train loss:4.226438 +step:1589 train loss:4.271494 +step:1590 train loss:4.181663 +step:1591 train loss:4.249403 +step:1592 train loss:4.195627 +step:1593 train loss:4.232290 +step:1594 train loss:4.228574 +step:1595 train loss:4.225120 +step:1596 train loss:4.229558 +step:1597 train loss:4.162161 +step:1598 train loss:4.260116 +step:1599 train loss:4.272593 
+step:1600 train loss:4.163925 +step:1601 train loss:4.226579 +step:1602 train loss:4.279290 +step:1603 train loss:4.273879 +step:1604 train loss:4.205388 +step:1605 train loss:4.251493 +step:1606 train loss:4.298731 +step:1607 train loss:4.190545 +step:1608 train loss:4.217364 +step:1609 train loss:4.227429 +step:1610 train loss:4.289104 +step:1611 train loss:4.212567 +step:1612 train loss:4.144677 +step:1613 train loss:4.207197 +step:1614 train loss:4.315917 +step:1615 train loss:4.254692 +step:1616 train loss:4.261464 +step:1617 train loss:4.224976 +step:1618 train loss:4.235828 +step:1619 train loss:4.401927 +step:1620 train loss:4.195069 +step:1621 train loss:4.251746 +step:1622 train loss:4.181616 +step:1623 train loss:4.235808 +step:1624 train loss:4.213495 +step:1625 train loss:4.287640 +step:1626 train loss:4.183213 +step:1627 train loss:4.187593 +step:1628 train loss:4.208337 +step:1629 train loss:4.228004 +step:1630 train loss:4.253115 +step:1631 train loss:4.198271 +step:1632 train loss:4.178844 +step:1633 train loss:4.195224 +step:1634 train loss:4.239820 +step:1635 train loss:4.186494 +step:1636 train loss:4.180350 +step:1637 train loss:4.253133 +step:1638 train loss:4.343257 +step:1639 train loss:4.165909 +step:1640 train loss:4.234961 +step:1641 train loss:4.206204 +step:1642 train loss:4.283715 +step:1643 train loss:4.188717 +step:1644 train loss:4.213545 +step:1645 train loss:4.187384 +step:1646 train loss:4.262837 +step:1647 train loss:4.161650 +step:1648 train loss:4.222507 +step:1649 train loss:4.187750 +step:1650 train loss:4.203682 +step:1651 train loss:4.223656 +step:1652 train loss:4.231006 +step:1653 train loss:4.241725 +step:1654 train loss:4.228141 +step:1655 train loss:4.213274 +step:1656 train loss:4.202424 +step:1657 train loss:4.204010 +step:1658 train loss:4.175660 +step:1659 train loss:4.247543 +step:1660 train loss:4.150042 +step:1661 train loss:4.254601 +step:1662 train loss:4.198935 +step:1663 train loss:4.188052 +step:1664 train loss:4.281754 +step:1665 train loss:4.203833 +step:1666 train loss:4.216477 +step:1667 train loss:4.225554 +step:1668 train loss:4.205875 +step:1669 train loss:4.176787 +step:1670 train loss:4.212866 +step:1671 train loss:4.214437 +step:1672 train loss:4.212422 +step:1673 train loss:4.176656 +step:1674 train loss:4.165165 +step:1675 train loss:4.216965 +step:1676 train loss:4.464100 +step:1677 train loss:4.228641 +step:1678 train loss:4.137408 +step:1679 train loss:4.261241 +step:1680 train loss:4.190398 +step:1681 train loss:4.243133 +step:1682 train loss:4.195636 +step:1683 train loss:4.195679 +step:1684 train loss:4.153075 +step:1685 train loss:4.204877 +step:1686 train loss:4.195753 +step:1687 train loss:4.199348 +step:1688 train loss:4.188827 +step:1689 train loss:4.171316 +step:1690 train loss:4.204648 +step:1691 train loss:4.175356 +step:1692 train loss:4.196373 +step:1693 train loss:4.155283 +step:1694 train loss:4.122044 +step:1695 train loss:4.143291 +step:1696 train loss:4.155686 +step:1697 train loss:4.201101 +step:1698 train loss:4.185384 +step:1699 train loss:4.156569 +step:1700 train loss:4.231026 +step:1701 train loss:4.171253 +step:1702 train loss:4.160048 +step:1703 train loss:4.181909 +step:1704 train loss:4.190611 +step:1705 train loss:4.199410 +step:1706 train loss:4.217250 +step:1707 train loss:4.208737 +step:1708 train loss:4.140417 +step:1709 train loss:4.230744 +step:1710 train loss:4.156146 +step:1711 train loss:4.156696 +step:1712 train loss:4.179251 +step:1713 train loss:4.148859 +step:1714 train 
loss:4.503780 +step:1715 train loss:4.168769 +step:1716 train loss:4.147033 +step:1717 train loss:4.149253 +step:1718 train loss:4.223719 +step:1719 train loss:4.143437 +step:1720 train loss:4.218164 +step:1721 train loss:4.158678 +step:1722 train loss:4.133128 +step:1723 train loss:4.223470 +step:1724 train loss:4.181722 +step:1725 train loss:4.173531 +step:1726 train loss:4.176255 +step:1727 train loss:4.201109 +step:1728 train loss:4.208944 +step:1729 train loss:4.129140 +step:1730 train loss:4.217904 +step:1731 train loss:4.144850 +step:1732 train loss:4.152789 +step:1733 train loss:4.143287 +step:1734 train loss:4.187247 +step:1735 train loss:4.252365 +step:1736 train loss:4.157081 +step:1737 train loss:4.192565 +step:1738 train loss:4.160565 +step:1739 train loss:4.217401 +step:1740 train loss:4.211634 +step:1741 train loss:4.257058 +step:1742 train loss:4.252444 +step:1743 train loss:4.143312 +step:1744 train loss:4.161622 +step:1745 train loss:4.144446 +step:1746 train loss:4.130619 +step:1747 train loss:4.170412 +step:1748 train loss:4.103612 +step:1749 train loss:4.145320 +step:1750 validation loss:4.129283 +step:1750 train loss:4.176626 +step:1751 train loss:4.200304 +step:1752 train loss:4.150596 +step:1753 train loss:4.184772 +step:1754 train loss:4.175609 +step:1755 train loss:4.171286 +step:1756 train loss:4.200041 +step:1757 train loss:4.200574 +step:1758 train loss:4.121120 +step:1759 train loss:4.207637 +step:1760 train loss:4.164518 +step:1761 train loss:4.132465 +step:1762 train loss:4.137343 +step:1763 train loss:4.141052 +step:1764 train loss:4.420728 +step:1765 train loss:4.143873 +step:1766 train loss:4.232209 +step:1767 train loss:4.148101 +step:1768 train loss:4.121034 +step:1769 train loss:4.133816 +step:1770 train loss:4.160495 +step:1771 train loss:4.131878 +step:1772 train loss:4.236736 +step:1773 train loss:4.166883 +step:1774 train loss:4.172692 +step:1775 train loss:4.284283 +step:1776 train loss:4.162028 +step:1777 train loss:4.144743 +step:1778 train loss:4.207520 +step:1779 train loss:4.129870 +step:1780 train loss:4.188053 +step:1781 train loss:4.193007 +step:1782 train loss:4.219344 +step:1783 train loss:4.153098 +step:1784 train loss:4.237442 +step:1785 train loss:4.149489 +step:1786 train loss:4.144612 +step:1787 train loss:4.143650 +step:1788 train loss:4.165219 +step:1789 train loss:4.116248 +step:1790 train loss:4.130954 +step:1791 train loss:4.201962 +step:1792 train loss:4.207359 +step:1793 train loss:4.124952 +step:1794 train loss:4.168596 +step:1795 train loss:4.124245 +step:1796 train loss:4.106728 +step:1797 train loss:4.171407 +step:1798 train loss:4.105147 +step:1799 train loss:4.167658 +step:1800 train loss:4.186770 +step:1801 train loss:4.179865 +step:1802 train loss:4.186809 +step:1803 train loss:4.178979 +step:1804 train loss:4.178355 +step:1805 train loss:4.161088 +step:1806 train loss:4.177898 +step:1807 train loss:4.113468 +step:1808 train loss:4.179034 +step:1809 train loss:4.150815 +step:1810 train loss:4.152789 +step:1811 train loss:4.159807 +step:1812 train loss:4.148443 +step:1813 train loss:4.162159 +step:1814 train loss:4.214660 +step:1815 train loss:4.164549 +step:1816 train loss:4.118751 +step:1817 train loss:4.107440 +step:1818 train loss:4.167006 +step:1819 train loss:4.130640 +step:1820 train loss:4.176474 +step:1821 train loss:4.132252 +step:1822 train loss:4.114469 +step:1823 train loss:4.109747 +step:1824 train loss:4.181839 +step:1825 train loss:4.088886 +step:1826 train loss:4.136732 +step:1827 train loss:4.107887 
+step:1828 train loss:4.151302 +step:1829 train loss:4.113457 +step:1830 train loss:4.306263 +step:1831 train loss:4.075212 +step:1832 train loss:4.108305 +step:1833 train loss:4.170137 +step:1834 train loss:4.112566 +step:1835 train loss:4.125306 +step:1836 train loss:4.161724 +step:1837 train loss:4.088724 +step:1838 train loss:4.183330 +step:1839 train loss:4.158511 +step:1840 train loss:4.135750 +step:1841 train loss:4.152761 +step:1842 train loss:4.132191 +step:1843 train loss:4.078191 +step:1844 train loss:4.143867 +step:1845 train loss:4.111537 +step:1846 train loss:4.158749 +step:1847 train loss:4.221308 +step:1848 train loss:4.007111 +step:1849 train loss:4.111058 +step:1850 train loss:4.086022 +step:1851 train loss:4.131725 +step:1852 train loss:4.115970 +step:1853 train loss:4.167639 +step:1854 train loss:4.132909 +step:1855 train loss:4.113024 +step:1856 train loss:4.125997 +step:1857 train loss:4.121521 +step:1858 train loss:4.167418 +step:1859 train loss:4.124794 +step:1860 train loss:4.087360 +step:1861 train loss:4.109471 +step:1862 train loss:4.147583 +step:1863 train loss:4.186103 +step:1864 train loss:4.082310 +step:1865 train loss:4.106633 +step:1866 train loss:4.115672 +step:1867 train loss:4.137661 +step:1868 train loss:4.189116 +step:1869 train loss:4.105156 +step:1870 train loss:4.135610 +step:1871 train loss:4.075611 +step:1872 train loss:4.139320 +step:1873 train loss:4.208129 +step:1874 train loss:4.062815 +step:1875 train loss:4.148109 +step:1876 train loss:4.106553 +step:1877 train loss:4.148668 +step:1878 train loss:4.075918 +step:1879 train loss:4.129857 +step:1880 train loss:4.211624 +step:1881 train loss:4.137993 +step:1882 train loss:4.152029 +step:1883 train loss:4.179173 +step:1884 train loss:4.182637 +step:1885 train loss:4.151580 +step:1886 train loss:4.072927 +step:1887 train loss:4.090541 +step:1888 train loss:4.094296 +step:1889 train loss:4.102415 +step:1890 train loss:4.118749 +step:1891 train loss:4.044754 +step:1892 train loss:4.146236 +step:1893 train loss:4.066955 +step:1894 train loss:4.080516 +step:1895 train loss:4.121617 +step:1896 train loss:4.166344 +step:1897 train loss:4.070815 +step:1898 train loss:4.113838 +step:1899 train loss:4.125815 +step:1900 train loss:4.079053 +step:1901 train loss:4.147940 +step:1902 train loss:4.150008 +step:1903 train loss:4.088801 +step:1904 train loss:4.080305 +step:1905 train loss:4.080317 +step:1906 train loss:4.129337 +step:1907 train loss:4.082253 +step:1908 train loss:4.087879 +step:1909 train loss:4.184984 +step:1910 train loss:4.079716 +step:1911 train loss:4.079668 +step:1912 train loss:4.131798 +step:1913 train loss:4.072769 +step:1914 train loss:4.105537 +step:1915 train loss:4.074709 +step:1916 train loss:4.119208 +step:1917 train loss:4.106830 +step:1918 train loss:4.020641 +step:1919 train loss:4.167735 +step:1920 train loss:4.277006 +step:1921 train loss:4.051024 +step:1922 train loss:4.030447 +step:1923 train loss:4.132602 +step:1924 train loss:4.164003 +step:1925 train loss:4.112257 +step:1926 train loss:4.047457 +step:1927 train loss:4.134530 +step:1928 train loss:4.045798 +step:1929 train loss:4.074116 +step:1930 train loss:4.147188 +step:1931 train loss:4.057083 +step:1932 train loss:4.110428 +step:1933 train loss:4.105646 +step:1934 train loss:4.174654 +step:1935 train loss:4.130942 +step:1936 train loss:4.096344 +step:1937 train loss:4.039300 +step:1938 train loss:4.398385 +step:1939 train loss:4.146271 +step:1940 train loss:4.128499 +step:1941 train loss:4.121925 +step:1942 train 
loss:4.122633 +step:1943 train loss:4.112113 +step:1944 train loss:4.077754 +step:1945 train loss:4.076821 +step:1946 train loss:4.095469 +step:1947 train loss:4.123604 +step:1948 train loss:4.029029 +step:1949 train loss:4.141310 +step:1950 train loss:4.081649 +step:1951 train loss:4.100631 +step:1952 train loss:4.130988 +step:1953 train loss:4.068650 +step:1954 train loss:4.094921 +step:1955 train loss:4.053375 +step:1956 train loss:4.130273 +step:1957 train loss:4.153192 +step:1958 train loss:4.168089 +step:1959 train loss:4.037755 +step:1960 train loss:4.078780 +step:1961 train loss:4.105201 +step:1962 train loss:4.105667 +step:1963 train loss:4.078256 +step:1964 train loss:4.113375 +step:1965 train loss:4.157070 +step:1966 train loss:4.057396 +step:1967 train loss:4.121131 +step:1968 train loss:4.056729 +step:1969 train loss:4.071365 +step:1970 train loss:4.135763 +step:1971 train loss:4.038668 +step:1972 train loss:4.145634 +step:1973 train loss:4.044555 +step:1974 train loss:4.093467 +step:1975 train loss:4.058332 +step:1976 train loss:4.074447 +step:1977 train loss:4.118978 +step:1978 train loss:4.065591 +step:1979 train loss:4.038264 +step:1980 train loss:4.079760 +step:1981 train loss:4.055030 +step:1982 train loss:4.145133 +step:1983 train loss:4.084073 +step:1984 train loss:4.122496 +step:1985 train loss:4.110725 +step:1986 train loss:4.098829 +step:1987 train loss:4.055285 +step:1988 train loss:4.085944 +step:1989 train loss:4.214127 +step:1990 train loss:4.059651 +step:1991 train loss:4.049540 +step:1992 train loss:4.059075 +step:1993 train loss:4.097003 +step:1994 train loss:4.084579 +step:1995 train loss:4.044649 +step:1996 train loss:4.091600 +step:1997 train loss:4.094158 +step:1998 train loss:4.052067 +step:1999 train loss:4.159048 +step:2000 validation loss:4.035661 total_sharp:1.4217e-02 L1_sharp:4.6757e-01 L2_sharp:4.2759e-01 L3_sharp:5.3418e-01 L4_sharp:2.9356e-01 L5_sharp:2.4708e-01 L6_sharp:2.4563e-01 L7_sharp:1.8244e-01 L8_sharp:1.5242e-01 L9_sharp:1.4083e-01 L10_sharp:1.2720e-01 L11_sharp:1.2185e-01 L12_sharp:1.6670e-01 total_fnorm:1.3132e+00 total_l1_linf:7.2604e+03 total_spectral:1.3132e+00 L1_fnorm:3.1493e-02 L2_fnorm:2.9272e-02 L3_fnorm:2.7833e-02 L4_fnorm:2.9581e-02 L5_fnorm:3.0176e-02 L6_fnorm:3.0987e-02 L7_fnorm:3.1429e-02 L8_fnorm:3.1482e-02 L9_fnorm:3.1579e-02 L10_fnorm:3.1736e-02 L11_fnorm:3.1536e-02 L12_fnorm:3.1807e-02 L1_l1linf:1.9292e-01 L2_l1linf:2.2078e-01 L3_l1linf:2.2867e-01 L4_l1linf:2.2772e-01 L5_l1linf:2.2999e-01 L6_l1linf:2.3499e-01 L7_l1linf:2.3246e-01 L8_l1linf:2.4752e-01 L9_l1linf:2.4543e-01 L10_l1linf:2.5961e-01 L11_l1linf:2.5094e-01 L12_l1linf:2.7050e-01 L1_spectral:4.4128e-03 L2_spectral:5.0340e-03 L3_spectral:5.1943e-03 L4_spectral:5.1684e-03 L5_spectral:5.2051e-03 L6_spectral:5.3419e-03 L7_spectral:5.2427e-03 L8_spectral:5.5613e-03 L9_spectral:5.5344e-03 L10_spectral:5.7816e-03 L11_spectral:5.6053e-03 L12_spectral:5.8925e-03 ip_v_neg_g:1.1285e-02 cos_v_neg_g:9.0027e-04 v_norm:1.3132e+00 g_norm:9.5460e+00 hv_norm:4.8978e+00 cos_v_hv:3.8117e-03 hg_norm:7.8774e+03 cos_g_hg:6.4569e-01 v_par:3.6965e-05 v_perp:1.3132e+00 L1_cos_v_neg_g:1.0106e-02 L1_v_norm:3.1493e-02 L2_cos_v_neg_g:1.0186e-02 L2_v_norm:2.9272e-02 L3_cos_v_neg_g:1.2136e-02 L3_v_norm:2.7833e-02 L4_cos_v_neg_g:1.2406e-02 L4_v_norm:2.9581e-02 L5_cos_v_neg_g:1.1905e-02 L5_v_norm:3.0176e-02 L6_cos_v_neg_g:1.2498e-02 L6_v_norm:3.0987e-02 L7_cos_v_neg_g:1.1243e-02 L7_v_norm:3.1429e-02 L8_cos_v_neg_g:1.1177e-02 L8_v_norm:3.1482e-02 L9_cos_v_neg_g:1.1290e-02 L9_v_norm:3.1579e-02 
L10_cos_v_neg_g:1.1138e-02 L10_v_norm:3.1736e-02 L11_cos_v_neg_g:1.0923e-02 L11_v_norm:3.1536e-02 L12_cos_v_neg_g:9.8658e-03 L12_v_norm:3.1807e-02 +step:2000 train loss:4.133341 +step:2001 train loss:4.047938 +step:2002 train loss:4.150528 +step:2003 train loss:4.193339 +step:2004 train loss:4.062920 +step:2005 train loss:4.162465 +step:2006 train loss:4.057574 +step:2007 train loss:4.129381 +step:2008 train loss:4.070384 +step:2009 train loss:4.072064 +step:2010 train loss:4.196837 +step:2011 train loss:4.047462 +step:2012 train loss:4.070020 +step:2013 train loss:4.089826 +step:2014 train loss:3.972035 +step:2015 train loss:4.103907 +step:2016 train loss:4.085345 +step:2017 train loss:4.085439 +step:2018 train loss:4.053246 +step:2019 train loss:4.082948 +step:2020 train loss:4.090833 +step:2021 train loss:4.054821 +step:2022 train loss:4.095384 +step:2023 train loss:4.072508 +step:2024 train loss:4.120453 +step:2025 train loss:4.069478 +step:2026 train loss:4.046367 +step:2027 train loss:4.076446 +step:2028 train loss:4.010551 +step:2029 train loss:4.035494 +step:2030 train loss:4.041534 +step:2031 train loss:4.007162 +step:2032 train loss:4.052866 +step:2033 train loss:4.055833 +step:2034 train loss:4.042775 +step:2035 train loss:4.091711 +step:2036 train loss:4.079258 +step:2037 train loss:4.062293 +step:2038 train loss:4.065353 +step:2039 train loss:4.053090 +step:2040 train loss:4.083402 +step:2041 train loss:4.088840 +step:2042 train loss:4.019163 +step:2043 train loss:4.175528 +step:2044 train loss:4.034642 +step:2045 train loss:4.062244 +step:2046 train loss:4.065750 +step:2047 train loss:4.037481 +step:2048 train loss:4.088930 +step:2049 train loss:4.044300 +step:2050 train loss:4.068804 +step:2051 train loss:4.035141 +step:2052 train loss:4.077622 +step:2053 train loss:4.091234 +step:2054 train loss:4.039435 +step:2055 train loss:4.048951 +step:2056 train loss:4.093494 +step:2057 train loss:4.097435 +step:2058 train loss:4.066768 +step:2059 train loss:4.140396 +step:2060 train loss:4.094420 +step:2061 train loss:4.039608 +step:2062 train loss:4.063135 +step:2063 train loss:3.976813 +step:2064 train loss:4.089186 +step:2065 train loss:4.101593 +step:2066 train loss:3.963819 +step:2067 train loss:4.002885 +step:2068 train loss:4.111739 +step:2069 train loss:4.042433 +step:2070 train loss:4.048131 +step:2071 train loss:4.097095 +step:2072 train loss:4.018511 +step:2073 train loss:4.072494 +step:2074 train loss:4.047587 +step:2075 train loss:4.134866 +step:2076 train loss:4.081010 +step:2077 train loss:4.095141 +step:2078 train loss:4.047641 +step:2079 train loss:4.201847 +step:2080 train loss:4.016548 +step:2081 train loss:4.126840 +step:2082 train loss:4.058031 +step:2083 train loss:4.039112 +step:2084 train loss:4.024721 +step:2085 train loss:4.071370 +step:2086 train loss:4.082656 +step:2087 train loss:4.118297 +step:2088 train loss:3.983730 +step:2089 train loss:4.019443 +step:2090 train loss:4.052613 +step:2091 train loss:4.069585 +step:2092 train loss:4.050215 +step:2093 train loss:4.034280 +step:2094 train loss:4.074126 +step:2095 train loss:4.025858 +step:2096 train loss:4.009471 +step:2097 train loss:4.044828 +step:2098 train loss:4.042985 +step:2099 train loss:4.014994 +step:2100 train loss:4.094615 +step:2101 train loss:4.081905 +step:2102 train loss:4.049266 +step:2103 train loss:4.064754 +step:2104 train loss:4.042229 +step:2105 train loss:4.050009 +step:2106 train loss:4.044631 +step:2107 train loss:4.109113 +step:2108 train loss:4.038644 +step:2109 train 
loss:3.990883 +step:2110 train loss:4.089261 +step:2111 train loss:4.036587 +step:2112 train loss:4.087551 +step:2113 train loss:4.032062 +step:2114 train loss:4.036613 +step:2115 train loss:4.083117 +step:2116 train loss:4.018151 +step:2117 train loss:4.035324 +step:2118 train loss:4.035176 +step:2119 train loss:3.966140 +step:2120 train loss:4.052616 +step:2121 train loss:4.039057 +step:2122 train loss:4.044031 +step:2123 train loss:4.104944 +step:2124 train loss:4.101131 +step:2125 train loss:4.014353 +step:2126 train loss:4.021405 +step:2127 train loss:4.013788 +step:2128 train loss:4.003433 +step:2129 train loss:4.031006 +step:2130 train loss:4.031599 +step:2131 train loss:4.062468 +step:2132 train loss:3.989038 +step:2133 train loss:4.096570 +step:2134 train loss:4.044974 +step:2135 train loss:4.007565 +step:2136 train loss:4.094036 +step:2137 train loss:4.064588 +step:2138 train loss:4.016881 +step:2139 train loss:4.022816 +step:2140 train loss:4.029141 +step:2141 train loss:4.071877 +step:2142 train loss:4.045500 +step:2143 train loss:3.968353 +step:2144 train loss:4.074507 +step:2145 train loss:4.044319 +step:2146 train loss:4.080551 +step:2147 train loss:4.181174 +step:2148 train loss:3.988671 +step:2149 train loss:3.997437 +step:2150 train loss:4.027672 +step:2151 train loss:4.062880 +step:2152 train loss:4.051950 +step:2153 train loss:4.094676 +step:2154 train loss:4.010765 +step:2155 train loss:4.094458 +step:2156 train loss:4.009957 +step:2157 train loss:4.090602 +step:2158 train loss:4.123024 +step:2159 train loss:4.049996 +step:2160 train loss:4.129602 +step:2161 train loss:4.023754 +step:2162 train loss:4.028847 +step:2163 train loss:4.009787 +step:2164 train loss:4.029604 +step:2165 train loss:4.006846 +step:2166 train loss:4.125836 +step:2167 train loss:4.029879 +step:2168 train loss:4.048099 +step:2169 train loss:3.997846 +step:2170 train loss:4.135725 +step:2171 train loss:4.100870 +step:2172 train loss:4.037368 +step:2173 train loss:4.020866 +step:2174 train loss:4.084273 +step:2175 train loss:4.021518 +step:2176 train loss:4.095129 +step:2177 train loss:4.066743 +step:2178 train loss:3.993145 +step:2179 train loss:4.058922 +step:2180 train loss:4.071126 +step:2181 train loss:4.008508 +step:2182 train loss:4.057151 +step:2183 train loss:4.051977 +step:2184 train loss:4.003217 +step:2185 train loss:3.981046 +step:2186 train loss:4.023615 +step:2187 train loss:4.036052 +step:2188 train loss:4.084137 +step:2189 train loss:3.974379 +step:2190 train loss:4.022165 +step:2191 train loss:4.076292 +step:2192 train loss:4.005606 +step:2193 train loss:3.972362 +step:2194 train loss:3.987102 +step:2195 train loss:4.004143 +step:2196 train loss:4.012407 +step:2197 train loss:3.991270 +step:2198 train loss:4.008796 +step:2199 train loss:4.088344 +step:2200 train loss:4.015397 +step:2201 train loss:4.019011 +step:2202 train loss:3.987091 +step:2203 train loss:4.011051 +step:2204 train loss:4.037897 +step:2205 train loss:4.019665 +step:2206 train loss:4.021882 +step:2207 train loss:4.012001 +step:2208 train loss:3.995513 +step:2209 train loss:4.270583 +step:2210 train loss:4.044901 +step:2211 train loss:4.037416 +step:2212 train loss:4.005578 +step:2213 train loss:4.090732 +step:2214 train loss:4.083811 +step:2215 train loss:4.008957 +step:2216 train loss:3.979547 +step:2217 train loss:3.999908 +step:2218 train loss:4.002613 +step:2219 train loss:4.038197 +step:2220 train loss:3.983655 +step:2221 train loss:4.015980 +step:2222 train loss:4.035594 +step:2223 train loss:4.067262 
+step:2224 train loss:4.047820 +step:2225 train loss:3.983035 +step:2226 train loss:4.050877 +step:2227 train loss:4.052010 +step:2228 train loss:4.049181 +step:2229 train loss:3.990323 +step:2230 train loss:4.114151 +step:2231 train loss:4.027061 +step:2232 train loss:4.027532 +step:2233 train loss:4.066493 +step:2234 train loss:3.966727 +step:2235 train loss:4.056430 +step:2236 train loss:3.989622 +step:2237 train loss:4.123862 +step:2238 train loss:3.937658 +step:2239 train loss:4.008560 +step:2240 train loss:4.025181 +step:2241 train loss:3.943743 +step:2242 train loss:4.077729 +step:2243 train loss:4.120821 +step:2244 train loss:3.998255 +step:2245 train loss:3.993015 +step:2246 train loss:3.968333 +step:2247 train loss:3.967274 +step:2248 train loss:4.019053 +step:2249 train loss:4.007885 +step:2250 validation loss:3.966226 +step:2250 train loss:4.014259 +step:2251 train loss:3.982029 +step:2252 train loss:3.982763 +step:2253 train loss:4.005699 +step:2254 train loss:4.009693 +step:2255 train loss:3.971597 +step:2256 train loss:4.021974 +step:2257 train loss:4.013252 +step:2258 train loss:4.002553 +step:2259 train loss:4.012700 +step:2260 train loss:3.975542 +step:2261 train loss:4.049412 +step:2262 train loss:4.066328 +step:2263 train loss:4.028046 +step:2264 train loss:4.136518 +step:2265 train loss:3.989172 +step:2266 train loss:4.034640 +step:2267 train loss:3.992659 +step:2268 train loss:3.994411 +step:2269 train loss:3.999319 +step:2270 train loss:3.988513 +step:2271 train loss:3.999158 +step:2272 train loss:4.037845 +step:2273 train loss:3.960965 +step:2274 train loss:3.988600 +step:2275 train loss:3.952372 +step:2276 train loss:4.020537 +step:2277 train loss:4.031271 +step:2278 train loss:4.018735 +step:2279 train loss:3.994422 +step:2280 train loss:3.905835 +step:2281 train loss:4.048260 +step:2282 train loss:3.984255 +step:2283 train loss:3.965732 +step:2284 train loss:3.989091 +step:2285 train loss:4.035384 +step:2286 train loss:3.997570 +step:2287 train loss:4.036962 +step:2288 train loss:4.006888 +step:2289 train loss:4.003586 +step:2290 train loss:4.008342 +step:2291 train loss:3.999032 +step:2292 train loss:4.029892 +step:2293 train loss:4.015104 +step:2294 train loss:4.010264 +step:2295 train loss:4.065671 +step:2296 train loss:3.999374 +step:2297 train loss:3.972014 +step:2298 train loss:4.032205 +step:2299 train loss:4.004668 +step:2300 train loss:3.924674 +step:2301 train loss:4.017317 +step:2302 train loss:4.035575 +step:2303 train loss:4.000934 +step:2304 train loss:3.991673 +step:2305 train loss:4.036577 +step:2306 train loss:4.031103 +step:2307 train loss:3.999669 +step:2308 train loss:4.023073 +step:2309 train loss:3.983211 +step:2310 train loss:3.965542 +step:2311 train loss:3.957742 +step:2312 train loss:4.021545 +step:2313 train loss:3.936117 +step:2314 train loss:4.011880 +step:2315 train loss:4.027377 +step:2316 train loss:4.061235 +step:2317 train loss:3.931842 +step:2318 train loss:3.969986 +step:2319 train loss:4.027144 +step:2320 train loss:3.998790 +step:2321 train loss:3.970185 +step:2322 train loss:3.979742 +step:2323 train loss:3.979918 +step:2324 train loss:4.004098 +step:2325 train loss:3.952636 +step:2326 train loss:3.968455 +step:2327 train loss:4.088781 +step:2328 train loss:4.038833 +step:2329 train loss:3.990083 +step:2330 train loss:3.944211 +step:2331 train loss:3.991401 +step:2332 train loss:3.920617 +step:2333 train loss:3.984117 +step:2334 train loss:3.962354 +step:2335 train loss:3.945992 +step:2336 train loss:4.197852 +step:2337 
train loss:3.977846 +step:2338 train loss:4.018134 +step:2339 train loss:4.014699 +step:2340 train loss:4.023154 +step:2341 train loss:4.016077 +step:2342 train loss:3.969300 +step:2343 train loss:3.989520 +step:2344 train loss:4.030680 +step:2345 train loss:3.990258 +step:2346 train loss:4.020085 +step:2347 train loss:3.937346 +step:2348 train loss:4.000056 +step:2349 train loss:3.951460 +step:2350 train loss:4.001581 +step:2351 train loss:4.013820 +step:2352 train loss:4.011282 +step:2353 train loss:3.964863 +step:2354 train loss:4.020891 +step:2355 train loss:4.003226 +step:2356 train loss:4.045362 +step:2357 train loss:3.953877 +step:2358 train loss:3.968245 +step:2359 train loss:3.992901 +step:2360 train loss:4.013179 +step:2361 train loss:4.053161 +step:2362 train loss:3.876290 +step:2363 train loss:4.064956 +step:2364 train loss:4.018470 +step:2365 train loss:3.990752 +step:2366 train loss:3.947855 +step:2367 train loss:4.004247 +step:2368 train loss:4.001946 +step:2369 train loss:3.979304 +step:2370 train loss:3.999922 +step:2371 train loss:4.058351 +step:2372 train loss:3.913988 +step:2373 train loss:4.051217 +step:2374 train loss:4.032050 +step:2375 train loss:4.019313 +step:2376 train loss:4.006643 +step:2377 train loss:3.955888 +step:2378 train loss:3.998583 +step:2379 train loss:3.985694 +step:2380 train loss:4.044761 +step:2381 train loss:4.130617 +step:2382 train loss:3.925444 +step:2383 train loss:3.979840 +step:2384 train loss:4.003596 +step:2385 train loss:3.905168 +step:2386 train loss:4.063085 +step:2387 train loss:3.946458 +step:2388 train loss:3.992542 +step:2389 train loss:4.013981 +step:2390 train loss:3.964334 +step:2391 train loss:3.983953 +step:2392 train loss:4.012323 +step:2393 train loss:3.971274 +step:2394 train loss:3.990555 +step:2395 train loss:3.981215 +step:2396 train loss:3.988841 +step:2397 train loss:3.967324 +step:2398 train loss:4.017717 +step:2399 train loss:3.980016 +step:2400 train loss:3.964513 +step:2401 train loss:4.001381 +step:2402 train loss:3.951735 +step:2403 train loss:4.007779 +step:2404 train loss:3.959332 +step:2405 train loss:3.962508 +step:2406 train loss:3.991525 +step:2407 train loss:3.938345 +step:2408 train loss:3.981824 +step:2409 train loss:3.969189 +step:2410 train loss:3.971273 +step:2411 train loss:4.040331 +step:2412 train loss:4.033273 +step:2413 train loss:4.061656 +step:2414 train loss:3.957784 +step:2415 train loss:3.949727 +step:2416 train loss:3.966590 +step:2417 train loss:3.999178 +step:2418 train loss:4.021365 +step:2419 train loss:3.947461 +step:2420 train loss:3.969476 +step:2421 train loss:4.002327 +step:2422 train loss:4.050627 +step:2423 train loss:3.982176 +step:2424 train loss:3.949215 +step:2425 train loss:4.008043 +step:2426 train loss:3.948970 +step:2427 train loss:3.975245 +step:2428 train loss:4.052391 +step:2429 train loss:4.007665 +step:2430 train loss:4.095804 +step:2431 train loss:4.009299 +step:2432 train loss:3.978078 +step:2433 train loss:3.959188 +step:2434 train loss:3.942886 +step:2435 train loss:3.996071 +step:2436 train loss:3.958719 +step:2437 train loss:3.990730 +step:2438 train loss:4.029805 +step:2439 train loss:4.015003 +step:2440 train loss:3.961460 +step:2441 train loss:3.992437 +step:2442 train loss:3.984467 +step:2443 train loss:3.952154 +step:2444 train loss:3.986479 +step:2445 train loss:3.980399 +step:2446 train loss:3.955987 +step:2447 train loss:3.935616 +step:2448 train loss:3.982609 +step:2449 train loss:4.010600 +step:2450 train loss:3.972689 +step:2451 train loss:3.887678 
+step:2452 train loss:3.995760 +step:2453 train loss:3.964919 +step:2454 train loss:3.961045 +step:2455 train loss:4.012557 +step:2456 train loss:3.968985 +step:2457 train loss:4.024644 +step:2458 train loss:4.003946 +step:2459 train loss:3.980574 +step:2460 train loss:3.980690 +step:2461 train loss:4.017089 +step:2462 train loss:3.989674 +step:2463 train loss:3.958986 +step:2464 train loss:3.980185 +step:2465 train loss:4.053730 +step:2466 train loss:4.133550 +step:2467 train loss:4.042397 +step:2468 train loss:3.937997 +step:2469 train loss:4.002851 +step:2470 train loss:4.055967 +step:2471 train loss:4.058211 +step:2472 train loss:4.046484 +step:2473 train loss:3.973672 +step:2474 train loss:3.930930 +step:2475 train loss:3.987288 +step:2476 train loss:4.064030 +step:2477 train loss:3.977275 +step:2478 train loss:3.933074 +step:2479 train loss:3.978456 +step:2480 train loss:3.969584 +step:2481 train loss:4.156130 +step:2482 train loss:3.971588 +step:2483 train loss:3.998665 +step:2484 train loss:3.947105 +step:2485 train loss:3.931664 +step:2486 train loss:3.973950 +step:2487 train loss:4.005247 +step:2488 train loss:3.918604 +step:2489 train loss:4.029690 +step:2490 train loss:3.950621 +step:2491 train loss:3.960064 +step:2492 train loss:3.999890 +step:2493 train loss:4.038855 +step:2494 train loss:3.961201 +step:2495 train loss:3.994068 +step:2496 train loss:3.974801 +step:2497 train loss:3.987410 +step:2498 train loss:3.991349 +step:2499 train loss:3.989372 +step:2500 validation loss:3.915236 total_sharp:9.6954e-03 L1_sharp:2.5597e-01 L2_sharp:1.8737e-01 L3_sharp:3.1370e-01 L4_sharp:2.0813e-01 L5_sharp:1.6231e-01 L6_sharp:1.6573e-01 L7_sharp:1.5211e-01 L8_sharp:1.3982e-01 L9_sharp:1.0813e-01 L10_sharp:9.3509e-02 L11_sharp:8.0350e-02 L12_sharp:1.1911e-01 total_fnorm:1.3214e+00 total_l1_linf:7.3009e+03 total_spectral:1.3214e+00 L1_fnorm:3.1623e-02 L2_fnorm:2.9851e-02 L3_fnorm:2.9025e-02 L4_fnorm:3.0194e-02 L5_fnorm:3.0726e-02 L6_fnorm:3.1315e-02 L7_fnorm:3.1679e-02 L8_fnorm:3.1643e-02 L9_fnorm:3.1694e-02 L10_fnorm:3.1812e-02 L11_fnorm:3.1540e-02 L12_fnorm:3.1648e-02 L1_l1linf:2.2073e-01 L2_l1linf:2.4312e-01 L3_l1linf:2.6188e-01 L4_l1linf:2.6658e-01 L5_l1linf:2.6599e-01 L6_l1linf:2.5712e-01 L7_l1linf:2.4824e-01 L8_l1linf:2.5619e-01 L9_l1linf:2.5797e-01 L10_l1linf:2.7403e-01 L11_l1linf:2.6908e-01 L12_l1linf:2.6149e-01 L1_spectral:5.0323e-03 L2_spectral:5.5546e-03 L3_spectral:5.9253e-03 L4_spectral:5.9726e-03 L5_spectral:5.9688e-03 L6_spectral:5.7915e-03 L7_spectral:5.5975e-03 L8_spectral:5.7743e-03 L9_spectral:5.7830e-03 L10_spectral:6.1793e-03 L11_spectral:6.0364e-03 L12_spectral:5.8686e-03 ip_v_neg_g:6.9579e-03 cos_v_neg_g:6.6666e-04 v_norm:1.3214e+00 g_norm:7.8982e+00 hv_norm:3.1080e+00 cos_v_hv:4.1222e-03 hg_norm:3.6515e+03 cos_g_hg:5.9455e-01 v_par:2.7738e-05 v_perp:1.3214e+00 L1_cos_v_neg_g:7.9558e-03 L1_v_norm:3.1623e-02 L2_cos_v_neg_g:8.6423e-03 L2_v_norm:2.9851e-02 L3_cos_v_neg_g:1.0509e-02 L3_v_norm:2.9025e-02 L4_cos_v_neg_g:9.3320e-03 L4_v_norm:3.0194e-02 L5_cos_v_neg_g:7.7627e-03 L5_v_norm:3.0726e-02 L6_cos_v_neg_g:8.1970e-03 L6_v_norm:3.1315e-02 L7_cos_v_neg_g:7.0359e-03 L7_v_norm:3.1679e-02 L8_cos_v_neg_g:7.4894e-03 L8_v_norm:3.1643e-02 L9_cos_v_neg_g:5.9823e-03 L9_v_norm:3.1694e-02 L10_cos_v_neg_g:5.7884e-03 L10_v_norm:3.1812e-02 L11_cos_v_neg_g:6.2214e-03 L11_v_norm:3.1540e-02 L12_cos_v_neg_g:7.2443e-03 L12_v_norm:3.1648e-02 +step:2500 train loss:3.934178 +step:2501 train loss:3.990051 +step:2502 train loss:3.982056 +step:2503 train loss:3.915299 +step:2504 train 
loss:3.944739 +step:2505 train loss:3.977979 +step:2506 train loss:3.936001 +step:2507 train loss:3.962336 +step:2508 train loss:3.915307 +step:2509 train loss:3.928948 +step:2510 train loss:3.927738 +step:2511 train loss:3.971447 +step:2512 train loss:4.018971 +step:2513 train loss:3.963690 +step:2514 train loss:3.950012 +step:2515 train loss:4.086272 +step:2516 train loss:3.975724 +step:2517 train loss:4.035572 +step:2518 train loss:3.999058 +step:2519 train loss:3.973298 +step:2520 train loss:3.981665 +step:2521 train loss:3.949142 +step:2522 train loss:3.993562 +step:2523 train loss:3.912851 +step:2524 train loss:3.969225 +step:2525 train loss:3.959773 +step:2526 train loss:4.008971 +step:2527 train loss:3.998038 +step:2528 train loss:3.982382 +step:2529 train loss:4.005606 +step:2530 train loss:3.980380 +step:2531 train loss:3.921206 +step:2532 train loss:4.016108 +step:2533 train loss:3.908642 +step:2534 train loss:4.007699 +step:2535 train loss:3.963904 +step:2536 train loss:3.883960 +step:2537 train loss:4.000984 +step:2538 train loss:3.978196 +step:2539 train loss:3.995923 +step:2540 train loss:3.936385 +step:2541 train loss:3.959342 +step:2542 train loss:3.971309 +step:2543 train loss:3.960991 +step:2544 train loss:3.946461 +step:2545 train loss:3.938211 +step:2546 train loss:3.905622 +step:2547 train loss:3.947859 +step:2548 train loss:3.968776 +step:2549 train loss:3.971146 +step:2550 train loss:4.097690 +step:2551 train loss:4.175507 +step:2552 train loss:3.907760 +step:2553 train loss:3.942166 +step:2554 train loss:4.088884 +step:2555 train loss:3.980355 +step:2556 train loss:3.906289 +step:2557 train loss:3.995672 +step:2558 train loss:3.990659 +step:2559 train loss:3.940413 +step:2560 train loss:3.926468 +step:2561 train loss:4.026531 +step:2562 train loss:3.978735 +step:2563 train loss:3.911584 +step:2564 train loss:3.983843 +step:2565 train loss:3.964446 +step:2566 train loss:3.942205 +step:2567 train loss:3.920443 +step:2568 train loss:3.979828 +step:2569 train loss:3.983514 +step:2570 train loss:3.938256 +step:2571 train loss:4.017705 +step:2572 train loss:3.982487 +step:2573 train loss:3.913851 +step:2574 train loss:3.955439 +step:2575 train loss:4.002978 +step:2576 train loss:3.957868 +step:2577 train loss:3.915826 +step:2578 train loss:3.958393 +step:2579 train loss:3.938667 +step:2580 train loss:3.910447 +step:2581 train loss:3.923273 +step:2582 train loss:3.932888 +step:2583 train loss:3.955379 +step:2584 train loss:3.969368 +step:2585 train loss:3.932584 +step:2586 train loss:3.959894 +step:2587 train loss:3.889290 +step:2588 train loss:3.922371 +step:2589 train loss:4.000306 +step:2590 train loss:3.922314 +step:2591 train loss:3.978000 +step:2592 train loss:4.029113 +step:2593 train loss:3.986233 +step:2594 train loss:3.942722 +step:2595 train loss:3.953327 +step:2596 train loss:3.996424 +step:2597 train loss:3.878876 +step:2598 train loss:4.031327 +step:2599 train loss:3.979637 +step:2600 train loss:4.008454 +step:2601 train loss:3.943014 +step:2602 train loss:3.980692 +step:2603 train loss:3.970359 +step:2604 train loss:3.893388 +step:2605 train loss:4.018196 +step:2606 train loss:3.972612 +step:2607 train loss:3.930322 +step:2608 train loss:3.902880 +step:2609 train loss:3.931254 +step:2610 train loss:3.956967 +step:2611 train loss:3.991003 +step:2612 train loss:3.953658 +step:2613 train loss:3.928813 +step:2614 train loss:3.915397 +step:2615 train loss:3.915298 +step:2616 train loss:3.987150 +step:2617 train loss:3.946313 +step:2618 train loss:3.911608 
+step:2619 train loss:3.931658 +step:2620 train loss:3.920466 +step:2621 train loss:3.935220 +step:2622 train loss:4.010851 +step:2623 train loss:3.884070 +step:2624 train loss:3.897475 +step:2625 train loss:3.972291 +step:2626 train loss:3.964214 +step:2627 train loss:3.941880 +step:2628 train loss:3.995284 +step:2629 train loss:3.944359 +step:2630 train loss:3.937184 +step:2631 train loss:3.968206 +step:2632 train loss:3.939223 +step:2633 train loss:3.921126 +step:2634 train loss:3.968024 +step:2635 train loss:3.950966 +step:2636 train loss:4.001872 +step:2637 train loss:3.949522 +step:2638 train loss:3.927140 +step:2639 train loss:3.990283 +step:2640 train loss:3.906873 +step:2641 train loss:3.963933 +step:2642 train loss:3.882250 +step:2643 train loss:3.888670 +step:2644 train loss:3.980948 +step:2645 train loss:3.910054 +step:2646 train loss:3.946332 +step:2647 train loss:3.964800 +step:2648 train loss:3.998217 +step:2649 train loss:3.910460 +step:2650 train loss:3.900188 +step:2651 train loss:3.945151 +step:2652 train loss:3.913353 +step:2653 train loss:3.981090 +step:2654 train loss:3.942353 +step:2655 train loss:3.931091 +step:2656 train loss:3.950710 +step:2657 train loss:3.977030 +step:2658 train loss:3.982178 +step:2659 train loss:3.962326 +step:2660 train loss:3.952432 +step:2661 train loss:3.996383 +step:2662 train loss:3.972346 +step:2663 train loss:3.944419 +step:2664 train loss:3.961191 +step:2665 train loss:3.907988 +step:2666 train loss:3.936793 +step:2667 train loss:3.943795 +step:2668 train loss:3.920263 +step:2669 train loss:3.925468 +step:2670 train loss:3.952658 +step:2671 train loss:3.924692 +step:2672 train loss:3.948087 +step:2673 train loss:3.882510 +step:2674 train loss:3.973968 +step:2675 train loss:3.949456 +step:2676 train loss:3.970137 +step:2677 train loss:3.950512 +step:2678 train loss:3.932208 +step:2679 train loss:3.915211 +step:2680 train loss:3.901487 +step:2681 train loss:3.871381 +step:2682 train loss:3.959117 +step:2683 train loss:3.930478 +step:2684 train loss:3.959934 +step:2685 train loss:3.890382 +step:2686 train loss:3.897219 +step:2687 train loss:3.969691 +step:2688 train loss:3.987056 +step:2689 train loss:3.893534 +step:2690 train loss:3.975934 +step:2691 train loss:3.944369 +step:2692 train loss:3.970848 +step:2693 train loss:4.028908 +step:2694 train loss:3.922634 +step:2695 train loss:3.943249 +step:2696 train loss:3.947802 +step:2697 train loss:3.941210 +step:2698 train loss:3.943518 +step:2699 train loss:3.963961 +step:2700 train loss:3.940845 +step:2701 train loss:4.000985 +step:2702 train loss:3.939626 +step:2703 train loss:3.898121 +step:2704 train loss:3.969714 +step:2705 train loss:3.964411 +step:2706 train loss:3.896337 +step:2707 train loss:3.859845 +step:2708 train loss:3.953972 +step:2709 train loss:3.937634 +step:2710 train loss:3.947581 +step:2711 train loss:3.909425 +step:2712 train loss:3.967623 +step:2713 train loss:3.977704 +step:2714 train loss:3.917715 +step:2715 train loss:3.912636 +step:2716 train loss:3.977651 +step:2717 train loss:3.948342 +step:2718 train loss:3.939639 +step:2719 train loss:3.943804 +step:2720 train loss:3.904810 +step:2721 train loss:3.985562 +step:2722 train loss:3.916093 +step:2723 train loss:3.899193 +step:2724 train loss:3.924415 +step:2725 train loss:3.925892 +step:2726 train loss:3.896951 +step:2727 train loss:3.955595 +step:2728 train loss:3.896095 +step:2729 train loss:4.026116 +step:2730 train loss:3.962699 +step:2731 train loss:4.003108 +step:2732 train loss:3.921542 +step:2733 train 
loss:3.917394 +step:2734 train loss:3.960780 +step:2735 train loss:3.961186 +step:2736 train loss:3.883171 +step:2737 train loss:3.940091 +step:2738 train loss:3.995184 +step:2739 train loss:3.919503 +step:2740 train loss:3.918547 +step:2741 train loss:3.909911 +step:2742 train loss:3.821888 +step:2743 train loss:3.937054 +step:2744 train loss:3.958078 +step:2745 train loss:3.909175 +step:2746 train loss:3.931937 +step:2747 train loss:3.917397 +step:2748 train loss:3.874080 +step:2749 train loss:3.936023 +step:2750 validation loss:3.870754 +step:2750 train loss:3.946491 +step:2751 train loss:3.972957 +step:2752 train loss:3.953537 +step:2753 train loss:3.950312 +step:2754 train loss:3.884396 +step:2755 train loss:3.954871 +step:2756 train loss:3.924452 +step:2757 train loss:3.913163 +step:2758 train loss:3.939058 +step:2759 train loss:3.950052 +step:2760 train loss:3.862843 +step:2761 train loss:3.880572 +step:2762 train loss:3.894462 +step:2763 train loss:3.918100 +step:2764 train loss:3.861157 +step:2765 train loss:3.904044 +step:2766 train loss:3.997143 +step:2767 train loss:3.869611 +step:2768 train loss:3.931230 +step:2769 train loss:3.906077 +step:2770 train loss:3.927621 +step:2771 train loss:3.952001 +step:2772 train loss:3.914841 +step:2773 train loss:3.912524 +step:2774 train loss:3.907329 +step:2775 train loss:3.924877 +step:2776 train loss:3.880850 +step:2777 train loss:3.908493 +step:2778 train loss:3.921399 +step:2779 train loss:3.944141 +step:2780 train loss:3.912503 +step:2781 train loss:3.900930 +step:2782 train loss:3.889368 +step:2783 train loss:3.925782 +step:2784 train loss:3.930300 +step:2785 train loss:3.996619 +step:2786 train loss:3.967398 +step:2787 train loss:3.927773 +step:2788 train loss:3.921413 +step:2789 train loss:3.914197 +step:2790 train loss:3.857045 +step:2791 train loss:3.955897 +step:2792 train loss:3.944184 +step:2793 train loss:3.910912 +step:2794 train loss:3.922540 +step:2795 train loss:3.932769 +step:2796 train loss:3.927344 +step:2797 train loss:3.970580 +step:2798 train loss:3.959733 +step:2799 train loss:3.869421 +step:2800 train loss:3.915968 +step:2801 train loss:3.951458 +step:2802 train loss:3.975155 +step:2803 train loss:3.946072 +step:2804 train loss:3.882246 +step:2805 train loss:3.919869 +step:2806 train loss:3.917832 +step:2807 train loss:3.948610 +step:2808 train loss:3.885058 +step:2809 train loss:3.950384 +step:2810 train loss:3.942562 +step:2811 train loss:3.933369 +step:2812 train loss:3.977729 +step:2813 train loss:3.948545 +step:2814 train loss:3.939201 +step:2815 train loss:3.947719 +step:2816 train loss:3.951500 +step:2817 train loss:3.885520 +step:2818 train loss:3.990324 +step:2819 train loss:3.915414 +step:2820 train loss:3.910832 +step:2821 train loss:3.892389 +step:2822 train loss:3.935783 +step:2823 train loss:3.886994 +step:2824 train loss:3.783075 +step:2825 train loss:3.934550 +step:2826 train loss:3.926327 +step:2827 train loss:3.952333 +step:2828 train loss:3.937624 +step:2829 train loss:3.932078 +step:2830 train loss:3.958271 +step:2831 train loss:3.901381 +step:2832 train loss:3.870427 +step:2833 train loss:3.930081 +step:2834 train loss:3.883887 +step:2835 train loss:3.917861 +step:2836 train loss:3.920452 +step:2837 train loss:3.918286 +step:2838 train loss:3.857949 +step:2839 train loss:3.955677 +step:2840 train loss:3.919984 +step:2841 train loss:3.998594 +step:2842 train loss:3.945354 +step:2843 train loss:3.937112 +step:2844 train loss:3.958404 +step:2845 train loss:3.921922 +step:2846 train loss:3.871222 
+step:2847 train loss:3.961602 +step:2848 train loss:3.915659 +step:2849 train loss:3.910446 +step:2850 train loss:3.968859 +step:2851 train loss:3.920771 +step:2852 train loss:4.000853 +step:2853 train loss:3.916643 +step:2854 train loss:3.853407 +step:2855 train loss:3.937927 +step:2856 train loss:3.860571 +step:2857 train loss:3.962902 +step:2858 train loss:3.918991 +step:2859 train loss:3.903967 +step:2860 train loss:3.902250 +step:2861 train loss:3.884670 +step:2862 train loss:3.907989 +step:2863 train loss:3.891751 +step:2864 train loss:3.900338 +step:2865 train loss:3.972021 +step:2866 train loss:3.989151 +step:2867 train loss:3.924813 +step:2868 train loss:3.922315 +step:2869 train loss:3.884685 +step:2870 train loss:3.969388 +step:2871 train loss:3.965739 +step:2872 train loss:3.927488 +step:2873 train loss:3.939614 +step:2874 train loss:3.912077 +step:2875 train loss:3.866791 +step:2876 train loss:3.918748 +step:2877 train loss:3.897291 +step:2878 train loss:3.909386 +step:2879 train loss:3.877452 +step:2880 train loss:3.898764 +step:2881 train loss:3.889059 +step:2882 train loss:3.822639 +step:2883 train loss:3.911833 +step:2884 train loss:3.975153 +step:2885 train loss:3.875363 +step:2886 train loss:3.918489 +step:2887 train loss:3.942354 +step:2888 train loss:3.918472 +step:2889 train loss:3.903785 +step:2890 train loss:3.876627 +step:2891 train loss:3.914320 +step:2892 train loss:3.924511 +step:2893 train loss:3.907607 +step:2894 train loss:3.875238 +step:2895 train loss:3.924137 +step:2896 train loss:3.972404 +step:2897 train loss:3.949142 +step:2898 train loss:4.079799 +step:2899 train loss:3.841261 +step:2900 train loss:3.919135 +step:2901 train loss:3.865036 +step:2902 train loss:3.864513 +step:2903 train loss:3.880719 +step:2904 train loss:3.910120 +step:2905 train loss:3.967394 +step:2906 train loss:3.936186 +step:2907 train loss:4.105961 +step:2908 train loss:3.858394 +step:2909 train loss:3.934069 +step:2910 train loss:3.904724 +step:2911 train loss:3.932914 +step:2912 train loss:3.895059 +step:2913 train loss:3.925538 +step:2914 train loss:3.954314 +step:2915 train loss:3.953426 +step:2916 train loss:3.913631 +step:2917 train loss:3.940733 +step:2918 train loss:3.935291 +step:2919 train loss:3.877052 +step:2920 train loss:3.933344 +step:2921 train loss:3.890023 +step:2922 train loss:3.912815 +step:2923 train loss:3.981249 +step:2924 train loss:3.913045 +step:2925 train loss:3.867207 +step:2926 train loss:3.956677 +step:2927 train loss:3.865672 +step:2928 train loss:3.836678 +step:2929 train loss:3.851021 +step:2930 train loss:3.870242 +step:2931 train loss:4.023039 +step:2932 train loss:3.941979 +step:2933 train loss:3.909667 +step:2934 train loss:3.902109 +step:2935 train loss:3.925534 +step:2936 train loss:3.874933 +step:2937 train loss:3.889560 +step:2938 train loss:3.910278 +step:2939 train loss:3.982547 +step:2940 train loss:3.883300 +step:2941 train loss:3.918571 +step:2942 train loss:3.880607 +step:2943 train loss:4.150655 +step:2944 train loss:3.984616 +step:2945 train loss:3.941350 +step:2946 train loss:3.948628 +step:2947 train loss:3.911941 +step:2948 train loss:3.874511 +step:2949 train loss:3.963290 +step:2950 train loss:3.912375 +step:2951 train loss:3.812435 +step:2952 train loss:3.882731 +step:2953 train loss:3.798990 +step:2954 train loss:3.887350 +step:2955 train loss:3.955028 +step:2956 train loss:3.905417 +step:2957 train loss:3.906733 +step:2958 train loss:3.862689 +step:2959 train loss:3.885417 +step:2960 train loss:3.977762 +step:2961 train 
loss:3.843194 +step:2962 train loss:3.918921 +step:2963 train loss:3.908080 +step:2964 train loss:3.888190 +step:2965 train loss:3.916667 +step:2966 train loss:3.890279 +step:2967 train loss:3.890617 +step:2968 train loss:3.862717 +step:2969 train loss:3.875474 +step:2970 train loss:3.940367 +step:2971 train loss:3.874811 +step:2972 train loss:3.853800 +step:2973 train loss:3.850684 +step:2974 train loss:3.891254 +step:2975 train loss:3.855728 +step:2976 train loss:3.893657 +step:2977 train loss:3.885695 +step:2978 train loss:3.967496 +step:2979 train loss:3.948665 +step:2980 train loss:3.952184 +step:2981 train loss:3.917461 +step:2982 train loss:3.901304 +step:2983 train loss:3.856921 +step:2984 train loss:3.826275 +step:2985 train loss:3.939246 +step:2986 train loss:3.836860 +step:2987 train loss:3.962902 +step:2988 train loss:3.886201 +step:2989 train loss:3.921258 +step:2990 train loss:3.873735 +step:2991 train loss:3.944617 +step:2992 train loss:3.939181 +step:2993 train loss:3.907079 +step:2994 train loss:3.889246 +step:2995 train loss:3.959062 +step:2996 train loss:3.885962 +step:2997 train loss:3.797202 +step:2998 train loss:3.906676 +step:2999 train loss:3.943482 +step:3000 validation loss:3.836786 total_sharp:8.4905e-03 L1_sharp:2.4044e-01 L2_sharp:1.7196e-01 L3_sharp:1.9493e-01 L4_sharp:1.4236e-01 L5_sharp:1.2685e-01 L6_sharp:1.2944e-01 L7_sharp:1.3581e-01 L8_sharp:1.2895e-01 L9_sharp:1.2122e-01 L10_sharp:9.7262e-02 L11_sharp:7.9207e-02 L12_sharp:8.8090e-02 total_fnorm:1.3138e+00 total_l1_linf:7.2568e+03 total_spectral:1.3138e+00 L1_fnorm:3.1866e-02 L2_fnorm:2.9988e-02 L3_fnorm:2.9357e-02 L4_fnorm:3.0557e-02 L5_fnorm:3.1048e-02 L6_fnorm:3.1715e-02 L7_fnorm:3.1831e-02 L8_fnorm:3.1875e-02 L9_fnorm:3.1996e-02 L10_fnorm:3.2137e-02 L11_fnorm:3.1988e-02 L12_fnorm:3.2267e-02 L1_l1linf:2.5254e-01 L2_l1linf:2.8213e-01 L3_l1linf:2.8961e-01 L4_l1linf:2.9973e-01 L5_l1linf:2.7937e-01 L6_l1linf:2.6994e-01 L7_l1linf:2.5762e-01 L8_l1linf:2.6789e-01 L9_l1linf:2.8205e-01 L10_l1linf:2.9521e-01 L11_l1linf:2.9973e-01 L12_l1linf:3.0171e-01 L1_spectral:5.7976e-03 L2_spectral:6.4206e-03 L3_spectral:6.5798e-03 L4_spectral:6.7685e-03 L5_spectral:6.3169e-03 L6_spectral:6.0798e-03 L7_spectral:5.8572e-03 L8_spectral:6.0909e-03 L9_spectral:6.3137e-03 L10_spectral:6.5841e-03 L11_spectral:6.7609e-03 L12_spectral:6.8010e-03 ip_v_neg_g:9.3068e-03 cos_v_neg_g:9.6254e-04 v_norm:1.3138e+00 g_norm:7.3593e+00 hv_norm:2.3925e+00 cos_v_hv:4.6626e-03 hg_norm:2.5507e+03 cos_g_hg:5.7835e-01 v_par:3.3517e-05 v_perp:1.3138e+00 L1_cos_v_neg_g:1.3452e-02 L1_v_norm:3.1866e-02 L2_cos_v_neg_g:1.4560e-02 L2_v_norm:2.9988e-02 L3_cos_v_neg_g:1.5235e-02 L3_v_norm:2.9357e-02 L4_cos_v_neg_g:1.3189e-02 L4_v_norm:3.0557e-02 L5_cos_v_neg_g:1.2183e-02 L5_v_norm:3.1048e-02 L6_cos_v_neg_g:1.1546e-02 L6_v_norm:3.1715e-02 L7_cos_v_neg_g:1.1024e-02 L7_v_norm:3.1831e-02 L8_cos_v_neg_g:1.1381e-02 L8_v_norm:3.1875e-02 L9_cos_v_neg_g:1.0481e-02 L9_v_norm:3.1996e-02 L10_cos_v_neg_g:9.2959e-03 L10_v_norm:3.2137e-02 L11_cos_v_neg_g:6.3750e-03 L11_v_norm:3.1988e-02 L12_cos_v_neg_g:5.2481e-03 L12_v_norm:3.2267e-02 +step:3000 train loss:3.838583 +step:3001 train loss:3.894960 +step:3002 train loss:3.891100 +step:3003 train loss:3.888668 +step:3004 train loss:3.914209 +step:3005 train loss:3.812602 +step:3006 train loss:3.864928 +step:3007 train loss:3.893843 +step:3008 train loss:3.938000 +step:3009 train loss:3.899243 +step:3010 train loss:3.911788 +step:3011 train loss:3.903317 +step:3012 train loss:3.879255 +step:3013 train loss:3.921802 +step:3014 
train loss:3.880159 +step:3015 train loss:3.877868 +step:3016 train loss:3.905219 +step:3017 train loss:3.918438 +step:3018 train loss:3.849756 +step:3019 train loss:3.887915 +step:3020 train loss:3.905390 +step:3021 train loss:3.877078 +step:3022 train loss:3.964962 +step:3023 train loss:3.911253 +step:3024 train loss:3.900980 +step:3025 train loss:3.908722 +step:3026 train loss:3.885232 +step:3027 train loss:3.857569 +step:3028 train loss:3.909979 +step:3029 train loss:3.897573 +step:3030 train loss:3.870594 +step:3031 train loss:3.855727 +step:3032 train loss:3.840455 +step:3033 train loss:3.873454 +step:3034 train loss:3.913602 +step:3035 train loss:3.894537 +step:3036 train loss:3.853479 +step:3037 train loss:3.819023 +step:3038 train loss:3.935879 +step:3039 train loss:3.812081 +step:3040 train loss:3.804237 +step:3041 train loss:3.928282 +step:3042 train loss:3.866765 +step:3043 train loss:3.921848 +step:3044 train loss:3.816861 +step:3045 train loss:3.867628 +step:3046 train loss:3.836383 +step:3047 train loss:3.873122 +step:3048 train loss:3.830489 +step:3049 train loss:3.916992 +step:3050 train loss:3.799848 +step:3051 train loss:3.824122 +step:3052 train loss:3.839137 +step:3053 train loss:3.904829 +step:3054 train loss:3.978641 +step:3055 train loss:3.816611 +step:3056 train loss:3.855102 +step:3057 train loss:3.884431 +step:3058 train loss:3.834229 +step:3059 train loss:3.865596 +step:3060 train loss:3.857511 +step:3061 train loss:3.848857 +step:3062 train loss:3.894699 +step:3063 train loss:3.884494 +step:3064 train loss:3.903068 +step:3065 train loss:3.918881 +step:3066 train loss:3.825645 +step:3067 train loss:3.868072 +step:3068 train loss:3.923220 +step:3069 train loss:3.932046 +step:3070 train loss:3.868703 +step:3071 train loss:3.880544 +step:3072 train loss:3.884279 +step:3073 train loss:3.920533 +step:3074 train loss:3.857419 +step:3075 train loss:3.892284 +step:3076 train loss:3.827145 +step:3077 train loss:3.826273 +step:3078 train loss:3.851241 +step:3079 train loss:3.901175 +step:3080 train loss:3.892027 +step:3081 train loss:3.942586 +step:3082 train loss:3.910498 +step:3083 train loss:3.845170 +step:3084 train loss:3.922775 +step:3085 train loss:3.852363 +step:3086 train loss:3.910138 +step:3087 train loss:3.879682 +step:3088 train loss:3.964078 +step:3089 train loss:3.835640 +step:3090 train loss:3.908991 +step:3091 train loss:3.826799 +step:3092 train loss:3.859718 +step:3093 train loss:3.875724 +step:3094 train loss:3.866696 +step:3095 train loss:3.944551 +step:3096 train loss:3.875770 +step:3097 train loss:3.890729 +step:3098 train loss:3.863161 +step:3099 train loss:3.875333 +step:3100 train loss:3.897502 +step:3101 train loss:3.979862 +step:3102 train loss:3.906487 +step:3103 train loss:3.834613 +step:3104 train loss:3.915278 +step:3105 train loss:3.887685 +step:3106 train loss:3.879197 +step:3107 train loss:3.861488 +step:3108 train loss:3.841707 +step:3109 train loss:3.891852 +step:3110 train loss:3.826821 +step:3111 train loss:3.858773 +step:3112 train loss:3.795888 +step:3113 train loss:3.916286 +step:3114 train loss:3.826391 +step:3115 train loss:3.871598 +step:3116 train loss:3.745421 +step:3117 train loss:3.772952 +step:3118 train loss:3.868838 +step:3119 train loss:3.881282 +step:3120 train loss:3.879993 +step:3121 train loss:3.831717 +step:3122 train loss:3.907611 +step:3123 train loss:3.827099 +step:3124 train loss:3.885929 +step:3125 train loss:3.899992 +step:3126 train loss:4.001886 +step:3127 train loss:3.849962 +step:3128 train loss:3.880017 
+step:3129 train loss:3.859402 +step:3130 train loss:3.835489 +step:3131 train loss:3.913587 +step:3132 train loss:3.902210 +step:3133 train loss:3.868449 +step:3134 train loss:3.771564 +step:3135 train loss:3.857914 +step:3136 train loss:3.836391 +step:3137 train loss:3.966836 +step:3138 train loss:3.868105 +step:3139 train loss:3.849524 +step:3140 train loss:3.869548 +step:3141 train loss:3.876314 +step:3142 train loss:3.811902 +step:3143 train loss:3.900771 +step:3144 train loss:3.843358 +step:3145 train loss:3.830551 +step:3146 train loss:3.840524 +step:3147 train loss:3.952679 +step:3148 train loss:3.854792 +step:3149 train loss:3.910994 +step:3150 train loss:3.893684 +step:3151 train loss:3.867510 +step:3152 train loss:3.860769 +step:3153 train loss:3.827741 +step:3154 train loss:3.903406 +step:3155 train loss:3.851763 +step:3156 train loss:3.898561 +step:3157 train loss:3.904392 +step:3158 train loss:3.869376 +step:3159 train loss:3.810603 +step:3160 train loss:3.861010 +step:3161 train loss:3.823720 +step:3162 train loss:3.889153 +step:3163 train loss:3.868303 +step:3164 train loss:3.850246 +step:3165 train loss:3.864290 +step:3166 train loss:3.905302 +step:3167 train loss:3.866498 +step:3168 train loss:3.941838 +step:3169 train loss:3.857339 +step:3170 train loss:3.844195 +step:3171 train loss:3.825045 +step:3172 train loss:3.837054 +step:3173 train loss:3.779848 +step:3174 train loss:3.885270 +step:3175 train loss:3.860768 +step:3176 train loss:3.871225 +step:3177 train loss:3.834352 +step:3178 train loss:3.815293 +step:3179 train loss:3.886913 +step:3180 train loss:3.820431 +step:3181 train loss:3.902073 +step:3182 train loss:3.909441 +step:3183 train loss:3.850981 +step:3184 train loss:3.850934 +step:3185 train loss:3.912224 +step:3186 train loss:3.867342 +step:3187 train loss:3.886278 +step:3188 train loss:3.925074 +step:3189 train loss:3.882260 +step:3190 train loss:3.826354 +step:3191 train loss:3.836759 +step:3192 train loss:3.796510 +step:3193 train loss:3.880908 +step:3194 train loss:3.837405 +step:3195 train loss:3.828895 +step:3196 train loss:3.872561 +step:3197 train loss:3.837861 +step:3198 train loss:3.862834 +step:3199 train loss:3.855453 +step:3200 train loss:3.856571 +step:3201 train loss:3.827977 +step:3202 train loss:3.876901 +step:3203 train loss:3.947157 +step:3204 train loss:3.909122 diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/config.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..cac8442bf3324dc469f67f9c158e453db9fcbe08 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/muon_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + 
"optimizer": "muon", + "muon_lr": 0.001, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "5e3b5d10-cd07-4cde-986f-6a4cff417e1a", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..85a4181bb403a774207ad9b7d7b7379759c42f87 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3209375143051147, + "total_l1_linf_norm": 7941.41015625, + "total_spectral_norm": 1.3209376335144043, + "layer_1_update_fnorm": 0.0617540143430233, + "layer_1_max_l1_linf_norm": 0.21220602095127106, + "layer_1_max_spectral_norm": 0.004635004326701164, + "layer_2_update_fnorm": 0.056402359157800674, + "layer_2_max_l1_linf_norm": 0.20117127895355225, + "layer_2_max_spectral_norm": 0.004528792575001717, + "layer_3_update_fnorm": 0.05361899733543396, + "layer_3_max_l1_linf_norm": 0.20507323741912842, + "layer_3_max_spectral_norm": 0.004646815825253725, + "layer_4_update_fnorm": 0.05752219632267952, + "layer_4_max_l1_linf_norm": 0.21101561188697815, + "layer_4_max_spectral_norm": 0.004801120143383741, + "layer_5_update_fnorm": 0.058487795293331146, + "layer_5_max_l1_linf_norm": 0.21860644221305847, + "layer_5_max_spectral_norm": 0.004923616535961628, + "layer_6_update_fnorm": 0.059902314096689224, + "layer_6_max_l1_linf_norm": 0.20729321241378784, + "layer_6_max_spectral_norm": 0.0048547424376010895, + "layer_7_update_fnorm": 0.06042702496051788, + "layer_7_max_l1_linf_norm": 0.21374252438545227, + "layer_7_max_spectral_norm": 0.005618838127702475, + "layer_8_update_fnorm": 0.06048888713121414, + "layer_8_max_l1_linf_norm": 0.22104568779468536, + "layer_8_max_spectral_norm": 0.0060979644767940044, + "layer_9_update_fnorm": 0.06080456078052521, + "layer_9_max_l1_linf_norm": 0.21801188588142395, + "layer_9_max_spectral_norm": 0.0064860195852816105, + "layer_10_update_fnorm": 0.06109054014086723, + "layer_10_max_l1_linf_norm": 0.21293935179710388, + "layer_10_max_spectral_norm": 0.0058318842202425, + "layer_11_update_fnorm": 0.06065156310796738, + "layer_11_max_l1_linf_norm": 0.18612951040267944, + "layer_11_max_spectral_norm": 0.005440341308712959, + "layer_12_update_fnorm": 0.0611189641058445, + "layer_12_max_l1_linf_norm": 0.17174606025218964, + "layer_12_max_spectral_norm": 0.005802188999950886, + "total_sharpness": 0.026269199326634407, + "ip_v_neg_g": 0.024876123294234276, + "cos_v_neg_g": 0.002914024516940117, + "v_norm": 1.3209375143051147, + "g_norm": 6.462599277496338, + "hv_norm": 5.001190185546875, + "cos_v_hv": 0.006938342936336994, + "hg_norm": 2740.370849609375, + "cos_g_hg": 0.6361414790153503, + "v_parallel_norm": 0.00016891608538571745, + "v_perp_norm": 1.3209375143051147, + "layer_1_v_norm": 0.0617540143430233, + "layer_1_cos_v_neg_g": 0.03153359517455101, + "layer_2_v_norm": 0.056402359157800674, + "layer_2_cos_v_neg_g": 0.031459227204322815, + "layer_3_v_norm": 0.05361899733543396, + "layer_3_cos_v_neg_g": 0.02721431665122509, + "layer_4_v_norm": 0.05752219632267952, + "layer_4_cos_v_neg_g": 
0.018693093210458755, + "layer_5_v_norm": 0.058487795293331146, + "layer_5_cos_v_neg_g": 0.016512064263224602, + "layer_6_v_norm": 0.059902314096689224, + "layer_6_cos_v_neg_g": 0.013213122263550758, + "layer_7_v_norm": 0.06042702496051788, + "layer_7_cos_v_neg_g": 0.012009786441922188, + "layer_8_v_norm": 0.06048888340592384, + "layer_8_cos_v_neg_g": 0.010599808767437935, + "layer_9_v_norm": 0.06080456078052521, + "layer_9_cos_v_neg_g": 0.009180677123367786, + "layer_10_v_norm": 0.06109054014086723, + "layer_10_cos_v_neg_g": 0.008248797617852688, + "layer_11_v_norm": 0.06065156310796738, + "layer_11_cos_v_neg_g": 0.008782869204878807, + "layer_12_v_norm": 0.0611189641058445, + "layer_12_cos_v_neg_g": 0.009599080309271812, + "layer_1_sharpness": 0.8462588787078857, + "layer_2_sharpness": 0.5700809955596924, + "layer_3_sharpness": 0.3219163715839386, + "layer_4_sharpness": 0.14444445073604584, + "layer_5_sharpness": 0.10038033127784729, + "layer_6_sharpness": 0.07140319049358368, + "layer_7_sharpness": 0.06493678689002991, + "layer_8_sharpness": 0.05133713781833649, + "layer_9_sharpness": 0.0346820093691349, + "layer_10_sharpness": 0.027500981464982033, + "layer_11_sharpness": 0.024757781997323036, + "layer_12_sharpness": 0.026502832770347595 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_10000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..3f0969f45c9b380fecd85667f89e7e74935d1658 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3464239835739136, + "total_l1_linf_norm": 8075.06884765625, + "total_spectral_norm": 1.3464239835739136, + "layer_1_update_fnorm": 0.060847919434309006, + "layer_1_max_l1_linf_norm": 0.3447530269622803, + "layer_1_max_spectral_norm": 0.007765722926706076, + "layer_2_update_fnorm": 0.05879515781998634, + "layer_2_max_l1_linf_norm": 0.3701484203338623, + "layer_2_max_spectral_norm": 0.008376776240766048, + "layer_3_update_fnorm": 0.05904396250844002, + "layer_3_max_l1_linf_norm": 0.3784865140914917, + "layer_3_max_spectral_norm": 0.008500976487994194, + "layer_4_update_fnorm": 0.06013995409011841, + "layer_4_max_l1_linf_norm": 0.36145907640457153, + "layer_4_max_spectral_norm": 0.008157186210155487, + "layer_5_update_fnorm": 0.06091999262571335, + "layer_5_max_l1_linf_norm": 0.3409767150878906, + "layer_5_max_spectral_norm": 0.007705542724579573, + "layer_6_update_fnorm": 0.06128457933664322, + "layer_6_max_l1_linf_norm": 0.33348965644836426, + "layer_6_max_spectral_norm": 0.007506547961384058, + "layer_7_update_fnorm": 0.06133315712213516, + "layer_7_max_l1_linf_norm": 0.32377660274505615, + "layer_7_max_spectral_norm": 0.0072408379055559635, + "layer_8_update_fnorm": 0.06126480922102928, + "layer_8_max_l1_linf_norm": 0.3188258409500122, + "layer_8_max_spectral_norm": 0.007174049504101276, + "layer_9_update_fnorm": 0.061355795711278915, + "layer_9_max_l1_linf_norm": 0.35177695751190186, + "layer_9_max_spectral_norm": 0.00786664243787527, + "layer_10_update_fnorm": 0.06141141429543495, + "layer_10_max_l1_linf_norm": 0.382619172334671, + "layer_10_max_spectral_norm": 0.00856894999742508, + "layer_11_update_fnorm": 0.061488840728998184, + "layer_11_max_l1_linf_norm": 0.4186510741710663, + "layer_11_max_spectral_norm": 0.009276813827455044, + 
"layer_12_update_fnorm": 0.06142319738864899, + "layer_12_max_l1_linf_norm": 0.4025733470916748, + "layer_12_max_spectral_norm": 0.009045220911502838, + "total_sharpness": 0.0050901807844638824, + "ip_v_neg_g": 0.0052380915731191635, + "cos_v_neg_g": 0.0010166342835873365, + "v_norm": 1.3464239835739136, + "g_norm": 3.826718330383301, + "hv_norm": 1.5299192667007446, + "cos_v_hv": 0.004479675088077784, + "hg_norm": 742.1688842773438, + "cos_g_hg": 0.5579183101654053, + "v_parallel_norm": 3.846636536763981e-05, + "v_perp_norm": 1.3464239835739136, + "layer_1_v_norm": 0.060847919434309006, + "layer_1_cos_v_neg_g": 0.019605426117777824, + "layer_2_v_norm": 0.05879515781998634, + "layer_2_cos_v_neg_g": 0.011119943112134933, + "layer_3_v_norm": 0.05904396250844002, + "layer_3_cos_v_neg_g": 0.004024804104119539, + "layer_4_v_norm": 0.06013995409011841, + "layer_4_cos_v_neg_g": 0.001577873481437564, + "layer_5_v_norm": 0.06091999262571335, + "layer_5_cos_v_neg_g": 0.0021993897389620543, + "layer_6_v_norm": 0.06128458306193352, + "layer_6_cos_v_neg_g": 0.002545334864407778, + "layer_7_v_norm": 0.06133315712213516, + "layer_7_cos_v_neg_g": 0.004951042588800192, + "layer_8_v_norm": 0.06126481294631958, + "layer_8_cos_v_neg_g": 0.006104219704866409, + "layer_9_v_norm": 0.061355795711278915, + "layer_9_cos_v_neg_g": 0.004582652822136879, + "layer_10_v_norm": 0.06141141429543495, + "layer_10_cos_v_neg_g": 0.0037412261590361595, + "layer_11_v_norm": 0.061488837003707886, + "layer_11_cos_v_neg_g": 0.004230809397995472, + "layer_12_v_norm": 0.06142319738864899, + "layer_12_cos_v_neg_g": 0.004792382009327412, + "layer_1_sharpness": 0.49581608176231384, + "layer_2_sharpness": 0.04619869217276573, + "layer_3_sharpness": 0.026187855750322342, + "layer_4_sharpness": 0.013813266530632973, + "layer_5_sharpness": 0.013884780928492546, + "layer_6_sharpness": 0.019044913351535797, + "layer_7_sharpness": 0.024627914652228355, + "layer_8_sharpness": 0.02724175527691841, + "layer_9_sharpness": 0.018163369968533516, + "layer_10_sharpness": 0.012842162512242794, + "layer_11_sharpness": 0.014039809815585613, + "layer_12_sharpness": 0.021033767610788345 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..bbbd9fa114f787198cef35a8759a063ac8e36bdb --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3127503395080566, + "total_l1_linf_norm": 7906.31396484375, + "total_spectral_norm": 1.312750220298767, + "layer_1_update_fnorm": 0.06187446787953377, + "layer_1_max_l1_linf_norm": 0.21095193922519684, + "layer_1_max_spectral_norm": 0.004770191386342049, + "layer_2_update_fnorm": 0.05819966271519661, + "layer_2_max_l1_linf_norm": 0.23838874697685242, + "layer_2_max_spectral_norm": 0.005383371375501156, + "layer_3_update_fnorm": 0.05564356967806816, + "layer_3_max_l1_linf_norm": 0.24841472506523132, + "layer_3_max_spectral_norm": 0.005621832329779863, + "layer_4_update_fnorm": 0.05841444060206413, + "layer_4_max_l1_linf_norm": 0.2520967721939087, + "layer_4_max_spectral_norm": 0.005688756238669157, + "layer_5_update_fnorm": 0.059250734746456146, + "layer_5_max_l1_linf_norm": 0.25002944469451904, + "layer_5_max_spectral_norm": 0.005623518954962492, + 
"layer_6_update_fnorm": 0.060658857226371765, + "layer_6_max_l1_linf_norm": 0.24538256227970123, + "layer_6_max_spectral_norm": 0.005508782342076302, + "layer_7_update_fnorm": 0.06089893728494644, + "layer_7_max_l1_linf_norm": 0.24572177231311798, + "layer_7_max_spectral_norm": 0.005519269034266472, + "layer_8_update_fnorm": 0.06094785034656525, + "layer_8_max_l1_linf_norm": 0.2556062638759613, + "layer_8_max_spectral_norm": 0.005730153061449528, + "layer_9_update_fnorm": 0.06107352301478386, + "layer_9_max_l1_linf_norm": 0.2744172215461731, + "layer_9_max_spectral_norm": 0.006154255475848913, + "layer_10_update_fnorm": 0.061055444180965424, + "layer_10_max_l1_linf_norm": 0.26646310091018677, + "layer_10_max_spectral_norm": 0.005940841976553202, + "layer_11_update_fnorm": 0.06088286265730858, + "layer_11_max_l1_linf_norm": 0.2651573717594147, + "layer_11_max_spectral_norm": 0.005990084260702133, + "layer_12_update_fnorm": 0.06114601716399193, + "layer_12_max_l1_linf_norm": 0.26582157611846924, + "layer_12_max_spectral_norm": 0.0059484378434717655, + "total_sharpness": 0.014027219265699387, + "ip_v_neg_g": 0.010642270557582378, + "cos_v_neg_g": 0.001630130922421813, + "v_norm": 1.3127503395080566, + "g_norm": 4.973128318786621, + "hv_norm": 2.3213891983032227, + "cos_v_hv": 0.007932419888675213, + "hg_norm": 805.060546875, + "cos_g_hg": 0.5550928115844727, + "v_parallel_norm": 7.719513087067753e-05, + "v_perp_norm": 1.3127503395080566, + "layer_1_v_norm": 0.06187446787953377, + "layer_1_cos_v_neg_g": 0.011196129955351353, + "layer_2_v_norm": 0.05819966271519661, + "layer_2_cos_v_neg_g": 0.01192288938909769, + "layer_3_v_norm": 0.05564357340335846, + "layer_3_cos_v_neg_g": 0.014665838330984116, + "layer_4_v_norm": 0.05841444060206413, + "layer_4_cos_v_neg_g": 0.01110968366265297, + "layer_5_v_norm": 0.059250734746456146, + "layer_5_cos_v_neg_g": 0.011045677587389946, + "layer_6_v_norm": 0.06065885350108147, + "layer_6_cos_v_neg_g": 0.009689541533589363, + "layer_7_v_norm": 0.06089893728494644, + "layer_7_cos_v_neg_g": 0.00991013366729021, + "layer_8_v_norm": 0.06094784662127495, + "layer_8_cos_v_neg_g": 0.009667857550084591, + "layer_9_v_norm": 0.06107352301478386, + "layer_9_cos_v_neg_g": 0.00867992453277111, + "layer_10_v_norm": 0.061055444180965424, + "layer_10_cos_v_neg_g": 0.007948899641633034, + "layer_11_v_norm": 0.06088286265730858, + "layer_11_cos_v_neg_g": 0.007182322908192873, + "layer_12_v_norm": 0.06114601716399193, + "layer_12_cos_v_neg_g": 0.005122308153659105, + "layer_1_sharpness": 0.1313929706811905, + "layer_2_sharpness": 0.10108131915330887, + "layer_3_sharpness": 0.13893693685531616, + "layer_4_sharpness": 0.07011973857879639, + "layer_5_sharpness": 0.05884504318237305, + "layer_6_sharpness": 0.055195778608322144, + "layer_7_sharpness": 0.05983301252126694, + "layer_8_sharpness": 0.050726521760225296, + "layer_9_sharpness": 0.04297168552875519, + "layer_10_sharpness": 0.03359311819076538, + "layer_11_sharpness": 0.027278997004032135, + "layer_12_sharpness": 0.03423604369163513 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..a03f29233389e9b329d2f857512222ed5c9bffc4 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + 
"total_update_fnorm": 1.3691391944885254, + "total_l1_linf_norm": 8212.7490234375, + "total_spectral_norm": 1.369139313697815, + "layer_1_update_fnorm": 0.061538245528936386, + "layer_1_max_l1_linf_norm": 0.2115684151649475, + "layer_1_max_spectral_norm": 0.00481251860037446, + "layer_2_update_fnorm": 0.05808024853467941, + "layer_2_max_l1_linf_norm": 0.24054540693759918, + "layer_2_max_spectral_norm": 0.0054534790106117725, + "layer_3_update_fnorm": 0.055976636707782745, + "layer_3_max_l1_linf_norm": 0.24529996514320374, + "layer_3_max_spectral_norm": 0.005583669058978558, + "layer_4_update_fnorm": 0.05893970653414726, + "layer_4_max_l1_linf_norm": 0.2544841170310974, + "layer_4_max_spectral_norm": 0.00580319669097662, + "layer_5_update_fnorm": 0.05986177921295166, + "layer_5_max_l1_linf_norm": 0.2503693401813507, + "layer_5_max_spectral_norm": 0.005662314593791962, + "layer_6_update_fnorm": 0.0608292818069458, + "layer_6_max_l1_linf_norm": 0.244097039103508, + "layer_6_max_spectral_norm": 0.005503482650965452, + "layer_7_update_fnorm": 0.06085292622447014, + "layer_7_max_l1_linf_norm": 0.23271669447422028, + "layer_7_max_spectral_norm": 0.0053060525096952915, + "layer_8_update_fnorm": 0.060881357640028, + "layer_8_max_l1_linf_norm": 0.22542916238307953, + "layer_8_max_spectral_norm": 0.005114912986755371, + "layer_9_update_fnorm": 0.060964394360780716, + "layer_9_max_l1_linf_norm": 0.2603972554206848, + "layer_9_max_spectral_norm": 0.005901222117245197, + "layer_10_update_fnorm": 0.06093848496675491, + "layer_10_max_l1_linf_norm": 0.2631588280200958, + "layer_10_max_spectral_norm": 0.005918614566326141, + "layer_11_update_fnorm": 0.060858700424432755, + "layer_11_max_l1_linf_norm": 0.2709439694881439, + "layer_11_max_spectral_norm": 0.006126830354332924, + "layer_12_update_fnorm": 0.061218082904815674, + "layer_12_max_l1_linf_norm": 0.30885520577430725, + "layer_12_max_spectral_norm": 0.0067774029448628426, + "total_sharpness": 0.010205483995378017, + "ip_v_neg_g": 0.007488955743610859, + "cos_v_neg_g": 0.0010792497778311372, + "v_norm": 1.3691391944885254, + "g_norm": 5.06817626953125, + "hv_norm": 2.2609686851501465, + "cos_v_hv": 0.006179974414408207, + "hg_norm": 849.8262939453125, + "cos_g_hg": 0.5627140998840332, + "v_parallel_norm": 4.7860361519269645e-05, + "v_perp_norm": 1.3691391944885254, + "layer_1_v_norm": 0.061538245528936386, + "layer_1_cos_v_neg_g": 0.011916029267013073, + "layer_2_v_norm": 0.05808024853467941, + "layer_2_cos_v_neg_g": 0.013595384545624256, + "layer_3_v_norm": 0.055976636707782745, + "layer_3_cos_v_neg_g": 0.009295224212110043, + "layer_4_v_norm": 0.05893970653414726, + "layer_4_cos_v_neg_g": 0.006363385822623968, + "layer_5_v_norm": 0.05986177921295166, + "layer_5_cos_v_neg_g": 0.005693273153156042, + "layer_6_v_norm": 0.0608292818069458, + "layer_6_cos_v_neg_g": 0.00554319005459547, + "layer_7_v_norm": 0.06085292622447014, + "layer_7_cos_v_neg_g": 0.0053168353624641895, + "layer_8_v_norm": 0.0608813539147377, + "layer_8_cos_v_neg_g": 0.005830185953527689, + "layer_9_v_norm": 0.060964394360780716, + "layer_9_cos_v_neg_g": 0.0048421225510537624, + "layer_10_v_norm": 0.06093848496675491, + "layer_10_cos_v_neg_g": 0.004972631111741066, + "layer_11_v_norm": 0.060858700424432755, + "layer_11_cos_v_neg_g": 0.005377260036766529, + "layer_12_v_norm": 0.061218082904815674, + "layer_12_cos_v_neg_g": 0.006056726444512606, + "layer_1_sharpness": 0.3235915005207062, + "layer_2_sharpness": 0.21490131318569183, + "layer_3_sharpness": 0.11012983322143555, + 
"layer_4_sharpness": 0.037546709179878235, + "layer_5_sharpness": 0.03722051903605461, + "layer_6_sharpness": 0.039514802396297455, + "layer_7_sharpness": 0.04271889105439186, + "layer_8_sharpness": 0.03936252370476723, + "layer_9_sharpness": 0.03240107372403145, + "layer_10_sharpness": 0.024347975850105286, + "layer_11_sharpness": 0.02642092853784561, + "layer_12_sharpness": 0.06953734159469604 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..33095790165dfff4972c5a15802d28fcf7bfdeaa --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.352480173110962, + "total_l1_linf_norm": 8106.109375, + "total_spectral_norm": 1.3524800539016724, + "layer_1_update_fnorm": 0.06171466037631035, + "layer_1_max_l1_linf_norm": 0.2589343190193176, + "layer_1_max_spectral_norm": 0.005849685985594988, + "layer_2_update_fnorm": 0.05800430849194527, + "layer_2_max_l1_linf_norm": 0.27832290530204773, + "layer_2_max_spectral_norm": 0.006262197624891996, + "layer_3_update_fnorm": 0.056810423731803894, + "layer_3_max_l1_linf_norm": 0.291424036026001, + "layer_3_max_spectral_norm": 0.006584931164979935, + "layer_4_update_fnorm": 0.05901793763041496, + "layer_4_max_l1_linf_norm": 0.28237003087997437, + "layer_4_max_spectral_norm": 0.006401626858860254, + "layer_5_update_fnorm": 0.05997440963983536, + "layer_5_max_l1_linf_norm": 0.2787211537361145, + "layer_5_max_spectral_norm": 0.006270939018577337, + "layer_6_update_fnorm": 0.060898393392562866, + "layer_6_max_l1_linf_norm": 0.26627397537231445, + "layer_6_max_spectral_norm": 0.005985850002616644, + "layer_7_update_fnorm": 0.06096863001585007, + "layer_7_max_l1_linf_norm": 0.2576139569282532, + "layer_7_max_spectral_norm": 0.005773227196186781, + "layer_8_update_fnorm": 0.061132822185754776, + "layer_8_max_l1_linf_norm": 0.27283066511154175, + "layer_8_max_spectral_norm": 0.006118657998740673, + "layer_9_update_fnorm": 0.06106835976243019, + "layer_9_max_l1_linf_norm": 0.2843177318572998, + "layer_9_max_spectral_norm": 0.006380904000252485, + "layer_10_update_fnorm": 0.06113968789577484, + "layer_10_max_l1_linf_norm": 0.30219945311546326, + "layer_10_max_spectral_norm": 0.006810474209487438, + "layer_11_update_fnorm": 0.06101503595709801, + "layer_11_max_l1_linf_norm": 0.3013795018196106, + "layer_11_max_spectral_norm": 0.0068273083306849, + "layer_12_update_fnorm": 0.06101040914654732, + "layer_12_max_l1_linf_norm": 0.2956932485103607, + "layer_12_max_spectral_norm": 0.006722015794366598, + "total_sharpness": 0.008481831289827824, + "ip_v_neg_g": 0.0066098179668188095, + "cos_v_neg_g": 0.0011598978890106082, + "v_norm": 1.352480173110962, + "g_norm": 4.213459491729736, + "hv_norm": 1.4144971370697021, + "cos_v_hv": 0.008109955117106438, + "hg_norm": 384.36260986328125, + "cos_g_hg": 0.5116893649101257, + "v_parallel_norm": 4.565167546388693e-05, + "v_perp_norm": 1.352480173110962, + "layer_1_v_norm": 0.06171466037631035, + "layer_1_cos_v_neg_g": 0.016029117628932, + "layer_2_v_norm": 0.05800430849194527, + "layer_2_cos_v_neg_g": 0.014358834363520145, + "layer_3_v_norm": 0.056810423731803894, + "layer_3_cos_v_neg_g": 0.010191445238888264, + "layer_4_v_norm": 0.05901793763041496, + "layer_4_cos_v_neg_g": 
0.005944633390754461, + "layer_5_v_norm": 0.05997440963983536, + "layer_5_cos_v_neg_g": 0.006364007946103811, + "layer_6_v_norm": 0.060898393392562866, + "layer_6_cos_v_neg_g": 0.006037876475602388, + "layer_7_v_norm": 0.06096863001585007, + "layer_7_cos_v_neg_g": 0.00659829331561923, + "layer_8_v_norm": 0.061132822185754776, + "layer_8_cos_v_neg_g": 0.00565419951453805, + "layer_9_v_norm": 0.06106835976243019, + "layer_9_cos_v_neg_g": 0.004820365458726883, + "layer_10_v_norm": 0.06113968789577484, + "layer_10_cos_v_neg_g": 0.0032337517477571964, + "layer_11_v_norm": 0.06101503595709801, + "layer_11_cos_v_neg_g": 0.003338233567774296, + "layer_12_v_norm": 0.06101040914654732, + "layer_12_cos_v_neg_g": 0.0033557384740561247, + "layer_1_sharpness": 0.168656587600708, + "layer_2_sharpness": 0.07459656894207001, + "layer_3_sharpness": 0.06838702410459518, + "layer_4_sharpness": 0.035957615822553635, + "layer_5_sharpness": 0.036676470190286636, + "layer_6_sharpness": 0.040052689611911774, + "layer_7_sharpness": 0.04493068531155586, + "layer_8_sharpness": 0.041525948792696, + "layer_9_sharpness": 0.032578036189079285, + "layer_10_sharpness": 0.02509589120745659, + "layer_11_sharpness": 0.022519918158650398, + "layer_12_sharpness": 0.029556602239608765 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..2496e63cdf200a752d62715b707a2fc73fdf7eb5 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3358492851257324, + "total_l1_linf_norm": 8027.18310546875, + "total_spectral_norm": 1.3358492851257324, + "layer_1_update_fnorm": 0.06143255904316902, + "layer_1_max_l1_linf_norm": 0.28158891201019287, + "layer_1_max_spectral_norm": 0.006380243692547083, + "layer_2_update_fnorm": 0.058433230966329575, + "layer_2_max_l1_linf_norm": 0.32009515166282654, + "layer_2_max_spectral_norm": 0.007262039463967085, + "layer_3_update_fnorm": 0.05686946585774422, + "layer_3_max_l1_linf_norm": 0.31843680143356323, + "layer_3_max_spectral_norm": 0.007179712876677513, + "layer_4_update_fnorm": 0.059253137558698654, + "layer_4_max_l1_linf_norm": 0.3164268732070923, + "layer_4_max_spectral_norm": 0.007161775603890419, + "layer_5_update_fnorm": 0.060369741171598434, + "layer_5_max_l1_linf_norm": 0.29063522815704346, + "layer_5_max_spectral_norm": 0.0065235840156674385, + "layer_6_update_fnorm": 0.06103314459323883, + "layer_6_max_l1_linf_norm": 0.28772228956222534, + "layer_6_max_spectral_norm": 0.00648892018944025, + "layer_7_update_fnorm": 0.061060383915901184, + "layer_7_max_l1_linf_norm": 0.27143317461013794, + "layer_7_max_spectral_norm": 0.006136658601462841, + "layer_8_update_fnorm": 0.061199162155389786, + "layer_8_max_l1_linf_norm": 0.28124675154685974, + "layer_8_max_spectral_norm": 0.006337479688227177, + "layer_9_update_fnorm": 0.06109888106584549, + "layer_9_max_l1_linf_norm": 0.3073064684867859, + "layer_9_max_spectral_norm": 0.006911891512572765, + "layer_10_update_fnorm": 0.061110273003578186, + "layer_10_max_l1_linf_norm": 0.30687403678894043, + "layer_10_max_spectral_norm": 0.006936653051525354, + "layer_11_update_fnorm": 0.0610828772187233, + "layer_11_max_l1_linf_norm": 0.32760587334632874, + "layer_11_max_spectral_norm": 0.007360654883086681, 
+ "layer_12_update_fnorm": 0.06115907058119774, + "layer_12_max_l1_linf_norm": 0.3160170912742615, + "layer_12_max_spectral_norm": 0.007161209359765053, + "total_sharpness": 0.007314006332308054, + "ip_v_neg_g": 0.008600694127380848, + "cos_v_neg_g": 0.0016633231425657868, + "v_norm": 1.3358492851257324, + "g_norm": 3.870788097381592, + "hv_norm": 1.099161148071289, + "cos_v_hv": 0.00888897106051445, + "hg_norm": 254.40455627441406, + "cos_g_hg": 0.4850771427154541, + "v_parallel_norm": 5.617945862468332e-05, + "v_perp_norm": 1.3358492851257324, + "layer_1_v_norm": 0.06143255904316902, + "layer_1_cos_v_neg_g": 0.014498083852231503, + "layer_2_v_norm": 0.058433230966329575, + "layer_2_cos_v_neg_g": 0.014245263300836086, + "layer_3_v_norm": 0.056869469583034515, + "layer_3_cos_v_neg_g": 0.01153869554400444, + "layer_4_v_norm": 0.059253137558698654, + "layer_4_cos_v_neg_g": 0.010679805651307106, + "layer_5_v_norm": 0.060369741171598434, + "layer_5_cos_v_neg_g": 0.00983413029462099, + "layer_6_v_norm": 0.06103314459323883, + "layer_6_cos_v_neg_g": 0.009911015629768372, + "layer_7_v_norm": 0.061060383915901184, + "layer_7_cos_v_neg_g": 0.010069875977933407, + "layer_8_v_norm": 0.061199165880680084, + "layer_8_cos_v_neg_g": 0.011347422376275063, + "layer_9_v_norm": 0.06109888106584549, + "layer_9_cos_v_neg_g": 0.009925744496285915, + "layer_10_v_norm": 0.061110273003578186, + "layer_10_cos_v_neg_g": 0.008449767716228962, + "layer_11_v_norm": 0.061082880944013596, + "layer_11_cos_v_neg_g": 0.007127908058464527, + "layer_12_v_norm": 0.06115907058119774, + "layer_12_cos_v_neg_g": 0.006577228661626577, + "layer_1_sharpness": 0.15601766109466553, + "layer_2_sharpness": 0.05005662515759468, + "layer_3_sharpness": 0.05930851399898529, + "layer_4_sharpness": 0.029936879873275757, + "layer_5_sharpness": 0.02732297033071518, + "layer_6_sharpness": 0.03526858240365982, + "layer_7_sharpness": 0.040942758321762085, + "layer_8_sharpness": 0.040812231600284576, + "layer_9_sharpness": 0.029842866584658623, + "layer_10_sharpness": 0.022214043885469437, + "layer_11_sharpness": 0.02142222411930561, + "layer_12_sharpness": 0.031711045652627945 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..86e8cf3d178744552ef2d4644623d64afdf99faf --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3467340469360352, + "total_l1_linf_norm": 8079.25341796875, + "total_spectral_norm": 1.3467339277267456, + "layer_1_update_fnorm": 0.06157553568482399, + "layer_1_max_l1_linf_norm": 0.28524208068847656, + "layer_1_max_spectral_norm": 0.0064888265915215015, + "layer_2_update_fnorm": 0.05897834524512291, + "layer_2_max_l1_linf_norm": 0.3322506844997406, + "layer_2_max_spectral_norm": 0.007485948968678713, + "layer_3_update_fnorm": 0.05790039151906967, + "layer_3_max_l1_linf_norm": 0.32420456409454346, + "layer_3_max_spectral_norm": 0.007287499960511923, + "layer_4_update_fnorm": 0.05943175032734871, + "layer_4_max_l1_linf_norm": 0.3395395278930664, + "layer_4_max_spectral_norm": 0.007591279689222574, + "layer_5_update_fnorm": 0.06047416478395462, + "layer_5_max_l1_linf_norm": 0.334541916847229, + "layer_5_max_spectral_norm": 0.007479676511138678, + "layer_6_update_fnorm": 
0.06122193485498428, + "layer_6_max_l1_linf_norm": 0.3252260684967041, + "layer_6_max_spectral_norm": 0.007321900688111782, + "layer_7_update_fnorm": 0.06114017590880394, + "layer_7_max_l1_linf_norm": 0.29022538661956787, + "layer_7_max_spectral_norm": 0.006464653182774782, + "layer_8_update_fnorm": 0.06117762625217438, + "layer_8_max_l1_linf_norm": 0.2964279055595398, + "layer_8_max_spectral_norm": 0.006603767164051533, + "layer_9_update_fnorm": 0.06108545884490013, + "layer_9_max_l1_linf_norm": 0.310930073261261, + "layer_9_max_spectral_norm": 0.0069195604883134365, + "layer_10_update_fnorm": 0.06115522235631943, + "layer_10_max_l1_linf_norm": 0.3324357271194458, + "layer_10_max_spectral_norm": 0.007401288487017155, + "layer_11_update_fnorm": 0.06107183173298836, + "layer_11_max_l1_linf_norm": 0.32695746421813965, + "layer_11_max_spectral_norm": 0.007373245432972908, + "layer_12_update_fnorm": 0.06106644496321678, + "layer_12_max_l1_linf_norm": 0.2984953820705414, + "layer_12_max_spectral_norm": 0.0068665496073663235, + "total_sharpness": 0.0065717617981135845, + "ip_v_neg_g": 0.007013428956270218, + "cos_v_neg_g": 0.0012783119454979897, + "v_norm": 1.3467340469360352, + "g_norm": 4.073912620544434, + "hv_norm": 1.0415043830871582, + "cos_v_hv": 0.008497722446918488, + "hg_norm": 345.9919128417969, + "cos_g_hg": 0.5581362843513489, + "v_parallel_norm": 4.279538552509621e-05, + "v_perp_norm": 1.3467340469360352, + "layer_1_v_norm": 0.06157553568482399, + "layer_1_cos_v_neg_g": 0.009294010698795319, + "layer_2_v_norm": 0.05897834524512291, + "layer_2_cos_v_neg_g": 0.011278755031526089, + "layer_3_v_norm": 0.05790039151906967, + "layer_3_cos_v_neg_g": 0.011879963800311089, + "layer_4_v_norm": 0.05943175032734871, + "layer_4_cos_v_neg_g": 0.008742420934140682, + "layer_5_v_norm": 0.06047416478395462, + "layer_5_cos_v_neg_g": 0.00973040983080864, + "layer_6_v_norm": 0.06122193485498428, + "layer_6_cos_v_neg_g": 0.010307046584784985, + "layer_7_v_norm": 0.06114017590880394, + "layer_7_cos_v_neg_g": 0.008678578771650791, + "layer_8_v_norm": 0.061177629977464676, + "layer_8_cos_v_neg_g": 0.008347704075276852, + "layer_9_v_norm": 0.06108545884490013, + "layer_9_cos_v_neg_g": 0.007117004599422216, + "layer_10_v_norm": 0.06115522235631943, + "layer_10_cos_v_neg_g": 0.00479779951274395, + "layer_11_v_norm": 0.06107182800769806, + "layer_11_cos_v_neg_g": 0.004581497050821781, + "layer_12_v_norm": 0.06106644496321678, + "layer_12_cos_v_neg_g": 0.003988797310739756, + "layer_1_sharpness": 0.06060260161757469, + "layer_2_sharpness": 0.03172149509191513, + "layer_3_sharpness": 0.0476415753364563, + "layer_4_sharpness": 0.033502306789159775, + "layer_5_sharpness": 0.03715590760111809, + "layer_6_sharpness": 0.04018302634358406, + "layer_7_sharpness": 0.039364755153656006, + "layer_8_sharpness": 0.03772933408617973, + "layer_9_sharpness": 0.027660854160785675, + "layer_10_sharpness": 0.020904095843434334, + "layer_11_sharpness": 0.019122155383229256, + "layer_12_sharpness": 0.0215607900172472 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..22a382f8bb0340fca8925b050810c1ed1767a331 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 
1.3523021936416626, + "total_l1_linf_norm": 8107.15234375, + "total_spectral_norm": 1.352301836013794, + "layer_1_update_fnorm": 0.06122328341007233, + "layer_1_max_l1_linf_norm": 0.2540726661682129, + "layer_1_max_spectral_norm": 0.005854744929820299, + "layer_2_update_fnorm": 0.05889584869146347, + "layer_2_max_l1_linf_norm": 0.2982848882675171, + "layer_2_max_spectral_norm": 0.006802498362958431, + "layer_3_update_fnorm": 0.0581316240131855, + "layer_3_max_l1_linf_norm": 0.3201873302459717, + "layer_3_max_spectral_norm": 0.007275600451976061, + "layer_4_update_fnorm": 0.05800323933362961, + "layer_4_max_l1_linf_norm": 0.32477161288261414, + "layer_4_max_spectral_norm": 0.007379519287496805, + "layer_5_update_fnorm": 0.06025269255042076, + "layer_5_max_l1_linf_norm": 0.3133571445941925, + "layer_5_max_spectral_norm": 0.007039402145892382, + "layer_6_update_fnorm": 0.06107257306575775, + "layer_6_max_l1_linf_norm": 0.30414247512817383, + "layer_6_max_spectral_norm": 0.006779724266380072, + "layer_7_update_fnorm": 0.061160217970609665, + "layer_7_max_l1_linf_norm": 0.2954297661781311, + "layer_7_max_spectral_norm": 0.00663179624825716, + "layer_8_update_fnorm": 0.06126847490668297, + "layer_8_max_l1_linf_norm": 0.323474645614624, + "layer_8_max_spectral_norm": 0.007190719246864319, + "layer_9_update_fnorm": 0.06116652861237526, + "layer_9_max_l1_linf_norm": 0.3149166703224182, + "layer_9_max_spectral_norm": 0.007043765392154455, + "layer_10_update_fnorm": 0.06112981215119362, + "layer_10_max_l1_linf_norm": 0.3291889727115631, + "layer_10_max_spectral_norm": 0.00733749195933342, + "layer_11_update_fnorm": 0.061146195977926254, + "layer_11_max_l1_linf_norm": 0.3544464111328125, + "layer_11_max_spectral_norm": 0.007907585240900517, + "layer_12_update_fnorm": 0.061278026551008224, + "layer_12_max_l1_linf_norm": 0.36060571670532227, + "layer_12_max_spectral_norm": 0.00805344246327877, + "total_sharpness": 0.005228396970778704, + "ip_v_neg_g": 0.0058813551440835, + "cos_v_neg_g": 0.0012407982721924782, + "v_norm": 1.3523021936416626, + "g_norm": 3.5051164627075195, + "hv_norm": 0.8004429340362549, + "cos_v_hv": 0.008833075873553753, + "hg_norm": 190.72000122070312, + "cos_g_hg": 0.4350890815258026, + "v_parallel_norm": 4.176910442765802e-05, + "v_perp_norm": 1.3523021936416626, + "layer_1_v_norm": 0.06122328341007233, + "layer_1_cos_v_neg_g": 0.007637517061084509, + "layer_2_v_norm": 0.05889584869146347, + "layer_2_cos_v_neg_g": 0.007248976267874241, + "layer_3_v_norm": 0.0581316240131855, + "layer_3_cos_v_neg_g": 0.005813245661556721, + "layer_4_v_norm": 0.05800323933362961, + "layer_4_cos_v_neg_g": 0.006142845842987299, + "layer_5_v_norm": 0.06025269255042076, + "layer_5_cos_v_neg_g": 0.006693570408970118, + "layer_6_v_norm": 0.06107256934046745, + "layer_6_cos_v_neg_g": 0.007870513014495373, + "layer_7_v_norm": 0.061160217970609665, + "layer_7_cos_v_neg_g": 0.007865242660045624, + "layer_8_v_norm": 0.06126847118139267, + "layer_8_cos_v_neg_g": 0.009245622903108597, + "layer_9_v_norm": 0.06116652861237526, + "layer_9_cos_v_neg_g": 0.009109245613217354, + "layer_10_v_norm": 0.06112981215119362, + "layer_10_cos_v_neg_g": 0.008322260342538357, + "layer_11_v_norm": 0.06114619970321655, + "layer_11_cos_v_neg_g": 0.006989015731960535, + "layer_12_v_norm": 0.061278026551008224, + "layer_12_cos_v_neg_g": 0.005483901593834162, + "layer_1_sharpness": 0.03650455176830292, + "layer_2_sharpness": 0.018612487241625786, + "layer_3_sharpness": 0.027554579079151154, + "layer_4_sharpness": 0.02042803354561329, 
+ "layer_5_sharpness": 0.024984212592244148, + "layer_6_sharpness": 0.028936542570590973, + "layer_7_sharpness": 0.03645714744925499, + "layer_8_sharpness": 0.04120251536369324, + "layer_9_sharpness": 0.028516031801700592, + "layer_10_sharpness": 0.020336821675300598, + "layer_11_sharpness": 0.02193964086472988, + "layer_12_sharpness": 0.027690939605236053 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..3c61b7f19cc5d2d30f3467970812f3729db2ef54 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3595572710037231, + "total_l1_linf_norm": 8145.4150390625, + "total_spectral_norm": 1.3595572710037231, + "layer_1_update_fnorm": 0.06151890009641647, + "layer_1_max_l1_linf_norm": 0.32376083731651306, + "layer_1_max_spectral_norm": 0.007250683382153511, + "layer_2_update_fnorm": 0.0587039478123188, + "layer_2_max_l1_linf_norm": 0.3687310814857483, + "layer_2_max_spectral_norm": 0.008242002688348293, + "layer_3_update_fnorm": 0.05768290162086487, + "layer_3_max_l1_linf_norm": 0.35394594073295593, + "layer_3_max_spectral_norm": 0.007955854758620262, + "layer_4_update_fnorm": 0.05980771780014038, + "layer_4_max_l1_linf_norm": 0.3739609122276306, + "layer_4_max_spectral_norm": 0.008391762152314186, + "layer_5_update_fnorm": 0.060823626816272736, + "layer_5_max_l1_linf_norm": 0.38215088844299316, + "layer_5_max_spectral_norm": 0.008484735153615475, + "layer_6_update_fnorm": 0.061401866376399994, + "layer_6_max_l1_linf_norm": 0.34966203570365906, + "layer_6_max_spectral_norm": 0.007794796489179134, + "layer_7_update_fnorm": 0.06134982034564018, + "layer_7_max_l1_linf_norm": 0.3445467948913574, + "layer_7_max_spectral_norm": 0.007693052291870117, + "layer_8_update_fnorm": 0.06132041662931442, + "layer_8_max_l1_linf_norm": 0.3294413387775421, + "layer_8_max_spectral_norm": 0.007337831426411867, + "layer_9_update_fnorm": 0.06128150597214699, + "layer_9_max_l1_linf_norm": 0.34505021572113037, + "layer_9_max_spectral_norm": 0.0077721416018903255, + "layer_10_update_fnorm": 0.06132761389017105, + "layer_10_max_l1_linf_norm": 0.36168599128723145, + "layer_10_max_spectral_norm": 0.008117086254060268, + "layer_11_update_fnorm": 0.06132935360074043, + "layer_11_max_l1_linf_norm": 0.3841882348060608, + "layer_11_max_spectral_norm": 0.008625875227153301, + "layer_12_update_fnorm": 0.06123429536819458, + "layer_12_max_l1_linf_norm": 0.34694576263427734, + "layer_12_max_spectral_norm": 0.00791751965880394, + "total_sharpness": 0.006214204244315624, + "ip_v_neg_g": 0.005549879278987646, + "cos_v_neg_g": 0.0011809318093582988, + "v_norm": 1.3595572710037231, + "g_norm": 3.4566962718963623, + "hv_norm": 0.933009922504425, + "cos_v_hv": 0.00905517302453518, + "hg_norm": 237.84329223632812, + "cos_g_hg": 0.4376685321331024, + "v_parallel_norm": 4.103140236111358e-05, + "v_perp_norm": 1.3595572710037231, + "layer_1_v_norm": 0.06151890009641647, + "layer_1_cos_v_neg_g": 0.009183725342154503, + "layer_2_v_norm": 0.0587039478123188, + "layer_2_cos_v_neg_g": 0.007533003110438585, + "layer_3_v_norm": 0.05768289789557457, + "layer_3_cos_v_neg_g": 0.0055310712195932865, + "layer_4_v_norm": 0.05980771780014038, + "layer_4_cos_v_neg_g": 0.0061926450580358505, + 
"layer_5_v_norm": 0.060823626816272736, + "layer_5_cos_v_neg_g": 0.007403390482068062, + "layer_6_v_norm": 0.061401866376399994, + "layer_6_cos_v_neg_g": 0.007181548047810793, + "layer_7_v_norm": 0.06134982034564018, + "layer_7_cos_v_neg_g": 0.006498584058135748, + "layer_8_v_norm": 0.06132041662931442, + "layer_8_cos_v_neg_g": 0.007221582345664501, + "layer_9_v_norm": 0.06128150597214699, + "layer_9_cos_v_neg_g": 0.006832698825746775, + "layer_10_v_norm": 0.06132761389017105, + "layer_10_cos_v_neg_g": 0.006727187428623438, + "layer_11_v_norm": 0.06132935732603073, + "layer_11_cos_v_neg_g": 0.006806175224483013, + "layer_12_v_norm": 0.06123429536819458, + "layer_12_cos_v_neg_g": 0.005870808381587267, + "layer_1_sharpness": 0.06450970470905304, + "layer_2_sharpness": 0.037313010543584824, + "layer_3_sharpness": 0.05469169095158577, + "layer_4_sharpness": 0.026908086612820625, + "layer_5_sharpness": 0.032771240919828415, + "layer_6_sharpness": 0.037360917776823044, + "layer_7_sharpness": 0.04058777913451195, + "layer_8_sharpness": 0.03681943565607071, + "layer_9_sharpness": 0.0278066024184227, + "layer_10_sharpness": 0.022437112405896187, + "layer_11_sharpness": 0.020502695813775063, + "layer_12_sharpness": 0.024921350181102753 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..943603f3d8990843a0bd8f2f4e122bf29de6c8c6 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 0.9303157329559326, + "total_l1_linf_norm": 5595.888671875, + "total_spectral_norm": 0.9303158521652222, + "layer_1_update_fnorm": 0.04394147917628288, + "layer_1_max_l1_linf_norm": 0.12408412992954254, + "layer_1_max_spectral_norm": 0.0028009344823658466, + "layer_2_update_fnorm": 0.042422764003276825, + "layer_2_max_l1_linf_norm": 0.1428455114364624, + "layer_2_max_spectral_norm": 0.0032797183375805616, + "layer_3_update_fnorm": 0.0416143573820591, + "layer_3_max_l1_linf_norm": 0.1576179414987564, + "layer_3_max_spectral_norm": 0.0035321114119142294, + "layer_4_update_fnorm": 0.041366126388311386, + "layer_4_max_l1_linf_norm": 0.16017380356788635, + "layer_4_max_spectral_norm": 0.006145496387034655, + "layer_5_update_fnorm": 0.04131374508142471, + "layer_5_max_l1_linf_norm": 0.16241306066513062, + "layer_5_max_spectral_norm": 0.0062666586600244045, + "layer_6_update_fnorm": 0.04169069230556488, + "layer_6_max_l1_linf_norm": 0.16737373173236847, + "layer_6_max_spectral_norm": 0.00720604881644249, + "layer_7_update_fnorm": 0.04204368218779564, + "layer_7_max_l1_linf_norm": 0.19485977292060852, + "layer_7_max_spectral_norm": 0.00823864433914423, + "layer_8_update_fnorm": 0.04226554185152054, + "layer_8_max_l1_linf_norm": 0.15700900554656982, + "layer_8_max_spectral_norm": 0.006770208943635225, + "layer_9_update_fnorm": 0.04250159487128258, + "layer_9_max_l1_linf_norm": 0.1560521423816681, + "layer_9_max_spectral_norm": 0.006614184007048607, + "layer_10_update_fnorm": 0.042729686945676804, + "layer_10_max_l1_linf_norm": 0.15975075960159302, + "layer_10_max_spectral_norm": 0.00624882010743022, + "layer_11_update_fnorm": 0.04279778152704239, + "layer_11_max_l1_linf_norm": 0.1626841276884079, + "layer_11_max_spectral_norm": 0.0041795531287789345, + "layer_12_update_fnorm": 
0.0430147610604763, + "layer_12_max_l1_linf_norm": 0.1709607094526291, + "layer_12_max_spectral_norm": 0.0040675075724720955, + "total_sharpness": 0.04738589748740196, + "ip_v_neg_g": 0.019701102748513222, + "cos_v_neg_g": 0.0026370876003056765, + "v_norm": 0.9303157329559326, + "g_norm": 8.030370712280273, + "hv_norm": 7.849985599517822, + "cos_v_hv": 0.005615787114948034, + "hg_norm": 10602.482421875, + "cos_g_hg": 0.6899266839027405, + "v_parallel_norm": 0.00015895634714979678, + "v_perp_norm": 0.9303157329559326, + "layer_1_v_norm": 0.04394147917628288, + "layer_1_cos_v_neg_g": 0.024887138977646828, + "layer_2_v_norm": 0.042422764003276825, + "layer_2_cos_v_neg_g": 0.018601758405566216, + "layer_3_v_norm": 0.0416143573820591, + "layer_3_cos_v_neg_g": 0.01848720572888851, + "layer_4_v_norm": 0.041366126388311386, + "layer_4_cos_v_neg_g": 0.019003933295607567, + "layer_5_v_norm": 0.04131374508142471, + "layer_5_cos_v_neg_g": 0.018239600583910942, + "layer_6_v_norm": 0.04169069230556488, + "layer_6_cos_v_neg_g": 0.015826784074306488, + "layer_7_v_norm": 0.04204368218779564, + "layer_7_cos_v_neg_g": 0.015414089895784855, + "layer_8_v_norm": 0.04226554185152054, + "layer_8_cos_v_neg_g": 0.014062893576920033, + "layer_9_v_norm": 0.04250159487128258, + "layer_9_cos_v_neg_g": 0.0131293386220932, + "layer_10_v_norm": 0.042729686945676804, + "layer_10_cos_v_neg_g": 0.012480643577873707, + "layer_11_v_norm": 0.04279778525233269, + "layer_11_cos_v_neg_g": 0.012012016959488392, + "layer_12_v_norm": 0.0430147610604763, + "layer_12_cos_v_neg_g": 0.010083765722811222, + "layer_1_sharpness": 1.9389584064483643, + "layer_2_sharpness": 0.40497297048568726, + "layer_3_sharpness": 0.29372861981391907, + "layer_4_sharpness": 0.24433791637420654, + "layer_5_sharpness": 0.17500343918800354, + "layer_6_sharpness": 0.10848512500524521, + "layer_7_sharpness": 0.07914607971906662, + "layer_8_sharpness": 0.06121808663010597, + "layer_9_sharpness": 0.05131112411618233, + "layer_10_sharpness": 0.04904220998287201, + "layer_11_sharpness": 0.04999160021543503, + "layer_12_sharpness": 0.05088326334953308 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..bbf501cf08a34edd2a9d3ed79d0daf2dc4697dee --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3451244831085205, + "total_l1_linf_norm": 8072.609375, + "total_spectral_norm": 1.3451242446899414, + "layer_1_update_fnorm": 0.06134156882762909, + "layer_1_max_l1_linf_norm": 0.3165547251701355, + "layer_1_max_spectral_norm": 0.0070852418430149555, + "layer_2_update_fnorm": 0.05888660252094269, + "layer_2_max_l1_linf_norm": 0.33933502435684204, + "layer_2_max_spectral_norm": 0.007640276104211807, + "layer_3_update_fnorm": 0.05816281959414482, + "layer_3_max_l1_linf_norm": 0.34974294900894165, + "layer_3_max_spectral_norm": 0.007840108126401901, + "layer_4_update_fnorm": 0.05974618345499039, + "layer_4_max_l1_linf_norm": 0.3338547348976135, + "layer_4_max_spectral_norm": 0.007540808524936438, + "layer_5_update_fnorm": 0.06069382280111313, + "layer_5_max_l1_linf_norm": 0.3475370407104492, + "layer_5_max_spectral_norm": 0.007738042157143354, + "layer_6_update_fnorm": 0.06127474457025528, + "layer_6_max_l1_linf_norm": 
0.3257908225059509, + "layer_6_max_spectral_norm": 0.007331701926887035, + "layer_7_update_fnorm": 0.061148371547460556, + "layer_7_max_l1_linf_norm": 0.3005301356315613, + "layer_7_max_spectral_norm": 0.006764787249267101, + "layer_8_update_fnorm": 0.06111292541027069, + "layer_8_max_l1_linf_norm": 0.28207141160964966, + "layer_8_max_spectral_norm": 0.006419673562049866, + "layer_9_update_fnorm": 0.061168041080236435, + "layer_9_max_l1_linf_norm": 0.3045675754547119, + "layer_9_max_spectral_norm": 0.006796596106141806, + "layer_10_update_fnorm": 0.061281852424144745, + "layer_10_max_l1_linf_norm": 0.34807974100112915, + "layer_10_max_spectral_norm": 0.00781894288957119, + "layer_11_update_fnorm": 0.0612599216401577, + "layer_11_max_l1_linf_norm": 0.36070042848587036, + "layer_11_max_spectral_norm": 0.00809476338326931, + "layer_12_update_fnorm": 0.06121077761054039, + "layer_12_max_l1_linf_norm": 0.3547724783420563, + "layer_12_max_spectral_norm": 0.00804588757455349, + "total_sharpness": 0.004799294285476208, + "ip_v_neg_g": 0.0057304622605443, + "cos_v_neg_g": 0.0011945972219109535, + "v_norm": 1.3451244831085205, + "g_norm": 3.566199779510498, + "hv_norm": 0.785346269607544, + "cos_v_hv": 0.0082201287150383, + "hg_norm": 465.8360290527344, + "cos_g_hg": 0.4584088623523712, + "v_parallel_norm": 4.591729521052912e-05, + "v_perp_norm": 1.3451244831085205, + "layer_1_v_norm": 0.06134156882762909, + "layer_1_cos_v_neg_g": 0.0087549714371562, + "layer_2_v_norm": 0.05888660252094269, + "layer_2_cos_v_neg_g": 0.007879892364144325, + "layer_3_v_norm": 0.05816281959414482, + "layer_3_cos_v_neg_g": 0.008152120746672153, + "layer_4_v_norm": 0.05974618345499039, + "layer_4_cos_v_neg_g": 0.006147170439362526, + "layer_5_v_norm": 0.06069382280111313, + "layer_5_cos_v_neg_g": 0.007412891369313002, + "layer_6_v_norm": 0.06127474457025528, + "layer_6_cos_v_neg_g": 0.007186426315456629, + "layer_7_v_norm": 0.061148371547460556, + "layer_7_cos_v_neg_g": 0.005363513249903917, + "layer_8_v_norm": 0.06111292168498039, + "layer_8_cos_v_neg_g": 0.006457559764385223, + "layer_9_v_norm": 0.061168041080236435, + "layer_9_cos_v_neg_g": 0.006710465531796217, + "layer_10_v_norm": 0.061281852424144745, + "layer_10_cos_v_neg_g": 0.0065889921970665455, + "layer_11_v_norm": 0.0612599216401577, + "layer_11_cos_v_neg_g": 0.006643349304795265, + "layer_12_v_norm": 0.06121077761054039, + "layer_12_cos_v_neg_g": 0.006524922791868448, + "layer_1_sharpness": 0.04717171564698219, + "layer_2_sharpness": 0.026745377108454704, + "layer_3_sharpness": 0.0443134680390358, + "layer_4_sharpness": 0.025575995445251465, + "layer_5_sharpness": 0.03248691186308861, + "layer_6_sharpness": 0.030480945482850075, + "layer_7_sharpness": 0.03212404623627663, + "layer_8_sharpness": 0.02991504780948162, + "layer_9_sharpness": 0.02083112858235836, + "layer_10_sharpness": 0.017039231956005096, + "layer_11_sharpness": 0.016425929963588715, + "layer_12_sharpness": 0.020061032846570015 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..9e11f7e2082fe703138c8e56e95383c8784d10a8 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.346644639968872, + "total_l1_linf_norm": 8085.0556640625, + 
"total_spectral_norm": 1.346644639968872, + "layer_1_update_fnorm": 0.06133353337645531, + "layer_1_max_l1_linf_norm": 0.34217846393585205, + "layer_1_max_spectral_norm": 0.007705814205110073, + "layer_2_update_fnorm": 0.0590081661939621, + "layer_2_max_l1_linf_norm": 0.3587159514427185, + "layer_2_max_spectral_norm": 0.008065054193139076, + "layer_3_update_fnorm": 0.05845547467470169, + "layer_3_max_l1_linf_norm": 0.35927873849868774, + "layer_3_max_spectral_norm": 0.008044875226914883, + "layer_4_update_fnorm": 0.059901218861341476, + "layer_4_max_l1_linf_norm": 0.37390783429145813, + "layer_4_max_spectral_norm": 0.00837745238095522, + "layer_5_update_fnorm": 0.06084909290075302, + "layer_5_max_l1_linf_norm": 0.33825820684432983, + "layer_5_max_spectral_norm": 0.007539050187915564, + "layer_6_update_fnorm": 0.06141003593802452, + "layer_6_max_l1_linf_norm": 0.35125482082366943, + "layer_6_max_spectral_norm": 0.007903876714408398, + "layer_7_update_fnorm": 0.06132620573043823, + "layer_7_max_l1_linf_norm": 0.33913105726242065, + "layer_7_max_spectral_norm": 0.0076443348079919815, + "layer_8_update_fnorm": 0.061311762779951096, + "layer_8_max_l1_linf_norm": 0.310880571603775, + "layer_8_max_spectral_norm": 0.00702662905678153, + "layer_9_update_fnorm": 0.06124332547187805, + "layer_9_max_l1_linf_norm": 0.32373160123825073, + "layer_9_max_spectral_norm": 0.00728426780551672, + "layer_10_update_fnorm": 0.06134572997689247, + "layer_10_max_l1_linf_norm": 0.36407470703125, + "layer_10_max_spectral_norm": 0.008149884641170502, + "layer_11_update_fnorm": 0.06137203797698021, + "layer_11_max_l1_linf_norm": 0.37858888506889343, + "layer_11_max_spectral_norm": 0.00844197440892458, + "layer_12_update_fnorm": 0.06140622869133949, + "layer_12_max_l1_linf_norm": 0.38587290048599243, + "layer_12_max_spectral_norm": 0.008603573776781559, + "total_sharpness": 0.0042523886077106, + "ip_v_neg_g": 0.004275016952306032, + "cos_v_neg_g": 0.000614793854765594, + "v_norm": 1.346644639968872, + "g_norm": 5.163632392883301, + "hv_norm": 0.8883737921714783, + "cos_v_hv": 0.006445998325943947, + "hg_norm": 977.98974609375, + "cos_g_hg": 0.6709577441215515, + "v_parallel_norm": 2.2734509911970235e-05, + "v_perp_norm": 1.346644639968872, + "layer_1_v_norm": 0.06133353337645531, + "layer_1_cos_v_neg_g": 0.008411251939833164, + "layer_2_v_norm": 0.0590081661939621, + "layer_2_cos_v_neg_g": 0.006553404964506626, + "layer_3_v_norm": 0.05845547467470169, + "layer_3_cos_v_neg_g": 0.005657307803630829, + "layer_4_v_norm": 0.059901218861341476, + "layer_4_cos_v_neg_g": 0.003452304285019636, + "layer_5_v_norm": 0.06084909290075302, + "layer_5_cos_v_neg_g": 0.004104031715542078, + "layer_6_v_norm": 0.06141003593802452, + "layer_6_cos_v_neg_g": 0.0023733119014650583, + "layer_7_v_norm": 0.06132620573043823, + "layer_7_cos_v_neg_g": 0.0006980173056945205, + "layer_8_v_norm": 0.061311762779951096, + "layer_8_cos_v_neg_g": 0.0003379083063919097, + "layer_9_v_norm": 0.06124332547187805, + "layer_9_cos_v_neg_g": 0.0010205853031948209, + "layer_10_v_norm": 0.06134572997689247, + "layer_10_cos_v_neg_g": 0.0024512354284524918, + "layer_11_v_norm": 0.06137203797698021, + "layer_11_cos_v_neg_g": 0.0030975101981312037, + "layer_12_v_norm": 0.06140622869133949, + "layer_12_cos_v_neg_g": 0.002825012430548668, + "layer_1_sharpness": 0.07616128772497177, + "layer_2_sharpness": 0.023794233798980713, + "layer_3_sharpness": 0.02945934422314167, + "layer_4_sharpness": 0.016830528154969215, + "layer_5_sharpness": 0.016779011115431786, + 
"layer_6_sharpness": 0.02027607336640358, + "layer_7_sharpness": 0.022289052605628967, + "layer_8_sharpness": 0.0253152996301651, + "layer_9_sharpness": 0.019298292696475983, + "layer_10_sharpness": 0.013342522084712982, + "layer_11_sharpness": 0.01502237468957901, + "layer_12_sharpness": 0.025079715996980667 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..5b5b15a26afa686c98b610ef70792699b5ab236c --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.344268798828125, + "total_l1_linf_norm": 8068.93115234375, + "total_spectral_norm": 1.3442686796188354, + "layer_1_update_fnorm": 0.06121990457177162, + "layer_1_max_l1_linf_norm": 0.3139967918395996, + "layer_1_max_spectral_norm": 0.00707712396979332, + "layer_2_update_fnorm": 0.05925845354795456, + "layer_2_max_l1_linf_norm": 0.354116290807724, + "layer_2_max_spectral_norm": 0.007988843135535717, + "layer_3_update_fnorm": 0.058495327830314636, + "layer_3_max_l1_linf_norm": 0.36013463139533997, + "layer_3_max_spectral_norm": 0.008058611303567886, + "layer_4_update_fnorm": 0.060041557997465134, + "layer_4_max_l1_linf_norm": 0.3719056248664856, + "layer_4_max_spectral_norm": 0.008314045146107674, + "layer_5_update_fnorm": 0.060880906879901886, + "layer_5_max_l1_linf_norm": 0.323678195476532, + "layer_5_max_spectral_norm": 0.007270636036992073, + "layer_6_update_fnorm": 0.06131763756275177, + "layer_6_max_l1_linf_norm": 0.31674087047576904, + "layer_6_max_spectral_norm": 0.007114290725439787, + "layer_7_update_fnorm": 0.0613010972738266, + "layer_7_max_l1_linf_norm": 0.31557178497314453, + "layer_7_max_spectral_norm": 0.007042863871902227, + "layer_8_update_fnorm": 0.06117972731590271, + "layer_8_max_l1_linf_norm": 0.29822611808776855, + "layer_8_max_spectral_norm": 0.00671648932620883, + "layer_9_update_fnorm": 0.061229806393384933, + "layer_9_max_l1_linf_norm": 0.324985146522522, + "layer_9_max_spectral_norm": 0.007237429730594158, + "layer_10_update_fnorm": 0.06134732812643051, + "layer_10_max_l1_linf_norm": 0.3476146161556244, + "layer_10_max_spectral_norm": 0.007799352053552866, + "layer_11_update_fnorm": 0.061428219079971313, + "layer_11_max_l1_linf_norm": 0.3782495856285095, + "layer_11_max_spectral_norm": 0.008492335677146912, + "layer_12_update_fnorm": 0.06155337393283844, + "layer_12_max_l1_linf_norm": 0.4027659296989441, + "layer_12_max_spectral_norm": 0.008958923630416393, + "total_sharpness": 0.005172520875930786, + "ip_v_neg_g": 0.0044687967747449875, + "cos_v_neg_g": 0.001036364585161209, + "v_norm": 1.344268798828125, + "g_norm": 3.20768666267395, + "hv_norm": 0.8096517324447632, + "cos_v_hv": 0.008587962947785854, + "hg_norm": 160.48190307617188, + "cos_g_hg": 0.4379511773586273, + "v_parallel_norm": 3.521267353789881e-05, + "v_perp_norm": 1.344268798828125, + "layer_1_v_norm": 0.06121990457177162, + "layer_1_cos_v_neg_g": 0.006523951888084412, + "layer_2_v_norm": 0.05925845354795456, + "layer_2_cos_v_neg_g": 0.004162206314504147, + "layer_3_v_norm": 0.058495327830314636, + "layer_3_cos_v_neg_g": 0.00420186435803771, + "layer_4_v_norm": 0.060041557997465134, + "layer_4_cos_v_neg_g": 0.005771723110228777, + "layer_5_v_norm": 0.060880906879901886, + 
"layer_5_cos_v_neg_g": 0.006546021439135075, + "layer_6_v_norm": 0.06131763383746147, + "layer_6_cos_v_neg_g": 0.0065033817663788795, + "layer_7_v_norm": 0.0613010972738266, + "layer_7_cos_v_neg_g": 0.005893244408071041, + "layer_8_v_norm": 0.06117972731590271, + "layer_8_cos_v_neg_g": 0.006329411640763283, + "layer_9_v_norm": 0.061229806393384933, + "layer_9_cos_v_neg_g": 0.007107535842806101, + "layer_10_v_norm": 0.06134732812643051, + "layer_10_cos_v_neg_g": 0.005763330962508917, + "layer_11_v_norm": 0.061428219079971313, + "layer_11_cos_v_neg_g": 0.005084434058517218, + "layer_12_v_norm": 0.06155337393283844, + "layer_12_cos_v_neg_g": 0.005086004268378019, + "layer_1_sharpness": 0.03959883004426956, + "layer_2_sharpness": 0.013376413844525814, + "layer_3_sharpness": 0.03654613718390465, + "layer_4_sharpness": 0.02026182785630226, + "layer_5_sharpness": 0.021361391991376877, + "layer_6_sharpness": 0.024888306856155396, + "layer_7_sharpness": 0.02930457331240177, + "layer_8_sharpness": 0.031715862452983856, + "layer_9_sharpness": 0.0248810276389122, + "layer_10_sharpness": 0.019701456651091576, + "layer_11_sharpness": 0.02315012365579605, + "layer_12_sharpness": 0.05419829115271568 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..3dc68933eef0c35815dad64671df5de7f315aa59 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.357048511505127, + "total_l1_linf_norm": 8144.20263671875, + "total_spectral_norm": 1.3570483922958374, + "layer_1_update_fnorm": 0.0611271895468235, + "layer_1_max_l1_linf_norm": 0.30718058347702026, + "layer_1_max_spectral_norm": 0.00689860712736845, + "layer_2_update_fnorm": 0.05911387503147125, + "layer_2_max_l1_linf_norm": 0.3492807447910309, + "layer_2_max_spectral_norm": 0.007923734374344349, + "layer_3_update_fnorm": 0.05853143706917763, + "layer_3_max_l1_linf_norm": 0.33984100818634033, + "layer_3_max_spectral_norm": 0.007746194489300251, + "layer_4_update_fnorm": 0.05992566794157028, + "layer_4_max_l1_linf_norm": 0.34593039751052856, + "layer_4_max_spectral_norm": 0.007837716490030289, + "layer_5_update_fnorm": 0.060858264565467834, + "layer_5_max_l1_linf_norm": 0.3387283980846405, + "layer_5_max_spectral_norm": 0.007544956170022488, + "layer_6_update_fnorm": 0.061302367597818375, + "layer_6_max_l1_linf_norm": 0.32131922245025635, + "layer_6_max_spectral_norm": 0.007234468590468168, + "layer_7_update_fnorm": 0.061243895441293716, + "layer_7_max_l1_linf_norm": 0.3003578186035156, + "layer_7_max_spectral_norm": 0.0068282438442111015, + "layer_8_update_fnorm": 0.06119869649410248, + "layer_8_max_l1_linf_norm": 0.3077441155910492, + "layer_8_max_spectral_norm": 0.006895262282341719, + "layer_9_update_fnorm": 0.061302199959754944, + "layer_9_max_l1_linf_norm": 0.3282325565814972, + "layer_9_max_spectral_norm": 0.007431302685290575, + "layer_10_update_fnorm": 0.06136471405625343, + "layer_10_max_l1_linf_norm": 0.35518020391464233, + "layer_10_max_spectral_norm": 0.008007869124412537, + "layer_11_update_fnorm": 0.06128760427236557, + "layer_11_max_l1_linf_norm": 0.3627980053424835, + "layer_11_max_spectral_norm": 0.008091366849839687, + "layer_12_update_fnorm": 0.06125101074576378, + 
"layer_12_max_l1_linf_norm": 0.3487173914909363, + "layer_12_max_spectral_norm": 0.007968494668602943, + "total_sharpness": 0.003880183445289731, + "ip_v_neg_g": 0.002704933052882552, + "cos_v_neg_g": 0.0006452417001128197, + "v_norm": 1.357048511505127, + "g_norm": 3.0891480445861816, + "hv_norm": 0.6544795632362366, + "cos_v_hv": 0.008045472204685211, + "hg_norm": 120.91361236572266, + "cos_g_hg": 0.45701128244400024, + "v_parallel_norm": 1.9876364603987895e-05, + "v_perp_norm": 1.357048511505127, + "layer_1_v_norm": 0.0611271895468235, + "layer_1_cos_v_neg_g": 0.0058989496901631355, + "layer_2_v_norm": 0.05911387503147125, + "layer_2_cos_v_neg_g": 0.0031485180370509624, + "layer_3_v_norm": 0.05853143706917763, + "layer_3_cos_v_neg_g": 0.005407877266407013, + "layer_4_v_norm": 0.05992566794157028, + "layer_4_cos_v_neg_g": 0.00465749716386199, + "layer_5_v_norm": 0.060858264565467834, + "layer_5_cos_v_neg_g": 0.0032392179127782583, + "layer_6_v_norm": 0.061302367597818375, + "layer_6_cos_v_neg_g": 0.004090698901563883, + "layer_7_v_norm": 0.061243895441293716, + "layer_7_cos_v_neg_g": 0.003930142614990473, + "layer_8_v_norm": 0.06119869649410248, + "layer_8_cos_v_neg_g": 0.004813591483980417, + "layer_9_v_norm": 0.061302199959754944, + "layer_9_cos_v_neg_g": 0.004520398564636707, + "layer_10_v_norm": 0.06136471405625343, + "layer_10_cos_v_neg_g": 0.004000989720225334, + "layer_11_v_norm": 0.06128760427236557, + "layer_11_cos_v_neg_g": 0.0025654586497694254, + "layer_12_v_norm": 0.06125101074576378, + "layer_12_cos_v_neg_g": 0.001684845075942576, + "layer_1_sharpness": 0.05798589810729027, + "layer_2_sharpness": 0.013708186335861683, + "layer_3_sharpness": 0.02745472453534603, + "layer_4_sharpness": 0.016045251861214638, + "layer_5_sharpness": 0.018483156338334084, + "layer_6_sharpness": 0.024453191086649895, + "layer_7_sharpness": 0.027649162337183952, + "layer_8_sharpness": 0.03152972087264061, + "layer_9_sharpness": 0.022199543192982674, + "layer_10_sharpness": 0.015416540205478668, + "layer_11_sharpness": 0.012944266200065613, + "layer_12_sharpness": 0.019619280472397804 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..19a35e1a16fe2e4e831050f238d636e39b9ad361 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3533679246902466, + "total_l1_linf_norm": 8122.1650390625, + "total_spectral_norm": 1.3533680438995361, + "layer_1_update_fnorm": 0.060603067278862, + "layer_1_max_l1_linf_norm": 0.3272024989128113, + "layer_1_max_spectral_norm": 0.0073625799268484116, + "layer_2_update_fnorm": 0.05894885212182999, + "layer_2_max_l1_linf_norm": 0.383308470249176, + "layer_2_max_spectral_norm": 0.008589500561356544, + "layer_3_update_fnorm": 0.05846433341503143, + "layer_3_max_l1_linf_norm": 0.3945454955101013, + "layer_3_max_spectral_norm": 0.008834579028189182, + "layer_4_update_fnorm": 0.060128580778837204, + "layer_4_max_l1_linf_norm": 0.3807409405708313, + "layer_4_max_spectral_norm": 0.008515528403222561, + "layer_5_update_fnorm": 0.06106291338801384, + "layer_5_max_l1_linf_norm": 0.3659932613372803, + "layer_5_max_spectral_norm": 0.008216951042413712, + "layer_6_update_fnorm": 0.06150968745350838, + "layer_6_max_l1_linf_norm": 
0.3628452718257904, + "layer_6_max_spectral_norm": 0.008079134859144688, + "layer_7_update_fnorm": 0.06153976172208786, + "layer_7_max_l1_linf_norm": 0.3719078600406647, + "layer_7_max_spectral_norm": 0.008256731554865837, + "layer_8_update_fnorm": 0.061391860246658325, + "layer_8_max_l1_linf_norm": 0.3524354100227356, + "layer_8_max_spectral_norm": 0.007800831459462643, + "layer_9_update_fnorm": 0.06139984726905823, + "layer_9_max_l1_linf_norm": 0.35394757986068726, + "layer_9_max_spectral_norm": 0.007906222715973854, + "layer_10_update_fnorm": 0.0614582821726799, + "layer_10_max_l1_linf_norm": 0.3733316957950592, + "layer_10_max_spectral_norm": 0.008425011299550533, + "layer_11_update_fnorm": 0.06148837134242058, + "layer_11_max_l1_linf_norm": 0.39536598324775696, + "layer_11_max_spectral_norm": 0.00883965753018856, + "layer_12_update_fnorm": 0.06137585639953613, + "layer_12_max_l1_linf_norm": 0.38482484221458435, + "layer_12_max_spectral_norm": 0.008639175444841385, + "total_sharpness": 0.004941969644278288, + "ip_v_neg_g": 0.0040615578182041645, + "cos_v_neg_g": 0.0009444516035728157, + "v_norm": 1.3533679246902466, + "g_norm": 3.177583932876587, + "hv_norm": 0.7750339508056641, + "cos_v_hv": 0.008629689924418926, + "hg_norm": 133.57278442382812, + "cos_g_hg": 0.44769057631492615, + "v_parallel_norm": 2.873386983992532e-05, + "v_perp_norm": 1.3533679246902466, + "layer_1_v_norm": 0.060603067278862, + "layer_1_cos_v_neg_g": 0.008443781174719334, + "layer_2_v_norm": 0.05894885212182999, + "layer_2_cos_v_neg_g": 0.006186277139931917, + "layer_3_v_norm": 0.05846433341503143, + "layer_3_cos_v_neg_g": 0.004953937605023384, + "layer_4_v_norm": 0.060128580778837204, + "layer_4_cos_v_neg_g": 0.004333595745265484, + "layer_5_v_norm": 0.06106291338801384, + "layer_5_cos_v_neg_g": 0.005529229063540697, + "layer_6_v_norm": 0.06150968372821808, + "layer_6_cos_v_neg_g": 0.005662298295646906, + "layer_7_v_norm": 0.06153976172208786, + "layer_7_cos_v_neg_g": 0.005646196659654379, + "layer_8_v_norm": 0.061391860246658325, + "layer_8_cos_v_neg_g": 0.007270380388945341, + "layer_9_v_norm": 0.06139984726905823, + "layer_9_cos_v_neg_g": 0.006963526830077171, + "layer_10_v_norm": 0.0614582821726799, + "layer_10_cos_v_neg_g": 0.0056796506978571415, + "layer_11_v_norm": 0.06148837134242058, + "layer_11_cos_v_neg_g": 0.0036461486015468836, + "layer_12_v_norm": 0.06137585639953613, + "layer_12_cos_v_neg_g": 0.0028315959498286247, + "layer_1_sharpness": 0.1445293426513672, + "layer_2_sharpness": 0.0272857453674078, + "layer_3_sharpness": 0.046574585139751434, + "layer_4_sharpness": 0.01827562041580677, + "layer_5_sharpness": 0.023491112515330315, + "layer_6_sharpness": 0.02983236126601696, + "layer_7_sharpness": 0.03781922906637192, + "layer_8_sharpness": 0.03716425225138664, + "layer_9_sharpness": 0.02398752234876156, + "layer_10_sharpness": 0.016726290807127953, + "layer_11_sharpness": 0.013563516549766064, + "layer_12_sharpness": 0.02511865831911564 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..a56a57d17ee0a1241f9b6d30022435672fe8af32 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.351572871208191, + "total_l1_linf_norm": 8103.3837890625, 
+ "total_spectral_norm": 1.3515729904174805, + "layer_1_update_fnorm": 0.06089117005467415, + "layer_1_max_l1_linf_norm": 0.3834190368652344, + "layer_1_max_spectral_norm": 0.008543900214135647, + "layer_2_update_fnorm": 0.05892975628376007, + "layer_2_max_l1_linf_norm": 0.39773035049438477, + "layer_2_max_spectral_norm": 0.008954077027738094, + "layer_3_update_fnorm": 0.05876097455620766, + "layer_3_max_l1_linf_norm": 0.38096484541893005, + "layer_3_max_spectral_norm": 0.00854232907295227, + "layer_4_update_fnorm": 0.06014377623796463, + "layer_4_max_l1_linf_norm": 0.3585309386253357, + "layer_4_max_spectral_norm": 0.008105394430458546, + "layer_5_update_fnorm": 0.06098733842372894, + "layer_5_max_l1_linf_norm": 0.3505362868309021, + "layer_5_max_spectral_norm": 0.007920723408460617, + "layer_6_update_fnorm": 0.061322346329689026, + "layer_6_max_l1_linf_norm": 0.32696062326431274, + "layer_6_max_spectral_norm": 0.007329748477786779, + "layer_7_update_fnorm": 0.06124704331159592, + "layer_7_max_l1_linf_norm": 0.3049434423446655, + "layer_7_max_spectral_norm": 0.006849060766398907, + "layer_8_update_fnorm": 0.06117052584886551, + "layer_8_max_l1_linf_norm": 0.29315805435180664, + "layer_8_max_spectral_norm": 0.006664350163191557, + "layer_9_update_fnorm": 0.06130041927099228, + "layer_9_max_l1_linf_norm": 0.3168572187423706, + "layer_9_max_spectral_norm": 0.00715329684317112, + "layer_10_update_fnorm": 0.061401162296533585, + "layer_10_max_l1_linf_norm": 0.34947121143341064, + "layer_10_max_spectral_norm": 0.007884525693953037, + "layer_11_update_fnorm": 0.06144915893673897, + "layer_11_max_l1_linf_norm": 0.38311445713043213, + "layer_11_max_spectral_norm": 0.008609117940068245, + "layer_12_update_fnorm": 0.06138671934604645, + "layer_12_max_l1_linf_norm": 0.3851761817932129, + "layer_12_max_spectral_norm": 0.008667715825140476, + "total_sharpness": 0.004203910939395428, + "ip_v_neg_g": 0.004431319423019886, + "cos_v_neg_g": 0.0009650525753386319, + "v_norm": 1.351572871208191, + "g_norm": 3.3973681926727295, + "hv_norm": 0.7752702236175537, + "cos_v_hv": 0.007328919135034084, + "hg_norm": 258.8619689941406, + "cos_g_hg": 0.454611599445343, + "v_parallel_norm": 2.5983152227126993e-05, + "v_perp_norm": 1.351572871208191, + "layer_1_v_norm": 0.06089117005467415, + "layer_1_cos_v_neg_g": 0.011639531701803207, + "layer_2_v_norm": 0.05892975628376007, + "layer_2_cos_v_neg_g": 0.007640233729034662, + "layer_3_v_norm": 0.05876097083091736, + "layer_3_cos_v_neg_g": 0.00622900202870369, + "layer_4_v_norm": 0.06014377623796463, + "layer_4_cos_v_neg_g": 0.0048994701355695724, + "layer_5_v_norm": 0.06098733842372894, + "layer_5_cos_v_neg_g": 0.003589199623093009, + "layer_6_v_norm": 0.061322346329689026, + "layer_6_cos_v_neg_g": 0.004121546167880297, + "layer_7_v_norm": 0.06124704331159592, + "layer_7_cos_v_neg_g": 0.004044550936669111, + "layer_8_v_norm": 0.06117052584886551, + "layer_8_cos_v_neg_g": 0.004780495073646307, + "layer_9_v_norm": 0.06130041927099228, + "layer_9_cos_v_neg_g": 0.00575602799654007, + "layer_10_v_norm": 0.061401162296533585, + "layer_10_cos_v_neg_g": 0.006300970446318388, + "layer_11_v_norm": 0.06144915521144867, + "layer_11_cos_v_neg_g": 0.0060774036683142185, + "layer_12_v_norm": 0.06138671934604645, + "layer_12_cos_v_neg_g": 0.004663802683353424, + "layer_1_sharpness": 0.2043868750333786, + "layer_2_sharpness": 0.035307347774505615, + "layer_3_sharpness": 0.030294101685285568, + "layer_4_sharpness": 0.015999875962734222, + "layer_5_sharpness": 0.019323274493217468, + 
"layer_6_sharpness": 0.023000070825219154, + "layer_7_sharpness": 0.024230962619185448, + "layer_8_sharpness": 0.02556837908923626, + "layer_9_sharpness": 0.016814906150102615, + "layer_10_sharpness": 0.012613609433174133, + "layer_11_sharpness": 0.012137914076447487, + "layer_12_sharpness": 0.02563951350748539 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_8000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..a4642de3adaf2385bfe82fd4a06c8da9d3cc886c --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3435149192810059, + "total_l1_linf_norm": 8066.970703125, + "total_spectral_norm": 1.3435149192810059, + "layer_1_update_fnorm": 0.06097891181707382, + "layer_1_max_l1_linf_norm": 0.31213802099227905, + "layer_1_max_spectral_norm": 0.007098224479705095, + "layer_2_update_fnorm": 0.059173356741666794, + "layer_2_max_l1_linf_norm": 0.3472118675708771, + "layer_2_max_spectral_norm": 0.007866532541811466, + "layer_3_update_fnorm": 0.05894461274147034, + "layer_3_max_l1_linf_norm": 0.3473759889602661, + "layer_3_max_spectral_norm": 0.007815396413207054, + "layer_4_update_fnorm": 0.060144633054733276, + "layer_4_max_l1_linf_norm": 0.3437731862068176, + "layer_4_max_spectral_norm": 0.00770155992358923, + "layer_5_update_fnorm": 0.06098926439881325, + "layer_5_max_l1_linf_norm": 0.34125977754592896, + "layer_5_max_spectral_norm": 0.007672126404941082, + "layer_6_update_fnorm": 0.06128564104437828, + "layer_6_max_l1_linf_norm": 0.3170609474182129, + "layer_6_max_spectral_norm": 0.007185241207480431, + "layer_7_update_fnorm": 0.06132951378822327, + "layer_7_max_l1_linf_norm": 0.30541008710861206, + "layer_7_max_spectral_norm": 0.0068879802711308, + "layer_8_update_fnorm": 0.06130123510956764, + "layer_8_max_l1_linf_norm": 0.32074716687202454, + "layer_8_max_spectral_norm": 0.007219192571938038, + "layer_9_update_fnorm": 0.061339545994997025, + "layer_9_max_l1_linf_norm": 0.35263094305992126, + "layer_9_max_spectral_norm": 0.007949705235660076, + "layer_10_update_fnorm": 0.06161868944764137, + "layer_10_max_l1_linf_norm": 0.3836531341075897, + "layer_10_max_spectral_norm": 0.008613025769591331, + "layer_11_update_fnorm": 0.061666153371334076, + "layer_11_max_l1_linf_norm": 0.4062930643558502, + "layer_11_max_spectral_norm": 0.009060674346983433, + "layer_12_update_fnorm": 0.061508677899837494, + "layer_12_max_l1_linf_norm": 0.4158231019973755, + "layer_12_max_spectral_norm": 0.009280378930270672, + "total_sharpness": 0.003796101314947009, + "ip_v_neg_g": 0.002978183561936021, + "cos_v_neg_g": 0.000722299562767148, + "v_norm": 1.3435149192810059, + "g_norm": 3.068962574005127, + "hv_norm": 0.6463655233383179, + "cos_v_hv": 0.007890457287430763, + "hg_norm": 139.1275634765625, + "cos_g_hg": 0.43444764614105225, + "v_parallel_norm": 2.5381301384186372e-05, + "v_perp_norm": 1.3435149192810059, + "layer_1_v_norm": 0.06097891181707382, + "layer_1_cos_v_neg_g": 0.005704884417355061, + "layer_2_v_norm": 0.059173356741666794, + "layer_2_cos_v_neg_g": 0.004631359130144119, + "layer_3_v_norm": 0.05894461274147034, + "layer_3_cos_v_neg_g": 0.005450454540550709, + "layer_4_v_norm": 0.060144633054733276, + "layer_4_cos_v_neg_g": 0.004293567035347223, + "layer_5_v_norm": 0.06098926439881325, + 
"layer_5_cos_v_neg_g": 0.003252084832638502, + "layer_6_v_norm": 0.06128564104437828, + "layer_6_cos_v_neg_g": 0.003274271497502923, + "layer_7_v_norm": 0.06132951378822327, + "layer_7_cos_v_neg_g": 0.005778972525149584, + "layer_8_v_norm": 0.06130123510956764, + "layer_8_cos_v_neg_g": 0.006002338137477636, + "layer_9_v_norm": 0.061339545994997025, + "layer_9_cos_v_neg_g": 0.004391832742840052, + "layer_10_v_norm": 0.06161868944764137, + "layer_10_cos_v_neg_g": 0.0032428547274321318, + "layer_11_v_norm": 0.061666157096624374, + "layer_11_cos_v_neg_g": 0.0020377254113554955, + "layer_12_v_norm": 0.061508677899837494, + "layer_12_cos_v_neg_g": 0.002644652035087347, + "layer_1_sharpness": 0.050105538219213486, + "layer_2_sharpness": 0.01353170070797205, + "layer_3_sharpness": 0.023545119911432266, + "layer_4_sharpness": 0.012884171679615974, + "layer_5_sharpness": 0.01838761940598488, + "layer_6_sharpness": 0.020192811265587807, + "layer_7_sharpness": 0.02577606774866581, + "layer_8_sharpness": 0.030443508177995682, + "layer_9_sharpness": 0.02325327880680561, + "layer_10_sharpness": 0.01823774166405201, + "layer_11_sharpness": 0.016278507187962532, + "layer_12_sharpness": 0.027607586234807968 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..5249a2b0a99ff2936b5332a01d0ea1500d5f0f2a --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.347132921218872, + "total_l1_linf_norm": 8074.6494140625, + "total_spectral_norm": 1.3471328020095825, + "layer_1_update_fnorm": 0.0608895868062973, + "layer_1_max_l1_linf_norm": 0.3295953571796417, + "layer_1_max_spectral_norm": 0.00743680726736784, + "layer_2_update_fnorm": 0.05885178968310356, + "layer_2_max_l1_linf_norm": 0.3625098466873169, + "layer_2_max_spectral_norm": 0.008146635256707668, + "layer_3_update_fnorm": 0.05887521803379059, + "layer_3_max_l1_linf_norm": 0.37954574823379517, + "layer_3_max_spectral_norm": 0.008553666062653065, + "layer_4_update_fnorm": 0.060158871114254, + "layer_4_max_l1_linf_norm": 0.3594944477081299, + "layer_4_max_spectral_norm": 0.00808190368115902, + "layer_5_update_fnorm": 0.060830257833004, + "layer_5_max_l1_linf_norm": 0.33724331855773926, + "layer_5_max_spectral_norm": 0.007659608498215675, + "layer_6_update_fnorm": 0.06115786358714104, + "layer_6_max_l1_linf_norm": 0.31086456775665283, + "layer_6_max_spectral_norm": 0.007021625060588121, + "layer_7_update_fnorm": 0.061258405447006226, + "layer_7_max_l1_linf_norm": 0.30440330505371094, + "layer_7_max_spectral_norm": 0.0069092996418476105, + "layer_8_update_fnorm": 0.06117701157927513, + "layer_8_max_l1_linf_norm": 0.29258060455322266, + "layer_8_max_spectral_norm": 0.006638798862695694, + "layer_9_update_fnorm": 0.061079494655132294, + "layer_9_max_l1_linf_norm": 0.3145257830619812, + "layer_9_max_spectral_norm": 0.007134488318115473, + "layer_10_update_fnorm": 0.061243366450071335, + "layer_10_max_l1_linf_norm": 0.34414103627204895, + "layer_10_max_spectral_norm": 0.007713392376899719, + "layer_11_update_fnorm": 0.06120452657341957, + "layer_11_max_l1_linf_norm": 0.3626219630241394, + "layer_11_max_spectral_norm": 0.008197026327252388, + "layer_12_update_fnorm": 0.06133609265089035, + 
"layer_12_max_l1_linf_norm": 0.38331758975982666, + "layer_12_max_spectral_norm": 0.00862859096378088, + "total_sharpness": 0.003675499465316534, + "ip_v_neg_g": 0.004137324169278145, + "cos_v_neg_g": 0.0010071707656607032, + "v_norm": 1.347132921218872, + "g_norm": 3.0493409633636475, + "hv_norm": 0.5762231349945068, + "cos_v_hv": 0.008592827245593071, + "hg_norm": 111.35401916503906, + "cos_g_hg": 0.4539856016635895, + "v_parallel_norm": 3.1178671633824706e-05, + "v_perp_norm": 1.347132921218872, + "layer_1_v_norm": 0.0608895868062973, + "layer_1_cos_v_neg_g": 0.005412772763520479, + "layer_2_v_norm": 0.05885178968310356, + "layer_2_cos_v_neg_g": 0.0049841441214084625, + "layer_3_v_norm": 0.05887521803379059, + "layer_3_cos_v_neg_g": 0.00688215484842658, + "layer_4_v_norm": 0.060158871114254, + "layer_4_cos_v_neg_g": 0.007452539633959532, + "layer_5_v_norm": 0.060830257833004, + "layer_5_cos_v_neg_g": 0.008191042579710484, + "layer_6_v_norm": 0.061157867312431335, + "layer_6_cos_v_neg_g": 0.007711564656347036, + "layer_7_v_norm": 0.061258405447006226, + "layer_7_cos_v_neg_g": 0.007238361518830061, + "layer_8_v_norm": 0.06117701157927513, + "layer_8_cos_v_neg_g": 0.007095075212419033, + "layer_9_v_norm": 0.061079494655132294, + "layer_9_cos_v_neg_g": 0.006028272211551666, + "layer_10_v_norm": 0.061243366450071335, + "layer_10_cos_v_neg_g": 0.005972792394459248, + "layer_11_v_norm": 0.06120452657341957, + "layer_11_cos_v_neg_g": 0.00504113407805562, + "layer_12_v_norm": 0.06133609265089035, + "layer_12_cos_v_neg_g": 0.0026548756286501884, + "layer_1_sharpness": 0.05452869459986687, + "layer_2_sharpness": 0.015844810754060745, + "layer_3_sharpness": 0.029826726764440536, + "layer_4_sharpness": 0.016895942389965057, + "layer_5_sharpness": 0.019390787929296494, + "layer_6_sharpness": 0.022423740476369858, + "layer_7_sharpness": 0.027642473578453064, + "layer_8_sharpness": 0.026400508359074593, + "layer_9_sharpness": 0.019128477200865746, + "layer_10_sharpness": 0.01345585472881794, + "layer_11_sharpness": 0.014993407763540745, + "layer_12_sharpness": 0.031089410185813904 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..1ea22939f00bb54352a03308b73f8583d188032e --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3468776941299438, + "total_l1_linf_norm": 8081.390625, + "total_spectral_norm": 1.3468774557113647, + "layer_1_update_fnorm": 0.060991495847702026, + "layer_1_max_l1_linf_norm": 0.3360558748245239, + "layer_1_max_spectral_norm": 0.007602771278470755, + "layer_2_update_fnorm": 0.0591198168694973, + "layer_2_max_l1_linf_norm": 0.3832114636898041, + "layer_2_max_spectral_norm": 0.008603372611105442, + "layer_3_update_fnorm": 0.05908949300646782, + "layer_3_max_l1_linf_norm": 0.3804994225502014, + "layer_3_max_spectral_norm": 0.008524703793227673, + "layer_4_update_fnorm": 0.0603676475584507, + "layer_4_max_l1_linf_norm": 0.3818931579589844, + "layer_4_max_spectral_norm": 0.008510146290063858, + "layer_5_update_fnorm": 0.06097649782896042, + "layer_5_max_l1_linf_norm": 0.3606014847755432, + "layer_5_max_spectral_norm": 0.00810222327709198, + "layer_6_update_fnorm": 0.06137407571077347, + "layer_6_max_l1_linf_norm": 
0.3382163643836975, + "layer_6_max_spectral_norm": 0.00756220705807209, + "layer_7_update_fnorm": 0.06135345995426178, + "layer_7_max_l1_linf_norm": 0.3177306056022644, + "layer_7_max_spectral_norm": 0.0071617066860198975, + "layer_8_update_fnorm": 0.061259426176548004, + "layer_8_max_l1_linf_norm": 0.3245219588279724, + "layer_8_max_spectral_norm": 0.0073296246118843555, + "layer_9_update_fnorm": 0.061338622123003006, + "layer_9_max_l1_linf_norm": 0.33132943511009216, + "layer_9_max_spectral_norm": 0.00745911942794919, + "layer_10_update_fnorm": 0.06144637614488602, + "layer_10_max_l1_linf_norm": 0.36835983395576477, + "layer_10_max_spectral_norm": 0.008335137739777565, + "layer_11_update_fnorm": 0.06137380003929138, + "layer_11_max_l1_linf_norm": 0.3727359175682068, + "layer_11_max_spectral_norm": 0.00837726704776287, + "layer_12_update_fnorm": 0.06130789592862129, + "layer_12_max_l1_linf_norm": 0.3714735805988312, + "layer_12_max_spectral_norm": 0.008420385420322418, + "total_sharpness": 0.004228442907333374, + "ip_v_neg_g": 0.0026435875333845615, + "cos_v_neg_g": 0.0006006770418025553, + "v_norm": 1.3468776941299438, + "g_norm": 3.267566680908203, + "hv_norm": 0.7400966882705688, + "cos_v_hv": 0.0076952045783400536, + "hg_norm": 544.8310546875, + "cos_g_hg": 0.40570083260536194, + "v_parallel_norm": 2.872614459192846e-05, + "v_perp_norm": 1.3468776941299438, + "layer_1_v_norm": 0.060991495847702026, + "layer_1_cos_v_neg_g": 0.0037495156284421682, + "layer_2_v_norm": 0.0591198168694973, + "layer_2_cos_v_neg_g": 0.0030835114885121584, + "layer_3_v_norm": 0.05908949300646782, + "layer_3_cos_v_neg_g": 0.0023329223040491343, + "layer_4_v_norm": 0.0603676475584507, + "layer_4_cos_v_neg_g": 0.0025181081146001816, + "layer_5_v_norm": 0.06097649782896042, + "layer_5_cos_v_neg_g": 0.002962710103020072, + "layer_6_v_norm": 0.061374079436063766, + "layer_6_cos_v_neg_g": 0.002507917582988739, + "layer_7_v_norm": 0.06135345995426178, + "layer_7_cos_v_neg_g": 0.002451241249218583, + "layer_8_v_norm": 0.0612594299018383, + "layer_8_cos_v_neg_g": 0.0038929099682718515, + "layer_9_v_norm": 0.061338622123003006, + "layer_9_cos_v_neg_g": 0.004438252653926611, + "layer_10_v_norm": 0.06144637614488602, + "layer_10_cos_v_neg_g": 0.003635530825704336, + "layer_11_v_norm": 0.06137380003929138, + "layer_11_cos_v_neg_g": 0.0038557841908186674, + "layer_12_v_norm": 0.06130789592862129, + "layer_12_cos_v_neg_g": 0.004555037245154381, + "layer_1_sharpness": 0.04914066195487976, + "layer_2_sharpness": 0.019703399389982224, + "layer_3_sharpness": 0.023773763328790665, + "layer_4_sharpness": 0.015965253114700317, + "layer_5_sharpness": 0.020076824352145195, + "layer_6_sharpness": 0.02498209848999977, + "layer_7_sharpness": 0.025690244510769844, + "layer_8_sharpness": 0.0277847982943058, + "layer_9_sharpness": 0.019889449700713158, + "layer_10_sharpness": 0.013906686566770077, + "layer_11_sharpness": 0.014809198677539825, + "layer_12_sharpness": 0.024830220267176628 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..b2fda5efa6dcf34dd3707e639d27ad9aed7d707e --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3417012691497803, + "total_l1_linf_norm": 
8049.087890625, + "total_spectral_norm": 1.3417013883590698, + "layer_1_update_fnorm": 0.060948677361011505, + "layer_1_max_l1_linf_norm": 0.3294442594051361, + "layer_1_max_spectral_norm": 0.007440189830958843, + "layer_2_update_fnorm": 0.0588303841650486, + "layer_2_max_l1_linf_norm": 0.36786407232284546, + "layer_2_max_spectral_norm": 0.008256461471319199, + "layer_3_update_fnorm": 0.058916714042425156, + "layer_3_max_l1_linf_norm": 0.36112332344055176, + "layer_3_max_spectral_norm": 0.008188128471374512, + "layer_4_update_fnorm": 0.06022317335009575, + "layer_4_max_l1_linf_norm": 0.33692747354507446, + "layer_4_max_spectral_norm": 0.0076666721142828465, + "layer_5_update_fnorm": 0.060817718505859375, + "layer_5_max_l1_linf_norm": 0.31799939274787903, + "layer_5_max_spectral_norm": 0.007142869755625725, + "layer_6_update_fnorm": 0.06117869168519974, + "layer_6_max_l1_linf_norm": 0.30539458990097046, + "layer_6_max_spectral_norm": 0.006914996542036533, + "layer_7_update_fnorm": 0.0612226203083992, + "layer_7_max_l1_linf_norm": 0.2968367338180542, + "layer_7_max_spectral_norm": 0.006699254736304283, + "layer_8_update_fnorm": 0.06117250397801399, + "layer_8_max_l1_linf_norm": 0.30298110842704773, + "layer_8_max_spectral_norm": 0.006838880013674498, + "layer_9_update_fnorm": 0.061218295246362686, + "layer_9_max_l1_linf_norm": 0.33068761229515076, + "layer_9_max_spectral_norm": 0.007429090794175863, + "layer_10_update_fnorm": 0.061394114047288895, + "layer_10_max_l1_linf_norm": 0.3619537651538849, + "layer_10_max_spectral_norm": 0.008126466535031796, + "layer_11_update_fnorm": 0.061423059552907944, + "layer_11_max_l1_linf_norm": 0.3804778456687927, + "layer_11_max_spectral_norm": 0.008573967032134533, + "layer_12_update_fnorm": 0.06131849065423012, + "layer_12_max_l1_linf_norm": 0.3839341700077057, + "layer_12_max_spectral_norm": 0.008586708456277847, + "total_sharpness": 0.003945321775972843, + "ip_v_neg_g": 0.0031332566868513823, + "cos_v_neg_g": 0.0007849815883673728, + "v_norm": 1.3417012691497803, + "g_norm": 2.97495698928833, + "hv_norm": 0.662449300289154, + "cos_v_hv": 0.00799071416258812, + "hg_norm": 123.5588607788086, + "cos_g_hg": 0.44886261224746704, + "v_parallel_norm": 2.665813735802658e-05, + "v_perp_norm": 1.3417012691497803, + "layer_1_v_norm": 0.060948677361011505, + "layer_1_cos_v_neg_g": 0.007000681944191456, + "layer_2_v_norm": 0.0588303841650486, + "layer_2_cos_v_neg_g": 0.006235823035240173, + "layer_3_v_norm": 0.058916717767715454, + "layer_3_cos_v_neg_g": 0.004031449090689421, + "layer_4_v_norm": 0.06022317335009575, + "layer_4_cos_v_neg_g": 0.0034255001228302717, + "layer_5_v_norm": 0.060817718505859375, + "layer_5_cos_v_neg_g": 0.0026195133104920387, + "layer_6_v_norm": 0.06117869168519974, + "layer_6_cos_v_neg_g": 0.003527857596054673, + "layer_7_v_norm": 0.0612226203083992, + "layer_7_cos_v_neg_g": 0.006048789247870445, + "layer_8_v_norm": 0.06117250397801399, + "layer_8_cos_v_neg_g": 0.007031153421849012, + "layer_9_v_norm": 0.061218295246362686, + "layer_9_cos_v_neg_g": 0.006315150298178196, + "layer_10_v_norm": 0.061394114047288895, + "layer_10_cos_v_neg_g": 0.005114753730595112, + "layer_11_v_norm": 0.061423059552907944, + "layer_11_cos_v_neg_g": 0.004324081353843212, + "layer_12_v_norm": 0.06131849065423012, + "layer_12_cos_v_neg_g": 0.0032552185002714396, + "layer_1_sharpness": 0.06411784887313843, + "layer_2_sharpness": 0.024324147030711174, + "layer_3_sharpness": 0.02787649631500244, + "layer_4_sharpness": 0.014833182096481323, + "layer_5_sharpness": 
0.017342783510684967, + "layer_6_sharpness": 0.02087445929646492, + "layer_7_sharpness": 0.02635996788740158, + "layer_8_sharpness": 0.029091699048876762, + "layer_9_sharpness": 0.022017844021320343, + "layer_10_sharpness": 0.015410354360938072, + "layer_11_sharpness": 0.014391648583114147, + "layer_12_sharpness": 0.026100099086761475 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/training_log.txt b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..df4235c90ae6526aab8a0041f8dc95754878fddd --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported 
version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + 
write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, 
filename): + # the state is used for debugging. + # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. 
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
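+    # A minimal sketch of the directional-sharpness quantity computed below (illustrative
+    # summary only; v is the update direction collected above, L is the averaged HVP loss
+    # over the N micro-batches):
+    #     g  = dL/dtheta                     (first backward, with create_graph=True)
+    #     Hv = d<g, v>/dtheta                (second backward through the scalar <g, v>)
+    #     sharpness(v) = <v, Hv> / <v, v>    (Rayleigh quotient along the update direction)
+    # torch.autograd.grad applied to the inner product <g, v> yields the Hessian-vector
+    # product without materializing H; the layer-wise sharpness section further below reuses
+    # the same double-backward trick per transformer block.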
+ loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = <v, -g>; cos_v_neg_g = <v, -g> / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = 
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
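+ # (master process only; skipped entirely when --write_tensors 0 or --inference_only 1)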
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
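+ # (note: with the default --weight_decay 0.0 the two groups are optimized identically; the split only matters once weight decay is enabled)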
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + 
device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
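+ # the snapshot is differenced against the post-step parameters below (p_before - p_after), recovering the actual optimizer update that the sharpness analysis uses as its direction v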
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026603 +step:0 train loss:11.019230 +step:1 train loss:11.024741 +step:2 train loss:11.018303 +step:3 train loss:11.016243 +step:4 train loss:11.010815 +step:5 train loss:11.004898 +step:6 train loss:10.994961 +step:7 train loss:10.987474 +step:8 train loss:10.976637 +step:9 train loss:10.965155 +step:10 train loss:10.950165 +step:11 train loss:10.939951 +step:12 train loss:10.919452 +step:13 train loss:10.904996 +step:14 train loss:10.882957 +step:15 train loss:10.865047 +step:16 train loss:10.843878 +step:17 train loss:10.823952 +step:18 train loss:10.798814 +step:19 train loss:10.774614 +step:20 train loss:10.745081 +step:21 train loss:10.721921 +step:22 train loss:10.687227 +step:23 train loss:10.662707 +step:24 train loss:10.624113 +step:25 train loss:10.600618 +step:26 train loss:10.559886 +step:27 train loss:10.525508 +step:28 train loss:10.494781 +step:29 train loss:10.458062 +step:30 train loss:10.419313 +step:31 train loss:10.375749 +step:32 train loss:10.331426 +step:33 train loss:10.293999 +step:34 train loss:10.257496 +step:35 train loss:10.204175 +step:36 train loss:10.162202 +step:37 train loss:10.105407 
+step:38 train loss:10.069116 +step:39 train loss:10.013661 +step:40 train loss:9.968571 +step:41 train loss:9.909485 +step:42 train loss:9.873542 +step:43 train loss:9.798177 +step:44 train loss:9.757622 +step:45 train loss:9.695083 +step:46 train loss:9.654769 +step:47 train loss:9.593004 +step:48 train loss:9.535299 +step:49 train loss:9.467800 +step:50 train loss:9.405250 +step:51 train loss:9.338552 +step:52 train loss:9.296993 +step:53 train loss:9.231422 +step:54 train loss:9.174696 +step:55 train loss:9.101995 +step:56 train loss:9.041626 +step:57 train loss:8.991747 +step:58 train loss:8.910555 +step:59 train loss:8.857359 +step:60 train loss:8.794044 +step:61 train loss:8.737675 +step:62 train loss:8.675228 +step:63 train loss:8.649693 +step:64 train loss:8.540071 +step:65 train loss:8.488841 +step:66 train loss:8.444221 +step:67 train loss:8.389700 +step:68 train loss:8.331900 +step:69 train loss:8.263840 +step:70 train loss:8.208478 +step:71 train loss:8.134140 +step:72 train loss:8.114830 +step:73 train loss:8.046491 +step:74 train loss:8.021492 +step:75 train loss:7.943184 +step:76 train loss:7.963996 +step:77 train loss:7.876443 +step:78 train loss:7.736241 +step:79 train loss:7.776549 +step:80 train loss:7.744169 +step:81 train loss:7.755136 +step:82 train loss:7.724141 +step:83 train loss:7.673522 +step:84 train loss:7.638311 +step:85 train loss:7.605926 +step:86 train loss:7.581254 +step:87 train loss:7.553986 +step:88 train loss:7.554828 +step:89 train loss:7.519725 +step:90 train loss:7.554860 +step:91 train loss:7.557793 +step:92 train loss:7.552944 +step:93 train loss:7.505609 +step:94 train loss:7.487926 +step:95 train loss:7.444533 +step:96 train loss:7.522766 +step:97 train loss:7.473470 +step:98 train loss:7.464983 +step:99 train loss:7.428170 +step:100 train loss:7.491326 +step:101 train loss:7.373716 +step:102 train loss:7.361864 +step:103 train loss:7.346033 +step:104 train loss:7.380550 +step:105 train loss:7.425138 +step:106 train loss:7.361551 +step:107 train loss:7.322056 +step:108 train loss:7.327285 +step:109 train loss:7.354719 +step:110 train loss:7.278225 +step:111 train loss:7.282779 +step:112 train loss:7.265637 +step:113 train loss:7.221506 +step:114 train loss:7.275772 +step:115 train loss:7.211474 +step:116 train loss:7.184380 +step:117 train loss:7.117780 +step:118 train loss:7.176242 +step:119 train loss:7.113517 +step:120 train loss:7.116985 +step:121 train loss:7.033683 +step:122 train loss:7.112270 +step:123 train loss:7.027395 +step:124 train loss:7.010760 +step:125 train loss:6.974793 +step:126 train loss:7.056370 +step:127 train loss:6.969065 +step:128 train loss:6.994707 +step:129 train loss:6.963078 +step:130 train loss:7.006044 +step:131 train loss:6.935605 +step:132 train loss:6.843055 +step:133 train loss:6.908324 +step:134 train loss:6.862727 +step:135 train loss:6.771945 +step:136 train loss:6.801683 +step:137 train loss:6.795984 +step:138 train loss:6.723880 +step:139 train loss:6.798429 +step:140 train loss:6.700163 +step:141 train loss:6.793884 +step:142 train loss:6.733280 +step:143 train loss:6.738017 +step:144 train loss:6.701913 +step:145 train loss:6.632586 +step:146 train loss:6.638784 +step:147 train loss:6.690144 +step:148 train loss:6.689696 +step:149 train loss:6.630150 +step:150 train loss:6.637653 +step:151 train loss:6.540505 +step:152 train loss:6.573737 +step:153 train loss:6.546904 +step:154 train loss:6.628154 +step:155 train loss:6.595350 +step:156 train loss:6.625897 +step:157 train loss:6.520973 +step:158 
train loss:6.500922 +step:159 train loss:6.536859 +step:160 train loss:6.510606 +step:161 train loss:6.500454 +step:162 train loss:6.471101 +step:163 train loss:6.484480 +step:164 train loss:6.495469 +step:165 train loss:6.496189 +step:166 train loss:6.446797 +step:167 train loss:6.444683 +step:168 train loss:6.415161 +step:169 train loss:6.364795 +step:170 train loss:6.334865 +step:171 train loss:6.444142 +step:172 train loss:6.372681 +step:173 train loss:6.413064 +step:174 train loss:6.416464 +step:175 train loss:6.377749 +step:176 train loss:6.326855 +step:177 train loss:6.372291 +step:178 train loss:6.377313 +step:179 train loss:6.328806 +step:180 train loss:6.314459 +step:181 train loss:6.347831 +step:182 train loss:6.277369 +step:183 train loss:6.369755 +step:184 train loss:6.336675 +step:185 train loss:6.249866 +step:186 train loss:6.399144 +step:187 train loss:6.332198 +step:188 train loss:6.153195 +step:189 train loss:6.309755 +step:190 train loss:6.301004 +step:191 train loss:6.222210 +step:192 train loss:6.134328 +step:193 train loss:6.283253 +step:194 train loss:6.304473 +step:195 train loss:6.295424 +step:196 train loss:6.267105 +step:197 train loss:6.264250 +step:198 train loss:6.202891 +step:199 train loss:6.283998 +step:200 train loss:6.317467 +step:201 train loss:6.250556 +step:202 train loss:6.252655 +step:203 train loss:6.208448 +step:204 train loss:6.247348 +step:205 train loss:6.099296 +step:206 train loss:6.236412 +step:207 train loss:6.204907 +step:208 train loss:6.149877 +step:209 train loss:6.146828 +step:210 train loss:6.145462 +step:211 train loss:6.218173 +step:212 train loss:6.167718 +step:213 train loss:6.181700 +step:214 train loss:6.163779 +step:215 train loss:6.186077 +step:216 train loss:6.130280 +step:217 train loss:6.135013 +step:218 train loss:6.110353 +step:219 train loss:6.086267 +step:220 train loss:6.132797 +step:221 train loss:6.084029 +step:222 train loss:6.125071 +step:223 train loss:6.141318 +step:224 train loss:6.130571 +step:225 train loss:6.068120 +step:226 train loss:6.071829 +step:227 train loss:6.134011 +step:228 train loss:6.098869 +step:229 train loss:6.171779 +step:230 train loss:6.037917 +step:231 train loss:6.092552 +step:232 train loss:6.076672 +step:233 train loss:6.049213 +step:234 train loss:6.047095 +step:235 train loss:6.127837 +step:236 train loss:6.074623 +step:237 train loss:6.104520 +step:238 train loss:6.108625 +step:239 train loss:6.014019 +step:240 train loss:6.085170 +step:241 train loss:6.120732 +step:242 train loss:6.102972 +step:243 train loss:6.010604 +step:244 train loss:6.039134 +step:245 train loss:6.022413 +step:246 train loss:6.019335 +step:247 train loss:6.010878 +step:248 train loss:5.967947 +step:249 train loss:6.027516 +step:250 validation loss:6.020460 +step:250 train loss:5.994598 +step:251 train loss:6.030679 +step:252 train loss:5.988575 +step:253 train loss:5.990091 +step:254 train loss:5.951527 +step:255 train loss:5.990941 +step:256 train loss:5.988977 +step:257 train loss:6.037634 +step:258 train loss:5.936235 +step:259 train loss:5.961418 +step:260 train loss:5.933743 +step:261 train loss:5.933568 +step:262 train loss:5.999703 +step:263 train loss:5.960375 +step:264 train loss:5.927125 +step:265 train loss:5.953290 +step:266 train loss:5.916287 +step:267 train loss:5.952142 +step:268 train loss:5.904911 +step:269 train loss:5.932606 +step:270 train loss:5.948658 +step:271 train loss:5.937981 +step:272 train loss:5.892991 +step:273 train loss:5.965160 +step:274 train loss:5.875009 +step:275 train 
loss:5.914486 +step:276 train loss:5.887588 +step:277 train loss:5.887440 +step:278 train loss:5.864511 +step:279 train loss:5.834289 +step:280 train loss:5.909454 +step:281 train loss:5.985534 +step:282 train loss:5.865908 +step:283 train loss:5.878535 +step:284 train loss:5.845081 +step:285 train loss:5.896250 +step:286 train loss:5.872317 +step:287 train loss:5.847991 +step:288 train loss:5.827922 +step:289 train loss:5.852885 +step:290 train loss:5.908237 +step:291 train loss:5.834074 +step:292 train loss:5.890963 +step:293 train loss:5.818717 +step:294 train loss:5.929769 +step:295 train loss:5.824619 +step:296 train loss:5.884552 +step:297 train loss:5.918818 +step:298 train loss:5.807889 +step:299 train loss:5.881838 +step:300 train loss:5.801690 +step:301 train loss:5.835197 +step:302 train loss:5.811179 +step:303 train loss:5.824759 +step:304 train loss:5.857472 +step:305 train loss:5.776789 +step:306 train loss:5.802345 +step:307 train loss:5.828986 +step:308 train loss:5.738055 +step:309 train loss:5.877319 +step:310 train loss:5.838541 +step:311 train loss:5.819833 +step:312 train loss:5.805499 +step:313 train loss:5.828464 +step:314 train loss:5.809721 +step:315 train loss:5.770685 +step:316 train loss:5.767491 +step:317 train loss:5.731225 +step:318 train loss:5.731032 +step:319 train loss:5.806708 +step:320 train loss:5.725986 +step:321 train loss:5.783318 +step:322 train loss:5.775108 +step:323 train loss:5.840291 +step:324 train loss:5.781217 +step:325 train loss:5.803863 +step:326 train loss:5.809285 +step:327 train loss:5.783624 +step:328 train loss:5.761231 +step:329 train loss:5.776128 +step:330 train loss:5.706761 +step:331 train loss:5.733931 +step:332 train loss:5.721168 +step:333 train loss:5.658888 +step:334 train loss:5.754961 +step:335 train loss:5.796879 +step:336 train loss:5.918247 +step:337 train loss:5.817627 +step:338 train loss:5.728302 +step:339 train loss:5.687125 +step:340 train loss:5.697145 +step:341 train loss:5.688021 +step:342 train loss:5.756304 +step:343 train loss:5.728725 +step:344 train loss:5.686623 +step:345 train loss:5.662453 +step:346 train loss:5.707230 +step:347 train loss:5.644893 +step:348 train loss:5.655421 +step:349 train loss:5.592456 +step:350 train loss:5.629547 +step:351 train loss:5.697526 +step:352 train loss:5.653859 +step:353 train loss:5.680959 +step:354 train loss:5.635986 +step:355 train loss:5.685215 +step:356 train loss:5.653403 +step:357 train loss:5.722245 +step:358 train loss:5.752104 +step:359 train loss:5.585601 +step:360 train loss:5.705313 +step:361 train loss:5.688769 +step:362 train loss:5.669315 +step:363 train loss:5.622115 +step:364 train loss:5.743177 +step:365 train loss:5.674870 +step:366 train loss:5.644628 +step:367 train loss:5.669342 +step:368 train loss:5.638212 +step:369 train loss:5.615979 +step:370 train loss:5.670683 +step:371 train loss:5.605338 +step:372 train loss:5.677341 +step:373 train loss:5.626622 +step:374 train loss:5.614076 +step:375 train loss:5.647197 +step:376 train loss:5.630581 +step:377 train loss:5.525252 +step:378 train loss:5.606997 +step:379 train loss:5.657910 +step:380 train loss:5.586870 +step:381 train loss:5.647376 +step:382 train loss:5.639512 +step:383 train loss:5.611398 +step:384 train loss:5.587474 +step:385 train loss:5.579244 +step:386 train loss:5.613693 +step:387 train loss:5.612486 +step:388 train loss:5.581329 +step:389 train loss:5.595913 +step:390 train loss:5.580510 +step:391 train loss:5.585418 +step:392 train loss:5.571363 +step:393 train loss:5.567640 
+step:394 train loss:5.616010 +step:395 train loss:5.547915 +step:396 train loss:5.501915 +step:397 train loss:5.582508 +step:398 train loss:5.567742 +step:399 train loss:5.575205 +step:400 train loss:5.533461 +step:401 train loss:5.574653 +step:402 train loss:5.555552 +step:403 train loss:5.552613 +step:404 train loss:5.536013 +step:405 train loss:5.532195 +step:406 train loss:5.567003 +step:407 train loss:5.553811 +step:408 train loss:5.616371 +step:409 train loss:5.545782 +step:410 train loss:5.518808 +step:411 train loss:5.507516 +step:412 train loss:5.595047 +step:413 train loss:5.484324 +step:414 train loss:5.563834 +step:415 train loss:5.523430 +step:416 train loss:5.535769 +step:417 train loss:5.558698 +step:418 train loss:5.505237 +step:419 train loss:5.493591 +step:420 train loss:5.491127 +step:421 train loss:5.477455 +step:422 train loss:5.478524 +step:423 train loss:5.485318 +step:424 train loss:5.455287 +step:425 train loss:5.520472 +step:426 train loss:5.513596 +step:427 train loss:5.442745 +step:428 train loss:5.511198 +step:429 train loss:5.422696 +step:430 train loss:5.462234 +step:431 train loss:5.493433 +step:432 train loss:5.511255 +step:433 train loss:5.500070 +step:434 train loss:5.457796 +step:435 train loss:5.509636 +step:436 train loss:5.530915 +step:437 train loss:5.488895 +step:438 train loss:5.446527 +step:439 train loss:5.444825 +step:440 train loss:5.484443 +step:441 train loss:5.432217 +step:442 train loss:5.433097 +step:443 train loss:5.447392 +step:444 train loss:5.480868 +step:445 train loss:5.482832 +step:446 train loss:5.423745 +step:447 train loss:5.436110 +step:448 train loss:5.496797 +step:449 train loss:5.451364 +step:450 train loss:5.442685 +step:451 train loss:5.426438 +step:452 train loss:5.490372 +step:453 train loss:5.421293 +step:454 train loss:5.385511 +step:455 train loss:5.470152 +step:456 train loss:5.435144 +step:457 train loss:5.409209 +step:458 train loss:5.432217 +step:459 train loss:5.381907 +step:460 train loss:5.482208 +step:461 train loss:5.436792 +step:462 train loss:5.340000 +step:463 train loss:5.397450 +step:464 train loss:5.448472 +step:465 train loss:5.409588 +step:466 train loss:5.430804 +step:467 train loss:5.388560 +step:468 train loss:5.439885 +step:469 train loss:5.410275 +step:470 train loss:5.369850 +step:471 train loss:5.464227 +step:472 train loss:5.348798 +step:473 train loss:5.424161 +step:474 train loss:5.404216 +step:475 train loss:5.422061 +step:476 train loss:5.397481 +step:477 train loss:5.340196 +step:478 train loss:5.358020 +step:479 train loss:5.353870 +step:480 train loss:5.377018 +step:481 train loss:5.384233 +step:482 train loss:5.324852 +step:483 train loss:5.392952 +step:484 train loss:5.348560 +step:485 train loss:5.328021 +step:486 train loss:5.385024 +step:487 train loss:5.357346 +step:488 train loss:5.352942 +step:489 train loss:5.348989 +step:490 train loss:5.330175 +step:491 train loss:5.344451 +step:492 train loss:5.344996 +step:493 train loss:5.347616 +step:494 train loss:5.356636 +step:495 train loss:5.309812 +step:496 train loss:5.397909 +step:497 train loss:5.286688 +step:498 train loss:5.382550 +step:499 train loss:5.357548 +step:500 validation loss:5.335204 total_sharp:4.7386e-02 L1_sharp:1.9390e+00 L2_sharp:4.0497e-01 L3_sharp:2.9373e-01 L4_sharp:2.4434e-01 L5_sharp:1.7500e-01 L6_sharp:1.0849e-01 L7_sharp:7.9146e-02 L8_sharp:6.1218e-02 L9_sharp:5.1311e-02 L10_sharp:4.9042e-02 L11_sharp:4.9992e-02 L12_sharp:5.0883e-02 total_fnorm:9.3032e-01 total_l1_linf:5.5959e+03 total_spectral:9.3032e-01 
L1_fnorm:4.3941e-02 L2_fnorm:4.2423e-02 L3_fnorm:4.1614e-02 L4_fnorm:4.1366e-02 L5_fnorm:4.1314e-02 L6_fnorm:4.1691e-02 L7_fnorm:4.2044e-02 L8_fnorm:4.2266e-02 L9_fnorm:4.2502e-02 L10_fnorm:4.2730e-02 L11_fnorm:4.2798e-02 L12_fnorm:4.3015e-02 L1_l1linf:1.2408e-01 L2_l1linf:1.4285e-01 L3_l1linf:1.5762e-01 L4_l1linf:1.6017e-01 L5_l1linf:1.6241e-01 L6_l1linf:1.6737e-01 L7_l1linf:1.9486e-01 L8_l1linf:1.5701e-01 L9_l1linf:1.5605e-01 L10_l1linf:1.5975e-01 L11_l1linf:1.6268e-01 L12_l1linf:1.7096e-01 L1_spectral:2.8009e-03 L2_spectral:3.2797e-03 L3_spectral:3.5321e-03 L4_spectral:6.1455e-03 L5_spectral:6.2667e-03 L6_spectral:7.2060e-03 L7_spectral:8.2386e-03 L8_spectral:6.7702e-03 L9_spectral:6.6142e-03 L10_spectral:6.2488e-03 L11_spectral:4.1796e-03 L12_spectral:4.0675e-03 ip_v_neg_g:1.9701e-02 cos_v_neg_g:2.6371e-03 v_norm:9.3032e-01 g_norm:8.0304e+00 hv_norm:7.8500e+00 cos_v_hv:5.6158e-03 hg_norm:1.0602e+04 cos_g_hg:6.8993e-01 v_par:1.5896e-04 v_perp:9.3032e-01 L1_cos_v_neg_g:2.4887e-02 L1_v_norm:4.3941e-02 L2_cos_v_neg_g:1.8602e-02 L2_v_norm:4.2423e-02 L3_cos_v_neg_g:1.8487e-02 L3_v_norm:4.1614e-02 L4_cos_v_neg_g:1.9004e-02 L4_v_norm:4.1366e-02 L5_cos_v_neg_g:1.8240e-02 L5_v_norm:4.1314e-02 L6_cos_v_neg_g:1.5827e-02 L6_v_norm:4.1691e-02 L7_cos_v_neg_g:1.5414e-02 L7_v_norm:4.2044e-02 L8_cos_v_neg_g:1.4063e-02 L8_v_norm:4.2266e-02 L9_cos_v_neg_g:1.3129e-02 L9_v_norm:4.2502e-02 L10_cos_v_neg_g:1.2481e-02 L10_v_norm:4.2730e-02 L11_cos_v_neg_g:1.2012e-02 L11_v_norm:4.2798e-02 L12_cos_v_neg_g:1.0084e-02 L12_v_norm:4.3015e-02 +step:500 train loss:5.361890 +step:501 train loss:5.316594 +step:502 train loss:5.357707 +step:503 train loss:5.284689 +step:504 train loss:5.370149 +step:505 train loss:5.303874 +step:506 train loss:5.304608 +step:507 train loss:5.317188 +step:508 train loss:5.344090 +step:509 train loss:5.342592 +step:510 train loss:5.274423 +step:511 train loss:5.269990 +step:512 train loss:5.266343 +step:513 train loss:5.293987 +step:514 train loss:5.358223 +step:515 train loss:5.303673 +step:516 train loss:5.378560 +step:517 train loss:5.303499 +step:518 train loss:5.288143 +step:519 train loss:5.343454 +step:520 train loss:5.289696 +step:521 train loss:5.281088 +step:522 train loss:5.304309 +step:523 train loss:5.302435 +step:524 train loss:5.252903 +step:525 train loss:5.259137 +step:526 train loss:5.272495 +step:527 train loss:5.273256 +step:528 train loss:5.269227 +step:529 train loss:5.297777 +step:530 train loss:5.251963 +step:531 train loss:5.286489 +step:532 train loss:5.251043 +step:533 train loss:5.208943 +step:534 train loss:5.285458 +step:535 train loss:5.276273 +step:536 train loss:5.340724 +step:537 train loss:5.226583 +step:538 train loss:5.191272 +step:539 train loss:5.282989 +step:540 train loss:5.323376 +step:541 train loss:5.224005 +step:542 train loss:5.254459 +step:543 train loss:5.269447 +step:544 train loss:5.265781 +step:545 train loss:5.247652 +step:546 train loss:5.205945 +step:547 train loss:5.227960 +step:548 train loss:5.187335 +step:549 train loss:5.240483 +step:550 train loss:5.220615 +step:551 train loss:5.224490 +step:552 train loss:5.319657 +step:553 train loss:5.282126 +step:554 train loss:5.226328 +step:555 train loss:5.284219 +step:556 train loss:5.235656 +step:557 train loss:5.207100 +step:558 train loss:5.187893 +step:559 train loss:5.237246 +step:560 train loss:5.283464 +step:561 train loss:5.170118 +step:562 train loss:5.156655 +step:563 train loss:5.230932 +step:564 train loss:5.198902 +step:565 train loss:5.216899 +step:566 train loss:5.222169 
+step:567 train loss:5.213877 +step:568 train loss:5.239587 +step:569 train loss:5.215451 +step:570 train loss:5.149094 +step:571 train loss:5.178683 +step:572 train loss:5.178674 +step:573 train loss:5.171571 +step:574 train loss:5.213733 +step:575 train loss:5.173174 +step:576 train loss:5.179380 +step:577 train loss:5.196287 +step:578 train loss:5.176790 +step:579 train loss:5.223703 +step:580 train loss:5.160448 +step:581 train loss:5.217239 +step:582 train loss:5.179297 +step:583 train loss:5.192044 +step:584 train loss:5.174635 +step:585 train loss:5.161658 +step:586 train loss:5.156440 +step:587 train loss:5.233262 +step:588 train loss:5.149743 +step:589 train loss:5.198720 +step:590 train loss:5.210899 +step:591 train loss:5.143653 +step:592 train loss:5.129906 +step:593 train loss:5.148267 +step:594 train loss:5.121037 +step:595 train loss:5.167729 +step:596 train loss:5.144096 +step:597 train loss:5.172335 +step:598 train loss:5.144960 +step:599 train loss:5.143923 +step:600 train loss:5.117306 +step:601 train loss:5.096348 +step:602 train loss:5.106327 +step:603 train loss:5.160675 +step:604 train loss:5.138825 +step:605 train loss:5.167912 +step:606 train loss:5.111130 +step:607 train loss:5.104316 +step:608 train loss:5.103598 +step:609 train loss:5.076254 +step:610 train loss:5.092909 +step:611 train loss:5.101717 +step:612 train loss:5.139211 +step:613 train loss:5.063685 +step:614 train loss:5.100959 +step:615 train loss:5.151741 +step:616 train loss:5.074936 +step:617 train loss:5.107896 +step:618 train loss:5.077504 +step:619 train loss:5.114627 +step:620 train loss:5.137775 +step:621 train loss:5.067041 +step:622 train loss:5.130919 +step:623 train loss:5.131886 +step:624 train loss:5.109670 +step:625 train loss:5.112066 +step:626 train loss:5.111423 +step:627 train loss:5.084450 +step:628 train loss:5.086826 +step:629 train loss:5.028970 +step:630 train loss:5.057365 +step:631 train loss:5.047334 +step:632 train loss:5.062770 +step:633 train loss:5.086792 +step:634 train loss:5.082466 +step:635 train loss:5.018951 +step:636 train loss:5.109882 +step:637 train loss:5.023846 +step:638 train loss:4.962917 +step:639 train loss:5.082285 +step:640 train loss:5.032646 +step:641 train loss:5.055818 +step:642 train loss:5.103843 +step:643 train loss:5.004223 +step:644 train loss:5.086843 +step:645 train loss:5.040937 +step:646 train loss:5.035110 +step:647 train loss:5.051261 +step:648 train loss:5.145821 +step:649 train loss:5.040382 +step:650 train loss:5.105125 +step:651 train loss:4.983559 +step:652 train loss:5.014863 +step:653 train loss:5.015141 +step:654 train loss:5.007654 +step:655 train loss:5.049130 +step:656 train loss:4.992129 +step:657 train loss:5.052346 +step:658 train loss:4.972960 +step:659 train loss:5.053763 +step:660 train loss:5.013496 +step:661 train loss:5.057203 +step:662 train loss:5.048562 +step:663 train loss:5.047655 +step:664 train loss:4.956756 +step:665 train loss:4.973035 +step:666 train loss:4.975236 +step:667 train loss:5.036074 +step:668 train loss:5.008300 +step:669 train loss:4.987393 +step:670 train loss:5.009128 +step:671 train loss:4.984297 +step:672 train loss:4.952765 +step:673 train loss:5.039244 +step:674 train loss:5.043018 +step:675 train loss:4.945471 +step:676 train loss:5.026747 +step:677 train loss:4.961290 +step:678 train loss:4.949085 +step:679 train loss:4.998305 +step:680 train loss:4.951365 +step:681 train loss:4.999648 +step:682 train loss:4.912686 +step:683 train loss:4.974014 +step:684 train loss:5.012995 +step:685 
train loss:4.943547 +step:686 train loss:5.053701 +step:687 train loss:4.982894 +step:688 train loss:4.914087 +step:689 train loss:4.962123 +step:690 train loss:4.928246 +step:691 train loss:4.940832 +step:692 train loss:4.958274 +step:693 train loss:4.947486 +step:694 train loss:4.937987 +step:695 train loss:4.896086 +step:696 train loss:4.855772 +step:697 train loss:4.977884 +step:698 train loss:4.905001 +step:699 train loss:4.905082 +step:700 train loss:4.993577 +step:701 train loss:4.888857 +step:702 train loss:4.960834 +step:703 train loss:4.895221 +step:704 train loss:4.854575 +step:705 train loss:4.898481 +step:706 train loss:4.791825 +step:707 train loss:4.859718 +step:708 train loss:4.947699 +step:709 train loss:4.910358 +step:710 train loss:4.874743 +step:711 train loss:4.942065 +step:712 train loss:4.886933 +step:713 train loss:4.848680 +step:714 train loss:4.938836 +step:715 train loss:4.837012 +step:716 train loss:4.979161 +step:717 train loss:4.859193 +step:718 train loss:4.926519 +step:719 train loss:4.876999 +step:720 train loss:4.865397 +step:721 train loss:4.881379 +step:722 train loss:4.894491 +step:723 train loss:4.933282 +step:724 train loss:4.903984 +step:725 train loss:4.873408 +step:726 train loss:4.857103 +step:727 train loss:4.887469 +step:728 train loss:4.873532 +step:729 train loss:4.803671 +step:730 train loss:4.900337 +step:731 train loss:4.919516 +step:732 train loss:4.894526 +step:733 train loss:4.871084 +step:734 train loss:4.860379 +step:735 train loss:4.935736 +step:736 train loss:4.874834 +step:737 train loss:4.868906 +step:738 train loss:4.898964 +step:739 train loss:4.842285 +step:740 train loss:4.860080 +step:741 train loss:4.934024 +step:742 train loss:4.831998 +step:743 train loss:4.819043 +step:744 train loss:4.866977 +step:745 train loss:4.809563 +step:746 train loss:4.812599 +step:747 train loss:4.836792 +step:748 train loss:4.802270 +step:749 train loss:4.841529 +step:750 validation loss:4.807273 +step:750 train loss:4.788427 +step:751 train loss:4.808831 +step:752 train loss:4.756138 +step:753 train loss:4.812222 +step:754 train loss:4.814518 +step:755 train loss:4.864384 +step:756 train loss:4.840843 +step:757 train loss:4.937369 +step:758 train loss:4.817375 +step:759 train loss:4.820220 +step:760 train loss:4.788408 +step:761 train loss:4.828839 +step:762 train loss:4.801047 +step:763 train loss:4.795657 +step:764 train loss:4.770867 +step:765 train loss:4.772249 +step:766 train loss:4.858785 +step:767 train loss:4.956745 +step:768 train loss:4.785673 +step:769 train loss:4.813561 +step:770 train loss:4.834926 +step:771 train loss:4.891728 +step:772 train loss:4.824677 +step:773 train loss:4.770927 +step:774 train loss:4.818674 +step:775 train loss:4.792909 +step:776 train loss:4.802387 +step:777 train loss:4.761813 +step:778 train loss:4.769716 +step:779 train loss:4.746894 +step:780 train loss:4.803893 +step:781 train loss:4.733067 +step:782 train loss:4.761609 +step:783 train loss:4.744031 +step:784 train loss:4.753923 +step:785 train loss:4.724584 +step:786 train loss:4.756618 +step:787 train loss:4.702364 +step:788 train loss:4.764993 +step:789 train loss:4.764957 +step:790 train loss:4.714020 +step:791 train loss:4.804117 +step:792 train loss:4.809032 +step:793 train loss:4.770092 +step:794 train loss:4.767099 +step:795 train loss:4.719419 +step:796 train loss:4.988019 +step:797 train loss:4.757311 +step:798 train loss:4.734052 +step:799 train loss:4.749831 +step:800 train loss:4.836716 +step:801 train loss:4.738809 +step:802 train 
loss:4.855143 +step:803 train loss:4.761494 +step:804 train loss:4.698143 +step:805 train loss:4.762194 +step:806 train loss:4.667486 +step:807 train loss:4.728600 +step:808 train loss:4.733438 +step:809 train loss:4.699048 +step:810 train loss:4.675791 +step:811 train loss:4.772537 +step:812 train loss:4.727587 +step:813 train loss:4.739721 +step:814 train loss:4.792621 +step:815 train loss:4.757517 +step:816 train loss:4.683439 +step:817 train loss:4.719078 +step:818 train loss:4.687184 +step:819 train loss:4.689230 +step:820 train loss:4.698637 +step:821 train loss:4.644192 +step:822 train loss:4.635793 +step:823 train loss:4.724714 +step:824 train loss:4.632496 +step:825 train loss:4.620955 +step:826 train loss:4.674679 +step:827 train loss:4.604577 +step:828 train loss:4.678395 +step:829 train loss:4.679585 +step:830 train loss:4.688059 +step:831 train loss:4.712095 +step:832 train loss:4.762361 +step:833 train loss:4.714705 +step:834 train loss:4.695405 +step:835 train loss:4.668451 +step:836 train loss:4.654918 +step:837 train loss:4.627253 +step:838 train loss:4.627291 +step:839 train loss:4.630860 +step:840 train loss:4.670595 +step:841 train loss:4.646152 +step:842 train loss:4.657529 +step:843 train loss:4.654932 +step:844 train loss:4.629337 +step:845 train loss:4.612157 +step:846 train loss:4.694951 +step:847 train loss:4.662507 +step:848 train loss:4.618294 +step:849 train loss:4.669549 +step:850 train loss:4.672256 +step:851 train loss:4.637735 +step:852 train loss:4.705423 +step:853 train loss:4.598113 +step:854 train loss:4.646617 +step:855 train loss:4.631401 +step:856 train loss:4.589793 +step:857 train loss:4.632576 +step:858 train loss:4.667444 +step:859 train loss:4.622358 +step:860 train loss:4.624994 +step:861 train loss:4.662467 +step:862 train loss:4.607562 +step:863 train loss:4.623557 +step:864 train loss:4.602955 +step:865 train loss:4.629805 +step:866 train loss:4.640912 +step:867 train loss:4.722673 +step:868 train loss:4.606483 +step:869 train loss:4.622904 +step:870 train loss:4.577261 +step:871 train loss:4.580971 +step:872 train loss:4.608569 +step:873 train loss:4.597826 +step:874 train loss:4.602226 +step:875 train loss:4.518342 +step:876 train loss:4.627816 +step:877 train loss:4.538083 +step:878 train loss:4.640398 +step:879 train loss:4.569640 +step:880 train loss:4.654124 +step:881 train loss:4.597932 +step:882 train loss:4.556649 +step:883 train loss:4.596261 +step:884 train loss:4.606959 +step:885 train loss:4.558619 +step:886 train loss:4.539752 +step:887 train loss:4.573753 +step:888 train loss:4.673717 +step:889 train loss:4.610452 +step:890 train loss:4.555018 +step:891 train loss:4.515300 +step:892 train loss:4.488386 +step:893 train loss:4.564198 +step:894 train loss:4.544095 +step:895 train loss:4.527318 +step:896 train loss:4.615048 +step:897 train loss:4.541689 +step:898 train loss:4.554106 +step:899 train loss:4.562319 +step:900 train loss:4.614040 +step:901 train loss:4.523074 +step:902 train loss:4.562752 +step:903 train loss:4.639701 +step:904 train loss:4.656611 +step:905 train loss:4.539707 +step:906 train loss:4.552158 +step:907 train loss:4.562771 +step:908 train loss:4.580966 +step:909 train loss:4.535715 +step:910 train loss:4.562599 +step:911 train loss:4.681302 +step:912 train loss:4.495494 +step:913 train loss:4.555428 +step:914 train loss:4.525032 +step:915 train loss:4.537035 +step:916 train loss:4.610765 +step:917 train loss:4.542831 +step:918 train loss:4.619409 +step:919 train loss:4.694705 +step:920 train loss:4.457073 
+step:921 train loss:4.561493 +step:922 train loss:4.532856 +step:923 train loss:4.475366 +step:924 train loss:4.505231 +step:925 train loss:4.460795 +step:926 train loss:4.556365 +step:927 train loss:4.475407 +step:928 train loss:4.534370 +step:929 train loss:4.512718 +step:930 train loss:4.512140 +step:931 train loss:4.559976 +step:932 train loss:4.512727 +step:933 train loss:4.524042 +step:934 train loss:4.566511 +step:935 train loss:4.545835 +step:936 train loss:4.530445 +step:937 train loss:4.537003 +step:938 train loss:4.537660 +step:939 train loss:4.417572 +step:940 train loss:4.515935 +step:941 train loss:4.456320 +step:942 train loss:4.435378 +step:943 train loss:4.534469 +step:944 train loss:4.487866 +step:945 train loss:4.498331 +step:946 train loss:4.534104 +step:947 train loss:4.653693 +step:948 train loss:4.468639 +step:949 train loss:4.522376 +step:950 train loss:4.458182 +step:951 train loss:4.485054 +step:952 train loss:4.546519 +step:953 train loss:4.482986 +step:954 train loss:4.497194 +step:955 train loss:4.436467 +step:956 train loss:4.459639 +step:957 train loss:4.459389 +step:958 train loss:4.537628 +step:959 train loss:4.474587 +step:960 train loss:4.563656 +step:961 train loss:4.521813 +step:962 train loss:4.465791 +step:963 train loss:4.450988 +step:964 train loss:4.486460 +step:965 train loss:4.406084 +step:966 train loss:4.420126 +step:967 train loss:4.476771 +step:968 train loss:4.470238 +step:969 train loss:4.421807 +step:970 train loss:4.493783 +step:971 train loss:4.458544 +step:972 train loss:4.389762 +step:973 train loss:4.475069 +step:974 train loss:4.420828 +step:975 train loss:4.512839 +step:976 train loss:4.455075 +step:977 train loss:4.448565 +step:978 train loss:4.458033 +step:979 train loss:4.434551 +step:980 train loss:4.440495 +step:981 train loss:4.424337 +step:982 train loss:4.425188 +step:983 train loss:4.431108 +step:984 train loss:4.463290 +step:985 train loss:4.436940 +step:986 train loss:4.444135 +step:987 train loss:4.476112 +step:988 train loss:4.459592 +step:989 train loss:4.429502 +step:990 train loss:4.412888 +step:991 train loss:4.353364 +step:992 train loss:4.410953 +step:993 train loss:4.432908 +step:994 train loss:4.369409 +step:995 train loss:4.381948 +step:996 train loss:4.433898 +step:997 train loss:4.384812 +step:998 train loss:4.385277 +step:999 train loss:4.429138 +step:1000 validation loss:4.375106 total_sharp:2.6269e-02 L1_sharp:8.4626e-01 L2_sharp:5.7008e-01 L3_sharp:3.2192e-01 L4_sharp:1.4444e-01 L5_sharp:1.0038e-01 L6_sharp:7.1403e-02 L7_sharp:6.4937e-02 L8_sharp:5.1337e-02 L9_sharp:3.4682e-02 L10_sharp:2.7501e-02 L11_sharp:2.4758e-02 L12_sharp:2.6503e-02 total_fnorm:1.3209e+00 total_l1_linf:7.9414e+03 total_spectral:1.3209e+00 L1_fnorm:6.1754e-02 L2_fnorm:5.6402e-02 L3_fnorm:5.3619e-02 L4_fnorm:5.7522e-02 L5_fnorm:5.8488e-02 L6_fnorm:5.9902e-02 L7_fnorm:6.0427e-02 L8_fnorm:6.0489e-02 L9_fnorm:6.0805e-02 L10_fnorm:6.1091e-02 L11_fnorm:6.0652e-02 L12_fnorm:6.1119e-02 L1_l1linf:2.1221e-01 L2_l1linf:2.0117e-01 L3_l1linf:2.0507e-01 L4_l1linf:2.1102e-01 L5_l1linf:2.1861e-01 L6_l1linf:2.0729e-01 L7_l1linf:2.1374e-01 L8_l1linf:2.2105e-01 L9_l1linf:2.1801e-01 L10_l1linf:2.1294e-01 L11_l1linf:1.8613e-01 L12_l1linf:1.7175e-01 L1_spectral:4.6350e-03 L2_spectral:4.5288e-03 L3_spectral:4.6468e-03 L4_spectral:4.8011e-03 L5_spectral:4.9236e-03 L6_spectral:4.8547e-03 L7_spectral:5.6188e-03 L8_spectral:6.0980e-03 L9_spectral:6.4860e-03 L10_spectral:5.8319e-03 L11_spectral:5.4403e-03 L12_spectral:5.8022e-03 ip_v_neg_g:2.4876e-02 
cos_v_neg_g:2.9140e-03 v_norm:1.3209e+00 g_norm:6.4626e+00 hv_norm:5.0012e+00 cos_v_hv:6.9383e-03 hg_norm:2.7404e+03 cos_g_hg:6.3614e-01 v_par:1.6892e-04 v_perp:1.3209e+00 L1_cos_v_neg_g:3.1534e-02 L1_v_norm:6.1754e-02 L2_cos_v_neg_g:3.1459e-02 L2_v_norm:5.6402e-02 L3_cos_v_neg_g:2.7214e-02 L3_v_norm:5.3619e-02 L4_cos_v_neg_g:1.8693e-02 L4_v_norm:5.7522e-02 L5_cos_v_neg_g:1.6512e-02 L5_v_norm:5.8488e-02 L6_cos_v_neg_g:1.3213e-02 L6_v_norm:5.9902e-02 L7_cos_v_neg_g:1.2010e-02 L7_v_norm:6.0427e-02 L8_cos_v_neg_g:1.0600e-02 L8_v_norm:6.0489e-02 L9_cos_v_neg_g:9.1807e-03 L9_v_norm:6.0805e-02 L10_cos_v_neg_g:8.2488e-03 L10_v_norm:6.1091e-02 L11_cos_v_neg_g:8.7829e-03 L11_v_norm:6.0652e-02 L12_cos_v_neg_g:9.5991e-03 L12_v_norm:6.1119e-02 +step:1000 train loss:4.439199 +step:1001 train loss:4.436048 +step:1002 train loss:4.424642 +step:1003 train loss:4.405044 +step:1004 train loss:4.376105 +step:1005 train loss:4.390963 +step:1006 train loss:4.469130 +step:1007 train loss:4.421818 +step:1008 train loss:4.385470 +step:1009 train loss:4.452493 +step:1010 train loss:4.416716 +step:1011 train loss:4.442437 +step:1012 train loss:4.386549 +step:1013 train loss:4.360003 +step:1014 train loss:4.359690 +step:1015 train loss:4.392371 +step:1016 train loss:4.402817 +step:1017 train loss:4.359879 +step:1018 train loss:4.410251 +step:1019 train loss:4.378129 +step:1020 train loss:4.362781 +step:1021 train loss:4.448296 +step:1022 train loss:4.353180 +step:1023 train loss:4.359365 +step:1024 train loss:4.442768 +step:1025 train loss:4.398127 +step:1026 train loss:4.337345 +step:1027 train loss:4.382086 +step:1028 train loss:4.385953 +step:1029 train loss:4.333335 +step:1030 train loss:4.412321 +step:1031 train loss:4.407040 +step:1032 train loss:4.363096 +step:1033 train loss:4.334151 +step:1034 train loss:4.396113 +step:1035 train loss:4.396891 +step:1036 train loss:4.313787 +step:1037 train loss:4.371358 +step:1038 train loss:4.389357 +step:1039 train loss:4.526902 +step:1040 train loss:4.362063 +step:1041 train loss:4.347486 +step:1042 train loss:4.370819 +step:1043 train loss:4.374059 +step:1044 train loss:4.358733 +step:1045 train loss:4.371106 +step:1046 train loss:4.315819 +step:1047 train loss:4.343061 +step:1048 train loss:4.337021 +step:1049 train loss:4.394061 +step:1050 train loss:4.353536 +step:1051 train loss:4.328613 +step:1052 train loss:4.430411 +step:1053 train loss:4.331268 +step:1054 train loss:4.328302 +step:1055 train loss:4.396296 +step:1056 train loss:4.337001 +step:1057 train loss:4.237292 +step:1058 train loss:4.340316 +step:1059 train loss:4.324782 +step:1060 train loss:4.318712 +step:1061 train loss:4.374872 +step:1062 train loss:4.331974 +step:1063 train loss:4.332783 +step:1064 train loss:4.320551 +step:1065 train loss:4.335743 +step:1066 train loss:4.302482 +step:1067 train loss:4.336158 +step:1068 train loss:4.299337 +step:1069 train loss:4.314038 +step:1070 train loss:4.317101 +step:1071 train loss:4.336622 +step:1072 train loss:4.358881 +step:1073 train loss:4.271779 +step:1074 train loss:4.286619 +step:1075 train loss:4.302772 +step:1076 train loss:4.368496 +step:1077 train loss:4.295316 +step:1078 train loss:4.341660 +step:1079 train loss:4.395019 +step:1080 train loss:4.260099 +step:1081 train loss:4.329540 +step:1082 train loss:4.330781 +step:1083 train loss:4.288512 +step:1084 train loss:4.265892 +step:1085 train loss:4.327993 +step:1086 train loss:4.318136 +step:1087 train loss:4.297996 +step:1088 train loss:4.300167 +step:1089 train loss:4.310173 +step:1090 train 
loss:4.254755 +step:1091 train loss:4.243653 +step:1092 train loss:4.354284 +step:1093 train loss:4.240912 +step:1094 train loss:4.300693 +step:1095 train loss:4.342440 +step:1096 train loss:4.275742 +step:1097 train loss:4.274773 +step:1098 train loss:4.243391 +step:1099 train loss:4.299225 +step:1100 train loss:4.340854 +step:1101 train loss:4.332874 +step:1102 train loss:4.346090 +step:1103 train loss:4.269345 +step:1104 train loss:4.296345 +step:1105 train loss:4.353612 +step:1106 train loss:4.288527 +step:1107 train loss:4.407747 +step:1108 train loss:4.353638 +step:1109 train loss:4.316069 +step:1110 train loss:4.270567 +step:1111 train loss:4.324885 +step:1112 train loss:4.240528 +step:1113 train loss:4.219111 +step:1114 train loss:4.206198 +step:1115 train loss:4.251064 +step:1116 train loss:4.313263 +step:1117 train loss:4.333035 +step:1118 train loss:4.360140 +step:1119 train loss:4.288705 +step:1120 train loss:4.303031 +step:1121 train loss:4.285603 +step:1122 train loss:4.271201 +step:1123 train loss:4.366662 +step:1124 train loss:4.250566 +step:1125 train loss:4.267615 +step:1126 train loss:4.230231 +step:1127 train loss:4.252408 +step:1128 train loss:4.253436 +step:1129 train loss:4.309676 +step:1130 train loss:4.226337 +step:1131 train loss:4.319715 +step:1132 train loss:4.265583 +step:1133 train loss:4.273716 +step:1134 train loss:4.247400 +step:1135 train loss:4.292379 +step:1136 train loss:4.309238 +step:1137 train loss:4.227288 +step:1138 train loss:4.297504 +step:1139 train loss:4.252993 +step:1140 train loss:4.329770 +step:1141 train loss:4.284334 +step:1142 train loss:4.222611 +step:1143 train loss:4.295213 +step:1144 train loss:4.319976 +step:1145 train loss:4.272458 +step:1146 train loss:4.220987 +step:1147 train loss:4.232602 +step:1148 train loss:4.261158 +step:1149 train loss:4.305818 +step:1150 train loss:4.320696 +step:1151 train loss:4.325139 +step:1152 train loss:4.229369 +step:1153 train loss:4.226024 +step:1154 train loss:4.214085 +step:1155 train loss:4.318553 +step:1156 train loss:4.218683 +step:1157 train loss:4.243762 +step:1158 train loss:4.306424 +step:1159 train loss:4.293995 +step:1160 train loss:4.221876 +step:1161 train loss:4.310763 +step:1162 train loss:4.250175 +step:1163 train loss:4.232493 +step:1164 train loss:4.141140 +step:1165 train loss:4.278299 +step:1166 train loss:4.207696 +step:1167 train loss:4.213284 +step:1168 train loss:4.270238 +step:1169 train loss:4.235732 +step:1170 train loss:4.238156 +step:1171 train loss:4.263319 +step:1172 train loss:4.229151 +step:1173 train loss:4.257027 +step:1174 train loss:4.196371 +step:1175 train loss:4.227852 +step:1176 train loss:4.342149 +step:1177 train loss:4.189841 +step:1178 train loss:4.246045 +step:1179 train loss:4.204505 +step:1180 train loss:4.232201 +step:1181 train loss:4.218600 +step:1182 train loss:4.276336 +step:1183 train loss:4.251432 +step:1184 train loss:4.190451 +step:1185 train loss:4.225034 +step:1186 train loss:4.210327 +step:1187 train loss:4.184040 +step:1188 train loss:4.216587 +step:1189 train loss:4.148624 +step:1190 train loss:4.206100 +step:1191 train loss:4.265668 +step:1192 train loss:4.220468 +step:1193 train loss:4.221123 +step:1194 train loss:4.336432 +step:1195 train loss:4.314583 +step:1196 train loss:4.206296 +step:1197 train loss:4.224635 +step:1198 train loss:4.206123 +step:1199 train loss:4.211253 +step:1200 train loss:4.267787 +step:1201 train loss:4.239323 +step:1202 train loss:4.175600 +step:1203 train loss:4.171529 +step:1204 train loss:4.210355 
+step:1205 train loss:4.218894 +step:1206 train loss:4.160726 +step:1207 train loss:4.245736 +step:1208 train loss:4.220271 +step:1209 train loss:4.145981 +step:1210 train loss:4.237731 +step:1211 train loss:4.191376 +step:1212 train loss:4.217888 +step:1213 train loss:4.150156 +step:1214 train loss:4.234032 +step:1215 train loss:4.203559 +step:1216 train loss:4.210738 +step:1217 train loss:4.161853 +step:1218 train loss:4.224686 +step:1219 train loss:4.161554 +step:1220 train loss:4.182501 +step:1221 train loss:4.206039 +step:1222 train loss:4.244093 +step:1223 train loss:4.217258 +step:1224 train loss:4.189844 +step:1225 train loss:4.234721 +step:1226 train loss:4.176935 +step:1227 train loss:4.191491 +step:1228 train loss:4.192083 +step:1229 train loss:4.162019 +step:1230 train loss:4.156413 +step:1231 train loss:4.211018 +step:1232 train loss:4.165206 +step:1233 train loss:4.162701 +step:1234 train loss:4.245066 +step:1235 train loss:4.221491 +step:1236 train loss:4.132219 +step:1237 train loss:4.228543 +step:1238 train loss:4.181077 +step:1239 train loss:4.221604 +step:1240 train loss:4.124651 +step:1241 train loss:4.157425 +step:1242 train loss:4.188373 +step:1243 train loss:4.134336 +step:1244 train loss:4.254223 +step:1245 train loss:4.264962 +step:1246 train loss:4.197289 +step:1247 train loss:4.173610 +step:1248 train loss:4.203959 +step:1249 train loss:4.135288 +step:1250 validation loss:4.138309 +step:1250 train loss:4.148711 +step:1251 train loss:4.218506 +step:1252 train loss:4.165134 +step:1253 train loss:4.121086 +step:1254 train loss:4.151594 +step:1255 train loss:4.149150 +step:1256 train loss:4.194346 +step:1257 train loss:4.172568 +step:1258 train loss:4.224110 +step:1259 train loss:4.206053 +step:1260 train loss:4.108967 +step:1261 train loss:4.346762 +step:1262 train loss:4.193545 +step:1263 train loss:4.152540 +step:1264 train loss:4.163702 +step:1265 train loss:4.221072 +step:1266 train loss:4.162288 +step:1267 train loss:4.170776 +step:1268 train loss:4.178853 +step:1269 train loss:4.173131 +step:1270 train loss:4.099470 +step:1271 train loss:4.107777 +step:1272 train loss:4.136720 +step:1273 train loss:4.193018 +step:1274 train loss:4.157099 +step:1275 train loss:4.185865 +step:1276 train loss:4.183153 +step:1277 train loss:4.190659 +step:1278 train loss:4.134278 +step:1279 train loss:4.142935 +step:1280 train loss:4.159108 +step:1281 train loss:4.215441 +step:1282 train loss:4.136079 +step:1283 train loss:4.215331 +step:1284 train loss:4.158730 +step:1285 train loss:4.205728 +step:1286 train loss:4.105252 +step:1287 train loss:4.142099 +step:1288 train loss:4.175261 +step:1289 train loss:4.231308 +step:1290 train loss:4.181672 +step:1291 train loss:4.146729 +step:1292 train loss:4.129602 +step:1293 train loss:4.116315 +step:1294 train loss:4.170321 +step:1295 train loss:4.151324 +step:1296 train loss:4.196518 +step:1297 train loss:4.154562 +step:1298 train loss:4.172714 +step:1299 train loss:4.208207 +step:1300 train loss:4.128937 +step:1301 train loss:4.172182 +step:1302 train loss:4.134145 +step:1303 train loss:4.172692 +step:1304 train loss:4.200296 +step:1305 train loss:4.176879 +step:1306 train loss:4.171357 +step:1307 train loss:4.154989 +step:1308 train loss:4.110833 +step:1309 train loss:4.124723 +step:1310 train loss:4.112133 +step:1311 train loss:4.119880 +step:1312 train loss:4.191519 +step:1313 train loss:4.104966 +step:1314 train loss:4.113654 +step:1315 train loss:4.162349 +step:1316 train loss:4.132757 +step:1317 train loss:4.029843 +step:1318 
train loss:4.183202 +step:1319 train loss:4.222414 +step:1320 train loss:4.134491 +step:1321 train loss:4.112470 +step:1322 train loss:4.215895 +step:1323 train loss:4.164695 +step:1324 train loss:4.260005 +step:1325 train loss:4.148853 +step:1326 train loss:4.175709 +step:1327 train loss:4.188601 +step:1328 train loss:4.095259 +step:1329 train loss:4.125685 +step:1330 train loss:4.148439 +step:1331 train loss:4.015945 +step:1332 train loss:4.195373 +step:1333 train loss:4.153389 +step:1334 train loss:4.158575 +step:1335 train loss:4.176009 +step:1336 train loss:4.178559 +step:1337 train loss:4.153936 +step:1338 train loss:4.132069 +step:1339 train loss:4.202520 +step:1340 train loss:4.169984 +step:1341 train loss:4.148576 +step:1342 train loss:4.120460 +step:1343 train loss:4.111378 +step:1344 train loss:4.177046 +step:1345 train loss:4.135009 +step:1346 train loss:4.215837 +step:1347 train loss:4.139186 +step:1348 train loss:4.103415 +step:1349 train loss:4.051397 +step:1350 train loss:4.085147 +step:1351 train loss:4.154608 +step:1352 train loss:4.125343 +step:1353 train loss:4.104609 +step:1354 train loss:4.109088 +step:1355 train loss:4.180517 +step:1356 train loss:4.090792 +step:1357 train loss:4.117637 +step:1358 train loss:4.113803 +step:1359 train loss:4.114875 +step:1360 train loss:4.143884 +step:1361 train loss:4.261256 +step:1362 train loss:4.178330 +step:1363 train loss:4.065928 +step:1364 train loss:4.086770 +step:1365 train loss:4.082452 +step:1366 train loss:4.116222 +step:1367 train loss:4.054079 +step:1368 train loss:4.085206 +step:1369 train loss:4.122373 +step:1370 train loss:4.142725 +step:1371 train loss:4.098962 +step:1372 train loss:4.132784 +step:1373 train loss:4.171601 +step:1374 train loss:4.176645 +step:1375 train loss:4.128682 +step:1376 train loss:4.148669 +step:1377 train loss:4.142457 +step:1378 train loss:4.130969 +step:1379 train loss:4.100523 +step:1380 train loss:4.167093 +step:1381 train loss:4.117166 +step:1382 train loss:4.094555 +step:1383 train loss:4.084313 +step:1384 train loss:4.160462 +step:1385 train loss:4.060576 +step:1386 train loss:4.125796 +step:1387 train loss:4.122792 +step:1388 train loss:4.094611 +step:1389 train loss:4.066780 +step:1390 train loss:4.104608 +step:1391 train loss:4.136714 +step:1392 train loss:4.114636 +step:1393 train loss:4.164607 +step:1394 train loss:4.096848 +step:1395 train loss:4.134030 +step:1396 train loss:4.122154 +step:1397 train loss:4.138559 +step:1398 train loss:4.145977 +step:1399 train loss:4.113922 +step:1400 train loss:4.092607 +step:1401 train loss:4.085423 +step:1402 train loss:4.090952 +step:1403 train loss:4.052379 +step:1404 train loss:4.112261 +step:1405 train loss:4.071376 +step:1406 train loss:4.099643 +step:1407 train loss:4.094693 +step:1408 train loss:4.078199 +step:1409 train loss:4.064548 +step:1410 train loss:4.083912 +step:1411 train loss:4.116840 +step:1412 train loss:4.173556 +step:1413 train loss:4.093416 +step:1414 train loss:4.122169 +step:1415 train loss:4.082494 +step:1416 train loss:4.137294 +step:1417 train loss:4.108482 +step:1418 train loss:4.046920 +step:1419 train loss:4.053785 +step:1420 train loss:4.078864 +step:1421 train loss:4.118003 +step:1422 train loss:4.097335 +step:1423 train loss:4.192664 +step:1424 train loss:4.090555 +step:1425 train loss:4.050743 +step:1426 train loss:4.078094 +step:1427 train loss:4.065421 +step:1428 train loss:4.050218 +step:1429 train loss:4.075805 +step:1430 train loss:4.080985 +step:1431 train loss:4.104030 +step:1432 train loss:4.090903 
+step:1433 train loss:4.073149 +step:1434 train loss:4.045002 +step:1435 train loss:4.039337 +step:1436 train loss:4.107179 +step:1437 train loss:4.044408 +step:1438 train loss:4.046515 +step:1439 train loss:4.029242 +step:1440 train loss:4.069470 +step:1441 train loss:4.140511 +step:1442 train loss:4.102930 +step:1443 train loss:4.030220 +step:1444 train loss:4.046136 +step:1445 train loss:4.046246 +step:1446 train loss:4.076922 +step:1447 train loss:4.088814 +step:1448 train loss:4.052129 +step:1449 train loss:4.078507 +step:1450 train loss:4.095428 +step:1451 train loss:4.018402 +step:1452 train loss:4.071778 +step:1453 train loss:4.071020 +step:1454 train loss:4.065683 +step:1455 train loss:3.998948 +step:1456 train loss:4.079548 +step:1457 train loss:4.012501 +step:1458 train loss:4.152484 +step:1459 train loss:4.072272 +step:1460 train loss:4.043324 +step:1461 train loss:4.093380 +step:1462 train loss:4.101785 +step:1463 train loss:4.066521 +step:1464 train loss:4.041323 +step:1465 train loss:4.045002 +step:1466 train loss:4.008721 +step:1467 train loss:4.145987 +step:1468 train loss:4.027834 +step:1469 train loss:4.106512 +step:1470 train loss:4.038369 +step:1471 train loss:4.040580 +step:1472 train loss:4.040250 +step:1473 train loss:4.040004 +step:1474 train loss:3.982271 +step:1475 train loss:4.044616 +step:1476 train loss:4.123530 +step:1477 train loss:4.070415 +step:1478 train loss:4.006213 +step:1479 train loss:4.041853 +step:1480 train loss:4.038951 +step:1481 train loss:4.010234 +step:1482 train loss:4.075546 +step:1483 train loss:4.062396 +step:1484 train loss:4.095315 +step:1485 train loss:4.106906 +step:1486 train loss:4.044204 +step:1487 train loss:4.033840 +step:1488 train loss:4.037025 +step:1489 train loss:4.028836 +step:1490 train loss:4.087801 +step:1491 train loss:4.080202 +step:1492 train loss:4.071474 +step:1493 train loss:4.019247 +step:1494 train loss:4.053988 +step:1495 train loss:4.034673 +step:1496 train loss:4.007609 +step:1497 train loss:4.082306 +step:1498 train loss:3.987037 +step:1499 train loss:4.031409 +step:1500 validation loss:4.004510 total_sharp:1.4027e-02 L1_sharp:1.3139e-01 L2_sharp:1.0108e-01 L3_sharp:1.3894e-01 L4_sharp:7.0120e-02 L5_sharp:5.8845e-02 L6_sharp:5.5196e-02 L7_sharp:5.9833e-02 L8_sharp:5.0727e-02 L9_sharp:4.2972e-02 L10_sharp:3.3593e-02 L11_sharp:2.7279e-02 L12_sharp:3.4236e-02 total_fnorm:1.3128e+00 total_l1_linf:7.9063e+03 total_spectral:1.3128e+00 L1_fnorm:6.1874e-02 L2_fnorm:5.8200e-02 L3_fnorm:5.5644e-02 L4_fnorm:5.8414e-02 L5_fnorm:5.9251e-02 L6_fnorm:6.0659e-02 L7_fnorm:6.0899e-02 L8_fnorm:6.0948e-02 L9_fnorm:6.1074e-02 L10_fnorm:6.1055e-02 L11_fnorm:6.0883e-02 L12_fnorm:6.1146e-02 L1_l1linf:2.1095e-01 L2_l1linf:2.3839e-01 L3_l1linf:2.4841e-01 L4_l1linf:2.5210e-01 L5_l1linf:2.5003e-01 L6_l1linf:2.4538e-01 L7_l1linf:2.4572e-01 L8_l1linf:2.5561e-01 L9_l1linf:2.7442e-01 L10_l1linf:2.6646e-01 L11_l1linf:2.6516e-01 L12_l1linf:2.6582e-01 L1_spectral:4.7702e-03 L2_spectral:5.3834e-03 L3_spectral:5.6218e-03 L4_spectral:5.6888e-03 L5_spectral:5.6235e-03 L6_spectral:5.5088e-03 L7_spectral:5.5193e-03 L8_spectral:5.7302e-03 L9_spectral:6.1543e-03 L10_spectral:5.9408e-03 L11_spectral:5.9901e-03 L12_spectral:5.9484e-03 ip_v_neg_g:1.0642e-02 cos_v_neg_g:1.6301e-03 v_norm:1.3128e+00 g_norm:4.9731e+00 hv_norm:2.3214e+00 cos_v_hv:7.9324e-03 hg_norm:8.0506e+02 cos_g_hg:5.5509e-01 v_par:7.7195e-05 v_perp:1.3128e+00 L1_cos_v_neg_g:1.1196e-02 L1_v_norm:6.1874e-02 L2_cos_v_neg_g:1.1923e-02 L2_v_norm:5.8200e-02 L3_cos_v_neg_g:1.4666e-02 
L3_v_norm:5.5644e-02 L4_cos_v_neg_g:1.1110e-02 L4_v_norm:5.8414e-02 L5_cos_v_neg_g:1.1046e-02 L5_v_norm:5.9251e-02 L6_cos_v_neg_g:9.6895e-03 L6_v_norm:6.0659e-02 L7_cos_v_neg_g:9.9101e-03 L7_v_norm:6.0899e-02 L8_cos_v_neg_g:9.6679e-03 L8_v_norm:6.0948e-02 L9_cos_v_neg_g:8.6799e-03 L9_v_norm:6.1074e-02 L10_cos_v_neg_g:7.9489e-03 L10_v_norm:6.1055e-02 L11_cos_v_neg_g:7.1823e-03 L11_v_norm:6.0883e-02 L12_cos_v_neg_g:5.1223e-03 L12_v_norm:6.1146e-02 +step:1500 train loss:4.026774 +step:1501 train loss:4.047540 +step:1502 train loss:3.986847 +step:1503 train loss:4.040436 +step:1504 train loss:4.007198 +step:1505 train loss:3.980619 +step:1506 train loss:3.970078 +step:1507 train loss:3.989510 +step:1508 train loss:4.002489 +step:1509 train loss:4.057659 +step:1510 train loss:3.994427 +step:1511 train loss:4.028352 +step:1512 train loss:4.003086 +step:1513 train loss:4.073342 +step:1514 train loss:4.026403 +step:1515 train loss:4.084905 +step:1516 train loss:4.011138 +step:1517 train loss:4.019612 +step:1518 train loss:4.102088 +step:1519 train loss:4.062143 +step:1520 train loss:4.101233 +step:1521 train loss:4.004742 +step:1522 train loss:4.063724 +step:1523 train loss:4.064010 +step:1524 train loss:3.986914 +step:1525 train loss:4.065347 +step:1526 train loss:3.980177 +step:1527 train loss:4.037482 +step:1528 train loss:4.085413 +step:1529 train loss:4.043037 +step:1530 train loss:4.084353 +step:1531 train loss:4.006011 +step:1532 train loss:4.077488 +step:1533 train loss:4.050497 +step:1534 train loss:3.996964 +step:1535 train loss:4.053300 +step:1536 train loss:4.074745 +step:1537 train loss:4.026469 +step:1538 train loss:4.029634 +step:1539 train loss:4.027228 +step:1540 train loss:4.044196 +step:1541 train loss:4.007347 +step:1542 train loss:4.096511 +step:1543 train loss:4.122968 +step:1544 train loss:3.993585 +step:1545 train loss:3.975013 +step:1546 train loss:4.013661 +step:1547 train loss:4.003178 +step:1548 train loss:4.043114 +step:1549 train loss:3.969849 +step:1550 train loss:4.087053 +step:1551 train loss:4.018682 +step:1552 train loss:4.047814 +step:1553 train loss:4.061112 +step:1554 train loss:4.066776 +step:1555 train loss:4.024323 +step:1556 train loss:4.003618 +step:1557 train loss:4.015874 +step:1558 train loss:4.038901 +step:1559 train loss:4.001595 +step:1560 train loss:4.084629 +step:1561 train loss:4.056811 +step:1562 train loss:3.946290 +step:1563 train loss:3.925546 +step:1564 train loss:4.056919 +step:1565 train loss:4.033320 +step:1566 train loss:4.051525 +step:1567 train loss:4.052964 +step:1568 train loss:4.005172 +step:1569 train loss:3.998984 +step:1570 train loss:4.015461 +step:1571 train loss:3.992955 +step:1572 train loss:3.996124 +step:1573 train loss:4.044041 +step:1574 train loss:3.997840 +step:1575 train loss:4.018630 +step:1576 train loss:3.976521 +step:1577 train loss:4.004481 +step:1578 train loss:3.985932 +step:1579 train loss:4.063420 +step:1580 train loss:4.017048 +step:1581 train loss:4.053840 +step:1582 train loss:4.050143 +step:1583 train loss:4.025496 +step:1584 train loss:3.945463 +step:1585 train loss:4.032364 +step:1586 train loss:3.998485 +step:1587 train loss:4.011568 +step:1588 train loss:3.994949 +step:1589 train loss:4.042697 +step:1590 train loss:3.951742 +step:1591 train loss:4.010198 +step:1592 train loss:3.959800 +step:1593 train loss:3.998347 +step:1594 train loss:4.000704 +step:1595 train loss:3.994973 +step:1596 train loss:3.999003 +step:1597 train loss:3.928113 +step:1598 train loss:4.031267 +step:1599 train loss:4.040830 
+step:1600 train loss:3.922002 +step:1601 train loss:3.996614 +step:1602 train loss:4.055233 +step:1603 train loss:4.049901 +step:1604 train loss:3.974446 +step:1605 train loss:4.026852 +step:1606 train loss:4.071045 +step:1607 train loss:3.956493 +step:1608 train loss:3.988286 +step:1609 train loss:4.006001 +step:1610 train loss:4.067942 +step:1611 train loss:3.989249 +step:1612 train loss:3.914759 +step:1613 train loss:3.985535 +step:1614 train loss:4.095113 +step:1615 train loss:4.013549 +step:1616 train loss:4.020816 +step:1617 train loss:4.003371 +step:1618 train loss:4.007841 +step:1619 train loss:4.178891 +step:1620 train loss:3.970381 +step:1621 train loss:4.028450 +step:1622 train loss:3.948677 +step:1623 train loss:4.012825 +step:1624 train loss:3.984036 +step:1625 train loss:4.056520 +step:1626 train loss:3.948999 +step:1627 train loss:3.959684 +step:1628 train loss:3.977421 +step:1629 train loss:4.011126 +step:1630 train loss:4.030115 +step:1631 train loss:3.975841 +step:1632 train loss:3.954494 +step:1633 train loss:3.968812 +step:1634 train loss:4.023346 +step:1635 train loss:3.963883 +step:1636 train loss:3.951885 +step:1637 train loss:4.028274 +step:1638 train loss:4.127174 +step:1639 train loss:3.939914 +step:1640 train loss:4.016438 +step:1641 train loss:3.976705 +step:1642 train loss:4.071502 +step:1643 train loss:3.970439 +step:1644 train loss:3.985753 +step:1645 train loss:3.959340 +step:1646 train loss:4.037534 +step:1647 train loss:3.933679 +step:1648 train loss:3.996173 +step:1649 train loss:3.965228 +step:1650 train loss:3.976911 +step:1651 train loss:3.993176 +step:1652 train loss:4.009501 +step:1653 train loss:4.017736 +step:1654 train loss:4.009775 +step:1655 train loss:3.986662 +step:1656 train loss:3.980618 +step:1657 train loss:3.979221 +step:1658 train loss:3.956312 +step:1659 train loss:4.030299 +step:1660 train loss:3.929362 +step:1661 train loss:4.042742 +step:1662 train loss:3.981312 +step:1663 train loss:3.972087 +step:1664 train loss:4.069476 +step:1665 train loss:3.987940 +step:1666 train loss:3.999727 +step:1667 train loss:4.017306 +step:1668 train loss:3.992149 +step:1669 train loss:3.955491 +step:1670 train loss:4.006338 +step:1671 train loss:4.002541 +step:1672 train loss:4.000171 +step:1673 train loss:3.959406 +step:1674 train loss:3.956161 +step:1675 train loss:4.003068 +step:1676 train loss:4.259171 +step:1677 train loss:4.009681 +step:1678 train loss:3.923658 +step:1679 train loss:4.045739 +step:1680 train loss:3.969958 +step:1681 train loss:4.029109 +step:1682 train loss:3.983499 +step:1683 train loss:3.977749 +step:1684 train loss:3.930413 +step:1685 train loss:3.992837 +step:1686 train loss:3.977293 +step:1687 train loss:3.987639 +step:1688 train loss:3.972322 +step:1689 train loss:3.957570 +step:1690 train loss:3.985365 +step:1691 train loss:3.966701 +step:1692 train loss:3.986540 +step:1693 train loss:3.951812 +step:1694 train loss:3.909646 +step:1695 train loss:3.934529 +step:1696 train loss:3.944185 +step:1697 train loss:3.987848 +step:1698 train loss:3.976631 +step:1699 train loss:3.942724 +step:1700 train loss:4.021126 +step:1701 train loss:3.961231 +step:1702 train loss:3.952249 +step:1703 train loss:3.977827 +step:1704 train loss:3.981210 +step:1705 train loss:3.995044 +step:1706 train loss:4.003284 +step:1707 train loss:4.001244 +step:1708 train loss:3.927253 +step:1709 train loss:4.024480 +step:1710 train loss:3.945020 +step:1711 train loss:3.946326 +step:1712 train loss:3.970473 +step:1713 train loss:3.938473 +step:1714 train 
loss:4.301843 +step:1715 train loss:3.953503 +step:1716 train loss:3.939191 +step:1717 train loss:3.942761 +step:1718 train loss:4.016756 +step:1719 train loss:3.933940 +step:1720 train loss:4.014353 +step:1721 train loss:3.955271 +step:1722 train loss:3.926948 +step:1723 train loss:4.021117 +step:1724 train loss:3.974430 +step:1725 train loss:3.970826 +step:1726 train loss:3.970475 +step:1727 train loss:4.004501 +step:1728 train loss:4.011961 +step:1729 train loss:3.931345 +step:1730 train loss:4.007290 +step:1731 train loss:3.939243 +step:1732 train loss:3.951498 +step:1733 train loss:3.936853 +step:1734 train loss:3.987840 +step:1735 train loss:4.047548 +step:1736 train loss:3.957132 +step:1737 train loss:3.989751 +step:1738 train loss:3.948069 +step:1739 train loss:4.010407 +step:1740 train loss:4.004845 +step:1741 train loss:4.056272 +step:1742 train loss:4.043682 +step:1743 train loss:3.935798 +step:1744 train loss:3.949999 +step:1745 train loss:3.933909 +step:1746 train loss:3.920703 +step:1747 train loss:3.956496 +step:1748 train loss:3.892621 +step:1749 train loss:3.936380 +step:1750 validation loss:3.917227 +step:1750 train loss:3.972945 +step:1751 train loss:3.989388 +step:1752 train loss:3.955774 +step:1753 train loss:3.977741 +step:1754 train loss:3.970466 +step:1755 train loss:3.968521 +step:1756 train loss:3.993589 +step:1757 train loss:3.996790 +step:1758 train loss:3.915887 +step:1759 train loss:4.006521 +step:1760 train loss:3.958233 +step:1761 train loss:3.934055 +step:1762 train loss:3.932410 +step:1763 train loss:3.936315 +step:1764 train loss:4.224730 +step:1765 train loss:3.939090 +step:1766 train loss:4.030157 +step:1767 train loss:3.944448 +step:1768 train loss:3.922012 +step:1769 train loss:3.943163 +step:1770 train loss:3.957188 +step:1771 train loss:3.932266 +step:1772 train loss:4.040686 +step:1773 train loss:3.965480 +step:1774 train loss:3.970243 +step:1775 train loss:4.081309 +step:1776 train loss:3.957278 +step:1777 train loss:3.948423 +step:1778 train loss:4.002064 +step:1779 train loss:3.935920 +step:1780 train loss:3.991672 +step:1781 train loss:3.993248 +step:1782 train loss:4.020843 +step:1783 train loss:3.953657 +step:1784 train loss:4.039804 +step:1785 train loss:3.947699 +step:1786 train loss:3.944838 +step:1787 train loss:3.944535 +step:1788 train loss:3.965068 +step:1789 train loss:3.918777 +step:1790 train loss:3.933506 +step:1791 train loss:4.008357 +step:1792 train loss:4.011137 +step:1793 train loss:3.929495 +step:1794 train loss:3.974113 +step:1795 train loss:3.927234 +step:1796 train loss:3.911832 +step:1797 train loss:3.973147 +step:1798 train loss:3.913151 +step:1799 train loss:3.970238 +step:1800 train loss:3.995150 +step:1801 train loss:3.986332 +step:1802 train loss:3.992711 +step:1803 train loss:3.985004 +step:1804 train loss:3.981219 +step:1805 train loss:3.968359 +step:1806 train loss:3.981787 +step:1807 train loss:3.912153 +step:1808 train loss:3.974420 +step:1809 train loss:3.957255 +step:1810 train loss:3.952056 +step:1811 train loss:3.966304 +step:1812 train loss:3.950156 +step:1813 train loss:3.962516 +step:1814 train loss:4.026257 +step:1815 train loss:3.966348 +step:1816 train loss:3.919564 +step:1817 train loss:3.909760 +step:1818 train loss:3.968856 +step:1819 train loss:3.937358 +step:1820 train loss:3.975505 +step:1821 train loss:3.939491 +step:1822 train loss:3.917109 +step:1823 train loss:3.910704 +step:1824 train loss:3.988769 +step:1825 train loss:3.900133 +step:1826 train loss:3.943572 +step:1827 train loss:3.910205 
+step:1828 train loss:3.958292 +step:1829 train loss:3.922763 +step:1830 train loss:4.116951 +step:1831 train loss:3.878261 +step:1832 train loss:3.922374 +step:1833 train loss:3.973845 +step:1834 train loss:3.924410 +step:1835 train loss:3.931918 +step:1836 train loss:3.970793 +step:1837 train loss:3.895943 +step:1838 train loss:3.992857 +step:1839 train loss:3.972351 +step:1840 train loss:3.943758 +step:1841 train loss:3.967408 +step:1842 train loss:3.943127 +step:1843 train loss:3.889196 +step:1844 train loss:3.954903 +step:1845 train loss:3.922694 +step:1846 train loss:3.976753 +step:1847 train loss:4.028935 +step:1848 train loss:3.823753 +step:1849 train loss:3.921454 +step:1850 train loss:3.896163 +step:1851 train loss:3.939984 +step:1852 train loss:3.920123 +step:1853 train loss:3.979113 +step:1854 train loss:3.941432 +step:1855 train loss:3.927355 +step:1856 train loss:3.930462 +step:1857 train loss:3.933525 +step:1858 train loss:3.981683 +step:1859 train loss:3.928561 +step:1860 train loss:3.900917 +step:1861 train loss:3.918832 +step:1862 train loss:3.959711 +step:1863 train loss:3.999069 +step:1864 train loss:3.896356 +step:1865 train loss:3.920227 +step:1866 train loss:3.923048 +step:1867 train loss:3.953849 +step:1868 train loss:4.001553 +step:1869 train loss:3.920217 +step:1870 train loss:3.948930 +step:1871 train loss:3.889577 +step:1872 train loss:3.954425 +step:1873 train loss:4.016591 +step:1874 train loss:3.880129 +step:1875 train loss:3.957029 +step:1876 train loss:3.917491 +step:1877 train loss:3.960084 +step:1878 train loss:3.884009 +step:1879 train loss:3.943994 +step:1880 train loss:4.021428 +step:1881 train loss:3.948150 +step:1882 train loss:3.966237 +step:1883 train loss:3.986404 +step:1884 train loss:3.995996 +step:1885 train loss:3.956700 +step:1886 train loss:3.884831 +step:1887 train loss:3.900661 +step:1888 train loss:3.904523 +step:1889 train loss:3.916778 +step:1890 train loss:3.925078 +step:1891 train loss:3.859129 +step:1892 train loss:3.954617 +step:1893 train loss:3.876281 +step:1894 train loss:3.896093 +step:1895 train loss:3.932510 +step:1896 train loss:3.980631 +step:1897 train loss:3.877705 +step:1898 train loss:3.925488 +step:1899 train loss:3.939711 +step:1900 train loss:3.892856 +step:1901 train loss:3.967830 +step:1902 train loss:3.961741 +step:1903 train loss:3.901798 +step:1904 train loss:3.889272 +step:1905 train loss:3.890431 +step:1906 train loss:3.944736 +step:1907 train loss:3.893351 +step:1908 train loss:3.906148 +step:1909 train loss:4.002771 +step:1910 train loss:3.891302 +step:1911 train loss:3.897786 +step:1912 train loss:3.949681 +step:1913 train loss:3.885882 +step:1914 train loss:3.922395 +step:1915 train loss:3.888782 +step:1916 train loss:3.937635 +step:1917 train loss:3.922202 +step:1918 train loss:3.833019 +step:1919 train loss:3.982559 +step:1920 train loss:4.090978 +step:1921 train loss:3.870444 +step:1922 train loss:3.848582 +step:1923 train loss:3.947007 +step:1924 train loss:3.985254 +step:1925 train loss:3.929282 +step:1926 train loss:3.867085 +step:1927 train loss:3.950561 +step:1928 train loss:3.864264 +step:1929 train loss:3.894155 +step:1930 train loss:3.963544 +step:1931 train loss:3.875033 +step:1932 train loss:3.925712 +step:1933 train loss:3.921921 +step:1934 train loss:3.994844 +step:1935 train loss:3.945655 +step:1936 train loss:3.913700 +step:1937 train loss:3.852055 +step:1938 train loss:4.217961 +step:1939 train loss:3.961052 +step:1940 train loss:3.941979 +step:1941 train loss:3.946663 +step:1942 train 
loss:3.940558 +step:1943 train loss:3.929840 +step:1944 train loss:3.893027 +step:1945 train loss:3.894693 +step:1946 train loss:3.916672 +step:1947 train loss:3.941307 +step:1948 train loss:3.849071 +step:1949 train loss:3.958622 +step:1950 train loss:3.898059 +step:1951 train loss:3.922233 +step:1952 train loss:3.947771 +step:1953 train loss:3.879155 +step:1954 train loss:3.913000 +step:1955 train loss:3.866673 +step:1956 train loss:3.948443 +step:1957 train loss:3.975316 +step:1958 train loss:3.989105 +step:1959 train loss:3.860148 +step:1960 train loss:3.899056 +step:1961 train loss:3.930892 +step:1962 train loss:3.924295 +step:1963 train loss:3.900929 +step:1964 train loss:3.937133 +step:1965 train loss:3.974020 +step:1966 train loss:3.879593 +step:1967 train loss:3.942323 +step:1968 train loss:3.878816 +step:1969 train loss:3.896878 +step:1970 train loss:3.961199 +step:1971 train loss:3.862322 +step:1972 train loss:3.970695 +step:1973 train loss:3.865664 +step:1974 train loss:3.912011 +step:1975 train loss:3.874977 +step:1976 train loss:3.894924 +step:1977 train loss:3.939225 +step:1978 train loss:3.882686 +step:1979 train loss:3.858817 +step:1980 train loss:3.900766 +step:1981 train loss:3.878387 +step:1982 train loss:3.963472 +step:1983 train loss:3.905610 +step:1984 train loss:3.945860 +step:1985 train loss:3.932906 +step:1986 train loss:3.921957 +step:1987 train loss:3.878144 +step:1988 train loss:3.906781 +step:1989 train loss:4.041561 +step:1990 train loss:3.882845 +step:1991 train loss:3.871902 +step:1992 train loss:3.884735 +step:1993 train loss:3.917553 +step:1994 train loss:3.907861 +step:1995 train loss:3.862149 +step:1996 train loss:3.914776 +step:1997 train loss:3.917276 +step:1998 train loss:3.870723 +step:1999 train loss:3.982869 +step:2000 validation loss:3.849992 total_sharp:1.0205e-02 L1_sharp:3.2359e-01 L2_sharp:2.1490e-01 L3_sharp:1.1013e-01 L4_sharp:3.7547e-02 L5_sharp:3.7221e-02 L6_sharp:3.9515e-02 L7_sharp:4.2719e-02 L8_sharp:3.9363e-02 L9_sharp:3.2401e-02 L10_sharp:2.4348e-02 L11_sharp:2.6421e-02 L12_sharp:6.9537e-02 total_fnorm:1.3691e+00 total_l1_linf:8.2127e+03 total_spectral:1.3691e+00 L1_fnorm:6.1538e-02 L2_fnorm:5.8080e-02 L3_fnorm:5.5977e-02 L4_fnorm:5.8940e-02 L5_fnorm:5.9862e-02 L6_fnorm:6.0829e-02 L7_fnorm:6.0853e-02 L8_fnorm:6.0881e-02 L9_fnorm:6.0964e-02 L10_fnorm:6.0938e-02 L11_fnorm:6.0859e-02 L12_fnorm:6.1218e-02 L1_l1linf:2.1157e-01 L2_l1linf:2.4055e-01 L3_l1linf:2.4530e-01 L4_l1linf:2.5448e-01 L5_l1linf:2.5037e-01 L6_l1linf:2.4410e-01 L7_l1linf:2.3272e-01 L8_l1linf:2.2543e-01 L9_l1linf:2.6040e-01 L10_l1linf:2.6316e-01 L11_l1linf:2.7094e-01 L12_l1linf:3.0886e-01 L1_spectral:4.8125e-03 L2_spectral:5.4535e-03 L3_spectral:5.5837e-03 L4_spectral:5.8032e-03 L5_spectral:5.6623e-03 L6_spectral:5.5035e-03 L7_spectral:5.3061e-03 L8_spectral:5.1149e-03 L9_spectral:5.9012e-03 L10_spectral:5.9186e-03 L11_spectral:6.1268e-03 L12_spectral:6.7774e-03 ip_v_neg_g:7.4890e-03 cos_v_neg_g:1.0792e-03 v_norm:1.3691e+00 g_norm:5.0682e+00 hv_norm:2.2610e+00 cos_v_hv:6.1800e-03 hg_norm:8.4983e+02 cos_g_hg:5.6271e-01 v_par:4.7860e-05 v_perp:1.3691e+00 L1_cos_v_neg_g:1.1916e-02 L1_v_norm:6.1538e-02 L2_cos_v_neg_g:1.3595e-02 L2_v_norm:5.8080e-02 L3_cos_v_neg_g:9.2952e-03 L3_v_norm:5.5977e-02 L4_cos_v_neg_g:6.3634e-03 L4_v_norm:5.8940e-02 L5_cos_v_neg_g:5.6933e-03 L5_v_norm:5.9862e-02 L6_cos_v_neg_g:5.5432e-03 L6_v_norm:6.0829e-02 L7_cos_v_neg_g:5.3168e-03 L7_v_norm:6.0853e-02 L8_cos_v_neg_g:5.8302e-03 L8_v_norm:6.0881e-02 L9_cos_v_neg_g:4.8421e-03 L9_v_norm:6.0964e-02 
L10_cos_v_neg_g:4.9726e-03 L10_v_norm:6.0938e-02 L11_cos_v_neg_g:5.3773e-03 L11_v_norm:6.0859e-02 L12_cos_v_neg_g:6.0567e-03 L12_v_norm:6.1218e-02 +step:2000 train loss:3.950901 +step:2001 train loss:3.872255 +step:2002 train loss:3.972224 +step:2003 train loss:4.018670 +step:2004 train loss:3.889173 +step:2005 train loss:3.988472 +step:2006 train loss:3.875732 +step:2007 train loss:3.953402 +step:2008 train loss:3.894510 +step:2009 train loss:3.896103 +step:2010 train loss:4.021308 +step:2011 train loss:3.874524 +step:2012 train loss:3.900239 +step:2013 train loss:3.915410 +step:2014 train loss:3.804325 +step:2015 train loss:3.930903 +step:2016 train loss:3.907323 +step:2017 train loss:3.910772 +step:2018 train loss:3.877581 +step:2019 train loss:3.908344 +step:2020 train loss:3.915873 +step:2021 train loss:3.879274 +step:2022 train loss:3.922855 +step:2023 train loss:3.900003 +step:2024 train loss:3.951405 +step:2025 train loss:3.893008 +step:2026 train loss:3.873079 +step:2027 train loss:3.901573 +step:2028 train loss:3.832911 +step:2029 train loss:3.861743 +step:2030 train loss:3.865612 +step:2031 train loss:3.828225 +step:2032 train loss:3.880261 +step:2033 train loss:3.877180 +step:2034 train loss:3.871927 +step:2035 train loss:3.914412 +step:2036 train loss:3.905314 +step:2037 train loss:3.891135 +step:2038 train loss:3.888813 +step:2039 train loss:3.879045 +step:2040 train loss:3.909600 +step:2041 train loss:3.912942 +step:2042 train loss:3.842488 +step:2043 train loss:3.999058 +step:2044 train loss:3.864179 +step:2045 train loss:3.886055 +step:2046 train loss:3.893719 +step:2047 train loss:3.868577 +step:2048 train loss:3.912674 +step:2049 train loss:3.867178 +step:2050 train loss:3.892741 +step:2051 train loss:3.857067 +step:2052 train loss:3.904988 +step:2053 train loss:3.908760 +step:2054 train loss:3.874979 +step:2055 train loss:3.875396 +step:2056 train loss:3.919985 +step:2057 train loss:3.926508 +step:2058 train loss:3.893280 +step:2059 train loss:3.971604 +step:2060 train loss:3.920045 +step:2061 train loss:3.870954 +step:2062 train loss:3.898305 +step:2063 train loss:3.801697 +step:2064 train loss:3.920163 +step:2065 train loss:3.929510 +step:2066 train loss:3.788187 +step:2067 train loss:3.834267 +step:2068 train loss:3.941056 +step:2069 train loss:3.876435 +step:2070 train loss:3.880303 +step:2071 train loss:3.921848 +step:2072 train loss:3.849104 +step:2073 train loss:3.902866 +step:2074 train loss:3.880725 +step:2075 train loss:3.963053 +step:2076 train loss:3.908911 +step:2077 train loss:3.920828 +step:2078 train loss:3.875386 +step:2079 train loss:4.027236 +step:2080 train loss:3.845548 +step:2081 train loss:3.954906 +step:2082 train loss:3.884345 +step:2083 train loss:3.872587 +step:2084 train loss:3.850883 +step:2085 train loss:3.895197 +step:2086 train loss:3.909703 +step:2087 train loss:3.947204 +step:2088 train loss:3.814096 +step:2089 train loss:3.843781 +step:2090 train loss:3.881813 +step:2091 train loss:3.896671 +step:2092 train loss:3.878456 +step:2093 train loss:3.866872 +step:2094 train loss:3.904554 +step:2095 train loss:3.850183 +step:2096 train loss:3.840359 +step:2097 train loss:3.873759 +step:2098 train loss:3.876130 +step:2099 train loss:3.854633 +step:2100 train loss:3.921238 +step:2101 train loss:3.911879 +step:2102 train loss:3.878636 +step:2103 train loss:3.896067 +step:2104 train loss:3.874154 +step:2105 train loss:3.879457 +step:2106 train loss:3.873885 +step:2107 train loss:3.941187 +step:2108 train loss:3.862986 +step:2109 train 
loss:3.821871 +step:2110 train loss:3.919572 +step:2111 train loss:3.864804 +step:2112 train loss:3.924729 +step:2113 train loss:3.861926 +step:2114 train loss:3.867796 +step:2115 train loss:3.917756 +step:2116 train loss:3.852149 +step:2117 train loss:3.867707 +step:2118 train loss:3.862971 +step:2119 train loss:3.797420 +step:2120 train loss:3.883986 +step:2121 train loss:3.871824 +step:2122 train loss:3.882338 +step:2123 train loss:3.937481 +step:2124 train loss:3.939641 +step:2125 train loss:3.846644 +step:2126 train loss:3.853053 +step:2127 train loss:3.843230 +step:2128 train loss:3.839405 +step:2129 train loss:3.864495 +step:2130 train loss:3.869501 +step:2131 train loss:3.893178 +step:2132 train loss:3.822383 +step:2133 train loss:3.930536 +step:2134 train loss:3.882360 +step:2135 train loss:3.841688 +step:2136 train loss:3.933167 +step:2137 train loss:3.897557 +step:2138 train loss:3.854289 +step:2139 train loss:3.855309 +step:2140 train loss:3.860595 +step:2141 train loss:3.908849 +step:2142 train loss:3.877961 +step:2143 train loss:3.800894 +step:2144 train loss:3.908996 +step:2145 train loss:3.879106 +step:2146 train loss:3.914398 +step:2147 train loss:4.017311 +step:2148 train loss:3.819945 +step:2149 train loss:3.828854 +step:2150 train loss:3.857537 +step:2151 train loss:3.891171 +step:2152 train loss:3.885036 +step:2153 train loss:3.926998 +step:2154 train loss:3.841271 +step:2155 train loss:3.922849 +step:2156 train loss:3.844957 +step:2157 train loss:3.923128 +step:2158 train loss:3.958694 +step:2159 train loss:3.884748 +step:2160 train loss:3.959241 +step:2161 train loss:3.858666 +step:2162 train loss:3.863215 +step:2163 train loss:3.840606 +step:2164 train loss:3.865726 +step:2165 train loss:3.840523 +step:2166 train loss:3.958121 +step:2167 train loss:3.866575 +step:2168 train loss:3.880461 +step:2169 train loss:3.831087 +step:2170 train loss:3.976327 +step:2171 train loss:3.936406 +step:2172 train loss:3.870704 +step:2173 train loss:3.860801 +step:2174 train loss:3.923012 +step:2175 train loss:3.856577 +step:2176 train loss:3.934454 +step:2177 train loss:3.906965 +step:2178 train loss:3.830201 +step:2179 train loss:3.897539 +step:2180 train loss:3.913015 +step:2181 train loss:3.844174 +step:2182 train loss:3.894011 +step:2183 train loss:3.887300 +step:2184 train loss:3.840220 +step:2185 train loss:3.819008 +step:2186 train loss:3.859679 +step:2187 train loss:3.870622 +step:2188 train loss:3.919591 +step:2189 train loss:3.809877 +step:2190 train loss:3.855916 +step:2191 train loss:3.910462 +step:2192 train loss:3.839601 +step:2193 train loss:3.809864 +step:2194 train loss:3.815132 +step:2195 train loss:3.840416 +step:2196 train loss:3.848200 +step:2197 train loss:3.827736 +step:2198 train loss:3.852069 +step:2199 train loss:3.923341 +step:2200 train loss:3.855272 +step:2201 train loss:3.861809 +step:2202 train loss:3.823073 +step:2203 train loss:3.846693 +step:2204 train loss:3.876277 +step:2205 train loss:3.858662 +step:2206 train loss:3.858293 +step:2207 train loss:3.852679 +step:2208 train loss:3.830000 +step:2209 train loss:4.112811 +step:2210 train loss:3.882227 +step:2211 train loss:3.875445 +step:2212 train loss:3.848373 +step:2213 train loss:3.930281 +step:2214 train loss:3.923084 +step:2215 train loss:3.848109 +step:2216 train loss:3.817090 +step:2217 train loss:3.843773 +step:2218 train loss:3.845638 +step:2219 train loss:3.878823 +step:2220 train loss:3.822742 +step:2221 train loss:3.853187 +step:2222 train loss:3.869387 +step:2223 train loss:3.908109 
+step:2224 train loss:3.883497 +step:2225 train loss:3.823681 +step:2226 train loss:3.890167 +step:2227 train loss:3.891278 +step:2228 train loss:3.887708 +step:2229 train loss:3.829322 +step:2230 train loss:3.955399 +step:2231 train loss:3.870842 +step:2232 train loss:3.864775 +step:2233 train loss:3.910011 +step:2234 train loss:3.805964 +step:2235 train loss:3.894053 +step:2236 train loss:3.832620 +step:2237 train loss:3.965256 +step:2238 train loss:3.772186 +step:2239 train loss:3.849909 +step:2240 train loss:3.863115 +step:2241 train loss:3.780063 +step:2242 train loss:3.921319 +step:2243 train loss:3.958608 +step:2244 train loss:3.836737 +step:2245 train loss:3.838174 +step:2246 train loss:3.806003 +step:2247 train loss:3.809516 +step:2248 train loss:3.862644 +step:2249 train loss:3.847084 +step:2250 validation loss:3.801934 +step:2250 train loss:3.857419 +step:2251 train loss:3.822297 +step:2252 train loss:3.824697 +step:2253 train loss:3.848081 +step:2254 train loss:3.855017 +step:2255 train loss:3.813150 +step:2256 train loss:3.863225 +step:2257 train loss:3.852102 +step:2258 train loss:3.842909 +step:2259 train loss:3.859205 +step:2260 train loss:3.812029 +step:2261 train loss:3.889423 +step:2262 train loss:3.910525 +step:2263 train loss:3.864766 +step:2264 train loss:3.979913 +step:2265 train loss:3.828192 +step:2266 train loss:3.873615 +step:2267 train loss:3.832559 +step:2268 train loss:3.835504 +step:2269 train loss:3.837458 +step:2270 train loss:3.830547 +step:2271 train loss:3.844049 +step:2272 train loss:3.883400 +step:2273 train loss:3.801355 +step:2274 train loss:3.832160 +step:2275 train loss:3.790365 +step:2276 train loss:3.863498 +step:2277 train loss:3.874002 +step:2278 train loss:3.857224 +step:2279 train loss:3.837874 +step:2280 train loss:3.746207 +step:2281 train loss:3.891998 +step:2282 train loss:3.821996 +step:2283 train loss:3.805260 +step:2284 train loss:3.824066 +step:2285 train loss:3.875655 +step:2286 train loss:3.837703 +step:2287 train loss:3.875197 +step:2288 train loss:3.845594 +step:2289 train loss:3.842362 +step:2290 train loss:3.850073 +step:2291 train loss:3.837591 +step:2292 train loss:3.877872 +step:2293 train loss:3.856234 +step:2294 train loss:3.854268 +step:2295 train loss:3.908019 +step:2296 train loss:3.840904 +step:2297 train loss:3.815994 +step:2298 train loss:3.873996 +step:2299 train loss:3.849370 +step:2300 train loss:3.764562 +step:2301 train loss:3.862032 +step:2302 train loss:3.874746 +step:2303 train loss:3.843554 +step:2304 train loss:3.836693 +step:2305 train loss:3.879848 +step:2306 train loss:3.870446 +step:2307 train loss:3.844398 +step:2308 train loss:3.866669 +step:2309 train loss:3.823482 +step:2310 train loss:3.810607 +step:2311 train loss:3.798159 +step:2312 train loss:3.864673 +step:2313 train loss:3.779639 +step:2314 train loss:3.855098 +step:2315 train loss:3.870507 +step:2316 train loss:3.907739 +step:2317 train loss:3.776559 +step:2318 train loss:3.817739 +step:2319 train loss:3.873390 +step:2320 train loss:3.843221 +step:2321 train loss:3.813331 +step:2322 train loss:3.828355 +step:2323 train loss:3.822991 +step:2324 train loss:3.851006 +step:2325 train loss:3.794048 +step:2326 train loss:3.821676 +step:2327 train loss:3.934215 +step:2328 train loss:3.883125 +step:2329 train loss:3.837489 +step:2330 train loss:3.795586 +step:2331 train loss:3.839159 +step:2332 train loss:3.764482 +step:2333 train loss:3.826352 +step:2334 train loss:3.804018 +step:2335 train loss:3.790541 +step:2336 train loss:4.043566 +step:2337 
train loss:3.817812 +step:2338 train loss:3.857107 +step:2339 train loss:3.856090 +step:2340 train loss:3.869808 +step:2341 train loss:3.860590 +step:2342 train loss:3.812076 +step:2343 train loss:3.835140 +step:2344 train loss:3.877451 +step:2345 train loss:3.831695 +step:2346 train loss:3.860639 +step:2347 train loss:3.783876 +step:2348 train loss:3.842022 +step:2349 train loss:3.792222 +step:2350 train loss:3.850948 +step:2351 train loss:3.857847 +step:2352 train loss:3.862054 +step:2353 train loss:3.819183 +step:2354 train loss:3.866435 +step:2355 train loss:3.854151 +step:2356 train loss:3.894692 +step:2357 train loss:3.801294 +step:2358 train loss:3.816224 +step:2359 train loss:3.840823 +step:2360 train loss:3.862164 +step:2361 train loss:3.897090 +step:2362 train loss:3.726080 +step:2363 train loss:3.916903 +step:2364 train loss:3.867042 +step:2365 train loss:3.835932 +step:2366 train loss:3.789506 +step:2367 train loss:3.853758 +step:2368 train loss:3.843917 +step:2369 train loss:3.833884 +step:2370 train loss:3.847120 +step:2371 train loss:3.904106 +step:2372 train loss:3.756555 +step:2373 train loss:3.896783 +step:2374 train loss:3.879310 +step:2375 train loss:3.864125 +step:2376 train loss:3.856877 +step:2377 train loss:3.799883 +step:2378 train loss:3.846889 +step:2379 train loss:3.832539 +step:2380 train loss:3.892930 +step:2381 train loss:3.985187 +step:2382 train loss:3.771888 +step:2383 train loss:3.824761 +step:2384 train loss:3.850486 +step:2385 train loss:3.752505 +step:2386 train loss:3.909920 +step:2387 train loss:3.790315 +step:2388 train loss:3.841264 +step:2389 train loss:3.862908 +step:2390 train loss:3.810604 +step:2391 train loss:3.835241 +step:2392 train loss:3.861177 +step:2393 train loss:3.817256 +step:2394 train loss:3.838313 +step:2395 train loss:3.831394 +step:2396 train loss:3.837197 +step:2397 train loss:3.812627 +step:2398 train loss:3.866697 +step:2399 train loss:3.829513 +step:2400 train loss:3.808228 +step:2401 train loss:3.849346 +step:2402 train loss:3.801757 +step:2403 train loss:3.850369 +step:2404 train loss:3.808959 +step:2405 train loss:3.811126 +step:2406 train loss:3.837314 +step:2407 train loss:3.782650 +step:2408 train loss:3.828650 +step:2409 train loss:3.816606 +step:2410 train loss:3.816363 +step:2411 train loss:3.889833 +step:2412 train loss:3.875016 +step:2413 train loss:3.914721 +step:2414 train loss:3.806041 +step:2415 train loss:3.797832 +step:2416 train loss:3.813978 +step:2417 train loss:3.849748 +step:2418 train loss:3.868768 +step:2419 train loss:3.798207 +step:2420 train loss:3.818468 +step:2421 train loss:3.849133 +step:2422 train loss:3.899285 +step:2423 train loss:3.831293 +step:2424 train loss:3.798173 +step:2425 train loss:3.859371 +step:2426 train loss:3.798439 +step:2427 train loss:3.820152 +step:2428 train loss:3.903930 +step:2429 train loss:3.855342 +step:2430 train loss:3.947642 +step:2431 train loss:3.856884 +step:2432 train loss:3.829018 +step:2433 train loss:3.805304 +step:2434 train loss:3.789871 +step:2435 train loss:3.847739 +step:2436 train loss:3.809150 +step:2437 train loss:3.838788 +step:2438 train loss:3.879639 +step:2439 train loss:3.865140 +step:2440 train loss:3.806078 +step:2441 train loss:3.842369 +step:2442 train loss:3.835593 +step:2443 train loss:3.798559 +step:2444 train loss:3.836261 +step:2445 train loss:3.830482 +step:2446 train loss:3.802599 +step:2447 train loss:3.784349 +step:2448 train loss:3.834123 +step:2449 train loss:3.864559 +step:2450 train loss:3.821784 +step:2451 train loss:3.741703 
+step:2452 train loss:3.843263 +step:2453 train loss:3.815174 +step:2454 train loss:3.811314 +step:2455 train loss:3.863981 +step:2456 train loss:3.816659 +step:2457 train loss:3.876133 +step:2458 train loss:3.855187 +step:2459 train loss:3.828590 +step:2460 train loss:3.835726 +step:2461 train loss:3.863039 +step:2462 train loss:3.836483 +step:2463 train loss:3.810538 +step:2464 train loss:3.827012 +step:2465 train loss:3.904319 +step:2466 train loss:3.985697 +step:2467 train loss:3.893286 +step:2468 train loss:3.787972 +step:2469 train loss:3.858717 +step:2470 train loss:3.907449 +step:2471 train loss:3.907278 +step:2472 train loss:3.887818 +step:2473 train loss:3.823586 +step:2474 train loss:3.782858 +step:2475 train loss:3.838490 +step:2476 train loss:3.912740 +step:2477 train loss:3.828568 +step:2478 train loss:3.784004 +step:2479 train loss:3.826452 +step:2480 train loss:3.818886 +step:2481 train loss:4.013783 +step:2482 train loss:3.823830 +step:2483 train loss:3.851659 +step:2484 train loss:3.804377 +step:2485 train loss:3.794957 +step:2486 train loss:3.831076 +step:2487 train loss:3.865482 +step:2488 train loss:3.776112 +step:2489 train loss:3.889731 +step:2490 train loss:3.808063 +step:2491 train loss:3.817566 +step:2492 train loss:3.859241 +step:2493 train loss:3.897204 +step:2494 train loss:3.816565 +step:2495 train loss:3.848922 +step:2496 train loss:3.825769 +step:2497 train loss:3.839844 +step:2498 train loss:3.848041 +step:2499 train loss:3.840171 +step:2500 validation loss:3.762760 total_sharp:8.4818e-03 L1_sharp:1.6866e-01 L2_sharp:7.4597e-02 L3_sharp:6.8387e-02 L4_sharp:3.5958e-02 L5_sharp:3.6676e-02 L6_sharp:4.0053e-02 L7_sharp:4.4931e-02 L8_sharp:4.1526e-02 L9_sharp:3.2578e-02 L10_sharp:2.5096e-02 L11_sharp:2.2520e-02 L12_sharp:2.9557e-02 total_fnorm:1.3525e+00 total_l1_linf:8.1061e+03 total_spectral:1.3525e+00 L1_fnorm:6.1715e-02 L2_fnorm:5.8004e-02 L3_fnorm:5.6810e-02 L4_fnorm:5.9018e-02 L5_fnorm:5.9974e-02 L6_fnorm:6.0898e-02 L7_fnorm:6.0969e-02 L8_fnorm:6.1133e-02 L9_fnorm:6.1068e-02 L10_fnorm:6.1140e-02 L11_fnorm:6.1015e-02 L12_fnorm:6.1010e-02 L1_l1linf:2.5893e-01 L2_l1linf:2.7832e-01 L3_l1linf:2.9142e-01 L4_l1linf:2.8237e-01 L5_l1linf:2.7872e-01 L6_l1linf:2.6627e-01 L7_l1linf:2.5761e-01 L8_l1linf:2.7283e-01 L9_l1linf:2.8432e-01 L10_l1linf:3.0220e-01 L11_l1linf:3.0138e-01 L12_l1linf:2.9569e-01 L1_spectral:5.8497e-03 L2_spectral:6.2622e-03 L3_spectral:6.5849e-03 L4_spectral:6.4016e-03 L5_spectral:6.2709e-03 L6_spectral:5.9859e-03 L7_spectral:5.7732e-03 L8_spectral:6.1187e-03 L9_spectral:6.3809e-03 L10_spectral:6.8105e-03 L11_spectral:6.8273e-03 L12_spectral:6.7220e-03 ip_v_neg_g:6.6098e-03 cos_v_neg_g:1.1599e-03 v_norm:1.3525e+00 g_norm:4.2135e+00 hv_norm:1.4145e+00 cos_v_hv:8.1100e-03 hg_norm:3.8436e+02 cos_g_hg:5.1169e-01 v_par:4.5652e-05 v_perp:1.3525e+00 L1_cos_v_neg_g:1.6029e-02 L1_v_norm:6.1715e-02 L2_cos_v_neg_g:1.4359e-02 L2_v_norm:5.8004e-02 L3_cos_v_neg_g:1.0191e-02 L3_v_norm:5.6810e-02 L4_cos_v_neg_g:5.9446e-03 L4_v_norm:5.9018e-02 L5_cos_v_neg_g:6.3640e-03 L5_v_norm:5.9974e-02 L6_cos_v_neg_g:6.0379e-03 L6_v_norm:6.0898e-02 L7_cos_v_neg_g:6.5983e-03 L7_v_norm:6.0969e-02 L8_cos_v_neg_g:5.6542e-03 L8_v_norm:6.1133e-02 L9_cos_v_neg_g:4.8204e-03 L9_v_norm:6.1068e-02 L10_cos_v_neg_g:3.2338e-03 L10_v_norm:6.1140e-02 L11_cos_v_neg_g:3.3382e-03 L11_v_norm:6.1015e-02 L12_cos_v_neg_g:3.3557e-03 L12_v_norm:6.1010e-02 +step:2500 train loss:3.784597 +step:2501 train loss:3.847186 +step:2502 train loss:3.837108 +step:2503 train loss:3.764381 +step:2504 train 
loss:3.801979 +step:2505 train loss:3.826139 +step:2506 train loss:3.791029 +step:2507 train loss:3.817445 +step:2508 train loss:3.764010 +step:2509 train loss:3.784896 +step:2510 train loss:3.783155 +step:2511 train loss:3.826785 +step:2512 train loss:3.872575 +step:2513 train loss:3.819580 +step:2514 train loss:3.803598 +step:2515 train loss:3.944122 +step:2516 train loss:3.830778 +step:2517 train loss:3.892848 +step:2518 train loss:3.856359 +step:2519 train loss:3.829184 +step:2520 train loss:3.836466 +step:2521 train loss:3.807579 +step:2522 train loss:3.849943 +step:2523 train loss:3.765395 +step:2524 train loss:3.824220 +step:2525 train loss:3.812200 +step:2526 train loss:3.864186 +step:2527 train loss:3.852797 +step:2528 train loss:3.837480 +step:2529 train loss:3.857572 +step:2530 train loss:3.834414 +step:2531 train loss:3.773036 +step:2532 train loss:3.874057 +step:2533 train loss:3.765571 +step:2534 train loss:3.863430 +step:2535 train loss:3.816617 +step:2536 train loss:3.738883 +step:2537 train loss:3.853026 +step:2538 train loss:3.833015 +step:2539 train loss:3.849465 +step:2540 train loss:3.786430 +step:2541 train loss:3.813451 +step:2542 train loss:3.824389 +step:2543 train loss:3.813891 +step:2544 train loss:3.800817 +step:2545 train loss:3.787452 +step:2546 train loss:3.753855 +step:2547 train loss:3.800214 +step:2548 train loss:3.820734 +step:2549 train loss:3.824697 +step:2550 train loss:3.954855 +step:2551 train loss:4.031906 +step:2552 train loss:3.764506 +step:2553 train loss:3.800025 +step:2554 train loss:3.943357 +step:2555 train loss:3.830873 +step:2556 train loss:3.757658 +step:2557 train loss:3.852152 +step:2558 train loss:3.842715 +step:2559 train loss:3.796458 +step:2560 train loss:3.785527 +step:2561 train loss:3.880603 +step:2562 train loss:3.833339 +step:2563 train loss:3.767133 +step:2564 train loss:3.835203 +step:2565 train loss:3.817446 +step:2566 train loss:3.795790 +step:2567 train loss:3.777136 +step:2568 train loss:3.831847 +step:2569 train loss:3.838911 +step:2570 train loss:3.790272 +step:2571 train loss:3.873453 +step:2572 train loss:3.835763 +step:2573 train loss:3.764538 +step:2574 train loss:3.813825 +step:2575 train loss:3.859069 +step:2576 train loss:3.809546 +step:2577 train loss:3.773809 +step:2578 train loss:3.814623 +step:2579 train loss:3.792389 +step:2580 train loss:3.764287 +step:2581 train loss:3.777079 +step:2582 train loss:3.787134 +step:2583 train loss:3.809310 +step:2584 train loss:3.826527 +step:2585 train loss:3.788479 +step:2586 train loss:3.813422 +step:2587 train loss:3.743290 +step:2588 train loss:3.776664 +step:2589 train loss:3.854915 +step:2590 train loss:3.777446 +step:2591 train loss:3.833917 +step:2592 train loss:3.886161 +step:2593 train loss:3.841881 +step:2594 train loss:3.802482 +step:2595 train loss:3.810917 +step:2596 train loss:3.851433 +step:2597 train loss:3.733848 +step:2598 train loss:3.890769 +step:2599 train loss:3.836136 +step:2600 train loss:3.868360 +step:2601 train loss:3.802459 +step:2602 train loss:3.836337 +step:2603 train loss:3.831232 +step:2604 train loss:3.751863 +step:2605 train loss:3.879553 +step:2606 train loss:3.828581 +step:2607 train loss:3.786834 +step:2608 train loss:3.761742 +step:2609 train loss:3.786700 +step:2610 train loss:3.811745 +step:2611 train loss:3.849048 +step:2612 train loss:3.810338 +step:2613 train loss:3.785370 +step:2614 train loss:3.774500 +step:2615 train loss:3.771250 +step:2616 train loss:3.846521 +step:2617 train loss:3.805844 +step:2618 train loss:3.773034 
+step:2619 train loss:3.789934 +step:2620 train loss:3.783014 +step:2621 train loss:3.792466 +step:2622 train loss:3.871292 +step:2623 train loss:3.743958 +step:2624 train loss:3.759785 +step:2625 train loss:3.831105 +step:2626 train loss:3.825676 +step:2627 train loss:3.801941 +step:2628 train loss:3.857019 +step:2629 train loss:3.803339 +step:2630 train loss:3.798455 +step:2631 train loss:3.828821 +step:2632 train loss:3.797266 +step:2633 train loss:3.778975 +step:2634 train loss:3.825899 +step:2635 train loss:3.808322 +step:2636 train loss:3.858479 +step:2637 train loss:3.807101 +step:2638 train loss:3.790328 +step:2639 train loss:3.844827 +step:2640 train loss:3.762035 +step:2641 train loss:3.820521 +step:2642 train loss:3.740607 +step:2643 train loss:3.739645 +step:2644 train loss:3.835225 +step:2645 train loss:3.768354 +step:2646 train loss:3.802494 +step:2647 train loss:3.820898 +step:2648 train loss:3.854556 +step:2649 train loss:3.768853 +step:2650 train loss:3.758469 +step:2651 train loss:3.799593 +step:2652 train loss:3.771393 +step:2653 train loss:3.839990 +step:2654 train loss:3.797549 +step:2655 train loss:3.787085 +step:2656 train loss:3.805185 +step:2657 train loss:3.831386 +step:2658 train loss:3.839911 +step:2659 train loss:3.819187 +step:2660 train loss:3.807318 +step:2661 train loss:3.853005 +step:2662 train loss:3.828606 +step:2663 train loss:3.803504 +step:2664 train loss:3.820990 +step:2665 train loss:3.767226 +step:2666 train loss:3.796905 +step:2667 train loss:3.802520 +step:2668 train loss:3.779960 +step:2669 train loss:3.788381 +step:2670 train loss:3.812238 +step:2671 train loss:3.787107 +step:2672 train loss:3.809942 +step:2673 train loss:3.742064 +step:2674 train loss:3.836287 +step:2675 train loss:3.808479 +step:2676 train loss:3.829125 +step:2677 train loss:3.809308 +step:2678 train loss:3.795150 +step:2679 train loss:3.778222 +step:2680 train loss:3.764199 +step:2681 train loss:3.734319 +step:2682 train loss:3.820889 +step:2683 train loss:3.791985 +step:2684 train loss:3.821589 +step:2685 train loss:3.742358 +step:2686 train loss:3.753100 +step:2687 train loss:3.830491 +step:2688 train loss:3.844211 +step:2689 train loss:3.749557 +step:2690 train loss:3.834943 +step:2691 train loss:3.803934 +step:2692 train loss:3.830446 +step:2693 train loss:3.885163 +step:2694 train loss:3.781468 +step:2695 train loss:3.801427 +step:2696 train loss:3.803353 +step:2697 train loss:3.798589 +step:2698 train loss:3.807164 +step:2699 train loss:3.823345 +step:2700 train loss:3.798151 +step:2701 train loss:3.862753 +step:2702 train loss:3.798740 +step:2703 train loss:3.760725 +step:2704 train loss:3.831508 +step:2705 train loss:3.821397 +step:2706 train loss:3.753622 +step:2707 train loss:3.720481 +step:2708 train loss:3.815413 +step:2709 train loss:3.796290 +step:2710 train loss:3.804302 +step:2711 train loss:3.767829 +step:2712 train loss:3.833123 +step:2713 train loss:3.834738 +step:2714 train loss:3.775093 +step:2715 train loss:3.771532 +step:2716 train loss:3.839595 +step:2717 train loss:3.803812 +step:2718 train loss:3.802032 +step:2719 train loss:3.800400 +step:2720 train loss:3.765994 +step:2721 train loss:3.846728 +step:2722 train loss:3.773137 +step:2723 train loss:3.759385 +step:2724 train loss:3.784037 +step:2725 train loss:3.785681 +step:2726 train loss:3.758239 +step:2727 train loss:3.817084 +step:2728 train loss:3.755148 +step:2729 train loss:3.886530 +step:2730 train loss:3.827702 +step:2731 train loss:3.868166 +step:2732 train loss:3.778873 +step:2733 train 
loss:3.775395 +step:2734 train loss:3.820326 +step:2735 train loss:3.821887 +step:2736 train loss:3.745280 +step:2737 train loss:3.799817 +step:2738 train loss:3.856631 +step:2739 train loss:3.777906 +step:2740 train loss:3.777603 +step:2741 train loss:3.766880 +step:2742 train loss:3.687529 +step:2743 train loss:3.797158 +step:2744 train loss:3.817003 +step:2745 train loss:3.772295 +step:2746 train loss:3.793004 +step:2747 train loss:3.774114 +step:2748 train loss:3.734922 +step:2749 train loss:3.799361 +step:2750 validation loss:3.727712 +step:2750 train loss:3.809978 +step:2751 train loss:3.834460 +step:2752 train loss:3.816430 +step:2753 train loss:3.809751 +step:2754 train loss:3.746680 +step:2755 train loss:3.815860 +step:2756 train loss:3.788075 +step:2757 train loss:3.776000 +step:2758 train loss:3.802746 +step:2759 train loss:3.812027 +step:2760 train loss:3.724266 +step:2761 train loss:3.740583 +step:2762 train loss:3.757049 +step:2763 train loss:3.775420 +step:2764 train loss:3.719087 +step:2765 train loss:3.767493 +step:2766 train loss:3.858881 +step:2767 train loss:3.731364 +step:2768 train loss:3.793896 +step:2769 train loss:3.767201 +step:2770 train loss:3.786227 +step:2771 train loss:3.812564 +step:2772 train loss:3.776658 +step:2773 train loss:3.775277 +step:2774 train loss:3.772232 +step:2775 train loss:3.785212 +step:2776 train loss:3.739299 +step:2777 train loss:3.770121 +step:2778 train loss:3.781654 +step:2779 train loss:3.806639 +step:2780 train loss:3.777763 +step:2781 train loss:3.763828 +step:2782 train loss:3.752525 +step:2783 train loss:3.783627 +step:2784 train loss:3.792050 +step:2785 train loss:3.862566 +step:2786 train loss:3.827220 +step:2787 train loss:3.785986 +step:2788 train loss:3.783853 +step:2789 train loss:3.778612 +step:2790 train loss:3.716393 +step:2791 train loss:3.817904 +step:2792 train loss:3.804446 +step:2793 train loss:3.770875 +step:2794 train loss:3.782697 +step:2795 train loss:3.795457 +step:2796 train loss:3.789603 +step:2797 train loss:3.835027 +step:2798 train loss:3.821667 +step:2799 train loss:3.730481 +step:2800 train loss:3.775918 +step:2801 train loss:3.809852 +step:2802 train loss:3.837976 +step:2803 train loss:3.810292 +step:2804 train loss:3.743796 +step:2805 train loss:3.785378 +step:2806 train loss:3.779179 +step:2807 train loss:3.810072 +step:2808 train loss:3.748268 +step:2809 train loss:3.815595 +step:2810 train loss:3.806578 +step:2811 train loss:3.796646 +step:2812 train loss:3.843514 +step:2813 train loss:3.814112 +step:2814 train loss:3.801669 +step:2815 train loss:3.813205 +step:2816 train loss:3.818316 +step:2817 train loss:3.750886 +step:2818 train loss:3.856490 +step:2819 train loss:3.783785 +step:2820 train loss:3.778208 +step:2821 train loss:3.758719 +step:2822 train loss:3.801849 +step:2823 train loss:3.751248 +step:2824 train loss:3.643022 +step:2825 train loss:3.795465 +step:2826 train loss:3.790947 +step:2827 train loss:3.816027 +step:2828 train loss:3.803489 +step:2829 train loss:3.795050 +step:2830 train loss:3.824329 +step:2831 train loss:3.766332 +step:2832 train loss:3.736130 +step:2833 train loss:3.794945 +step:2834 train loss:3.745658 +step:2835 train loss:3.780000 +step:2836 train loss:3.785791 +step:2837 train loss:3.784983 +step:2838 train loss:3.725856 +step:2839 train loss:3.822528 +step:2840 train loss:3.784177 +step:2841 train loss:3.865269 +step:2842 train loss:3.809114 +step:2843 train loss:3.800996 +step:2844 train loss:3.827106 +step:2845 train loss:3.785569 +step:2846 train loss:3.735144 
+step:2847 train loss:3.825833 +step:2848 train loss:3.778421 +step:2849 train loss:3.770031 +step:2850 train loss:3.829694 +step:2851 train loss:3.782585 +step:2852 train loss:3.864193 +step:2853 train loss:3.779516 +step:2854 train loss:3.721977 +step:2855 train loss:3.799183 +step:2856 train loss:3.721778 +step:2857 train loss:3.827579 +step:2858 train loss:3.782429 +step:2859 train loss:3.772517 +step:2860 train loss:3.762564 +step:2861 train loss:3.742951 +step:2862 train loss:3.773112 +step:2863 train loss:3.755476 +step:2864 train loss:3.760868 +step:2865 train loss:3.839164 +step:2866 train loss:3.850117 +step:2867 train loss:3.788101 +step:2868 train loss:3.789458 +step:2869 train loss:3.749137 +step:2870 train loss:3.834787 +step:2871 train loss:3.831320 +step:2872 train loss:3.795167 +step:2873 train loss:3.802208 +step:2874 train loss:3.780054 +step:2875 train loss:3.732639 +step:2876 train loss:3.778566 +step:2877 train loss:3.760861 +step:2878 train loss:3.775946 +step:2879 train loss:3.742367 +step:2880 train loss:3.760715 +step:2881 train loss:3.753556 +step:2882 train loss:3.686792 +step:2883 train loss:3.774756 +step:2884 train loss:3.844217 +step:2885 train loss:3.738016 +step:2886 train loss:3.786287 +step:2887 train loss:3.811993 +step:2888 train loss:3.783507 +step:2889 train loss:3.768347 +step:2890 train loss:3.742055 +step:2891 train loss:3.781709 +step:2892 train loss:3.789784 +step:2893 train loss:3.770932 +step:2894 train loss:3.742690 +step:2895 train loss:3.792880 +step:2896 train loss:3.838457 +step:2897 train loss:3.815197 +step:2898 train loss:3.950474 +step:2899 train loss:3.706359 +step:2900 train loss:3.781563 +step:2901 train loss:3.733227 +step:2902 train loss:3.732568 +step:2903 train loss:3.747804 +step:2904 train loss:3.774898 +step:2905 train loss:3.835120 +step:2906 train loss:3.806177 +step:2907 train loss:3.979105 +step:2908 train loss:3.725822 +step:2909 train loss:3.804657 +step:2910 train loss:3.776510 +step:2911 train loss:3.802715 +step:2912 train loss:3.761661 +step:2913 train loss:3.795807 +step:2914 train loss:3.823535 +step:2915 train loss:3.819084 +step:2916 train loss:3.778516 +step:2917 train loss:3.810809 +step:2918 train loss:3.802134 +step:2919 train loss:3.748439 +step:2920 train loss:3.800559 +step:2921 train loss:3.754650 +step:2922 train loss:3.779121 +step:2923 train loss:3.844793 +step:2924 train loss:3.781653 +step:2925 train loss:3.733866 +step:2926 train loss:3.824783 +step:2927 train loss:3.734761 +step:2928 train loss:3.701223 +step:2929 train loss:3.719333 +step:2930 train loss:3.737952 +step:2931 train loss:3.895312 +step:2932 train loss:3.809617 +step:2933 train loss:3.776813 +step:2934 train loss:3.769290 +step:2935 train loss:3.790852 +step:2936 train loss:3.741660 +step:2937 train loss:3.757340 +step:2938 train loss:3.780218 +step:2939 train loss:3.851378 +step:2940 train loss:3.751140 +step:2941 train loss:3.787686 +step:2942 train loss:3.746660 +step:2943 train loss:4.022379 +step:2944 train loss:3.852824 +step:2945 train loss:3.808385 +step:2946 train loss:3.818449 +step:2947 train loss:3.783725 +step:2948 train loss:3.737780 +step:2949 train loss:3.827777 +step:2950 train loss:3.783438 +step:2951 train loss:3.679211 +step:2952 train loss:3.751847 +step:2953 train loss:3.664436 +step:2954 train loss:3.755840 +step:2955 train loss:3.821901 +step:2956 train loss:3.771359 +step:2957 train loss:3.775437 +step:2958 train loss:3.727162 +step:2959 train loss:3.751461 +step:2960 train loss:3.843207 +step:2961 train 
loss:3.706194 +step:2962 train loss:3.783164 +step:2963 train loss:3.777004 +step:2964 train loss:3.756988 +step:2965 train loss:3.784431 +step:2966 train loss:3.758499 +step:2967 train loss:3.757582 +step:2968 train loss:3.730059 +step:2969 train loss:3.741902 +step:2970 train loss:3.808045 +step:2971 train loss:3.739562 +step:2972 train loss:3.720215 +step:2973 train loss:3.718504 +step:2974 train loss:3.758168 +step:2975 train loss:3.721112 +step:2976 train loss:3.763215 +step:2977 train loss:3.753980 +step:2978 train loss:3.837285 +step:2979 train loss:3.819155 +step:2980 train loss:3.824476 +step:2981 train loss:3.780822 +step:2982 train loss:3.768233 +step:2983 train loss:3.721804 +step:2984 train loss:3.695597 +step:2985 train loss:3.808320 +step:2986 train loss:3.705068 +step:2987 train loss:3.832169 +step:2988 train loss:3.758849 +step:2989 train loss:3.789020 +step:2990 train loss:3.741540 +step:2991 train loss:3.811415 +step:2992 train loss:3.804482 +step:2993 train loss:3.770783 +step:2994 train loss:3.756907 +step:2995 train loss:3.827120 +step:2996 train loss:3.753369 +step:2997 train loss:3.660871 +step:2998 train loss:3.776199 +step:2999 train loss:3.816605 +step:3000 validation loss:3.699826 total_sharp:7.3140e-03 L1_sharp:1.5602e-01 L2_sharp:5.0057e-02 L3_sharp:5.9309e-02 L4_sharp:2.9937e-02 L5_sharp:2.7323e-02 L6_sharp:3.5269e-02 L7_sharp:4.0943e-02 L8_sharp:4.0812e-02 L9_sharp:2.9843e-02 L10_sharp:2.2214e-02 L11_sharp:2.1422e-02 L12_sharp:3.1711e-02 total_fnorm:1.3358e+00 total_l1_linf:8.0272e+03 total_spectral:1.3358e+00 L1_fnorm:6.1433e-02 L2_fnorm:5.8433e-02 L3_fnorm:5.6869e-02 L4_fnorm:5.9253e-02 L5_fnorm:6.0370e-02 L6_fnorm:6.1033e-02 L7_fnorm:6.1060e-02 L8_fnorm:6.1199e-02 L9_fnorm:6.1099e-02 L10_fnorm:6.1110e-02 L11_fnorm:6.1083e-02 L12_fnorm:6.1159e-02 L1_l1linf:2.8159e-01 L2_l1linf:3.2010e-01 L3_l1linf:3.1844e-01 L4_l1linf:3.1643e-01 L5_l1linf:2.9064e-01 L6_l1linf:2.8772e-01 L7_l1linf:2.7143e-01 L8_l1linf:2.8125e-01 L9_l1linf:3.0731e-01 L10_l1linf:3.0687e-01 L11_l1linf:3.2761e-01 L12_l1linf:3.1602e-01 L1_spectral:6.3802e-03 L2_spectral:7.2620e-03 L3_spectral:7.1797e-03 L4_spectral:7.1618e-03 L5_spectral:6.5236e-03 L6_spectral:6.4889e-03 L7_spectral:6.1367e-03 L8_spectral:6.3375e-03 L9_spectral:6.9119e-03 L10_spectral:6.9367e-03 L11_spectral:7.3607e-03 L12_spectral:7.1612e-03 ip_v_neg_g:8.6007e-03 cos_v_neg_g:1.6633e-03 v_norm:1.3358e+00 g_norm:3.8708e+00 hv_norm:1.0992e+00 cos_v_hv:8.8890e-03 hg_norm:2.5440e+02 cos_g_hg:4.8508e-01 v_par:5.6179e-05 v_perp:1.3358e+00 L1_cos_v_neg_g:1.4498e-02 L1_v_norm:6.1433e-02 L2_cos_v_neg_g:1.4245e-02 L2_v_norm:5.8433e-02 L3_cos_v_neg_g:1.1539e-02 L3_v_norm:5.6869e-02 L4_cos_v_neg_g:1.0680e-02 L4_v_norm:5.9253e-02 L5_cos_v_neg_g:9.8341e-03 L5_v_norm:6.0370e-02 L6_cos_v_neg_g:9.9110e-03 L6_v_norm:6.1033e-02 L7_cos_v_neg_g:1.0070e-02 L7_v_norm:6.1060e-02 L8_cos_v_neg_g:1.1347e-02 L8_v_norm:6.1199e-02 L9_cos_v_neg_g:9.9257e-03 L9_v_norm:6.1099e-02 L10_cos_v_neg_g:8.4498e-03 L10_v_norm:6.1110e-02 L11_cos_v_neg_g:7.1279e-03 L11_v_norm:6.1083e-02 L12_cos_v_neg_g:6.5772e-03 L12_v_norm:6.1159e-02 +step:3000 train loss:3.708381 +step:3001 train loss:3.761295 +step:3002 train loss:3.760914 +step:3003 train loss:3.757173 +step:3004 train loss:3.783826 +step:3005 train loss:3.681590 +step:3006 train loss:3.731692 +step:3007 train loss:3.764688 +step:3008 train loss:3.809335 +step:3009 train loss:3.765525 +step:3010 train loss:3.785392 +step:3011 train loss:3.767977 +step:3012 train loss:3.746258 +step:3013 train loss:3.789344 +step:3014 
train loss:3.748708 +step:3015 train loss:3.746940 +step:3016 train loss:3.768694 +step:3017 train loss:3.786662 +step:3018 train loss:3.719412 +step:3019 train loss:3.757216 +step:3020 train loss:3.775802 +step:3021 train loss:3.740951 +step:3022 train loss:3.831445 +step:3023 train loss:3.780003 +step:3024 train loss:3.768831 +step:3025 train loss:3.779142 +step:3026 train loss:3.749932 +step:3027 train loss:3.727725 +step:3028 train loss:3.779174 +step:3029 train loss:3.764726 +step:3030 train loss:3.741076 +step:3031 train loss:3.723388 +step:3032 train loss:3.711071 +step:3033 train loss:3.738926 +step:3034 train loss:3.784617 +step:3035 train loss:3.762994 +step:3036 train loss:3.723567 +step:3037 train loss:3.688807 +step:3038 train loss:3.804604 +step:3039 train loss:3.681267 +step:3040 train loss:3.670942 +step:3041 train loss:3.799441 +step:3042 train loss:3.733254 +step:3043 train loss:3.793070 +step:3044 train loss:3.688292 +step:3045 train loss:3.735453 +step:3046 train loss:3.708166 +step:3047 train loss:3.740386 +step:3048 train loss:3.703578 +step:3049 train loss:3.783455 +step:3050 train loss:3.671354 +step:3051 train loss:3.691488 +step:3052 train loss:3.707214 +step:3053 train loss:3.777783 +step:3054 train loss:3.850171 +step:3055 train loss:3.688277 +step:3056 train loss:3.719678 +step:3057 train loss:3.753662 +step:3058 train loss:3.702485 +step:3059 train loss:3.731019 +step:3060 train loss:3.728146 +step:3061 train loss:3.712914 +step:3062 train loss:3.766291 +step:3063 train loss:3.749556 +step:3064 train loss:3.773706 +step:3065 train loss:3.788491 +step:3066 train loss:3.688544 +step:3067 train loss:3.735635 +step:3068 train loss:3.789654 +step:3069 train loss:3.803280 +step:3070 train loss:3.732313 +step:3071 train loss:3.749866 +step:3072 train loss:3.750539 +step:3073 train loss:3.786358 +step:3074 train loss:3.723972 +step:3075 train loss:3.760458 +step:3076 train loss:3.696615 +step:3077 train loss:3.694647 +step:3078 train loss:3.722346 +step:3079 train loss:3.770133 +step:3080 train loss:3.762622 +step:3081 train loss:3.807467 +step:3082 train loss:3.782180 +step:3083 train loss:3.711398 +step:3084 train loss:3.793956 +step:3085 train loss:3.719950 +step:3086 train loss:3.781273 +step:3087 train loss:3.751341 +step:3088 train loss:3.832197 +step:3089 train loss:3.706362 +step:3090 train loss:3.779386 +step:3091 train loss:3.702432 +step:3092 train loss:3.728021 +step:3093 train loss:3.749402 +step:3094 train loss:3.734503 +step:3095 train loss:3.818202 +step:3096 train loss:3.747185 +step:3097 train loss:3.758114 +step:3098 train loss:3.735986 +step:3099 train loss:3.743777 +step:3100 train loss:3.769865 +step:3101 train loss:3.853798 +step:3102 train loss:3.776489 +step:3103 train loss:3.700973 +step:3104 train loss:3.784509 +step:3105 train loss:3.755610 +step:3106 train loss:3.751265 +step:3107 train loss:3.733286 +step:3108 train loss:3.709300 +step:3109 train loss:3.762338 +step:3110 train loss:3.692234 +step:3111 train loss:3.728032 +step:3112 train loss:3.663175 +step:3113 train loss:3.784743 +step:3114 train loss:3.696684 +step:3115 train loss:3.739308 +step:3116 train loss:3.618671 +step:3117 train loss:3.639493 +step:3118 train loss:3.739911 +step:3119 train loss:3.746337 +step:3120 train loss:3.749112 +step:3121 train loss:3.692390 +step:3122 train loss:3.776499 +step:3123 train loss:3.693331 +step:3124 train loss:3.755534 +step:3125 train loss:3.770630 +step:3126 train loss:3.873387 +step:3127 train loss:3.721768 +step:3128 train loss:3.748662 
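A quick consistency check on the step-3000 record above: assuming ip_v_neg_g, v_norm and g_norm denote the inner product ⟨v, −g⟩ and the norms ‖v‖, ‖g‖ (an assumption based on the field names only), the logged cosine follows from

    \cos(v, -g) = \frac{\langle v, -g\rangle}{\lVert v\rVert\,\lVert g\rVert}
                \approx \frac{8.6007\times 10^{-3}}{1.3358 \times 3.8708}
                \approx 1.66\times 10^{-3},

which matches the logged cos_v_neg_g:1.6633e-03; the step-2500 and step-3500 records satisfy the same relation.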
+step:3129 train loss:3.731042 +step:3130 train loss:3.704170 +step:3131 train loss:3.787432 +step:3132 train loss:3.773599 +step:3133 train loss:3.742792 +step:3134 train loss:3.639366 +step:3135 train loss:3.731821 +step:3136 train loss:3.707114 +step:3137 train loss:3.839876 +step:3138 train loss:3.740909 +step:3139 train loss:3.722087 +step:3140 train loss:3.743081 +step:3141 train loss:3.746635 +step:3142 train loss:3.683327 +step:3143 train loss:3.767982 +step:3144 train loss:3.715385 +step:3145 train loss:3.699398 +step:3146 train loss:3.713205 +step:3147 train loss:3.824739 +step:3148 train loss:3.729589 +step:3149 train loss:3.783421 +step:3150 train loss:3.766795 +step:3151 train loss:3.736644 +step:3152 train loss:3.732691 +step:3153 train loss:3.694682 +step:3154 train loss:3.775661 +step:3155 train loss:3.718630 +step:3156 train loss:3.769349 +step:3157 train loss:3.773420 +step:3158 train loss:3.743647 +step:3159 train loss:3.681834 +step:3160 train loss:3.732252 +step:3161 train loss:3.702549 +step:3162 train loss:3.760890 +step:3163 train loss:3.741201 +step:3164 train loss:3.720265 +step:3165 train loss:3.737781 +step:3166 train loss:3.776353 +step:3167 train loss:3.738339 +step:3168 train loss:3.813766 +step:3169 train loss:3.728556 +step:3170 train loss:3.711141 +step:3171 train loss:3.698979 +step:3172 train loss:3.704841 +step:3173 train loss:3.650505 +step:3174 train loss:3.765717 +step:3175 train loss:3.730896 +step:3176 train loss:3.742464 +step:3177 train loss:3.707790 +step:3178 train loss:3.686099 +step:3179 train loss:3.763859 +step:3180 train loss:3.693204 +step:3181 train loss:3.774526 +step:3182 train loss:3.783361 +step:3183 train loss:3.724870 +step:3184 train loss:3.722835 +step:3185 train loss:3.782239 +step:3186 train loss:3.739275 +step:3187 train loss:3.757587 +step:3188 train loss:3.799149 +step:3189 train loss:3.746533 +step:3190 train loss:3.700534 +step:3191 train loss:3.707026 +step:3192 train loss:3.668788 +step:3193 train loss:3.748458 +step:3194 train loss:3.713960 +step:3195 train loss:3.699338 +step:3196 train loss:3.748208 +step:3197 train loss:3.712923 +step:3198 train loss:3.743587 +step:3199 train loss:3.729166 +step:3200 train loss:3.731712 +step:3201 train loss:3.698917 +step:3202 train loss:3.756649 +step:3203 train loss:3.820297 +step:3204 train loss:3.784136 +step:3205 train loss:3.630826 +step:3206 train loss:3.911096 +step:3207 train loss:3.672012 +step:3208 train loss:3.736892 +step:3209 train loss:3.729711 +step:3210 train loss:3.710588 +step:3211 train loss:3.737296 +step:3212 train loss:3.748624 +step:3213 train loss:3.688366 +step:3214 train loss:3.794676 +step:3215 train loss:3.796610 +step:3216 train loss:3.667612 +step:3217 train loss:3.748432 +step:3218 train loss:3.792327 +step:3219 train loss:3.704256 +step:3220 train loss:3.777607 +step:3221 train loss:3.688972 +step:3222 train loss:3.733375 +step:3223 train loss:3.749073 +step:3224 train loss:3.761990 +step:3225 train loss:3.684721 +step:3226 train loss:3.717465 +step:3227 train loss:3.744812 +step:3228 train loss:3.741883 +step:3229 train loss:3.775139 +step:3230 train loss:3.787290 +step:3231 train loss:3.725960 +step:3232 train loss:3.735644 +step:3233 train loss:3.709157 +step:3234 train loss:3.697369 +step:3235 train loss:3.698780 +step:3236 train loss:3.719844 +step:3237 train loss:3.718839 +step:3238 train loss:3.735643 +step:3239 train loss:3.637399 +step:3240 train loss:3.752757 +step:3241 train loss:3.747741 +step:3242 train loss:3.802175 +step:3243 train 
loss:3.744831 +step:3244 train loss:3.759103 +step:3245 train loss:3.663226 +step:3246 train loss:3.791520 +step:3247 train loss:3.733268 +step:3248 train loss:3.754037 +step:3249 train loss:3.698324 +step:3250 validation loss:3.667410 +step:3250 train loss:3.699516 +step:3251 train loss:3.808855 +step:3252 train loss:3.738322 +step:3253 train loss:3.740037 +step:3254 train loss:3.807679 +step:3255 train loss:3.748298 +step:3256 train loss:3.745098 +step:3257 train loss:3.723957 +step:3258 train loss:3.657185 +step:3259 train loss:3.636034 +step:3260 train loss:3.750361 +step:3261 train loss:3.731968 +step:3262 train loss:3.720432 +step:3263 train loss:3.705692 +step:3264 train loss:3.816363 +step:3265 train loss:3.725064 +step:3266 train loss:3.751417 +step:3267 train loss:3.715793 +step:3268 train loss:3.720617 +step:3269 train loss:3.732527 +step:3270 train loss:3.761984 +step:3271 train loss:3.726570 +step:3272 train loss:3.701284 +step:3273 train loss:3.713017 +step:3274 train loss:3.846716 +step:3275 train loss:3.717897 +step:3276 train loss:3.786241 +step:3277 train loss:3.724214 +step:3278 train loss:3.700354 +step:3279 train loss:3.726143 +step:3280 train loss:3.754156 +step:3281 train loss:3.679616 +step:3282 train loss:3.750440 +step:3283 train loss:3.722836 +step:3284 train loss:3.684056 +step:3285 train loss:3.700806 +step:3286 train loss:3.734338 +step:3287 train loss:3.670673 +step:3288 train loss:3.752722 +step:3289 train loss:3.695305 +step:3290 train loss:3.728616 +step:3291 train loss:3.686755 +step:3292 train loss:3.710282 +step:3293 train loss:3.754468 +step:3294 train loss:3.766339 +step:3295 train loss:3.678117 +step:3296 train loss:3.732876 +step:3297 train loss:3.694279 +step:3298 train loss:3.695052 +step:3299 train loss:3.821085 +step:3300 train loss:3.660373 +step:3301 train loss:3.742356 +step:3302 train loss:3.711921 +step:3303 train loss:3.727625 +step:3304 train loss:3.694525 +step:3305 train loss:3.784692 +step:3306 train loss:3.719658 +step:3307 train loss:3.740332 +step:3308 train loss:3.696014 +step:3309 train loss:3.753925 +step:3310 train loss:3.669877 +step:3311 train loss:3.723010 +step:3312 train loss:3.693041 +step:3313 train loss:3.728295 +step:3314 train loss:3.723101 +step:3315 train loss:3.802253 +step:3316 train loss:3.656717 +step:3317 train loss:3.747514 +step:3318 train loss:3.756424 +step:3319 train loss:3.686159 +step:3320 train loss:3.843215 +step:3321 train loss:3.747228 +step:3322 train loss:3.745305 +step:3323 train loss:3.851708 +step:3324 train loss:3.770036 +step:3325 train loss:3.742060 +step:3326 train loss:3.734655 +step:3327 train loss:3.746543 +step:3328 train loss:3.725578 +step:3329 train loss:3.725199 +step:3330 train loss:3.718111 +step:3331 train loss:3.764663 +step:3332 train loss:3.786344 +step:3333 train loss:3.751327 +step:3334 train loss:3.684555 +step:3335 train loss:3.696265 +step:3336 train loss:3.732689 +step:3337 train loss:3.731319 +step:3338 train loss:3.719453 +step:3339 train loss:3.711919 +step:3340 train loss:3.748200 +step:3341 train loss:3.697183 +step:3342 train loss:3.746592 +step:3343 train loss:3.681599 +step:3344 train loss:3.742691 +step:3345 train loss:3.693112 +step:3346 train loss:3.703306 +step:3347 train loss:3.712612 +step:3348 train loss:3.722749 +step:3349 train loss:3.717775 +step:3350 train loss:3.741216 +step:3351 train loss:3.796275 +step:3352 train loss:3.736377 +step:3353 train loss:3.834618 +step:3354 train loss:3.680779 +step:3355 train loss:3.785567 +step:3356 train loss:3.737441 
+step:3357 train loss:3.749529 +step:3358 train loss:3.687877 +step:3359 train loss:3.722570 +step:3360 train loss:3.713268 +step:3361 train loss:3.713518 +step:3362 train loss:3.704872 +step:3363 train loss:3.705213 +step:3364 train loss:3.687310 +step:3365 train loss:3.724888 +step:3366 train loss:3.754981 +step:3367 train loss:3.708220 +step:3368 train loss:3.803203 +step:3369 train loss:3.714711 +step:3370 train loss:3.781135 +step:3371 train loss:3.764823 +step:3372 train loss:3.731259 +step:3373 train loss:3.740659 +step:3374 train loss:3.786977 +step:3375 train loss:3.720013 +step:3376 train loss:3.724905 +step:3377 train loss:3.713213 +step:3378 train loss:3.691482 +step:3379 train loss:3.770248 +step:3380 train loss:3.749938 +step:3381 train loss:3.733378 +step:3382 train loss:3.751842 +step:3383 train loss:3.758270 +step:3384 train loss:3.689025 +step:3385 train loss:3.739465 +step:3386 train loss:3.719763 +step:3387 train loss:3.793071 +step:3388 train loss:3.696297 +step:3389 train loss:3.891857 +step:3390 train loss:3.630602 +step:3391 train loss:3.716737 +step:3392 train loss:3.700613 +step:3393 train loss:3.732936 +step:3394 train loss:3.688377 +step:3395 train loss:3.760562 +step:3396 train loss:3.674210 +step:3397 train loss:3.750783 +step:3398 train loss:3.716587 +step:3399 train loss:3.736238 +step:3400 train loss:3.683858 +step:3401 train loss:3.719263 +step:3402 train loss:3.878612 +step:3403 train loss:3.764251 +step:3404 train loss:3.880064 +step:3405 train loss:3.734859 +step:3406 train loss:3.713898 +step:3407 train loss:3.711271 +step:3408 train loss:3.693457 +step:3409 train loss:3.658425 +step:3410 train loss:3.693937 +step:3411 train loss:3.760534 +step:3412 train loss:3.684990 +step:3413 train loss:3.676579 +step:3414 train loss:3.715504 +step:3415 train loss:3.689676 +step:3416 train loss:3.691628 +step:3417 train loss:3.774597 +step:3418 train loss:3.772093 +step:3419 train loss:3.733450 +step:3420 train loss:3.705485 +step:3421 train loss:3.738062 +step:3422 train loss:3.754142 +step:3423 train loss:3.774264 +step:3424 train loss:3.655186 +step:3425 train loss:3.678889 +step:3426 train loss:3.674587 +step:3427 train loss:3.736222 +step:3428 train loss:3.662538 +step:3429 train loss:3.725070 +step:3430 train loss:3.689038 +step:3431 train loss:3.744982 +step:3432 train loss:3.729786 +step:3433 train loss:3.688885 +step:3434 train loss:3.775288 +step:3435 train loss:3.712122 +step:3436 train loss:3.804775 +step:3437 train loss:3.632934 +step:3438 train loss:3.739633 +step:3439 train loss:3.713620 +step:3440 train loss:3.808254 +step:3441 train loss:3.700925 +step:3442 train loss:3.770181 +step:3443 train loss:3.704036 +step:3444 train loss:3.723025 +step:3445 train loss:3.769535 +step:3446 train loss:3.675105 +step:3447 train loss:3.747211 +step:3448 train loss:3.702372 +step:3449 train loss:3.735142 +step:3450 train loss:3.641492 +step:3451 train loss:3.760198 +step:3452 train loss:3.712646 +step:3453 train loss:3.762300 +step:3454 train loss:3.789669 +step:3455 train loss:3.849668 +step:3456 train loss:3.786092 +step:3457 train loss:3.779606 +step:3458 train loss:3.708051 +step:3459 train loss:3.717648 +step:3460 train loss:3.659808 +step:3461 train loss:3.726332 +step:3462 train loss:3.725655 +step:3463 train loss:3.697178 +step:3464 train loss:3.749238 +step:3465 train loss:3.682412 +step:3466 train loss:3.748228 +step:3467 train loss:3.704936 +step:3468 train loss:3.719685 +step:3469 train loss:3.731497 +step:3470 train loss:3.711758 +step:3471 train 
loss:3.752755 +step:3472 train loss:3.638369 +step:3473 train loss:3.758816 +step:3474 train loss:3.658061 +step:3475 train loss:3.737745 +step:3476 train loss:3.708164 +step:3477 train loss:3.728246 +step:3478 train loss:3.705268 +step:3479 train loss:3.733569 +step:3480 train loss:3.753288 +step:3481 train loss:3.734667 +step:3482 train loss:3.714781 +step:3483 train loss:3.856471 +step:3484 train loss:3.700042 +step:3485 train loss:3.686433 +step:3486 train loss:3.738374 +step:3487 train loss:3.779838 +step:3488 train loss:3.682961 +step:3489 train loss:3.736335 +step:3490 train loss:3.704699 +step:3491 train loss:3.744007 +step:3492 train loss:3.777576 +step:3493 train loss:3.749039 +step:3494 train loss:3.741738 +step:3495 train loss:3.719620 +step:3496 train loss:3.686857 +step:3497 train loss:3.795933 +step:3498 train loss:3.743831 +step:3499 train loss:3.676783 +step:3500 validation loss:3.646385 total_sharp:6.5718e-03 L1_sharp:6.0603e-02 L2_sharp:3.1721e-02 L3_sharp:4.7642e-02 L4_sharp:3.3502e-02 L5_sharp:3.7156e-02 L6_sharp:4.0183e-02 L7_sharp:3.9365e-02 L8_sharp:3.7729e-02 L9_sharp:2.7661e-02 L10_sharp:2.0904e-02 L11_sharp:1.9122e-02 L12_sharp:2.1561e-02 total_fnorm:1.3467e+00 total_l1_linf:8.0793e+03 total_spectral:1.3467e+00 L1_fnorm:6.1576e-02 L2_fnorm:5.8978e-02 L3_fnorm:5.7900e-02 L4_fnorm:5.9432e-02 L5_fnorm:6.0474e-02 L6_fnorm:6.1222e-02 L7_fnorm:6.1140e-02 L8_fnorm:6.1178e-02 L9_fnorm:6.1085e-02 L10_fnorm:6.1155e-02 L11_fnorm:6.1072e-02 L12_fnorm:6.1066e-02 L1_l1linf:2.8524e-01 L2_l1linf:3.3225e-01 L3_l1linf:3.2420e-01 L4_l1linf:3.3954e-01 L5_l1linf:3.3454e-01 L6_l1linf:3.2523e-01 L7_l1linf:2.9023e-01 L8_l1linf:2.9643e-01 L9_l1linf:3.1093e-01 L10_l1linf:3.3244e-01 L11_l1linf:3.2696e-01 L12_l1linf:2.9850e-01 L1_spectral:6.4888e-03 L2_spectral:7.4859e-03 L3_spectral:7.2875e-03 L4_spectral:7.5913e-03 L5_spectral:7.4797e-03 L6_spectral:7.3219e-03 L7_spectral:6.4647e-03 L8_spectral:6.6038e-03 L9_spectral:6.9196e-03 L10_spectral:7.4013e-03 L11_spectral:7.3732e-03 L12_spectral:6.8665e-03 ip_v_neg_g:7.0134e-03 cos_v_neg_g:1.2783e-03 v_norm:1.3467e+00 g_norm:4.0739e+00 hv_norm:1.0415e+00 cos_v_hv:8.4977e-03 hg_norm:3.4599e+02 cos_g_hg:5.5814e-01 v_par:4.2795e-05 v_perp:1.3467e+00 L1_cos_v_neg_g:9.2940e-03 L1_v_norm:6.1576e-02 L2_cos_v_neg_g:1.1279e-02 L2_v_norm:5.8978e-02 L3_cos_v_neg_g:1.1880e-02 L3_v_norm:5.7900e-02 L4_cos_v_neg_g:8.7424e-03 L4_v_norm:5.9432e-02 L5_cos_v_neg_g:9.7304e-03 L5_v_norm:6.0474e-02 L6_cos_v_neg_g:1.0307e-02 L6_v_norm:6.1222e-02 L7_cos_v_neg_g:8.6786e-03 L7_v_norm:6.1140e-02 L8_cos_v_neg_g:8.3477e-03 L8_v_norm:6.1178e-02 L9_cos_v_neg_g:7.1170e-03 L9_v_norm:6.1085e-02 L10_cos_v_neg_g:4.7978e-03 L10_v_norm:6.1155e-02 L11_cos_v_neg_g:4.5815e-03 L11_v_norm:6.1072e-02 L12_cos_v_neg_g:3.9888e-03 L12_v_norm:6.1066e-02 +step:3500 train loss:3.694936 +step:3501 train loss:3.822232 +step:3502 train loss:3.802927 +step:3503 train loss:3.752956 +step:3504 train loss:3.704880 +step:3505 train loss:3.721686 +step:3506 train loss:3.618323 +step:3507 train loss:3.737777 +step:3508 train loss:3.682045 +step:3509 train loss:3.748445 +step:3510 train loss:3.682725 +step:3511 train loss:3.718738 +step:3512 train loss:3.858595 +step:3513 train loss:3.676059 +step:3514 train loss:3.693172 +step:3515 train loss:3.941715 +step:3516 train loss:3.736287 +step:3517 train loss:3.697184 +step:3518 train loss:3.698896 +step:3519 train loss:3.693532 +step:3520 train loss:3.724640 +step:3521 train loss:3.717112 +step:3522 train loss:3.625432 +step:3523 train loss:3.731575 +step:3524 
train loss:3.713756 +step:3525 train loss:3.701007 +step:3526 train loss:3.724494 +step:3527 train loss:3.676998 +step:3528 train loss:3.727849 +step:3529 train loss:3.706841 +step:3530 train loss:3.699817 +step:3531 train loss:3.692151 +step:3532 train loss:3.875118 +step:3533 train loss:3.696905 +step:3534 train loss:3.713983 +step:3535 train loss:3.691781 +step:3536 train loss:3.689005 +step:3537 train loss:3.697572 +step:3538 train loss:3.731032 +step:3539 train loss:3.680476 +step:3540 train loss:3.744647 +step:3541 train loss:3.711021 +step:3542 train loss:3.723388 +step:3543 train loss:3.642965 +step:3544 train loss:3.658766 +step:3545 train loss:3.665313 +step:3546 train loss:3.731029 +step:3547 train loss:3.738284 +step:3548 train loss:3.712741 +step:3549 train loss:3.709986 +step:3550 train loss:3.697147 +step:3551 train loss:3.726536 +step:3552 train loss:3.620010 +step:3553 train loss:3.743375 +step:3554 train loss:3.734061 +step:3555 train loss:3.721301 +step:3556 train loss:3.746295 +step:3557 train loss:3.732251 +step:3558 train loss:3.707788 +step:3559 train loss:3.651774 +step:3560 train loss:3.745136 +step:3561 train loss:3.736577 +step:3562 train loss:3.909925 +step:3563 train loss:3.769240 +step:3564 train loss:3.727804 +step:3565 train loss:3.729262 +step:3566 train loss:3.704614 +step:3567 train loss:3.644186 +step:3568 train loss:3.670242 +step:3569 train loss:3.756530 +step:3570 train loss:3.778135 +step:3571 train loss:3.758092 +step:3572 train loss:3.747341 +step:3573 train loss:3.706786 +step:3574 train loss:3.704868 +step:3575 train loss:3.695666 +step:3576 train loss:3.679696 +step:3577 train loss:3.689535 +step:3578 train loss:3.773200 +step:3579 train loss:3.680449 +step:3580 train loss:3.762492 +step:3581 train loss:3.703565 +step:3582 train loss:3.758528 +step:3583 train loss:3.699029 +step:3584 train loss:3.671370 +step:3585 train loss:3.720903 +step:3586 train loss:3.668933 +step:3587 train loss:3.766396 +step:3588 train loss:3.895913 +step:3589 train loss:3.726639 +step:3590 train loss:3.712010 +step:3591 train loss:3.720958 +step:3592 train loss:3.681359 +step:3593 train loss:3.653406 +step:3594 train loss:3.706648 +step:3595 train loss:3.679562 +step:3596 train loss:3.757689 +step:3597 train loss:3.734718 +step:3598 train loss:3.685269 +step:3599 train loss:3.738969 +step:3600 train loss:3.676039 +step:3601 train loss:3.693741 +step:3602 train loss:3.682522 +step:3603 train loss:3.697688 +step:3604 train loss:3.723436 +step:3605 train loss:3.827862 +step:3606 train loss:3.727938 +step:3607 train loss:3.709791 +step:3608 train loss:3.729669 +step:3609 train loss:3.711394 +step:3610 train loss:3.681690 +step:3611 train loss:3.684400 +step:3612 train loss:3.750854 +step:3613 train loss:3.722674 +step:3614 train loss:3.666337 +step:3615 train loss:3.705361 +step:3616 train loss:3.660357 +step:3617 train loss:3.735603 +step:3618 train loss:3.691435 +step:3619 train loss:3.677882 +step:3620 train loss:3.694290 +step:3621 train loss:3.655979 +step:3622 train loss:3.763519 +step:3623 train loss:3.750850 +step:3624 train loss:3.724342 +step:3625 train loss:3.702138 +step:3626 train loss:3.711213 +step:3627 train loss:3.707289 +step:3628 train loss:3.693008 +step:3629 train loss:3.698835 +step:3630 train loss:3.776291 +step:3631 train loss:3.705486 +step:3632 train loss:3.737231 +step:3633 train loss:3.696007 +step:3634 train loss:3.697672 +step:3635 train loss:3.687189 +step:3636 train loss:3.755933 +step:3637 train loss:3.836669 +step:3638 train loss:3.748984 
+step:3639 train loss:3.737774 +step:3640 train loss:3.743556 +step:3641 train loss:3.784280 +step:3642 train loss:3.677737 +step:3643 train loss:3.848764 +step:3644 train loss:3.741284 +step:3645 train loss:3.708432 +step:3646 train loss:3.830836 +step:3647 train loss:3.719307 +step:3648 train loss:3.713274 +step:3649 train loss:3.662188 +step:3650 train loss:3.704129 +step:3651 train loss:3.699339 +step:3652 train loss:3.686446 +step:3653 train loss:3.622582 +step:3654 train loss:3.683525 +step:3655 train loss:3.674348 +step:3656 train loss:3.706835 +step:3657 train loss:3.725424 +step:3658 train loss:3.719919 +step:3659 train loss:3.705565 +step:3660 train loss:3.674895 +step:3661 train loss:3.705954 +step:3662 train loss:3.677022 +step:3663 train loss:3.718409 +step:3664 train loss:3.671434 +step:3665 train loss:3.713970 +step:3666 train loss:3.753819 +step:3667 train loss:3.842832 +step:3668 train loss:3.722590 +step:3669 train loss:3.680829 +step:3670 train loss:3.727577 +step:3671 train loss:3.688841 +step:3672 train loss:3.725835 +step:3673 train loss:3.705954 +step:3674 train loss:3.723754 +step:3675 train loss:3.736638 +step:3676 train loss:3.698710 +step:3677 train loss:3.659857 +step:3678 train loss:3.722838 +step:3679 train loss:3.622987 +step:3680 train loss:3.724004 +step:3681 train loss:3.757127 +step:3682 train loss:3.737015 +step:3683 train loss:3.681922 +step:3684 train loss:3.680218 +step:3685 train loss:3.707691 +step:3686 train loss:3.736465 +step:3687 train loss:3.690696 +step:3688 train loss:3.667086 +step:3689 train loss:3.701042 +step:3690 train loss:3.691306 +step:3691 train loss:3.674249 +step:3692 train loss:3.731908 +step:3693 train loss:3.863509 +step:3694 train loss:3.679386 +step:3695 train loss:3.737338 +step:3696 train loss:3.700205 +step:3697 train loss:3.691738 +step:3698 train loss:3.633090 +step:3699 train loss:3.657440 +step:3700 train loss:3.689405 +step:3701 train loss:3.707816 +step:3702 train loss:3.728218 +step:3703 train loss:3.686294 +step:3704 train loss:3.729462 +step:3705 train loss:3.710640 +step:3706 train loss:3.662863 +step:3707 train loss:3.718515 +step:3708 train loss:3.693834 +step:3709 train loss:3.611626 +step:3710 train loss:3.739466 +step:3711 train loss:3.686919 +step:3712 train loss:3.727873 +step:3713 train loss:3.677760 +step:3714 train loss:3.695955 +step:3715 train loss:3.810535 +step:3716 train loss:3.716952 +step:3717 train loss:3.694508 +step:3718 train loss:3.697028 +step:3719 train loss:3.693280 +step:3720 train loss:3.703865 +step:3721 train loss:3.761448 +step:3722 train loss:3.772008 +step:3723 train loss:3.661189 +step:3724 train loss:3.717349 +step:3725 train loss:3.695624 +step:3726 train loss:3.714902 +step:3727 train loss:3.788563 +step:3728 train loss:3.753381 +step:3729 train loss:3.651887 +step:3730 train loss:3.668587 +step:3731 train loss:3.692050 +step:3732 train loss:3.843176 +step:3733 train loss:3.702381 +step:3734 train loss:3.706224 +step:3735 train loss:3.647404 +step:3736 train loss:3.702502 +step:3737 train loss:3.753514 +step:3738 train loss:3.776624 +step:3739 train loss:3.690569 +step:3740 train loss:3.595965 +step:3741 train loss:3.801814 +step:3742 train loss:3.710174 +step:3743 train loss:3.687187 +step:3744 train loss:3.677890 +step:3745 train loss:3.702924 +step:3746 train loss:3.668569 +step:3747 train loss:3.687154 +step:3748 train loss:3.726937 +step:3749 train loss:3.711891 +step:3750 validation loss:3.627629 +step:3750 train loss:3.718833 +step:3751 train loss:3.810476 +step:3752 
train loss:3.748803 +step:3753 train loss:3.658534 +step:3754 train loss:3.713818 +step:3755 train loss:3.889879 +step:3756 train loss:3.670794 +step:3757 train loss:3.667255 +step:3758 train loss:3.697342 +step:3759 train loss:3.643013 +step:3760 train loss:3.641054 +step:3761 train loss:3.692723 +step:3762 train loss:3.684213 +step:3763 train loss:3.688052 +step:3764 train loss:3.679037 +step:3765 train loss:3.676691 +step:3766 train loss:3.648108 +step:3767 train loss:3.731679 +step:3768 train loss:3.672025 +step:3769 train loss:3.931288 +step:3770 train loss:3.723687 +step:3771 train loss:3.735602 +step:3772 train loss:3.691257 +step:3773 train loss:3.685224 +step:3774 train loss:3.692037 +step:3775 train loss:3.684724 +step:3776 train loss:3.687812 +step:3777 train loss:3.647409 +step:3778 train loss:3.665017 +step:3779 train loss:3.647886 +step:3780 train loss:3.730856 +step:3781 train loss:3.700132 +step:3782 train loss:3.616263 +step:3783 train loss:3.721620 +step:3784 train loss:3.731544 +step:3785 train loss:3.642968 +step:3786 train loss:3.750210 +step:3787 train loss:3.660232 +step:3788 train loss:3.674079 +step:3789 train loss:3.589622 +step:3790 train loss:3.698595 +step:3791 train loss:3.720075 +step:3792 train loss:3.687340 +step:3793 train loss:3.689243 +step:3794 train loss:3.717689 +step:3795 train loss:3.684347 +step:3796 train loss:3.703596 +step:3797 train loss:3.679814 +step:3798 train loss:3.687778 +step:3799 train loss:3.696485 +step:3800 train loss:3.606006 +step:3801 train loss:3.720513 +step:3802 train loss:3.649701 +step:3803 train loss:3.729544 +step:3804 train loss:3.739348 +step:3805 train loss:3.700260 +step:3806 train loss:3.715967 +step:3807 train loss:3.736392 +step:3808 train loss:3.692183 +step:3809 train loss:3.709218 +step:3810 train loss:3.707066 +step:3811 train loss:3.692390 +step:3812 train loss:3.695918 +step:3813 train loss:3.650954 +step:3814 train loss:3.692152 +step:3815 train loss:3.698182 +step:3816 train loss:3.713693 +step:3817 train loss:3.732358 +step:3818 train loss:3.705710 +step:3819 train loss:3.716236 +step:3820 train loss:3.716896 +step:3821 train loss:3.672251 +step:3822 train loss:3.755445 +step:3823 train loss:3.651422 +step:3824 train loss:3.665256 +step:3825 train loss:3.673372 +step:3826 train loss:3.746149 +step:3827 train loss:3.760656 +step:3828 train loss:3.648471 +step:3829 train loss:3.669225 +step:3830 train loss:3.730750 +step:3831 train loss:3.663640 +step:3832 train loss:3.723149 +step:3833 train loss:3.665927 +step:3834 train loss:3.630114 +step:3835 train loss:3.673562 +step:3836 train loss:3.649525 +step:3837 train loss:3.717785 +step:3838 train loss:3.670259 +step:3839 train loss:3.714518 +step:3840 train loss:3.725971 +step:3841 train loss:3.674031 +step:3842 train loss:3.704974 +step:3843 train loss:3.719256 +step:3844 train loss:3.689907 +step:3845 train loss:3.711048 +step:3846 train loss:3.753517 +step:3847 train loss:3.653369 +step:3848 train loss:3.658057 +step:3849 train loss:3.671530 +step:3850 train loss:3.694537 +step:3851 train loss:3.829721 +step:3852 train loss:3.806772 +step:3853 train loss:3.706520 +step:3854 train loss:3.662965 +step:3855 train loss:3.718160 +step:3856 train loss:3.643862 +step:3857 train loss:3.703857 +step:3858 train loss:3.619998 +step:3859 train loss:3.664119 +step:3860 train loss:3.735447 +step:3861 train loss:3.706561 +step:3862 train loss:3.645707 +step:3863 train loss:3.693420 +step:3864 train loss:3.664260 +step:3865 train loss:3.701998 +step:3866 train loss:3.720780 
+step:3867 train loss:3.717851 +step:3868 train loss:3.666775 +step:3869 train loss:3.665000 +step:3870 train loss:3.642451 +step:3871 train loss:3.635377 +step:3872 train loss:3.772910 +step:3873 train loss:3.696373 +step:3874 train loss:3.706864 +step:3875 train loss:3.818890 +step:3876 train loss:3.689651 +step:3877 train loss:3.719142 +step:3878 train loss:3.742645 +step:3879 train loss:3.731394 +step:3880 train loss:3.812240 +step:3881 train loss:3.634896 +step:3882 train loss:3.670375 +step:3883 train loss:3.682540 +step:3884 train loss:3.678051 +step:3885 train loss:3.691307 +step:3886 train loss:3.753639 +step:3887 train loss:3.730706 +step:3888 train loss:3.694225 +step:3889 train loss:3.666881 +step:3890 train loss:3.699996 +step:3891 train loss:3.718229 +step:3892 train loss:3.625489 +step:3893 train loss:3.733033 +step:3894 train loss:3.682427 +step:3895 train loss:3.699056 +step:3896 train loss:3.692044 +step:3897 train loss:3.657256 +step:3898 train loss:3.719094 +step:3899 train loss:3.757939 +step:3900 train loss:3.713913 +step:3901 train loss:3.729127 +step:3902 train loss:3.658070 +step:3903 train loss:3.671596 +step:3904 train loss:3.705162 +step:3905 train loss:3.640976 +step:3906 train loss:3.675560 +step:3907 train loss:3.708252 +step:3908 train loss:3.788248 +step:3909 train loss:3.678153 +step:3910 train loss:3.704899 +step:3911 train loss:3.720219 +step:3912 train loss:3.668749 +step:3913 train loss:3.684206 +step:3914 train loss:3.704390 +step:3915 train loss:3.671606 +step:3916 train loss:3.708169 +step:3917 train loss:3.750964 +step:3918 train loss:3.726449 +step:3919 train loss:3.704683 +step:3920 train loss:3.680454 +step:3921 train loss:3.719561 +step:3922 train loss:3.726557 +step:3923 train loss:3.711977 +step:3924 train loss:3.648292 +step:3925 train loss:3.847806 +step:3926 train loss:3.696251 +step:3927 train loss:3.672458 +step:3928 train loss:3.751946 +step:3929 train loss:3.813838 +step:3930 train loss:3.706383 +step:3931 train loss:3.641507 +step:3932 train loss:3.693254 +step:3933 train loss:3.710564 +step:3934 train loss:3.663450 +step:3935 train loss:3.641001 +step:3936 train loss:3.734690 +step:3937 train loss:3.694340 +step:3938 train loss:3.702053 +step:3939 train loss:3.728893 +step:3940 train loss:3.676560 +step:3941 train loss:3.759938 +step:3942 train loss:3.721058 +step:3943 train loss:3.708333 +step:3944 train loss:3.758737 +step:3945 train loss:3.667803 +step:3946 train loss:3.609690 +step:3947 train loss:3.740562 +step:3948 train loss:3.710465 +step:3949 train loss:3.876479 +step:3950 train loss:3.674105 +step:3951 train loss:3.598765 +step:3952 train loss:3.571032 +step:3953 train loss:3.643205 +step:3954 train loss:3.692683 +step:3955 train loss:3.719068 +step:3956 train loss:3.676754 +step:3957 train loss:3.727927 +step:3958 train loss:3.707929 +step:3959 train loss:3.741200 +step:3960 train loss:3.664053 +step:3961 train loss:3.692124 +step:3962 train loss:3.697679 +step:3963 train loss:3.671403 +step:3964 train loss:3.654745 +step:3965 train loss:3.707917 +step:3966 train loss:3.661525 +step:3967 train loss:3.706968 +step:3968 train loss:3.725005 +step:3969 train loss:3.634051 +step:3970 train loss:3.744952 +step:3971 train loss:3.656982 +step:3972 train loss:3.692719 +step:3973 train loss:3.655992 +step:3974 train loss:3.744263 +step:3975 train loss:3.695013 +step:3976 train loss:3.650203 +step:3977 train loss:3.708670 +step:3978 train loss:3.676574 +step:3979 train loss:3.663177 +step:3980 train loss:3.730870 +step:3981 train 
loss:3.668077 +step:3982 train loss:3.685175 +step:3983 train loss:3.672044 +step:3984 train loss:3.704522 +step:3985 train loss:3.684308 +step:3986 train loss:3.694222 +step:3987 train loss:3.703516 +step:3988 train loss:3.652278 +step:3989 train loss:3.717090 +step:3990 train loss:3.708701 +step:3991 train loss:3.722011 +step:3992 train loss:3.680424 +step:3993 train loss:3.712438 +step:3994 train loss:3.659719 +step:3995 train loss:3.715054 +step:3996 train loss:3.633648 +step:3997 train loss:3.712023 +step:3998 train loss:3.591269 +step:3999 train loss:3.746501 +step:4000 validation loss:3.610452 total_sharp:5.2284e-03 L1_sharp:3.6505e-02 L2_sharp:1.8612e-02 L3_sharp:2.7555e-02 L4_sharp:2.0428e-02 L5_sharp:2.4984e-02 L6_sharp:2.8937e-02 L7_sharp:3.6457e-02 L8_sharp:4.1203e-02 L9_sharp:2.8516e-02 L10_sharp:2.0337e-02 L11_sharp:2.1940e-02 L12_sharp:2.7691e-02 total_fnorm:1.3523e+00 total_l1_linf:8.1072e+03 total_spectral:1.3523e+00 L1_fnorm:6.1223e-02 L2_fnorm:5.8896e-02 L3_fnorm:5.8132e-02 L4_fnorm:5.8003e-02 L5_fnorm:6.0253e-02 L6_fnorm:6.1073e-02 L7_fnorm:6.1160e-02 L8_fnorm:6.1268e-02 L9_fnorm:6.1167e-02 L10_fnorm:6.1130e-02 L11_fnorm:6.1146e-02 L12_fnorm:6.1278e-02 L1_l1linf:2.5407e-01 L2_l1linf:2.9828e-01 L3_l1linf:3.2019e-01 L4_l1linf:3.2477e-01 L5_l1linf:3.1336e-01 L6_l1linf:3.0414e-01 L7_l1linf:2.9543e-01 L8_l1linf:3.2347e-01 L9_l1linf:3.1492e-01 L10_l1linf:3.2919e-01 L11_l1linf:3.5445e-01 L12_l1linf:3.6061e-01 L1_spectral:5.8547e-03 L2_spectral:6.8025e-03 L3_spectral:7.2756e-03 L4_spectral:7.3795e-03 L5_spectral:7.0394e-03 L6_spectral:6.7797e-03 L7_spectral:6.6318e-03 L8_spectral:7.1907e-03 L9_spectral:7.0438e-03 L10_spectral:7.3375e-03 L11_spectral:7.9076e-03 L12_spectral:8.0534e-03 ip_v_neg_g:5.8814e-03 cos_v_neg_g:1.2408e-03 v_norm:1.3523e+00 g_norm:3.5051e+00 hv_norm:8.0044e-01 cos_v_hv:8.8331e-03 hg_norm:1.9072e+02 cos_g_hg:4.3509e-01 v_par:4.1769e-05 v_perp:1.3523e+00 L1_cos_v_neg_g:7.6375e-03 L1_v_norm:6.1223e-02 L2_cos_v_neg_g:7.2490e-03 L2_v_norm:5.8896e-02 L3_cos_v_neg_g:5.8132e-03 L3_v_norm:5.8132e-02 L4_cos_v_neg_g:6.1428e-03 L4_v_norm:5.8003e-02 L5_cos_v_neg_g:6.6936e-03 L5_v_norm:6.0253e-02 L6_cos_v_neg_g:7.8705e-03 L6_v_norm:6.1073e-02 L7_cos_v_neg_g:7.8652e-03 L7_v_norm:6.1160e-02 L8_cos_v_neg_g:9.2456e-03 L8_v_norm:6.1268e-02 L9_cos_v_neg_g:9.1092e-03 L9_v_norm:6.1167e-02 L10_cos_v_neg_g:8.3223e-03 L10_v_norm:6.1130e-02 L11_cos_v_neg_g:6.9890e-03 L11_v_norm:6.1146e-02 L12_cos_v_neg_g:5.4839e-03 L12_v_norm:6.1278e-02 +step:4000 train loss:3.626189 +step:4001 train loss:3.702497 +step:4002 train loss:3.681580 +step:4003 train loss:3.718037 +step:4004 train loss:3.624904 +step:4005 train loss:3.719401 +step:4006 train loss:3.726455 +step:4007 train loss:3.649569 +step:4008 train loss:3.604975 +step:4009 train loss:3.688754 +step:4010 train loss:3.665295 +step:4011 train loss:3.675164 +step:4012 train loss:3.688030 +step:4013 train loss:3.663581 +step:4014 train loss:3.678856 +step:4015 train loss:3.669385 +step:4016 train loss:3.679088 +step:4017 train loss:3.640573 +step:4018 train loss:3.582291 +step:4019 train loss:3.636966 +step:4020 train loss:3.702811 +step:4021 train loss:3.650233 +step:4022 train loss:3.655369 +step:4023 train loss:3.667900 +step:4024 train loss:3.577744 +step:4025 train loss:3.702553 +step:4026 train loss:3.691444 +step:4027 train loss:3.701580 +step:4028 train loss:3.716769 +step:4029 train loss:3.746813 +step:4030 train loss:3.661903 +step:4031 train loss:3.702248 +step:4032 train loss:3.662170 +step:4033 train loss:3.694148 +step:4034 
train loss:3.708398 +step:4035 train loss:3.690256 +step:4036 train loss:3.684958 +step:4037 train loss:3.700521 +step:4038 train loss:3.621669 +step:4039 train loss:3.674365 +step:4040 train loss:3.656288 +step:4041 train loss:3.649129 +step:4042 train loss:3.669554 +step:4043 train loss:3.655507 +step:4044 train loss:3.690559 +step:4045 train loss:3.694801 +step:4046 train loss:3.651029 +step:4047 train loss:3.679640 +step:4048 train loss:3.689276 +step:4049 train loss:3.649847 +step:4050 train loss:3.755208 +step:4051 train loss:3.668554 +step:4052 train loss:3.688233 +step:4053 train loss:3.738175 +step:4054 train loss:3.709028 +step:4055 train loss:3.726476 +step:4056 train loss:3.723398 +step:4057 train loss:3.658353 +step:4058 train loss:3.638396 +step:4059 train loss:3.724027 +step:4060 train loss:3.664975 +step:4061 train loss:3.635629 +step:4062 train loss:3.749550 +step:4063 train loss:3.700400 +step:4064 train loss:3.668337 +step:4065 train loss:3.652408 +step:4066 train loss:3.683478 +step:4067 train loss:3.707807 +step:4068 train loss:3.670902 +step:4069 train loss:3.731014 +step:4070 train loss:3.647854 +step:4071 train loss:3.619954 +step:4072 train loss:3.693675 +step:4073 train loss:3.631541 +step:4074 train loss:3.681131 +step:4075 train loss:3.747951 +step:4076 train loss:3.603850 +step:4077 train loss:3.683455 +step:4078 train loss:3.781223 +step:4079 train loss:3.725862 +step:4080 train loss:3.669241 +step:4081 train loss:3.639337 +step:4082 train loss:3.693205 +step:4083 train loss:3.630104 +step:4084 train loss:3.649662 +step:4085 train loss:3.887352 +step:4086 train loss:3.651912 +step:4087 train loss:3.695369 +step:4088 train loss:3.680694 +step:4089 train loss:3.669949 +step:4090 train loss:3.690395 +step:4091 train loss:3.714844 +step:4092 train loss:3.635655 +step:4093 train loss:3.664685 +step:4094 train loss:3.685606 +step:4095 train loss:3.640914 +step:4096 train loss:3.674631 +step:4097 train loss:3.676592 +step:4098 train loss:3.649660 +step:4099 train loss:3.649177 +step:4100 train loss:3.704481 +step:4101 train loss:3.627202 +step:4102 train loss:3.663510 +step:4103 train loss:3.867832 +step:4104 train loss:3.682031 +step:4105 train loss:3.650073 +step:4106 train loss:3.718808 +step:4107 train loss:3.642704 +step:4108 train loss:3.643405 +step:4109 train loss:3.698271 +step:4110 train loss:3.706995 +step:4111 train loss:3.682739 +step:4112 train loss:3.701480 +step:4113 train loss:3.660957 +step:4114 train loss:3.609272 +step:4115 train loss:3.647155 +step:4116 train loss:3.633781 +step:4117 train loss:3.649161 +step:4118 train loss:3.701294 +step:4119 train loss:3.725806 +step:4120 train loss:3.648215 +step:4121 train loss:3.640712 +step:4122 train loss:3.706540 +step:4123 train loss:3.721799 +step:4124 train loss:3.696950 +step:4125 train loss:3.735160 +step:4126 train loss:3.668263 +step:4127 train loss:3.688523 +step:4128 train loss:3.679248 +step:4129 train loss:3.730045 +step:4130 train loss:3.656828 +step:4131 train loss:3.691844 +step:4132 train loss:3.708471 +step:4133 train loss:3.661255 +step:4134 train loss:3.714588 +step:4135 train loss:3.648796 +step:4136 train loss:3.667550 +step:4137 train loss:3.644078 +step:4138 train loss:3.647945 +step:4139 train loss:3.694757 +step:4140 train loss:3.652306 +step:4141 train loss:3.616627 +step:4142 train loss:3.662739 +step:4143 train loss:3.699025 +step:4144 train loss:3.652730 +step:4145 train loss:3.618447 +step:4146 train loss:3.686645 +step:4147 train loss:3.663759 +step:4148 train loss:3.656627 
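The per-step train losses are noisy (isolated spikes above 3.85 still appear past step 4000), so a running average is easier to read when eyeballing the trend. A small smoothing sketch, assuming one step:N train loss:X record per line as in the raw log file; the 250-step window is an arbitrary choice for illustration, not something the run itself uses:

    import re
    from collections import deque

    def smoothed_train_loss(lines, window=250):
        # Hypothetical post-processing helper: running mean of the per-step train losses.
        pat = re.compile(r'step:(\d+) train loss:([\d.]+)')
        buf, out = deque(maxlen=window), []
        for line in lines:
            m = pat.search(line)
            if m:  # validation-only lines carry no "train loss" and are skipped
                buf.append(float(m.group(2)))
                out.append((int(m.group(1)), sum(buf) / len(buf)))
        return out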
+step:4149 train loss:3.735075 +step:4150 train loss:3.696988 +step:4151 train loss:3.682073 +step:4152 train loss:3.703409 +step:4153 train loss:3.709840 +step:4154 train loss:3.717522 +step:4155 train loss:3.741678 +step:4156 train loss:3.613319 +step:4157 train loss:3.635849 +step:4158 train loss:3.696702 +step:4159 train loss:3.594662 +step:4160 train loss:3.686903 +step:4161 train loss:3.688456 +step:4162 train loss:3.594184 +step:4163 train loss:3.680266 +step:4164 train loss:3.626498 +step:4165 train loss:3.624826 +step:4166 train loss:3.691598 +step:4167 train loss:3.689230 +step:4168 train loss:3.680507 +step:4169 train loss:3.707882 +step:4170 train loss:3.826063 +step:4171 train loss:3.675143 +step:4172 train loss:3.695181 +step:4173 train loss:3.692542 +step:4174 train loss:3.653564 +step:4175 train loss:3.745040 +step:4176 train loss:3.668000 +step:4177 train loss:3.690919 +step:4178 train loss:3.669602 +step:4179 train loss:3.625330 +step:4180 train loss:3.622431 +step:4181 train loss:3.672063 +step:4182 train loss:3.656989 +step:4183 train loss:3.592264 +step:4184 train loss:3.664133 +step:4185 train loss:3.732731 +step:4186 train loss:3.707614 +step:4187 train loss:3.715276 +step:4188 train loss:3.689047 +step:4189 train loss:3.649090 +step:4190 train loss:3.693680 +step:4191 train loss:3.640261 +step:4192 train loss:3.729002 +step:4193 train loss:3.637990 +step:4194 train loss:3.620829 +step:4195 train loss:3.617082 +step:4196 train loss:3.687272 +step:4197 train loss:3.698541 +step:4198 train loss:3.622971 +step:4199 train loss:3.706203 +step:4200 train loss:3.667604 +step:4201 train loss:3.647549 +step:4202 train loss:3.665236 +step:4203 train loss:3.673283 +step:4204 train loss:3.665837 +step:4205 train loss:3.678531 +step:4206 train loss:3.698567 +step:4207 train loss:3.697635 +step:4208 train loss:3.663392 +step:4209 train loss:3.727719 +step:4210 train loss:3.756697 +step:4211 train loss:3.638404 +step:4212 train loss:3.679698 +step:4213 train loss:3.632089 +step:4214 train loss:3.638947 +step:4215 train loss:3.652437 +step:4216 train loss:3.628238 +step:4217 train loss:3.650417 +step:4218 train loss:3.692795 +step:4219 train loss:3.691226 +step:4220 train loss:3.768702 +step:4221 train loss:3.654905 +step:4222 train loss:3.717847 +step:4223 train loss:3.636595 +step:4224 train loss:3.712248 +step:4225 train loss:3.638833 +step:4226 train loss:3.695824 +step:4227 train loss:3.671592 +step:4228 train loss:3.644513 +step:4229 train loss:3.655631 +step:4230 train loss:3.637985 +step:4231 train loss:3.626359 +step:4232 train loss:3.675499 +step:4233 train loss:3.584551 +step:4234 train loss:3.666860 +step:4235 train loss:3.743679 +step:4236 train loss:3.711316 +step:4237 train loss:3.694792 +step:4238 train loss:3.704171 +step:4239 train loss:3.756421 +step:4240 train loss:3.662874 +step:4241 train loss:3.590860 +step:4242 train loss:3.708862 +step:4243 train loss:3.708436 +step:4244 train loss:3.723083 +step:4245 train loss:3.777292 +step:4246 train loss:3.651908 +step:4247 train loss:3.709423 +step:4248 train loss:3.660013 +step:4249 train loss:3.667536 +step:4250 validation loss:3.593781 +step:4250 train loss:3.648552 +step:4251 train loss:3.744478 +step:4252 train loss:3.653866 +step:4253 train loss:3.644848 +step:4254 train loss:3.655726 +step:4255 train loss:3.637651 +step:4256 train loss:3.654550 +step:4257 train loss:3.711743 +step:4258 train loss:3.571581 +step:4259 train loss:3.636167 +step:4260 train loss:3.703727 +step:4261 train loss:3.687185 +step:4262 
train loss:3.830272 +step:4263 train loss:3.758080 +step:4264 train loss:3.698820 +step:4265 train loss:3.690402 +step:4266 train loss:3.687879 +step:4267 train loss:3.686600 +step:4268 train loss:3.634080 +step:4269 train loss:3.727602 +step:4270 train loss:3.707210 +step:4271 train loss:3.623782 +step:4272 train loss:3.677126 +step:4273 train loss:3.654386 +step:4274 train loss:3.639727 +step:4275 train loss:3.661357 +step:4276 train loss:3.627624 +step:4277 train loss:3.762610 +step:4278 train loss:3.611187 +step:4279 train loss:3.642011 +step:4280 train loss:3.723559 +step:4281 train loss:3.707308 +step:4282 train loss:3.771448 +step:4283 train loss:3.627681 +step:4284 train loss:3.654569 +step:4285 train loss:3.659447 +step:4286 train loss:3.723938 +step:4287 train loss:3.722881 +step:4288 train loss:3.700773 +step:4289 train loss:3.655255 +step:4290 train loss:3.664392 +step:4291 train loss:3.623796 +step:4292 train loss:3.666373 +step:4293 train loss:3.677764 +step:4294 train loss:3.666051 +step:4295 train loss:3.600666 +step:4296 train loss:3.673967 +step:4297 train loss:3.652959 +step:4298 train loss:3.666610 +step:4299 train loss:3.663464 +step:4300 train loss:3.778726 +step:4301 train loss:3.596026 +step:4302 train loss:3.735230 +step:4303 train loss:3.614741 +step:4304 train loss:3.618900 +step:4305 train loss:3.639640 +step:4306 train loss:3.716618 +step:4307 train loss:3.629563 +step:4308 train loss:3.628987 +step:4309 train loss:3.700240 +step:4310 train loss:3.634920 +step:4311 train loss:3.692295 +step:4312 train loss:3.685980 +step:4313 train loss:3.677257 +step:4314 train loss:3.625901 +step:4315 train loss:3.657475 +step:4316 train loss:3.605045 +step:4317 train loss:3.661290 +step:4318 train loss:3.700391 +step:4319 train loss:3.651344 +step:4320 train loss:3.710589 +step:4321 train loss:3.694214 +step:4322 train loss:3.649393 +step:4323 train loss:3.586666 +step:4324 train loss:3.679730 +step:4325 train loss:3.654074 +step:4326 train loss:3.645260 +step:4327 train loss:3.754241 +step:4328 train loss:3.662603 +step:4329 train loss:3.618800 +step:4330 train loss:3.664564 +step:4331 train loss:3.678047 +step:4332 train loss:3.706420 +step:4333 train loss:3.667741 +step:4334 train loss:3.682495 +step:4335 train loss:3.680682 +step:4336 train loss:3.693521 +step:4337 train loss:3.657578 +step:4338 train loss:3.781550 +step:4339 train loss:3.681184 +step:4340 train loss:3.687488 +step:4341 train loss:3.655360 +step:4342 train loss:3.670064 +step:4343 train loss:3.789060 +step:4344 train loss:3.680088 +step:4345 train loss:3.696191 +step:4346 train loss:3.712343 +step:4347 train loss:3.719782 +step:4348 train loss:3.630955 +step:4349 train loss:3.717881 +step:4350 train loss:3.654269 +step:4351 train loss:3.610301 +step:4352 train loss:3.684961 +step:4353 train loss:3.632221 +step:4354 train loss:3.686031 +step:4355 train loss:3.650518 +step:4356 train loss:3.673288 +step:4357 train loss:3.655806 +step:4358 train loss:3.750332 +step:4359 train loss:3.700046 +step:4360 train loss:3.615100 +step:4361 train loss:3.662678 +step:4362 train loss:3.682321 +step:4363 train loss:3.700034 +step:4364 train loss:3.666805 +step:4365 train loss:3.647950 +step:4366 train loss:3.695178 +step:4367 train loss:3.710921 +step:4368 train loss:3.683713 +step:4369 train loss:3.553735 +step:4370 train loss:3.682457 +step:4371 train loss:3.595356 +step:4372 train loss:3.743008 +step:4373 train loss:3.681396 +step:4374 train loss:3.648479 +step:4375 train loss:3.695168 +step:4376 train loss:3.704520 
+step:4377 train loss:3.640122 +step:4378 train loss:3.650496 +step:4379 train loss:3.732163 +step:4380 train loss:3.713631 +step:4381 train loss:3.614880 +step:4382 train loss:3.661703 +step:4383 train loss:3.690007 +step:4384 train loss:3.688588 +step:4385 train loss:3.612592 +step:4386 train loss:3.669130 +step:4387 train loss:3.639438 +step:4388 train loss:3.657023 +step:4389 train loss:3.686957 +step:4390 train loss:3.726789 +step:4391 train loss:3.652804 +step:4392 train loss:3.726638 +step:4393 train loss:3.687220 +step:4394 train loss:3.622746 +step:4395 train loss:3.680778 +step:4396 train loss:3.654742 +step:4397 train loss:3.697444 +step:4398 train loss:3.646150 +step:4399 train loss:3.639230 +step:4400 train loss:3.642915 +step:4401 train loss:3.701850 +step:4402 train loss:3.699990 +step:4403 train loss:3.651321 +step:4404 train loss:3.683160 +step:4405 train loss:3.604315 +step:4406 train loss:3.682395 +step:4407 train loss:3.617781 +step:4408 train loss:3.713876 +step:4409 train loss:3.670582 +step:4410 train loss:3.675117 +step:4411 train loss:3.636397 +step:4412 train loss:3.749573 +step:4413 train loss:3.646711 +step:4414 train loss:3.654741 +step:4415 train loss:3.638917 +step:4416 train loss:3.634009 +step:4417 train loss:3.625244 +step:4418 train loss:3.698875 +step:4419 train loss:3.669363 +step:4420 train loss:3.676751 +step:4421 train loss:3.703131 +step:4422 train loss:3.718285 +step:4423 train loss:3.677188 +step:4424 train loss:3.662082 +step:4425 train loss:3.624762 +step:4426 train loss:3.700463 +step:4427 train loss:3.661782 +step:4428 train loss:3.598780 +step:4429 train loss:3.661128 +step:4430 train loss:3.699230 +step:4431 train loss:3.692340 +step:4432 train loss:3.599060 +step:4433 train loss:3.653115 +step:4434 train loss:3.652162 +step:4435 train loss:3.680378 +step:4436 train loss:3.616649 +step:4437 train loss:3.693733 +step:4438 train loss:3.663391 +step:4439 train loss:3.667651 +step:4440 train loss:3.666854 +step:4441 train loss:3.668616 +step:4442 train loss:3.719604 +step:4443 train loss:3.651412 +step:4444 train loss:3.735948 +step:4445 train loss:3.700231 +step:4446 train loss:3.631267 +step:4447 train loss:3.680011 +step:4448 train loss:3.699434 +step:4449 train loss:3.636203 +step:4450 train loss:3.654480 +step:4451 train loss:3.708676 +step:4452 train loss:3.761886 +step:4453 train loss:3.695361 +step:4454 train loss:3.664972 +step:4455 train loss:3.713520 +step:4456 train loss:3.656776 +step:4457 train loss:3.658327 +step:4458 train loss:3.670061 +step:4459 train loss:3.702596 +step:4460 train loss:3.614362 +step:4461 train loss:3.586756 +step:4462 train loss:3.642029 +step:4463 train loss:3.662170 +step:4464 train loss:3.633575 +step:4465 train loss:3.665575 +step:4466 train loss:3.763288 +step:4467 train loss:3.642624 +step:4468 train loss:3.636653 +step:4469 train loss:3.629322 +step:4470 train loss:3.604512 +step:4471 train loss:3.668617 +step:4472 train loss:3.590017 +step:4473 train loss:3.676445 +step:4474 train loss:3.701936 +step:4475 train loss:3.666885 +step:4476 train loss:3.625036 +step:4477 train loss:3.609076 +step:4478 train loss:3.670547 +step:4479 train loss:3.771681 +step:4480 train loss:3.606255 +step:4481 train loss:3.679079 +step:4482 train loss:3.638645 +step:4483 train loss:3.635339 +step:4484 train loss:3.679592 +step:4485 train loss:3.642857 +step:4486 train loss:3.742141 +step:4487 train loss:3.638435 +step:4488 train loss:3.634599 +step:4489 train loss:3.593321 +step:4490 train loss:3.676864 +step:4491 train 
loss:3.626166 +step:4492 train loss:3.660209 +step:4493 train loss:3.644909 +step:4494 train loss:3.640418 +step:4495 train loss:3.706995 +step:4496 train loss:3.648253 +step:4497 train loss:3.730761 +step:4498 train loss:3.623533 +step:4499 train loss:3.674021 +step:4500 validation loss:3.577666 total_sharp:6.2142e-03 L1_sharp:6.4510e-02 L2_sharp:3.7313e-02 L3_sharp:5.4692e-02 L4_sharp:2.6908e-02 L5_sharp:3.2771e-02 L6_sharp:3.7361e-02 L7_sharp:4.0588e-02 L8_sharp:3.6819e-02 L9_sharp:2.7807e-02 L10_sharp:2.2437e-02 L11_sharp:2.0503e-02 L12_sharp:2.4921e-02 total_fnorm:1.3596e+00 total_l1_linf:8.1454e+03 total_spectral:1.3596e+00 L1_fnorm:6.1519e-02 L2_fnorm:5.8704e-02 L3_fnorm:5.7683e-02 L4_fnorm:5.9808e-02 L5_fnorm:6.0824e-02 L6_fnorm:6.1402e-02 L7_fnorm:6.1350e-02 L8_fnorm:6.1320e-02 L9_fnorm:6.1282e-02 L10_fnorm:6.1328e-02 L11_fnorm:6.1329e-02 L12_fnorm:6.1234e-02 L1_l1linf:3.2376e-01 L2_l1linf:3.6873e-01 L3_l1linf:3.5395e-01 L4_l1linf:3.7396e-01 L5_l1linf:3.8215e-01 L6_l1linf:3.4966e-01 L7_l1linf:3.4455e-01 L8_l1linf:3.2944e-01 L9_l1linf:3.4505e-01 L10_l1linf:3.6169e-01 L11_l1linf:3.8419e-01 L12_l1linf:3.4695e-01 L1_spectral:7.2507e-03 L2_spectral:8.2420e-03 L3_spectral:7.9559e-03 L4_spectral:8.3918e-03 L5_spectral:8.4847e-03 L6_spectral:7.7948e-03 L7_spectral:7.6931e-03 L8_spectral:7.3378e-03 L9_spectral:7.7721e-03 L10_spectral:8.1171e-03 L11_spectral:8.6259e-03 L12_spectral:7.9175e-03 ip_v_neg_g:5.5499e-03 cos_v_neg_g:1.1809e-03 v_norm:1.3596e+00 g_norm:3.4567e+00 hv_norm:9.3301e-01 cos_v_hv:9.0552e-03 hg_norm:2.3784e+02 cos_g_hg:4.3767e-01 v_par:4.1031e-05 v_perp:1.3596e+00 L1_cos_v_neg_g:9.1837e-03 L1_v_norm:6.1519e-02 L2_cos_v_neg_g:7.5330e-03 L2_v_norm:5.8704e-02 L3_cos_v_neg_g:5.5311e-03 L3_v_norm:5.7683e-02 L4_cos_v_neg_g:6.1926e-03 L4_v_norm:5.9808e-02 L5_cos_v_neg_g:7.4034e-03 L5_v_norm:6.0824e-02 L6_cos_v_neg_g:7.1815e-03 L6_v_norm:6.1402e-02 L7_cos_v_neg_g:6.4986e-03 L7_v_norm:6.1350e-02 L8_cos_v_neg_g:7.2216e-03 L8_v_norm:6.1320e-02 L9_cos_v_neg_g:6.8327e-03 L9_v_norm:6.1282e-02 L10_cos_v_neg_g:6.7272e-03 L10_v_norm:6.1328e-02 L11_cos_v_neg_g:6.8062e-03 L11_v_norm:6.1329e-02 L12_cos_v_neg_g:5.8708e-03 L12_v_norm:6.1234e-02 +step:4500 train loss:3.581446 +step:4501 train loss:3.643768 +step:4502 train loss:3.766850 +step:4503 train loss:3.668420 +step:4504 train loss:3.680815 +step:4505 train loss:3.662577 +step:4506 train loss:3.634769 +step:4507 train loss:3.709837 +step:4508 train loss:3.645293 +step:4509 train loss:3.643655 +step:4510 train loss:3.677385 +step:4511 train loss:3.632947 +step:4512 train loss:3.653929 +step:4513 train loss:3.714253 +step:4514 train loss:3.618624 +step:4515 train loss:3.734153 +step:4516 train loss:3.707900 +step:4517 train loss:3.662102 +step:4518 train loss:3.603706 +step:4519 train loss:3.638641 +step:4520 train loss:3.650569 +step:4521 train loss:3.590662 +step:4522 train loss:3.646648 +step:4523 train loss:3.693369 +step:4524 train loss:3.675852 +step:4525 train loss:3.599016 +step:4526 train loss:3.639702 +step:4527 train loss:3.626120 +step:4528 train loss:3.657082 +step:4529 train loss:3.655043 +step:4530 train loss:3.748225 +step:4531 train loss:3.638685 +step:4532 train loss:3.659719 +step:4533 train loss:3.634594 +step:4534 train loss:3.727498 +step:4535 train loss:3.625625 +step:4536 train loss:3.696791 +step:4537 train loss:3.680630 +step:4538 train loss:3.658262 +step:4539 train loss:3.679332 +step:4540 train loss:3.655544 +step:4541 train loss:3.623768 +step:4542 train loss:3.672702 +step:4543 train loss:3.758698 +step:4544 
train loss:3.701109 +step:4545 train loss:3.642572 +step:4546 train loss:3.735195 +step:4547 train loss:3.693431 +step:4548 train loss:3.698618 +step:4549 train loss:3.652467 +step:4550 train loss:3.622087 +step:4551 train loss:3.639330 +step:4552 train loss:3.641065 +step:4553 train loss:3.723990 +step:4554 train loss:3.617735 +step:4555 train loss:3.729763 +step:4556 train loss:3.665528 +step:4557 train loss:3.595337 +step:4558 train loss:3.680019 +step:4559 train loss:3.691300 +step:4560 train loss:3.628582 +step:4561 train loss:3.614962 +step:4562 train loss:3.658221 +step:4563 train loss:3.609996 +step:4564 train loss:3.635559 +step:4565 train loss:3.635128 +step:4566 train loss:3.609846 +step:4567 train loss:3.634849 +step:4568 train loss:3.635176 +step:4569 train loss:3.621072 +step:4570 train loss:3.669951 +step:4571 train loss:3.647880 +step:4572 train loss:3.641397 +step:4573 train loss:3.649570 +step:4574 train loss:3.795771 +step:4575 train loss:3.627774 +step:4576 train loss:3.616561 +step:4577 train loss:3.656314 +step:4578 train loss:3.696268 +step:4579 train loss:3.648249 +step:4580 train loss:3.708958 +step:4581 train loss:3.646258 +step:4582 train loss:3.641529 +step:4583 train loss:3.648173 +step:4584 train loss:3.619665 +step:4585 train loss:3.697363 +step:4586 train loss:3.687404 +step:4587 train loss:3.586697 +step:4588 train loss:3.629858 +step:4589 train loss:3.706328 +step:4590 train loss:3.674088 +step:4591 train loss:3.614435 +step:4592 train loss:3.699626 +step:4593 train loss:3.618066 +step:4594 train loss:3.648397 +step:4595 train loss:3.671831 +step:4596 train loss:3.612927 +step:4597 train loss:3.748248 +step:4598 train loss:3.668658 +step:4599 train loss:3.619418 +step:4600 train loss:3.627340 +step:4601 train loss:3.649505 +step:4602 train loss:3.602118 +step:4603 train loss:3.614497 +step:4604 train loss:3.722720 +step:4605 train loss:3.640421 +step:4606 train loss:3.667267 +step:4607 train loss:3.647611 +step:4608 train loss:3.682673 +step:4609 train loss:3.641604 +step:4610 train loss:3.685761 +step:4611 train loss:3.712245 +step:4612 train loss:3.708186 +step:4613 train loss:3.688558 +step:4614 train loss:3.682634 +step:4615 train loss:3.623012 +step:4616 train loss:3.606233 +step:4617 train loss:3.650190 +step:4618 train loss:3.666788 +step:4619 train loss:3.628249 +step:4620 train loss:3.642655 +step:4621 train loss:3.646849 +step:4622 train loss:3.581782 +step:4623 train loss:3.690129 +step:4624 train loss:3.674982 +step:4625 train loss:3.632254 +step:4626 train loss:3.674526 +step:4627 train loss:3.645316 +step:4628 train loss:3.631834 +step:4629 train loss:3.669114 +step:4630 train loss:3.725912 +step:4631 train loss:3.728159 +step:4632 train loss:3.623127 +step:4633 train loss:3.635894 +step:4634 train loss:3.709524 +step:4635 train loss:3.674760 +step:4636 train loss:3.689691 +step:4637 train loss:3.626930 +step:4638 train loss:3.632494 +step:4639 train loss:3.630022 +step:4640 train loss:3.638582 +step:4641 train loss:3.644565 +step:4642 train loss:3.677141 +step:4643 train loss:3.639349 +step:4644 train loss:3.662945 +step:4645 train loss:3.676183 +step:4646 train loss:3.633503 +step:4647 train loss:3.591250 +step:4648 train loss:3.697167 +step:4649 train loss:3.711062 +step:4650 train loss:3.656624 +step:4651 train loss:3.658154 +step:4652 train loss:3.647362 +step:4653 train loss:3.705026 +step:4654 train loss:3.700448 +step:4655 train loss:3.603563 +step:4656 train loss:3.636603 +step:4657 train loss:3.691278 +step:4658 train loss:3.647235 
+step:4659 train loss:3.660154 +step:4660 train loss:3.703782 +step:4661 train loss:3.620678 +step:4662 train loss:3.632122 +step:4663 train loss:3.638873 +step:4664 train loss:3.699256 +step:4665 train loss:3.695888 +step:4666 train loss:3.690644 +step:4667 train loss:3.683385 +step:4668 train loss:3.645483 +step:4669 train loss:3.657761 +step:4670 train loss:3.688615 +step:4671 train loss:3.689948 +step:4672 train loss:3.560805 +step:4673 train loss:3.597097 +step:4674 train loss:3.724520 +step:4675 train loss:3.629885 +step:4676 train loss:3.591924 +step:4677 train loss:3.594200 +step:4678 train loss:3.567134 +step:4679 train loss:3.667015 +step:4680 train loss:3.607585 +step:4681 train loss:3.656953 +step:4682 train loss:3.607130 +step:4683 train loss:3.576358 +step:4684 train loss:3.691010 +step:4685 train loss:3.631474 +step:4686 train loss:3.641071 +step:4687 train loss:3.678031 +step:4688 train loss:3.607233 +step:4689 train loss:3.683128 +step:4690 train loss:3.624352 +step:4691 train loss:3.660873 +step:4692 train loss:3.587892 +step:4693 train loss:3.626876 +step:4694 train loss:3.668133 +step:4695 train loss:3.688018 +step:4696 train loss:3.675277 +step:4697 train loss:3.586933 +step:4698 train loss:3.606415 +step:4699 train loss:3.655922 +step:4700 train loss:3.624774 +step:4701 train loss:3.634800 +step:4702 train loss:3.588149 +step:4703 train loss:3.669956 +step:4704 train loss:3.658169 +step:4705 train loss:3.600636 +step:4706 train loss:3.609189 +step:4707 train loss:3.596887 +step:4708 train loss:3.663722 +step:4709 train loss:3.608357 +step:4710 train loss:3.625434 +step:4711 train loss:3.685100 +step:4712 train loss:3.581046 +step:4713 train loss:3.688982 +step:4714 train loss:3.586488 +step:4715 train loss:3.678824 +step:4716 train loss:3.645613 +step:4717 train loss:3.577271 +step:4718 train loss:3.667310 +step:4719 train loss:3.594221 +step:4720 train loss:3.691427 +step:4721 train loss:3.647610 +step:4722 train loss:3.702216 +step:4723 train loss:3.598289 +step:4724 train loss:3.646414 +step:4725 train loss:3.583836 +step:4726 train loss:3.629788 +step:4727 train loss:3.635383 +step:4728 train loss:3.642600 +step:4729 train loss:3.672379 +step:4730 train loss:3.570659 +step:4731 train loss:3.633008 +step:4732 train loss:3.586257 +step:4733 train loss:3.521368 +step:4734 train loss:3.657856 +step:4735 train loss:3.610188 +step:4736 train loss:3.651634 +step:4737 train loss:3.533014 +step:4738 train loss:3.679432 +step:4739 train loss:3.558686 +step:4740 train loss:3.667855 +step:4741 train loss:3.636436 +step:4742 train loss:3.598486 +step:4743 train loss:3.592849 +step:4744 train loss:3.638766 +step:4745 train loss:3.659467 +step:4746 train loss:3.697159 +step:4747 train loss:3.659709 +step:4748 train loss:3.559412 +step:4749 train loss:3.626252 +step:4750 validation loss:3.566222 +step:4750 train loss:3.572009 +step:4751 train loss:3.667769 +step:4752 train loss:3.599935 +step:4753 train loss:3.707758 +step:4754 train loss:3.575318 +step:4755 train loss:3.619488 +step:4756 train loss:3.690686 +step:4757 train loss:3.616791 +step:4758 train loss:3.632175 +step:4759 train loss:3.633028 +step:4760 train loss:3.661799 +step:4761 train loss:3.581703 +step:4762 train loss:3.613688 +step:4763 train loss:3.635658 +step:4764 train loss:3.696704 +step:4765 train loss:3.589716 +step:4766 train loss:3.611272 +step:4767 train loss:3.564311 +step:4768 train loss:3.622288 +step:4769 train loss:3.648154 +step:4770 train loss:3.606853 +step:4771 train loss:3.618850 +step:4772 
train loss:3.592294 +step:4773 train loss:3.626379 +step:4774 train loss:3.569221 +step:4775 train loss:3.702529 +step:4776 train loss:3.566331 +step:4777 train loss:3.642005 +step:4778 train loss:3.581190 +step:4779 train loss:3.629663 +step:4780 train loss:3.565103 +step:4781 train loss:3.575266 +step:4782 train loss:3.679754 +step:4783 train loss:3.669612 +step:4784 train loss:3.629451 +step:4785 train loss:3.627248 +step:4786 train loss:3.738695 +step:4787 train loss:3.571647 +step:4788 train loss:3.593328 +step:4789 train loss:3.619224 +step:4790 train loss:3.669326 +step:4791 train loss:3.636610 +step:4792 train loss:3.679165 +step:4793 train loss:3.595080 +step:4794 train loss:3.670861 +step:4795 train loss:3.617193 +step:4796 train loss:3.608799 +step:4797 train loss:3.614163 +step:4798 train loss:3.623334 +step:4799 train loss:3.620722 +step:4800 train loss:3.650332 +step:4801 train loss:3.642201 +step:4802 train loss:3.679692 +step:4803 train loss:3.662100 +step:4804 train loss:3.621452 +step:4805 train loss:3.614942 +step:4806 train loss:3.594059 +step:4807 train loss:3.702583 +step:4808 train loss:3.573176 +step:4809 train loss:3.677052 +step:4810 train loss:3.617199 +step:4811 train loss:3.636300 +step:4812 train loss:3.611300 +step:4813 train loss:3.568007 +step:4814 train loss:3.561311 +step:4815 train loss:3.554772 +step:4816 train loss:3.620810 +step:4817 train loss:3.558051 +step:4818 train loss:3.626190 +step:4819 train loss:3.618841 +step:4820 train loss:3.874398 +step:4821 train loss:3.645189 +step:4822 train loss:3.655538 +step:4823 train loss:3.586767 +step:4824 train loss:3.594881 +step:4825 train loss:3.575339 +step:4826 train loss:3.662702 +step:4827 train loss:3.610133 +step:4828 train loss:3.552806 +step:4829 train loss:3.655154 +step:4830 train loss:3.597922 +step:4831 train loss:3.744776 +step:4832 train loss:3.614652 +step:4833 train loss:3.650425 +step:4834 train loss:3.554812 +step:4835 train loss:3.643241 +step:4836 train loss:3.623670 +step:4837 train loss:3.652723 +step:4838 train loss:3.591290 +step:4839 train loss:3.660448 +step:4840 train loss:3.564312 +step:4841 train loss:3.660871 +step:4842 train loss:3.575962 +step:4843 train loss:3.654893 +step:4844 train loss:3.655781 +step:4845 train loss:3.593382 +step:4846 train loss:3.607678 +step:4847 train loss:3.593403 +step:4848 train loss:3.615890 +step:4849 train loss:3.571707 +step:4850 train loss:3.577743 +step:4851 train loss:3.574612 +step:4852 train loss:3.653162 +step:4853 train loss:3.628744 +step:4854 train loss:3.606031 +step:4855 train loss:3.670080 +step:4856 train loss:3.639839 +step:4857 train loss:3.647048 +step:4858 train loss:3.730575 +step:4859 train loss:3.576805 +step:4860 train loss:3.652160 +step:4861 train loss:3.622730 +step:4862 train loss:3.657524 +step:4863 train loss:3.593116 +step:4864 train loss:3.603886 +step:4865 train loss:3.597509 +step:4866 train loss:3.643866 +step:4867 train loss:3.610687 +step:4868 train loss:3.628456 +step:4869 train loss:3.576873 +step:4870 train loss:3.608639 +step:4871 train loss:3.693133 +step:4872 train loss:3.637940 +step:4873 train loss:3.634594 +step:4874 train loss:3.607144 +step:4875 train loss:3.573087 +step:4876 train loss:3.584798 +step:4877 train loss:3.586759 +step:4878 train loss:3.627132 +step:4879 train loss:3.586664 +step:4880 train loss:3.613763 +step:4881 train loss:3.558183 +step:4882 train loss:3.762681 +step:4883 train loss:3.572749 +step:4884 train loss:3.602643 +step:4885 train loss:3.574870 +step:4886 train loss:3.653743 
+step:4887 train loss:3.605132 +step:4888 train loss:3.615385 +step:4889 train loss:3.605179 +step:4890 train loss:3.649159 +step:4891 train loss:3.585754 +step:4892 train loss:3.593071 +step:4893 train loss:3.638081 +step:4894 train loss:3.574759 +step:4895 train loss:3.604033 +step:4896 train loss:3.588920 +step:4897 train loss:3.661988 +step:4898 train loss:3.611909 +step:4899 train loss:3.594052 +step:4900 train loss:3.641620 +step:4901 train loss:3.587654 +step:4902 train loss:3.583803 +step:4903 train loss:3.603695 +step:4904 train loss:3.617504 +step:4905 train loss:3.613987 +step:4906 train loss:3.615304 +step:4907 train loss:3.687474 +step:4908 train loss:3.593514 +step:4909 train loss:3.599509 +step:4910 train loss:3.622410 +step:4911 train loss:3.674517 +step:4912 train loss:3.649011 +step:4913 train loss:3.626817 +step:4914 train loss:3.618966 +step:4915 train loss:3.599400 +step:4916 train loss:3.540650 +step:4917 train loss:3.565619 +step:4918 train loss:3.599703 +step:4919 train loss:3.586233 +step:4920 train loss:3.589266 +step:4921 train loss:3.750525 +step:4922 train loss:3.644158 +step:4923 train loss:3.661688 +step:4924 train loss:3.660831 +step:4925 train loss:3.596811 +step:4926 train loss:3.587664 +step:4927 train loss:3.616873 +step:4928 train loss:3.656938 +step:4929 train loss:3.610976 +step:4930 train loss:3.593328 +step:4931 train loss:3.584689 +step:4932 train loss:3.596251 +step:4933 train loss:3.587334 +step:4934 train loss:3.654736 +step:4935 train loss:3.641995 +step:4936 train loss:3.604610 +step:4937 train loss:3.714848 +step:4938 train loss:3.700154 +step:4939 train loss:3.566316 +step:4940 train loss:3.645130 +step:4941 train loss:3.549277 +step:4942 train loss:3.587858 +step:4943 train loss:3.593510 +step:4944 train loss:3.590853 +step:4945 train loss:3.640497 +step:4946 train loss:3.610700 +step:4947 train loss:3.598095 +step:4948 train loss:3.633150 +step:4949 train loss:3.540739 +step:4950 train loss:3.619328 +step:4951 train loss:3.672030 +step:4952 train loss:3.612473 +step:4953 train loss:3.644364 +step:4954 train loss:3.548705 +step:4955 train loss:3.623476 +step:4956 train loss:3.653030 +step:4957 train loss:3.649589 +step:4958 train loss:3.562218 +step:4959 train loss:3.679861 +step:4960 train loss:3.606634 +step:4961 train loss:3.625262 +step:4962 train loss:3.588657 +step:4963 train loss:3.634350 +step:4964 train loss:3.584154 +step:4965 train loss:3.736549 +step:4966 train loss:3.584740 +step:4967 train loss:3.694606 +step:4968 train loss:3.583259 +step:4969 train loss:3.625662 +step:4970 train loss:3.616713 +step:4971 train loss:3.568445 +step:4972 train loss:3.612208 +step:4973 train loss:3.618562 +step:4974 train loss:3.608117 +step:4975 train loss:3.693193 +step:4976 train loss:3.673484 +step:4977 train loss:3.618394 +step:4978 train loss:3.606156 +step:4979 train loss:3.603003 +step:4980 train loss:3.712138 +step:4981 train loss:3.551086 +step:4982 train loss:3.633199 +step:4983 train loss:3.554208 +step:4984 train loss:3.740300 +step:4985 train loss:3.638656 +step:4986 train loss:3.582563 +step:4987 train loss:3.601846 +step:4988 train loss:3.799041 +step:4989 train loss:3.603689 +step:4990 train loss:3.595972 +step:4991 train loss:3.611026 +step:4992 train loss:3.597494 +step:4993 train loss:3.574794 +step:4994 train loss:3.686439 +step:4995 train loss:3.610299 +step:4996 train loss:3.696240 +step:4997 train loss:3.596297 +step:4998 train loss:3.600509 +step:4999 train loss:3.584620 +step:5000 validation loss:3.555526 
total_sharp:4.7993e-03 L1_sharp:4.7172e-02 L2_sharp:2.6745e-02 L3_sharp:4.4313e-02 L4_sharp:2.5576e-02 L5_sharp:3.2487e-02 L6_sharp:3.0481e-02 L7_sharp:3.2124e-02 L8_sharp:2.9915e-02 L9_sharp:2.0831e-02 L10_sharp:1.7039e-02 L11_sharp:1.6426e-02 L12_sharp:2.0061e-02 total_fnorm:1.3451e+00 total_l1_linf:8.0726e+03 total_spectral:1.3451e+00 L1_fnorm:6.1342e-02 L2_fnorm:5.8887e-02 L3_fnorm:5.8163e-02 L4_fnorm:5.9746e-02 L5_fnorm:6.0694e-02 L6_fnorm:6.1275e-02 L7_fnorm:6.1148e-02 L8_fnorm:6.1113e-02 L9_fnorm:6.1168e-02 L10_fnorm:6.1282e-02 L11_fnorm:6.1260e-02 L12_fnorm:6.1211e-02 L1_l1linf:3.1655e-01 L2_l1linf:3.3934e-01 L3_l1linf:3.4974e-01 L4_l1linf:3.3385e-01 L5_l1linf:3.4754e-01 L6_l1linf:3.2579e-01 L7_l1linf:3.0053e-01 L8_l1linf:2.8207e-01 L9_l1linf:3.0457e-01 L10_l1linf:3.4808e-01 L11_l1linf:3.6070e-01 L12_l1linf:3.5477e-01 L1_spectral:7.0852e-03 L2_spectral:7.6403e-03 L3_spectral:7.8401e-03 L4_spectral:7.5408e-03 L5_spectral:7.7380e-03 L6_spectral:7.3317e-03 L7_spectral:6.7648e-03 L8_spectral:6.4197e-03 L9_spectral:6.7966e-03 L10_spectral:7.8189e-03 L11_spectral:8.0948e-03 L12_spectral:8.0459e-03 ip_v_neg_g:5.7305e-03 cos_v_neg_g:1.1946e-03 v_norm:1.3451e+00 g_norm:3.5662e+00 hv_norm:7.8535e-01 cos_v_hv:8.2201e-03 hg_norm:4.6584e+02 cos_g_hg:4.5841e-01 v_par:4.5917e-05 v_perp:1.3451e+00 L1_cos_v_neg_g:8.7550e-03 L1_v_norm:6.1342e-02 L2_cos_v_neg_g:7.8799e-03 L2_v_norm:5.8887e-02 L3_cos_v_neg_g:8.1521e-03 L3_v_norm:5.8163e-02 L4_cos_v_neg_g:6.1472e-03 L4_v_norm:5.9746e-02 L5_cos_v_neg_g:7.4129e-03 L5_v_norm:6.0694e-02 L6_cos_v_neg_g:7.1864e-03 L6_v_norm:6.1275e-02 L7_cos_v_neg_g:5.3635e-03 L7_v_norm:6.1148e-02 L8_cos_v_neg_g:6.4576e-03 L8_v_norm:6.1113e-02 L9_cos_v_neg_g:6.7105e-03 L9_v_norm:6.1168e-02 L10_cos_v_neg_g:6.5890e-03 L10_v_norm:6.1282e-02 L11_cos_v_neg_g:6.6433e-03 L11_v_norm:6.1260e-02 L12_cos_v_neg_g:6.5249e-03 L12_v_norm:6.1211e-02 +step:5000 train loss:3.698096 +step:5001 train loss:3.566178 +step:5002 train loss:3.620901 +step:5003 train loss:3.618006 +step:5004 train loss:3.609645 +step:5005 train loss:3.605411 +step:5006 train loss:3.648199 +step:5007 train loss:3.650960 +step:5008 train loss:3.586768 +step:5009 train loss:3.634602 +step:5010 train loss:3.583446 +step:5011 train loss:3.613657 +step:5012 train loss:3.586959 +step:5013 train loss:3.688925 +step:5014 train loss:3.603339 +step:5015 train loss:3.677494 +step:5016 train loss:3.607102 +step:5017 train loss:3.650191 +step:5018 train loss:3.571513 +step:5019 train loss:3.604217 +step:5020 train loss:3.597605 +step:5021 train loss:3.612594 +step:5022 train loss:3.647689 +step:5023 train loss:3.617303 +step:5024 train loss:3.670192 +step:5025 train loss:3.551897 +step:5026 train loss:3.679198 +step:5027 train loss:3.609444 +step:5028 train loss:3.679730 +step:5029 train loss:3.571674 +step:5030 train loss:3.614306 +step:5031 train loss:3.600495 +step:5032 train loss:3.627149 +step:5033 train loss:3.613532 +step:5034 train loss:3.607538 +step:5035 train loss:3.694455 +step:5036 train loss:3.642069 +step:5037 train loss:3.593289 +step:5038 train loss:3.643138 +step:5039 train loss:3.656904 +step:5040 train loss:3.619587 +step:5041 train loss:3.634602 +step:5042 train loss:3.539497 +step:5043 train loss:3.680740 +step:5044 train loss:3.598268 +step:5045 train loss:3.650638 +step:5046 train loss:3.571074 +step:5047 train loss:3.647762 +step:5048 train loss:3.562302 +step:5049 train loss:3.697141 +step:5050 train loss:3.583624 +step:5051 train loss:3.628252 +step:5052 train loss:3.525323 +step:5053 train loss:3.707350 
+step:5054 train loss:3.598315 +step:5055 train loss:3.622479 +step:5056 train loss:3.656465 +step:5057 train loss:3.588695 +step:5058 train loss:3.618917 +step:5059 train loss:3.580147 +step:5060 train loss:3.627414 +step:5061 train loss:3.623091 +step:5062 train loss:3.592261 +step:5063 train loss:3.587160 +step:5064 train loss:3.594156 +step:5065 train loss:3.579810 +step:5066 train loss:3.638909 +step:5067 train loss:3.622846 +step:5068 train loss:3.606095 +step:5069 train loss:3.578634 +step:5070 train loss:3.607506 +step:5071 train loss:3.676705 +step:5072 train loss:3.567950 +step:5073 train loss:3.577626 +step:5074 train loss:3.524398 +step:5075 train loss:3.593867 +step:5076 train loss:3.525353 +step:5077 train loss:3.584934 +step:5078 train loss:3.612077 +step:5079 train loss:3.630787 +step:5080 train loss:3.605864 +step:5081 train loss:3.617285 +step:5082 train loss:3.608033 +step:5083 train loss:3.660084 +step:5084 train loss:3.641001 +step:5085 train loss:3.602544 +step:5086 train loss:3.679507 +step:5087 train loss:3.662683 +step:5088 train loss:3.584049 +step:5089 train loss:3.650873 +step:5090 train loss:3.594085 +step:5091 train loss:3.598228 +step:5092 train loss:3.697602 +step:5093 train loss:3.577981 +step:5094 train loss:3.574775 +step:5095 train loss:3.625191 +step:5096 train loss:3.594201 +step:5097 train loss:3.603698 +step:5098 train loss:3.607260 +step:5099 train loss:3.569506 +step:5100 train loss:3.581336 +step:5101 train loss:3.774318 +step:5102 train loss:3.618903 +step:5103 train loss:3.629370 +step:5104 train loss:3.676539 +step:5105 train loss:3.608525 +step:5106 train loss:3.572023 +step:5107 train loss:3.590895 +step:5108 train loss:3.583107 +step:5109 train loss:3.661187 +step:5110 train loss:3.576470 +step:5111 train loss:3.669137 +step:5112 train loss:3.577074 +step:5113 train loss:3.559225 +step:5114 train loss:3.606588 +step:5115 train loss:3.567751 +step:5116 train loss:3.624710 +step:5117 train loss:3.569046 +step:5118 train loss:3.596071 +step:5119 train loss:3.579519 +step:5120 train loss:3.622115 +step:5121 train loss:3.571259 +step:5122 train loss:3.582075 +step:5123 train loss:3.564739 +step:5124 train loss:3.529307 +step:5125 train loss:3.638551 +step:5126 train loss:3.624696 +step:5127 train loss:3.626643 +step:5128 train loss:3.640752 +step:5129 train loss:3.567509 +step:5130 train loss:3.581150 +step:5131 train loss:3.519612 +step:5132 train loss:3.639921 +step:5133 train loss:3.606813 +step:5134 train loss:3.609813 +step:5135 train loss:3.561317 +step:5136 train loss:3.630227 +step:5137 train loss:3.626644 +step:5138 train loss:3.606108 +step:5139 train loss:3.642006 +step:5140 train loss:3.616935 +step:5141 train loss:3.646387 +step:5142 train loss:3.594443 +step:5143 train loss:3.622425 +step:5144 train loss:3.618229 +step:5145 train loss:3.561924 +step:5146 train loss:3.555807 +step:5147 train loss:3.634783 +step:5148 train loss:3.561755 +step:5149 train loss:3.634398 +step:5150 train loss:3.609210 +step:5151 train loss:3.574621 +step:5152 train loss:3.621213 +step:5153 train loss:3.593585 +step:5154 train loss:3.606496 +step:5155 train loss:3.609036 +step:5156 train loss:3.588603 +step:5157 train loss:3.591070 +step:5158 train loss:3.613034 +step:5159 train loss:3.648459 +step:5160 train loss:3.719496 +step:5161 train loss:3.646754 +step:5162 train loss:3.663169 +step:5163 train loss:3.579534 +step:5164 train loss:3.642080 +step:5165 train loss:3.656187 +step:5166 train loss:3.596174 +step:5167 train loss:3.693325 +step:5168 train 
loss:3.607276 +step:5169 train loss:3.641915 +step:5170 train loss:3.619174 +step:5171 train loss:3.662181 +step:5172 train loss:3.581903 +step:5173 train loss:3.646416 +step:5174 train loss:3.581479 +step:5175 train loss:3.613622 +step:5176 train loss:3.601755 +step:5177 train loss:3.599136 +step:5178 train loss:3.666338 +step:5179 train loss:3.577605 +step:5180 train loss:3.657766 +step:5181 train loss:3.602570 +step:5182 train loss:3.659880 +step:5183 train loss:3.592026 +step:5184 train loss:3.571508 +step:5185 train loss:3.596884 +step:5186 train loss:3.652465 +step:5187 train loss:3.646192 +step:5188 train loss:3.581004 +step:5189 train loss:3.623488 +step:5190 train loss:3.607835 +step:5191 train loss:3.586365 +step:5192 train loss:3.571101 +step:5193 train loss:3.656308 +step:5194 train loss:3.608415 +step:5195 train loss:3.580722 +step:5196 train loss:3.650435 +step:5197 train loss:3.700936 +step:5198 train loss:3.608139 +step:5199 train loss:3.593568 +step:5200 train loss:3.619247 +step:5201 train loss:3.607429 +step:5202 train loss:3.611935 +step:5203 train loss:3.617114 +step:5204 train loss:3.586980 +step:5205 train loss:3.630629 +step:5206 train loss:3.567427 +step:5207 train loss:3.571068 +step:5208 train loss:3.633930 +step:5209 train loss:3.654588 +step:5210 train loss:3.555514 +step:5211 train loss:3.602097 +step:5212 train loss:3.617879 +step:5213 train loss:3.591604 +step:5214 train loss:3.641080 +step:5215 train loss:3.751113 +step:5216 train loss:3.604173 +step:5217 train loss:3.580994 +step:5218 train loss:3.587121 +step:5219 train loss:3.649521 +step:5220 train loss:3.567986 +step:5221 train loss:3.568835 +step:5222 train loss:3.651716 +step:5223 train loss:3.644578 +step:5224 train loss:3.542859 +step:5225 train loss:3.690325 +step:5226 train loss:3.604343 +step:5227 train loss:3.676191 +step:5228 train loss:3.649464 +step:5229 train loss:3.586771 +step:5230 train loss:3.603004 +step:5231 train loss:3.547474 +step:5232 train loss:3.672087 +step:5233 train loss:3.632967 +step:5234 train loss:3.635299 +step:5235 train loss:3.585568 +step:5236 train loss:3.660778 +step:5237 train loss:3.712569 +step:5238 train loss:3.615310 +step:5239 train loss:3.677781 +step:5240 train loss:3.560202 +step:5241 train loss:3.619080 +step:5242 train loss:3.590081 +step:5243 train loss:3.594205 +step:5244 train loss:3.594764 +step:5245 train loss:3.636441 +step:5246 train loss:3.680211 +step:5247 train loss:3.608898 +step:5248 train loss:3.579515 +step:5249 train loss:3.635830 +step:5250 validation loss:3.537830 +step:5250 train loss:3.605598 +step:5251 train loss:3.669730 +step:5252 train loss:3.561026 +step:5253 train loss:3.713659 +step:5254 train loss:3.585486 +step:5255 train loss:3.658001 +step:5256 train loss:3.571968 +step:5257 train loss:3.626217 +step:5258 train loss:3.624543 +step:5259 train loss:3.609944 +step:5260 train loss:3.604337 +step:5261 train loss:3.594410 +step:5262 train loss:3.635236 +step:5263 train loss:3.621554 +step:5264 train loss:3.573260 +step:5265 train loss:3.653177 +step:5266 train loss:3.571476 +step:5267 train loss:3.582856 +step:5268 train loss:3.564269 +step:5269 train loss:3.567895 +step:5270 train loss:3.616303 +step:5271 train loss:3.541181 +step:5272 train loss:3.634710 +step:5273 train loss:3.542863 +step:5274 train loss:3.592892 +step:5275 train loss:3.605810 +step:5276 train loss:3.731587 +step:5277 train loss:3.629414 +step:5278 train loss:3.578696 +step:5279 train loss:3.627239 +step:5280 train loss:3.604388 +step:5281 train loss:3.596874 
+step:5282 train loss:3.569434 +step:5283 train loss:3.568940 +step:5284 train loss:3.577746 +step:5285 train loss:3.646275 +step:5286 train loss:3.552965 +step:5287 train loss:3.654474 +step:5288 train loss:3.630173 +step:5289 train loss:3.600078 +step:5290 train loss:3.653020 +step:5291 train loss:3.606330 +step:5292 train loss:3.624041 +step:5293 train loss:3.593904 +step:5294 train loss:3.582645 +step:5295 train loss:3.588020 +step:5296 train loss:3.578349 +step:5297 train loss:3.601530 +step:5298 train loss:3.545990 +step:5299 train loss:3.637429 +step:5300 train loss:3.588222 +step:5301 train loss:3.658026 +step:5302 train loss:3.664216 +step:5303 train loss:3.523588 +step:5304 train loss:3.557648 +step:5305 train loss:3.536604 +step:5306 train loss:3.568363 +step:5307 train loss:3.572290 +step:5308 train loss:3.665537 +step:5309 train loss:3.616308 +step:5310 train loss:3.600399 +step:5311 train loss:3.671492 +step:5312 train loss:3.549032 +step:5313 train loss:3.639726 +step:5314 train loss:3.634743 +step:5315 train loss:3.594679 +step:5316 train loss:3.624473 +step:5317 train loss:3.645228 +step:5318 train loss:3.599275 +step:5319 train loss:3.625637 +step:5320 train loss:3.576815 +step:5321 train loss:3.700330 +step:5322 train loss:3.608669 +step:5323 train loss:3.609660 +step:5324 train loss:3.554310 +step:5325 train loss:3.638270 +step:5326 train loss:3.627996 +step:5327 train loss:3.518977 +step:5328 train loss:3.655960 +step:5329 train loss:3.619440 +step:5330 train loss:3.617143 +step:5331 train loss:3.667666 +step:5332 train loss:3.591648 +step:5333 train loss:3.652899 +step:5334 train loss:3.630434 +step:5335 train loss:3.686904 +step:5336 train loss:3.722879 +step:5337 train loss:3.557712 +step:5338 train loss:3.565577 +step:5339 train loss:3.589251 +step:5340 train loss:3.613525 +step:5341 train loss:3.628313 +step:5342 train loss:3.527895 +step:5343 train loss:3.687568 +step:5344 train loss:3.570549 +step:5345 train loss:3.570631 +step:5346 train loss:3.571571 +step:5347 train loss:3.595422 +step:5348 train loss:3.638785 +step:5349 train loss:3.577126 +step:5350 train loss:3.617800 +step:5351 train loss:3.691908 +step:5352 train loss:3.730778 +step:5353 train loss:3.641609 +step:5354 train loss:3.611213 +step:5355 train loss:3.577392 +step:5356 train loss:3.601967 +step:5357 train loss:3.579307 +step:5358 train loss:3.600945 +step:5359 train loss:3.616031 +step:5360 train loss:3.585826 +step:5361 train loss:3.589308 +step:5362 train loss:3.572236 +step:5363 train loss:3.569890 +step:5364 train loss:3.569482 +step:5365 train loss:3.604546 +step:5366 train loss:3.634594 +step:5367 train loss:3.563030 +step:5368 train loss:3.628118 +step:5369 train loss:3.646357 +step:5370 train loss:3.543458 +step:5371 train loss:3.599393 +step:5372 train loss:3.617774 +step:5373 train loss:3.659824 +step:5374 train loss:3.543603 +step:5375 train loss:3.586812 +step:5376 train loss:3.656044 +step:5377 train loss:3.590946 +step:5378 train loss:3.566333 +step:5379 train loss:3.569944 +step:5380 train loss:3.602337 +step:5381 train loss:3.645903 +step:5382 train loss:3.548301 +step:5383 train loss:3.612881 +step:5384 train loss:3.628669 +step:5385 train loss:3.624070 +step:5386 train loss:3.609568 +step:5387 train loss:3.612832 +step:5388 train loss:3.626651 +step:5389 train loss:3.555595 +step:5390 train loss:3.586883 +step:5391 train loss:3.524584 +step:5392 train loss:3.590197 +step:5393 train loss:3.583835 +step:5394 train loss:3.575155 +step:5395 train loss:3.647741 +step:5396 train 
loss:3.612544 +step:5397 train loss:3.632987 +step:5398 train loss:3.630504 +step:5399 train loss:3.661886 +step:5400 train loss:3.667227 +step:5401 train loss:3.627498 +step:5402 train loss:3.733734 +step:5403 train loss:3.640018 +step:5404 train loss:3.610992 +step:5405 train loss:3.683983 +step:5406 train loss:3.644031 +step:5407 train loss:3.572691 +step:5408 train loss:3.717959 +step:5409 train loss:3.557222 +step:5410 train loss:3.622225 +step:5411 train loss:3.607641 +step:5412 train loss:3.580708 +step:5413 train loss:3.634189 +step:5414 train loss:3.610479 +step:5415 train loss:3.590925 +step:5416 train loss:3.582959 +step:5417 train loss:3.651130 +step:5418 train loss:3.667667 +step:5419 train loss:3.569782 +step:5420 train loss:3.632734 +step:5421 train loss:3.600760 +step:5422 train loss:3.644040 +step:5423 train loss:3.623273 +step:5424 train loss:3.528547 +step:5425 train loss:3.593686 +step:5426 train loss:3.679528 +step:5427 train loss:3.570500 +step:5428 train loss:3.606869 +step:5429 train loss:3.541830 +step:5430 train loss:3.575422 +step:5431 train loss:3.638718 +step:5432 train loss:3.613101 +step:5433 train loss:3.620543 +step:5434 train loss:3.568284 +step:5435 train loss:3.565488 +step:5436 train loss:3.570982 +step:5437 train loss:3.607458 +step:5438 train loss:3.587986 +step:5439 train loss:3.592555 +step:5440 train loss:3.636837 +step:5441 train loss:3.656656 +step:5442 train loss:3.576177 +step:5443 train loss:3.578757 +step:5444 train loss:3.523117 +step:5445 train loss:3.609122 +step:5446 train loss:3.579665 +step:5447 train loss:3.615945 +step:5448 train loss:3.672492 +step:5449 train loss:3.562216 +step:5450 train loss:3.596225 +step:5451 train loss:3.591031 +step:5452 train loss:3.604394 +step:5453 train loss:3.660590 +step:5454 train loss:3.587123 +step:5455 train loss:3.572621 +step:5456 train loss:3.713594 +step:5457 train loss:3.590799 +step:5458 train loss:3.627184 +step:5459 train loss:3.570073 +step:5460 train loss:3.586188 +step:5461 train loss:3.590992 +step:5462 train loss:3.591734 +step:5463 train loss:3.601631 +step:5464 train loss:3.604143 +step:5465 train loss:3.547077 +step:5466 train loss:3.624209 +step:5467 train loss:3.609272 +step:5468 train loss:3.614634 +step:5469 train loss:3.707834 +step:5470 train loss:3.602063 +step:5471 train loss:3.674045 +step:5472 train loss:3.621894 +step:5473 train loss:3.523885 +step:5474 train loss:3.858807 +step:5475 train loss:3.534732 +step:5476 train loss:3.611688 +step:5477 train loss:3.612781 +step:5478 train loss:3.610344 +step:5479 train loss:3.753938 +step:5480 train loss:3.599287 +step:5481 train loss:3.660318 +step:5482 train loss:3.574513 +step:5483 train loss:3.610753 +step:5484 train loss:3.649578 +step:5485 train loss:3.566581 +step:5486 train loss:3.612231 +step:5487 train loss:3.611889 +step:5488 train loss:3.525538 +step:5489 train loss:3.630189 +step:5490 train loss:3.576555 +step:5491 train loss:3.676764 +step:5492 train loss:3.606842 +step:5493 train loss:3.534279 +step:5494 train loss:3.592386 +step:5495 train loss:3.568201 +step:5496 train loss:3.566556 +step:5497 train loss:3.686223 +step:5498 train loss:3.552957 +step:5499 train loss:3.690547 +step:5500 validation loss:3.529334 total_sharp:4.2524e-03 L1_sharp:7.6161e-02 L2_sharp:2.3794e-02 L3_sharp:2.9459e-02 L4_sharp:1.6831e-02 L5_sharp:1.6779e-02 L6_sharp:2.0276e-02 L7_sharp:2.2289e-02 L8_sharp:2.5315e-02 L9_sharp:1.9298e-02 L10_sharp:1.3343e-02 L11_sharp:1.5022e-02 L12_sharp:2.5080e-02 total_fnorm:1.3466e+00 
total_l1_linf:8.0851e+03 total_spectral:1.3466e+00 L1_fnorm:6.1334e-02 L2_fnorm:5.9008e-02 L3_fnorm:5.8455e-02 L4_fnorm:5.9901e-02 L5_fnorm:6.0849e-02 L6_fnorm:6.1410e-02 L7_fnorm:6.1326e-02 L8_fnorm:6.1312e-02 L9_fnorm:6.1243e-02 L10_fnorm:6.1346e-02 L11_fnorm:6.1372e-02 L12_fnorm:6.1406e-02 L1_l1linf:3.4218e-01 L2_l1linf:3.5872e-01 L3_l1linf:3.5928e-01 L4_l1linf:3.7391e-01 L5_l1linf:3.3826e-01 L6_l1linf:3.5125e-01 L7_l1linf:3.3913e-01 L8_l1linf:3.1088e-01 L9_l1linf:3.2373e-01 L10_l1linf:3.6407e-01 L11_l1linf:3.7859e-01 L12_l1linf:3.8587e-01 L1_spectral:7.7058e-03 L2_spectral:8.0651e-03 L3_spectral:8.0449e-03 L4_spectral:8.3775e-03 L5_spectral:7.5391e-03 L6_spectral:7.9039e-03 L7_spectral:7.6443e-03 L8_spectral:7.0266e-03 L9_spectral:7.2843e-03 L10_spectral:8.1499e-03 L11_spectral:8.4420e-03 L12_spectral:8.6036e-03 ip_v_neg_g:4.2750e-03 cos_v_neg_g:6.1479e-04 v_norm:1.3466e+00 g_norm:5.1636e+00 hv_norm:8.8837e-01 cos_v_hv:6.4460e-03 hg_norm:9.7799e+02 cos_g_hg:6.7096e-01 v_par:2.2735e-05 v_perp:1.3466e+00 L1_cos_v_neg_g:8.4113e-03 L1_v_norm:6.1334e-02 L2_cos_v_neg_g:6.5534e-03 L2_v_norm:5.9008e-02 L3_cos_v_neg_g:5.6573e-03 L3_v_norm:5.8455e-02 L4_cos_v_neg_g:3.4523e-03 L4_v_norm:5.9901e-02 L5_cos_v_neg_g:4.1040e-03 L5_v_norm:6.0849e-02 L6_cos_v_neg_g:2.3733e-03 L6_v_norm:6.1410e-02 L7_cos_v_neg_g:6.9802e-04 L7_v_norm:6.1326e-02 L8_cos_v_neg_g:3.3791e-04 L8_v_norm:6.1312e-02 L9_cos_v_neg_g:1.0206e-03 L9_v_norm:6.1243e-02 L10_cos_v_neg_g:2.4512e-03 L10_v_norm:6.1346e-02 L11_cos_v_neg_g:3.0975e-03 L11_v_norm:6.1372e-02 L12_cos_v_neg_g:2.8250e-03 L12_v_norm:6.1406e-02 +step:5500 train loss:3.604351 +step:5501 train loss:3.677454 +step:5502 train loss:3.626353 +step:5503 train loss:3.590250 +step:5504 train loss:3.637894 +step:5505 train loss:3.599960 +step:5506 train loss:3.641771 +step:5507 train loss:3.630050 +step:5508 train loss:3.651719 +step:5509 train loss:3.660742 +step:5510 train loss:3.636400 +step:5511 train loss:3.628227 +step:5512 train loss:3.750525 +step:5513 train loss:3.552046 +step:5514 train loss:3.611326 +step:5515 train loss:3.638448 +step:5516 train loss:3.661456 +step:5517 train loss:3.621446 +step:5518 train loss:3.648988 +step:5519 train loss:3.681630 +step:5520 train loss:3.589506 +step:5521 train loss:3.602160 +step:5522 train loss:3.568890 +step:5523 train loss:3.615941 +step:5524 train loss:3.662247 +step:5525 train loss:3.571207 +step:5526 train loss:3.580981 +step:5527 train loss:3.605783 +step:5528 train loss:3.709446 +step:5529 train loss:3.669557 +step:5530 train loss:3.640114 +step:5531 train loss:3.574749 +step:5532 train loss:3.599523 +step:5533 train loss:3.636431 +step:5534 train loss:3.549053 +step:5535 train loss:3.602346 +step:5536 train loss:3.538082 +step:5537 train loss:3.584627 +step:5538 train loss:3.581277 +step:5539 train loss:3.526554 +step:5540 train loss:3.748759 +step:5541 train loss:3.560158 +step:5542 train loss:3.607848 +step:5543 train loss:3.597710 +step:5544 train loss:3.583414 +step:5545 train loss:3.578558 +step:5546 train loss:3.615734 +step:5547 train loss:3.547409 +step:5548 train loss:3.590850 +step:5549 train loss:3.592897 +step:5550 train loss:3.615452 +step:5551 train loss:3.622693 +step:5552 train loss:3.578777 +step:5553 train loss:3.606764 +step:5554 train loss:3.579217 +step:5555 train loss:3.587479 +step:5556 train loss:3.603178 +step:5557 train loss:3.670751 +step:5558 train loss:3.589980 +step:5559 train loss:3.595173 +step:5560 train loss:3.588514 +step:5561 train loss:3.625115 +step:5562 train loss:3.578881 
+step:5563 train loss:3.557271 +step:5564 train loss:3.594009 +step:5565 train loss:3.660582 +step:5566 train loss:3.563221 +step:5567 train loss:3.682508 +step:5568 train loss:3.803540 +step:5569 train loss:3.590176 +step:5570 train loss:3.519351 +step:5571 train loss:3.610334 +step:5572 train loss:3.550689 +step:5573 train loss:3.539633 +step:5574 train loss:3.507907 +step:5575 train loss:3.604079 +step:5576 train loss:3.588189 +step:5577 train loss:3.595839 +step:5578 train loss:3.623085 +step:5579 train loss:3.579066 +step:5580 train loss:3.603455 +step:5581 train loss:3.626039 +step:5582 train loss:3.603504 +step:5583 train loss:3.616724 +step:5584 train loss:3.732907 +step:5585 train loss:3.639253 +step:5586 train loss:3.576082 +step:5587 train loss:3.606699 +step:5588 train loss:3.624134 +step:5589 train loss:3.623241 +step:5590 train loss:3.683219 +step:5591 train loss:3.551228 +step:5592 train loss:3.723752 +step:5593 train loss:3.601889 +step:5594 train loss:3.607550 +step:5595 train loss:3.600045 +step:5596 train loss:3.548190 +step:5597 train loss:3.567421 +step:5598 train loss:3.572443 +step:5599 train loss:3.574299 +step:5600 train loss:3.619143 +step:5601 train loss:3.642271 +step:5602 train loss:3.577616 +step:5603 train loss:3.617556 +step:5604 train loss:3.615163 +step:5605 train loss:3.587048 +step:5606 train loss:3.591855 +step:5607 train loss:3.621516 +step:5608 train loss:3.565863 +step:5609 train loss:3.615740 +step:5610 train loss:3.573078 +step:5611 train loss:3.613565 +step:5612 train loss:3.642600 +step:5613 train loss:3.603374 +step:5614 train loss:3.568167 +step:5615 train loss:3.670732 +step:5616 train loss:3.568009 +step:5617 train loss:3.655567 +step:5618 train loss:3.639705 +step:5619 train loss:3.593887 +step:5620 train loss:3.593131 +step:5621 train loss:3.670163 +step:5622 train loss:3.553347 +step:5623 train loss:3.589045 +step:5624 train loss:3.580173 +step:5625 train loss:3.614354 +step:5626 train loss:3.606656 +step:5627 train loss:3.579958 +step:5628 train loss:3.620281 +step:5629 train loss:3.599961 +step:5630 train loss:3.530421 +step:5631 train loss:3.571630 +step:5632 train loss:3.615438 +step:5633 train loss:3.607746 +step:5634 train loss:3.561889 +step:5635 train loss:3.601159 +step:5636 train loss:3.576198 +step:5637 train loss:3.718042 +step:5638 train loss:3.625011 +step:5639 train loss:3.604133 +step:5640 train loss:3.608394 +step:5641 train loss:3.647805 +step:5642 train loss:3.582116 +step:5643 train loss:3.598352 +step:5644 train loss:3.681336 +step:5645 train loss:3.636773 +step:5646 train loss:3.634648 +step:5647 train loss:3.624463 +step:5648 train loss:3.615699 +step:5649 train loss:3.527467 +step:5650 train loss:3.532509 +step:5651 train loss:3.607788 +step:5652 train loss:3.608237 +step:5653 train loss:3.576036 +step:5654 train loss:3.706114 +step:5655 train loss:3.565213 +step:5656 train loss:3.589921 +step:5657 train loss:3.658791 +step:5658 train loss:3.558287 +step:5659 train loss:3.596346 +step:5660 train loss:3.647547 +step:5661 train loss:3.586444 +step:5662 train loss:3.625744 +step:5663 train loss:3.514431 +step:5664 train loss:3.488909 +step:5665 train loss:3.610594 +step:5666 train loss:3.614033 +step:5667 train loss:3.648479 +step:5668 train loss:3.579774 +step:5669 train loss:3.592779 +step:5670 train loss:3.593067 +step:5671 train loss:3.577296 +step:5672 train loss:3.629419 +step:5673 train loss:3.594285 +step:5674 train loss:3.668822 +step:5675 train loss:3.580605 +step:5676 train loss:3.729891 +step:5677 train 
loss:3.625543 +step:5678 train loss:3.601839 +step:5679 train loss:3.592403 +step:5680 train loss:3.625469 +step:5681 train loss:3.592698 +step:5682 train loss:3.606732 +step:5683 train loss:3.564744 +step:5684 train loss:3.575098 +step:5685 train loss:3.619604 +step:5686 train loss:3.634684 +step:5687 train loss:3.584311 +step:5688 train loss:3.671557 +step:5689 train loss:3.576533 +step:5690 train loss:3.726696 +step:5691 train loss:3.558971 +step:5692 train loss:3.545358 +step:5693 train loss:3.552837 +step:5694 train loss:3.572772 +step:5695 train loss:3.587346 +step:5696 train loss:3.639656 +step:5697 train loss:3.564485 +step:5698 train loss:3.585867 +step:5699 train loss:3.598299 +step:5700 train loss:3.594242 +step:5701 train loss:3.587167 +step:5702 train loss:3.657044 +step:5703 train loss:3.556077 +step:5704 train loss:3.599045 +step:5705 train loss:3.607635 +step:5706 train loss:3.630976 +step:5707 train loss:3.548035 +step:5708 train loss:3.631567 +step:5709 train loss:3.639116 +step:5710 train loss:3.626269 +step:5711 train loss:3.649261 +step:5712 train loss:3.631370 +step:5713 train loss:3.557456 +step:5714 train loss:3.641720 +step:5715 train loss:3.600324 +step:5716 train loss:3.602305 +step:5717 train loss:3.629117 +step:5718 train loss:3.573546 +step:5719 train loss:3.645085 +step:5720 train loss:3.617655 +step:5721 train loss:3.548928 +step:5722 train loss:3.557348 +step:5723 train loss:3.638940 +step:5724 train loss:3.557428 +step:5725 train loss:3.628514 +step:5726 train loss:3.623504 +step:5727 train loss:3.579623 +step:5728 train loss:3.583917 +step:5729 train loss:3.586701 +step:5730 train loss:3.656463 +step:5731 train loss:3.524095 +step:5732 train loss:3.585486 +step:5733 train loss:3.574560 +step:5734 train loss:3.589782 +step:5735 train loss:3.580869 +step:5736 train loss:3.582824 +step:5737 train loss:3.605464 +step:5738 train loss:3.571341 +step:5739 train loss:3.582666 +step:5740 train loss:3.622871 +step:5741 train loss:3.593053 +step:5742 train loss:3.650111 +step:5743 train loss:3.615481 +step:5744 train loss:3.573286 +step:5745 train loss:3.577953 +step:5746 train loss:3.607667 +step:5747 train loss:3.593574 +step:5748 train loss:3.641658 +step:5749 train loss:3.597716 +step:5750 validation loss:3.523633 +step:5750 train loss:3.603816 +step:5751 train loss:3.618788 +step:5752 train loss:3.604851 +step:5753 train loss:3.575459 +step:5754 train loss:3.583805 +step:5755 train loss:3.599279 +step:5756 train loss:3.586174 +step:5757 train loss:3.648520 +step:5758 train loss:3.583756 +step:5759 train loss:3.546803 +step:5760 train loss:3.627250 +step:5761 train loss:3.622650 +step:5762 train loss:3.582511 +step:5763 train loss:3.608544 +step:5764 train loss:3.571211 +step:5765 train loss:3.689739 +step:5766 train loss:3.598095 +step:5767 train loss:3.632122 +step:5768 train loss:3.570977 +step:5769 train loss:3.691225 +step:5770 train loss:3.615300 +step:5771 train loss:3.640549 +step:5772 train loss:3.593307 +step:5773 train loss:3.572665 +step:5774 train loss:3.580334 +step:5775 train loss:3.650645 +step:5776 train loss:3.637170 +step:5777 train loss:3.554638 +step:5778 train loss:3.635762 +step:5779 train loss:3.600080 +step:5780 train loss:3.569728 +step:5781 train loss:3.636723 +step:5782 train loss:3.595613 +step:5783 train loss:3.554685 +step:5784 train loss:3.658014 +step:5785 train loss:3.649394 +step:5786 train loss:3.558857 +step:5787 train loss:3.605563 +step:5788 train loss:3.615740 +step:5789 train loss:3.559210 +step:5790 train loss:3.658367 
+step:5791 train loss:3.587312 +step:5792 train loss:3.859238 +step:5793 train loss:3.631000 +step:5794 train loss:3.649990 +step:5795 train loss:3.642556 +step:5796 train loss:3.622242 +step:5797 train loss:3.605387 +step:5798 train loss:3.604103 +step:5799 train loss:3.572783 +step:5800 train loss:3.732213 +step:5801 train loss:3.607157 +step:5802 train loss:3.593997 +step:5803 train loss:3.606530 +step:5804 train loss:3.626499 +step:5805 train loss:3.586113 +step:5806 train loss:3.628866 +step:5807 train loss:3.550123 +step:5808 train loss:3.583315 +step:5809 train loss:3.594334 +step:5810 train loss:3.569192 +step:5811 train loss:3.581644 +step:5812 train loss:3.563302 +step:5813 train loss:3.573616 +step:5814 train loss:3.567958 +step:5815 train loss:3.570693 +step:5816 train loss:3.632893 +step:5817 train loss:3.643695 +step:5818 train loss:3.618033 +step:5819 train loss:3.669165 +step:5820 train loss:3.610074 +step:5821 train loss:3.602276 +step:5822 train loss:3.618034 +step:5823 train loss:3.620623 +step:5824 train loss:3.571110 +step:5825 train loss:3.665698 +step:5826 train loss:3.578884 +step:5827 train loss:3.545582 +step:5828 train loss:3.531439 +step:5829 train loss:3.596992 +step:5830 train loss:3.568592 +step:5831 train loss:3.540829 +step:5832 train loss:3.654786 +step:5833 train loss:3.632844 +step:5834 train loss:3.617324 +step:5835 train loss:3.568314 +step:5836 train loss:3.529651 +step:5837 train loss:3.652295 +step:5838 train loss:3.633850 +step:5839 train loss:3.609380 +step:5840 train loss:3.689855 +step:5841 train loss:3.615903 +step:5842 train loss:3.625700 +step:5843 train loss:3.572557 +step:5844 train loss:3.643284 +step:5845 train loss:3.549985 +step:5846 train loss:3.597338 +step:5847 train loss:3.624960 +step:5848 train loss:3.693001 +step:5849 train loss:3.585464 +step:5850 train loss:3.615618 +step:5851 train loss:3.580055 +step:5852 train loss:3.669801 +step:5853 train loss:3.760604 +step:5854 train loss:3.550317 +step:5855 train loss:3.609423 +step:5856 train loss:3.582313 +step:5857 train loss:3.592923 +step:5858 train loss:3.566892 +step:5859 train loss:3.571456 +step:5860 train loss:3.674468 +step:5861 train loss:3.560183 +step:5862 train loss:3.670985 +step:5863 train loss:3.611867 +step:5864 train loss:3.597301 +step:5865 train loss:3.602024 +step:5866 train loss:3.595037 +step:5867 train loss:3.677575 +step:5868 train loss:3.600029 +step:5869 train loss:3.623909 +step:5870 train loss:3.601035 +step:5871 train loss:3.583293 +step:5872 train loss:3.610011 +step:5873 train loss:3.590233 +step:5874 train loss:3.671250 +step:5875 train loss:3.600433 +step:5876 train loss:3.580446 +step:5877 train loss:3.589389 +step:5878 train loss:3.587216 +step:5879 train loss:3.560488 +step:5880 train loss:3.758379 +step:5881 train loss:3.596667 +step:5882 train loss:3.568143 +step:5883 train loss:3.569184 +step:5884 train loss:3.586966 +step:5885 train loss:3.583673 +step:5886 train loss:3.602629 +step:5887 train loss:3.603279 +step:5888 train loss:3.584112 +step:5889 train loss:3.563337 +step:5890 train loss:3.609546 +step:5891 train loss:3.554683 +step:5892 train loss:3.635062 +step:5893 train loss:3.558082 +step:5894 train loss:3.548147 +step:5895 train loss:3.554743 +step:5896 train loss:3.564417 +step:5897 train loss:3.633192 +step:5898 train loss:3.847490 +step:5899 train loss:3.583157 +step:5900 train loss:3.631289 +step:5901 train loss:3.583375 +step:5902 train loss:3.595881 +step:5903 train loss:3.585773 +step:5904 train loss:3.615446 +step:5905 train 
loss:3.720254 +step:5906 train loss:3.661518 +step:5907 train loss:3.604893 +step:5908 train loss:3.582262 +step:5909 train loss:3.574145 +step:5910 train loss:3.559438 +step:5911 train loss:3.577994 +step:5912 train loss:3.589059 +step:5913 train loss:3.614974 +step:5914 train loss:3.595641 +step:5915 train loss:3.717163 +step:5916 train loss:3.598942 +step:5917 train loss:3.572941 +step:5918 train loss:3.569830 +step:5919 train loss:3.596175 +step:5920 train loss:3.594720 +step:5921 train loss:3.566002 +step:5922 train loss:3.624089 +step:5923 train loss:3.615937 +step:5924 train loss:3.573090 +step:5925 train loss:3.695209 +step:5926 train loss:3.581039 +step:5927 train loss:3.559221 +step:5928 train loss:3.595016 +step:5929 train loss:3.615063 +step:5930 train loss:3.564476 +step:5931 train loss:3.548217 +step:5932 train loss:3.589666 +step:5933 train loss:3.643871 +step:5934 train loss:3.556696 +step:5935 train loss:3.584691 +step:5936 train loss:3.574669 +step:5937 train loss:3.553436 +step:5938 train loss:3.570086 +step:5939 train loss:3.549469 +step:5940 train loss:3.632919 +step:5941 train loss:3.566177 +step:5942 train loss:3.581489 +step:5943 train loss:3.587739 +step:5944 train loss:3.638463 +step:5945 train loss:3.572212 +step:5946 train loss:3.551789 +step:5947 train loss:3.564689 +step:5948 train loss:3.602180 +step:5949 train loss:3.647058 +step:5950 train loss:3.608109 +step:5951 train loss:3.608962 +step:5952 train loss:3.533966 +step:5953 train loss:3.573553 +step:5954 train loss:3.584991 +step:5955 train loss:3.590108 +step:5956 train loss:3.567017 +step:5957 train loss:3.534693 +step:5958 train loss:3.611643 +step:5959 train loss:3.568650 +step:5960 train loss:3.543798 +step:5961 train loss:3.570090 +step:5962 train loss:3.600764 +step:5963 train loss:3.635947 +step:5964 train loss:3.591323 +step:5965 train loss:3.609627 +step:5966 train loss:3.605486 +step:5967 train loss:3.570567 +step:5968 train loss:3.644745 +step:5969 train loss:3.586055 +step:5970 train loss:3.603148 +step:5971 train loss:3.552870 +step:5972 train loss:3.582104 +step:5973 train loss:3.571901 +step:5974 train loss:3.595070 +step:5975 train loss:3.564801 +step:5976 train loss:3.606152 +step:5977 train loss:3.564111 +step:5978 train loss:3.550093 +step:5979 train loss:3.584382 +step:5980 train loss:3.651911 +step:5981 train loss:3.547867 +step:5982 train loss:3.559073 +step:5983 train loss:3.626761 +step:5984 train loss:3.569240 +step:5985 train loss:3.611365 +step:5986 train loss:3.587037 +step:5987 train loss:3.571613 +step:5988 train loss:3.581148 +step:5989 train loss:3.599259 +step:5990 train loss:3.531253 +step:5991 train loss:3.594683 +step:5992 train loss:3.626232 +step:5993 train loss:3.578641 +step:5994 train loss:3.598458 +step:5995 train loss:3.489624 +step:5996 train loss:3.654505 +step:5997 train loss:3.637429 +step:5998 train loss:3.514350 +step:5999 train loss:3.539537 +step:6000 validation loss:3.510554 total_sharp:5.1725e-03 L1_sharp:3.9599e-02 L2_sharp:1.3376e-02 L3_sharp:3.6546e-02 L4_sharp:2.0262e-02 L5_sharp:2.1361e-02 L6_sharp:2.4888e-02 L7_sharp:2.9305e-02 L8_sharp:3.1716e-02 L9_sharp:2.4881e-02 L10_sharp:1.9701e-02 L11_sharp:2.3150e-02 L12_sharp:5.4198e-02 total_fnorm:1.3443e+00 total_l1_linf:8.0689e+03 total_spectral:1.3443e+00 L1_fnorm:6.1220e-02 L2_fnorm:5.9258e-02 L3_fnorm:5.8495e-02 L4_fnorm:6.0042e-02 L5_fnorm:6.0881e-02 L6_fnorm:6.1318e-02 L7_fnorm:6.1301e-02 L8_fnorm:6.1180e-02 L9_fnorm:6.1230e-02 L10_fnorm:6.1347e-02 L11_fnorm:6.1428e-02 L12_fnorm:6.1553e-02 
L1_l1linf:3.1400e-01 L2_l1linf:3.5412e-01 L3_l1linf:3.6013e-01 L4_l1linf:3.7191e-01 L5_l1linf:3.2368e-01 L6_l1linf:3.1674e-01 L7_l1linf:3.1557e-01 L8_l1linf:2.9823e-01 L9_l1linf:3.2499e-01 L10_l1linf:3.4761e-01 L11_l1linf:3.7825e-01 L12_l1linf:4.0277e-01 L1_spectral:7.0771e-03 L2_spectral:7.9888e-03 L3_spectral:8.0586e-03 L4_spectral:8.3140e-03 L5_spectral:7.2706e-03 L6_spectral:7.1143e-03 L7_spectral:7.0429e-03 L8_spectral:6.7165e-03 L9_spectral:7.2374e-03 L10_spectral:7.7994e-03 L11_spectral:8.4923e-03 L12_spectral:8.9589e-03 ip_v_neg_g:4.4688e-03 cos_v_neg_g:1.0364e-03 v_norm:1.3443e+00 g_norm:3.2077e+00 hv_norm:8.0965e-01 cos_v_hv:8.5880e-03 hg_norm:1.6048e+02 cos_g_hg:4.3795e-01 v_par:3.5213e-05 v_perp:1.3443e+00 L1_cos_v_neg_g:6.5240e-03 L1_v_norm:6.1220e-02 L2_cos_v_neg_g:4.1622e-03 L2_v_norm:5.9258e-02 L3_cos_v_neg_g:4.2019e-03 L3_v_norm:5.8495e-02 L4_cos_v_neg_g:5.7717e-03 L4_v_norm:6.0042e-02 L5_cos_v_neg_g:6.5460e-03 L5_v_norm:6.0881e-02 L6_cos_v_neg_g:6.5034e-03 L6_v_norm:6.1318e-02 L7_cos_v_neg_g:5.8932e-03 L7_v_norm:6.1301e-02 L8_cos_v_neg_g:6.3294e-03 L8_v_norm:6.1180e-02 L9_cos_v_neg_g:7.1075e-03 L9_v_norm:6.1230e-02 L10_cos_v_neg_g:5.7633e-03 L10_v_norm:6.1347e-02 L11_cos_v_neg_g:5.0844e-03 L11_v_norm:6.1428e-02 L12_cos_v_neg_g:5.0860e-03 L12_v_norm:6.1553e-02 +step:6000 train loss:3.592199 +step:6001 train loss:3.555853 +step:6002 train loss:3.582694 +step:6003 train loss:3.606137 +step:6004 train loss:3.553595 +step:6005 train loss:3.628446 +step:6006 train loss:3.534361 +step:6007 train loss:3.557736 +step:6008 train loss:3.570437 +step:6009 train loss:3.607660 +step:6010 train loss:3.597375 +step:6011 train loss:3.586981 +step:6012 train loss:3.554212 +step:6013 train loss:3.615617 +step:6014 train loss:3.632785 +step:6015 train loss:3.630310 +step:6016 train loss:3.598786 +step:6017 train loss:3.607780 +step:6018 train loss:3.545436 +step:6019 train loss:3.581435 +step:6020 train loss:3.568422 +step:6021 train loss:3.496181 +step:6022 train loss:3.610573 +step:6023 train loss:3.544347 +step:6024 train loss:3.621670 +step:6025 train loss:3.589074 +step:6026 train loss:3.559876 +step:6027 train loss:3.600781 +step:6028 train loss:3.517578 +step:6029 train loss:3.631787 +step:6030 train loss:3.601938 +step:6031 train loss:3.573295 +step:6032 train loss:3.532080 +step:6033 train loss:3.587846 +step:6034 train loss:3.614646 +step:6035 train loss:3.531830 +step:6036 train loss:3.506227 +step:6037 train loss:3.618526 +step:6038 train loss:3.624858 +step:6039 train loss:3.607760 +step:6040 train loss:3.568499 +step:6041 train loss:3.547190 +step:6042 train loss:3.526442 +step:6043 train loss:3.587914 +step:6044 train loss:3.708514 +step:6045 train loss:3.550224 +step:6046 train loss:3.560004 +step:6047 train loss:3.595608 +step:6048 train loss:3.603154 +step:6049 train loss:3.584047 +step:6050 train loss:3.551547 +step:6051 train loss:3.603779 +step:6052 train loss:3.575649 +step:6053 train loss:3.691265 +step:6054 train loss:3.731319 +step:6055 train loss:3.545888 +step:6056 train loss:3.536267 +step:6057 train loss:3.570212 +step:6058 train loss:3.600575 +step:6059 train loss:3.602765 +step:6060 train loss:3.608671 +step:6061 train loss:3.626838 +step:6062 train loss:3.576402 +step:6063 train loss:3.590881 +step:6064 train loss:3.588165 +step:6065 train loss:3.587394 +step:6066 train loss:3.574957 +step:6067 train loss:3.613898 +step:6068 train loss:3.556710 +step:6069 train loss:3.510283 +step:6070 train loss:3.661809 +step:6071 train loss:3.603929 +step:6072 train 
loss:3.547385 +step:6073 train loss:3.584868 +step:6074 train loss:3.670918 +step:6075 train loss:3.589747 +step:6076 train loss:3.599772 +step:6077 train loss:3.601390 +step:6078 train loss:3.538303 +step:6079 train loss:3.566140 +step:6080 train loss:3.573539 +step:6081 train loss:3.611225 +step:6082 train loss:3.559978 +step:6083 train loss:3.573763 +step:6084 train loss:3.636755 +step:6085 train loss:3.635441 +step:6086 train loss:3.534790 +step:6087 train loss:3.578915 +step:6088 train loss:3.564009 +step:6089 train loss:3.624214 +step:6090 train loss:3.626088 +step:6091 train loss:3.573833 +step:6092 train loss:3.536307 +step:6093 train loss:3.596713 +step:6094 train loss:3.510330 +step:6095 train loss:3.676871 +step:6096 train loss:3.545760 +step:6097 train loss:3.623266 +step:6098 train loss:3.598304 +step:6099 train loss:3.655319 +step:6100 train loss:3.648588 +step:6101 train loss:3.582495 +step:6102 train loss:3.700474 +step:6103 train loss:3.586772 +step:6104 train loss:3.697312 +step:6105 train loss:3.629315 +step:6106 train loss:3.568501 +step:6107 train loss:3.632773 +step:6108 train loss:3.596733 +step:6109 train loss:3.666861 +step:6110 train loss:3.598169 +step:6111 train loss:3.631904 +step:6112 train loss:3.569710 +step:6113 train loss:3.596799 +step:6114 train loss:3.568773 +step:6115 train loss:3.630961 +step:6116 train loss:3.572585 +step:6117 train loss:3.626620 +step:6118 train loss:3.608557 +step:6119 train loss:3.617521 +step:6120 train loss:3.760590 +step:6121 train loss:3.596741 +step:6122 train loss:3.606644 +step:6123 train loss:3.589171 +step:6124 train loss:3.562742 +step:6125 train loss:3.554068 +step:6126 train loss:3.574032 +step:6127 train loss:3.562647 +step:6128 train loss:3.521511 +step:6129 train loss:3.757961 +step:6130 train loss:3.547001 +step:6131 train loss:3.522886 +step:6132 train loss:3.596123 +step:6133 train loss:3.561582 +step:6134 train loss:3.589093 +step:6135 train loss:3.672190 +step:6136 train loss:3.692891 +step:6137 train loss:3.555202 +step:6138 train loss:3.610485 +step:6139 train loss:3.591045 +step:6140 train loss:3.588656 +step:6141 train loss:3.548678 +step:6142 train loss:3.611014 +step:6143 train loss:3.578836 +step:6144 train loss:3.598490 +step:6145 train loss:3.844234 +step:6146 train loss:3.680658 +step:6147 train loss:3.765179 +step:6148 train loss:3.533625 +step:6149 train loss:3.659371 +step:6150 train loss:3.612168 +step:6151 train loss:3.566174 +step:6152 train loss:3.562781 +step:6153 train loss:3.631631 +step:6154 train loss:3.717016 +step:6155 train loss:3.584286 +step:6156 train loss:3.678421 +step:6157 train loss:3.605620 +step:6158 train loss:3.599118 +step:6159 train loss:3.566621 +step:6160 train loss:3.732898 +step:6161 train loss:3.583954 +step:6162 train loss:3.602342 +step:6163 train loss:3.633218 +step:6164 train loss:3.546527 +step:6165 train loss:3.613307 +step:6166 train loss:3.607431 +step:6167 train loss:3.624571 +step:6168 train loss:3.595273 +step:6169 train loss:3.595993 +step:6170 train loss:3.598273 +step:6171 train loss:3.567094 +step:6172 train loss:3.556125 +step:6173 train loss:3.605297 +step:6174 train loss:3.534720 +step:6175 train loss:3.544769 +step:6176 train loss:3.528460 +step:6177 train loss:3.624759 +step:6178 train loss:3.569453 +step:6179 train loss:3.580653 +step:6180 train loss:3.584853 +step:6181 train loss:3.617700 +step:6182 train loss:3.500701 +step:6183 train loss:3.509606 +step:6184 train loss:3.628619 +step:6185 train loss:3.583924 +step:6186 train loss:3.546237 
+step:6187 train loss:3.585113 +step:6188 train loss:3.553401 +step:6189 train loss:3.592804 +step:6190 train loss:3.553473 +step:6191 train loss:3.586364 +step:6192 train loss:3.551952 +step:6193 train loss:3.619187 +step:6194 train loss:3.608639 +step:6195 train loss:3.588993 +step:6196 train loss:3.604964 +step:6197 train loss:3.629127 +step:6198 train loss:3.542503 +step:6199 train loss:3.566377 +step:6200 train loss:3.606623 +step:6201 train loss:3.649129 +step:6202 train loss:3.650824 +step:6203 train loss:3.650293 +step:6204 train loss:3.633853 +step:6205 train loss:3.572354 +step:6206 train loss:3.560723 +step:6207 train loss:3.615714 +step:6208 train loss:3.645319 +step:6209 train loss:3.613150 +step:6210 train loss:3.643974 +step:6211 train loss:3.562371 +step:6212 train loss:3.554326 +step:6213 train loss:3.568851 +step:6214 train loss:3.543420 +step:6215 train loss:3.719086 +step:6216 train loss:3.588397 +step:6217 train loss:3.646259 +step:6218 train loss:3.621431 +step:6219 train loss:3.634706 +step:6220 train loss:3.590712 +step:6221 train loss:3.555291 +step:6222 train loss:3.799898 +step:6223 train loss:3.556018 +step:6224 train loss:3.591653 +step:6225 train loss:3.568748 +step:6226 train loss:3.580628 +step:6227 train loss:3.583404 +step:6228 train loss:3.577699 +step:6229 train loss:3.618645 +step:6230 train loss:3.574212 +step:6231 train loss:3.685411 +step:6232 train loss:3.528239 +step:6233 train loss:3.568764 +step:6234 train loss:3.577114 +step:6235 train loss:3.605322 +step:6236 train loss:3.539784 +step:6237 train loss:3.566236 +step:6238 train loss:3.587972 +step:6239 train loss:3.575346 +step:6240 train loss:3.594590 +step:6241 train loss:3.579647 +step:6242 train loss:3.577567 +step:6243 train loss:3.612744 +step:6244 train loss:3.767198 +step:6245 train loss:3.564359 +step:6246 train loss:3.551492 +step:6247 train loss:3.546675 +step:6248 train loss:3.551970 +step:6249 train loss:3.491811 +step:6250 validation loss:3.502774 +step:6250 train loss:3.531190 +step:6251 train loss:3.547330 +step:6252 train loss:3.592121 +step:6253 train loss:3.601322 +step:6254 train loss:3.591012 +step:6255 train loss:3.553667 +step:6256 train loss:3.605878 +step:6257 train loss:3.607097 +step:6258 train loss:3.585804 +step:6259 train loss:3.590923 +step:6260 train loss:3.619788 +step:6261 train loss:3.641198 +step:6262 train loss:3.533626 +step:6263 train loss:3.565846 +step:6264 train loss:3.578428 +step:6265 train loss:3.564437 +step:6266 train loss:3.767740 +step:6267 train loss:3.571555 +step:6268 train loss:3.660233 +step:6269 train loss:3.534395 +step:6270 train loss:3.546452 +step:6271 train loss:3.595157 +step:6272 train loss:3.584626 +step:6273 train loss:3.782925 +step:6274 train loss:3.562169 +step:6275 train loss:3.595206 +step:6276 train loss:3.567880 +step:6277 train loss:3.551806 +step:6278 train loss:3.535551 +step:6279 train loss:3.590134 +step:6280 train loss:3.598123 +step:6281 train loss:3.531544 +step:6282 train loss:3.541488 +step:6283 train loss:3.631404 +step:6284 train loss:3.600235 +step:6285 train loss:3.598568 +step:6286 train loss:3.544847 +step:6287 train loss:3.574483 +step:6288 train loss:3.672749 +step:6289 train loss:3.536979 +step:6290 train loss:3.530304 +step:6291 train loss:3.564141 +step:6292 train loss:3.582623 +step:6293 train loss:3.571023 +step:6294 train loss:3.556456 +step:6295 train loss:3.579602 +step:6296 train loss:3.543469 +step:6297 train loss:3.668644 +step:6298 train loss:3.618234 +step:6299 train loss:3.507886 +step:6300 
train loss:3.592093 +step:6301 train loss:3.618580 +step:6302 train loss:3.602631 +step:6303 train loss:3.570887 +step:6304 train loss:3.587174 +step:6305 train loss:3.557220 +step:6306 train loss:3.570212 +step:6307 train loss:3.578588 +step:6308 train loss:3.553954 +step:6309 train loss:3.550475 +step:6310 train loss:3.607074 +step:6311 train loss:3.559449 +step:6312 train loss:3.598887 +step:6313 train loss:3.530455 +step:6314 train loss:3.554239 +step:6315 train loss:3.610032 +step:6316 train loss:3.530392 +step:6317 train loss:3.526951 +step:6318 train loss:3.637000 +step:6319 train loss:3.566988 +step:6320 train loss:3.584004 +step:6321 train loss:3.568129 +step:6322 train loss:3.570932 +step:6323 train loss:3.506338 +step:6324 train loss:3.509451 +step:6325 train loss:3.607396 +step:6326 train loss:3.527190 +step:6327 train loss:3.601939 +step:6328 train loss:3.579901 +step:6329 train loss:3.500174 +step:6330 train loss:3.528666 +step:6331 train loss:3.546718 +step:6332 train loss:3.679354 +step:6333 train loss:3.558110 +step:6334 train loss:3.535339 +step:6335 train loss:3.506375 +step:6336 train loss:3.539236 +step:6337 train loss:3.562731 +step:6338 train loss:3.517561 +step:6339 train loss:3.563325 +step:6340 train loss:3.539744 +step:6341 train loss:3.557613 +step:6342 train loss:3.553680 +step:6343 train loss:3.651793 +step:6344 train loss:3.503314 +step:6345 train loss:3.518212 +step:6346 train loss:3.597347 +step:6347 train loss:3.471668 +step:6348 train loss:3.569125 +step:6349 train loss:3.544521 +step:6350 train loss:3.517605 +step:6351 train loss:3.517807 +step:6352 train loss:3.535413 +step:6353 train loss:3.555165 +step:6354 train loss:3.568592 +step:6355 train loss:3.573714 +step:6356 train loss:3.588413 +step:6357 train loss:3.442561 +step:6358 train loss:3.533545 +step:6359 train loss:3.589154 +step:6360 train loss:3.501966 +step:6361 train loss:3.503665 +step:6362 train loss:3.541877 +step:6363 train loss:3.524154 +step:6364 train loss:3.509058 +step:6365 train loss:3.577931 +step:6366 train loss:3.590970 +step:6367 train loss:3.519927 +step:6368 train loss:3.562448 +step:6369 train loss:3.528327 +step:6370 train loss:3.581060 +step:6371 train loss:3.498394 +step:6372 train loss:3.527613 +step:6373 train loss:3.553995 +step:6374 train loss:3.583528 +step:6375 train loss:3.544512 +step:6376 train loss:3.568324 +step:6377 train loss:3.565661 +step:6378 train loss:3.514528 +step:6379 train loss:3.557617 +step:6380 train loss:3.599748 +step:6381 train loss:3.563913 +step:6382 train loss:3.518874 +step:6383 train loss:3.583925 +step:6384 train loss:3.554921 +step:6385 train loss:3.535670 +step:6386 train loss:3.569082 +step:6387 train loss:3.548066 +step:6388 train loss:3.590784 +step:6389 train loss:3.595623 +step:6390 train loss:3.547881 +step:6391 train loss:3.534215 +step:6392 train loss:3.519500 +step:6393 train loss:3.574305 +step:6394 train loss:3.561885 +step:6395 train loss:3.738909 +step:6396 train loss:3.563016 +step:6397 train loss:3.506835 +step:6398 train loss:3.579417 +step:6399 train loss:3.519317 +step:6400 train loss:3.593047 +step:6401 train loss:3.627836 +step:6402 train loss:3.560422 +step:6403 train loss:3.552638 +step:6404 train loss:3.530349 +step:6405 train loss:3.557742 +step:6406 train loss:3.563353 +step:6407 train loss:3.623506 +step:6408 train loss:3.511121 +step:6409 train loss:3.500805 +step:6410 train loss:3.629217 +step:6411 train loss:3.559831 +step:6412 train loss:3.565191 +step:6413 train loss:3.569601 +step:6414 train loss:3.517969 
+step:6415 train loss:3.578794 +step:6416 train loss:3.547394 +step:6417 train loss:3.517032 +step:6418 train loss:3.511306 +step:6419 train loss:3.593878 +step:6420 train loss:3.523717 +step:6421 train loss:3.546252 +step:6422 train loss:3.535693 +step:6423 train loss:3.544594 +step:6424 train loss:3.568877 +step:6425 train loss:3.563110 +step:6426 train loss:3.605376 +step:6427 train loss:3.567835 +step:6428 train loss:3.607087 +step:6429 train loss:3.569069 +step:6430 train loss:3.545884 +step:6431 train loss:3.519639 +step:6432 train loss:3.553402 +step:6433 train loss:3.567639 +step:6434 train loss:3.451246 +step:6435 train loss:3.628711 +step:6436 train loss:3.562137 +step:6437 train loss:3.525741 +step:6438 train loss:3.555705 +step:6439 train loss:3.528131 +step:6440 train loss:3.543383 +step:6441 train loss:3.537681 +step:6442 train loss:3.477933 +step:6443 train loss:3.533733 +step:6444 train loss:3.674369 +step:6445 train loss:3.577042 +step:6446 train loss:3.580860 +step:6447 train loss:3.562407 +step:6448 train loss:3.509360 +step:6449 train loss:3.536628 +step:6450 train loss:3.517778 +step:6451 train loss:3.507864 +step:6452 train loss:3.507723 +step:6453 train loss:3.553208 +step:6454 train loss:3.578032 +step:6455 train loss:3.564975 +step:6456 train loss:3.582022 +step:6457 train loss:3.561249 +step:6458 train loss:3.535329 +step:6459 train loss:3.514121 +step:6460 train loss:3.525514 +step:6461 train loss:3.522624 +step:6462 train loss:3.520008 +step:6463 train loss:3.616353 +step:6464 train loss:3.521708 +step:6465 train loss:3.563170 +step:6466 train loss:3.577706 +step:6467 train loss:3.505674 +step:6468 train loss:3.582440 +step:6469 train loss:3.493835 +step:6470 train loss:3.614727 +step:6471 train loss:3.521816 +step:6472 train loss:3.678738 +step:6473 train loss:3.562037 +step:6474 train loss:3.594577 +step:6475 train loss:3.537245 +step:6476 train loss:3.611600 +step:6477 train loss:3.539361 +step:6478 train loss:3.673015 +step:6479 train loss:3.585561 +step:6480 train loss:3.521604 +step:6481 train loss:3.577695 +step:6482 train loss:3.518580 +step:6483 train loss:3.581450 +step:6484 train loss:3.536228 +step:6485 train loss:3.597690 +step:6486 train loss:3.529871 +step:6487 train loss:3.530559 +step:6488 train loss:3.524851 +step:6489 train loss:3.527963 +step:6490 train loss:3.549289 +step:6491 train loss:3.519972 +step:6492 train loss:3.625442 +step:6493 train loss:3.528955 +step:6494 train loss:3.532442 +step:6495 train loss:3.530657 +step:6496 train loss:3.563081 +step:6497 train loss:3.580608 +step:6498 train loss:3.688805 +step:6499 train loss:3.659470 +step:6500 validation loss:3.491532 total_sharp:3.8802e-03 L1_sharp:5.7986e-02 L2_sharp:1.3708e-02 L3_sharp:2.7455e-02 L4_sharp:1.6045e-02 L5_sharp:1.8483e-02 L6_sharp:2.4453e-02 L7_sharp:2.7649e-02 L8_sharp:3.1530e-02 L9_sharp:2.2200e-02 L10_sharp:1.5417e-02 L11_sharp:1.2944e-02 L12_sharp:1.9619e-02 total_fnorm:1.3570e+00 total_l1_linf:8.1442e+03 total_spectral:1.3570e+00 L1_fnorm:6.1127e-02 L2_fnorm:5.9114e-02 L3_fnorm:5.8531e-02 L4_fnorm:5.9926e-02 L5_fnorm:6.0858e-02 L6_fnorm:6.1302e-02 L7_fnorm:6.1244e-02 L8_fnorm:6.1199e-02 L9_fnorm:6.1302e-02 L10_fnorm:6.1365e-02 L11_fnorm:6.1288e-02 L12_fnorm:6.1251e-02 L1_l1linf:3.0718e-01 L2_l1linf:3.4928e-01 L3_l1linf:3.3984e-01 L4_l1linf:3.4593e-01 L5_l1linf:3.3873e-01 L6_l1linf:3.2132e-01 L7_l1linf:3.0036e-01 L8_l1linf:3.0774e-01 L9_l1linf:3.2823e-01 L10_l1linf:3.5518e-01 L11_l1linf:3.6280e-01 L12_l1linf:3.4872e-01 L1_spectral:6.8986e-03 L2_spectral:7.9237e-03 
L3_spectral:7.7462e-03 L4_spectral:7.8377e-03 L5_spectral:7.5450e-03 L6_spectral:7.2345e-03 L7_spectral:6.8282e-03 L8_spectral:6.8953e-03 L9_spectral:7.4313e-03 L10_spectral:8.0079e-03 L11_spectral:8.0914e-03 L12_spectral:7.9685e-03 ip_v_neg_g:2.7049e-03 cos_v_neg_g:6.4524e-04 v_norm:1.3570e+00 g_norm:3.0891e+00 hv_norm:6.5448e-01 cos_v_hv:8.0455e-03 hg_norm:1.2091e+02 cos_g_hg:4.5701e-01 v_par:1.9876e-05 v_perp:1.3570e+00 L1_cos_v_neg_g:5.8989e-03 L1_v_norm:6.1127e-02 L2_cos_v_neg_g:3.1485e-03 L2_v_norm:5.9114e-02 L3_cos_v_neg_g:5.4079e-03 L3_v_norm:5.8531e-02 L4_cos_v_neg_g:4.6575e-03 L4_v_norm:5.9926e-02 L5_cos_v_neg_g:3.2392e-03 L5_v_norm:6.0858e-02 L6_cos_v_neg_g:4.0907e-03 L6_v_norm:6.1302e-02 L7_cos_v_neg_g:3.9301e-03 L7_v_norm:6.1244e-02 L8_cos_v_neg_g:4.8136e-03 L8_v_norm:6.1199e-02 L9_cos_v_neg_g:4.5204e-03 L9_v_norm:6.1302e-02 L10_cos_v_neg_g:4.0010e-03 L10_v_norm:6.1365e-02 L11_cos_v_neg_g:2.5655e-03 L11_v_norm:6.1288e-02 L12_cos_v_neg_g:1.6848e-03 L12_v_norm:6.1251e-02 +step:6500 train loss:3.506316 +step:6501 train loss:3.523404 +step:6502 train loss:3.544121 +step:6503 train loss:3.603322 +step:6504 train loss:3.550042 +step:6505 train loss:3.559319 +step:6506 train loss:3.518555 +step:6507 train loss:3.587090 +step:6508 train loss:3.553667 +step:6509 train loss:3.537267 +step:6510 train loss:3.543144 +step:6511 train loss:3.562436 +step:6512 train loss:3.501430 +step:6513 train loss:3.571396 +step:6514 train loss:3.442923 +step:6515 train loss:3.536540 +step:6516 train loss:3.585508 +step:6517 train loss:3.497627 +step:6518 train loss:3.539320 +step:6519 train loss:3.528620 +step:6520 train loss:3.617777 +step:6521 train loss:3.593396 +step:6522 train loss:3.604611 +step:6523 train loss:3.499008 +step:6524 train loss:3.583258 +step:6525 train loss:3.568008 +step:6526 train loss:3.510036 +step:6527 train loss:3.561237 +step:6528 train loss:3.578491 +step:6529 train loss:3.607886 +step:6530 train loss:3.513640 +step:6531 train loss:3.595405 +step:6532 train loss:3.519325 +step:6533 train loss:3.558354 +step:6534 train loss:3.564137 +step:6535 train loss:3.542790 +step:6536 train loss:3.674138 +step:6537 train loss:3.480549 +step:6538 train loss:3.593079 +step:6539 train loss:3.515917 +step:6540 train loss:3.628007 +step:6541 train loss:3.608115 +step:6542 train loss:3.564168 +step:6543 train loss:3.519523 +step:6544 train loss:3.501669 +step:6545 train loss:3.492067 +step:6546 train loss:3.554523 +step:6547 train loss:3.608793 +step:6548 train loss:3.550866 +step:6549 train loss:3.565228 +step:6550 train loss:3.675347 +step:6551 train loss:3.556817 +step:6552 train loss:3.549125 +step:6553 train loss:3.588939 +step:6554 train loss:3.480119 +step:6555 train loss:3.564448 +step:6556 train loss:3.436191 +step:6557 train loss:3.782311 +step:6558 train loss:3.613917 +step:6559 train loss:3.524812 +step:6560 train loss:3.565649 +step:6561 train loss:3.536743 +step:6562 train loss:3.559117 +step:6563 train loss:3.450232 +step:6564 train loss:3.554881 +step:6565 train loss:3.460216 +step:6566 train loss:3.573300 +step:6567 train loss:3.544491 +step:6568 train loss:3.589268 +step:6569 train loss:3.537499 +step:6570 train loss:3.574694 +step:6571 train loss:3.503683 +step:6572 train loss:3.580341 +step:6573 train loss:3.590824 +step:6574 train loss:3.577999 +step:6575 train loss:3.523651 +step:6576 train loss:3.513312 +step:6577 train loss:3.581712 +step:6578 train loss:3.452271 +step:6579 train loss:3.553895 +step:6580 train loss:3.510489 +step:6581 train loss:3.521971 +step:6582 
train loss:3.500334 +step:6583 train loss:3.600965 +step:6584 train loss:3.529367 +step:6585 train loss:3.567342 +step:6586 train loss:3.574319 +step:6587 train loss:3.583818 +step:6588 train loss:3.549835 +step:6589 train loss:3.581014 +step:6590 train loss:3.517902 +step:6591 train loss:3.571761 +step:6592 train loss:3.507545 +step:6593 train loss:3.520415 +step:6594 train loss:3.545165 +step:6595 train loss:3.530658 +step:6596 train loss:3.526612 +step:6597 train loss:3.551208 +step:6598 train loss:3.590836 +step:6599 train loss:3.484080 +step:6600 train loss:3.539668 +step:6601 train loss:3.597113 +step:6602 train loss:3.521091 +step:6603 train loss:3.550145 +step:6604 train loss:3.561410 +step:6605 train loss:3.540280 +step:6606 train loss:3.601241 +step:6607 train loss:3.521937 +step:6608 train loss:3.536443 +step:6609 train loss:3.504785 +step:6610 train loss:3.616546 +step:6611 train loss:3.539429 +step:6612 train loss:3.581084 +step:6613 train loss:3.500255 +step:6614 train loss:3.525928 +step:6615 train loss:3.526047 +step:6616 train loss:3.507526 +step:6617 train loss:3.549244 +step:6618 train loss:3.536322 +step:6619 train loss:3.505681 +step:6620 train loss:3.613710 +step:6621 train loss:3.489689 +step:6622 train loss:3.564032 +step:6623 train loss:3.494241 +step:6624 train loss:3.567748 +step:6625 train loss:3.607106 +step:6626 train loss:3.571163 +step:6627 train loss:3.521986 +step:6628 train loss:3.580462 +step:6629 train loss:3.482967 +step:6630 train loss:3.517428 +step:6631 train loss:3.556547 +step:6632 train loss:3.593055 +step:6633 train loss:3.544044 +step:6634 train loss:3.607760 +step:6635 train loss:3.508203 +step:6636 train loss:3.549066 +step:6637 train loss:3.515067 +step:6638 train loss:3.517014 +step:6639 train loss:3.528161 +step:6640 train loss:3.517564 +step:6641 train loss:3.528002 +step:6642 train loss:3.531224 +step:6643 train loss:3.608729 +step:6644 train loss:3.613673 +step:6645 train loss:3.487172 +step:6646 train loss:3.579531 +step:6647 train loss:3.535639 +step:6648 train loss:3.639055 +step:6649 train loss:3.564850 +step:6650 train loss:3.514606 +step:6651 train loss:3.563188 +step:6652 train loss:3.577768 +step:6653 train loss:3.519888 +step:6654 train loss:3.516551 +step:6655 train loss:3.556459 +step:6656 train loss:3.528601 +step:6657 train loss:3.553276 +step:6658 train loss:3.537031 +step:6659 train loss:3.689394 +step:6660 train loss:3.588450 +step:6661 train loss:3.512286 +step:6662 train loss:3.544863 +step:6663 train loss:3.479475 +step:6664 train loss:3.558432 +step:6665 train loss:3.568368 +step:6666 train loss:3.582421 +step:6667 train loss:3.496801 +step:6668 train loss:3.626070 +step:6669 train loss:3.508462 +step:6670 train loss:3.517584 +step:6671 train loss:3.598145 +step:6672 train loss:3.550678 +step:6673 train loss:3.559846 +step:6674 train loss:3.533432 +step:6675 train loss:3.550065 +step:6676 train loss:3.564312 +step:6677 train loss:3.517284 +step:6678 train loss:3.588450 +step:6679 train loss:3.623562 +step:6680 train loss:3.622935 +step:6681 train loss:3.576373 +step:6682 train loss:3.520380 +step:6683 train loss:3.542721 +step:6684 train loss:3.556149 +step:6685 train loss:3.568276 +step:6686 train loss:3.503989 +step:6687 train loss:3.519485 +step:6688 train loss:3.564264 +step:6689 train loss:3.573220 +step:6690 train loss:3.549967 +step:6691 train loss:3.580281 +step:6692 train loss:3.586641 +step:6693 train loss:3.619122 +step:6694 train loss:3.575925 +step:6695 train loss:3.547085 +step:6696 train loss:3.485981 
+step:6697 train loss:3.696858 +step:6698 train loss:3.547285 +step:6699 train loss:3.542268 +step:6700 train loss:3.554697 +step:6701 train loss:3.612862 +step:6702 train loss:3.503266 +step:6703 train loss:3.550683 +step:6704 train loss:3.534081 +step:6705 train loss:3.544673 +step:6706 train loss:3.521866 +step:6707 train loss:3.596664 +step:6708 train loss:3.550655 +step:6709 train loss:3.579842 +step:6710 train loss:3.569229 +step:6711 train loss:3.520739 +step:6712 train loss:3.509409 +step:6713 train loss:3.535126 +step:6714 train loss:3.577478 +step:6715 train loss:3.521631 +step:6716 train loss:3.598925 +step:6717 train loss:3.540502 +step:6718 train loss:3.564453 +step:6719 train loss:3.598974 +step:6720 train loss:3.527410 +step:6721 train loss:3.544487 +step:6722 train loss:3.521730 +step:6723 train loss:3.649429 +step:6724 train loss:3.505830 +step:6725 train loss:3.569991 +step:6726 train loss:3.523064 +step:6727 train loss:3.588406 +step:6728 train loss:3.682235 +step:6729 train loss:3.544418 +step:6730 train loss:3.540677 +step:6731 train loss:3.582269 +step:6732 train loss:3.456448 +step:6733 train loss:3.595287 +step:6734 train loss:3.525619 +step:6735 train loss:3.547594 +step:6736 train loss:3.549311 +step:6737 train loss:3.544486 +step:6738 train loss:3.577480 +step:6739 train loss:3.534681 +step:6740 train loss:3.484607 +step:6741 train loss:3.594425 +step:6742 train loss:3.555006 +step:6743 train loss:3.559444 +step:6744 train loss:3.449569 +step:6745 train loss:3.605377 +step:6746 train loss:3.534853 +step:6747 train loss:3.529642 +step:6748 train loss:3.600424 +step:6749 train loss:3.584567 +step:6750 validation loss:3.482252 +step:6750 train loss:3.504016 +step:6751 train loss:3.539262 +step:6752 train loss:3.542914 +step:6753 train loss:3.576797 +step:6754 train loss:3.558226 +step:6755 train loss:3.567865 +step:6756 train loss:3.508106 +step:6757 train loss:3.479689 +step:6758 train loss:3.654011 +step:6759 train loss:3.544065 +step:6760 train loss:3.601164 +step:6761 train loss:3.533916 +step:6762 train loss:3.558186 +step:6763 train loss:3.455906 +step:6764 train loss:3.537808 +step:6765 train loss:3.541149 +step:6766 train loss:3.533833 +step:6767 train loss:3.487422 +step:6768 train loss:3.493102 +step:6769 train loss:3.455150 +step:6770 train loss:3.543930 +step:6771 train loss:3.544253 +step:6772 train loss:3.553500 +step:6773 train loss:3.532755 +step:6774 train loss:3.546847 +step:6775 train loss:3.591465 +step:6776 train loss:3.546217 +step:6777 train loss:3.620324 +step:6778 train loss:3.510318 +step:6779 train loss:3.560604 +step:6780 train loss:3.491175 +step:6781 train loss:3.555076 +step:6782 train loss:3.468663 +step:6783 train loss:3.503299 +step:6784 train loss:3.528902 +step:6785 train loss:3.514461 +step:6786 train loss:3.531982 +step:6787 train loss:3.605791 +step:6788 train loss:3.546853 +step:6789 train loss:3.554515 +step:6790 train loss:3.551932 +step:6791 train loss:3.563349 +step:6792 train loss:3.561334 +step:6793 train loss:3.560235 +step:6794 train loss:3.531857 +step:6795 train loss:3.531727 +step:6796 train loss:3.536278 +step:6797 train loss:3.634245 +step:6798 train loss:3.537079 +step:6799 train loss:3.527777 +step:6800 train loss:3.495468 +step:6801 train loss:3.625854 +step:6802 train loss:3.575261 +step:6803 train loss:3.565363 +step:6804 train loss:3.593966 +step:6805 train loss:3.552918 +step:6806 train loss:3.490163 +step:6807 train loss:3.545852 +step:6808 train loss:3.529992 +step:6809 train loss:3.557798 +step:6810 
train loss:3.678535 +step:6811 train loss:3.581952 +step:6812 train loss:3.555062 +step:6813 train loss:3.567508 +step:6814 train loss:3.574977 +step:6815 train loss:3.621686 +step:6816 train loss:3.538855 +step:6817 train loss:3.562077 +step:6818 train loss:3.542651 +step:6819 train loss:3.523826 +step:6820 train loss:3.553948 +step:6821 train loss:3.516974 +step:6822 train loss:3.618823 +step:6823 train loss:3.600559 +step:6824 train loss:3.578230 +step:6825 train loss:3.524498 +step:6826 train loss:3.570183 +step:6827 train loss:3.554511 +step:6828 train loss:3.573061 +step:6829 train loss:3.558137 +step:6830 train loss:3.526657 +step:6831 train loss:3.486735 +step:6832 train loss:3.473192 +step:6833 train loss:3.488061 +step:6834 train loss:3.576583 +step:6835 train loss:3.548662 +step:6836 train loss:3.466890 +step:6837 train loss:3.533416 +step:6838 train loss:3.590391 +step:6839 train loss:3.676329 +step:6840 train loss:3.549488 +step:6841 train loss:3.506082 +step:6842 train loss:3.553151 +step:6843 train loss:3.659050 +step:6844 train loss:3.538243 +step:6845 train loss:3.591440 +step:6846 train loss:3.655647 +step:6847 train loss:3.586779 +step:6848 train loss:3.575055 +step:6849 train loss:3.598486 +step:6850 train loss:3.571069 +step:6851 train loss:3.499316 +step:6852 train loss:3.490735 +step:6853 train loss:3.481318 +step:6854 train loss:3.561063 +step:6855 train loss:3.531824 +step:6856 train loss:3.517607 +step:6857 train loss:3.567429 +step:6858 train loss:3.599939 +step:6859 train loss:3.504909 +step:6860 train loss:3.616203 +step:6861 train loss:3.643382 +step:6862 train loss:3.551570 +step:6863 train loss:3.547144 +step:6864 train loss:3.494192 +step:6865 train loss:3.566708 +step:6866 train loss:3.492589 +step:6867 train loss:3.672025 +step:6868 train loss:3.544915 +step:6869 train loss:3.577039 +step:6870 train loss:3.615128 +step:6871 train loss:3.530217 +step:6872 train loss:3.524865 +step:6873 train loss:3.544186 +step:6874 train loss:3.506623 +step:6875 train loss:3.510092 +step:6876 train loss:3.541407 +step:6877 train loss:3.582541 +step:6878 train loss:3.492808 +step:6879 train loss:3.540187 +step:6880 train loss:3.549027 +step:6881 train loss:3.510247 +step:6882 train loss:3.576521 +step:6883 train loss:3.564405 +step:6884 train loss:3.791448 +step:6885 train loss:3.560151 +step:6886 train loss:3.542565 +step:6887 train loss:3.480221 +step:6888 train loss:3.583644 +step:6889 train loss:3.465683 +step:6890 train loss:3.574864 +step:6891 train loss:3.583709 +step:6892 train loss:3.682432 +step:6893 train loss:3.515796 +step:6894 train loss:3.575525 +step:6895 train loss:3.578243 +step:6896 train loss:3.552305 +step:6897 train loss:3.505161 +step:6898 train loss:3.505463 +step:6899 train loss:3.594936 +step:6900 train loss:3.566486 +step:6901 train loss:3.518665 +step:6902 train loss:3.449732 +step:6903 train loss:3.495490 +step:6904 train loss:3.605997 +step:6905 train loss:3.640782 +step:6906 train loss:3.558666 +step:6907 train loss:3.575662 +step:6908 train loss:3.614220 +step:6909 train loss:3.606148 +step:6910 train loss:3.481090 +step:6911 train loss:3.611965 +step:6912 train loss:3.505338 +step:6913 train loss:3.540742 +step:6914 train loss:3.497097 +step:6915 train loss:3.526273 +step:6916 train loss:3.500775 +step:6917 train loss:3.624866 +step:6918 train loss:3.572013 +step:6919 train loss:3.564839 +step:6920 train loss:3.549868 +step:6921 train loss:3.616803 +step:6922 train loss:3.606395 +step:6923 train loss:3.472247 +step:6924 train loss:3.552263 
+step:6925 train loss:3.526971 +step:6926 train loss:3.564464 +step:6927 train loss:3.618255 +step:6928 train loss:3.503105 +step:6929 train loss:3.516093 +step:6930 train loss:3.550331 +step:6931 train loss:3.550586 +step:6932 train loss:3.783595 +step:6933 train loss:3.614993 +step:6934 train loss:3.556041 +step:6935 train loss:3.539237 +step:6936 train loss:3.578805 +step:6937 train loss:3.523971 +step:6938 train loss:3.588880 +step:6939 train loss:3.520707 +step:6940 train loss:3.577039 +step:6941 train loss:3.491980 +step:6942 train loss:3.580923 +step:6943 train loss:3.471541 +step:6944 train loss:3.566356 +step:6945 train loss:3.502443 +step:6946 train loss:3.596142 +step:6947 train loss:3.518890 +step:6948 train loss:3.513860 +step:6949 train loss:3.590200 +step:6950 train loss:3.581318 +step:6951 train loss:3.584904 +step:6952 train loss:3.516110 +step:6953 train loss:3.559138 +step:6954 train loss:3.621511 +step:6955 train loss:3.536468 +step:6956 train loss:3.573428 +step:6957 train loss:3.562682 +step:6958 train loss:3.524095 +step:6959 train loss:3.561614 +step:6960 train loss:3.530743 +step:6961 train loss:3.535255 +step:6962 train loss:3.519363 +step:6963 train loss:3.490148 +step:6964 train loss:3.532469 +step:6965 train loss:3.523957 +step:6966 train loss:3.567683 +step:6967 train loss:3.503582 +step:6968 train loss:3.543896 +step:6969 train loss:3.563109 +step:6970 train loss:3.536796 +step:6971 train loss:3.602260 +step:6972 train loss:3.550117 +step:6973 train loss:3.508615 +step:6974 train loss:3.636414 +step:6975 train loss:3.541412 +step:6976 train loss:3.514542 +step:6977 train loss:3.550983 +step:6978 train loss:3.543631 +step:6979 train loss:3.554043 +step:6980 train loss:3.530858 +step:6981 train loss:3.589986 +step:6982 train loss:3.543290 +step:6983 train loss:3.534470 +step:6984 train loss:3.652506 +step:6985 train loss:3.497473 +step:6986 train loss:3.488446 +step:6987 train loss:3.538695 +step:6988 train loss:3.542545 +step:6989 train loss:3.688142 +step:6990 train loss:3.551620 +step:6991 train loss:3.507269 +step:6992 train loss:3.554799 +step:6993 train loss:3.622786 +step:6994 train loss:3.570550 +step:6995 train loss:3.521598 +step:6996 train loss:3.519750 +step:6997 train loss:3.603771 +step:6998 train loss:3.500690 +step:6999 train loss:3.551920 +step:7000 validation loss:3.475118 total_sharp:4.9420e-03 L1_sharp:1.4453e-01 L2_sharp:2.7286e-02 L3_sharp:4.6575e-02 L4_sharp:1.8276e-02 L5_sharp:2.3491e-02 L6_sharp:2.9832e-02 L7_sharp:3.7819e-02 L8_sharp:3.7164e-02 L9_sharp:2.3988e-02 L10_sharp:1.6726e-02 L11_sharp:1.3564e-02 L12_sharp:2.5119e-02 total_fnorm:1.3534e+00 total_l1_linf:8.1222e+03 total_spectral:1.3534e+00 L1_fnorm:6.0603e-02 L2_fnorm:5.8949e-02 L3_fnorm:5.8464e-02 L4_fnorm:6.0129e-02 L5_fnorm:6.1063e-02 L6_fnorm:6.1510e-02 L7_fnorm:6.1540e-02 L8_fnorm:6.1392e-02 L9_fnorm:6.1400e-02 L10_fnorm:6.1458e-02 L11_fnorm:6.1488e-02 L12_fnorm:6.1376e-02 L1_l1linf:3.2720e-01 L2_l1linf:3.8331e-01 L3_l1linf:3.9455e-01 L4_l1linf:3.8074e-01 L5_l1linf:3.6599e-01 L6_l1linf:3.6285e-01 L7_l1linf:3.7191e-01 L8_l1linf:3.5244e-01 L9_l1linf:3.5395e-01 L10_l1linf:3.7333e-01 L11_l1linf:3.9537e-01 L12_l1linf:3.8482e-01 L1_spectral:7.3626e-03 L2_spectral:8.5895e-03 L3_spectral:8.8346e-03 L4_spectral:8.5155e-03 L5_spectral:8.2170e-03 L6_spectral:8.0791e-03 L7_spectral:8.2567e-03 L8_spectral:7.8008e-03 L9_spectral:7.9062e-03 L10_spectral:8.4250e-03 L11_spectral:8.8397e-03 L12_spectral:8.6392e-03 ip_v_neg_g:4.0616e-03 cos_v_neg_g:9.4445e-04 v_norm:1.3534e+00 
g_norm:3.1776e+00 hv_norm:7.7503e-01 cos_v_hv:8.6297e-03 hg_norm:1.3357e+02 cos_g_hg:4.4769e-01 v_par:2.8734e-05 v_perp:1.3534e+00 L1_cos_v_neg_g:8.4438e-03 L1_v_norm:6.0603e-02 L2_cos_v_neg_g:6.1863e-03 L2_v_norm:5.8949e-02 L3_cos_v_neg_g:4.9539e-03 L3_v_norm:5.8464e-02 L4_cos_v_neg_g:4.3336e-03 L4_v_norm:6.0129e-02 L5_cos_v_neg_g:5.5292e-03 L5_v_norm:6.1063e-02 L6_cos_v_neg_g:5.6623e-03 L6_v_norm:6.1510e-02 L7_cos_v_neg_g:5.6462e-03 L7_v_norm:6.1540e-02 L8_cos_v_neg_g:7.2704e-03 L8_v_norm:6.1392e-02 L9_cos_v_neg_g:6.9635e-03 L9_v_norm:6.1400e-02 L10_cos_v_neg_g:5.6797e-03 L10_v_norm:6.1458e-02 L11_cos_v_neg_g:3.6461e-03 L11_v_norm:6.1488e-02 L12_cos_v_neg_g:2.8316e-03 L12_v_norm:6.1376e-02 +step:7000 train loss:3.627381 +step:7001 train loss:3.533241 +step:7002 train loss:3.520707 +step:7003 train loss:3.547010 +step:7004 train loss:3.541828 +step:7005 train loss:3.526488 +step:7006 train loss:3.531428 +step:7007 train loss:3.581659 +step:7008 train loss:3.524522 +step:7009 train loss:3.563619 +step:7010 train loss:3.501697 +step:7011 train loss:3.554977 +step:7012 train loss:3.528435 +step:7013 train loss:3.602246 +step:7014 train loss:3.511280 +step:7015 train loss:3.569210 +step:7016 train loss:3.558600 +step:7017 train loss:3.524127 +step:7018 train loss:3.602639 +step:7019 train loss:3.530154 +step:7020 train loss:3.576022 +step:7021 train loss:3.521367 +step:7022 train loss:3.538487 +step:7023 train loss:3.553086 +step:7024 train loss:3.515797 +step:7025 train loss:3.565937 +step:7026 train loss:3.522633 +step:7027 train loss:3.585344 +step:7028 train loss:3.509643 +step:7029 train loss:3.498363 +step:7030 train loss:3.501636 +step:7031 train loss:3.554526 +step:7032 train loss:3.562411 +step:7033 train loss:3.537721 +step:7034 train loss:3.560814 +step:7035 train loss:3.609534 +step:7036 train loss:3.528706 +step:7037 train loss:3.555957 +step:7038 train loss:3.516898 +step:7039 train loss:3.571445 +step:7040 train loss:3.489889 +step:7041 train loss:3.582722 +step:7042 train loss:3.516199 +step:7043 train loss:3.488126 +step:7044 train loss:3.536226 +step:7045 train loss:3.534831 +step:7046 train loss:3.526535 +step:7047 train loss:3.565592 +step:7048 train loss:3.513081 +step:7049 train loss:3.523587 +step:7050 train loss:3.544305 +step:7051 train loss:3.563294 +step:7052 train loss:3.564651 +step:7053 train loss:3.527323 +step:7054 train loss:3.505895 +step:7055 train loss:3.572546 +step:7056 train loss:3.573614 +step:7057 train loss:3.499007 +step:7058 train loss:3.616807 +step:7059 train loss:3.524210 +step:7060 train loss:3.532756 +step:7061 train loss:3.507500 +step:7062 train loss:3.531584 +step:7063 train loss:3.590013 +step:7064 train loss:3.511960 +step:7065 train loss:3.564808 +step:7066 train loss:3.523427 +step:7067 train loss:3.558822 +step:7068 train loss:3.534815 +step:7069 train loss:3.497156 +step:7070 train loss:3.523352 +step:7071 train loss:3.493802 +step:7072 train loss:3.495494 +step:7073 train loss:3.489631 +step:7074 train loss:3.484723 +step:7075 train loss:3.505270 +step:7076 train loss:3.514582 +step:7077 train loss:3.525446 +step:7078 train loss:3.570437 +step:7079 train loss:3.581888 +step:7080 train loss:3.525963 +step:7081 train loss:3.547331 +step:7082 train loss:3.512799 +step:7083 train loss:3.546293 +step:7084 train loss:3.538644 +step:7085 train loss:3.498061 +step:7086 train loss:3.538072 +step:7087 train loss:3.515998 +step:7088 train loss:3.637776 +step:7089 train loss:3.531033 +step:7090 train loss:3.495872 +step:7091 train loss:3.508558 
+step:7092 train loss:3.486067 +step:7093 train loss:3.583148 +step:7094 train loss:3.503260 +step:7095 train loss:3.516098 +step:7096 train loss:3.535909 +step:7097 train loss:3.524596 +step:7098 train loss:3.548651 +step:7099 train loss:3.503583 +step:7100 train loss:3.536635 +step:7101 train loss:3.605390 +step:7102 train loss:3.496418 +step:7103 train loss:3.521276 +step:7104 train loss:3.552638 +step:7105 train loss:3.528844 +step:7106 train loss:3.517206 +step:7107 train loss:3.551177 +step:7108 train loss:3.620231 +step:7109 train loss:3.548922 +step:7110 train loss:3.576417 +step:7111 train loss:3.553612 +step:7112 train loss:3.542969 +step:7113 train loss:3.541516 +step:7114 train loss:3.557505 +step:7115 train loss:3.599035 +step:7116 train loss:3.526422 +step:7117 train loss:3.563018 +step:7118 train loss:3.577163 +step:7119 train loss:3.537178 +step:7120 train loss:3.593474 +step:7121 train loss:3.509727 +step:7122 train loss:3.511926 +step:7123 train loss:3.451846 +step:7124 train loss:3.608215 +step:7125 train loss:3.462182 +step:7126 train loss:3.627051 +step:7127 train loss:3.585058 +step:7128 train loss:3.532262 +step:7129 train loss:3.537248 +step:7130 train loss:3.526937 +step:7131 train loss:3.466491 +step:7132 train loss:3.509456 +step:7133 train loss:3.555023 +step:7134 train loss:3.490159 +step:7135 train loss:3.542881 +step:7136 train loss:3.525530 +step:7137 train loss:3.503477 +step:7138 train loss:3.491062 +step:7139 train loss:3.496800 +step:7140 train loss:3.529922 +step:7141 train loss:3.526231 +step:7142 train loss:3.524078 +step:7143 train loss:3.561282 +step:7144 train loss:3.509716 +step:7145 train loss:3.528069 +step:7146 train loss:3.537294 +step:7147 train loss:3.560337 +step:7148 train loss:3.563523 +step:7149 train loss:3.568908 +step:7150 train loss:3.542290 +step:7151 train loss:3.508019 +step:7152 train loss:3.478984 +step:7153 train loss:3.517445 +step:7154 train loss:3.533710 +step:7155 train loss:3.549174 +step:7156 train loss:3.518417 +step:7157 train loss:3.539141 +step:7158 train loss:3.497777 +step:7159 train loss:3.548304 +step:7160 train loss:3.556938 +step:7161 train loss:3.508717 +step:7162 train loss:3.556645 +step:7163 train loss:3.494640 +step:7164 train loss:3.528396 +step:7165 train loss:3.534389 +step:7166 train loss:3.590205 +step:7167 train loss:3.569026 +step:7168 train loss:3.544908 +step:7169 train loss:3.522141 +step:7170 train loss:3.552648 +step:7171 train loss:3.499236 +step:7172 train loss:3.667037 +step:7173 train loss:3.507431 +step:7174 train loss:3.550966 +step:7175 train loss:3.525493 +step:7176 train loss:3.533074 +step:7177 train loss:3.549176 +step:7178 train loss:3.549132 +step:7179 train loss:3.535567 +step:7180 train loss:3.537491 +step:7181 train loss:3.566004 +step:7182 train loss:3.515290 +step:7183 train loss:3.591650 +step:7184 train loss:3.679882 +step:7185 train loss:3.594221 +step:7186 train loss:3.532378 +step:7187 train loss:3.542750 +step:7188 train loss:3.530991 +step:7189 train loss:3.529614 +step:7190 train loss:3.532731 +step:7191 train loss:3.524128 +step:7192 train loss:3.558201 +step:7193 train loss:3.475913 +step:7194 train loss:3.536492 +step:7195 train loss:3.516487 +step:7196 train loss:3.562824 +step:7197 train loss:3.540064 +step:7198 train loss:3.596278 +step:7199 train loss:3.555203 +step:7200 train loss:3.547170 +step:7201 train loss:3.554418 +step:7202 train loss:3.533566 +step:7203 train loss:3.547162 +step:7204 train loss:3.517300 +step:7205 train loss:3.475937 +step:7206 train 
loss:3.503841 +step:7207 train loss:3.681215 +step:7208 train loss:3.511030 +step:7209 train loss:3.595469 +step:7210 train loss:3.532118 +step:7211 train loss:3.561625 +step:7212 train loss:3.645646 +step:7213 train loss:3.491725 +step:7214 train loss:3.562961 +step:7215 train loss:3.530849 +step:7216 train loss:3.580527 +step:7217 train loss:3.539668 +step:7218 train loss:3.625063 +step:7219 train loss:3.536136 +step:7220 train loss:3.613387 +step:7221 train loss:3.494937 +step:7222 train loss:3.577115 +step:7223 train loss:3.495732 +step:7224 train loss:3.556516 +step:7225 train loss:3.536147 +step:7226 train loss:3.503829 +step:7227 train loss:3.523569 +step:7228 train loss:3.510625 +step:7229 train loss:3.515471 +step:7230 train loss:3.499950 +step:7231 train loss:3.632349 +step:7232 train loss:3.502531 +step:7233 train loss:3.574421 +step:7234 train loss:3.560228 +step:7235 train loss:3.530955 +step:7236 train loss:3.572752 +step:7237 train loss:3.522347 +step:7238 train loss:3.561778 +step:7239 train loss:3.514864 +step:7240 train loss:3.513529 +step:7241 train loss:3.525936 +step:7242 train loss:3.507568 +step:7243 train loss:3.550692 +step:7244 train loss:3.529030 +step:7245 train loss:3.531488 +step:7246 train loss:3.573360 +step:7247 train loss:3.526191 +step:7248 train loss:3.565614 +step:7249 train loss:3.515387 +step:7250 validation loss:3.471061 +step:7250 train loss:3.537737 +step:7251 train loss:3.581418 +step:7252 train loss:3.495317 +step:7253 train loss:3.585864 +step:7254 train loss:3.522219 +step:7255 train loss:3.494940 +step:7256 train loss:3.535889 +step:7257 train loss:3.575595 +step:7258 train loss:3.534385 +step:7259 train loss:3.516575 +step:7260 train loss:3.601309 +step:7261 train loss:3.557392 +step:7262 train loss:3.514264 +step:7263 train loss:3.557013 +step:7264 train loss:3.540973 +step:7265 train loss:3.447323 +step:7266 train loss:3.568910 +step:7267 train loss:3.489948 +step:7268 train loss:3.554128 +step:7269 train loss:3.558666 +step:7270 train loss:3.511627 +step:7271 train loss:3.529426 +step:7272 train loss:3.534166 +step:7273 train loss:3.530394 +step:7274 train loss:3.509635 +step:7275 train loss:3.580672 +step:7276 train loss:3.487626 +step:7277 train loss:3.535829 +step:7278 train loss:3.505986 +step:7279 train loss:3.487197 +step:7280 train loss:3.558015 +step:7281 train loss:3.579591 +step:7282 train loss:3.575982 +step:7283 train loss:3.469350 +step:7284 train loss:3.509481 +step:7285 train loss:3.539676 +step:7286 train loss:3.670319 +step:7287 train loss:3.577586 +step:7288 train loss:3.534522 +step:7289 train loss:3.536843 +step:7290 train loss:3.587006 +step:7291 train loss:3.547957 +step:7292 train loss:3.615610 +step:7293 train loss:3.515827 +step:7294 train loss:3.599814 +step:7295 train loss:3.488952 +step:7296 train loss:3.486470 +step:7297 train loss:3.531271 +step:7298 train loss:3.507427 +step:7299 train loss:3.550070 +step:7300 train loss:3.535463 +step:7301 train loss:3.484944 +step:7302 train loss:3.628803 +step:7303 train loss:3.519525 +step:7304 train loss:3.466267 +step:7305 train loss:3.541200 +step:7306 train loss:3.569391 +step:7307 train loss:3.575362 +step:7308 train loss:3.525681 +step:7309 train loss:3.490470 +step:7310 train loss:3.520910 +step:7311 train loss:3.505270 +step:7312 train loss:3.545086 +step:7313 train loss:3.582544 +step:7314 train loss:3.478050 +step:7315 train loss:3.472148 +step:7316 train loss:3.617313 +step:7317 train loss:3.552922 +step:7318 train loss:3.490841 +step:7319 train loss:3.518524 
+step:7320 train loss:3.552948 +step:7321 train loss:3.578959 +step:7322 train loss:3.461123 +step:7323 train loss:3.514874 +step:7324 train loss:3.541307 +step:7325 train loss:3.505921 +step:7326 train loss:3.533569 +step:7327 train loss:3.509313 +step:7328 train loss:3.627598 +step:7329 train loss:3.469966 +step:7330 train loss:3.527344 +step:7331 train loss:3.522804 +step:7332 train loss:3.562484 +step:7333 train loss:3.543810 +step:7334 train loss:3.511593 +step:7335 train loss:3.509163 +step:7336 train loss:3.763585 +step:7337 train loss:3.549982 +step:7338 train loss:3.544955 +step:7339 train loss:3.556930 +step:7340 train loss:3.543904 +step:7341 train loss:3.534019 +step:7342 train loss:3.526090 +step:7343 train loss:3.537845 +step:7344 train loss:3.618667 +step:7345 train loss:3.478232 +step:7346 train loss:3.513522 +step:7347 train loss:3.507403 +step:7348 train loss:3.511133 +step:7349 train loss:3.611912 +step:7350 train loss:3.594273 +step:7351 train loss:3.530175 +step:7352 train loss:3.559555 +step:7353 train loss:3.540810 +step:7354 train loss:3.490719 +step:7355 train loss:3.671518 +step:7356 train loss:3.643015 +step:7357 train loss:3.565885 +step:7358 train loss:3.547324 +step:7359 train loss:3.515386 +step:7360 train loss:3.526975 +step:7361 train loss:3.479707 +step:7362 train loss:3.527425 +step:7363 train loss:3.539249 +step:7364 train loss:3.575460 +step:7365 train loss:3.559547 +step:7366 train loss:3.523896 +step:7367 train loss:3.599976 +step:7368 train loss:3.578293 +step:7369 train loss:3.572110 +step:7370 train loss:3.536552 +step:7371 train loss:3.493638 +step:7372 train loss:3.552770 +step:7373 train loss:3.574245 +step:7374 train loss:3.667639 +step:7375 train loss:3.490363 +step:7376 train loss:3.510495 +step:7377 train loss:3.556642 +step:7378 train loss:3.508321 +step:7379 train loss:3.633073 +step:7380 train loss:3.594952 +step:7381 train loss:3.558891 +step:7382 train loss:3.526234 +step:7383 train loss:3.618153 +step:7384 train loss:3.559127 +step:7385 train loss:3.517017 +step:7386 train loss:3.521840 +step:7387 train loss:3.564035 +step:7388 train loss:3.597562 +step:7389 train loss:3.540565 +step:7390 train loss:3.480428 +step:7391 train loss:3.521000 +step:7392 train loss:3.578433 +step:7393 train loss:3.544110 +step:7394 train loss:3.582820 +step:7395 train loss:3.471585 +step:7396 train loss:3.571121 +step:7397 train loss:3.500676 +step:7398 train loss:3.513959 +step:7399 train loss:3.563615 +step:7400 train loss:3.565356 +step:7401 train loss:3.483823 +step:7402 train loss:3.601963 +step:7403 train loss:3.485667 +step:7404 train loss:3.555164 +step:7405 train loss:3.679182 +step:7406 train loss:3.504963 +step:7407 train loss:3.554013 +step:7408 train loss:3.546077 +step:7409 train loss:3.523481 +step:7410 train loss:3.693120 +step:7411 train loss:3.535447 +step:7412 train loss:3.540007 +step:7413 train loss:3.592154 +step:7414 train loss:3.502074 +step:7415 train loss:3.560581 +step:7416 train loss:3.443373 +step:7417 train loss:3.563652 +step:7418 train loss:3.545161 +step:7419 train loss:3.515876 +step:7420 train loss:3.506567 +step:7421 train loss:3.541081 +step:7422 train loss:3.498171 +step:7423 train loss:3.636031 +step:7424 train loss:3.698962 +step:7425 train loss:3.587907 +step:7426 train loss:3.554626 +step:7427 train loss:3.527084 +step:7428 train loss:3.548302 +step:7429 train loss:3.562653 +step:7430 train loss:3.488455 +step:7431 train loss:3.493250 +step:7432 train loss:3.502185 +step:7433 train loss:3.598316 +step:7434 train 
loss:3.513973 +step:7435 train loss:3.598066 +step:7436 train loss:3.640777 +step:7437 train loss:3.461933 +step:7438 train loss:3.523010 +step:7439 train loss:3.533331 +step:7440 train loss:3.507853 +step:7441 train loss:3.474493 +step:7442 train loss:3.702700 +step:7443 train loss:3.526676 +step:7444 train loss:3.568064 +step:7445 train loss:3.497732 +step:7446 train loss:3.518773 +step:7447 train loss:3.444796 +step:7448 train loss:3.503888 +step:7449 train loss:3.516209 +step:7450 train loss:3.549956 +step:7451 train loss:3.581765 +step:7452 train loss:3.510413 +step:7453 train loss:3.535559 +step:7454 train loss:3.519366 +step:7455 train loss:3.531114 +step:7456 train loss:3.505084 +step:7457 train loss:3.514179 +step:7458 train loss:3.551435 +step:7459 train loss:3.527764 +step:7460 train loss:3.538106 +step:7461 train loss:3.573664 +step:7462 train loss:3.509622 +step:7463 train loss:3.572973 +step:7464 train loss:3.496854 +step:7465 train loss:3.505637 +step:7466 train loss:3.507626 +step:7467 train loss:3.515832 +step:7468 train loss:3.566953 +step:7469 train loss:3.499252 +step:7470 train loss:3.530420 +step:7471 train loss:3.519829 +step:7472 train loss:3.556232 +step:7473 train loss:3.493800 +step:7474 train loss:3.479764 +step:7475 train loss:3.512210 +step:7476 train loss:3.547221 +step:7477 train loss:3.521319 +step:7478 train loss:3.518695 +step:7479 train loss:3.533124 +step:7480 train loss:3.811121 +step:7481 train loss:3.462690 +step:7482 train loss:3.532314 +step:7483 train loss:3.527589 +step:7484 train loss:3.549140 +step:7485 train loss:3.533340 +step:7486 train loss:3.560985 +step:7487 train loss:3.555120 +step:7488 train loss:3.573587 +step:7489 train loss:3.571016 +step:7490 train loss:3.514599 +step:7491 train loss:3.537422 +step:7492 train loss:3.642589 +step:7493 train loss:3.619083 +step:7494 train loss:3.641816 +step:7495 train loss:3.512849 +step:7496 train loss:3.502434 +step:7497 train loss:3.600019 +step:7498 train loss:3.533196 +step:7499 train loss:3.574120 +step:7500 validation loss:3.465194 total_sharp:4.2039e-03 L1_sharp:2.0439e-01 L2_sharp:3.5307e-02 L3_sharp:3.0294e-02 L4_sharp:1.6000e-02 L5_sharp:1.9323e-02 L6_sharp:2.3000e-02 L7_sharp:2.4231e-02 L8_sharp:2.5568e-02 L9_sharp:1.6815e-02 L10_sharp:1.2614e-02 L11_sharp:1.2138e-02 L12_sharp:2.5640e-02 total_fnorm:1.3516e+00 total_l1_linf:8.1034e+03 total_spectral:1.3516e+00 L1_fnorm:6.0891e-02 L2_fnorm:5.8930e-02 L3_fnorm:5.8761e-02 L4_fnorm:6.0144e-02 L5_fnorm:6.0987e-02 L6_fnorm:6.1322e-02 L7_fnorm:6.1247e-02 L8_fnorm:6.1171e-02 L9_fnorm:6.1300e-02 L10_fnorm:6.1401e-02 L11_fnorm:6.1449e-02 L12_fnorm:6.1387e-02 L1_l1linf:3.8342e-01 L2_l1linf:3.9773e-01 L3_l1linf:3.8096e-01 L4_l1linf:3.5853e-01 L5_l1linf:3.5054e-01 L6_l1linf:3.2696e-01 L7_l1linf:3.0494e-01 L8_l1linf:2.9316e-01 L9_l1linf:3.1686e-01 L10_l1linf:3.4947e-01 L11_l1linf:3.8311e-01 L12_l1linf:3.8518e-01 L1_spectral:8.5439e-03 L2_spectral:8.9541e-03 L3_spectral:8.5423e-03 L4_spectral:8.1054e-03 L5_spectral:7.9207e-03 L6_spectral:7.3297e-03 L7_spectral:6.8491e-03 L8_spectral:6.6644e-03 L9_spectral:7.1533e-03 L10_spectral:7.8845e-03 L11_spectral:8.6091e-03 L12_spectral:8.6677e-03 ip_v_neg_g:4.4313e-03 cos_v_neg_g:9.6505e-04 v_norm:1.3516e+00 g_norm:3.3974e+00 hv_norm:7.7527e-01 cos_v_hv:7.3289e-03 hg_norm:2.5886e+02 cos_g_hg:4.5461e-01 v_par:2.5983e-05 v_perp:1.3516e+00 L1_cos_v_neg_g:1.1640e-02 L1_v_norm:6.0891e-02 L2_cos_v_neg_g:7.6402e-03 L2_v_norm:5.8930e-02 L3_cos_v_neg_g:6.2290e-03 L3_v_norm:5.8761e-02 L4_cos_v_neg_g:4.8995e-03 
L4_v_norm:6.0144e-02 L5_cos_v_neg_g:3.5892e-03 L5_v_norm:6.0987e-02 L6_cos_v_neg_g:4.1215e-03 L6_v_norm:6.1322e-02 L7_cos_v_neg_g:4.0446e-03 L7_v_norm:6.1247e-02 L8_cos_v_neg_g:4.7805e-03 L8_v_norm:6.1171e-02 L9_cos_v_neg_g:5.7560e-03 L9_v_norm:6.1300e-02 L10_cos_v_neg_g:6.3010e-03 L10_v_norm:6.1401e-02 L11_cos_v_neg_g:6.0774e-03 L11_v_norm:6.1449e-02 L12_cos_v_neg_g:4.6638e-03 L12_v_norm:6.1387e-02 +step:7500 train loss:3.517674 +step:7501 train loss:3.507362 +step:7502 train loss:3.499435 +step:7503 train loss:3.476758 +step:7504 train loss:3.501834 +step:7505 train loss:3.491011 +step:7506 train loss:3.552065 +step:7507 train loss:3.469354 +step:7508 train loss:3.538007 +step:7509 train loss:3.510643 +step:7510 train loss:3.539792 +step:7511 train loss:3.546279 +step:7512 train loss:3.808191 +step:7513 train loss:3.499155 +step:7514 train loss:3.525508 +step:7515 train loss:3.493454 +step:7516 train loss:3.505874 +step:7517 train loss:3.538853 +step:7518 train loss:3.515793 +step:7519 train loss:3.527775 +step:7520 train loss:3.594285 +step:7521 train loss:3.479060 +step:7522 train loss:3.534689 +step:7523 train loss:3.568244 +step:7524 train loss:3.514042 +step:7525 train loss:3.515970 +step:7526 train loss:3.465635 +step:7527 train loss:3.475570 +step:7528 train loss:3.574131 +step:7529 train loss:3.550416 +step:7530 train loss:3.497237 +step:7531 train loss:3.571072 +step:7532 train loss:3.561112 +step:7533 train loss:3.488255 +step:7534 train loss:3.550343 +step:7535 train loss:3.553711 +step:7536 train loss:3.586924 +step:7537 train loss:3.606272 +step:7538 train loss:3.631947 +step:7539 train loss:3.532066 +step:7540 train loss:3.519154 +step:7541 train loss:3.574139 +step:7542 train loss:3.534429 +step:7543 train loss:3.488295 +step:7544 train loss:3.535466 +step:7545 train loss:3.520710 +step:7546 train loss:3.475548 +step:7547 train loss:3.523571 +step:7548 train loss:3.536049 +step:7549 train loss:3.517213 +step:7550 train loss:3.518034 +step:7551 train loss:3.616034 +step:7552 train loss:3.530362 +step:7553 train loss:3.567542 +step:7554 train loss:3.491593 +step:7555 train loss:3.584181 +step:7556 train loss:3.485692 +step:7557 train loss:3.580944 +step:7558 train loss:3.568904 +step:7559 train loss:3.526418 +step:7560 train loss:3.620984 +step:7561 train loss:3.592347 +step:7562 train loss:3.497134 +step:7563 train loss:3.491401 +step:7564 train loss:3.545121 +step:7565 train loss:3.563075 +step:7566 train loss:3.555197 +step:7567 train loss:3.571324 +step:7568 train loss:3.515415 +step:7569 train loss:3.575056 +step:7570 train loss:3.558931 +step:7571 train loss:3.639201 +step:7572 train loss:3.488886 +step:7573 train loss:3.557319 +step:7574 train loss:3.520741 +step:7575 train loss:3.516189 +step:7576 train loss:3.521282 +step:7577 train loss:3.539523 +step:7578 train loss:3.596452 +step:7579 train loss:3.532830 +step:7580 train loss:3.519872 +step:7581 train loss:3.506466 +step:7582 train loss:3.562339 +step:7583 train loss:3.498888 +step:7584 train loss:3.482073 +step:7585 train loss:3.449043 +step:7586 train loss:3.488810 +step:7587 train loss:3.549710 +step:7588 train loss:3.678820 +step:7589 train loss:3.498254 +step:7590 train loss:3.567765 +step:7591 train loss:3.573367 +step:7592 train loss:3.528492 +step:7593 train loss:3.554330 +step:7594 train loss:3.551553 +step:7595 train loss:3.520384 +step:7596 train loss:3.572465 +step:7597 train loss:3.477041 +step:7598 train loss:3.539586 +step:7599 train loss:3.531735 +step:7600 train loss:3.490105 +step:7601 train 
loss:3.605978 +step:7602 train loss:3.543733 +step:7603 train loss:3.507071 +step:7604 train loss:3.648871 +step:7605 train loss:3.539324 +step:7606 train loss:3.574394 +step:7607 train loss:3.524741 +step:7608 train loss:3.535892 +step:7609 train loss:3.571159 +step:7610 train loss:3.528897 +step:7611 train loss:3.505888 +step:7612 train loss:3.450084 +step:7613 train loss:3.496985 +step:7614 train loss:3.567107 +step:7615 train loss:3.527607 +step:7616 train loss:3.594301 +step:7617 train loss:3.493892 +step:7618 train loss:3.581796 +step:7619 train loss:3.522725 +step:7620 train loss:3.511843 +step:7621 train loss:3.457756 +step:7622 train loss:3.735409 +step:7623 train loss:3.746212 +step:7624 train loss:3.561561 +step:7625 train loss:3.597640 +step:7626 train loss:3.517581 +step:7627 train loss:3.588193 +step:7628 train loss:3.470430 +step:7629 train loss:3.530343 +step:7630 train loss:3.544501 +step:7631 train loss:3.524044 +step:7632 train loss:3.575991 +step:7633 train loss:3.642874 +step:7634 train loss:3.604444 +step:7635 train loss:3.509926 +step:7636 train loss:3.535799 +step:7637 train loss:3.485429 +step:7638 train loss:3.594563 +step:7639 train loss:3.522147 +step:7640 train loss:3.502845 +step:7641 train loss:3.533046 +step:7642 train loss:3.870799 +step:7643 train loss:3.620138 +step:7644 train loss:3.544436 +step:7645 train loss:3.532962 +step:7646 train loss:3.520313 +step:7647 train loss:3.513049 +step:7648 train loss:3.548118 +step:7649 train loss:3.506318 +step:7650 train loss:3.553485 +step:7651 train loss:3.577489 +step:7652 train loss:3.455841 +step:7653 train loss:3.650555 +step:7654 train loss:3.509153 +step:7655 train loss:3.527098 +step:7656 train loss:3.502815 +step:7657 train loss:3.516441 +step:7658 train loss:3.471833 +step:7659 train loss:3.537517 +step:7660 train loss:3.469356 +step:7661 train loss:3.484768 +step:7662 train loss:3.485932 +step:7663 train loss:3.535087 +step:7664 train loss:3.492173 +step:7665 train loss:3.468311 +step:7666 train loss:3.575963 +step:7667 train loss:3.489212 +step:7668 train loss:3.599239 +step:7669 train loss:3.533223 +step:7670 train loss:3.487846 +step:7671 train loss:3.542641 +step:7672 train loss:3.561665 +step:7673 train loss:3.528290 +step:7674 train loss:3.565721 +step:7675 train loss:3.621223 +step:7676 train loss:3.587956 +step:7677 train loss:3.616041 +step:7678 train loss:3.554787 +step:7679 train loss:3.576653 +step:7680 train loss:3.583201 +step:7681 train loss:3.549140 +step:7682 train loss:3.520742 +step:7683 train loss:3.522377 +step:7684 train loss:3.496060 +step:7685 train loss:3.474037 +step:7686 train loss:3.594204 +step:7687 train loss:3.510023 +step:7688 train loss:3.477804 +step:7689 train loss:3.524937 +step:7690 train loss:3.492744 +step:7691 train loss:3.520392 +step:7692 train loss:3.553398 +step:7693 train loss:3.556911 +step:7694 train loss:3.607935 +step:7695 train loss:3.533270 +step:7696 train loss:3.508394 +step:7697 train loss:3.495724 +step:7698 train loss:3.556519 +step:7699 train loss:3.553301 +step:7700 train loss:3.450934 +step:7701 train loss:3.568964 +step:7702 train loss:3.511587 +step:7703 train loss:3.515718 +step:7704 train loss:3.567238 +step:7705 train loss:3.525560 +step:7706 train loss:3.461441 +step:7707 train loss:3.579029 +step:7708 train loss:3.518571 +step:7709 train loss:3.538444 +step:7710 train loss:3.602194 +step:7711 train loss:3.563312 +step:7712 train loss:3.506767 +step:7713 train loss:3.587955 +step:7714 train loss:3.533720 +step:7715 train loss:3.484457 
+step:7716 train loss:3.524299 +step:7717 train loss:3.548998 +step:7718 train loss:3.554368 +step:7719 train loss:3.511949 +step:7720 train loss:3.525703 +step:7721 train loss:3.566341 +step:7722 train loss:3.494513 +step:7723 train loss:3.866585 +step:7724 train loss:3.532497 +step:7725 train loss:3.433126 +step:7726 train loss:3.518655 +step:7727 train loss:3.544210 +step:7728 train loss:3.495942 +step:7729 train loss:3.507119 +step:7730 train loss:3.527813 +step:7731 train loss:3.559068 +step:7732 train loss:3.576363 +step:7733 train loss:3.489274 +step:7734 train loss:3.516023 +step:7735 train loss:3.601549 +step:7736 train loss:3.548896 +step:7737 train loss:3.567230 +step:7738 train loss:3.469310 +step:7739 train loss:3.544418 +step:7740 train loss:3.491632 +step:7741 train loss:3.529939 +step:7742 train loss:3.527783 +step:7743 train loss:3.478955 +step:7744 train loss:3.611210 +step:7745 train loss:3.497223 +step:7746 train loss:3.471859 +step:7747 train loss:3.566925 +step:7748 train loss:3.545318 +step:7749 train loss:3.471771 +step:7750 validation loss:3.457385 +step:7750 train loss:3.629153 +step:7751 train loss:3.514909 +step:7752 train loss:3.505881 +step:7753 train loss:3.508625 +step:7754 train loss:3.482122 +step:7755 train loss:3.548494 +step:7756 train loss:3.576283 +step:7757 train loss:3.528683 +step:7758 train loss:3.495087 +step:7759 train loss:3.524382 +step:7760 train loss:3.550864 +step:7761 train loss:3.543854 +step:7762 train loss:3.530408 +step:7763 train loss:3.512184 +step:7764 train loss:3.519290 +step:7765 train loss:3.473348 +step:7766 train loss:3.537422 +step:7767 train loss:3.541539 +step:7768 train loss:3.497125 +step:7769 train loss:3.559943 +step:7770 train loss:3.576644 +step:7771 train loss:3.551831 +step:7772 train loss:3.523978 +step:7773 train loss:3.583393 +step:7774 train loss:3.482265 +step:7775 train loss:3.469104 +step:7776 train loss:3.571400 +step:7777 train loss:3.527059 +step:7778 train loss:3.484514 +step:7779 train loss:3.528088 +step:7780 train loss:3.522781 +step:7781 train loss:3.532656 +step:7782 train loss:3.514016 +step:7783 train loss:3.497114 +step:7784 train loss:3.497173 +step:7785 train loss:3.535493 +step:7786 train loss:3.494870 +step:7787 train loss:3.572130 +step:7788 train loss:3.524561 +step:7789 train loss:3.460046 +step:7790 train loss:3.522376 +step:7791 train loss:3.552059 +step:7792 train loss:3.512175 +step:7793 train loss:3.534745 +step:7794 train loss:3.522279 +step:7795 train loss:3.552682 +step:7796 train loss:3.518647 +step:7797 train loss:3.534970 +step:7798 train loss:3.528990 +step:7799 train loss:3.518140 +step:7800 train loss:3.473436 +step:7801 train loss:3.538759 +step:7802 train loss:3.520678 +step:7803 train loss:3.570630 +step:7804 train loss:3.529450 +step:7805 train loss:3.527281 +step:7806 train loss:3.545281 +step:7807 train loss:3.615012 +step:7808 train loss:3.475834 +step:7809 train loss:3.453323 +step:7810 train loss:3.541919 +step:7811 train loss:3.474974 +step:7812 train loss:3.495183 +step:7813 train loss:3.582916 +step:7814 train loss:3.650663 +step:7815 train loss:3.465366 +step:7816 train loss:3.550278 +step:7817 train loss:3.579880 +step:7818 train loss:3.480485 +step:7819 train loss:3.531639 +step:7820 train loss:3.572604 +step:7821 train loss:3.507089 +step:7822 train loss:3.463938 +step:7823 train loss:3.546872 +step:7824 train loss:3.520428 +step:7825 train loss:3.504678 +step:7826 train loss:3.504532 +step:7827 train loss:3.545003 +step:7828 train loss:3.534795 +step:7829 
train loss:3.490149 +step:7830 train loss:3.499089 +step:7831 train loss:3.504031 +step:7832 train loss:3.569858 +step:7833 train loss:3.550292 +step:7834 train loss:3.511307 +step:7835 train loss:3.537793 +step:7836 train loss:3.645573 +step:7837 train loss:3.535263 +step:7838 train loss:3.504786 +step:7839 train loss:3.461339 +step:7840 train loss:3.478076 +step:7841 train loss:3.576854 +step:7842 train loss:3.560611 +step:7843 train loss:3.610772 +step:7844 train loss:3.542547 +step:7845 train loss:3.521605 +step:7846 train loss:3.632064 +step:7847 train loss:3.524167 +step:7848 train loss:3.532444 +step:7849 train loss:3.548295 +step:7850 train loss:3.516917 +step:7851 train loss:3.547536 +step:7852 train loss:3.519279 +step:7853 train loss:3.492475 +step:7854 train loss:3.523038 +step:7855 train loss:3.523422 +step:7856 train loss:3.527325 +step:7857 train loss:3.514531 +step:7858 train loss:3.521782 +step:7859 train loss:3.531170 +step:7860 train loss:3.565481 +step:7861 train loss:3.554981 +step:7862 train loss:3.494974 +step:7863 train loss:3.599138 +step:7864 train loss:3.440175 +step:7865 train loss:3.518968 +step:7866 train loss:3.492706 +step:7867 train loss:3.537195 +step:7868 train loss:3.516470 +step:7869 train loss:3.520446 +step:7870 train loss:3.440239 +step:7871 train loss:3.504248 +step:7872 train loss:3.494852 +step:7873 train loss:3.574233 +step:7874 train loss:3.516317 +step:7875 train loss:3.520312 +step:7876 train loss:3.539724 +step:7877 train loss:3.491968 +step:7878 train loss:3.530387 +step:7879 train loss:3.867478 +step:7880 train loss:3.521820 +step:7881 train loss:3.550689 +step:7882 train loss:3.629214 +step:7883 train loss:3.442718 +step:7884 train loss:3.535624 +step:7885 train loss:3.516062 +step:7886 train loss:3.514969 +step:7887 train loss:3.510676 +step:7888 train loss:3.542154 +step:7889 train loss:3.590017 +step:7890 train loss:3.496549 +step:7891 train loss:3.544962 +step:7892 train loss:3.516701 +step:7893 train loss:3.492167 +step:7894 train loss:3.513389 +step:7895 train loss:3.497154 +step:7896 train loss:3.498206 +step:7897 train loss:3.519437 +step:7898 train loss:3.531427 +step:7899 train loss:3.516840 +step:7900 train loss:3.487896 +step:7901 train loss:3.478088 +step:7902 train loss:3.625977 +step:7903 train loss:3.470956 +step:7904 train loss:3.522116 +step:7905 train loss:3.586082 +step:7906 train loss:3.485749 +step:7907 train loss:3.512372 +step:7908 train loss:3.565069 +step:7909 train loss:3.615431 +step:7910 train loss:3.494742 +step:7911 train loss:3.514815 +step:7912 train loss:3.519557 +step:7913 train loss:3.493935 +step:7914 train loss:3.529918 +step:7915 train loss:3.633497 +step:7916 train loss:3.505113 +step:7917 train loss:3.563070 +step:7918 train loss:3.504365 +step:7919 train loss:3.496311 +step:7920 train loss:3.534370 +step:7921 train loss:3.539631 +step:7922 train loss:3.513979 +step:7923 train loss:3.561100 +step:7924 train loss:3.522997 +step:7925 train loss:3.545341 +step:7926 train loss:3.449062 +step:7927 train loss:3.726226 +step:7928 train loss:3.555107 +step:7929 train loss:3.517131 +step:7930 train loss:3.478396 +step:7931 train loss:3.504031 +step:7932 train loss:3.525277 +step:7933 train loss:3.540038 +step:7934 train loss:3.631732 +step:7935 train loss:3.554781 +step:7936 train loss:3.526941 +step:7937 train loss:3.478353 +step:7938 train loss:3.490105 +step:7939 train loss:3.540016 +step:7940 train loss:3.523620 +step:7941 train loss:3.550146 +step:7942 train loss:3.539554 +step:7943 train loss:3.553908 
+step:7944 train loss:3.472681 +step:7945 train loss:3.576358 +step:7946 train loss:3.523509 +step:7947 train loss:3.537118 +step:7948 train loss:3.494394 +step:7949 train loss:3.545280 +step:7950 train loss:3.603225 +step:7951 train loss:3.565881 +step:7952 train loss:3.712174 +step:7953 train loss:3.605766 +step:7954 train loss:3.508646 +step:7955 train loss:3.496382 +step:7956 train loss:3.499236 +step:7957 train loss:3.576513 +step:7958 train loss:3.584991 +step:7959 train loss:3.541041 +step:7960 train loss:3.603579 +step:7961 train loss:3.511309 +step:7962 train loss:3.484231 +step:7963 train loss:3.521384 +step:7964 train loss:3.517256 +step:7965 train loss:3.527426 +step:7966 train loss:3.499841 +step:7967 train loss:3.523613 +step:7968 train loss:3.534061 +step:7969 train loss:3.490417 +step:7970 train loss:3.458962 +step:7971 train loss:3.544962 +step:7972 train loss:3.520543 +step:7973 train loss:3.493159 +step:7974 train loss:3.532459 +step:7975 train loss:3.519605 +step:7976 train loss:3.536018 +step:7977 train loss:3.569575 +step:7978 train loss:3.592892 +step:7979 train loss:3.537891 +step:7980 train loss:3.444208 +step:7981 train loss:3.482063 +step:7982 train loss:3.532891 +step:7983 train loss:3.547219 +step:7984 train loss:3.587620 +step:7985 train loss:3.515000 +step:7986 train loss:3.534795 +step:7987 train loss:3.588162 +step:7988 train loss:3.563142 +step:7989 train loss:3.467259 +step:7990 train loss:3.484840 +step:7991 train loss:3.497531 +step:7992 train loss:3.522311 +step:7993 train loss:3.504050 +step:7994 train loss:3.557449 +step:7995 train loss:3.558261 +step:7996 train loss:3.525208 +step:7997 train loss:3.542809 +step:7998 train loss:3.570172 +step:7999 train loss:3.499680 +step:8000 validation loss:3.451246 total_sharp:3.7961e-03 L1_sharp:5.0106e-02 L2_sharp:1.3532e-02 L3_sharp:2.3545e-02 L4_sharp:1.2884e-02 L5_sharp:1.8388e-02 L6_sharp:2.0193e-02 L7_sharp:2.5776e-02 L8_sharp:3.0444e-02 L9_sharp:2.3253e-02 L10_sharp:1.8238e-02 L11_sharp:1.6279e-02 L12_sharp:2.7608e-02 total_fnorm:1.3435e+00 total_l1_linf:8.0670e+03 total_spectral:1.3435e+00 L1_fnorm:6.0979e-02 L2_fnorm:5.9173e-02 L3_fnorm:5.8945e-02 L4_fnorm:6.0145e-02 L5_fnorm:6.0989e-02 L6_fnorm:6.1286e-02 L7_fnorm:6.1330e-02 L8_fnorm:6.1301e-02 L9_fnorm:6.1340e-02 L10_fnorm:6.1619e-02 L11_fnorm:6.1666e-02 L12_fnorm:6.1509e-02 L1_l1linf:3.1214e-01 L2_l1linf:3.4721e-01 L3_l1linf:3.4738e-01 L4_l1linf:3.4377e-01 L5_l1linf:3.4126e-01 L6_l1linf:3.1706e-01 L7_l1linf:3.0541e-01 L8_l1linf:3.2075e-01 L9_l1linf:3.5263e-01 L10_l1linf:3.8365e-01 L11_l1linf:4.0629e-01 L12_l1linf:4.1582e-01 L1_spectral:7.0982e-03 L2_spectral:7.8665e-03 L3_spectral:7.8154e-03 L4_spectral:7.7016e-03 L5_spectral:7.6721e-03 L6_spectral:7.1852e-03 L7_spectral:6.8880e-03 L8_spectral:7.2192e-03 L9_spectral:7.9497e-03 L10_spectral:8.6130e-03 L11_spectral:9.0607e-03 L12_spectral:9.2804e-03 ip_v_neg_g:2.9782e-03 cos_v_neg_g:7.2230e-04 v_norm:1.3435e+00 g_norm:3.0690e+00 hv_norm:6.4637e-01 cos_v_hv:7.8905e-03 hg_norm:1.3913e+02 cos_g_hg:4.3445e-01 v_par:2.5381e-05 v_perp:1.3435e+00 L1_cos_v_neg_g:5.7049e-03 L1_v_norm:6.0979e-02 L2_cos_v_neg_g:4.6314e-03 L2_v_norm:5.9173e-02 L3_cos_v_neg_g:5.4505e-03 L3_v_norm:5.8945e-02 L4_cos_v_neg_g:4.2936e-03 L4_v_norm:6.0145e-02 L5_cos_v_neg_g:3.2521e-03 L5_v_norm:6.0989e-02 L6_cos_v_neg_g:3.2743e-03 L6_v_norm:6.1286e-02 L7_cos_v_neg_g:5.7790e-03 L7_v_norm:6.1330e-02 L8_cos_v_neg_g:6.0023e-03 L8_v_norm:6.1301e-02 L9_cos_v_neg_g:4.3918e-03 L9_v_norm:6.1340e-02 L10_cos_v_neg_g:3.2429e-03 L10_v_norm:6.1619e-02 
L11_cos_v_neg_g:2.0377e-03 L11_v_norm:6.1666e-02 L12_cos_v_neg_g:2.6447e-03 L12_v_norm:6.1509e-02 +step:8000 train loss:3.568398 +step:8001 train loss:3.528152 +step:8002 train loss:3.548812 +step:8003 train loss:3.564785 +step:8004 train loss:3.542747 +step:8005 train loss:3.462922 +step:8006 train loss:3.542183 +step:8007 train loss:3.509939 +step:8008 train loss:3.535929 +step:8009 train loss:3.607550 +step:8010 train loss:3.824437 +step:8011 train loss:3.490184 +step:8012 train loss:3.569447 +step:8013 train loss:3.519906 +step:8014 train loss:3.533320 +step:8015 train loss:3.533197 +step:8016 train loss:3.520644 +step:8017 train loss:3.543676 +step:8018 train loss:3.502866 +step:8019 train loss:3.469699 +step:8020 train loss:3.509768 +step:8021 train loss:3.586259 +step:8022 train loss:3.502886 +step:8023 train loss:3.535549 +step:8024 train loss:3.395116 +step:8025 train loss:3.511550 +step:8026 train loss:3.519323 +step:8027 train loss:3.528329 +step:8028 train loss:3.582277 +step:8029 train loss:3.512541 +step:8030 train loss:3.471865 +step:8031 train loss:3.531402 +step:8032 train loss:3.516927 +step:8033 train loss:3.467647 +step:8034 train loss:3.503805 +step:8035 train loss:3.489315 +step:8036 train loss:3.484266 +step:8037 train loss:3.454377 +step:8038 train loss:3.468831 +step:8039 train loss:3.561769 +step:8040 train loss:3.496869 +step:8041 train loss:3.494909 +step:8042 train loss:3.530899 +step:8043 train loss:3.472784 +step:8044 train loss:3.487340 +step:8045 train loss:3.556132 +step:8046 train loss:3.481357 +step:8047 train loss:3.484746 +step:8048 train loss:3.514230 +step:8049 train loss:3.561904 +step:8050 train loss:3.501731 +step:8051 train loss:3.478205 +step:8052 train loss:3.539863 +step:8053 train loss:3.493744 +step:8054 train loss:3.529631 +step:8055 train loss:3.558670 +step:8056 train loss:3.526632 +step:8057 train loss:3.602002 +step:8058 train loss:3.509684 +step:8059 train loss:3.568124 +step:8060 train loss:3.537752 +step:8061 train loss:3.426059 +step:8062 train loss:3.558997 +step:8063 train loss:3.523518 +step:8064 train loss:3.483456 +step:8065 train loss:3.545634 +step:8066 train loss:3.504675 +step:8067 train loss:3.566742 +step:8068 train loss:3.497225 +step:8069 train loss:3.520159 +step:8070 train loss:3.488697 +step:8071 train loss:3.496693 +step:8072 train loss:3.536384 +step:8073 train loss:3.491072 +step:8074 train loss:3.501148 +step:8075 train loss:3.482863 +step:8076 train loss:3.535789 +step:8077 train loss:3.541849 +step:8078 train loss:3.486949 +step:8079 train loss:3.507535 +step:8080 train loss:3.495351 +step:8081 train loss:3.510864 +step:8082 train loss:3.527952 +step:8083 train loss:3.431556 +step:8084 train loss:3.568318 +step:8085 train loss:3.439625 +step:8086 train loss:3.568590 +step:8087 train loss:3.460474 +step:8088 train loss:3.510306 +step:8089 train loss:3.543742 +step:8090 train loss:3.567773 +step:8091 train loss:3.511580 +step:8092 train loss:3.491978 +step:8093 train loss:3.499390 +step:8094 train loss:3.500141 +step:8095 train loss:3.526368 +step:8096 train loss:3.526110 +step:8097 train loss:3.455043 +step:8098 train loss:3.470723 +step:8099 train loss:3.455677 +step:8100 train loss:3.513279 +step:8101 train loss:3.587672 +step:8102 train loss:3.526983 +step:8103 train loss:3.479830 +step:8104 train loss:3.528924 +step:8105 train loss:3.526718 +step:8106 train loss:3.489315 +step:8107 train loss:3.468514 +step:8108 train loss:3.487113 +step:8109 train loss:3.484027 +step:8110 train loss:3.548670 +step:8111 
train loss:3.469383 +step:8112 train loss:3.491089 +step:8113 train loss:3.478745 +step:8114 train loss:3.422786 +step:8115 train loss:3.477535 +step:8116 train loss:3.514745 +step:8117 train loss:3.483937 +step:8118 train loss:3.477491 +step:8119 train loss:3.519984 +step:8120 train loss:3.464551 +step:8121 train loss:3.523036 +step:8122 train loss:3.505752 +step:8123 train loss:3.515036 +step:8124 train loss:3.475610 +step:8125 train loss:3.458795 +step:8126 train loss:3.449563 +step:8127 train loss:3.544710 +step:8128 train loss:3.550990 +step:8129 train loss:3.471080 +step:8130 train loss:3.498389 +step:8131 train loss:3.470970 +step:8132 train loss:3.537931 +step:8133 train loss:3.461066 +step:8134 train loss:3.498429 +step:8135 train loss:3.490494 +step:8136 train loss:3.500056 +step:8137 train loss:3.563194 +step:8138 train loss:3.471069 +step:8139 train loss:3.545270 +step:8140 train loss:3.473270 +step:8141 train loss:3.495214 +step:8142 train loss:3.476844 +step:8143 train loss:3.528697 +step:8144 train loss:3.504803 +step:8145 train loss:3.473841 +step:8146 train loss:3.481921 +step:8147 train loss:3.503597 +step:8148 train loss:3.597342 +step:8149 train loss:3.507657 +step:8150 train loss:3.487860 +step:8151 train loss:3.480171 +step:8152 train loss:3.577587 +step:8153 train loss:3.454014 +step:8154 train loss:3.473657 +step:8155 train loss:3.496992 +step:8156 train loss:3.478987 +step:8157 train loss:3.500959 +step:8158 train loss:3.512105 +step:8159 train loss:3.528234 +step:8160 train loss:3.479724 +step:8161 train loss:3.524613 +step:8162 train loss:3.453125 +step:8163 train loss:3.514346 +step:8164 train loss:3.500106 +step:8165 train loss:3.550318 +step:8166 train loss:3.553652 +step:8167 train loss:3.456283 +step:8168 train loss:3.439079 +step:8169 train loss:3.487887 +step:8170 train loss:3.437120 +step:8171 train loss:3.498678 +step:8172 train loss:3.496275 +step:8173 train loss:3.496696 +step:8174 train loss:3.506002 +step:8175 train loss:3.466321 +step:8176 train loss:3.460819 +step:8177 train loss:3.510485 +step:8178 train loss:3.593882 +step:8179 train loss:3.500950 +step:8180 train loss:3.526501 +step:8181 train loss:3.525051 +step:8182 train loss:3.484737 +step:8183 train loss:3.471458 +step:8184 train loss:3.464971 +step:8185 train loss:3.504837 +step:8186 train loss:3.507143 +step:8187 train loss:3.518115 +step:8188 train loss:3.446320 +step:8189 train loss:3.595006 +step:8190 train loss:3.527346 +step:8191 train loss:3.529572 +step:8192 train loss:3.641342 +step:8193 train loss:3.510990 +step:8194 train loss:3.447236 +step:8195 train loss:3.545238 +step:8196 train loss:3.460804 +step:8197 train loss:3.490712 +step:8198 train loss:3.497294 +step:8199 train loss:3.499959 +step:8200 train loss:3.478730 +step:8201 train loss:3.593138 +step:8202 train loss:3.508598 +step:8203 train loss:3.528477 +step:8204 train loss:3.439946 +step:8205 train loss:3.446608 +step:8206 train loss:3.570487 +step:8207 train loss:3.494914 +step:8208 train loss:3.515498 +step:8209 train loss:3.558767 +step:8210 train loss:3.542593 +step:8211 train loss:3.474871 +step:8212 train loss:3.532328 +step:8213 train loss:3.544256 +step:8214 train loss:3.581352 +step:8215 train loss:3.554170 +step:8216 train loss:3.537096 +step:8217 train loss:3.516743 +step:8218 train loss:3.523281 +step:8219 train loss:3.658941 +step:8220 train loss:3.487864 +step:8221 train loss:3.509059 +step:8222 train loss:3.459386 +step:8223 train loss:3.480457 +step:8224 train loss:3.488402 +step:8225 train loss:3.541043 
+step:8226 train loss:3.469031 +step:8227 train loss:3.539190 +step:8228 train loss:3.424210 +step:8229 train loss:3.468540 +step:8230 train loss:3.485491 +step:8231 train loss:3.509145 +step:8232 train loss:3.509465 +step:8233 train loss:3.553565 +step:8234 train loss:3.549629 +step:8235 train loss:3.518973 +step:8236 train loss:3.505727 +step:8237 train loss:3.455016 +step:8238 train loss:3.708660 +step:8239 train loss:3.542486 +step:8240 train loss:3.488273 +step:8241 train loss:3.457530 +step:8242 train loss:3.497272 +step:8243 train loss:3.488145 +step:8244 train loss:3.500743 +step:8245 train loss:3.484390 +step:8246 train loss:3.551803 +step:8247 train loss:3.583715 +step:8248 train loss:3.501752 +step:8249 train loss:3.493290 +step:8250 validation loss:3.441398 +step:8250 train loss:3.482186 +step:8251 train loss:3.579397 +step:8252 train loss:3.514659 +step:8253 train loss:3.483351 +step:8254 train loss:3.454156 +step:8255 train loss:3.488577 +step:8256 train loss:3.470154 +step:8257 train loss:3.579352 +step:8258 train loss:3.499285 +step:8259 train loss:3.480363 +step:8260 train loss:3.481507 +step:8261 train loss:3.481433 +step:8262 train loss:3.494961 +step:8263 train loss:3.508378 +step:8264 train loss:3.472832 +step:8265 train loss:3.466180 +step:8266 train loss:3.473588 +step:8267 train loss:3.404971 +step:8268 train loss:3.530810 +step:8269 train loss:3.461979 +step:8270 train loss:3.515209 +step:8271 train loss:3.540971 +step:8272 train loss:3.567622 +step:8273 train loss:3.443563 +step:8274 train loss:3.506736 +step:8275 train loss:3.465447 +step:8276 train loss:3.504518 +step:8277 train loss:3.572738 +step:8278 train loss:3.589232 +step:8279 train loss:3.499717 +step:8280 train loss:3.488197 +step:8281 train loss:3.455991 +step:8282 train loss:3.515994 +step:8283 train loss:3.501445 +step:8284 train loss:3.486145 +step:8285 train loss:3.477629 +step:8286 train loss:3.590263 +step:8287 train loss:3.521801 +step:8288 train loss:3.495317 +step:8289 train loss:3.508611 +step:8290 train loss:3.448165 +step:8291 train loss:3.487014 +step:8292 train loss:3.517863 +step:8293 train loss:3.490625 +step:8294 train loss:3.461318 +step:8295 train loss:3.498844 +step:8296 train loss:3.566024 +step:8297 train loss:3.644302 +step:8298 train loss:3.467849 +step:8299 train loss:3.503919 +step:8300 train loss:3.512617 +step:8301 train loss:3.485439 +step:8302 train loss:3.543716 +step:8303 train loss:3.678004 +step:8304 train loss:3.483706 +step:8305 train loss:3.531638 +step:8306 train loss:3.507524 +step:8307 train loss:3.524261 +step:8308 train loss:3.522815 +step:8309 train loss:3.545037 +step:8310 train loss:3.463109 +step:8311 train loss:3.553447 +step:8312 train loss:3.545329 +step:8313 train loss:3.610989 +step:8314 train loss:3.480165 +step:8315 train loss:3.428929 +step:8316 train loss:3.488930 +step:8317 train loss:3.511102 +step:8318 train loss:3.499556 +step:8319 train loss:3.537584 +step:8320 train loss:3.557934 +step:8321 train loss:3.466068 +step:8322 train loss:3.482818 +step:8323 train loss:3.518460 +step:8324 train loss:3.493809 +step:8325 train loss:3.549390 +step:8326 train loss:3.516205 +step:8327 train loss:3.503976 +step:8328 train loss:3.576615 +step:8329 train loss:3.484815 +step:8330 train loss:3.526789 +step:8331 train loss:3.454041 +step:8332 train loss:3.553160 +step:8333 train loss:3.569992 +step:8334 train loss:3.438347 +step:8335 train loss:3.498956 +step:8336 train loss:3.592651 +step:8337 train loss:3.526021 +step:8338 train loss:3.490294 +step:8339 
train loss:3.470653 +step:8340 train loss:3.562974 +step:8341 train loss:3.461853 +step:8342 train loss:3.536475 +step:8343 train loss:3.447415 +step:8344 train loss:3.494086 +step:8345 train loss:3.528466 +step:8346 train loss:3.610881 +step:8347 train loss:3.500164 +step:8348 train loss:3.527938 +step:8349 train loss:3.499972 +step:8350 train loss:3.521166 +step:8351 train loss:3.461200 +step:8352 train loss:3.547161 +step:8353 train loss:3.502210 +step:8354 train loss:3.484830 +step:8355 train loss:3.484853 +step:8356 train loss:3.480722 +step:8357 train loss:3.496165 +step:8358 train loss:3.469151 +step:8359 train loss:3.462856 +step:8360 train loss:3.511022 +step:8361 train loss:3.523658 +step:8362 train loss:3.542960 +step:8363 train loss:3.541586 +step:8364 train loss:3.506580 +step:8365 train loss:3.653113 +step:8366 train loss:3.495153 +step:8367 train loss:3.469531 +step:8368 train loss:3.437430 +step:8369 train loss:3.469566 +step:8370 train loss:3.551007 +step:8371 train loss:3.522906 +step:8372 train loss:3.498686 +step:8373 train loss:3.512018 +step:8374 train loss:3.442507 +step:8375 train loss:3.506736 +step:8376 train loss:3.543543 +step:8377 train loss:3.371809 +step:8378 train loss:3.585560 +step:8379 train loss:3.446082 +step:8380 train loss:3.461081 +step:8381 train loss:3.463706 +step:8382 train loss:3.491215 +step:8383 train loss:3.450381 +step:8384 train loss:3.494315 +step:8385 train loss:3.503466 +step:8386 train loss:3.486971 +step:8387 train loss:3.647125 +step:8388 train loss:3.558676 +step:8389 train loss:3.536550 +step:8390 train loss:3.536311 +step:8391 train loss:3.467380 +step:8392 train loss:3.479587 +step:8393 train loss:3.433241 +step:8394 train loss:3.528543 +step:8395 train loss:3.531892 +step:8396 train loss:3.556751 +step:8397 train loss:3.488937 +step:8398 train loss:3.508353 +step:8399 train loss:3.474309 +step:8400 train loss:3.482308 +step:8401 train loss:3.487164 +step:8402 train loss:3.470563 +step:8403 train loss:3.487705 +step:8404 train loss:3.492712 +step:8405 train loss:3.446626 +step:8406 train loss:3.488131 +step:8407 train loss:3.531187 +step:8408 train loss:3.502313 +step:8409 train loss:3.424103 +step:8410 train loss:3.489163 +step:8411 train loss:3.514328 +step:8412 train loss:3.573390 +step:8413 train loss:3.548067 +step:8414 train loss:3.545547 +step:8415 train loss:3.467006 +step:8416 train loss:3.512650 +step:8417 train loss:3.429067 +step:8418 train loss:3.534915 +step:8419 train loss:3.488548 +step:8420 train loss:3.565913 +step:8421 train loss:3.481223 +step:8422 train loss:3.500285 +step:8423 train loss:3.515767 +step:8424 train loss:3.519161 +step:8425 train loss:3.577911 +step:8426 train loss:3.548206 +step:8427 train loss:3.466177 +step:8428 train loss:3.479499 +step:8429 train loss:3.541626 +step:8430 train loss:3.479626 +step:8431 train loss:3.486999 +step:8432 train loss:3.486948 +step:8433 train loss:3.462222 +step:8434 train loss:3.498983 +step:8435 train loss:3.418226 +step:8436 train loss:3.498725 +step:8437 train loss:3.542115 +step:8438 train loss:3.520876 +step:8439 train loss:3.462654 +step:8440 train loss:3.430837 +step:8441 train loss:3.489366 +step:8442 train loss:3.512465 +step:8443 train loss:3.470116 +step:8444 train loss:3.503358 +step:8445 train loss:3.452114 +step:8446 train loss:3.502854 +step:8447 train loss:3.514485 +step:8448 train loss:3.498439 +step:8449 train loss:3.487521 +step:8450 train loss:3.479245 +step:8451 train loss:3.509533 +step:8452 train loss:3.485704 +step:8453 train loss:3.464963 
+step:8454 train loss:3.513145 +step:8455 train loss:3.585670 +step:8456 train loss:3.563725 +step:8457 train loss:3.617520 +step:8458 train loss:3.507306 +step:8459 train loss:3.511981 +step:8460 train loss:3.441541 +step:8461 train loss:3.600687 +step:8462 train loss:3.470295 +step:8463 train loss:3.507408 +step:8464 train loss:3.521664 +step:8465 train loss:3.529379 +step:8466 train loss:3.501869 +step:8467 train loss:3.506149 +step:8468 train loss:3.757434 +step:8469 train loss:3.465877 +step:8470 train loss:3.461317 +step:8471 train loss:3.505253 +step:8472 train loss:3.526463 +step:8473 train loss:3.482118 +step:8474 train loss:3.606135 +step:8475 train loss:3.562490 +step:8476 train loss:3.512620 +step:8477 train loss:3.501650 +step:8478 train loss:3.481083 +step:8479 train loss:3.486071 +step:8480 train loss:3.595765 +step:8481 train loss:3.482507 +step:8482 train loss:3.477887 +step:8483 train loss:3.619465 +step:8484 train loss:3.503918 +step:8485 train loss:3.548309 +step:8486 train loss:3.461636 +step:8487 train loss:3.515052 +step:8488 train loss:3.460180 +step:8489 train loss:3.539618 +step:8490 train loss:3.526722 +step:8491 train loss:3.546864 +step:8492 train loss:3.499233 +step:8493 train loss:3.572687 +step:8494 train loss:3.437013 +step:8495 train loss:3.534627 +step:8496 train loss:3.480052 +step:8497 train loss:3.513740 +step:8498 train loss:3.527855 +step:8499 train loss:3.505842 +step:8500 validation loss:3.440687 total_sharp:3.6755e-03 L1_sharp:5.4529e-02 L2_sharp:1.5845e-02 L3_sharp:2.9827e-02 L4_sharp:1.6896e-02 L5_sharp:1.9391e-02 L6_sharp:2.2424e-02 L7_sharp:2.7642e-02 L8_sharp:2.6401e-02 L9_sharp:1.9128e-02 L10_sharp:1.3456e-02 L11_sharp:1.4993e-02 L12_sharp:3.1089e-02 total_fnorm:1.3471e+00 total_l1_linf:8.0746e+03 total_spectral:1.3471e+00 L1_fnorm:6.0890e-02 L2_fnorm:5.8852e-02 L3_fnorm:5.8875e-02 L4_fnorm:6.0159e-02 L5_fnorm:6.0830e-02 L6_fnorm:6.1158e-02 L7_fnorm:6.1258e-02 L8_fnorm:6.1177e-02 L9_fnorm:6.1079e-02 L10_fnorm:6.1243e-02 L11_fnorm:6.1205e-02 L12_fnorm:6.1336e-02 L1_l1linf:3.2960e-01 L2_l1linf:3.6251e-01 L3_l1linf:3.7955e-01 L4_l1linf:3.5949e-01 L5_l1linf:3.3724e-01 L6_l1linf:3.1086e-01 L7_l1linf:3.0440e-01 L8_l1linf:2.9258e-01 L9_l1linf:3.1453e-01 L10_l1linf:3.4414e-01 L11_l1linf:3.6262e-01 L12_l1linf:3.8332e-01 L1_spectral:7.4368e-03 L2_spectral:8.1466e-03 L3_spectral:8.5537e-03 L4_spectral:8.0819e-03 L5_spectral:7.6596e-03 L6_spectral:7.0216e-03 L7_spectral:6.9093e-03 L8_spectral:6.6388e-03 L9_spectral:7.1345e-03 L10_spectral:7.7134e-03 L11_spectral:8.1970e-03 L12_spectral:8.6286e-03 ip_v_neg_g:4.1373e-03 cos_v_neg_g:1.0072e-03 v_norm:1.3471e+00 g_norm:3.0493e+00 hv_norm:5.7622e-01 cos_v_hv:8.5928e-03 hg_norm:1.1135e+02 cos_g_hg:4.5399e-01 v_par:3.1179e-05 v_perp:1.3471e+00 L1_cos_v_neg_g:5.4128e-03 L1_v_norm:6.0890e-02 L2_cos_v_neg_g:4.9841e-03 L2_v_norm:5.8852e-02 L3_cos_v_neg_g:6.8822e-03 L3_v_norm:5.8875e-02 L4_cos_v_neg_g:7.4525e-03 L4_v_norm:6.0159e-02 L5_cos_v_neg_g:8.1910e-03 L5_v_norm:6.0830e-02 L6_cos_v_neg_g:7.7116e-03 L6_v_norm:6.1158e-02 L7_cos_v_neg_g:7.2384e-03 L7_v_norm:6.1258e-02 L8_cos_v_neg_g:7.0951e-03 L8_v_norm:6.1177e-02 L9_cos_v_neg_g:6.0283e-03 L9_v_norm:6.1079e-02 L10_cos_v_neg_g:5.9728e-03 L10_v_norm:6.1243e-02 L11_cos_v_neg_g:5.0411e-03 L11_v_norm:6.1205e-02 L12_cos_v_neg_g:2.6549e-03 L12_v_norm:6.1336e-02 +step:8500 train loss:3.502222 +step:8501 train loss:3.719056 +step:8502 train loss:3.732619 +step:8503 train loss:3.494397 +step:8504 train loss:3.490618 +step:8505 train loss:3.469562 +step:8506 train 
loss:3.539665 +step:8507 train loss:3.477437 +step:8508 train loss:3.511431 +step:8509 train loss:3.452719 +step:8510 train loss:3.474808 +step:8511 train loss:3.431690 +step:8512 train loss:3.530860 +step:8513 train loss:3.538018 +step:8514 train loss:3.482813 +step:8515 train loss:3.576086 +step:8516 train loss:3.495193 +step:8517 train loss:3.516104 +step:8518 train loss:3.407258 +step:8519 train loss:3.499334 +step:8520 train loss:3.468115 +step:8521 train loss:3.505235 +step:8522 train loss:3.403101 +step:8523 train loss:3.494231 +step:8524 train loss:3.487727 +step:8525 train loss:3.553130 +step:8526 train loss:3.531955 +step:8527 train loss:3.476444 +step:8528 train loss:3.559117 +step:8529 train loss:3.515816 +step:8530 train loss:3.549669 +step:8531 train loss:3.537365 +step:8532 train loss:3.575596 +step:8533 train loss:3.530007 +step:8534 train loss:3.528746 +step:8535 train loss:3.499507 +step:8536 train loss:3.590488 +step:8537 train loss:3.505198 +step:8538 train loss:3.572802 +step:8539 train loss:3.493624 +step:8540 train loss:3.521151 +step:8541 train loss:3.462432 +step:8542 train loss:3.528706 +step:8543 train loss:3.442376 +step:8544 train loss:3.439021 +step:8545 train loss:3.489812 +step:8546 train loss:3.440736 +step:8547 train loss:3.496034 +step:8548 train loss:3.466881 +step:8549 train loss:3.508394 +step:8550 train loss:3.461961 +step:8551 train loss:3.512691 +step:8552 train loss:3.511942 +step:8553 train loss:3.516409 +step:8554 train loss:3.492077 +step:8555 train loss:3.503335 +step:8556 train loss:3.583032 +step:8557 train loss:3.479940 +step:8558 train loss:3.517046 +step:8559 train loss:3.511141 +step:8560 train loss:3.490708 +step:8561 train loss:3.447060 +step:8562 train loss:3.474177 +step:8563 train loss:3.471929 +step:8564 train loss:3.542501 +step:8565 train loss:3.518315 +step:8566 train loss:3.536199 +step:8567 train loss:3.483700 +step:8568 train loss:3.501704 +step:8569 train loss:3.510016 +step:8570 train loss:3.453480 +step:8571 train loss:3.497183 +step:8572 train loss:3.511508 +step:8573 train loss:3.585001 +step:8574 train loss:3.515995 +step:8575 train loss:3.514972 +step:8576 train loss:3.549228 +step:8577 train loss:3.629839 +step:8578 train loss:3.540341 +step:8579 train loss:3.527212 +step:8580 train loss:3.461957 +step:8581 train loss:3.501100 +step:8582 train loss:3.506738 +step:8583 train loss:3.505667 +step:8584 train loss:3.495260 +step:8585 train loss:3.578008 +step:8586 train loss:3.493488 +step:8587 train loss:3.503325 +step:8588 train loss:3.551177 +step:8589 train loss:3.495053 +step:8590 train loss:3.488405 +step:8591 train loss:3.492663 +step:8592 train loss:3.450055 +step:8593 train loss:3.529670 +step:8594 train loss:3.554348 +step:8595 train loss:3.476117 +step:8596 train loss:3.517893 +step:8597 train loss:3.481032 +step:8598 train loss:3.535072 +step:8599 train loss:3.506099 +step:8600 train loss:3.511101 +step:8601 train loss:3.499104 +step:8602 train loss:3.473303 +step:8603 train loss:3.530678 +step:8604 train loss:3.476404 +step:8605 train loss:3.488019 +step:8606 train loss:3.500882 +step:8607 train loss:3.509278 +step:8608 train loss:3.552264 +step:8609 train loss:3.450097 +step:8610 train loss:3.522874 +step:8611 train loss:3.452568 +step:8612 train loss:3.532106 +step:8613 train loss:3.466355 +step:8614 train loss:3.528551 +step:8615 train loss:3.569680 +step:8616 train loss:3.452804 +step:8617 train loss:3.518208 +step:8618 train loss:3.497938 +step:8619 train loss:3.449527 +step:8620 train loss:3.492377 
+step:8621 train loss:3.523244 +step:8622 train loss:3.482104 +step:8623 train loss:3.494238 +step:8624 train loss:3.569343 +step:8625 train loss:3.490266 +step:8626 train loss:3.500821 +step:8627 train loss:3.495247 +step:8628 train loss:3.529336 +step:8629 train loss:3.436502 +step:8630 train loss:3.535900 +step:8631 train loss:3.478463 +step:8632 train loss:3.532241 +step:8633 train loss:3.482771 +step:8634 train loss:3.712422 +step:8635 train loss:3.509831 +step:8636 train loss:3.552660 +step:8637 train loss:3.480211 +step:8638 train loss:3.480235 +step:8639 train loss:3.536410 +step:8640 train loss:3.449717 +step:8641 train loss:3.550007 +step:8642 train loss:3.500947 +step:8643 train loss:3.609889 +step:8644 train loss:3.454031 +step:8645 train loss:3.524392 +step:8646 train loss:3.487152 +step:8647 train loss:3.511101 +step:8648 train loss:3.461049 +step:8649 train loss:3.543303 +step:8650 train loss:3.499878 +step:8651 train loss:3.509763 +step:8652 train loss:3.481884 +step:8653 train loss:3.513018 +step:8654 train loss:3.555605 +step:8655 train loss:3.485960 +step:8656 train loss:3.526746 +step:8657 train loss:3.530790 +step:8658 train loss:3.500998 +step:8659 train loss:3.494720 +step:8660 train loss:3.438705 +step:8661 train loss:3.497736 +step:8662 train loss:3.440817 +step:8663 train loss:3.513906 +step:8664 train loss:3.428958 +step:8665 train loss:3.449027 +step:8666 train loss:3.527137 +step:8667 train loss:3.420399 +step:8668 train loss:3.529342 +step:8669 train loss:3.565203 +step:8670 train loss:3.465881 +step:8671 train loss:3.465656 +step:8672 train loss:3.681326 +step:8673 train loss:3.446246 +step:8674 train loss:3.515213 +step:8675 train loss:3.556772 +step:8676 train loss:3.500434 +step:8677 train loss:3.523506 +step:8678 train loss:3.472410 +step:8679 train loss:3.530852 +step:8680 train loss:3.507755 +step:8681 train loss:3.513386 +step:8682 train loss:3.466002 +step:8683 train loss:3.483741 +step:8684 train loss:3.557438 +step:8685 train loss:3.503803 +step:8686 train loss:3.492712 +step:8687 train loss:3.447682 +step:8688 train loss:3.464341 +step:8689 train loss:3.536675 +step:8690 train loss:3.472712 +step:8691 train loss:3.550758 +step:8692 train loss:3.439803 +step:8693 train loss:3.529678 +step:8694 train loss:3.527908 +step:8695 train loss:3.514266 +step:8696 train loss:3.538932 +step:8697 train loss:3.493096 +step:8698 train loss:3.532702 +step:8699 train loss:3.484709 +step:8700 train loss:3.508109 +step:8701 train loss:3.472991 +step:8702 train loss:3.457119 +step:8703 train loss:3.471855 +step:8704 train loss:3.424748 +step:8705 train loss:3.508134 +step:8706 train loss:3.526066 +step:8707 train loss:3.523496 +step:8708 train loss:3.468135 +step:8709 train loss:3.532286 +step:8710 train loss:3.458815 +step:8711 train loss:3.513697 +step:8712 train loss:3.420811 +step:8713 train loss:3.497348 +step:8714 train loss:3.603534 +step:8715 train loss:3.461563 +step:8716 train loss:3.514355 +step:8717 train loss:3.485797 +step:8718 train loss:3.525895 +step:8719 train loss:3.491603 +step:8720 train loss:3.603499 +step:8721 train loss:3.495621 +step:8722 train loss:3.585221 +step:8723 train loss:3.458073 +step:8724 train loss:3.470902 +step:8725 train loss:3.498803 +step:8726 train loss:3.452148 +step:8727 train loss:3.531053 +step:8728 train loss:3.489848 +step:8729 train loss:3.493694 +step:8730 train loss:3.471449 +step:8731 train loss:3.474844 +step:8732 train loss:3.576013 +step:8733 train loss:3.498565 +step:8734 train loss:3.536965 +step:8735 train 
loss:3.605439 +step:8736 train loss:3.464065 +step:8737 train loss:3.490764 +step:8738 train loss:3.469864 +step:8739 train loss:3.531579 +step:8740 train loss:3.453619 +step:8741 train loss:3.507308 +step:8742 train loss:3.464327 +step:8743 train loss:3.503209 +step:8744 train loss:3.523948 +step:8745 train loss:3.563648 +step:8746 train loss:3.462177 +step:8747 train loss:3.564204 +step:8748 train loss:3.475960 +step:8749 train loss:3.514468 +step:8750 validation loss:3.432452 +step:8750 train loss:3.523846 +step:8751 train loss:3.562841 +step:8752 train loss:3.420600 +step:8753 train loss:3.468121 +step:8754 train loss:3.521943 +step:8755 train loss:3.503218 +step:8756 train loss:3.549534 +step:8757 train loss:3.459622 +step:8758 train loss:3.615716 +step:8759 train loss:3.463003 +step:8760 train loss:3.493897 +step:8761 train loss:3.572721 +step:8762 train loss:3.468916 +step:8763 train loss:3.440036 +step:8764 train loss:3.514731 +step:8765 train loss:3.581443 +step:8766 train loss:3.513605 +step:8767 train loss:3.471383 +step:8768 train loss:3.513175 +step:8769 train loss:3.484831 +step:8770 train loss:3.529432 +step:8771 train loss:3.502443 +step:8772 train loss:3.521179 +step:8773 train loss:3.482410 +step:8774 train loss:3.515090 +step:8775 train loss:3.515678 +step:8776 train loss:3.457353 +step:8777 train loss:3.494919 +step:8778 train loss:3.505042 +step:8779 train loss:3.524309 +step:8780 train loss:3.490255 +step:8781 train loss:3.494270 +step:8782 train loss:3.516580 +step:8783 train loss:3.495642 +step:8784 train loss:3.520873 +step:8785 train loss:3.507244 +step:8786 train loss:3.582803 +step:8787 train loss:3.525493 +step:8788 train loss:3.430567 +step:8789 train loss:3.527581 +step:8790 train loss:3.454461 +step:8791 train loss:3.505451 +step:8792 train loss:3.443252 +step:8793 train loss:3.532940 +step:8794 train loss:3.456854 +step:8795 train loss:3.526105 +step:8796 train loss:3.669640 +step:8797 train loss:3.415399 +step:8798 train loss:3.575360 +step:8799 train loss:3.489105 +step:8800 train loss:3.484604 +step:8801 train loss:3.506295 +step:8802 train loss:3.563505 +step:8803 train loss:3.522169 +step:8804 train loss:3.505065 +step:8805 train loss:3.523129 +step:8806 train loss:3.490764 +step:8807 train loss:3.483689 +step:8808 train loss:3.438827 +step:8809 train loss:3.562110 +step:8810 train loss:3.468317 +step:8811 train loss:3.455462 +step:8812 train loss:3.500409 +step:8813 train loss:3.407184 +step:8814 train loss:3.598593 +step:8815 train loss:3.443670 +step:8816 train loss:3.561448 +step:8817 train loss:3.498315 +step:8818 train loss:3.430309 +step:8819 train loss:3.547950 +step:8820 train loss:3.477893 +step:8821 train loss:3.502543 +step:8822 train loss:3.486162 +step:8823 train loss:3.498114 +step:8824 train loss:3.560214 +step:8825 train loss:3.536877 +step:8826 train loss:3.507973 +step:8827 train loss:3.466554 +step:8828 train loss:3.507985 +step:8829 train loss:3.485929 +step:8830 train loss:3.466179 +step:8831 train loss:3.541590 +step:8832 train loss:3.477517 +step:8833 train loss:3.512533 +step:8834 train loss:3.478667 +step:8835 train loss:3.415621 +step:8836 train loss:3.544341 +step:8837 train loss:3.445005 +step:8838 train loss:3.489797 +step:8839 train loss:3.475363 +step:8840 train loss:3.476942 +step:8841 train loss:3.491629 +step:8842 train loss:3.500963 +step:8843 train loss:3.513773 +step:8844 train loss:3.479001 +step:8845 train loss:3.500497 +step:8846 train loss:3.467921 +step:8847 train loss:3.506337 +step:8848 train loss:3.552714 
+step:8849 train loss:3.530983 +step:8850 train loss:3.524246 +step:8851 train loss:3.408291 +step:8852 train loss:3.511333 +step:8853 train loss:3.492626 +step:8854 train loss:3.462649 +step:8855 train loss:3.531681 +step:8856 train loss:3.522827 +step:8857 train loss:3.592056 +step:8858 train loss:3.457360 +step:8859 train loss:3.528467 +step:8860 train loss:3.488054 +step:8861 train loss:3.468963 +step:8862 train loss:3.470699 +step:8863 train loss:3.453888 +step:8864 train loss:3.520650 +step:8865 train loss:3.514038 +step:8866 train loss:3.395047 +step:8867 train loss:3.500052 +step:8868 train loss:3.527302 +step:8869 train loss:3.611894 +step:8870 train loss:3.490432 +step:8871 train loss:3.512968 +step:8872 train loss:3.498074 +step:8873 train loss:3.498821 +step:8874 train loss:3.550863 +step:8875 train loss:3.485444 +step:8876 train loss:3.523479 +step:8877 train loss:3.506068 +step:8878 train loss:3.554288 +step:8879 train loss:3.516192 +step:8880 train loss:3.463720 +step:8881 train loss:3.428599 +step:8882 train loss:3.499176 +step:8883 train loss:3.485315 +step:8884 train loss:3.574131 +step:8885 train loss:3.509314 +step:8886 train loss:3.512934 +step:8887 train loss:3.537041 +step:8888 train loss:3.498924 +step:8889 train loss:3.501826 +step:8890 train loss:3.493067 +step:8891 train loss:3.464613 +step:8892 train loss:3.548193 +step:8893 train loss:3.490276 +step:8894 train loss:3.506154 +step:8895 train loss:3.536036 +step:8896 train loss:3.453960 +step:8897 train loss:3.543374 +step:8898 train loss:3.477109 +step:8899 train loss:3.499466 +step:8900 train loss:3.465292 +step:8901 train loss:3.481135 +step:8902 train loss:3.522887 +step:8903 train loss:3.461135 +step:8904 train loss:3.511484 +step:8905 train loss:3.486530 +step:8906 train loss:3.478029 +step:8907 train loss:3.491012 +step:8908 train loss:3.553803 +step:8909 train loss:3.499488 +step:8910 train loss:3.461279 +step:8911 train loss:3.559656 +step:8912 train loss:3.455910 +step:8913 train loss:3.464846 +step:8914 train loss:3.564063 +step:8915 train loss:3.502175 +step:8916 train loss:3.532055 +step:8917 train loss:3.487714 +step:8918 train loss:3.492761 +step:8919 train loss:3.480750 +step:8920 train loss:3.505761 +step:8921 train loss:3.500250 +step:8922 train loss:3.482188 +step:8923 train loss:3.670521 +step:8924 train loss:3.562025 +step:8925 train loss:3.491796 +step:8926 train loss:3.503405 +step:8927 train loss:3.532954 +step:8928 train loss:3.485926 +step:8929 train loss:3.479796 +step:8930 train loss:3.536943 +step:8931 train loss:3.447077 +step:8932 train loss:3.551790 +step:8933 train loss:3.458989 +step:8934 train loss:3.496243 +step:8935 train loss:3.511596 +step:8936 train loss:3.545882 +step:8937 train loss:3.544964 +step:8938 train loss:3.484350 +step:8939 train loss:3.551146 +step:8940 train loss:3.505608 +step:8941 train loss:3.447645 +step:8942 train loss:3.526863 +step:8943 train loss:3.457693 +step:8944 train loss:3.508193 +step:8945 train loss:3.524373 +step:8946 train loss:3.369410 +step:8947 train loss:3.562876 +step:8948 train loss:3.410727 +step:8949 train loss:3.412745 +step:8950 train loss:3.459003 +step:8951 train loss:3.495115 +step:8952 train loss:3.515864 +step:8953 train loss:3.471631 +step:8954 train loss:3.574219 +step:8955 train loss:3.489845 +step:8956 train loss:3.518087 +step:8957 train loss:3.506022 +step:8958 train loss:3.482359 +step:8959 train loss:3.473695 +step:8960 train loss:3.441487 +step:8961 train loss:3.465134 +step:8962 train loss:3.518264 +step:8963 train 
loss:3.496481 +step:8964 train loss:3.480193 +step:8965 train loss:3.519927 +step:8966 train loss:3.478641 +step:8967 train loss:3.459626 +step:8968 train loss:3.444730 +step:8969 train loss:3.432966 +step:8970 train loss:3.513856 +step:8971 train loss:3.463499 +step:8972 train loss:3.663419 +step:8973 train loss:3.548201 +step:8974 train loss:3.507004 +step:8975 train loss:3.507599 +step:8976 train loss:3.473766 +step:8977 train loss:3.558494 +step:8978 train loss:3.543136 +step:8979 train loss:3.460183 +step:8980 train loss:3.554058 +step:8981 train loss:3.505021 +step:8982 train loss:3.480385 +step:8983 train loss:3.423825 +step:8984 train loss:3.548656 +step:8985 train loss:3.465366 +step:8986 train loss:3.502059 +step:8987 train loss:3.476607 +step:8988 train loss:3.523962 +step:8989 train loss:3.435732 +step:8990 train loss:3.574523 +step:8991 train loss:3.427565 +step:8992 train loss:3.483817 +step:8993 train loss:3.576731 +step:8994 train loss:3.479190 +step:8995 train loss:3.505117 +step:8996 train loss:3.477012 +step:8997 train loss:3.424789 +step:8998 train loss:3.429391 +step:8999 train loss:3.454431 +step:9000 validation loss:3.425196 total_sharp:4.2284e-03 L1_sharp:4.9141e-02 L2_sharp:1.9703e-02 L3_sharp:2.3774e-02 L4_sharp:1.5965e-02 L5_sharp:2.0077e-02 L6_sharp:2.4982e-02 L7_sharp:2.5690e-02 L8_sharp:2.7785e-02 L9_sharp:1.9889e-02 L10_sharp:1.3907e-02 L11_sharp:1.4809e-02 L12_sharp:2.4830e-02 total_fnorm:1.3469e+00 total_l1_linf:8.0814e+03 total_spectral:1.3469e+00 L1_fnorm:6.0991e-02 L2_fnorm:5.9120e-02 L3_fnorm:5.9089e-02 L4_fnorm:6.0368e-02 L5_fnorm:6.0976e-02 L6_fnorm:6.1374e-02 L7_fnorm:6.1353e-02 L8_fnorm:6.1259e-02 L9_fnorm:6.1339e-02 L10_fnorm:6.1446e-02 L11_fnorm:6.1374e-02 L12_fnorm:6.1308e-02 L1_l1linf:3.3606e-01 L2_l1linf:3.8321e-01 L3_l1linf:3.8050e-01 L4_l1linf:3.8189e-01 L5_l1linf:3.6060e-01 L6_l1linf:3.3822e-01 L7_l1linf:3.1773e-01 L8_l1linf:3.2452e-01 L9_l1linf:3.3133e-01 L10_l1linf:3.6836e-01 L11_l1linf:3.7274e-01 L12_l1linf:3.7147e-01 L1_spectral:7.6028e-03 L2_spectral:8.6034e-03 L3_spectral:8.5247e-03 L4_spectral:8.5101e-03 L5_spectral:8.1022e-03 L6_spectral:7.5622e-03 L7_spectral:7.1617e-03 L8_spectral:7.3296e-03 L9_spectral:7.4591e-03 L10_spectral:8.3351e-03 L11_spectral:8.3773e-03 L12_spectral:8.4204e-03 ip_v_neg_g:2.6436e-03 cos_v_neg_g:6.0068e-04 v_norm:1.3469e+00 g_norm:3.2676e+00 hv_norm:7.4010e-01 cos_v_hv:7.6952e-03 hg_norm:5.4483e+02 cos_g_hg:4.0570e-01 v_par:2.8726e-05 v_perp:1.3469e+00 L1_cos_v_neg_g:3.7495e-03 L1_v_norm:6.0991e-02 L2_cos_v_neg_g:3.0835e-03 L2_v_norm:5.9120e-02 L3_cos_v_neg_g:2.3329e-03 L3_v_norm:5.9089e-02 L4_cos_v_neg_g:2.5181e-03 L4_v_norm:6.0368e-02 L5_cos_v_neg_g:2.9627e-03 L5_v_norm:6.0976e-02 L6_cos_v_neg_g:2.5079e-03 L6_v_norm:6.1374e-02 L7_cos_v_neg_g:2.4512e-03 L7_v_norm:6.1353e-02 L8_cos_v_neg_g:3.8929e-03 L8_v_norm:6.1259e-02 L9_cos_v_neg_g:4.4383e-03 L9_v_norm:6.1339e-02 L10_cos_v_neg_g:3.6355e-03 L10_v_norm:6.1446e-02 L11_cos_v_neg_g:3.8558e-03 L11_v_norm:6.1374e-02 L12_cos_v_neg_g:4.5550e-03 L12_v_norm:6.1308e-02 +step:9000 train loss:3.539974 +step:9001 train loss:3.507761 +step:9002 train loss:3.513291 +step:9003 train loss:3.452859 +step:9004 train loss:3.454003 +step:9005 train loss:3.466350 +step:9006 train loss:3.469330 +step:9007 train loss:3.488456 +step:9008 train loss:3.444758 +step:9009 train loss:3.440482 +step:9010 train loss:3.477672 +step:9011 train loss:3.474341 +step:9012 train loss:3.587981 +step:9013 train loss:3.410058 +step:9014 train loss:3.483384 +step:9015 train loss:3.484972 +step:9016 
train loss:3.558969 +step:9017 train loss:3.502822 +step:9018 train loss:3.424621 +step:9019 train loss:3.507123 +step:9020 train loss:3.518027 +step:9021 train loss:3.476736 +step:9022 train loss:3.488645 +step:9023 train loss:3.486456 +step:9024 train loss:3.504523 +step:9025 train loss:3.489092 +step:9026 train loss:3.448775 +step:9027 train loss:3.493423 +step:9028 train loss:3.513084 +step:9029 train loss:3.531877 +step:9030 train loss:3.528738 +step:9031 train loss:3.494566 +step:9032 train loss:3.505079 +step:9033 train loss:3.488872 +step:9034 train loss:3.500061 +step:9035 train loss:3.502710 +step:9036 train loss:3.451612 +step:9037 train loss:3.445514 +step:9038 train loss:3.569452 +step:9039 train loss:3.474659 +step:9040 train loss:3.488317 +step:9041 train loss:3.537211 +step:9042 train loss:3.392902 +step:9043 train loss:3.487916 +step:9044 train loss:3.506522 +step:9045 train loss:3.451766 +step:9046 train loss:3.497187 +step:9047 train loss:3.489727 +step:9048 train loss:3.469853 +step:9049 train loss:3.502900 +step:9050 train loss:3.459639 +step:9051 train loss:3.497089 +step:9052 train loss:3.427593 +step:9053 train loss:3.552658 +step:9054 train loss:3.563892 +step:9055 train loss:3.485624 +step:9056 train loss:3.548643 +step:9057 train loss:3.402094 +step:9058 train loss:3.487582 +step:9059 train loss:3.563206 +step:9060 train loss:3.498197 +step:9061 train loss:3.521825 +step:9062 train loss:3.451678 +step:9063 train loss:3.585594 +step:9064 train loss:3.472126 +step:9065 train loss:3.482592 +step:9066 train loss:3.501242 +step:9067 train loss:3.464847 +step:9068 train loss:3.535105 +step:9069 train loss:3.494627 +step:9070 train loss:3.544538 +step:9071 train loss:3.479177 +step:9072 train loss:3.499093 +step:9073 train loss:3.462287 +step:9074 train loss:3.541897 +step:9075 train loss:3.489383 +step:9076 train loss:3.455291 +step:9077 train loss:3.532675 +step:9078 train loss:3.470340 +step:9079 train loss:3.514896 +step:9080 train loss:3.449170 +step:9081 train loss:3.486526 +step:9082 train loss:3.513590 +step:9083 train loss:3.542953 +step:9084 train loss:3.435061 +step:9085 train loss:3.504363 +step:9086 train loss:3.487571 +step:9087 train loss:3.435597 +step:9088 train loss:3.497485 +step:9089 train loss:3.514205 +step:9090 train loss:3.446087 +step:9091 train loss:3.547271 +step:9092 train loss:3.471884 +step:9093 train loss:3.470463 +step:9094 train loss:3.597663 +step:9095 train loss:3.465109 +step:9096 train loss:3.480462 +step:9097 train loss:3.463023 +step:9098 train loss:3.456841 +step:9099 train loss:3.581371 +step:9100 train loss:3.614042 +step:9101 train loss:3.531788 +step:9102 train loss:3.475869 +step:9103 train loss:3.481575 +step:9104 train loss:3.568843 +step:9105 train loss:3.430674 +step:9106 train loss:3.554786 +step:9107 train loss:3.491320 +step:9108 train loss:3.473151 +step:9109 train loss:3.496685 +step:9110 train loss:3.501111 +step:9111 train loss:3.481625 +step:9112 train loss:3.483736 +step:9113 train loss:3.515875 +step:9114 train loss:3.462114 +step:9115 train loss:3.487627 +step:9116 train loss:3.513858 +step:9117 train loss:3.522000 +step:9118 train loss:3.491174 +step:9119 train loss:3.414642 +step:9120 train loss:3.512228 +step:9121 train loss:3.543549 +step:9122 train loss:3.490547 +step:9123 train loss:3.508632 +step:9124 train loss:3.539655 +step:9125 train loss:3.490064 +step:9126 train loss:3.466844 +step:9127 train loss:3.499285 +step:9128 train loss:3.554635 +step:9129 train loss:3.509000 +step:9130 train loss:3.523448 
+step:9131 train loss:3.502138 +step:9132 train loss:3.511818 +step:9133 train loss:3.498848 +step:9134 train loss:3.472264 +step:9135 train loss:3.502006 +step:9136 train loss:3.498379 +step:9137 train loss:3.551856 +step:9138 train loss:3.469454 +step:9139 train loss:3.545906 +step:9140 train loss:3.469117 +step:9141 train loss:3.445242 +step:9142 train loss:3.625103 +step:9143 train loss:3.451790 +step:9144 train loss:3.545943 +step:9145 train loss:3.551583 +step:9146 train loss:3.466882 +step:9147 train loss:3.541245 +step:9148 train loss:3.561857 +step:9149 train loss:3.469284 +step:9150 train loss:3.491286 +step:9151 train loss:3.551851 +step:9152 train loss:3.509938 +step:9153 train loss:3.474888 +step:9154 train loss:3.492019 +step:9155 train loss:3.455811 +step:9156 train loss:3.457566 +step:9157 train loss:3.477634 +step:9158 train loss:3.458483 +step:9159 train loss:3.544771 +step:9160 train loss:3.428664 +step:9161 train loss:3.455956 +step:9162 train loss:3.544344 +step:9163 train loss:3.488611 +step:9164 train loss:3.461707 +step:9165 train loss:3.455401 +step:9166 train loss:3.513756 +step:9167 train loss:3.455267 +step:9168 train loss:3.498578 +step:9169 train loss:3.434941 +step:9170 train loss:3.456841 +step:9171 train loss:3.523564 +step:9172 train loss:3.444647 +step:9173 train loss:3.567682 +step:9174 train loss:3.495801 +step:9175 train loss:3.473821 +step:9176 train loss:3.455964 +step:9177 train loss:3.502072 +step:9178 train loss:3.445532 +step:9179 train loss:3.407262 +step:9180 train loss:3.500423 +step:9181 train loss:3.508743 +step:9182 train loss:3.479903 +step:9183 train loss:3.488800 +step:9184 train loss:3.481821 +step:9185 train loss:3.498815 +step:9186 train loss:3.459272 +step:9187 train loss:3.532542 +step:9188 train loss:3.569732 +step:9189 train loss:3.492059 +step:9190 train loss:3.496992 +step:9191 train loss:3.491717 +step:9192 train loss:3.503279 +step:9193 train loss:3.501098 +step:9194 train loss:3.439434 +step:9195 train loss:3.430064 +step:9196 train loss:3.482179 +step:9197 train loss:3.440178 +step:9198 train loss:3.511323 +step:9199 train loss:3.460802 +step:9200 train loss:3.488119 +step:9201 train loss:3.521367 +step:9202 train loss:3.509601 +step:9203 train loss:3.464571 +step:9204 train loss:3.662565 +step:9205 train loss:3.577852 +step:9206 train loss:3.491510 +step:9207 train loss:3.543431 +step:9208 train loss:3.518892 +step:9209 train loss:3.541166 +step:9210 train loss:3.434699 +step:9211 train loss:3.458678 +step:9212 train loss:3.460250 +step:9213 train loss:3.524807 +step:9214 train loss:3.464831 +step:9215 train loss:3.532397 +step:9216 train loss:3.494611 +step:9217 train loss:3.436433 +step:9218 train loss:3.526410 +step:9219 train loss:3.486141 +step:9220 train loss:3.530902 +step:9221 train loss:3.583025 +step:9222 train loss:3.526428 +step:9223 train loss:3.697672 +step:9224 train loss:3.534087 +step:9225 train loss:3.465916 +step:9226 train loss:3.481789 +step:9227 train loss:3.499382 +step:9228 train loss:3.500107 +step:9229 train loss:3.457952 +step:9230 train loss:3.520414 +step:9231 train loss:3.406020 +step:9232 train loss:3.464106 +step:9233 train loss:3.486762 +step:9234 train loss:3.542292 +step:9235 train loss:3.544495 +step:9236 train loss:3.451648 +step:9237 train loss:3.514809 +step:9238 train loss:3.488613 +step:9239 train loss:3.478875 +step:9240 train loss:3.449810 +step:9241 train loss:3.479319 +step:9242 train loss:3.488686 +step:9243 train loss:3.486429 +step:9244 train loss:3.460629 +step:9245 train 
loss:3.468137 +step:9246 train loss:3.466803 +step:9247 train loss:3.478953 +step:9248 train loss:3.486722 +step:9249 train loss:3.486616 +step:9250 validation loss:3.423069 +step:9250 train loss:3.527357 +step:9251 train loss:3.466725 +step:9252 train loss:3.536028 +step:9253 train loss:3.531287 +step:9254 train loss:3.459713 +step:9255 train loss:3.577998 +step:9256 train loss:3.458174 +step:9257 train loss:3.397619 +step:9258 train loss:3.478617 +step:9259 train loss:3.483376 +step:9260 train loss:3.579357 +step:9261 train loss:3.458450 +step:9262 train loss:3.530708 +step:9263 train loss:3.431818 +step:9264 train loss:3.578225 +step:9265 train loss:3.603608 +step:9266 train loss:3.534297 +step:9267 train loss:3.481726 +step:9268 train loss:3.473574 +step:9269 train loss:3.500086 +step:9270 train loss:3.422603 +step:9271 train loss:3.535975 +step:9272 train loss:3.475420 +step:9273 train loss:3.496578 +step:9274 train loss:3.498505 +step:9275 train loss:3.496592 +step:9276 train loss:3.523763 +step:9277 train loss:3.497505 +step:9278 train loss:3.512052 +step:9279 train loss:3.505749 +step:9280 train loss:3.502206 +step:9281 train loss:3.474465 +step:9282 train loss:3.597538 +step:9283 train loss:3.482828 +step:9284 train loss:3.447655 +step:9285 train loss:3.466895 +step:9286 train loss:3.521057 +step:9287 train loss:3.498665 +step:9288 train loss:3.498220 +step:9289 train loss:3.467159 +step:9290 train loss:3.497519 +step:9291 train loss:3.475205 +step:9292 train loss:3.510951 +step:9293 train loss:3.569410 +step:9294 train loss:3.492336 +step:9295 train loss:3.477689 +step:9296 train loss:3.428073 +step:9297 train loss:3.497101 +step:9298 train loss:3.440020 +step:9299 train loss:3.421202 +step:9300 train loss:3.527688 +step:9301 train loss:3.554603 +step:9302 train loss:3.491920 +step:9303 train loss:3.541899 +step:9304 train loss:3.461337 +step:9305 train loss:3.453353 +step:9306 train loss:3.457168 +step:9307 train loss:3.457700 +step:9308 train loss:3.432125 +step:9309 train loss:3.419787 +step:9310 train loss:3.478016 +step:9311 train loss:3.537895 +step:9312 train loss:3.488040 +step:9313 train loss:3.435408 +step:9314 train loss:3.463481 +step:9315 train loss:3.497547 +step:9316 train loss:3.481494 +step:9317 train loss:3.455924 +step:9318 train loss:3.543762 +step:9319 train loss:3.452647 +step:9320 train loss:3.474343 +step:9321 train loss:3.487394 +step:9322 train loss:3.495675 +step:9323 train loss:3.570569 +step:9324 train loss:3.513152 +step:9325 train loss:3.453562 +step:9326 train loss:3.530184 +step:9327 train loss:3.526388 +step:9328 train loss:3.528023 +step:9329 train loss:3.412957 +step:9330 train loss:3.581788 +step:9331 train loss:3.512433 +step:9332 train loss:3.534706 +step:9333 train loss:3.553683 +step:9334 train loss:3.488384 +step:9335 train loss:3.585538 +step:9336 train loss:3.543019 +step:9337 train loss:3.496313 +step:9338 train loss:3.552207 +step:9339 train loss:3.529209 +step:9340 train loss:3.488780 +step:9341 train loss:3.577689 +step:9342 train loss:3.474961 +step:9343 train loss:3.471082 +step:9344 train loss:3.470688 +step:9345 train loss:3.611524 +step:9346 train loss:3.450008 +step:9347 train loss:3.466058 +step:9348 train loss:3.491081 +step:9349 train loss:3.436048 +step:9350 train loss:3.513280 +step:9351 train loss:3.487895 +step:9352 train loss:3.473917 +step:9353 train loss:3.506335 +step:9354 train loss:3.473958 +step:9355 train loss:3.469571 +step:9356 train loss:3.515673 +step:9357 train loss:3.467974 +step:9358 train loss:3.500692 
+step:9359 train loss:3.443620 +step:9360 train loss:3.462196 +step:9361 train loss:3.460119 +step:9362 train loss:3.447785 +step:9363 train loss:3.512873 +step:9364 train loss:3.489229 +step:9365 train loss:3.496155 +step:9366 train loss:3.489476 +step:9367 train loss:3.503666 +step:9368 train loss:3.476473 +step:9369 train loss:3.478566 +step:9370 train loss:3.484438 +step:9371 train loss:3.505236 +step:9372 train loss:3.470979 +step:9373 train loss:3.453408 +step:9374 train loss:3.492509 +step:9375 train loss:3.504295 +step:9376 train loss:3.442650 +step:9377 train loss:3.516188 +step:9378 train loss:3.515602 +step:9379 train loss:3.542438 +step:9380 train loss:3.475974 +step:9381 train loss:3.483327 +step:9382 train loss:3.460464 +step:9383 train loss:3.453729 +step:9384 train loss:3.425636 +step:9385 train loss:3.499936 +step:9386 train loss:3.525904 +step:9387 train loss:3.502883 +step:9388 train loss:3.441268 +step:9389 train loss:3.458213 +step:9390 train loss:3.499025 +step:9391 train loss:3.507463 +step:9392 train loss:3.469755 +step:9393 train loss:3.460689 +step:9394 train loss:3.488123 +step:9395 train loss:3.484827 +step:9396 train loss:3.631002 +step:9397 train loss:3.520257 +step:9398 train loss:3.539951 +step:9399 train loss:3.492650 +step:9400 train loss:3.492440 +step:9401 train loss:3.486983 +step:9402 train loss:3.488755 +step:9403 train loss:3.423529 +step:9404 train loss:3.497558 +step:9405 train loss:3.457378 +step:9406 train loss:3.511493 +step:9407 train loss:3.453093 +step:9408 train loss:3.390216 +step:9409 train loss:3.455512 +step:9410 train loss:3.537354 +step:9411 train loss:3.499474 +step:9412 train loss:3.527915 +step:9413 train loss:3.544915 +step:9414 train loss:3.482661 +step:9415 train loss:3.475615 +step:9416 train loss:3.489685 +step:9417 train loss:3.443867 +step:9418 train loss:3.469315 +step:9419 train loss:3.440231 +step:9420 train loss:3.457805 +step:9421 train loss:3.508281 +step:9422 train loss:3.459121 +step:9423 train loss:3.524574 +step:9424 train loss:3.463460 +step:9425 train loss:3.506213 +step:9426 train loss:3.507927 +step:9427 train loss:3.482141 +step:9428 train loss:3.587305 +step:9429 train loss:3.477137 +step:9430 train loss:3.435819 +step:9431 train loss:3.524028 +step:9432 train loss:3.488506 +step:9433 train loss:3.527040 +step:9434 train loss:3.478110 +step:9435 train loss:3.504499 +step:9436 train loss:3.475172 +step:9437 train loss:3.487461 +step:9438 train loss:3.478977 +step:9439 train loss:3.481328 +step:9440 train loss:3.471205 +step:9441 train loss:3.483663 +step:9442 train loss:3.422654 +step:9443 train loss:3.477840 +step:9444 train loss:3.544285 +step:9445 train loss:3.475379 +step:9446 train loss:3.449949 +step:9447 train loss:3.518547 +step:9448 train loss:3.455077 +step:9449 train loss:3.477666 +step:9450 train loss:3.519985 +step:9451 train loss:3.431850 +step:9452 train loss:3.489854 +step:9453 train loss:3.466722 +step:9454 train loss:3.528535 +step:9455 train loss:3.510997 +step:9456 train loss:3.432824 +step:9457 train loss:3.480850 +step:9458 train loss:3.468086 +step:9459 train loss:3.460380 +step:9460 train loss:3.503356 +step:9461 train loss:3.528955 +step:9462 train loss:3.479527 +step:9463 train loss:3.509084 +step:9464 train loss:3.463259 +step:9465 train loss:3.554475 +step:9466 train loss:3.501596 +step:9467 train loss:3.525843 +step:9468 train loss:3.470949 +step:9469 train loss:3.459007 +step:9470 train loss:3.458477 +step:9471 train loss:3.500070 +step:9472 train loss:3.522353 +step:9473 train 
loss:3.513072 +step:9474 train loss:3.457422 +step:9475 train loss:3.448728 +step:9476 train loss:3.668467 +step:9477 train loss:3.540235 +step:9478 train loss:3.515236 +step:9479 train loss:3.614164 +step:9480 train loss:3.461690 +step:9481 train loss:3.496987 +step:9482 train loss:3.521266 +step:9483 train loss:3.478302 +step:9484 train loss:3.509935 +step:9485 train loss:3.428570 +step:9486 train loss:3.466906 +step:9487 train loss:3.500273 +step:9488 train loss:3.454258 +step:9489 train loss:3.499127 +step:9490 train loss:3.465332 +step:9491 train loss:3.509704 +step:9492 train loss:3.528910 +step:9493 train loss:3.497087 +step:9494 train loss:3.508833 +step:9495 train loss:3.462781 +step:9496 train loss:3.521626 +step:9497 train loss:3.538993 +step:9498 train loss:3.485079 +step:9499 train loss:3.534932 +step:9500 validation loss:3.422362 total_sharp:3.9453e-03 L1_sharp:6.4118e-02 L2_sharp:2.4324e-02 L3_sharp:2.7876e-02 L4_sharp:1.4833e-02 L5_sharp:1.7343e-02 L6_sharp:2.0874e-02 L7_sharp:2.6360e-02 L8_sharp:2.9092e-02 L9_sharp:2.2018e-02 L10_sharp:1.5410e-02 L11_sharp:1.4392e-02 L12_sharp:2.6100e-02 total_fnorm:1.3417e+00 total_l1_linf:8.0491e+03 total_spectral:1.3417e+00 L1_fnorm:6.0949e-02 L2_fnorm:5.8830e-02 L3_fnorm:5.8917e-02 L4_fnorm:6.0223e-02 L5_fnorm:6.0818e-02 L6_fnorm:6.1179e-02 L7_fnorm:6.1223e-02 L8_fnorm:6.1173e-02 L9_fnorm:6.1218e-02 L10_fnorm:6.1394e-02 L11_fnorm:6.1423e-02 L12_fnorm:6.1318e-02 L1_l1linf:3.2944e-01 L2_l1linf:3.6786e-01 L3_l1linf:3.6112e-01 L4_l1linf:3.3693e-01 L5_l1linf:3.1800e-01 L6_l1linf:3.0539e-01 L7_l1linf:2.9684e-01 L8_l1linf:3.0298e-01 L9_l1linf:3.3069e-01 L10_l1linf:3.6195e-01 L11_l1linf:3.8048e-01 L12_l1linf:3.8393e-01 L1_spectral:7.4402e-03 L2_spectral:8.2565e-03 L3_spectral:8.1881e-03 L4_spectral:7.6667e-03 L5_spectral:7.1429e-03 L6_spectral:6.9150e-03 L7_spectral:6.6993e-03 L8_spectral:6.8389e-03 L9_spectral:7.4291e-03 L10_spectral:8.1265e-03 L11_spectral:8.5740e-03 L12_spectral:8.5867e-03 ip_v_neg_g:3.1333e-03 cos_v_neg_g:7.8498e-04 v_norm:1.3417e+00 g_norm:2.9750e+00 hv_norm:6.6245e-01 cos_v_hv:7.9907e-03 hg_norm:1.2356e+02 cos_g_hg:4.4886e-01 v_par:2.6658e-05 v_perp:1.3417e+00 L1_cos_v_neg_g:7.0007e-03 L1_v_norm:6.0949e-02 L2_cos_v_neg_g:6.2358e-03 L2_v_norm:5.8830e-02 L3_cos_v_neg_g:4.0314e-03 L3_v_norm:5.8917e-02 L4_cos_v_neg_g:3.4255e-03 L4_v_norm:6.0223e-02 L5_cos_v_neg_g:2.6195e-03 L5_v_norm:6.0818e-02 L6_cos_v_neg_g:3.5279e-03 L6_v_norm:6.1179e-02 L7_cos_v_neg_g:6.0488e-03 L7_v_norm:6.1223e-02 L8_cos_v_neg_g:7.0312e-03 L8_v_norm:6.1173e-02 L9_cos_v_neg_g:6.3152e-03 L9_v_norm:6.1218e-02 L10_cos_v_neg_g:5.1148e-03 L10_v_norm:6.1394e-02 L11_cos_v_neg_g:4.3241e-03 L11_v_norm:6.1423e-02 L12_cos_v_neg_g:3.2552e-03 L12_v_norm:6.1318e-02 +step:9500 train loss:3.525177 +step:9501 train loss:3.505944 +step:9502 train loss:3.475518 +step:9503 train loss:3.491555 +step:9504 train loss:3.447198 +step:9505 train loss:3.471030 +step:9506 train loss:3.487786 +step:9507 train loss:3.472331 +step:9508 train loss:3.668574 +step:9509 train loss:3.485058 +step:9510 train loss:3.471630 +step:9511 train loss:3.496715 +step:9512 train loss:3.528739 +step:9513 train loss:3.518600 +step:9514 train loss:3.487256 +step:9515 train loss:3.387110 +step:9516 train loss:3.489022 +step:9517 train loss:3.524300 +step:9518 train loss:3.499005 +step:9519 train loss:3.508166 +step:9520 train loss:3.397500 +step:9521 train loss:3.390135 +step:9522 train loss:3.509892 +step:9523 train loss:3.505264 +step:9524 train loss:3.508879 +step:9525 train loss:3.550913 +step:9526 
train loss:3.568156 +step:9527 train loss:3.527160 +step:9528 train loss:3.456735 +step:9529 train loss:3.499268 +step:9530 train loss:3.545767 +step:9531 train loss:3.454947 +step:9532 train loss:3.503166 +step:9533 train loss:3.476459 +step:9534 train loss:3.555265 +step:9535 train loss:3.478375 +step:9536 train loss:3.458029 +step:9537 train loss:3.405919 +step:9538 train loss:3.422566 +step:9539 train loss:3.494336 +step:9540 train loss:3.412002 +step:9541 train loss:3.473192 +step:9542 train loss:3.600667 +step:9543 train loss:3.498137 +step:9544 train loss:3.538267 +step:9545 train loss:3.468931 +step:9546 train loss:3.496465 +step:9547 train loss:3.540114 +step:9548 train loss:3.480051 +step:9549 train loss:3.449234 +step:9550 train loss:3.478777 +step:9551 train loss:3.473478 +step:9552 train loss:3.498331 +step:9553 train loss:3.491073 +step:9554 train loss:3.536349 +step:9555 train loss:3.540998 +step:9556 train loss:3.450716 +step:9557 train loss:3.470564 +step:9558 train loss:3.536063 +step:9559 train loss:3.541250 +step:9560 train loss:3.452573 +step:9561 train loss:3.480750 +step:9562 train loss:3.517310 +step:9563 train loss:3.465566 +step:9564 train loss:3.500775 +step:9565 train loss:3.480322 +step:9566 train loss:3.451632 +step:9567 train loss:3.517000 +step:9568 train loss:3.489524 +step:9569 train loss:3.531986 +step:9570 train loss:3.424422 +step:9571 train loss:3.499385 +step:9572 train loss:3.443135 +step:9573 train loss:3.474236 +step:9574 train loss:3.448622 +step:9575 train loss:3.521110 +step:9576 train loss:3.410875 +step:9577 train loss:3.462275 +step:9578 train loss:3.467989 +step:9579 train loss:3.466254 +step:9580 train loss:3.530522 +step:9581 train loss:3.521973 +step:9582 train loss:3.484969 +step:9583 train loss:3.517703 +step:9584 train loss:3.454717 +step:9585 train loss:3.472846 +step:9586 train loss:3.525437 +step:9587 train loss:3.490772 +step:9588 train loss:3.480667 +step:9589 train loss:3.537876 +step:9590 train loss:3.501969 +step:9591 train loss:3.467293 +step:9592 train loss:3.488741 +step:9593 train loss:3.488945 +step:9594 train loss:3.503489 +step:9595 train loss:3.482710 +step:9596 train loss:3.567916 +step:9597 train loss:3.474061 +step:9598 train loss:3.435752 +step:9599 train loss:3.442782 +step:9600 train loss:3.528477 +step:9601 train loss:3.446543 +step:9602 train loss:3.531566 +step:9603 train loss:3.523235 +step:9604 train loss:3.404569 +step:9605 train loss:3.494785 +step:9606 train loss:3.548188 +step:9607 train loss:3.469028 +step:9608 train loss:3.474507 +step:9609 train loss:3.485014 +step:9610 train loss:3.529061 +step:9611 train loss:3.461485 +step:9612 train loss:3.470480 +step:9613 train loss:3.507625 +step:9614 train loss:3.478726 +step:9615 train loss:3.667639 +step:9616 train loss:3.479527 +step:9617 train loss:3.474102 +step:9618 train loss:3.419044 +step:9619 train loss:3.483615 +step:9620 train loss:3.539251 +step:9621 train loss:3.463167 +step:9622 train loss:3.475029 +step:9623 train loss:3.514531 +step:9624 train loss:3.504517 +step:9625 train loss:3.516048 +step:9626 train loss:3.487617 +step:9627 train loss:3.567555 +step:9628 train loss:3.533817 +step:9629 train loss:3.447043 +step:9630 train loss:3.506345 +step:9631 train loss:3.491947 +step:9632 train loss:3.462165 +step:9633 train loss:3.506213 +step:9634 train loss:3.571572 +step:9635 train loss:3.474593 +step:9636 train loss:3.421303 +step:9637 train loss:3.553750 +step:9638 train loss:3.435748 +step:9639 train loss:3.406733 +step:9640 train loss:3.527707 
+step:9641 train loss:3.501993 +step:9642 train loss:3.476649 +step:9643 train loss:3.482579 +step:9644 train loss:3.537261 +step:9645 train loss:3.463840 +step:9646 train loss:3.500673 +step:9647 train loss:3.512491 +step:9648 train loss:3.463811 +step:9649 train loss:3.436345 +step:9650 train loss:3.452721 +step:9651 train loss:3.543957 +step:9652 train loss:3.523535 +step:9653 train loss:3.466148 +step:9654 train loss:3.446637 +step:9655 train loss:3.444556 +step:9656 train loss:3.436863 +step:9657 train loss:3.464901 +step:9658 train loss:3.523413 +step:9659 train loss:3.628276 +step:9660 train loss:3.410782 +step:9661 train loss:3.430840 +step:9662 train loss:3.450815 +step:9663 train loss:3.492862 +step:9664 train loss:3.542596 +step:9665 train loss:3.387204 +step:9666 train loss:3.429007 +step:9667 train loss:3.566382 +step:9668 train loss:3.547713 +step:9669 train loss:3.561839 +step:9670 train loss:3.543843 +step:9671 train loss:3.544188 +step:9672 train loss:3.456188 +step:9673 train loss:3.479061 +step:9674 train loss:3.489074 +step:9675 train loss:3.488506 +step:9676 train loss:3.447480 +step:9677 train loss:3.452717 +step:9678 train loss:3.490071 +step:9679 train loss:3.480266 +step:9680 train loss:3.480041 +step:9681 train loss:3.464075 +step:9682 train loss:3.533549 +step:9683 train loss:3.505006 +step:9684 train loss:3.426020 +step:9685 train loss:3.509369 +step:9686 train loss:3.544767 +step:9687 train loss:3.448701 +step:9688 train loss:3.535650 +step:9689 train loss:3.635896 +step:9690 train loss:3.478188 +step:9691 train loss:3.465632 +step:9692 train loss:3.423499 +step:9693 train loss:3.426186 +step:9694 train loss:3.444510 +step:9695 train loss:3.550287 +step:9696 train loss:3.584129 +step:9697 train loss:3.495371 +step:9698 train loss:3.529502 +step:9699 train loss:3.492151 +step:9700 train loss:3.490005 +step:9701 train loss:3.539049 +step:9702 train loss:3.457127 +step:9703 train loss:3.480595 +step:9704 train loss:3.561166 +step:9705 train loss:3.461067 +step:9706 train loss:3.455138 +step:9707 train loss:3.502923 +step:9708 train loss:3.452666 +step:9709 train loss:3.476108 +step:9710 train loss:3.492905 +step:9711 train loss:3.466605 +step:9712 train loss:3.478065 +step:9713 train loss:3.527486 +step:9714 train loss:3.483361 +step:9715 train loss:3.504199 +step:9716 train loss:3.528141 +step:9717 train loss:3.445990 +step:9718 train loss:3.449570 +step:9719 train loss:3.535601 +step:9720 train loss:3.466760 +step:9721 train loss:3.456233 +step:9722 train loss:3.518837 +step:9723 train loss:3.469100 +step:9724 train loss:3.496937 +step:9725 train loss:3.547858 +step:9726 train loss:3.490181 +step:9727 train loss:3.466264 +step:9728 train loss:3.504836 +step:9729 train loss:3.533353 +step:9730 train loss:3.603892 +step:9731 train loss:3.522773 +step:9732 train loss:3.486218 +step:9733 train loss:3.526345 +step:9734 train loss:3.447262 +step:9735 train loss:3.552742 +step:9736 train loss:3.455215 +step:9737 train loss:3.514409 +step:9738 train loss:3.478768 +step:9739 train loss:3.553463 +step:9740 train loss:3.517029 +step:9741 train loss:3.459056 +step:9742 train loss:3.551085 +step:9743 train loss:3.425920 +step:9744 train loss:3.484281 +step:9745 train loss:3.443764 +step:9746 train loss:3.481587 +step:9747 train loss:3.472181 +step:9748 train loss:3.373080 +step:9749 train loss:3.468100 +step:9750 validation loss:3.415165 +step:9750 train loss:3.450333 +step:9751 train loss:3.589461 +step:9752 train loss:3.475594 +step:9753 train loss:3.433395 +step:9754 
train loss:3.467185 +step:9755 train loss:3.461684 +step:9756 train loss:3.463187 +step:9757 train loss:3.427465 +step:9758 train loss:3.420727 +step:9759 train loss:3.469030 +step:9760 train loss:3.413409 +step:9761 train loss:3.454320 +step:9762 train loss:3.451272 +step:9763 train loss:3.474603 +step:9764 train loss:3.458111 +step:9765 train loss:3.420411 +step:9766 train loss:3.509224 +step:9767 train loss:3.465751 +step:9768 train loss:3.477849 +step:9769 train loss:3.432102 +step:9770 train loss:3.431440 +step:9771 train loss:3.481624 +step:9772 train loss:3.495110 +step:9773 train loss:3.469714 +step:9774 train loss:3.440058 +step:9775 train loss:3.532485 +step:9776 train loss:3.529380 +step:9777 train loss:3.418613 +step:9778 train loss:3.427552 +step:9779 train loss:3.431572 +step:9780 train loss:3.428811 +step:9781 train loss:3.448596 +step:9782 train loss:3.525328 +step:9783 train loss:3.436030 +step:9784 train loss:3.462501 +step:9785 train loss:3.454654 +step:9786 train loss:3.490827 +step:9787 train loss:3.513134 +step:9788 train loss:3.441451 +step:9789 train loss:3.452875 +step:9790 train loss:3.411736 +step:9791 train loss:3.461864 +step:9792 train loss:3.478257 +step:9793 train loss:3.493243 +step:9794 train loss:3.471921 +step:9795 train loss:3.474121 +step:9796 train loss:3.460152 +step:9797 train loss:3.454759 +step:9798 train loss:3.471277 +step:9799 train loss:3.474254 +step:9800 train loss:3.544837 +step:9801 train loss:3.468542 +step:9802 train loss:3.526743 +step:9803 train loss:3.384480 +step:9804 train loss:3.481164 +step:9805 train loss:3.485042 +step:9806 train loss:3.459100 +step:9807 train loss:3.426440 +step:9808 train loss:3.341033 +step:9809 train loss:3.529918 +step:9810 train loss:3.486912 +step:9811 train loss:3.468876 +step:9812 train loss:3.442784 +step:9813 train loss:3.524507 +step:9814 train loss:3.515299 +step:9815 train loss:3.419567 +step:9816 train loss:3.422105 +step:9817 train loss:3.453509 +step:9818 train loss:3.479142 +step:9819 train loss:3.450174 +step:9820 train loss:3.520867 +step:9821 train loss:3.498036 +step:9822 train loss:3.470633 +step:9823 train loss:3.534574 +step:9824 train loss:3.436408 +step:9825 train loss:3.523041 +step:9826 train loss:3.517529 +step:9827 train loss:3.524634 +step:9828 train loss:3.439907 +step:9829 train loss:3.446630 +step:9830 train loss:3.436525 +step:9831 train loss:3.493199 +step:9832 train loss:3.504955 +step:9833 train loss:3.417451 +step:9834 train loss:3.468028 +step:9835 train loss:3.435727 +step:9836 train loss:3.497380 +step:9837 train loss:3.470907 +step:9838 train loss:3.508019 +step:9839 train loss:3.483122 +step:9840 train loss:3.452889 +step:9841 train loss:3.459868 +step:9842 train loss:3.520860 +step:9843 train loss:3.513678 +step:9844 train loss:3.461867 +step:9845 train loss:3.493299 +step:9846 train loss:3.427849 +step:9847 train loss:3.558702 +step:9848 train loss:3.480886 +step:9849 train loss:3.506512 +step:9850 train loss:3.426311 +step:9851 train loss:3.477883 +step:9852 train loss:3.443373 +step:9853 train loss:3.465878 +step:9854 train loss:3.477326 +step:9855 train loss:3.425663 +step:9856 train loss:3.428453 +step:9857 train loss:3.417471 +step:9858 train loss:3.481491 +step:9859 train loss:3.399848 +step:9860 train loss:3.639746 +step:9861 train loss:3.463228 +step:9862 train loss:3.430510 +step:9863 train loss:3.413424 +step:9864 train loss:3.537384 +step:9865 train loss:3.416662 +step:9866 train loss:3.456937 +step:9867 train loss:3.454672 +step:9868 train loss:3.514435 
+step:9869 train loss:3.478345 +step:9870 train loss:3.448488 +step:9871 train loss:3.490363 +step:9872 train loss:3.434532 +step:9873 train loss:3.484805 +step:9874 train loss:3.449021 +step:9875 train loss:3.453238 +step:9876 train loss:3.415923 +step:9877 train loss:3.467083 +step:9878 train loss:3.499165 +step:9879 train loss:3.499869 +step:9880 train loss:3.431704 +step:9881 train loss:3.483917 +step:9882 train loss:3.446187 +step:9883 train loss:3.456159 +step:9884 train loss:3.448970 +step:9885 train loss:3.512496 +step:9886 train loss:3.477283 +step:9887 train loss:3.476526 +step:9888 train loss:3.499645 +step:9889 train loss:3.534274 +step:9890 train loss:3.444276 +step:9891 train loss:3.450752 +step:9892 train loss:3.422819 +step:9893 train loss:3.541228 +step:9894 train loss:3.453347 +step:9895 train loss:3.389447 +step:9896 train loss:3.543890 +step:9897 train loss:3.421791 +step:9898 train loss:3.489181 +step:9899 train loss:3.468213 +step:9900 train loss:3.512997 +step:9901 train loss:3.436557 +step:9902 train loss:3.483501 +step:9903 train loss:3.452891 +step:9904 train loss:3.502935 +step:9905 train loss:3.405318 +step:9906 train loss:3.446470 +step:9907 train loss:3.455132 +step:9908 train loss:3.451216 +step:9909 train loss:3.467435 +step:9910 train loss:3.492136 +step:9911 train loss:3.576202 +step:9912 train loss:3.451980 +step:9913 train loss:3.455081 +step:9914 train loss:3.462308 +step:9915 train loss:3.463132 +step:9916 train loss:3.412680 +step:9917 train loss:3.451380 +step:9918 train loss:3.446186 +step:9919 train loss:3.608455 +step:9920 train loss:3.396780 +step:9921 train loss:3.490101 +step:9922 train loss:3.450194 +step:9923 train loss:3.505220 +step:9924 train loss:3.421241 +step:9925 train loss:3.478701 +step:9926 train loss:3.457433 +step:9927 train loss:3.501500 +step:9928 train loss:3.427833 +step:9929 train loss:3.464962 +step:9930 train loss:3.556993 +step:9931 train loss:3.519117 +step:9932 train loss:3.406886 +step:9933 train loss:3.502994 +step:9934 train loss:3.419942 +step:9935 train loss:3.536939 +step:9936 train loss:3.444287 +step:9937 train loss:3.470507 +step:9938 train loss:3.457017 +step:9939 train loss:3.521213 +step:9940 train loss:3.554801 +step:9941 train loss:3.429578 +step:9942 train loss:3.474431 +step:9943 train loss:3.600363 +step:9944 train loss:3.469987 +step:9945 train loss:3.491736 +step:9946 train loss:3.465580 +step:9947 train loss:3.413064 +step:9948 train loss:3.459268 +step:9949 train loss:3.353579 +step:9950 train loss:3.505631 +step:9951 train loss:3.421543 +step:9952 train loss:3.492597 +step:9953 train loss:3.455757 +step:9954 train loss:3.514797 +step:9955 train loss:3.486141 +step:9956 train loss:3.491752 +step:9957 train loss:3.467279 +step:9958 train loss:3.521032 +step:9959 train loss:3.421445 +step:9960 train loss:3.454357 +step:9961 train loss:3.461779 +step:9962 train loss:3.512860 +step:9963 train loss:3.402348 +step:9964 train loss:3.459745 +step:9965 train loss:3.461002 +step:9966 train loss:3.517598 +step:9967 train loss:3.434224 +step:9968 train loss:3.497787 +step:9969 train loss:3.412592 +step:9970 train loss:3.454483 +step:9971 train loss:3.494206 +step:9972 train loss:3.517579 +step:9973 train loss:3.494094 +step:9974 train loss:3.482631 +step:9975 train loss:3.452501 +step:9976 train loss:3.410320 +step:9977 train loss:3.461303 +step:9978 train loss:3.459996 +step:9979 train loss:3.469387 +step:9980 train loss:3.524632 +step:9981 train loss:3.433080 +step:9982 train loss:3.494490 +step:9983 train 
loss:3.413892 +step:9984 train loss:3.477338 +step:9985 train loss:3.420231 +step:9986 train loss:3.474596 +step:9987 train loss:3.516214 +step:9988 train loss:3.531529 +step:9989 train loss:3.424405 +step:9990 train loss:3.564123 +step:9991 train loss:3.410427 +step:9992 train loss:3.485602 +step:9993 train loss:3.477717 +step:9994 train loss:3.591932 +step:9995 train loss:3.529716 +step:9996 train loss:3.443937 +step:9997 train loss:3.484518 +step:9998 train loss:3.537858 +step:9999 train loss:3.503715 +step:10000 validation loss:3.413746 total_sharp:5.0902e-03 L1_sharp:4.9582e-01 L2_sharp:4.6199e-02 L3_sharp:2.6188e-02 L4_sharp:1.3813e-02 L5_sharp:1.3885e-02 L6_sharp:1.9045e-02 L7_sharp:2.4628e-02 L8_sharp:2.7242e-02 L9_sharp:1.8163e-02 L10_sharp:1.2842e-02 L11_sharp:1.4040e-02 L12_sharp:2.1034e-02 total_fnorm:1.3464e+00 total_l1_linf:8.0751e+03 total_spectral:1.3464e+00 L1_fnorm:6.0848e-02 L2_fnorm:5.8795e-02 L3_fnorm:5.9044e-02 L4_fnorm:6.0140e-02 L5_fnorm:6.0920e-02 L6_fnorm:6.1285e-02 L7_fnorm:6.1333e-02 L8_fnorm:6.1265e-02 L9_fnorm:6.1356e-02 L10_fnorm:6.1411e-02 L11_fnorm:6.1489e-02 L12_fnorm:6.1423e-02 L1_l1linf:3.4475e-01 L2_l1linf:3.7015e-01 L3_l1linf:3.7849e-01 L4_l1linf:3.6146e-01 L5_l1linf:3.4098e-01 L6_l1linf:3.3349e-01 L7_l1linf:3.2378e-01 L8_l1linf:3.1883e-01 L9_l1linf:3.5178e-01 L10_l1linf:3.8262e-01 L11_l1linf:4.1865e-01 L12_l1linf:4.0257e-01 L1_spectral:7.7657e-03 L2_spectral:8.3768e-03 L3_spectral:8.5010e-03 L4_spectral:8.1572e-03 L5_spectral:7.7055e-03 L6_spectral:7.5065e-03 L7_spectral:7.2408e-03 L8_spectral:7.1740e-03 L9_spectral:7.8666e-03 L10_spectral:8.5689e-03 L11_spectral:9.2768e-03 L12_spectral:9.0452e-03 ip_v_neg_g:5.2381e-03 cos_v_neg_g:1.0166e-03 v_norm:1.3464e+00 g_norm:3.8267e+00 hv_norm:1.5299e+00 cos_v_hv:4.4797e-03 hg_norm:7.4217e+02 cos_g_hg:5.5792e-01 v_par:3.8466e-05 v_perp:1.3464e+00 L1_cos_v_neg_g:1.9605e-02 L1_v_norm:6.0848e-02 L2_cos_v_neg_g:1.1120e-02 L2_v_norm:5.8795e-02 L3_cos_v_neg_g:4.0248e-03 L3_v_norm:5.9044e-02 L4_cos_v_neg_g:1.5779e-03 L4_v_norm:6.0140e-02 L5_cos_v_neg_g:2.1994e-03 L5_v_norm:6.0920e-02 L6_cos_v_neg_g:2.5453e-03 L6_v_norm:6.1285e-02 L7_cos_v_neg_g:4.9510e-03 L7_v_norm:6.1333e-02 L8_cos_v_neg_g:6.1042e-03 L8_v_norm:6.1265e-02 L9_cos_v_neg_g:4.5827e-03 L9_v_norm:6.1356e-02 L10_cos_v_neg_g:3.7412e-03 L10_v_norm:6.1411e-02 L11_cos_v_neg_g:4.2308e-03 L11_v_norm:6.1489e-02 L12_cos_v_neg_g:4.7924e-03 L12_v_norm:6.1423e-02 diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/config.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..9c3ef31643b87eee2f23effecc708ff1b1b4b8a3 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/muon_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 
1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.001, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 43, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "6c0bef06-3c27-44e9-8513-a6405eebf610", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_1000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..2ff27c8c49d4420635608cac141ea9eb5cc1e5f0 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3813310861587524, + "total_l1_linf_norm": 8261.935546875, + "total_spectral_norm": 1.3813310861587524, + "layer_1_update_fnorm": 0.06171097978949547, + "layer_1_max_l1_linf_norm": 0.1928468644618988, + "layer_1_max_spectral_norm": 0.004379595164209604, + "layer_2_update_fnorm": 0.05719190835952759, + "layer_2_max_l1_linf_norm": 0.21821585297584534, + "layer_2_max_spectral_norm": 0.004941977560520172, + "layer_3_update_fnorm": 0.05480729788541794, + "layer_3_max_l1_linf_norm": 0.22789788246154785, + "layer_3_max_spectral_norm": 0.005138332489877939, + "layer_4_update_fnorm": 0.05735813081264496, + "layer_4_max_l1_linf_norm": 0.24315530061721802, + "layer_4_max_spectral_norm": 0.005495063494890928, + "layer_5_update_fnorm": 0.05850900337100029, + "layer_5_max_l1_linf_norm": 0.2360285222530365, + "layer_5_max_spectral_norm": 0.00529020931571722, + "layer_6_update_fnorm": 0.05998456850647926, + "layer_6_max_l1_linf_norm": 0.23662069439888, + "layer_6_max_spectral_norm": 0.005378078203648329, + "layer_7_update_fnorm": 0.06048192083835602, + "layer_7_max_l1_linf_norm": 0.23486444354057312, + "layer_7_max_spectral_norm": 0.005819763522595167, + "layer_8_update_fnorm": 0.06057251617312431, + "layer_8_max_l1_linf_norm": 0.2389133870601654, + "layer_8_max_spectral_norm": 0.006039527710527182, + "layer_9_update_fnorm": 0.0608992874622345, + "layer_9_max_l1_linf_norm": 0.2412022352218628, + "layer_9_max_spectral_norm": 0.0064452774822711945, + "layer_10_update_fnorm": 0.06122179701924324, + "layer_10_max_l1_linf_norm": 0.2388581931591034, + "layer_10_max_spectral_norm": 0.005897691939026117, + "layer_11_update_fnorm": 0.06083141267299652, + "layer_11_max_l1_linf_norm": 0.23849619925022125, + "layer_11_max_spectral_norm": 0.005443142261356115, + "layer_12_update_fnorm": 0.061347536742687225, + "layer_12_max_l1_linf_norm": 0.22575783729553223, + "layer_12_max_spectral_norm": 0.006102045997977257, + "total_sharpness": 0.019999800249934196, + "ip_v_neg_g": 0.01717827282845974, + "cos_v_neg_g": 0.002058620797470212, + "v_norm": 1.3813310861587524, + "g_norm": 6.040951728820801, + "hv_norm": 3.9530513286590576, + "cos_v_hv": 0.006988613400608301, + "hg_norm": 1927.117919921875, + "cos_g_hg": 0.587643027305603, + "v_parallel_norm": 0.00012068547948729247, + "v_perp_norm": 1.3813310861587524, + "layer_1_v_norm": 0.06171097978949547, + "layer_1_cos_v_neg_g": 0.022518165409564972, + "layer_2_v_norm": 0.05719190835952759, + "layer_2_cos_v_neg_g": 0.02217145450413227, + "layer_3_v_norm": 0.05480729788541794, + "layer_3_cos_v_neg_g": 0.019631879404187202, + "layer_4_v_norm": 
0.05735813081264496, + "layer_4_cos_v_neg_g": 0.014962684363126755, + "layer_5_v_norm": 0.05850900337100029, + "layer_5_cos_v_neg_g": 0.01200434286147356, + "layer_6_v_norm": 0.05998457223176956, + "layer_6_cos_v_neg_g": 0.010180789045989513, + "layer_7_v_norm": 0.06048192083835602, + "layer_7_cos_v_neg_g": 0.009326264262199402, + "layer_8_v_norm": 0.06057251617312431, + "layer_8_cos_v_neg_g": 0.008231543935835361, + "layer_9_v_norm": 0.0608992874622345, + "layer_9_cos_v_neg_g": 0.006568857003003359, + "layer_10_v_norm": 0.06122179701924324, + "layer_10_cos_v_neg_g": 0.007125533185899258, + "layer_11_v_norm": 0.06083140894770622, + "layer_11_cos_v_neg_g": 0.007152696140110493, + "layer_12_v_norm": 0.061347536742687225, + "layer_12_cos_v_neg_g": 0.006240701302886009, + "layer_1_sharpness": 0.5020738840103149, + "layer_2_sharpness": 0.3204740583896637, + "layer_3_sharpness": 0.2296759933233261, + "layer_4_sharpness": 0.15303950011730194, + "layer_5_sharpness": 0.09437907487154007, + "layer_6_sharpness": 0.07368778437376022, + "layer_7_sharpness": 0.06702184677124023, + "layer_8_sharpness": 0.05345429480075836, + "layer_9_sharpness": 0.03943187743425369, + "layer_10_sharpness": 0.03494441509246826, + "layer_11_sharpness": 0.03525117039680481, + "layer_12_sharpness": 0.03570570424199104 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_10000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..707495f9d04682eb1181b22ced51538ec76a631f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3462632894515991, + "total_l1_linf_norm": 8076.4267578125, + "total_spectral_norm": 1.3462629318237305, + "layer_1_update_fnorm": 0.06106223538517952, + "layer_1_max_l1_linf_norm": 0.3599584698677063, + "layer_1_max_spectral_norm": 0.008106514811515808, + "layer_2_update_fnorm": 0.059032488614320755, + "layer_2_max_l1_linf_norm": 0.3796253800392151, + "layer_2_max_spectral_norm": 0.00857541710138321, + "layer_3_update_fnorm": 0.05902464687824249, + "layer_3_max_l1_linf_norm": 0.36915889382362366, + "layer_3_max_spectral_norm": 0.008392730727791786, + "layer_4_update_fnorm": 0.06036444753408432, + "layer_4_max_l1_linf_norm": 0.36500632762908936, + "layer_4_max_spectral_norm": 0.00824036542326212, + "layer_5_update_fnorm": 0.06093303859233856, + "layer_5_max_l1_linf_norm": 0.330966591835022, + "layer_5_max_spectral_norm": 0.007457695435732603, + "layer_6_update_fnorm": 0.06120626628398895, + "layer_6_max_l1_linf_norm": 0.30537980794906616, + "layer_6_max_spectral_norm": 0.006912826560437679, + "layer_7_update_fnorm": 0.0613071471452713, + "layer_7_max_l1_linf_norm": 0.31989410519599915, + "layer_7_max_spectral_norm": 0.007174494210630655, + "layer_8_update_fnorm": 0.06131012737751007, + "layer_8_max_l1_linf_norm": 0.33805564045906067, + "layer_8_max_spectral_norm": 0.0075819711200892925, + "layer_9_update_fnorm": 0.061373524367809296, + "layer_9_max_l1_linf_norm": 0.344782292842865, + "layer_9_max_spectral_norm": 0.007709480822086334, + "layer_10_update_fnorm": 0.06145269423723221, + "layer_10_max_l1_linf_norm": 0.3752601742744446, + "layer_10_max_spectral_norm": 0.008401717990636826, + "layer_11_update_fnorm": 0.061456479132175446, + "layer_11_max_l1_linf_norm": 0.4082026481628418, + 
"layer_11_max_spectral_norm": 0.009138918481767178, + "layer_12_update_fnorm": 0.06149265542626381, + "layer_12_max_l1_linf_norm": 0.4091470241546631, + "layer_12_max_spectral_norm": 0.009230651892721653, + "total_sharpness": 0.004145909566432238, + "ip_v_neg_g": 0.0048094261437654495, + "cos_v_neg_g": 0.0009305260609835386, + "v_norm": 1.3462632894515991, + "g_norm": 3.839146852493286, + "hv_norm": 1.0619953870773315, + "cos_v_hv": 0.005255659576505423, + "hg_norm": 689.1567993164062, + "cos_g_hg": 0.5688039660453796, + "v_parallel_norm": 3.6684130463982e-05, + "v_perp_norm": 1.3462632894515991, + "layer_1_v_norm": 0.06106223538517952, + "layer_1_cos_v_neg_g": 0.014862987212836742, + "layer_2_v_norm": 0.059032488614320755, + "layer_2_cos_v_neg_g": 0.008469519205391407, + "layer_3_v_norm": 0.05902464687824249, + "layer_3_cos_v_neg_g": 0.004276124760508537, + "layer_4_v_norm": 0.06036444753408432, + "layer_4_cos_v_neg_g": 0.0023237657733261585, + "layer_5_v_norm": 0.06093303859233856, + "layer_5_cos_v_neg_g": 0.0037241727113723755, + "layer_6_v_norm": 0.06120627000927925, + "layer_6_cos_v_neg_g": 0.004324690438807011, + "layer_7_v_norm": 0.0613071471452713, + "layer_7_cos_v_neg_g": 0.005784239619970322, + "layer_8_v_norm": 0.06131012737751007, + "layer_8_cos_v_neg_g": 0.00648152083158493, + "layer_9_v_norm": 0.061373524367809296, + "layer_9_cos_v_neg_g": 0.004153906833380461, + "layer_10_v_norm": 0.06145269423723221, + "layer_10_cos_v_neg_g": 0.003830496920272708, + "layer_11_v_norm": 0.061456479132175446, + "layer_11_cos_v_neg_g": 0.002823536517098546, + "layer_12_v_norm": 0.06149265542626381, + "layer_12_cos_v_neg_g": 0.003317799186334014, + "layer_1_sharpness": 0.2704842984676361, + "layer_2_sharpness": 0.03308388218283653, + "layer_3_sharpness": 0.021441299468278885, + "layer_4_sharpness": 0.013093702495098114, + "layer_5_sharpness": 0.0145473787561059, + "layer_6_sharpness": 0.01829213835299015, + "layer_7_sharpness": 0.0236902367323637, + "layer_8_sharpness": 0.026300067082047462, + "layer_9_sharpness": 0.017468884587287903, + "layer_10_sharpness": 0.011902688071131706, + "layer_11_sharpness": 0.012699496932327747, + "layer_12_sharpness": 0.020524101331830025 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_1500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..0f89cf9a447943808f55d89b9612d2a26ad3f72d --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3380324840545654, + "total_l1_linf_norm": 8042.265625, + "total_spectral_norm": 1.338032603263855, + "layer_1_update_fnorm": 0.06186702102422714, + "layer_1_max_l1_linf_norm": 0.20833900570869446, + "layer_1_max_spectral_norm": 0.004758793860673904, + "layer_2_update_fnorm": 0.05855697765946388, + "layer_2_max_l1_linf_norm": 0.2265576273202896, + "layer_2_max_spectral_norm": 0.005186580121517181, + "layer_3_update_fnorm": 0.05587412044405937, + "layer_3_max_l1_linf_norm": 0.2463393658399582, + "layer_3_max_spectral_norm": 0.005619897972792387, + "layer_4_update_fnorm": 0.058176204562187195, + "layer_4_max_l1_linf_norm": 0.2514781951904297, + "layer_4_max_spectral_norm": 0.005684213247150183, + "layer_5_update_fnorm": 0.059265512973070145, + "layer_5_max_l1_linf_norm": 0.262673020362854, + "layer_5_max_spectral_norm": 
0.005918014328926802, + "layer_6_update_fnorm": 0.06062522903084755, + "layer_6_max_l1_linf_norm": 0.2486724853515625, + "layer_6_max_spectral_norm": 0.00562842795625329, + "layer_7_update_fnorm": 0.060915507376194, + "layer_7_max_l1_linf_norm": 0.25722455978393555, + "layer_7_max_spectral_norm": 0.005754950921982527, + "layer_8_update_fnorm": 0.060975559055805206, + "layer_8_max_l1_linf_norm": 0.261592298746109, + "layer_8_max_spectral_norm": 0.00581630552187562, + "layer_9_update_fnorm": 0.061059921979904175, + "layer_9_max_l1_linf_norm": 0.259723424911499, + "layer_9_max_spectral_norm": 0.005878058727830648, + "layer_10_update_fnorm": 0.06113230809569359, + "layer_10_max_l1_linf_norm": 0.2719898819923401, + "layer_10_max_spectral_norm": 0.006137556862086058, + "layer_11_update_fnorm": 0.060908619314432144, + "layer_11_max_l1_linf_norm": 0.2626298666000366, + "layer_11_max_spectral_norm": 0.005949005018919706, + "layer_12_update_fnorm": 0.06110279634594917, + "layer_12_max_l1_linf_norm": 0.23862968385219574, + "layer_12_max_spectral_norm": 0.0054575372487306595, + "total_sharpness": 0.01389604527503252, + "ip_v_neg_g": 0.0114117581397295, + "cos_v_neg_g": 0.0017176201799884439, + "v_norm": 1.3380324840545654, + "g_norm": 4.965451717376709, + "hv_norm": 2.289262056350708, + "cos_v_hv": 0.008121988736093044, + "hg_norm": 858.3081665039062, + "cos_g_hg": 0.5320188999176025, + "v_parallel_norm": 7.908239786047488e-05, + "v_perp_norm": 1.3380324840545654, + "layer_1_v_norm": 0.06186702102422714, + "layer_1_cos_v_neg_g": 0.012536265887320042, + "layer_2_v_norm": 0.05855697765946388, + "layer_2_cos_v_neg_g": 0.014158299192786217, + "layer_3_v_norm": 0.05587412044405937, + "layer_3_cos_v_neg_g": 0.016978712752461433, + "layer_4_v_norm": 0.058176204562187195, + "layer_4_cos_v_neg_g": 0.012669285759329796, + "layer_5_v_norm": 0.059265512973070145, + "layer_5_cos_v_neg_g": 0.011676433496177197, + "layer_6_v_norm": 0.06062522530555725, + "layer_6_cos_v_neg_g": 0.009314890950918198, + "layer_7_v_norm": 0.060915507376194, + "layer_7_cos_v_neg_g": 0.009274656884372234, + "layer_8_v_norm": 0.060975559055805206, + "layer_8_cos_v_neg_g": 0.009500164538621902, + "layer_9_v_norm": 0.061059921979904175, + "layer_9_cos_v_neg_g": 0.008821526542305946, + "layer_10_v_norm": 0.06113230809569359, + "layer_10_cos_v_neg_g": 0.007984925992786884, + "layer_11_v_norm": 0.060908619314432144, + "layer_11_cos_v_neg_g": 0.006903900299221277, + "layer_12_v_norm": 0.06110279634594917, + "layer_12_cos_v_neg_g": 0.005278660915791988, + "layer_1_sharpness": 0.12409775704145432, + "layer_2_sharpness": 0.07865342497825623, + "layer_3_sharpness": 0.12294084578752518, + "layer_4_sharpness": 0.07281193882226944, + "layer_5_sharpness": 0.06507808715105057, + "layer_6_sharpness": 0.05742922052741051, + "layer_7_sharpness": 0.06342294812202454, + "layer_8_sharpness": 0.05452192202210426, + "layer_9_sharpness": 0.043621812015771866, + "layer_10_sharpness": 0.03417985141277313, + "layer_11_sharpness": 0.02865879237651825, + "layer_12_sharpness": 0.03149545192718506 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_2000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..e7c649ffc5e317f2377625e024f132b8a768a990 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_2000.json @@ -0,0 +1,88 
@@ +{ + "total_update_fnorm": 1.3430967330932617, + "total_l1_linf_norm": 8073.076171875, + "total_spectral_norm": 1.3430969715118408, + "layer_1_update_fnorm": 0.061601124703884125, + "layer_1_max_l1_linf_norm": 0.21344682574272156, + "layer_1_max_spectral_norm": 0.004875508137047291, + "layer_2_update_fnorm": 0.05862483009696007, + "layer_2_max_l1_linf_norm": 0.23003380000591278, + "layer_2_max_spectral_norm": 0.005277448799461126, + "layer_3_update_fnorm": 0.056665342301130295, + "layer_3_max_l1_linf_norm": 0.25635939836502075, + "layer_3_max_spectral_norm": 0.005810169503092766, + "layer_4_update_fnorm": 0.05893191322684288, + "layer_4_max_l1_linf_norm": 0.2713918685913086, + "layer_4_max_spectral_norm": 0.006098696030676365, + "layer_5_update_fnorm": 0.05986873432993889, + "layer_5_max_l1_linf_norm": 0.26493340730667114, + "layer_5_max_spectral_norm": 0.005996353924274445, + "layer_6_update_fnorm": 0.060818541795015335, + "layer_6_max_l1_linf_norm": 0.2669752836227417, + "layer_6_max_spectral_norm": 0.005984376650303602, + "layer_7_update_fnorm": 0.06089623272418976, + "layer_7_max_l1_linf_norm": 0.2534656226634979, + "layer_7_max_spectral_norm": 0.005688427481800318, + "layer_8_update_fnorm": 0.061000220477581024, + "layer_8_max_l1_linf_norm": 0.25061678886413574, + "layer_8_max_spectral_norm": 0.00566860381513834, + "layer_9_update_fnorm": 0.06097499653697014, + "layer_9_max_l1_linf_norm": 0.26874643564224243, + "layer_9_max_spectral_norm": 0.006036314647644758, + "layer_10_update_fnorm": 0.06099466234445572, + "layer_10_max_l1_linf_norm": 0.2724592089653015, + "layer_10_max_spectral_norm": 0.006070597097277641, + "layer_11_update_fnorm": 0.060830529779195786, + "layer_11_max_l1_linf_norm": 0.2701250910758972, + "layer_11_max_spectral_norm": 0.00608033686876297, + "layer_12_update_fnorm": 0.061085186898708344, + "layer_12_max_l1_linf_norm": 0.2753103971481323, + "layer_12_max_spectral_norm": 0.006171117536723614, + "total_sharpness": 0.009818131104111671, + "ip_v_neg_g": 0.009590371511876583, + "cos_v_neg_g": 0.001395820640027523, + "v_norm": 1.3430967330932617, + "g_norm": 5.1156229972839355, + "hv_norm": 1.8438284397125244, + "cos_v_hv": 0.007151803933084011, + "hg_norm": 906.9188842773438, + "cos_g_hg": 0.5685179829597473, + "v_parallel_norm": 5.844581392011605e-05, + "v_perp_norm": 1.3430967330932617, + "layer_1_v_norm": 0.061601124703884125, + "layer_1_cos_v_neg_g": 0.005947065073996782, + "layer_2_v_norm": 0.05862483009696007, + "layer_2_cos_v_neg_g": 0.009416818618774414, + "layer_3_v_norm": 0.056665338575839996, + "layer_3_cos_v_neg_g": 0.01055404357612133, + "layer_4_v_norm": 0.05893191322684288, + "layer_4_cos_v_neg_g": 0.009883194230496883, + "layer_5_v_norm": 0.05986873432993889, + "layer_5_cos_v_neg_g": 0.011161341331899166, + "layer_6_v_norm": 0.060818541795015335, + "layer_6_cos_v_neg_g": 0.011225603520870209, + "layer_7_v_norm": 0.06089623272418976, + "layer_7_cos_v_neg_g": 0.010242056101560593, + "layer_8_v_norm": 0.061000220477581024, + "layer_8_cos_v_neg_g": 0.010269389487802982, + "layer_9_v_norm": 0.06097499653697014, + "layer_9_cos_v_neg_g": 0.0089264502748847, + "layer_10_v_norm": 0.06099466234445572, + "layer_10_cos_v_neg_g": 0.007505720015615225, + "layer_11_v_norm": 0.060830529779195786, + "layer_11_cos_v_neg_g": 0.007356380578130484, + "layer_12_v_norm": 0.061085186898708344, + "layer_12_cos_v_neg_g": 0.00621422566473484, + "layer_1_sharpness": 0.11112136393785477, + "layer_2_sharpness": 0.051013458520174026, + "layer_3_sharpness": 0.06792952865362167, + 
"layer_4_sharpness": 0.04607274383306503, + "layer_5_sharpness": 0.04752783477306366, + "layer_6_sharpness": 0.05273076146841049, + "layer_7_sharpness": 0.05027053877711296, + "layer_8_sharpness": 0.044829826802015305, + "layer_9_sharpness": 0.03651592507958412, + "layer_10_sharpness": 0.02976350672543049, + "layer_11_sharpness": 0.030298706144094467, + "layer_12_sharpness": 0.048385750502347946 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_2500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..0772253fb8c3a2f7af780eaa11695fc5c11407a7 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3561897277832031, + "total_l1_linf_norm": 8133.8388671875, + "total_spectral_norm": 1.3561896085739136, + "layer_1_update_fnorm": 0.06151667982339859, + "layer_1_max_l1_linf_norm": 0.25227463245391846, + "layer_1_max_spectral_norm": 0.00569562055170536, + "layer_2_update_fnorm": 0.05839942768216133, + "layer_2_max_l1_linf_norm": 0.29431989789009094, + "layer_2_max_spectral_norm": 0.006602306850254536, + "layer_3_update_fnorm": 0.05727188661694527, + "layer_3_max_l1_linf_norm": 0.30876192450523376, + "layer_3_max_spectral_norm": 0.006962816696614027, + "layer_4_update_fnorm": 0.05932017043232918, + "layer_4_max_l1_linf_norm": 0.29514414072036743, + "layer_4_max_spectral_norm": 0.006724091246724129, + "layer_5_update_fnorm": 0.060040734708309174, + "layer_5_max_l1_linf_norm": 0.28735288977622986, + "layer_5_max_spectral_norm": 0.006503817159682512, + "layer_6_update_fnorm": 0.06093522161245346, + "layer_6_max_l1_linf_norm": 0.2754172086715698, + "layer_6_max_spectral_norm": 0.006156586576253176, + "layer_7_update_fnorm": 0.06104852631688118, + "layer_7_max_l1_linf_norm": 0.26735129952430725, + "layer_7_max_spectral_norm": 0.0060353088192641735, + "layer_8_update_fnorm": 0.06119443103671074, + "layer_8_max_l1_linf_norm": 0.28057658672332764, + "layer_8_max_spectral_norm": 0.006314759608358145, + "layer_9_update_fnorm": 0.06105847284197807, + "layer_9_max_l1_linf_norm": 0.28056609630584717, + "layer_9_max_spectral_norm": 0.006346747279167175, + "layer_10_update_fnorm": 0.061053115874528885, + "layer_10_max_l1_linf_norm": 0.28903868794441223, + "layer_10_max_spectral_norm": 0.0065681771375238895, + "layer_11_update_fnorm": 0.06090725585818291, + "layer_11_max_l1_linf_norm": 0.2866431474685669, + "layer_11_max_spectral_norm": 0.006472949869930744, + "layer_12_update_fnorm": 0.060999978333711624, + "layer_12_max_l1_linf_norm": 0.28497838973999023, + "layer_12_max_spectral_norm": 0.006416276562958956, + "total_sharpness": 0.007988938130438328, + "ip_v_neg_g": 0.00497279642149806, + "cos_v_neg_g": 0.0008699779282324016, + "v_norm": 1.3561897277832031, + "g_norm": 4.214751720428467, + "hv_norm": 1.407511830329895, + "cos_v_hv": 0.0076976376585662365, + "hg_norm": 378.8216552734375, + "cos_g_hg": 0.5088281035423279, + "v_parallel_norm": 3.7243615224724635e-05, + "v_perp_norm": 1.3561897277832031, + "layer_1_v_norm": 0.06151667982339859, + "layer_1_cos_v_neg_g": 0.005347943864762783, + "layer_2_v_norm": 0.05839942768216133, + "layer_2_cos_v_neg_g": 0.005869603715837002, + "layer_3_v_norm": 0.05727188661694527, + "layer_3_cos_v_neg_g": 0.0077662658877670765, + "layer_4_v_norm": 0.05932017043232918, + 
"layer_4_cos_v_neg_g": 0.006164484191685915, + "layer_5_v_norm": 0.060040734708309174, + "layer_5_cos_v_neg_g": 0.006147132720798254, + "layer_6_v_norm": 0.06093522161245346, + "layer_6_cos_v_neg_g": 0.006238239351660013, + "layer_7_v_norm": 0.06104852631688118, + "layer_7_cos_v_neg_g": 0.006114853546023369, + "layer_8_v_norm": 0.06119443103671074, + "layer_8_cos_v_neg_g": 0.005624836310744286, + "layer_9_v_norm": 0.06105847284197807, + "layer_9_cos_v_neg_g": 0.004643724299967289, + "layer_10_v_norm": 0.061053115874528885, + "layer_10_cos_v_neg_g": 0.0036543055903166533, + "layer_11_v_norm": 0.06090725585818291, + "layer_11_cos_v_neg_g": 0.0038684962783008814, + "layer_12_v_norm": 0.060999978333711624, + "layer_12_cos_v_neg_g": 0.004249451216310263, + "layer_1_sharpness": 0.07411053031682968, + "layer_2_sharpness": 0.04660452529788017, + "layer_3_sharpness": 0.05610913410782814, + "layer_4_sharpness": 0.03449692949652672, + "layer_5_sharpness": 0.0366186797618866, + "layer_6_sharpness": 0.042254816740751266, + "layer_7_sharpness": 0.0463087297976017, + "layer_8_sharpness": 0.046269942075014114, + "layer_9_sharpness": 0.03316986560821533, + "layer_10_sharpness": 0.023768071085214615, + "layer_11_sharpness": 0.01988888904452324, + "layer_12_sharpness": 0.03636438027024269 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_3000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..4e37ba35753a972b6f516274c8e488e0f11f9860 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3238589763641357, + "total_l1_linf_norm": 7960.1826171875, + "total_spectral_norm": 1.3238588571548462, + "layer_1_update_fnorm": 0.061650581657886505, + "layer_1_max_l1_linf_norm": 0.2711581587791443, + "layer_1_max_spectral_norm": 0.006168619263917208, + "layer_2_update_fnorm": 0.05836694315075874, + "layer_2_max_l1_linf_norm": 0.32596591114997864, + "layer_2_max_spectral_norm": 0.007332659792155027, + "layer_3_update_fnorm": 0.05694109946489334, + "layer_3_max_l1_linf_norm": 0.31857556104660034, + "layer_3_max_spectral_norm": 0.007155443541705608, + "layer_4_update_fnorm": 0.059359706938266754, + "layer_4_max_l1_linf_norm": 0.3266984820365906, + "layer_4_max_spectral_norm": 0.00738940853625536, + "layer_5_update_fnorm": 0.060262423008680344, + "layer_5_max_l1_linf_norm": 0.29744669795036316, + "layer_5_max_spectral_norm": 0.006710144225507975, + "layer_6_update_fnorm": 0.061044711619615555, + "layer_6_max_l1_linf_norm": 0.29494550824165344, + "layer_6_max_spectral_norm": 0.006636128760874271, + "layer_7_update_fnorm": 0.06109796464443207, + "layer_7_max_l1_linf_norm": 0.28034478425979614, + "layer_7_max_spectral_norm": 0.006358328741043806, + "layer_8_update_fnorm": 0.06118132546544075, + "layer_8_max_l1_linf_norm": 0.2826237976551056, + "layer_8_max_spectral_norm": 0.006316798739135265, + "layer_9_update_fnorm": 0.061080556362867355, + "layer_9_max_l1_linf_norm": 0.30734020471572876, + "layer_9_max_spectral_norm": 0.0068822624161839485, + "layer_10_update_fnorm": 0.06122676655650139, + "layer_10_max_l1_linf_norm": 0.33009928464889526, + "layer_10_max_spectral_norm": 0.007403443567454815, + "layer_11_update_fnorm": 0.06115102022886276, + "layer_11_max_l1_linf_norm": 0.33780646324157715, + "layer_11_max_spectral_norm": 
0.007596681825816631, + "layer_12_update_fnorm": 0.06116415187716484, + "layer_12_max_l1_linf_norm": 0.31441807746887207, + "layer_12_max_spectral_norm": 0.007156347390264273, + "total_sharpness": 0.007889463566243649, + "ip_v_neg_g": 0.008183756843209267, + "cos_v_neg_g": 0.0015881374711170793, + "v_norm": 1.3238589763641357, + "g_norm": 3.892448663711548, + "hv_norm": 1.1407546997070312, + "cos_v_hv": 0.009155813604593277, + "hg_norm": 289.7705993652344, + "cos_g_hg": 0.4690590798854828, + "v_parallel_norm": 5.422019967227243e-05, + "v_perp_norm": 1.3238589763641357, + "layer_1_v_norm": 0.061650581657886505, + "layer_1_cos_v_neg_g": 0.00984332524240017, + "layer_2_v_norm": 0.05836694315075874, + "layer_2_cos_v_neg_g": 0.010693118907511234, + "layer_3_v_norm": 0.05694109946489334, + "layer_3_cos_v_neg_g": 0.009765282273292542, + "layer_4_v_norm": 0.059359706938266754, + "layer_4_cos_v_neg_g": 0.009458189830183983, + "layer_5_v_norm": 0.060262423008680344, + "layer_5_cos_v_neg_g": 0.010006068274378777, + "layer_6_v_norm": 0.061044711619615555, + "layer_6_cos_v_neg_g": 0.00936050433665514, + "layer_7_v_norm": 0.06109796464443207, + "layer_7_cos_v_neg_g": 0.011123855598270893, + "layer_8_v_norm": 0.06118132546544075, + "layer_8_cos_v_neg_g": 0.010440955869853497, + "layer_9_v_norm": 0.061080556362867355, + "layer_9_cos_v_neg_g": 0.010465731844305992, + "layer_10_v_norm": 0.06122676655650139, + "layer_10_cos_v_neg_g": 0.009314604103565216, + "layer_11_v_norm": 0.06115102395415306, + "layer_11_cos_v_neg_g": 0.008039399981498718, + "layer_12_v_norm": 0.06116415187716484, + "layer_12_cos_v_neg_g": 0.007595163304358721, + "layer_1_sharpness": 0.14204581081867218, + "layer_2_sharpness": 0.056624915450811386, + "layer_3_sharpness": 0.05703715234994888, + "layer_4_sharpness": 0.029014436528086662, + "layer_5_sharpness": 0.02937600016593933, + "layer_6_sharpness": 0.03452584892511368, + "layer_7_sharpness": 0.041389286518096924, + "layer_8_sharpness": 0.04101875424385071, + "layer_9_sharpness": 0.031084667891263962, + "layer_10_sharpness": 0.02374756895005703, + "layer_11_sharpness": 0.023936301469802856, + "layer_12_sharpness": 0.036572352051734924 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_3500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..f520581d350508b9de90d894745474ffa3ca489d --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3468728065490723, + "total_l1_linf_norm": 8079.7685546875, + "total_spectral_norm": 1.3468726873397827, + "layer_1_update_fnorm": 0.06147196516394615, + "layer_1_max_l1_linf_norm": 0.2711202800273895, + "layer_1_max_spectral_norm": 0.0061539108864963055, + "layer_2_update_fnorm": 0.05897817015647888, + "layer_2_max_l1_linf_norm": 0.319884717464447, + "layer_2_max_spectral_norm": 0.007180171553045511, + "layer_3_update_fnorm": 0.057754773646593094, + "layer_3_max_l1_linf_norm": 0.3282538056373596, + "layer_3_max_spectral_norm": 0.007377095054835081, + "layer_4_update_fnorm": 0.05933809652924538, + "layer_4_max_l1_linf_norm": 0.320911705493927, + "layer_4_max_spectral_norm": 0.0072275386191904545, + "layer_5_update_fnorm": 0.06038328632712364, + "layer_5_max_l1_linf_norm": 0.32352784276008606, + "layer_5_max_spectral_norm": 0.0072373636066913605, + 
"layer_6_update_fnorm": 0.06129319593310356, + "layer_6_max_l1_linf_norm": 0.33533141016960144, + "layer_6_max_spectral_norm": 0.007506146561354399, + "layer_7_update_fnorm": 0.06120758503675461, + "layer_7_max_l1_linf_norm": 0.3056849241256714, + "layer_7_max_spectral_norm": 0.0068406760692596436, + "layer_8_update_fnorm": 0.06117726489901543, + "layer_8_max_l1_linf_norm": 0.3044978380203247, + "layer_8_max_spectral_norm": 0.006788118276745081, + "layer_9_update_fnorm": 0.061120036989450455, + "layer_9_max_l1_linf_norm": 0.32003918290138245, + "layer_9_max_spectral_norm": 0.00711726862937212, + "layer_10_update_fnorm": 0.06121324747800827, + "layer_10_max_l1_linf_norm": 0.3370468020439148, + "layer_10_max_spectral_norm": 0.007513049058616161, + "layer_11_update_fnorm": 0.06105194240808487, + "layer_11_max_l1_linf_norm": 0.3285580575466156, + "layer_11_max_spectral_norm": 0.0073706177063286304, + "layer_12_update_fnorm": 0.061063844710588455, + "layer_12_max_l1_linf_norm": 0.2971464991569519, + "layer_12_max_spectral_norm": 0.006811082828789949, + "total_sharpness": 0.006921213585883379, + "ip_v_neg_g": 0.0076406169682741165, + "cos_v_neg_g": 0.0013973513850942254, + "v_norm": 1.3468728065490723, + "g_norm": 4.059720993041992, + "hv_norm": 1.0563468933105469, + "cos_v_hv": 0.008824747987091541, + "hg_norm": 348.8305969238281, + "cos_g_hg": 0.5566153526306152, + "v_parallel_norm": 4.674864612752572e-05, + "v_perp_norm": 1.3468728065490723, + "layer_1_v_norm": 0.06147196516394615, + "layer_1_cos_v_neg_g": 0.01115013100206852, + "layer_2_v_norm": 0.05897817015647888, + "layer_2_cos_v_neg_g": 0.012410874478518963, + "layer_3_v_norm": 0.057754773646593094, + "layer_3_cos_v_neg_g": 0.012438845820724964, + "layer_4_v_norm": 0.05933809652924538, + "layer_4_cos_v_neg_g": 0.010207335464656353, + "layer_5_v_norm": 0.06038328632712364, + "layer_5_cos_v_neg_g": 0.009613563306629658, + "layer_6_v_norm": 0.06129319965839386, + "layer_6_cos_v_neg_g": 0.009664983488619328, + "layer_7_v_norm": 0.06120758503675461, + "layer_7_cos_v_neg_g": 0.009074250236153603, + "layer_8_v_norm": 0.061177268624305725, + "layer_8_cos_v_neg_g": 0.009019182063639164, + "layer_9_v_norm": 0.061120036989450455, + "layer_9_cos_v_neg_g": 0.00774720311164856, + "layer_10_v_norm": 0.06121324747800827, + "layer_10_cos_v_neg_g": 0.006625584326684475, + "layer_11_v_norm": 0.06105194240808487, + "layer_11_cos_v_neg_g": 0.005833551287651062, + "layer_12_v_norm": 0.061063844710588455, + "layer_12_cos_v_neg_g": 0.004443131387233734, + "layer_1_sharpness": 0.05211455747485161, + "layer_2_sharpness": 0.032281577587127686, + "layer_3_sharpness": 0.060411833226680756, + "layer_4_sharpness": 0.030993767082691193, + "layer_5_sharpness": 0.034648213535547256, + "layer_6_sharpness": 0.04174187406897545, + "layer_7_sharpness": 0.04430370405316353, + "layer_8_sharpness": 0.042146872729063034, + "layer_9_sharpness": 0.02998451329767704, + "layer_10_sharpness": 0.023370420560240746, + "layer_11_sharpness": 0.02014845609664917, + "layer_12_sharpness": 0.023160500451922417 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_4000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..4fe4448a6fd0c91b289b7e46f0e68737ae5c23c0 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + 
"total_update_fnorm": 1.354966163635254, + "total_l1_linf_norm": 8122.25439453125, + "total_spectral_norm": 1.3549665212631226, + "layer_1_update_fnorm": 0.06115415319800377, + "layer_1_max_l1_linf_norm": 0.26487278938293457, + "layer_1_max_spectral_norm": 0.006071400362998247, + "layer_2_update_fnorm": 0.058438848704099655, + "layer_2_max_l1_linf_norm": 0.307525098323822, + "layer_2_max_spectral_norm": 0.006952189840376377, + "layer_3_update_fnorm": 0.05796917900443077, + "layer_3_max_l1_linf_norm": 0.32002896070480347, + "layer_3_max_spectral_norm": 0.007187696173787117, + "layer_4_update_fnorm": 0.05933809652924538, + "layer_4_max_l1_linf_norm": 0.3341637849807739, + "layer_4_max_spectral_norm": 0.0075384872034192085, + "layer_5_update_fnorm": 0.060272496193647385, + "layer_5_max_l1_linf_norm": 0.29926830530166626, + "layer_5_max_spectral_norm": 0.006802822463214397, + "layer_6_update_fnorm": 0.06106989085674286, + "layer_6_max_l1_linf_norm": 0.2872481942176819, + "layer_6_max_spectral_norm": 0.006493370980024338, + "layer_7_update_fnorm": 0.06111758574843407, + "layer_7_max_l1_linf_norm": 0.297305166721344, + "layer_7_max_spectral_norm": 0.006645502056926489, + "layer_8_update_fnorm": 0.0612010732293129, + "layer_8_max_l1_linf_norm": 0.3016711473464966, + "layer_8_max_spectral_norm": 0.0067554679699242115, + "layer_9_update_fnorm": 0.06109599769115448, + "layer_9_max_l1_linf_norm": 0.30627843737602234, + "layer_9_max_spectral_norm": 0.00687923189252615, + "layer_10_update_fnorm": 0.061205845326185226, + "layer_10_max_l1_linf_norm": 0.34132757782936096, + "layer_10_max_spectral_norm": 0.0076540568843483925, + "layer_11_update_fnorm": 0.06116294488310814, + "layer_11_max_l1_linf_norm": 0.35838642716407776, + "layer_11_max_spectral_norm": 0.008043334819376469, + "layer_12_update_fnorm": 0.06121000275015831, + "layer_12_max_l1_linf_norm": 0.35596880316734314, + "layer_12_max_spectral_norm": 0.007969745434820652, + "total_sharpness": 0.005958817433565855, + "ip_v_neg_g": 0.0069830287247896194, + "cos_v_neg_g": 0.0014574030647054315, + "v_norm": 1.354966163635254, + "g_norm": 3.5361907482147217, + "hv_norm": 0.9132518172264099, + "cos_v_hv": 0.00884093064814806, + "hg_norm": 180.07516479492188, + "cos_g_hg": 0.4649503231048584, + "v_parallel_norm": 4.566574716591276e-05, + "v_perp_norm": 1.354966163635254, + "layer_1_v_norm": 0.06115415319800377, + "layer_1_cos_v_neg_g": 0.00881665013730526, + "layer_2_v_norm": 0.058438848704099655, + "layer_2_cos_v_neg_g": 0.011417612433433533, + "layer_3_v_norm": 0.05796917900443077, + "layer_3_cos_v_neg_g": 0.008272345177829266, + "layer_4_v_norm": 0.05933809652924538, + "layer_4_cos_v_neg_g": 0.007489919662475586, + "layer_5_v_norm": 0.060272496193647385, + "layer_5_cos_v_neg_g": 0.007792588323354721, + "layer_6_v_norm": 0.06106989085674286, + "layer_6_cos_v_neg_g": 0.008481187745928764, + "layer_7_v_norm": 0.06111758574843407, + "layer_7_cos_v_neg_g": 0.010359838604927063, + "layer_8_v_norm": 0.0612010732293129, + "layer_8_cos_v_neg_g": 0.010453615337610245, + "layer_9_v_norm": 0.06109599769115448, + "layer_9_cos_v_neg_g": 0.010063239373266697, + "layer_10_v_norm": 0.061205845326185226, + "layer_10_cos_v_neg_g": 0.009069232270121574, + "layer_11_v_norm": 0.06116294488310814, + "layer_11_cos_v_neg_g": 0.008078751154243946, + "layer_12_v_norm": 0.06121000275015831, + "layer_12_cos_v_neg_g": 0.007197442464530468, + "layer_1_sharpness": 0.04858405143022537, + "layer_2_sharpness": 0.0353640578687191, + "layer_3_sharpness": 0.040920380502939224, + 
"layer_4_sharpness": 0.02528538554906845, + "layer_5_sharpness": 0.025386210530996323, + "layer_6_sharpness": 0.027445152401924133, + "layer_7_sharpness": 0.03785482794046402, + "layer_8_sharpness": 0.04106336086988449, + "layer_9_sharpness": 0.029159655794501305, + "layer_10_sharpness": 0.022770481184124947, + "layer_11_sharpness": 0.023654211312532425, + "layer_12_sharpness": 0.0338183157145977 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_4500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..a594e4821c9ee312743b075515e843eed43ce19d --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3563470840454102, + "total_l1_linf_norm": 8128.87158203125, + "total_spectral_norm": 1.3563469648361206, + "layer_1_update_fnorm": 0.06156415492296219, + "layer_1_max_l1_linf_norm": 0.3264409899711609, + "layer_1_max_spectral_norm": 0.007337711285799742, + "layer_2_update_fnorm": 0.05895135924220085, + "layer_2_max_l1_linf_norm": 0.3651745319366455, + "layer_2_max_spectral_norm": 0.008218673057854176, + "layer_3_update_fnorm": 0.05753667652606964, + "layer_3_max_l1_linf_norm": 0.36578091979026794, + "layer_3_max_spectral_norm": 0.008193250745534897, + "layer_4_update_fnorm": 0.05969666689634323, + "layer_4_max_l1_linf_norm": 0.36763641238212585, + "layer_4_max_spectral_norm": 0.008229917846620083, + "layer_5_update_fnorm": 0.06067253649234772, + "layer_5_max_l1_linf_norm": 0.34753721952438354, + "layer_5_max_spectral_norm": 0.007778629660606384, + "layer_6_update_fnorm": 0.061333067715168, + "layer_6_max_l1_linf_norm": 0.3409368395805359, + "layer_6_max_spectral_norm": 0.007639893796294928, + "layer_7_update_fnorm": 0.061335887759923935, + "layer_7_max_l1_linf_norm": 0.34628912806510925, + "layer_7_max_spectral_norm": 0.007742739748209715, + "layer_8_update_fnorm": 0.061287254095077515, + "layer_8_max_l1_linf_norm": 0.3273782432079315, + "layer_8_max_spectral_norm": 0.007372476626187563, + "layer_9_update_fnorm": 0.061192836612463, + "layer_9_max_l1_linf_norm": 0.34067749977111816, + "layer_9_max_spectral_norm": 0.007635710760951042, + "layer_10_update_fnorm": 0.06137983873486519, + "layer_10_max_l1_linf_norm": 0.3616864085197449, + "layer_10_max_spectral_norm": 0.008160700090229511, + "layer_11_update_fnorm": 0.0613531693816185, + "layer_11_max_l1_linf_norm": 0.38947850465774536, + "layer_11_max_spectral_norm": 0.008687905967235565, + "layer_12_update_fnorm": 0.06134560704231262, + "layer_12_max_l1_linf_norm": 0.369913250207901, + "layer_12_max_spectral_norm": 0.008324670605361462, + "total_sharpness": 0.006490487139672041, + "ip_v_neg_g": 0.005672144703567028, + "cos_v_neg_g": 0.0011898946249857545, + "v_norm": 1.3563470840454102, + "g_norm": 3.514535665512085, + "hv_norm": 0.9399426579475403, + "cos_v_hv": 0.009365840815007687, + "hg_norm": 297.91302490234375, + "cos_g_hg": 0.4319918751716614, + "v_parallel_norm": 4.451677523320541e-05, + "v_perp_norm": 1.3563470840454102, + "layer_1_v_norm": 0.06156415492296219, + "layer_1_cos_v_neg_g": 0.008801820687949657, + "layer_2_v_norm": 0.05895135924220085, + "layer_2_cos_v_neg_g": 0.00417801970615983, + "layer_3_v_norm": 0.05753667652606964, + "layer_3_cos_v_neg_g": 0.0026060643140226603, + "layer_4_v_norm": 0.05969666689634323, + 
"layer_4_cos_v_neg_g": 0.004701330792158842, + "layer_5_v_norm": 0.06067253649234772, + "layer_5_cos_v_neg_g": 0.007202776148915291, + "layer_6_v_norm": 0.061333067715168, + "layer_6_cos_v_neg_g": 0.00795714184641838, + "layer_7_v_norm": 0.061335887759923935, + "layer_7_cos_v_neg_g": 0.006231700070202351, + "layer_8_v_norm": 0.061287254095077515, + "layer_8_cos_v_neg_g": 0.007441581692546606, + "layer_9_v_norm": 0.061192836612463, + "layer_9_cos_v_neg_g": 0.007922456599771976, + "layer_10_v_norm": 0.06137983873486519, + "layer_10_cos_v_neg_g": 0.0077851261012256145, + "layer_11_v_norm": 0.0613531731069088, + "layer_11_cos_v_neg_g": 0.008040708489716053, + "layer_12_v_norm": 0.06134560704231262, + "layer_12_cos_v_neg_g": 0.007876677438616753, + "layer_1_sharpness": 0.06611871719360352, + "layer_2_sharpness": 0.026312176138162613, + "layer_3_sharpness": 0.06047340855002403, + "layer_4_sharpness": 0.025108754634857178, + "layer_5_sharpness": 0.03034890443086624, + "layer_6_sharpness": 0.037179045379161835, + "layer_7_sharpness": 0.04092098027467728, + "layer_8_sharpness": 0.03773438557982445, + "layer_9_sharpness": 0.02728784643113613, + "layer_10_sharpness": 0.023590443655848503, + "layer_11_sharpness": 0.02373942732810974, + "layer_12_sharpness": 0.03563658148050308 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..50edb047b013ab757c86bbbe0af6faef1163197d --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 0.9832406640052795, + "total_l1_linf_norm": 5884.59033203125, + "total_spectral_norm": 0.9832406640052795, + "layer_1_update_fnorm": 0.04391435161232948, + "layer_1_max_l1_linf_norm": 0.11911968886852264, + "layer_1_max_spectral_norm": 0.0027237304020673037, + "layer_2_update_fnorm": 0.04267079010605812, + "layer_2_max_l1_linf_norm": 0.1448899209499359, + "layer_2_max_spectral_norm": 0.0032752889674156904, + "layer_3_update_fnorm": 0.04178805276751518, + "layer_3_max_l1_linf_norm": 0.15339314937591553, + "layer_3_max_spectral_norm": 0.003474801080301404, + "layer_4_update_fnorm": 0.04151464253664017, + "layer_4_max_l1_linf_norm": 0.16045838594436646, + "layer_4_max_spectral_norm": 0.0063051762990653515, + "layer_5_update_fnorm": 0.041438885033130646, + "layer_5_max_l1_linf_norm": 0.15924639999866486, + "layer_5_max_spectral_norm": 0.006284652277827263, + "layer_6_update_fnorm": 0.04176691919565201, + "layer_6_max_l1_linf_norm": 0.16812855005264282, + "layer_6_max_spectral_norm": 0.0072241248562932014, + "layer_7_update_fnorm": 0.04208701103925705, + "layer_7_max_l1_linf_norm": 0.18934917449951172, + "layer_7_max_spectral_norm": 0.008079340681433678, + "layer_8_update_fnorm": 0.04231699928641319, + "layer_8_max_l1_linf_norm": 0.16029274463653564, + "layer_8_max_spectral_norm": 0.006738868076354265, + "layer_9_update_fnorm": 0.04253910481929779, + "layer_9_max_l1_linf_norm": 0.16610251367092133, + "layer_9_max_spectral_norm": 0.006613309029489756, + "layer_10_update_fnorm": 0.04276006668806076, + "layer_10_max_l1_linf_norm": 0.15853600203990936, + "layer_10_max_spectral_norm": 0.006677994970232248, + "layer_11_update_fnorm": 0.04282039776444435, + "layer_11_max_l1_linf_norm": 0.16024275124073029, + "layer_11_max_spectral_norm": 
0.0042294529266655445, + "layer_12_update_fnorm": 0.04296143725514412, + "layer_12_max_l1_linf_norm": 0.16138774156570435, + "layer_12_max_spectral_norm": 0.004154802765697241, + "total_sharpness": 0.032561928033828735, + "ip_v_neg_g": 0.014239495620131493, + "cos_v_neg_g": 0.001886757556349039, + "v_norm": 0.9832406640052795, + "g_norm": 7.6757121086120605, + "hv_norm": 5.585876941680908, + "cos_v_hv": 0.005731635726988316, + "hg_norm": 9007.833984375, + "cos_g_hg": 0.6811404824256897, + "v_parallel_norm": 0.00010244918667012826, + "v_perp_norm": 0.9832406640052795, + "layer_1_v_norm": 0.04391435161232948, + "layer_1_cos_v_neg_g": 0.01451273262500763, + "layer_2_v_norm": 0.04267079010605812, + "layer_2_cos_v_neg_g": 0.014656390063464642, + "layer_3_v_norm": 0.04178805276751518, + "layer_3_cos_v_neg_g": 0.016160553321242332, + "layer_4_v_norm": 0.04151464253664017, + "layer_4_cos_v_neg_g": 0.01656261458992958, + "layer_5_v_norm": 0.041438885033130646, + "layer_5_cos_v_neg_g": 0.016178464516997337, + "layer_6_v_norm": 0.04176691919565201, + "layer_6_cos_v_neg_g": 0.01404116116464138, + "layer_7_v_norm": 0.04208701103925705, + "layer_7_cos_v_neg_g": 0.0132229533046484, + "layer_8_v_norm": 0.04231699928641319, + "layer_8_cos_v_neg_g": 0.012186942622065544, + "layer_9_v_norm": 0.04253910481929779, + "layer_9_cos_v_neg_g": 0.011629290878772736, + "layer_10_v_norm": 0.04276006668806076, + "layer_10_cos_v_neg_g": 0.010446937754750252, + "layer_11_v_norm": 0.04282040148973465, + "layer_11_cos_v_neg_g": 0.009624115191400051, + "layer_12_v_norm": 0.04296143725514412, + "layer_12_cos_v_neg_g": 0.006473622750490904, + "layer_1_sharpness": 0.5182405710220337, + "layer_2_sharpness": 0.2853761315345764, + "layer_3_sharpness": 0.2532748281955719, + "layer_4_sharpness": 0.21429085731506348, + "layer_5_sharpness": 0.1605987399816513, + "layer_6_sharpness": 0.1099657490849495, + "layer_7_sharpness": 0.0814221054315567, + "layer_8_sharpness": 0.0658336728811264, + "layer_9_sharpness": 0.05261944979429245, + "layer_10_sharpness": 0.04385418817400932, + "layer_11_sharpness": 0.03542666509747505, + "layer_12_sharpness": 0.025597887113690376 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_5000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..4dfb5347323867c0edab4a5fb20ec1dae85551be --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.344731092453003, + "total_l1_linf_norm": 8070.38671875, + "total_spectral_norm": 1.344731092453003, + "layer_1_update_fnorm": 0.06140230596065521, + "layer_1_max_l1_linf_norm": 0.3205619156360626, + "layer_1_max_spectral_norm": 0.007267266511917114, + "layer_2_update_fnorm": 0.058977190405130386, + "layer_2_max_l1_linf_norm": 0.34310492873191833, + "layer_2_max_spectral_norm": 0.00773969478905201, + "layer_3_update_fnorm": 0.05820826068520546, + "layer_3_max_l1_linf_norm": 0.3410520553588867, + "layer_3_max_spectral_norm": 0.007721350062638521, + "layer_4_update_fnorm": 0.059833597391843796, + "layer_4_max_l1_linf_norm": 0.3382914066314697, + "layer_4_max_spectral_norm": 0.0075870081782341, + "layer_5_update_fnorm": 0.060745157301425934, + "layer_5_max_l1_linf_norm": 0.3402724862098694, + "layer_5_max_spectral_norm": 0.00763690797612071, + "layer_6_update_fnorm": 
0.061267491430044174, + "layer_6_max_l1_linf_norm": 0.30771714448928833, + "layer_6_max_spectral_norm": 0.006973603740334511, + "layer_7_update_fnorm": 0.061133306473493576, + "layer_7_max_l1_linf_norm": 0.2928897440433502, + "layer_7_max_spectral_norm": 0.006641013082116842, + "layer_8_update_fnorm": 0.06116480007767677, + "layer_8_max_l1_linf_norm": 0.2951565086841583, + "layer_8_max_spectral_norm": 0.006637452635914087, + "layer_9_update_fnorm": 0.06110527738928795, + "layer_9_max_l1_linf_norm": 0.29768794775009155, + "layer_9_max_spectral_norm": 0.006721405778080225, + "layer_10_update_fnorm": 0.061346959322690964, + "layer_10_max_l1_linf_norm": 0.3455520272254944, + "layer_10_max_spectral_norm": 0.0078092776238918304, + "layer_11_update_fnorm": 0.06126851588487625, + "layer_11_max_l1_linf_norm": 0.36034727096557617, + "layer_11_max_spectral_norm": 0.008117624558508396, + "layer_12_update_fnorm": 0.06125478446483612, + "layer_12_max_l1_linf_norm": 0.35568520426750183, + "layer_12_max_spectral_norm": 0.008046004921197891, + "total_sharpness": 0.004557955078780651, + "ip_v_neg_g": 0.004943569656461477, + "cos_v_neg_g": 0.0010654858779162169, + "v_norm": 1.344731092453003, + "g_norm": 3.450305223464966, + "hv_norm": 0.6912949085235596, + "cos_v_hv": 0.008866294287145138, + "hg_norm": 310.29071044921875, + "cos_g_hg": 0.4171808958053589, + "v_parallel_norm": 3.925981218344532e-05, + "v_perp_norm": 1.344731092453003, + "layer_1_v_norm": 0.06140230596065521, + "layer_1_cos_v_neg_g": 0.007225526962429285, + "layer_2_v_norm": 0.058977190405130386, + "layer_2_cos_v_neg_g": 0.007706007454544306, + "layer_3_v_norm": 0.05820826068520546, + "layer_3_cos_v_neg_g": 0.008121617138385773, + "layer_4_v_norm": 0.059833597391843796, + "layer_4_cos_v_neg_g": 0.007177472580224276, + "layer_5_v_norm": 0.060745157301425934, + "layer_5_cos_v_neg_g": 0.007030599284917116, + "layer_6_v_norm": 0.061267491430044174, + "layer_6_cos_v_neg_g": 0.006380552425980568, + "layer_7_v_norm": 0.061133306473493576, + "layer_7_cos_v_neg_g": 0.005150621756911278, + "layer_8_v_norm": 0.06116480007767677, + "layer_8_cos_v_neg_g": 0.00527941295877099, + "layer_9_v_norm": 0.06110527738928795, + "layer_9_cos_v_neg_g": 0.005395091138780117, + "layer_10_v_norm": 0.061346959322690964, + "layer_10_cos_v_neg_g": 0.005417664535343647, + "layer_11_v_norm": 0.06126851588487625, + "layer_11_cos_v_neg_g": 0.005567368119955063, + "layer_12_v_norm": 0.06125478446483612, + "layer_12_cos_v_neg_g": 0.005131697282195091, + "layer_1_sharpness": 0.040869101881980896, + "layer_2_sharpness": 0.023191392421722412, + "layer_3_sharpness": 0.039856284856796265, + "layer_4_sharpness": 0.02185514010488987, + "layer_5_sharpness": 0.026873687282204628, + "layer_6_sharpness": 0.026669658720493317, + "layer_7_sharpness": 0.0308051947504282, + "layer_8_sharpness": 0.030481044203042984, + "layer_9_sharpness": 0.021490881219506264, + "layer_10_sharpness": 0.01667703315615654, + "layer_11_sharpness": 0.015950331464409828, + "layer_12_sharpness": 0.022858213633298874 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_5500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..a3cb1dcb6bed153ca623253511c1f8d71a8c8ec6 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + 
"total_update_fnorm": 1.3459688425064087, + "total_l1_linf_norm": 8081.01953125, + "total_spectral_norm": 1.3459689617156982, + "layer_1_update_fnorm": 0.061252161860466, + "layer_1_max_l1_linf_norm": 0.3304598331451416, + "layer_1_max_spectral_norm": 0.007348613813519478, + "layer_2_update_fnorm": 0.05886739492416382, + "layer_2_max_l1_linf_norm": 0.34847962856292725, + "layer_2_max_spectral_norm": 0.007779392879456282, + "layer_3_update_fnorm": 0.05830223485827446, + "layer_3_max_l1_linf_norm": 0.34176018834114075, + "layer_3_max_spectral_norm": 0.00771003682166338, + "layer_4_update_fnorm": 0.060028042644262314, + "layer_4_max_l1_linf_norm": 0.34909963607788086, + "layer_4_max_spectral_norm": 0.007844243198633194, + "layer_5_update_fnorm": 0.06084800139069557, + "layer_5_max_l1_linf_norm": 0.3325071632862091, + "layer_5_max_spectral_norm": 0.007435941137373447, + "layer_6_update_fnorm": 0.06130925938487053, + "layer_6_max_l1_linf_norm": 0.3187013864517212, + "layer_6_max_spectral_norm": 0.00715005025267601, + "layer_7_update_fnorm": 0.061279457062482834, + "layer_7_max_l1_linf_norm": 0.31533515453338623, + "layer_7_max_spectral_norm": 0.00712888827547431, + "layer_8_update_fnorm": 0.06128997355699539, + "layer_8_max_l1_linf_norm": 0.3094559609889984, + "layer_8_max_spectral_norm": 0.006952138151973486, + "layer_9_update_fnorm": 0.06123220548033714, + "layer_9_max_l1_linf_norm": 0.3241318464279175, + "layer_9_max_spectral_norm": 0.007298061158508062, + "layer_10_update_fnorm": 0.06130031868815422, + "layer_10_max_l1_linf_norm": 0.3467596173286438, + "layer_10_max_spectral_norm": 0.007763644214719534, + "layer_11_update_fnorm": 0.06130719184875488, + "layer_11_max_l1_linf_norm": 0.3618773818016052, + "layer_11_max_spectral_norm": 0.008112679235637188, + "layer_12_update_fnorm": 0.06125019118189812, + "layer_12_max_l1_linf_norm": 0.37091219425201416, + "layer_12_max_spectral_norm": 0.008317534811794758, + "total_sharpness": 0.004505552351474762, + "ip_v_neg_g": 0.0031511918641626835, + "cos_v_neg_g": 0.0004388278175611049, + "v_norm": 1.3459688425064087, + "g_norm": 5.335138320922852, + "hv_norm": 1.0195379257202148, + "cos_v_hv": 0.005948119331151247, + "hg_norm": 1046.2872314453125, + "cos_g_hg": 0.6716752648353577, + "v_parallel_norm": 2.0445577320060693e-05, + "v_perp_norm": 1.3459688425064087, + "layer_1_v_norm": 0.061252161860466, + "layer_1_cos_v_neg_g": 0.010395730845630169, + "layer_2_v_norm": 0.05886739492416382, + "layer_2_cos_v_neg_g": 0.008456561714410782, + "layer_3_v_norm": 0.05830223485827446, + "layer_3_cos_v_neg_g": 0.005081391427665949, + "layer_4_v_norm": 0.060028042644262314, + "layer_4_cos_v_neg_g": 0.0009137113811448216, + "layer_5_v_norm": 0.06084800139069557, + "layer_5_cos_v_neg_g": 0.0019883825443685055, + "layer_6_v_norm": 0.06130925565958023, + "layer_6_cos_v_neg_g": 0.0008756286697462201, + "layer_7_v_norm": 0.061279457062482834, + "layer_7_cos_v_neg_g": 0.0004652739444281906, + "layer_8_v_norm": 0.06128997355699539, + "layer_8_cos_v_neg_g": 0.0011677824659273028, + "layer_9_v_norm": 0.06123220548033714, + "layer_9_cos_v_neg_g": -0.0004827359225600958, + "layer_10_v_norm": 0.06130031868815422, + "layer_10_cos_v_neg_g": 0.0010238519171252847, + "layer_11_v_norm": 0.06130719557404518, + "layer_11_cos_v_neg_g": 0.0010995620395988226, + "layer_12_v_norm": 0.06125019118189812, + "layer_12_cos_v_neg_g": 0.00028186055715195835, + "layer_1_sharpness": 0.16640885174274445, + "layer_2_sharpness": 0.045188963413238525, + "layer_3_sharpness": 0.033299706876277924, + 
"layer_4_sharpness": 0.01276437472552061, + "layer_5_sharpness": 0.016457509249448776, + "layer_6_sharpness": 0.019982390105724335, + "layer_7_sharpness": 0.02471098303794861, + "layer_8_sharpness": 0.027113929390907288, + "layer_9_sharpness": 0.018847631290555, + "layer_10_sharpness": 0.012627477757632732, + "layer_11_sharpness": 0.01307647954672575, + "layer_12_sharpness": 0.018755385652184486 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_6000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..99a4efd9f72982cfaca78d15c2104b76e08681f9 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3442034721374512, + "total_l1_linf_norm": 8067.60009765625, + "total_spectral_norm": 1.3442035913467407, + "layer_1_update_fnorm": 0.061154864728450775, + "layer_1_max_l1_linf_norm": 0.3383864760398865, + "layer_1_max_spectral_norm": 0.007609596010297537, + "layer_2_update_fnorm": 0.058813612908124924, + "layer_2_max_l1_linf_norm": 0.35875654220581055, + "layer_2_max_spectral_norm": 0.008096568286418915, + "layer_3_update_fnorm": 0.05823967233300209, + "layer_3_max_l1_linf_norm": 0.3561984896659851, + "layer_3_max_spectral_norm": 0.008022705093026161, + "layer_4_update_fnorm": 0.06006096675992012, + "layer_4_max_l1_linf_norm": 0.36332377791404724, + "layer_4_max_spectral_norm": 0.00814024731516838, + "layer_5_update_fnorm": 0.060893796384334564, + "layer_5_max_l1_linf_norm": 0.3324878215789795, + "layer_5_max_spectral_norm": 0.00746598606929183, + "layer_6_update_fnorm": 0.06123494729399681, + "layer_6_max_l1_linf_norm": 0.30399078130722046, + "layer_6_max_spectral_norm": 0.006803964730352163, + "layer_7_update_fnorm": 0.06129126995801926, + "layer_7_max_l1_linf_norm": 0.3129909634590149, + "layer_7_max_spectral_norm": 0.00703820027410984, + "layer_8_update_fnorm": 0.061245255172252655, + "layer_8_max_l1_linf_norm": 0.3152872622013092, + "layer_8_max_spectral_norm": 0.007047221530228853, + "layer_9_update_fnorm": 0.06124640628695488, + "layer_9_max_l1_linf_norm": 0.3344079554080963, + "layer_9_max_spectral_norm": 0.007479014340788126, + "layer_10_update_fnorm": 0.06140756234526634, + "layer_10_max_l1_linf_norm": 0.3448520004749298, + "layer_10_max_spectral_norm": 0.007772898301482201, + "layer_11_update_fnorm": 0.06131730228662491, + "layer_11_max_l1_linf_norm": 0.37213030457496643, + "layer_11_max_spectral_norm": 0.008260547183454037, + "layer_12_update_fnorm": 0.0613621324300766, + "layer_12_max_l1_linf_norm": 0.3650614321231842, + "layer_12_max_spectral_norm": 0.008226699195802212, + "total_sharpness": 0.0056226421147584915, + "ip_v_neg_g": 0.005113594233989716, + "cos_v_neg_g": 0.001197568723000586, + "v_norm": 1.3442034721374512, + "g_norm": 3.1765873432159424, + "hv_norm": 0.8608102202415466, + "cos_v_hv": 0.008780071511864662, + "hg_norm": 134.2747344970703, + "cos_g_hg": 0.45137500762939453, + "v_parallel_norm": 3.480501618469134e-05, + "v_perp_norm": 1.3442034721374512, + "layer_1_v_norm": 0.061154864728450775, + "layer_1_cos_v_neg_g": 0.010841233655810356, + "layer_2_v_norm": 0.058813612908124924, + "layer_2_cos_v_neg_g": 0.008857344277203083, + "layer_3_v_norm": 0.05823966860771179, + "layer_3_cos_v_neg_g": 0.007336120121181011, + "layer_4_v_norm": 0.06006096675992012, + 
"layer_4_cos_v_neg_g": 0.007105959113687277, + "layer_5_v_norm": 0.060893796384334564, + "layer_5_cos_v_neg_g": 0.0068295300006866455, + "layer_6_v_norm": 0.06123494356870651, + "layer_6_cos_v_neg_g": 0.006382320076227188, + "layer_7_v_norm": 0.06129126995801926, + "layer_7_cos_v_neg_g": 0.006609742995351553, + "layer_8_v_norm": 0.061245255172252655, + "layer_8_cos_v_neg_g": 0.007365053053945303, + "layer_9_v_norm": 0.06124640628695488, + "layer_9_cos_v_neg_g": 0.008176407776772976, + "layer_10_v_norm": 0.06140756234526634, + "layer_10_cos_v_neg_g": 0.006442070472985506, + "layer_11_v_norm": 0.06131730228662491, + "layer_11_cos_v_neg_g": 0.005295095033943653, + "layer_12_v_norm": 0.0613621324300766, + "layer_12_cos_v_neg_g": 0.004478797782212496, + "layer_1_sharpness": 0.12618662416934967, + "layer_2_sharpness": 0.026621533557772636, + "layer_3_sharpness": 0.04269373044371605, + "layer_4_sharpness": 0.02167186699807644, + "layer_5_sharpness": 0.02423066645860672, + "layer_6_sharpness": 0.023442259058356285, + "layer_7_sharpness": 0.030992215499281883, + "layer_8_sharpness": 0.03499050810933113, + "layer_9_sharpness": 0.027272755280137062, + "layer_10_sharpness": 0.019095150753855705, + "layer_11_sharpness": 0.021668214350938797, + "layer_12_sharpness": 0.03928416222333908 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_6500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..85d8c5b2ab40960cc4fb859719b56cb2c26d3389 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3571829795837402, + "total_l1_linf_norm": 8146.8994140625, + "total_spectral_norm": 1.3571827411651611, + "layer_1_update_fnorm": 0.061171941459178925, + "layer_1_max_l1_linf_norm": 0.3167945146560669, + "layer_1_max_spectral_norm": 0.007191143464297056, + "layer_2_update_fnorm": 0.05906171351671219, + "layer_2_max_l1_linf_norm": 0.35453546047210693, + "layer_2_max_spectral_norm": 0.00802799966186285, + "layer_3_update_fnorm": 0.058514200150966644, + "layer_3_max_l1_linf_norm": 0.3440784811973572, + "layer_3_max_spectral_norm": 0.00774873374029994, + "layer_4_update_fnorm": 0.059980928897857666, + "layer_4_max_l1_linf_norm": 0.3400668203830719, + "layer_4_max_spectral_norm": 0.0076729548163712025, + "layer_5_update_fnorm": 0.06090323254466057, + "layer_5_max_l1_linf_norm": 0.3274785876274109, + "layer_5_max_spectral_norm": 0.007367056328803301, + "layer_6_update_fnorm": 0.06122569367289543, + "layer_6_max_l1_linf_norm": 0.3110572397708893, + "layer_6_max_spectral_norm": 0.006981440354138613, + "layer_7_update_fnorm": 0.061262745410203934, + "layer_7_max_l1_linf_norm": 0.3102441430091858, + "layer_7_max_spectral_norm": 0.006931942421942949, + "layer_8_update_fnorm": 0.061116013675928116, + "layer_8_max_l1_linf_norm": 0.2855852246284485, + "layer_8_max_spectral_norm": 0.006434796843677759, + "layer_9_update_fnorm": 0.061193760484457016, + "layer_9_max_l1_linf_norm": 0.3138411045074463, + "layer_9_max_spectral_norm": 0.007057757582515478, + "layer_10_update_fnorm": 0.06135037541389465, + "layer_10_max_l1_linf_norm": 0.34430646896362305, + "layer_10_max_spectral_norm": 0.007718455512076616, + "layer_11_update_fnorm": 0.06120472773909569, + "layer_11_max_l1_linf_norm": 0.3421383500099182, + "layer_11_max_spectral_norm": 
0.007791195996105671, + "layer_12_update_fnorm": 0.06118617206811905, + "layer_12_max_l1_linf_norm": 0.3293198049068451, + "layer_12_max_spectral_norm": 0.00760109955444932, + "total_sharpness": 0.003652874380350113, + "ip_v_neg_g": 0.002529134973883629, + "cos_v_neg_g": 0.0005954725784249604, + "v_norm": 1.3571829795837402, + "g_norm": 3.1294772624969482, + "hv_norm": 0.621657133102417, + "cos_v_hv": 0.007974844425916672, + "hg_norm": 127.77393341064453, + "cos_g_hg": 0.46680307388305664, + "v_parallel_norm": 1.8648701370693743e-05, + "v_perp_norm": 1.3571829795837402, + "layer_1_v_norm": 0.061171941459178925, + "layer_1_cos_v_neg_g": 0.009509471245110035, + "layer_2_v_norm": 0.05906171351671219, + "layer_2_cos_v_neg_g": 0.004513931926339865, + "layer_3_v_norm": 0.058514200150966644, + "layer_3_cos_v_neg_g": 0.003077216213569045, + "layer_4_v_norm": 0.059980928897857666, + "layer_4_cos_v_neg_g": 0.0026541550178080797, + "layer_5_v_norm": 0.06090323254466057, + "layer_5_cos_v_neg_g": 0.0023573709186166525, + "layer_6_v_norm": 0.06122569739818573, + "layer_6_cos_v_neg_g": 0.00376339559443295, + "layer_7_v_norm": 0.061262745410203934, + "layer_7_cos_v_neg_g": 0.003517700359225273, + "layer_8_v_norm": 0.061116013675928116, + "layer_8_cos_v_neg_g": 0.005075972061604261, + "layer_9_v_norm": 0.061193760484457016, + "layer_9_cos_v_neg_g": 0.0034980536438524723, + "layer_10_v_norm": 0.06135037541389465, + "layer_10_cos_v_neg_g": 0.0029951827600598335, + "layer_11_v_norm": 0.06120472773909569, + "layer_11_cos_v_neg_g": 0.002054436830803752, + "layer_12_v_norm": 0.06118617206811905, + "layer_12_cos_v_neg_g": 0.0014719485770910978, + "layer_1_sharpness": 0.0649106353521347, + "layer_2_sharpness": 0.01504144910722971, + "layer_3_sharpness": 0.027629079297184944, + "layer_4_sharpness": 0.015716729685664177, + "layer_5_sharpness": 0.01820644922554493, + "layer_6_sharpness": 0.0250750370323658, + "layer_7_sharpness": 0.02829672582447529, + "layer_8_sharpness": 0.028973672538995743, + "layer_9_sharpness": 0.019610365852713585, + "layer_10_sharpness": 0.014246554113924503, + "layer_11_sharpness": 0.011256045661866665, + "layer_12_sharpness": 0.017023751512169838 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_7000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..4da65484930ce8954c395323712e7f7a1d8a00da --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3529835939407349, + "total_l1_linf_norm": 8121.73876953125, + "total_spectral_norm": 1.352983832359314, + "layer_1_update_fnorm": 0.06083556264638901, + "layer_1_max_l1_linf_norm": 0.305585652589798, + "layer_1_max_spectral_norm": 0.006974867545068264, + "layer_2_update_fnorm": 0.058857642114162445, + "layer_2_max_l1_linf_norm": 0.36039337515830994, + "layer_2_max_spectral_norm": 0.008223436772823334, + "layer_3_update_fnorm": 0.05861378461122513, + "layer_3_max_l1_linf_norm": 0.3683522939682007, + "layer_3_max_spectral_norm": 0.00823634397238493, + "layer_4_update_fnorm": 0.06014513969421387, + "layer_4_max_l1_linf_norm": 0.36549127101898193, + "layer_4_max_spectral_norm": 0.00818549282848835, + "layer_5_update_fnorm": 0.060999613255262375, + "layer_5_max_l1_linf_norm": 0.33778977394104004, + "layer_5_max_spectral_norm": 0.007605815306305885, + 
"layer_6_update_fnorm": 0.06132237985730171, + "layer_6_max_l1_linf_norm": 0.34771233797073364, + "layer_6_max_spectral_norm": 0.00780463544651866, + "layer_7_update_fnorm": 0.06146873161196709, + "layer_7_max_l1_linf_norm": 0.34782344102859497, + "layer_7_max_spectral_norm": 0.007734731771051884, + "layer_8_update_fnorm": 0.061384230852127075, + "layer_8_max_l1_linf_norm": 0.3372586965560913, + "layer_8_max_spectral_norm": 0.007492163218557835, + "layer_9_update_fnorm": 0.061353765428066254, + "layer_9_max_l1_linf_norm": 0.34530550241470337, + "layer_9_max_spectral_norm": 0.0076813120394945145, + "layer_10_update_fnorm": 0.06148873269557953, + "layer_10_max_l1_linf_norm": 0.36799463629722595, + "layer_10_max_spectral_norm": 0.008299077861011028, + "layer_11_update_fnorm": 0.061486903578042984, + "layer_11_max_l1_linf_norm": 0.3744068145751953, + "layer_11_max_spectral_norm": 0.008396023884415627, + "layer_12_update_fnorm": 0.06132369861006737, + "layer_12_max_l1_linf_norm": 0.3758670389652252, + "layer_12_max_spectral_norm": 0.008411443792283535, + "total_sharpness": 0.003972484264522791, + "ip_v_neg_g": 0.0033911389764398336, + "cos_v_neg_g": 0.0007943758391775191, + "v_norm": 1.3529835939407349, + "g_norm": 3.155200958251953, + "hv_norm": 0.6176192164421082, + "cos_v_hv": 0.008702297694981098, + "hg_norm": 127.92202758789062, + "cos_g_hg": 0.43834036588668823, + "v_parallel_norm": 2.4857818061718717e-05, + "v_perp_norm": 1.3529835939407349, + "layer_1_v_norm": 0.06083556264638901, + "layer_1_cos_v_neg_g": 0.007271070033311844, + "layer_2_v_norm": 0.058857642114162445, + "layer_2_cos_v_neg_g": 0.005448824726045132, + "layer_3_v_norm": 0.05861378461122513, + "layer_3_cos_v_neg_g": 0.0038092925678938627, + "layer_4_v_norm": 0.06014513969421387, + "layer_4_cos_v_neg_g": 0.0039543285965919495, + "layer_5_v_norm": 0.060999613255262375, + "layer_5_cos_v_neg_g": 0.004942072555422783, + "layer_6_v_norm": 0.06132238358259201, + "layer_6_cos_v_neg_g": 0.005816648714244366, + "layer_7_v_norm": 0.06146873161196709, + "layer_7_cos_v_neg_g": 0.0049094557762146, + "layer_8_v_norm": 0.061384230852127075, + "layer_8_cos_v_neg_g": 0.005129815544933081, + "layer_9_v_norm": 0.061353765428066254, + "layer_9_cos_v_neg_g": 0.0046546547673642635, + "layer_10_v_norm": 0.06148873269557953, + "layer_10_cos_v_neg_g": 0.003673071740195155, + "layer_11_v_norm": 0.061486903578042984, + "layer_11_cos_v_neg_g": 0.0026350850239396095, + "layer_12_v_norm": 0.06132369861006737, + "layer_12_cos_v_neg_g": 0.0037269489839673042, + "layer_1_sharpness": 0.05430344492197037, + "layer_2_sharpness": 0.016864586621522903, + "layer_3_sharpness": 0.03441649302840233, + "layer_4_sharpness": 0.017894666641950607, + "layer_5_sharpness": 0.019793575629591942, + "layer_6_sharpness": 0.027563203126192093, + "layer_7_sharpness": 0.03367540240287781, + "layer_8_sharpness": 0.03328514099121094, + "layer_9_sharpness": 0.02111375518143177, + "layer_10_sharpness": 0.0157658401876688, + "layer_11_sharpness": 0.013546545058488846, + "layer_12_sharpness": 0.022690704092383385 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_7500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..69bf0990f124883db6842354ca9d2e923144667e --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_7500.json @@ -0,0 +1,88 
@@ +{ + "total_update_fnorm": 1.3524043560028076, + "total_l1_linf_norm": 8109.1220703125, + "total_spectral_norm": 1.3524043560028076, + "layer_1_update_fnorm": 0.06113198399543762, + "layer_1_max_l1_linf_norm": 0.3494628667831421, + "layer_1_max_spectral_norm": 0.007844009436666965, + "layer_2_update_fnorm": 0.05909394845366478, + "layer_2_max_l1_linf_norm": 0.36416807770729065, + "layer_2_max_spectral_norm": 0.008220086805522442, + "layer_3_update_fnorm": 0.05868168920278549, + "layer_3_max_l1_linf_norm": 0.3510845899581909, + "layer_3_max_spectral_norm": 0.00788198597729206, + "layer_4_update_fnorm": 0.06012597307562828, + "layer_4_max_l1_linf_norm": 0.33458876609802246, + "layer_4_max_spectral_norm": 0.007598527707159519, + "layer_5_update_fnorm": 0.06092483922839165, + "layer_5_max_l1_linf_norm": 0.3276079595088959, + "layer_5_max_spectral_norm": 0.007464868016541004, + "layer_6_update_fnorm": 0.06124073639512062, + "layer_6_max_l1_linf_norm": 0.3053284287452698, + "layer_6_max_spectral_norm": 0.0069380565546453, + "layer_7_update_fnorm": 0.06129353120923042, + "layer_7_max_l1_linf_norm": 0.2885552942752838, + "layer_7_max_spectral_norm": 0.006557787302881479, + "layer_8_update_fnorm": 0.06111803650856018, + "layer_8_max_l1_linf_norm": 0.2812463045120239, + "layer_8_max_spectral_norm": 0.006396229844540358, + "layer_9_update_fnorm": 0.06124371662735939, + "layer_9_max_l1_linf_norm": 0.31633299589157104, + "layer_9_max_spectral_norm": 0.00718738604336977, + "layer_10_update_fnorm": 0.06142769008874893, + "layer_10_max_l1_linf_norm": 0.3536577820777893, + "layer_10_max_spectral_norm": 0.008039473555982113, + "layer_11_update_fnorm": 0.061416614800691605, + "layer_11_max_l1_linf_norm": 0.375943124294281, + "layer_11_max_spectral_norm": 0.008470939472317696, + "layer_12_update_fnorm": 0.06133974343538284, + "layer_12_max_l1_linf_norm": 0.3697704076766968, + "layer_12_max_spectral_norm": 0.008313990198075771, + "total_sharpness": 0.003254384733736515, + "ip_v_neg_g": 0.002263463567942381, + "cos_v_neg_g": 0.0005030474276281893, + "v_norm": 1.3524043560028076, + "g_norm": 3.327039957046509, + "hv_norm": 0.5387899279594421, + "cos_v_hv": 0.00816875696182251, + "hg_norm": 239.14100646972656, + "cos_g_hg": 0.4295696020126343, + "v_parallel_norm": 2.0514504285529256e-05, + "v_perp_norm": 1.3524043560028076, + "layer_1_v_norm": 0.06113198399543762, + "layer_1_cos_v_neg_g": 0.0018392818747088313, + "layer_2_v_norm": 0.05909394845366478, + "layer_2_cos_v_neg_g": 0.002785278484225273, + "layer_3_v_norm": 0.05868169292807579, + "layer_3_cos_v_neg_g": 0.0016194161726161838, + "layer_4_v_norm": 0.06012597307562828, + "layer_4_cos_v_neg_g": 0.001185653149150312, + "layer_5_v_norm": 0.06092483922839165, + "layer_5_cos_v_neg_g": 0.001261218567378819, + "layer_6_v_norm": 0.06124074012041092, + "layer_6_cos_v_neg_g": 0.0017076439689844847, + "layer_7_v_norm": 0.06129353120923042, + "layer_7_cos_v_neg_g": 0.002518776338547468, + "layer_8_v_norm": 0.06111803650856018, + "layer_8_cos_v_neg_g": 0.0033825093414634466, + "layer_9_v_norm": 0.06124371662735939, + "layer_9_cos_v_neg_g": 0.004666144028306007, + "layer_10_v_norm": 0.06142769008874893, + "layer_10_cos_v_neg_g": 0.0047164359129965305, + "layer_11_v_norm": 0.061416614800691605, + "layer_11_cos_v_neg_g": 0.00501615833491087, + "layer_12_v_norm": 0.06133974343538284, + "layer_12_cos_v_neg_g": 0.004633837845176458, + "layer_1_sharpness": 0.04744190722703934, + "layer_2_sharpness": 0.014815679751336575, + "layer_3_sharpness": 0.022566556930541992, + 
"layer_4_sharpness": 0.012878045439720154, + "layer_5_sharpness": 0.01758621819317341, + "layer_6_sharpness": 0.019861891865730286, + "layer_7_sharpness": 0.024313809350132942, + "layer_8_sharpness": 0.025781195610761642, + "layer_9_sharpness": 0.018526168540120125, + "layer_10_sharpness": 0.01316190417855978, + "layer_11_sharpness": 0.012790897861123085, + "layer_12_sharpness": 0.021060742437839508 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_8000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..27daba11f2b082c9cb65fbb179e4bbd150afae53 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3433866500854492, + "total_l1_linf_norm": 8059.4462890625, + "total_spectral_norm": 1.3433866500854492, + "layer_1_update_fnorm": 0.06091199070215225, + "layer_1_max_l1_linf_norm": 0.3051219880580902, + "layer_1_max_spectral_norm": 0.006894594058394432, + "layer_2_update_fnorm": 0.05901976302266121, + "layer_2_max_l1_linf_norm": 0.3653643727302551, + "layer_2_max_spectral_norm": 0.008247649297118187, + "layer_3_update_fnorm": 0.05898061394691467, + "layer_3_max_l1_linf_norm": 0.3660791516304016, + "layer_3_max_spectral_norm": 0.008245041593909264, + "layer_4_update_fnorm": 0.0602511428296566, + "layer_4_max_l1_linf_norm": 0.3524549603462219, + "layer_4_max_spectral_norm": 0.007984481751918793, + "layer_5_update_fnorm": 0.061011169105768204, + "layer_5_max_l1_linf_norm": 0.3420681953430176, + "layer_5_max_spectral_norm": 0.007704396732151508, + "layer_6_update_fnorm": 0.06118829548358917, + "layer_6_max_l1_linf_norm": 0.30341196060180664, + "layer_6_max_spectral_norm": 0.006952138617634773, + "layer_7_update_fnorm": 0.06135934963822365, + "layer_7_max_l1_linf_norm": 0.32169461250305176, + "layer_7_max_spectral_norm": 0.007231301628053188, + "layer_8_update_fnorm": 0.061249908059835434, + "layer_8_max_l1_linf_norm": 0.3258207440376282, + "layer_8_max_spectral_norm": 0.007281875237822533, + "layer_9_update_fnorm": 0.06125108525156975, + "layer_9_max_l1_linf_norm": 0.3365100026130676, + "layer_9_max_spectral_norm": 0.007518277503550053, + "layer_10_update_fnorm": 0.06151791661977768, + "layer_10_max_l1_linf_norm": 0.3716263771057129, + "layer_10_max_spectral_norm": 0.008295421488583088, + "layer_11_update_fnorm": 0.06156906858086586, + "layer_11_max_l1_linf_norm": 0.3953491449356079, + "layer_11_max_spectral_norm": 0.00883115641772747, + "layer_12_update_fnorm": 0.061511240899562836, + "layer_12_max_l1_linf_norm": 0.4140954613685608, + "layer_12_max_spectral_norm": 0.009256049990653992, + "total_sharpness": 0.004030665382742882, + "ip_v_neg_g": 0.00328863225877285, + "cos_v_neg_g": 0.0007851147092878819, + "v_norm": 1.3433866500854492, + "g_norm": 3.1180362701416016, + "hv_norm": 0.7048049569129944, + "cos_v_hv": 0.007682610768824816, + "hg_norm": 175.64959716796875, + "cos_g_hg": 0.4401445686817169, + "v_parallel_norm": 2.716923518164549e-05, + "v_perp_norm": 1.3433866500854492, + "layer_1_v_norm": 0.06091199070215225, + "layer_1_cos_v_neg_g": 0.0005810040165670216, + "layer_2_v_norm": 0.05901976302266121, + "layer_2_cos_v_neg_g": 0.0022563550155609846, + "layer_3_v_norm": 0.05898061394691467, + "layer_3_cos_v_neg_g": 0.004534635227173567, + "layer_4_v_norm": 0.0602511428296566, + 
"layer_4_cos_v_neg_g": 0.005510455928742886, + "layer_5_v_norm": 0.061011169105768204, + "layer_5_cos_v_neg_g": 0.0032782915513962507, + "layer_6_v_norm": 0.06118829548358917, + "layer_6_cos_v_neg_g": 0.004402111750096083, + "layer_7_v_norm": 0.06135934963822365, + "layer_7_cos_v_neg_g": 0.00713758310303092, + "layer_8_v_norm": 0.061249908059835434, + "layer_8_cos_v_neg_g": 0.0076927486807107925, + "layer_9_v_norm": 0.06125108525156975, + "layer_9_cos_v_neg_g": 0.006271592807024717, + "layer_10_v_norm": 0.06151791661977768, + "layer_10_cos_v_neg_g": 0.005068025551736355, + "layer_11_v_norm": 0.06156907230615616, + "layer_11_cos_v_neg_g": 0.0037991045974195004, + "layer_12_v_norm": 0.061511240899562836, + "layer_12_cos_v_neg_g": 0.0037799705751240253, + "layer_1_sharpness": 0.06376660615205765, + "layer_2_sharpness": 0.017059314996004105, + "layer_3_sharpness": 0.026465466246008873, + "layer_4_sharpness": 0.015974193811416626, + "layer_5_sharpness": 0.019186437129974365, + "layer_6_sharpness": 0.02108576148748398, + "layer_7_sharpness": 0.027837632223963737, + "layer_8_sharpness": 0.030513785779476166, + "layer_9_sharpness": 0.022190507501363754, + "layer_10_sharpness": 0.017324892804026604, + "layer_11_sharpness": 0.01728224940598011, + "layer_12_sharpness": 0.03058859519660473 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_8500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..690587375d6f8b247f1b0d6a8b4a740eadc4aaf5 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.345653772354126, + "total_l1_linf_norm": 8066.3671875, + "total_spectral_norm": 1.3456538915634155, + "layer_1_update_fnorm": 0.06075911223888397, + "layer_1_max_l1_linf_norm": 0.32623255252838135, + "layer_1_max_spectral_norm": 0.007399711757898331, + "layer_2_update_fnorm": 0.05875609442591667, + "layer_2_max_l1_linf_norm": 0.3625965416431427, + "layer_2_max_spectral_norm": 0.008214184083044529, + "layer_3_update_fnorm": 0.058811817318201065, + "layer_3_max_l1_linf_norm": 0.39528006315231323, + "layer_3_max_spectral_norm": 0.008835607208311558, + "layer_4_update_fnorm": 0.06011013686656952, + "layer_4_max_l1_linf_norm": 0.37810540199279785, + "layer_4_max_spectral_norm": 0.008479995653033257, + "layer_5_update_fnorm": 0.060972075909376144, + "layer_5_max_l1_linf_norm": 0.36635899543762207, + "layer_5_max_spectral_norm": 0.008238407783210278, + "layer_6_update_fnorm": 0.06125675514340401, + "layer_6_max_l1_linf_norm": 0.315638929605484, + "layer_6_max_spectral_norm": 0.007099182344973087, + "layer_7_update_fnorm": 0.06137501820921898, + "layer_7_max_l1_linf_norm": 0.32649117708206177, + "layer_7_max_spectral_norm": 0.0073234024457633495, + "layer_8_update_fnorm": 0.061263807117938995, + "layer_8_max_l1_linf_norm": 0.30760297179222107, + "layer_8_max_spectral_norm": 0.006936566438525915, + "layer_9_update_fnorm": 0.061172593384981155, + "layer_9_max_l1_linf_norm": 0.3324575126171112, + "layer_9_max_spectral_norm": 0.007437612395733595, + "layer_10_update_fnorm": 0.06137316673994064, + "layer_10_max_l1_linf_norm": 0.34636425971984863, + "layer_10_max_spectral_norm": 0.0077897366136312485, + "layer_11_update_fnorm": 0.06122725084424019, + "layer_11_max_l1_linf_norm": 0.3639855980873108, + 
"layer_11_max_spectral_norm": 0.008155384100973606, + "layer_12_update_fnorm": 0.061252955347299576, + "layer_12_max_l1_linf_norm": 0.35496044158935547, + "layer_12_max_spectral_norm": 0.008127106353640556, + "total_sharpness": 0.0044040558859705925, + "ip_v_neg_g": 0.003956228494644165, + "cos_v_neg_g": 0.0009642779477871954, + "v_norm": 1.345653772354126, + "g_norm": 3.0489182472229004, + "hv_norm": 0.7245396375656128, + "cos_v_hv": 0.00817944761365652, + "hg_norm": 117.45918273925781, + "cos_g_hg": 0.4581172466278076, + "v_parallel_norm": 2.7526222766027786e-05, + "v_perp_norm": 1.345653772354126, + "layer_1_v_norm": 0.06075911223888397, + "layer_1_cos_v_neg_g": 0.009480757638812065, + "layer_2_v_norm": 0.05875609442591667, + "layer_2_cos_v_neg_g": 0.007632334250956774, + "layer_3_v_norm": 0.058811817318201065, + "layer_3_cos_v_neg_g": 0.006725120823830366, + "layer_4_v_norm": 0.06011013686656952, + "layer_4_cos_v_neg_g": 0.006251342594623566, + "layer_5_v_norm": 0.060972075909376144, + "layer_5_cos_v_neg_g": 0.006182693410664797, + "layer_6_v_norm": 0.06125675514340401, + "layer_6_cos_v_neg_g": 0.005934349726885557, + "layer_7_v_norm": 0.06137501820921898, + "layer_7_cos_v_neg_g": 0.006248905323445797, + "layer_8_v_norm": 0.061263807117938995, + "layer_8_cos_v_neg_g": 0.005346180405467749, + "layer_9_v_norm": 0.061172593384981155, + "layer_9_cos_v_neg_g": 0.004632030613720417, + "layer_10_v_norm": 0.06137316673994064, + "layer_10_cos_v_neg_g": 0.004516331013292074, + "layer_11_v_norm": 0.06122725084424019, + "layer_11_cos_v_neg_g": 0.004160311538726091, + "layer_12_v_norm": 0.061252955347299576, + "layer_12_cos_v_neg_g": 0.0032672712113708258, + "layer_1_sharpness": 0.11651010811328888, + "layer_2_sharpness": 0.018851501867175102, + "layer_3_sharpness": 0.035033755004405975, + "layer_4_sharpness": 0.020939407870173454, + "layer_5_sharpness": 0.022712137550115585, + "layer_6_sharpness": 0.023929210379719734, + "layer_7_sharpness": 0.028256677091121674, + "layer_8_sharpness": 0.03047848492860794, + "layer_9_sharpness": 0.02222522161900997, + "layer_10_sharpness": 0.014110163785517216, + "layer_11_sharpness": 0.015177980996668339, + "layer_12_sharpness": 0.024675536900758743 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_9000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..c42f9d91b998c3a392cb5da273a12f9c7d98527c --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.345869779586792, + "total_l1_linf_norm": 8076.8779296875, + "total_spectral_norm": 1.345869541168213, + "layer_1_update_fnorm": 0.061020638793706894, + "layer_1_max_l1_linf_norm": 0.3262726962566376, + "layer_1_max_spectral_norm": 0.007372357416898012, + "layer_2_update_fnorm": 0.05929587408900261, + "layer_2_max_l1_linf_norm": 0.37742364406585693, + "layer_2_max_spectral_norm": 0.008468721993267536, + "layer_3_update_fnorm": 0.059227023273706436, + "layer_3_max_l1_linf_norm": 0.3890465497970581, + "layer_3_max_spectral_norm": 0.008755188435316086, + "layer_4_update_fnorm": 0.06027636677026749, + "layer_4_max_l1_linf_norm": 0.3610036373138428, + "layer_4_max_spectral_norm": 0.00814712606370449, + "layer_5_update_fnorm": 0.06108938157558441, + "layer_5_max_l1_linf_norm": 0.33481839299201965, + 
"layer_5_max_spectral_norm": 0.007576704025268555, + "layer_6_update_fnorm": 0.06126326322555542, + "layer_6_max_l1_linf_norm": 0.32258129119873047, + "layer_6_max_spectral_norm": 0.0072614881210029125, + "layer_7_update_fnorm": 0.06135902181267738, + "layer_7_max_l1_linf_norm": 0.3165174126625061, + "layer_7_max_spectral_norm": 0.007131841965019703, + "layer_8_update_fnorm": 0.061337731778621674, + "layer_8_max_l1_linf_norm": 0.32221776247024536, + "layer_8_max_spectral_norm": 0.007317379582673311, + "layer_9_update_fnorm": 0.061380960047245026, + "layer_9_max_l1_linf_norm": 0.3345643877983093, + "layer_9_max_spectral_norm": 0.007508318871259689, + "layer_10_update_fnorm": 0.061479777097702026, + "layer_10_max_l1_linf_norm": 0.3584570288658142, + "layer_10_max_spectral_norm": 0.00808702316135168, + "layer_11_update_fnorm": 0.06137041002511978, + "layer_11_max_l1_linf_norm": 0.3583255708217621, + "layer_11_max_spectral_norm": 0.008108798414468765, + "layer_12_update_fnorm": 0.0612182579934597, + "layer_12_max_l1_linf_norm": 0.34362632036209106, + "layer_12_max_spectral_norm": 0.007863507606089115, + "total_sharpness": 0.0037937795277684927, + "ip_v_neg_g": 0.002422381192445755, + "cos_v_neg_g": 0.0005553110386244953, + "v_norm": 1.345869779586792, + "g_norm": 3.241179943084717, + "hv_norm": 0.6830226182937622, + "cos_v_hv": 0.007475497201085091, + "hg_norm": 400.1446533203125, + "cos_g_hg": 0.40594232082366943, + "v_parallel_norm": 3.557874515536241e-05, + "v_perp_norm": 1.345869779586792, + "layer_1_v_norm": 0.061020638793706894, + "layer_1_cos_v_neg_g": 0.0012672959128394723, + "layer_2_v_norm": 0.05929587408900261, + "layer_2_cos_v_neg_g": 0.001621257048100233, + "layer_3_v_norm": 0.05922701954841614, + "layer_3_cos_v_neg_g": 0.0023630389478057623, + "layer_4_v_norm": 0.06027636677026749, + "layer_4_cos_v_neg_g": 0.0021758030634373426, + "layer_5_v_norm": 0.06108938157558441, + "layer_5_cos_v_neg_g": 0.003255055285990238, + "layer_6_v_norm": 0.06126325950026512, + "layer_6_cos_v_neg_g": 0.0028759539127349854, + "layer_7_v_norm": 0.06135902181267738, + "layer_7_cos_v_neg_g": 0.001464507426135242, + "layer_8_v_norm": 0.061337728053331375, + "layer_8_cos_v_neg_g": 0.002725284080952406, + "layer_9_v_norm": 0.061380960047245026, + "layer_9_cos_v_neg_g": 0.003949928563088179, + "layer_10_v_norm": 0.061479777097702026, + "layer_10_cos_v_neg_g": 0.0035284303594380617, + "layer_11_v_norm": 0.06137040629982948, + "layer_11_cos_v_neg_g": 0.0033234842121601105, + "layer_12_v_norm": 0.0612182579934597, + "layer_12_cos_v_neg_g": 0.005937052890658379, + "layer_1_sharpness": 0.03682418167591095, + "layer_2_sharpness": 0.012483627535402775, + "layer_3_sharpness": 0.024434253573417664, + "layer_4_sharpness": 0.014318806119263172, + "layer_5_sharpness": 0.017852123826742172, + "layer_6_sharpness": 0.020705711096525192, + "layer_7_sharpness": 0.026631172746419907, + "layer_8_sharpness": 0.028847098350524902, + "layer_9_sharpness": 0.020260561257600784, + "layer_10_sharpness": 0.013578546233475208, + "layer_11_sharpness": 0.012694303877651691, + "layer_12_sharpness": 0.021814391016960144 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_9500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..f60bedaa3261dc137e588c4b1fc68a3aaa25ef89 --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.341294288635254, + "total_l1_linf_norm": 8045.35498046875, + "total_spectral_norm": 1.3412939310073853, + "layer_1_update_fnorm": 0.0606367290019989, + "layer_1_max_l1_linf_norm": 0.3212396502494812, + "layer_1_max_spectral_norm": 0.007260391488671303, + "layer_2_update_fnorm": 0.058533910661935806, + "layer_2_max_l1_linf_norm": 0.381094753742218, + "layer_2_max_spectral_norm": 0.008584436029195786, + "layer_3_update_fnorm": 0.058820538222789764, + "layer_3_max_l1_linf_norm": 0.3844677209854126, + "layer_3_max_spectral_norm": 0.008602989837527275, + "layer_4_update_fnorm": 0.06026288494467735, + "layer_4_max_l1_linf_norm": 0.3673478364944458, + "layer_4_max_spectral_norm": 0.008289346471428871, + "layer_5_update_fnorm": 0.06102418154478073, + "layer_5_max_l1_linf_norm": 0.3446961045265198, + "layer_5_max_spectral_norm": 0.007748899981379509, + "layer_6_update_fnorm": 0.061319995671510696, + "layer_6_max_l1_linf_norm": 0.3346256911754608, + "layer_6_max_spectral_norm": 0.0075855921022593975, + "layer_7_update_fnorm": 0.0613577738404274, + "layer_7_max_l1_linf_norm": 0.3139219880104065, + "layer_7_max_spectral_norm": 0.0071227289736270905, + "layer_8_update_fnorm": 0.06114022433757782, + "layer_8_max_l1_linf_norm": 0.2990809977054596, + "layer_8_max_spectral_norm": 0.006718133110553026, + "layer_9_update_fnorm": 0.06119420751929283, + "layer_9_max_l1_linf_norm": 0.3301313519477844, + "layer_9_max_spectral_norm": 0.007358855102211237, + "layer_10_update_fnorm": 0.061311669647693634, + "layer_10_max_l1_linf_norm": 0.3544062674045563, + "layer_10_max_spectral_norm": 0.007963997311890125, + "layer_11_update_fnorm": 0.061429522931575775, + "layer_11_max_l1_linf_norm": 0.3730820417404175, + "layer_11_max_spectral_norm": 0.008463362231850624, + "layer_12_update_fnorm": 0.06138277426362038, + "layer_12_max_l1_linf_norm": 0.3910161852836609, + "layer_12_max_spectral_norm": 0.008772458881139755, + "total_sharpness": 0.004279202315956354, + "ip_v_neg_g": 0.004531064070761204, + "cos_v_neg_g": 0.0011269922833889723, + "v_norm": 1.341294288635254, + "g_norm": 2.9974722862243652, + "hv_norm": 0.6964330077171326, + "cos_v_hv": 0.008241524919867516, + "hg_norm": 129.0118408203125, + "cos_g_hg": 0.45304155349731445, + "v_parallel_norm": 3.3128540962934494e-05, + "v_perp_norm": 1.341294288635254, + "layer_1_v_norm": 0.0606367290019989, + "layer_1_cos_v_neg_g": 0.01657906547188759, + "layer_2_v_norm": 0.058533910661935806, + "layer_2_cos_v_neg_g": 0.010872643440961838, + "layer_3_v_norm": 0.058820538222789764, + "layer_3_cos_v_neg_g": 0.0075148483738303185, + "layer_4_v_norm": 0.06026288494467735, + "layer_4_cos_v_neg_g": 0.005842121783643961, + "layer_5_v_norm": 0.06102418154478073, + "layer_5_cos_v_neg_g": 0.004837366286665201, + "layer_6_v_norm": 0.061319995671510696, + "layer_6_cos_v_neg_g": 0.005789297632873058, + "layer_7_v_norm": 0.0613577738404274, + "layer_7_cos_v_neg_g": 0.005999511107802391, + "layer_8_v_norm": 0.06114022433757782, + "layer_8_cos_v_neg_g": 0.00680141244083643, + "layer_9_v_norm": 0.06119420751929283, + "layer_9_cos_v_neg_g": 0.005999556742608547, + "layer_10_v_norm": 0.061311669647693634, + "layer_10_cos_v_neg_g": 0.005193740129470825, + "layer_11_v_norm": 0.061429526656866074, + "layer_11_cos_v_neg_g": 0.00432232953608036, + "layer_12_v_norm": 0.06138277426362038, + "layer_12_cos_v_neg_g": 0.0027028091717511415, + "layer_1_sharpness": 
0.10272114723920822, + "layer_2_sharpness": 0.02968515269458294, + "layer_3_sharpness": 0.032932884991168976, + "layer_4_sharpness": 0.01778995618224144, + "layer_5_sharpness": 0.01886950619518757, + "layer_6_sharpness": 0.02278691716492176, + "layer_7_sharpness": 0.025728534907102585, + "layer_8_sharpness": 0.026790354400873184, + "layer_9_sharpness": 0.020805783569812775, + "layer_10_sharpness": 0.014427597634494305, + "layer_11_sharpness": 0.014872836880385876, + "layer_12_sharpness": 0.02705829218029976 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/training_log.txt b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..0f079f3264186a1087cd8cf0f539134bf45393ac --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_43/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data 
prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + 
write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. 
+ """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. + # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? 
+ grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. 
Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. + loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. 
Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = <v, -g>; cos_v_neg_g = <v, -g> / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for
group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
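+ # Worked example of the gradient-accumulation arithmetic above, using the CLI defaults:
+ #   tokens_per_fwdbwd = B * T * ddp_world_size = 4 * 64 * 1 = 256
+ #   grad_accum_steps  = total_batch_size // tokens_per_fwdbwd = 256 // 256 = 1
+ # The assert above guarantees total_batch_size always divides evenly into micro-batches.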
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
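+ # Concretely, with the GPT-2 style parameter names used by write_tensors above (illustrative subset):
+ #   decayed  (dim >= 2): transformer.wte.weight, transformer.h.{i}.attn.c_attn.weight, transformer.h.{i}.mlp.c_fc.weight
+ #   no decay (dim < 2):  transformer.h.{i}.ln_1.weight, transformer.h.{i}.attn.c_attn.bias, transformer.ln_f.bias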
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + 
device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
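+ # e.g. with the default --num_iterations 10 the loop runs steps 0..10; the extra step 10 only
+ # evaluates / samples via the branches above and then exits without doing another parameter update.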
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
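+ # The clone taken below is theta before the step; after the optimizer update further down,
+ # last_training_update = theta_before - theta_after is the actual applied step v_t, which
+ # calculate_comprehensive_sharpness consumes as update_direction_v, reporting e.g.
+ # total_update_fnorm = ||v_t||_F and total_sharpness = <v_t, H v_t> / ||v_t||^2 along that direction.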
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026603 +step:0 train loss:11.019231 +step:1 train loss:11.024735 +step:2 train loss:11.018315 +step:3 train loss:11.016232 +step:4 train loss:11.010811 +step:5 train loss:11.004906 +step:6 train loss:10.994955 +step:7 train loss:10.987457 +step:8 train loss:10.976646 +step:9 train loss:10.965155 +step:10 train loss:10.950180 +step:11 train loss:10.939948 +step:12 train loss:10.919463 +step:13 train loss:10.905008 +step:14 train loss:10.882977 +step:15 train loss:10.865030 +step:16 train loss:10.843892 +step:17 train loss:10.823960 +step:18 train loss:10.798859 +step:19 train loss:10.774637 +step:20 train loss:10.745078 +step:21 train loss:10.721909 +step:22 train loss:10.687239 +step:23 train loss:10.662719 +step:24 train loss:10.624106 +step:25 train loss:10.600592 +step:26 train loss:10.559874 +step:27 train loss:10.525499 +step:28 train loss:10.494775 +step:29 train loss:10.458089 +step:30 train loss:10.419309 +step:31 train loss:10.375748 +step:32 train loss:10.331464 +step:33 train loss:10.294003 +step:34 train loss:10.257481 +step:35 train loss:10.204177 +step:36 train loss:10.162207 +step:37 train loss:10.105402 
+step:38 train loss:10.069149 +step:39 train loss:10.013633 +step:40 train loss:9.968578 +step:41 train loss:9.909510 +step:42 train loss:9.873583 +step:43 train loss:9.798204 +step:44 train loss:9.757635 +step:45 train loss:9.695083 +step:46 train loss:9.654811 +step:47 train loss:9.592978 +step:48 train loss:9.535313 +step:49 train loss:9.467804 +step:50 train loss:9.405265 +step:51 train loss:9.338597 +step:52 train loss:9.297013 +step:53 train loss:9.231430 +step:54 train loss:9.174731 +step:55 train loss:9.102020 +step:56 train loss:9.041647 +step:57 train loss:8.991768 +step:58 train loss:8.910547 +step:59 train loss:8.857391 +step:60 train loss:8.794086 +step:61 train loss:8.737688 +step:62 train loss:8.675218 +step:63 train loss:8.649654 +step:64 train loss:8.540017 +step:65 train loss:8.488805 +step:66 train loss:8.444192 +step:67 train loss:8.389709 +step:68 train loss:8.331861 +step:69 train loss:8.263840 +step:70 train loss:8.208496 +step:71 train loss:8.134136 +step:72 train loss:8.114820 +step:73 train loss:8.046495 +step:74 train loss:8.021479 +step:75 train loss:7.943197 +step:76 train loss:7.963980 +step:77 train loss:7.876443 +step:78 train loss:7.736265 +step:79 train loss:7.776551 +step:80 train loss:7.744184 +step:81 train loss:7.755118 +step:82 train loss:7.724107 +step:83 train loss:7.673529 +step:84 train loss:7.638307 +step:85 train loss:7.605921 +step:86 train loss:7.581257 +step:87 train loss:7.553988 +step:88 train loss:7.554807 +step:89 train loss:7.519719 +step:90 train loss:7.554834 +step:91 train loss:7.557792 +step:92 train loss:7.552929 +step:93 train loss:7.505594 +step:94 train loss:7.487934 +step:95 train loss:7.444523 +step:96 train loss:7.522715 +step:97 train loss:7.473468 +step:98 train loss:7.464965 +step:99 train loss:7.428155 +step:100 train loss:7.491318 +step:101 train loss:7.373684 +step:102 train loss:7.361803 +step:103 train loss:7.345960 +step:104 train loss:7.380501 +step:105 train loss:7.425108 +step:106 train loss:7.361501 +step:107 train loss:7.322057 +step:108 train loss:7.327222 +step:109 train loss:7.354668 +step:110 train loss:7.278187 +step:111 train loss:7.282719 +step:112 train loss:7.265587 +step:113 train loss:7.221465 +step:114 train loss:7.275703 +step:115 train loss:7.211420 +step:116 train loss:7.184349 +step:117 train loss:7.117716 +step:118 train loss:7.176182 +step:119 train loss:7.113448 +step:120 train loss:7.116930 +step:121 train loss:7.033597 +step:122 train loss:7.112236 +step:123 train loss:7.027349 +step:124 train loss:7.010715 +step:125 train loss:6.974749 +step:126 train loss:7.056285 +step:127 train loss:6.969008 +step:128 train loss:6.994620 +step:129 train loss:6.962997 +step:130 train loss:7.005975 +step:131 train loss:6.935537 +step:132 train loss:6.842960 +step:133 train loss:6.908257 +step:134 train loss:6.862650 +step:135 train loss:6.771880 +step:136 train loss:6.801610 +step:137 train loss:6.795937 +step:138 train loss:6.723796 +step:139 train loss:6.798335 +step:140 train loss:6.700123 +step:141 train loss:6.793843 +step:142 train loss:6.733228 +step:143 train loss:6.737950 +step:144 train loss:6.701897 +step:145 train loss:6.632520 +step:146 train loss:6.638736 +step:147 train loss:6.690093 +step:148 train loss:6.689727 +step:149 train loss:6.630073 +step:150 train loss:6.637619 +step:151 train loss:6.540400 +step:152 train loss:6.573645 +step:153 train loss:6.546754 +step:154 train loss:6.627958 +step:155 train loss:6.595223 +step:156 train loss:6.625754 +step:157 train loss:6.520756 +step:158 
train loss:6.500699 +step:159 train loss:6.536511 +step:160 train loss:6.510296 +step:161 train loss:6.500055 +step:162 train loss:6.470693 +step:163 train loss:6.484112 +step:164 train loss:6.494946 +step:165 train loss:6.495717 +step:166 train loss:6.446188 +step:167 train loss:6.444027 +step:168 train loss:6.414441 +step:169 train loss:6.363801 +step:170 train loss:6.334330 +step:171 train loss:6.443296 +step:172 train loss:6.371969 +step:173 train loss:6.411995 +step:174 train loss:6.415717 +step:175 train loss:6.376712 +step:176 train loss:6.325871 +step:177 train loss:6.371185 +step:178 train loss:6.376064 +step:179 train loss:6.327588 +step:180 train loss:6.312634 +step:181 train loss:6.346103 +step:182 train loss:6.275743 +step:183 train loss:6.367960 +step:184 train loss:6.334597 +step:185 train loss:6.247943 +step:186 train loss:6.396499 +step:187 train loss:6.329583 +step:188 train loss:6.150600 +step:189 train loss:6.307583 +step:190 train loss:6.299177 +step:191 train loss:6.219942 +step:192 train loss:6.130644 +step:193 train loss:6.281026 +step:194 train loss:6.303142 +step:195 train loss:6.293632 +step:196 train loss:6.265094 +step:197 train loss:6.261992 +step:198 train loss:6.200657 +step:199 train loss:6.281760 +step:200 train loss:6.315210 +step:201 train loss:6.247628 +step:202 train loss:6.250268 +step:203 train loss:6.206905 +step:204 train loss:6.244771 +step:205 train loss:6.097297 +step:206 train loss:6.233616 +step:207 train loss:6.203285 +step:208 train loss:6.146672 +step:209 train loss:6.143551 +step:210 train loss:6.142097 +step:211 train loss:6.214893 +step:212 train loss:6.164345 +step:213 train loss:6.178090 +step:214 train loss:6.160224 +step:215 train loss:6.182670 +step:216 train loss:6.126705 +step:217 train loss:6.130810 +step:218 train loss:6.107502 +step:219 train loss:6.084798 +step:220 train loss:6.130335 +step:221 train loss:6.082902 +step:222 train loss:6.123311 +step:223 train loss:6.140779 +step:224 train loss:6.129517 +step:225 train loss:6.066830 +step:226 train loss:6.070210 +step:227 train loss:6.133522 +step:228 train loss:6.098330 +step:229 train loss:6.171500 +step:230 train loss:6.036695 +step:231 train loss:6.091779 +step:232 train loss:6.076061 +step:233 train loss:6.047544 +step:234 train loss:6.044756 +step:235 train loss:6.125645 +step:236 train loss:6.072594 +step:237 train loss:6.102182 +step:238 train loss:6.105458 +step:239 train loss:6.013076 +step:240 train loss:6.083868 +step:241 train loss:6.118982 +step:242 train loss:6.099601 +step:243 train loss:6.009931 +step:244 train loss:6.038765 +step:245 train loss:6.022056 +step:246 train loss:6.018365 +step:247 train loss:6.010921 +step:248 train loss:5.969300 +step:249 train loss:6.027124 +step:250 validation loss:6.020007 +step:250 train loss:5.995000 +step:251 train loss:6.031392 +step:252 train loss:5.988236 +step:253 train loss:5.991181 +step:254 train loss:5.951815 +step:255 train loss:5.991140 +step:256 train loss:5.989470 +step:257 train loss:6.037974 +step:258 train loss:5.935277 +step:259 train loss:5.961125 +step:260 train loss:5.934062 +step:261 train loss:5.932766 +step:262 train loss:5.999704 +step:263 train loss:5.960494 +step:264 train loss:5.927128 +step:265 train loss:5.954226 +step:266 train loss:5.916222 +step:267 train loss:5.950707 +step:268 train loss:5.905081 +step:269 train loss:5.932317 +step:270 train loss:5.948431 +step:271 train loss:5.936333 +step:272 train loss:5.892939 +step:273 train loss:5.964429 +step:274 train loss:5.874728 +step:275 train 
loss:5.913724 +step:276 train loss:5.886925 +step:277 train loss:5.887211 +step:278 train loss:5.864058 +step:279 train loss:5.833899 +step:280 train loss:5.909206 +step:281 train loss:5.985699 +step:282 train loss:5.865832 +step:283 train loss:5.879034 +step:284 train loss:5.844643 +step:285 train loss:5.895650 +step:286 train loss:5.871652 +step:287 train loss:5.847653 +step:288 train loss:5.826914 +step:289 train loss:5.852562 +step:290 train loss:5.908474 +step:291 train loss:5.834278 +step:292 train loss:5.890879 +step:293 train loss:5.818510 +step:294 train loss:5.929598 +step:295 train loss:5.824515 +step:296 train loss:5.885139 +step:297 train loss:5.918228 +step:298 train loss:5.807675 +step:299 train loss:5.881785 +step:300 train loss:5.801909 +step:301 train loss:5.834864 +step:302 train loss:5.809978 +step:303 train loss:5.824821 +step:304 train loss:5.855574 +step:305 train loss:5.776592 +step:306 train loss:5.800211 +step:307 train loss:5.826447 +step:308 train loss:5.737041 +step:309 train loss:5.874305 +step:310 train loss:5.837175 +step:311 train loss:5.817231 +step:312 train loss:5.801711 +step:313 train loss:5.827101 +step:314 train loss:5.806613 +step:315 train loss:5.768621 +step:316 train loss:5.766463 +step:317 train loss:5.728506 +step:318 train loss:5.730103 +step:319 train loss:5.804926 +step:320 train loss:5.724104 +step:321 train loss:5.782079 +step:322 train loss:5.775717 +step:323 train loss:5.839627 +step:324 train loss:5.781889 +step:325 train loss:5.803891 +step:326 train loss:5.809815 +step:327 train loss:5.785342 +step:328 train loss:5.760763 +step:329 train loss:5.776894 +step:330 train loss:5.707363 +step:331 train loss:5.736480 +step:332 train loss:5.722613 +step:333 train loss:5.660031 +step:334 train loss:5.755105 +step:335 train loss:5.798287 +step:336 train loss:5.920121 +step:337 train loss:5.817526 +step:338 train loss:5.730813 +step:339 train loss:5.687054 +step:340 train loss:5.696202 +step:341 train loss:5.687854 +step:342 train loss:5.754877 +step:343 train loss:5.728819 +step:344 train loss:5.685544 +step:345 train loss:5.661315 +step:346 train loss:5.705753 +step:347 train loss:5.643436 +step:348 train loss:5.653120 +step:349 train loss:5.590527 +step:350 train loss:5.627229 +step:351 train loss:5.695796 +step:352 train loss:5.651986 +step:353 train loss:5.680420 +step:354 train loss:5.635612 +step:355 train loss:5.683823 +step:356 train loss:5.653012 +step:357 train loss:5.723623 +step:358 train loss:5.751221 +step:359 train loss:5.584384 +step:360 train loss:5.703438 +step:361 train loss:5.686773 +step:362 train loss:5.667274 +step:363 train loss:5.620634 +step:364 train loss:5.740849 +step:365 train loss:5.672269 +step:366 train loss:5.643749 +step:367 train loss:5.668502 +step:368 train loss:5.638547 +step:369 train loss:5.618223 +step:370 train loss:5.671360 +step:371 train loss:5.609275 +step:372 train loss:5.678171 +step:373 train loss:5.627647 +step:374 train loss:5.617700 +step:375 train loss:5.646459 +step:376 train loss:5.631663 +step:377 train loss:5.526816 +step:378 train loss:5.607062 +step:379 train loss:5.657858 +step:380 train loss:5.587035 +step:381 train loss:5.646064 +step:382 train loss:5.637226 +step:383 train loss:5.611064 +step:384 train loss:5.585575 +step:385 train loss:5.578123 +step:386 train loss:5.612060 +step:387 train loss:5.611519 +step:388 train loss:5.580713 +step:389 train loss:5.595238 +step:390 train loss:5.580389 +step:391 train loss:5.586313 +step:392 train loss:5.572303 +step:393 train loss:5.567935 
+step:394 train loss:5.615057 +step:395 train loss:5.546851 +step:396 train loss:5.502635 +step:397 train loss:5.583053 +step:398 train loss:5.568362 +step:399 train loss:5.575215 +step:400 train loss:5.535647 +step:401 train loss:5.573127 +step:402 train loss:5.554780 +step:403 train loss:5.551716 +step:404 train loss:5.535191 +step:405 train loss:5.531170 +step:406 train loss:5.564767 +step:407 train loss:5.550850 +step:408 train loss:5.614025 +step:409 train loss:5.541315 +step:410 train loss:5.515747 +step:411 train loss:5.507025 +step:412 train loss:5.591809 +step:413 train loss:5.481044 +step:414 train loss:5.560757 +step:415 train loss:5.519250 +step:416 train loss:5.533264 +step:417 train loss:5.554775 +step:418 train loss:5.502742 +step:419 train loss:5.491139 +step:420 train loss:5.487208 +step:421 train loss:5.476147 +step:422 train loss:5.477377 +step:423 train loss:5.483468 +step:424 train loss:5.452730 +step:425 train loss:5.518220 +step:426 train loss:5.512684 +step:427 train loss:5.443097 +step:428 train loss:5.510825 +step:429 train loss:5.423726 +step:430 train loss:5.463033 +step:431 train loss:5.490971 +step:432 train loss:5.511775 +step:433 train loss:5.498909 +step:434 train loss:5.459141 +step:435 train loss:5.513493 +step:436 train loss:5.532963 +step:437 train loss:5.491135 +step:438 train loss:5.447226 +step:439 train loss:5.444715 +step:440 train loss:5.487348 +step:441 train loss:5.432096 +step:442 train loss:5.434789 +step:443 train loss:5.448751 +step:444 train loss:5.482403 +step:445 train loss:5.483468 +step:446 train loss:5.423751 +step:447 train loss:5.438614 +step:448 train loss:5.497557 +step:449 train loss:5.451196 +step:450 train loss:5.443315 +step:451 train loss:5.426085 +step:452 train loss:5.488847 +step:453 train loss:5.420843 +step:454 train loss:5.384759 +step:455 train loss:5.471022 +step:456 train loss:5.433642 +step:457 train loss:5.408062 +step:458 train loss:5.431355 +step:459 train loss:5.380785 +step:460 train loss:5.479963 +step:461 train loss:5.436514 +step:462 train loss:5.338398 +step:463 train loss:5.394954 +step:464 train loss:5.447906 +step:465 train loss:5.406068 +step:466 train loss:5.430471 +step:467 train loss:5.388844 +step:468 train loss:5.436785 +step:469 train loss:5.408883 +step:470 train loss:5.369368 +step:471 train loss:5.464394 +step:472 train loss:5.346179 +step:473 train loss:5.424461 +step:474 train loss:5.406222 +step:475 train loss:5.423697 +step:476 train loss:5.397898 +step:477 train loss:5.340361 +step:478 train loss:5.359834 +step:479 train loss:5.358295 +step:480 train loss:5.378117 +step:481 train loss:5.382380 +step:482 train loss:5.323139 +step:483 train loss:5.390405 +step:484 train loss:5.347180 +step:485 train loss:5.326474 +step:486 train loss:5.384277 +step:487 train loss:5.357728 +step:488 train loss:5.353239 +step:489 train loss:5.348168 +step:490 train loss:5.327811 +step:491 train loss:5.344940 +step:492 train loss:5.346372 +step:493 train loss:5.347581 +step:494 train loss:5.355665 +step:495 train loss:5.307131 +step:496 train loss:5.395967 +step:497 train loss:5.281092 +step:498 train loss:5.379648 +step:499 train loss:5.355829 +step:500 validation loss:5.330107 total_sharp:3.2562e-02 L1_sharp:5.1824e-01 L2_sharp:2.8538e-01 L3_sharp:2.5327e-01 L4_sharp:2.1429e-01 L5_sharp:1.6060e-01 L6_sharp:1.0997e-01 L7_sharp:8.1422e-02 L8_sharp:6.5834e-02 L9_sharp:5.2619e-02 L10_sharp:4.3854e-02 L11_sharp:3.5427e-02 L12_sharp:2.5598e-02 total_fnorm:9.8324e-01 total_l1_linf:5.8846e+03 total_spectral:9.8324e-01 
L1_fnorm:4.3914e-02 L2_fnorm:4.2671e-02 L3_fnorm:4.1788e-02 L4_fnorm:4.1515e-02 L5_fnorm:4.1439e-02 L6_fnorm:4.1767e-02 L7_fnorm:4.2087e-02 L8_fnorm:4.2317e-02 L9_fnorm:4.2539e-02 L10_fnorm:4.2760e-02 L11_fnorm:4.2820e-02 L12_fnorm:4.2961e-02 L1_l1linf:1.1912e-01 L2_l1linf:1.4489e-01 L3_l1linf:1.5339e-01 L4_l1linf:1.6046e-01 L5_l1linf:1.5925e-01 L6_l1linf:1.6813e-01 L7_l1linf:1.8935e-01 L8_l1linf:1.6029e-01 L9_l1linf:1.6610e-01 L10_l1linf:1.5854e-01 L11_l1linf:1.6024e-01 L12_l1linf:1.6139e-01 L1_spectral:2.7237e-03 L2_spectral:3.2753e-03 L3_spectral:3.4748e-03 L4_spectral:6.3052e-03 L5_spectral:6.2847e-03 L6_spectral:7.2241e-03 L7_spectral:8.0793e-03 L8_spectral:6.7389e-03 L9_spectral:6.6133e-03 L10_spectral:6.6780e-03 L11_spectral:4.2295e-03 L12_spectral:4.1548e-03 ip_v_neg_g:1.4239e-02 cos_v_neg_g:1.8868e-03 v_norm:9.8324e-01 g_norm:7.6757e+00 hv_norm:5.5859e+00 cos_v_hv:5.7316e-03 hg_norm:9.0078e+03 cos_g_hg:6.8114e-01 v_par:1.0245e-04 v_perp:9.8324e-01 L1_cos_v_neg_g:1.4513e-02 L1_v_norm:4.3914e-02 L2_cos_v_neg_g:1.4656e-02 L2_v_norm:4.2671e-02 L3_cos_v_neg_g:1.6161e-02 L3_v_norm:4.1788e-02 L4_cos_v_neg_g:1.6563e-02 L4_v_norm:4.1515e-02 L5_cos_v_neg_g:1.6178e-02 L5_v_norm:4.1439e-02 L6_cos_v_neg_g:1.4041e-02 L6_v_norm:4.1767e-02 L7_cos_v_neg_g:1.3223e-02 L7_v_norm:4.2087e-02 L8_cos_v_neg_g:1.2187e-02 L8_v_norm:4.2317e-02 L9_cos_v_neg_g:1.1629e-02 L9_v_norm:4.2539e-02 L10_cos_v_neg_g:1.0447e-02 L10_v_norm:4.2760e-02 L11_cos_v_neg_g:9.6241e-03 L11_v_norm:4.2820e-02 L12_cos_v_neg_g:6.4736e-03 L12_v_norm:4.2961e-02 +step:500 train loss:5.356783 +step:501 train loss:5.312751 +step:502 train loss:5.353671 +step:503 train loss:5.280728 +step:504 train loss:5.371505 +step:505 train loss:5.308244 +step:506 train loss:5.307669 +step:507 train loss:5.318729 +step:508 train loss:5.346843 +step:509 train loss:5.345504 +step:510 train loss:5.277702 +step:511 train loss:5.271829 +step:512 train loss:5.267664 +step:513 train loss:5.296409 +step:514 train loss:5.362726 +step:515 train loss:5.306318 +step:516 train loss:5.376793 +step:517 train loss:5.301762 +step:518 train loss:5.289404 +step:519 train loss:5.343533 +step:520 train loss:5.293093 +step:521 train loss:5.283324 +step:522 train loss:5.304925 +step:523 train loss:5.302344 +step:524 train loss:5.252339 +step:525 train loss:5.258994 +step:526 train loss:5.272094 +step:527 train loss:5.271746 +step:528 train loss:5.271202 +step:529 train loss:5.298205 +step:530 train loss:5.256299 +step:531 train loss:5.286722 +step:532 train loss:5.251335 +step:533 train loss:5.213750 +step:534 train loss:5.287768 +step:535 train loss:5.274749 +step:536 train loss:5.337938 +step:537 train loss:5.227232 +step:538 train loss:5.191310 +step:539 train loss:5.284407 +step:540 train loss:5.320483 +step:541 train loss:5.225492 +step:542 train loss:5.252205 +step:543 train loss:5.264112 +step:544 train loss:5.263690 +step:545 train loss:5.246790 +step:546 train loss:5.208318 +step:547 train loss:5.228768 +step:548 train loss:5.188716 +step:549 train loss:5.237600 +step:550 train loss:5.214390 +step:551 train loss:5.223515 +step:552 train loss:5.317054 +step:553 train loss:5.279723 +step:554 train loss:5.222086 +step:555 train loss:5.284377 +step:556 train loss:5.232258 +step:557 train loss:5.205556 +step:558 train loss:5.184359 +step:559 train loss:5.233490 +step:560 train loss:5.283988 +step:561 train loss:5.167965 +step:562 train loss:5.156672 +step:563 train loss:5.228467 +step:564 train loss:5.197516 +step:565 train loss:5.213265 +step:566 train loss:5.220074 
+step:567 train loss:5.210519 +step:568 train loss:5.238211 +step:569 train loss:5.217219 +step:570 train loss:5.149825 +step:571 train loss:5.181821 +step:572 train loss:5.176745 +step:573 train loss:5.173105 +step:574 train loss:5.218994 +step:575 train loss:5.174712 +step:576 train loss:5.185780 +step:577 train loss:5.199991 +step:578 train loss:5.176855 +step:579 train loss:5.219469 +step:580 train loss:5.155274 +step:581 train loss:5.212829 +step:582 train loss:5.174712 +step:583 train loss:5.189551 +step:584 train loss:5.173077 +step:585 train loss:5.161017 +step:586 train loss:5.154778 +step:587 train loss:5.228881 +step:588 train loss:5.146418 +step:589 train loss:5.201160 +step:590 train loss:5.211989 +step:591 train loss:5.146908 +step:592 train loss:5.129214 +step:593 train loss:5.148430 +step:594 train loss:5.125359 +step:595 train loss:5.171961 +step:596 train loss:5.149292 +step:597 train loss:5.178594 +step:598 train loss:5.147900 +step:599 train loss:5.147789 +step:600 train loss:5.121852 +step:601 train loss:5.106526 +step:602 train loss:5.110430 +step:603 train loss:5.160964 +step:604 train loss:5.141896 +step:605 train loss:5.173660 +step:606 train loss:5.111972 +step:607 train loss:5.104239 +step:608 train loss:5.101243 +step:609 train loss:5.073801 +step:610 train loss:5.094142 +step:611 train loss:5.101954 +step:612 train loss:5.140287 +step:613 train loss:5.061693 +step:614 train loss:5.102229 +step:615 train loss:5.155225 +step:616 train loss:5.077435 +step:617 train loss:5.110009 +step:618 train loss:5.078316 +step:619 train loss:5.114757 +step:620 train loss:5.141016 +step:621 train loss:5.070560 +step:622 train loss:5.132623 +step:623 train loss:5.130297 +step:624 train loss:5.111948 +step:625 train loss:5.108840 +step:626 train loss:5.112209 +step:627 train loss:5.085190 +step:628 train loss:5.093400 +step:629 train loss:5.032949 +step:630 train loss:5.063952 +step:631 train loss:5.054890 +step:632 train loss:5.066143 +step:633 train loss:5.090718 +step:634 train loss:5.085038 +step:635 train loss:5.023102 +step:636 train loss:5.108594 +step:637 train loss:5.026320 +step:638 train loss:4.960488 +step:639 train loss:5.084850 +step:640 train loss:5.032686 +step:641 train loss:5.061199 +step:642 train loss:5.099272 +step:643 train loss:5.001828 +step:644 train loss:5.085988 +step:645 train loss:5.049637 +step:646 train loss:5.032321 +step:647 train loss:5.045856 +step:648 train loss:5.142953 +step:649 train loss:5.042445 +step:650 train loss:5.105316 +step:651 train loss:4.988074 +step:652 train loss:5.017846 +step:653 train loss:5.010485 +step:654 train loss:5.011794 +step:655 train loss:5.052078 +step:656 train loss:4.995077 +step:657 train loss:5.052882 +step:658 train loss:4.979291 +step:659 train loss:5.058359 +step:660 train loss:5.017950 +step:661 train loss:5.059756 +step:662 train loss:5.050797 +step:663 train loss:5.046858 +step:664 train loss:4.956039 +step:665 train loss:4.978265 +step:666 train loss:4.971679 +step:667 train loss:5.030893 +step:668 train loss:5.004937 +step:669 train loss:4.986230 +step:670 train loss:5.004685 +step:671 train loss:4.981621 +step:672 train loss:4.946699 +step:673 train loss:5.037339 +step:674 train loss:5.043829 +step:675 train loss:4.939856 +step:676 train loss:5.026826 +step:677 train loss:4.959218 +step:678 train loss:4.947364 +step:679 train loss:5.002301 +step:680 train loss:4.954621 +step:681 train loss:5.002726 +step:682 train loss:4.914642 +step:683 train loss:4.975673 +step:684 train loss:5.011324 +step:685 
train loss:4.942325 +step:686 train loss:5.050460 +step:687 train loss:4.975091 +step:688 train loss:4.911141 +step:689 train loss:4.959342 +step:690 train loss:4.927334 +step:691 train loss:4.940225 +step:692 train loss:4.951929 +step:693 train loss:4.948050 +step:694 train loss:4.939645 +step:695 train loss:4.893673 +step:696 train loss:4.860511 +step:697 train loss:4.981074 +step:698 train loss:4.906792 +step:699 train loss:4.910547 +step:700 train loss:4.990699 +step:701 train loss:4.888059 +step:702 train loss:4.965239 +step:703 train loss:4.893808 +step:704 train loss:4.853610 +step:705 train loss:4.900240 +step:706 train loss:4.789418 +step:707 train loss:4.853433 +step:708 train loss:4.943395 +step:709 train loss:4.906407 +step:710 train loss:4.874133 +step:711 train loss:4.935431 +step:712 train loss:4.885491 +step:713 train loss:4.841764 +step:714 train loss:4.933929 +step:715 train loss:4.837416 +step:716 train loss:4.981233 +step:717 train loss:4.858097 +step:718 train loss:4.928821 +step:719 train loss:4.880689 +step:720 train loss:4.859715 +step:721 train loss:4.880318 +step:722 train loss:4.893885 +step:723 train loss:4.930887 +step:724 train loss:4.903355 +step:725 train loss:4.872603 +step:726 train loss:4.856833 +step:727 train loss:4.888075 +step:728 train loss:4.874478 +step:729 train loss:4.803657 +step:730 train loss:4.902915 +step:731 train loss:4.921611 +step:732 train loss:4.893146 +step:733 train loss:4.869210 +step:734 train loss:4.862517 +step:735 train loss:4.940045 +step:736 train loss:4.877875 +step:737 train loss:4.869197 +step:738 train loss:4.899642 +step:739 train loss:4.841004 +step:740 train loss:4.859156 +step:741 train loss:4.933746 +step:742 train loss:4.829827 +step:743 train loss:4.813090 +step:744 train loss:4.868540 +step:745 train loss:4.808212 +step:746 train loss:4.812490 +step:747 train loss:4.836924 +step:748 train loss:4.799565 +step:749 train loss:4.840182 +step:750 validation loss:4.811642 +step:750 train loss:4.793537 +step:751 train loss:4.810527 +step:752 train loss:4.754850 +step:753 train loss:4.809259 +step:754 train loss:4.815764 +step:755 train loss:4.862493 +step:756 train loss:4.840479 +step:757 train loss:4.938230 +step:758 train loss:4.808542 +step:759 train loss:4.812334 +step:760 train loss:4.781981 +step:761 train loss:4.827229 +step:762 train loss:4.794825 +step:763 train loss:4.795451 +step:764 train loss:4.774440 +step:765 train loss:4.771646 +step:766 train loss:4.862029 +step:767 train loss:4.955555 +step:768 train loss:4.782587 +step:769 train loss:4.818297 +step:770 train loss:4.832779 +step:771 train loss:4.892824 +step:772 train loss:4.822108 +step:773 train loss:4.762854 +step:774 train loss:4.814366 +step:775 train loss:4.783782 +step:776 train loss:4.793625 +step:777 train loss:4.757079 +step:778 train loss:4.761052 +step:779 train loss:4.741268 +step:780 train loss:4.800700 +step:781 train loss:4.730762 +step:782 train loss:4.760315 +step:783 train loss:4.737844 +step:784 train loss:4.750541 +step:785 train loss:4.722034 +step:786 train loss:4.750684 +step:787 train loss:4.703359 +step:788 train loss:4.764914 +step:789 train loss:4.760480 +step:790 train loss:4.715796 +step:791 train loss:4.799731 +step:792 train loss:4.808559 +step:793 train loss:4.771698 +step:794 train loss:4.763435 +step:795 train loss:4.720103 +step:796 train loss:4.983619 +step:797 train loss:4.753689 +step:798 train loss:4.735021 +step:799 train loss:4.745524 +step:800 train loss:4.838422 +step:801 train loss:4.738626 +step:802 train 
loss:4.854692 +step:803 train loss:4.758552 +step:804 train loss:4.694778 +step:805 train loss:4.762538 +step:806 train loss:4.666435 +step:807 train loss:4.724412 +step:808 train loss:4.733000 +step:809 train loss:4.698414 +step:810 train loss:4.672024 +step:811 train loss:4.772987 +step:812 train loss:4.726734 +step:813 train loss:4.740767 +step:814 train loss:4.791288 +step:815 train loss:4.755909 +step:816 train loss:4.683775 +step:817 train loss:4.719665 +step:818 train loss:4.690046 +step:819 train loss:4.685503 +step:820 train loss:4.694378 +step:821 train loss:4.642468 +step:822 train loss:4.632442 +step:823 train loss:4.716363 +step:824 train loss:4.626936 +step:825 train loss:4.613265 +step:826 train loss:4.664793 +step:827 train loss:4.598341 +step:828 train loss:4.668155 +step:829 train loss:4.671748 +step:830 train loss:4.681940 +step:831 train loss:4.703708 +step:832 train loss:4.759292 +step:833 train loss:4.711668 +step:834 train loss:4.689600 +step:835 train loss:4.667445 +step:836 train loss:4.655430 +step:837 train loss:4.626976 +step:838 train loss:4.626994 +step:839 train loss:4.632399 +step:840 train loss:4.673537 +step:841 train loss:4.647124 +step:842 train loss:4.659768 +step:843 train loss:4.653468 +step:844 train loss:4.627629 +step:845 train loss:4.613608 +step:846 train loss:4.692270 +step:847 train loss:4.658673 +step:848 train loss:4.615962 +step:849 train loss:4.668101 +step:850 train loss:4.674444 +step:851 train loss:4.635540 +step:852 train loss:4.707303 +step:853 train loss:4.597269 +step:854 train loss:4.644882 +step:855 train loss:4.631536 +step:856 train loss:4.583979 +step:857 train loss:4.628953 +step:858 train loss:4.660839 +step:859 train loss:4.618080 +step:860 train loss:4.618958 +step:861 train loss:4.656617 +step:862 train loss:4.598243 +step:863 train loss:4.616422 +step:864 train loss:4.596311 +step:865 train loss:4.618104 +step:866 train loss:4.631181 +step:867 train loss:4.717324 +step:868 train loss:4.600864 +step:869 train loss:4.621212 +step:870 train loss:4.575087 +step:871 train loss:4.582970 +step:872 train loss:4.606497 +step:873 train loss:4.599006 +step:874 train loss:4.601966 +step:875 train loss:4.517337 +step:876 train loss:4.629045 +step:877 train loss:4.535823 +step:878 train loss:4.641227 +step:879 train loss:4.569041 +step:880 train loss:4.652332 +step:881 train loss:4.598448 +step:882 train loss:4.556302 +step:883 train loss:4.594820 +step:884 train loss:4.605807 +step:885 train loss:4.559025 +step:886 train loss:4.541299 +step:887 train loss:4.573329 +step:888 train loss:4.672935 +step:889 train loss:4.611185 +step:890 train loss:4.557559 +step:891 train loss:4.517385 +step:892 train loss:4.493349 +step:893 train loss:4.562366 +step:894 train loss:4.540133 +step:895 train loss:4.526506 +step:896 train loss:4.610246 +step:897 train loss:4.535260 +step:898 train loss:4.551390 +step:899 train loss:4.557003 +step:900 train loss:4.611917 +step:901 train loss:4.518471 +step:902 train loss:4.558106 +step:903 train loss:4.635050 +step:904 train loss:4.651311 +step:905 train loss:4.535681 +step:906 train loss:4.551190 +step:907 train loss:4.560725 +step:908 train loss:4.574659 +step:909 train loss:4.530917 +step:910 train loss:4.559362 +step:911 train loss:4.678213 +step:912 train loss:4.489003 +step:913 train loss:4.550424 +step:914 train loss:4.523427 +step:915 train loss:4.537382 +step:916 train loss:4.610454 +step:917 train loss:4.545116 +step:918 train loss:4.620955 +step:919 train loss:4.693692 +step:920 train loss:4.461575 
+step:921 train loss:4.559970 +step:922 train loss:4.534418 +step:923 train loss:4.475899 +step:924 train loss:4.507025 +step:925 train loss:4.462745 +step:926 train loss:4.557569 +step:927 train loss:4.476575 +step:928 train loss:4.536862 +step:929 train loss:4.513403 +step:930 train loss:4.513919 +step:931 train loss:4.557315 +step:932 train loss:4.509511 +step:933 train loss:4.523360 +step:934 train loss:4.569011 +step:935 train loss:4.547029 +step:936 train loss:4.528470 +step:937 train loss:4.534189 +step:938 train loss:4.532558 +step:939 train loss:4.418305 +step:940 train loss:4.515410 +step:941 train loss:4.453640 +step:942 train loss:4.431486 +step:943 train loss:4.526256 +step:944 train loss:4.484787 +step:945 train loss:4.491548 +step:946 train loss:4.525435 +step:947 train loss:4.648829 +step:948 train loss:4.459352 +step:949 train loss:4.515439 +step:950 train loss:4.450928 +step:951 train loss:4.476739 +step:952 train loss:4.541516 +step:953 train loss:4.479887 +step:954 train loss:4.497624 +step:955 train loss:4.434669 +step:956 train loss:4.457385 +step:957 train loss:4.453326 +step:958 train loss:4.533945 +step:959 train loss:4.473335 +step:960 train loss:4.559152 +step:961 train loss:4.521216 +step:962 train loss:4.466103 +step:963 train loss:4.445897 +step:964 train loss:4.484295 +step:965 train loss:4.406071 +step:966 train loss:4.418320 +step:967 train loss:4.473081 +step:968 train loss:4.467587 +step:969 train loss:4.420675 +step:970 train loss:4.491437 +step:971 train loss:4.456310 +step:972 train loss:4.388748 +step:973 train loss:4.475442 +step:974 train loss:4.420423 +step:975 train loss:4.509751 +step:976 train loss:4.454751 +step:977 train loss:4.447772 +step:978 train loss:4.458592 +step:979 train loss:4.434451 +step:980 train loss:4.437338 +step:981 train loss:4.419441 +step:982 train loss:4.425915 +step:983 train loss:4.430412 +step:984 train loss:4.461820 +step:985 train loss:4.438175 +step:986 train loss:4.445533 +step:987 train loss:4.477667 +step:988 train loss:4.462182 +step:989 train loss:4.429788 +step:990 train loss:4.416120 +step:991 train loss:4.353004 +step:992 train loss:4.406877 +step:993 train loss:4.427147 +step:994 train loss:4.364124 +step:995 train loss:4.377791 +step:996 train loss:4.430908 +step:997 train loss:4.383216 +step:998 train loss:4.380398 +step:999 train loss:4.421385 +step:1000 validation loss:4.366725 total_sharp:2.0000e-02 L1_sharp:5.0207e-01 L2_sharp:3.2047e-01 L3_sharp:2.2968e-01 L4_sharp:1.5304e-01 L5_sharp:9.4379e-02 L6_sharp:7.3688e-02 L7_sharp:6.7022e-02 L8_sharp:5.3454e-02 L9_sharp:3.9432e-02 L10_sharp:3.4944e-02 L11_sharp:3.5251e-02 L12_sharp:3.5706e-02 total_fnorm:1.3813e+00 total_l1_linf:8.2619e+03 total_spectral:1.3813e+00 L1_fnorm:6.1711e-02 L2_fnorm:5.7192e-02 L3_fnorm:5.4807e-02 L4_fnorm:5.7358e-02 L5_fnorm:5.8509e-02 L6_fnorm:5.9985e-02 L7_fnorm:6.0482e-02 L8_fnorm:6.0573e-02 L9_fnorm:6.0899e-02 L10_fnorm:6.1222e-02 L11_fnorm:6.0831e-02 L12_fnorm:6.1348e-02 L1_l1linf:1.9285e-01 L2_l1linf:2.1822e-01 L3_l1linf:2.2790e-01 L4_l1linf:2.4316e-01 L5_l1linf:2.3603e-01 L6_l1linf:2.3662e-01 L7_l1linf:2.3486e-01 L8_l1linf:2.3891e-01 L9_l1linf:2.4120e-01 L10_l1linf:2.3886e-01 L11_l1linf:2.3850e-01 L12_l1linf:2.2576e-01 L1_spectral:4.3796e-03 L2_spectral:4.9420e-03 L3_spectral:5.1383e-03 L4_spectral:5.4951e-03 L5_spectral:5.2902e-03 L6_spectral:5.3781e-03 L7_spectral:5.8198e-03 L8_spectral:6.0395e-03 L9_spectral:6.4453e-03 L10_spectral:5.8977e-03 L11_spectral:5.4431e-03 L12_spectral:6.1020e-03 ip_v_neg_g:1.7178e-02 
cos_v_neg_g:2.0586e-03 v_norm:1.3813e+00 g_norm:6.0410e+00 hv_norm:3.9531e+00 cos_v_hv:6.9886e-03 hg_norm:1.9271e+03 cos_g_hg:5.8764e-01 v_par:1.2069e-04 v_perp:1.3813e+00 L1_cos_v_neg_g:2.2518e-02 L1_v_norm:6.1711e-02 L2_cos_v_neg_g:2.2171e-02 L2_v_norm:5.7192e-02 L3_cos_v_neg_g:1.9632e-02 L3_v_norm:5.4807e-02 L4_cos_v_neg_g:1.4963e-02 L4_v_norm:5.7358e-02 L5_cos_v_neg_g:1.2004e-02 L5_v_norm:5.8509e-02 L6_cos_v_neg_g:1.0181e-02 L6_v_norm:5.9985e-02 L7_cos_v_neg_g:9.3263e-03 L7_v_norm:6.0482e-02 L8_cos_v_neg_g:8.2315e-03 L8_v_norm:6.0573e-02 L9_cos_v_neg_g:6.5689e-03 L9_v_norm:6.0899e-02 L10_cos_v_neg_g:7.1255e-03 L10_v_norm:6.1222e-02 L11_cos_v_neg_g:7.1527e-03 L11_v_norm:6.0831e-02 L12_cos_v_neg_g:6.2407e-03 L12_v_norm:6.1348e-02 +step:1000 train loss:4.430436 +step:1001 train loss:4.433821 +step:1002 train loss:4.420214 +step:1003 train loss:4.401489 +step:1004 train loss:4.374619 +step:1005 train loss:4.385573 +step:1006 train loss:4.463593 +step:1007 train loss:4.416638 +step:1008 train loss:4.382629 +step:1009 train loss:4.455533 +step:1010 train loss:4.412687 +step:1011 train loss:4.437655 +step:1012 train loss:4.380422 +step:1013 train loss:4.357225 +step:1014 train loss:4.361205 +step:1015 train loss:4.393960 +step:1016 train loss:4.402764 +step:1017 train loss:4.357853 +step:1018 train loss:4.412736 +step:1019 train loss:4.378868 +step:1020 train loss:4.360740 +step:1021 train loss:4.445530 +step:1022 train loss:4.352900 +step:1023 train loss:4.358693 +step:1024 train loss:4.440639 +step:1025 train loss:4.397681 +step:1026 train loss:4.335556 +step:1027 train loss:4.382568 +step:1028 train loss:4.383799 +step:1029 train loss:4.329900 +step:1030 train loss:4.410977 +step:1031 train loss:4.403968 +step:1032 train loss:4.360887 +step:1033 train loss:4.330167 +step:1034 train loss:4.394246 +step:1035 train loss:4.394003 +step:1036 train loss:4.309931 +step:1037 train loss:4.368953 +step:1038 train loss:4.389399 +step:1039 train loss:4.526735 +step:1040 train loss:4.362226 +step:1041 train loss:4.347193 +step:1042 train loss:4.373980 +step:1043 train loss:4.373776 +step:1044 train loss:4.358738 +step:1045 train loss:4.370564 +step:1046 train loss:4.316000 +step:1047 train loss:4.344493 +step:1048 train loss:4.337831 +step:1049 train loss:4.392323 +step:1050 train loss:4.351305 +step:1051 train loss:4.327528 +step:1052 train loss:4.426747 +step:1053 train loss:4.328837 +step:1054 train loss:4.325039 +step:1055 train loss:4.394263 +step:1056 train loss:4.334928 +step:1057 train loss:4.235325 +step:1058 train loss:4.338608 +step:1059 train loss:4.322944 +step:1060 train loss:4.315059 +step:1061 train loss:4.373882 +step:1062 train loss:4.330939 +step:1063 train loss:4.331341 +step:1064 train loss:4.317757 +step:1065 train loss:4.331226 +step:1066 train loss:4.302710 +step:1067 train loss:4.333128 +step:1068 train loss:4.297877 +step:1069 train loss:4.315332 +step:1070 train loss:4.320058 +step:1071 train loss:4.336216 +step:1072 train loss:4.360099 +step:1073 train loss:4.274885 +step:1074 train loss:4.289887 +step:1075 train loss:4.306489 +step:1076 train loss:4.370822 +step:1077 train loss:4.297270 +step:1078 train loss:4.346465 +step:1079 train loss:4.394510 +step:1080 train loss:4.260067 +step:1081 train loss:4.327704 +step:1082 train loss:4.328189 +step:1083 train loss:4.289217 +step:1084 train loss:4.264257 +step:1085 train loss:4.325523 +step:1086 train loss:4.315199 +step:1087 train loss:4.296929 +step:1088 train loss:4.297575 +step:1089 train loss:4.305289 +step:1090 train 
loss:4.251445 +step:1091 train loss:4.243650 +step:1092 train loss:4.353703 +step:1093 train loss:4.240509 +step:1094 train loss:4.301089 +step:1095 train loss:4.341752 +step:1096 train loss:4.276121 +step:1097 train loss:4.275496 +step:1098 train loss:4.243782 +step:1099 train loss:4.298504 +step:1100 train loss:4.342409 +step:1101 train loss:4.331586 +step:1102 train loss:4.346035 +step:1103 train loss:4.270724 +step:1104 train loss:4.299142 +step:1105 train loss:4.355612 +step:1106 train loss:4.290780 +step:1107 train loss:4.410476 +step:1108 train loss:4.353662 +step:1109 train loss:4.315589 +step:1110 train loss:4.268783 +step:1111 train loss:4.321544 +step:1112 train loss:4.237716 +step:1113 train loss:4.216858 +step:1114 train loss:4.206264 +step:1115 train loss:4.249227 +step:1116 train loss:4.313034 +step:1117 train loss:4.333364 +step:1118 train loss:4.358370 +step:1119 train loss:4.291401 +step:1120 train loss:4.304225 +step:1121 train loss:4.282401 +step:1122 train loss:4.267653 +step:1123 train loss:4.365448 +step:1124 train loss:4.246030 +step:1125 train loss:4.264438 +step:1126 train loss:4.225476 +step:1127 train loss:4.246181 +step:1128 train loss:4.249603 +step:1129 train loss:4.305418 +step:1130 train loss:4.221066 +step:1131 train loss:4.314826 +step:1132 train loss:4.260890 +step:1133 train loss:4.270798 +step:1134 train loss:4.243488 +step:1135 train loss:4.289096 +step:1136 train loss:4.306136 +step:1137 train loss:4.225580 +step:1138 train loss:4.296897 +step:1139 train loss:4.253531 +step:1140 train loss:4.333471 +step:1141 train loss:4.283862 +step:1142 train loss:4.223575 +step:1143 train loss:4.293648 +step:1144 train loss:4.320994 +step:1145 train loss:4.270597 +step:1146 train loss:4.221085 +step:1147 train loss:4.232230 +step:1148 train loss:4.257576 +step:1149 train loss:4.305092 +step:1150 train loss:4.316135 +step:1151 train loss:4.320903 +step:1152 train loss:4.226550 +step:1153 train loss:4.222890 +step:1154 train loss:4.209712 +step:1155 train loss:4.314499 +step:1156 train loss:4.216832 +step:1157 train loss:4.243568 +step:1158 train loss:4.303097 +step:1159 train loss:4.293861 +step:1160 train loss:4.224060 +step:1161 train loss:4.312711 +step:1162 train loss:4.253111 +step:1163 train loss:4.234149 +step:1164 train loss:4.140935 +step:1165 train loss:4.279639 +step:1166 train loss:4.208671 +step:1167 train loss:4.211062 +step:1168 train loss:4.267912 +step:1169 train loss:4.229326 +step:1170 train loss:4.233333 +step:1171 train loss:4.256347 +step:1172 train loss:4.221706 +step:1173 train loss:4.251231 +step:1174 train loss:4.188367 +step:1175 train loss:4.221050 +step:1176 train loss:4.333410 +step:1177 train loss:4.183952 +step:1178 train loss:4.241168 +step:1179 train loss:4.197506 +step:1180 train loss:4.226481 +step:1181 train loss:4.213728 +step:1182 train loss:4.273591 +step:1183 train loss:4.247010 +step:1184 train loss:4.186194 +step:1185 train loss:4.223554 +step:1186 train loss:4.209869 +step:1187 train loss:4.184395 +step:1188 train loss:4.218571 +step:1189 train loss:4.150766 +step:1190 train loss:4.206769 +step:1191 train loss:4.266788 +step:1192 train loss:4.222289 +step:1193 train loss:4.224081 +step:1194 train loss:4.337857 +step:1195 train loss:4.317005 +step:1196 train loss:4.207084 +step:1197 train loss:4.227248 +step:1198 train loss:4.208662 +step:1199 train loss:4.211272 +step:1200 train loss:4.265472 +step:1201 train loss:4.239257 +step:1202 train loss:4.176827 +step:1203 train loss:4.170474 +step:1204 train loss:4.208582 
+step:1205 train loss:4.217396 +step:1206 train loss:4.159692 +step:1207 train loss:4.244309 +step:1208 train loss:4.218622 +step:1209 train loss:4.144999 +step:1210 train loss:4.238576 +step:1211 train loss:4.190334 +step:1212 train loss:4.215893 +step:1213 train loss:4.151590 +step:1214 train loss:4.234795 +step:1215 train loss:4.204150 +step:1216 train loss:4.212634 +step:1217 train loss:4.161809 +step:1218 train loss:4.225962 +step:1219 train loss:4.163968 +step:1220 train loss:4.183254 +step:1221 train loss:4.208425 +step:1222 train loss:4.245597 +step:1223 train loss:4.221990 +step:1224 train loss:4.193191 +step:1225 train loss:4.235097 +step:1226 train loss:4.178558 +step:1227 train loss:4.189802 +step:1228 train loss:4.190832 +step:1229 train loss:4.161057 +step:1230 train loss:4.154918 +step:1231 train loss:4.208375 +step:1232 train loss:4.159161 +step:1233 train loss:4.161715 +step:1234 train loss:4.244749 +step:1235 train loss:4.221674 +step:1236 train loss:4.133868 +step:1237 train loss:4.230240 +step:1238 train loss:4.183545 +step:1239 train loss:4.226875 +step:1240 train loss:4.127519 +step:1241 train loss:4.162370 +step:1242 train loss:4.193365 +step:1243 train loss:4.137834 +step:1244 train loss:4.255984 +step:1245 train loss:4.267513 +step:1246 train loss:4.198828 +step:1247 train loss:4.175019 +step:1248 train loss:4.205246 +step:1249 train loss:4.136058 +step:1250 validation loss:4.141857 +step:1250 train loss:4.150365 +step:1251 train loss:4.218735 +step:1252 train loss:4.167563 +step:1253 train loss:4.122274 +step:1254 train loss:4.152317 +step:1255 train loss:4.150339 +step:1256 train loss:4.193714 +step:1257 train loss:4.172660 +step:1258 train loss:4.223752 +step:1259 train loss:4.208223 +step:1260 train loss:4.110092 +step:1261 train loss:4.347928 +step:1262 train loss:4.195275 +step:1263 train loss:4.153419 +step:1264 train loss:4.161973 +step:1265 train loss:4.222437 +step:1266 train loss:4.161229 +step:1267 train loss:4.169829 +step:1268 train loss:4.178622 +step:1269 train loss:4.173072 +step:1270 train loss:4.101202 +step:1271 train loss:4.107524 +step:1272 train loss:4.137056 +step:1273 train loss:4.195398 +step:1274 train loss:4.158095 +step:1275 train loss:4.185924 +step:1276 train loss:4.182177 +step:1277 train loss:4.192004 +step:1278 train loss:4.134567 +step:1279 train loss:4.142354 +step:1280 train loss:4.158459 +step:1281 train loss:4.212622 +step:1282 train loss:4.134427 +step:1283 train loss:4.210781 +step:1284 train loss:4.155128 +step:1285 train loss:4.204530 +step:1286 train loss:4.103171 +step:1287 train loss:4.142071 +step:1288 train loss:4.173944 +step:1289 train loss:4.229011 +step:1290 train loss:4.182177 +step:1291 train loss:4.146836 +step:1292 train loss:4.129845 +step:1293 train loss:4.118667 +step:1294 train loss:4.169567 +step:1295 train loss:4.151115 +step:1296 train loss:4.197608 +step:1297 train loss:4.156342 +step:1298 train loss:4.173338 +step:1299 train loss:4.212231 +step:1300 train loss:4.131221 +step:1301 train loss:4.177036 +step:1302 train loss:4.137251 +step:1303 train loss:4.173797 +step:1304 train loss:4.204598 +step:1305 train loss:4.179938 +step:1306 train loss:4.172499 +step:1307 train loss:4.155895 +step:1308 train loss:4.112768 +step:1309 train loss:4.125967 +step:1310 train loss:4.110000 +step:1311 train loss:4.117368 +step:1312 train loss:4.187571 +step:1313 train loss:4.104816 +step:1314 train loss:4.110351 +step:1315 train loss:4.160172 +step:1316 train loss:4.131173 +step:1317 train loss:4.027075 +step:1318 
train loss:4.181905 +step:1319 train loss:4.220248 +step:1320 train loss:4.131926 +step:1321 train loss:4.111503 +step:1322 train loss:4.215880 +step:1323 train loss:4.164400 +step:1324 train loss:4.260303 +step:1325 train loss:4.146835 +step:1326 train loss:4.174851 +step:1327 train loss:4.188674 +step:1328 train loss:4.096342 +step:1329 train loss:4.126071 +step:1330 train loss:4.148039 +step:1331 train loss:4.015521 +step:1332 train loss:4.194029 +step:1333 train loss:4.154981 +step:1334 train loss:4.160205 +step:1335 train loss:4.176416 +step:1336 train loss:4.179805 +step:1337 train loss:4.153766 +step:1338 train loss:4.132946 +step:1339 train loss:4.203697 +step:1340 train loss:4.170084 +step:1341 train loss:4.148699 +step:1342 train loss:4.121084 +step:1343 train loss:4.111166 +step:1344 train loss:4.176130 +step:1345 train loss:4.131629 +step:1346 train loss:4.214234 +step:1347 train loss:4.138220 +step:1348 train loss:4.104609 +step:1349 train loss:4.052619 +step:1350 train loss:4.084790 +step:1351 train loss:4.155525 +step:1352 train loss:4.127248 +step:1353 train loss:4.104642 +step:1354 train loss:4.108289 +step:1355 train loss:4.180802 +step:1356 train loss:4.091317 +step:1357 train loss:4.119743 +step:1358 train loss:4.114305 +step:1359 train loss:4.113896 +step:1360 train loss:4.143144 +step:1361 train loss:4.261704 +step:1362 train loss:4.177169 +step:1363 train loss:4.064866 +step:1364 train loss:4.086857 +step:1365 train loss:4.082387 +step:1366 train loss:4.116706 +step:1367 train loss:4.053235 +step:1368 train loss:4.084373 +step:1369 train loss:4.120756 +step:1370 train loss:4.138310 +step:1371 train loss:4.096360 +step:1372 train loss:4.127696 +step:1373 train loss:4.168080 +step:1374 train loss:4.171448 +step:1375 train loss:4.124567 +step:1376 train loss:4.145090 +step:1377 train loss:4.140157 +step:1378 train loss:4.127201 +step:1379 train loss:4.100821 +step:1380 train loss:4.165884 +step:1381 train loss:4.116006 +step:1382 train loss:4.093206 +step:1383 train loss:4.085967 +step:1384 train loss:4.161098 +step:1385 train loss:4.061709 +step:1386 train loss:4.126016 +step:1387 train loss:4.122855 +step:1388 train loss:4.094365 +step:1389 train loss:4.066236 +step:1390 train loss:4.102508 +step:1391 train loss:4.135951 +step:1392 train loss:4.112766 +step:1393 train loss:4.161815 +step:1394 train loss:4.095255 +step:1395 train loss:4.132737 +step:1396 train loss:4.121500 +step:1397 train loss:4.135839 +step:1398 train loss:4.143909 +step:1399 train loss:4.115891 +step:1400 train loss:4.091296 +step:1401 train loss:4.084891 +step:1402 train loss:4.091085 +step:1403 train loss:4.051934 +step:1404 train loss:4.110249 +step:1405 train loss:4.068645 +step:1406 train loss:4.099337 +step:1407 train loss:4.093662 +step:1408 train loss:4.078940 +step:1409 train loss:4.065619 +step:1410 train loss:4.083957 +step:1411 train loss:4.118584 +step:1412 train loss:4.174246 +step:1413 train loss:4.093904 +step:1414 train loss:4.125062 +step:1415 train loss:4.082670 +step:1416 train loss:4.137510 +step:1417 train loss:4.106524 +step:1418 train loss:4.046580 +step:1419 train loss:4.053786 +step:1420 train loss:4.078176 +step:1421 train loss:4.118008 +step:1422 train loss:4.096784 +step:1423 train loss:4.191042 +step:1424 train loss:4.090010 +step:1425 train loss:4.050972 +step:1426 train loss:4.077456 +step:1427 train loss:4.067036 +step:1428 train loss:4.049697 +step:1429 train loss:4.075132 +step:1430 train loss:4.080945 +step:1431 train loss:4.104090 +step:1432 train loss:4.091354 
+step:1433 train loss:4.071709 +step:1434 train loss:4.044244 +step:1435 train loss:4.037461 +step:1436 train loss:4.109120 +step:1437 train loss:4.042617 +step:1438 train loss:4.045529 +step:1439 train loss:4.027932 +step:1440 train loss:4.068236 +step:1441 train loss:4.140111 +step:1442 train loss:4.102810 +step:1443 train loss:4.030915 +step:1444 train loss:4.043043 +step:1445 train loss:4.043962 +step:1446 train loss:4.072989 +step:1447 train loss:4.085786 +step:1448 train loss:4.051346 +step:1449 train loss:4.076879 +step:1450 train loss:4.093019 +step:1451 train loss:4.016618 +step:1452 train loss:4.073607 +step:1453 train loss:4.069904 +step:1454 train loss:4.065054 +step:1455 train loss:4.000025 +step:1456 train loss:4.080257 +step:1457 train loss:4.012249 +step:1458 train loss:4.152421 +step:1459 train loss:4.071632 +step:1460 train loss:4.042612 +step:1461 train loss:4.093452 +step:1462 train loss:4.102744 +step:1463 train loss:4.065272 +step:1464 train loss:4.044551 +step:1465 train loss:4.044932 +step:1466 train loss:4.010079 +step:1467 train loss:4.147542 +step:1468 train loss:4.027016 +step:1469 train loss:4.106844 +step:1470 train loss:4.038644 +step:1471 train loss:4.040274 +step:1472 train loss:4.039851 +step:1473 train loss:4.040202 +step:1474 train loss:3.982569 +step:1475 train loss:4.047203 +step:1476 train loss:4.125401 +step:1477 train loss:4.075500 +step:1478 train loss:4.011871 +step:1479 train loss:4.044918 +step:1480 train loss:4.042566 +step:1481 train loss:4.014108 +step:1482 train loss:4.079306 +step:1483 train loss:4.066921 +step:1484 train loss:4.098454 +step:1485 train loss:4.108861 +step:1486 train loss:4.047276 +step:1487 train loss:4.034490 +step:1488 train loss:4.036759 +step:1489 train loss:4.029954 +step:1490 train loss:4.086147 +step:1491 train loss:4.077382 +step:1492 train loss:4.071999 +step:1493 train loss:4.019961 +step:1494 train loss:4.053141 +step:1495 train loss:4.034455 +step:1496 train loss:4.006217 +step:1497 train loss:4.080036 +step:1498 train loss:3.985875 +step:1499 train loss:4.030730 +step:1500 validation loss:4.001816 total_sharp:1.3896e-02 L1_sharp:1.2410e-01 L2_sharp:7.8653e-02 L3_sharp:1.2294e-01 L4_sharp:7.2812e-02 L5_sharp:6.5078e-02 L6_sharp:5.7429e-02 L7_sharp:6.3423e-02 L8_sharp:5.4522e-02 L9_sharp:4.3622e-02 L10_sharp:3.4180e-02 L11_sharp:2.8659e-02 L12_sharp:3.1495e-02 total_fnorm:1.3380e+00 total_l1_linf:8.0423e+03 total_spectral:1.3380e+00 L1_fnorm:6.1867e-02 L2_fnorm:5.8557e-02 L3_fnorm:5.5874e-02 L4_fnorm:5.8176e-02 L5_fnorm:5.9266e-02 L6_fnorm:6.0625e-02 L7_fnorm:6.0916e-02 L8_fnorm:6.0976e-02 L9_fnorm:6.1060e-02 L10_fnorm:6.1132e-02 L11_fnorm:6.0909e-02 L12_fnorm:6.1103e-02 L1_l1linf:2.0834e-01 L2_l1linf:2.2656e-01 L3_l1linf:2.4634e-01 L4_l1linf:2.5148e-01 L5_l1linf:2.6267e-01 L6_l1linf:2.4867e-01 L7_l1linf:2.5722e-01 L8_l1linf:2.6159e-01 L9_l1linf:2.5972e-01 L10_l1linf:2.7199e-01 L11_l1linf:2.6263e-01 L12_l1linf:2.3863e-01 L1_spectral:4.7588e-03 L2_spectral:5.1866e-03 L3_spectral:5.6199e-03 L4_spectral:5.6842e-03 L5_spectral:5.9180e-03 L6_spectral:5.6284e-03 L7_spectral:5.7550e-03 L8_spectral:5.8163e-03 L9_spectral:5.8781e-03 L10_spectral:6.1376e-03 L11_spectral:5.9490e-03 L12_spectral:5.4575e-03 ip_v_neg_g:1.1412e-02 cos_v_neg_g:1.7176e-03 v_norm:1.3380e+00 g_norm:4.9655e+00 hv_norm:2.2893e+00 cos_v_hv:8.1220e-03 hg_norm:8.5831e+02 cos_g_hg:5.3202e-01 v_par:7.9082e-05 v_perp:1.3380e+00 L1_cos_v_neg_g:1.2536e-02 L1_v_norm:6.1867e-02 L2_cos_v_neg_g:1.4158e-02 L2_v_norm:5.8557e-02 L3_cos_v_neg_g:1.6979e-02 
L3_v_norm:5.5874e-02 L4_cos_v_neg_g:1.2669e-02 L4_v_norm:5.8176e-02 L5_cos_v_neg_g:1.1676e-02 L5_v_norm:5.9266e-02 L6_cos_v_neg_g:9.3149e-03 L6_v_norm:6.0625e-02 L7_cos_v_neg_g:9.2747e-03 L7_v_norm:6.0916e-02 L8_cos_v_neg_g:9.5002e-03 L8_v_norm:6.0976e-02 L9_cos_v_neg_g:8.8215e-03 L9_v_norm:6.1060e-02 L10_cos_v_neg_g:7.9849e-03 L10_v_norm:6.1132e-02 L11_cos_v_neg_g:6.9039e-03 L11_v_norm:6.0909e-02 L12_cos_v_neg_g:5.2787e-03 L12_v_norm:6.1103e-02 +step:1500 train loss:4.025854 +step:1501 train loss:4.048527 +step:1502 train loss:3.984568 +step:1503 train loss:4.040518 +step:1504 train loss:4.006959 +step:1505 train loss:3.980059 +step:1506 train loss:3.969425 +step:1507 train loss:3.991298 +step:1508 train loss:4.003238 +step:1509 train loss:4.056666 +step:1510 train loss:3.993820 +step:1511 train loss:4.024395 +step:1512 train loss:3.998389 +step:1513 train loss:4.069726 +step:1514 train loss:4.021398 +step:1515 train loss:4.080245 +step:1516 train loss:4.006867 +step:1517 train loss:4.017790 +step:1518 train loss:4.095958 +step:1519 train loss:4.057889 +step:1520 train loss:4.097423 +step:1521 train loss:4.000633 +step:1522 train loss:4.059740 +step:1523 train loss:4.062486 +step:1524 train loss:3.984446 +step:1525 train loss:4.063933 +step:1526 train loss:3.978590 +step:1527 train loss:4.037373 +step:1528 train loss:4.086488 +step:1529 train loss:4.043332 +step:1530 train loss:4.083542 +step:1531 train loss:4.005341 +step:1532 train loss:4.076995 +step:1533 train loss:4.048962 +step:1534 train loss:3.996671 +step:1535 train loss:4.052619 +step:1536 train loss:4.076392 +step:1537 train loss:4.028953 +step:1538 train loss:4.030207 +step:1539 train loss:4.027463 +step:1540 train loss:4.045648 +step:1541 train loss:4.007771 +step:1542 train loss:4.099950 +step:1543 train loss:4.124387 +step:1544 train loss:3.994953 +step:1545 train loss:3.978109 +step:1546 train loss:4.015586 +step:1547 train loss:4.004165 +step:1548 train loss:4.045554 +step:1549 train loss:3.971691 +step:1550 train loss:4.086477 +step:1551 train loss:4.019348 +step:1552 train loss:4.048212 +step:1553 train loss:4.057321 +step:1554 train loss:4.063607 +step:1555 train loss:4.021979 +step:1556 train loss:4.001417 +step:1557 train loss:4.012835 +step:1558 train loss:4.037537 +step:1559 train loss:4.000846 +step:1560 train loss:4.082525 +step:1561 train loss:4.057009 +step:1562 train loss:3.947842 +step:1563 train loss:3.926867 +step:1564 train loss:4.057088 +step:1565 train loss:4.033547 +step:1566 train loss:4.051813 +step:1567 train loss:4.050285 +step:1568 train loss:4.002738 +step:1569 train loss:3.996002 +step:1570 train loss:4.013117 +step:1571 train loss:3.988990 +step:1572 train loss:3.993518 +step:1573 train loss:4.037687 +step:1574 train loss:3.991626 +step:1575 train loss:4.014049 +step:1576 train loss:3.971756 +step:1577 train loss:3.999601 +step:1578 train loss:3.980656 +step:1579 train loss:4.058494 +step:1580 train loss:4.014411 +step:1581 train loss:4.048483 +step:1582 train loss:4.047511 +step:1583 train loss:4.022175 +step:1584 train loss:3.944336 +step:1585 train loss:4.031059 +step:1586 train loss:3.997507 +step:1587 train loss:4.010231 +step:1588 train loss:3.991933 +step:1589 train loss:4.041889 +step:1590 train loss:3.951930 +step:1591 train loss:4.010372 +step:1592 train loss:3.959345 +step:1593 train loss:3.997150 +step:1594 train loss:3.999678 +step:1595 train loss:3.995234 +step:1596 train loss:3.999967 +step:1597 train loss:3.927474 +step:1598 train loss:4.031485 +step:1599 train loss:4.039712 
+step:1600 train loss:3.920992 +step:1601 train loss:3.995669 +step:1602 train loss:4.054633 +step:1603 train loss:4.047934 +step:1604 train loss:3.974549 +step:1605 train loss:4.026477 +step:1606 train loss:4.071229 +step:1607 train loss:3.956286 +step:1608 train loss:3.986884 +step:1609 train loss:4.006466 +step:1610 train loss:4.066191 +step:1611 train loss:3.988215 +step:1612 train loss:3.915884 +step:1613 train loss:3.986544 +step:1614 train loss:4.094283 +step:1615 train loss:4.012697 +step:1616 train loss:4.022692 +step:1617 train loss:4.005669 +step:1618 train loss:4.012155 +step:1619 train loss:4.182699 +step:1620 train loss:3.974735 +step:1621 train loss:4.030797 +step:1622 train loss:3.953297 +step:1623 train loss:4.017217 +step:1624 train loss:3.989008 +step:1625 train loss:4.061314 +step:1626 train loss:3.953770 +step:1627 train loss:3.965640 +step:1628 train loss:3.979745 +step:1629 train loss:4.011023 +step:1630 train loss:4.030946 +step:1631 train loss:3.976734 +step:1632 train loss:3.953766 +step:1633 train loss:3.968368 +step:1634 train loss:4.022316 +step:1635 train loss:3.963578 +step:1636 train loss:3.949951 +step:1637 train loss:4.027375 +step:1638 train loss:4.127635 +step:1639 train loss:3.937647 +step:1640 train loss:4.014865 +step:1641 train loss:3.976483 +step:1642 train loss:4.072536 +step:1643 train loss:3.971040 +step:1644 train loss:3.986316 +step:1645 train loss:3.959886 +step:1646 train loss:4.040498 +step:1647 train loss:3.935421 +step:1648 train loss:3.998800 +step:1649 train loss:3.967064 +step:1650 train loss:3.979647 +step:1651 train loss:3.996568 +step:1652 train loss:4.013709 +step:1653 train loss:4.020020 +step:1654 train loss:4.013165 +step:1655 train loss:3.990332 +step:1656 train loss:3.982099 +step:1657 train loss:3.982717 +step:1658 train loss:3.957741 +step:1659 train loss:4.031814 +step:1660 train loss:3.930799 +step:1661 train loss:4.042966 +step:1662 train loss:3.982793 +step:1663 train loss:3.973075 +step:1664 train loss:4.070194 +step:1665 train loss:3.989440 +step:1666 train loss:3.999316 +step:1667 train loss:4.016507 +step:1668 train loss:3.991111 +step:1669 train loss:3.955059 +step:1670 train loss:4.005055 +step:1671 train loss:4.002346 +step:1672 train loss:3.998828 +step:1673 train loss:3.956366 +step:1674 train loss:3.955196 +step:1675 train loss:3.999072 +step:1676 train loss:4.258128 +step:1677 train loss:4.007941 +step:1678 train loss:3.922647 +step:1679 train loss:4.046508 +step:1680 train loss:3.970113 +step:1681 train loss:4.029481 +step:1682 train loss:3.983542 +step:1683 train loss:3.978723 +step:1684 train loss:3.930946 +step:1685 train loss:3.994297 +step:1686 train loss:3.978889 +step:1687 train loss:3.988690 +step:1688 train loss:3.974514 +step:1689 train loss:3.958933 +step:1690 train loss:3.987534 +step:1691 train loss:3.969177 +step:1692 train loss:3.989594 +step:1693 train loss:3.953555 +step:1694 train loss:3.912303 +step:1695 train loss:3.935130 +step:1696 train loss:3.945043 +step:1697 train loss:3.987841 +step:1698 train loss:3.977028 +step:1699 train loss:3.942443 +step:1700 train loss:4.020389 +step:1701 train loss:3.959301 +step:1702 train loss:3.949297 +step:1703 train loss:3.975712 +step:1704 train loss:3.979464 +step:1705 train loss:3.994102 +step:1706 train loss:4.002110 +step:1707 train loss:3.999604 +step:1708 train loss:3.928195 +step:1709 train loss:4.025510 +step:1710 train loss:3.945526 +step:1711 train loss:3.947093 +step:1712 train loss:3.972183 +step:1713 train loss:3.940079 +step:1714 train 
loss:4.304527 +step:1715 train loss:3.953896 +step:1716 train loss:3.939851 +step:1717 train loss:3.942610 +step:1718 train loss:4.016482 +step:1719 train loss:3.926373 +step:1720 train loss:4.010927 +step:1721 train loss:3.953759 +step:1722 train loss:3.924338 +step:1723 train loss:4.020363 +step:1724 train loss:3.974000 +step:1725 train loss:3.967660 +step:1726 train loss:3.969221 +step:1727 train loss:4.001428 +step:1728 train loss:4.010630 +step:1729 train loss:3.929205 +step:1730 train loss:4.006356 +step:1731 train loss:3.936441 +step:1732 train loss:3.950304 +step:1733 train loss:3.931000 +step:1734 train loss:3.985400 +step:1735 train loss:4.046684 +step:1736 train loss:3.956124 +step:1737 train loss:3.988397 +step:1738 train loss:3.947935 +step:1739 train loss:4.010910 +step:1740 train loss:4.003779 +step:1741 train loss:4.055810 +step:1742 train loss:4.043350 +step:1743 train loss:3.935444 +step:1744 train loss:3.950639 +step:1745 train loss:3.934448 +step:1746 train loss:3.921644 +step:1747 train loss:3.955692 +step:1748 train loss:3.894806 +step:1749 train loss:3.937003 +step:1750 validation loss:3.916432 +step:1750 train loss:3.973308 +step:1751 train loss:3.989503 +step:1752 train loss:3.957437 +step:1753 train loss:3.980505 +step:1754 train loss:3.972055 +step:1755 train loss:3.970986 +step:1756 train loss:3.994654 +step:1757 train loss:3.997406 +step:1758 train loss:3.917110 +step:1759 train loss:4.006410 +step:1760 train loss:3.958829 +step:1761 train loss:3.933962 +step:1762 train loss:3.932827 +step:1763 train loss:3.937927 +step:1764 train loss:4.225133 +step:1765 train loss:3.939204 +step:1766 train loss:4.029773 +step:1767 train loss:3.945456 +step:1768 train loss:3.921247 +step:1769 train loss:3.937817 +step:1770 train loss:3.958133 +step:1771 train loss:3.932057 +step:1772 train loss:4.041005 +step:1773 train loss:3.965415 +step:1774 train loss:3.971655 +step:1775 train loss:4.081407 +step:1776 train loss:3.956684 +step:1777 train loss:3.947489 +step:1778 train loss:4.002794 +step:1779 train loss:3.936917 +step:1780 train loss:3.990076 +step:1781 train loss:3.992963 +step:1782 train loss:4.021955 +step:1783 train loss:3.953141 +step:1784 train loss:4.041867 +step:1785 train loss:3.948601 +step:1786 train loss:3.945997 +step:1787 train loss:3.945473 +step:1788 train loss:3.965719 +step:1789 train loss:3.920077 +step:1790 train loss:3.934454 +step:1791 train loss:4.009666 +step:1792 train loss:4.010139 +step:1793 train loss:3.928850 +step:1794 train loss:3.972813 +step:1795 train loss:3.925018 +step:1796 train loss:3.909605 +step:1797 train loss:3.973230 +step:1798 train loss:3.912389 +step:1799 train loss:3.968713 +step:1800 train loss:3.995157 +step:1801 train loss:3.983816 +step:1802 train loss:3.992065 +step:1803 train loss:3.983751 +step:1804 train loss:3.979851 +step:1805 train loss:3.968670 +step:1806 train loss:3.979144 +step:1807 train loss:3.910876 +step:1808 train loss:3.974104 +step:1809 train loss:3.957972 +step:1810 train loss:3.952100 +step:1811 train loss:3.966614 +step:1812 train loss:3.950140 +step:1813 train loss:3.963453 +step:1814 train loss:4.026369 +step:1815 train loss:3.967266 +step:1816 train loss:3.921293 +step:1817 train loss:3.910938 +step:1818 train loss:3.969801 +step:1819 train loss:3.936503 +step:1820 train loss:3.976483 +step:1821 train loss:3.939857 +step:1822 train loss:3.918288 +step:1823 train loss:3.912103 +step:1824 train loss:3.990318 +step:1825 train loss:3.900592 +step:1826 train loss:3.943583 +step:1827 train loss:3.911057 
+step:1828 train loss:3.957430 +step:1829 train loss:3.923034 +step:1830 train loss:4.116905 +step:1831 train loss:3.878044 +step:1832 train loss:3.922495 +step:1833 train loss:3.972206 +step:1834 train loss:3.920624 +step:1835 train loss:3.930417 +step:1836 train loss:3.968423 +step:1837 train loss:3.892153 +step:1838 train loss:3.990288 +step:1839 train loss:3.969884 +step:1840 train loss:3.940871 +step:1841 train loss:3.963155 +step:1842 train loss:3.940187 +step:1843 train loss:3.886729 +step:1844 train loss:3.953558 +step:1845 train loss:3.920069 +step:1846 train loss:3.976388 +step:1847 train loss:4.027847 +step:1848 train loss:3.822206 +step:1849 train loss:3.920485 +step:1850 train loss:3.894693 +step:1851 train loss:3.938027 +step:1852 train loss:3.920106 +step:1853 train loss:3.978876 +step:1854 train loss:3.940562 +step:1855 train loss:3.927554 +step:1856 train loss:3.930952 +step:1857 train loss:3.933961 +step:1858 train loss:3.981623 +step:1859 train loss:3.928227 +step:1860 train loss:3.901345 +step:1861 train loss:3.919059 +step:1862 train loss:3.960051 +step:1863 train loss:3.998524 +step:1864 train loss:3.897480 +step:1865 train loss:3.920755 +step:1866 train loss:3.925787 +step:1867 train loss:3.953998 +step:1868 train loss:4.004114 +step:1869 train loss:3.921593 +step:1870 train loss:3.950016 +step:1871 train loss:3.891008 +step:1872 train loss:3.955592 +step:1873 train loss:4.018136 +step:1874 train loss:3.880879 +step:1875 train loss:3.958452 +step:1876 train loss:3.918422 +step:1877 train loss:3.960941 +step:1878 train loss:3.885133 +step:1879 train loss:3.945214 +step:1880 train loss:4.024786 +step:1881 train loss:3.950306 +step:1882 train loss:3.968240 +step:1883 train loss:3.990122 +step:1884 train loss:3.998497 +step:1885 train loss:3.959545 +step:1886 train loss:3.888139 +step:1887 train loss:3.902269 +step:1888 train loss:3.905292 +step:1889 train loss:3.917537 +step:1890 train loss:3.925755 +step:1891 train loss:3.858132 +step:1892 train loss:3.953816 +step:1893 train loss:3.876919 +step:1894 train loss:3.895901 +step:1895 train loss:3.931666 +step:1896 train loss:3.979189 +step:1897 train loss:3.877964 +step:1898 train loss:3.925200 +step:1899 train loss:3.939140 +step:1900 train loss:3.891591 +step:1901 train loss:3.967083 +step:1902 train loss:3.961725 +step:1903 train loss:3.901530 +step:1904 train loss:3.889348 +step:1905 train loss:3.891223 +step:1906 train loss:3.945232 +step:1907 train loss:3.892723 +step:1908 train loss:3.906740 +step:1909 train loss:4.002115 +step:1910 train loss:3.889147 +step:1911 train loss:3.898057 +step:1912 train loss:3.949056 +step:1913 train loss:3.885470 +step:1914 train loss:3.921794 +step:1915 train loss:3.888916 +step:1916 train loss:3.936561 +step:1917 train loss:3.921371 +step:1918 train loss:3.832896 +step:1919 train loss:3.982532 +step:1920 train loss:4.089645 +step:1921 train loss:3.868973 +step:1922 train loss:3.848201 +step:1923 train loss:3.946723 +step:1924 train loss:3.985010 +step:1925 train loss:3.928150 +step:1926 train loss:3.867013 +step:1927 train loss:3.948821 +step:1928 train loss:3.864595 +step:1929 train loss:3.892632 +step:1930 train loss:3.962639 +step:1931 train loss:3.873594 +step:1932 train loss:3.925361 +step:1933 train loss:3.923568 +step:1934 train loss:3.994460 +step:1935 train loss:3.945646 +step:1936 train loss:3.914810 +step:1937 train loss:3.853031 +step:1938 train loss:4.218219 +step:1939 train loss:3.961632 +step:1940 train loss:3.943500 +step:1941 train loss:3.947335 +step:1942 train 
loss:3.941451 +step:1943 train loss:3.933392 +step:1944 train loss:3.895568 +step:1945 train loss:3.895569 +step:1946 train loss:3.919495 +step:1947 train loss:3.944568 +step:1948 train loss:3.851284 +step:1949 train loss:3.960758 +step:1950 train loss:3.901000 +step:1951 train loss:3.923817 +step:1952 train loss:3.949911 +step:1953 train loss:3.880352 +step:1954 train loss:3.915143 +step:1955 train loss:3.868842 +step:1956 train loss:3.949502 +step:1957 train loss:3.974434 +step:1958 train loss:3.988527 +step:1959 train loss:3.860224 +step:1960 train loss:3.899610 +step:1961 train loss:3.931093 +step:1962 train loss:3.924977 +step:1963 train loss:3.901916 +step:1964 train loss:3.940752 +step:1965 train loss:3.974948 +step:1966 train loss:3.882919 +step:1967 train loss:3.944662 +step:1968 train loss:3.880563 +step:1969 train loss:3.898084 +step:1970 train loss:3.962934 +step:1971 train loss:3.863463 +step:1972 train loss:3.971707 +step:1973 train loss:3.867991 +step:1974 train loss:3.912660 +step:1975 train loss:3.876291 +step:1976 train loss:3.896239 +step:1977 train loss:3.941556 +step:1978 train loss:3.885268 +step:1979 train loss:3.860715 +step:1980 train loss:3.900881 +step:1981 train loss:3.879599 +step:1982 train loss:3.962786 +step:1983 train loss:3.905110 +step:1984 train loss:3.946222 +step:1985 train loss:3.933151 +step:1986 train loss:3.920506 +step:1987 train loss:3.877904 +step:1988 train loss:3.904596 +step:1989 train loss:4.042711 +step:1990 train loss:3.881544 +step:1991 train loss:3.871468 +step:1992 train loss:3.885080 +step:1993 train loss:3.918958 +step:1994 train loss:3.911766 +step:1995 train loss:3.862463 +step:1996 train loss:3.916090 +step:1997 train loss:3.920689 +step:1998 train loss:3.872560 +step:1999 train loss:3.983894 +step:2000 validation loss:3.851679 total_sharp:9.8181e-03 L1_sharp:1.1112e-01 L2_sharp:5.1013e-02 L3_sharp:6.7930e-02 L4_sharp:4.6073e-02 L5_sharp:4.7528e-02 L6_sharp:5.2731e-02 L7_sharp:5.0271e-02 L8_sharp:4.4830e-02 L9_sharp:3.6516e-02 L10_sharp:2.9764e-02 L11_sharp:3.0299e-02 L12_sharp:4.8386e-02 total_fnorm:1.3431e+00 total_l1_linf:8.0731e+03 total_spectral:1.3431e+00 L1_fnorm:6.1601e-02 L2_fnorm:5.8625e-02 L3_fnorm:5.6665e-02 L4_fnorm:5.8932e-02 L5_fnorm:5.9869e-02 L6_fnorm:6.0819e-02 L7_fnorm:6.0896e-02 L8_fnorm:6.1000e-02 L9_fnorm:6.0975e-02 L10_fnorm:6.0995e-02 L11_fnorm:6.0831e-02 L12_fnorm:6.1085e-02 L1_l1linf:2.1345e-01 L2_l1linf:2.3003e-01 L3_l1linf:2.5636e-01 L4_l1linf:2.7139e-01 L5_l1linf:2.6493e-01 L6_l1linf:2.6698e-01 L7_l1linf:2.5347e-01 L8_l1linf:2.5062e-01 L9_l1linf:2.6875e-01 L10_l1linf:2.7246e-01 L11_l1linf:2.7013e-01 L12_l1linf:2.7531e-01 L1_spectral:4.8755e-03 L2_spectral:5.2774e-03 L3_spectral:5.8102e-03 L4_spectral:6.0987e-03 L5_spectral:5.9964e-03 L6_spectral:5.9844e-03 L7_spectral:5.6884e-03 L8_spectral:5.6686e-03 L9_spectral:6.0363e-03 L10_spectral:6.0706e-03 L11_spectral:6.0803e-03 L12_spectral:6.1711e-03 ip_v_neg_g:9.5904e-03 cos_v_neg_g:1.3958e-03 v_norm:1.3431e+00 g_norm:5.1156e+00 hv_norm:1.8438e+00 cos_v_hv:7.1518e-03 hg_norm:9.0692e+02 cos_g_hg:5.6852e-01 v_par:5.8446e-05 v_perp:1.3431e+00 L1_cos_v_neg_g:5.9471e-03 L1_v_norm:6.1601e-02 L2_cos_v_neg_g:9.4168e-03 L2_v_norm:5.8625e-02 L3_cos_v_neg_g:1.0554e-02 L3_v_norm:5.6665e-02 L4_cos_v_neg_g:9.8832e-03 L4_v_norm:5.8932e-02 L5_cos_v_neg_g:1.1161e-02 L5_v_norm:5.9869e-02 L6_cos_v_neg_g:1.1226e-02 L6_v_norm:6.0819e-02 L7_cos_v_neg_g:1.0242e-02 L7_v_norm:6.0896e-02 L8_cos_v_neg_g:1.0269e-02 L8_v_norm:6.1000e-02 L9_cos_v_neg_g:8.9265e-03 L9_v_norm:6.0975e-02 
L10_cos_v_neg_g:7.5057e-03 L10_v_norm:6.0995e-02 L11_cos_v_neg_g:7.3564e-03 L11_v_norm:6.0831e-02 L12_cos_v_neg_g:6.2142e-03 L12_v_norm:6.1085e-02 +step:2000 train loss:3.951728 +step:2001 train loss:3.872866 +step:2002 train loss:3.971295 +step:2003 train loss:4.016721 +step:2004 train loss:3.888269 +step:2005 train loss:3.986727 +step:2006 train loss:3.873651 +step:2007 train loss:3.950030 +step:2008 train loss:3.891984 +step:2009 train loss:3.892005 +step:2010 train loss:4.019495 +step:2011 train loss:3.871474 +step:2012 train loss:3.896883 +step:2013 train loss:3.912545 +step:2014 train loss:3.802996 +step:2015 train loss:3.928929 +step:2016 train loss:3.905738 +step:2017 train loss:3.910740 +step:2018 train loss:3.877559 +step:2019 train loss:3.905898 +step:2020 train loss:3.915654 +step:2021 train loss:3.878303 +step:2022 train loss:3.920198 +step:2023 train loss:3.897740 +step:2024 train loss:3.949905 +step:2025 train loss:3.890423 +step:2026 train loss:3.871262 +step:2027 train loss:3.899207 +step:2028 train loss:3.831019 +step:2029 train loss:3.860852 +step:2030 train loss:3.865970 +step:2031 train loss:3.829709 +step:2032 train loss:3.880363 +step:2033 train loss:3.877939 +step:2034 train loss:3.873517 +step:2035 train loss:3.916183 +step:2036 train loss:3.905561 +step:2037 train loss:3.890568 +step:2038 train loss:3.890116 +step:2039 train loss:3.879823 +step:2040 train loss:3.909803 +step:2041 train loss:3.912457 +step:2042 train loss:3.843457 +step:2043 train loss:4.000756 +step:2044 train loss:3.865805 +step:2045 train loss:3.886900 +step:2046 train loss:3.893120 +step:2047 train loss:3.870118 +step:2048 train loss:3.911556 +step:2049 train loss:3.867546 +step:2050 train loss:3.892944 +step:2051 train loss:3.858408 +step:2052 train loss:3.904236 +step:2053 train loss:3.908255 +step:2054 train loss:3.874500 +step:2055 train loss:3.873664 +step:2056 train loss:3.919500 +step:2057 train loss:3.927240 +step:2058 train loss:3.891410 +step:2059 train loss:3.970627 +step:2060 train loss:3.918431 +step:2061 train loss:3.870290 +step:2062 train loss:3.897078 +step:2063 train loss:3.800119 +step:2064 train loss:3.919991 +step:2065 train loss:3.929919 +step:2066 train loss:3.787667 +step:2067 train loss:3.834148 +step:2068 train loss:3.941259 +step:2069 train loss:3.876183 +step:2070 train loss:3.879675 +step:2071 train loss:3.922182 +step:2072 train loss:3.849059 +step:2073 train loss:3.903193 +step:2074 train loss:3.882742 +step:2075 train loss:3.964840 +step:2076 train loss:3.909679 +step:2077 train loss:3.921914 +step:2078 train loss:3.878530 +step:2079 train loss:4.030090 +step:2080 train loss:3.849349 +step:2081 train loss:3.959312 +step:2082 train loss:3.887979 +step:2083 train loss:3.876624 +step:2084 train loss:3.855329 +step:2085 train loss:3.901362 +step:2086 train loss:3.914770 +step:2087 train loss:3.950519 +step:2088 train loss:3.818696 +step:2089 train loss:3.848839 +step:2090 train loss:3.885395 +step:2091 train loss:3.899857 +step:2092 train loss:3.879951 +step:2093 train loss:3.868820 +step:2094 train loss:3.906092 +step:2095 train loss:3.849664 +step:2096 train loss:3.839730 +step:2097 train loss:3.872017 +step:2098 train loss:3.872764 +step:2099 train loss:3.852279 +step:2100 train loss:3.919820 +step:2101 train loss:3.909983 +step:2102 train loss:3.878167 +step:2103 train loss:3.894617 +step:2104 train loss:3.872922 +step:2105 train loss:3.878465 +step:2106 train loss:3.875499 +step:2107 train loss:3.941441 +step:2108 train loss:3.862206 +step:2109 train 
loss:3.820749 +step:2110 train loss:3.919352 +step:2111 train loss:3.864834 +step:2112 train loss:3.923912 +step:2113 train loss:3.862584 +step:2114 train loss:3.869584 +step:2115 train loss:3.919940 +step:2116 train loss:3.850693 +step:2117 train loss:3.867065 +step:2118 train loss:3.861612 +step:2119 train loss:3.796084 +step:2120 train loss:3.882638 +step:2121 train loss:3.871733 +step:2122 train loss:3.882123 +step:2123 train loss:3.936548 +step:2124 train loss:3.939094 +step:2125 train loss:3.846734 +step:2126 train loss:3.853189 +step:2127 train loss:3.844051 +step:2128 train loss:3.838388 +step:2129 train loss:3.864138 +step:2130 train loss:3.868522 +step:2131 train loss:3.890860 +step:2132 train loss:3.821163 +step:2133 train loss:3.930427 +step:2134 train loss:3.879827 +step:2135 train loss:3.841681 +step:2136 train loss:3.930455 +step:2137 train loss:3.897486 +step:2138 train loss:3.852945 +step:2139 train loss:3.855948 +step:2140 train loss:3.861114 +step:2141 train loss:3.907762 +step:2142 train loss:3.878636 +step:2143 train loss:3.801917 +step:2144 train loss:3.908062 +step:2145 train loss:3.878786 +step:2146 train loss:3.914404 +step:2147 train loss:4.017199 +step:2148 train loss:3.820407 +step:2149 train loss:3.830958 +step:2150 train loss:3.858035 +step:2151 train loss:3.892015 +step:2152 train loss:3.885061 +step:2153 train loss:3.925859 +step:2154 train loss:3.842155 +step:2155 train loss:3.922901 +step:2156 train loss:3.845244 +step:2157 train loss:3.922163 +step:2158 train loss:3.958815 +step:2159 train loss:3.884513 +step:2160 train loss:3.961295 +step:2161 train loss:3.858435 +step:2162 train loss:3.863592 +step:2163 train loss:3.839146 +step:2164 train loss:3.864222 +step:2165 train loss:3.839652 +step:2166 train loss:3.957226 +step:2167 train loss:3.865268 +step:2168 train loss:3.879292 +step:2169 train loss:3.829861 +step:2170 train loss:3.974982 +step:2171 train loss:3.935587 +step:2172 train loss:3.870601 +step:2173 train loss:3.861345 +step:2174 train loss:3.923763 +step:2175 train loss:3.856432 +step:2176 train loss:3.933932 +step:2177 train loss:3.906093 +step:2178 train loss:3.831239 +step:2179 train loss:3.897798 +step:2180 train loss:3.914031 +step:2181 train loss:3.846013 +step:2182 train loss:3.894562 +step:2183 train loss:3.888569 +step:2184 train loss:3.841115 +step:2185 train loss:3.819932 +step:2186 train loss:3.860423 +step:2187 train loss:3.870523 +step:2188 train loss:3.920277 +step:2189 train loss:3.809862 +step:2190 train loss:3.855370 +step:2191 train loss:3.912206 +step:2192 train loss:3.840683 +step:2193 train loss:3.811452 +step:2194 train loss:3.819985 +step:2195 train loss:3.842822 +step:2196 train loss:3.850505 +step:2197 train loss:3.830555 +step:2198 train loss:3.853936 +step:2199 train loss:3.924349 +step:2200 train loss:3.856269 +step:2201 train loss:3.862783 +step:2202 train loss:3.826286 +step:2203 train loss:3.848356 +step:2204 train loss:3.877570 +step:2205 train loss:3.859154 +step:2206 train loss:3.859270 +step:2207 train loss:3.852334 +step:2208 train loss:3.830852 +step:2209 train loss:4.110688 +step:2210 train loss:3.883364 +step:2211 train loss:3.874655 +step:2212 train loss:3.847823 +step:2213 train loss:3.929335 +step:2214 train loss:3.921541 +step:2215 train loss:3.846075 +step:2216 train loss:3.815197 +step:2217 train loss:3.842398 +step:2218 train loss:3.844044 +step:2219 train loss:3.878936 +step:2220 train loss:3.819418 +step:2221 train loss:3.853465 +step:2222 train loss:3.868609 +step:2223 train loss:3.908530 
+step:2224 train loss:3.883976 +step:2225 train loss:3.823141 +step:2226 train loss:3.889972 +step:2227 train loss:3.890521 +step:2228 train loss:3.886709 +step:2229 train loss:3.827243 +step:2230 train loss:3.954030 +step:2231 train loss:3.871023 +step:2232 train loss:3.863784 +step:2233 train loss:3.908736 +step:2234 train loss:3.805775 +step:2235 train loss:3.892958 +step:2236 train loss:3.831462 +step:2237 train loss:3.964473 +step:2238 train loss:3.768342 +step:2239 train loss:3.848685 +step:2240 train loss:3.861210 +step:2241 train loss:3.778521 +step:2242 train loss:3.917934 +step:2243 train loss:3.955858 +step:2244 train loss:3.833489 +step:2245 train loss:3.834090 +step:2246 train loss:3.802976 +step:2247 train loss:3.806907 +step:2248 train loss:3.860780 +step:2249 train loss:3.845095 +step:2250 validation loss:3.798993 +step:2250 train loss:3.855095 +step:2251 train loss:3.820736 +step:2252 train loss:3.822917 +step:2253 train loss:3.846717 +step:2254 train loss:3.853558 +step:2255 train loss:3.813115 +step:2256 train loss:3.864117 +step:2257 train loss:3.852481 +step:2258 train loss:3.842731 +step:2259 train loss:3.860209 +step:2260 train loss:3.812680 +step:2261 train loss:3.889127 +step:2262 train loss:3.908766 +step:2263 train loss:3.865776 +step:2264 train loss:3.978823 +step:2265 train loss:3.828573 +step:2266 train loss:3.872983 +step:2267 train loss:3.832436 +step:2268 train loss:3.835110 +step:2269 train loss:3.836443 +step:2270 train loss:3.828021 +step:2271 train loss:3.845091 +step:2272 train loss:3.882519 +step:2273 train loss:3.801274 +step:2274 train loss:3.832266 +step:2275 train loss:3.790091 +step:2276 train loss:3.862606 +step:2277 train loss:3.875961 +step:2278 train loss:3.856509 +step:2279 train loss:3.838926 +step:2280 train loss:3.747499 +step:2281 train loss:3.893720 +step:2282 train loss:3.823330 +step:2283 train loss:3.806472 +step:2284 train loss:3.825377 +step:2285 train loss:3.877030 +step:2286 train loss:3.838901 +step:2287 train loss:3.876015 +step:2288 train loss:3.847809 +step:2289 train loss:3.843634 +step:2290 train loss:3.851539 +step:2291 train loss:3.838918 +step:2292 train loss:3.877020 +step:2293 train loss:3.855738 +step:2294 train loss:3.854512 +step:2295 train loss:3.907380 +step:2296 train loss:3.839890 +step:2297 train loss:3.815557 +step:2298 train loss:3.874207 +step:2299 train loss:3.849135 +step:2300 train loss:3.766687 +step:2301 train loss:3.860303 +step:2302 train loss:3.874776 +step:2303 train loss:3.843617 +step:2304 train loss:3.835624 +step:2305 train loss:3.880541 +step:2306 train loss:3.871802 +step:2307 train loss:3.845330 +step:2308 train loss:3.867980 +step:2309 train loss:3.825491 +step:2310 train loss:3.813451 +step:2311 train loss:3.799622 +step:2312 train loss:3.867795 +step:2313 train loss:3.783315 +step:2314 train loss:3.858704 +step:2315 train loss:3.872759 +step:2316 train loss:3.909609 +step:2317 train loss:3.778068 +step:2318 train loss:3.821363 +step:2319 train loss:3.874511 +step:2320 train loss:3.841690 +step:2321 train loss:3.814151 +step:2322 train loss:3.828705 +step:2323 train loss:3.823133 +step:2324 train loss:3.849892 +step:2325 train loss:3.792289 +step:2326 train loss:3.820232 +step:2327 train loss:3.932950 +step:2328 train loss:3.882244 +step:2329 train loss:3.836325 +step:2330 train loss:3.794178 +step:2331 train loss:3.837207 +step:2332 train loss:3.764355 +step:2333 train loss:3.825670 +step:2334 train loss:3.804044 +step:2335 train loss:3.790397 +step:2336 train loss:4.041621 +step:2337 
train loss:3.819121 +step:2338 train loss:3.858716 +step:2339 train loss:3.857970 +step:2340 train loss:3.871851 +step:2341 train loss:3.861501 +step:2342 train loss:3.812167 +step:2343 train loss:3.836766 +step:2344 train loss:3.878187 +step:2345 train loss:3.833057 +step:2346 train loss:3.861948 +step:2347 train loss:3.784420 +step:2348 train loss:3.843663 +step:2349 train loss:3.792612 +step:2350 train loss:3.849140 +step:2351 train loss:3.857555 +step:2352 train loss:3.860220 +step:2353 train loss:3.816985 +step:2354 train loss:3.864558 +step:2355 train loss:3.852662 +step:2356 train loss:3.890884 +step:2357 train loss:3.799040 +step:2358 train loss:3.814171 +step:2359 train loss:3.838353 +step:2360 train loss:3.861059 +step:2361 train loss:3.895330 +step:2362 train loss:3.724986 +step:2363 train loss:3.915697 +step:2364 train loss:3.866736 +step:2365 train loss:3.837061 +step:2366 train loss:3.789187 +step:2367 train loss:3.854336 +step:2368 train loss:3.845134 +step:2369 train loss:3.831925 +step:2370 train loss:3.847496 +step:2371 train loss:3.904351 +step:2372 train loss:3.756443 +step:2373 train loss:3.897541 +step:2374 train loss:3.878434 +step:2375 train loss:3.863257 +step:2376 train loss:3.855880 +step:2377 train loss:3.799164 +step:2378 train loss:3.846071 +step:2379 train loss:3.830719 +step:2380 train loss:3.892307 +step:2381 train loss:3.986079 +step:2382 train loss:3.772990 +step:2383 train loss:3.824774 +step:2384 train loss:3.850559 +step:2385 train loss:3.751880 +step:2386 train loss:3.910052 +step:2387 train loss:3.791904 +step:2388 train loss:3.841176 +step:2389 train loss:3.862384 +step:2390 train loss:3.810746 +step:2391 train loss:3.834185 +step:2392 train loss:3.860351 +step:2393 train loss:3.816655 +step:2394 train loss:3.837765 +step:2395 train loss:3.831630 +step:2396 train loss:3.838436 +step:2397 train loss:3.812990 +step:2398 train loss:3.868130 +step:2399 train loss:3.831492 +step:2400 train loss:3.811101 +step:2401 train loss:3.850634 +step:2402 train loss:3.801714 +step:2403 train loss:3.851811 +step:2404 train loss:3.810003 +step:2405 train loss:3.812026 +step:2406 train loss:3.837443 +step:2407 train loss:3.782178 +step:2408 train loss:3.827511 +step:2409 train loss:3.816995 +step:2410 train loss:3.815914 +step:2411 train loss:3.888888 +step:2412 train loss:3.875277 +step:2413 train loss:3.913776 +step:2414 train loss:3.803641 +step:2415 train loss:3.795137 +step:2416 train loss:3.812678 +step:2417 train loss:3.849253 +step:2418 train loss:3.867497 +step:2419 train loss:3.798414 +step:2420 train loss:3.818750 +step:2421 train loss:3.849145 +step:2422 train loss:3.899680 +step:2423 train loss:3.830967 +step:2424 train loss:3.798751 +step:2425 train loss:3.858533 +step:2426 train loss:3.798679 +step:2427 train loss:3.820199 +step:2428 train loss:3.904074 +step:2429 train loss:3.856724 +step:2430 train loss:3.947866 +step:2431 train loss:3.859695 +step:2432 train loss:3.830715 +step:2433 train loss:3.805997 +step:2434 train loss:3.792824 +step:2435 train loss:3.848300 +step:2436 train loss:3.808362 +step:2437 train loss:3.837846 +step:2438 train loss:3.879471 +step:2439 train loss:3.866440 +step:2440 train loss:3.807023 +step:2441 train loss:3.842710 +step:2442 train loss:3.836089 +step:2443 train loss:3.798731 +step:2444 train loss:3.836143 +step:2445 train loss:3.832015 +step:2446 train loss:3.802906 +step:2447 train loss:3.784583 +step:2448 train loss:3.834880 +step:2449 train loss:3.865189 +step:2450 train loss:3.824056 +step:2451 train loss:3.742960 
+step:2452 train loss:3.843312 +step:2453 train loss:3.816844 +step:2454 train loss:3.812413 +step:2455 train loss:3.867351 +step:2456 train loss:3.817835 +step:2457 train loss:3.876616 +step:2458 train loss:3.855787 +step:2459 train loss:3.830116 +step:2460 train loss:3.836890 +step:2461 train loss:3.865165 +step:2462 train loss:3.837983 +step:2463 train loss:3.811909 +step:2464 train loss:3.828982 +step:2465 train loss:3.905093 +step:2466 train loss:3.986988 +step:2467 train loss:3.893687 +step:2468 train loss:3.788404 +step:2469 train loss:3.858753 +step:2470 train loss:3.908144 +step:2471 train loss:3.907711 +step:2472 train loss:3.887969 +step:2473 train loss:3.823480 +step:2474 train loss:3.781766 +step:2475 train loss:3.839148 +step:2476 train loss:3.913315 +step:2477 train loss:3.830063 +step:2478 train loss:3.784281 +step:2479 train loss:3.827387 +step:2480 train loss:3.820549 +step:2481 train loss:4.015535 +step:2482 train loss:3.822737 +step:2483 train loss:3.852038 +step:2484 train loss:3.801276 +step:2485 train loss:3.790508 +step:2486 train loss:3.825645 +step:2487 train loss:3.861725 +step:2488 train loss:3.771706 +step:2489 train loss:3.881684 +step:2490 train loss:3.803523 +step:2491 train loss:3.814942 +step:2492 train loss:3.856332 +step:2493 train loss:3.894480 +step:2494 train loss:3.814258 +step:2495 train loss:3.847541 +step:2496 train loss:3.824526 +step:2497 train loss:3.839165 +step:2498 train loss:3.845217 +step:2499 train loss:3.840206 +step:2500 validation loss:3.762103 total_sharp:7.9889e-03 L1_sharp:7.4111e-02 L2_sharp:4.6605e-02 L3_sharp:5.6109e-02 L4_sharp:3.4497e-02 L5_sharp:3.6619e-02 L6_sharp:4.2255e-02 L7_sharp:4.6309e-02 L8_sharp:4.6270e-02 L9_sharp:3.3170e-02 L10_sharp:2.3768e-02 L11_sharp:1.9889e-02 L12_sharp:3.6364e-02 total_fnorm:1.3562e+00 total_l1_linf:8.1338e+03 total_spectral:1.3562e+00 L1_fnorm:6.1517e-02 L2_fnorm:5.8399e-02 L3_fnorm:5.7272e-02 L4_fnorm:5.9320e-02 L5_fnorm:6.0041e-02 L6_fnorm:6.0935e-02 L7_fnorm:6.1049e-02 L8_fnorm:6.1194e-02 L9_fnorm:6.1058e-02 L10_fnorm:6.1053e-02 L11_fnorm:6.0907e-02 L12_fnorm:6.1000e-02 L1_l1linf:2.5227e-01 L2_l1linf:2.9432e-01 L3_l1linf:3.0876e-01 L4_l1linf:2.9514e-01 L5_l1linf:2.8735e-01 L6_l1linf:2.7542e-01 L7_l1linf:2.6735e-01 L8_l1linf:2.8058e-01 L9_l1linf:2.8057e-01 L10_l1linf:2.8904e-01 L11_l1linf:2.8664e-01 L12_l1linf:2.8498e-01 L1_spectral:5.6956e-03 L2_spectral:6.6023e-03 L3_spectral:6.9628e-03 L4_spectral:6.7241e-03 L5_spectral:6.5038e-03 L6_spectral:6.1566e-03 L7_spectral:6.0353e-03 L8_spectral:6.3148e-03 L9_spectral:6.3467e-03 L10_spectral:6.5682e-03 L11_spectral:6.4729e-03 L12_spectral:6.4163e-03 ip_v_neg_g:4.9728e-03 cos_v_neg_g:8.6998e-04 v_norm:1.3562e+00 g_norm:4.2148e+00 hv_norm:1.4075e+00 cos_v_hv:7.6976e-03 hg_norm:3.7882e+02 cos_g_hg:5.0883e-01 v_par:3.7244e-05 v_perp:1.3562e+00 L1_cos_v_neg_g:5.3479e-03 L1_v_norm:6.1517e-02 L2_cos_v_neg_g:5.8696e-03 L2_v_norm:5.8399e-02 L3_cos_v_neg_g:7.7663e-03 L3_v_norm:5.7272e-02 L4_cos_v_neg_g:6.1645e-03 L4_v_norm:5.9320e-02 L5_cos_v_neg_g:6.1471e-03 L5_v_norm:6.0041e-02 L6_cos_v_neg_g:6.2382e-03 L6_v_norm:6.0935e-02 L7_cos_v_neg_g:6.1149e-03 L7_v_norm:6.1049e-02 L8_cos_v_neg_g:5.6248e-03 L8_v_norm:6.1194e-02 L9_cos_v_neg_g:4.6437e-03 L9_v_norm:6.1058e-02 L10_cos_v_neg_g:3.6543e-03 L10_v_norm:6.1053e-02 L11_cos_v_neg_g:3.8685e-03 L11_v_norm:6.0907e-02 L12_cos_v_neg_g:4.2495e-03 L12_v_norm:6.1000e-02 +step:2500 train loss:3.783950 +step:2501 train loss:3.848147 +step:2502 train loss:3.836614 +step:2503 train loss:3.763288 +step:2504 train 
loss:3.800148 +step:2505 train loss:3.823203 +step:2506 train loss:3.788857 +step:2507 train loss:3.815248 +step:2508 train loss:3.760617 +step:2509 train loss:3.783068 +step:2510 train loss:3.780275 +step:2511 train loss:3.825856 +step:2512 train loss:3.871444 +step:2513 train loss:3.818160 +step:2514 train loss:3.803570 +step:2515 train loss:3.949765 +step:2516 train loss:3.827315 +step:2517 train loss:3.891827 +step:2518 train loss:3.852004 +step:2519 train loss:3.825977 +step:2520 train loss:3.832957 +step:2521 train loss:3.802967 +step:2522 train loss:3.845605 +step:2523 train loss:3.761849 +step:2524 train loss:3.821024 +step:2525 train loss:3.810809 +step:2526 train loss:3.862960 +step:2527 train loss:3.852898 +step:2528 train loss:3.836413 +step:2529 train loss:3.857674 +step:2530 train loss:3.835182 +step:2531 train loss:3.773155 +step:2532 train loss:3.876002 +step:2533 train loss:3.768602 +step:2534 train loss:3.865711 +step:2535 train loss:3.817973 +step:2536 train loss:3.742745 +step:2537 train loss:3.856233 +step:2538 train loss:3.833916 +step:2539 train loss:3.851618 +step:2540 train loss:3.786731 +step:2541 train loss:3.814126 +step:2542 train loss:3.826507 +step:2543 train loss:3.815714 +step:2544 train loss:3.802641 +step:2545 train loss:3.788780 +step:2546 train loss:3.755874 +step:2547 train loss:3.801813 +step:2548 train loss:3.822734 +step:2549 train loss:3.826591 +step:2550 train loss:3.955170 +step:2551 train loss:4.031436 +step:2552 train loss:3.765559 +step:2553 train loss:3.799434 +step:2554 train loss:3.944748 +step:2555 train loss:3.830000 +step:2556 train loss:3.756608 +step:2557 train loss:3.851922 +step:2558 train loss:3.843864 +step:2559 train loss:3.798745 +step:2560 train loss:3.783895 +step:2561 train loss:3.881293 +step:2562 train loss:3.833167 +step:2563 train loss:3.766806 +step:2564 train loss:3.835121 +step:2565 train loss:3.817668 +step:2566 train loss:3.796542 +step:2567 train loss:3.777588 +step:2568 train loss:3.830566 +step:2569 train loss:3.839938 +step:2570 train loss:3.791935 +step:2571 train loss:3.874337 +step:2572 train loss:3.836282 +step:2573 train loss:3.766384 +step:2574 train loss:3.813549 +step:2575 train loss:3.859070 +step:2576 train loss:3.811387 +step:2577 train loss:3.773815 +step:2578 train loss:3.813953 +step:2579 train loss:3.792079 +step:2580 train loss:3.764923 +step:2581 train loss:3.777333 +step:2582 train loss:3.787189 +step:2583 train loss:3.810481 +step:2584 train loss:3.827788 +step:2585 train loss:3.789080 +step:2586 train loss:3.814089 +step:2587 train loss:3.744303 +step:2588 train loss:3.777328 +step:2589 train loss:3.855206 +step:2590 train loss:3.778748 +step:2591 train loss:3.834661 +step:2592 train loss:3.886812 +step:2593 train loss:3.841190 +step:2594 train loss:3.802472 +step:2595 train loss:3.810900 +step:2596 train loss:3.852777 +step:2597 train loss:3.733477 +step:2598 train loss:3.889489 +step:2599 train loss:3.836214 +step:2600 train loss:3.868335 +step:2601 train loss:3.802171 +step:2602 train loss:3.835254 +step:2603 train loss:3.831208 +step:2604 train loss:3.751925 +step:2605 train loss:3.880361 +step:2606 train loss:3.828185 +step:2607 train loss:3.787096 +step:2608 train loss:3.761667 +step:2609 train loss:3.786910 +step:2610 train loss:3.812309 +step:2611 train loss:3.848796 +step:2612 train loss:3.810558 +step:2613 train loss:3.784099 +step:2614 train loss:3.774811 +step:2615 train loss:3.771521 +step:2616 train loss:3.846354 +step:2617 train loss:3.805799 +step:2618 train loss:3.770465 
+step:2619 train loss:3.790411 +step:2620 train loss:3.781878 +step:2621 train loss:3.793387 +step:2622 train loss:3.870063 +step:2623 train loss:3.742701 +step:2624 train loss:3.756579 +step:2625 train loss:3.829556 +step:2626 train loss:3.823518 +step:2627 train loss:3.799138 +step:2628 train loss:3.856472 +step:2629 train loss:3.801903 +step:2630 train loss:3.795812 +step:2631 train loss:3.826135 +step:2632 train loss:3.794384 +step:2633 train loss:3.778595 +step:2634 train loss:3.824850 +step:2635 train loss:3.808550 +step:2636 train loss:3.859442 +step:2637 train loss:3.807840 +step:2638 train loss:3.793288 +step:2639 train loss:3.846457 +step:2640 train loss:3.764521 +step:2641 train loss:3.821625 +step:2642 train loss:3.743378 +step:2643 train loss:3.741179 +step:2644 train loss:3.836766 +step:2645 train loss:3.769897 +step:2646 train loss:3.802910 +step:2647 train loss:3.822448 +step:2648 train loss:3.855254 +step:2649 train loss:3.768818 +step:2650 train loss:3.758441 +step:2651 train loss:3.799266 +step:2652 train loss:3.772504 +step:2653 train loss:3.839979 +step:2654 train loss:3.796712 +step:2655 train loss:3.786845 +step:2656 train loss:3.804351 +step:2657 train loss:3.830933 +step:2658 train loss:3.838943 +step:2659 train loss:3.818625 +step:2660 train loss:3.806844 +step:2661 train loss:3.852359 +step:2662 train loss:3.828721 +step:2663 train loss:3.803095 +step:2664 train loss:3.819236 +step:2665 train loss:3.765920 +step:2666 train loss:3.795344 +step:2667 train loss:3.802010 +step:2668 train loss:3.780845 +step:2669 train loss:3.785634 +step:2670 train loss:3.810596 +step:2671 train loss:3.786894 +step:2672 train loss:3.809507 +step:2673 train loss:3.741569 +step:2674 train loss:3.835380 +step:2675 train loss:3.806969 +step:2676 train loss:3.829470 +step:2677 train loss:3.809211 +step:2678 train loss:3.794547 +step:2679 train loss:3.778282 +step:2680 train loss:3.763846 +step:2681 train loss:3.735949 +step:2682 train loss:3.820776 +step:2683 train loss:3.792285 +step:2684 train loss:3.821634 +step:2685 train loss:3.741209 +step:2686 train loss:3.754000 +step:2687 train loss:3.831637 +step:2688 train loss:3.845318 +step:2689 train loss:3.750500 +step:2690 train loss:3.836110 +step:2691 train loss:3.804035 +step:2692 train loss:3.831212 +step:2693 train loss:3.884850 +step:2694 train loss:3.782730 +step:2695 train loss:3.802282 +step:2696 train loss:3.804498 +step:2697 train loss:3.798151 +step:2698 train loss:3.806780 +step:2699 train loss:3.824233 +step:2700 train loss:3.798257 +step:2701 train loss:3.862810 +step:2702 train loss:3.800159 +step:2703 train loss:3.759666 +step:2704 train loss:3.833084 +step:2705 train loss:3.822369 +step:2706 train loss:3.754253 +step:2707 train loss:3.719635 +step:2708 train loss:3.815894 +step:2709 train loss:3.796416 +step:2710 train loss:3.803432 +step:2711 train loss:3.767795 +step:2712 train loss:3.833830 +step:2713 train loss:3.834829 +step:2714 train loss:3.774225 +step:2715 train loss:3.771125 +step:2716 train loss:3.840850 +step:2717 train loss:3.804618 +step:2718 train loss:3.802419 +step:2719 train loss:3.803288 +step:2720 train loss:3.767189 +step:2721 train loss:3.847607 +step:2722 train loss:3.773896 +step:2723 train loss:3.761557 +step:2724 train loss:3.785220 +step:2725 train loss:3.788256 +step:2726 train loss:3.759361 +step:2727 train loss:3.819528 +step:2728 train loss:3.756727 +step:2729 train loss:3.886524 +step:2730 train loss:3.829341 +step:2731 train loss:3.868731 +step:2732 train loss:3.779603 +step:2733 train 
loss:3.778011 +step:2734 train loss:3.822331 +step:2735 train loss:3.822533 +step:2736 train loss:3.746060 +step:2737 train loss:3.801069 +step:2738 train loss:3.858022 +step:2739 train loss:3.779347 +step:2740 train loss:3.779443 +step:2741 train loss:3.767669 +step:2742 train loss:3.688673 +step:2743 train loss:3.797697 +step:2744 train loss:3.817806 +step:2745 train loss:3.772141 +step:2746 train loss:3.793088 +step:2747 train loss:3.772700 +step:2748 train loss:3.734633 +step:2749 train loss:3.797883 +step:2750 validation loss:3.727227 +step:2750 train loss:3.808883 +step:2751 train loss:3.832978 +step:2752 train loss:3.814648 +step:2753 train loss:3.808821 +step:2754 train loss:3.745331 +step:2755 train loss:3.813333 +step:2756 train loss:3.786031 +step:2757 train loss:3.773711 +step:2758 train loss:3.801898 +step:2759 train loss:3.810851 +step:2760 train loss:3.723737 +step:2761 train loss:3.740314 +step:2762 train loss:3.757469 +step:2763 train loss:3.776441 +step:2764 train loss:3.721636 +step:2765 train loss:3.771096 +step:2766 train loss:3.859420 +step:2767 train loss:3.733962 +step:2768 train loss:3.794975 +step:2769 train loss:3.769555 +step:2770 train loss:3.786874 +step:2771 train loss:3.812415 +step:2772 train loss:3.776516 +step:2773 train loss:3.776896 +step:2774 train loss:3.772202 +step:2775 train loss:3.786368 +step:2776 train loss:3.739832 +step:2777 train loss:3.770947 +step:2778 train loss:3.781567 +step:2779 train loss:3.806353 +step:2780 train loss:3.779402 +step:2781 train loss:3.764549 +step:2782 train loss:3.753937 +step:2783 train loss:3.784572 +step:2784 train loss:3.793317 +step:2785 train loss:3.862693 +step:2786 train loss:3.829104 +step:2787 train loss:3.787042 +step:2788 train loss:3.783984 +step:2789 train loss:3.778720 +step:2790 train loss:3.715875 +step:2791 train loss:3.817909 +step:2792 train loss:3.805737 +step:2793 train loss:3.771166 +step:2794 train loss:3.782865 +step:2795 train loss:3.795861 +step:2796 train loss:3.789601 +step:2797 train loss:3.835857 +step:2798 train loss:3.823128 +step:2799 train loss:3.731922 +step:2800 train loss:3.776781 +step:2801 train loss:3.811727 +step:2802 train loss:3.839496 +step:2803 train loss:3.813520 +step:2804 train loss:3.744829 +step:2805 train loss:3.787483 +step:2806 train loss:3.780284 +step:2807 train loss:3.812218 +step:2808 train loss:3.749860 +step:2809 train loss:3.817248 +step:2810 train loss:3.808811 +step:2811 train loss:3.798048 +step:2812 train loss:3.845456 +step:2813 train loss:3.815759 +step:2814 train loss:3.803849 +step:2815 train loss:3.815063 +step:2816 train loss:3.819278 +step:2817 train loss:3.752276 +step:2818 train loss:3.856267 +step:2819 train loss:3.781531 +step:2820 train loss:3.777017 +step:2821 train loss:3.756607 +step:2822 train loss:3.800623 +step:2823 train loss:3.749013 +step:2824 train loss:3.641867 +step:2825 train loss:3.793752 +step:2826 train loss:3.789142 +step:2827 train loss:3.817568 +step:2828 train loss:3.805562 +step:2829 train loss:3.793907 +step:2830 train loss:3.825467 +step:2831 train loss:3.767092 +step:2832 train loss:3.735251 +step:2833 train loss:3.795150 +step:2834 train loss:3.745636 +step:2835 train loss:3.779773 +step:2836 train loss:3.785900 +step:2837 train loss:3.784396 +step:2838 train loss:3.725667 +step:2839 train loss:3.822823 +step:2840 train loss:3.784136 +step:2841 train loss:3.864402 +step:2842 train loss:3.808841 +step:2843 train loss:3.800743 +step:2844 train loss:3.826095 +step:2845 train loss:3.785067 +step:2846 train loss:3.733845 
+step:2847 train loss:3.825690 +step:2848 train loss:3.777742 +step:2849 train loss:3.770521 +step:2850 train loss:3.829242 +step:2851 train loss:3.782866 +step:2852 train loss:3.866174 +step:2853 train loss:3.780218 +step:2854 train loss:3.721639 +step:2855 train loss:3.798823 +step:2856 train loss:3.723624 +step:2857 train loss:3.826948 +step:2858 train loss:3.783308 +step:2859 train loss:3.768298 +step:2860 train loss:3.762600 +step:2861 train loss:3.743755 +step:2862 train loss:3.774337 +step:2863 train loss:3.756865 +step:2864 train loss:3.762044 +step:2865 train loss:3.841328 +step:2866 train loss:3.852280 +step:2867 train loss:3.788857 +step:2868 train loss:3.791836 +step:2869 train loss:3.750796 +step:2870 train loss:3.836260 +step:2871 train loss:3.832910 +step:2872 train loss:3.796019 +step:2873 train loss:3.802796 +step:2874 train loss:3.780704 +step:2875 train loss:3.733616 +step:2876 train loss:3.779292 +step:2877 train loss:3.762156 +step:2878 train loss:3.776640 +step:2879 train loss:3.742726 +step:2880 train loss:3.762166 +step:2881 train loss:3.756846 +step:2882 train loss:3.688313 +step:2883 train loss:3.775566 +step:2884 train loss:3.844325 +step:2885 train loss:3.740170 +step:2886 train loss:3.786858 +step:2887 train loss:3.813440 +step:2888 train loss:3.784255 +step:2889 train loss:3.769000 +step:2890 train loss:3.742805 +step:2891 train loss:3.781243 +step:2892 train loss:3.788836 +step:2893 train loss:3.771358 +step:2894 train loss:3.742782 +step:2895 train loss:3.792665 +step:2896 train loss:3.838049 +step:2897 train loss:3.814626 +step:2898 train loss:3.950463 +step:2899 train loss:3.705470 +step:2900 train loss:3.780393 +step:2901 train loss:3.733022 +step:2902 train loss:3.732709 +step:2903 train loss:3.748113 +step:2904 train loss:3.774447 +step:2905 train loss:3.835558 +step:2906 train loss:3.806528 +step:2907 train loss:3.978410 +step:2908 train loss:3.726044 +step:2909 train loss:3.803956 +step:2910 train loss:3.776838 +step:2911 train loss:3.803491 +step:2912 train loss:3.761272 +step:2913 train loss:3.795314 +step:2914 train loss:3.822992 +step:2915 train loss:3.818820 +step:2916 train loss:3.777375 +step:2917 train loss:3.811680 +step:2918 train loss:3.801768 +step:2919 train loss:3.747429 +step:2920 train loss:3.800537 +step:2921 train loss:3.754757 +step:2922 train loss:3.778183 +step:2923 train loss:3.845214 +step:2924 train loss:3.781927 +step:2925 train loss:3.734045 +step:2926 train loss:3.825365 +step:2927 train loss:3.734427 +step:2928 train loss:3.703057 +step:2929 train loss:3.719737 +step:2930 train loss:3.738200 +step:2931 train loss:3.896738 +step:2932 train loss:3.810095 +step:2933 train loss:3.775802 +step:2934 train loss:3.770314 +step:2935 train loss:3.790431 +step:2936 train loss:3.741534 +step:2937 train loss:3.756929 +step:2938 train loss:3.779229 +step:2939 train loss:3.852533 +step:2940 train loss:3.749982 +step:2941 train loss:3.788278 +step:2942 train loss:3.747050 +step:2943 train loss:4.022487 +step:2944 train loss:3.851876 +step:2945 train loss:3.810080 +step:2946 train loss:3.819065 +step:2947 train loss:3.783873 +step:2948 train loss:3.738537 +step:2949 train loss:3.829870 +step:2950 train loss:3.783927 +step:2951 train loss:3.678593 +step:2952 train loss:3.751164 +step:2953 train loss:3.663383 +step:2954 train loss:3.755454 +step:2955 train loss:3.821426 +step:2956 train loss:3.771205 +step:2957 train loss:3.773474 +step:2958 train loss:3.727317 +step:2959 train loss:3.750696 +step:2960 train loss:3.841391 +step:2961 train 
loss:3.704965 +step:2962 train loss:3.783306 +step:2963 train loss:3.777187 +step:2964 train loss:3.757546 +step:2965 train loss:3.784606 +step:2966 train loss:3.757659 +step:2967 train loss:3.757838 +step:2968 train loss:3.730402 +step:2969 train loss:3.741989 +step:2970 train loss:3.808502 +step:2971 train loss:3.740569 +step:2972 train loss:3.719416 +step:2973 train loss:3.718423 +step:2974 train loss:3.757645 +step:2975 train loss:3.722065 +step:2976 train loss:3.762703 +step:2977 train loss:3.753809 +step:2978 train loss:3.837127 +step:2979 train loss:3.818152 +step:2980 train loss:3.826139 +step:2981 train loss:3.780988 +step:2982 train loss:3.768676 +step:2983 train loss:3.722880 +step:2984 train loss:3.697492 +step:2985 train loss:3.809717 +step:2986 train loss:3.706063 +step:2987 train loss:3.833801 +step:2988 train loss:3.759970 +step:2989 train loss:3.789851 +step:2990 train loss:3.742150 +step:2991 train loss:3.811454 +step:2992 train loss:3.804697 +step:2993 train loss:3.771020 +step:2994 train loss:3.757994 +step:2995 train loss:3.829175 +step:2996 train loss:3.753839 +step:2997 train loss:3.661990 +step:2998 train loss:3.778095 +step:2999 train loss:3.816858 +step:3000 validation loss:3.700527 total_sharp:7.8895e-03 L1_sharp:1.4205e-01 L2_sharp:5.6625e-02 L3_sharp:5.7037e-02 L4_sharp:2.9014e-02 L5_sharp:2.9376e-02 L6_sharp:3.4526e-02 L7_sharp:4.1389e-02 L8_sharp:4.1019e-02 L9_sharp:3.1085e-02 L10_sharp:2.3748e-02 L11_sharp:2.3936e-02 L12_sharp:3.6572e-02 total_fnorm:1.3239e+00 total_l1_linf:7.9602e+03 total_spectral:1.3239e+00 L1_fnorm:6.1651e-02 L2_fnorm:5.8367e-02 L3_fnorm:5.6941e-02 L4_fnorm:5.9360e-02 L5_fnorm:6.0262e-02 L6_fnorm:6.1045e-02 L7_fnorm:6.1098e-02 L8_fnorm:6.1181e-02 L9_fnorm:6.1081e-02 L10_fnorm:6.1227e-02 L11_fnorm:6.1151e-02 L12_fnorm:6.1164e-02 L1_l1linf:2.7116e-01 L2_l1linf:3.2597e-01 L3_l1linf:3.1858e-01 L4_l1linf:3.2670e-01 L5_l1linf:2.9745e-01 L6_l1linf:2.9495e-01 L7_l1linf:2.8034e-01 L8_l1linf:2.8262e-01 L9_l1linf:3.0734e-01 L10_l1linf:3.3010e-01 L11_l1linf:3.3781e-01 L12_l1linf:3.1442e-01 L1_spectral:6.1686e-03 L2_spectral:7.3327e-03 L3_spectral:7.1554e-03 L4_spectral:7.3894e-03 L5_spectral:6.7101e-03 L6_spectral:6.6361e-03 L7_spectral:6.3583e-03 L8_spectral:6.3168e-03 L9_spectral:6.8823e-03 L10_spectral:7.4034e-03 L11_spectral:7.5967e-03 L12_spectral:7.1563e-03 ip_v_neg_g:8.1838e-03 cos_v_neg_g:1.5881e-03 v_norm:1.3239e+00 g_norm:3.8924e+00 hv_norm:1.1408e+00 cos_v_hv:9.1558e-03 hg_norm:2.8977e+02 cos_g_hg:4.6906e-01 v_par:5.4220e-05 v_perp:1.3239e+00 L1_cos_v_neg_g:9.8433e-03 L1_v_norm:6.1651e-02 L2_cos_v_neg_g:1.0693e-02 L2_v_norm:5.8367e-02 L3_cos_v_neg_g:9.7653e-03 L3_v_norm:5.6941e-02 L4_cos_v_neg_g:9.4582e-03 L4_v_norm:5.9360e-02 L5_cos_v_neg_g:1.0006e-02 L5_v_norm:6.0262e-02 L6_cos_v_neg_g:9.3605e-03 L6_v_norm:6.1045e-02 L7_cos_v_neg_g:1.1124e-02 L7_v_norm:6.1098e-02 L8_cos_v_neg_g:1.0441e-02 L8_v_norm:6.1181e-02 L9_cos_v_neg_g:1.0466e-02 L9_v_norm:6.1081e-02 L10_cos_v_neg_g:9.3146e-03 L10_v_norm:6.1227e-02 L11_cos_v_neg_g:8.0394e-03 L11_v_norm:6.1151e-02 L12_cos_v_neg_g:7.5952e-03 L12_v_norm:6.1164e-02 +step:3000 train loss:3.709892 +step:3001 train loss:3.763501 +step:3002 train loss:3.761288 +step:3003 train loss:3.757888 +step:3004 train loss:3.784499 +step:3005 train loss:3.679961 +step:3006 train loss:3.732279 +step:3007 train loss:3.763623 +step:3008 train loss:3.810094 +step:3009 train loss:3.765914 +step:3010 train loss:3.784492 +step:3011 train loss:3.768349 +step:3012 train loss:3.747115 +step:3013 train loss:3.790336 +step:3014 
train loss:3.749633 +step:3015 train loss:3.745863 +step:3016 train loss:3.767524 +step:3017 train loss:3.785572 +step:3018 train loss:3.717898 +step:3019 train loss:3.755974 +step:3020 train loss:3.775244 +step:3021 train loss:3.740578 +step:3022 train loss:3.831538 +step:3023 train loss:3.778826 +step:3024 train loss:3.768080 +step:3025 train loss:3.779397 +step:3026 train loss:3.750782 +step:3027 train loss:3.727291 +step:3028 train loss:3.780214 +step:3029 train loss:3.766209 +step:3030 train loss:3.741050 +step:3031 train loss:3.723210 +step:3032 train loss:3.711868 +step:3033 train loss:3.737938 +step:3034 train loss:3.783740 +step:3035 train loss:3.762079 +step:3036 train loss:3.723771 +step:3037 train loss:3.686959 +step:3038 train loss:3.804147 +step:3039 train loss:3.681643 +step:3040 train loss:3.669148 +step:3041 train loss:3.798074 +step:3042 train loss:3.732569 +step:3043 train loss:3.793022 +step:3044 train loss:3.687866 +step:3045 train loss:3.733938 +step:3046 train loss:3.708671 +step:3047 train loss:3.740009 +step:3048 train loss:3.703667 +step:3049 train loss:3.784603 +step:3050 train loss:3.671405 +step:3051 train loss:3.687725 +step:3052 train loss:3.706590 +step:3053 train loss:3.778232 +step:3054 train loss:3.850400 +step:3055 train loss:3.688382 +step:3056 train loss:3.720004 +step:3057 train loss:3.755033 +step:3058 train loss:3.703178 +step:3059 train loss:3.731445 +step:3060 train loss:3.728972 +step:3061 train loss:3.713460 +step:3062 train loss:3.765454 +step:3063 train loss:3.749413 +step:3064 train loss:3.774891 +step:3065 train loss:3.789145 +step:3066 train loss:3.689508 +step:3067 train loss:3.735853 +step:3068 train loss:3.791147 +step:3069 train loss:3.804135 +step:3070 train loss:3.733459 +step:3071 train loss:3.750530 +step:3072 train loss:3.752860 +step:3073 train loss:3.788251 +step:3074 train loss:3.726256 +step:3075 train loss:3.761183 +step:3076 train loss:3.696927 +step:3077 train loss:3.695356 +step:3078 train loss:3.722999 +step:3079 train loss:3.770091 +step:3080 train loss:3.762639 +step:3081 train loss:3.805983 +step:3082 train loss:3.781985 +step:3083 train loss:3.710950 +step:3084 train loss:3.793711 +step:3085 train loss:3.720373 +step:3086 train loss:3.781273 +step:3087 train loss:3.750760 +step:3088 train loss:3.831586 +step:3089 train loss:3.706783 +step:3090 train loss:3.778505 +step:3091 train loss:3.702692 +step:3092 train loss:3.726951 +step:3093 train loss:3.747298 +step:3094 train loss:3.733055 +step:3095 train loss:3.816787 +step:3096 train loss:3.746025 +step:3097 train loss:3.757379 +step:3098 train loss:3.735193 +step:3099 train loss:3.742767 +step:3100 train loss:3.768577 +step:3101 train loss:3.852522 +step:3102 train loss:3.776338 +step:3103 train loss:3.701221 +step:3104 train loss:3.784068 +step:3105 train loss:3.756068 +step:3106 train loss:3.750420 +step:3107 train loss:3.733245 +step:3108 train loss:3.708297 +step:3109 train loss:3.763073 +step:3110 train loss:3.692886 +step:3111 train loss:3.729114 +step:3112 train loss:3.663919 +step:3113 train loss:3.786816 +step:3114 train loss:3.696531 +step:3115 train loss:3.741353 +step:3116 train loss:3.618885 +step:3117 train loss:3.638333 +step:3118 train loss:3.739529 +step:3119 train loss:3.745925 +step:3120 train loss:3.748629 +step:3121 train loss:3.692170 +step:3122 train loss:3.776046 +step:3123 train loss:3.693929 +step:3124 train loss:3.756874 +step:3125 train loss:3.770127 +step:3126 train loss:3.873333 +step:3127 train loss:3.721614 +step:3128 train loss:3.749099 
+step:3129 train loss:3.730785 +step:3130 train loss:3.708787 +step:3131 train loss:3.788627 +step:3132 train loss:3.773077 +step:3133 train loss:3.741896 +step:3134 train loss:3.639426 +step:3135 train loss:3.732194 +step:3136 train loss:3.706924 +step:3137 train loss:3.839678 +step:3138 train loss:3.741248 +step:3139 train loss:3.722861 +step:3140 train loss:3.742970 +step:3141 train loss:3.747232 +step:3142 train loss:3.682563 +step:3143 train loss:3.767260 +step:3144 train loss:3.715259 +step:3145 train loss:3.698814 +step:3146 train loss:3.712510 +step:3147 train loss:3.824074 +step:3148 train loss:3.729140 +step:3149 train loss:3.782864 +step:3150 train loss:3.766474 +step:3151 train loss:3.737002 +step:3152 train loss:3.733211 +step:3153 train loss:3.695218 +step:3154 train loss:3.776708 +step:3155 train loss:3.719329 +step:3156 train loss:3.769296 +step:3157 train loss:3.773320 +step:3158 train loss:3.744402 +step:3159 train loss:3.681454 +step:3160 train loss:3.732539 +step:3161 train loss:3.702109 +step:3162 train loss:3.760416 +step:3163 train loss:3.742248 +step:3164 train loss:3.720039 +step:3165 train loss:3.737507 +step:3166 train loss:3.777108 +step:3167 train loss:3.736736 +step:3168 train loss:3.815109 +step:3169 train loss:3.729981 +step:3170 train loss:3.711297 +step:3171 train loss:3.700064 +step:3172 train loss:3.705419 +step:3173 train loss:3.650210 +step:3174 train loss:3.764976 +step:3175 train loss:3.730572 +step:3176 train loss:3.741769 +step:3177 train loss:3.708401 +step:3178 train loss:3.685689 +step:3179 train loss:3.762943 +step:3180 train loss:3.692136 +step:3181 train loss:3.774392 +step:3182 train loss:3.782833 +step:3183 train loss:3.725250 +step:3184 train loss:3.722975 +step:3185 train loss:3.782156 +step:3186 train loss:3.738830 +step:3187 train loss:3.758176 +step:3188 train loss:3.799165 +step:3189 train loss:3.746611 +step:3190 train loss:3.698919 +step:3191 train loss:3.706139 +step:3192 train loss:3.668642 +step:3193 train loss:3.748515 +step:3194 train loss:3.712842 +step:3195 train loss:3.698882 +step:3196 train loss:3.748091 +step:3197 train loss:3.712526 +step:3198 train loss:3.746411 +step:3199 train loss:3.728665 +step:3200 train loss:3.731689 +step:3201 train loss:3.699329 +step:3202 train loss:3.757025 +step:3203 train loss:3.819566 +step:3204 train loss:3.786507 +step:3205 train loss:3.630092 +step:3206 train loss:3.911400 +step:3207 train loss:3.672427 +step:3208 train loss:3.737311 +step:3209 train loss:3.729650 +step:3210 train loss:3.711038 +step:3211 train loss:3.737355 +step:3212 train loss:3.747906 +step:3213 train loss:3.691068 +step:3214 train loss:3.794300 +step:3215 train loss:3.796592 +step:3216 train loss:3.668613 +step:3217 train loss:3.748873 +step:3218 train loss:3.790550 +step:3219 train loss:3.704622 +step:3220 train loss:3.776289 +step:3221 train loss:3.688182 +step:3222 train loss:3.732897 +step:3223 train loss:3.748443 +step:3224 train loss:3.764726 +step:3225 train loss:3.684837 +step:3226 train loss:3.718251 +step:3227 train loss:3.744060 +step:3228 train loss:3.741027 +step:3229 train loss:3.774152 +step:3230 train loss:3.786105 +step:3231 train loss:3.724114 +step:3232 train loss:3.734090 +step:3233 train loss:3.707851 +step:3234 train loss:3.695763 +step:3235 train loss:3.696854 +step:3236 train loss:3.718104 +step:3237 train loss:3.716001 +step:3238 train loss:3.733525 +step:3239 train loss:3.635616 +step:3240 train loss:3.750858 +step:3241 train loss:3.747252 +step:3242 train loss:3.801921 +step:3243 train 
loss:3.742961 +step:3244 train loss:3.758840 +step:3245 train loss:3.663109 +step:3246 train loss:3.790320 +step:3247 train loss:3.732218 +step:3248 train loss:3.754555 +step:3249 train loss:3.698996 +step:3250 validation loss:3.666382 +step:3250 train loss:3.699032 +step:3251 train loss:3.809241 +step:3252 train loss:3.738618 +step:3253 train loss:3.739991 +step:3254 train loss:3.807986 +step:3255 train loss:3.749424 +step:3256 train loss:3.745266 +step:3257 train loss:3.724295 +step:3258 train loss:3.658549 +step:3259 train loss:3.636183 +step:3260 train loss:3.750757 +step:3261 train loss:3.731775 +step:3262 train loss:3.721142 +step:3263 train loss:3.705040 +step:3264 train loss:3.816379 +step:3265 train loss:3.726679 +step:3266 train loss:3.753240 +step:3267 train loss:3.717464 +step:3268 train loss:3.721298 +step:3269 train loss:3.733912 +step:3270 train loss:3.762469 +step:3271 train loss:3.726448 +step:3272 train loss:3.703312 +step:3273 train loss:3.712953 +step:3274 train loss:3.847172 +step:3275 train loss:3.719078 +step:3276 train loss:3.787141 +step:3277 train loss:3.725807 +step:3278 train loss:3.701041 +step:3279 train loss:3.728862 +step:3280 train loss:3.757365 +step:3281 train loss:3.681665 +step:3282 train loss:3.752784 +step:3283 train loss:3.723397 +step:3284 train loss:3.685826 +step:3285 train loss:3.703417 +step:3286 train loss:3.734980 +step:3287 train loss:3.673333 +step:3288 train loss:3.752930 +step:3289 train loss:3.697433 +step:3290 train loss:3.729630 +step:3291 train loss:3.688661 +step:3292 train loss:3.710708 +step:3293 train loss:3.756611 +step:3294 train loss:3.765990 +step:3295 train loss:3.678669 +step:3296 train loss:3.732568 +step:3297 train loss:3.695838 +step:3298 train loss:3.695418 +step:3299 train loss:3.821738 +step:3300 train loss:3.659797 +step:3301 train loss:3.743745 +step:3302 train loss:3.709551 +step:3303 train loss:3.728426 +step:3304 train loss:3.696404 +step:3305 train loss:3.785292 +step:3306 train loss:3.718900 +step:3307 train loss:3.740925 +step:3308 train loss:3.696600 +step:3309 train loss:3.754261 +step:3310 train loss:3.670917 +step:3311 train loss:3.722993 +step:3312 train loss:3.694234 +step:3313 train loss:3.728960 +step:3314 train loss:3.723954 +step:3315 train loss:3.801839 +step:3316 train loss:3.656387 +step:3317 train loss:3.746819 +step:3318 train loss:3.759069 +step:3319 train loss:3.684821 +step:3320 train loss:3.844803 +step:3321 train loss:3.747646 +step:3322 train loss:3.746414 +step:3323 train loss:3.852216 +step:3324 train loss:3.770670 +step:3325 train loss:3.741860 +step:3326 train loss:3.733720 +step:3327 train loss:3.747012 +step:3328 train loss:3.725483 +step:3329 train loss:3.723590 +step:3330 train loss:3.716751 +step:3331 train loss:3.764136 +step:3332 train loss:3.785683 +step:3333 train loss:3.751244 +step:3334 train loss:3.682122 +step:3335 train loss:3.695093 +step:3336 train loss:3.731711 +step:3337 train loss:3.729964 +step:3338 train loss:3.719196 +step:3339 train loss:3.712176 +step:3340 train loss:3.747994 +step:3341 train loss:3.696324 +step:3342 train loss:3.746453 +step:3343 train loss:3.682130 +step:3344 train loss:3.741848 +step:3345 train loss:3.693122 +step:3346 train loss:3.704417 +step:3347 train loss:3.712178 +step:3348 train loss:3.722852 +step:3349 train loss:3.717158 +step:3350 train loss:3.742405 +step:3351 train loss:3.796870 +step:3352 train loss:3.736334 +step:3353 train loss:3.834708 +step:3354 train loss:3.681091 +step:3355 train loss:3.786604 +step:3356 train loss:3.736937 
+step:3357 train loss:3.749440 +step:3358 train loss:3.687188 +step:3359 train loss:3.721571 +step:3360 train loss:3.712355 +step:3361 train loss:3.713828 +step:3362 train loss:3.704078 +step:3363 train loss:3.706069 +step:3364 train loss:3.687118 +step:3365 train loss:3.724232 +step:3366 train loss:3.756071 +step:3367 train loss:3.708401 +step:3368 train loss:3.803112 +step:3369 train loss:3.714918 +step:3370 train loss:3.784986 +step:3371 train loss:3.763889 +step:3372 train loss:3.729884 +step:3373 train loss:3.740223 +step:3374 train loss:3.787347 +step:3375 train loss:3.718485 +step:3376 train loss:3.725923 +step:3377 train loss:3.713331 +step:3378 train loss:3.690175 +step:3379 train loss:3.769156 +step:3380 train loss:3.749357 +step:3381 train loss:3.733735 +step:3382 train loss:3.749440 +step:3383 train loss:3.759425 +step:3384 train loss:3.689283 +step:3385 train loss:3.739019 +step:3386 train loss:3.718933 +step:3387 train loss:3.791759 +step:3388 train loss:3.695410 +step:3389 train loss:3.891573 +step:3390 train loss:3.629704 +step:3391 train loss:3.716394 +step:3392 train loss:3.700166 +step:3393 train loss:3.731191 +step:3394 train loss:3.688530 +step:3395 train loss:3.759541 +step:3396 train loss:3.672750 +step:3397 train loss:3.749368 +step:3398 train loss:3.715371 +step:3399 train loss:3.735013 +step:3400 train loss:3.684000 +step:3401 train loss:3.717908 +step:3402 train loss:3.877845 +step:3403 train loss:3.764073 +step:3404 train loss:3.878319 +step:3405 train loss:3.734409 +step:3406 train loss:3.713196 +step:3407 train loss:3.710228 +step:3408 train loss:3.693031 +step:3409 train loss:3.659137 +step:3410 train loss:3.693351 +step:3411 train loss:3.762072 +step:3412 train loss:3.686632 +step:3413 train loss:3.675985 +step:3414 train loss:3.714255 +step:3415 train loss:3.689246 +step:3416 train loss:3.692932 +step:3417 train loss:3.775332 +step:3418 train loss:3.772043 +step:3419 train loss:3.733836 +step:3420 train loss:3.704979 +step:3421 train loss:3.739290 +step:3422 train loss:3.754241 +step:3423 train loss:3.774614 +step:3424 train loss:3.655837 +step:3425 train loss:3.679386 +step:3426 train loss:3.675584 +step:3427 train loss:3.737338 +step:3428 train loss:3.662225 +step:3429 train loss:3.723775 +step:3430 train loss:3.689906 +step:3431 train loss:3.745358 +step:3432 train loss:3.728814 +step:3433 train loss:3.689573 +step:3434 train loss:3.775016 +step:3435 train loss:3.711657 +step:3436 train loss:3.806573 +step:3437 train loss:3.628625 +step:3438 train loss:3.739322 +step:3439 train loss:3.711997 +step:3440 train loss:3.808897 +step:3441 train loss:3.701375 +step:3442 train loss:3.769693 +step:3443 train loss:3.703530 +step:3444 train loss:3.721742 +step:3445 train loss:3.769600 +step:3446 train loss:3.674376 +step:3447 train loss:3.747131 +step:3448 train loss:3.703027 +step:3449 train loss:3.736958 +step:3450 train loss:3.642814 +step:3451 train loss:3.760669 +step:3452 train loss:3.711407 +step:3453 train loss:3.761836 +step:3454 train loss:3.788097 +step:3455 train loss:3.849277 +step:3456 train loss:3.786084 +step:3457 train loss:3.779855 +step:3458 train loss:3.707857 +step:3459 train loss:3.717764 +step:3460 train loss:3.660757 +step:3461 train loss:3.725535 +step:3462 train loss:3.726654 +step:3463 train loss:3.697712 +step:3464 train loss:3.748898 +step:3465 train loss:3.681735 +step:3466 train loss:3.748060 +step:3467 train loss:3.703991 +step:3468 train loss:3.719351 +step:3469 train loss:3.731835 +step:3470 train loss:3.713488 +step:3471 train 
loss:3.751297 +step:3472 train loss:3.638144 +step:3473 train loss:3.759263 +step:3474 train loss:3.658413 +step:3475 train loss:3.738290 +step:3476 train loss:3.707999 +step:3477 train loss:3.728076 +step:3478 train loss:3.705145 +step:3479 train loss:3.734771 +step:3480 train loss:3.752853 +step:3481 train loss:3.734746 +step:3482 train loss:3.716028 +step:3483 train loss:3.858273 +step:3484 train loss:3.700269 +step:3485 train loss:3.686017 +step:3486 train loss:3.738173 +step:3487 train loss:3.779828 +step:3488 train loss:3.683918 +step:3489 train loss:3.736041 +step:3490 train loss:3.704900 +step:3491 train loss:3.743536 +step:3492 train loss:3.777709 +step:3493 train loss:3.749263 +step:3494 train loss:3.742497 +step:3495 train loss:3.719894 +step:3496 train loss:3.687814 +step:3497 train loss:3.794967 +step:3498 train loss:3.743221 +step:3499 train loss:3.676058 +step:3500 validation loss:3.645944 total_sharp:6.9212e-03 L1_sharp:5.2115e-02 L2_sharp:3.2282e-02 L3_sharp:6.0412e-02 L4_sharp:3.0994e-02 L5_sharp:3.4648e-02 L6_sharp:4.1742e-02 L7_sharp:4.4304e-02 L8_sharp:4.2147e-02 L9_sharp:2.9985e-02 L10_sharp:2.3370e-02 L11_sharp:2.0148e-02 L12_sharp:2.3161e-02 total_fnorm:1.3469e+00 total_l1_linf:8.0798e+03 total_spectral:1.3469e+00 L1_fnorm:6.1472e-02 L2_fnorm:5.8978e-02 L3_fnorm:5.7755e-02 L4_fnorm:5.9338e-02 L5_fnorm:6.0383e-02 L6_fnorm:6.1293e-02 L7_fnorm:6.1208e-02 L8_fnorm:6.1177e-02 L9_fnorm:6.1120e-02 L10_fnorm:6.1213e-02 L11_fnorm:6.1052e-02 L12_fnorm:6.1064e-02 L1_l1linf:2.7112e-01 L2_l1linf:3.1988e-01 L3_l1linf:3.2825e-01 L4_l1linf:3.2091e-01 L5_l1linf:3.2353e-01 L6_l1linf:3.3533e-01 L7_l1linf:3.0568e-01 L8_l1linf:3.0450e-01 L9_l1linf:3.2004e-01 L10_l1linf:3.3705e-01 L11_l1linf:3.2856e-01 L12_l1linf:2.9715e-01 L1_spectral:6.1539e-03 L2_spectral:7.1802e-03 L3_spectral:7.3771e-03 L4_spectral:7.2275e-03 L5_spectral:7.2374e-03 L6_spectral:7.5061e-03 L7_spectral:6.8407e-03 L8_spectral:6.7881e-03 L9_spectral:7.1173e-03 L10_spectral:7.5130e-03 L11_spectral:7.3706e-03 L12_spectral:6.8111e-03 ip_v_neg_g:7.6406e-03 cos_v_neg_g:1.3974e-03 v_norm:1.3469e+00 g_norm:4.0597e+00 hv_norm:1.0563e+00 cos_v_hv:8.8247e-03 hg_norm:3.4883e+02 cos_g_hg:5.5662e-01 v_par:4.6749e-05 v_perp:1.3469e+00 L1_cos_v_neg_g:1.1150e-02 L1_v_norm:6.1472e-02 L2_cos_v_neg_g:1.2411e-02 L2_v_norm:5.8978e-02 L3_cos_v_neg_g:1.2439e-02 L3_v_norm:5.7755e-02 L4_cos_v_neg_g:1.0207e-02 L4_v_norm:5.9338e-02 L5_cos_v_neg_g:9.6136e-03 L5_v_norm:6.0383e-02 L6_cos_v_neg_g:9.6650e-03 L6_v_norm:6.1293e-02 L7_cos_v_neg_g:9.0743e-03 L7_v_norm:6.1208e-02 L8_cos_v_neg_g:9.0192e-03 L8_v_norm:6.1177e-02 L9_cos_v_neg_g:7.7472e-03 L9_v_norm:6.1120e-02 L10_cos_v_neg_g:6.6256e-03 L10_v_norm:6.1213e-02 L11_cos_v_neg_g:5.8336e-03 L11_v_norm:6.1052e-02 L12_cos_v_neg_g:4.4431e-03 L12_v_norm:6.1064e-02 +step:3500 train loss:3.696384 +step:3501 train loss:3.821790 +step:3502 train loss:3.802731 +step:3503 train loss:3.753620 +step:3504 train loss:3.706203 +step:3505 train loss:3.720850 +step:3506 train loss:3.619116 +step:3507 train loss:3.737847 +step:3508 train loss:3.682790 +step:3509 train loss:3.748702 +step:3510 train loss:3.682215 +step:3511 train loss:3.718827 +step:3512 train loss:3.857331 +step:3513 train loss:3.678459 +step:3514 train loss:3.693722 +step:3515 train loss:3.942614 +step:3516 train loss:3.737078 +step:3517 train loss:3.696896 +step:3518 train loss:3.699409 +step:3519 train loss:3.694419 +step:3520 train loss:3.726007 +step:3521 train loss:3.717282 +step:3522 train loss:3.626169 +step:3523 train loss:3.732183 +step:3524 
train loss:3.713559 +step:3525 train loss:3.701131 +step:3526 train loss:3.725030 +step:3527 train loss:3.677426 +step:3528 train loss:3.728836 +step:3529 train loss:3.707569 +step:3530 train loss:3.700171 +step:3531 train loss:3.692848 +step:3532 train loss:3.875836 +step:3533 train loss:3.696563 +step:3534 train loss:3.713417 +step:3535 train loss:3.691180 +step:3536 train loss:3.688590 +step:3537 train loss:3.698011 +step:3538 train loss:3.730497 +step:3539 train loss:3.679298 +step:3540 train loss:3.744343 +step:3541 train loss:3.710392 +step:3542 train loss:3.722649 +step:3543 train loss:3.642526 +step:3544 train loss:3.659735 +step:3545 train loss:3.665587 +step:3546 train loss:3.731372 +step:3547 train loss:3.738292 +step:3548 train loss:3.713126 +step:3549 train loss:3.710902 +step:3550 train loss:3.696800 +step:3551 train loss:3.726611 +step:3552 train loss:3.621176 +step:3553 train loss:3.743275 +step:3554 train loss:3.732786 +step:3555 train loss:3.722253 +step:3556 train loss:3.745484 +step:3557 train loss:3.733552 +step:3558 train loss:3.707395 +step:3559 train loss:3.652064 +step:3560 train loss:3.745018 +step:3561 train loss:3.736271 +step:3562 train loss:3.909402 +step:3563 train loss:3.769579 +step:3564 train loss:3.727963 +step:3565 train loss:3.729681 +step:3566 train loss:3.703976 +step:3567 train loss:3.644072 +step:3568 train loss:3.670282 +step:3569 train loss:3.756307 +step:3570 train loss:3.779579 +step:3571 train loss:3.759342 +step:3572 train loss:3.748065 +step:3573 train loss:3.706092 +step:3574 train loss:3.705163 +step:3575 train loss:3.696517 +step:3576 train loss:3.678973 +step:3577 train loss:3.689295 +step:3578 train loss:3.772762 +step:3579 train loss:3.680127 +step:3580 train loss:3.764042 +step:3581 train loss:3.703253 +step:3582 train loss:3.757976 +step:3583 train loss:3.698400 +step:3584 train loss:3.670952 +step:3585 train loss:3.720494 +step:3586 train loss:3.668485 +step:3587 train loss:3.767121 +step:3588 train loss:3.896410 +step:3589 train loss:3.727407 +step:3590 train loss:3.710777 +step:3591 train loss:3.719826 +step:3592 train loss:3.680366 +step:3593 train loss:3.653087 +step:3594 train loss:3.707078 +step:3595 train loss:3.679190 +step:3596 train loss:3.757242 +step:3597 train loss:3.735189 +step:3598 train loss:3.685127 +step:3599 train loss:3.738788 +step:3600 train loss:3.676626 +step:3601 train loss:3.692959 +step:3602 train loss:3.681739 +step:3603 train loss:3.696657 +step:3604 train loss:3.724418 +step:3605 train loss:3.827643 +step:3606 train loss:3.728098 +step:3607 train loss:3.709861 +step:3608 train loss:3.728654 +step:3609 train loss:3.711449 +step:3610 train loss:3.681809 +step:3611 train loss:3.684025 +step:3612 train loss:3.750993 +step:3613 train loss:3.722422 +step:3614 train loss:3.667252 +step:3615 train loss:3.706721 +step:3616 train loss:3.660365 +step:3617 train loss:3.735549 +step:3618 train loss:3.690785 +step:3619 train loss:3.678164 +step:3620 train loss:3.696500 +step:3621 train loss:3.656022 +step:3622 train loss:3.763815 +step:3623 train loss:3.752163 +step:3624 train loss:3.726071 +step:3625 train loss:3.703737 +step:3626 train loss:3.711253 +step:3627 train loss:3.708096 +step:3628 train loss:3.693821 +step:3629 train loss:3.699160 +step:3630 train loss:3.776894 +step:3631 train loss:3.705309 +step:3632 train loss:3.737633 +step:3633 train loss:3.696773 +step:3634 train loss:3.697989 +step:3635 train loss:3.687023 +step:3636 train loss:3.756614 +step:3637 train loss:3.836340 +step:3638 train loss:3.749231 
+step:3639 train loss:3.738394 +step:3640 train loss:3.744834 +step:3641 train loss:3.785194 +step:3642 train loss:3.679394 +step:3643 train loss:3.844989 +step:3644 train loss:3.741686 +step:3645 train loss:3.707930 +step:3646 train loss:3.832070 +step:3647 train loss:3.718843 +step:3648 train loss:3.713051 +step:3649 train loss:3.661752 +step:3650 train loss:3.704199 +step:3651 train loss:3.699306 +step:3652 train loss:3.687392 +step:3653 train loss:3.621415 +step:3654 train loss:3.683825 +step:3655 train loss:3.673449 +step:3656 train loss:3.705540 +step:3657 train loss:3.723020 +step:3658 train loss:3.719132 +step:3659 train loss:3.705792 +step:3660 train loss:3.675647 +step:3661 train loss:3.705320 +step:3662 train loss:3.677455 +step:3663 train loss:3.716564 +step:3664 train loss:3.672042 +step:3665 train loss:3.713046 +step:3666 train loss:3.754722 +step:3667 train loss:3.840794 +step:3668 train loss:3.723318 +step:3669 train loss:3.681645 +step:3670 train loss:3.727814 +step:3671 train loss:3.689053 +step:3672 train loss:3.726234 +step:3673 train loss:3.706864 +step:3674 train loss:3.723705 +step:3675 train loss:3.736548 +step:3676 train loss:3.699807 +step:3677 train loss:3.660750 +step:3678 train loss:3.724164 +step:3679 train loss:3.623641 +step:3680 train loss:3.725382 +step:3681 train loss:3.757076 +step:3682 train loss:3.737859 +step:3683 train loss:3.682106 +step:3684 train loss:3.680835 +step:3685 train loss:3.708679 +step:3686 train loss:3.737268 +step:3687 train loss:3.691090 +step:3688 train loss:3.666708 +step:3689 train loss:3.700914 +step:3690 train loss:3.692256 +step:3691 train loss:3.674397 +step:3692 train loss:3.730994 +step:3693 train loss:3.863116 +step:3694 train loss:3.680237 +step:3695 train loss:3.737020 +step:3696 train loss:3.700158 +step:3697 train loss:3.693004 +step:3698 train loss:3.633024 +step:3699 train loss:3.658063 +step:3700 train loss:3.689445 +step:3701 train loss:3.708078 +step:3702 train loss:3.728168 +step:3703 train loss:3.687456 +step:3704 train loss:3.729137 +step:3705 train loss:3.710729 +step:3706 train loss:3.663421 +step:3707 train loss:3.718110 +step:3708 train loss:3.693313 +step:3709 train loss:3.612924 +step:3710 train loss:3.740644 +step:3711 train loss:3.687318 +step:3712 train loss:3.728915 +step:3713 train loss:3.677406 +step:3714 train loss:3.696000 +step:3715 train loss:3.811150 +step:3716 train loss:3.717247 +step:3717 train loss:3.694364 +step:3718 train loss:3.697570 +step:3719 train loss:3.694483 +step:3720 train loss:3.703777 +step:3721 train loss:3.763006 +step:3722 train loss:3.772559 +step:3723 train loss:3.661204 +step:3724 train loss:3.718115 +step:3725 train loss:3.695685 +step:3726 train loss:3.715547 +step:3727 train loss:3.787687 +step:3728 train loss:3.752041 +step:3729 train loss:3.651414 +step:3730 train loss:3.669021 +step:3731 train loss:3.692348 +step:3732 train loss:3.843743 +step:3733 train loss:3.701795 +step:3734 train loss:3.706019 +step:3735 train loss:3.647262 +step:3736 train loss:3.702665 +step:3737 train loss:3.754025 +step:3738 train loss:3.776932 +step:3739 train loss:3.690668 +step:3740 train loss:3.595080 +step:3741 train loss:3.801994 +step:3742 train loss:3.711118 +step:3743 train loss:3.687177 +step:3744 train loss:3.681354 +step:3745 train loss:3.704634 +step:3746 train loss:3.667375 +step:3747 train loss:3.688911 +step:3748 train loss:3.728077 +step:3749 train loss:3.712252 +step:3750 validation loss:3.628232 +step:3750 train loss:3.720342 +step:3751 train loss:3.811852 +step:3752 
train loss:3.747017 +step:3753 train loss:3.659864 +step:3754 train loss:3.714243 +step:3755 train loss:3.889467 +step:3756 train loss:3.671184 +step:3757 train loss:3.667105 +step:3758 train loss:3.696395 +step:3759 train loss:3.642374 +step:3760 train loss:3.641423 +step:3761 train loss:3.693154 +step:3762 train loss:3.683914 +step:3763 train loss:3.687978 +step:3764 train loss:3.678287 +step:3765 train loss:3.677379 +step:3766 train loss:3.647640 +step:3767 train loss:3.731447 +step:3768 train loss:3.671172 +step:3769 train loss:3.930574 +step:3770 train loss:3.722918 +step:3771 train loss:3.734146 +step:3772 train loss:3.691303 +step:3773 train loss:3.685238 +step:3774 train loss:3.690997 +step:3775 train loss:3.685149 +step:3776 train loss:3.686732 +step:3777 train loss:3.646778 +step:3778 train loss:3.664377 +step:3779 train loss:3.647156 +step:3780 train loss:3.730245 +step:3781 train loss:3.697984 +step:3782 train loss:3.617145 +step:3783 train loss:3.720587 +step:3784 train loss:3.730793 +step:3785 train loss:3.641745 +step:3786 train loss:3.750923 +step:3787 train loss:3.660708 +step:3788 train loss:3.673759 +step:3789 train loss:3.588880 +step:3790 train loss:3.698511 +step:3791 train loss:3.719584 +step:3792 train loss:3.687869 +step:3793 train loss:3.689677 +step:3794 train loss:3.718697 +step:3795 train loss:3.685803 +step:3796 train loss:3.703014 +step:3797 train loss:3.679777 +step:3798 train loss:3.688716 +step:3799 train loss:3.696779 +step:3800 train loss:3.606619 +step:3801 train loss:3.721138 +step:3802 train loss:3.649812 +step:3803 train loss:3.729423 +step:3804 train loss:3.740362 +step:3805 train loss:3.701180 +step:3806 train loss:3.717160 +step:3807 train loss:3.736721 +step:3808 train loss:3.693738 +step:3809 train loss:3.710208 +step:3810 train loss:3.707466 +step:3811 train loss:3.694825 +step:3812 train loss:3.695815 +step:3813 train loss:3.652218 +step:3814 train loss:3.693527 +step:3815 train loss:3.699333 +step:3816 train loss:3.714812 +step:3817 train loss:3.733660 +step:3818 train loss:3.706382 +step:3819 train loss:3.719294 +step:3820 train loss:3.718906 +step:3821 train loss:3.675609 +step:3822 train loss:3.758052 +step:3823 train loss:3.653435 +step:3824 train loss:3.666388 +step:3825 train loss:3.674699 +step:3826 train loss:3.754204 +step:3827 train loss:3.762872 +step:3828 train loss:3.650555 +step:3829 train loss:3.671150 +step:3830 train loss:3.731335 +step:3831 train loss:3.665373 +step:3832 train loss:3.725014 +step:3833 train loss:3.666500 +step:3834 train loss:3.631399 +step:3835 train loss:3.674047 +step:3836 train loss:3.649196 +step:3837 train loss:3.718519 +step:3838 train loss:3.671887 +step:3839 train loss:3.713843 +step:3840 train loss:3.725583 +step:3841 train loss:3.671801 +step:3842 train loss:3.706158 +step:3843 train loss:3.718765 +step:3844 train loss:3.690846 +step:3845 train loss:3.711941 +step:3846 train loss:3.753392 +step:3847 train loss:3.653797 +step:3848 train loss:3.657921 +step:3849 train loss:3.668812 +step:3850 train loss:3.692823 +step:3851 train loss:3.829247 +step:3852 train loss:3.806130 +step:3853 train loss:3.705924 +step:3854 train loss:3.663721 +step:3855 train loss:3.718075 +step:3856 train loss:3.643869 +step:3857 train loss:3.703658 +step:3858 train loss:3.617602 +step:3859 train loss:3.664194 +step:3860 train loss:3.732322 +step:3861 train loss:3.706369 +step:3862 train loss:3.644157 +step:3863 train loss:3.693558 +step:3864 train loss:3.662895 +step:3865 train loss:3.702642 +step:3866 train loss:3.720185 
+step:3867 train loss:3.718367 +step:3868 train loss:3.664888 +step:3869 train loss:3.666075 +step:3870 train loss:3.643613 +step:3871 train loss:3.636607 +step:3872 train loss:3.772010 +step:3873 train loss:3.696438 +step:3874 train loss:3.708450 +step:3875 train loss:3.817615 +step:3876 train loss:3.690273 +step:3877 train loss:3.718460 +step:3878 train loss:3.743384 +step:3879 train loss:3.730733 +step:3880 train loss:3.812445 +step:3881 train loss:3.635338 +step:3882 train loss:3.672017 +step:3883 train loss:3.683039 +step:3884 train loss:3.677988 +step:3885 train loss:3.691935 +step:3886 train loss:3.753330 +step:3887 train loss:3.731852 +step:3888 train loss:3.694075 +step:3889 train loss:3.666809 +step:3890 train loss:3.700161 +step:3891 train loss:3.717548 +step:3892 train loss:3.626106 +step:3893 train loss:3.733669 +step:3894 train loss:3.683007 +step:3895 train loss:3.699526 +step:3896 train loss:3.692525 +step:3897 train loss:3.656923 +step:3898 train loss:3.718598 +step:3899 train loss:3.759117 +step:3900 train loss:3.714125 +step:3901 train loss:3.729499 +step:3902 train loss:3.658426 +step:3903 train loss:3.671575 +step:3904 train loss:3.705737 +step:3905 train loss:3.640878 +step:3906 train loss:3.674793 +step:3907 train loss:3.710367 +step:3908 train loss:3.787759 +step:3909 train loss:3.678048 +step:3910 train loss:3.704583 +step:3911 train loss:3.720928 +step:3912 train loss:3.668148 +step:3913 train loss:3.684897 +step:3914 train loss:3.705267 +step:3915 train loss:3.672112 +step:3916 train loss:3.710013 +step:3917 train loss:3.751704 +step:3918 train loss:3.728292 +step:3919 train loss:3.704878 +step:3920 train loss:3.683459 +step:3921 train loss:3.721387 +step:3922 train loss:3.727585 +step:3923 train loss:3.713513 +step:3924 train loss:3.649585 +step:3925 train loss:3.849214 +step:3926 train loss:3.697428 +step:3927 train loss:3.672948 +step:3928 train loss:3.752639 +step:3929 train loss:3.813552 +step:3930 train loss:3.706529 +step:3931 train loss:3.643551 +step:3932 train loss:3.693833 +step:3933 train loss:3.711295 +step:3934 train loss:3.663825 +step:3935 train loss:3.641562 +step:3936 train loss:3.735682 +step:3937 train loss:3.693956 +step:3938 train loss:3.702475 +step:3939 train loss:3.728548 +step:3940 train loss:3.677302 +step:3941 train loss:3.760832 +step:3942 train loss:3.721947 +step:3943 train loss:3.708353 +step:3944 train loss:3.758550 +step:3945 train loss:3.668960 +step:3946 train loss:3.610842 +step:3947 train loss:3.742540 +step:3948 train loss:3.709596 +step:3949 train loss:3.876867 +step:3950 train loss:3.675578 +step:3951 train loss:3.601778 +step:3952 train loss:3.568624 +step:3953 train loss:3.642703 +step:3954 train loss:3.692702 +step:3955 train loss:3.718369 +step:3956 train loss:3.676378 +step:3957 train loss:3.727381 +step:3958 train loss:3.707856 +step:3959 train loss:3.742218 +step:3960 train loss:3.664416 +step:3961 train loss:3.693128 +step:3962 train loss:3.697514 +step:3963 train loss:3.671757 +step:3964 train loss:3.653717 +step:3965 train loss:3.706769 +step:3966 train loss:3.656699 +step:3967 train loss:3.708312 +step:3968 train loss:3.723632 +step:3969 train loss:3.634307 +step:3970 train loss:3.744714 +step:3971 train loss:3.656940 +step:3972 train loss:3.691562 +step:3973 train loss:3.656393 +step:3974 train loss:3.743135 +step:3975 train loss:3.695157 +step:3976 train loss:3.650233 +step:3977 train loss:3.709413 +step:3978 train loss:3.677825 +step:3979 train loss:3.663751 +step:3980 train loss:3.732318 +step:3981 train 
loss:3.667796 +step:3982 train loss:3.685614 +step:3983 train loss:3.671251 +step:3984 train loss:3.705866 +step:3985 train loss:3.684886 +step:3986 train loss:3.695966 +step:3987 train loss:3.703508 +step:3988 train loss:3.648575 +step:3989 train loss:3.717093 +step:3990 train loss:3.708299 +step:3991 train loss:3.722708 +step:3992 train loss:3.680231 +step:3993 train loss:3.713940 +step:3994 train loss:3.660173 +step:3995 train loss:3.714407 +step:3996 train loss:3.633381 +step:3997 train loss:3.711904 +step:3998 train loss:3.592011 +step:3999 train loss:3.747323 +step:4000 validation loss:3.611864 total_sharp:5.9588e-03 L1_sharp:4.8584e-02 L2_sharp:3.5364e-02 L3_sharp:4.0920e-02 L4_sharp:2.5285e-02 L5_sharp:2.5386e-02 L6_sharp:2.7445e-02 L7_sharp:3.7855e-02 L8_sharp:4.1063e-02 L9_sharp:2.9160e-02 L10_sharp:2.2770e-02 L11_sharp:2.3654e-02 L12_sharp:3.3818e-02 total_fnorm:1.3550e+00 total_l1_linf:8.1223e+03 total_spectral:1.3550e+00 L1_fnorm:6.1154e-02 L2_fnorm:5.8439e-02 L3_fnorm:5.7969e-02 L4_fnorm:5.9338e-02 L5_fnorm:6.0272e-02 L6_fnorm:6.1070e-02 L7_fnorm:6.1118e-02 L8_fnorm:6.1201e-02 L9_fnorm:6.1096e-02 L10_fnorm:6.1206e-02 L11_fnorm:6.1163e-02 L12_fnorm:6.1210e-02 L1_l1linf:2.6487e-01 L2_l1linf:3.0753e-01 L3_l1linf:3.2003e-01 L4_l1linf:3.3416e-01 L5_l1linf:2.9927e-01 L6_l1linf:2.8725e-01 L7_l1linf:2.9731e-01 L8_l1linf:3.0167e-01 L9_l1linf:3.0628e-01 L10_l1linf:3.4133e-01 L11_l1linf:3.5839e-01 L12_l1linf:3.5597e-01 L1_spectral:6.0714e-03 L2_spectral:6.9522e-03 L3_spectral:7.1877e-03 L4_spectral:7.5385e-03 L5_spectral:6.8028e-03 L6_spectral:6.4934e-03 L7_spectral:6.6455e-03 L8_spectral:6.7555e-03 L9_spectral:6.8792e-03 L10_spectral:7.6541e-03 L11_spectral:8.0433e-03 L12_spectral:7.9697e-03 ip_v_neg_g:6.9830e-03 cos_v_neg_g:1.4574e-03 v_norm:1.3550e+00 g_norm:3.5362e+00 hv_norm:9.1325e-01 cos_v_hv:8.8409e-03 hg_norm:1.8008e+02 cos_g_hg:4.6495e-01 v_par:4.5666e-05 v_perp:1.3550e+00 L1_cos_v_neg_g:8.8167e-03 L1_v_norm:6.1154e-02 L2_cos_v_neg_g:1.1418e-02 L2_v_norm:5.8439e-02 L3_cos_v_neg_g:8.2723e-03 L3_v_norm:5.7969e-02 L4_cos_v_neg_g:7.4899e-03 L4_v_norm:5.9338e-02 L5_cos_v_neg_g:7.7926e-03 L5_v_norm:6.0272e-02 L6_cos_v_neg_g:8.4812e-03 L6_v_norm:6.1070e-02 L7_cos_v_neg_g:1.0360e-02 L7_v_norm:6.1118e-02 L8_cos_v_neg_g:1.0454e-02 L8_v_norm:6.1201e-02 L9_cos_v_neg_g:1.0063e-02 L9_v_norm:6.1096e-02 L10_cos_v_neg_g:9.0692e-03 L10_v_norm:6.1206e-02 L11_cos_v_neg_g:8.0788e-03 L11_v_norm:6.1163e-02 L12_cos_v_neg_g:7.1974e-03 L12_v_norm:6.1210e-02 +step:4000 train loss:3.627960 +step:4001 train loss:3.704055 +step:4002 train loss:3.681862 +step:4003 train loss:3.717441 +step:4004 train loss:3.624924 +step:4005 train loss:3.719012 +step:4006 train loss:3.726356 +step:4007 train loss:3.648356 +step:4008 train loss:3.605745 +step:4009 train loss:3.688736 +step:4010 train loss:3.665564 +step:4011 train loss:3.674191 +step:4012 train loss:3.687260 +step:4013 train loss:3.663268 +step:4014 train loss:3.678523 +step:4015 train loss:3.668899 +step:4016 train loss:3.678900 +step:4017 train loss:3.640967 +step:4018 train loss:3.580941 +step:4019 train loss:3.637200 +step:4020 train loss:3.703681 +step:4021 train loss:3.650610 +step:4022 train loss:3.653789 +step:4023 train loss:3.667743 +step:4024 train loss:3.577597 +step:4025 train loss:3.702286 +step:4026 train loss:3.693152 +step:4027 train loss:3.703311 +step:4028 train loss:3.717071 +step:4029 train loss:3.746723 +step:4030 train loss:3.663886 +step:4031 train loss:3.703729 +step:4032 train loss:3.661168 +step:4033 train loss:3.694183 +step:4034 
train loss:3.708396 +step:4035 train loss:3.690033 +step:4036 train loss:3.685559 +step:4037 train loss:3.701418 +step:4038 train loss:3.621855 +step:4039 train loss:3.674815 +step:4040 train loss:3.657094 +step:4041 train loss:3.648871 +step:4042 train loss:3.669716 +step:4043 train loss:3.654731 +step:4044 train loss:3.690739 +step:4045 train loss:3.694501 +step:4046 train loss:3.652038 +step:4047 train loss:3.679858 +step:4048 train loss:3.688525 +step:4049 train loss:3.651202 +step:4050 train loss:3.755175 +step:4051 train loss:3.668957 +step:4052 train loss:3.688038 +step:4053 train loss:3.737504 +step:4054 train loss:3.708435 +step:4055 train loss:3.726802 +step:4056 train loss:3.723519 +step:4057 train loss:3.658933 +step:4058 train loss:3.640748 +step:4059 train loss:3.724655 +step:4060 train loss:3.664583 +step:4061 train loss:3.635782 +step:4062 train loss:3.750290 +step:4063 train loss:3.699959 +step:4064 train loss:3.669104 +step:4065 train loss:3.652747 +step:4066 train loss:3.683284 +step:4067 train loss:3.707948 +step:4068 train loss:3.672871 +step:4069 train loss:3.731754 +step:4070 train loss:3.647836 +step:4071 train loss:3.620884 +step:4072 train loss:3.694891 +step:4073 train loss:3.631773 +step:4074 train loss:3.682061 +step:4075 train loss:3.749131 +step:4076 train loss:3.604510 +step:4077 train loss:3.684106 +step:4078 train loss:3.781627 +step:4079 train loss:3.726638 +step:4080 train loss:3.669411 +step:4081 train loss:3.640044 +step:4082 train loss:3.693805 +step:4083 train loss:3.629988 +step:4084 train loss:3.651580 +step:4085 train loss:3.887856 +step:4086 train loss:3.654030 +step:4087 train loss:3.696153 +step:4088 train loss:3.682168 +step:4089 train loss:3.668761 +step:4090 train loss:3.691772 +step:4091 train loss:3.713866 +step:4092 train loss:3.637429 +step:4093 train loss:3.664033 +step:4094 train loss:3.686642 +step:4095 train loss:3.641485 +step:4096 train loss:3.675404 +step:4097 train loss:3.676558 +step:4098 train loss:3.648745 +step:4099 train loss:3.649627 +step:4100 train loss:3.705295 +step:4101 train loss:3.627440 +step:4102 train loss:3.663688 +step:4103 train loss:3.867783 +step:4104 train loss:3.682631 +step:4105 train loss:3.649348 +step:4106 train loss:3.718881 +step:4107 train loss:3.641414 +step:4108 train loss:3.643845 +step:4109 train loss:3.698570 +step:4110 train loss:3.708993 +step:4111 train loss:3.683490 +step:4112 train loss:3.703139 +step:4113 train loss:3.661402 +step:4114 train loss:3.610098 +step:4115 train loss:3.647233 +step:4116 train loss:3.633551 +step:4117 train loss:3.649807 +step:4118 train loss:3.701817 +step:4119 train loss:3.727808 +step:4120 train loss:3.649542 +step:4121 train loss:3.642252 +step:4122 train loss:3.707214 +step:4123 train loss:3.724087 +step:4124 train loss:3.698472 +step:4125 train loss:3.736600 +step:4126 train loss:3.667674 +step:4127 train loss:3.690080 +step:4128 train loss:3.679731 +step:4129 train loss:3.728913 +step:4130 train loss:3.657221 +step:4131 train loss:3.694123 +step:4132 train loss:3.709432 +step:4133 train loss:3.663103 +step:4134 train loss:3.715702 +step:4135 train loss:3.648281 +step:4136 train loss:3.668838 +step:4137 train loss:3.643993 +step:4138 train loss:3.647980 +step:4139 train loss:3.694039 +step:4140 train loss:3.653405 +step:4141 train loss:3.616054 +step:4142 train loss:3.663280 +step:4143 train loss:3.698646 +step:4144 train loss:3.652654 +step:4145 train loss:3.618962 +step:4146 train loss:3.686436 +step:4147 train loss:3.664656 +step:4148 train loss:3.656379 
+step:4149 train loss:3.736286 +step:4150 train loss:3.698446 +step:4151 train loss:3.680838 +step:4152 train loss:3.704986 +step:4153 train loss:3.709774 +step:4154 train loss:3.719224 +step:4155 train loss:3.743152 +step:4156 train loss:3.613621 +step:4157 train loss:3.637141 +step:4158 train loss:3.696512 +step:4159 train loss:3.594880 +step:4160 train loss:3.687099 +step:4161 train loss:3.688395 +step:4162 train loss:3.594671 +step:4163 train loss:3.678024 +step:4164 train loss:3.626643 +step:4165 train loss:3.625066 +step:4166 train loss:3.691155 +step:4167 train loss:3.689869 +step:4168 train loss:3.681326 +step:4169 train loss:3.707388 +step:4170 train loss:3.825980 +step:4171 train loss:3.675673 +step:4172 train loss:3.695212 +step:4173 train loss:3.692266 +step:4174 train loss:3.653652 +step:4175 train loss:3.745417 +step:4176 train loss:3.667774 +step:4177 train loss:3.690662 +step:4178 train loss:3.668967 +step:4179 train loss:3.623951 +step:4180 train loss:3.621943 +step:4181 train loss:3.673464 +step:4182 train loss:3.656834 +step:4183 train loss:3.592818 +step:4184 train loss:3.663854 +step:4185 train loss:3.731199 +step:4186 train loss:3.706688 +step:4187 train loss:3.715531 +step:4188 train loss:3.689115 +step:4189 train loss:3.650079 +step:4190 train loss:3.691782 +step:4191 train loss:3.640593 +step:4192 train loss:3.728346 +step:4193 train loss:3.637850 +step:4194 train loss:3.619031 +step:4195 train loss:3.617105 +step:4196 train loss:3.686176 +step:4197 train loss:3.698092 +step:4198 train loss:3.622468 +step:4199 train loss:3.704933 +step:4200 train loss:3.667865 +step:4201 train loss:3.647135 +step:4202 train loss:3.664387 +step:4203 train loss:3.673149 +step:4204 train loss:3.666634 +step:4205 train loss:3.679238 +step:4206 train loss:3.699039 +step:4207 train loss:3.698412 +step:4208 train loss:3.662954 +step:4209 train loss:3.727103 +step:4210 train loss:3.757755 +step:4211 train loss:3.638463 +step:4212 train loss:3.680174 +step:4213 train loss:3.632079 +step:4214 train loss:3.639557 +step:4215 train loss:3.654392 +step:4216 train loss:3.629214 +step:4217 train loss:3.651147 +step:4218 train loss:3.693149 +step:4219 train loss:3.686929 +step:4220 train loss:3.768248 +step:4221 train loss:3.655131 +step:4222 train loss:3.718239 +step:4223 train loss:3.637059 +step:4224 train loss:3.712102 +step:4225 train loss:3.639010 +step:4226 train loss:3.695828 +step:4227 train loss:3.671631 +step:4228 train loss:3.645064 +step:4229 train loss:3.655147 +step:4230 train loss:3.638350 +step:4231 train loss:3.627469 +step:4232 train loss:3.675533 +step:4233 train loss:3.583735 +step:4234 train loss:3.666650 +step:4235 train loss:3.744140 +step:4236 train loss:3.710876 +step:4237 train loss:3.695007 +step:4238 train loss:3.704657 +step:4239 train loss:3.755401 +step:4240 train loss:3.662815 +step:4241 train loss:3.590784 +step:4242 train loss:3.709877 +step:4243 train loss:3.707914 +step:4244 train loss:3.723299 +step:4245 train loss:3.776335 +step:4246 train loss:3.652776 +step:4247 train loss:3.708748 +step:4248 train loss:3.659218 +step:4249 train loss:3.667022 +step:4250 validation loss:3.593731 +step:4250 train loss:3.647802 +step:4251 train loss:3.743586 +step:4252 train loss:3.653386 +step:4253 train loss:3.644413 +step:4254 train loss:3.655953 +step:4255 train loss:3.638054 +step:4256 train loss:3.654416 +step:4257 train loss:3.711192 +step:4258 train loss:3.571166 +step:4259 train loss:3.636056 +step:4260 train loss:3.702570 +step:4261 train loss:3.686477 +step:4262 
train loss:3.829450 +step:4263 train loss:3.756410 +step:4264 train loss:3.698197 +step:4265 train loss:3.689781 +step:4266 train loss:3.686802 +step:4267 train loss:3.687718 +step:4268 train loss:3.634421 +step:4269 train loss:3.726415 +step:4270 train loss:3.707843 +step:4271 train loss:3.623339 +step:4272 train loss:3.676672 +step:4273 train loss:3.654771 +step:4274 train loss:3.639758 +step:4275 train loss:3.661196 +step:4276 train loss:3.627542 +step:4277 train loss:3.762838 +step:4278 train loss:3.611319 +step:4279 train loss:3.641764 +step:4280 train loss:3.724723 +step:4281 train loss:3.707895 +step:4282 train loss:3.772428 +step:4283 train loss:3.627526 +step:4284 train loss:3.654350 +step:4285 train loss:3.659424 +step:4286 train loss:3.722828 +step:4287 train loss:3.723362 +step:4288 train loss:3.700380 +step:4289 train loss:3.656026 +step:4290 train loss:3.664440 +step:4291 train loss:3.624022 +step:4292 train loss:3.666334 +step:4293 train loss:3.680669 +step:4294 train loss:3.664730 +step:4295 train loss:3.603036 +step:4296 train loss:3.672939 +step:4297 train loss:3.654445 +step:4298 train loss:3.666610 +step:4299 train loss:3.664166 +step:4300 train loss:3.778383 +step:4301 train loss:3.597217 +step:4302 train loss:3.735299 +step:4303 train loss:3.614532 +step:4304 train loss:3.620391 +step:4305 train loss:3.639905 +step:4306 train loss:3.717001 +step:4307 train loss:3.628881 +step:4308 train loss:3.628408 +step:4309 train loss:3.699091 +step:4310 train loss:3.634183 +step:4311 train loss:3.691739 +step:4312 train loss:3.684915 +step:4313 train loss:3.678702 +step:4314 train loss:3.624462 +step:4315 train loss:3.657620 +step:4316 train loss:3.605568 +step:4317 train loss:3.661378 +step:4318 train loss:3.701820 +step:4319 train loss:3.650256 +step:4320 train loss:3.711519 +step:4321 train loss:3.692786 +step:4322 train loss:3.649741 +step:4323 train loss:3.587230 +step:4324 train loss:3.679075 +step:4325 train loss:3.654410 +step:4326 train loss:3.647273 +step:4327 train loss:3.754069 +step:4328 train loss:3.662952 +step:4329 train loss:3.618902 +step:4330 train loss:3.664574 +step:4331 train loss:3.678779 +step:4332 train loss:3.706610 +step:4333 train loss:3.668159 +step:4334 train loss:3.682792 +step:4335 train loss:3.681150 +step:4336 train loss:3.693802 +step:4337 train loss:3.658082 +step:4338 train loss:3.782138 +step:4339 train loss:3.681807 +step:4340 train loss:3.687847 +step:4341 train loss:3.656161 +step:4342 train loss:3.670734 +step:4343 train loss:3.790413 +step:4344 train loss:3.680236 +step:4345 train loss:3.697627 +step:4346 train loss:3.711394 +step:4347 train loss:3.720156 +step:4348 train loss:3.631677 +step:4349 train loss:3.719121 +step:4350 train loss:3.655136 +step:4351 train loss:3.611049 +step:4352 train loss:3.684674 +step:4353 train loss:3.632584 +step:4354 train loss:3.686261 +step:4355 train loss:3.649245 +step:4356 train loss:3.673140 +step:4357 train loss:3.655449 +step:4358 train loss:3.750040 +step:4359 train loss:3.700716 +step:4360 train loss:3.614584 +step:4361 train loss:3.662538 +step:4362 train loss:3.683397 +step:4363 train loss:3.700866 +step:4364 train loss:3.668003 +step:4365 train loss:3.648135 +step:4366 train loss:3.696028 +step:4367 train loss:3.710452 +step:4368 train loss:3.683905 +step:4369 train loss:3.554312 +step:4370 train loss:3.682527 +step:4371 train loss:3.594471 +step:4372 train loss:3.743152 +step:4373 train loss:3.681067 +step:4374 train loss:3.649198 +step:4375 train loss:3.694957 +step:4376 train loss:3.703931 
+step:4377 train loss:3.639942 +step:4378 train loss:3.650615 +step:4379 train loss:3.732359 +step:4380 train loss:3.713135 +step:4381 train loss:3.615137 +step:4382 train loss:3.661850 +step:4383 train loss:3.690351 +step:4384 train loss:3.688504 +step:4385 train loss:3.611796 +step:4386 train loss:3.669616 +step:4387 train loss:3.639030 +step:4388 train loss:3.658026 +step:4389 train loss:3.687341 +step:4390 train loss:3.725741 +step:4391 train loss:3.654244 +step:4392 train loss:3.727003 +step:4393 train loss:3.687558 +step:4394 train loss:3.623062 +step:4395 train loss:3.681333 +step:4396 train loss:3.656000 +step:4397 train loss:3.697349 +step:4398 train loss:3.645663 +step:4399 train loss:3.639024 +step:4400 train loss:3.644210 +step:4401 train loss:3.701958 +step:4402 train loss:3.700883 +step:4403 train loss:3.651656 +step:4404 train loss:3.683241 +step:4405 train loss:3.605196 +step:4406 train loss:3.682337 +step:4407 train loss:3.618527 +step:4408 train loss:3.714049 +step:4409 train loss:3.670358 +step:4410 train loss:3.677206 +step:4411 train loss:3.635661 +step:4412 train loss:3.749051 +step:4413 train loss:3.647451 +step:4414 train loss:3.655661 +step:4415 train loss:3.639378 +step:4416 train loss:3.634905 +step:4417 train loss:3.625235 +step:4418 train loss:3.698908 +step:4419 train loss:3.668945 +step:4420 train loss:3.676812 +step:4421 train loss:3.703025 +step:4422 train loss:3.718493 +step:4423 train loss:3.677827 +step:4424 train loss:3.663728 +step:4425 train loss:3.625872 +step:4426 train loss:3.699545 +step:4427 train loss:3.662374 +step:4428 train loss:3.599003 +step:4429 train loss:3.662611 +step:4430 train loss:3.698302 +step:4431 train loss:3.692629 +step:4432 train loss:3.598515 +step:4433 train loss:3.652421 +step:4434 train loss:3.652221 +step:4435 train loss:3.681203 +step:4436 train loss:3.617237 +step:4437 train loss:3.694886 +step:4438 train loss:3.664613 +step:4439 train loss:3.668605 +step:4440 train loss:3.667538 +step:4441 train loss:3.669230 +step:4442 train loss:3.719454 +step:4443 train loss:3.651663 +step:4444 train loss:3.737646 +step:4445 train loss:3.699583 +step:4446 train loss:3.632524 +step:4447 train loss:3.681091 +step:4448 train loss:3.700600 +step:4449 train loss:3.636590 +step:4450 train loss:3.655310 +step:4451 train loss:3.708288 +step:4452 train loss:3.763920 +step:4453 train loss:3.696641 +step:4454 train loss:3.665709 +step:4455 train loss:3.713642 +step:4456 train loss:3.656799 +step:4457 train loss:3.658588 +step:4458 train loss:3.668499 +step:4459 train loss:3.703223 +step:4460 train loss:3.613564 +step:4461 train loss:3.586498 +step:4462 train loss:3.641950 +step:4463 train loss:3.661650 +step:4464 train loss:3.632886 +step:4465 train loss:3.665175 +step:4466 train loss:3.764449 +step:4467 train loss:3.642505 +step:4468 train loss:3.638207 +step:4469 train loss:3.629643 +step:4470 train loss:3.606186 +step:4471 train loss:3.668082 +step:4472 train loss:3.591012 +step:4473 train loss:3.677603 +step:4474 train loss:3.703745 +step:4475 train loss:3.665438 +step:4476 train loss:3.625526 +step:4477 train loss:3.609992 +step:4478 train loss:3.671083 +step:4479 train loss:3.771629 +step:4480 train loss:3.606961 +step:4481 train loss:3.679341 +step:4482 train loss:3.639580 +step:4483 train loss:3.636401 +step:4484 train loss:3.680457 +step:4485 train loss:3.643808 +step:4486 train loss:3.742058 +step:4487 train loss:3.639972 +step:4488 train loss:3.636585 +step:4489 train loss:3.594664 +step:4490 train loss:3.677590 +step:4491 train 
loss:3.624406 +step:4492 train loss:3.661179 +step:4493 train loss:3.646418 +step:4494 train loss:3.640729 +step:4495 train loss:3.706409 +step:4496 train loss:3.648227 +step:4497 train loss:3.730564 +step:4498 train loss:3.624143 +step:4499 train loss:3.674083 +step:4500 validation loss:3.578648 total_sharp:6.4905e-03 L1_sharp:6.6119e-02 L2_sharp:2.6312e-02 L3_sharp:6.0473e-02 L4_sharp:2.5109e-02 L5_sharp:3.0349e-02 L6_sharp:3.7179e-02 L7_sharp:4.0921e-02 L8_sharp:3.7734e-02 L9_sharp:2.7288e-02 L10_sharp:2.3590e-02 L11_sharp:2.3739e-02 L12_sharp:3.5637e-02 total_fnorm:1.3563e+00 total_l1_linf:8.1289e+03 total_spectral:1.3563e+00 L1_fnorm:6.1564e-02 L2_fnorm:5.8951e-02 L3_fnorm:5.7537e-02 L4_fnorm:5.9697e-02 L5_fnorm:6.0673e-02 L6_fnorm:6.1333e-02 L7_fnorm:6.1336e-02 L8_fnorm:6.1287e-02 L9_fnorm:6.1193e-02 L10_fnorm:6.1380e-02 L11_fnorm:6.1353e-02 L12_fnorm:6.1346e-02 L1_l1linf:3.2644e-01 L2_l1linf:3.6517e-01 L3_l1linf:3.6578e-01 L4_l1linf:3.6764e-01 L5_l1linf:3.4754e-01 L6_l1linf:3.4094e-01 L7_l1linf:3.4629e-01 L8_l1linf:3.2738e-01 L9_l1linf:3.4068e-01 L10_l1linf:3.6169e-01 L11_l1linf:3.8948e-01 L12_l1linf:3.6991e-01 L1_spectral:7.3377e-03 L2_spectral:8.2187e-03 L3_spectral:8.1933e-03 L4_spectral:8.2299e-03 L5_spectral:7.7786e-03 L6_spectral:7.6399e-03 L7_spectral:7.7427e-03 L8_spectral:7.3725e-03 L9_spectral:7.6357e-03 L10_spectral:8.1607e-03 L11_spectral:8.6879e-03 L12_spectral:8.3247e-03 ip_v_neg_g:5.6721e-03 cos_v_neg_g:1.1899e-03 v_norm:1.3563e+00 g_norm:3.5145e+00 hv_norm:9.3994e-01 cos_v_hv:9.3658e-03 hg_norm:2.9791e+02 cos_g_hg:4.3199e-01 v_par:4.4517e-05 v_perp:1.3563e+00 L1_cos_v_neg_g:8.8018e-03 L1_v_norm:6.1564e-02 L2_cos_v_neg_g:4.1780e-03 L2_v_norm:5.8951e-02 L3_cos_v_neg_g:2.6061e-03 L3_v_norm:5.7537e-02 L4_cos_v_neg_g:4.7013e-03 L4_v_norm:5.9697e-02 L5_cos_v_neg_g:7.2028e-03 L5_v_norm:6.0673e-02 L6_cos_v_neg_g:7.9571e-03 L6_v_norm:6.1333e-02 L7_cos_v_neg_g:6.2317e-03 L7_v_norm:6.1336e-02 L8_cos_v_neg_g:7.4416e-03 L8_v_norm:6.1287e-02 L9_cos_v_neg_g:7.9225e-03 L9_v_norm:6.1193e-02 L10_cos_v_neg_g:7.7851e-03 L10_v_norm:6.1380e-02 L11_cos_v_neg_g:8.0407e-03 L11_v_norm:6.1353e-02 L12_cos_v_neg_g:7.8767e-03 L12_v_norm:6.1346e-02 +step:4500 train loss:3.582404 +step:4501 train loss:3.643768 +step:4502 train loss:3.768029 +step:4503 train loss:3.668663 +step:4504 train loss:3.681957 +step:4505 train loss:3.663463 +step:4506 train loss:3.636204 +step:4507 train loss:3.710600 +step:4508 train loss:3.644925 +step:4509 train loss:3.643535 +step:4510 train loss:3.677799 +step:4511 train loss:3.633251 +step:4512 train loss:3.654137 +step:4513 train loss:3.714681 +step:4514 train loss:3.619093 +step:4515 train loss:3.734216 +step:4516 train loss:3.707446 +step:4517 train loss:3.661447 +step:4518 train loss:3.602667 +step:4519 train loss:3.638845 +step:4520 train loss:3.650968 +step:4521 train loss:3.590502 +step:4522 train loss:3.647477 +step:4523 train loss:3.692233 +step:4524 train loss:3.675380 +step:4525 train loss:3.598907 +step:4526 train loss:3.639518 +step:4527 train loss:3.626324 +step:4528 train loss:3.656556 +step:4529 train loss:3.654494 +step:4530 train loss:3.748013 +step:4531 train loss:3.638262 +step:4532 train loss:3.659972 +step:4533 train loss:3.635460 +step:4534 train loss:3.727104 +step:4535 train loss:3.625937 +step:4536 train loss:3.697017 +step:4537 train loss:3.680329 +step:4538 train loss:3.658367 +step:4539 train loss:3.681092 +step:4540 train loss:3.657288 +step:4541 train loss:3.623143 +step:4542 train loss:3.674813 +step:4543 train loss:3.759130 +step:4544 
train loss:3.700812 +step:4545 train loss:3.643608 +step:4546 train loss:3.734716 +step:4547 train loss:3.693906 +step:4548 train loss:3.698855 +step:4549 train loss:3.653792 +step:4550 train loss:3.620998 +step:4551 train loss:3.639111 +step:4552 train loss:3.640121 +step:4553 train loss:3.724569 +step:4554 train loss:3.617404 +step:4555 train loss:3.729684 +step:4556 train loss:3.666168 +step:4557 train loss:3.595325 +step:4558 train loss:3.679637 +step:4559 train loss:3.691136 +step:4560 train loss:3.628359 +step:4561 train loss:3.616269 +step:4562 train loss:3.658020 +step:4563 train loss:3.610065 +step:4564 train loss:3.636723 +step:4565 train loss:3.635897 +step:4566 train loss:3.609882 +step:4567 train loss:3.635603 +step:4568 train loss:3.635175 +step:4569 train loss:3.620385 +step:4570 train loss:3.670696 +step:4571 train loss:3.648182 +step:4572 train loss:3.641849 +step:4573 train loss:3.651022 +step:4574 train loss:3.796137 +step:4575 train loss:3.627135 +step:4576 train loss:3.616562 +step:4577 train loss:3.657905 +step:4578 train loss:3.696395 +step:4579 train loss:3.648361 +step:4580 train loss:3.709087 +step:4581 train loss:3.645615 +step:4582 train loss:3.641428 +step:4583 train loss:3.648699 +step:4584 train loss:3.620441 +step:4585 train loss:3.697402 +step:4586 train loss:3.686309 +step:4587 train loss:3.587477 +step:4588 train loss:3.630419 +step:4589 train loss:3.706157 +step:4590 train loss:3.674230 +step:4591 train loss:3.613923 +step:4592 train loss:3.699493 +step:4593 train loss:3.617692 +step:4594 train loss:3.648523 +step:4595 train loss:3.672514 +step:4596 train loss:3.612594 +step:4597 train loss:3.748030 +step:4598 train loss:3.668109 +step:4599 train loss:3.618695 +step:4600 train loss:3.627276 +step:4601 train loss:3.649676 +step:4602 train loss:3.601696 +step:4603 train loss:3.616292 +step:4604 train loss:3.721229 +step:4605 train loss:3.640008 +step:4606 train loss:3.666756 +step:4607 train loss:3.648186 +step:4608 train loss:3.683471 +step:4609 train loss:3.642206 +step:4610 train loss:3.685270 +step:4611 train loss:3.710938 +step:4612 train loss:3.708733 +step:4613 train loss:3.688780 +step:4614 train loss:3.681463 +step:4615 train loss:3.622401 +step:4616 train loss:3.606346 +step:4617 train loss:3.649854 +step:4618 train loss:3.666583 +step:4619 train loss:3.626891 +step:4620 train loss:3.642478 +step:4621 train loss:3.646041 +step:4622 train loss:3.582631 +step:4623 train loss:3.691554 +step:4624 train loss:3.673761 +step:4625 train loss:3.632752 +step:4626 train loss:3.673351 +step:4627 train loss:3.645597 +step:4628 train loss:3.632569 +step:4629 train loss:3.669314 +step:4630 train loss:3.726800 +step:4631 train loss:3.728760 +step:4632 train loss:3.624019 +step:4633 train loss:3.636844 +step:4634 train loss:3.710102 +step:4635 train loss:3.674501 +step:4636 train loss:3.690691 +step:4637 train loss:3.627386 +step:4638 train loss:3.634445 +step:4639 train loss:3.630305 +step:4640 train loss:3.639735 +step:4641 train loss:3.644788 +step:4642 train loss:3.677288 +step:4643 train loss:3.639421 +step:4644 train loss:3.663163 +step:4645 train loss:3.676957 +step:4646 train loss:3.634112 +step:4647 train loss:3.590647 +step:4648 train loss:3.697455 +step:4649 train loss:3.709983 +step:4650 train loss:3.656866 +step:4651 train loss:3.658722 +step:4652 train loss:3.649152 +step:4653 train loss:3.704624 +step:4654 train loss:3.699803 +step:4655 train loss:3.603569 +step:4656 train loss:3.637176 +step:4657 train loss:3.691226 +step:4658 train loss:3.647339 
+step:4659 train loss:3.659687 +step:4660 train loss:3.702834 +step:4661 train loss:3.621879 +step:4662 train loss:3.634462 +step:4663 train loss:3.647288 +step:4664 train loss:3.700222 +step:4665 train loss:3.695358 +step:4666 train loss:3.690865 +step:4667 train loss:3.683659 +step:4668 train loss:3.645113 +step:4669 train loss:3.656590 +step:4670 train loss:3.687105 +step:4671 train loss:3.693463 +step:4672 train loss:3.560658 +step:4673 train loss:3.597459 +step:4674 train loss:3.724574 +step:4675 train loss:3.631572 +step:4676 train loss:3.591793 +step:4677 train loss:3.595204 +step:4678 train loss:3.568212 +step:4679 train loss:3.667359 +step:4680 train loss:3.606497 +step:4681 train loss:3.657851 +step:4682 train loss:3.608520 +step:4683 train loss:3.576867 +step:4684 train loss:3.691721 +step:4685 train loss:3.631305 +step:4686 train loss:3.641104 +step:4687 train loss:3.677680 +step:4688 train loss:3.608537 +step:4689 train loss:3.683372 +step:4690 train loss:3.625812 +step:4691 train loss:3.660380 +step:4692 train loss:3.588989 +step:4693 train loss:3.626631 +step:4694 train loss:3.668361 +step:4695 train loss:3.687571 +step:4696 train loss:3.676566 +step:4697 train loss:3.586910 +step:4698 train loss:3.606632 +step:4699 train loss:3.656274 +step:4700 train loss:3.624318 +step:4701 train loss:3.634942 +step:4702 train loss:3.588605 +step:4703 train loss:3.669889 +step:4704 train loss:3.658771 +step:4705 train loss:3.600284 +step:4706 train loss:3.608311 +step:4707 train loss:3.596518 +step:4708 train loss:3.663738 +step:4709 train loss:3.610872 +step:4710 train loss:3.626037 +step:4711 train loss:3.684930 +step:4712 train loss:3.581431 +step:4713 train loss:3.688256 +step:4714 train loss:3.586441 +step:4715 train loss:3.679633 +step:4716 train loss:3.646560 +step:4717 train loss:3.577275 +step:4718 train loss:3.666934 +step:4719 train loss:3.594280 +step:4720 train loss:3.690839 +step:4721 train loss:3.647490 +step:4722 train loss:3.701736 +step:4723 train loss:3.597355 +step:4724 train loss:3.647204 +step:4725 train loss:3.583431 +step:4726 train loss:3.630261 +step:4727 train loss:3.635072 +step:4728 train loss:3.642575 +step:4729 train loss:3.671401 +step:4730 train loss:3.570797 +step:4731 train loss:3.633652 +step:4732 train loss:3.584051 +step:4733 train loss:3.521452 +step:4734 train loss:3.657684 +step:4735 train loss:3.609837 +step:4736 train loss:3.652431 +step:4737 train loss:3.533999 +step:4738 train loss:3.679589 +step:4739 train loss:3.558454 +step:4740 train loss:3.668586 +step:4741 train loss:3.636778 +step:4742 train loss:3.599136 +step:4743 train loss:3.593260 +step:4744 train loss:3.639411 +step:4745 train loss:3.657939 +step:4746 train loss:3.699077 +step:4747 train loss:3.659784 +step:4748 train loss:3.560556 +step:4749 train loss:3.626776 +step:4750 validation loss:3.566822 +step:4750 train loss:3.572545 +step:4751 train loss:3.669093 +step:4752 train loss:3.599661 +step:4753 train loss:3.707069 +step:4754 train loss:3.575247 +step:4755 train loss:3.619871 +step:4756 train loss:3.691674 +step:4757 train loss:3.616489 +step:4758 train loss:3.632876 +step:4759 train loss:3.633446 +step:4760 train loss:3.661823 +step:4761 train loss:3.582376 +step:4762 train loss:3.613039 +step:4763 train loss:3.636075 +step:4764 train loss:3.696949 +step:4765 train loss:3.590382 +step:4766 train loss:3.612363 +step:4767 train loss:3.565097 +step:4768 train loss:3.623724 +step:4769 train loss:3.647694 +step:4770 train loss:3.607187 +step:4771 train loss:3.618975 +step:4772 
train loss:3.592498 +step:4773 train loss:3.625825 +step:4774 train loss:3.569717 +step:4775 train loss:3.703839 +step:4776 train loss:3.567328 +step:4777 train loss:3.641592 +step:4778 train loss:3.581313 +step:4779 train loss:3.629353 +step:4780 train loss:3.565054 +step:4781 train loss:3.574516 +step:4782 train loss:3.679645 +step:4783 train loss:3.669997 +step:4784 train loss:3.629906 +step:4785 train loss:3.628439 +step:4786 train loss:3.739544 +step:4787 train loss:3.572589 +step:4788 train loss:3.594811 +step:4789 train loss:3.619413 +step:4790 train loss:3.671642 +step:4791 train loss:3.636536 +step:4792 train loss:3.678854 +step:4793 train loss:3.595332 +step:4794 train loss:3.670850 +step:4795 train loss:3.617828 +step:4796 train loss:3.609027 +step:4797 train loss:3.614915 +step:4798 train loss:3.622917 +step:4799 train loss:3.619371 +step:4800 train loss:3.651094 +step:4801 train loss:3.641910 +step:4802 train loss:3.681509 +step:4803 train loss:3.663370 +step:4804 train loss:3.621309 +step:4805 train loss:3.614314 +step:4806 train loss:3.593582 +step:4807 train loss:3.701751 +step:4808 train loss:3.573554 +step:4809 train loss:3.677096 +step:4810 train loss:3.618230 +step:4811 train loss:3.636528 +step:4812 train loss:3.611568 +step:4813 train loss:3.569481 +step:4814 train loss:3.561837 +step:4815 train loss:3.555450 +step:4816 train loss:3.621266 +step:4817 train loss:3.558233 +step:4818 train loss:3.625905 +step:4819 train loss:3.620277 +step:4820 train loss:3.872219 +step:4821 train loss:3.646324 +step:4822 train loss:3.655359 +step:4823 train loss:3.585721 +step:4824 train loss:3.596884 +step:4825 train loss:3.574617 +step:4826 train loss:3.662533 +step:4827 train loss:3.609742 +step:4828 train loss:3.552757 +step:4829 train loss:3.655580 +step:4830 train loss:3.597676 +step:4831 train loss:3.745578 +step:4832 train loss:3.615023 +step:4833 train loss:3.652322 +step:4834 train loss:3.554276 +step:4835 train loss:3.644113 +step:4836 train loss:3.624621 +step:4837 train loss:3.653815 +step:4838 train loss:3.592150 +step:4839 train loss:3.658724 +step:4840 train loss:3.565399 +step:4841 train loss:3.662067 +step:4842 train loss:3.577283 +step:4843 train loss:3.655507 +step:4844 train loss:3.656056 +step:4845 train loss:3.593750 +step:4846 train loss:3.607219 +step:4847 train loss:3.593315 +step:4848 train loss:3.616776 +step:4849 train loss:3.571499 +step:4850 train loss:3.578239 +step:4851 train loss:3.574745 +step:4852 train loss:3.653384 +step:4853 train loss:3.628799 +step:4854 train loss:3.605997 +step:4855 train loss:3.671653 +step:4856 train loss:3.640829 +step:4857 train loss:3.647615 +step:4858 train loss:3.731490 +step:4859 train loss:3.575323 +step:4860 train loss:3.651664 +step:4861 train loss:3.623270 +step:4862 train loss:3.658241 +step:4863 train loss:3.594567 +step:4864 train loss:3.604899 +step:4865 train loss:3.596461 +step:4866 train loss:3.644718 +step:4867 train loss:3.610418 +step:4868 train loss:3.629521 +step:4869 train loss:3.576894 +step:4870 train loss:3.609671 +step:4871 train loss:3.692421 +step:4872 train loss:3.637379 +step:4873 train loss:3.634883 +step:4874 train loss:3.606261 +step:4875 train loss:3.573514 +step:4876 train loss:3.586329 +step:4877 train loss:3.587074 +step:4878 train loss:3.627289 +step:4879 train loss:3.589218 +step:4880 train loss:3.614238 +step:4881 train loss:3.559731 +step:4882 train loss:3.762285 +step:4883 train loss:3.571739 +step:4884 train loss:3.602730 +step:4885 train loss:3.575767 +step:4886 train loss:3.653763 
+step:4887 train loss:3.606073 +step:4888 train loss:3.615387 +step:4889 train loss:3.608344 +step:4890 train loss:3.649567 +step:4891 train loss:3.585536 +step:4892 train loss:3.592729 +step:4893 train loss:3.637708 +step:4894 train loss:3.573789 +step:4895 train loss:3.604365 +step:4896 train loss:3.590116 +step:4897 train loss:3.661895 +step:4898 train loss:3.612094 +step:4899 train loss:3.593212 +step:4900 train loss:3.641917 +step:4901 train loss:3.589973 +step:4902 train loss:3.584380 +step:4903 train loss:3.603125 +step:4904 train loss:3.616905 +step:4905 train loss:3.613293 +step:4906 train loss:3.616407 +step:4907 train loss:3.686493 +step:4908 train loss:3.596294 +step:4909 train loss:3.599630 +step:4910 train loss:3.623991 +step:4911 train loss:3.673842 +step:4912 train loss:3.648392 +step:4913 train loss:3.626109 +step:4914 train loss:3.617947 +step:4915 train loss:3.599574 +step:4916 train loss:3.538637 +step:4917 train loss:3.565030 +step:4918 train loss:3.598281 +step:4919 train loss:3.587789 +step:4920 train loss:3.586280 +step:4921 train loss:3.748146 +step:4922 train loss:3.644934 +step:4923 train loss:3.658727 +step:4924 train loss:3.660631 +step:4925 train loss:3.594080 +step:4926 train loss:3.588055 +step:4927 train loss:3.615222 +step:4928 train loss:3.656981 +step:4929 train loss:3.609184 +step:4930 train loss:3.592897 +step:4931 train loss:3.583544 +step:4932 train loss:3.596051 +step:4933 train loss:3.588161 +step:4934 train loss:3.656248 +step:4935 train loss:3.642002 +step:4936 train loss:3.604793 +step:4937 train loss:3.714837 +step:4938 train loss:3.701080 +step:4939 train loss:3.567591 +step:4940 train loss:3.645483 +step:4941 train loss:3.548700 +step:4942 train loss:3.589774 +step:4943 train loss:3.592932 +step:4944 train loss:3.592602 +step:4945 train loss:3.640422 +step:4946 train loss:3.611428 +step:4947 train loss:3.597109 +step:4948 train loss:3.632539 +step:4949 train loss:3.541070 +step:4950 train loss:3.620801 +step:4951 train loss:3.670797 +step:4952 train loss:3.612531 +step:4953 train loss:3.644951 +step:4954 train loss:3.547933 +step:4955 train loss:3.623678 +step:4956 train loss:3.653316 +step:4957 train loss:3.649220 +step:4958 train loss:3.562606 +step:4959 train loss:3.680020 +step:4960 train loss:3.607123 +step:4961 train loss:3.626067 +step:4962 train loss:3.586918 +step:4963 train loss:3.635044 +step:4964 train loss:3.585461 +step:4965 train loss:3.737811 +step:4966 train loss:3.586048 +step:4967 train loss:3.694541 +step:4968 train loss:3.584057 +step:4969 train loss:3.626853 +step:4970 train loss:3.616953 +step:4971 train loss:3.566877 +step:4972 train loss:3.613604 +step:4973 train loss:3.619296 +step:4974 train loss:3.609363 +step:4975 train loss:3.692494 +step:4976 train loss:3.673917 +step:4977 train loss:3.617992 +step:4978 train loss:3.608180 +step:4979 train loss:3.602183 +step:4980 train loss:3.712360 +step:4981 train loss:3.550873 +step:4982 train loss:3.630464 +step:4983 train loss:3.553610 +step:4984 train loss:3.739640 +step:4985 train loss:3.638597 +step:4986 train loss:3.582793 +step:4987 train loss:3.601751 +step:4988 train loss:3.798672 +step:4989 train loss:3.601761 +step:4990 train loss:3.597409 +step:4991 train loss:3.610008 +step:4992 train loss:3.596952 +step:4993 train loss:3.574578 +step:4994 train loss:3.685800 +step:4995 train loss:3.610046 +step:4996 train loss:3.697245 +step:4997 train loss:3.596607 +step:4998 train loss:3.599985 +step:4999 train loss:3.585640 +step:5000 validation loss:3.553002 
total_sharp:4.5580e-03 L1_sharp:4.0869e-02 L2_sharp:2.3191e-02 L3_sharp:3.9856e-02 L4_sharp:2.1855e-02 L5_sharp:2.6874e-02 L6_sharp:2.6670e-02 L7_sharp:3.0805e-02 L8_sharp:3.0481e-02 L9_sharp:2.1491e-02 L10_sharp:1.6677e-02 L11_sharp:1.5950e-02 L12_sharp:2.2858e-02 total_fnorm:1.3447e+00 total_l1_linf:8.0704e+03 total_spectral:1.3447e+00 L1_fnorm:6.1402e-02 L2_fnorm:5.8977e-02 L3_fnorm:5.8208e-02 L4_fnorm:5.9834e-02 L5_fnorm:6.0745e-02 L6_fnorm:6.1267e-02 L7_fnorm:6.1133e-02 L8_fnorm:6.1165e-02 L9_fnorm:6.1105e-02 L10_fnorm:6.1347e-02 L11_fnorm:6.1269e-02 L12_fnorm:6.1255e-02 L1_l1linf:3.2056e-01 L2_l1linf:3.4310e-01 L3_l1linf:3.4105e-01 L4_l1linf:3.3829e-01 L5_l1linf:3.4027e-01 L6_l1linf:3.0772e-01 L7_l1linf:2.9289e-01 L8_l1linf:2.9516e-01 L9_l1linf:2.9769e-01 L10_l1linf:3.4555e-01 L11_l1linf:3.6035e-01 L12_l1linf:3.5569e-01 L1_spectral:7.2673e-03 L2_spectral:7.7397e-03 L3_spectral:7.7214e-03 L4_spectral:7.5870e-03 L5_spectral:7.6369e-03 L6_spectral:6.9736e-03 L7_spectral:6.6410e-03 L8_spectral:6.6375e-03 L9_spectral:6.7214e-03 L10_spectral:7.8093e-03 L11_spectral:8.1176e-03 L12_spectral:8.0460e-03 ip_v_neg_g:4.9436e-03 cos_v_neg_g:1.0655e-03 v_norm:1.3447e+00 g_norm:3.4503e+00 hv_norm:6.9129e-01 cos_v_hv:8.8663e-03 hg_norm:3.1029e+02 cos_g_hg:4.1718e-01 v_par:3.9260e-05 v_perp:1.3447e+00 L1_cos_v_neg_g:7.2255e-03 L1_v_norm:6.1402e-02 L2_cos_v_neg_g:7.7060e-03 L2_v_norm:5.8977e-02 L3_cos_v_neg_g:8.1216e-03 L3_v_norm:5.8208e-02 L4_cos_v_neg_g:7.1775e-03 L4_v_norm:5.9834e-02 L5_cos_v_neg_g:7.0306e-03 L5_v_norm:6.0745e-02 L6_cos_v_neg_g:6.3806e-03 L6_v_norm:6.1267e-02 L7_cos_v_neg_g:5.1506e-03 L7_v_norm:6.1133e-02 L8_cos_v_neg_g:5.2794e-03 L8_v_norm:6.1165e-02 L9_cos_v_neg_g:5.3951e-03 L9_v_norm:6.1105e-02 L10_cos_v_neg_g:5.4177e-03 L10_v_norm:6.1347e-02 L11_cos_v_neg_g:5.5674e-03 L11_v_norm:6.1269e-02 L12_cos_v_neg_g:5.1317e-03 L12_v_norm:6.1255e-02 +step:5000 train loss:3.698572 +step:5001 train loss:3.567721 +step:5002 train loss:3.621038 +step:5003 train loss:3.616745 +step:5004 train loss:3.608589 +step:5005 train loss:3.606252 +step:5006 train loss:3.649249 +step:5007 train loss:3.651245 +step:5008 train loss:3.587208 +step:5009 train loss:3.633984 +step:5010 train loss:3.584382 +step:5011 train loss:3.613910 +step:5012 train loss:3.588057 +step:5013 train loss:3.689340 +step:5014 train loss:3.603957 +step:5015 train loss:3.677394 +step:5016 train loss:3.606726 +step:5017 train loss:3.650578 +step:5018 train loss:3.571024 +step:5019 train loss:3.605724 +step:5020 train loss:3.599370 +step:5021 train loss:3.613176 +step:5022 train loss:3.648006 +step:5023 train loss:3.617770 +step:5024 train loss:3.669226 +step:5025 train loss:3.552814 +step:5026 train loss:3.679066 +step:5027 train loss:3.609847 +step:5028 train loss:3.678543 +step:5029 train loss:3.572887 +step:5030 train loss:3.614751 +step:5031 train loss:3.600144 +step:5032 train loss:3.627539 +step:5033 train loss:3.613225 +step:5034 train loss:3.607348 +step:5035 train loss:3.695463 +step:5036 train loss:3.642247 +step:5037 train loss:3.594072 +step:5038 train loss:3.642169 +step:5039 train loss:3.656644 +step:5040 train loss:3.619961 +step:5041 train loss:3.634684 +step:5042 train loss:3.540180 +step:5043 train loss:3.680887 +step:5044 train loss:3.598258 +step:5045 train loss:3.649937 +step:5046 train loss:3.572105 +step:5047 train loss:3.648174 +step:5048 train loss:3.562879 +step:5049 train loss:3.696660 +step:5050 train loss:3.584244 +step:5051 train loss:3.628555 +step:5052 train loss:3.526227 +step:5053 train loss:3.707504 
+step:5054 train loss:3.598505 +step:5055 train loss:3.623979 +step:5056 train loss:3.656051 +step:5057 train loss:3.588745 +step:5058 train loss:3.619008 +step:5059 train loss:3.580002 +step:5060 train loss:3.626128 +step:5061 train loss:3.623229 +step:5062 train loss:3.590922 +step:5063 train loss:3.587371 +step:5064 train loss:3.595168 +step:5065 train loss:3.580582 +step:5066 train loss:3.639537 +step:5067 train loss:3.622417 +step:5068 train loss:3.607384 +step:5069 train loss:3.578978 +step:5070 train loss:3.608710 +step:5071 train loss:3.677098 +step:5072 train loss:3.569318 +step:5073 train loss:3.577008 +step:5074 train loss:3.525362 +step:5075 train loss:3.593957 +step:5076 train loss:3.526334 +step:5077 train loss:3.585348 +step:5078 train loss:3.598433 +step:5079 train loss:3.628886 +step:5080 train loss:3.605670 +step:5081 train loss:3.616340 +step:5082 train loss:3.606370 +step:5083 train loss:3.659451 +step:5084 train loss:3.641172 +step:5085 train loss:3.602582 +step:5086 train loss:3.678821 +step:5087 train loss:3.662690 +step:5088 train loss:3.583632 +step:5089 train loss:3.650937 +step:5090 train loss:3.595047 +step:5091 train loss:3.599288 +step:5092 train loss:3.697098 +step:5093 train loss:3.577532 +step:5094 train loss:3.575815 +step:5095 train loss:3.625306 +step:5096 train loss:3.594353 +step:5097 train loss:3.603465 +step:5098 train loss:3.607744 +step:5099 train loss:3.568644 +step:5100 train loss:3.581448 +step:5101 train loss:3.774141 +step:5102 train loss:3.618702 +step:5103 train loss:3.629693 +step:5104 train loss:3.677260 +step:5105 train loss:3.609115 +step:5106 train loss:3.571883 +step:5107 train loss:3.590334 +step:5108 train loss:3.583919 +step:5109 train loss:3.662289 +step:5110 train loss:3.576856 +step:5111 train loss:3.668651 +step:5112 train loss:3.578781 +step:5113 train loss:3.558942 +step:5114 train loss:3.606574 +step:5115 train loss:3.568398 +step:5116 train loss:3.623767 +step:5117 train loss:3.568300 +step:5118 train loss:3.597172 +step:5119 train loss:3.580171 +step:5120 train loss:3.621932 +step:5121 train loss:3.570769 +step:5122 train loss:3.581402 +step:5123 train loss:3.564248 +step:5124 train loss:3.528176 +step:5125 train loss:3.638517 +step:5126 train loss:3.625327 +step:5127 train loss:3.627993 +step:5128 train loss:3.641447 +step:5129 train loss:3.567866 +step:5130 train loss:3.580628 +step:5131 train loss:3.519615 +step:5132 train loss:3.640678 +step:5133 train loss:3.607595 +step:5134 train loss:3.610942 +step:5135 train loss:3.561934 +step:5136 train loss:3.630017 +step:5137 train loss:3.627648 +step:5138 train loss:3.606770 +step:5139 train loss:3.641719 +step:5140 train loss:3.616818 +step:5141 train loss:3.647014 +step:5142 train loss:3.594790 +step:5143 train loss:3.622350 +step:5144 train loss:3.617465 +step:5145 train loss:3.562785 +step:5146 train loss:3.556844 +step:5147 train loss:3.633988 +step:5148 train loss:3.563063 +step:5149 train loss:3.633951 +step:5150 train loss:3.608646 +step:5151 train loss:3.575043 +step:5152 train loss:3.621873 +step:5153 train loss:3.591928 +step:5154 train loss:3.608047 +step:5155 train loss:3.611116 +step:5156 train loss:3.589918 +step:5157 train loss:3.591932 +step:5158 train loss:3.613792 +step:5159 train loss:3.649515 +step:5160 train loss:3.720596 +step:5161 train loss:3.647127 +step:5162 train loss:3.663499 +step:5163 train loss:3.578428 +step:5164 train loss:3.643136 +step:5165 train loss:3.656415 +step:5166 train loss:3.595175 +step:5167 train loss:3.693670 +step:5168 train 
loss:3.607455 +step:5169 train loss:3.641255 +step:5170 train loss:3.619741 +step:5171 train loss:3.663538 +step:5172 train loss:3.581708 +step:5173 train loss:3.646795 +step:5174 train loss:3.582332 +step:5175 train loss:3.613753 +step:5176 train loss:3.602473 +step:5177 train loss:3.599174 +step:5178 train loss:3.666939 +step:5179 train loss:3.577431 +step:5180 train loss:3.658425 +step:5181 train loss:3.603363 +step:5182 train loss:3.660737 +step:5183 train loss:3.591906 +step:5184 train loss:3.571761 +step:5185 train loss:3.596796 +step:5186 train loss:3.651480 +step:5187 train loss:3.647199 +step:5188 train loss:3.580356 +step:5189 train loss:3.624278 +step:5190 train loss:3.606379 +step:5191 train loss:3.587068 +step:5192 train loss:3.570772 +step:5193 train loss:3.656523 +step:5194 train loss:3.607693 +step:5195 train loss:3.579986 +step:5196 train loss:3.650670 +step:5197 train loss:3.690193 +step:5198 train loss:3.609157 +step:5199 train loss:3.594245 +step:5200 train loss:3.618189 +step:5201 train loss:3.608950 +step:5202 train loss:3.611126 +step:5203 train loss:3.617469 +step:5204 train loss:3.586230 +step:5205 train loss:3.629869 +step:5206 train loss:3.567968 +step:5207 train loss:3.571055 +step:5208 train loss:3.634029 +step:5209 train loss:3.652435 +step:5210 train loss:3.555776 +step:5211 train loss:3.600344 +step:5212 train loss:3.617687 +step:5213 train loss:3.592777 +step:5214 train loss:3.640232 +step:5215 train loss:3.753209 +step:5216 train loss:3.602826 +step:5217 train loss:3.582500 +step:5218 train loss:3.586926 +step:5219 train loss:3.648779 +step:5220 train loss:3.569056 +step:5221 train loss:3.568968 +step:5222 train loss:3.652735 +step:5223 train loss:3.644869 +step:5224 train loss:3.542159 +step:5225 train loss:3.690047 +step:5226 train loss:3.605356 +step:5227 train loss:3.677557 +step:5228 train loss:3.649111 +step:5229 train loss:3.586958 +step:5230 train loss:3.602985 +step:5231 train loss:3.547773 +step:5232 train loss:3.671635 +step:5233 train loss:3.632545 +step:5234 train loss:3.634863 +step:5235 train loss:3.584315 +step:5236 train loss:3.661916 +step:5237 train loss:3.712656 +step:5238 train loss:3.613558 +step:5239 train loss:3.676702 +step:5240 train loss:3.560241 +step:5241 train loss:3.619482 +step:5242 train loss:3.589332 +step:5243 train loss:3.594297 +step:5244 train loss:3.594701 +step:5245 train loss:3.636998 +step:5246 train loss:3.679586 +step:5247 train loss:3.608693 +step:5248 train loss:3.578038 +step:5249 train loss:3.636543 +step:5250 validation loss:3.539452 +step:5250 train loss:3.606133 +step:5251 train loss:3.669773 +step:5252 train loss:3.561995 +step:5253 train loss:3.712491 +step:5254 train loss:3.586432 +step:5255 train loss:3.658265 +step:5256 train loss:3.573151 +step:5257 train loss:3.625778 +step:5258 train loss:3.624952 +step:5259 train loss:3.609463 +step:5260 train loss:3.604884 +step:5261 train loss:3.594199 +step:5262 train loss:3.635289 +step:5263 train loss:3.621419 +step:5264 train loss:3.572942 +step:5265 train loss:3.653440 +step:5266 train loss:3.572026 +step:5267 train loss:3.582878 +step:5268 train loss:3.563965 +step:5269 train loss:3.567380 +step:5270 train loss:3.617099 +step:5271 train loss:3.541362 +step:5272 train loss:3.634474 +step:5273 train loss:3.543715 +step:5274 train loss:3.592503 +step:5275 train loss:3.607128 +step:5276 train loss:3.731751 +step:5277 train loss:3.630595 +step:5278 train loss:3.579092 +step:5279 train loss:3.627564 +step:5280 train loss:3.603853 +step:5281 train loss:3.598436 
+step:5282 train loss:3.571344 +step:5283 train loss:3.569332 +step:5284 train loss:3.577959 +step:5285 train loss:3.646980 +step:5286 train loss:3.553091 +step:5287 train loss:3.654621 +step:5288 train loss:3.630550 +step:5289 train loss:3.601615 +step:5290 train loss:3.654335 +step:5291 train loss:3.606233 +step:5292 train loss:3.624123 +step:5293 train loss:3.592569 +step:5294 train loss:3.582271 +step:5295 train loss:3.588291 +step:5296 train loss:3.578770 +step:5297 train loss:3.601735 +step:5298 train loss:3.545568 +step:5299 train loss:3.638637 +step:5300 train loss:3.588940 +step:5301 train loss:3.659346 +step:5302 train loss:3.663917 +step:5303 train loss:3.524488 +step:5304 train loss:3.557821 +step:5305 train loss:3.537355 +step:5306 train loss:3.568919 +step:5307 train loss:3.573091 +step:5308 train loss:3.665283 +step:5309 train loss:3.616707 +step:5310 train loss:3.601348 +step:5311 train loss:3.670671 +step:5312 train loss:3.549111 +step:5313 train loss:3.640614 +step:5314 train loss:3.634522 +step:5315 train loss:3.594564 +step:5316 train loss:3.627030 +step:5317 train loss:3.644148 +step:5318 train loss:3.599892 +step:5319 train loss:3.624912 +step:5320 train loss:3.577722 +step:5321 train loss:3.699968 +step:5322 train loss:3.608587 +step:5323 train loss:3.610778 +step:5324 train loss:3.555326 +step:5325 train loss:3.637153 +step:5326 train loss:3.629240 +step:5327 train loss:3.518985 +step:5328 train loss:3.655685 +step:5329 train loss:3.620459 +step:5330 train loss:3.617207 +step:5331 train loss:3.669502 +step:5332 train loss:3.591211 +step:5333 train loss:3.653917 +step:5334 train loss:3.629681 +step:5335 train loss:3.687950 +step:5336 train loss:3.724266 +step:5337 train loss:3.559133 +step:5338 train loss:3.566379 +step:5339 train loss:3.589772 +step:5340 train loss:3.613902 +step:5341 train loss:3.628264 +step:5342 train loss:3.529186 +step:5343 train loss:3.686449 +step:5344 train loss:3.570555 +step:5345 train loss:3.570482 +step:5346 train loss:3.572668 +step:5347 train loss:3.596670 +step:5348 train loss:3.639052 +step:5349 train loss:3.577404 +step:5350 train loss:3.618762 +step:5351 train loss:3.693755 +step:5352 train loss:3.732596 +step:5353 train loss:3.642462 +step:5354 train loss:3.610190 +step:5355 train loss:3.577322 +step:5356 train loss:3.603405 +step:5357 train loss:3.580371 +step:5358 train loss:3.603039 +step:5359 train loss:3.616071 +step:5360 train loss:3.586860 +step:5361 train loss:3.589077 +step:5362 train loss:3.573445 +step:5363 train loss:3.570240 +step:5364 train loss:3.570989 +step:5365 train loss:3.603968 +step:5366 train loss:3.635690 +step:5367 train loss:3.563708 +step:5368 train loss:3.628524 +step:5369 train loss:3.646025 +step:5370 train loss:3.544426 +step:5371 train loss:3.600810 +step:5372 train loss:3.619020 +step:5373 train loss:3.660738 +step:5374 train loss:3.543589 +step:5375 train loss:3.587451 +step:5376 train loss:3.657485 +step:5377 train loss:3.592218 +step:5378 train loss:3.568610 +step:5379 train loss:3.569066 +step:5380 train loss:3.603127 +step:5381 train loss:3.646675 +step:5382 train loss:3.542968 +step:5383 train loss:3.623952 +step:5384 train loss:3.629225 +step:5385 train loss:3.625397 +step:5386 train loss:3.608974 +step:5387 train loss:3.614281 +step:5388 train loss:3.626337 +step:5389 train loss:3.557424 +step:5390 train loss:3.585702 +step:5391 train loss:3.525910 +step:5392 train loss:3.589839 +step:5393 train loss:3.580766 +step:5394 train loss:3.575342 +step:5395 train loss:3.648833 +step:5396 train 
loss:3.612319 +step:5397 train loss:3.633183 +step:5398 train loss:3.629884 +step:5399 train loss:3.660764 +step:5400 train loss:3.667437 +step:5401 train loss:3.627864 +step:5402 train loss:3.733912 +step:5403 train loss:3.641240 +step:5404 train loss:3.612094 +step:5405 train loss:3.684975 +step:5406 train loss:3.643628 +step:5407 train loss:3.573494 +step:5408 train loss:3.716559 +step:5409 train loss:3.557324 +step:5410 train loss:3.622987 +step:5411 train loss:3.608722 +step:5412 train loss:3.580265 +step:5413 train loss:3.634976 +step:5414 train loss:3.610733 +step:5415 train loss:3.589170 +step:5416 train loss:3.582973 +step:5417 train loss:3.650933 +step:5418 train loss:3.667701 +step:5419 train loss:3.569672 +step:5420 train loss:3.632825 +step:5421 train loss:3.600381 +step:5422 train loss:3.644240 +step:5423 train loss:3.622388 +step:5424 train loss:3.528461 +step:5425 train loss:3.592923 +step:5426 train loss:3.678525 +step:5427 train loss:3.570435 +step:5428 train loss:3.607154 +step:5429 train loss:3.542482 +step:5430 train loss:3.576133 +step:5431 train loss:3.639479 +step:5432 train loss:3.614063 +step:5433 train loss:3.621175 +step:5434 train loss:3.569079 +step:5435 train loss:3.566368 +step:5436 train loss:3.571580 +step:5437 train loss:3.608140 +step:5438 train loss:3.587918 +step:5439 train loss:3.592371 +step:5440 train loss:3.636311 +step:5441 train loss:3.656387 +step:5442 train loss:3.575102 +step:5443 train loss:3.578236 +step:5444 train loss:3.522696 +step:5445 train loss:3.608391 +step:5446 train loss:3.579208 +step:5447 train loss:3.615630 +step:5448 train loss:3.672009 +step:5449 train loss:3.562762 +step:5450 train loss:3.596626 +step:5451 train loss:3.591152 +step:5452 train loss:3.605134 +step:5453 train loss:3.660690 +step:5454 train loss:3.585916 +step:5455 train loss:3.573229 +step:5456 train loss:3.713528 +step:5457 train loss:3.590810 +step:5458 train loss:3.626738 +step:5459 train loss:3.570312 +step:5460 train loss:3.586322 +step:5461 train loss:3.592964 +step:5462 train loss:3.591546 +step:5463 train loss:3.601544 +step:5464 train loss:3.603871 +step:5465 train loss:3.547237 +step:5466 train loss:3.623918 +step:5467 train loss:3.608648 +step:5468 train loss:3.614074 +step:5469 train loss:3.709013 +step:5470 train loss:3.599988 +step:5471 train loss:3.673908 +step:5472 train loss:3.622147 +step:5473 train loss:3.524231 +step:5474 train loss:3.859927 +step:5475 train loss:3.534508 +step:5476 train loss:3.611265 +step:5477 train loss:3.612513 +step:5478 train loss:3.609693 +step:5479 train loss:3.754796 +step:5480 train loss:3.598699 +step:5481 train loss:3.660585 +step:5482 train loss:3.574575 +step:5483 train loss:3.612538 +step:5484 train loss:3.650278 +step:5485 train loss:3.567819 +step:5486 train loss:3.612201 +step:5487 train loss:3.612516 +step:5488 train loss:3.525611 +step:5489 train loss:3.630411 +step:5490 train loss:3.576975 +step:5491 train loss:3.677761 +step:5492 train loss:3.606940 +step:5493 train loss:3.534630 +step:5494 train loss:3.590936 +step:5495 train loss:3.569460 +step:5496 train loss:3.565845 +step:5497 train loss:3.684583 +step:5498 train loss:3.552216 +step:5499 train loss:3.690747 +step:5500 validation loss:3.530345 total_sharp:4.5056e-03 L1_sharp:1.6641e-01 L2_sharp:4.5189e-02 L3_sharp:3.3300e-02 L4_sharp:1.2764e-02 L5_sharp:1.6458e-02 L6_sharp:1.9982e-02 L7_sharp:2.4711e-02 L8_sharp:2.7114e-02 L9_sharp:1.8848e-02 L10_sharp:1.2627e-02 L11_sharp:1.3076e-02 L12_sharp:1.8755e-02 total_fnorm:1.3460e+00 
total_l1_linf:8.0810e+03 total_spectral:1.3460e+00 L1_fnorm:6.1252e-02 L2_fnorm:5.8867e-02 L3_fnorm:5.8302e-02 L4_fnorm:6.0028e-02 L5_fnorm:6.0848e-02 L6_fnorm:6.1309e-02 L7_fnorm:6.1279e-02 L8_fnorm:6.1290e-02 L9_fnorm:6.1232e-02 L10_fnorm:6.1300e-02 L11_fnorm:6.1307e-02 L12_fnorm:6.1250e-02 L1_l1linf:3.3046e-01 L2_l1linf:3.4848e-01 L3_l1linf:3.4176e-01 L4_l1linf:3.4910e-01 L5_l1linf:3.3251e-01 L6_l1linf:3.1870e-01 L7_l1linf:3.1534e-01 L8_l1linf:3.0946e-01 L9_l1linf:3.2413e-01 L10_l1linf:3.4676e-01 L11_l1linf:3.6188e-01 L12_l1linf:3.7091e-01 L1_spectral:7.3486e-03 L2_spectral:7.7794e-03 L3_spectral:7.7100e-03 L4_spectral:7.8442e-03 L5_spectral:7.4359e-03 L6_spectral:7.1501e-03 L7_spectral:7.1289e-03 L8_spectral:6.9521e-03 L9_spectral:7.2981e-03 L10_spectral:7.7636e-03 L11_spectral:8.1127e-03 L12_spectral:8.3175e-03 ip_v_neg_g:3.1512e-03 cos_v_neg_g:4.3883e-04 v_norm:1.3460e+00 g_norm:5.3351e+00 hv_norm:1.0195e+00 cos_v_hv:5.9481e-03 hg_norm:1.0463e+03 cos_g_hg:6.7168e-01 v_par:2.0446e-05 v_perp:1.3460e+00 L1_cos_v_neg_g:1.0396e-02 L1_v_norm:6.1252e-02 L2_cos_v_neg_g:8.4566e-03 L2_v_norm:5.8867e-02 L3_cos_v_neg_g:5.0814e-03 L3_v_norm:5.8302e-02 L4_cos_v_neg_g:9.1371e-04 L4_v_norm:6.0028e-02 L5_cos_v_neg_g:1.9884e-03 L5_v_norm:6.0848e-02 L6_cos_v_neg_g:8.7563e-04 L6_v_norm:6.1309e-02 L7_cos_v_neg_g:4.6527e-04 L7_v_norm:6.1279e-02 L8_cos_v_neg_g:1.1678e-03 L8_v_norm:6.1290e-02 L9_cos_v_neg_g:-4.8274e-04 L9_v_norm:6.1232e-02 L10_cos_v_neg_g:1.0239e-03 L10_v_norm:6.1300e-02 L11_cos_v_neg_g:1.0996e-03 L11_v_norm:6.1307e-02 L12_cos_v_neg_g:2.8186e-04 L12_v_norm:6.1250e-02 +step:5500 train loss:3.604825 +step:5501 train loss:3.676273 +step:5502 train loss:3.625439 +step:5503 train loss:3.590019 +step:5504 train loss:3.638035 +step:5505 train loss:3.599984 +step:5506 train loss:3.641338 +step:5507 train loss:3.630059 +step:5508 train loss:3.652011 +step:5509 train loss:3.662510 +step:5510 train loss:3.636989 +step:5511 train loss:3.628329 +step:5512 train loss:3.750972 +step:5513 train loss:3.551907 +step:5514 train loss:3.611632 +step:5515 train loss:3.639346 +step:5516 train loss:3.661374 +step:5517 train loss:3.622112 +step:5518 train loss:3.648062 +step:5519 train loss:3.683488 +step:5520 train loss:3.589559 +step:5521 train loss:3.601748 +step:5522 train loss:3.568237 +step:5523 train loss:3.614739 +step:5524 train loss:3.661757 +step:5525 train loss:3.571315 +step:5526 train loss:3.581938 +step:5527 train loss:3.605209 +step:5528 train loss:3.709622 +step:5529 train loss:3.670687 +step:5530 train loss:3.640977 +step:5531 train loss:3.575397 +step:5532 train loss:3.599783 +step:5533 train loss:3.635940 +step:5534 train loss:3.549782 +step:5535 train loss:3.601715 +step:5536 train loss:3.538466 +step:5537 train loss:3.585420 +step:5538 train loss:3.580963 +step:5539 train loss:3.524859 +step:5540 train loss:3.750679 +step:5541 train loss:3.559284 +step:5542 train loss:3.607989 +step:5543 train loss:3.597584 +step:5544 train loss:3.582963 +step:5545 train loss:3.578273 +step:5546 train loss:3.616022 +step:5547 train loss:3.547467 +step:5548 train loss:3.589639 +step:5549 train loss:3.593071 +step:5550 train loss:3.615947 +step:5551 train loss:3.623020 +step:5552 train loss:3.578739 +step:5553 train loss:3.607033 +step:5554 train loss:3.579576 +step:5555 train loss:3.587239 +step:5556 train loss:3.603590 +step:5557 train loss:3.671093 +step:5558 train loss:3.591191 +step:5559 train loss:3.596273 +step:5560 train loss:3.588026 +step:5561 train loss:3.624692 +step:5562 train loss:3.579262 
+step:5563 train loss:3.558708 +step:5564 train loss:3.595127 +step:5565 train loss:3.661697 +step:5566 train loss:3.564616 +step:5567 train loss:3.682915 +step:5568 train loss:3.805018 +step:5569 train loss:3.590971 +step:5570 train loss:3.520042 +step:5571 train loss:3.611073 +step:5572 train loss:3.551728 +step:5573 train loss:3.538979 +step:5574 train loss:3.508339 +step:5575 train loss:3.604886 +step:5576 train loss:3.588461 +step:5577 train loss:3.595683 +step:5578 train loss:3.623399 +step:5579 train loss:3.579259 +step:5580 train loss:3.603857 +step:5581 train loss:3.625493 +step:5582 train loss:3.604374 +step:5583 train loss:3.616217 +step:5584 train loss:3.733830 +step:5585 train loss:3.638807 +step:5586 train loss:3.579627 +step:5587 train loss:3.609229 +step:5588 train loss:3.625911 +step:5589 train loss:3.623844 +step:5590 train loss:3.683915 +step:5591 train loss:3.550621 +step:5592 train loss:3.725412 +step:5593 train loss:3.601805 +step:5594 train loss:3.607991 +step:5595 train loss:3.599476 +step:5596 train loss:3.548117 +step:5597 train loss:3.567417 +step:5598 train loss:3.571755 +step:5599 train loss:3.576514 +step:5600 train loss:3.618892 +step:5601 train loss:3.642655 +step:5602 train loss:3.578291 +step:5603 train loss:3.618704 +step:5604 train loss:3.615159 +step:5605 train loss:3.588191 +step:5606 train loss:3.592583 +step:5607 train loss:3.622860 +step:5608 train loss:3.565829 +step:5609 train loss:3.616932 +step:5610 train loss:3.574251 +step:5611 train loss:3.614013 +step:5612 train loss:3.641589 +step:5613 train loss:3.603418 +step:5614 train loss:3.569033 +step:5615 train loss:3.669947 +step:5616 train loss:3.567780 +step:5617 train loss:3.656972 +step:5618 train loss:3.639149 +step:5619 train loss:3.594144 +step:5620 train loss:3.593766 +step:5621 train loss:3.669852 +step:5622 train loss:3.553098 +step:5623 train loss:3.589821 +step:5624 train loss:3.578820 +step:5625 train loss:3.613518 +step:5626 train loss:3.605690 +step:5627 train loss:3.580894 +step:5628 train loss:3.621087 +step:5629 train loss:3.600277 +step:5630 train loss:3.530670 +step:5631 train loss:3.573532 +step:5632 train loss:3.616044 +step:5633 train loss:3.608864 +step:5634 train loss:3.562846 +step:5635 train loss:3.601060 +step:5636 train loss:3.577980 +step:5637 train loss:3.717667 +step:5638 train loss:3.625753 +step:5639 train loss:3.604578 +step:5640 train loss:3.608106 +step:5641 train loss:3.648568 +step:5642 train loss:3.581794 +step:5643 train loss:3.598576 +step:5644 train loss:3.679935 +step:5645 train loss:3.636973 +step:5646 train loss:3.634941 +step:5647 train loss:3.624408 +step:5648 train loss:3.615926 +step:5649 train loss:3.527045 +step:5650 train loss:3.533883 +step:5651 train loss:3.607679 +step:5652 train loss:3.608744 +step:5653 train loss:3.576618 +step:5654 train loss:3.706360 +step:5655 train loss:3.565616 +step:5656 train loss:3.591595 +step:5657 train loss:3.659043 +step:5658 train loss:3.560107 +step:5659 train loss:3.597914 +step:5660 train loss:3.647736 +step:5661 train loss:3.586860 +step:5662 train loss:3.627204 +step:5663 train loss:3.516219 +step:5664 train loss:3.488753 +step:5665 train loss:3.610372 +step:5666 train loss:3.614232 +step:5667 train loss:3.649124 +step:5668 train loss:3.580637 +step:5669 train loss:3.593046 +step:5670 train loss:3.594368 +step:5671 train loss:3.576675 +step:5672 train loss:3.628627 +step:5673 train loss:3.594747 +step:5674 train loss:3.668813 +step:5675 train loss:3.580021 +step:5676 train loss:3.730433 +step:5677 train 
loss:3.624387 +step:5678 train loss:3.601071 +step:5679 train loss:3.590100 +step:5680 train loss:3.625184 +step:5681 train loss:3.594400 +step:5682 train loss:3.607082 +step:5683 train loss:3.565501 +step:5684 train loss:3.576561 +step:5685 train loss:3.619113 +step:5686 train loss:3.636429 +step:5687 train loss:3.583260 +step:5688 train loss:3.673795 +step:5689 train loss:3.577703 +step:5690 train loss:3.728184 +step:5691 train loss:3.560750 +step:5692 train loss:3.547456 +step:5693 train loss:3.554347 +step:5694 train loss:3.571980 +step:5695 train loss:3.588574 +step:5696 train loss:3.639756 +step:5697 train loss:3.566313 +step:5698 train loss:3.585173 +step:5699 train loss:3.598920 +step:5700 train loss:3.595206 +step:5701 train loss:3.587411 +step:5702 train loss:3.657503 +step:5703 train loss:3.555505 +step:5704 train loss:3.599345 +step:5705 train loss:3.607989 +step:5706 train loss:3.631470 +step:5707 train loss:3.547502 +step:5708 train loss:3.633180 +step:5709 train loss:3.639269 +step:5710 train loss:3.626074 +step:5711 train loss:3.650055 +step:5712 train loss:3.632493 +step:5713 train loss:3.557109 +step:5714 train loss:3.642003 +step:5715 train loss:3.599632 +step:5716 train loss:3.604288 +step:5717 train loss:3.630536 +step:5718 train loss:3.573274 +step:5719 train loss:3.646265 +step:5720 train loss:3.616786 +step:5721 train loss:3.548079 +step:5722 train loss:3.558664 +step:5723 train loss:3.639230 +step:5724 train loss:3.558382 +step:5725 train loss:3.627442 +step:5726 train loss:3.624332 +step:5727 train loss:3.580505 +step:5728 train loss:3.584555 +step:5729 train loss:3.585054 +step:5730 train loss:3.658575 +step:5731 train loss:3.525078 +step:5732 train loss:3.585544 +step:5733 train loss:3.575163 +step:5734 train loss:3.590574 +step:5735 train loss:3.579799 +step:5736 train loss:3.586517 +step:5737 train loss:3.605476 +step:5738 train loss:3.571046 +step:5739 train loss:3.583715 +step:5740 train loss:3.622663 +step:5741 train loss:3.596469 +step:5742 train loss:3.651704 +step:5743 train loss:3.617025 +step:5744 train loss:3.574743 +step:5745 train loss:3.579141 +step:5746 train loss:3.608892 +step:5747 train loss:3.594995 +step:5748 train loss:3.641598 +step:5749 train loss:3.598577 +step:5750 validation loss:3.526641 +step:5750 train loss:3.604828 +step:5751 train loss:3.619191 +step:5752 train loss:3.604777 +step:5753 train loss:3.575006 +step:5754 train loss:3.585793 +step:5755 train loss:3.600641 +step:5756 train loss:3.586564 +step:5757 train loss:3.648770 +step:5758 train loss:3.584955 +step:5759 train loss:3.547189 +step:5760 train loss:3.627502 +step:5761 train loss:3.622280 +step:5762 train loss:3.582188 +step:5763 train loss:3.607511 +step:5764 train loss:3.570704 +step:5765 train loss:3.690522 +step:5766 train loss:3.598560 +step:5767 train loss:3.632720 +step:5768 train loss:3.570073 +step:5769 train loss:3.690902 +step:5770 train loss:3.614866 +step:5771 train loss:3.641053 +step:5772 train loss:3.594226 +step:5773 train loss:3.572011 +step:5774 train loss:3.579444 +step:5775 train loss:3.650824 +step:5776 train loss:3.638085 +step:5777 train loss:3.554339 +step:5778 train loss:3.638617 +step:5779 train loss:3.600066 +step:5780 train loss:3.571108 +step:5781 train loss:3.637009 +step:5782 train loss:3.596078 +step:5783 train loss:3.554689 +step:5784 train loss:3.660084 +step:5785 train loss:3.648761 +step:5786 train loss:3.558053 +step:5787 train loss:3.606195 +step:5788 train loss:3.615275 +step:5789 train loss:3.559690 +step:5790 train loss:3.657711 
+step:5791 train loss:3.588208 +step:5792 train loss:3.858883 +step:5793 train loss:3.631898 +step:5794 train loss:3.650321 +step:5795 train loss:3.642034 +step:5796 train loss:3.623774 +step:5797 train loss:3.605725 +step:5798 train loss:3.604078 +step:5799 train loss:3.573371 +step:5800 train loss:3.732110 +step:5801 train loss:3.606758 +step:5802 train loss:3.593683 +step:5803 train loss:3.606771 +step:5804 train loss:3.626720 +step:5805 train loss:3.586703 +step:5806 train loss:3.628838 +step:5807 train loss:3.550987 +step:5808 train loss:3.582886 +step:5809 train loss:3.595396 +step:5810 train loss:3.568819 +step:5811 train loss:3.583303 +step:5812 train loss:3.562728 +step:5813 train loss:3.574408 +step:5814 train loss:3.568360 +step:5815 train loss:3.571586 +step:5816 train loss:3.632668 +step:5817 train loss:3.644408 +step:5818 train loss:3.618439 +step:5819 train loss:3.669790 +step:5820 train loss:3.608824 +step:5821 train loss:3.602681 +step:5822 train loss:3.617979 +step:5823 train loss:3.621741 +step:5824 train loss:3.571382 +step:5825 train loss:3.666152 +step:5826 train loss:3.578697 +step:5827 train loss:3.545969 +step:5828 train loss:3.530863 +step:5829 train loss:3.597964 +step:5830 train loss:3.568384 +step:5831 train loss:3.542052 +step:5832 train loss:3.655965 +step:5833 train loss:3.633211 +step:5834 train loss:3.616749 +step:5835 train loss:3.569252 +step:5836 train loss:3.530618 +step:5837 train loss:3.652326 +step:5838 train loss:3.633448 +step:5839 train loss:3.609325 +step:5840 train loss:3.689789 +step:5841 train loss:3.615310 +step:5842 train loss:3.625773 +step:5843 train loss:3.573000 +step:5844 train loss:3.641129 +step:5845 train loss:3.549603 +step:5846 train loss:3.597351 +step:5847 train loss:3.624267 +step:5848 train loss:3.693116 +step:5849 train loss:3.586534 +step:5850 train loss:3.615214 +step:5851 train loss:3.579644 +step:5852 train loss:3.669105 +step:5853 train loss:3.760787 +step:5854 train loss:3.550078 +step:5855 train loss:3.611336 +step:5856 train loss:3.583288 +step:5857 train loss:3.593091 +step:5858 train loss:3.567350 +step:5859 train loss:3.571446 +step:5860 train loss:3.674201 +step:5861 train loss:3.560449 +step:5862 train loss:3.671794 +step:5863 train loss:3.611018 +step:5864 train loss:3.597213 +step:5865 train loss:3.601999 +step:5866 train loss:3.596170 +step:5867 train loss:3.677351 +step:5868 train loss:3.600300 +step:5869 train loss:3.624886 +step:5870 train loss:3.600561 +step:5871 train loss:3.583538 +step:5872 train loss:3.610486 +step:5873 train loss:3.590536 +step:5874 train loss:3.670625 +step:5875 train loss:3.600515 +step:5876 train loss:3.580501 +step:5877 train loss:3.589524 +step:5878 train loss:3.586991 +step:5879 train loss:3.560333 +step:5880 train loss:3.758319 +step:5881 train loss:3.597317 +step:5882 train loss:3.567479 +step:5883 train loss:3.566962 +step:5884 train loss:3.588024 +step:5885 train loss:3.582709 +step:5886 train loss:3.603637 +step:5887 train loss:3.604154 +step:5888 train loss:3.584885 +step:5889 train loss:3.563467 +step:5890 train loss:3.610329 +step:5891 train loss:3.554844 +step:5892 train loss:3.635304 +step:5893 train loss:3.557703 +step:5894 train loss:3.548697 +step:5895 train loss:3.555005 +step:5896 train loss:3.564878 +step:5897 train loss:3.633372 +step:5898 train loss:3.849171 +step:5899 train loss:3.584610 +step:5900 train loss:3.632795 +step:5901 train loss:3.582127 +step:5902 train loss:3.595315 +step:5903 train loss:3.585693 +step:5904 train loss:3.614921 +step:5905 train 
loss:3.720110 +step:5906 train loss:3.660766 +step:5907 train loss:3.605662 +step:5908 train loss:3.582611 +step:5909 train loss:3.573848 +step:5910 train loss:3.561936 +step:5911 train loss:3.578554 +step:5912 train loss:3.583985 +step:5913 train loss:3.615167 +step:5914 train loss:3.597521 +step:5915 train loss:3.718619 +step:5916 train loss:3.600490 +step:5917 train loss:3.573299 +step:5918 train loss:3.570358 +step:5919 train loss:3.597239 +step:5920 train loss:3.594222 +step:5921 train loss:3.566574 +step:5922 train loss:3.624285 +step:5923 train loss:3.617448 +step:5924 train loss:3.572535 +step:5925 train loss:3.695776 +step:5926 train loss:3.581626 +step:5927 train loss:3.559448 +step:5928 train loss:3.596685 +step:5929 train loss:3.615073 +step:5930 train loss:3.565846 +step:5931 train loss:3.548786 +step:5932 train loss:3.590147 +step:5933 train loss:3.642598 +step:5934 train loss:3.557892 +step:5935 train loss:3.584946 +step:5936 train loss:3.574044 +step:5937 train loss:3.553904 +step:5938 train loss:3.571934 +step:5939 train loss:3.549569 +step:5940 train loss:3.633194 +step:5941 train loss:3.567247 +step:5942 train loss:3.583677 +step:5943 train loss:3.587691 +step:5944 train loss:3.639272 +step:5945 train loss:3.571967 +step:5946 train loss:3.553072 +step:5947 train loss:3.564684 +step:5948 train loss:3.601721 +step:5949 train loss:3.647662 +step:5950 train loss:3.607807 +step:5951 train loss:3.609833 +step:5952 train loss:3.533560 +step:5953 train loss:3.574478 +step:5954 train loss:3.584723 +step:5955 train loss:3.591259 +step:5956 train loss:3.568590 +step:5957 train loss:3.535855 +step:5958 train loss:3.612195 +step:5959 train loss:3.569718 +step:5960 train loss:3.544181 +step:5961 train loss:3.570778 +step:5962 train loss:3.600361 +step:5963 train loss:3.635264 +step:5964 train loss:3.590550 +step:5965 train loss:3.608614 +step:5966 train loss:3.605654 +step:5967 train loss:3.571474 +step:5968 train loss:3.644580 +step:5969 train loss:3.587227 +step:5970 train loss:3.603580 +step:5971 train loss:3.553741 +step:5972 train loss:3.581780 +step:5973 train loss:3.571612 +step:5974 train loss:3.595789 +step:5975 train loss:3.565961 +step:5976 train loss:3.605685 +step:5977 train loss:3.564172 +step:5978 train loss:3.548885 +step:5979 train loss:3.585166 +step:5980 train loss:3.653327 +step:5981 train loss:3.549069 +step:5982 train loss:3.559724 +step:5983 train loss:3.627153 +step:5984 train loss:3.569426 +step:5985 train loss:3.612648 +step:5986 train loss:3.588895 +step:5987 train loss:3.573093 +step:5988 train loss:3.581508 +step:5989 train loss:3.599769 +step:5990 train loss:3.530540 +step:5991 train loss:3.595609 +step:5992 train loss:3.625792 +step:5993 train loss:3.578713 +step:5994 train loss:3.599221 +step:5995 train loss:3.490938 +step:5996 train loss:3.655056 +step:5997 train loss:3.638061 +step:5998 train loss:3.514333 +step:5999 train loss:3.539586 +step:6000 validation loss:3.511816 total_sharp:5.6226e-03 L1_sharp:1.2619e-01 L2_sharp:2.6622e-02 L3_sharp:4.2694e-02 L4_sharp:2.1672e-02 L5_sharp:2.4231e-02 L6_sharp:2.3442e-02 L7_sharp:3.0992e-02 L8_sharp:3.4991e-02 L9_sharp:2.7273e-02 L10_sharp:1.9095e-02 L11_sharp:2.1668e-02 L12_sharp:3.9284e-02 total_fnorm:1.3442e+00 total_l1_linf:8.0676e+03 total_spectral:1.3442e+00 L1_fnorm:6.1155e-02 L2_fnorm:5.8814e-02 L3_fnorm:5.8240e-02 L4_fnorm:6.0061e-02 L5_fnorm:6.0894e-02 L6_fnorm:6.1235e-02 L7_fnorm:6.1291e-02 L8_fnorm:6.1245e-02 L9_fnorm:6.1246e-02 L10_fnorm:6.1408e-02 L11_fnorm:6.1317e-02 L12_fnorm:6.1362e-02 
L1_l1linf:3.3839e-01 L2_l1linf:3.5876e-01 L3_l1linf:3.5620e-01 L4_l1linf:3.6332e-01 L5_l1linf:3.3249e-01 L6_l1linf:3.0399e-01 L7_l1linf:3.1299e-01 L8_l1linf:3.1529e-01 L9_l1linf:3.3441e-01 L10_l1linf:3.4485e-01 L11_l1linf:3.7213e-01 L12_l1linf:3.6506e-01 L1_spectral:7.6096e-03 L2_spectral:8.0966e-03 L3_spectral:8.0227e-03 L4_spectral:8.1402e-03 L5_spectral:7.4660e-03 L6_spectral:6.8040e-03 L7_spectral:7.0382e-03 L8_spectral:7.0472e-03 L9_spectral:7.4790e-03 L10_spectral:7.7729e-03 L11_spectral:8.2605e-03 L12_spectral:8.2267e-03 ip_v_neg_g:5.1136e-03 cos_v_neg_g:1.1976e-03 v_norm:1.3442e+00 g_norm:3.1766e+00 hv_norm:8.6081e-01 cos_v_hv:8.7801e-03 hg_norm:1.3427e+02 cos_g_hg:4.5138e-01 v_par:3.4805e-05 v_perp:1.3442e+00 L1_cos_v_neg_g:1.0841e-02 L1_v_norm:6.1155e-02 L2_cos_v_neg_g:8.8573e-03 L2_v_norm:5.8814e-02 L3_cos_v_neg_g:7.3361e-03 L3_v_norm:5.8240e-02 L4_cos_v_neg_g:7.1060e-03 L4_v_norm:6.0061e-02 L5_cos_v_neg_g:6.8295e-03 L5_v_norm:6.0894e-02 L6_cos_v_neg_g:6.3823e-03 L6_v_norm:6.1235e-02 L7_cos_v_neg_g:6.6097e-03 L7_v_norm:6.1291e-02 L8_cos_v_neg_g:7.3651e-03 L8_v_norm:6.1245e-02 L9_cos_v_neg_g:8.1764e-03 L9_v_norm:6.1246e-02 L10_cos_v_neg_g:6.4421e-03 L10_v_norm:6.1408e-02 L11_cos_v_neg_g:5.2951e-03 L11_v_norm:6.1317e-02 L12_cos_v_neg_g:4.4788e-03 L12_v_norm:6.1362e-02 +step:6000 train loss:3.593530 +step:6001 train loss:3.555480 +step:6002 train loss:3.584272 +step:6003 train loss:3.606715 +step:6004 train loss:3.553758 +step:6005 train loss:3.628184 +step:6006 train loss:3.534820 +step:6007 train loss:3.557580 +step:6008 train loss:3.570832 +step:6009 train loss:3.606807 +step:6010 train loss:3.594866 +step:6011 train loss:3.586579 +step:6012 train loss:3.555359 +step:6013 train loss:3.613858 +step:6014 train loss:3.633254 +step:6015 train loss:3.631576 +step:6016 train loss:3.599109 +step:6017 train loss:3.608483 +step:6018 train loss:3.546361 +step:6019 train loss:3.581977 +step:6020 train loss:3.568789 +step:6021 train loss:3.496355 +step:6022 train loss:3.610989 +step:6023 train loss:3.545837 +step:6024 train loss:3.622759 +step:6025 train loss:3.588979 +step:6026 train loss:3.559446 +step:6027 train loss:3.600375 +step:6028 train loss:3.517632 +step:6029 train loss:3.631443 +step:6030 train loss:3.601304 +step:6031 train loss:3.573286 +step:6032 train loss:3.533646 +step:6033 train loss:3.588033 +step:6034 train loss:3.614402 +step:6035 train loss:3.532425 +step:6036 train loss:3.507570 +step:6037 train loss:3.618379 +step:6038 train loss:3.625547 +step:6039 train loss:3.608223 +step:6040 train loss:3.567967 +step:6041 train loss:3.548110 +step:6042 train loss:3.526590 +step:6043 train loss:3.587873 +step:6044 train loss:3.709485 +step:6045 train loss:3.548927 +step:6046 train loss:3.559681 +step:6047 train loss:3.596259 +step:6048 train loss:3.602653 +step:6049 train loss:3.583502 +step:6050 train loss:3.551389 +step:6051 train loss:3.605130 +step:6052 train loss:3.576019 +step:6053 train loss:3.691678 +step:6054 train loss:3.732165 +step:6055 train loss:3.546489 +step:6056 train loss:3.537194 +step:6057 train loss:3.571297 +step:6058 train loss:3.599249 +step:6059 train loss:3.603544 +step:6060 train loss:3.608722 +step:6061 train loss:3.626570 +step:6062 train loss:3.577215 +step:6063 train loss:3.590632 +step:6064 train loss:3.588449 +step:6065 train loss:3.587476 +step:6066 train loss:3.575159 +step:6067 train loss:3.614233 +step:6068 train loss:3.555722 +step:6069 train loss:3.510441 +step:6070 train loss:3.662039 +step:6071 train loss:3.604173 +step:6072 train 
loss:3.545470 +step:6073 train loss:3.584553 +step:6074 train loss:3.669882 +step:6075 train loss:3.589400 +step:6076 train loss:3.598340 +step:6077 train loss:3.601383 +step:6078 train loss:3.537650 +step:6079 train loss:3.566138 +step:6080 train loss:3.574416 +step:6081 train loss:3.610750 +step:6082 train loss:3.560506 +step:6083 train loss:3.572807 +step:6084 train loss:3.637310 +step:6085 train loss:3.633714 +step:6086 train loss:3.534780 +step:6087 train loss:3.579556 +step:6088 train loss:3.565524 +step:6089 train loss:3.624982 +step:6090 train loss:3.625741 +step:6091 train loss:3.575302 +step:6092 train loss:3.535524 +step:6093 train loss:3.596394 +step:6094 train loss:3.511863 +step:6095 train loss:3.676183 +step:6096 train loss:3.546240 +step:6097 train loss:3.622548 +step:6098 train loss:3.597876 +step:6099 train loss:3.654294 +step:6100 train loss:3.649199 +step:6101 train loss:3.583762 +step:6102 train loss:3.700642 +step:6103 train loss:3.586754 +step:6104 train loss:3.697355 +step:6105 train loss:3.628434 +step:6106 train loss:3.567722 +step:6107 train loss:3.633101 +step:6108 train loss:3.595948 +step:6109 train loss:3.665541 +step:6110 train loss:3.598266 +step:6111 train loss:3.632586 +step:6112 train loss:3.570125 +step:6113 train loss:3.597016 +step:6114 train loss:3.569236 +step:6115 train loss:3.630541 +step:6116 train loss:3.573710 +step:6117 train loss:3.626510 +step:6118 train loss:3.606870 +step:6119 train loss:3.617821 +step:6120 train loss:3.760247 +step:6121 train loss:3.597657 +step:6122 train loss:3.607104 +step:6123 train loss:3.591007 +step:6124 train loss:3.564193 +step:6125 train loss:3.554994 +step:6126 train loss:3.573831 +step:6127 train loss:3.561511 +step:6128 train loss:3.522409 +step:6129 train loss:3.758572 +step:6130 train loss:3.547765 +step:6131 train loss:3.523825 +step:6132 train loss:3.596264 +step:6133 train loss:3.560934 +step:6134 train loss:3.590134 +step:6135 train loss:3.672065 +step:6136 train loss:3.693404 +step:6137 train loss:3.553643 +step:6138 train loss:3.611196 +step:6139 train loss:3.590139 +step:6140 train loss:3.588147 +step:6141 train loss:3.548470 +step:6142 train loss:3.612161 +step:6143 train loss:3.577450 +step:6144 train loss:3.600869 +step:6145 train loss:3.843919 +step:6146 train loss:3.681012 +step:6147 train loss:3.765258 +step:6148 train loss:3.532088 +step:6149 train loss:3.658821 +step:6150 train loss:3.612618 +step:6151 train loss:3.564975 +step:6152 train loss:3.561882 +step:6153 train loss:3.630928 +step:6154 train loss:3.716732 +step:6155 train loss:3.583374 +step:6156 train loss:3.676600 +step:6157 train loss:3.607022 +step:6158 train loss:3.599887 +step:6159 train loss:3.565863 +step:6160 train loss:3.732082 +step:6161 train loss:3.583222 +step:6162 train loss:3.600729 +step:6163 train loss:3.633050 +step:6164 train loss:3.546368 +step:6165 train loss:3.613294 +step:6166 train loss:3.607111 +step:6167 train loss:3.623668 +step:6168 train loss:3.605113 +step:6169 train loss:3.595728 +step:6170 train loss:3.599654 +step:6171 train loss:3.568308 +step:6172 train loss:3.555645 +step:6173 train loss:3.605466 +step:6174 train loss:3.533991 +step:6175 train loss:3.545141 +step:6176 train loss:3.529243 +step:6177 train loss:3.626422 +step:6178 train loss:3.570350 +step:6179 train loss:3.578332 +step:6180 train loss:3.584488 +step:6181 train loss:3.618478 +step:6182 train loss:3.499760 +step:6183 train loss:3.510908 +step:6184 train loss:3.628398 +step:6185 train loss:3.585394 +step:6186 train loss:3.546174 
+step:6187 train loss:3.584143 +step:6188 train loss:3.552826 +step:6189 train loss:3.592348 +step:6190 train loss:3.553406 +step:6191 train loss:3.585681 +step:6192 train loss:3.551341 +step:6193 train loss:3.618999 +step:6194 train loss:3.609255 +step:6195 train loss:3.589301 +step:6196 train loss:3.604270 +step:6197 train loss:3.628891 +step:6198 train loss:3.541965 +step:6199 train loss:3.565497 +step:6200 train loss:3.606356 +step:6201 train loss:3.650811 +step:6202 train loss:3.649403 +step:6203 train loss:3.651036 +step:6204 train loss:3.634103 +step:6205 train loss:3.573061 +step:6206 train loss:3.559958 +step:6207 train loss:3.616256 +step:6208 train loss:3.645118 +step:6209 train loss:3.612737 +step:6210 train loss:3.644775 +step:6211 train loss:3.562096 +step:6212 train loss:3.554255 +step:6213 train loss:3.570081 +step:6214 train loss:3.544635 +step:6215 train loss:3.718323 +step:6216 train loss:3.587704 +step:6217 train loss:3.647270 +step:6218 train loss:3.621496 +step:6219 train loss:3.635188 +step:6220 train loss:3.590612 +step:6221 train loss:3.556204 +step:6222 train loss:3.797131 +step:6223 train loss:3.557964 +step:6224 train loss:3.591716 +step:6225 train loss:3.568789 +step:6226 train loss:3.581611 +step:6227 train loss:3.585169 +step:6228 train loss:3.578335 +step:6229 train loss:3.619775 +step:6230 train loss:3.573351 +step:6231 train loss:3.686993 +step:6232 train loss:3.528694 +step:6233 train loss:3.569057 +step:6234 train loss:3.577534 +step:6235 train loss:3.606620 +step:6236 train loss:3.540364 +step:6237 train loss:3.566182 +step:6238 train loss:3.587825 +step:6239 train loss:3.574707 +step:6240 train loss:3.596420 +step:6241 train loss:3.580475 +step:6242 train loss:3.576586 +step:6243 train loss:3.614425 +step:6244 train loss:3.767964 +step:6245 train loss:3.564358 +step:6246 train loss:3.554211 +step:6247 train loss:3.546870 +step:6248 train loss:3.552270 +step:6249 train loss:3.493228 +step:6250 validation loss:3.505048 +step:6250 train loss:3.529642 +step:6251 train loss:3.547908 +step:6252 train loss:3.591465 +step:6253 train loss:3.602165 +step:6254 train loss:3.589967 +step:6255 train loss:3.554173 +step:6256 train loss:3.607014 +step:6257 train loss:3.606393 +step:6258 train loss:3.587097 +step:6259 train loss:3.590980 +step:6260 train loss:3.620102 +step:6261 train loss:3.642200 +step:6262 train loss:3.535391 +step:6263 train loss:3.566471 +step:6264 train loss:3.578683 +step:6265 train loss:3.564967 +step:6266 train loss:3.768852 +step:6267 train loss:3.572445 +step:6268 train loss:3.659825 +step:6269 train loss:3.533866 +step:6270 train loss:3.546837 +step:6271 train loss:3.595933 +step:6272 train loss:3.584903 +step:6273 train loss:3.785001 +step:6274 train loss:3.562661 +step:6275 train loss:3.595600 +step:6276 train loss:3.567506 +step:6277 train loss:3.552059 +step:6278 train loss:3.536530 +step:6279 train loss:3.591666 +step:6280 train loss:3.596798 +step:6281 train loss:3.531372 +step:6282 train loss:3.541670 +step:6283 train loss:3.630701 +step:6284 train loss:3.601664 +step:6285 train loss:3.597610 +step:6286 train loss:3.545825 +step:6287 train loss:3.573473 +step:6288 train loss:3.673136 +step:6289 train loss:3.536627 +step:6290 train loss:3.528838 +step:6291 train loss:3.563629 +step:6292 train loss:3.583251 +step:6293 train loss:3.570147 +step:6294 train loss:3.555781 +step:6295 train loss:3.579633 +step:6296 train loss:3.543185 +step:6297 train loss:3.668242 +step:6298 train loss:3.617620 +step:6299 train loss:3.509305 +step:6300 
train loss:3.591131 +step:6301 train loss:3.619172 +step:6302 train loss:3.602911 +step:6303 train loss:3.570382 +step:6304 train loss:3.585615 +step:6305 train loss:3.558443 +step:6306 train loss:3.569906 +step:6307 train loss:3.580018 +step:6308 train loss:3.554020 +step:6309 train loss:3.550939 +step:6310 train loss:3.606466 +step:6311 train loss:3.560308 +step:6312 train loss:3.598398 +step:6313 train loss:3.531038 +step:6314 train loss:3.554113 +step:6315 train loss:3.609324 +step:6316 train loss:3.532338 +step:6317 train loss:3.527246 +step:6318 train loss:3.637377 +step:6319 train loss:3.566489 +step:6320 train loss:3.584332 +step:6321 train loss:3.568654 +step:6322 train loss:3.570224 +step:6323 train loss:3.506613 +step:6324 train loss:3.511766 +step:6325 train loss:3.607198 +step:6326 train loss:3.526960 +step:6327 train loss:3.602594 +step:6328 train loss:3.581589 +step:6329 train loss:3.500973 +step:6330 train loss:3.528685 +step:6331 train loss:3.547411 +step:6332 train loss:3.679237 +step:6333 train loss:3.559750 +step:6334 train loss:3.535512 +step:6335 train loss:3.507039 +step:6336 train loss:3.538466 +step:6337 train loss:3.563260 +step:6338 train loss:3.519338 +step:6339 train loss:3.565171 +step:6340 train loss:3.539783 +step:6341 train loss:3.557759 +step:6342 train loss:3.553647 +step:6343 train loss:3.652178 +step:6344 train loss:3.503676 +step:6345 train loss:3.518241 +step:6346 train loss:3.597846 +step:6347 train loss:3.471905 +step:6348 train loss:3.569524 +step:6349 train loss:3.544905 +step:6350 train loss:3.518909 +step:6351 train loss:3.517994 +step:6352 train loss:3.535169 +step:6353 train loss:3.555590 +step:6354 train loss:3.567833 +step:6355 train loss:3.574707 +step:6356 train loss:3.587958 +step:6357 train loss:3.443164 +step:6358 train loss:3.534547 +step:6359 train loss:3.589386 +step:6360 train loss:3.502024 +step:6361 train loss:3.502294 +step:6362 train loss:3.543604 +step:6363 train loss:3.524885 +step:6364 train loss:3.509412 +step:6365 train loss:3.578766 +step:6366 train loss:3.592254 +step:6367 train loss:3.519521 +step:6368 train loss:3.563550 +step:6369 train loss:3.529972 +step:6370 train loss:3.580744 +step:6371 train loss:3.498880 +step:6372 train loss:3.527492 +step:6373 train loss:3.555176 +step:6374 train loss:3.583991 +step:6375 train loss:3.544078 +step:6376 train loss:3.569253 +step:6377 train loss:3.566222 +step:6378 train loss:3.515345 +step:6379 train loss:3.556439 +step:6380 train loss:3.599547 +step:6381 train loss:3.565104 +step:6382 train loss:3.518451 +step:6383 train loss:3.584921 +step:6384 train loss:3.554755 +step:6385 train loss:3.536273 +step:6386 train loss:3.567987 +step:6387 train loss:3.548668 +step:6388 train loss:3.590827 +step:6389 train loss:3.595082 +step:6390 train loss:3.546782 +step:6391 train loss:3.534372 +step:6392 train loss:3.519942 +step:6393 train loss:3.574680 +step:6394 train loss:3.562606 +step:6395 train loss:3.736994 +step:6396 train loss:3.563386 +step:6397 train loss:3.506370 +step:6398 train loss:3.579148 +step:6399 train loss:3.519744 +step:6400 train loss:3.592848 +step:6401 train loss:3.628259 +step:6402 train loss:3.561057 +step:6403 train loss:3.552990 +step:6404 train loss:3.532160 +step:6405 train loss:3.557513 +step:6406 train loss:3.563336 +step:6407 train loss:3.623342 +step:6408 train loss:3.511874 +step:6409 train loss:3.501564 +step:6410 train loss:3.631790 +step:6411 train loss:3.559494 +step:6412 train loss:3.564949 +step:6413 train loss:3.569512 +step:6414 train loss:3.518183 
+step:6415 train loss:3.578549 +step:6416 train loss:3.548097 +step:6417 train loss:3.517590 +step:6418 train loss:3.510398 +step:6419 train loss:3.595144 +step:6420 train loss:3.522372 +step:6421 train loss:3.547081 +step:6422 train loss:3.536468 +step:6423 train loss:3.545012 +step:6424 train loss:3.568804 +step:6425 train loss:3.563359 +step:6426 train loss:3.605431 +step:6427 train loss:3.567329 +step:6428 train loss:3.606691 +step:6429 train loss:3.569007 +step:6430 train loss:3.546010 +step:6431 train loss:3.518863 +step:6432 train loss:3.553544 +step:6433 train loss:3.567924 +step:6434 train loss:3.450780 +step:6435 train loss:3.627814 +step:6436 train loss:3.562261 +step:6437 train loss:3.526572 +step:6438 train loss:3.555115 +step:6439 train loss:3.528521 +step:6440 train loss:3.544654 +step:6441 train loss:3.536978 +step:6442 train loss:3.479677 +step:6443 train loss:3.532720 +step:6444 train loss:3.674098 +step:6445 train loss:3.577270 +step:6446 train loss:3.580532 +step:6447 train loss:3.563596 +step:6448 train loss:3.510509 +step:6449 train loss:3.536380 +step:6450 train loss:3.517130 +step:6451 train loss:3.508858 +step:6452 train loss:3.509361 +step:6453 train loss:3.553800 +step:6454 train loss:3.578448 +step:6455 train loss:3.566512 +step:6456 train loss:3.580818 +step:6457 train loss:3.561977 +step:6458 train loss:3.534614 +step:6459 train loss:3.515079 +step:6460 train loss:3.525724 +step:6461 train loss:3.523472 +step:6462 train loss:3.519361 +step:6463 train loss:3.616968 +step:6464 train loss:3.521942 +step:6465 train loss:3.563397 +step:6466 train loss:3.579902 +step:6467 train loss:3.505449 +step:6468 train loss:3.583165 +step:6469 train loss:3.494222 +step:6470 train loss:3.614497 +step:6471 train loss:3.523606 +step:6472 train loss:3.678660 +step:6473 train loss:3.562545 +step:6474 train loss:3.595081 +step:6475 train loss:3.537156 +step:6476 train loss:3.611376 +step:6477 train loss:3.539872 +step:6478 train loss:3.672520 +step:6479 train loss:3.584321 +step:6480 train loss:3.522549 +step:6481 train loss:3.577630 +step:6482 train loss:3.519905 +step:6483 train loss:3.580320 +step:6484 train loss:3.536044 +step:6485 train loss:3.598703 +step:6486 train loss:3.529620 +step:6487 train loss:3.530017 +step:6488 train loss:3.524129 +step:6489 train loss:3.527205 +step:6490 train loss:3.550832 +step:6491 train loss:3.520424 +step:6492 train loss:3.625511 +step:6493 train loss:3.528816 +step:6494 train loss:3.533015 +step:6495 train loss:3.531580 +step:6496 train loss:3.562859 +step:6497 train loss:3.582213 +step:6498 train loss:3.688532 +step:6499 train loss:3.659921 +step:6500 validation loss:3.493865 total_sharp:3.6529e-03 L1_sharp:6.4911e-02 L2_sharp:1.5041e-02 L3_sharp:2.7629e-02 L4_sharp:1.5717e-02 L5_sharp:1.8206e-02 L6_sharp:2.5075e-02 L7_sharp:2.8297e-02 L8_sharp:2.8974e-02 L9_sharp:1.9610e-02 L10_sharp:1.4247e-02 L11_sharp:1.1256e-02 L12_sharp:1.7024e-02 total_fnorm:1.3572e+00 total_l1_linf:8.1469e+03 total_spectral:1.3572e+00 L1_fnorm:6.1172e-02 L2_fnorm:5.9062e-02 L3_fnorm:5.8514e-02 L4_fnorm:5.9981e-02 L5_fnorm:6.0903e-02 L6_fnorm:6.1226e-02 L7_fnorm:6.1263e-02 L8_fnorm:6.1116e-02 L9_fnorm:6.1194e-02 L10_fnorm:6.1350e-02 L11_fnorm:6.1205e-02 L12_fnorm:6.1186e-02 L1_l1linf:3.1679e-01 L2_l1linf:3.5454e-01 L3_l1linf:3.4408e-01 L4_l1linf:3.4007e-01 L5_l1linf:3.2748e-01 L6_l1linf:3.1106e-01 L7_l1linf:3.1024e-01 L8_l1linf:2.8559e-01 L9_l1linf:3.1384e-01 L10_l1linf:3.4431e-01 L11_l1linf:3.4214e-01 L12_l1linf:3.2932e-01 L1_spectral:7.1911e-03 L2_spectral:8.0280e-03 
L3_spectral:7.7487e-03 L4_spectral:7.6730e-03 L5_spectral:7.3671e-03 L6_spectral:6.9814e-03 L7_spectral:6.9319e-03 L8_spectral:6.4348e-03 L9_spectral:7.0578e-03 L10_spectral:7.7185e-03 L11_spectral:7.7912e-03 L12_spectral:7.6011e-03 ip_v_neg_g:2.5291e-03 cos_v_neg_g:5.9547e-04 v_norm:1.3572e+00 g_norm:3.1295e+00 hv_norm:6.2166e-01 cos_v_hv:7.9748e-03 hg_norm:1.2777e+02 cos_g_hg:4.6680e-01 v_par:1.8649e-05 v_perp:1.3572e+00 L1_cos_v_neg_g:9.5095e-03 L1_v_norm:6.1172e-02 L2_cos_v_neg_g:4.5139e-03 L2_v_norm:5.9062e-02 L3_cos_v_neg_g:3.0772e-03 L3_v_norm:5.8514e-02 L4_cos_v_neg_g:2.6542e-03 L4_v_norm:5.9981e-02 L5_cos_v_neg_g:2.3574e-03 L5_v_norm:6.0903e-02 L6_cos_v_neg_g:3.7634e-03 L6_v_norm:6.1226e-02 L7_cos_v_neg_g:3.5177e-03 L7_v_norm:6.1263e-02 L8_cos_v_neg_g:5.0760e-03 L8_v_norm:6.1116e-02 L9_cos_v_neg_g:3.4981e-03 L9_v_norm:6.1194e-02 L10_cos_v_neg_g:2.9952e-03 L10_v_norm:6.1350e-02 L11_cos_v_neg_g:2.0544e-03 L11_v_norm:6.1205e-02 L12_cos_v_neg_g:1.4719e-03 L12_v_norm:6.1186e-02 +step:6500 train loss:3.506544 +step:6501 train loss:3.525247 +step:6502 train loss:3.543859 +step:6503 train loss:3.603225 +step:6504 train loss:3.550236 +step:6505 train loss:3.558505 +step:6506 train loss:3.518141 +step:6507 train loss:3.587364 +step:6508 train loss:3.552824 +step:6509 train loss:3.536302 +step:6510 train loss:3.545200 +step:6511 train loss:3.561271 +step:6512 train loss:3.502102 +step:6513 train loss:3.570614 +step:6514 train loss:3.443318 +step:6515 train loss:3.536775 +step:6516 train loss:3.584671 +step:6517 train loss:3.499460 +step:6518 train loss:3.539803 +step:6519 train loss:3.529317 +step:6520 train loss:3.616709 +step:6521 train loss:3.594687 +step:6522 train loss:3.605047 +step:6523 train loss:3.499214 +step:6524 train loss:3.583920 +step:6525 train loss:3.568514 +step:6526 train loss:3.510286 +step:6527 train loss:3.560146 +step:6528 train loss:3.578103 +step:6529 train loss:3.608398 +step:6530 train loss:3.513477 +step:6531 train loss:3.594302 +step:6532 train loss:3.519432 +step:6533 train loss:3.557688 +step:6534 train loss:3.564621 +step:6535 train loss:3.542639 +step:6536 train loss:3.674826 +step:6537 train loss:3.480160 +step:6538 train loss:3.592512 +step:6539 train loss:3.516366 +step:6540 train loss:3.627857 +step:6541 train loss:3.607834 +step:6542 train loss:3.565641 +step:6543 train loss:3.520511 +step:6544 train loss:3.503242 +step:6545 train loss:3.490619 +step:6546 train loss:3.554059 +step:6547 train loss:3.607977 +step:6548 train loss:3.551272 +step:6549 train loss:3.565731 +step:6550 train loss:3.676602 +step:6551 train loss:3.555479 +step:6552 train loss:3.551642 +step:6553 train loss:3.588242 +step:6554 train loss:3.481394 +step:6555 train loss:3.564864 +step:6556 train loss:3.436603 +step:6557 train loss:3.782398 +step:6558 train loss:3.614559 +step:6559 train loss:3.526183 +step:6560 train loss:3.566152 +step:6561 train loss:3.538352 +step:6562 train loss:3.560108 +step:6563 train loss:3.450650 +step:6564 train loss:3.555203 +step:6565 train loss:3.461363 +step:6566 train loss:3.572800 +step:6567 train loss:3.544472 +step:6568 train loss:3.589226 +step:6569 train loss:3.536215 +step:6570 train loss:3.573447 +step:6571 train loss:3.504021 +step:6572 train loss:3.580123 +step:6573 train loss:3.591079 +step:6574 train loss:3.578471 +step:6575 train loss:3.523946 +step:6576 train loss:3.512927 +step:6577 train loss:3.582155 +step:6578 train loss:3.452555 +step:6579 train loss:3.554976 +step:6580 train loss:3.510337 +step:6581 train loss:3.521939 +step:6582 
train loss:3.501484 +step:6583 train loss:3.601772 +step:6584 train loss:3.530672 +step:6585 train loss:3.567908 +step:6586 train loss:3.574238 +step:6587 train loss:3.583008 +step:6588 train loss:3.549765 +step:6589 train loss:3.580548 +step:6590 train loss:3.517709 +step:6591 train loss:3.571071 +step:6592 train loss:3.509920 +step:6593 train loss:3.521077 +step:6594 train loss:3.544623 +step:6595 train loss:3.530782 +step:6596 train loss:3.526059 +step:6597 train loss:3.551434 +step:6598 train loss:3.591467 +step:6599 train loss:3.484296 +step:6600 train loss:3.538249 +step:6601 train loss:3.596962 +step:6602 train loss:3.521955 +step:6603 train loss:3.550565 +step:6604 train loss:3.561833 +step:6605 train loss:3.540770 +step:6606 train loss:3.601463 +step:6607 train loss:3.522234 +step:6608 train loss:3.537498 +step:6609 train loss:3.505399 +step:6610 train loss:3.616296 +step:6611 train loss:3.540321 +step:6612 train loss:3.582718 +step:6613 train loss:3.499690 +step:6614 train loss:3.526358 +step:6615 train loss:3.527074 +step:6616 train loss:3.508628 +step:6617 train loss:3.549890 +step:6618 train loss:3.535497 +step:6619 train loss:3.507029 +step:6620 train loss:3.613587 +step:6621 train loss:3.490041 +step:6622 train loss:3.564436 +step:6623 train loss:3.493682 +step:6624 train loss:3.567964 +step:6625 train loss:3.609069 +step:6626 train loss:3.572008 +step:6627 train loss:3.522101 +step:6628 train loss:3.581420 +step:6629 train loss:3.483977 +step:6630 train loss:3.519306 +step:6631 train loss:3.556026 +step:6632 train loss:3.593367 +step:6633 train loss:3.544583 +step:6634 train loss:3.608564 +step:6635 train loss:3.508892 +step:6636 train loss:3.549100 +step:6637 train loss:3.516828 +step:6638 train loss:3.517376 +step:6639 train loss:3.529336 +step:6640 train loss:3.516845 +step:6641 train loss:3.530596 +step:6642 train loss:3.532318 +step:6643 train loss:3.609833 +step:6644 train loss:3.614090 +step:6645 train loss:3.488854 +step:6646 train loss:3.578976 +step:6647 train loss:3.536478 +step:6648 train loss:3.639612 +step:6649 train loss:3.566538 +step:6650 train loss:3.514586 +step:6651 train loss:3.563369 +step:6652 train loss:3.578636 +step:6653 train loss:3.521194 +step:6654 train loss:3.517026 +step:6655 train loss:3.557882 +step:6656 train loss:3.529159 +step:6657 train loss:3.555016 +step:6658 train loss:3.536030 +step:6659 train loss:3.689192 +step:6660 train loss:3.587383 +step:6661 train loss:3.513152 +step:6662 train loss:3.544922 +step:6663 train loss:3.477909 +step:6664 train loss:3.558558 +step:6665 train loss:3.568867 +step:6666 train loss:3.583432 +step:6667 train loss:3.496857 +step:6668 train loss:3.626405 +step:6669 train loss:3.508945 +step:6670 train loss:3.518047 +step:6671 train loss:3.598925 +step:6672 train loss:3.551475 +step:6673 train loss:3.560391 +step:6674 train loss:3.535061 +step:6675 train loss:3.549816 +step:6676 train loss:3.564300 +step:6677 train loss:3.516989 +step:6678 train loss:3.587978 +step:6679 train loss:3.623317 +step:6680 train loss:3.624181 +step:6681 train loss:3.577277 +step:6682 train loss:3.519832 +step:6683 train loss:3.543329 +step:6684 train loss:3.556931 +step:6685 train loss:3.567377 +step:6686 train loss:3.504029 +step:6687 train loss:3.520145 +step:6688 train loss:3.564921 +step:6689 train loss:3.573208 +step:6690 train loss:3.549448 +step:6691 train loss:3.581242 +step:6692 train loss:3.587722 +step:6693 train loss:3.619699 +step:6694 train loss:3.576762 +step:6695 train loss:3.546651 +step:6696 train loss:3.485975 
+step:6697 train loss:3.697482 +step:6698 train loss:3.546554 +step:6699 train loss:3.542585 +step:6700 train loss:3.555061 +step:6701 train loss:3.613177 +step:6702 train loss:3.502858 +step:6703 train loss:3.550217 +step:6704 train loss:3.534462 +step:6705 train loss:3.544685 +step:6706 train loss:3.521799 +step:6707 train loss:3.597996 +step:6708 train loss:3.551122 +step:6709 train loss:3.580112 +step:6710 train loss:3.569942 +step:6711 train loss:3.521188 +step:6712 train loss:3.512049 +step:6713 train loss:3.535199 +step:6714 train loss:3.578458 +step:6715 train loss:3.519629 +step:6716 train loss:3.599595 +step:6717 train loss:3.539479 +step:6718 train loss:3.564862 +step:6719 train loss:3.598109 +step:6720 train loss:3.527697 +step:6721 train loss:3.545426 +step:6722 train loss:3.521726 +step:6723 train loss:3.650050 +step:6724 train loss:3.507390 +step:6725 train loss:3.571559 +step:6726 train loss:3.521972 +step:6727 train loss:3.588556 +step:6728 train loss:3.681690 +step:6729 train loss:3.545048 +step:6730 train loss:3.541038 +step:6731 train loss:3.582558 +step:6732 train loss:3.457084 +step:6733 train loss:3.594751 +step:6734 train loss:3.519583 +step:6735 train loss:3.548054 +step:6736 train loss:3.549496 +step:6737 train loss:3.545228 +step:6738 train loss:3.576671 +step:6739 train loss:3.534514 +step:6740 train loss:3.483807 +step:6741 train loss:3.593801 +step:6742 train loss:3.556160 +step:6743 train loss:3.559667 +step:6744 train loss:3.449974 +step:6745 train loss:3.606560 +step:6746 train loss:3.534665 +step:6747 train loss:3.529953 +step:6748 train loss:3.601302 +step:6749 train loss:3.583485 +step:6750 validation loss:3.484097 +step:6750 train loss:3.504949 +step:6751 train loss:3.539180 +step:6752 train loss:3.543199 +step:6753 train loss:3.577110 +step:6754 train loss:3.557964 +step:6755 train loss:3.568837 +step:6756 train loss:3.508296 +step:6757 train loss:3.479566 +step:6758 train loss:3.653644 +step:6759 train loss:3.545789 +step:6760 train loss:3.602564 +step:6761 train loss:3.534250 +step:6762 train loss:3.558822 +step:6763 train loss:3.457354 +step:6764 train loss:3.538142 +step:6765 train loss:3.540365 +step:6766 train loss:3.533572 +step:6767 train loss:3.487966 +step:6768 train loss:3.492484 +step:6769 train loss:3.455470 +step:6770 train loss:3.543614 +step:6771 train loss:3.543780 +step:6772 train loss:3.552934 +step:6773 train loss:3.533556 +step:6774 train loss:3.544672 +step:6775 train loss:3.590524 +step:6776 train loss:3.544768 +step:6777 train loss:3.620681 +step:6778 train loss:3.505902 +step:6779 train loss:3.560545 +step:6780 train loss:3.491932 +step:6781 train loss:3.556342 +step:6782 train loss:3.468998 +step:6783 train loss:3.504190 +step:6784 train loss:3.528830 +step:6785 train loss:3.515822 +step:6786 train loss:3.532001 +step:6787 train loss:3.607041 +step:6788 train loss:3.546070 +step:6789 train loss:3.555276 +step:6790 train loss:3.551950 +step:6791 train loss:3.564825 +step:6792 train loss:3.562163 +step:6793 train loss:3.559399 +step:6794 train loss:3.530254 +step:6795 train loss:3.531261 +step:6796 train loss:3.536651 +step:6797 train loss:3.632421 +step:6798 train loss:3.536086 +step:6799 train loss:3.525935 +step:6800 train loss:3.496014 +step:6801 train loss:3.626858 +step:6802 train loss:3.574959 +step:6803 train loss:3.564556 +step:6804 train loss:3.594626 +step:6805 train loss:3.553134 +step:6806 train loss:3.489354 +step:6807 train loss:3.546751 +step:6808 train loss:3.530249 +step:6809 train loss:3.559222 +step:6810 
train loss:3.678765 +step:6811 train loss:3.583181 +step:6812 train loss:3.555849 +step:6813 train loss:3.568127 +step:6814 train loss:3.574541 +step:6815 train loss:3.622530 +step:6816 train loss:3.539102 +step:6817 train loss:3.562800 +step:6818 train loss:3.541974 +step:6819 train loss:3.524301 +step:6820 train loss:3.554093 +step:6821 train loss:3.518706 +step:6822 train loss:3.618545 +step:6823 train loss:3.599647 +step:6824 train loss:3.577683 +step:6825 train loss:3.524068 +step:6826 train loss:3.570457 +step:6827 train loss:3.554991 +step:6828 train loss:3.571544 +step:6829 train loss:3.559537 +step:6830 train loss:3.526655 +step:6831 train loss:3.487253 +step:6832 train loss:3.472096 +step:6833 train loss:3.489035 +step:6834 train loss:3.576069 +step:6835 train loss:3.548285 +step:6836 train loss:3.468532 +step:6837 train loss:3.533825 +step:6838 train loss:3.590953 +step:6839 train loss:3.675601 +step:6840 train loss:3.550265 +step:6841 train loss:3.507015 +step:6842 train loss:3.553854 +step:6843 train loss:3.659163 +step:6844 train loss:3.537682 +step:6845 train loss:3.592825 +step:6846 train loss:3.656248 +step:6847 train loss:3.587339 +step:6848 train loss:3.575773 +step:6849 train loss:3.598407 +step:6850 train loss:3.570751 +step:6851 train loss:3.499541 +step:6852 train loss:3.491668 +step:6853 train loss:3.480381 +step:6854 train loss:3.561531 +step:6855 train loss:3.532087 +step:6856 train loss:3.517027 +step:6857 train loss:3.567604 +step:6858 train loss:3.599242 +step:6859 train loss:3.506174 +step:6860 train loss:3.615431 +step:6861 train loss:3.642222 +step:6862 train loss:3.551392 +step:6863 train loss:3.548989 +step:6864 train loss:3.494934 +step:6865 train loss:3.566314 +step:6866 train loss:3.493257 +step:6867 train loss:3.671824 +step:6868 train loss:3.544483 +step:6869 train loss:3.577152 +step:6870 train loss:3.614676 +step:6871 train loss:3.531408 +step:6872 train loss:3.525134 +step:6873 train loss:3.545484 +step:6874 train loss:3.505423 +step:6875 train loss:3.509953 +step:6876 train loss:3.540821 +step:6877 train loss:3.581497 +step:6878 train loss:3.493047 +step:6879 train loss:3.540331 +step:6880 train loss:3.549464 +step:6881 train loss:3.511794 +step:6882 train loss:3.574857 +step:6883 train loss:3.566816 +step:6884 train loss:3.792402 +step:6885 train loss:3.560288 +step:6886 train loss:3.543524 +step:6887 train loss:3.480264 +step:6888 train loss:3.584560 +step:6889 train loss:3.465827 +step:6890 train loss:3.576915 +step:6891 train loss:3.582939 +step:6892 train loss:3.682158 +step:6893 train loss:3.516151 +step:6894 train loss:3.575991 +step:6895 train loss:3.577850 +step:6896 train loss:3.553540 +step:6897 train loss:3.506309 +step:6898 train loss:3.505535 +step:6899 train loss:3.595773 +step:6900 train loss:3.566431 +step:6901 train loss:3.518935 +step:6902 train loss:3.450466 +step:6903 train loss:3.496179 +step:6904 train loss:3.604770 +step:6905 train loss:3.640350 +step:6906 train loss:3.559125 +step:6907 train loss:3.576280 +step:6908 train loss:3.615161 +step:6909 train loss:3.606134 +step:6910 train loss:3.482541 +step:6911 train loss:3.612339 +step:6912 train loss:3.504155 +step:6913 train loss:3.541132 +step:6914 train loss:3.498573 +step:6915 train loss:3.526727 +step:6916 train loss:3.501747 +step:6917 train loss:3.625178 +step:6918 train loss:3.573101 +step:6919 train loss:3.566240 +step:6920 train loss:3.550751 +step:6921 train loss:3.615612 +step:6922 train loss:3.605424 +step:6923 train loss:3.470588 +step:6924 train loss:3.553218 
+step:6925 train loss:3.526134 +step:6926 train loss:3.563984 +step:6927 train loss:3.619468 +step:6928 train loss:3.502576 +step:6929 train loss:3.516477 +step:6930 train loss:3.551224 +step:6931 train loss:3.551523 +step:6932 train loss:3.785601 +step:6933 train loss:3.616248 +step:6934 train loss:3.554890 +step:6935 train loss:3.540285 +step:6936 train loss:3.579017 +step:6937 train loss:3.522430 +step:6938 train loss:3.588468 +step:6939 train loss:3.521610 +step:6940 train loss:3.576897 +step:6941 train loss:3.492062 +step:6942 train loss:3.582349 +step:6943 train loss:3.471906 +step:6944 train loss:3.564778 +step:6945 train loss:3.501211 +step:6946 train loss:3.595269 +step:6947 train loss:3.519922 +step:6948 train loss:3.512539 +step:6949 train loss:3.590530 +step:6950 train loss:3.579456 +step:6951 train loss:3.584803 +step:6952 train loss:3.515520 +step:6953 train loss:3.559582 +step:6954 train loss:3.621958 +step:6955 train loss:3.537127 +step:6956 train loss:3.574830 +step:6957 train loss:3.563815 +step:6958 train loss:3.525115 +step:6959 train loss:3.561204 +step:6960 train loss:3.531954 +step:6961 train loss:3.537217 +step:6962 train loss:3.519294 +step:6963 train loss:3.489634 +step:6964 train loss:3.533011 +step:6965 train loss:3.522895 +step:6966 train loss:3.569477 +step:6967 train loss:3.504314 +step:6968 train loss:3.544302 +step:6969 train loss:3.563459 +step:6970 train loss:3.538171 +step:6971 train loss:3.602569 +step:6972 train loss:3.551648 +step:6973 train loss:3.508125 +step:6974 train loss:3.637152 +step:6975 train loss:3.542014 +step:6976 train loss:3.513841 +step:6977 train loss:3.552003 +step:6978 train loss:3.544729 +step:6979 train loss:3.554337 +step:6980 train loss:3.530653 +step:6981 train loss:3.590340 +step:6982 train loss:3.544329 +step:6983 train loss:3.534000 +step:6984 train loss:3.653715 +step:6985 train loss:3.498407 +step:6986 train loss:3.488898 +step:6987 train loss:3.537775 +step:6988 train loss:3.542547 +step:6989 train loss:3.688320 +step:6990 train loss:3.551798 +step:6991 train loss:3.507855 +step:6992 train loss:3.555049 +step:6993 train loss:3.621907 +step:6994 train loss:3.569830 +step:6995 train loss:3.520453 +step:6996 train loss:3.520129 +step:6997 train loss:3.602853 +step:6998 train loss:3.502067 +step:6999 train loss:3.551371 +step:7000 validation loss:3.477438 total_sharp:3.9725e-03 L1_sharp:5.4303e-02 L2_sharp:1.6865e-02 L3_sharp:3.4416e-02 L4_sharp:1.7895e-02 L5_sharp:1.9794e-02 L6_sharp:2.7563e-02 L7_sharp:3.3675e-02 L8_sharp:3.3285e-02 L9_sharp:2.1114e-02 L10_sharp:1.5766e-02 L11_sharp:1.3547e-02 L12_sharp:2.2691e-02 total_fnorm:1.3530e+00 total_l1_linf:8.1217e+03 total_spectral:1.3530e+00 L1_fnorm:6.0836e-02 L2_fnorm:5.8858e-02 L3_fnorm:5.8614e-02 L4_fnorm:6.0145e-02 L5_fnorm:6.1000e-02 L6_fnorm:6.1322e-02 L7_fnorm:6.1469e-02 L8_fnorm:6.1384e-02 L9_fnorm:6.1354e-02 L10_fnorm:6.1489e-02 L11_fnorm:6.1487e-02 L12_fnorm:6.1324e-02 L1_l1linf:3.0559e-01 L2_l1linf:3.6039e-01 L3_l1linf:3.6835e-01 L4_l1linf:3.6549e-01 L5_l1linf:3.3779e-01 L6_l1linf:3.4771e-01 L7_l1linf:3.4782e-01 L8_l1linf:3.3726e-01 L9_l1linf:3.4531e-01 L10_l1linf:3.6799e-01 L11_l1linf:3.7441e-01 L12_l1linf:3.7587e-01 L1_spectral:6.9749e-03 L2_spectral:8.2234e-03 L3_spectral:8.2363e-03 L4_spectral:8.1855e-03 L5_spectral:7.6058e-03 L6_spectral:7.8046e-03 L7_spectral:7.7347e-03 L8_spectral:7.4922e-03 L9_spectral:7.6813e-03 L10_spectral:8.2991e-03 L11_spectral:8.3960e-03 L12_spectral:8.4114e-03 ip_v_neg_g:3.3911e-03 cos_v_neg_g:7.9438e-04 v_norm:1.3530e+00 
g_norm:3.1552e+00 hv_norm:6.1762e-01 cos_v_hv:8.7023e-03 hg_norm:1.2792e+02 cos_g_hg:4.3834e-01 v_par:2.4858e-05 v_perp:1.3530e+00 L1_cos_v_neg_g:7.2711e-03 L1_v_norm:6.0836e-02 L2_cos_v_neg_g:5.4488e-03 L2_v_norm:5.8858e-02 L3_cos_v_neg_g:3.8093e-03 L3_v_norm:5.8614e-02 L4_cos_v_neg_g:3.9543e-03 L4_v_norm:6.0145e-02 L5_cos_v_neg_g:4.9421e-03 L5_v_norm:6.1000e-02 L6_cos_v_neg_g:5.8166e-03 L6_v_norm:6.1322e-02 L7_cos_v_neg_g:4.9095e-03 L7_v_norm:6.1469e-02 L8_cos_v_neg_g:5.1298e-03 L8_v_norm:6.1384e-02 L9_cos_v_neg_g:4.6547e-03 L9_v_norm:6.1354e-02 L10_cos_v_neg_g:3.6731e-03 L10_v_norm:6.1489e-02 L11_cos_v_neg_g:2.6351e-03 L11_v_norm:6.1487e-02 L12_cos_v_neg_g:3.7269e-03 L12_v_norm:6.1324e-02 +step:7000 train loss:3.626534 +step:7001 train loss:3.545625 +step:7002 train loss:3.522338 +step:7003 train loss:3.547305 +step:7004 train loss:3.543037 +step:7005 train loss:3.525834 +step:7006 train loss:3.530517 +step:7007 train loss:3.581101 +step:7008 train loss:3.524151 +step:7009 train loss:3.564086 +step:7010 train loss:3.500130 +step:7011 train loss:3.555210 +step:7012 train loss:3.527918 +step:7013 train loss:3.602132 +step:7014 train loss:3.511086 +step:7015 train loss:3.569080 +step:7016 train loss:3.559913 +step:7017 train loss:3.523747 +step:7018 train loss:3.603542 +step:7019 train loss:3.529898 +step:7020 train loss:3.575744 +step:7021 train loss:3.520135 +step:7022 train loss:3.536482 +step:7023 train loss:3.554673 +step:7024 train loss:3.515818 +step:7025 train loss:3.565497 +step:7026 train loss:3.523089 +step:7027 train loss:3.585000 +step:7028 train loss:3.508240 +step:7029 train loss:3.498777 +step:7030 train loss:3.499848 +step:7031 train loss:3.555739 +step:7032 train loss:3.560774 +step:7033 train loss:3.537261 +step:7034 train loss:3.560500 +step:7035 train loss:3.610476 +step:7036 train loss:3.529980 +step:7037 train loss:3.556236 +step:7038 train loss:3.517173 +step:7039 train loss:3.571875 +step:7040 train loss:3.489717 +step:7041 train loss:3.582849 +step:7042 train loss:3.514985 +step:7043 train loss:3.487201 +step:7044 train loss:3.536384 +step:7045 train loss:3.534368 +step:7046 train loss:3.527175 +step:7047 train loss:3.566294 +step:7048 train loss:3.512498 +step:7049 train loss:3.523861 +step:7050 train loss:3.545793 +step:7051 train loss:3.562850 +step:7052 train loss:3.565307 +step:7053 train loss:3.526935 +step:7054 train loss:3.506161 +step:7055 train loss:3.573107 +step:7056 train loss:3.572944 +step:7057 train loss:3.498116 +step:7058 train loss:3.616291 +step:7059 train loss:3.522946 +step:7060 train loss:3.533486 +step:7061 train loss:3.508491 +step:7062 train loss:3.532451 +step:7063 train loss:3.590519 +step:7064 train loss:3.514160 +step:7065 train loss:3.565015 +step:7066 train loss:3.523067 +step:7067 train loss:3.558652 +step:7068 train loss:3.535232 +step:7069 train loss:3.497644 +step:7070 train loss:3.523767 +step:7071 train loss:3.493736 +step:7072 train loss:3.497265 +step:7073 train loss:3.490384 +step:7074 train loss:3.485575 +step:7075 train loss:3.504426 +step:7076 train loss:3.516393 +step:7077 train loss:3.525109 +step:7078 train loss:3.570869 +step:7079 train loss:3.581112 +step:7080 train loss:3.525006 +step:7081 train loss:3.547318 +step:7082 train loss:3.514249 +step:7083 train loss:3.546111 +step:7084 train loss:3.538723 +step:7085 train loss:3.497649 +step:7086 train loss:3.538954 +step:7087 train loss:3.515468 +step:7088 train loss:3.638018 +step:7089 train loss:3.529875 +step:7090 train loss:3.495672 +step:7091 train loss:3.508258 
+step:7092 train loss:3.487382 +step:7093 train loss:3.582405 +step:7094 train loss:3.502930 +step:7095 train loss:3.514916 +step:7096 train loss:3.536054 +step:7097 train loss:3.524085 +step:7098 train loss:3.548324 +step:7099 train loss:3.504659 +step:7100 train loss:3.535988 +step:7101 train loss:3.605594 +step:7102 train loss:3.495108 +step:7103 train loss:3.520150 +step:7104 train loss:3.552627 +step:7105 train loss:3.529533 +step:7106 train loss:3.517801 +step:7107 train loss:3.553254 +step:7108 train loss:3.620558 +step:7109 train loss:3.550023 +step:7110 train loss:3.576053 +step:7111 train loss:3.553221 +step:7112 train loss:3.543849 +step:7113 train loss:3.541837 +step:7114 train loss:3.559363 +step:7115 train loss:3.598772 +step:7116 train loss:3.527164 +step:7117 train loss:3.563735 +step:7118 train loss:3.576996 +step:7119 train loss:3.537975 +step:7120 train loss:3.593922 +step:7121 train loss:3.510069 +step:7122 train loss:3.511521 +step:7123 train loss:3.451616 +step:7124 train loss:3.608328 +step:7125 train loss:3.461717 +step:7126 train loss:3.627718 +step:7127 train loss:3.586033 +step:7128 train loss:3.530602 +step:7129 train loss:3.537520 +step:7130 train loss:3.527923 +step:7131 train loss:3.466406 +step:7132 train loss:3.510078 +step:7133 train loss:3.556225 +step:7134 train loss:3.486818 +step:7135 train loss:3.542735 +step:7136 train loss:3.525310 +step:7137 train loss:3.504311 +step:7138 train loss:3.491281 +step:7139 train loss:3.495548 +step:7140 train loss:3.529980 +step:7141 train loss:3.526232 +step:7142 train loss:3.523895 +step:7143 train loss:3.560793 +step:7144 train loss:3.510613 +step:7145 train loss:3.527907 +step:7146 train loss:3.537387 +step:7147 train loss:3.560562 +step:7148 train loss:3.563836 +step:7149 train loss:3.569079 +step:7150 train loss:3.543106 +step:7151 train loss:3.507182 +step:7152 train loss:3.479412 +step:7153 train loss:3.517468 +step:7154 train loss:3.533953 +step:7155 train loss:3.549360 +step:7156 train loss:3.517292 +step:7157 train loss:3.538432 +step:7158 train loss:3.498452 +step:7159 train loss:3.547019 +step:7160 train loss:3.557905 +step:7161 train loss:3.508821 +step:7162 train loss:3.556566 +step:7163 train loss:3.494719 +step:7164 train loss:3.528282 +step:7165 train loss:3.536739 +step:7166 train loss:3.591108 +step:7167 train loss:3.568675 +step:7168 train loss:3.544718 +step:7169 train loss:3.523562 +step:7170 train loss:3.553280 +step:7171 train loss:3.499274 +step:7172 train loss:3.666113 +step:7173 train loss:3.506907 +step:7174 train loss:3.552482 +step:7175 train loss:3.526471 +step:7176 train loss:3.533783 +step:7177 train loss:3.550447 +step:7178 train loss:3.548743 +step:7179 train loss:3.536390 +step:7180 train loss:3.537335 +step:7181 train loss:3.565567 +step:7182 train loss:3.516511 +step:7183 train loss:3.591045 +step:7184 train loss:3.678814 +step:7185 train loss:3.593645 +step:7186 train loss:3.532977 +step:7187 train loss:3.541983 +step:7188 train loss:3.531255 +step:7189 train loss:3.529124 +step:7190 train loss:3.532738 +step:7191 train loss:3.525130 +step:7192 train loss:3.557263 +step:7193 train loss:3.477003 +step:7194 train loss:3.537726 +step:7195 train loss:3.515077 +step:7196 train loss:3.563473 +step:7197 train loss:3.541341 +step:7198 train loss:3.597229 +step:7199 train loss:3.555483 +step:7200 train loss:3.546549 +step:7201 train loss:3.555587 +step:7202 train loss:3.532689 +step:7203 train loss:3.548542 +step:7204 train loss:3.516651 +step:7205 train loss:3.476580 +step:7206 train 
loss:3.503848 +step:7207 train loss:3.682361 +step:7208 train loss:3.513179 +step:7209 train loss:3.594568 +step:7210 train loss:3.532750 +step:7211 train loss:3.562142 +step:7212 train loss:3.642817 +step:7213 train loss:3.490993 +step:7214 train loss:3.561962 +step:7215 train loss:3.530709 +step:7216 train loss:3.580818 +step:7217 train loss:3.539261 +step:7218 train loss:3.625242 +step:7219 train loss:3.536518 +step:7220 train loss:3.614236 +step:7221 train loss:3.495028 +step:7222 train loss:3.576442 +step:7223 train loss:3.496172 +step:7224 train loss:3.556851 +step:7225 train loss:3.535553 +step:7226 train loss:3.503417 +step:7227 train loss:3.524869 +step:7228 train loss:3.510926 +step:7229 train loss:3.514905 +step:7230 train loss:3.500948 +step:7231 train loss:3.632040 +step:7232 train loss:3.503371 +step:7233 train loss:3.573797 +step:7234 train loss:3.560566 +step:7235 train loss:3.531753 +step:7236 train loss:3.571656 +step:7237 train loss:3.524361 +step:7238 train loss:3.560150 +step:7239 train loss:3.515299 +step:7240 train loss:3.512757 +step:7241 train loss:3.525305 +step:7242 train loss:3.508887 +step:7243 train loss:3.552200 +step:7244 train loss:3.526874 +step:7245 train loss:3.532397 +step:7246 train loss:3.571742 +step:7247 train loss:3.527554 +step:7248 train loss:3.566107 +step:7249 train loss:3.516280 +step:7250 validation loss:3.470867 +step:7250 train loss:3.537826 +step:7251 train loss:3.582789 +step:7252 train loss:3.496042 +step:7253 train loss:3.585623 +step:7254 train loss:3.523595 +step:7255 train loss:3.495429 +step:7256 train loss:3.534581 +step:7257 train loss:3.576413 +step:7258 train loss:3.533345 +step:7259 train loss:3.515523 +step:7260 train loss:3.600593 +step:7261 train loss:3.558842 +step:7262 train loss:3.514922 +step:7263 train loss:3.556811 +step:7264 train loss:3.541622 +step:7265 train loss:3.446626 +step:7266 train loss:3.569996 +step:7267 train loss:3.489284 +step:7268 train loss:3.554381 +step:7269 train loss:3.557678 +step:7270 train loss:3.512603 +step:7271 train loss:3.529511 +step:7272 train loss:3.533657 +step:7273 train loss:3.531144 +step:7274 train loss:3.509250 +step:7275 train loss:3.581186 +step:7276 train loss:3.486400 +step:7277 train loss:3.535739 +step:7278 train loss:3.506323 +step:7279 train loss:3.486863 +step:7280 train loss:3.556756 +step:7281 train loss:3.579645 +step:7282 train loss:3.576578 +step:7283 train loss:3.469334 +step:7284 train loss:3.509737 +step:7285 train loss:3.539106 +step:7286 train loss:3.671345 +step:7287 train loss:3.579674 +step:7288 train loss:3.533631 +step:7289 train loss:3.536564 +step:7290 train loss:3.586056 +step:7291 train loss:3.548122 +step:7292 train loss:3.615625 +step:7293 train loss:3.514909 +step:7294 train loss:3.600111 +step:7295 train loss:3.489329 +step:7296 train loss:3.486005 +step:7297 train loss:3.530580 +step:7298 train loss:3.507400 +step:7299 train loss:3.549214 +step:7300 train loss:3.534016 +step:7301 train loss:3.485352 +step:7302 train loss:3.630183 +step:7303 train loss:3.519101 +step:7304 train loss:3.465837 +step:7305 train loss:3.541085 +step:7306 train loss:3.571146 +step:7307 train loss:3.575990 +step:7308 train loss:3.526359 +step:7309 train loss:3.490514 +step:7310 train loss:3.522417 +step:7311 train loss:3.505820 +step:7312 train loss:3.544756 +step:7313 train loss:3.583774 +step:7314 train loss:3.477713 +step:7315 train loss:3.473815 +step:7316 train loss:3.616428 +step:7317 train loss:3.553622 +step:7318 train loss:3.492352 +step:7319 train loss:3.517694 
+step:7320 train loss:3.552349 +step:7321 train loss:3.580115 +step:7322 train loss:3.460522 +step:7323 train loss:3.515223 +step:7324 train loss:3.542119 +step:7325 train loss:3.507041 +step:7326 train loss:3.532276 +step:7327 train loss:3.509677 +step:7328 train loss:3.627947 +step:7329 train loss:3.470622 +step:7330 train loss:3.527243 +step:7331 train loss:3.522660 +step:7332 train loss:3.562680 +step:7333 train loss:3.544359 +step:7334 train loss:3.512406 +step:7335 train loss:3.510118 +step:7336 train loss:3.763179 +step:7337 train loss:3.549989 +step:7338 train loss:3.545683 +step:7339 train loss:3.557237 +step:7340 train loss:3.542443 +step:7341 train loss:3.533559 +step:7342 train loss:3.526219 +step:7343 train loss:3.539408 +step:7344 train loss:3.618778 +step:7345 train loss:3.477476 +step:7346 train loss:3.513361 +step:7347 train loss:3.507464 +step:7348 train loss:3.510949 +step:7349 train loss:3.611796 +step:7350 train loss:3.594081 +step:7351 train loss:3.529140 +step:7352 train loss:3.557280 +step:7353 train loss:3.541147 +step:7354 train loss:3.491639 +step:7355 train loss:3.671606 +step:7356 train loss:3.643938 +step:7357 train loss:3.566238 +step:7358 train loss:3.547073 +step:7359 train loss:3.515344 +step:7360 train loss:3.527633 +step:7361 train loss:3.479946 +step:7362 train loss:3.527421 +step:7363 train loss:3.539857 +step:7364 train loss:3.574599 +step:7365 train loss:3.557596 +step:7366 train loss:3.524613 +step:7367 train loss:3.598038 +step:7368 train loss:3.579497 +step:7369 train loss:3.572037 +step:7370 train loss:3.535233 +step:7371 train loss:3.494976 +step:7372 train loss:3.552039 +step:7373 train loss:3.574393 +step:7374 train loss:3.666228 +step:7375 train loss:3.492096 +step:7376 train loss:3.510352 +step:7377 train loss:3.557226 +step:7378 train loss:3.508960 +step:7379 train loss:3.632718 +step:7380 train loss:3.597183 +step:7381 train loss:3.558446 +step:7382 train loss:3.527651 +step:7383 train loss:3.617774 +step:7384 train loss:3.560169 +step:7385 train loss:3.517698 +step:7386 train loss:3.523166 +step:7387 train loss:3.564790 +step:7388 train loss:3.598383 +step:7389 train loss:3.540604 +step:7390 train loss:3.483001 +step:7391 train loss:3.521185 +step:7392 train loss:3.580898 +step:7393 train loss:3.544124 +step:7394 train loss:3.583479 +step:7395 train loss:3.473457 +step:7396 train loss:3.572086 +step:7397 train loss:3.502711 +step:7398 train loss:3.515410 +step:7399 train loss:3.563761 +step:7400 train loss:3.566568 +step:7401 train loss:3.484198 +step:7402 train loss:3.602278 +step:7403 train loss:3.486838 +step:7404 train loss:3.555976 +step:7405 train loss:3.679897 +step:7406 train loss:3.504714 +step:7407 train loss:3.553772 +step:7408 train loss:3.547398 +step:7409 train loss:3.523846 +step:7410 train loss:3.692112 +step:7411 train loss:3.535138 +step:7412 train loss:3.539752 +step:7413 train loss:3.592164 +step:7414 train loss:3.502243 +step:7415 train loss:3.560450 +step:7416 train loss:3.443278 +step:7417 train loss:3.564721 +step:7418 train loss:3.546004 +step:7419 train loss:3.516764 +step:7420 train loss:3.506050 +step:7421 train loss:3.541387 +step:7422 train loss:3.499686 +step:7423 train loss:3.635958 +step:7424 train loss:3.699649 +step:7425 train loss:3.589225 +step:7426 train loss:3.554033 +step:7427 train loss:3.526151 +step:7428 train loss:3.565520 +step:7429 train loss:3.563279 +step:7430 train loss:3.488154 +step:7431 train loss:3.493944 +step:7432 train loss:3.503553 +step:7433 train loss:3.599548 +step:7434 train 
loss:3.513838 +step:7435 train loss:3.597998 +step:7436 train loss:3.641822 +step:7437 train loss:3.462669 +step:7438 train loss:3.522605 +step:7439 train loss:3.532992 +step:7440 train loss:3.507698 +step:7441 train loss:3.476973 +step:7442 train loss:3.702157 +step:7443 train loss:3.525994 +step:7444 train loss:3.569112 +step:7445 train loss:3.498376 +step:7446 train loss:3.520773 +step:7447 train loss:3.446025 +step:7448 train loss:3.503657 +step:7449 train loss:3.517464 +step:7450 train loss:3.549181 +step:7451 train loss:3.581391 +step:7452 train loss:3.512233 +step:7453 train loss:3.535994 +step:7454 train loss:3.520715 +step:7455 train loss:3.533878 +step:7456 train loss:3.505862 +step:7457 train loss:3.514290 +step:7458 train loss:3.552568 +step:7459 train loss:3.531212 +step:7460 train loss:3.538614 +step:7461 train loss:3.574096 +step:7462 train loss:3.510870 +step:7463 train loss:3.573500 +step:7464 train loss:3.497323 +step:7465 train loss:3.506058 +step:7466 train loss:3.507878 +step:7467 train loss:3.517536 +step:7468 train loss:3.567364 +step:7469 train loss:3.500638 +step:7470 train loss:3.532727 +step:7471 train loss:3.521855 +step:7472 train loss:3.555600 +step:7473 train loss:3.495553 +step:7474 train loss:3.480287 +step:7475 train loss:3.512476 +step:7476 train loss:3.549285 +step:7477 train loss:3.520799 +step:7478 train loss:3.518717 +step:7479 train loss:3.533909 +step:7480 train loss:3.811435 +step:7481 train loss:3.462216 +step:7482 train loss:3.533792 +step:7483 train loss:3.529033 +step:7484 train loss:3.548787 +step:7485 train loss:3.534634 +step:7486 train loss:3.561012 +step:7487 train loss:3.554604 +step:7488 train loss:3.573849 +step:7489 train loss:3.572050 +step:7490 train loss:3.516224 +step:7491 train loss:3.536901 +step:7492 train loss:3.643948 +step:7493 train loss:3.619459 +step:7494 train loss:3.643872 +step:7495 train loss:3.514093 +step:7496 train loss:3.502631 +step:7497 train loss:3.601148 +step:7498 train loss:3.534252 +step:7499 train loss:3.574024 +step:7500 validation loss:3.467987 total_sharp:3.2544e-03 L1_sharp:4.7442e-02 L2_sharp:1.4816e-02 L3_sharp:2.2567e-02 L4_sharp:1.2878e-02 L5_sharp:1.7586e-02 L6_sharp:1.9862e-02 L7_sharp:2.4314e-02 L8_sharp:2.5781e-02 L9_sharp:1.8526e-02 L10_sharp:1.3162e-02 L11_sharp:1.2791e-02 L12_sharp:2.1061e-02 total_fnorm:1.3524e+00 total_l1_linf:8.1091e+03 total_spectral:1.3524e+00 L1_fnorm:6.1132e-02 L2_fnorm:5.9094e-02 L3_fnorm:5.8682e-02 L4_fnorm:6.0126e-02 L5_fnorm:6.0925e-02 L6_fnorm:6.1241e-02 L7_fnorm:6.1294e-02 L8_fnorm:6.1118e-02 L9_fnorm:6.1244e-02 L10_fnorm:6.1428e-02 L11_fnorm:6.1417e-02 L12_fnorm:6.1340e-02 L1_l1linf:3.4946e-01 L2_l1linf:3.6417e-01 L3_l1linf:3.5108e-01 L4_l1linf:3.3459e-01 L5_l1linf:3.2761e-01 L6_l1linf:3.0533e-01 L7_l1linf:2.8856e-01 L8_l1linf:2.8125e-01 L9_l1linf:3.1633e-01 L10_l1linf:3.5366e-01 L11_l1linf:3.7594e-01 L12_l1linf:3.6977e-01 L1_spectral:7.8440e-03 L2_spectral:8.2201e-03 L3_spectral:7.8820e-03 L4_spectral:7.5985e-03 L5_spectral:7.4649e-03 L6_spectral:6.9381e-03 L7_spectral:6.5578e-03 L8_spectral:6.3962e-03 L9_spectral:7.1874e-03 L10_spectral:8.0395e-03 L11_spectral:8.4709e-03 L12_spectral:8.3140e-03 ip_v_neg_g:2.2635e-03 cos_v_neg_g:5.0305e-04 v_norm:1.3524e+00 g_norm:3.3270e+00 hv_norm:5.3879e-01 cos_v_hv:8.1688e-03 hg_norm:2.3914e+02 cos_g_hg:4.2957e-01 v_par:2.0515e-05 v_perp:1.3524e+00 L1_cos_v_neg_g:1.8393e-03 L1_v_norm:6.1132e-02 L2_cos_v_neg_g:2.7853e-03 L2_v_norm:5.9094e-02 L3_cos_v_neg_g:1.6194e-03 L3_v_norm:5.8682e-02 L4_cos_v_neg_g:1.1857e-03 
L4_v_norm:6.0126e-02 L5_cos_v_neg_g:1.2612e-03 L5_v_norm:6.0925e-02 L6_cos_v_neg_g:1.7076e-03 L6_v_norm:6.1241e-02 L7_cos_v_neg_g:2.5188e-03 L7_v_norm:6.1294e-02 L8_cos_v_neg_g:3.3825e-03 L8_v_norm:6.1118e-02 L9_cos_v_neg_g:4.6661e-03 L9_v_norm:6.1244e-02 L10_cos_v_neg_g:4.7164e-03 L10_v_norm:6.1428e-02 L11_cos_v_neg_g:5.0162e-03 L11_v_norm:6.1417e-02 L12_cos_v_neg_g:4.6338e-03 L12_v_norm:6.1340e-02 +step:7500 train loss:3.519205 +step:7501 train loss:3.507070 +step:7502 train loss:3.498713 +step:7503 train loss:3.477971 +step:7504 train loss:3.500997 +step:7505 train loss:3.490176 +step:7506 train loss:3.552271 +step:7507 train loss:3.468176 +step:7508 train loss:3.539664 +step:7509 train loss:3.509780 +step:7510 train loss:3.539623 +step:7511 train loss:3.546310 +step:7512 train loss:3.808845 +step:7513 train loss:3.498822 +step:7514 train loss:3.525952 +step:7515 train loss:3.494072 +step:7516 train loss:3.506430 +step:7517 train loss:3.539808 +step:7518 train loss:3.517386 +step:7519 train loss:3.529030 +step:7520 train loss:3.594405 +step:7521 train loss:3.480267 +step:7522 train loss:3.534124 +step:7523 train loss:3.569370 +step:7524 train loss:3.514117 +step:7525 train loss:3.517428 +step:7526 train loss:3.464413 +step:7527 train loss:3.475780 +step:7528 train loss:3.573002 +step:7529 train loss:3.550884 +step:7530 train loss:3.497735 +step:7531 train loss:3.571523 +step:7532 train loss:3.562024 +step:7533 train loss:3.487975 +step:7534 train loss:3.551373 +step:7535 train loss:3.554146 +step:7536 train loss:3.587511 +step:7537 train loss:3.606710 +step:7538 train loss:3.633419 +step:7539 train loss:3.531914 +step:7540 train loss:3.519661 +step:7541 train loss:3.575026 +step:7542 train loss:3.534827 +step:7543 train loss:3.489618 +step:7544 train loss:3.533522 +step:7545 train loss:3.521928 +step:7546 train loss:3.475519 +step:7547 train loss:3.522581 +step:7548 train loss:3.536656 +step:7549 train loss:3.518756 +step:7550 train loss:3.518221 +step:7551 train loss:3.616841 +step:7552 train loss:3.531103 +step:7553 train loss:3.567303 +step:7554 train loss:3.491343 +step:7555 train loss:3.584841 +step:7556 train loss:3.484513 +step:7557 train loss:3.581745 +step:7558 train loss:3.570546 +step:7559 train loss:3.526597 +step:7560 train loss:3.621482 +step:7561 train loss:3.592448 +step:7562 train loss:3.497788 +step:7563 train loss:3.491004 +step:7564 train loss:3.545800 +step:7565 train loss:3.564226 +step:7566 train loss:3.556002 +step:7567 train loss:3.571462 +step:7568 train loss:3.515499 +step:7569 train loss:3.576009 +step:7570 train loss:3.558849 +step:7571 train loss:3.640095 +step:7572 train loss:3.490626 +step:7573 train loss:3.557395 +step:7574 train loss:3.522929 +step:7575 train loss:3.514572 +step:7576 train loss:3.523012 +step:7577 train loss:3.539885 +step:7578 train loss:3.596945 +step:7579 train loss:3.532690 +step:7580 train loss:3.518993 +step:7581 train loss:3.508205 +step:7582 train loss:3.562560 +step:7583 train loss:3.499818 +step:7584 train loss:3.481606 +step:7585 train loss:3.450987 +step:7586 train loss:3.486852 +step:7587 train loss:3.549259 +step:7588 train loss:3.678587 +step:7589 train loss:3.497818 +step:7590 train loss:3.567901 +step:7591 train loss:3.571922 +step:7592 train loss:3.527587 +step:7593 train loss:3.553761 +step:7594 train loss:3.552041 +step:7595 train loss:3.521869 +step:7596 train loss:3.572498 +step:7597 train loss:3.477372 +step:7598 train loss:3.539726 +step:7599 train loss:3.532770 +step:7600 train loss:3.490860 +step:7601 train 
loss:3.605175 +step:7602 train loss:3.543200 +step:7603 train loss:3.506209 +step:7604 train loss:3.650152 +step:7605 train loss:3.540584 +step:7606 train loss:3.575288 +step:7607 train loss:3.524419 +step:7608 train loss:3.535563 +step:7609 train loss:3.571688 +step:7610 train loss:3.529123 +step:7611 train loss:3.505485 +step:7612 train loss:3.450828 +step:7613 train loss:3.497756 +step:7614 train loss:3.567949 +step:7615 train loss:3.527453 +step:7616 train loss:3.593580 +step:7617 train loss:3.495201 +step:7618 train loss:3.582645 +step:7619 train loss:3.523880 +step:7620 train loss:3.511175 +step:7621 train loss:3.457243 +step:7622 train loss:3.734334 +step:7623 train loss:3.748068 +step:7624 train loss:3.561060 +step:7625 train loss:3.597598 +step:7626 train loss:3.516470 +step:7627 train loss:3.588988 +step:7628 train loss:3.469731 +step:7629 train loss:3.532191 +step:7630 train loss:3.543849 +step:7631 train loss:3.523396 +step:7632 train loss:3.576822 +step:7633 train loss:3.643902 +step:7634 train loss:3.604745 +step:7635 train loss:3.509715 +step:7636 train loss:3.536270 +step:7637 train loss:3.484868 +step:7638 train loss:3.595314 +step:7639 train loss:3.522121 +step:7640 train loss:3.503288 +step:7641 train loss:3.533033 +step:7642 train loss:3.871105 +step:7643 train loss:3.620017 +step:7644 train loss:3.544162 +step:7645 train loss:3.532665 +step:7646 train loss:3.519021 +step:7647 train loss:3.512679 +step:7648 train loss:3.547614 +step:7649 train loss:3.507025 +step:7650 train loss:3.554123 +step:7651 train loss:3.576414 +step:7652 train loss:3.455830 +step:7653 train loss:3.654646 +step:7654 train loss:3.508780 +step:7655 train loss:3.528193 +step:7656 train loss:3.502683 +step:7657 train loss:3.516655 +step:7658 train loss:3.471483 +step:7659 train loss:3.535883 +step:7660 train loss:3.470226 +step:7661 train loss:3.485330 +step:7662 train loss:3.487076 +step:7663 train loss:3.536643 +step:7664 train loss:3.492933 +step:7665 train loss:3.467894 +step:7666 train loss:3.575647 +step:7667 train loss:3.488604 +step:7668 train loss:3.599046 +step:7669 train loss:3.533513 +step:7670 train loss:3.488860 +step:7671 train loss:3.542395 +step:7672 train loss:3.562774 +step:7673 train loss:3.528177 +step:7674 train loss:3.566670 +step:7675 train loss:3.619903 +step:7676 train loss:3.586785 +step:7677 train loss:3.615440 +step:7678 train loss:3.554228 +step:7679 train loss:3.576552 +step:7680 train loss:3.582336 +step:7681 train loss:3.549998 +step:7682 train loss:3.519697 +step:7683 train loss:3.522575 +step:7684 train loss:3.494926 +step:7685 train loss:3.473094 +step:7686 train loss:3.594473 +step:7687 train loss:3.508394 +step:7688 train loss:3.477271 +step:7689 train loss:3.527100 +step:7690 train loss:3.493042 +step:7691 train loss:3.520239 +step:7692 train loss:3.552792 +step:7693 train loss:3.557517 +step:7694 train loss:3.607607 +step:7695 train loss:3.534061 +step:7696 train loss:3.509985 +step:7697 train loss:3.496221 +step:7698 train loss:3.557377 +step:7699 train loss:3.553129 +step:7700 train loss:3.452021 +step:7701 train loss:3.569589 +step:7702 train loss:3.511654 +step:7703 train loss:3.517138 +step:7704 train loss:3.567978 +step:7705 train loss:3.525613 +step:7706 train loss:3.462390 +step:7707 train loss:3.580388 +step:7708 train loss:3.519216 +step:7709 train loss:3.538998 +step:7710 train loss:3.601979 +step:7711 train loss:3.563898 +step:7712 train loss:3.508905 +step:7713 train loss:3.588405 +step:7714 train loss:3.533127 +step:7715 train loss:3.484423 
+step:7716 train loss:3.523864 +step:7717 train loss:3.550433 +step:7718 train loss:3.554503 +step:7719 train loss:3.513057 +step:7720 train loss:3.526574 +step:7721 train loss:3.567189 +step:7722 train loss:3.495917 +step:7723 train loss:3.867346 +step:7724 train loss:3.532150 +step:7725 train loss:3.435674 +step:7726 train loss:3.518159 +step:7727 train loss:3.543720 +step:7728 train loss:3.495728 +step:7729 train loss:3.507313 +step:7730 train loss:3.528797 +step:7731 train loss:3.558563 +step:7732 train loss:3.578682 +step:7733 train loss:3.488609 +step:7734 train loss:3.516448 +step:7735 train loss:3.600880 +step:7736 train loss:3.550174 +step:7737 train loss:3.567034 +step:7738 train loss:3.469697 +step:7739 train loss:3.544132 +step:7740 train loss:3.491528 +step:7741 train loss:3.529785 +step:7742 train loss:3.525915 +step:7743 train loss:3.479993 +step:7744 train loss:3.610052 +step:7745 train loss:3.497831 +step:7746 train loss:3.471664 +step:7747 train loss:3.566645 +step:7748 train loss:3.546420 +step:7749 train loss:3.471019 +step:7750 validation loss:3.460968 +step:7750 train loss:3.630569 +step:7751 train loss:3.515718 +step:7752 train loss:3.506710 +step:7753 train loss:3.509047 +step:7754 train loss:3.482910 +step:7755 train loss:3.548889 +step:7756 train loss:3.576444 +step:7757 train loss:3.526919 +step:7758 train loss:3.495601 +step:7759 train loss:3.522942 +step:7760 train loss:3.552007 +step:7761 train loss:3.543766 +step:7762 train loss:3.529320 +step:7763 train loss:3.512702 +step:7764 train loss:3.518298 +step:7765 train loss:3.473206 +step:7766 train loss:3.537849 +step:7767 train loss:3.544328 +step:7768 train loss:3.497206 +step:7769 train loss:3.560308 +step:7770 train loss:3.577236 +step:7771 train loss:3.551861 +step:7772 train loss:3.525483 +step:7773 train loss:3.583316 +step:7774 train loss:3.482963 +step:7775 train loss:3.470471 +step:7776 train loss:3.572195 +step:7777 train loss:3.528091 +step:7778 train loss:3.484856 +step:7779 train loss:3.529052 +step:7780 train loss:3.522651 +step:7781 train loss:3.531081 +step:7782 train loss:3.515634 +step:7783 train loss:3.497999 +step:7784 train loss:3.495669 +step:7785 train loss:3.536340 +step:7786 train loss:3.494004 +step:7787 train loss:3.574269 +step:7788 train loss:3.524575 +step:7789 train loss:3.461149 +step:7790 train loss:3.521148 +step:7791 train loss:3.552789 +step:7792 train loss:3.512817 +step:7793 train loss:3.535871 +step:7794 train loss:3.523034 +step:7795 train loss:3.553083 +step:7796 train loss:3.517791 +step:7797 train loss:3.536965 +step:7798 train loss:3.529612 +step:7799 train loss:3.519984 +step:7800 train loss:3.472686 +step:7801 train loss:3.540806 +step:7802 train loss:3.520341 +step:7803 train loss:3.570970 +step:7804 train loss:3.530685 +step:7805 train loss:3.528430 +step:7806 train loss:3.545345 +step:7807 train loss:3.615789 +step:7808 train loss:3.477648 +step:7809 train loss:3.452394 +step:7810 train loss:3.542996 +step:7811 train loss:3.474919 +step:7812 train loss:3.495426 +step:7813 train loss:3.582471 +step:7814 train loss:3.652750 +step:7815 train loss:3.465529 +step:7816 train loss:3.550172 +step:7817 train loss:3.580335 +step:7818 train loss:3.479986 +step:7819 train loss:3.531570 +step:7820 train loss:3.572495 +step:7821 train loss:3.506010 +step:7822 train loss:3.464537 +step:7823 train loss:3.536181 +step:7824 train loss:3.519820 +step:7825 train loss:3.505053 +step:7826 train loss:3.504745 +step:7827 train loss:3.543438 +step:7828 train loss:3.534415 +step:7829 
train loss:3.489274 +step:7830 train loss:3.500206 +step:7831 train loss:3.503692 +step:7832 train loss:3.570446 +step:7833 train loss:3.549751 +step:7834 train loss:3.511454 +step:7835 train loss:3.537786 +step:7836 train loss:3.647061 +step:7837 train loss:3.535221 +step:7838 train loss:3.505047 +step:7839 train loss:3.462166 +step:7840 train loss:3.478992 +step:7841 train loss:3.576950 +step:7842 train loss:3.560697 +step:7843 train loss:3.611183 +step:7844 train loss:3.543387 +step:7845 train loss:3.522564 +step:7846 train loss:3.633129 +step:7847 train loss:3.523244 +step:7848 train loss:3.533100 +step:7849 train loss:3.548702 +step:7850 train loss:3.516683 +step:7851 train loss:3.547842 +step:7852 train loss:3.519343 +step:7853 train loss:3.490201 +step:7854 train loss:3.522763 +step:7855 train loss:3.523590 +step:7856 train loss:3.526410 +step:7857 train loss:3.513689 +step:7858 train loss:3.523480 +step:7859 train loss:3.530382 +step:7860 train loss:3.566122 +step:7861 train loss:3.554101 +step:7862 train loss:3.496876 +step:7863 train loss:3.599291 +step:7864 train loss:3.441318 +step:7865 train loss:3.519699 +step:7866 train loss:3.492388 +step:7867 train loss:3.539001 +step:7868 train loss:3.516807 +step:7869 train loss:3.520818 +step:7870 train loss:3.438559 +step:7871 train loss:3.504212 +step:7872 train loss:3.495200 +step:7873 train loss:3.574673 +step:7874 train loss:3.516190 +step:7875 train loss:3.520824 +step:7876 train loss:3.539887 +step:7877 train loss:3.492922 +step:7878 train loss:3.530177 +step:7879 train loss:3.868770 +step:7880 train loss:3.522681 +step:7881 train loss:3.549370 +step:7882 train loss:3.629347 +step:7883 train loss:3.443166 +step:7884 train loss:3.534459 +step:7885 train loss:3.516914 +step:7886 train loss:3.516575 +step:7887 train loss:3.510250 +step:7888 train loss:3.542640 +step:7889 train loss:3.589956 +step:7890 train loss:3.496136 +step:7891 train loss:3.546561 +step:7892 train loss:3.515998 +step:7893 train loss:3.494023 +step:7894 train loss:3.513369 +step:7895 train loss:3.499220 +step:7896 train loss:3.497552 +step:7897 train loss:3.520627 +step:7898 train loss:3.530036 +step:7899 train loss:3.517871 +step:7900 train loss:3.488111 +step:7901 train loss:3.477687 +step:7902 train loss:3.627115 +step:7903 train loss:3.470534 +step:7904 train loss:3.523331 +step:7905 train loss:3.589055 +step:7906 train loss:3.485808 +step:7907 train loss:3.512970 +step:7908 train loss:3.564288 +step:7909 train loss:3.616073 +step:7910 train loss:3.494748 +step:7911 train loss:3.516937 +step:7912 train loss:3.519594 +step:7913 train loss:3.493762 +step:7914 train loss:3.530280 +step:7915 train loss:3.632876 +step:7916 train loss:3.502730 +step:7917 train loss:3.563663 +step:7918 train loss:3.502941 +step:7919 train loss:3.495570 +step:7920 train loss:3.534170 +step:7921 train loss:3.538936 +step:7922 train loss:3.512985 +step:7923 train loss:3.562273 +step:7924 train loss:3.522478 +step:7925 train loss:3.546303 +step:7926 train loss:3.447802 +step:7927 train loss:3.726455 +step:7928 train loss:3.554584 +step:7929 train loss:3.517187 +step:7930 train loss:3.478026 +step:7931 train loss:3.504295 +step:7932 train loss:3.525842 +step:7933 train loss:3.540750 +step:7934 train loss:3.632488 +step:7935 train loss:3.553191 +step:7936 train loss:3.528090 +step:7937 train loss:3.478015 +step:7938 train loss:3.491007 +step:7939 train loss:3.540577 +step:7940 train loss:3.524076 +step:7941 train loss:3.549754 +step:7942 train loss:3.538268 +step:7943 train loss:3.552191 
+step:7944 train loss:3.472713 +step:7945 train loss:3.576302 +step:7946 train loss:3.524468 +step:7947 train loss:3.536850 +step:7948 train loss:3.495647 +step:7949 train loss:3.546128 +step:7950 train loss:3.602494 +step:7951 train loss:3.568968 +step:7952 train loss:3.712728 +step:7953 train loss:3.605676 +step:7954 train loss:3.510108 +step:7955 train loss:3.497090 +step:7956 train loss:3.499200 +step:7957 train loss:3.576372 +step:7958 train loss:3.586217 +step:7959 train loss:3.540351 +step:7960 train loss:3.603440 +step:7961 train loss:3.512020 +step:7962 train loss:3.483927 +step:7963 train loss:3.522390 +step:7964 train loss:3.518003 +step:7965 train loss:3.528130 +step:7966 train loss:3.499072 +step:7967 train loss:3.523636 +step:7968 train loss:3.534981 +step:7969 train loss:3.491685 +step:7970 train loss:3.460871 +step:7971 train loss:3.546845 +step:7972 train loss:3.520681 +step:7973 train loss:3.494457 +step:7974 train loss:3.531427 +step:7975 train loss:3.518750 +step:7976 train loss:3.537124 +step:7977 train loss:3.569818 +step:7978 train loss:3.592381 +step:7979 train loss:3.536777 +step:7980 train loss:3.444718 +step:7981 train loss:3.483178 +step:7982 train loss:3.532668 +step:7983 train loss:3.548893 +step:7984 train loss:3.588060 +step:7985 train loss:3.514929 +step:7986 train loss:3.535413 +step:7987 train loss:3.590138 +step:7988 train loss:3.564784 +step:7989 train loss:3.468660 +step:7990 train loss:3.484392 +step:7991 train loss:3.498501 +step:7992 train loss:3.523196 +step:7993 train loss:3.503992 +step:7994 train loss:3.556934 +step:7995 train loss:3.558722 +step:7996 train loss:3.523866 +step:7997 train loss:3.544789 +step:7998 train loss:3.570512 +step:7999 train loss:3.498887 +step:8000 validation loss:3.453284 total_sharp:4.0307e-03 L1_sharp:6.3767e-02 L2_sharp:1.7059e-02 L3_sharp:2.6465e-02 L4_sharp:1.5974e-02 L5_sharp:1.9186e-02 L6_sharp:2.1086e-02 L7_sharp:2.7838e-02 L8_sharp:3.0514e-02 L9_sharp:2.2191e-02 L10_sharp:1.7325e-02 L11_sharp:1.7282e-02 L12_sharp:3.0589e-02 total_fnorm:1.3434e+00 total_l1_linf:8.0594e+03 total_spectral:1.3434e+00 L1_fnorm:6.0912e-02 L2_fnorm:5.9020e-02 L3_fnorm:5.8981e-02 L4_fnorm:6.0251e-02 L5_fnorm:6.1011e-02 L6_fnorm:6.1188e-02 L7_fnorm:6.1359e-02 L8_fnorm:6.1250e-02 L9_fnorm:6.1251e-02 L10_fnorm:6.1518e-02 L11_fnorm:6.1569e-02 L12_fnorm:6.1511e-02 L1_l1linf:3.0512e-01 L2_l1linf:3.6536e-01 L3_l1linf:3.6608e-01 L4_l1linf:3.5245e-01 L5_l1linf:3.4207e-01 L6_l1linf:3.0341e-01 L7_l1linf:3.2169e-01 L8_l1linf:3.2582e-01 L9_l1linf:3.3651e-01 L10_l1linf:3.7163e-01 L11_l1linf:3.9535e-01 L12_l1linf:4.1410e-01 L1_spectral:6.8946e-03 L2_spectral:8.2476e-03 L3_spectral:8.2450e-03 L4_spectral:7.9845e-03 L5_spectral:7.7044e-03 L6_spectral:6.9521e-03 L7_spectral:7.2313e-03 L8_spectral:7.2819e-03 L9_spectral:7.5183e-03 L10_spectral:8.2954e-03 L11_spectral:8.8312e-03 L12_spectral:9.2560e-03 ip_v_neg_g:3.2886e-03 cos_v_neg_g:7.8511e-04 v_norm:1.3434e+00 g_norm:3.1180e+00 hv_norm:7.0480e-01 cos_v_hv:7.6826e-03 hg_norm:1.7565e+02 cos_g_hg:4.4014e-01 v_par:2.7169e-05 v_perp:1.3434e+00 L1_cos_v_neg_g:5.8100e-04 L1_v_norm:6.0912e-02 L2_cos_v_neg_g:2.2564e-03 L2_v_norm:5.9020e-02 L3_cos_v_neg_g:4.5346e-03 L3_v_norm:5.8981e-02 L4_cos_v_neg_g:5.5105e-03 L4_v_norm:6.0251e-02 L5_cos_v_neg_g:3.2783e-03 L5_v_norm:6.1011e-02 L6_cos_v_neg_g:4.4021e-03 L6_v_norm:6.1188e-02 L7_cos_v_neg_g:7.1376e-03 L7_v_norm:6.1359e-02 L8_cos_v_neg_g:7.6927e-03 L8_v_norm:6.1250e-02 L9_cos_v_neg_g:6.2716e-03 L9_v_norm:6.1251e-02 L10_cos_v_neg_g:5.0680e-03 L10_v_norm:6.1518e-02 
L11_cos_v_neg_g:3.7991e-03 L11_v_norm:6.1569e-02 L12_cos_v_neg_g:3.7800e-03 L12_v_norm:6.1511e-02 +step:8000 train loss:3.567520 +step:8001 train loss:3.528992 +step:8002 train loss:3.548995 +step:8003 train loss:3.564955 +step:8004 train loss:3.542353 +step:8005 train loss:3.462678 +step:8006 train loss:3.542106 +step:8007 train loss:3.509350 +step:8008 train loss:3.534875 +step:8009 train loss:3.606988 +step:8010 train loss:3.829288 +step:8011 train loss:3.491637 +step:8012 train loss:3.568882 +step:8013 train loss:3.521419 +step:8014 train loss:3.534101 +step:8015 train loss:3.534096 +step:8016 train loss:3.521206 +step:8017 train loss:3.542151 +step:8018 train loss:3.504773 +step:8019 train loss:3.470149 +step:8020 train loss:3.510808 +step:8021 train loss:3.586407 +step:8022 train loss:3.502828 +step:8023 train loss:3.536005 +step:8024 train loss:3.388710 +step:8025 train loss:3.511512 +step:8026 train loss:3.520182 +step:8027 train loss:3.527406 +step:8028 train loss:3.582967 +step:8029 train loss:3.512821 +step:8030 train loss:3.472057 +step:8031 train loss:3.531350 +step:8032 train loss:3.516719 +step:8033 train loss:3.468119 +step:8034 train loss:3.502725 +step:8035 train loss:3.488991 +step:8036 train loss:3.484868 +step:8037 train loss:3.454544 +step:8038 train loss:3.469013 +step:8039 train loss:3.561861 +step:8040 train loss:3.496973 +step:8041 train loss:3.494700 +step:8042 train loss:3.529307 +step:8043 train loss:3.473667 +step:8044 train loss:3.487931 +step:8045 train loss:3.555861 +step:8046 train loss:3.479863 +step:8047 train loss:3.485730 +step:8048 train loss:3.514473 +step:8049 train loss:3.561605 +step:8050 train loss:3.503303 +step:8051 train loss:3.478919 +step:8052 train loss:3.540218 +step:8053 train loss:3.493993 +step:8054 train loss:3.529744 +step:8055 train loss:3.558911 +step:8056 train loss:3.528601 +step:8057 train loss:3.603485 +step:8058 train loss:3.510084 +step:8059 train loss:3.569500 +step:8060 train loss:3.538180 +step:8061 train loss:3.428950 +step:8062 train loss:3.559479 +step:8063 train loss:3.523109 +step:8064 train loss:3.483388 +step:8065 train loss:3.546459 +step:8066 train loss:3.505603 +step:8067 train loss:3.570065 +step:8068 train loss:3.497510 +step:8069 train loss:3.519978 +step:8070 train loss:3.488059 +step:8071 train loss:3.496289 +step:8072 train loss:3.537185 +step:8073 train loss:3.490740 +step:8074 train loss:3.501536 +step:8075 train loss:3.484677 +step:8076 train loss:3.537264 +step:8077 train loss:3.542482 +step:8078 train loss:3.487229 +step:8079 train loss:3.507037 +step:8080 train loss:3.495654 +step:8081 train loss:3.512478 +step:8082 train loss:3.529377 +step:8083 train loss:3.431357 +step:8084 train loss:3.567916 +step:8085 train loss:3.440263 +step:8086 train loss:3.567270 +step:8087 train loss:3.460763 +step:8088 train loss:3.511015 +step:8089 train loss:3.543516 +step:8090 train loss:3.567681 +step:8091 train loss:3.510923 +step:8092 train loss:3.493034 +step:8093 train loss:3.500152 +step:8094 train loss:3.500698 +step:8095 train loss:3.526289 +step:8096 train loss:3.527719 +step:8097 train loss:3.456130 +step:8098 train loss:3.472470 +step:8099 train loss:3.456192 +step:8100 train loss:3.512647 +step:8101 train loss:3.589167 +step:8102 train loss:3.527889 +step:8103 train loss:3.479433 +step:8104 train loss:3.529821 +step:8105 train loss:3.526484 +step:8106 train loss:3.490155 +step:8107 train loss:3.470480 +step:8108 train loss:3.487129 +step:8109 train loss:3.483927 +step:8110 train loss:3.549152 +step:8111 
train loss:3.470181 +step:8112 train loss:3.491305 +step:8113 train loss:3.478387 +step:8114 train loss:3.422728 +step:8115 train loss:3.478635 +step:8116 train loss:3.515257 +step:8117 train loss:3.484766 +step:8118 train loss:3.476704 +step:8119 train loss:3.519614 +step:8120 train loss:3.466646 +step:8121 train loss:3.523638 +step:8122 train loss:3.505673 +step:8123 train loss:3.515880 +step:8124 train loss:3.475734 +step:8125 train loss:3.459686 +step:8126 train loss:3.449241 +step:8127 train loss:3.545400 +step:8128 train loss:3.551649 +step:8129 train loss:3.471341 +step:8130 train loss:3.499538 +step:8131 train loss:3.471410 +step:8132 train loss:3.539050 +step:8133 train loss:3.461724 +step:8134 train loss:3.499766 +step:8135 train loss:3.490800 +step:8136 train loss:3.500584 +step:8137 train loss:3.563187 +step:8138 train loss:3.471774 +step:8139 train loss:3.545230 +step:8140 train loss:3.473722 +step:8141 train loss:3.495714 +step:8142 train loss:3.478282 +step:8143 train loss:3.528383 +step:8144 train loss:3.505380 +step:8145 train loss:3.474557 +step:8146 train loss:3.482822 +step:8147 train loss:3.505252 +step:8148 train loss:3.598229 +step:8149 train loss:3.506879 +step:8150 train loss:3.489425 +step:8151 train loss:3.479949 +step:8152 train loss:3.577557 +step:8153 train loss:3.454164 +step:8154 train loss:3.475123 +step:8155 train loss:3.497073 +step:8156 train loss:3.479897 +step:8157 train loss:3.500105 +step:8158 train loss:3.513294 +step:8159 train loss:3.527657 +step:8160 train loss:3.479885 +step:8161 train loss:3.523740 +step:8162 train loss:3.453073 +step:8163 train loss:3.514292 +step:8164 train loss:3.499811 +step:8165 train loss:3.552099 +step:8166 train loss:3.554372 +step:8167 train loss:3.457345 +step:8168 train loss:3.439073 +step:8169 train loss:3.487686 +step:8170 train loss:3.436918 +step:8171 train loss:3.499544 +step:8172 train loss:3.496580 +step:8173 train loss:3.498558 +step:8174 train loss:3.505193 +step:8175 train loss:3.466500 +step:8176 train loss:3.462882 +step:8177 train loss:3.509815 +step:8178 train loss:3.594544 +step:8179 train loss:3.501633 +step:8180 train loss:3.526325 +step:8181 train loss:3.525620 +step:8182 train loss:3.486736 +step:8183 train loss:3.473267 +step:8184 train loss:3.465094 +step:8185 train loss:3.505759 +step:8186 train loss:3.508852 +step:8187 train loss:3.519048 +step:8188 train loss:3.449125 +step:8189 train loss:3.595938 +step:8190 train loss:3.527920 +step:8191 train loss:3.531588 +step:8192 train loss:3.642435 +step:8193 train loss:3.512003 +step:8194 train loss:3.447310 +step:8195 train loss:3.546167 +step:8196 train loss:3.461091 +step:8197 train loss:3.491660 +step:8198 train loss:3.497617 +step:8199 train loss:3.500834 +step:8200 train loss:3.477931 +step:8201 train loss:3.594025 +step:8202 train loss:3.508433 +step:8203 train loss:3.529060 +step:8204 train loss:3.440899 +step:8205 train loss:3.447245 +step:8206 train loss:3.570968 +step:8207 train loss:3.494581 +step:8208 train loss:3.515347 +step:8209 train loss:3.560207 +step:8210 train loss:3.543846 +step:8211 train loss:3.475790 +step:8212 train loss:3.533173 +step:8213 train loss:3.544405 +step:8214 train loss:3.580899 +step:8215 train loss:3.554327 +step:8216 train loss:3.537956 +step:8217 train loss:3.516372 +step:8218 train loss:3.524762 +step:8219 train loss:3.658699 +step:8220 train loss:3.488001 +step:8221 train loss:3.509490 +step:8222 train loss:3.461015 +step:8223 train loss:3.480947 +step:8224 train loss:3.488908 +step:8225 train loss:3.541569 
+step:8226 train loss:3.468026 +step:8227 train loss:3.539485 +step:8228 train loss:3.425163 +step:8229 train loss:3.468492 +step:8230 train loss:3.486380 +step:8231 train loss:3.508932 +step:8232 train loss:3.509515 +step:8233 train loss:3.553566 +step:8234 train loss:3.548673 +step:8235 train loss:3.519616 +step:8236 train loss:3.506626 +step:8237 train loss:3.456850 +step:8238 train loss:3.709637 +step:8239 train loss:3.542777 +step:8240 train loss:3.488300 +step:8241 train loss:3.459719 +step:8242 train loss:3.498229 +step:8243 train loss:3.489639 +step:8244 train loss:3.501922 +step:8245 train loss:3.484465 +step:8246 train loss:3.552269 +step:8247 train loss:3.584451 +step:8248 train loss:3.503122 +step:8249 train loss:3.493141 +step:8250 validation loss:3.443934 +step:8250 train loss:3.483102 +step:8251 train loss:3.578825 +step:8252 train loss:3.515428 +step:8253 train loss:3.483907 +step:8254 train loss:3.455158 +step:8255 train loss:3.488363 +step:8256 train loss:3.469688 +step:8257 train loss:3.579235 +step:8258 train loss:3.499605 +step:8259 train loss:3.481178 +step:8260 train loss:3.482222 +step:8261 train loss:3.480580 +step:8262 train loss:3.495080 +step:8263 train loss:3.508463 +step:8264 train loss:3.472500 +step:8265 train loss:3.466860 +step:8266 train loss:3.473304 +step:8267 train loss:3.405735 +step:8268 train loss:3.530843 +step:8269 train loss:3.460366 +step:8270 train loss:3.515353 +step:8271 train loss:3.541202 +step:8272 train loss:3.567504 +step:8273 train loss:3.444696 +step:8274 train loss:3.506817 +step:8275 train loss:3.464869 +step:8276 train loss:3.503841 +step:8277 train loss:3.573190 +step:8278 train loss:3.588104 +step:8279 train loss:3.499803 +step:8280 train loss:3.487462 +step:8281 train loss:3.455623 +step:8282 train loss:3.515814 +step:8283 train loss:3.500514 +step:8284 train loss:3.486963 +step:8285 train loss:3.478205 +step:8286 train loss:3.588528 +step:8287 train loss:3.523685 +step:8288 train loss:3.494766 +step:8289 train loss:3.508414 +step:8290 train loss:3.448210 +step:8291 train loss:3.486645 +step:8292 train loss:3.518067 +step:8293 train loss:3.491205 +step:8294 train loss:3.461840 +step:8295 train loss:3.498531 +step:8296 train loss:3.566443 +step:8297 train loss:3.646215 +step:8298 train loss:3.468237 +step:8299 train loss:3.502537 +step:8300 train loss:3.514196 +step:8301 train loss:3.486231 +step:8302 train loss:3.544049 +step:8303 train loss:3.678679 +step:8304 train loss:3.484790 +step:8305 train loss:3.530920 +step:8306 train loss:3.507586 +step:8307 train loss:3.524923 +step:8308 train loss:3.522952 +step:8309 train loss:3.545145 +step:8310 train loss:3.462378 +step:8311 train loss:3.553830 +step:8312 train loss:3.544986 +step:8313 train loss:3.612337 +step:8314 train loss:3.480598 +step:8315 train loss:3.429801 +step:8316 train loss:3.489911 +step:8317 train loss:3.510876 +step:8318 train loss:3.501499 +step:8319 train loss:3.539255 +step:8320 train loss:3.558865 +step:8321 train loss:3.466130 +step:8322 train loss:3.482047 +step:8323 train loss:3.519714 +step:8324 train loss:3.494321 +step:8325 train loss:3.548528 +step:8326 train loss:3.515653 +step:8327 train loss:3.504106 +step:8328 train loss:3.576538 +step:8329 train loss:3.484832 +step:8330 train loss:3.526141 +step:8331 train loss:3.453666 +step:8332 train loss:3.552956 +step:8333 train loss:3.570104 +step:8334 train loss:3.439172 +step:8335 train loss:3.498204 +step:8336 train loss:3.593752 +step:8337 train loss:3.525178 +step:8338 train loss:3.489914 +step:8339 
train loss:3.472036 +step:8340 train loss:3.562499 +step:8341 train loss:3.462555 +step:8342 train loss:3.535516 +step:8343 train loss:3.446877 +step:8344 train loss:3.494679 +step:8345 train loss:3.528688 +step:8346 train loss:3.609963 +step:8347 train loss:3.499041 +step:8348 train loss:3.527158 +step:8349 train loss:3.500483 +step:8350 train loss:3.520909 +step:8351 train loss:3.460967 +step:8352 train loss:3.547592 +step:8353 train loss:3.503458 +step:8354 train loss:3.484559 +step:8355 train loss:3.484326 +step:8356 train loss:3.480242 +step:8357 train loss:3.495152 +step:8358 train loss:3.469200 +step:8359 train loss:3.463840 +step:8360 train loss:3.510249 +step:8361 train loss:3.523926 +step:8362 train loss:3.544697 +step:8363 train loss:3.541966 +step:8364 train loss:3.506061 +step:8365 train loss:3.652673 +step:8366 train loss:3.496003 +step:8367 train loss:3.470040 +step:8368 train loss:3.436922 +step:8369 train loss:3.471070 +step:8370 train loss:3.551444 +step:8371 train loss:3.523038 +step:8372 train loss:3.498709 +step:8373 train loss:3.512451 +step:8374 train loss:3.442080 +step:8375 train loss:3.507053 +step:8376 train loss:3.543210 +step:8377 train loss:3.372025 +step:8378 train loss:3.586126 +step:8379 train loss:3.449517 +step:8380 train loss:3.459407 +step:8381 train loss:3.464736 +step:8382 train loss:3.490157 +step:8383 train loss:3.448930 +step:8384 train loss:3.494360 +step:8385 train loss:3.504769 +step:8386 train loss:3.486332 +step:8387 train loss:3.647737 +step:8388 train loss:3.557973 +step:8389 train loss:3.538307 +step:8390 train loss:3.536261 +step:8391 train loss:3.467768 +step:8392 train loss:3.478948 +step:8393 train loss:3.432945 +step:8394 train loss:3.529430 +step:8395 train loss:3.533251 +step:8396 train loss:3.558206 +step:8397 train loss:3.489469 +step:8398 train loss:3.510182 +step:8399 train loss:3.475165 +step:8400 train loss:3.482782 +step:8401 train loss:3.488258 +step:8402 train loss:3.471184 +step:8403 train loss:3.487580 +step:8404 train loss:3.492372 +step:8405 train loss:3.446469 +step:8406 train loss:3.488635 +step:8407 train loss:3.531328 +step:8408 train loss:3.502226 +step:8409 train loss:3.425772 +step:8410 train loss:3.488815 +step:8411 train loss:3.513294 +step:8412 train loss:3.574165 +step:8413 train loss:3.548025 +step:8414 train loss:3.545517 +step:8415 train loss:3.467224 +step:8416 train loss:3.513066 +step:8417 train loss:3.429876 +step:8418 train loss:3.534011 +step:8419 train loss:3.489640 +step:8420 train loss:3.566463 +step:8421 train loss:3.481160 +step:8422 train loss:3.499580 +step:8423 train loss:3.515339 +step:8424 train loss:3.520295 +step:8425 train loss:3.578158 +step:8426 train loss:3.547350 +step:8427 train loss:3.467258 +step:8428 train loss:3.479127 +step:8429 train loss:3.541419 +step:8430 train loss:3.480802 +step:8431 train loss:3.486968 +step:8432 train loss:3.487910 +step:8433 train loss:3.462501 +step:8434 train loss:3.497958 +step:8435 train loss:3.417540 +step:8436 train loss:3.499655 +step:8437 train loss:3.542014 +step:8438 train loss:3.519617 +step:8439 train loss:3.461511 +step:8440 train loss:3.431327 +step:8441 train loss:3.487934 +step:8442 train loss:3.512056 +step:8443 train loss:3.469808 +step:8444 train loss:3.501769 +step:8445 train loss:3.453142 +step:8446 train loss:3.503531 +step:8447 train loss:3.516119 +step:8448 train loss:3.498463 +step:8449 train loss:3.487900 +step:8450 train loss:3.479001 +step:8451 train loss:3.511104 +step:8452 train loss:3.484747 +step:8453 train loss:3.465183 
+step:8454 train loss:3.513927 +step:8455 train loss:3.585807 +step:8456 train loss:3.563433 +step:8457 train loss:3.617263 +step:8458 train loss:3.507360 +step:8459 train loss:3.512302 +step:8460 train loss:3.441313 +step:8461 train loss:3.600362 +step:8462 train loss:3.469753 +step:8463 train loss:3.507560 +step:8464 train loss:3.522545 +step:8465 train loss:3.528611 +step:8466 train loss:3.502347 +step:8467 train loss:3.506234 +step:8468 train loss:3.758193 +step:8469 train loss:3.466586 +step:8470 train loss:3.461790 +step:8471 train loss:3.504119 +step:8472 train loss:3.526294 +step:8473 train loss:3.481915 +step:8474 train loss:3.606775 +step:8475 train loss:3.562829 +step:8476 train loss:3.513972 +step:8477 train loss:3.502316 +step:8478 train loss:3.481510 +step:8479 train loss:3.486469 +step:8480 train loss:3.553508 +step:8481 train loss:3.481979 +step:8482 train loss:3.477465 +step:8483 train loss:3.620057 +step:8484 train loss:3.501431 +step:8485 train loss:3.549673 +step:8486 train loss:3.460774 +step:8487 train loss:3.515109 +step:8488 train loss:3.460500 +step:8489 train loss:3.539007 +step:8490 train loss:3.526536 +step:8491 train loss:3.546501 +step:8492 train loss:3.500180 +step:8493 train loss:3.572203 +step:8494 train loss:3.437318 +step:8495 train loss:3.534376 +step:8496 train loss:3.480343 +step:8497 train loss:3.513576 +step:8498 train loss:3.527598 +step:8499 train loss:3.505982 +step:8500 validation loss:3.440152 total_sharp:4.4041e-03 L1_sharp:1.1651e-01 L2_sharp:1.8852e-02 L3_sharp:3.5034e-02 L4_sharp:2.0939e-02 L5_sharp:2.2712e-02 L6_sharp:2.3929e-02 L7_sharp:2.8257e-02 L8_sharp:3.0478e-02 L9_sharp:2.2225e-02 L10_sharp:1.4110e-02 L11_sharp:1.5178e-02 L12_sharp:2.4676e-02 total_fnorm:1.3457e+00 total_l1_linf:8.0664e+03 total_spectral:1.3457e+00 L1_fnorm:6.0759e-02 L2_fnorm:5.8756e-02 L3_fnorm:5.8812e-02 L4_fnorm:6.0110e-02 L5_fnorm:6.0972e-02 L6_fnorm:6.1257e-02 L7_fnorm:6.1375e-02 L8_fnorm:6.1264e-02 L9_fnorm:6.1173e-02 L10_fnorm:6.1373e-02 L11_fnorm:6.1227e-02 L12_fnorm:6.1253e-02 L1_l1linf:3.2623e-01 L2_l1linf:3.6260e-01 L3_l1linf:3.9528e-01 L4_l1linf:3.7811e-01 L5_l1linf:3.6636e-01 L6_l1linf:3.1564e-01 L7_l1linf:3.2649e-01 L8_l1linf:3.0760e-01 L9_l1linf:3.3246e-01 L10_l1linf:3.4636e-01 L11_l1linf:3.6399e-01 L12_l1linf:3.5496e-01 L1_spectral:7.3997e-03 L2_spectral:8.2142e-03 L3_spectral:8.8356e-03 L4_spectral:8.4800e-03 L5_spectral:8.2384e-03 L6_spectral:7.0992e-03 L7_spectral:7.3234e-03 L8_spectral:6.9366e-03 L9_spectral:7.4376e-03 L10_spectral:7.7897e-03 L11_spectral:8.1554e-03 L12_spectral:8.1271e-03 ip_v_neg_g:3.9562e-03 cos_v_neg_g:9.6428e-04 v_norm:1.3457e+00 g_norm:3.0489e+00 hv_norm:7.2454e-01 cos_v_hv:8.1794e-03 hg_norm:1.1746e+02 cos_g_hg:4.5812e-01 v_par:2.7526e-05 v_perp:1.3457e+00 L1_cos_v_neg_g:9.4808e-03 L1_v_norm:6.0759e-02 L2_cos_v_neg_g:7.6323e-03 L2_v_norm:5.8756e-02 L3_cos_v_neg_g:6.7251e-03 L3_v_norm:5.8812e-02 L4_cos_v_neg_g:6.2513e-03 L4_v_norm:6.0110e-02 L5_cos_v_neg_g:6.1827e-03 L5_v_norm:6.0972e-02 L6_cos_v_neg_g:5.9343e-03 L6_v_norm:6.1257e-02 L7_cos_v_neg_g:6.2489e-03 L7_v_norm:6.1375e-02 L8_cos_v_neg_g:5.3462e-03 L8_v_norm:6.1264e-02 L9_cos_v_neg_g:4.6320e-03 L9_v_norm:6.1173e-02 L10_cos_v_neg_g:4.5163e-03 L10_v_norm:6.1373e-02 L11_cos_v_neg_g:4.1603e-03 L11_v_norm:6.1227e-02 L12_cos_v_neg_g:3.2673e-03 L12_v_norm:6.1253e-02 +step:8500 train loss:3.500467 +step:8501 train loss:3.720035 +step:8502 train loss:3.731632 +step:8503 train loss:3.493913 +step:8504 train loss:3.490375 +step:8505 train loss:3.470565 +step:8506 train 
loss:3.538936 +step:8507 train loss:3.478100 +step:8508 train loss:3.510734 +step:8509 train loss:3.451889 +step:8510 train loss:3.474267 +step:8511 train loss:3.431397 +step:8512 train loss:3.531913 +step:8513 train loss:3.537411 +step:8514 train loss:3.481783 +step:8515 train loss:3.575698 +step:8516 train loss:3.493993 +step:8517 train loss:3.516156 +step:8518 train loss:3.407523 +step:8519 train loss:3.498589 +step:8520 train loss:3.467264 +step:8521 train loss:3.505848 +step:8522 train loss:3.401192 +step:8523 train loss:3.494139 +step:8524 train loss:3.486725 +step:8525 train loss:3.552403 +step:8526 train loss:3.532556 +step:8527 train loss:3.476603 +step:8528 train loss:3.559572 +step:8529 train loss:3.515279 +step:8530 train loss:3.550399 +step:8531 train loss:3.537608 +step:8532 train loss:3.575920 +step:8533 train loss:3.529522 +step:8534 train loss:3.530123 +step:8535 train loss:3.499588 +step:8536 train loss:3.591434 +step:8537 train loss:3.504484 +step:8538 train loss:3.573885 +step:8539 train loss:3.493783 +step:8540 train loss:3.523104 +step:8541 train loss:3.462175 +step:8542 train loss:3.528059 +step:8543 train loss:3.442787 +step:8544 train loss:3.439705 +step:8545 train loss:3.489777 +step:8546 train loss:3.443041 +step:8547 train loss:3.496429 +step:8548 train loss:3.467452 +step:8549 train loss:3.509820 +step:8550 train loss:3.462266 +step:8551 train loss:3.512471 +step:8552 train loss:3.513259 +step:8553 train loss:3.516817 +step:8554 train loss:3.492040 +step:8555 train loss:3.503551 +step:8556 train loss:3.583769 +step:8557 train loss:3.480583 +step:8558 train loss:3.518424 +step:8559 train loss:3.510687 +step:8560 train loss:3.490402 +step:8561 train loss:3.446426 +step:8562 train loss:3.474177 +step:8563 train loss:3.473555 +step:8564 train loss:3.542177 +step:8565 train loss:3.517258 +step:8566 train loss:3.536754 +step:8567 train loss:3.483917 +step:8568 train loss:3.501253 +step:8569 train loss:3.509699 +step:8570 train loss:3.452965 +step:8571 train loss:3.497583 +step:8572 train loss:3.512027 +step:8573 train loss:3.584622 +step:8574 train loss:3.516560 +step:8575 train loss:3.514879 +step:8576 train loss:3.548564 +step:8577 train loss:3.630225 +step:8578 train loss:3.542123 +step:8579 train loss:3.525762 +step:8580 train loss:3.461585 +step:8581 train loss:3.501219 +step:8582 train loss:3.507892 +step:8583 train loss:3.507065 +step:8584 train loss:3.494777 +step:8585 train loss:3.578631 +step:8586 train loss:3.494225 +step:8587 train loss:3.501991 +step:8588 train loss:3.550434 +step:8589 train loss:3.495031 +step:8590 train loss:3.487671 +step:8591 train loss:3.492390 +step:8592 train loss:3.450769 +step:8593 train loss:3.528441 +step:8594 train loss:3.553919 +step:8595 train loss:3.476246 +step:8596 train loss:3.517873 +step:8597 train loss:3.482568 +step:8598 train loss:3.533976 +step:8599 train loss:3.504617 +step:8600 train loss:3.510491 +step:8601 train loss:3.498949 +step:8602 train loss:3.472548 +step:8603 train loss:3.531244 +step:8604 train loss:3.476999 +step:8605 train loss:3.488776 +step:8606 train loss:3.500722 +step:8607 train loss:3.508793 +step:8608 train loss:3.552131 +step:8609 train loss:3.450002 +step:8610 train loss:3.522794 +step:8611 train loss:3.453118 +step:8612 train loss:3.532647 +step:8613 train loss:3.466202 +step:8614 train loss:3.527118 +step:8615 train loss:3.570756 +step:8616 train loss:3.451603 +step:8617 train loss:3.519357 +step:8618 train loss:3.498303 +step:8619 train loss:3.451300 +step:8620 train loss:3.491993 
+step:8621 train loss:3.524226 +step:8622 train loss:3.481996 +step:8623 train loss:3.493719 +step:8624 train loss:3.568012 +step:8625 train loss:3.491407 +step:8626 train loss:3.501444 +step:8627 train loss:3.493373 +step:8628 train loss:3.528169 +step:8629 train loss:3.435445 +step:8630 train loss:3.535455 +step:8631 train loss:3.480352 +step:8632 train loss:3.534755 +step:8633 train loss:3.482339 +step:8634 train loss:3.714111 +step:8635 train loss:3.508286 +step:8636 train loss:3.553091 +step:8637 train loss:3.478223 +step:8638 train loss:3.478090 +step:8639 train loss:3.535944 +step:8640 train loss:3.449305 +step:8641 train loss:3.548796 +step:8642 train loss:3.500835 +step:8643 train loss:3.609342 +step:8644 train loss:3.454298 +step:8645 train loss:3.525405 +step:8646 train loss:3.488589 +step:8647 train loss:3.510234 +step:8648 train loss:3.460643 +step:8649 train loss:3.544084 +step:8650 train loss:3.498883 +step:8651 train loss:3.509389 +step:8652 train loss:3.480386 +step:8653 train loss:3.513078 +step:8654 train loss:3.556000 +step:8655 train loss:3.487211 +step:8656 train loss:3.528093 +step:8657 train loss:3.531101 +step:8658 train loss:3.501702 +step:8659 train loss:3.494170 +step:8660 train loss:3.442993 +step:8661 train loss:3.498265 +step:8662 train loss:3.440001 +step:8663 train loss:3.513214 +step:8664 train loss:3.429003 +step:8665 train loss:3.449324 +step:8666 train loss:3.526798 +step:8667 train loss:3.420515 +step:8668 train loss:3.529639 +step:8669 train loss:3.565469 +step:8670 train loss:3.466184 +step:8671 train loss:3.463048 +step:8672 train loss:3.680872 +step:8673 train loss:3.445767 +step:8674 train loss:3.515822 +step:8675 train loss:3.557729 +step:8676 train loss:3.501162 +step:8677 train loss:3.524130 +step:8678 train loss:3.471667 +step:8679 train loss:3.531263 +step:8680 train loss:3.509675 +step:8681 train loss:3.511358 +step:8682 train loss:3.466681 +step:8683 train loss:3.484901 +step:8684 train loss:3.557617 +step:8685 train loss:3.502850 +step:8686 train loss:3.493597 +step:8687 train loss:3.448105 +step:8688 train loss:3.466869 +step:8689 train loss:3.536778 +step:8690 train loss:3.472284 +step:8691 train loss:3.550244 +step:8692 train loss:3.440684 +step:8693 train loss:3.528837 +step:8694 train loss:3.528830 +step:8695 train loss:3.515090 +step:8696 train loss:3.539632 +step:8697 train loss:3.492933 +step:8698 train loss:3.532286 +step:8699 train loss:3.483339 +step:8700 train loss:3.508950 +step:8701 train loss:3.471119 +step:8702 train loss:3.455458 +step:8703 train loss:3.472023 +step:8704 train loss:3.424732 +step:8705 train loss:3.508013 +step:8706 train loss:3.527391 +step:8707 train loss:3.523777 +step:8708 train loss:3.468654 +step:8709 train loss:3.532187 +step:8710 train loss:3.459224 +step:8711 train loss:3.514019 +step:8712 train loss:3.422390 +step:8713 train loss:3.495748 +step:8714 train loss:3.604085 +step:8715 train loss:3.461716 +step:8716 train loss:3.515069 +step:8717 train loss:3.485289 +step:8718 train loss:3.526090 +step:8719 train loss:3.491641 +step:8720 train loss:3.603657 +step:8721 train loss:3.495584 +step:8722 train loss:3.586196 +step:8723 train loss:3.458467 +step:8724 train loss:3.469107 +step:8725 train loss:3.498985 +step:8726 train loss:3.453070 +step:8727 train loss:3.531772 +step:8728 train loss:3.489681 +step:8729 train loss:3.493745 +step:8730 train loss:3.471299 +step:8731 train loss:3.474932 +step:8732 train loss:3.575536 +step:8733 train loss:3.498837 +step:8734 train loss:3.537808 +step:8735 train 
loss:3.605632 +step:8736 train loss:3.465183 +step:8737 train loss:3.491524 +step:8738 train loss:3.470258 +step:8739 train loss:3.531229 +step:8740 train loss:3.453110 +step:8741 train loss:3.508466 +step:8742 train loss:3.463763 +step:8743 train loss:3.503161 +step:8744 train loss:3.524854 +step:8745 train loss:3.565835 +step:8746 train loss:3.462432 +step:8747 train loss:3.565674 +step:8748 train loss:3.475499 +step:8749 train loss:3.515334 +step:8750 validation loss:3.432883 +step:8750 train loss:3.524572 +step:8751 train loss:3.563759 +step:8752 train loss:3.421043 +step:8753 train loss:3.470404 +step:8754 train loss:3.522089 +step:8755 train loss:3.502658 +step:8756 train loss:3.548023 +step:8757 train loss:3.460258 +step:8758 train loss:3.613701 +step:8759 train loss:3.462701 +step:8760 train loss:3.494792 +step:8761 train loss:3.572786 +step:8762 train loss:3.468295 +step:8763 train loss:3.439707 +step:8764 train loss:3.514305 +step:8765 train loss:3.581369 +step:8766 train loss:3.513143 +step:8767 train loss:3.472084 +step:8768 train loss:3.512235 +step:8769 train loss:3.484999 +step:8770 train loss:3.530099 +step:8771 train loss:3.502611 +step:8772 train loss:3.519761 +step:8773 train loss:3.481941 +step:8774 train loss:3.514556 +step:8775 train loss:3.516163 +step:8776 train loss:3.458556 +step:8777 train loss:3.495213 +step:8778 train loss:3.505748 +step:8779 train loss:3.525941 +step:8780 train loss:3.490477 +step:8781 train loss:3.494851 +step:8782 train loss:3.516440 +step:8783 train loss:3.496329 +step:8784 train loss:3.523957 +step:8785 train loss:3.508685 +step:8786 train loss:3.583780 +step:8787 train loss:3.524174 +step:8788 train loss:3.428944 +step:8789 train loss:3.527881 +step:8790 train loss:3.455996 +step:8791 train loss:3.506414 +step:8792 train loss:3.444802 +step:8793 train loss:3.533574 +step:8794 train loss:3.457489 +step:8795 train loss:3.527000 +step:8796 train loss:3.670415 +step:8797 train loss:3.416897 +step:8798 train loss:3.575608 +step:8799 train loss:3.491595 +step:8800 train loss:3.485658 +step:8801 train loss:3.505713 +step:8802 train loss:3.563953 +step:8803 train loss:3.522312 +step:8804 train loss:3.505497 +step:8805 train loss:3.522582 +step:8806 train loss:3.491405 +step:8807 train loss:3.484437 +step:8808 train loss:3.439500 +step:8809 train loss:3.563254 +step:8810 train loss:3.467021 +step:8811 train loss:3.456083 +step:8812 train loss:3.500587 +step:8813 train loss:3.409073 +step:8814 train loss:3.597662 +step:8815 train loss:3.444614 +step:8816 train loss:3.562692 +step:8817 train loss:3.497769 +step:8818 train loss:3.430376 +step:8819 train loss:3.548572 +step:8820 train loss:3.477850 +step:8821 train loss:3.503560 +step:8822 train loss:3.485096 +step:8823 train loss:3.498899 +step:8824 train loss:3.560103 +step:8825 train loss:3.537174 +step:8826 train loss:3.508284 +step:8827 train loss:3.468320 +step:8828 train loss:3.508692 +step:8829 train loss:3.486907 +step:8830 train loss:3.465663 +step:8831 train loss:3.542198 +step:8832 train loss:3.479531 +step:8833 train loss:3.511608 +step:8834 train loss:3.478888 +step:8835 train loss:3.414371 +step:8836 train loss:3.544963 +step:8837 train loss:3.445410 +step:8838 train loss:3.490025 +step:8839 train loss:3.475436 +step:8840 train loss:3.477772 +step:8841 train loss:3.492632 +step:8842 train loss:3.500832 +step:8843 train loss:3.514962 +step:8844 train loss:3.479440 +step:8845 train loss:3.500955 +step:8846 train loss:3.468613 +step:8847 train loss:3.505866 +step:8848 train loss:3.551672 
+step:8849 train loss:3.531225 +step:8850 train loss:3.525412 +step:8851 train loss:3.408880 +step:8852 train loss:3.511669 +step:8853 train loss:3.492159 +step:8854 train loss:3.462440 +step:8855 train loss:3.531281 +step:8856 train loss:3.523386 +step:8857 train loss:3.591444 +step:8858 train loss:3.457223 +step:8859 train loss:3.528054 +step:8860 train loss:3.488844 +step:8861 train loss:3.468422 +step:8862 train loss:3.470440 +step:8863 train loss:3.453745 +step:8864 train loss:3.522144 +step:8865 train loss:3.513986 +step:8866 train loss:3.395984 +step:8867 train loss:3.500237 +step:8868 train loss:3.527516 +step:8869 train loss:3.610627 +step:8870 train loss:3.491228 +step:8871 train loss:3.512283 +step:8872 train loss:3.498993 +step:8873 train loss:3.499100 +step:8874 train loss:3.551157 +step:8875 train loss:3.485840 +step:8876 train loss:3.524695 +step:8877 train loss:3.506976 +step:8878 train loss:3.554152 +step:8879 train loss:3.516340 +step:8880 train loss:3.464374 +step:8881 train loss:3.427579 +step:8882 train loss:3.499194 +step:8883 train loss:3.485357 +step:8884 train loss:3.574635 +step:8885 train loss:3.509926 +step:8886 train loss:3.513112 +step:8887 train loss:3.537123 +step:8888 train loss:3.499141 +step:8889 train loss:3.502400 +step:8890 train loss:3.493777 +step:8891 train loss:3.465040 +step:8892 train loss:3.547462 +step:8893 train loss:3.488958 +step:8894 train loss:3.507015 +step:8895 train loss:3.536001 +step:8896 train loss:3.454032 +step:8897 train loss:3.544150 +step:8898 train loss:3.477200 +step:8899 train loss:3.499909 +step:8900 train loss:3.464594 +step:8901 train loss:3.482376 +step:8902 train loss:3.521518 +step:8903 train loss:3.462209 +step:8904 train loss:3.511870 +step:8905 train loss:3.486989 +step:8906 train loss:3.477618 +step:8907 train loss:3.489951 +step:8908 train loss:3.554813 +step:8909 train loss:3.499552 +step:8910 train loss:3.460534 +step:8911 train loss:3.559519 +step:8912 train loss:3.455806 +step:8913 train loss:3.465687 +step:8914 train loss:3.562913 +step:8915 train loss:3.501857 +step:8916 train loss:3.531985 +step:8917 train loss:3.488892 +step:8918 train loss:3.492851 +step:8919 train loss:3.480773 +step:8920 train loss:3.505955 +step:8921 train loss:3.500398 +step:8922 train loss:3.482728 +step:8923 train loss:3.668328 +step:8924 train loss:3.561624 +step:8925 train loss:3.493335 +step:8926 train loss:3.501929 +step:8927 train loss:3.531738 +step:8928 train loss:3.484554 +step:8929 train loss:3.480250 +step:8930 train loss:3.537556 +step:8931 train loss:3.447996 +step:8932 train loss:3.551425 +step:8933 train loss:3.458942 +step:8934 train loss:3.496619 +step:8935 train loss:3.509921 +step:8936 train loss:3.545884 +step:8937 train loss:3.541454 +step:8938 train loss:3.484456 +step:8939 train loss:3.551332 +step:8940 train loss:3.506436 +step:8941 train loss:3.447689 +step:8942 train loss:3.527068 +step:8943 train loss:3.457680 +step:8944 train loss:3.507487 +step:8945 train loss:3.523542 +step:8946 train loss:3.369454 +step:8947 train loss:3.561320 +step:8948 train loss:3.409252 +step:8949 train loss:3.411740 +step:8950 train loss:3.458610 +step:8951 train loss:3.494883 +step:8952 train loss:3.514760 +step:8953 train loss:3.471642 +step:8954 train loss:3.575326 +step:8955 train loss:3.488885 +step:8956 train loss:3.517206 +step:8957 train loss:3.505999 +step:8958 train loss:3.481833 +step:8959 train loss:3.471928 +step:8960 train loss:3.441873 +step:8961 train loss:3.465884 +step:8962 train loss:3.519645 +step:8963 train 
loss:3.496724 +step:8964 train loss:3.479707 +step:8965 train loss:3.521002 +step:8966 train loss:3.481359 +step:8967 train loss:3.458219 +step:8968 train loss:3.444124 +step:8969 train loss:3.433203 +step:8970 train loss:3.513911 +step:8971 train loss:3.463780 +step:8972 train loss:3.664219 +step:8973 train loss:3.548145 +step:8974 train loss:3.507857 +step:8975 train loss:3.508548 +step:8976 train loss:3.473574 +step:8977 train loss:3.559311 +step:8978 train loss:3.543377 +step:8979 train loss:3.460346 +step:8980 train loss:3.553781 +step:8981 train loss:3.506653 +step:8982 train loss:3.485753 +step:8983 train loss:3.423945 +step:8984 train loss:3.547655 +step:8985 train loss:3.466826 +step:8986 train loss:3.501795 +step:8987 train loss:3.475392 +step:8988 train loss:3.525100 +step:8989 train loss:3.435691 +step:8990 train loss:3.575114 +step:8991 train loss:3.426800 +step:8992 train loss:3.485180 +step:8993 train loss:3.576150 +step:8994 train loss:3.478487 +step:8995 train loss:3.505623 +step:8996 train loss:3.475625 +step:8997 train loss:3.424658 +step:8998 train loss:3.429354 +step:8999 train loss:3.453257 +step:9000 validation loss:3.426526 total_sharp:3.7938e-03 L1_sharp:3.6824e-02 L2_sharp:1.2484e-02 L3_sharp:2.4434e-02 L4_sharp:1.4319e-02 L5_sharp:1.7852e-02 L6_sharp:2.0706e-02 L7_sharp:2.6631e-02 L8_sharp:2.8847e-02 L9_sharp:2.0261e-02 L10_sharp:1.3579e-02 L11_sharp:1.2694e-02 L12_sharp:2.1814e-02 total_fnorm:1.3459e+00 total_l1_linf:8.0769e+03 total_spectral:1.3459e+00 L1_fnorm:6.1021e-02 L2_fnorm:5.9296e-02 L3_fnorm:5.9227e-02 L4_fnorm:6.0276e-02 L5_fnorm:6.1089e-02 L6_fnorm:6.1263e-02 L7_fnorm:6.1359e-02 L8_fnorm:6.1338e-02 L9_fnorm:6.1381e-02 L10_fnorm:6.1480e-02 L11_fnorm:6.1370e-02 L12_fnorm:6.1218e-02 L1_l1linf:3.2627e-01 L2_l1linf:3.7742e-01 L3_l1linf:3.8905e-01 L4_l1linf:3.6100e-01 L5_l1linf:3.3482e-01 L6_l1linf:3.2258e-01 L7_l1linf:3.1652e-01 L8_l1linf:3.2222e-01 L9_l1linf:3.3456e-01 L10_l1linf:3.5846e-01 L11_l1linf:3.5833e-01 L12_l1linf:3.4363e-01 L1_spectral:7.3724e-03 L2_spectral:8.4687e-03 L3_spectral:8.7552e-03 L4_spectral:8.1471e-03 L5_spectral:7.5767e-03 L6_spectral:7.2615e-03 L7_spectral:7.1318e-03 L8_spectral:7.3174e-03 L9_spectral:7.5083e-03 L10_spectral:8.0870e-03 L11_spectral:8.1088e-03 L12_spectral:7.8635e-03 ip_v_neg_g:2.4224e-03 cos_v_neg_g:5.5531e-04 v_norm:1.3459e+00 g_norm:3.2412e+00 hv_norm:6.8302e-01 cos_v_hv:7.4755e-03 hg_norm:4.0014e+02 cos_g_hg:4.0594e-01 v_par:3.5579e-05 v_perp:1.3459e+00 L1_cos_v_neg_g:1.2673e-03 L1_v_norm:6.1021e-02 L2_cos_v_neg_g:1.6213e-03 L2_v_norm:5.9296e-02 L3_cos_v_neg_g:2.3630e-03 L3_v_norm:5.9227e-02 L4_cos_v_neg_g:2.1758e-03 L4_v_norm:6.0276e-02 L5_cos_v_neg_g:3.2551e-03 L5_v_norm:6.1089e-02 L6_cos_v_neg_g:2.8760e-03 L6_v_norm:6.1263e-02 L7_cos_v_neg_g:1.4645e-03 L7_v_norm:6.1359e-02 L8_cos_v_neg_g:2.7253e-03 L8_v_norm:6.1338e-02 L9_cos_v_neg_g:3.9499e-03 L9_v_norm:6.1381e-02 L10_cos_v_neg_g:3.5284e-03 L10_v_norm:6.1480e-02 L11_cos_v_neg_g:3.3235e-03 L11_v_norm:6.1370e-02 L12_cos_v_neg_g:5.9371e-03 L12_v_norm:6.1218e-02 +step:9000 train loss:3.539118 +step:9001 train loss:3.509215 +step:9002 train loss:3.513718 +step:9003 train loss:3.452842 +step:9004 train loss:3.454735 +step:9005 train loss:3.467068 +step:9006 train loss:3.469773 +step:9007 train loss:3.488000 +step:9008 train loss:3.445843 +step:9009 train loss:3.441681 +step:9010 train loss:3.477330 +step:9011 train loss:3.473365 +step:9012 train loss:3.585536 +step:9013 train loss:3.409738 +step:9014 train loss:3.483897 +step:9015 train loss:3.484175 +step:9016 
train loss:3.560654 +step:9017 train loss:3.502704 +step:9018 train loss:3.423745 +step:9019 train loss:3.506378 +step:9020 train loss:3.518667 +step:9021 train loss:3.476532 +step:9022 train loss:3.489553 +step:9023 train loss:3.486767 +step:9024 train loss:3.505630 +step:9025 train loss:3.489922 +step:9026 train loss:3.447477 +step:9027 train loss:3.493584 +step:9028 train loss:3.513607 +step:9029 train loss:3.532449 +step:9030 train loss:3.529317 +step:9031 train loss:3.495377 +step:9032 train loss:3.505194 +step:9033 train loss:3.488040 +step:9034 train loss:3.501462 +step:9035 train loss:3.501809 +step:9036 train loss:3.450536 +step:9037 train loss:3.445874 +step:9038 train loss:3.568236 +step:9039 train loss:3.472245 +step:9040 train loss:3.488252 +step:9041 train loss:3.537651 +step:9042 train loss:3.391836 +step:9043 train loss:3.486504 +step:9044 train loss:3.506937 +step:9045 train loss:3.453233 +step:9046 train loss:3.495337 +step:9047 train loss:3.489883 +step:9048 train loss:3.468716 +step:9049 train loss:3.502437 +step:9050 train loss:3.458806 +step:9051 train loss:3.497097 +step:9052 train loss:3.426784 +step:9053 train loss:3.551421 +step:9054 train loss:3.563603 +step:9055 train loss:3.487143 +step:9056 train loss:3.549405 +step:9057 train loss:3.402731 +step:9058 train loss:3.487899 +step:9059 train loss:3.564199 +step:9060 train loss:3.497579 +step:9061 train loss:3.521850 +step:9062 train loss:3.451705 +step:9063 train loss:3.586461 +step:9064 train loss:3.471315 +step:9065 train loss:3.482977 +step:9066 train loss:3.501293 +step:9067 train loss:3.465541 +step:9068 train loss:3.535232 +step:9069 train loss:3.495209 +step:9070 train loss:3.543750 +step:9071 train loss:3.479224 +step:9072 train loss:3.499109 +step:9073 train loss:3.462265 +step:9074 train loss:3.542512 +step:9075 train loss:3.488907 +step:9076 train loss:3.454905 +step:9077 train loss:3.532645 +step:9078 train loss:3.468744 +step:9079 train loss:3.516302 +step:9080 train loss:3.449219 +step:9081 train loss:3.486130 +step:9082 train loss:3.512189 +step:9083 train loss:3.542183 +step:9084 train loss:3.434556 +step:9085 train loss:3.504747 +step:9086 train loss:3.488856 +step:9087 train loss:3.436941 +step:9088 train loss:3.497123 +step:9089 train loss:3.513181 +step:9090 train loss:3.445271 +step:9091 train loss:3.546904 +step:9092 train loss:3.471910 +step:9093 train loss:3.471301 +step:9094 train loss:3.597461 +step:9095 train loss:3.465640 +step:9096 train loss:3.480044 +step:9097 train loss:3.465016 +step:9098 train loss:3.456228 +step:9099 train loss:3.581535 +step:9100 train loss:3.613379 +step:9101 train loss:3.533911 +step:9102 train loss:3.475793 +step:9103 train loss:3.482415 +step:9104 train loss:3.568425 +step:9105 train loss:3.431676 +step:9106 train loss:3.555285 +step:9107 train loss:3.491384 +step:9108 train loss:3.471128 +step:9109 train loss:3.497456 +step:9110 train loss:3.499594 +step:9111 train loss:3.481509 +step:9112 train loss:3.483890 +step:9113 train loss:3.514499 +step:9114 train loss:3.461907 +step:9115 train loss:3.487172 +step:9116 train loss:3.514401 +step:9117 train loss:3.521974 +step:9118 train loss:3.491802 +step:9119 train loss:3.413340 +step:9120 train loss:3.511481 +step:9121 train loss:3.543324 +step:9122 train loss:3.490438 +step:9123 train loss:3.508448 +step:9124 train loss:3.539924 +step:9125 train loss:3.488586 +step:9126 train loss:3.467110 +step:9127 train loss:3.499158 +step:9128 train loss:3.554152 +step:9129 train loss:3.508767 +step:9130 train loss:3.524056 
+step:9131 train loss:3.503318 +step:9132 train loss:3.511543 +step:9133 train loss:3.500094 +step:9134 train loss:3.472870 +step:9135 train loss:3.500674 +step:9136 train loss:3.498694 +step:9137 train loss:3.551641 +step:9138 train loss:3.471103 +step:9139 train loss:3.546380 +step:9140 train loss:3.469128 +step:9141 train loss:3.443521 +step:9142 train loss:3.624418 +step:9143 train loss:3.451595 +step:9144 train loss:3.545578 +step:9145 train loss:3.551856 +step:9146 train loss:3.466743 +step:9147 train loss:3.541987 +step:9148 train loss:3.562369 +step:9149 train loss:3.469700 +step:9150 train loss:3.491332 +step:9151 train loss:3.551753 +step:9152 train loss:3.510093 +step:9153 train loss:3.475363 +step:9154 train loss:3.491408 +step:9155 train loss:3.456346 +step:9156 train loss:3.458677 +step:9157 train loss:3.478129 +step:9158 train loss:3.459168 +step:9159 train loss:3.543895 +step:9160 train loss:3.429524 +step:9161 train loss:3.457424 +step:9162 train loss:3.545202 +step:9163 train loss:3.489712 +step:9164 train loss:3.461097 +step:9165 train loss:3.455781 +step:9166 train loss:3.513232 +step:9167 train loss:3.455712 +step:9168 train loss:3.497766 +step:9169 train loss:3.435269 +step:9170 train loss:3.457243 +step:9171 train loss:3.521769 +step:9172 train loss:3.444945 +step:9173 train loss:3.566806 +step:9174 train loss:3.498022 +step:9175 train loss:3.472941 +step:9176 train loss:3.454547 +step:9177 train loss:3.503455 +step:9178 train loss:3.445004 +step:9179 train loss:3.408680 +step:9180 train loss:3.500906 +step:9181 train loss:3.509517 +step:9182 train loss:3.480896 +step:9183 train loss:3.488278 +step:9184 train loss:3.482218 +step:9185 train loss:3.498759 +step:9186 train loss:3.458612 +step:9187 train loss:3.534358 +step:9188 train loss:3.570810 +step:9189 train loss:3.491292 +step:9190 train loss:3.497294 +step:9191 train loss:3.490452 +step:9192 train loss:3.503144 +step:9193 train loss:3.501112 +step:9194 train loss:3.440145 +step:9195 train loss:3.429711 +step:9196 train loss:3.482276 +step:9197 train loss:3.440094 +step:9198 train loss:3.512314 +step:9199 train loss:3.460089 +step:9200 train loss:3.487978 +step:9201 train loss:3.520313 +step:9202 train loss:3.509921 +step:9203 train loss:3.464460 +step:9204 train loss:3.660353 +step:9205 train loss:3.578161 +step:9206 train loss:3.491904 +step:9207 train loss:3.543701 +step:9208 train loss:3.518336 +step:9209 train loss:3.542058 +step:9210 train loss:3.433341 +step:9211 train loss:3.459416 +step:9212 train loss:3.462722 +step:9213 train loss:3.525411 +step:9214 train loss:3.464522 +step:9215 train loss:3.532538 +step:9216 train loss:3.493885 +step:9217 train loss:3.436326 +step:9218 train loss:3.525799 +step:9219 train loss:3.486193 +step:9220 train loss:3.531226 +step:9221 train loss:3.583767 +step:9222 train loss:3.526725 +step:9223 train loss:3.697543 +step:9224 train loss:3.532944 +step:9225 train loss:3.464985 +step:9226 train loss:3.481660 +step:9227 train loss:3.499196 +step:9228 train loss:3.500107 +step:9229 train loss:3.459332 +step:9230 train loss:3.519855 +step:9231 train loss:3.406916 +step:9232 train loss:3.463849 +step:9233 train loss:3.487030 +step:9234 train loss:3.541540 +step:9235 train loss:3.545667 +step:9236 train loss:3.451580 +step:9237 train loss:3.515467 +step:9238 train loss:3.489063 +step:9239 train loss:3.478613 +step:9240 train loss:3.449266 +step:9241 train loss:3.480253 +step:9242 train loss:3.489655 +step:9243 train loss:3.485610 +step:9244 train loss:3.461488 +step:9245 train 
loss:3.469506 +step:9246 train loss:3.466749 +step:9247 train loss:3.478815 +step:9248 train loss:3.487716 +step:9249 train loss:3.486377 +step:9250 validation loss:3.425785 +step:9250 train loss:3.527002 +step:9251 train loss:3.467244 +step:9252 train loss:3.535886 +step:9253 train loss:3.532167 +step:9254 train loss:3.459573 +step:9255 train loss:3.578126 +step:9256 train loss:3.457647 +step:9257 train loss:3.398060 +step:9258 train loss:3.478343 +step:9259 train loss:3.482616 +step:9260 train loss:3.578175 +step:9261 train loss:3.457695 +step:9262 train loss:3.529788 +step:9263 train loss:3.431863 +step:9264 train loss:3.578049 +step:9265 train loss:3.604673 +step:9266 train loss:3.534125 +step:9267 train loss:3.481835 +step:9268 train loss:3.474035 +step:9269 train loss:3.500029 +step:9270 train loss:3.422933 +step:9271 train loss:3.536466 +step:9272 train loss:3.474631 +step:9273 train loss:3.497104 +step:9274 train loss:3.497376 +step:9275 train loss:3.497754 +step:9276 train loss:3.523429 +step:9277 train loss:3.498464 +step:9278 train loss:3.510578 +step:9279 train loss:3.505773 +step:9280 train loss:3.502617 +step:9281 train loss:3.474734 +step:9282 train loss:3.597558 +step:9283 train loss:3.482430 +step:9284 train loss:3.446669 +step:9285 train loss:3.467060 +step:9286 train loss:3.520459 +step:9287 train loss:3.493543 +step:9288 train loss:3.498677 +step:9289 train loss:3.466874 +step:9290 train loss:3.498335 +step:9291 train loss:3.473924 +step:9292 train loss:3.513253 +step:9293 train loss:3.570780 +step:9294 train loss:3.492582 +step:9295 train loss:3.477448 +step:9296 train loss:3.428272 +step:9297 train loss:3.498428 +step:9298 train loss:3.440550 +step:9299 train loss:3.420223 +step:9300 train loss:3.527156 +step:9301 train loss:3.555102 +step:9302 train loss:3.491696 +step:9303 train loss:3.540719 +step:9304 train loss:3.462086 +step:9305 train loss:3.455191 +step:9306 train loss:3.457084 +step:9307 train loss:3.457295 +step:9308 train loss:3.432120 +step:9309 train loss:3.419603 +step:9310 train loss:3.477861 +step:9311 train loss:3.537684 +step:9312 train loss:3.488250 +step:9313 train loss:3.434477 +step:9314 train loss:3.463837 +step:9315 train loss:3.498479 +step:9316 train loss:3.481886 +step:9317 train loss:3.455716 +step:9318 train loss:3.543363 +step:9319 train loss:3.454054 +step:9320 train loss:3.473812 +step:9321 train loss:3.489181 +step:9322 train loss:3.495356 +step:9323 train loss:3.570633 +step:9324 train loss:3.513729 +step:9325 train loss:3.452249 +step:9326 train loss:3.528611 +step:9327 train loss:3.525318 +step:9328 train loss:3.528524 +step:9329 train loss:3.413154 +step:9330 train loss:3.581819 +step:9331 train loss:3.512458 +step:9332 train loss:3.534749 +step:9333 train loss:3.553079 +step:9334 train loss:3.488707 +step:9335 train loss:3.585061 +step:9336 train loss:3.543178 +step:9337 train loss:3.496570 +step:9338 train loss:3.552055 +step:9339 train loss:3.529317 +step:9340 train loss:3.488929 +step:9341 train loss:3.578154 +step:9342 train loss:3.474725 +step:9343 train loss:3.472276 +step:9344 train loss:3.471514 +step:9345 train loss:3.609927 +step:9346 train loss:3.451033 +step:9347 train loss:3.465680 +step:9348 train loss:3.491696 +step:9349 train loss:3.437230 +step:9350 train loss:3.512925 +step:9351 train loss:3.487711 +step:9352 train loss:3.473995 +step:9353 train loss:3.505604 +step:9354 train loss:3.473565 +step:9355 train loss:3.470145 +step:9356 train loss:3.514279 +step:9357 train loss:3.467070 +step:9358 train loss:3.502015 
+step:9359 train loss:3.442571 +step:9360 train loss:3.461016 +step:9361 train loss:3.458449 +step:9362 train loss:3.448052 +step:9363 train loss:3.512426 +step:9364 train loss:3.490283 +step:9365 train loss:3.495428 +step:9366 train loss:3.489080 +step:9367 train loss:3.503692 +step:9368 train loss:3.476781 +step:9369 train loss:3.478606 +step:9370 train loss:3.483537 +step:9371 train loss:3.505641 +step:9372 train loss:3.471117 +step:9373 train loss:3.453737 +step:9374 train loss:3.492979 +step:9375 train loss:3.505553 +step:9376 train loss:3.443214 +step:9377 train loss:3.516878 +step:9378 train loss:3.516922 +step:9379 train loss:3.544271 +step:9380 train loss:3.475811 +step:9381 train loss:3.482828 +step:9382 train loss:3.461283 +step:9383 train loss:3.455578 +step:9384 train loss:3.427697 +step:9385 train loss:3.501667 +step:9386 train loss:3.525980 +step:9387 train loss:3.502751 +step:9388 train loss:3.442990 +step:9389 train loss:3.458730 +step:9390 train loss:3.499676 +step:9391 train loss:3.506195 +step:9392 train loss:3.468642 +step:9393 train loss:3.460702 +step:9394 train loss:3.488238 +step:9395 train loss:3.484880 +step:9396 train loss:3.630513 +step:9397 train loss:3.519420 +step:9398 train loss:3.540363 +step:9399 train loss:3.493237 +step:9400 train loss:3.494791 +step:9401 train loss:3.486487 +step:9402 train loss:3.489725 +step:9403 train loss:3.423627 +step:9404 train loss:3.496975 +step:9405 train loss:3.457578 +step:9406 train loss:3.509645 +step:9407 train loss:3.453926 +step:9408 train loss:3.390754 +step:9409 train loss:3.455154 +step:9410 train loss:3.538334 +step:9411 train loss:3.499122 +step:9412 train loss:3.527105 +step:9413 train loss:3.544784 +step:9414 train loss:3.482647 +step:9415 train loss:3.475341 +step:9416 train loss:3.490425 +step:9417 train loss:3.444403 +step:9418 train loss:3.471441 +step:9419 train loss:3.440599 +step:9420 train loss:3.458508 +step:9421 train loss:3.507970 +step:9422 train loss:3.459190 +step:9423 train loss:3.524496 +step:9424 train loss:3.463433 +step:9425 train loss:3.505551 +step:9426 train loss:3.508537 +step:9427 train loss:3.482597 +step:9428 train loss:3.588644 +step:9429 train loss:3.477165 +step:9430 train loss:3.435940 +step:9431 train loss:3.523520 +step:9432 train loss:3.489392 +step:9433 train loss:3.527277 +step:9434 train loss:3.479475 +step:9435 train loss:3.505850 +step:9436 train loss:3.475191 +step:9437 train loss:3.488944 +step:9438 train loss:3.480240 +step:9439 train loss:3.480865 +step:9440 train loss:3.471888 +step:9441 train loss:3.483208 +step:9442 train loss:3.422191 +step:9443 train loss:3.477493 +step:9444 train loss:3.545446 +step:9445 train loss:3.476273 +step:9446 train loss:3.450562 +step:9447 train loss:3.519427 +step:9448 train loss:3.454105 +step:9449 train loss:3.477762 +step:9450 train loss:3.519049 +step:9451 train loss:3.433692 +step:9452 train loss:3.488897 +step:9453 train loss:3.466528 +step:9454 train loss:3.527584 +step:9455 train loss:3.509752 +step:9456 train loss:3.435250 +step:9457 train loss:3.481893 +step:9458 train loss:3.468395 +step:9459 train loss:3.461068 +step:9460 train loss:3.502436 +step:9461 train loss:3.530791 +step:9462 train loss:3.479853 +step:9463 train loss:3.509817 +step:9464 train loss:3.463405 +step:9465 train loss:3.554153 +step:9466 train loss:3.501417 +step:9467 train loss:3.526639 +step:9468 train loss:3.472814 +step:9469 train loss:3.459989 +step:9470 train loss:3.459636 +step:9471 train loss:3.499072 +step:9472 train loss:3.524464 +step:9473 train 
loss:3.512184 +step:9474 train loss:3.456369 +step:9475 train loss:3.449059 +step:9476 train loss:3.668417 +step:9477 train loss:3.540457 +step:9478 train loss:3.516162 +step:9479 train loss:3.615351 +step:9480 train loss:3.463054 +step:9481 train loss:3.496249 +step:9482 train loss:3.522880 +step:9483 train loss:3.477957 +step:9484 train loss:3.509300 +step:9485 train loss:3.429073 +step:9486 train loss:3.468802 +step:9487 train loss:3.500062 +step:9488 train loss:3.453148 +step:9489 train loss:3.498706 +step:9490 train loss:3.465879 +step:9491 train loss:3.510382 +step:9492 train loss:3.529099 +step:9493 train loss:3.497724 +step:9494 train loss:3.510820 +step:9495 train loss:3.462173 +step:9496 train loss:3.522936 +step:9497 train loss:3.538916 +step:9498 train loss:3.486411 +step:9499 train loss:3.536051 +step:9500 validation loss:3.424275 total_sharp:4.2792e-03 L1_sharp:1.0272e-01 L2_sharp:2.9685e-02 L3_sharp:3.2933e-02 L4_sharp:1.7790e-02 L5_sharp:1.8870e-02 L6_sharp:2.2787e-02 L7_sharp:2.5729e-02 L8_sharp:2.6790e-02 L9_sharp:2.0806e-02 L10_sharp:1.4428e-02 L11_sharp:1.4873e-02 L12_sharp:2.7058e-02 total_fnorm:1.3413e+00 total_l1_linf:8.0454e+03 total_spectral:1.3413e+00 L1_fnorm:6.0637e-02 L2_fnorm:5.8534e-02 L3_fnorm:5.8821e-02 L4_fnorm:6.0263e-02 L5_fnorm:6.1024e-02 L6_fnorm:6.1320e-02 L7_fnorm:6.1358e-02 L8_fnorm:6.1140e-02 L9_fnorm:6.1194e-02 L10_fnorm:6.1312e-02 L11_fnorm:6.1430e-02 L12_fnorm:6.1383e-02 L1_l1linf:3.2124e-01 L2_l1linf:3.8109e-01 L3_l1linf:3.8447e-01 L4_l1linf:3.6735e-01 L5_l1linf:3.4470e-01 L6_l1linf:3.3463e-01 L7_l1linf:3.1392e-01 L8_l1linf:2.9908e-01 L9_l1linf:3.3013e-01 L10_l1linf:3.5441e-01 L11_l1linf:3.7308e-01 L12_l1linf:3.9102e-01 L1_spectral:7.2604e-03 L2_spectral:8.5844e-03 L3_spectral:8.6030e-03 L4_spectral:8.2893e-03 L5_spectral:7.7489e-03 L6_spectral:7.5856e-03 L7_spectral:7.1227e-03 L8_spectral:6.7181e-03 L9_spectral:7.3589e-03 L10_spectral:7.9640e-03 L11_spectral:8.4634e-03 L12_spectral:8.7725e-03 ip_v_neg_g:4.5311e-03 cos_v_neg_g:1.1270e-03 v_norm:1.3413e+00 g_norm:2.9975e+00 hv_norm:6.9643e-01 cos_v_hv:8.2415e-03 hg_norm:1.2901e+02 cos_g_hg:4.5304e-01 v_par:3.3129e-05 v_perp:1.3413e+00 L1_cos_v_neg_g:1.6579e-02 L1_v_norm:6.0637e-02 L2_cos_v_neg_g:1.0873e-02 L2_v_norm:5.8534e-02 L3_cos_v_neg_g:7.5148e-03 L3_v_norm:5.8821e-02 L4_cos_v_neg_g:5.8421e-03 L4_v_norm:6.0263e-02 L5_cos_v_neg_g:4.8374e-03 L5_v_norm:6.1024e-02 L6_cos_v_neg_g:5.7893e-03 L6_v_norm:6.1320e-02 L7_cos_v_neg_g:5.9995e-03 L7_v_norm:6.1358e-02 L8_cos_v_neg_g:6.8014e-03 L8_v_norm:6.1140e-02 L9_cos_v_neg_g:5.9996e-03 L9_v_norm:6.1194e-02 L10_cos_v_neg_g:5.1937e-03 L10_v_norm:6.1312e-02 L11_cos_v_neg_g:4.3223e-03 L11_v_norm:6.1430e-02 L12_cos_v_neg_g:2.7028e-03 L12_v_norm:6.1383e-02 +step:9500 train loss:3.525933 +step:9501 train loss:3.505327 +step:9502 train loss:3.476253 +step:9503 train loss:3.491686 +step:9504 train loss:3.446152 +step:9505 train loss:3.471837 +step:9506 train loss:3.487233 +step:9507 train loss:3.473473 +step:9508 train loss:3.667682 +step:9509 train loss:3.484029 +step:9510 train loss:3.471444 +step:9511 train loss:3.496451 +step:9512 train loss:3.528415 +step:9513 train loss:3.519717 +step:9514 train loss:3.488121 +step:9515 train loss:3.386954 +step:9516 train loss:3.489149 +step:9517 train loss:3.524493 +step:9518 train loss:3.499265 +step:9519 train loss:3.508820 +step:9520 train loss:3.398013 +step:9521 train loss:3.389446 +step:9522 train loss:3.510672 +step:9523 train loss:3.506047 +step:9524 train loss:3.508418 +step:9525 train loss:3.552792 +step:9526 
train loss:3.567847 +step:9527 train loss:3.526228 +step:9528 train loss:3.457234 +step:9529 train loss:3.499238 +step:9530 train loss:3.544826 +step:9531 train loss:3.453903 +step:9532 train loss:3.503808 +step:9533 train loss:3.475878 +step:9534 train loss:3.556874 +step:9535 train loss:3.478353 +step:9536 train loss:3.458087 +step:9537 train loss:3.406149 +step:9538 train loss:3.421480 +step:9539 train loss:3.494785 +step:9540 train loss:3.413959 +step:9541 train loss:3.473659 +step:9542 train loss:3.600423 +step:9543 train loss:3.497645 +step:9544 train loss:3.537379 +step:9545 train loss:3.471623 +step:9546 train loss:3.497945 +step:9547 train loss:3.540532 +step:9548 train loss:3.480634 +step:9549 train loss:3.447554 +step:9550 train loss:3.479499 +step:9551 train loss:3.474079 +step:9552 train loss:3.498622 +step:9553 train loss:3.490754 +step:9554 train loss:3.536857 +step:9555 train loss:3.541464 +step:9556 train loss:3.450758 +step:9557 train loss:3.470825 +step:9558 train loss:3.534626 +step:9559 train loss:3.540421 +step:9560 train loss:3.452744 +step:9561 train loss:3.480929 +step:9562 train loss:3.516978 +step:9563 train loss:3.465910 +step:9564 train loss:3.501075 +step:9565 train loss:3.479244 +step:9566 train loss:3.452427 +step:9567 train loss:3.518283 +step:9568 train loss:3.490040 +step:9569 train loss:3.531143 +step:9570 train loss:3.425350 +step:9571 train loss:3.498140 +step:9572 train loss:3.442748 +step:9573 train loss:3.472682 +step:9574 train loss:3.449555 +step:9575 train loss:3.521670 +step:9576 train loss:3.411584 +step:9577 train loss:3.462157 +step:9578 train loss:3.467267 +step:9579 train loss:3.466025 +step:9580 train loss:3.529599 +step:9581 train loss:3.521892 +step:9582 train loss:3.482836 +step:9583 train loss:3.517653 +step:9584 train loss:3.453408 +step:9585 train loss:3.473516 +step:9586 train loss:3.525186 +step:9587 train loss:3.492161 +step:9588 train loss:3.480055 +step:9589 train loss:3.537771 +step:9590 train loss:3.501605 +step:9591 train loss:3.468311 +step:9592 train loss:3.489637 +step:9593 train loss:3.488903 +step:9594 train loss:3.505407 +step:9595 train loss:3.483051 +step:9596 train loss:3.568832 +step:9597 train loss:3.474682 +step:9598 train loss:3.436471 +step:9599 train loss:3.443635 +step:9600 train loss:3.528105 +step:9601 train loss:3.447371 +step:9602 train loss:3.531681 +step:9603 train loss:3.523143 +step:9604 train loss:3.406261 +step:9605 train loss:3.495189 +step:9606 train loss:3.548940 +step:9607 train loss:3.470232 +step:9608 train loss:3.474766 +step:9609 train loss:3.485044 +step:9610 train loss:3.528772 +step:9611 train loss:3.462183 +step:9612 train loss:3.471130 +step:9613 train loss:3.508170 +step:9614 train loss:3.478384 +step:9615 train loss:3.666317 +step:9616 train loss:3.480636 +step:9617 train loss:3.472615 +step:9618 train loss:3.420316 +step:9619 train loss:3.483800 +step:9620 train loss:3.539741 +step:9621 train loss:3.462003 +step:9622 train loss:3.476329 +step:9623 train loss:3.516824 +step:9624 train loss:3.504309 +step:9625 train loss:3.516418 +step:9626 train loss:3.487862 +step:9627 train loss:3.568704 +step:9628 train loss:3.534377 +step:9629 train loss:3.447098 +step:9630 train loss:3.506334 +step:9631 train loss:3.491858 +step:9632 train loss:3.460917 +step:9633 train loss:3.505918 +step:9634 train loss:3.571493 +step:9635 train loss:3.474430 +step:9636 train loss:3.421571 +step:9637 train loss:3.552401 +step:9638 train loss:3.437651 +step:9639 train loss:3.407363 +step:9640 train loss:3.529304 
+step:9641 train loss:3.500916 +step:9642 train loss:3.478760 +step:9643 train loss:3.482408 +step:9644 train loss:3.538077 +step:9645 train loss:3.464578 +step:9646 train loss:3.503052 +step:9647 train loss:3.513002 +step:9648 train loss:3.463122 +step:9649 train loss:3.434888 +step:9650 train loss:3.452568 +step:9651 train loss:3.541992 +step:9652 train loss:3.523575 +step:9653 train loss:3.466830 +step:9654 train loss:3.446994 +step:9655 train loss:3.443746 +step:9656 train loss:3.436611 +step:9657 train loss:3.465279 +step:9658 train loss:3.523623 +step:9659 train loss:3.630058 +step:9660 train loss:3.410949 +step:9661 train loss:3.430408 +step:9662 train loss:3.450569 +step:9663 train loss:3.493977 +step:9664 train loss:3.543034 +step:9665 train loss:3.386677 +step:9666 train loss:3.428968 +step:9667 train loss:3.565086 +step:9668 train loss:3.547978 +step:9669 train loss:3.564353 +step:9670 train loss:3.543072 +step:9671 train loss:3.542856 +step:9672 train loss:3.456023 +step:9673 train loss:3.478679 +step:9674 train loss:3.491352 +step:9675 train loss:3.489265 +step:9676 train loss:3.448258 +step:9677 train loss:3.453541 +step:9678 train loss:3.489559 +step:9679 train loss:3.481191 +step:9680 train loss:3.480609 +step:9681 train loss:3.464784 +step:9682 train loss:3.533092 +step:9683 train loss:3.507101 +step:9684 train loss:3.425519 +step:9685 train loss:3.510067 +step:9686 train loss:3.545264 +step:9687 train loss:3.449330 +step:9688 train loss:3.535372 +step:9689 train loss:3.636126 +step:9690 train loss:3.477579 +step:9691 train loss:3.466691 +step:9692 train loss:3.430122 +step:9693 train loss:3.426636 +step:9694 train loss:3.445278 +step:9695 train loss:3.551527 +step:9696 train loss:3.585417 +step:9697 train loss:3.493883 +step:9698 train loss:3.530030 +step:9699 train loss:3.492449 +step:9700 train loss:3.490260 +step:9701 train loss:3.541010 +step:9702 train loss:3.458502 +step:9703 train loss:3.479524 +step:9704 train loss:3.560984 +step:9705 train loss:3.460268 +step:9706 train loss:3.455032 +step:9707 train loss:3.502552 +step:9708 train loss:3.452231 +step:9709 train loss:3.476160 +step:9710 train loss:3.492927 +step:9711 train loss:3.466565 +step:9712 train loss:3.478840 +step:9713 train loss:3.526822 +step:9714 train loss:3.483369 +step:9715 train loss:3.505899 +step:9716 train loss:3.527357 +step:9717 train loss:3.446960 +step:9718 train loss:3.451241 +step:9719 train loss:3.535311 +step:9720 train loss:3.467657 +step:9721 train loss:3.456768 +step:9722 train loss:3.519578 +step:9723 train loss:3.469306 +step:9724 train loss:3.496168 +step:9725 train loss:3.549954 +step:9726 train loss:3.491083 +step:9727 train loss:3.467287 +step:9728 train loss:3.506308 +step:9729 train loss:3.534040 +step:9730 train loss:3.602741 +step:9731 train loss:3.523119 +step:9732 train loss:3.486622 +step:9733 train loss:3.527867 +step:9734 train loss:3.447225 +step:9735 train loss:3.553235 +step:9736 train loss:3.456348 +step:9737 train loss:3.515811 +step:9738 train loss:3.480853 +step:9739 train loss:3.553524 +step:9740 train loss:3.517484 +step:9741 train loss:3.458636 +step:9742 train loss:3.551444 +step:9743 train loss:3.426175 +step:9744 train loss:3.485327 +step:9745 train loss:3.444079 +step:9746 train loss:3.480317 +step:9747 train loss:3.471698 +step:9748 train loss:3.371351 +step:9749 train loss:3.467365 +step:9750 validation loss:3.417198 +step:9750 train loss:3.450788 +step:9751 train loss:3.589652 +step:9752 train loss:3.476084 +step:9753 train loss:3.434735 +step:9754 
train loss:3.466030 +step:9755 train loss:3.463134 +step:9756 train loss:3.462180 +step:9757 train loss:3.428195 +step:9758 train loss:3.423143 +step:9759 train loss:3.467863 +step:9760 train loss:3.414930 +step:9761 train loss:3.454392 +step:9762 train loss:3.451991 +step:9763 train loss:3.473789 +step:9764 train loss:3.460532 +step:9765 train loss:3.421336 +step:9766 train loss:3.510453 +step:9767 train loss:3.465223 +step:9768 train loss:3.479756 +step:9769 train loss:3.430353 +step:9770 train loss:3.431646 +step:9771 train loss:3.483442 +step:9772 train loss:3.495467 +step:9773 train loss:3.470375 +step:9774 train loss:3.441526 +step:9775 train loss:3.530898 +step:9776 train loss:3.529311 +step:9777 train loss:3.419058 +step:9778 train loss:3.426851 +step:9779 train loss:3.431958 +step:9780 train loss:3.428268 +step:9781 train loss:3.447744 +step:9782 train loss:3.524637 +step:9783 train loss:3.437787 +step:9784 train loss:3.463332 +step:9785 train loss:3.455632 +step:9786 train loss:3.491325 +step:9787 train loss:3.514002 +step:9788 train loss:3.442975 +step:9789 train loss:3.452896 +step:9790 train loss:3.411661 +step:9791 train loss:3.463020 +step:9792 train loss:3.478715 +step:9793 train loss:3.492709 +step:9794 train loss:3.471688 +step:9795 train loss:3.474577 +step:9796 train loss:3.460760 +step:9797 train loss:3.454895 +step:9798 train loss:3.471308 +step:9799 train loss:3.474959 +step:9800 train loss:3.545556 +step:9801 train loss:3.470035 +step:9802 train loss:3.527293 +step:9803 train loss:3.384495 +step:9804 train loss:3.482085 +step:9805 train loss:3.484730 +step:9806 train loss:3.458941 +step:9807 train loss:3.427613 +step:9808 train loss:3.342329 +step:9809 train loss:3.530788 +step:9810 train loss:3.486046 +step:9811 train loss:3.469994 +step:9812 train loss:3.443178 +step:9813 train loss:3.523408 +step:9814 train loss:3.515456 +step:9815 train loss:3.418609 +step:9816 train loss:3.422544 +step:9817 train loss:3.454093 +step:9818 train loss:3.479571 +step:9819 train loss:3.451873 +step:9820 train loss:3.520550 +step:9821 train loss:3.498962 +step:9822 train loss:3.472312 +step:9823 train loss:3.534480 +step:9824 train loss:3.437438 +step:9825 train loss:3.522347 +step:9826 train loss:3.518251 +step:9827 train loss:3.524649 +step:9828 train loss:3.440639 +step:9829 train loss:3.446838 +step:9830 train loss:3.436990 +step:9831 train loss:3.493432 +step:9832 train loss:3.503103 +step:9833 train loss:3.416887 +step:9834 train loss:3.467935 +step:9835 train loss:3.435632 +step:9836 train loss:3.498879 +step:9837 train loss:3.470047 +step:9838 train loss:3.507255 +step:9839 train loss:3.484728 +step:9840 train loss:3.452596 +step:9841 train loss:3.459579 +step:9842 train loss:3.522508 +step:9843 train loss:3.513382 +step:9844 train loss:3.461804 +step:9845 train loss:3.493371 +step:9846 train loss:3.428463 +step:9847 train loss:3.558031 +step:9848 train loss:3.481873 +step:9849 train loss:3.506625 +step:9850 train loss:3.426297 +step:9851 train loss:3.479209 +step:9852 train loss:3.445635 +step:9853 train loss:3.465205 +step:9854 train loss:3.476974 +step:9855 train loss:3.425263 +step:9856 train loss:3.428267 +step:9857 train loss:3.418177 +step:9858 train loss:3.480988 +step:9859 train loss:3.400804 +step:9860 train loss:3.639066 +step:9861 train loss:3.465410 +step:9862 train loss:3.431717 +step:9863 train loss:3.413536 +step:9864 train loss:3.538800 +step:9865 train loss:3.417228 +step:9866 train loss:3.457557 +step:9867 train loss:3.456532 +step:9868 train loss:3.514161 
+step:9869 train loss:3.479675 +step:9870 train loss:3.449366 +step:9871 train loss:3.489245 +step:9872 train loss:3.433279 +step:9873 train loss:3.483914 +step:9874 train loss:3.450614 +step:9875 train loss:3.453020 +step:9876 train loss:3.415241 +step:9877 train loss:3.467454 +step:9878 train loss:3.498964 +step:9879 train loss:3.499250 +step:9880 train loss:3.431544 +step:9881 train loss:3.486262 +step:9882 train loss:3.446617 +step:9883 train loss:3.455554 +step:9884 train loss:3.448269 +step:9885 train loss:3.514053 +step:9886 train loss:3.477461 +step:9887 train loss:3.478306 +step:9888 train loss:3.500346 +step:9889 train loss:3.533081 +step:9890 train loss:3.445723 +step:9891 train loss:3.449781 +step:9892 train loss:3.422949 +step:9893 train loss:3.540410 +step:9894 train loss:3.454521 +step:9895 train loss:3.390022 +step:9896 train loss:3.544037 +step:9897 train loss:3.419639 +step:9898 train loss:3.489315 +step:9899 train loss:3.468367 +step:9900 train loss:3.513437 +step:9901 train loss:3.436407 +step:9902 train loss:3.483080 +step:9903 train loss:3.452573 +step:9904 train loss:3.503046 +step:9905 train loss:3.406222 +step:9906 train loss:3.446896 +step:9907 train loss:3.455813 +step:9908 train loss:3.451627 +step:9909 train loss:3.467436 +step:9910 train loss:3.492164 +step:9911 train loss:3.575567 +step:9912 train loss:3.451620 +step:9913 train loss:3.457508 +step:9914 train loss:3.462826 +step:9915 train loss:3.462187 +step:9916 train loss:3.412226 +step:9917 train loss:3.450601 +step:9918 train loss:3.446862 +step:9919 train loss:3.610167 +step:9920 train loss:3.396804 +step:9921 train loss:3.489901 +step:9922 train loss:3.450099 +step:9923 train loss:3.505636 +step:9924 train loss:3.422577 +step:9925 train loss:3.479537 +step:9926 train loss:3.458215 +step:9927 train loss:3.501967 +step:9928 train loss:3.428746 +step:9929 train loss:3.465590 +step:9930 train loss:3.556166 +step:9931 train loss:3.519447 +step:9932 train loss:3.406271 +step:9933 train loss:3.503034 +step:9934 train loss:3.422065 +step:9935 train loss:3.536580 +step:9936 train loss:3.443317 +step:9937 train loss:3.469931 +step:9938 train loss:3.456527 +step:9939 train loss:3.521592 +step:9940 train loss:3.555977 +step:9941 train loss:3.429546 +step:9942 train loss:3.474217 +step:9943 train loss:3.600770 +step:9944 train loss:3.469952 +step:9945 train loss:3.493631 +step:9946 train loss:3.465309 +step:9947 train loss:3.413795 +step:9948 train loss:3.458470 +step:9949 train loss:3.354055 +step:9950 train loss:3.504766 +step:9951 train loss:3.423010 +step:9952 train loss:3.491256 +step:9953 train loss:3.455140 +step:9954 train loss:3.515994 +step:9955 train loss:3.488126 +step:9956 train loss:3.491175 +step:9957 train loss:3.468093 +step:9958 train loss:3.521244 +step:9959 train loss:3.422510 +step:9960 train loss:3.455472 +step:9961 train loss:3.462176 +step:9962 train loss:3.512641 +step:9963 train loss:3.403181 +step:9964 train loss:3.459609 +step:9965 train loss:3.461900 +step:9966 train loss:3.518266 +step:9967 train loss:3.433752 +step:9968 train loss:3.497540 +step:9969 train loss:3.412618 +step:9970 train loss:3.454056 +step:9971 train loss:3.496380 +step:9972 train loss:3.517838 +step:9973 train loss:3.494456 +step:9974 train loss:3.482727 +step:9975 train loss:3.451771 +step:9976 train loss:3.410244 +step:9977 train loss:3.460625 +step:9978 train loss:3.458812 +step:9979 train loss:3.471231 +step:9980 train loss:3.525662 +step:9981 train loss:3.432651 +step:9982 train loss:3.494152 +step:9983 train 
loss:3.414672 +step:9984 train loss:3.477843 +step:9985 train loss:3.420157 +step:9986 train loss:3.474090 +step:9987 train loss:3.519982 +step:9988 train loss:3.531100 +step:9989 train loss:3.425360 +step:9990 train loss:3.563827 +step:9991 train loss:3.410565 +step:9992 train loss:3.485595 +step:9993 train loss:3.477904 +step:9994 train loss:3.592801 +step:9995 train loss:3.530923 +step:9996 train loss:3.443087 +step:9997 train loss:3.485494 +step:9998 train loss:3.538726 +step:9999 train loss:3.504030 +step:10000 validation loss:3.412180 total_sharp:4.1459e-03 L1_sharp:2.7048e-01 L2_sharp:3.3084e-02 L3_sharp:2.1441e-02 L4_sharp:1.3094e-02 L5_sharp:1.4547e-02 L6_sharp:1.8292e-02 L7_sharp:2.3690e-02 L8_sharp:2.6300e-02 L9_sharp:1.7469e-02 L10_sharp:1.1903e-02 L11_sharp:1.2699e-02 L12_sharp:2.0524e-02 total_fnorm:1.3463e+00 total_l1_linf:8.0764e+03 total_spectral:1.3463e+00 L1_fnorm:6.1062e-02 L2_fnorm:5.9032e-02 L3_fnorm:5.9025e-02 L4_fnorm:6.0364e-02 L5_fnorm:6.0933e-02 L6_fnorm:6.1206e-02 L7_fnorm:6.1307e-02 L8_fnorm:6.1310e-02 L9_fnorm:6.1374e-02 L10_fnorm:6.1453e-02 L11_fnorm:6.1456e-02 L12_fnorm:6.1493e-02 L1_l1linf:3.5996e-01 L2_l1linf:3.7963e-01 L3_l1linf:3.6916e-01 L4_l1linf:3.6501e-01 L5_l1linf:3.3097e-01 L6_l1linf:3.0538e-01 L7_l1linf:3.1989e-01 L8_l1linf:3.3806e-01 L9_l1linf:3.4478e-01 L10_l1linf:3.7526e-01 L11_l1linf:4.0820e-01 L12_l1linf:4.0915e-01 L1_spectral:8.1065e-03 L2_spectral:8.5754e-03 L3_spectral:8.3927e-03 L4_spectral:8.2404e-03 L5_spectral:7.4577e-03 L6_spectral:6.9128e-03 L7_spectral:7.1745e-03 L8_spectral:7.5820e-03 L9_spectral:7.7095e-03 L10_spectral:8.4017e-03 L11_spectral:9.1389e-03 L12_spectral:9.2307e-03 ip_v_neg_g:4.8094e-03 cos_v_neg_g:9.3053e-04 v_norm:1.3463e+00 g_norm:3.8391e+00 hv_norm:1.0620e+00 cos_v_hv:5.2557e-03 hg_norm:6.8916e+02 cos_g_hg:5.6880e-01 v_par:3.6684e-05 v_perp:1.3463e+00 L1_cos_v_neg_g:1.4863e-02 L1_v_norm:6.1062e-02 L2_cos_v_neg_g:8.4695e-03 L2_v_norm:5.9032e-02 L3_cos_v_neg_g:4.2761e-03 L3_v_norm:5.9025e-02 L4_cos_v_neg_g:2.3238e-03 L4_v_norm:6.0364e-02 L5_cos_v_neg_g:3.7242e-03 L5_v_norm:6.0933e-02 L6_cos_v_neg_g:4.3247e-03 L6_v_norm:6.1206e-02 L7_cos_v_neg_g:5.7842e-03 L7_v_norm:6.1307e-02 L8_cos_v_neg_g:6.4815e-03 L8_v_norm:6.1310e-02 L9_cos_v_neg_g:4.1539e-03 L9_v_norm:6.1374e-02 L10_cos_v_neg_g:3.8305e-03 L10_v_norm:6.1453e-02 L11_cos_v_neg_g:2.8235e-03 L11_v_norm:6.1456e-02 L12_cos_v_neg_g:3.3178e-03 L12_v_norm:6.1493e-02 diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/config.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/config.json new file mode 100644 index 0000000000000000000000000000000000000000..ecb6166303ccd094681a07703590932851b7b298 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/muon_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 
1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.001, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 44, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "99772d4f-832e-4887-bf4b-69544226f94a", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_1000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..6090b0786d166c7bb61335e73ce82d673e6de25a --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3533458709716797, + "total_l1_linf_norm": 8110.9189453125, + "total_spectral_norm": 1.3533458709716797, + "layer_1_update_fnorm": 0.06190734729170799, + "layer_1_max_l1_linf_norm": 0.22612883150577545, + "layer_1_max_spectral_norm": 0.0050444272346794605, + "layer_2_update_fnorm": 0.05807807296514511, + "layer_2_max_l1_linf_norm": 0.23550522327423096, + "layer_2_max_spectral_norm": 0.005272770766168833, + "layer_3_update_fnorm": 0.055451635271310806, + "layer_3_max_l1_linf_norm": 0.24435479938983917, + "layer_3_max_spectral_norm": 0.005528400652110577, + "layer_4_update_fnorm": 0.057362668216228485, + "layer_4_max_l1_linf_norm": 0.2546113133430481, + "layer_4_max_spectral_norm": 0.005673847161233425, + "layer_5_update_fnorm": 0.058574192225933075, + "layer_5_max_l1_linf_norm": 0.2613559365272522, + "layer_5_max_spectral_norm": 0.005827660672366619, + "layer_6_update_fnorm": 0.060034170746803284, + "layer_6_max_l1_linf_norm": 0.2505625784397125, + "layer_6_max_spectral_norm": 0.005621796473860741, + "layer_7_update_fnorm": 0.060470499098300934, + "layer_7_max_l1_linf_norm": 0.24853087961673737, + "layer_7_max_spectral_norm": 0.0057885111309587955, + "layer_8_update_fnorm": 0.06060301885008812, + "layer_8_max_l1_linf_norm": 0.24912375211715698, + "layer_8_max_spectral_norm": 0.005994797218590975, + "layer_9_update_fnorm": 0.06097645312547684, + "layer_9_max_l1_linf_norm": 0.25154030323028564, + "layer_9_max_spectral_norm": 0.006564775016158819, + "layer_10_update_fnorm": 0.06127309054136276, + "layer_10_max_l1_linf_norm": 0.2474975287914276, + "layer_10_max_spectral_norm": 0.006201824638992548, + "layer_11_update_fnorm": 0.06086641922593117, + "layer_11_max_l1_linf_norm": 0.23277470469474792, + "layer_11_max_spectral_norm": 0.0058075180277228355, + "layer_12_update_fnorm": 0.0612555630505085, + "layer_12_max_l1_linf_norm": 0.21385273337364197, + "layer_12_max_spectral_norm": 0.006025891751050949, + "total_sharpness": 0.024370869621634483, + "ip_v_neg_g": 0.02333638444542885, + "cos_v_neg_g": 0.0026975302025675774, + "v_norm": 1.3533458709716797, + "g_norm": 6.392319679260254, + "hv_norm": 4.724084377288818, + "cos_v_hv": 0.006981715559959412, + "hg_norm": 2716.595947265625, + "cos_g_hg": 0.6317651271820068, + "v_parallel_norm": 0.00014907018339727074, + "v_perp_norm": 1.3533458709716797, + "layer_1_v_norm": 0.06190734729170799, + "layer_1_cos_v_neg_g": 0.02404179982841015, + "layer_2_v_norm": 0.05807807296514511, + "layer_2_cos_v_neg_g": 0.024328790605068207, + "layer_3_v_norm": 0.055451635271310806, + "layer_3_cos_v_neg_g": 0.02447100542485714, + 
"layer_4_v_norm": 0.057362668216228485, + "layer_4_cos_v_neg_g": 0.022742291912436485, + "layer_5_v_norm": 0.058574192225933075, + "layer_5_cos_v_neg_g": 0.02030276134610176, + "layer_6_v_norm": 0.060034167021512985, + "layer_6_cos_v_neg_g": 0.014936241321265697, + "layer_7_v_norm": 0.060470499098300934, + "layer_7_cos_v_neg_g": 0.013599994592368603, + "layer_8_v_norm": 0.06060301885008812, + "layer_8_cos_v_neg_g": 0.012093833647668362, + "layer_9_v_norm": 0.06097645312547684, + "layer_9_cos_v_neg_g": 0.009678563103079796, + "layer_10_v_norm": 0.06127309054136276, + "layer_10_cos_v_neg_g": 0.009081518277525902, + "layer_11_v_norm": 0.06086641550064087, + "layer_11_cos_v_neg_g": 0.008895210921764374, + "layer_12_v_norm": 0.0612555630505085, + "layer_12_cos_v_neg_g": 0.008527243509888649, + "layer_1_sharpness": 0.380207359790802, + "layer_2_sharpness": 0.25969362258911133, + "layer_3_sharpness": 0.26497170329093933, + "layer_4_sharpness": 0.19819794595241547, + "layer_5_sharpness": 0.14758062362670898, + "layer_6_sharpness": 0.09327725321054459, + "layer_7_sharpness": 0.07667147368192673, + "layer_8_sharpness": 0.06217243894934654, + "layer_9_sharpness": 0.04438547044992447, + "layer_10_sharpness": 0.036781080067157745, + "layer_11_sharpness": 0.03196690231561661, + "layer_12_sharpness": 0.026295140385627747 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_10000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..399ec639408906c3e683ef5d06ef855e171a2169 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3466235399246216, + "total_l1_linf_norm": 8077.072265625, + "total_spectral_norm": 1.3466235399246216, + "layer_1_update_fnorm": 0.060889218002557755, + "layer_1_max_l1_linf_norm": 0.3638330101966858, + "layer_1_max_spectral_norm": 0.008059842512011528, + "layer_2_update_fnorm": 0.058775875717401505, + "layer_2_max_l1_linf_norm": 0.3953462839126587, + "layer_2_max_spectral_norm": 0.008803080767393112, + "layer_3_update_fnorm": 0.05884033814072609, + "layer_3_max_l1_linf_norm": 0.3804609179496765, + "layer_3_max_spectral_norm": 0.008559100329875946, + "layer_4_update_fnorm": 0.060309071093797684, + "layer_4_max_l1_linf_norm": 0.3639785647392273, + "layer_4_max_spectral_norm": 0.008224125020205975, + "layer_5_update_fnorm": 0.06097668036818504, + "layer_5_max_l1_linf_norm": 0.3420190215110779, + "layer_5_max_spectral_norm": 0.007716109976172447, + "layer_6_update_fnorm": 0.06127830594778061, + "layer_6_max_l1_linf_norm": 0.3296714723110199, + "layer_6_max_spectral_norm": 0.007403991650789976, + "layer_7_update_fnorm": 0.061297472566366196, + "layer_7_max_l1_linf_norm": 0.3119199275970459, + "layer_7_max_spectral_norm": 0.007073832675814629, + "layer_8_update_fnorm": 0.061240192502737045, + "layer_8_max_l1_linf_norm": 0.31611788272857666, + "layer_8_max_spectral_norm": 0.007140425965189934, + "layer_9_update_fnorm": 0.061373259872198105, + "layer_9_max_l1_linf_norm": 0.34934061765670776, + "layer_9_max_spectral_norm": 0.00783849973231554, + "layer_10_update_fnorm": 0.061446383595466614, + "layer_10_max_l1_linf_norm": 0.37984588742256165, + "layer_10_max_spectral_norm": 0.008458339609205723, + "layer_11_update_fnorm": 0.061456337571144104, + "layer_11_max_l1_linf_norm": 
0.40865451097488403, + "layer_11_max_spectral_norm": 0.009158886037766933, + "layer_12_update_fnorm": 0.06146148219704628, + "layer_12_max_l1_linf_norm": 0.40855246782302856, + "layer_12_max_spectral_norm": 0.009229646995663643, + "total_sharpness": 0.0050910175777971745, + "ip_v_neg_g": 0.005656283348798752, + "cos_v_neg_g": 0.0011139179114252329, + "v_norm": 1.3466235399246216, + "g_norm": 3.770785093307495, + "hv_norm": 1.4288005828857422, + "cos_v_hv": 0.004798209294676781, + "hg_norm": 802.7987060546875, + "cos_g_hg": 0.5554344654083252, + "v_parallel_norm": 4.3317664676578715e-05, + "v_perp_norm": 1.3466235399246216, + "layer_1_v_norm": 0.060889218002557755, + "layer_1_cos_v_neg_g": 0.019812114536762238, + "layer_2_v_norm": 0.058775875717401505, + "layer_2_cos_v_neg_g": 0.00817389041185379, + "layer_3_v_norm": 0.05884033441543579, + "layer_3_cos_v_neg_g": 0.006008031312376261, + "layer_4_v_norm": 0.060309071093797684, + "layer_4_cos_v_neg_g": 0.001422915724106133, + "layer_5_v_norm": 0.06097668036818504, + "layer_5_cos_v_neg_g": 0.0039444551803171635, + "layer_6_v_norm": 0.06127830967307091, + "layer_6_cos_v_neg_g": 0.00435398006811738, + "layer_7_v_norm": 0.061297472566366196, + "layer_7_cos_v_neg_g": 0.005852792877703905, + "layer_8_v_norm": 0.061240192502737045, + "layer_8_cos_v_neg_g": 0.006781654432415962, + "layer_9_v_norm": 0.061373259872198105, + "layer_9_cos_v_neg_g": 0.005459772888571024, + "layer_10_v_norm": 0.061446383595466614, + "layer_10_cos_v_neg_g": 0.005206760950386524, + "layer_11_v_norm": 0.061456337571144104, + "layer_11_cos_v_neg_g": 0.004737758077681065, + "layer_12_v_norm": 0.06146148219704628, + "layer_12_cos_v_neg_g": 0.004967899993062019, + "layer_1_sharpness": 0.510013222694397, + "layer_2_sharpness": 0.044767528772354126, + "layer_3_sharpness": 0.026924284175038338, + "layer_4_sharpness": 0.013244733214378357, + "layer_5_sharpness": 0.015142274089157581, + "layer_6_sharpness": 0.022601502016186714, + "layer_7_sharpness": 0.023962542414665222, + "layer_8_sharpness": 0.025396957993507385, + "layer_9_sharpness": 0.019110247492790222, + "layer_10_sharpness": 0.01286941859871149, + "layer_11_sharpness": 0.01306848507374525, + "layer_12_sharpness": 0.019421083852648735 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_1500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..b6c5fe88c6c0aba3306df755249248168746455c --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3232606649398804, + "total_l1_linf_norm": 7960.71875, + "total_spectral_norm": 1.3232605457305908, + "layer_1_update_fnorm": 0.061785247176885605, + "layer_1_max_l1_linf_norm": 0.2438610941171646, + "layer_1_max_spectral_norm": 0.005344655364751816, + "layer_2_update_fnorm": 0.0568578876554966, + "layer_2_max_l1_linf_norm": 0.23965540528297424, + "layer_2_max_spectral_norm": 0.005419176071882248, + "layer_3_update_fnorm": 0.05469558760523796, + "layer_3_max_l1_linf_norm": 0.25182947516441345, + "layer_3_max_spectral_norm": 0.005694412160664797, + "layer_4_update_fnorm": 0.058036912232637405, + "layer_4_max_l1_linf_norm": 0.257373571395874, + "layer_4_max_spectral_norm": 0.005809773690998554, + "layer_5_update_fnorm": 0.059178490191698074, + "layer_5_max_l1_linf_norm": 0.2669193744659424, + 
"layer_5_max_spectral_norm": 0.006029411684721708, + "layer_6_update_fnorm": 0.0605340301990509, + "layer_6_max_l1_linf_norm": 0.24621039628982544, + "layer_6_max_spectral_norm": 0.005545145832002163, + "layer_7_update_fnorm": 0.060799866914749146, + "layer_7_max_l1_linf_norm": 0.23820927739143372, + "layer_7_max_spectral_norm": 0.005400739144533873, + "layer_8_update_fnorm": 0.060893721878528595, + "layer_8_max_l1_linf_norm": 0.23517918586730957, + "layer_8_max_spectral_norm": 0.005317718721926212, + "layer_9_update_fnorm": 0.061022110283374786, + "layer_9_max_l1_linf_norm": 0.24539509415626526, + "layer_9_max_spectral_norm": 0.005556326825171709, + "layer_10_update_fnorm": 0.06101260706782341, + "layer_10_max_l1_linf_norm": 0.25015997886657715, + "layer_10_max_spectral_norm": 0.00565165001899004, + "layer_11_update_fnorm": 0.06082569807767868, + "layer_11_max_l1_linf_norm": 0.25794732570648193, + "layer_11_max_spectral_norm": 0.005805209279060364, + "layer_12_update_fnorm": 0.06095121055841446, + "layer_12_max_l1_linf_norm": 0.2275981456041336, + "layer_12_max_spectral_norm": 0.005272068083286285, + "total_sharpness": 0.015319302678108215, + "ip_v_neg_g": 0.012889441102743149, + "cos_v_neg_g": 0.0019363044993951917, + "v_norm": 1.3232606649398804, + "g_norm": 5.030544757843018, + "hv_norm": 2.492143392562866, + "cos_v_hv": 0.008134135976433754, + "hg_norm": 894.3419799804688, + "cos_g_hg": 0.552725613117218, + "v_parallel_norm": 8.714826981304213e-05, + "v_perp_norm": 1.3232606649398804, + "layer_1_v_norm": 0.061785247176885605, + "layer_1_cos_v_neg_g": 0.021842369809746742, + "layer_2_v_norm": 0.0568578876554966, + "layer_2_cos_v_neg_g": 0.021111024543642998, + "layer_3_v_norm": 0.05469558760523796, + "layer_3_cos_v_neg_g": 0.0188879556953907, + "layer_4_v_norm": 0.058036912232637405, + "layer_4_cos_v_neg_g": 0.011970282532274723, + "layer_5_v_norm": 0.059178490191698074, + "layer_5_cos_v_neg_g": 0.011019818484783173, + "layer_6_v_norm": 0.0605340339243412, + "layer_6_cos_v_neg_g": 0.009178084321320057, + "layer_7_v_norm": 0.060799866914749146, + "layer_7_cos_v_neg_g": 0.008635054342448711, + "layer_8_v_norm": 0.060893721878528595, + "layer_8_cos_v_neg_g": 0.008699864149093628, + "layer_9_v_norm": 0.061022110283374786, + "layer_9_cos_v_neg_g": 0.007246690336614847, + "layer_10_v_norm": 0.06101260706782341, + "layer_10_cos_v_neg_g": 0.006396155804395676, + "layer_11_v_norm": 0.06082570552825928, + "layer_11_cos_v_neg_g": 0.00623965822160244, + "layer_12_v_norm": 0.06095121055841446, + "layer_12_cos_v_neg_g": 0.0049662054516375065, + "layer_1_sharpness": 0.47142648696899414, + "layer_2_sharpness": 0.22554731369018555, + "layer_3_sharpness": 0.13807480037212372, + "layer_4_sharpness": 0.07106208056211472, + "layer_5_sharpness": 0.06813472509384155, + "layer_6_sharpness": 0.05419052764773369, + "layer_7_sharpness": 0.05372826009988785, + "layer_8_sharpness": 0.04457569494843483, + "layer_9_sharpness": 0.03321331739425659, + "layer_10_sharpness": 0.026481952518224716, + "layer_11_sharpness": 0.02180739864706993, + "layer_12_sharpness": 0.02584434486925602 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_2000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..1160ca58f81aa0bb79c7f3f6f18660ecd455fed9 --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3834056854248047, + "total_l1_linf_norm": 8286.21484375, + "total_spectral_norm": 1.3834054470062256, + "layer_1_update_fnorm": 0.06183021888136864, + "layer_1_max_l1_linf_norm": 0.2660636305809021, + "layer_1_max_spectral_norm": 0.005886825267225504, + "layer_2_update_fnorm": 0.05759831517934799, + "layer_2_max_l1_linf_norm": 0.2622973620891571, + "layer_2_max_spectral_norm": 0.005924613680690527, + "layer_3_update_fnorm": 0.05611065775156021, + "layer_3_max_l1_linf_norm": 0.2870367169380188, + "layer_3_max_spectral_norm": 0.006472007371485233, + "layer_4_update_fnorm": 0.058936599642038345, + "layer_4_max_l1_linf_norm": 0.28861868381500244, + "layer_4_max_spectral_norm": 0.0064680371433496475, + "layer_5_update_fnorm": 0.05986487865447998, + "layer_5_max_l1_linf_norm": 0.28094157576560974, + "layer_5_max_spectral_norm": 0.006379537284374237, + "layer_6_update_fnorm": 0.06085536628961563, + "layer_6_max_l1_linf_norm": 0.26968181133270264, + "layer_6_max_spectral_norm": 0.006047670263797045, + "layer_7_update_fnorm": 0.060961414128541946, + "layer_7_max_l1_linf_norm": 0.26070156693458557, + "layer_7_max_spectral_norm": 0.005827663000673056, + "layer_8_update_fnorm": 0.06106327101588249, + "layer_8_max_l1_linf_norm": 0.2532096803188324, + "layer_8_max_spectral_norm": 0.005700890906155109, + "layer_9_update_fnorm": 0.061092864722013474, + "layer_9_max_l1_linf_norm": 0.2758840322494507, + "layer_9_max_spectral_norm": 0.006183426361531019, + "layer_10_update_fnorm": 0.06110375374555588, + "layer_10_max_l1_linf_norm": 0.28980666399002075, + "layer_10_max_spectral_norm": 0.006512398365885019, + "layer_11_update_fnorm": 0.060958705842494965, + "layer_11_max_l1_linf_norm": 0.28901535272598267, + "layer_11_max_spectral_norm": 0.006493242457509041, + "layer_12_update_fnorm": 0.06132565811276436, + "layer_12_max_l1_linf_norm": 0.3064172863960266, + "layer_12_max_spectral_norm": 0.006762051954865456, + "total_sharpness": 0.011282814666628838, + "ip_v_neg_g": 0.012052549980580807, + "cos_v_neg_g": 0.00167307467199862, + "v_norm": 1.3834056854248047, + "g_norm": 5.207317352294922, + "hv_norm": 2.1656594276428223, + "cos_v_hv": 0.007207370363175869, + "hg_norm": 1123.462158203125, + "cos_g_hg": 0.5671474933624268, + "v_parallel_norm": 6.748572923243046e-05, + "v_perp_norm": 1.3834056854248047, + "layer_1_v_norm": 0.06183021888136864, + "layer_1_cos_v_neg_g": 0.01752289943397045, + "layer_2_v_norm": 0.05759831517934799, + "layer_2_cos_v_neg_g": 0.016918981447815895, + "layer_3_v_norm": 0.05611065775156021, + "layer_3_cos_v_neg_g": 0.013391060754656792, + "layer_4_v_norm": 0.058936599642038345, + "layer_4_cos_v_neg_g": 0.010818958282470703, + "layer_5_v_norm": 0.05986487865447998, + "layer_5_cos_v_neg_g": 0.010449966415762901, + "layer_6_v_norm": 0.06085536628961563, + "layer_6_cos_v_neg_g": 0.010155192576348782, + "layer_7_v_norm": 0.060961414128541946, + "layer_7_cos_v_neg_g": 0.009554916992783546, + "layer_8_v_norm": 0.06106327101588249, + "layer_8_cos_v_neg_g": 0.009265618398785591, + "layer_9_v_norm": 0.061092864722013474, + "layer_9_cos_v_neg_g": 0.008736147545278072, + "layer_10_v_norm": 0.06110375374555588, + "layer_10_cos_v_neg_g": 0.007850401103496552, + "layer_11_v_norm": 0.060958705842494965, + "layer_11_cos_v_neg_g": 0.00818947795778513, + "layer_12_v_norm": 0.06132565811276436, + "layer_12_cos_v_neg_g": 0.008055292069911957, + "layer_1_sharpness": 
0.31717073917388916, + "layer_2_sharpness": 0.12517650425434113, + "layer_3_sharpness": 0.10358385741710663, + "layer_4_sharpness": 0.047554872930049896, + "layer_5_sharpness": 0.05028834566473961, + "layer_6_sharpness": 0.05098826438188553, + "layer_7_sharpness": 0.048821043223142624, + "layer_8_sharpness": 0.04347606003284454, + "layer_9_sharpness": 0.03550422191619873, + "layer_10_sharpness": 0.030300548300147057, + "layer_11_sharpness": 0.03200806304812431, + "layer_12_sharpness": 0.05878950655460358 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_2500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..9361c0f3d5fdb1801896ae74b9eaa0a221d20e3d --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.358206033706665, + "total_l1_linf_norm": 8144.85400390625, + "total_spectral_norm": 1.3582061529159546, + "layer_1_update_fnorm": 0.06156932935118675, + "layer_1_max_l1_linf_norm": 0.25377896428108215, + "layer_1_max_spectral_norm": 0.005801470018923283, + "layer_2_update_fnorm": 0.05859994515776634, + "layer_2_max_l1_linf_norm": 0.28599318861961365, + "layer_2_max_spectral_norm": 0.00642838841304183, + "layer_3_update_fnorm": 0.05737462267279625, + "layer_3_max_l1_linf_norm": 0.29773998260498047, + "layer_3_max_spectral_norm": 0.006718076299875975, + "layer_4_update_fnorm": 0.059334345161914825, + "layer_4_max_l1_linf_norm": 0.2998775839805603, + "layer_4_max_spectral_norm": 0.006731770001351833, + "layer_5_update_fnorm": 0.06010839715600014, + "layer_5_max_l1_linf_norm": 0.2933471202850342, + "layer_5_max_spectral_norm": 0.0066331904381513596, + "layer_6_update_fnorm": 0.06094935163855553, + "layer_6_max_l1_linf_norm": 0.27725866436958313, + "layer_6_max_spectral_norm": 0.006255809683352709, + "layer_7_update_fnorm": 0.06101508066058159, + "layer_7_max_l1_linf_norm": 0.26857152581214905, + "layer_7_max_spectral_norm": 0.006037566345185041, + "layer_8_update_fnorm": 0.06115316227078438, + "layer_8_max_l1_linf_norm": 0.28016382455825806, + "layer_8_max_spectral_norm": 0.006277501117438078, + "layer_9_update_fnorm": 0.061040718108415604, + "layer_9_max_l1_linf_norm": 0.27998608350753784, + "layer_9_max_spectral_norm": 0.006292771082371473, + "layer_10_update_fnorm": 0.06105408817529678, + "layer_10_max_l1_linf_norm": 0.29134634137153625, + "layer_10_max_spectral_norm": 0.006566965952515602, + "layer_11_update_fnorm": 0.060971517115831375, + "layer_11_max_l1_linf_norm": 0.2938099503517151, + "layer_11_max_spectral_norm": 0.006617652717977762, + "layer_12_update_fnorm": 0.061067719012498856, + "layer_12_max_l1_linf_norm": 0.30075377225875854, + "layer_12_max_spectral_norm": 0.006766790524125099, + "total_sharpness": 0.007670773193240166, + "ip_v_neg_g": 0.005086774006485939, + "cos_v_neg_g": 0.0008965482702478766, + "v_norm": 1.358206033706665, + "g_norm": 4.177371501922607, + "hv_norm": 1.3748971223831177, + "cos_v_hv": 0.007577651180326939, + "hg_norm": 359.55767822265625, + "cos_g_hg": 0.49862679839134216, + "v_parallel_norm": 3.8521022361237556e-05, + "v_perp_norm": 1.358206033706665, + "layer_1_v_norm": 0.06156932935118675, + "layer_1_cos_v_neg_g": 0.006400131154805422, + "layer_2_v_norm": 0.05859994515776634, + "layer_2_cos_v_neg_g": 0.006841812748461962, + "layer_3_v_norm": 
0.05737462639808655, + "layer_3_cos_v_neg_g": 0.006524953991174698, + "layer_4_v_norm": 0.059334345161914825, + "layer_4_cos_v_neg_g": 0.00568128889426589, + "layer_5_v_norm": 0.06010839715600014, + "layer_5_cos_v_neg_g": 0.005339566618204117, + "layer_6_v_norm": 0.06094935163855553, + "layer_6_cos_v_neg_g": 0.005633939057588577, + "layer_7_v_norm": 0.06101508066058159, + "layer_7_cos_v_neg_g": 0.00524872587993741, + "layer_8_v_norm": 0.06115316227078438, + "layer_8_cos_v_neg_g": 0.005363135132938623, + "layer_9_v_norm": 0.061040718108415604, + "layer_9_cos_v_neg_g": 0.004325053188949823, + "layer_10_v_norm": 0.06105408817529678, + "layer_10_cos_v_neg_g": 0.004576452076435089, + "layer_11_v_norm": 0.060971517115831375, + "layer_11_cos_v_neg_g": 0.005189984571188688, + "layer_12_v_norm": 0.061067719012498856, + "layer_12_cos_v_neg_g": 0.005427381489425898, + "layer_1_sharpness": 0.06725849211215973, + "layer_2_sharpness": 0.0419512614607811, + "layer_3_sharpness": 0.05436630919575691, + "layer_4_sharpness": 0.034488506615161896, + "layer_5_sharpness": 0.03552718460559845, + "layer_6_sharpness": 0.037633445113897324, + "layer_7_sharpness": 0.04185960441827774, + "layer_8_sharpness": 0.04355189949274063, + "layer_9_sharpness": 0.02975267358124256, + "layer_10_sharpness": 0.023384761065244675, + "layer_11_sharpness": 0.02146119996905327, + "layer_12_sharpness": 0.03932974487543106 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_3000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..0ffb6c26325725eeef81b406a88a8b5c0c191927 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.344842553138733, + "total_l1_linf_norm": 8075.64013671875, + "total_spectral_norm": 1.344842553138733, + "layer_1_update_fnorm": 0.061483144760131836, + "layer_1_max_l1_linf_norm": 0.2791596055030823, + "layer_1_max_spectral_norm": 0.006363161839544773, + "layer_2_update_fnorm": 0.05830740928649902, + "layer_2_max_l1_linf_norm": 0.3106129765510559, + "layer_2_max_spectral_norm": 0.0070395865477621555, + "layer_3_update_fnorm": 0.05697872117161751, + "layer_3_max_l1_linf_norm": 0.3263022303581238, + "layer_3_max_spectral_norm": 0.007310620509088039, + "layer_4_update_fnorm": 0.05934871360659599, + "layer_4_max_l1_linf_norm": 0.31877264380455017, + "layer_4_max_spectral_norm": 0.007206624373793602, + "layer_5_update_fnorm": 0.06041094660758972, + "layer_5_max_l1_linf_norm": 0.30636125802993774, + "layer_5_max_spectral_norm": 0.006888888776302338, + "layer_6_update_fnorm": 0.06115632876753807, + "layer_6_max_l1_linf_norm": 0.2887377142906189, + "layer_6_max_spectral_norm": 0.006534819025546312, + "layer_7_update_fnorm": 0.06108102202415466, + "layer_7_max_l1_linf_norm": 0.2852208614349365, + "layer_7_max_spectral_norm": 0.006446889601647854, + "layer_8_update_fnorm": 0.06119200587272644, + "layer_8_max_l1_linf_norm": 0.2879764437675476, + "layer_8_max_spectral_norm": 0.006453278008848429, + "layer_9_update_fnorm": 0.061153851449489594, + "layer_9_max_l1_linf_norm": 0.32159942388534546, + "layer_9_max_spectral_norm": 0.007200935389846563, + "layer_10_update_fnorm": 0.061217159032821655, + "layer_10_max_l1_linf_norm": 0.33054232597351074, + "layer_10_max_spectral_norm": 0.007406748365610838, + "layer_11_update_fnorm": 
0.061117202043533325, + "layer_11_max_l1_linf_norm": 0.33483657240867615, + "layer_11_max_spectral_norm": 0.007505397312343121, + "layer_12_update_fnorm": 0.061248816549777985, + "layer_12_max_l1_linf_norm": 0.324709415435791, + "layer_12_max_spectral_norm": 0.007384470198303461, + "total_sharpness": 0.007780296728014946, + "ip_v_neg_g": 0.00934193842113018, + "cos_v_neg_g": 0.0017863628454506397, + "v_norm": 1.344842553138733, + "g_norm": 3.8886232376098633, + "hv_norm": 1.1659985780715942, + "cos_v_hv": 0.00897365901619196, + "hg_norm": 275.99755859375, + "cos_g_hg": 0.48807504773139954, + "v_parallel_norm": 5.908294042455964e-05, + "v_perp_norm": 1.344842553138733, + "layer_1_v_norm": 0.061483144760131836, + "layer_1_cos_v_neg_g": 0.013568999245762825, + "layer_2_v_norm": 0.05830740928649902, + "layer_2_cos_v_neg_g": 0.014662286266684532, + "layer_3_v_norm": 0.056978724896907806, + "layer_3_cos_v_neg_g": 0.012216243892908096, + "layer_4_v_norm": 0.05934871360659599, + "layer_4_cos_v_neg_g": 0.010376496240496635, + "layer_5_v_norm": 0.06041094660758972, + "layer_5_cos_v_neg_g": 0.01033744029700756, + "layer_6_v_norm": 0.06115632876753807, + "layer_6_cos_v_neg_g": 0.011573298834264278, + "layer_7_v_norm": 0.06108102202415466, + "layer_7_cos_v_neg_g": 0.011386538855731487, + "layer_8_v_norm": 0.06119200587272644, + "layer_8_cos_v_neg_g": 0.012048264034092426, + "layer_9_v_norm": 0.061153851449489594, + "layer_9_cos_v_neg_g": 0.010912150144577026, + "layer_10_v_norm": 0.061217159032821655, + "layer_10_cos_v_neg_g": 0.009790739044547081, + "layer_11_v_norm": 0.061117202043533325, + "layer_11_cos_v_neg_g": 0.007838433608412743, + "layer_12_v_norm": 0.061248816549777985, + "layer_12_cos_v_neg_g": 0.007225691340863705, + "layer_1_sharpness": 0.13386933505535126, + "layer_2_sharpness": 0.05231822282075882, + "layer_3_sharpness": 0.0584416538476944, + "layer_4_sharpness": 0.02834155783057213, + "layer_5_sharpness": 0.029050789773464203, + "layer_6_sharpness": 0.036129508167505264, + "layer_7_sharpness": 0.04172491654753685, + "layer_8_sharpness": 0.0427776500582695, + "layer_9_sharpness": 0.030919359996914864, + "layer_10_sharpness": 0.02395631931722164, + "layer_11_sharpness": 0.022093765437602997, + "layer_12_sharpness": 0.03196747973561287 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_3500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..0c8f0e0260308cddc7c666dd3faacf51395eaeb4 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.349349021911621, + "total_l1_linf_norm": 8092.1943359375, + "total_spectral_norm": 1.3493489027023315, + "layer_1_update_fnorm": 0.0615241602063179, + "layer_1_max_l1_linf_norm": 0.29019248485565186, + "layer_1_max_spectral_norm": 0.006530249956995249, + "layer_2_update_fnorm": 0.058722566813230515, + "layer_2_max_l1_linf_norm": 0.3284692168235779, + "layer_2_max_spectral_norm": 0.007372405845671892, + "layer_3_update_fnorm": 0.05777152627706528, + "layer_3_max_l1_linf_norm": 0.3423222303390503, + "layer_3_max_spectral_norm": 0.007619612850248814, + "layer_4_update_fnorm": 0.059351757168769836, + "layer_4_max_l1_linf_norm": 0.3490714728832245, + "layer_4_max_spectral_norm": 0.007797061465680599, + "layer_5_update_fnorm": 0.060347750782966614, + 
"layer_5_max_l1_linf_norm": 0.3251630663871765, + "layer_5_max_spectral_norm": 0.007277882192283869, + "layer_6_update_fnorm": 0.06134282052516937, + "layer_6_max_l1_linf_norm": 0.3429110050201416, + "layer_6_max_spectral_norm": 0.00760746281594038, + "layer_7_update_fnorm": 0.06116337701678276, + "layer_7_max_l1_linf_norm": 0.2962369918823242, + "layer_7_max_spectral_norm": 0.006616862956434488, + "layer_8_update_fnorm": 0.06114419922232628, + "layer_8_max_l1_linf_norm": 0.2987516522407532, + "layer_8_max_spectral_norm": 0.0066856881603598595, + "layer_9_update_fnorm": 0.061167579144239426, + "layer_9_max_l1_linf_norm": 0.3176189064979553, + "layer_9_max_spectral_norm": 0.007150747813284397, + "layer_10_update_fnorm": 0.061182521283626556, + "layer_10_max_l1_linf_norm": 0.326071560382843, + "layer_10_max_spectral_norm": 0.007339388132095337, + "layer_11_update_fnorm": 0.06105712056159973, + "layer_11_max_l1_linf_norm": 0.32678771018981934, + "layer_11_max_spectral_norm": 0.007312766741961241, + "layer_12_update_fnorm": 0.06106046959757805, + "layer_12_max_l1_linf_norm": 0.2962591052055359, + "layer_12_max_spectral_norm": 0.006779227871447802, + "total_sharpness": 0.0067525445483624935, + "ip_v_neg_g": 0.008819743990898132, + "cos_v_neg_g": 0.00159508700016886, + "v_norm": 1.349349021911621, + "g_norm": 4.097767353057861, + "hv_norm": 1.0522091388702393, + "cos_v_hv": 0.00865943729877472, + "hg_norm": 379.3396301269531, + "cos_g_hg": 0.5577971339225769, + "v_parallel_norm": 5.4039650422055274e-05, + "v_perp_norm": 1.349349021911621, + "layer_1_v_norm": 0.0615241602063179, + "layer_1_cos_v_neg_g": 0.012803599238395691, + "layer_2_v_norm": 0.058722566813230515, + "layer_2_cos_v_neg_g": 0.01344362460076809, + "layer_3_v_norm": 0.057771530002355576, + "layer_3_cos_v_neg_g": 0.014012706466019154, + "layer_4_v_norm": 0.059351757168769836, + "layer_4_cos_v_neg_g": 0.011966251768171787, + "layer_5_v_norm": 0.060347750782966614, + "layer_5_cos_v_neg_g": 0.011462588794529438, + "layer_6_v_norm": 0.06134282052516937, + "layer_6_cos_v_neg_g": 0.01294760312885046, + "layer_7_v_norm": 0.06116337701678276, + "layer_7_cos_v_neg_g": 0.011128444224596024, + "layer_8_v_norm": 0.06114419922232628, + "layer_8_cos_v_neg_g": 0.010373474098742008, + "layer_9_v_norm": 0.061167579144239426, + "layer_9_cos_v_neg_g": 0.009236487559974194, + "layer_10_v_norm": 0.061182521283626556, + "layer_10_cos_v_neg_g": 0.006306069903075695, + "layer_11_v_norm": 0.06105712056159973, + "layer_11_cos_v_neg_g": 0.005010551307350397, + "layer_12_v_norm": 0.06106046959757805, + "layer_12_cos_v_neg_g": 0.004657018464058638, + "layer_1_sharpness": 0.05843836069107056, + "layer_2_sharpness": 0.035552266985177994, + "layer_3_sharpness": 0.054069869220256805, + "layer_4_sharpness": 0.03590225800871849, + "layer_5_sharpness": 0.034496769309043884, + "layer_6_sharpness": 0.04224298521876335, + "layer_7_sharpness": 0.0424223355948925, + "layer_8_sharpness": 0.03893536329269409, + "layer_9_sharpness": 0.02833211049437523, + "layer_10_sharpness": 0.0199330672621727, + "layer_11_sharpness": 0.018389742821455002, + "layer_12_sharpness": 0.025231195613741875 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_4000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..9d0de7b6106ff99e0cdd16748d039e759d47d3d6 --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3493527173995972, + "total_l1_linf_norm": 8094.15478515625, + "total_spectral_norm": 1.3493525981903076, + "layer_1_update_fnorm": 0.061166997998952866, + "layer_1_max_l1_linf_norm": 0.2514207363128662, + "layer_1_max_spectral_norm": 0.005793490447103977, + "layer_2_update_fnorm": 0.05915992707014084, + "layer_2_max_l1_linf_norm": 0.2902556359767914, + "layer_2_max_spectral_norm": 0.006613538600504398, + "layer_3_update_fnorm": 0.05829066038131714, + "layer_3_max_l1_linf_norm": 0.3129097819328308, + "layer_3_max_spectral_norm": 0.007072123233228922, + "layer_4_update_fnorm": 0.05964706838130951, + "layer_4_max_l1_linf_norm": 0.31985780596733093, + "layer_4_max_spectral_norm": 0.007281764876097441, + "layer_5_update_fnorm": 0.06052089110016823, + "layer_5_max_l1_linf_norm": 0.301310658454895, + "layer_5_max_spectral_norm": 0.006827847100794315, + "layer_6_update_fnorm": 0.06111590564250946, + "layer_6_max_l1_linf_norm": 0.2749038338661194, + "layer_6_max_spectral_norm": 0.006240774877369404, + "layer_7_update_fnorm": 0.06111731752753258, + "layer_7_max_l1_linf_norm": 0.2882993817329407, + "layer_7_max_spectral_norm": 0.006465590558946133, + "layer_8_update_fnorm": 0.06127012148499489, + "layer_8_max_l1_linf_norm": 0.30908113718032837, + "layer_8_max_spectral_norm": 0.006839504465460777, + "layer_9_update_fnorm": 0.06121484562754631, + "layer_9_max_l1_linf_norm": 0.3197498023509979, + "layer_9_max_spectral_norm": 0.007119807880371809, + "layer_10_update_fnorm": 0.06120717152953148, + "layer_10_max_l1_linf_norm": 0.3350478410720825, + "layer_10_max_spectral_norm": 0.007501751184463501, + "layer_11_update_fnorm": 0.06117572262883186, + "layer_11_max_l1_linf_norm": 0.3556565046310425, + "layer_11_max_spectral_norm": 0.007955740205943584, + "layer_12_update_fnorm": 0.061268553137779236, + "layer_12_max_l1_linf_norm": 0.36389636993408203, + "layer_12_max_spectral_norm": 0.008152973838150501, + "total_sharpness": 0.004822331015020609, + "ip_v_neg_g": 0.004979400895535946, + "cos_v_neg_g": 0.0010457944590598345, + "v_norm": 1.3493527173995972, + "g_norm": 3.528623104095459, + "hv_norm": 0.7802619934082031, + "cos_v_hv": 0.008339538238942623, + "hg_norm": 178.74510192871094, + "cos_g_hg": 0.46538856625556946, + "v_parallel_norm": 3.730475145857781e-05, + "v_perp_norm": 1.3493527173995972, + "layer_1_v_norm": 0.061166997998952866, + "layer_1_cos_v_neg_g": 0.005630252417176962, + "layer_2_v_norm": 0.05915992707014084, + "layer_2_cos_v_neg_g": 0.006546174641698599, + "layer_3_v_norm": 0.05829065665602684, + "layer_3_cos_v_neg_g": 0.004672755021601915, + "layer_4_v_norm": 0.05964706838130951, + "layer_4_cos_v_neg_g": 0.004977156408131123, + "layer_5_v_norm": 0.06052089110016823, + "layer_5_cos_v_neg_g": 0.00478855986148119, + "layer_6_v_norm": 0.06111590564250946, + "layer_6_cos_v_neg_g": 0.0054029617458581924, + "layer_7_v_norm": 0.06111731752753258, + "layer_7_cos_v_neg_g": 0.0071853394620120525, + "layer_8_v_norm": 0.06127012148499489, + "layer_8_cos_v_neg_g": 0.008320559747517109, + "layer_9_v_norm": 0.06121484562754631, + "layer_9_cos_v_neg_g": 0.008370493538677692, + "layer_10_v_norm": 0.06120717152953148, + "layer_10_cos_v_neg_g": 0.007010575849562883, + "layer_11_v_norm": 0.06117572262883186, + "layer_11_cos_v_neg_g": 0.006084779743105173, + "layer_12_v_norm": 0.061268553137779236, + "layer_12_cos_v_neg_g": 0.005243015941232443, + "layer_1_sharpness": 
0.033484868705272675, + "layer_2_sharpness": 0.01717539317905903, + "layer_3_sharpness": 0.033197298645973206, + "layer_4_sharpness": 0.020145390182733536, + "layer_5_sharpness": 0.02191157080233097, + "layer_6_sharpness": 0.025745749473571777, + "layer_7_sharpness": 0.03473013639450073, + "layer_8_sharpness": 0.03662966191768646, + "layer_9_sharpness": 0.025366706773638725, + "layer_10_sharpness": 0.019683722406625748, + "layer_11_sharpness": 0.02100607007741928, + "layer_12_sharpness": 0.033419009298086166 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_4500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..171f764d0f896f4bbb57089edb5fb5d2744f4735 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3565073013305664, + "total_l1_linf_norm": 8130.580078125, + "total_spectral_norm": 1.356507420539856, + "layer_1_update_fnorm": 0.061582088470458984, + "layer_1_max_l1_linf_norm": 0.33102577924728394, + "layer_1_max_spectral_norm": 0.0074053555727005005, + "layer_2_update_fnorm": 0.05893777683377266, + "layer_2_max_l1_linf_norm": 0.3680035173892975, + "layer_2_max_spectral_norm": 0.008305893279612064, + "layer_3_update_fnorm": 0.05809817090630531, + "layer_3_max_l1_linf_norm": 0.37408336997032166, + "layer_3_max_spectral_norm": 0.008324285969138145, + "layer_4_update_fnorm": 0.0598299577832222, + "layer_4_max_l1_linf_norm": 0.36468273401260376, + "layer_4_max_spectral_norm": 0.00814060214906931, + "layer_5_update_fnorm": 0.0608358196914196, + "layer_5_max_l1_linf_norm": 0.37503495812416077, + "layer_5_max_spectral_norm": 0.008357040584087372, + "layer_6_update_fnorm": 0.061510466039180756, + "layer_6_max_l1_linf_norm": 0.363919734954834, + "layer_6_max_spectral_norm": 0.00809446256607771, + "layer_7_update_fnorm": 0.06142883002758026, + "layer_7_max_l1_linf_norm": 0.34846365451812744, + "layer_7_max_spectral_norm": 0.007781967055052519, + "layer_8_update_fnorm": 0.06135562062263489, + "layer_8_max_l1_linf_norm": 0.32671302556991577, + "layer_8_max_spectral_norm": 0.007310416083782911, + "layer_9_update_fnorm": 0.06125841289758682, + "layer_9_max_l1_linf_norm": 0.35189634561538696, + "layer_9_max_spectral_norm": 0.007840297184884548, + "layer_10_update_fnorm": 0.06134742125868797, + "layer_10_max_l1_linf_norm": 0.3640339970588684, + "layer_10_max_spectral_norm": 0.008122521452605724, + "layer_11_update_fnorm": 0.06130633503198624, + "layer_11_max_l1_linf_norm": 0.3735843896865845, + "layer_11_max_spectral_norm": 0.0084525216370821, + "layer_12_update_fnorm": 0.061172422021627426, + "layer_12_max_l1_linf_norm": 0.33801957964897156, + "layer_12_max_spectral_norm": 0.007724648807197809, + "total_sharpness": 0.006751033943146467, + "ip_v_neg_g": 0.0062148659490048885, + "cos_v_neg_g": 0.0012944038026034832, + "v_norm": 1.3565073013305664, + "g_norm": 3.5394833087921143, + "hv_norm": 0.9592494368553162, + "cos_v_hv": 0.009546867571771145, + "hg_norm": 368.56292724609375, + "cos_g_hg": 0.43024682998657227, + "v_parallel_norm": 4.331216405262239e-05, + "v_perp_norm": 1.3565073013305664, + "layer_1_v_norm": 0.061582088470458984, + "layer_1_cos_v_neg_g": 0.008469964377582073, + "layer_2_v_norm": 0.05893777683377266, + "layer_2_cos_v_neg_g": 0.006776389665901661, + "layer_3_v_norm": 
0.058098167181015015, + "layer_3_cos_v_neg_g": 0.006239747628569603, + "layer_4_v_norm": 0.0598299577832222, + "layer_4_cos_v_neg_g": 0.006201982032507658, + "layer_5_v_norm": 0.0608358196914196, + "layer_5_cos_v_neg_g": 0.00823068805038929, + "layer_6_v_norm": 0.06151046231389046, + "layer_6_cos_v_neg_g": 0.009032242931425571, + "layer_7_v_norm": 0.06142883002758026, + "layer_7_cos_v_neg_g": 0.008258488029241562, + "layer_8_v_norm": 0.06135562062263489, + "layer_8_cos_v_neg_g": 0.008376099169254303, + "layer_9_v_norm": 0.06125841289758682, + "layer_9_cos_v_neg_g": 0.008085254579782486, + "layer_10_v_norm": 0.06134742125868797, + "layer_10_cos_v_neg_g": 0.0072465636767446995, + "layer_11_v_norm": 0.061306338757276535, + "layer_11_cos_v_neg_g": 0.007310590706765652, + "layer_12_v_norm": 0.061172422021627426, + "layer_12_cos_v_neg_g": 0.006997305434197187, + "layer_1_sharpness": 0.049338821321725845, + "layer_2_sharpness": 0.02957008220255375, + "layer_3_sharpness": 0.05058108642697334, + "layer_4_sharpness": 0.02558407001197338, + "layer_5_sharpness": 0.031500767916440964, + "layer_6_sharpness": 0.04243822395801544, + "layer_7_sharpness": 0.046036090701818466, + "layer_8_sharpness": 0.04211164265871048, + "layer_9_sharpness": 0.032422032207250595, + "layer_10_sharpness": 0.025523504242300987, + "layer_11_sharpness": 0.02408960461616516, + "layer_12_sharpness": 0.03424592316150665 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..7dc00901ee3f039c5b708dd9200dadb56875c4e6 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 0.9347360730171204, + "total_l1_linf_norm": 5615.501953125, + "total_spectral_norm": 0.9347361922264099, + "layer_1_update_fnorm": 0.04395246505737305, + "layer_1_max_l1_linf_norm": 0.16485825181007385, + "layer_1_max_spectral_norm": 0.0035629570484161377, + "layer_2_update_fnorm": 0.041669413447380066, + "layer_2_max_l1_linf_norm": 0.1523132026195526, + "layer_2_max_spectral_norm": 0.0034567974507808685, + "layer_3_update_fnorm": 0.041370049118995667, + "layer_3_max_l1_linf_norm": 0.152810201048851, + "layer_3_max_spectral_norm": 0.0034633753821253777, + "layer_4_update_fnorm": 0.0412338487803936, + "layer_4_max_l1_linf_norm": 0.16077907383441925, + "layer_4_max_spectral_norm": 0.005803470965474844, + "layer_5_update_fnorm": 0.0411682203412056, + "layer_5_max_l1_linf_norm": 0.16444899141788483, + "layer_5_max_spectral_norm": 0.006298227235674858, + "layer_6_update_fnorm": 0.04151051491498947, + "layer_6_max_l1_linf_norm": 0.16381850838661194, + "layer_6_max_spectral_norm": 0.006845582742244005, + "layer_7_update_fnorm": 0.04180228337645531, + "layer_7_max_l1_linf_norm": 0.17407765984535217, + "layer_7_max_spectral_norm": 0.0074694459326565266, + "layer_8_update_fnorm": 0.0420929379761219, + "layer_8_max_l1_linf_norm": 0.1639086753129959, + "layer_8_max_spectral_norm": 0.006269524805247784, + "layer_9_update_fnorm": 0.0424085408449173, + "layer_9_max_l1_linf_norm": 0.17063049972057343, + "layer_9_max_spectral_norm": 0.006343895103782415, + "layer_10_update_fnorm": 0.04268521070480347, + "layer_10_max_l1_linf_norm": 0.17820300161838531, + "layer_10_max_spectral_norm": 0.006051274947822094, + "layer_11_update_fnorm": 
0.04278675094246864, + "layer_11_max_l1_linf_norm": 0.17768290638923645, + "layer_11_max_spectral_norm": 0.003940405789762735, + "layer_12_update_fnorm": 0.04297424107789993, + "layer_12_max_l1_linf_norm": 0.1860792338848114, + "layer_12_max_spectral_norm": 0.004128287546336651, + "total_sharpness": 0.05363766849040985, + "ip_v_neg_g": 0.023820368573069572, + "cos_v_neg_g": 0.0031347775366157293, + "v_norm": 0.9347360730171204, + "g_norm": 8.129292488098145, + "hv_norm": 8.343709945678711, + "cos_v_hv": 0.006008964963257313, + "hg_norm": 10823.8388671875, + "cos_g_hg": 0.715160071849823, + "v_parallel_norm": 0.00021996538271196187, + "v_perp_norm": 0.9347360134124756, + "layer_1_v_norm": 0.04395246505737305, + "layer_1_cos_v_neg_g": 0.032629918307065964, + "layer_2_v_norm": 0.041669413447380066, + "layer_2_cos_v_neg_g": 0.022293200716376305, + "layer_3_v_norm": 0.041370049118995667, + "layer_3_cos_v_neg_g": 0.019170524552464485, + "layer_4_v_norm": 0.0412338487803936, + "layer_4_cos_v_neg_g": 0.01938050612807274, + "layer_5_v_norm": 0.0411682203412056, + "layer_5_cos_v_neg_g": 0.020056482404470444, + "layer_6_v_norm": 0.04151051864027977, + "layer_6_cos_v_neg_g": 0.01772671565413475, + "layer_7_v_norm": 0.04180228337645531, + "layer_7_cos_v_neg_g": 0.016363296657800674, + "layer_8_v_norm": 0.0420929417014122, + "layer_8_cos_v_neg_g": 0.015815308317542076, + "layer_9_v_norm": 0.0424085408449173, + "layer_9_cos_v_neg_g": 0.01604866236448288, + "layer_10_v_norm": 0.04268521070480347, + "layer_10_cos_v_neg_g": 0.016903964802622795, + "layer_11_v_norm": 0.04278675094246864, + "layer_11_cos_v_neg_g": 0.0174284428358078, + "layer_12_v_norm": 0.04297424107789993, + "layer_12_cos_v_neg_g": 0.015779688954353333, + "layer_1_sharpness": 3.4889607429504395, + "layer_2_sharpness": 0.45534777641296387, + "layer_3_sharpness": 0.24480530619621277, + "layer_4_sharpness": 0.21998295187950134, + "layer_5_sharpness": 0.18102198839187622, + "layer_6_sharpness": 0.12464971840381622, + "layer_7_sharpness": 0.08958660811185837, + "layer_8_sharpness": 0.07878044247627258, + "layer_9_sharpness": 0.07123412191867828, + "layer_10_sharpness": 0.07920058816671371, + "layer_11_sharpness": 0.085492342710495, + "layer_12_sharpness": 0.0819433331489563 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_5000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..c3eab62b011468869cfe5aaa2fd836f0802089b6 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3465676307678223, + "total_l1_linf_norm": 8079.56103515625, + "total_spectral_norm": 1.3465678691864014, + "layer_1_update_fnorm": 0.0610838308930397, + "layer_1_max_l1_linf_norm": 0.274905800819397, + "layer_1_max_spectral_norm": 0.006382985971868038, + "layer_2_update_fnorm": 0.05865101143717766, + "layer_2_max_l1_linf_norm": 0.32783302664756775, + "layer_2_max_spectral_norm": 0.007395637221634388, + "layer_3_update_fnorm": 0.05783362686634064, + "layer_3_max_l1_linf_norm": 0.33143872022628784, + "layer_3_max_spectral_norm": 0.007474446669220924, + "layer_4_update_fnorm": 0.059707801789045334, + "layer_4_max_l1_linf_norm": 0.33015838265419006, + "layer_4_max_spectral_norm": 0.007488593924790621, + "layer_5_update_fnorm": 0.060603801161050797, + 
"layer_5_max_l1_linf_norm": 0.3266497850418091, + "layer_5_max_spectral_norm": 0.007318991236388683, + "layer_6_update_fnorm": 0.06119208782911301, + "layer_6_max_l1_linf_norm": 0.3084043860435486, + "layer_6_max_spectral_norm": 0.006961021572351456, + "layer_7_update_fnorm": 0.06109042093157768, + "layer_7_max_l1_linf_norm": 0.28481972217559814, + "layer_7_max_spectral_norm": 0.006480489391833544, + "layer_8_update_fnorm": 0.06111343204975128, + "layer_8_max_l1_linf_norm": 0.2832086980342865, + "layer_8_max_spectral_norm": 0.0064569031819701195, + "layer_9_update_fnorm": 0.06112521514296532, + "layer_9_max_l1_linf_norm": 0.30130457878112793, + "layer_9_max_spectral_norm": 0.006815377622842789, + "layer_10_update_fnorm": 0.06127135083079338, + "layer_10_max_l1_linf_norm": 0.34619903564453125, + "layer_10_max_spectral_norm": 0.007771276868879795, + "layer_11_update_fnorm": 0.06124897301197052, + "layer_11_max_l1_linf_norm": 0.35295432806015015, + "layer_11_max_spectral_norm": 0.007954330183565617, + "layer_12_update_fnorm": 0.06119358167052269, + "layer_12_max_l1_linf_norm": 0.34335485100746155, + "layer_12_max_spectral_norm": 0.007848076522350311, + "total_sharpness": 0.004598987754434347, + "ip_v_neg_g": 0.005062234587967396, + "cos_v_neg_g": 0.0010821002069860697, + "v_norm": 1.3465676307678223, + "g_norm": 3.4741344451904297, + "hv_norm": 0.7634376883506775, + "cos_v_hv": 0.008111792616546154, + "hg_norm": 288.75299072265625, + "cos_g_hg": 0.46295633912086487, + "v_parallel_norm": 4.158042065682821e-05, + "v_perp_norm": 1.3465676307678223, + "layer_1_v_norm": 0.0610838308930397, + "layer_1_cos_v_neg_g": 0.00874828640371561, + "layer_2_v_norm": 0.05865101143717766, + "layer_2_cos_v_neg_g": 0.007960605435073376, + "layer_3_v_norm": 0.05783362686634064, + "layer_3_cos_v_neg_g": 0.008811002597212791, + "layer_4_v_norm": 0.059707801789045334, + "layer_4_cos_v_neg_g": 0.005764150992035866, + "layer_5_v_norm": 0.060603801161050797, + "layer_5_cos_v_neg_g": 0.005272477399557829, + "layer_6_v_norm": 0.06119208782911301, + "layer_6_cos_v_neg_g": 0.005880860146135092, + "layer_7_v_norm": 0.06109042093157768, + "layer_7_cos_v_neg_g": 0.005197775084525347, + "layer_8_v_norm": 0.06111343204975128, + "layer_8_cos_v_neg_g": 0.005728867370635271, + "layer_9_v_norm": 0.06112521514296532, + "layer_9_cos_v_neg_g": 0.004877562168985605, + "layer_10_v_norm": 0.06127135083079338, + "layer_10_cos_v_neg_g": 0.005637685302644968, + "layer_11_v_norm": 0.06124897673726082, + "layer_11_cos_v_neg_g": 0.005494645796716213, + "layer_12_v_norm": 0.06119358167052269, + "layer_12_cos_v_neg_g": 0.005903769284486771, + "layer_1_sharpness": 0.0691884309053421, + "layer_2_sharpness": 0.027051648125052452, + "layer_3_sharpness": 0.03696206212043762, + "layer_4_sharpness": 0.019755806773900986, + "layer_5_sharpness": 0.025134015828371048, + "layer_6_sharpness": 0.025804515928030014, + "layer_7_sharpness": 0.029245587065815926, + "layer_8_sharpness": 0.02987181767821312, + "layer_9_sharpness": 0.021689698100090027, + "layer_10_sharpness": 0.017170768231153488, + "layer_11_sharpness": 0.016134992241859436, + "layer_12_sharpness": 0.02360719069838524 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_5500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..1f34ac3437281d30996b646d846c804eee58fe63 --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3461865186691284, + "total_l1_linf_norm": 8083.07861328125, + "total_spectral_norm": 1.3461867570877075, + "layer_1_update_fnorm": 0.061297547072172165, + "layer_1_max_l1_linf_norm": 0.32425573468208313, + "layer_1_max_spectral_norm": 0.007270207162946463, + "layer_2_update_fnorm": 0.05895264446735382, + "layer_2_max_l1_linf_norm": 0.3422617018222809, + "layer_2_max_spectral_norm": 0.007781400345265865, + "layer_3_update_fnorm": 0.05866791680455208, + "layer_3_max_l1_linf_norm": 0.3489583432674408, + "layer_3_max_spectral_norm": 0.007836755365133286, + "layer_4_update_fnorm": 0.06000157445669174, + "layer_4_max_l1_linf_norm": 0.3642902374267578, + "layer_4_max_spectral_norm": 0.008154252544045448, + "layer_5_update_fnorm": 0.060894086956977844, + "layer_5_max_l1_linf_norm": 0.34192895889282227, + "layer_5_max_spectral_norm": 0.0076507097110152245, + "layer_6_update_fnorm": 0.06136823073029518, + "layer_6_max_l1_linf_norm": 0.3497513234615326, + "layer_6_max_spectral_norm": 0.007847832515835762, + "layer_7_update_fnorm": 0.06126630678772926, + "layer_7_max_l1_linf_norm": 0.318484902381897, + "layer_7_max_spectral_norm": 0.007167505100369453, + "layer_8_update_fnorm": 0.06124512478709221, + "layer_8_max_l1_linf_norm": 0.3066592216491699, + "layer_8_max_spectral_norm": 0.006874022074043751, + "layer_9_update_fnorm": 0.0613420270383358, + "layer_9_max_l1_linf_norm": 0.3431089520454407, + "layer_9_max_spectral_norm": 0.007691774517297745, + "layer_10_update_fnorm": 0.06137487292289734, + "layer_10_max_l1_linf_norm": 0.3678780794143677, + "layer_10_max_spectral_norm": 0.008209793828427792, + "layer_11_update_fnorm": 0.06136336922645569, + "layer_11_max_l1_linf_norm": 0.38472259044647217, + "layer_11_max_spectral_norm": 0.008632407523691654, + "layer_12_update_fnorm": 0.061387479305267334, + "layer_12_max_l1_linf_norm": 0.379106342792511, + "layer_12_max_spectral_norm": 0.008478069677948952, + "total_sharpness": 0.0038270370569080114, + "ip_v_neg_g": -0.000659087672829628, + "cos_v_neg_g": -9.073151886695996e-05, + "v_norm": 1.3461865186691284, + "g_norm": 5.396097183227539, + "hv_norm": 0.7995947599411011, + "cos_v_hv": 0.0064431456848979, + "hg_norm": 984.499755859375, + "cos_g_hg": 0.6900433301925659, + "v_parallel_norm": 2.298876097484026e-05, + "v_perp_norm": 1.3461865186691284, + "layer_1_v_norm": 0.061297547072172165, + "layer_1_cos_v_neg_g": 0.002041523577645421, + "layer_2_v_norm": 0.05895264446735382, + "layer_2_cos_v_neg_g": 0.0019044621149078012, + "layer_3_v_norm": 0.05866791680455208, + "layer_3_cos_v_neg_g": 0.0008783367811702192, + "layer_4_v_norm": 0.06000157445669174, + "layer_4_cos_v_neg_g": 0.0008809290593490005, + "layer_5_v_norm": 0.060894086956977844, + "layer_5_cos_v_neg_g": 0.0022412429098039865, + "layer_6_v_norm": 0.06136823073029518, + "layer_6_cos_v_neg_g": -0.000495695392601192, + "layer_7_v_norm": 0.06126630678772926, + "layer_7_cos_v_neg_g": -0.0016233284259214997, + "layer_8_v_norm": 0.06124512478709221, + "layer_8_cos_v_neg_g": -0.0018084347248077393, + "layer_9_v_norm": 0.0613420270383358, + "layer_9_cos_v_neg_g": -0.0027746744453907013, + "layer_10_v_norm": 0.06137487292289734, + "layer_10_cos_v_neg_g": -0.0013747714692726731, + "layer_11_v_norm": 0.06136337295174599, + "layer_11_cos_v_neg_g": -0.0009745244751684368, + "layer_12_v_norm": 0.061387479305267334, + "layer_12_cos_v_neg_g": -0.0013885913649573922, + 
"layer_1_sharpness": 0.041924744844436646, + "layer_2_sharpness": 0.016392355784773827, + "layer_3_sharpness": 0.01936653070151806, + "layer_4_sharpness": 0.01429701317101717, + "layer_5_sharpness": 0.01663401909172535, + "layer_6_sharpness": 0.021886253729462624, + "layer_7_sharpness": 0.022794373333454132, + "layer_8_sharpness": 0.024993015453219414, + "layer_9_sharpness": 0.01892208866775036, + "layer_10_sharpness": 0.013514921069145203, + "layer_11_sharpness": 0.013633049093186855, + "layer_12_sharpness": 0.01853804662823677 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_6000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..3100faa5468a28a11c14ea7d9670ce445e1a59c4 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3440022468566895, + "total_l1_linf_norm": 8067.51025390625, + "total_spectral_norm": 1.3440022468566895, + "layer_1_update_fnorm": 0.06114157661795616, + "layer_1_max_l1_linf_norm": 0.30512648820877075, + "layer_1_max_spectral_norm": 0.0068873511627316475, + "layer_2_update_fnorm": 0.059090450406074524, + "layer_2_max_l1_linf_norm": 0.35083895921707153, + "layer_2_max_spectral_norm": 0.007933354005217552, + "layer_3_update_fnorm": 0.05846317857503891, + "layer_3_max_l1_linf_norm": 0.3585342764854431, + "layer_3_max_spectral_norm": 0.008065162226557732, + "layer_4_update_fnorm": 0.05987214297056198, + "layer_4_max_l1_linf_norm": 0.34556305408477783, + "layer_4_max_spectral_norm": 0.007743137888610363, + "layer_5_update_fnorm": 0.06083463877439499, + "layer_5_max_l1_linf_norm": 0.3242843747138977, + "layer_5_max_spectral_norm": 0.007321517914533615, + "layer_6_update_fnorm": 0.06124067306518555, + "layer_6_max_l1_linf_norm": 0.3113175630569458, + "layer_6_max_spectral_norm": 0.007002110593020916, + "layer_7_update_fnorm": 0.06127466633915901, + "layer_7_max_l1_linf_norm": 0.31352341175079346, + "layer_7_max_spectral_norm": 0.007060233503580093, + "layer_8_update_fnorm": 0.06121283769607544, + "layer_8_max_l1_linf_norm": 0.30403977632522583, + "layer_8_max_spectral_norm": 0.0068445997312664986, + "layer_9_update_fnorm": 0.061227161437273026, + "layer_9_max_l1_linf_norm": 0.32477861642837524, + "layer_9_max_spectral_norm": 0.007282531354576349, + "layer_10_update_fnorm": 0.061326708644628525, + "layer_10_max_l1_linf_norm": 0.33560532331466675, + "layer_10_max_spectral_norm": 0.0075404588133096695, + "layer_11_update_fnorm": 0.06130179017782211, + "layer_11_max_l1_linf_norm": 0.3597284257411957, + "layer_11_max_spectral_norm": 0.008046610280871391, + "layer_12_update_fnorm": 0.061251044273376465, + "layer_12_max_l1_linf_norm": 0.3554500639438629, + "layer_12_max_spectral_norm": 0.008006311021745205, + "total_sharpness": 0.005346738267689943, + "ip_v_neg_g": 0.004338924773037434, + "cos_v_neg_g": 0.0010173168266192079, + "v_norm": 1.3440022468566895, + "g_norm": 3.173407793045044, + "hv_norm": 0.8283340334892273, + "cos_v_hv": 0.008675278164446354, + "hg_norm": 151.99038696289062, + "cos_g_hg": 0.4235382676124573, + "v_parallel_norm": 3.015100082848221e-05, + "v_perp_norm": 1.3440022468566895, + "layer_1_v_norm": 0.06114157661795616, + "layer_1_cos_v_neg_g": 0.010326841846108437, + "layer_2_v_norm": 0.059090450406074524, + "layer_2_cos_v_neg_g": 0.007382935378700495, 
+ "layer_3_v_norm": 0.05846317857503891, + "layer_3_cos_v_neg_g": 0.005394155625253916, + "layer_4_v_norm": 0.05987214297056198, + "layer_4_cos_v_neg_g": 0.006114704068750143, + "layer_5_v_norm": 0.06083463877439499, + "layer_5_cos_v_neg_g": 0.0057527911849319935, + "layer_6_v_norm": 0.061240676790475845, + "layer_6_cos_v_neg_g": 0.006361293140798807, + "layer_7_v_norm": 0.06127466633915901, + "layer_7_cos_v_neg_g": 0.005271452479064465, + "layer_8_v_norm": 0.06121283397078514, + "layer_8_cos_v_neg_g": 0.005846874322742224, + "layer_9_v_norm": 0.061227161437273026, + "layer_9_cos_v_neg_g": 0.00668027438223362, + "layer_10_v_norm": 0.061326708644628525, + "layer_10_cos_v_neg_g": 0.00561797060072422, + "layer_11_v_norm": 0.06130179017782211, + "layer_11_cos_v_neg_g": 0.0049234009347856045, + "layer_12_v_norm": 0.061251044273376465, + "layer_12_cos_v_neg_g": 0.0031692567281425, + "layer_1_sharpness": 0.06323985010385513, + "layer_2_sharpness": 0.015559149906039238, + "layer_3_sharpness": 0.03675038740038872, + "layer_4_sharpness": 0.01937709003686905, + "layer_5_sharpness": 0.02375740557909012, + "layer_6_sharpness": 0.025188755244016647, + "layer_7_sharpness": 0.0318901389837265, + "layer_8_sharpness": 0.0359685942530632, + "layer_9_sharpness": 0.026951700448989868, + "layer_10_sharpness": 0.01979675143957138, + "layer_11_sharpness": 0.020238975062966347, + "layer_12_sharpness": 0.03740312531590462 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_6500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..4beabcd02cc1dd12a4653e66a7ec71db755e569d --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3569905757904053, + "total_l1_linf_norm": 8145.69189453125, + "total_spectral_norm": 1.3569905757904053, + "layer_1_update_fnorm": 0.06097286194562912, + "layer_1_max_l1_linf_norm": 0.2815793752670288, + "layer_1_max_spectral_norm": 0.006517300382256508, + "layer_2_update_fnorm": 0.05887904763221741, + "layer_2_max_l1_linf_norm": 0.339761346578598, + "layer_2_max_spectral_norm": 0.007673454005271196, + "layer_3_update_fnorm": 0.0585421547293663, + "layer_3_max_l1_linf_norm": 0.33129170536994934, + "layer_3_max_spectral_norm": 0.00750319566577673, + "layer_4_update_fnorm": 0.05998830497264862, + "layer_4_max_l1_linf_norm": 0.3493553102016449, + "layer_4_max_spectral_norm": 0.007788835093379021, + "layer_5_update_fnorm": 0.0607486292719841, + "layer_5_max_l1_linf_norm": 0.31987830996513367, + "layer_5_max_spectral_norm": 0.007235857658088207, + "layer_6_update_fnorm": 0.06118910759687424, + "layer_6_max_l1_linf_norm": 0.3060220181941986, + "layer_6_max_spectral_norm": 0.006862782873213291, + "layer_7_update_fnorm": 0.061221834272146225, + "layer_7_max_l1_linf_norm": 0.2992739677429199, + "layer_7_max_spectral_norm": 0.006727355532348156, + "layer_8_update_fnorm": 0.06118512898683548, + "layer_8_max_l1_linf_norm": 0.29359060525894165, + "layer_8_max_spectral_norm": 0.0066785188391804695, + "layer_9_update_fnorm": 0.06121847778558731, + "layer_9_max_l1_linf_norm": 0.3060237765312195, + "layer_9_max_spectral_norm": 0.0069971815682947636, + "layer_10_update_fnorm": 0.06130027025938034, + "layer_10_max_l1_linf_norm": 0.3554418981075287, + "layer_10_max_spectral_norm": 0.008053946308791637, + 
"layer_11_update_fnorm": 0.06125405430793762, + "layer_11_max_l1_linf_norm": 0.36034345626831055, + "layer_11_max_spectral_norm": 0.008120432496070862, + "layer_12_update_fnorm": 0.06125890836119652, + "layer_12_max_l1_linf_norm": 0.35151833295822144, + "layer_12_max_spectral_norm": 0.008072944357991219, + "total_sharpness": 0.003699091263115406, + "ip_v_neg_g": 0.0015458915149793029, + "cos_v_neg_g": 0.00036412806366570294, + "v_norm": 1.3569905757904053, + "g_norm": 3.1285853385925293, + "hv_norm": 0.5900687575340271, + "cos_v_hv": 0.008506858721375465, + "hg_norm": 132.46771240234375, + "cos_g_hg": 0.4686322808265686, + "v_parallel_norm": 1.8164624634664506e-05, + "v_perp_norm": 1.3569905757904053, + "layer_1_v_norm": 0.06097286194562912, + "layer_1_cos_v_neg_g": 0.0014663367765024304, + "layer_2_v_norm": 0.05887904763221741, + "layer_2_cos_v_neg_g": 0.0021251938305795193, + "layer_3_v_norm": 0.0585421547293663, + "layer_3_cos_v_neg_g": 0.0032193458173424006, + "layer_4_v_norm": 0.05998830497264862, + "layer_4_cos_v_neg_g": 0.0011325677623972297, + "layer_5_v_norm": 0.0607486292719841, + "layer_5_cos_v_neg_g": 0.0009826596360653639, + "layer_6_v_norm": 0.061189111322164536, + "layer_6_cos_v_neg_g": 0.002320205792784691, + "layer_7_v_norm": 0.061221834272146225, + "layer_7_cos_v_neg_g": 0.0027548789512366056, + "layer_8_v_norm": 0.06118512898683548, + "layer_8_cos_v_neg_g": 0.0032689443323761225, + "layer_9_v_norm": 0.06121847778558731, + "layer_9_cos_v_neg_g": 0.003402746981009841, + "layer_10_v_norm": 0.06130027025938034, + "layer_10_cos_v_neg_g": 0.00363575154915452, + "layer_11_v_norm": 0.06125405430793762, + "layer_11_cos_v_neg_g": 0.0030050287023186684, + "layer_12_v_norm": 0.06125890836119652, + "layer_12_cos_v_neg_g": 0.0017030742019414902, + "layer_1_sharpness": 0.03696185722947121, + "layer_2_sharpness": 0.012205561622977257, + "layer_3_sharpness": 0.024704696610569954, + "layer_4_sharpness": 0.015330544672906399, + "layer_5_sharpness": 0.016613885760307312, + "layer_6_sharpness": 0.02185053750872612, + "layer_7_sharpness": 0.024500219151377678, + "layer_8_sharpness": 0.026625556871294975, + "layer_9_sharpness": 0.019831553101539612, + "layer_10_sharpness": 0.0170726515352726, + "layer_11_sharpness": 0.014571759849786758, + "layer_12_sharpness": 0.023654090240597725 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_7000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..42ea8baeed0c98a163a13607e11167d8bc6ea5c5 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.353485345840454, + "total_l1_linf_norm": 8125.0986328125, + "total_spectral_norm": 1.353485345840454, + "layer_1_update_fnorm": 0.061172910034656525, + "layer_1_max_l1_linf_norm": 0.33375445008277893, + "layer_1_max_spectral_norm": 0.007522315718233585, + "layer_2_update_fnorm": 0.0592479333281517, + "layer_2_max_l1_linf_norm": 0.3913043141365051, + "layer_2_max_spectral_norm": 0.008787855505943298, + "layer_3_update_fnorm": 0.05862530693411827, + "layer_3_max_l1_linf_norm": 0.3915897607803345, + "layer_3_max_spectral_norm": 0.008745796047151089, + "layer_4_update_fnorm": 0.060176536440849304, + "layer_4_max_l1_linf_norm": 0.36307424306869507, + "layer_4_max_spectral_norm": 0.00818988960236311, + 
"layer_5_update_fnorm": 0.061063848435878754, + "layer_5_max_l1_linf_norm": 0.3457636833190918, + "layer_5_max_spectral_norm": 0.007824371568858624, + "layer_6_update_fnorm": 0.06152581423521042, + "layer_6_max_l1_linf_norm": 0.35865771770477295, + "layer_6_max_spectral_norm": 0.008030761033296585, + "layer_7_update_fnorm": 0.06151311472058296, + "layer_7_max_l1_linf_norm": 0.34118619561195374, + "layer_7_max_spectral_norm": 0.007606958970427513, + "layer_8_update_fnorm": 0.061485160142183304, + "layer_8_max_l1_linf_norm": 0.344914972782135, + "layer_8_max_spectral_norm": 0.007687022443860769, + "layer_9_update_fnorm": 0.06132727488875389, + "layer_9_max_l1_linf_norm": 0.33685749769210815, + "layer_9_max_spectral_norm": 0.007526504807174206, + "layer_10_update_fnorm": 0.06150908023118973, + "layer_10_max_l1_linf_norm": 0.3681226372718811, + "layer_10_max_spectral_norm": 0.008264650590717793, + "layer_11_update_fnorm": 0.06147434934973717, + "layer_11_max_l1_linf_norm": 0.39189159870147705, + "layer_11_max_spectral_norm": 0.00872738379985094, + "layer_12_update_fnorm": 0.061430104076862335, + "layer_12_max_l1_linf_norm": 0.3869023025035858, + "layer_12_max_spectral_norm": 0.008745891973376274, + "total_sharpness": 0.004351604264229536, + "ip_v_neg_g": 0.0032319137826561928, + "cos_v_neg_g": 0.0007426654337905347, + "v_norm": 1.353485345840454, + "g_norm": 3.2152369022369385, + "hv_norm": 0.6835647821426392, + "cos_v_hv": 0.008616349659860134, + "hg_norm": 147.4119415283203, + "cos_g_hg": 0.45367079973220825, + "v_parallel_norm": 2.055449840554502e-05, + "v_perp_norm": 1.353485345840454, + "layer_1_v_norm": 0.061172910034656525, + "layer_1_cos_v_neg_g": 0.00745775830000639, + "layer_2_v_norm": 0.0592479333281517, + "layer_2_cos_v_neg_g": 0.004916926845908165, + "layer_3_v_norm": 0.05862530693411827, + "layer_3_cos_v_neg_g": 0.006367159076035023, + "layer_4_v_norm": 0.060176536440849304, + "layer_4_cos_v_neg_g": 0.003640315728262067, + "layer_5_v_norm": 0.061063848435878754, + "layer_5_cos_v_neg_g": 0.004345747642219067, + "layer_6_v_norm": 0.06152581423521042, + "layer_6_cos_v_neg_g": 0.0052837589755654335, + "layer_7_v_norm": 0.06151311472058296, + "layer_7_cos_v_neg_g": 0.004075733013451099, + "layer_8_v_norm": 0.061485156416893005, + "layer_8_cos_v_neg_g": 0.0053549897857010365, + "layer_9_v_norm": 0.06132727488875389, + "layer_9_cos_v_neg_g": 0.004453599452972412, + "layer_10_v_norm": 0.06150908023118973, + "layer_10_cos_v_neg_g": 0.0030818323139101267, + "layer_11_v_norm": 0.06147434934973717, + "layer_11_cos_v_neg_g": 0.0021522047463804483, + "layer_12_v_norm": 0.061430104076862335, + "layer_12_cos_v_neg_g": 0.001650982885621488, + "layer_1_sharpness": 0.05234378203749657, + "layer_2_sharpness": 0.0180507842451334, + "layer_3_sharpness": 0.04437666013836861, + "layer_4_sharpness": 0.016535386443138123, + "layer_5_sharpness": 0.02101997286081314, + "layer_6_sharpness": 0.02780948393046856, + "layer_7_sharpness": 0.03568127006292343, + "layer_8_sharpness": 0.03522563725709915, + "layer_9_sharpness": 0.02203049324452877, + "layer_10_sharpness": 0.016327235847711563, + "layer_11_sharpness": 0.014910545200109482, + "layer_12_sharpness": 0.031078066676855087 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_7500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_7500.json new file mode 100644 index 
0000000000000000000000000000000000000000..5821463ee0b1b470a33f4ae74412b55245cbd4b8 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3517124652862549, + "total_l1_linf_norm": 8103.2587890625, + "total_spectral_norm": 1.3517123460769653, + "layer_1_update_fnorm": 0.060851745307445526, + "layer_1_max_l1_linf_norm": 0.38374942541122437, + "layer_1_max_spectral_norm": 0.00851758848875761, + "layer_2_update_fnorm": 0.05898270383477211, + "layer_2_max_l1_linf_norm": 0.40699321031570435, + "layer_2_max_spectral_norm": 0.00909560825675726, + "layer_3_update_fnorm": 0.05866001173853874, + "layer_3_max_l1_linf_norm": 0.3895670175552368, + "layer_3_max_spectral_norm": 0.008694659918546677, + "layer_4_update_fnorm": 0.06019139289855957, + "layer_4_max_l1_linf_norm": 0.37200939655303955, + "layer_4_max_spectral_norm": 0.00843591894954443, + "layer_5_update_fnorm": 0.061013754457235336, + "layer_5_max_l1_linf_norm": 0.3567398190498352, + "layer_5_max_spectral_norm": 0.008009095676243305, + "layer_6_update_fnorm": 0.06136271730065346, + "layer_6_max_l1_linf_norm": 0.3368867039680481, + "layer_6_max_spectral_norm": 0.007547557819634676, + "layer_7_update_fnorm": 0.06129029765725136, + "layer_7_max_l1_linf_norm": 0.29965704679489136, + "layer_7_max_spectral_norm": 0.00675841374322772, + "layer_8_update_fnorm": 0.061194032430648804, + "layer_8_max_l1_linf_norm": 0.3048640191555023, + "layer_8_max_spectral_norm": 0.006824789568781853, + "layer_9_update_fnorm": 0.06126388907432556, + "layer_9_max_l1_linf_norm": 0.34087735414505005, + "layer_9_max_spectral_norm": 0.007678952068090439, + "layer_10_update_fnorm": 0.06142140552401543, + "layer_10_max_l1_linf_norm": 0.3651440739631653, + "layer_10_max_spectral_norm": 0.008212337270379066, + "layer_11_update_fnorm": 0.06141500547528267, + "layer_11_max_l1_linf_norm": 0.38839930295944214, + "layer_11_max_spectral_norm": 0.008704420179128647, + "layer_12_update_fnorm": 0.06134181469678879, + "layer_12_max_l1_linf_norm": 0.37246501445770264, + "layer_12_max_spectral_norm": 0.008387495763599873, + "total_sharpness": 0.004571467638015747, + "ip_v_neg_g": 0.004586962983012199, + "cos_v_neg_g": 0.0010082959197461605, + "v_norm": 1.3517124652862549, + "g_norm": 3.365525722503662, + "hv_norm": 0.8203746676445007, + "cos_v_hv": 0.007532302290201187, + "hg_norm": 206.62937927246094, + "cos_g_hg": 0.4663063883781433, + "v_parallel_norm": 3.0111046726233326e-05, + "v_perp_norm": 1.3517124652862549, + "layer_1_v_norm": 0.060851745307445526, + "layer_1_cos_v_neg_g": 0.012114915065467358, + "layer_2_v_norm": 0.05898270383477211, + "layer_2_cos_v_neg_g": 0.007602188270539045, + "layer_3_v_norm": 0.05866001173853874, + "layer_3_cos_v_neg_g": 0.0067283702082931995, + "layer_4_v_norm": 0.06019139289855957, + "layer_4_cos_v_neg_g": 0.004988688509911299, + "layer_5_v_norm": 0.061013754457235336, + "layer_5_cos_v_neg_g": 0.003630192019045353, + "layer_6_v_norm": 0.061362721025943756, + "layer_6_cos_v_neg_g": 0.0036685396917164326, + "layer_7_v_norm": 0.06129029765725136, + "layer_7_cos_v_neg_g": 0.004021904896944761, + "layer_8_v_norm": 0.061194032430648804, + "layer_8_cos_v_neg_g": 0.005337906535714865, + "layer_9_v_norm": 0.06126388907432556, + "layer_9_cos_v_neg_g": 0.00590845150873065, + "layer_10_v_norm": 0.06142140552401543, + "layer_10_cos_v_neg_g": 0.006501794792711735, + "layer_11_v_norm": 0.06141500547528267, + "layer_11_cos_v_neg_g": 0.005695489700883627, + 
"layer_12_v_norm": 0.06134181469678879, + "layer_12_cos_v_neg_g": 0.005646969191730022, + "layer_1_sharpness": 0.22346895933151245, + "layer_2_sharpness": 0.03952503204345703, + "layer_3_sharpness": 0.034911029040813446, + "layer_4_sharpness": 0.017442479729652405, + "layer_5_sharpness": 0.019592586904764175, + "layer_6_sharpness": 0.02235317789018154, + "layer_7_sharpness": 0.023358091711997986, + "layer_8_sharpness": 0.027814956381917, + "layer_9_sharpness": 0.020738137885928154, + "layer_10_sharpness": 0.014935916289687157, + "layer_11_sharpness": 0.014128775335848331, + "layer_12_sharpness": 0.02305208332836628 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_8000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..df66dbb55120eb887dc3a2dbd833fa85e30df5a9 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3437312841415405, + "total_l1_linf_norm": 8068.423828125, + "total_spectral_norm": 1.3437310457229614, + "layer_1_update_fnorm": 0.06101447716355324, + "layer_1_max_l1_linf_norm": 0.36348897218704224, + "layer_1_max_spectral_norm": 0.008076365105807781, + "layer_2_update_fnorm": 0.05888868495821953, + "layer_2_max_l1_linf_norm": 0.37215477228164673, + "layer_2_max_spectral_norm": 0.008399405516684055, + "layer_3_update_fnorm": 0.0587504617869854, + "layer_3_max_l1_linf_norm": 0.35630643367767334, + "layer_3_max_spectral_norm": 0.008011311292648315, + "layer_4_update_fnorm": 0.06025826558470726, + "layer_4_max_l1_linf_norm": 0.3552932143211365, + "layer_4_max_spectral_norm": 0.007999284192919731, + "layer_5_update_fnorm": 0.06093721091747284, + "layer_5_max_l1_linf_norm": 0.3484574556350708, + "layer_5_max_spectral_norm": 0.007776112295687199, + "layer_6_update_fnorm": 0.061201564967632294, + "layer_6_max_l1_linf_norm": 0.3165002465248108, + "layer_6_max_spectral_norm": 0.007140045054256916, + "layer_7_update_fnorm": 0.061400510370731354, + "layer_7_max_l1_linf_norm": 0.3245474100112915, + "layer_7_max_spectral_norm": 0.007330903317779303, + "layer_8_update_fnorm": 0.06126120686531067, + "layer_8_max_l1_linf_norm": 0.329738974571228, + "layer_8_max_spectral_norm": 0.0073553347028791904, + "layer_9_update_fnorm": 0.061372559517621994, + "layer_9_max_l1_linf_norm": 0.3480483293533325, + "layer_9_max_spectral_norm": 0.007788842543959618, + "layer_10_update_fnorm": 0.06160067766904831, + "layer_10_max_l1_linf_norm": 0.3913191258907318, + "layer_10_max_spectral_norm": 0.008743762038648129, + "layer_11_update_fnorm": 0.061669908463954926, + "layer_11_max_l1_linf_norm": 0.41722536087036133, + "layer_11_max_spectral_norm": 0.009275526739656925, + "layer_12_update_fnorm": 0.06172604113817215, + "layer_12_max_l1_linf_norm": 0.45355433225631714, + "layer_12_max_spectral_norm": 0.009982212446630001, + "total_sharpness": 0.004772666376084089, + "ip_v_neg_g": 0.004276870749890804, + "cos_v_neg_g": 0.0010190509492531419, + "v_norm": 1.3437312841415405, + "g_norm": 3.1233296394348145, + "hv_norm": 0.8252756595611572, + "cos_v_hv": 0.007770955562591553, + "hg_norm": 173.26805114746094, + "cos_g_hg": 0.4524116516113281, + "v_parallel_norm": 3.436838596826419e-05, + "v_perp_norm": 1.3437312841415405, + "layer_1_v_norm": 0.06101447716355324, + "layer_1_cos_v_neg_g": 0.005959570873528719, + 
"layer_2_v_norm": 0.05888868495821953, + "layer_2_cos_v_neg_g": 0.00639295531436801, + "layer_3_v_norm": 0.0587504617869854, + "layer_3_cos_v_neg_g": 0.006222747266292572, + "layer_4_v_norm": 0.06025826558470726, + "layer_4_cos_v_neg_g": 0.005808437708765268, + "layer_5_v_norm": 0.06093721091747284, + "layer_5_cos_v_neg_g": 0.004360026679933071, + "layer_6_v_norm": 0.061201564967632294, + "layer_6_cos_v_neg_g": 0.004563220776617527, + "layer_7_v_norm": 0.061400510370731354, + "layer_7_cos_v_neg_g": 0.006822772789746523, + "layer_8_v_norm": 0.06126120686531067, + "layer_8_cos_v_neg_g": 0.007813339121639729, + "layer_9_v_norm": 0.061372559517621994, + "layer_9_cos_v_neg_g": 0.006214122287929058, + "layer_10_v_norm": 0.06160067766904831, + "layer_10_cos_v_neg_g": 0.0056230612099170685, + "layer_11_v_norm": 0.061669908463954926, + "layer_11_cos_v_neg_g": 0.004589098040014505, + "layer_12_v_norm": 0.06172604113817215, + "layer_12_cos_v_neg_g": 0.005462584085762501, + "layer_1_sharpness": 0.12591880559921265, + "layer_2_sharpness": 0.02701355144381523, + "layer_3_sharpness": 0.02719217725098133, + "layer_4_sharpness": 0.015310860238969326, + "layer_5_sharpness": 0.018933076411485672, + "layer_6_sharpness": 0.021148815751075745, + "layer_7_sharpness": 0.02794347144663334, + "layer_8_sharpness": 0.030737649649381638, + "layer_9_sharpness": 0.023145105689764023, + "layer_10_sharpness": 0.02013278380036354, + "layer_11_sharpness": 0.02090613543987274, + "layer_12_sharpness": 0.04217814281582832 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_8500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..07f62949df7492f7d19cbeedd53c1c29fc85684f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.347874402999878, + "total_l1_linf_norm": 8078.2626953125, + "total_spectral_norm": 1.3478741645812988, + "layer_1_update_fnorm": 0.06088700518012047, + "layer_1_max_l1_linf_norm": 0.32299214601516724, + "layer_1_max_spectral_norm": 0.00734873628243804, + "layer_2_update_fnorm": 0.0588492751121521, + "layer_2_max_l1_linf_norm": 0.3615596294403076, + "layer_2_max_spectral_norm": 0.008155157789587975, + "layer_3_update_fnorm": 0.058811333030462265, + "layer_3_max_l1_linf_norm": 0.37973272800445557, + "layer_3_max_spectral_norm": 0.008444641716778278, + "layer_4_update_fnorm": 0.06013939529657364, + "layer_4_max_l1_linf_norm": 0.3622511029243469, + "layer_4_max_spectral_norm": 0.008172739297151566, + "layer_5_update_fnorm": 0.06082288175821304, + "layer_5_max_l1_linf_norm": 0.34212154150009155, + "layer_5_max_spectral_norm": 0.007692694664001465, + "layer_6_update_fnorm": 0.06115052476525307, + "layer_6_max_l1_linf_norm": 0.2966352105140686, + "layer_6_max_spectral_norm": 0.006647635716944933, + "layer_7_update_fnorm": 0.06120990961790085, + "layer_7_max_l1_linf_norm": 0.2950819134712219, + "layer_7_max_spectral_norm": 0.006603892892599106, + "layer_8_update_fnorm": 0.061129793524742126, + "layer_8_max_l1_linf_norm": 0.2940915822982788, + "layer_8_max_spectral_norm": 0.006628848146647215, + "layer_9_update_fnorm": 0.06113729625940323, + "layer_9_max_l1_linf_norm": 0.31622153520584106, + "layer_9_max_spectral_norm": 0.007136654108762741, + "layer_10_update_fnorm": 0.06132260337471962, + 
"layer_10_max_l1_linf_norm": 0.3555009961128235, + "layer_10_max_spectral_norm": 0.00798452366143465, + "layer_11_update_fnorm": 0.0611620768904686, + "layer_11_max_l1_linf_norm": 0.3679179847240448, + "layer_11_max_spectral_norm": 0.008236361667513847, + "layer_12_update_fnorm": 0.06130436807870865, + "layer_12_max_l1_linf_norm": 0.3705659508705139, + "layer_12_max_spectral_norm": 0.008328461088240147, + "total_sharpness": 0.0035249735228717327, + "ip_v_neg_g": 0.0032120896503329277, + "cos_v_neg_g": 0.0007827501394785941, + "v_norm": 1.347874402999878, + "g_norm": 3.0444939136505127, + "hv_norm": 0.5504859089851379, + "cos_v_hv": 0.008630960248410702, + "hg_norm": 107.34870147705078, + "cos_g_hg": 0.4558972418308258, + "v_parallel_norm": 2.360723192396108e-05, + "v_perp_norm": 1.347874402999878, + "layer_1_v_norm": 0.06088700518012047, + "layer_1_cos_v_neg_g": 0.005155845545232296, + "layer_2_v_norm": 0.0588492751121521, + "layer_2_cos_v_neg_g": 0.004620793741196394, + "layer_3_v_norm": 0.058811333030462265, + "layer_3_cos_v_neg_g": 0.004126179963350296, + "layer_4_v_norm": 0.06013939529657364, + "layer_4_cos_v_neg_g": 0.004627291578799486, + "layer_5_v_norm": 0.06082288175821304, + "layer_5_cos_v_neg_g": 0.0055439709685742855, + "layer_6_v_norm": 0.06115052476525307, + "layer_6_cos_v_neg_g": 0.005339693743735552, + "layer_7_v_norm": 0.06120990961790085, + "layer_7_cos_v_neg_g": 0.005862041842192411, + "layer_8_v_norm": 0.061129793524742126, + "layer_8_cos_v_neg_g": 0.005646731238812208, + "layer_9_v_norm": 0.06113729625940323, + "layer_9_cos_v_neg_g": 0.004707379266619682, + "layer_10_v_norm": 0.06132260337471962, + "layer_10_cos_v_neg_g": 0.005142907612025738, + "layer_11_v_norm": 0.0611620768904686, + "layer_11_cos_v_neg_g": 0.0039587365463376045, + "layer_12_v_norm": 0.06130436807870865, + "layer_12_cos_v_neg_g": 0.0022118608467280865, + "layer_1_sharpness": 0.029637319967150688, + "layer_2_sharpness": 0.0159523356705904, + "layer_3_sharpness": 0.033248171210289, + "layer_4_sharpness": 0.016316737979650497, + "layer_5_sharpness": 0.02013365365564823, + "layer_6_sharpness": 0.020083697512745857, + "layer_7_sharpness": 0.024149663746356964, + "layer_8_sharpness": 0.025972263887524605, + "layer_9_sharpness": 0.017891934141516685, + "layer_10_sharpness": 0.014658264815807343, + "layer_11_sharpness": 0.01440802775323391, + "layer_12_sharpness": 0.028969183564186096 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_9000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..ab432235f8a75d1e1ef21643c2a33dbbecefab32 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3457107543945312, + "total_l1_linf_norm": 8075.755859375, + "total_spectral_norm": 1.3457109928131104, + "layer_1_update_fnorm": 0.06094101071357727, + "layer_1_max_l1_linf_norm": 0.3267756700515747, + "layer_1_max_spectral_norm": 0.007369705941528082, + "layer_2_update_fnorm": 0.0590461865067482, + "layer_2_max_l1_linf_norm": 0.3707670271396637, + "layer_2_max_spectral_norm": 0.00838276743888855, + "layer_3_update_fnorm": 0.058992281556129456, + "layer_3_max_l1_linf_norm": 0.36383187770843506, + "layer_3_max_spectral_norm": 0.008156820200383663, + "layer_4_update_fnorm": 0.0603686198592186, + "layer_4_max_l1_linf_norm": 
0.361878901720047, + "layer_4_max_spectral_norm": 0.008118039928376675, + "layer_5_update_fnorm": 0.060917168855667114, + "layer_5_max_l1_linf_norm": 0.33452069759368896, + "layer_5_max_spectral_norm": 0.007576765492558479, + "layer_6_update_fnorm": 0.06119072809815407, + "layer_6_max_l1_linf_norm": 0.3094891905784607, + "layer_6_max_spectral_norm": 0.006915755569934845, + "layer_7_update_fnorm": 0.06138306111097336, + "layer_7_max_l1_linf_norm": 0.3077070415019989, + "layer_7_max_spectral_norm": 0.00694879237562418, + "layer_8_update_fnorm": 0.06119604781270027, + "layer_8_max_l1_linf_norm": 0.3062363266944885, + "layer_8_max_spectral_norm": 0.00692979758605361, + "layer_9_update_fnorm": 0.06127195432782173, + "layer_9_max_l1_linf_norm": 0.32356318831443787, + "layer_9_max_spectral_norm": 0.0072710951790213585, + "layer_10_update_fnorm": 0.061466120183467865, + "layer_10_max_l1_linf_norm": 0.3534625768661499, + "layer_10_max_spectral_norm": 0.008003530092537403, + "layer_11_update_fnorm": 0.06136469915509224, + "layer_11_max_l1_linf_norm": 0.3706985414028168, + "layer_11_max_spectral_norm": 0.008279841393232346, + "layer_12_update_fnorm": 0.06132662296295166, + "layer_12_max_l1_linf_norm": 0.35865795612335205, + "layer_12_max_spectral_norm": 0.008130918256938457, + "total_sharpness": 0.0037028908263891935, + "ip_v_neg_g": 0.002145597245544195, + "cos_v_neg_g": 0.0004769203078467399, + "v_norm": 1.3457107543945312, + "g_norm": 3.3431098461151123, + "hv_norm": 0.6917877197265625, + "cos_v_hv": 0.007203106302767992, + "hg_norm": 634.3978271484375, + "cos_g_hg": 0.46486636996269226, + "v_parallel_norm": 3.2813248253660277e-05, + "v_perp_norm": 1.3457107543945312, + "layer_1_v_norm": 0.06094101071357727, + "layer_1_cos_v_neg_g": -0.00010149703302886337, + "layer_2_v_norm": 0.0590461865067482, + "layer_2_cos_v_neg_g": -0.0007821092149242759, + "layer_3_v_norm": 0.05899227783083916, + "layer_3_cos_v_neg_g": 0.0002054817887255922, + "layer_4_v_norm": 0.0603686198592186, + "layer_4_cos_v_neg_g": 0.0008918073726817966, + "layer_5_v_norm": 0.060917168855667114, + "layer_5_cos_v_neg_g": 0.002447485225275159, + "layer_6_v_norm": 0.061190731823444366, + "layer_6_cos_v_neg_g": 0.0027942913584411144, + "layer_7_v_norm": 0.06138306111097336, + "layer_7_cos_v_neg_g": 0.0016171294264495373, + "layer_8_v_norm": 0.06119604781270027, + "layer_8_cos_v_neg_g": 0.003031587228178978, + "layer_9_v_norm": 0.06127195432782173, + "layer_9_cos_v_neg_g": 0.004086683504283428, + "layer_10_v_norm": 0.061466120183467865, + "layer_10_cos_v_neg_g": 0.003567164298146963, + "layer_11_v_norm": 0.06136469915509224, + "layer_11_cos_v_neg_g": 0.004766935482621193, + "layer_12_v_norm": 0.06132662296295166, + "layer_12_cos_v_neg_g": 0.006491187494248152, + "layer_1_sharpness": 0.053067777305841446, + "layer_2_sharpness": 0.01454067137092352, + "layer_3_sharpness": 0.0213864054530859, + "layer_4_sharpness": 0.012819736264646053, + "layer_5_sharpness": 0.017147473990917206, + "layer_6_sharpness": 0.01958928257226944, + "layer_7_sharpness": 0.023083262145519257, + "layer_8_sharpness": 0.02450527623295784, + "layer_9_sharpness": 0.018414931371808052, + "layer_10_sharpness": 0.013198197819292545, + "layer_11_sharpness": 0.013828362338244915, + "layer_12_sharpness": 0.023272372782230377 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_9500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_9500.json new file mode 
100644 index 0000000000000000000000000000000000000000..96034b0b5939120144f2536d9ee7b79a258ec7bd --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.341964602470398, + "total_l1_linf_norm": 8050.5087890625, + "total_spectral_norm": 1.341964602470398, + "layer_1_update_fnorm": 0.06093679368495941, + "layer_1_max_l1_linf_norm": 0.3369292914867401, + "layer_1_max_spectral_norm": 0.007618387695401907, + "layer_2_update_fnorm": 0.058991119265556335, + "layer_2_max_l1_linf_norm": 0.3778406083583832, + "layer_2_max_spectral_norm": 0.008563044480979443, + "layer_3_update_fnorm": 0.05905529111623764, + "layer_3_max_l1_linf_norm": 0.3675881028175354, + "layer_3_max_spectral_norm": 0.008302440866827965, + "layer_4_update_fnorm": 0.060321174561977386, + "layer_4_max_l1_linf_norm": 0.3439404368400574, + "layer_4_max_spectral_norm": 0.007773945573717356, + "layer_5_update_fnorm": 0.06091915816068649, + "layer_5_max_l1_linf_norm": 0.33395424485206604, + "layer_5_max_spectral_norm": 0.007585856597870588, + "layer_6_update_fnorm": 0.06130187585949898, + "layer_6_max_l1_linf_norm": 0.32454150915145874, + "layer_6_max_spectral_norm": 0.007348506711423397, + "layer_7_update_fnorm": 0.061288297176361084, + "layer_7_max_l1_linf_norm": 0.3014589548110962, + "layer_7_max_spectral_norm": 0.006792136467993259, + "layer_8_update_fnorm": 0.06128988042473793, + "layer_8_max_l1_linf_norm": 0.3109428882598877, + "layer_8_max_spectral_norm": 0.007048215717077255, + "layer_9_update_fnorm": 0.06134774163365364, + "layer_9_max_l1_linf_norm": 0.34578967094421387, + "layer_9_max_spectral_norm": 0.0077915918081998825, + "layer_10_update_fnorm": 0.061481427401304245, + "layer_10_max_l1_linf_norm": 0.38740992546081543, + "layer_10_max_spectral_norm": 0.008637888357043266, + "layer_11_update_fnorm": 0.06147689372301102, + "layer_11_max_l1_linf_norm": 0.4011041522026062, + "layer_11_max_spectral_norm": 0.00900927372276783, + "layer_12_update_fnorm": 0.06140357628464699, + "layer_12_max_l1_linf_norm": 0.39110827445983887, + "layer_12_max_spectral_norm": 0.008780857548117638, + "total_sharpness": 0.003629348473623395, + "ip_v_neg_g": 0.0037020561285316944, + "cos_v_neg_g": 0.0009146499214693904, + "v_norm": 1.341964602470398, + "g_norm": 3.016108989715576, + "hv_norm": 0.5994952321052551, + "cos_v_hv": 0.008124263025820255, + "hg_norm": 139.33514404296875, + "cos_g_hg": 0.47303932905197144, + "v_parallel_norm": 2.839861554093659e-05, + "v_perp_norm": 1.341964602470398, + "layer_1_v_norm": 0.06093679368495941, + "layer_1_cos_v_neg_g": 0.007255180738866329, + "layer_2_v_norm": 0.058991119265556335, + "layer_2_cos_v_neg_g": 0.007567532826215029, + "layer_3_v_norm": 0.05905529111623764, + "layer_3_cos_v_neg_g": 0.004975580610334873, + "layer_4_v_norm": 0.060321174561977386, + "layer_4_cos_v_neg_g": 0.005535364616662264, + "layer_5_v_norm": 0.06091915816068649, + "layer_5_cos_v_neg_g": 0.004205369856208563, + "layer_6_v_norm": 0.06130187585949898, + "layer_6_cos_v_neg_g": 0.00505864666774869, + "layer_7_v_norm": 0.061288297176361084, + "layer_7_cos_v_neg_g": 0.00565663306042552, + "layer_8_v_norm": 0.06128988042473793, + "layer_8_cos_v_neg_g": 0.007465407717972994, + "layer_9_v_norm": 0.06134774163365364, + "layer_9_cos_v_neg_g": 0.007592096924781799, + "layer_10_v_norm": 0.061481427401304245, + "layer_10_cos_v_neg_g": 0.0057460349053144455, + "layer_11_v_norm": 0.06147688999772072, + "layer_11_cos_v_neg_g": 0.004495304077863693, 
+ "layer_12_v_norm": 0.06140357628464699, + "layer_12_cos_v_neg_g": 0.0034924657084047794, + "layer_1_sharpness": 0.03519571200013161, + "layer_2_sharpness": 0.017220890149474144, + "layer_3_sharpness": 0.024222521111369133, + "layer_4_sharpness": 0.014352926053106785, + "layer_5_sharpness": 0.016781218349933624, + "layer_6_sharpness": 0.021331055089831352, + "layer_7_sharpness": 0.022441435605287552, + "layer_8_sharpness": 0.027199197560548782, + "layer_9_sharpness": 0.02086038514971733, + "layer_10_sharpness": 0.015169884078204632, + "layer_11_sharpness": 0.014147521927952766, + "layer_12_sharpness": 0.027418842539191246 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/training_log.txt b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..310e96aac7e3769467e70829e1d78928034068dd --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.001_seed_44/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you 
passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in 
{"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. 
+ """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. + # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? 
+ grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. 
Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. + loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. 
Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = <v, -g>; cos_v_neg_g = <v, -g> / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for
group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + 
device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
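+ # The direction recorded below is v = theta_before - theta_after, i.e. the negative of the
+ # parameter change the optimizer applies this step (learning-rate scaled, pointing along the
+ # descent step); the sharpness analysis uses it as the probe direction for v^T H v / (v^T v)
+ # and for the cos(v, -g) alignment metrics.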
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026603 +step:0 train loss:11.019227 +step:1 train loss:11.024735 +step:2 train loss:11.018307 +step:3 train loss:11.016239 +step:4 train loss:11.010803 +step:5 train loss:11.004908 +step:6 train loss:10.994957 +step:7 train loss:10.987471 +step:8 train loss:10.976641 +step:9 train loss:10.965153 +step:10 train loss:10.950165 +step:11 train loss:10.939953 +step:12 train loss:10.919480 +step:13 train loss:10.905028 +step:14 train loss:10.882958 +step:15 train loss:10.865052 +step:16 train loss:10.843879 +step:17 train loss:10.823975 +step:18 train loss:10.798833 +step:19 train loss:10.774611 +step:20 train loss:10.745102 +step:21 train loss:10.721910 +step:22 train loss:10.687216 +step:23 train loss:10.662731 +step:24 train loss:10.624081 +step:25 train loss:10.600607 +step:26 train loss:10.559893 +step:27 train loss:10.525505 +step:28 train loss:10.494772 +step:29 train loss:10.458115 +step:30 train loss:10.419331 +step:31 train loss:10.375756 +step:32 train loss:10.331447 +step:33 train loss:10.294002 +step:34 train loss:10.257494 +step:35 train loss:10.204170 +step:36 train loss:10.162193 +step:37 train loss:10.105415 
+step:38 train loss:10.069136 +step:39 train loss:10.013648 +step:40 train loss:9.968587 +step:41 train loss:9.909500 +step:42 train loss:9.873577 +step:43 train loss:9.798200 +step:44 train loss:9.757629 +step:45 train loss:9.695082 +step:46 train loss:9.654797 +step:47 train loss:9.593014 +step:48 train loss:9.535325 +step:49 train loss:9.467770 +step:50 train loss:9.405278 +step:51 train loss:9.338597 +step:52 train loss:9.297021 +step:53 train loss:9.231423 +step:54 train loss:9.174736 +step:55 train loss:9.102027 +step:56 train loss:9.041641 +step:57 train loss:8.991793 +step:58 train loss:8.910569 +step:59 train loss:8.857420 +step:60 train loss:8.794079 +step:61 train loss:8.737702 +step:62 train loss:8.675240 +step:63 train loss:8.649685 +step:64 train loss:8.540056 +step:65 train loss:8.488802 +step:66 train loss:8.444206 +step:67 train loss:8.389730 +step:68 train loss:8.331909 +step:69 train loss:8.263893 +step:70 train loss:8.208508 +step:71 train loss:8.134100 +step:72 train loss:8.114820 +step:73 train loss:8.046484 +step:74 train loss:8.021495 +step:75 train loss:7.943194 +step:76 train loss:7.963986 +step:77 train loss:7.876433 +step:78 train loss:7.736255 +step:79 train loss:7.776563 +step:80 train loss:7.744181 +step:81 train loss:7.755136 +step:82 train loss:7.724148 +step:83 train loss:7.673524 +step:84 train loss:7.638297 +step:85 train loss:7.605925 +step:86 train loss:7.581249 +step:87 train loss:7.553989 +step:88 train loss:7.554813 +step:89 train loss:7.519701 +step:90 train loss:7.554859 +step:91 train loss:7.557783 +step:92 train loss:7.552939 +step:93 train loss:7.505616 +step:94 train loss:7.487918 +step:95 train loss:7.444517 +step:96 train loss:7.522739 +step:97 train loss:7.473475 +step:98 train loss:7.464982 +step:99 train loss:7.428150 +step:100 train loss:7.491340 +step:101 train loss:7.373709 +step:102 train loss:7.361826 +step:103 train loss:7.345967 +step:104 train loss:7.380526 +step:105 train loss:7.425124 +step:106 train loss:7.361510 +step:107 train loss:7.322051 +step:108 train loss:7.327245 +step:109 train loss:7.354669 +step:110 train loss:7.278209 +step:111 train loss:7.282748 +step:112 train loss:7.265621 +step:113 train loss:7.221453 +step:114 train loss:7.275710 +step:115 train loss:7.211456 +step:116 train loss:7.184329 +step:117 train loss:7.117763 +step:118 train loss:7.176205 +step:119 train loss:7.113457 +step:120 train loss:7.116940 +step:121 train loss:7.033610 +step:122 train loss:7.112226 +step:123 train loss:7.027365 +step:124 train loss:7.010740 +step:125 train loss:6.974777 +step:126 train loss:7.056312 +step:127 train loss:6.969022 +step:128 train loss:6.994669 +step:129 train loss:6.963025 +step:130 train loss:7.006012 +step:131 train loss:6.935572 +step:132 train loss:6.842982 +step:133 train loss:6.908279 +step:134 train loss:6.862655 +step:135 train loss:6.771872 +step:136 train loss:6.801622 +step:137 train loss:6.795914 +step:138 train loss:6.723803 +step:139 train loss:6.798357 +step:140 train loss:6.700121 +step:141 train loss:6.793844 +step:142 train loss:6.733232 +step:143 train loss:6.737979 +step:144 train loss:6.701881 +step:145 train loss:6.632514 +step:146 train loss:6.638752 +step:147 train loss:6.690071 +step:148 train loss:6.689690 +step:149 train loss:6.630089 +step:150 train loss:6.637627 +step:151 train loss:6.540509 +step:152 train loss:6.573794 +step:153 train loss:6.546869 +step:154 train loss:6.628136 +step:155 train loss:6.595417 +step:156 train loss:6.625991 +step:157 train loss:6.521063 +step:158 
train loss:6.501024 +step:159 train loss:6.536962 +step:160 train loss:6.510665 +step:161 train loss:6.500556 +step:162 train loss:6.471191 +step:163 train loss:6.484604 +step:164 train loss:6.495565 +step:165 train loss:6.496298 +step:166 train loss:6.446899 +step:167 train loss:6.444741 +step:168 train loss:6.415298 +step:169 train loss:6.364865 +step:170 train loss:6.334935 +step:171 train loss:6.444202 +step:172 train loss:6.372733 +step:173 train loss:6.413185 +step:174 train loss:6.416553 +step:175 train loss:6.377934 +step:176 train loss:6.327142 +step:177 train loss:6.372478 +step:178 train loss:6.377587 +step:179 train loss:6.328986 +step:180 train loss:6.314675 +step:181 train loss:6.347972 +step:182 train loss:6.277540 +step:183 train loss:6.369951 +step:184 train loss:6.336613 +step:185 train loss:6.250039 +step:186 train loss:6.399167 +step:187 train loss:6.332285 +step:188 train loss:6.153207 +step:189 train loss:6.309803 +step:190 train loss:6.300925 +step:191 train loss:6.222202 +step:192 train loss:6.134181 +step:193 train loss:6.283207 +step:194 train loss:6.304448 +step:195 train loss:6.295326 +step:196 train loss:6.267115 +step:197 train loss:6.264072 +step:198 train loss:6.202922 +step:199 train loss:6.283882 +step:200 train loss:6.317516 +step:201 train loss:6.250439 +step:202 train loss:6.252607 +step:203 train loss:6.208460 +step:204 train loss:6.247331 +step:205 train loss:6.099188 +step:206 train loss:6.236424 +step:207 train loss:6.205005 +step:208 train loss:6.150146 +step:209 train loss:6.147347 +step:210 train loss:6.145446 +step:211 train loss:6.218185 +step:212 train loss:6.167772 +step:213 train loss:6.181551 +step:214 train loss:6.163689 +step:215 train loss:6.185811 +step:216 train loss:6.130353 +step:217 train loss:6.134649 +step:218 train loss:6.110358 +step:219 train loss:6.086230 +step:220 train loss:6.132717 +step:221 train loss:6.084199 +step:222 train loss:6.124851 +step:223 train loss:6.141297 +step:224 train loss:6.130251 +step:225 train loss:6.068103 +step:226 train loss:6.071585 +step:227 train loss:6.133692 +step:228 train loss:6.098815 +step:229 train loss:6.171565 +step:230 train loss:6.037723 +step:231 train loss:6.092332 +step:232 train loss:6.076674 +step:233 train loss:6.048770 +step:234 train loss:6.046787 +step:235 train loss:6.127611 +step:236 train loss:6.074633 +step:237 train loss:6.104558 +step:238 train loss:6.108466 +step:239 train loss:6.014054 +step:240 train loss:6.085529 +step:241 train loss:6.120563 +step:242 train loss:6.103229 +step:243 train loss:6.010895 +step:244 train loss:6.039536 +step:245 train loss:6.022882 +step:246 train loss:6.020766 +step:247 train loss:6.011327 +step:248 train loss:5.969962 +step:249 train loss:6.029217 +step:250 validation loss:6.021579 +step:250 train loss:5.995894 +step:251 train loss:6.031888 +step:252 train loss:5.988900 +step:253 train loss:5.992262 +step:254 train loss:5.952560 +step:255 train loss:5.992106 +step:256 train loss:5.990673 +step:257 train loss:6.038387 +step:258 train loss:5.937331 +step:259 train loss:5.962624 +step:260 train loss:5.935462 +step:261 train loss:5.935046 +step:262 train loss:6.001496 +step:263 train loss:5.961571 +step:264 train loss:5.929299 +step:265 train loss:5.957457 +step:266 train loss:5.917049 +step:267 train loss:5.952867 +step:268 train loss:5.906852 +step:269 train loss:5.934766 +step:270 train loss:5.949683 +step:271 train loss:5.938632 +step:272 train loss:5.894545 +step:273 train loss:5.966669 +step:274 train loss:5.876147 +step:275 train 
loss:5.915651 +step:276 train loss:5.889423 +step:277 train loss:5.889820 +step:278 train loss:5.867286 +step:279 train loss:5.836212 +step:280 train loss:5.910774 +step:281 train loss:5.988620 +step:282 train loss:5.869115 +step:283 train loss:5.881510 +step:284 train loss:5.845642 +step:285 train loss:5.897831 +step:286 train loss:5.874252 +step:287 train loss:5.850119 +step:288 train loss:5.829166 +step:289 train loss:5.853996 +step:290 train loss:5.911005 +step:291 train loss:5.836195 +step:292 train loss:5.892765 +step:293 train loss:5.820755 +step:294 train loss:5.932979 +step:295 train loss:5.827731 +step:296 train loss:5.886928 +step:297 train loss:5.920017 +step:298 train loss:5.809880 +step:299 train loss:5.882053 +step:300 train loss:5.801880 +step:301 train loss:5.836011 +step:302 train loss:5.810318 +step:303 train loss:5.825334 +step:304 train loss:5.855780 +step:305 train loss:5.777153 +step:306 train loss:5.800450 +step:307 train loss:5.827225 +step:308 train loss:5.737692 +step:309 train loss:5.875309 +step:310 train loss:5.837486 +step:311 train loss:5.818130 +step:312 train loss:5.802339 +step:313 train loss:5.828163 +step:314 train loss:5.808238 +step:315 train loss:5.769924 +step:316 train loss:5.766390 +step:317 train loss:5.730030 +step:318 train loss:5.731000 +step:319 train loss:5.805696 +step:320 train loss:5.724265 +step:321 train loss:5.782537 +step:322 train loss:5.775466 +step:323 train loss:5.838621 +step:324 train loss:5.781351 +step:325 train loss:5.802313 +step:326 train loss:5.807617 +step:327 train loss:5.783441 +step:328 train loss:5.758474 +step:329 train loss:5.775363 +step:330 train loss:5.707510 +step:331 train loss:5.733800 +step:332 train loss:5.719876 +step:333 train loss:5.658534 +step:334 train loss:5.755364 +step:335 train loss:5.796071 +step:336 train loss:5.918329 +step:337 train loss:5.816034 +step:338 train loss:5.727026 +step:339 train loss:5.686287 +step:340 train loss:5.696559 +step:341 train loss:5.688559 +step:342 train loss:5.754073 +step:343 train loss:5.728540 +step:344 train loss:5.683598 +step:345 train loss:5.659180 +step:346 train loss:5.703960 +step:347 train loss:5.642217 +step:348 train loss:5.650535 +step:349 train loss:5.588979 +step:350 train loss:5.625846 +step:351 train loss:5.694894 +step:352 train loss:5.654613 +step:353 train loss:5.679564 +step:354 train loss:5.635169 +step:355 train loss:5.684047 +step:356 train loss:5.652158 +step:357 train loss:5.723651 +step:358 train loss:5.750944 +step:359 train loss:5.584950 +step:360 train loss:5.704249 +step:361 train loss:5.689801 +step:362 train loss:5.668557 +step:363 train loss:5.623461 +step:364 train loss:5.742470 +step:365 train loss:5.676064 +step:366 train loss:5.645038 +step:367 train loss:5.668805 +step:368 train loss:5.640085 +step:369 train loss:5.617858 +step:370 train loss:5.672552 +step:371 train loss:5.608713 +step:372 train loss:5.679093 +step:373 train loss:5.628675 +step:374 train loss:5.617082 +step:375 train loss:5.646767 +step:376 train loss:5.631882 +step:377 train loss:5.525508 +step:378 train loss:5.604692 +step:379 train loss:5.657555 +step:380 train loss:5.586180 +step:381 train loss:5.644693 +step:382 train loss:5.638626 +step:383 train loss:5.609059 +step:384 train loss:5.586104 +step:385 train loss:5.577719 +step:386 train loss:5.612254 +step:387 train loss:5.611614 +step:388 train loss:5.579593 +step:389 train loss:5.594998 +step:390 train loss:5.578847 +step:391 train loss:5.583766 +step:392 train loss:5.571788 +step:393 train loss:5.566312 
+step:394 train loss:5.615326 +step:395 train loss:5.546737 +step:396 train loss:5.501557 +step:397 train loss:5.583221 +step:398 train loss:5.567162 +step:399 train loss:5.574852 +step:400 train loss:5.534717 +step:401 train loss:5.574289 +step:402 train loss:5.555802 +step:403 train loss:5.551907 +step:404 train loss:5.534731 +step:405 train loss:5.531617 +step:406 train loss:5.564084 +step:407 train loss:5.552166 +step:408 train loss:5.614812 +step:409 train loss:5.544759 +step:410 train loss:5.516575 +step:411 train loss:5.506204 +step:412 train loss:5.593777 +step:413 train loss:5.482384 +step:414 train loss:5.562528 +step:415 train loss:5.521815 +step:416 train loss:5.533467 +step:417 train loss:5.558098 +step:418 train loss:5.504344 +step:419 train loss:5.495962 +step:420 train loss:5.491569 +step:421 train loss:5.478773 +step:422 train loss:5.480315 +step:423 train loss:5.486889 +step:424 train loss:5.457998 +step:425 train loss:5.522960 +step:426 train loss:5.513494 +step:427 train loss:5.445142 +step:428 train loss:5.510786 +step:429 train loss:5.422049 +step:430 train loss:5.461040 +step:431 train loss:5.491563 +step:432 train loss:5.509405 +step:433 train loss:5.499180 +step:434 train loss:5.456192 +step:435 train loss:5.508404 +step:436 train loss:5.530219 +step:437 train loss:5.487754 +step:438 train loss:5.447825 +step:439 train loss:5.444249 +step:440 train loss:5.484708 +step:441 train loss:5.429962 +step:442 train loss:5.430705 +step:443 train loss:5.445916 +step:444 train loss:5.476098 +step:445 train loss:5.481686 +step:446 train loss:5.421386 +step:447 train loss:5.435111 +step:448 train loss:5.494952 +step:449 train loss:5.450047 +step:450 train loss:5.445728 +step:451 train loss:5.423896 +step:452 train loss:5.490699 +step:453 train loss:5.420763 +step:454 train loss:5.382266 +step:455 train loss:5.471649 +step:456 train loss:5.433904 +step:457 train loss:5.408566 +step:458 train loss:5.429578 +step:459 train loss:5.378844 +step:460 train loss:5.480458 +step:461 train loss:5.436940 +step:462 train loss:5.340724 +step:463 train loss:5.396577 +step:464 train loss:5.452487 +step:465 train loss:5.409534 +step:466 train loss:5.432364 +step:467 train loss:5.389565 +step:468 train loss:5.441378 +step:469 train loss:5.413470 +step:470 train loss:5.370143 +step:471 train loss:5.465644 +step:472 train loss:5.349461 +step:473 train loss:5.424899 +step:474 train loss:5.405228 +step:475 train loss:5.424554 +step:476 train loss:5.395856 +step:477 train loss:5.339159 +step:478 train loss:5.358943 +step:479 train loss:5.355295 +step:480 train loss:5.377864 +step:481 train loss:5.384935 +step:482 train loss:5.324632 +step:483 train loss:5.392918 +step:484 train loss:5.350260 +step:485 train loss:5.329288 +step:486 train loss:5.385946 +step:487 train loss:5.357703 +step:488 train loss:5.355474 +step:489 train loss:5.350269 +step:490 train loss:5.331040 +step:491 train loss:5.343955 +step:492 train loss:5.344949 +step:493 train loss:5.347243 +step:494 train loss:5.358126 +step:495 train loss:5.309574 +step:496 train loss:5.396367 +step:497 train loss:5.284681 +step:498 train loss:5.383919 +step:499 train loss:5.356615 +step:500 validation loss:5.337407 total_sharp:5.3638e-02 L1_sharp:3.4890e+00 L2_sharp:4.5535e-01 L3_sharp:2.4481e-01 L4_sharp:2.1998e-01 L5_sharp:1.8102e-01 L6_sharp:1.2465e-01 L7_sharp:8.9587e-02 L8_sharp:7.8780e-02 L9_sharp:7.1234e-02 L10_sharp:7.9201e-02 L11_sharp:8.5492e-02 L12_sharp:8.1943e-02 total_fnorm:9.3474e-01 total_l1_linf:5.6155e+03 total_spectral:9.3474e-01 
L1_fnorm:4.3952e-02 L2_fnorm:4.1669e-02 L3_fnorm:4.1370e-02 L4_fnorm:4.1234e-02 L5_fnorm:4.1168e-02 L6_fnorm:4.1511e-02 L7_fnorm:4.1802e-02 L8_fnorm:4.2093e-02 L9_fnorm:4.2409e-02 L10_fnorm:4.2685e-02 L11_fnorm:4.2787e-02 L12_fnorm:4.2974e-02 L1_l1linf:1.6486e-01 L2_l1linf:1.5231e-01 L3_l1linf:1.5281e-01 L4_l1linf:1.6078e-01 L5_l1linf:1.6445e-01 L6_l1linf:1.6382e-01 L7_l1linf:1.7408e-01 L8_l1linf:1.6391e-01 L9_l1linf:1.7063e-01 L10_l1linf:1.7820e-01 L11_l1linf:1.7768e-01 L12_l1linf:1.8608e-01 L1_spectral:3.5630e-03 L2_spectral:3.4568e-03 L3_spectral:3.4634e-03 L4_spectral:5.8035e-03 L5_spectral:6.2982e-03 L6_spectral:6.8456e-03 L7_spectral:7.4694e-03 L8_spectral:6.2695e-03 L9_spectral:6.3439e-03 L10_spectral:6.0513e-03 L11_spectral:3.9404e-03 L12_spectral:4.1283e-03 ip_v_neg_g:2.3820e-02 cos_v_neg_g:3.1348e-03 v_norm:9.3474e-01 g_norm:8.1293e+00 hv_norm:8.3437e+00 cos_v_hv:6.0090e-03 hg_norm:1.0824e+04 cos_g_hg:7.1516e-01 v_par:2.1997e-04 v_perp:9.3474e-01 L1_cos_v_neg_g:3.2630e-02 L1_v_norm:4.3952e-02 L2_cos_v_neg_g:2.2293e-02 L2_v_norm:4.1669e-02 L3_cos_v_neg_g:1.9171e-02 L3_v_norm:4.1370e-02 L4_cos_v_neg_g:1.9381e-02 L4_v_norm:4.1234e-02 L5_cos_v_neg_g:2.0056e-02 L5_v_norm:4.1168e-02 L6_cos_v_neg_g:1.7727e-02 L6_v_norm:4.1511e-02 L7_cos_v_neg_g:1.6363e-02 L7_v_norm:4.1802e-02 L8_cos_v_neg_g:1.5815e-02 L8_v_norm:4.2093e-02 L9_cos_v_neg_g:1.6049e-02 L9_v_norm:4.2409e-02 L10_cos_v_neg_g:1.6904e-02 L10_v_norm:4.2685e-02 L11_cos_v_neg_g:1.7428e-02 L11_v_norm:4.2787e-02 L12_cos_v_neg_g:1.5780e-02 L12_v_norm:4.2974e-02 +step:500 train loss:5.360855 +step:501 train loss:5.315164 +step:502 train loss:5.356928 +step:503 train loss:5.285473 +step:504 train loss:5.370163 +step:505 train loss:5.307924 +step:506 train loss:5.309422 +step:507 train loss:5.317580 +step:508 train loss:5.341613 +step:509 train loss:5.339903 +step:510 train loss:5.276687 +step:511 train loss:5.271942 +step:512 train loss:5.265886 +step:513 train loss:5.291573 +step:514 train loss:5.360665 +step:515 train loss:5.303598 +step:516 train loss:5.374993 +step:517 train loss:5.302111 +step:518 train loss:5.284842 +step:519 train loss:5.340313 +step:520 train loss:5.285219 +step:521 train loss:5.282000 +step:522 train loss:5.304765 +step:523 train loss:5.300441 +step:524 train loss:5.250518 +step:525 train loss:5.255875 +step:526 train loss:5.270888 +step:527 train loss:5.271067 +step:528 train loss:5.268848 +step:529 train loss:5.295389 +step:530 train loss:5.250727 +step:531 train loss:5.286650 +step:532 train loss:5.253655 +step:533 train loss:5.212920 +step:534 train loss:5.288486 +step:535 train loss:5.274652 +step:536 train loss:5.338667 +step:537 train loss:5.223358 +step:538 train loss:5.190907 +step:539 train loss:5.285978 +step:540 train loss:5.325503 +step:541 train loss:5.227288 +step:542 train loss:5.250107 +step:543 train loss:5.269728 +step:544 train loss:5.267894 +step:545 train loss:5.250023 +step:546 train loss:5.210184 +step:547 train loss:5.236386 +step:548 train loss:5.191440 +step:549 train loss:5.242018 +step:550 train loss:5.225445 +step:551 train loss:5.226718 +step:552 train loss:5.320701 +step:553 train loss:5.279345 +step:554 train loss:5.228953 +step:555 train loss:5.284637 +step:556 train loss:5.230860 +step:557 train loss:5.204876 +step:558 train loss:5.183481 +step:559 train loss:5.235224 +step:560 train loss:5.281628 +step:561 train loss:5.166138 +step:562 train loss:5.153950 +step:563 train loss:5.228477 +step:564 train loss:5.196709 +step:565 train loss:5.216021 +step:566 train loss:5.220788 
+step:567 train loss:5.213980 +step:568 train loss:5.240879 +step:569 train loss:5.218556 +step:570 train loss:5.155680 +step:571 train loss:5.186616 +step:572 train loss:5.181547 +step:573 train loss:5.175763 +step:574 train loss:5.221095 +step:575 train loss:5.178800 +step:576 train loss:5.183849 +step:577 train loss:5.202612 +step:578 train loss:5.179089 +step:579 train loss:5.221156 +step:580 train loss:5.165855 +step:581 train loss:5.216087 +step:582 train loss:5.187124 +step:583 train loss:5.201057 +step:584 train loss:5.178547 +step:585 train loss:5.171470 +step:586 train loss:5.163432 +step:587 train loss:5.234155 +step:588 train loss:5.149444 +step:589 train loss:5.205478 +step:590 train loss:5.212801 +step:591 train loss:5.147686 +step:592 train loss:5.129079 +step:593 train loss:5.150949 +step:594 train loss:5.120888 +step:595 train loss:5.167315 +step:596 train loss:5.143626 +step:597 train loss:5.176571 +step:598 train loss:5.146825 +step:599 train loss:5.144068 +step:600 train loss:5.122737 +step:601 train loss:5.100111 +step:602 train loss:5.106746 +step:603 train loss:5.164360 +step:604 train loss:5.137712 +step:605 train loss:5.169453 +step:606 train loss:5.115602 +step:607 train loss:5.107529 +step:608 train loss:5.105742 +step:609 train loss:5.076798 +step:610 train loss:5.098867 +step:611 train loss:5.102141 +step:612 train loss:5.141812 +step:613 train loss:5.062498 +step:614 train loss:5.103421 +step:615 train loss:5.156333 +step:616 train loss:5.077785 +step:617 train loss:5.113093 +step:618 train loss:5.081343 +step:619 train loss:5.113545 +step:620 train loss:5.136793 +step:621 train loss:5.069334 +step:622 train loss:5.131825 +step:623 train loss:5.132622 +step:624 train loss:5.114665 +step:625 train loss:5.115555 +step:626 train loss:5.119241 +step:627 train loss:5.085872 +step:628 train loss:5.092859 +step:629 train loss:5.040293 +step:630 train loss:5.061300 +step:631 train loss:5.054176 +step:632 train loss:5.068464 +step:633 train loss:5.089923 +step:634 train loss:5.085710 +step:635 train loss:5.020475 +step:636 train loss:5.111547 +step:637 train loss:5.024792 +step:638 train loss:4.962890 +step:639 train loss:5.087611 +step:640 train loss:5.030982 +step:641 train loss:5.060534 +step:642 train loss:5.103592 +step:643 train loss:5.003901 +step:644 train loss:5.090178 +step:645 train loss:5.045423 +step:646 train loss:5.038476 +step:647 train loss:5.052690 +step:648 train loss:5.144674 +step:649 train loss:5.043980 +step:650 train loss:5.106031 +step:651 train loss:4.986301 +step:652 train loss:5.015871 +step:653 train loss:5.008659 +step:654 train loss:5.009776 +step:655 train loss:5.051793 +step:656 train loss:4.990715 +step:657 train loss:5.051246 +step:658 train loss:4.975074 +step:659 train loss:5.053669 +step:660 train loss:5.018664 +step:661 train loss:5.060182 +step:662 train loss:5.050113 +step:663 train loss:5.049261 +step:664 train loss:4.960276 +step:665 train loss:4.974657 +step:666 train loss:4.972216 +step:667 train loss:5.035393 +step:668 train loss:5.009114 +step:669 train loss:4.986339 +step:670 train loss:5.003482 +step:671 train loss:4.978244 +step:672 train loss:4.950254 +step:673 train loss:5.043319 +step:674 train loss:5.042514 +step:675 train loss:4.942414 +step:676 train loss:5.025621 +step:677 train loss:4.957109 +step:678 train loss:4.947425 +step:679 train loss:5.003928 +step:680 train loss:4.950728 +step:681 train loss:5.003160 +step:682 train loss:4.923865 +step:683 train loss:4.979812 +step:684 train loss:5.023026 +step:685 
train loss:4.954522 +step:686 train loss:5.061186 +step:687 train loss:4.986403 +step:688 train loss:4.918913 +step:689 train loss:4.966162 +step:690 train loss:4.931481 +step:691 train loss:4.941733 +step:692 train loss:4.955673 +step:693 train loss:4.951963 +step:694 train loss:4.940436 +step:695 train loss:4.895943 +step:696 train loss:4.856143 +step:697 train loss:4.983821 +step:698 train loss:4.909277 +step:699 train loss:4.909100 +step:700 train loss:4.990256 +step:701 train loss:4.894829 +step:702 train loss:4.972980 +step:703 train loss:4.900511 +step:704 train loss:4.857298 +step:705 train loss:4.903502 +step:706 train loss:4.796422 +step:707 train loss:4.861022 +step:708 train loss:4.950474 +step:709 train loss:4.912925 +step:710 train loss:4.873311 +step:711 train loss:4.937569 +step:712 train loss:4.885320 +step:713 train loss:4.849277 +step:714 train loss:4.933614 +step:715 train loss:4.834826 +step:716 train loss:4.979484 +step:717 train loss:4.864064 +step:718 train loss:4.929953 +step:719 train loss:4.880215 +step:720 train loss:4.865423 +step:721 train loss:4.881598 +step:722 train loss:4.892731 +step:723 train loss:4.934474 +step:724 train loss:4.902546 +step:725 train loss:4.870832 +step:726 train loss:4.859803 +step:727 train loss:4.887778 +step:728 train loss:4.874115 +step:729 train loss:4.808906 +step:730 train loss:4.902781 +step:731 train loss:4.920289 +step:732 train loss:4.892192 +step:733 train loss:4.870865 +step:734 train loss:4.862384 +step:735 train loss:4.941827 +step:736 train loss:4.876972 +step:737 train loss:4.870632 +step:738 train loss:4.904343 +step:739 train loss:4.849562 +step:740 train loss:4.859715 +step:741 train loss:4.932447 +step:742 train loss:4.837101 +step:743 train loss:4.820439 +step:744 train loss:4.873886 +step:745 train loss:4.815631 +step:746 train loss:4.819856 +step:747 train loss:4.849056 +step:748 train loss:4.809837 +step:749 train loss:4.847317 +step:750 validation loss:4.814464 +step:750 train loss:4.796201 +step:751 train loss:4.812119 +step:752 train loss:4.760635 +step:753 train loss:4.808799 +step:754 train loss:4.817458 +step:755 train loss:4.861222 +step:756 train loss:4.841086 +step:757 train loss:4.939485 +step:758 train loss:4.813241 +step:759 train loss:4.814385 +step:760 train loss:4.788204 +step:761 train loss:4.823186 +step:762 train loss:4.797991 +step:763 train loss:4.796566 +step:764 train loss:4.771905 +step:765 train loss:4.775305 +step:766 train loss:4.857718 +step:767 train loss:4.961256 +step:768 train loss:4.785746 +step:769 train loss:4.819094 +step:770 train loss:4.840564 +step:771 train loss:4.897455 +step:772 train loss:4.825516 +step:773 train loss:4.764340 +step:774 train loss:4.818646 +step:775 train loss:4.786762 +step:776 train loss:4.796302 +step:777 train loss:4.756414 +step:778 train loss:4.763277 +step:779 train loss:4.748501 +step:780 train loss:4.805847 +step:781 train loss:4.733996 +step:782 train loss:4.763913 +step:783 train loss:4.747492 +step:784 train loss:4.759390 +step:785 train loss:4.727827 +step:786 train loss:4.754827 +step:787 train loss:4.702651 +step:788 train loss:4.763694 +step:789 train loss:4.761922 +step:790 train loss:4.713176 +step:791 train loss:4.797788 +step:792 train loss:4.806350 +step:793 train loss:4.767913 +step:794 train loss:4.762635 +step:795 train loss:4.722035 +step:796 train loss:4.988124 +step:797 train loss:4.755551 +step:798 train loss:4.737434 +step:799 train loss:4.748520 +step:800 train loss:4.850232 +step:801 train loss:4.742970 +step:802 train 
loss:4.858015 +step:803 train loss:4.761512 +step:804 train loss:4.700315 +step:805 train loss:4.765451 +step:806 train loss:4.676672 +step:807 train loss:4.731641 +step:808 train loss:4.740559 +step:809 train loss:4.703607 +step:810 train loss:4.677443 +step:811 train loss:4.775321 +step:812 train loss:4.732631 +step:813 train loss:4.741920 +step:814 train loss:4.793289 +step:815 train loss:4.760844 +step:816 train loss:4.687364 +step:817 train loss:4.718535 +step:818 train loss:4.688235 +step:819 train loss:4.686263 +step:820 train loss:4.699093 +step:821 train loss:4.641123 +step:822 train loss:4.628582 +step:823 train loss:4.715026 +step:824 train loss:4.627555 +step:825 train loss:4.613011 +step:826 train loss:4.661951 +step:827 train loss:4.601013 +step:828 train loss:4.668797 +step:829 train loss:4.672450 +step:830 train loss:4.682074 +step:831 train loss:4.704373 +step:832 train loss:4.760971 +step:833 train loss:4.714859 +step:834 train loss:4.695486 +step:835 train loss:4.670318 +step:836 train loss:4.653012 +step:837 train loss:4.628813 +step:838 train loss:4.628448 +step:839 train loss:4.629407 +step:840 train loss:4.672436 +step:841 train loss:4.647013 +step:842 train loss:4.658413 +step:843 train loss:4.654630 +step:844 train loss:4.629706 +step:845 train loss:4.617885 +step:846 train loss:4.693167 +step:847 train loss:4.660543 +step:848 train loss:4.622041 +step:849 train loss:4.670645 +step:850 train loss:4.671537 +step:851 train loss:4.642032 +step:852 train loss:4.708914 +step:853 train loss:4.595707 +step:854 train loss:4.645302 +step:855 train loss:4.631786 +step:856 train loss:4.585148 +step:857 train loss:4.635142 +step:858 train loss:4.662756 +step:859 train loss:4.618740 +step:860 train loss:4.620190 +step:861 train loss:4.659624 +step:862 train loss:4.605632 +step:863 train loss:4.622090 +step:864 train loss:4.598287 +step:865 train loss:4.621828 +step:866 train loss:4.632760 +step:867 train loss:4.715638 +step:868 train loss:4.603407 +step:869 train loss:4.622128 +step:870 train loss:4.573936 +step:871 train loss:4.581270 +step:872 train loss:4.608202 +step:873 train loss:4.598057 +step:874 train loss:4.605381 +step:875 train loss:4.518996 +step:876 train loss:4.627644 +step:877 train loss:4.538433 +step:878 train loss:4.643433 +step:879 train loss:4.573139 +step:880 train loss:4.656640 +step:881 train loss:4.600307 +step:882 train loss:4.561017 +step:883 train loss:4.600516 +step:884 train loss:4.610072 +step:885 train loss:4.565107 +step:886 train loss:4.545243 +step:887 train loss:4.576740 +step:888 train loss:4.678128 +step:889 train loss:4.614243 +step:890 train loss:4.560959 +step:891 train loss:4.523912 +step:892 train loss:4.492571 +step:893 train loss:4.569557 +step:894 train loss:4.546052 +step:895 train loss:4.531100 +step:896 train loss:4.616138 +step:897 train loss:4.543652 +step:898 train loss:4.555659 +step:899 train loss:4.561390 +step:900 train loss:4.615829 +step:901 train loss:4.522712 +step:902 train loss:4.563995 +step:903 train loss:4.639423 +step:904 train loss:4.656562 +step:905 train loss:4.537848 +step:906 train loss:4.550929 +step:907 train loss:4.560143 +step:908 train loss:4.575986 +step:909 train loss:4.532010 +step:910 train loss:4.556400 +step:911 train loss:4.679857 +step:912 train loss:4.492777 +step:913 train loss:4.552982 +step:914 train loss:4.521786 +step:915 train loss:4.535026 +step:916 train loss:4.608581 +step:917 train loss:4.543881 +step:918 train loss:4.616711 +step:919 train loss:4.692457 +step:920 train loss:4.457026 
+step:921 train loss:4.565269 +step:922 train loss:4.536101 +step:923 train loss:4.478435 +step:924 train loss:4.507964 +step:925 train loss:4.464398 +step:926 train loss:4.559630 +step:927 train loss:4.475406 +step:928 train loss:4.537338 +step:929 train loss:4.516412 +step:930 train loss:4.514504 +step:931 train loss:4.558098 +step:932 train loss:4.513729 +step:933 train loss:4.525393 +step:934 train loss:4.565793 +step:935 train loss:4.545314 +step:936 train loss:4.529897 +step:937 train loss:4.538705 +step:938 train loss:4.537934 +step:939 train loss:4.418329 +step:940 train loss:4.516006 +step:941 train loss:4.456887 +step:942 train loss:4.438039 +step:943 train loss:4.534685 +step:944 train loss:4.488504 +step:945 train loss:4.501295 +step:946 train loss:4.532903 +step:947 train loss:4.652693 +step:948 train loss:4.461988 +step:949 train loss:4.518534 +step:950 train loss:4.453514 +step:951 train loss:4.478603 +step:952 train loss:4.541149 +step:953 train loss:4.480560 +step:954 train loss:4.494738 +step:955 train loss:4.436016 +step:956 train loss:4.458734 +step:957 train loss:4.456206 +step:958 train loss:4.533780 +step:959 train loss:4.471669 +step:960 train loss:4.562188 +step:961 train loss:4.521492 +step:962 train loss:4.465864 +step:963 train loss:4.446464 +step:964 train loss:4.486762 +step:965 train loss:4.411547 +step:966 train loss:4.418982 +step:967 train loss:4.478349 +step:968 train loss:4.472032 +step:969 train loss:4.424044 +step:970 train loss:4.498899 +step:971 train loss:4.464590 +step:972 train loss:4.390110 +step:973 train loss:4.474720 +step:974 train loss:4.422288 +step:975 train loss:4.507528 +step:976 train loss:4.451201 +step:977 train loss:4.444160 +step:978 train loss:4.456306 +step:979 train loss:4.434384 +step:980 train loss:4.435749 +step:981 train loss:4.418590 +step:982 train loss:4.418243 +step:983 train loss:4.427738 +step:984 train loss:4.460320 +step:985 train loss:4.434813 +step:986 train loss:4.440310 +step:987 train loss:4.474526 +step:988 train loss:4.459177 +step:989 train loss:4.429868 +step:990 train loss:4.414327 +step:991 train loss:4.351023 +step:992 train loss:4.408038 +step:993 train loss:4.430305 +step:994 train loss:4.369967 +step:995 train loss:4.382988 +step:996 train loss:4.433510 +step:997 train loss:4.386407 +step:998 train loss:4.384752 +step:999 train loss:4.425157 +step:1000 validation loss:4.372633 total_sharp:2.4371e-02 L1_sharp:3.8021e-01 L2_sharp:2.5969e-01 L3_sharp:2.6497e-01 L4_sharp:1.9820e-01 L5_sharp:1.4758e-01 L6_sharp:9.3277e-02 L7_sharp:7.6671e-02 L8_sharp:6.2172e-02 L9_sharp:4.4385e-02 L10_sharp:3.6781e-02 L11_sharp:3.1967e-02 L12_sharp:2.6295e-02 total_fnorm:1.3533e+00 total_l1_linf:8.1109e+03 total_spectral:1.3533e+00 L1_fnorm:6.1907e-02 L2_fnorm:5.8078e-02 L3_fnorm:5.5452e-02 L4_fnorm:5.7363e-02 L5_fnorm:5.8574e-02 L6_fnorm:6.0034e-02 L7_fnorm:6.0470e-02 L8_fnorm:6.0603e-02 L9_fnorm:6.0976e-02 L10_fnorm:6.1273e-02 L11_fnorm:6.0866e-02 L12_fnorm:6.1256e-02 L1_l1linf:2.2613e-01 L2_l1linf:2.3551e-01 L3_l1linf:2.4435e-01 L4_l1linf:2.5461e-01 L5_l1linf:2.6136e-01 L6_l1linf:2.5056e-01 L7_l1linf:2.4853e-01 L8_l1linf:2.4912e-01 L9_l1linf:2.5154e-01 L10_l1linf:2.4750e-01 L11_l1linf:2.3277e-01 L12_l1linf:2.1385e-01 L1_spectral:5.0444e-03 L2_spectral:5.2728e-03 L3_spectral:5.5284e-03 L4_spectral:5.6738e-03 L5_spectral:5.8277e-03 L6_spectral:5.6218e-03 L7_spectral:5.7885e-03 L8_spectral:5.9948e-03 L9_spectral:6.5648e-03 L10_spectral:6.2018e-03 L11_spectral:5.8075e-03 L12_spectral:6.0259e-03 ip_v_neg_g:2.3336e-02 
cos_v_neg_g:2.6975e-03 v_norm:1.3533e+00 g_norm:6.3923e+00 hv_norm:4.7241e+00 cos_v_hv:6.9817e-03 hg_norm:2.7166e+03 cos_g_hg:6.3177e-01 v_par:1.4907e-04 v_perp:1.3533e+00 L1_cos_v_neg_g:2.4042e-02 L1_v_norm:6.1907e-02 L2_cos_v_neg_g:2.4329e-02 L2_v_norm:5.8078e-02 L3_cos_v_neg_g:2.4471e-02 L3_v_norm:5.5452e-02 L4_cos_v_neg_g:2.2742e-02 L4_v_norm:5.7363e-02 L5_cos_v_neg_g:2.0303e-02 L5_v_norm:5.8574e-02 L6_cos_v_neg_g:1.4936e-02 L6_v_norm:6.0034e-02 L7_cos_v_neg_g:1.3600e-02 L7_v_norm:6.0470e-02 L8_cos_v_neg_g:1.2094e-02 L8_v_norm:6.0603e-02 L9_cos_v_neg_g:9.6786e-03 L9_v_norm:6.0976e-02 L10_cos_v_neg_g:9.0815e-03 L10_v_norm:6.1273e-02 L11_cos_v_neg_g:8.8952e-03 L11_v_norm:6.0866e-02 L12_cos_v_neg_g:8.5272e-03 L12_v_norm:6.1256e-02 +step:1000 train loss:4.436195 +step:1001 train loss:4.436913 +step:1002 train loss:4.426413 +step:1003 train loss:4.406297 +step:1004 train loss:4.378546 +step:1005 train loss:4.393362 +step:1006 train loss:4.468907 +step:1007 train loss:4.422702 +step:1008 train loss:4.391037 +step:1009 train loss:4.468313 +step:1010 train loss:4.416565 +step:1011 train loss:4.439834 +step:1012 train loss:4.384478 +step:1013 train loss:4.357768 +step:1014 train loss:4.358893 +step:1015 train loss:4.394414 +step:1016 train loss:4.404398 +step:1017 train loss:4.358488 +step:1018 train loss:4.409816 +step:1019 train loss:4.375195 +step:1020 train loss:4.360095 +step:1021 train loss:4.445410 +step:1022 train loss:4.351338 +step:1023 train loss:4.357373 +step:1024 train loss:4.437772 +step:1025 train loss:4.395430 +step:1026 train loss:4.338148 +step:1027 train loss:4.380997 +step:1028 train loss:4.383282 +step:1029 train loss:4.331110 +step:1030 train loss:4.413821 +step:1031 train loss:4.407424 +step:1032 train loss:4.361133 +step:1033 train loss:4.331652 +step:1034 train loss:4.396157 +step:1035 train loss:4.398273 +step:1036 train loss:4.313342 +step:1037 train loss:4.373784 +step:1038 train loss:4.392930 +step:1039 train loss:4.527810 +step:1040 train loss:4.362740 +step:1041 train loss:4.351763 +step:1042 train loss:4.374049 +step:1043 train loss:4.379630 +step:1044 train loss:4.361210 +step:1045 train loss:4.373314 +step:1046 train loss:4.316141 +step:1047 train loss:4.346653 +step:1048 train loss:4.340777 +step:1049 train loss:4.395257 +step:1050 train loss:4.355816 +step:1051 train loss:4.328486 +step:1052 train loss:4.426524 +step:1053 train loss:4.328630 +step:1054 train loss:4.321113 +step:1055 train loss:4.389516 +step:1056 train loss:4.332905 +step:1057 train loss:4.230863 +step:1058 train loss:4.333114 +step:1059 train loss:4.320669 +step:1060 train loss:4.312686 +step:1061 train loss:4.369808 +step:1062 train loss:4.329967 +step:1063 train loss:4.330539 +step:1064 train loss:4.317337 +step:1065 train loss:4.331682 +step:1066 train loss:4.301652 +step:1067 train loss:4.335107 +step:1068 train loss:4.297827 +step:1069 train loss:4.314600 +step:1070 train loss:4.317127 +step:1071 train loss:4.337818 +step:1072 train loss:4.359498 +step:1073 train loss:4.270267 +step:1074 train loss:4.287467 +step:1075 train loss:4.303632 +step:1076 train loss:4.368292 +step:1077 train loss:4.296011 +step:1078 train loss:4.340345 +step:1079 train loss:4.391061 +step:1080 train loss:4.259031 +step:1081 train loss:4.325656 +step:1082 train loss:4.326616 +step:1083 train loss:4.286065 +step:1084 train loss:4.264167 +step:1085 train loss:4.327484 +step:1086 train loss:4.320075 +step:1087 train loss:4.298293 +step:1088 train loss:4.300861 +step:1089 train loss:4.310240 +step:1090 train 
loss:4.254152 +step:1091 train loss:4.248870 +step:1092 train loss:4.361895 +step:1093 train loss:4.248166 +step:1094 train loss:4.306679 +step:1095 train loss:4.346503 +step:1096 train loss:4.283635 +step:1097 train loss:4.278830 +step:1098 train loss:4.249084 +step:1099 train loss:4.300035 +step:1100 train loss:4.341280 +step:1101 train loss:4.332526 +step:1102 train loss:4.347287 +step:1103 train loss:4.270203 +step:1104 train loss:4.297582 +step:1105 train loss:4.352332 +step:1106 train loss:4.286681 +step:1107 train loss:4.408423 +step:1108 train loss:4.353552 +step:1109 train loss:4.316255 +step:1110 train loss:4.268699 +step:1111 train loss:4.322541 +step:1112 train loss:4.238561 +step:1113 train loss:4.220663 +step:1114 train loss:4.208592 +step:1115 train loss:4.251404 +step:1116 train loss:4.312920 +step:1117 train loss:4.333486 +step:1118 train loss:4.359152 +step:1119 train loss:4.294470 +step:1120 train loss:4.303192 +step:1121 train loss:4.283561 +step:1122 train loss:4.269229 +step:1123 train loss:4.366765 +step:1124 train loss:4.250434 +step:1125 train loss:4.266793 +step:1126 train loss:4.229932 +step:1127 train loss:4.251269 +step:1128 train loss:4.254034 +step:1129 train loss:4.308359 +step:1130 train loss:4.222460 +step:1131 train loss:4.314693 +step:1132 train loss:4.262709 +step:1133 train loss:4.270487 +step:1134 train loss:4.243843 +step:1135 train loss:4.290024 +step:1136 train loss:4.306781 +step:1137 train loss:4.224891 +step:1138 train loss:4.296833 +step:1139 train loss:4.251659 +step:1140 train loss:4.329687 +step:1141 train loss:4.282897 +step:1142 train loss:4.225893 +step:1143 train loss:4.293883 +step:1144 train loss:4.319271 +step:1145 train loss:4.272234 +step:1146 train loss:4.223852 +step:1147 train loss:4.232889 +step:1148 train loss:4.258854 +step:1149 train loss:4.308963 +step:1150 train loss:4.320207 +step:1151 train loss:4.323159 +step:1152 train loss:4.227921 +step:1153 train loss:4.224615 +step:1154 train loss:4.209658 +step:1155 train loss:4.313454 +step:1156 train loss:4.216561 +step:1157 train loss:4.240738 +step:1158 train loss:4.301169 +step:1159 train loss:4.291521 +step:1160 train loss:4.219730 +step:1161 train loss:4.312023 +step:1162 train loss:4.250987 +step:1163 train loss:4.231869 +step:1164 train loss:4.140723 +step:1165 train loss:4.280943 +step:1166 train loss:4.205259 +step:1167 train loss:4.210115 +step:1168 train loss:4.269403 +step:1169 train loss:4.229920 +step:1170 train loss:4.235880 +step:1171 train loss:4.257036 +step:1172 train loss:4.224797 +step:1173 train loss:4.252293 +step:1174 train loss:4.191717 +step:1175 train loss:4.225034 +step:1176 train loss:4.336363 +step:1177 train loss:4.186288 +step:1178 train loss:4.242811 +step:1179 train loss:4.199223 +step:1180 train loss:4.224463 +step:1181 train loss:4.215470 +step:1182 train loss:4.276071 +step:1183 train loss:4.249676 +step:1184 train loss:4.190514 +step:1185 train loss:4.227272 +step:1186 train loss:4.214191 +step:1187 train loss:4.186973 +step:1188 train loss:4.219918 +step:1189 train loss:4.154260 +step:1190 train loss:4.210043 +step:1191 train loss:4.270200 +step:1192 train loss:4.226978 +step:1193 train loss:4.226723 +step:1194 train loss:4.340426 +step:1195 train loss:4.318650 +step:1196 train loss:4.208838 +step:1197 train loss:4.227608 +step:1198 train loss:4.210565 +step:1199 train loss:4.213374 +step:1200 train loss:4.269783 +step:1201 train loss:4.242006 +step:1202 train loss:4.178353 +step:1203 train loss:4.171850 +step:1204 train loss:4.209387 
+step:1205 train loss:4.221209 +step:1206 train loss:4.161188 +step:1207 train loss:4.248188 +step:1208 train loss:4.223329 +step:1209 train loss:4.148097 +step:1210 train loss:4.239297 +step:1211 train loss:4.191942 +step:1212 train loss:4.215487 +step:1213 train loss:4.149690 +step:1214 train loss:4.233446 +step:1215 train loss:4.201685 +step:1216 train loss:4.216455 +step:1217 train loss:4.160412 +step:1218 train loss:4.225438 +step:1219 train loss:4.162858 +step:1220 train loss:4.183887 +step:1221 train loss:4.206444 +step:1222 train loss:4.244604 +step:1223 train loss:4.219718 +step:1224 train loss:4.190997 +step:1225 train loss:4.235899 +step:1226 train loss:4.176824 +step:1227 train loss:4.190421 +step:1228 train loss:4.191055 +step:1229 train loss:4.161236 +step:1230 train loss:4.156220 +step:1231 train loss:4.210220 +step:1232 train loss:4.162605 +step:1233 train loss:4.160258 +step:1234 train loss:4.243099 +step:1235 train loss:4.221484 +step:1236 train loss:4.131001 +step:1237 train loss:4.228493 +step:1238 train loss:4.181536 +step:1239 train loss:4.223438 +step:1240 train loss:4.127599 +step:1241 train loss:4.157652 +step:1242 train loss:4.187311 +step:1243 train loss:4.133208 +step:1244 train loss:4.254090 +step:1245 train loss:4.266302 +step:1246 train loss:4.196423 +step:1247 train loss:4.173599 +step:1248 train loss:4.203782 +step:1249 train loss:4.136261 +step:1250 validation loss:4.139888 +step:1250 train loss:4.150300 +step:1251 train loss:4.219256 +step:1252 train loss:4.166943 +step:1253 train loss:4.123005 +step:1254 train loss:4.154387 +step:1255 train loss:4.149278 +step:1256 train loss:4.195271 +step:1257 train loss:4.173094 +step:1258 train loss:4.223719 +step:1259 train loss:4.205748 +step:1260 train loss:4.111069 +step:1261 train loss:4.350261 +step:1262 train loss:4.196079 +step:1263 train loss:4.154250 +step:1264 train loss:4.163705 +step:1265 train loss:4.221220 +step:1266 train loss:4.160954 +step:1267 train loss:4.168932 +step:1268 train loss:4.178596 +step:1269 train loss:4.171686 +step:1270 train loss:4.098939 +step:1271 train loss:4.105240 +step:1272 train loss:4.134897 +step:1273 train loss:4.191170 +step:1274 train loss:4.154099 +step:1275 train loss:4.180121 +step:1276 train loss:4.180468 +step:1277 train loss:4.187129 +step:1278 train loss:4.133654 +step:1279 train loss:4.142001 +step:1280 train loss:4.159226 +step:1281 train loss:4.213642 +step:1282 train loss:4.136189 +step:1283 train loss:4.214231 +step:1284 train loss:4.158240 +step:1285 train loss:4.206383 +step:1286 train loss:4.105731 +step:1287 train loss:4.144463 +step:1288 train loss:4.174322 +step:1289 train loss:4.228011 +step:1290 train loss:4.180602 +step:1291 train loss:4.147472 +step:1292 train loss:4.128953 +step:1293 train loss:4.117450 +step:1294 train loss:4.171128 +step:1295 train loss:4.150468 +step:1296 train loss:4.196552 +step:1297 train loss:4.156168 +step:1298 train loss:4.173076 +step:1299 train loss:4.211250 +step:1300 train loss:4.130861 +step:1301 train loss:4.174695 +step:1302 train loss:4.137298 +step:1303 train loss:4.174038 +step:1304 train loss:4.201283 +step:1305 train loss:4.180778 +step:1306 train loss:4.172181 +step:1307 train loss:4.155023 +step:1308 train loss:4.111720 +step:1309 train loss:4.124269 +step:1310 train loss:4.110962 +step:1311 train loss:4.117635 +step:1312 train loss:4.189993 +step:1313 train loss:4.105812 +step:1314 train loss:4.112155 +step:1315 train loss:4.160754 +step:1316 train loss:4.132673 +step:1317 train loss:4.027703 +step:1318 
train loss:4.182106 +step:1319 train loss:4.220622 +step:1320 train loss:4.132527 +step:1321 train loss:4.111145 +step:1322 train loss:4.215408 +step:1323 train loss:4.163314 +step:1324 train loss:4.259245 +step:1325 train loss:4.145981 +step:1326 train loss:4.172506 +step:1327 train loss:4.186269 +step:1328 train loss:4.093260 +step:1329 train loss:4.123543 +step:1330 train loss:4.146384 +step:1331 train loss:4.002974 +step:1332 train loss:4.189396 +step:1333 train loss:4.151048 +step:1334 train loss:4.156153 +step:1335 train loss:4.174036 +step:1336 train loss:4.180342 +step:1337 train loss:4.155459 +step:1338 train loss:4.134179 +step:1339 train loss:4.205891 +step:1340 train loss:4.170691 +step:1341 train loss:4.150954 +step:1342 train loss:4.123228 +step:1343 train loss:4.112249 +step:1344 train loss:4.177340 +step:1345 train loss:4.134363 +step:1346 train loss:4.215209 +step:1347 train loss:4.139272 +step:1348 train loss:4.106774 +step:1349 train loss:4.054296 +step:1350 train loss:4.086249 +step:1351 train loss:4.156653 +step:1352 train loss:4.128430 +step:1353 train loss:4.106170 +step:1354 train loss:4.109047 +step:1355 train loss:4.179916 +step:1356 train loss:4.090664 +step:1357 train loss:4.119137 +step:1358 train loss:4.113489 +step:1359 train loss:4.112698 +step:1360 train loss:4.142814 +step:1361 train loss:4.260194 +step:1362 train loss:4.178505 +step:1363 train loss:4.066604 +step:1364 train loss:4.085797 +step:1365 train loss:4.081664 +step:1366 train loss:4.116667 +step:1367 train loss:4.053383 +step:1368 train loss:4.083886 +step:1369 train loss:4.120901 +step:1370 train loss:4.139699 +step:1371 train loss:4.096399 +step:1372 train loss:4.128405 +step:1373 train loss:4.166626 +step:1374 train loss:4.169448 +step:1375 train loss:4.123743 +step:1376 train loss:4.144321 +step:1377 train loss:4.137620 +step:1378 train loss:4.127969 +step:1379 train loss:4.100193 +step:1380 train loss:4.166236 +step:1381 train loss:4.117535 +step:1382 train loss:4.092168 +step:1383 train loss:4.085010 +step:1384 train loss:4.161343 +step:1385 train loss:4.063433 +step:1386 train loss:4.126807 +step:1387 train loss:4.123423 +step:1388 train loss:4.096442 +step:1389 train loss:4.067327 +step:1390 train loss:4.104255 +step:1391 train loss:4.136603 +step:1392 train loss:4.113361 +step:1393 train loss:4.161535 +step:1394 train loss:4.094329 +step:1395 train loss:4.131546 +step:1396 train loss:4.120821 +step:1397 train loss:4.135204 +step:1398 train loss:4.142622 +step:1399 train loss:4.113508 +step:1400 train loss:4.092587 +step:1401 train loss:4.085055 +step:1402 train loss:4.088548 +step:1403 train loss:4.051601 +step:1404 train loss:4.112343 +step:1405 train loss:4.071194 +step:1406 train loss:4.099160 +step:1407 train loss:4.093937 +step:1408 train loss:4.078307 +step:1409 train loss:4.066178 +step:1410 train loss:4.084129 +step:1411 train loss:4.117386 +step:1412 train loss:4.173739 +step:1413 train loss:4.093988 +step:1414 train loss:4.123063 +step:1415 train loss:4.082965 +step:1416 train loss:4.136708 +step:1417 train loss:4.108464 +step:1418 train loss:4.047368 +step:1419 train loss:4.054994 +step:1420 train loss:4.080489 +step:1421 train loss:4.117963 +step:1422 train loss:4.097949 +step:1423 train loss:4.192538 +step:1424 train loss:4.091588 +step:1425 train loss:4.052624 +step:1426 train loss:4.078898 +step:1427 train loss:4.068231 +step:1428 train loss:4.052405 +step:1429 train loss:4.077466 +step:1430 train loss:4.082105 +step:1431 train loss:4.105241 +step:1432 train loss:4.091655 
+step:1433 train loss:4.072623 +step:1434 train loss:4.044493 +step:1435 train loss:4.039511 +step:1436 train loss:4.106889 +step:1437 train loss:4.043162 +step:1438 train loss:4.045881 +step:1439 train loss:4.029556 +step:1440 train loss:4.069420 +step:1441 train loss:4.140428 +step:1442 train loss:4.105275 +step:1443 train loss:4.034140 +step:1444 train loss:4.047935 +step:1445 train loss:4.047703 +step:1446 train loss:4.076877 +step:1447 train loss:4.089184 +step:1448 train loss:4.050348 +step:1449 train loss:4.078166 +step:1450 train loss:4.093758 +step:1451 train loss:4.015797 +step:1452 train loss:4.068758 +step:1453 train loss:4.065759 +step:1454 train loss:4.065094 +step:1455 train loss:3.997859 +step:1456 train loss:4.077447 +step:1457 train loss:4.012154 +step:1458 train loss:4.151993 +step:1459 train loss:4.070630 +step:1460 train loss:4.042503 +step:1461 train loss:4.094906 +step:1462 train loss:4.102663 +step:1463 train loss:4.068069 +step:1464 train loss:4.041893 +step:1465 train loss:4.047968 +step:1466 train loss:4.008348 +step:1467 train loss:4.146521 +step:1468 train loss:4.026564 +step:1469 train loss:4.106460 +step:1470 train loss:4.037299 +step:1471 train loss:4.040135 +step:1472 train loss:4.039126 +step:1473 train loss:4.040620 +step:1474 train loss:3.981617 +step:1475 train loss:4.047024 +step:1476 train loss:4.124310 +step:1477 train loss:4.077229 +step:1478 train loss:4.010739 +step:1479 train loss:4.044553 +step:1480 train loss:4.042909 +step:1481 train loss:4.013608 +step:1482 train loss:4.080229 +step:1483 train loss:4.065370 +step:1484 train loss:4.099069 +step:1485 train loss:4.108383 +step:1486 train loss:4.046031 +step:1487 train loss:4.031568 +step:1488 train loss:4.037106 +step:1489 train loss:4.028574 +step:1490 train loss:4.084780 +step:1491 train loss:4.075831 +step:1492 train loss:4.070892 +step:1493 train loss:4.018564 +step:1494 train loss:4.053945 +step:1495 train loss:4.034038 +step:1496 train loss:4.006741 +step:1497 train loss:4.082082 +step:1498 train loss:3.986335 +step:1499 train loss:4.032220 +step:1500 validation loss:4.004855 total_sharp:1.5319e-02 L1_sharp:4.7143e-01 L2_sharp:2.2555e-01 L3_sharp:1.3807e-01 L4_sharp:7.1062e-02 L5_sharp:6.8135e-02 L6_sharp:5.4191e-02 L7_sharp:5.3728e-02 L8_sharp:4.4576e-02 L9_sharp:3.3213e-02 L10_sharp:2.6482e-02 L11_sharp:2.1807e-02 L12_sharp:2.5844e-02 total_fnorm:1.3233e+00 total_l1_linf:7.9607e+03 total_spectral:1.3233e+00 L1_fnorm:6.1785e-02 L2_fnorm:5.6858e-02 L3_fnorm:5.4696e-02 L4_fnorm:5.8037e-02 L5_fnorm:5.9178e-02 L6_fnorm:6.0534e-02 L7_fnorm:6.0800e-02 L8_fnorm:6.0894e-02 L9_fnorm:6.1022e-02 L10_fnorm:6.1013e-02 L11_fnorm:6.0826e-02 L12_fnorm:6.0951e-02 L1_l1linf:2.4386e-01 L2_l1linf:2.3966e-01 L3_l1linf:2.5183e-01 L4_l1linf:2.5737e-01 L5_l1linf:2.6692e-01 L6_l1linf:2.4621e-01 L7_l1linf:2.3821e-01 L8_l1linf:2.3518e-01 L9_l1linf:2.4540e-01 L10_l1linf:2.5016e-01 L11_l1linf:2.5795e-01 L12_l1linf:2.2760e-01 L1_spectral:5.3447e-03 L2_spectral:5.4192e-03 L3_spectral:5.6944e-03 L4_spectral:5.8098e-03 L5_spectral:6.0294e-03 L6_spectral:5.5451e-03 L7_spectral:5.4007e-03 L8_spectral:5.3177e-03 L9_spectral:5.5563e-03 L10_spectral:5.6517e-03 L11_spectral:5.8052e-03 L12_spectral:5.2721e-03 ip_v_neg_g:1.2889e-02 cos_v_neg_g:1.9363e-03 v_norm:1.3233e+00 g_norm:5.0305e+00 hv_norm:2.4921e+00 cos_v_hv:8.1341e-03 hg_norm:8.9434e+02 cos_g_hg:5.5273e-01 v_par:8.7148e-05 v_perp:1.3233e+00 L1_cos_v_neg_g:2.1842e-02 L1_v_norm:6.1785e-02 L2_cos_v_neg_g:2.1111e-02 L2_v_norm:5.6858e-02 L3_cos_v_neg_g:1.8888e-02 
L3_v_norm:5.4696e-02 L4_cos_v_neg_g:1.1970e-02 L4_v_norm:5.8037e-02 L5_cos_v_neg_g:1.1020e-02 L5_v_norm:5.9178e-02 L6_cos_v_neg_g:9.1781e-03 L6_v_norm:6.0534e-02 L7_cos_v_neg_g:8.6351e-03 L7_v_norm:6.0800e-02 L8_cos_v_neg_g:8.6999e-03 L8_v_norm:6.0894e-02 L9_cos_v_neg_g:7.2467e-03 L9_v_norm:6.1022e-02 L10_cos_v_neg_g:6.3962e-03 L10_v_norm:6.1013e-02 L11_cos_v_neg_g:6.2397e-03 L11_v_norm:6.0826e-02 L12_cos_v_neg_g:4.9662e-03 L12_v_norm:6.0951e-02 +step:1500 train loss:4.028634 +step:1501 train loss:4.048352 +step:1502 train loss:3.986159 +step:1503 train loss:4.040226 +step:1504 train loss:4.008329 +step:1505 train loss:3.980549 +step:1506 train loss:3.969773 +step:1507 train loss:3.989635 +step:1508 train loss:4.002236 +step:1509 train loss:4.054755 +step:1510 train loss:3.992654 +step:1511 train loss:4.024912 +step:1512 train loss:3.999409 +step:1513 train loss:4.067092 +step:1514 train loss:4.021525 +step:1515 train loss:4.081258 +step:1516 train loss:4.006135 +step:1517 train loss:4.017227 +step:1518 train loss:4.096037 +step:1519 train loss:4.059737 +step:1520 train loss:4.100425 +step:1521 train loss:4.002945 +step:1522 train loss:4.063202 +step:1523 train loss:4.063483 +step:1524 train loss:3.987761 +step:1525 train loss:4.065705 +step:1526 train loss:3.981438 +step:1527 train loss:4.039210 +step:1528 train loss:4.087549 +step:1529 train loss:4.044282 +step:1530 train loss:4.085058 +step:1531 train loss:4.006759 +step:1532 train loss:4.077235 +step:1533 train loss:4.049322 +step:1534 train loss:3.995123 +step:1535 train loss:4.051373 +step:1536 train loss:4.075182 +step:1537 train loss:4.027899 +step:1538 train loss:4.028342 +step:1539 train loss:4.026138 +step:1540 train loss:4.040437 +step:1541 train loss:4.006124 +step:1542 train loss:4.095307 +step:1543 train loss:4.121354 +step:1544 train loss:3.992017 +step:1545 train loss:3.974957 +step:1546 train loss:4.012342 +step:1547 train loss:4.000638 +step:1548 train loss:4.042778 +step:1549 train loss:3.968455 +step:1550 train loss:4.084332 +step:1551 train loss:4.016877 +step:1552 train loss:4.047037 +step:1553 train loss:4.057317 +step:1554 train loss:4.063994 +step:1555 train loss:4.021657 +step:1556 train loss:4.001947 +step:1557 train loss:4.013763 +step:1558 train loss:4.037372 +step:1559 train loss:4.000391 +step:1560 train loss:4.083337 +step:1561 train loss:4.057030 +step:1562 train loss:3.946677 +step:1563 train loss:3.927271 +step:1564 train loss:4.060159 +step:1565 train loss:4.036106 +step:1566 train loss:4.054740 +step:1567 train loss:4.053967 +step:1568 train loss:4.005424 +step:1569 train loss:3.998767 +step:1570 train loss:4.016161 +step:1571 train loss:3.990993 +step:1572 train loss:3.995699 +step:1573 train loss:4.041207 +step:1574 train loss:3.995498 +step:1575 train loss:4.016746 +step:1576 train loss:3.973948 +step:1577 train loss:4.000560 +step:1578 train loss:3.982756 +step:1579 train loss:4.059154 +step:1580 train loss:4.014476 +step:1581 train loss:4.052725 +step:1582 train loss:4.050361 +step:1583 train loss:4.026332 +step:1584 train loss:3.948954 +step:1585 train loss:4.034478 +step:1586 train loss:4.004248 +step:1587 train loss:4.012953 +step:1588 train loss:3.996629 +step:1589 train loss:4.045820 +step:1590 train loss:3.955511 +step:1591 train loss:4.013216 +step:1592 train loss:3.962629 +step:1593 train loss:4.001880 +step:1594 train loss:3.999499 +step:1595 train loss:3.995744 +step:1596 train loss:4.002250 +step:1597 train loss:3.929828 +step:1598 train loss:4.032541 +step:1599 train loss:4.043844 
+step:1600 train loss:3.922405 +step:1601 train loss:3.996930 +step:1602 train loss:4.055491 +step:1603 train loss:4.050226 +step:1604 train loss:3.974065 +step:1605 train loss:4.025123 +step:1606 train loss:4.070528 +step:1607 train loss:3.955639 +step:1608 train loss:3.990028 +step:1609 train loss:4.006035 +step:1610 train loss:4.067154 +step:1611 train loss:3.988756 +step:1612 train loss:3.914647 +step:1613 train loss:3.985090 +step:1614 train loss:4.091920 +step:1615 train loss:4.015015 +step:1616 train loss:4.022319 +step:1617 train loss:4.002494 +step:1618 train loss:4.011321 +step:1619 train loss:4.181866 +step:1620 train loss:3.972772 +step:1621 train loss:4.029966 +step:1622 train loss:3.951061 +step:1623 train loss:4.015465 +step:1624 train loss:3.985534 +step:1625 train loss:4.059225 +step:1626 train loss:3.949433 +step:1627 train loss:3.961841 +step:1628 train loss:3.978528 +step:1629 train loss:4.010100 +step:1630 train loss:4.030734 +step:1631 train loss:3.973790 +step:1632 train loss:3.952912 +step:1633 train loss:3.966326 +step:1634 train loss:4.021425 +step:1635 train loss:3.961257 +step:1636 train loss:3.948289 +step:1637 train loss:4.025787 +step:1638 train loss:4.125655 +step:1639 train loss:3.937129 +step:1640 train loss:4.015185 +step:1641 train loss:3.975748 +step:1642 train loss:4.071132 +step:1643 train loss:3.970364 +step:1644 train loss:3.985478 +step:1645 train loss:3.960768 +step:1646 train loss:4.038256 +step:1647 train loss:3.934347 +step:1648 train loss:3.997638 +step:1649 train loss:3.965534 +step:1650 train loss:3.978449 +step:1651 train loss:3.994903 +step:1652 train loss:4.011323 +step:1653 train loss:4.019572 +step:1654 train loss:4.010113 +step:1655 train loss:3.988218 +step:1656 train loss:3.981443 +step:1657 train loss:3.980541 +step:1658 train loss:3.955791 +step:1659 train loss:4.031317 +step:1660 train loss:3.930616 +step:1661 train loss:4.042968 +step:1662 train loss:3.981262 +step:1663 train loss:3.971314 +step:1664 train loss:4.070658 +step:1665 train loss:3.988385 +step:1666 train loss:3.999281 +step:1667 train loss:4.017561 +step:1668 train loss:3.991619 +step:1669 train loss:3.954815 +step:1670 train loss:4.004515 +step:1671 train loss:4.002145 +step:1672 train loss:3.997983 +step:1673 train loss:3.956264 +step:1674 train loss:3.954233 +step:1675 train loss:3.999640 +step:1676 train loss:4.257768 +step:1677 train loss:4.008239 +step:1678 train loss:3.922899 +step:1679 train loss:4.046455 +step:1680 train loss:3.970040 +step:1681 train loss:4.032377 +step:1682 train loss:3.983798 +step:1683 train loss:3.979014 +step:1684 train loss:3.931128 +step:1685 train loss:3.993596 +step:1686 train loss:3.978574 +step:1687 train loss:3.987544 +step:1688 train loss:3.971282 +step:1689 train loss:3.957453 +step:1690 train loss:3.986038 +step:1691 train loss:3.967203 +step:1692 train loss:3.987756 +step:1693 train loss:3.953168 +step:1694 train loss:3.909586 +step:1695 train loss:3.933867 +step:1696 train loss:3.945554 +step:1697 train loss:3.988125 +step:1698 train loss:3.981144 +step:1699 train loss:3.943944 +step:1700 train loss:4.021668 +step:1701 train loss:3.961321 +step:1702 train loss:3.951210 +step:1703 train loss:3.978006 +step:1704 train loss:3.983076 +step:1705 train loss:3.997059 +step:1706 train loss:4.004779 +step:1707 train loss:4.003907 +step:1708 train loss:3.928981 +step:1709 train loss:4.027884 +step:1710 train loss:3.948577 +step:1711 train loss:3.950382 +step:1712 train loss:3.974317 +step:1713 train loss:3.941683 +step:1714 train 
loss:4.305004 +step:1715 train loss:3.954126 +step:1716 train loss:3.940776 +step:1717 train loss:3.942585 +step:1718 train loss:4.016557 +step:1719 train loss:3.931494 +step:1720 train loss:4.011684 +step:1721 train loss:3.955692 +step:1722 train loss:3.927586 +step:1723 train loss:4.021253 +step:1724 train loss:3.974919 +step:1725 train loss:3.970702 +step:1726 train loss:3.969493 +step:1727 train loss:4.002174 +step:1728 train loss:4.012255 +step:1729 train loss:3.933238 +step:1730 train loss:4.010159 +step:1731 train loss:3.938967 +step:1732 train loss:3.951612 +step:1733 train loss:3.937934 +step:1734 train loss:3.987190 +step:1735 train loss:4.047741 +step:1736 train loss:3.956920 +step:1737 train loss:3.987840 +step:1738 train loss:3.948587 +step:1739 train loss:4.010475 +step:1740 train loss:4.004231 +step:1741 train loss:4.055859 +step:1742 train loss:4.043920 +step:1743 train loss:3.935030 +step:1744 train loss:3.950463 +step:1745 train loss:3.934930 +step:1746 train loss:3.921081 +step:1747 train loss:3.956699 +step:1748 train loss:3.893301 +step:1749 train loss:3.937378 +step:1750 validation loss:3.918212 +step:1750 train loss:3.974031 +step:1751 train loss:3.991816 +step:1752 train loss:3.958805 +step:1753 train loss:3.982025 +step:1754 train loss:3.973300 +step:1755 train loss:3.968437 +step:1756 train loss:3.994126 +step:1757 train loss:3.998603 +step:1758 train loss:3.916152 +step:1759 train loss:4.007413 +step:1760 train loss:3.958424 +step:1761 train loss:3.931594 +step:1762 train loss:3.930870 +step:1763 train loss:3.935308 +step:1764 train loss:4.222337 +step:1765 train loss:3.936707 +step:1766 train loss:4.027840 +step:1767 train loss:3.943612 +step:1768 train loss:3.920148 +step:1769 train loss:3.937819 +step:1770 train loss:3.956092 +step:1771 train loss:3.930872 +step:1772 train loss:4.040700 +step:1773 train loss:3.965902 +step:1774 train loss:3.971622 +step:1775 train loss:4.079760 +step:1776 train loss:3.956841 +step:1777 train loss:3.947413 +step:1778 train loss:4.001417 +step:1779 train loss:3.936023 +step:1780 train loss:3.989896 +step:1781 train loss:3.992721 +step:1782 train loss:4.021553 +step:1783 train loss:3.955614 +step:1784 train loss:4.042638 +step:1785 train loss:3.948818 +step:1786 train loss:3.946186 +step:1787 train loss:3.945612 +step:1788 train loss:3.966322 +step:1789 train loss:3.919527 +step:1790 train loss:3.934791 +step:1791 train loss:4.008526 +step:1792 train loss:4.012050 +step:1793 train loss:3.929216 +step:1794 train loss:3.974496 +step:1795 train loss:3.927541 +step:1796 train loss:3.911143 +step:1797 train loss:3.974102 +step:1798 train loss:3.914603 +step:1799 train loss:3.970273 +step:1800 train loss:3.997257 +step:1801 train loss:3.986267 +step:1802 train loss:3.993026 +step:1803 train loss:3.984348 +step:1804 train loss:3.980470 +step:1805 train loss:3.968519 +step:1806 train loss:3.979447 +step:1807 train loss:3.911317 +step:1808 train loss:3.974761 +step:1809 train loss:3.958380 +step:1810 train loss:3.951443 +step:1811 train loss:3.966707 +step:1812 train loss:3.950568 +step:1813 train loss:3.963118 +step:1814 train loss:4.026335 +step:1815 train loss:3.966952 +step:1816 train loss:3.920548 +step:1817 train loss:3.910460 +step:1818 train loss:3.970368 +step:1819 train loss:3.937280 +step:1820 train loss:3.977801 +step:1821 train loss:3.942266 +step:1822 train loss:3.920334 +step:1823 train loss:3.912405 +step:1824 train loss:3.990849 +step:1825 train loss:3.902930 +step:1826 train loss:3.945501 +step:1827 train loss:3.913388 
+step:1828 train loss:3.961218 +step:1829 train loss:3.925547 +step:1830 train loss:4.118295 +step:1831 train loss:3.879127 +step:1832 train loss:3.925362 +step:1833 train loss:3.973782 +step:1834 train loss:3.922958 +step:1835 train loss:3.932943 +step:1836 train loss:3.968266 +step:1837 train loss:3.894076 +step:1838 train loss:3.990294 +step:1839 train loss:3.969974 +step:1840 train loss:3.943196 +step:1841 train loss:3.965270 +step:1842 train loss:3.939261 +step:1843 train loss:3.886340 +step:1844 train loss:3.952379 +step:1845 train loss:3.919869 +step:1846 train loss:3.974652 +step:1847 train loss:4.026201 +step:1848 train loss:3.821526 +step:1849 train loss:3.919809 +step:1850 train loss:3.894225 +step:1851 train loss:3.937754 +step:1852 train loss:3.919832 +step:1853 train loss:3.978655 +step:1854 train loss:3.940893 +step:1855 train loss:3.926207 +step:1856 train loss:3.931686 +step:1857 train loss:3.934386 +step:1858 train loss:3.980775 +step:1859 train loss:3.929085 +step:1860 train loss:3.901317 +step:1861 train loss:3.917628 +step:1862 train loss:3.960144 +step:1863 train loss:3.998365 +step:1864 train loss:3.896397 +step:1865 train loss:3.919250 +step:1866 train loss:3.922946 +step:1867 train loss:3.954264 +step:1868 train loss:4.002278 +step:1869 train loss:3.920830 +step:1870 train loss:3.949316 +step:1871 train loss:3.890197 +step:1872 train loss:3.955280 +step:1873 train loss:4.016154 +step:1874 train loss:3.881415 +step:1875 train loss:3.958344 +step:1876 train loss:3.919143 +step:1877 train loss:3.961274 +step:1878 train loss:3.885461 +step:1879 train loss:3.945613 +step:1880 train loss:4.024500 +step:1881 train loss:3.949704 +step:1882 train loss:3.966684 +step:1883 train loss:3.986792 +step:1884 train loss:3.996383 +step:1885 train loss:3.958093 +step:1886 train loss:3.886107 +step:1887 train loss:3.902027 +step:1888 train loss:3.906958 +step:1889 train loss:3.918636 +step:1890 train loss:3.928221 +step:1891 train loss:3.859821 +step:1892 train loss:3.957162 +step:1893 train loss:3.878394 +step:1894 train loss:3.896914 +step:1895 train loss:3.932007 +step:1896 train loss:3.979966 +step:1897 train loss:3.879055 +step:1898 train loss:3.926186 +step:1899 train loss:3.938896 +step:1900 train loss:3.892530 +step:1901 train loss:3.966970 +step:1902 train loss:3.961420 +step:1903 train loss:3.901222 +step:1904 train loss:3.889679 +step:1905 train loss:3.890831 +step:1906 train loss:3.944999 +step:1907 train loss:3.892519 +step:1908 train loss:3.906164 +step:1909 train loss:4.003321 +step:1910 train loss:3.889337 +step:1911 train loss:3.897775 +step:1912 train loss:3.949874 +step:1913 train loss:3.886686 +step:1914 train loss:3.922089 +step:1915 train loss:3.888405 +step:1916 train loss:3.937663 +step:1917 train loss:3.921953 +step:1918 train loss:3.832579 +step:1919 train loss:3.982443 +step:1920 train loss:4.090404 +step:1921 train loss:3.870186 +step:1922 train loss:3.848734 +step:1923 train loss:3.947201 +step:1924 train loss:3.985801 +step:1925 train loss:3.930322 +step:1926 train loss:3.868793 +step:1927 train loss:3.950281 +step:1928 train loss:3.865006 +step:1929 train loss:3.894264 +step:1930 train loss:3.965080 +step:1931 train loss:3.875556 +step:1932 train loss:3.925665 +step:1933 train loss:3.922123 +step:1934 train loss:3.995614 +step:1935 train loss:3.945938 +step:1936 train loss:3.913488 +step:1937 train loss:3.852640 +step:1938 train loss:4.217505 +step:1939 train loss:3.960504 +step:1940 train loss:3.941652 +step:1941 train loss:3.944746 +step:1942 train 
loss:3.940635 +step:1943 train loss:3.932405 +step:1944 train loss:3.894991 +step:1945 train loss:3.894922 +step:1946 train loss:3.917412 +step:1947 train loss:3.942585 +step:1948 train loss:3.849994 +step:1949 train loss:3.960743 +step:1950 train loss:3.898978 +step:1951 train loss:3.920959 +step:1952 train loss:3.949010 +step:1953 train loss:3.879511 +step:1954 train loss:3.915399 +step:1955 train loss:3.867409 +step:1956 train loss:3.949112 +step:1957 train loss:3.974831 +step:1958 train loss:3.990307 +step:1959 train loss:3.861525 +step:1960 train loss:3.899441 +step:1961 train loss:3.932083 +step:1962 train loss:3.924880 +step:1963 train loss:3.900602 +step:1964 train loss:3.938363 +step:1965 train loss:3.973606 +step:1966 train loss:3.881248 +step:1967 train loss:3.942012 +step:1968 train loss:3.879693 +step:1969 train loss:3.896281 +step:1970 train loss:3.961012 +step:1971 train loss:3.862161 +step:1972 train loss:3.971197 +step:1973 train loss:3.866566 +step:1974 train loss:3.911064 +step:1975 train loss:3.874515 +step:1976 train loss:3.895041 +step:1977 train loss:3.940819 +step:1978 train loss:3.883779 +step:1979 train loss:3.861298 +step:1980 train loss:3.900205 +step:1981 train loss:3.879771 +step:1982 train loss:3.962974 +step:1983 train loss:3.906755 +step:1984 train loss:3.946968 +step:1985 train loss:3.933285 +step:1986 train loss:3.922073 +step:1987 train loss:3.878858 +step:1988 train loss:3.906044 +step:1989 train loss:4.043872 +step:1990 train loss:3.881495 +step:1991 train loss:3.872393 +step:1992 train loss:3.883878 +step:1993 train loss:3.918330 +step:1994 train loss:3.908557 +step:1995 train loss:3.862171 +step:1996 train loss:3.914912 +step:1997 train loss:3.919419 +step:1998 train loss:3.872280 +step:1999 train loss:3.981573 +step:2000 validation loss:3.850694 total_sharp:1.1283e-02 L1_sharp:3.1717e-01 L2_sharp:1.2518e-01 L3_sharp:1.0358e-01 L4_sharp:4.7555e-02 L5_sharp:5.0288e-02 L6_sharp:5.0988e-02 L7_sharp:4.8821e-02 L8_sharp:4.3476e-02 L9_sharp:3.5504e-02 L10_sharp:3.0301e-02 L11_sharp:3.2008e-02 L12_sharp:5.8790e-02 total_fnorm:1.3834e+00 total_l1_linf:8.2862e+03 total_spectral:1.3834e+00 L1_fnorm:6.1830e-02 L2_fnorm:5.7598e-02 L3_fnorm:5.6111e-02 L4_fnorm:5.8937e-02 L5_fnorm:5.9865e-02 L6_fnorm:6.0855e-02 L7_fnorm:6.0961e-02 L8_fnorm:6.1063e-02 L9_fnorm:6.1093e-02 L10_fnorm:6.1104e-02 L11_fnorm:6.0959e-02 L12_fnorm:6.1326e-02 L1_l1linf:2.6606e-01 L2_l1linf:2.6230e-01 L3_l1linf:2.8704e-01 L4_l1linf:2.8862e-01 L5_l1linf:2.8094e-01 L6_l1linf:2.6968e-01 L7_l1linf:2.6070e-01 L8_l1linf:2.5321e-01 L9_l1linf:2.7588e-01 L10_l1linf:2.8981e-01 L11_l1linf:2.8902e-01 L12_l1linf:3.0642e-01 L1_spectral:5.8868e-03 L2_spectral:5.9246e-03 L3_spectral:6.4720e-03 L4_spectral:6.4680e-03 L5_spectral:6.3795e-03 L6_spectral:6.0477e-03 L7_spectral:5.8277e-03 L8_spectral:5.7009e-03 L9_spectral:6.1834e-03 L10_spectral:6.5124e-03 L11_spectral:6.4932e-03 L12_spectral:6.7621e-03 ip_v_neg_g:1.2053e-02 cos_v_neg_g:1.6731e-03 v_norm:1.3834e+00 g_norm:5.2073e+00 hv_norm:2.1657e+00 cos_v_hv:7.2074e-03 hg_norm:1.1235e+03 cos_g_hg:5.6715e-01 v_par:6.7486e-05 v_perp:1.3834e+00 L1_cos_v_neg_g:1.7523e-02 L1_v_norm:6.1830e-02 L2_cos_v_neg_g:1.6919e-02 L2_v_norm:5.7598e-02 L3_cos_v_neg_g:1.3391e-02 L3_v_norm:5.6111e-02 L4_cos_v_neg_g:1.0819e-02 L4_v_norm:5.8937e-02 L5_cos_v_neg_g:1.0450e-02 L5_v_norm:5.9865e-02 L6_cos_v_neg_g:1.0155e-02 L6_v_norm:6.0855e-02 L7_cos_v_neg_g:9.5549e-03 L7_v_norm:6.0961e-02 L8_cos_v_neg_g:9.2656e-03 L8_v_norm:6.1063e-02 L9_cos_v_neg_g:8.7361e-03 L9_v_norm:6.1093e-02 
L10_cos_v_neg_g:7.8504e-03 L10_v_norm:6.1104e-02 L11_cos_v_neg_g:8.1895e-03 L11_v_norm:6.0959e-02 L12_cos_v_neg_g:8.0553e-03 L12_v_norm:6.1326e-02 +step:2000 train loss:3.951825 +step:2001 train loss:3.873427 +step:2002 train loss:3.972756 +step:2003 train loss:4.018106 +step:2004 train loss:3.891980 +step:2005 train loss:3.989154 +step:2006 train loss:3.876718 +step:2007 train loss:3.953313 +step:2008 train loss:3.895987 +step:2009 train loss:3.895917 +step:2010 train loss:4.022958 +step:2011 train loss:3.876166 +step:2012 train loss:3.900195 +step:2013 train loss:3.917007 +step:2014 train loss:3.805568 +step:2015 train loss:3.931021 +step:2016 train loss:3.907890 +step:2017 train loss:3.911168 +step:2018 train loss:3.877908 +step:2019 train loss:3.907536 +step:2020 train loss:3.915653 +step:2021 train loss:3.877666 +step:2022 train loss:3.922764 +step:2023 train loss:3.898637 +step:2024 train loss:3.951042 +step:2025 train loss:3.891970 +step:2026 train loss:3.871070 +step:2027 train loss:3.898810 +step:2028 train loss:3.831244 +step:2029 train loss:3.859449 +step:2030 train loss:3.865270 +step:2031 train loss:3.827923 +step:2032 train loss:3.879657 +step:2033 train loss:3.877514 +step:2034 train loss:3.871907 +step:2035 train loss:3.913881 +step:2036 train loss:3.904566 +step:2037 train loss:3.890417 +step:2038 train loss:3.888889 +step:2039 train loss:3.877478 +step:2040 train loss:3.909037 +step:2041 train loss:3.912498 +step:2042 train loss:3.842526 +step:2043 train loss:3.999901 +step:2044 train loss:3.864547 +step:2045 train loss:3.884839 +step:2046 train loss:3.892914 +step:2047 train loss:3.869166 +step:2048 train loss:3.911508 +step:2049 train loss:3.866179 +step:2050 train loss:3.892419 +step:2051 train loss:3.857313 +step:2052 train loss:3.903749 +step:2053 train loss:3.907699 +step:2054 train loss:3.873823 +step:2055 train loss:3.874538 +step:2056 train loss:3.918712 +step:2057 train loss:3.927082 +step:2058 train loss:3.892922 +step:2059 train loss:3.970897 +step:2060 train loss:3.918733 +step:2061 train loss:3.871101 +step:2062 train loss:3.897402 +step:2063 train loss:3.800532 +step:2064 train loss:3.920396 +step:2065 train loss:3.929169 +step:2066 train loss:3.787815 +step:2067 train loss:3.834288 +step:2068 train loss:3.941503 +step:2069 train loss:3.875333 +step:2070 train loss:3.878823 +step:2071 train loss:3.922485 +step:2072 train loss:3.849476 +step:2073 train loss:3.903487 +step:2074 train loss:3.882478 +step:2075 train loss:3.963668 +step:2076 train loss:3.909178 +step:2077 train loss:3.921999 +step:2078 train loss:3.876381 +step:2079 train loss:4.028725 +step:2080 train loss:3.847235 +step:2081 train loss:3.957636 +step:2082 train loss:3.885520 +step:2083 train loss:3.874248 +step:2084 train loss:3.852152 +step:2085 train loss:3.896456 +step:2086 train loss:3.910830 +step:2087 train loss:3.948016 +step:2088 train loss:3.815420 +step:2089 train loss:3.845017 +step:2090 train loss:3.881373 +step:2091 train loss:3.897773 +step:2092 train loss:3.878639 +step:2093 train loss:3.868075 +step:2094 train loss:3.905498 +step:2095 train loss:3.849477 +step:2096 train loss:3.839504 +step:2097 train loss:3.872337 +step:2098 train loss:3.874648 +step:2099 train loss:3.853866 +step:2100 train loss:3.920861 +step:2101 train loss:3.912380 +step:2102 train loss:3.880016 +step:2103 train loss:3.896885 +step:2104 train loss:3.874441 +step:2105 train loss:3.880050 +step:2106 train loss:3.874429 +step:2107 train loss:3.942173 +step:2108 train loss:3.863271 +step:2109 train 
loss:3.822487 +step:2110 train loss:3.920117 +step:2111 train loss:3.865602 +step:2112 train loss:3.924521 +step:2113 train loss:3.862965 +step:2114 train loss:3.868813 +step:2115 train loss:3.919921 +step:2116 train loss:3.851246 +step:2117 train loss:3.868513 +step:2118 train loss:3.862147 +step:2119 train loss:3.796258 +step:2120 train loss:3.883834 +step:2121 train loss:3.871571 +step:2122 train loss:3.883097 +step:2123 train loss:3.937339 +step:2124 train loss:3.939956 +step:2125 train loss:3.847036 +step:2126 train loss:3.854185 +step:2127 train loss:3.844154 +step:2128 train loss:3.839263 +step:2129 train loss:3.864361 +step:2130 train loss:3.869584 +step:2131 train loss:3.892292 +step:2132 train loss:3.821061 +step:2133 train loss:3.931328 +step:2134 train loss:3.881433 +step:2135 train loss:3.841468 +step:2136 train loss:3.931513 +step:2137 train loss:3.897643 +step:2138 train loss:3.854099 +step:2139 train loss:3.855435 +step:2140 train loss:3.859830 +step:2141 train loss:3.907484 +step:2142 train loss:3.877431 +step:2143 train loss:3.800897 +step:2144 train loss:3.907972 +step:2145 train loss:3.878524 +step:2146 train loss:3.912869 +step:2147 train loss:4.018136 +step:2148 train loss:3.819469 +step:2149 train loss:3.829200 +step:2150 train loss:3.856416 +step:2151 train loss:3.890211 +step:2152 train loss:3.884505 +step:2153 train loss:3.924655 +step:2154 train loss:3.841453 +step:2155 train loss:3.921797 +step:2156 train loss:3.845159 +step:2157 train loss:3.922518 +step:2158 train loss:3.958025 +step:2159 train loss:3.884544 +step:2160 train loss:3.960830 +step:2161 train loss:3.859351 +step:2162 train loss:3.864823 +step:2163 train loss:3.841413 +step:2164 train loss:3.866163 +step:2165 train loss:3.841384 +step:2166 train loss:3.958464 +step:2167 train loss:3.866848 +step:2168 train loss:3.880289 +step:2169 train loss:3.831848 +step:2170 train loss:3.976970 +step:2171 train loss:3.937592 +step:2172 train loss:3.872413 +step:2173 train loss:3.861612 +step:2174 train loss:3.924450 +step:2175 train loss:3.857825 +step:2176 train loss:3.935565 +step:2177 train loss:3.907943 +step:2178 train loss:3.832582 +step:2179 train loss:3.898791 +step:2180 train loss:3.915324 +step:2181 train loss:3.845165 +step:2182 train loss:3.895341 +step:2183 train loss:3.888520 +step:2184 train loss:3.840824 +step:2185 train loss:3.819392 +step:2186 train loss:3.859753 +step:2187 train loss:3.869293 +step:2188 train loss:3.918228 +step:2189 train loss:3.808468 +step:2190 train loss:3.854937 +step:2191 train loss:3.910735 +step:2192 train loss:3.838607 +step:2193 train loss:3.808165 +step:2194 train loss:3.814588 +step:2195 train loss:3.839281 +step:2196 train loss:3.847560 +step:2197 train loss:3.827213 +step:2198 train loss:3.851031 +step:2199 train loss:3.921211 +step:2200 train loss:3.854732 +step:2201 train loss:3.860240 +step:2202 train loss:3.823256 +step:2203 train loss:3.846711 +step:2204 train loss:3.876810 +step:2205 train loss:3.858380 +step:2206 train loss:3.858898 +step:2207 train loss:3.852416 +step:2208 train loss:3.829782 +step:2209 train loss:4.112104 +step:2210 train loss:3.884245 +step:2211 train loss:3.876548 +step:2212 train loss:3.850285 +step:2213 train loss:3.932101 +step:2214 train loss:3.923973 +step:2215 train loss:3.848797 +step:2216 train loss:3.818321 +step:2217 train loss:3.845272 +step:2218 train loss:3.845293 +step:2219 train loss:3.879158 +step:2220 train loss:3.822797 +step:2221 train loss:3.853816 +step:2222 train loss:3.870515 +step:2223 train loss:3.909014 
+step:2224 train loss:3.884598 +step:2225 train loss:3.823303 +step:2226 train loss:3.888977 +step:2227 train loss:3.889490 +step:2228 train loss:3.887167 +step:2229 train loss:3.827218 +step:2230 train loss:3.954014 +step:2231 train loss:3.871963 +step:2232 train loss:3.864019 +step:2233 train loss:3.910248 +step:2234 train loss:3.808498 +step:2235 train loss:3.895205 +step:2236 train loss:3.833459 +step:2237 train loss:3.967557 +step:2238 train loss:3.772008 +step:2239 train loss:3.850816 +step:2240 train loss:3.864156 +step:2241 train loss:3.779706 +step:2242 train loss:3.919637 +step:2243 train loss:3.956180 +step:2244 train loss:3.834814 +step:2245 train loss:3.834806 +step:2246 train loss:3.802771 +step:2247 train loss:3.805737 +step:2248 train loss:3.859985 +step:2249 train loss:3.843616 +step:2250 validation loss:3.799471 +step:2250 train loss:3.855023 +step:2251 train loss:3.819824 +step:2252 train loss:3.822468 +step:2253 train loss:3.846762 +step:2254 train loss:3.852262 +step:2255 train loss:3.813822 +step:2256 train loss:3.862544 +step:2257 train loss:3.851819 +step:2258 train loss:3.843318 +step:2259 train loss:3.859521 +step:2260 train loss:3.810946 +step:2261 train loss:3.890042 +step:2262 train loss:3.909031 +step:2263 train loss:3.865254 +step:2264 train loss:3.978790 +step:2265 train loss:3.826939 +step:2266 train loss:3.872176 +step:2267 train loss:3.832540 +step:2268 train loss:3.835522 +step:2269 train loss:3.838450 +step:2270 train loss:3.830174 +step:2271 train loss:3.845782 +step:2272 train loss:3.884119 +step:2273 train loss:3.802902 +step:2274 train loss:3.834512 +step:2275 train loss:3.791423 +step:2276 train loss:3.863577 +step:2277 train loss:3.874974 +step:2278 train loss:3.857597 +step:2279 train loss:3.838695 +step:2280 train loss:3.745986 +step:2281 train loss:3.892958 +step:2282 train loss:3.822710 +step:2283 train loss:3.805066 +step:2284 train loss:3.824040 +step:2285 train loss:3.875836 +step:2286 train loss:3.839165 +step:2287 train loss:3.875533 +step:2288 train loss:3.847177 +step:2289 train loss:3.843895 +step:2290 train loss:3.851066 +step:2291 train loss:3.838728 +step:2292 train loss:3.877476 +step:2293 train loss:3.855716 +step:2294 train loss:3.853598 +step:2295 train loss:3.906415 +step:2296 train loss:3.839696 +step:2297 train loss:3.814790 +step:2298 train loss:3.872852 +step:2299 train loss:3.848954 +step:2300 train loss:3.762967 +step:2301 train loss:3.860172 +step:2302 train loss:3.874053 +step:2303 train loss:3.842924 +step:2304 train loss:3.836025 +step:2305 train loss:3.879710 +step:2306 train loss:3.870573 +step:2307 train loss:3.844067 +step:2308 train loss:3.867286 +step:2309 train loss:3.823680 +step:2310 train loss:3.810908 +step:2311 train loss:3.799144 +step:2312 train loss:3.866671 +step:2313 train loss:3.781086 +step:2314 train loss:3.857370 +step:2315 train loss:3.871596 +step:2316 train loss:3.908580 +step:2317 train loss:3.777210 +step:2318 train loss:3.818574 +step:2319 train loss:3.872958 +step:2320 train loss:3.840698 +step:2321 train loss:3.812765 +step:2322 train loss:3.827039 +step:2323 train loss:3.822202 +step:2324 train loss:3.849590 +step:2325 train loss:3.792715 +step:2326 train loss:3.819793 +step:2327 train loss:3.933167 +step:2328 train loss:3.882790 +step:2329 train loss:3.836116 +step:2330 train loss:3.795159 +step:2331 train loss:3.838196 +step:2332 train loss:3.765336 +step:2333 train loss:3.826482 +step:2334 train loss:3.804862 +step:2335 train loss:3.791647 +step:2336 train loss:4.042228 +step:2337 
train loss:3.818376 +step:2338 train loss:3.858737 +step:2339 train loss:3.857367 +step:2340 train loss:3.869811 +step:2341 train loss:3.861974 +step:2342 train loss:3.812453 +step:2343 train loss:3.837601 +step:2344 train loss:3.879668 +step:2345 train loss:3.834260 +step:2346 train loss:3.862387 +step:2347 train loss:3.784316 +step:2348 train loss:3.844507 +step:2349 train loss:3.793030 +step:2350 train loss:3.850722 +step:2351 train loss:3.858238 +step:2352 train loss:3.859681 +step:2353 train loss:3.816516 +step:2354 train loss:3.863857 +step:2355 train loss:3.851291 +step:2356 train loss:3.890357 +step:2357 train loss:3.797418 +step:2358 train loss:3.812098 +step:2359 train loss:3.837199 +step:2360 train loss:3.859509 +step:2361 train loss:3.893863 +step:2362 train loss:3.722979 +step:2363 train loss:3.913538 +step:2364 train loss:3.865278 +step:2365 train loss:3.836099 +step:2366 train loss:3.789623 +step:2367 train loss:3.855073 +step:2368 train loss:3.846150 +step:2369 train loss:3.832984 +step:2370 train loss:3.848570 +step:2371 train loss:3.905847 +step:2372 train loss:3.757196 +step:2373 train loss:3.898031 +step:2374 train loss:3.878998 +step:2375 train loss:3.864466 +step:2376 train loss:3.858317 +step:2377 train loss:3.799702 +step:2378 train loss:3.847696 +step:2379 train loss:3.831988 +step:2380 train loss:3.894019 +step:2381 train loss:3.986882 +step:2382 train loss:3.774299 +step:2383 train loss:3.826673 +step:2384 train loss:3.852184 +step:2385 train loss:3.754490 +step:2386 train loss:3.912066 +step:2387 train loss:3.793651 +step:2388 train loss:3.843874 +step:2389 train loss:3.865035 +step:2390 train loss:3.813292 +step:2391 train loss:3.837291 +step:2392 train loss:3.862805 +step:2393 train loss:3.818834 +step:2394 train loss:3.839206 +step:2395 train loss:3.831868 +step:2396 train loss:3.838345 +step:2397 train loss:3.812179 +step:2398 train loss:3.868088 +step:2399 train loss:3.829362 +step:2400 train loss:3.808787 +step:2401 train loss:3.849771 +step:2402 train loss:3.800841 +step:2403 train loss:3.850990 +step:2404 train loss:3.809040 +step:2405 train loss:3.811461 +step:2406 train loss:3.836995 +step:2407 train loss:3.781156 +step:2408 train loss:3.827325 +step:2409 train loss:3.816526 +step:2410 train loss:3.815485 +step:2411 train loss:3.889078 +step:2412 train loss:3.875122 +step:2413 train loss:3.913787 +step:2414 train loss:3.803778 +step:2415 train loss:3.795224 +step:2416 train loss:3.811802 +step:2417 train loss:3.848756 +step:2418 train loss:3.868255 +step:2419 train loss:3.797559 +step:2420 train loss:3.816234 +step:2421 train loss:3.848047 +step:2422 train loss:3.898469 +step:2423 train loss:3.830098 +step:2424 train loss:3.798572 +step:2425 train loss:3.859182 +step:2426 train loss:3.799276 +step:2427 train loss:3.819929 +step:2428 train loss:3.903573 +step:2429 train loss:3.855540 +step:2430 train loss:3.948640 +step:2431 train loss:3.859766 +step:2432 train loss:3.830508 +step:2433 train loss:3.807274 +step:2434 train loss:3.792840 +step:2435 train loss:3.849669 +step:2436 train loss:3.808646 +step:2437 train loss:3.839294 +step:2438 train loss:3.880789 +step:2439 train loss:3.866926 +step:2440 train loss:3.807665 +step:2441 train loss:3.843388 +step:2442 train loss:3.836280 +step:2443 train loss:3.799046 +step:2444 train loss:3.836896 +step:2445 train loss:3.831241 +step:2446 train loss:3.803024 +step:2447 train loss:3.785600 +step:2448 train loss:3.834831 +step:2449 train loss:3.864836 +step:2450 train loss:3.823362 +step:2451 train loss:3.742449 
+step:2452 train loss:3.844172 +step:2453 train loss:3.815491 +step:2454 train loss:3.812390 +step:2455 train loss:3.863194 +step:2456 train loss:3.816400 +step:2457 train loss:3.874977 +step:2458 train loss:3.854077 +step:2459 train loss:3.828475 +step:2460 train loss:3.835308 +step:2461 train loss:3.864775 +step:2462 train loss:3.837347 +step:2463 train loss:3.810856 +step:2464 train loss:3.826966 +step:2465 train loss:3.905206 +step:2466 train loss:3.987424 +step:2467 train loss:3.894209 +step:2468 train loss:3.789519 +step:2469 train loss:3.860176 +step:2470 train loss:3.908510 +step:2471 train loss:3.908563 +step:2472 train loss:3.888120 +step:2473 train loss:3.824332 +step:2474 train loss:3.782361 +step:2475 train loss:3.838686 +step:2476 train loss:3.913538 +step:2477 train loss:3.829035 +step:2478 train loss:3.784570 +step:2479 train loss:3.825969 +step:2480 train loss:3.819380 +step:2481 train loss:4.010895 +step:2482 train loss:3.820908 +step:2483 train loss:3.849131 +step:2484 train loss:3.800638 +step:2485 train loss:3.788594 +step:2486 train loss:3.826545 +step:2487 train loss:3.861248 +step:2488 train loss:3.770769 +step:2489 train loss:3.883604 +step:2490 train loss:3.803320 +step:2491 train loss:3.815376 +step:2492 train loss:3.857224 +step:2493 train loss:3.893980 +step:2494 train loss:3.814725 +step:2495 train loss:3.846889 +step:2496 train loss:3.823475 +step:2497 train loss:3.839884 +step:2498 train loss:3.845135 +step:2499 train loss:3.839414 +step:2500 validation loss:3.762931 total_sharp:7.6708e-03 L1_sharp:6.7258e-02 L2_sharp:4.1951e-02 L3_sharp:5.4366e-02 L4_sharp:3.4489e-02 L5_sharp:3.5527e-02 L6_sharp:3.7633e-02 L7_sharp:4.1860e-02 L8_sharp:4.3552e-02 L9_sharp:2.9753e-02 L10_sharp:2.3385e-02 L11_sharp:2.1461e-02 L12_sharp:3.9330e-02 total_fnorm:1.3582e+00 total_l1_linf:8.1449e+03 total_spectral:1.3582e+00 L1_fnorm:6.1569e-02 L2_fnorm:5.8600e-02 L3_fnorm:5.7375e-02 L4_fnorm:5.9334e-02 L5_fnorm:6.0108e-02 L6_fnorm:6.0949e-02 L7_fnorm:6.1015e-02 L8_fnorm:6.1153e-02 L9_fnorm:6.1041e-02 L10_fnorm:6.1054e-02 L11_fnorm:6.0972e-02 L12_fnorm:6.1068e-02 L1_l1linf:2.5378e-01 L2_l1linf:2.8599e-01 L3_l1linf:2.9774e-01 L4_l1linf:2.9988e-01 L5_l1linf:2.9335e-01 L6_l1linf:2.7726e-01 L7_l1linf:2.6857e-01 L8_l1linf:2.8016e-01 L9_l1linf:2.7999e-01 L10_l1linf:2.9135e-01 L11_l1linf:2.9381e-01 L12_l1linf:3.0075e-01 L1_spectral:5.8015e-03 L2_spectral:6.4284e-03 L3_spectral:6.7181e-03 L4_spectral:6.7318e-03 L5_spectral:6.6332e-03 L6_spectral:6.2558e-03 L7_spectral:6.0376e-03 L8_spectral:6.2775e-03 L9_spectral:6.2928e-03 L10_spectral:6.5670e-03 L11_spectral:6.6177e-03 L12_spectral:6.7668e-03 ip_v_neg_g:5.0868e-03 cos_v_neg_g:8.9655e-04 v_norm:1.3582e+00 g_norm:4.1774e+00 hv_norm:1.3749e+00 cos_v_hv:7.5777e-03 hg_norm:3.5956e+02 cos_g_hg:4.9863e-01 v_par:3.8521e-05 v_perp:1.3582e+00 L1_cos_v_neg_g:6.4001e-03 L1_v_norm:6.1569e-02 L2_cos_v_neg_g:6.8418e-03 L2_v_norm:5.8600e-02 L3_cos_v_neg_g:6.5250e-03 L3_v_norm:5.7375e-02 L4_cos_v_neg_g:5.6813e-03 L4_v_norm:5.9334e-02 L5_cos_v_neg_g:5.3396e-03 L5_v_norm:6.0108e-02 L6_cos_v_neg_g:5.6339e-03 L6_v_norm:6.0949e-02 L7_cos_v_neg_g:5.2487e-03 L7_v_norm:6.1015e-02 L8_cos_v_neg_g:5.3631e-03 L8_v_norm:6.1153e-02 L9_cos_v_neg_g:4.3251e-03 L9_v_norm:6.1041e-02 L10_cos_v_neg_g:4.5765e-03 L10_v_norm:6.1054e-02 L11_cos_v_neg_g:5.1900e-03 L11_v_norm:6.0972e-02 L12_cos_v_neg_g:5.4274e-03 L12_v_norm:6.1068e-02 +step:2500 train loss:3.783671 +step:2501 train loss:3.847509 +step:2502 train loss:3.836718 +step:2503 train loss:3.764784 +step:2504 train 
loss:3.800819 +step:2505 train loss:3.823640 +step:2506 train loss:3.789923 +step:2507 train loss:3.816277 +step:2508 train loss:3.762289 +step:2509 train loss:3.782722 +step:2510 train loss:3.780169 +step:2511 train loss:3.825871 +step:2512 train loss:3.870530 +step:2513 train loss:3.817499 +step:2514 train loss:3.804357 +step:2515 train loss:3.949477 +step:2516 train loss:3.827573 +step:2517 train loss:3.892151 +step:2518 train loss:3.853584 +step:2519 train loss:3.826592 +step:2520 train loss:3.833598 +step:2521 train loss:3.804833 +step:2522 train loss:3.847270 +step:2523 train loss:3.763190 +step:2524 train loss:3.821049 +step:2525 train loss:3.810573 +step:2526 train loss:3.862724 +step:2527 train loss:3.851920 +step:2528 train loss:3.836454 +step:2529 train loss:3.857130 +step:2530 train loss:3.832874 +step:2531 train loss:3.772127 +step:2532 train loss:3.874884 +step:2533 train loss:3.763470 +step:2534 train loss:3.862413 +step:2535 train loss:3.815799 +step:2536 train loss:3.739827 +step:2537 train loss:3.854389 +step:2538 train loss:3.833026 +step:2539 train loss:3.851977 +step:2540 train loss:3.787340 +step:2541 train loss:3.814457 +step:2542 train loss:3.826277 +step:2543 train loss:3.815663 +step:2544 train loss:3.803126 +step:2545 train loss:3.789949 +step:2546 train loss:3.756768 +step:2547 train loss:3.803447 +step:2548 train loss:3.822401 +step:2549 train loss:3.825695 +step:2550 train loss:3.956731 +step:2551 train loss:4.031880 +step:2552 train loss:3.765082 +step:2553 train loss:3.799973 +step:2554 train loss:3.943503 +step:2555 train loss:3.829426 +step:2556 train loss:3.757743 +step:2557 train loss:3.850937 +step:2558 train loss:3.843755 +step:2559 train loss:3.796978 +step:2560 train loss:3.782959 +step:2561 train loss:3.881135 +step:2562 train loss:3.833138 +step:2563 train loss:3.767354 +step:2564 train loss:3.835831 +step:2565 train loss:3.818247 +step:2566 train loss:3.796590 +step:2567 train loss:3.776829 +step:2568 train loss:3.833822 +step:2569 train loss:3.840156 +step:2570 train loss:3.790980 +step:2571 train loss:3.872085 +step:2572 train loss:3.836076 +step:2573 train loss:3.765211 +step:2574 train loss:3.813467 +step:2575 train loss:3.858495 +step:2576 train loss:3.810514 +step:2577 train loss:3.773798 +step:2578 train loss:3.812697 +step:2579 train loss:3.791410 +step:2580 train loss:3.763688 +step:2581 train loss:3.777111 +step:2582 train loss:3.786451 +step:2583 train loss:3.809907 +step:2584 train loss:3.825905 +step:2585 train loss:3.787952 +step:2586 train loss:3.813197 +step:2587 train loss:3.743443 +step:2588 train loss:3.775736 +step:2589 train loss:3.854157 +step:2590 train loss:3.777802 +step:2591 train loss:3.835341 +step:2592 train loss:3.885394 +step:2593 train loss:3.840867 +step:2594 train loss:3.802248 +step:2595 train loss:3.809912 +step:2596 train loss:3.852267 +step:2597 train loss:3.734849 +step:2598 train loss:3.890742 +step:2599 train loss:3.836505 +step:2600 train loss:3.868306 +step:2601 train loss:3.802599 +step:2602 train loss:3.836988 +step:2603 train loss:3.832702 +step:2604 train loss:3.752333 +step:2605 train loss:3.880237 +step:2606 train loss:3.828985 +step:2607 train loss:3.787734 +step:2608 train loss:3.762567 +step:2609 train loss:3.787067 +step:2610 train loss:3.812328 +step:2611 train loss:3.849185 +step:2612 train loss:3.810083 +step:2613 train loss:3.784117 +step:2614 train loss:3.773309 +step:2615 train loss:3.770924 +step:2616 train loss:3.845118 +step:2617 train loss:3.805271 +step:2618 train loss:3.770743 
+step:2619 train loss:3.789139 +step:2620 train loss:3.780772 +step:2621 train loss:3.793334 +step:2622 train loss:3.869035 +step:2623 train loss:3.743083 +step:2624 train loss:3.755355 +step:2625 train loss:3.828605 +step:2626 train loss:3.821805 +step:2627 train loss:3.799154 +step:2628 train loss:3.854613 +step:2629 train loss:3.801825 +step:2630 train loss:3.795467 +step:2631 train loss:3.827085 +step:2632 train loss:3.794088 +step:2633 train loss:3.777237 +step:2634 train loss:3.825011 +step:2635 train loss:3.808021 +step:2636 train loss:3.857151 +step:2637 train loss:3.808019 +step:2638 train loss:3.791577 +step:2639 train loss:3.845672 +step:2640 train loss:3.764462 +step:2641 train loss:3.822973 +step:2642 train loss:3.742727 +step:2643 train loss:3.742003 +step:2644 train loss:3.837157 +step:2645 train loss:3.770431 +step:2646 train loss:3.803680 +step:2647 train loss:3.822818 +step:2648 train loss:3.855498 +step:2649 train loss:3.769971 +step:2650 train loss:3.759204 +step:2651 train loss:3.800227 +step:2652 train loss:3.771474 +step:2653 train loss:3.839575 +step:2654 train loss:3.798131 +step:2655 train loss:3.787318 +step:2656 train loss:3.805777 +step:2657 train loss:3.832474 +step:2658 train loss:3.840811 +step:2659 train loss:3.819225 +step:2660 train loss:3.809494 +step:2661 train loss:3.854121 +step:2662 train loss:3.829997 +step:2663 train loss:3.805418 +step:2664 train loss:3.821271 +step:2665 train loss:3.767486 +step:2666 train loss:3.796040 +step:2667 train loss:3.801307 +step:2668 train loss:3.780265 +step:2669 train loss:3.785060 +step:2670 train loss:3.810469 +step:2671 train loss:3.785643 +step:2672 train loss:3.808105 +step:2673 train loss:3.740647 +step:2674 train loss:3.834770 +step:2675 train loss:3.806885 +step:2676 train loss:3.828091 +step:2677 train loss:3.809633 +step:2678 train loss:3.793945 +step:2679 train loss:3.777772 +step:2680 train loss:3.763063 +step:2681 train loss:3.733650 +step:2682 train loss:3.819968 +step:2683 train loss:3.790977 +step:2684 train loss:3.821045 +step:2685 train loss:3.741599 +step:2686 train loss:3.755058 +step:2687 train loss:3.831172 +step:2688 train loss:3.844590 +step:2689 train loss:3.749441 +step:2690 train loss:3.836754 +step:2691 train loss:3.805259 +step:2692 train loss:3.830504 +step:2693 train loss:3.886050 +step:2694 train loss:3.782691 +step:2695 train loss:3.802051 +step:2696 train loss:3.803461 +step:2697 train loss:3.797516 +step:2698 train loss:3.807004 +step:2699 train loss:3.823329 +step:2700 train loss:3.797364 +step:2701 train loss:3.862346 +step:2702 train loss:3.799198 +step:2703 train loss:3.760723 +step:2704 train loss:3.832570 +step:2705 train loss:3.821696 +step:2706 train loss:3.753807 +step:2707 train loss:3.719864 +step:2708 train loss:3.815021 +step:2709 train loss:3.795591 +step:2710 train loss:3.803510 +step:2711 train loss:3.767669 +step:2712 train loss:3.833201 +step:2713 train loss:3.835739 +step:2714 train loss:3.774774 +step:2715 train loss:3.770154 +step:2716 train loss:3.841194 +step:2717 train loss:3.804413 +step:2718 train loss:3.802199 +step:2719 train loss:3.801591 +step:2720 train loss:3.767461 +step:2721 train loss:3.846673 +step:2722 train loss:3.775096 +step:2723 train loss:3.762254 +step:2724 train loss:3.784972 +step:2725 train loss:3.787178 +step:2726 train loss:3.758551 +step:2727 train loss:3.819498 +step:2728 train loss:3.756862 +step:2729 train loss:3.885879 +step:2730 train loss:3.829065 +step:2731 train loss:3.869034 +step:2732 train loss:3.778752 +step:2733 train 
loss:3.776434 +step:2734 train loss:3.819102 +step:2735 train loss:3.821797 +step:2736 train loss:3.745462 +step:2737 train loss:3.799620 +step:2738 train loss:3.855458 +step:2739 train loss:3.778762 +step:2740 train loss:3.778583 +step:2741 train loss:3.767673 +step:2742 train loss:3.688351 +step:2743 train loss:3.795993 +step:2744 train loss:3.815338 +step:2745 train loss:3.772713 +step:2746 train loss:3.792440 +step:2747 train loss:3.774335 +step:2748 train loss:3.734810 +step:2749 train loss:3.797680 +step:2750 validation loss:3.728433 +step:2750 train loss:3.808750 +step:2751 train loss:3.832818 +step:2752 train loss:3.814390 +step:2753 train loss:3.809845 +step:2754 train loss:3.745790 +step:2755 train loss:3.814905 +step:2756 train loss:3.786644 +step:2757 train loss:3.775007 +step:2758 train loss:3.802370 +step:2759 train loss:3.812309 +step:2760 train loss:3.723483 +step:2761 train loss:3.740707 +step:2762 train loss:3.757797 +step:2763 train loss:3.775273 +step:2764 train loss:3.720299 +step:2765 train loss:3.768229 +step:2766 train loss:3.858804 +step:2767 train loss:3.733184 +step:2768 train loss:3.793528 +step:2769 train loss:3.768118 +step:2770 train loss:3.787120 +step:2771 train loss:3.811963 +step:2772 train loss:3.777445 +step:2773 train loss:3.776318 +step:2774 train loss:3.772452 +step:2775 train loss:3.786234 +step:2776 train loss:3.738942 +step:2777 train loss:3.770597 +step:2778 train loss:3.781551 +step:2779 train loss:3.807539 +step:2780 train loss:3.777972 +step:2781 train loss:3.763552 +step:2782 train loss:3.753411 +step:2783 train loss:3.784204 +step:2784 train loss:3.791619 +step:2785 train loss:3.862771 +step:2786 train loss:3.828526 +step:2787 train loss:3.786342 +step:2788 train loss:3.785081 +step:2789 train loss:3.779057 +step:2790 train loss:3.717064 +step:2791 train loss:3.819606 +step:2792 train loss:3.805861 +step:2793 train loss:3.771055 +step:2794 train loss:3.782806 +step:2795 train loss:3.796203 +step:2796 train loss:3.790845 +step:2797 train loss:3.834648 +step:2798 train loss:3.823161 +step:2799 train loss:3.731512 +step:2800 train loss:3.777212 +step:2801 train loss:3.810933 +step:2802 train loss:3.838271 +step:2803 train loss:3.811540 +step:2804 train loss:3.743747 +step:2805 train loss:3.784796 +step:2806 train loss:3.780636 +step:2807 train loss:3.810110 +step:2808 train loss:3.748709 +step:2809 train loss:3.816684 +step:2810 train loss:3.806974 +step:2811 train loss:3.796734 +step:2812 train loss:3.843469 +step:2813 train loss:3.813044 +step:2814 train loss:3.802301 +step:2815 train loss:3.814102 +step:2816 train loss:3.818593 +step:2817 train loss:3.750721 +step:2818 train loss:3.856604 +step:2819 train loss:3.783700 +step:2820 train loss:3.777835 +step:2821 train loss:3.757410 +step:2822 train loss:3.802167 +step:2823 train loss:3.750933 +step:2824 train loss:3.644011 +step:2825 train loss:3.794927 +step:2826 train loss:3.790973 +step:2827 train loss:3.818650 +step:2828 train loss:3.804164 +step:2829 train loss:3.795464 +step:2830 train loss:3.824862 +step:2831 train loss:3.767647 +step:2832 train loss:3.734933 +step:2833 train loss:3.795057 +step:2834 train loss:3.746948 +step:2835 train loss:3.780807 +step:2836 train loss:3.785706 +step:2837 train loss:3.785223 +step:2838 train loss:3.726420 +step:2839 train loss:3.822923 +step:2840 train loss:3.784352 +step:2841 train loss:3.864302 +step:2842 train loss:3.807727 +step:2843 train loss:3.800773 +step:2844 train loss:3.825893 +step:2845 train loss:3.785016 +step:2846 train loss:3.733884 
+step:2847 train loss:3.824304 +step:2848 train loss:3.777846 +step:2849 train loss:3.768982 +step:2850 train loss:3.828820 +step:2851 train loss:3.781887 +step:2852 train loss:3.864094 +step:2853 train loss:3.778841 +step:2854 train loss:3.720591 +step:2855 train loss:3.796175 +step:2856 train loss:3.720756 +step:2857 train loss:3.825815 +step:2858 train loss:3.781687 +step:2859 train loss:3.769267 +step:2860 train loss:3.762001 +step:2861 train loss:3.742564 +step:2862 train loss:3.772623 +step:2863 train loss:3.754946 +step:2864 train loss:3.760752 +step:2865 train loss:3.839355 +step:2866 train loss:3.850743 +step:2867 train loss:3.788798 +step:2868 train loss:3.788926 +step:2869 train loss:3.750119 +step:2870 train loss:3.836428 +step:2871 train loss:3.833372 +step:2872 train loss:3.795691 +step:2873 train loss:3.802899 +step:2874 train loss:3.780754 +step:2875 train loss:3.733240 +step:2876 train loss:3.779398 +step:2877 train loss:3.762003 +step:2878 train loss:3.776536 +step:2879 train loss:3.743045 +step:2880 train loss:3.761861 +step:2881 train loss:3.754051 +step:2882 train loss:3.687677 +step:2883 train loss:3.775119 +step:2884 train loss:3.844811 +step:2885 train loss:3.740052 +step:2886 train loss:3.786453 +step:2887 train loss:3.811990 +step:2888 train loss:3.784276 +step:2889 train loss:3.769150 +step:2890 train loss:3.742725 +step:2891 train loss:3.782592 +step:2892 train loss:3.790349 +step:2893 train loss:3.771680 +step:2894 train loss:3.743471 +step:2895 train loss:3.792742 +step:2896 train loss:3.838497 +step:2897 train loss:3.815481 +step:2898 train loss:3.949984 +step:2899 train loss:3.705266 +step:2900 train loss:3.782465 +step:2901 train loss:3.732780 +step:2902 train loss:3.733307 +step:2903 train loss:3.747855 +step:2904 train loss:3.775584 +step:2905 train loss:3.834633 +step:2906 train loss:3.806139 +step:2907 train loss:3.978996 +step:2908 train loss:3.726962 +step:2909 train loss:3.804341 +step:2910 train loss:3.776395 +step:2911 train loss:3.802855 +step:2912 train loss:3.761250 +step:2913 train loss:3.795405 +step:2914 train loss:3.822956 +step:2915 train loss:3.818824 +step:2916 train loss:3.776792 +step:2917 train loss:3.810895 +step:2918 train loss:3.801516 +step:2919 train loss:3.747627 +step:2920 train loss:3.799783 +step:2921 train loss:3.754454 +step:2922 train loss:3.778478 +step:2923 train loss:3.844897 +step:2924 train loss:3.780943 +step:2925 train loss:3.734246 +step:2926 train loss:3.824532 +step:2927 train loss:3.733902 +step:2928 train loss:3.701161 +step:2929 train loss:3.719178 +step:2930 train loss:3.738582 +step:2931 train loss:3.895569 +step:2932 train loss:3.809327 +step:2933 train loss:3.775375 +step:2934 train loss:3.770924 +step:2935 train loss:3.790867 +step:2936 train loss:3.741060 +step:2937 train loss:3.757390 +step:2938 train loss:3.779214 +step:2939 train loss:3.852216 +step:2940 train loss:3.751215 +step:2941 train loss:3.788392 +step:2942 train loss:3.748224 +step:2943 train loss:4.024420 +step:2944 train loss:3.852145 +step:2945 train loss:3.809327 +step:2946 train loss:3.819531 +step:2947 train loss:3.783348 +step:2948 train loss:3.738954 +step:2949 train loss:3.827760 +step:2950 train loss:3.783550 +step:2951 train loss:3.677879 +step:2952 train loss:3.750871 +step:2953 train loss:3.664026 +step:2954 train loss:3.755766 +step:2955 train loss:3.820490 +step:2956 train loss:3.770990 +step:2957 train loss:3.774287 +step:2958 train loss:3.726946 +step:2959 train loss:3.750062 +step:2960 train loss:3.841325 +step:2961 train 
loss:3.705407 +step:2962 train loss:3.782113 +step:2963 train loss:3.777453 +step:2964 train loss:3.757469 +step:2965 train loss:3.784597 +step:2966 train loss:3.758317 +step:2967 train loss:3.757780 +step:2968 train loss:3.729378 +step:2969 train loss:3.741166 +step:2970 train loss:3.807656 +step:2971 train loss:3.739131 +step:2972 train loss:3.719257 +step:2973 train loss:3.717701 +step:2974 train loss:3.757486 +step:2975 train loss:3.722607 +step:2976 train loss:3.763492 +step:2977 train loss:3.754621 +step:2978 train loss:3.838733 +step:2979 train loss:3.818838 +step:2980 train loss:3.828351 +step:2981 train loss:3.783043 +step:2982 train loss:3.770422 +step:2983 train loss:3.723754 +step:2984 train loss:3.697546 +step:2985 train loss:3.809402 +step:2986 train loss:3.705950 +step:2987 train loss:3.833074 +step:2988 train loss:3.759041 +step:2989 train loss:3.788981 +step:2990 train loss:3.741270 +step:2991 train loss:3.812305 +step:2992 train loss:3.804147 +step:2993 train loss:3.771019 +step:2994 train loss:3.756690 +step:2995 train loss:3.827925 +step:2996 train loss:3.753085 +step:2997 train loss:3.660684 +step:2998 train loss:3.775773 +step:2999 train loss:3.816575 +step:3000 validation loss:3.700478 total_sharp:7.7803e-03 L1_sharp:1.3387e-01 L2_sharp:5.2318e-02 L3_sharp:5.8442e-02 L4_sharp:2.8342e-02 L5_sharp:2.9051e-02 L6_sharp:3.6130e-02 L7_sharp:4.1725e-02 L8_sharp:4.2778e-02 L9_sharp:3.0919e-02 L10_sharp:2.3956e-02 L11_sharp:2.2094e-02 L12_sharp:3.1967e-02 total_fnorm:1.3448e+00 total_l1_linf:8.0756e+03 total_spectral:1.3448e+00 L1_fnorm:6.1483e-02 L2_fnorm:5.8307e-02 L3_fnorm:5.6979e-02 L4_fnorm:5.9349e-02 L5_fnorm:6.0411e-02 L6_fnorm:6.1156e-02 L7_fnorm:6.1081e-02 L8_fnorm:6.1192e-02 L9_fnorm:6.1154e-02 L10_fnorm:6.1217e-02 L11_fnorm:6.1117e-02 L12_fnorm:6.1249e-02 L1_l1linf:2.7916e-01 L2_l1linf:3.1061e-01 L3_l1linf:3.2630e-01 L4_l1linf:3.1877e-01 L5_l1linf:3.0636e-01 L6_l1linf:2.8874e-01 L7_l1linf:2.8522e-01 L8_l1linf:2.8798e-01 L9_l1linf:3.2160e-01 L10_l1linf:3.3054e-01 L11_l1linf:3.3484e-01 L12_l1linf:3.2471e-01 L1_spectral:6.3632e-03 L2_spectral:7.0396e-03 L3_spectral:7.3106e-03 L4_spectral:7.2066e-03 L5_spectral:6.8889e-03 L6_spectral:6.5348e-03 L7_spectral:6.4469e-03 L8_spectral:6.4533e-03 L9_spectral:7.2009e-03 L10_spectral:7.4067e-03 L11_spectral:7.5054e-03 L12_spectral:7.3845e-03 ip_v_neg_g:9.3419e-03 cos_v_neg_g:1.7864e-03 v_norm:1.3448e+00 g_norm:3.8886e+00 hv_norm:1.1660e+00 cos_v_hv:8.9737e-03 hg_norm:2.7600e+02 cos_g_hg:4.8808e-01 v_par:5.9083e-05 v_perp:1.3448e+00 L1_cos_v_neg_g:1.3569e-02 L1_v_norm:6.1483e-02 L2_cos_v_neg_g:1.4662e-02 L2_v_norm:5.8307e-02 L3_cos_v_neg_g:1.2216e-02 L3_v_norm:5.6979e-02 L4_cos_v_neg_g:1.0376e-02 L4_v_norm:5.9349e-02 L5_cos_v_neg_g:1.0337e-02 L5_v_norm:6.0411e-02 L6_cos_v_neg_g:1.1573e-02 L6_v_norm:6.1156e-02 L7_cos_v_neg_g:1.1387e-02 L7_v_norm:6.1081e-02 L8_cos_v_neg_g:1.2048e-02 L8_v_norm:6.1192e-02 L9_cos_v_neg_g:1.0912e-02 L9_v_norm:6.1154e-02 L10_cos_v_neg_g:9.7907e-03 L10_v_norm:6.1217e-02 L11_cos_v_neg_g:7.8384e-03 L11_v_norm:6.1117e-02 L12_cos_v_neg_g:7.2257e-03 L12_v_norm:6.1249e-02 +step:3000 train loss:3.708542 +step:3001 train loss:3.761502 +step:3002 train loss:3.761410 +step:3003 train loss:3.757471 +step:3004 train loss:3.784622 +step:3005 train loss:3.683384 +step:3006 train loss:3.732991 +step:3007 train loss:3.763600 +step:3008 train loss:3.810817 +step:3009 train loss:3.765737 +step:3010 train loss:3.784998 +step:3011 train loss:3.769666 +step:3012 train loss:3.747705 +step:3013 train loss:3.790455 +step:3014 
train loss:3.748663 +step:3015 train loss:3.747389 +step:3016 train loss:3.769242 +step:3017 train loss:3.787712 +step:3018 train loss:3.720118 +step:3019 train loss:3.757116 +step:3020 train loss:3.776206 +step:3021 train loss:3.741275 +step:3022 train loss:3.831823 +step:3023 train loss:3.779682 +step:3024 train loss:3.767424 +step:3025 train loss:3.780325 +step:3026 train loss:3.750031 +step:3027 train loss:3.727614 +step:3028 train loss:3.779424 +step:3029 train loss:3.765675 +step:3030 train loss:3.740582 +step:3031 train loss:3.723781 +step:3032 train loss:3.712323 +step:3033 train loss:3.739548 +step:3034 train loss:3.784743 +step:3035 train loss:3.763480 +step:3036 train loss:3.724291 +step:3037 train loss:3.688449 +step:3038 train loss:3.805409 +step:3039 train loss:3.682865 +step:3040 train loss:3.671203 +step:3041 train loss:3.798916 +step:3042 train loss:3.733127 +step:3043 train loss:3.792750 +step:3044 train loss:3.687808 +step:3045 train loss:3.734783 +step:3046 train loss:3.708289 +step:3047 train loss:3.740493 +step:3048 train loss:3.702566 +step:3049 train loss:3.784935 +step:3050 train loss:3.671427 +step:3051 train loss:3.689948 +step:3052 train loss:3.707295 +step:3053 train loss:3.777791 +step:3054 train loss:3.850109 +step:3055 train loss:3.688178 +step:3056 train loss:3.719306 +step:3057 train loss:3.753633 +step:3058 train loss:3.702671 +step:3059 train loss:3.731444 +step:3060 train loss:3.728769 +step:3061 train loss:3.712708 +step:3062 train loss:3.765889 +step:3063 train loss:3.749707 +step:3064 train loss:3.774886 +step:3065 train loss:3.787856 +step:3066 train loss:3.688982 +step:3067 train loss:3.736568 +step:3068 train loss:3.790184 +step:3069 train loss:3.802927 +step:3070 train loss:3.732193 +step:3071 train loss:3.749805 +step:3072 train loss:3.751328 +step:3073 train loss:3.786741 +step:3074 train loss:3.724312 +step:3075 train loss:3.760043 +step:3076 train loss:3.695625 +step:3077 train loss:3.693858 +step:3078 train loss:3.722593 +step:3079 train loss:3.769911 +step:3080 train loss:3.762033 +step:3081 train loss:3.806621 +step:3082 train loss:3.782058 +step:3083 train loss:3.712100 +step:3084 train loss:3.792627 +step:3085 train loss:3.719683 +step:3086 train loss:3.781390 +step:3087 train loss:3.750920 +step:3088 train loss:3.831933 +step:3089 train loss:3.706889 +step:3090 train loss:3.779900 +step:3091 train loss:3.703487 +step:3092 train loss:3.727195 +step:3093 train loss:3.748501 +step:3094 train loss:3.734327 +step:3095 train loss:3.817741 +step:3096 train loss:3.746827 +step:3097 train loss:3.758607 +step:3098 train loss:3.735266 +step:3099 train loss:3.743542 +step:3100 train loss:3.769456 +step:3101 train loss:3.852808 +step:3102 train loss:3.777387 +step:3103 train loss:3.703220 +step:3104 train loss:3.785971 +step:3105 train loss:3.757643 +step:3106 train loss:3.751977 +step:3107 train loss:3.735116 +step:3108 train loss:3.709423 +step:3109 train loss:3.764010 +step:3110 train loss:3.694454 +step:3111 train loss:3.730754 +step:3112 train loss:3.664057 +step:3113 train loss:3.786399 +step:3114 train loss:3.698447 +step:3115 train loss:3.740670 +step:3116 train loss:3.620118 +step:3117 train loss:3.643674 +step:3118 train loss:3.741383 +step:3119 train loss:3.748701 +step:3120 train loss:3.750970 +step:3121 train loss:3.694391 +step:3122 train loss:3.778196 +step:3123 train loss:3.693851 +step:3124 train loss:3.755846 +step:3125 train loss:3.770473 +step:3126 train loss:3.873279 +step:3127 train loss:3.721397 +step:3128 train loss:3.749195 
+step:3129 train loss:3.730905 +step:3130 train loss:3.706448 +step:3131 train loss:3.787686 +step:3132 train loss:3.772774 +step:3133 train loss:3.741590 +step:3134 train loss:3.637678 +step:3135 train loss:3.731411 +step:3136 train loss:3.705245 +step:3137 train loss:3.839996 +step:3138 train loss:3.740104 +step:3139 train loss:3.721280 +step:3140 train loss:3.741455 +step:3141 train loss:3.746375 +step:3142 train loss:3.682153 +step:3143 train loss:3.768036 +step:3144 train loss:3.715025 +step:3145 train loss:3.699079 +step:3146 train loss:3.712593 +step:3147 train loss:3.824159 +step:3148 train loss:3.728829 +step:3149 train loss:3.783124 +step:3150 train loss:3.767690 +step:3151 train loss:3.737954 +step:3152 train loss:3.734139 +step:3153 train loss:3.696026 +step:3154 train loss:3.777375 +step:3155 train loss:3.719869 +step:3156 train loss:3.769655 +step:3157 train loss:3.774286 +step:3158 train loss:3.744529 +step:3159 train loss:3.681593 +step:3160 train loss:3.732616 +step:3161 train loss:3.703357 +step:3162 train loss:3.761487 +step:3163 train loss:3.742233 +step:3164 train loss:3.720744 +step:3165 train loss:3.737967 +step:3166 train loss:3.777451 +step:3167 train loss:3.738202 +step:3168 train loss:3.816247 +step:3169 train loss:3.729029 +step:3170 train loss:3.711936 +step:3171 train loss:3.700300 +step:3172 train loss:3.705359 +step:3173 train loss:3.651217 +step:3174 train loss:3.764132 +step:3175 train loss:3.731545 +step:3176 train loss:3.742512 +step:3177 train loss:3.708488 +step:3178 train loss:3.686459 +step:3179 train loss:3.762837 +step:3180 train loss:3.693489 +step:3181 train loss:3.776689 +step:3182 train loss:3.783652 +step:3183 train loss:3.726116 +step:3184 train loss:3.723867 +step:3185 train loss:3.783779 +step:3186 train loss:3.740106 +step:3187 train loss:3.759826 +step:3188 train loss:3.800492 +step:3189 train loss:3.747202 +step:3190 train loss:3.701453 +step:3191 train loss:3.707180 +step:3192 train loss:3.669874 +step:3193 train loss:3.749442 +step:3194 train loss:3.712811 +step:3195 train loss:3.697911 +step:3196 train loss:3.747375 +step:3197 train loss:3.712009 +step:3198 train loss:3.739214 +step:3199 train loss:3.727470 +step:3200 train loss:3.730805 +step:3201 train loss:3.698483 +step:3202 train loss:3.756232 +step:3203 train loss:3.819412 +step:3204 train loss:3.783927 +step:3205 train loss:3.630495 +step:3206 train loss:3.911368 +step:3207 train loss:3.671448 +step:3208 train loss:3.736557 +step:3209 train loss:3.729050 +step:3210 train loss:3.711060 +step:3211 train loss:3.737271 +step:3212 train loss:3.748616 +step:3213 train loss:3.689924 +step:3214 train loss:3.794485 +step:3215 train loss:3.797745 +step:3216 train loss:3.667609 +step:3217 train loss:3.748815 +step:3218 train loss:3.790904 +step:3219 train loss:3.704864 +step:3220 train loss:3.777636 +step:3221 train loss:3.689121 +step:3222 train loss:3.732943 +step:3223 train loss:3.748653 +step:3224 train loss:3.763469 +step:3225 train loss:3.685825 +step:3226 train loss:3.717111 +step:3227 train loss:3.744341 +step:3228 train loss:3.740944 +step:3229 train loss:3.774979 +step:3230 train loss:3.786458 +step:3231 train loss:3.724146 +step:3232 train loss:3.734814 +step:3233 train loss:3.709347 +step:3234 train loss:3.696354 +step:3235 train loss:3.697510 +step:3236 train loss:3.718688 +step:3237 train loss:3.716775 +step:3238 train loss:3.739125 +step:3239 train loss:3.636965 +step:3240 train loss:3.752690 +step:3241 train loss:3.747520 +step:3242 train loss:3.803767 +step:3243 train 
loss:3.744112 +step:3244 train loss:3.760220 +step:3245 train loss:3.663243 +step:3246 train loss:3.791914 +step:3247 train loss:3.732884 +step:3248 train loss:3.755795 +step:3249 train loss:3.699006 +step:3250 validation loss:3.670579 +step:3250 train loss:3.700336 +step:3251 train loss:3.809309 +step:3252 train loss:3.740584 +step:3253 train loss:3.740052 +step:3254 train loss:3.807983 +step:3255 train loss:3.749116 +step:3256 train loss:3.745631 +step:3257 train loss:3.725361 +step:3258 train loss:3.658779 +step:3259 train loss:3.635787 +step:3260 train loss:3.750063 +step:3261 train loss:3.732882 +step:3262 train loss:3.720809 +step:3263 train loss:3.703930 +step:3264 train loss:3.815389 +step:3265 train loss:3.725803 +step:3266 train loss:3.752656 +step:3267 train loss:3.716629 +step:3268 train loss:3.721031 +step:3269 train loss:3.732465 +step:3270 train loss:3.761342 +step:3271 train loss:3.726430 +step:3272 train loss:3.702466 +step:3273 train loss:3.714744 +step:3274 train loss:3.846985 +step:3275 train loss:3.717873 +step:3276 train loss:3.786318 +step:3277 train loss:3.723874 +step:3278 train loss:3.699628 +step:3279 train loss:3.727453 +step:3280 train loss:3.755025 +step:3281 train loss:3.680438 +step:3282 train loss:3.749595 +step:3283 train loss:3.722839 +step:3284 train loss:3.683307 +step:3285 train loss:3.702560 +step:3286 train loss:3.733831 +step:3287 train loss:3.671364 +step:3288 train loss:3.751844 +step:3289 train loss:3.695890 +step:3290 train loss:3.729095 +step:3291 train loss:3.687209 +step:3292 train loss:3.711007 +step:3293 train loss:3.755558 +step:3294 train loss:3.767309 +step:3295 train loss:3.677900 +step:3296 train loss:3.733351 +step:3297 train loss:3.695100 +step:3298 train loss:3.696458 +step:3299 train loss:3.822013 +step:3300 train loss:3.662987 +step:3301 train loss:3.743917 +step:3302 train loss:3.711467 +step:3303 train loss:3.728943 +step:3304 train loss:3.696342 +step:3305 train loss:3.786105 +step:3306 train loss:3.720639 +step:3307 train loss:3.740952 +step:3308 train loss:3.697257 +step:3309 train loss:3.753586 +step:3310 train loss:3.670708 +step:3311 train loss:3.723650 +step:3312 train loss:3.693068 +step:3313 train loss:3.728606 +step:3314 train loss:3.723704 +step:3315 train loss:3.803988 +step:3316 train loss:3.657166 +step:3317 train loss:3.748219 +step:3318 train loss:3.758832 +step:3319 train loss:3.685756 +step:3320 train loss:3.844584 +step:3321 train loss:3.747195 +step:3322 train loss:3.746078 +step:3323 train loss:3.852050 +step:3324 train loss:3.770530 +step:3325 train loss:3.743074 +step:3326 train loss:3.733995 +step:3327 train loss:3.746068 +step:3328 train loss:3.724803 +step:3329 train loss:3.724186 +step:3330 train loss:3.716968 +step:3331 train loss:3.762682 +step:3332 train loss:3.786541 +step:3333 train loss:3.751480 +step:3334 train loss:3.683866 +step:3335 train loss:3.696870 +step:3336 train loss:3.732878 +step:3337 train loss:3.731858 +step:3338 train loss:3.720154 +step:3339 train loss:3.713464 +step:3340 train loss:3.749337 +step:3341 train loss:3.699625 +step:3342 train loss:3.748245 +step:3343 train loss:3.682365 +step:3344 train loss:3.743532 +step:3345 train loss:3.694251 +step:3346 train loss:3.706302 +step:3347 train loss:3.712721 +step:3348 train loss:3.728483 +step:3349 train loss:3.718641 +step:3350 train loss:3.742240 +step:3351 train loss:3.796358 +step:3352 train loss:3.736733 +step:3353 train loss:3.833505 +step:3354 train loss:3.680804 +step:3355 train loss:3.786237 +step:3356 train loss:3.737069 
+step:3357 train loss:3.749206 +step:3358 train loss:3.688365 +step:3359 train loss:3.722059 +step:3360 train loss:3.712128 +step:3361 train loss:3.714345 +step:3362 train loss:3.704941 +step:3363 train loss:3.705411 +step:3364 train loss:3.686437 +step:3365 train loss:3.725217 +step:3366 train loss:3.755575 +step:3367 train loss:3.708239 +step:3368 train loss:3.802399 +step:3369 train loss:3.715013 +step:3370 train loss:3.776981 +step:3371 train loss:3.764582 +step:3372 train loss:3.730818 +step:3373 train loss:3.741260 +step:3374 train loss:3.786967 +step:3375 train loss:3.721767 +step:3376 train loss:3.725800 +step:3377 train loss:3.715647 +step:3378 train loss:3.692337 +step:3379 train loss:3.770159 +step:3380 train loss:3.750480 +step:3381 train loss:3.734239 +step:3382 train loss:3.751598 +step:3383 train loss:3.758243 +step:3384 train loss:3.689732 +step:3385 train loss:3.740278 +step:3386 train loss:3.718881 +step:3387 train loss:3.793560 +step:3388 train loss:3.697262 +step:3389 train loss:3.893772 +step:3390 train loss:3.629370 +step:3391 train loss:3.715277 +step:3392 train loss:3.698143 +step:3393 train loss:3.730176 +step:3394 train loss:3.686822 +step:3395 train loss:3.757359 +step:3396 train loss:3.672099 +step:3397 train loss:3.748526 +step:3398 train loss:3.714396 +step:3399 train loss:3.734864 +step:3400 train loss:3.682872 +step:3401 train loss:3.717444 +step:3402 train loss:3.878849 +step:3403 train loss:3.765502 +step:3404 train loss:3.880615 +step:3405 train loss:3.734087 +step:3406 train loss:3.714908 +step:3407 train loss:3.712146 +step:3408 train loss:3.692860 +step:3409 train loss:3.658574 +step:3410 train loss:3.694448 +step:3411 train loss:3.760519 +step:3412 train loss:3.686046 +step:3413 train loss:3.675315 +step:3414 train loss:3.714360 +step:3415 train loss:3.687998 +step:3416 train loss:3.692058 +step:3417 train loss:3.773922 +step:3418 train loss:3.771137 +step:3419 train loss:3.733318 +step:3420 train loss:3.705686 +step:3421 train loss:3.739072 +step:3422 train loss:3.754555 +step:3423 train loss:3.775513 +step:3424 train loss:3.656202 +step:3425 train loss:3.681484 +step:3426 train loss:3.675789 +step:3427 train loss:3.737269 +step:3428 train loss:3.662908 +step:3429 train loss:3.726384 +step:3430 train loss:3.688816 +step:3431 train loss:3.745158 +step:3432 train loss:3.728748 +step:3433 train loss:3.690800 +step:3434 train loss:3.775351 +step:3435 train loss:3.711069 +step:3436 train loss:3.805158 +step:3437 train loss:3.632879 +step:3438 train loss:3.739979 +step:3439 train loss:3.713348 +step:3440 train loss:3.807574 +step:3441 train loss:3.701475 +step:3442 train loss:3.770891 +step:3443 train loss:3.704418 +step:3444 train loss:3.722696 +step:3445 train loss:3.770164 +step:3446 train loss:3.675589 +step:3447 train loss:3.747912 +step:3448 train loss:3.703737 +step:3449 train loss:3.737820 +step:3450 train loss:3.641764 +step:3451 train loss:3.761502 +step:3452 train loss:3.712556 +step:3453 train loss:3.762930 +step:3454 train loss:3.790903 +step:3455 train loss:3.848525 +step:3456 train loss:3.786614 +step:3457 train loss:3.779546 +step:3458 train loss:3.708171 +step:3459 train loss:3.720247 +step:3460 train loss:3.662229 +step:3461 train loss:3.726500 +step:3462 train loss:3.727400 +step:3463 train loss:3.698783 +step:3464 train loss:3.750075 +step:3465 train loss:3.681131 +step:3466 train loss:3.750270 +step:3467 train loss:3.705633 +step:3468 train loss:3.719956 +step:3469 train loss:3.731999 +step:3470 train loss:3.711675 +step:3471 train 
loss:3.752285 +step:3472 train loss:3.637550 +step:3473 train loss:3.758964 +step:3474 train loss:3.658051 +step:3475 train loss:3.737408 +step:3476 train loss:3.707793 +step:3477 train loss:3.728632 +step:3478 train loss:3.704861 +step:3479 train loss:3.733700 +step:3480 train loss:3.753318 +step:3481 train loss:3.734925 +step:3482 train loss:3.713927 +step:3483 train loss:3.857075 +step:3484 train loss:3.699135 +step:3485 train loss:3.686676 +step:3486 train loss:3.737699 +step:3487 train loss:3.781220 +step:3488 train loss:3.683306 +step:3489 train loss:3.735623 +step:3490 train loss:3.704584 +step:3491 train loss:3.744025 +step:3492 train loss:3.776688 +step:3493 train loss:3.748939 +step:3494 train loss:3.742700 +step:3495 train loss:3.719809 +step:3496 train loss:3.685549 +step:3497 train loss:3.796520 +step:3498 train loss:3.744027 +step:3499 train loss:3.676093 +step:3500 validation loss:3.648266 total_sharp:6.7525e-03 L1_sharp:5.8438e-02 L2_sharp:3.5552e-02 L3_sharp:5.4070e-02 L4_sharp:3.5902e-02 L5_sharp:3.4497e-02 L6_sharp:4.2243e-02 L7_sharp:4.2422e-02 L8_sharp:3.8935e-02 L9_sharp:2.8332e-02 L10_sharp:1.9933e-02 L11_sharp:1.8390e-02 L12_sharp:2.5231e-02 total_fnorm:1.3493e+00 total_l1_linf:8.0922e+03 total_spectral:1.3493e+00 L1_fnorm:6.1524e-02 L2_fnorm:5.8723e-02 L3_fnorm:5.7772e-02 L4_fnorm:5.9352e-02 L5_fnorm:6.0348e-02 L6_fnorm:6.1343e-02 L7_fnorm:6.1163e-02 L8_fnorm:6.1144e-02 L9_fnorm:6.1168e-02 L10_fnorm:6.1183e-02 L11_fnorm:6.1057e-02 L12_fnorm:6.1060e-02 L1_l1linf:2.9019e-01 L2_l1linf:3.2847e-01 L3_l1linf:3.4232e-01 L4_l1linf:3.4907e-01 L5_l1linf:3.2516e-01 L6_l1linf:3.4291e-01 L7_l1linf:2.9624e-01 L8_l1linf:2.9875e-01 L9_l1linf:3.1762e-01 L10_l1linf:3.2607e-01 L11_l1linf:3.2679e-01 L12_l1linf:2.9626e-01 L1_spectral:6.5302e-03 L2_spectral:7.3724e-03 L3_spectral:7.6196e-03 L4_spectral:7.7971e-03 L5_spectral:7.2779e-03 L6_spectral:7.6075e-03 L7_spectral:6.6169e-03 L8_spectral:6.6857e-03 L9_spectral:7.1507e-03 L10_spectral:7.3394e-03 L11_spectral:7.3128e-03 L12_spectral:6.7792e-03 ip_v_neg_g:8.8197e-03 cos_v_neg_g:1.5951e-03 v_norm:1.3493e+00 g_norm:4.0978e+00 hv_norm:1.0522e+00 cos_v_hv:8.6594e-03 hg_norm:3.7934e+02 cos_g_hg:5.5780e-01 v_par:5.4040e-05 v_perp:1.3493e+00 L1_cos_v_neg_g:1.2804e-02 L1_v_norm:6.1524e-02 L2_cos_v_neg_g:1.3444e-02 L2_v_norm:5.8723e-02 L3_cos_v_neg_g:1.4013e-02 L3_v_norm:5.7772e-02 L4_cos_v_neg_g:1.1966e-02 L4_v_norm:5.9352e-02 L5_cos_v_neg_g:1.1463e-02 L5_v_norm:6.0348e-02 L6_cos_v_neg_g:1.2948e-02 L6_v_norm:6.1343e-02 L7_cos_v_neg_g:1.1128e-02 L7_v_norm:6.1163e-02 L8_cos_v_neg_g:1.0373e-02 L8_v_norm:6.1144e-02 L9_cos_v_neg_g:9.2365e-03 L9_v_norm:6.1168e-02 L10_cos_v_neg_g:6.3061e-03 L10_v_norm:6.1183e-02 L11_cos_v_neg_g:5.0106e-03 L11_v_norm:6.1057e-02 L12_cos_v_neg_g:4.6570e-03 L12_v_norm:6.1060e-02 +step:3500 train loss:3.696193 +step:3501 train loss:3.822304 +step:3502 train loss:3.802339 +step:3503 train loss:3.752828 +step:3504 train loss:3.705899 +step:3505 train loss:3.720827 +step:3506 train loss:3.617801 +step:3507 train loss:3.738443 +step:3508 train loss:3.683241 +step:3509 train loss:3.749613 +step:3510 train loss:3.682940 +step:3511 train loss:3.718513 +step:3512 train loss:3.857625 +step:3513 train loss:3.676530 +step:3514 train loss:3.693486 +step:3515 train loss:3.941459 +step:3516 train loss:3.736846 +step:3517 train loss:3.696719 +step:3518 train loss:3.700251 +step:3519 train loss:3.693542 +step:3520 train loss:3.725734 +step:3521 train loss:3.716435 +step:3522 train loss:3.626473 +step:3523 train loss:3.731965 +step:3524 
train loss:3.714756 +step:3525 train loss:3.700529 +step:3526 train loss:3.725482 +step:3527 train loss:3.677784 +step:3528 train loss:3.728607 +step:3529 train loss:3.706690 +step:3530 train loss:3.699701 +step:3531 train loss:3.691864 +step:3532 train loss:3.875648 +step:3533 train loss:3.697501 +step:3534 train loss:3.713532 +step:3535 train loss:3.690728 +step:3536 train loss:3.688139 +step:3537 train loss:3.698390 +step:3538 train loss:3.731525 +step:3539 train loss:3.679547 +step:3540 train loss:3.743720 +step:3541 train loss:3.710526 +step:3542 train loss:3.722776 +step:3543 train loss:3.642309 +step:3544 train loss:3.661268 +step:3545 train loss:3.666111 +step:3546 train loss:3.731729 +step:3547 train loss:3.738658 +step:3548 train loss:3.713759 +step:3549 train loss:3.711013 +step:3550 train loss:3.696916 +step:3551 train loss:3.726397 +step:3552 train loss:3.619098 +step:3553 train loss:3.742978 +step:3554 train loss:3.734383 +step:3555 train loss:3.720825 +step:3556 train loss:3.745287 +step:3557 train loss:3.733843 +step:3558 train loss:3.708876 +step:3559 train loss:3.652574 +step:3560 train loss:3.745367 +step:3561 train loss:3.736104 +step:3562 train loss:3.909058 +step:3563 train loss:3.768624 +step:3564 train loss:3.727820 +step:3565 train loss:3.729676 +step:3566 train loss:3.704186 +step:3567 train loss:3.642464 +step:3568 train loss:3.670362 +step:3569 train loss:3.755367 +step:3570 train loss:3.779476 +step:3571 train loss:3.758635 +step:3572 train loss:3.746501 +step:3573 train loss:3.706294 +step:3574 train loss:3.703831 +step:3575 train loss:3.695094 +step:3576 train loss:3.678983 +step:3577 train loss:3.688803 +step:3578 train loss:3.772158 +step:3579 train loss:3.679325 +step:3580 train loss:3.762385 +step:3581 train loss:3.703588 +step:3582 train loss:3.758430 +step:3583 train loss:3.697976 +step:3584 train loss:3.671266 +step:3585 train loss:3.721141 +step:3586 train loss:3.669927 +step:3587 train loss:3.765115 +step:3588 train loss:3.895138 +step:3589 train loss:3.727973 +step:3590 train loss:3.712214 +step:3591 train loss:3.719651 +step:3592 train loss:3.681625 +step:3593 train loss:3.653546 +step:3594 train loss:3.707755 +step:3595 train loss:3.679955 +step:3596 train loss:3.757858 +step:3597 train loss:3.734050 +step:3598 train loss:3.686035 +step:3599 train loss:3.739059 +step:3600 train loss:3.675932 +step:3601 train loss:3.694440 +step:3602 train loss:3.682158 +step:3603 train loss:3.696977 +step:3604 train loss:3.723921 +step:3605 train loss:3.828691 +step:3606 train loss:3.728594 +step:3607 train loss:3.710128 +step:3608 train loss:3.729472 +step:3609 train loss:3.712891 +step:3610 train loss:3.681424 +step:3611 train loss:3.684849 +step:3612 train loss:3.751661 +step:3613 train loss:3.722252 +step:3614 train loss:3.665944 +step:3615 train loss:3.706458 +step:3616 train loss:3.660366 +step:3617 train loss:3.734811 +step:3618 train loss:3.690928 +step:3619 train loss:3.678689 +step:3620 train loss:3.695070 +step:3621 train loss:3.656183 +step:3622 train loss:3.763626 +step:3623 train loss:3.751210 +step:3624 train loss:3.724700 +step:3625 train loss:3.703254 +step:3626 train loss:3.711135 +step:3627 train loss:3.708187 +step:3628 train loss:3.693019 +step:3629 train loss:3.698436 +step:3630 train loss:3.776279 +step:3631 train loss:3.704918 +step:3632 train loss:3.737817 +step:3633 train loss:3.695908 +step:3634 train loss:3.698411 +step:3635 train loss:3.686821 +step:3636 train loss:3.757042 +step:3637 train loss:3.836335 +step:3638 train loss:3.748744 
+step:3639 train loss:3.737834 +step:3640 train loss:3.744484 +step:3641 train loss:3.785305 +step:3642 train loss:3.678231 +step:3643 train loss:3.847954 +step:3644 train loss:3.741928 +step:3645 train loss:3.708569 +step:3646 train loss:3.832020 +step:3647 train loss:3.719371 +step:3648 train loss:3.712867 +step:3649 train loss:3.661865 +step:3650 train loss:3.704239 +step:3651 train loss:3.700115 +step:3652 train loss:3.687300 +step:3653 train loss:3.624041 +step:3654 train loss:3.683312 +step:3655 train loss:3.674218 +step:3656 train loss:3.706801 +step:3657 train loss:3.724793 +step:3658 train loss:3.720400 +step:3659 train loss:3.707858 +step:3660 train loss:3.675291 +step:3661 train loss:3.707225 +step:3662 train loss:3.678771 +step:3663 train loss:3.717911 +step:3664 train loss:3.672395 +step:3665 train loss:3.714345 +step:3666 train loss:3.754027 +step:3667 train loss:3.841985 +step:3668 train loss:3.722276 +step:3669 train loss:3.680507 +step:3670 train loss:3.727303 +step:3671 train loss:3.688316 +step:3672 train loss:3.725116 +step:3673 train loss:3.706621 +step:3674 train loss:3.723159 +step:3675 train loss:3.736700 +step:3676 train loss:3.699329 +step:3677 train loss:3.659963 +step:3678 train loss:3.723094 +step:3679 train loss:3.623304 +step:3680 train loss:3.724896 +step:3681 train loss:3.757083 +step:3682 train loss:3.736788 +step:3683 train loss:3.683412 +step:3684 train loss:3.681106 +step:3685 train loss:3.708581 +step:3686 train loss:3.736146 +step:3687 train loss:3.689865 +step:3688 train loss:3.667202 +step:3689 train loss:3.701004 +step:3690 train loss:3.691508 +step:3691 train loss:3.673523 +step:3692 train loss:3.731728 +step:3693 train loss:3.862771 +step:3694 train loss:3.679817 +step:3695 train loss:3.736905 +step:3696 train loss:3.699913 +step:3697 train loss:3.692165 +step:3698 train loss:3.633204 +step:3699 train loss:3.657847 +step:3700 train loss:3.689951 +step:3701 train loss:3.707133 +step:3702 train loss:3.728835 +step:3703 train loss:3.686765 +step:3704 train loss:3.728776 +step:3705 train loss:3.710738 +step:3706 train loss:3.662982 +step:3707 train loss:3.717891 +step:3708 train loss:3.693093 +step:3709 train loss:3.613416 +step:3710 train loss:3.740885 +step:3711 train loss:3.687286 +step:3712 train loss:3.727160 +step:3713 train loss:3.677701 +step:3714 train loss:3.696134 +step:3715 train loss:3.811741 +step:3716 train loss:3.717750 +step:3717 train loss:3.694094 +step:3718 train loss:3.698030 +step:3719 train loss:3.693277 +step:3720 train loss:3.705234 +step:3721 train loss:3.761005 +step:3722 train loss:3.773715 +step:3723 train loss:3.661165 +step:3724 train loss:3.716907 +step:3725 train loss:3.695836 +step:3726 train loss:3.713939 +step:3727 train loss:3.788067 +step:3728 train loss:3.751592 +step:3729 train loss:3.650300 +step:3730 train loss:3.668941 +step:3731 train loss:3.691372 +step:3732 train loss:3.842556 +step:3733 train loss:3.701375 +step:3734 train loss:3.706792 +step:3735 train loss:3.646172 +step:3736 train loss:3.701378 +step:3737 train loss:3.752899 +step:3738 train loss:3.776055 +step:3739 train loss:3.690207 +step:3740 train loss:3.594629 +step:3741 train loss:3.801459 +step:3742 train loss:3.711548 +step:3743 train loss:3.687790 +step:3744 train loss:3.686132 +step:3745 train loss:3.706327 +step:3746 train loss:3.669342 +step:3747 train loss:3.689567 +step:3748 train loss:3.728293 +step:3749 train loss:3.713572 +step:3750 validation loss:3.630698 +step:3750 train loss:3.721427 +step:3751 train loss:3.812495 +step:3752 
train loss:3.747990 +step:3753 train loss:3.661921 +step:3754 train loss:3.715147 +step:3755 train loss:3.890119 +step:3756 train loss:3.671267 +step:3757 train loss:3.668021 +step:3758 train loss:3.696597 +step:3759 train loss:3.642761 +step:3760 train loss:3.641856 +step:3761 train loss:3.693115 +step:3762 train loss:3.684909 +step:3763 train loss:3.687609 +step:3764 train loss:3.679682 +step:3765 train loss:3.677441 +step:3766 train loss:3.647473 +step:3767 train loss:3.731485 +step:3768 train loss:3.672309 +step:3769 train loss:3.930816 +step:3770 train loss:3.724917 +step:3771 train loss:3.735242 +step:3772 train loss:3.691741 +step:3773 train loss:3.685850 +step:3774 train loss:3.691687 +step:3775 train loss:3.685258 +step:3776 train loss:3.686650 +step:3777 train loss:3.647698 +step:3778 train loss:3.664317 +step:3779 train loss:3.647797 +step:3780 train loss:3.731124 +step:3781 train loss:3.698301 +step:3782 train loss:3.615720 +step:3783 train loss:3.721466 +step:3784 train loss:3.732036 +step:3785 train loss:3.643439 +step:3786 train loss:3.749463 +step:3787 train loss:3.661804 +step:3788 train loss:3.674780 +step:3789 train loss:3.581190 +step:3790 train loss:3.699273 +step:3791 train loss:3.719410 +step:3792 train loss:3.687944 +step:3793 train loss:3.689798 +step:3794 train loss:3.718254 +step:3795 train loss:3.684329 +step:3796 train loss:3.702347 +step:3797 train loss:3.680608 +step:3798 train loss:3.686978 +step:3799 train loss:3.697101 +step:3800 train loss:3.604891 +step:3801 train loss:3.721008 +step:3802 train loss:3.648737 +step:3803 train loss:3.730274 +step:3804 train loss:3.739375 +step:3805 train loss:3.700723 +step:3806 train loss:3.717133 +step:3807 train loss:3.736483 +step:3808 train loss:3.692789 +step:3809 train loss:3.709590 +step:3810 train loss:3.708194 +step:3811 train loss:3.693548 +step:3812 train loss:3.697150 +step:3813 train loss:3.651974 +step:3814 train loss:3.694578 +step:3815 train loss:3.698502 +step:3816 train loss:3.714192 +step:3817 train loss:3.732375 +step:3818 train loss:3.706115 +step:3819 train loss:3.717087 +step:3820 train loss:3.717644 +step:3821 train loss:3.674290 +step:3822 train loss:3.756066 +step:3823 train loss:3.652266 +step:3824 train loss:3.665130 +step:3825 train loss:3.673514 +step:3826 train loss:3.761567 +step:3827 train loss:3.763939 +step:3828 train loss:3.651837 +step:3829 train loss:3.670849 +step:3830 train loss:3.731482 +step:3831 train loss:3.665493 +step:3832 train loss:3.725276 +step:3833 train loss:3.667255 +step:3834 train loss:3.631546 +step:3835 train loss:3.675725 +step:3836 train loss:3.649677 +step:3837 train loss:3.718919 +step:3838 train loss:3.672852 +step:3839 train loss:3.713500 +step:3840 train loss:3.725724 +step:3841 train loss:3.672167 +step:3842 train loss:3.704013 +step:3843 train loss:3.719365 +step:3844 train loss:3.689673 +step:3845 train loss:3.711873 +step:3846 train loss:3.753536 +step:3847 train loss:3.651045 +step:3848 train loss:3.657352 +step:3849 train loss:3.669068 +step:3850 train loss:3.692586 +step:3851 train loss:3.828751 +step:3852 train loss:3.807046 +step:3853 train loss:3.706393 +step:3854 train loss:3.663719 +step:3855 train loss:3.718263 +step:3856 train loss:3.642989 +step:3857 train loss:3.704206 +step:3858 train loss:3.618149 +step:3859 train loss:3.664100 +step:3860 train loss:3.733271 +step:3861 train loss:3.706159 +step:3862 train loss:3.644562 +step:3863 train loss:3.693461 +step:3864 train loss:3.662829 +step:3865 train loss:3.702378 +step:3866 train loss:3.720237 
+step:3867 train loss:3.718617 +step:3868 train loss:3.665706 +step:3869 train loss:3.665733 +step:3870 train loss:3.643307 +step:3871 train loss:3.634650 +step:3872 train loss:3.772097 +step:3873 train loss:3.696810 +step:3874 train loss:3.708556 +step:3875 train loss:3.820356 +step:3876 train loss:3.690851 +step:3877 train loss:3.718544 +step:3878 train loss:3.743007 +step:3879 train loss:3.732542 +step:3880 train loss:3.813223 +step:3881 train loss:3.636339 +step:3882 train loss:3.671721 +step:3883 train loss:3.683872 +step:3884 train loss:3.677428 +step:3885 train loss:3.692071 +step:3886 train loss:3.753617 +step:3887 train loss:3.731072 +step:3888 train loss:3.694561 +step:3889 train loss:3.665889 +step:3890 train loss:3.699964 +step:3891 train loss:3.718061 +step:3892 train loss:3.627310 +step:3893 train loss:3.733012 +step:3894 train loss:3.681239 +step:3895 train loss:3.700263 +step:3896 train loss:3.692091 +step:3897 train loss:3.657372 +step:3898 train loss:3.719567 +step:3899 train loss:3.758060 +step:3900 train loss:3.714534 +step:3901 train loss:3.729234 +step:3902 train loss:3.658838 +step:3903 train loss:3.671824 +step:3904 train loss:3.706607 +step:3905 train loss:3.642036 +step:3906 train loss:3.676632 +step:3907 train loss:3.709845 +step:3908 train loss:3.788536 +step:3909 train loss:3.679190 +step:3910 train loss:3.704182 +step:3911 train loss:3.721570 +step:3912 train loss:3.667848 +step:3913 train loss:3.684650 +step:3914 train loss:3.705458 +step:3915 train loss:3.672353 +step:3916 train loss:3.708451 +step:3917 train loss:3.752820 +step:3918 train loss:3.726744 +step:3919 train loss:3.705842 +step:3920 train loss:3.681819 +step:3921 train loss:3.719863 +step:3922 train loss:3.724967 +step:3923 train loss:3.712896 +step:3924 train loss:3.649126 +step:3925 train loss:3.848877 +step:3926 train loss:3.696010 +step:3927 train loss:3.672039 +step:3928 train loss:3.751785 +step:3929 train loss:3.814354 +step:3930 train loss:3.705722 +step:3931 train loss:3.642667 +step:3932 train loss:3.692765 +step:3933 train loss:3.711849 +step:3934 train loss:3.662618 +step:3935 train loss:3.641191 +step:3936 train loss:3.734905 +step:3937 train loss:3.695032 +step:3938 train loss:3.701304 +step:3939 train loss:3.729200 +step:3940 train loss:3.677450 +step:3941 train loss:3.761705 +step:3942 train loss:3.721335 +step:3943 train loss:3.709207 +step:3944 train loss:3.757422 +step:3945 train loss:3.669554 +step:3946 train loss:3.612182 +step:3947 train loss:3.741506 +step:3948 train loss:3.710347 +step:3949 train loss:3.876916 +step:3950 train loss:3.674281 +step:3951 train loss:3.599468 +step:3952 train loss:3.568295 +step:3953 train loss:3.642802 +step:3954 train loss:3.691798 +step:3955 train loss:3.718951 +step:3956 train loss:3.675580 +step:3957 train loss:3.727133 +step:3958 train loss:3.707535 +step:3959 train loss:3.740332 +step:3960 train loss:3.663924 +step:3961 train loss:3.693259 +step:3962 train loss:3.697324 +step:3963 train loss:3.671641 +step:3964 train loss:3.653507 +step:3965 train loss:3.708488 +step:3966 train loss:3.657998 +step:3967 train loss:3.709941 +step:3968 train loss:3.724077 +step:3969 train loss:3.633584 +step:3970 train loss:3.744184 +step:3971 train loss:3.658114 +step:3972 train loss:3.692224 +step:3973 train loss:3.651123 +step:3974 train loss:3.742642 +step:3975 train loss:3.696239 +step:3976 train loss:3.649646 +step:3977 train loss:3.708766 +step:3978 train loss:3.676621 +step:3979 train loss:3.663211 +step:3980 train loss:3.731320 +step:3981 train 
loss:3.667996 +step:3982 train loss:3.684543 +step:3983 train loss:3.671308 +step:3984 train loss:3.703743 +step:3985 train loss:3.684031 +step:3986 train loss:3.694239 +step:3987 train loss:3.702426 +step:3988 train loss:3.640992 +step:3989 train loss:3.714566 +step:3990 train loss:3.705966 +step:3991 train loss:3.720964 +step:3992 train loss:3.678711 +step:3993 train loss:3.712145 +step:3994 train loss:3.658000 +step:3995 train loss:3.713834 +step:3996 train loss:3.631939 +step:3997 train loss:3.710751 +step:3998 train loss:3.590993 +step:3999 train loss:3.746527 +step:4000 validation loss:3.611095 total_sharp:4.8223e-03 L1_sharp:3.3485e-02 L2_sharp:1.7175e-02 L3_sharp:3.3197e-02 L4_sharp:2.0145e-02 L5_sharp:2.1912e-02 L6_sharp:2.5746e-02 L7_sharp:3.4730e-02 L8_sharp:3.6630e-02 L9_sharp:2.5367e-02 L10_sharp:1.9684e-02 L11_sharp:2.1006e-02 L12_sharp:3.3419e-02 total_fnorm:1.3494e+00 total_l1_linf:8.0942e+03 total_spectral:1.3494e+00 L1_fnorm:6.1167e-02 L2_fnorm:5.9160e-02 L3_fnorm:5.8291e-02 L4_fnorm:5.9647e-02 L5_fnorm:6.0521e-02 L6_fnorm:6.1116e-02 L7_fnorm:6.1117e-02 L8_fnorm:6.1270e-02 L9_fnorm:6.1215e-02 L10_fnorm:6.1207e-02 L11_fnorm:6.1176e-02 L12_fnorm:6.1269e-02 L1_l1linf:2.5142e-01 L2_l1linf:2.9026e-01 L3_l1linf:3.1291e-01 L4_l1linf:3.1986e-01 L5_l1linf:3.0131e-01 L6_l1linf:2.7490e-01 L7_l1linf:2.8830e-01 L8_l1linf:3.0908e-01 L9_l1linf:3.1975e-01 L10_l1linf:3.3505e-01 L11_l1linf:3.5566e-01 L12_l1linf:3.6390e-01 L1_spectral:5.7935e-03 L2_spectral:6.6135e-03 L3_spectral:7.0721e-03 L4_spectral:7.2818e-03 L5_spectral:6.8278e-03 L6_spectral:6.2408e-03 L7_spectral:6.4656e-03 L8_spectral:6.8395e-03 L9_spectral:7.1198e-03 L10_spectral:7.5018e-03 L11_spectral:7.9557e-03 L12_spectral:8.1530e-03 ip_v_neg_g:4.9794e-03 cos_v_neg_g:1.0458e-03 v_norm:1.3494e+00 g_norm:3.5286e+00 hv_norm:7.8026e-01 cos_v_hv:8.3395e-03 hg_norm:1.7875e+02 cos_g_hg:4.6539e-01 v_par:3.7305e-05 v_perp:1.3494e+00 L1_cos_v_neg_g:5.6303e-03 L1_v_norm:6.1167e-02 L2_cos_v_neg_g:6.5462e-03 L2_v_norm:5.9160e-02 L3_cos_v_neg_g:4.6728e-03 L3_v_norm:5.8291e-02 L4_cos_v_neg_g:4.9772e-03 L4_v_norm:5.9647e-02 L5_cos_v_neg_g:4.7886e-03 L5_v_norm:6.0521e-02 L6_cos_v_neg_g:5.4030e-03 L6_v_norm:6.1116e-02 L7_cos_v_neg_g:7.1853e-03 L7_v_norm:6.1117e-02 L8_cos_v_neg_g:8.3206e-03 L8_v_norm:6.1270e-02 L9_cos_v_neg_g:8.3705e-03 L9_v_norm:6.1215e-02 L10_cos_v_neg_g:7.0106e-03 L10_v_norm:6.1207e-02 L11_cos_v_neg_g:6.0848e-03 L11_v_norm:6.1176e-02 L12_cos_v_neg_g:5.2430e-03 L12_v_norm:6.1269e-02 +step:4000 train loss:3.625821 +step:4001 train loss:3.703348 +step:4002 train loss:3.680994 +step:4003 train loss:3.717542 +step:4004 train loss:3.625651 +step:4005 train loss:3.718530 +step:4006 train loss:3.726935 +step:4007 train loss:3.648662 +step:4008 train loss:3.606016 +step:4009 train loss:3.689215 +step:4010 train loss:3.665666 +step:4011 train loss:3.673631 +step:4012 train loss:3.688215 +step:4013 train loss:3.663230 +step:4014 train loss:3.677991 +step:4015 train loss:3.669439 +step:4016 train loss:3.679370 +step:4017 train loss:3.641102 +step:4018 train loss:3.580647 +step:4019 train loss:3.637476 +step:4020 train loss:3.703783 +step:4021 train loss:3.649823 +step:4022 train loss:3.654458 +step:4023 train loss:3.668224 +step:4024 train loss:3.576974 +step:4025 train loss:3.702880 +step:4026 train loss:3.692439 +step:4027 train loss:3.701608 +step:4028 train loss:3.716110 +step:4029 train loss:3.746866 +step:4030 train loss:3.663485 +step:4031 train loss:3.703108 +step:4032 train loss:3.662583 +step:4033 train loss:3.693891 +step:4034 
train loss:3.709648 +step:4035 train loss:3.689583 +step:4036 train loss:3.685958 +step:4037 train loss:3.700448 +step:4038 train loss:3.621581 +step:4039 train loss:3.675035 +step:4040 train loss:3.656169 +step:4041 train loss:3.650349 +step:4042 train loss:3.670760 +step:4043 train loss:3.655415 +step:4044 train loss:3.691030 +step:4045 train loss:3.693655 +step:4046 train loss:3.650839 +step:4047 train loss:3.679747 +step:4048 train loss:3.688586 +step:4049 train loss:3.652404 +step:4050 train loss:3.754686 +step:4051 train loss:3.668810 +step:4052 train loss:3.689498 +step:4053 train loss:3.738266 +step:4054 train loss:3.709494 +step:4055 train loss:3.725999 +step:4056 train loss:3.723775 +step:4057 train loss:3.658527 +step:4058 train loss:3.639952 +step:4059 train loss:3.724633 +step:4060 train loss:3.666160 +step:4061 train loss:3.636476 +step:4062 train loss:3.749346 +step:4063 train loss:3.700144 +step:4064 train loss:3.668705 +step:4065 train loss:3.653064 +step:4066 train loss:3.682684 +step:4067 train loss:3.707465 +step:4068 train loss:3.672071 +step:4069 train loss:3.731369 +step:4070 train loss:3.647166 +step:4071 train loss:3.622092 +step:4072 train loss:3.696180 +step:4073 train loss:3.631955 +step:4074 train loss:3.682949 +step:4075 train loss:3.750254 +step:4076 train loss:3.605169 +step:4077 train loss:3.682646 +step:4078 train loss:3.782803 +step:4079 train loss:3.725466 +step:4080 train loss:3.668571 +step:4081 train loss:3.639857 +step:4082 train loss:3.693815 +step:4083 train loss:3.630734 +step:4084 train loss:3.649477 +step:4085 train loss:3.887927 +step:4086 train loss:3.652080 +step:4087 train loss:3.694165 +step:4088 train loss:3.681543 +step:4089 train loss:3.670337 +step:4090 train loss:3.689717 +step:4091 train loss:3.715052 +step:4092 train loss:3.636811 +step:4093 train loss:3.663951 +step:4094 train loss:3.687059 +step:4095 train loss:3.641484 +step:4096 train loss:3.675136 +step:4097 train loss:3.676535 +step:4098 train loss:3.649168 +step:4099 train loss:3.648943 +step:4100 train loss:3.704897 +step:4101 train loss:3.627403 +step:4102 train loss:3.662855 +step:4103 train loss:3.868517 +step:4104 train loss:3.682158 +step:4105 train loss:3.649780 +step:4106 train loss:3.720199 +step:4107 train loss:3.644304 +step:4108 train loss:3.643692 +step:4109 train loss:3.698718 +step:4110 train loss:3.707736 +step:4111 train loss:3.683362 +step:4112 train loss:3.703298 +step:4113 train loss:3.661795 +step:4114 train loss:3.609166 +step:4115 train loss:3.648158 +step:4116 train loss:3.634713 +step:4117 train loss:3.648031 +step:4118 train loss:3.701822 +step:4119 train loss:3.725931 +step:4120 train loss:3.648670 +step:4121 train loss:3.641683 +step:4122 train loss:3.707262 +step:4123 train loss:3.722112 +step:4124 train loss:3.697882 +step:4125 train loss:3.733615 +step:4126 train loss:3.667372 +step:4127 train loss:3.688277 +step:4128 train loss:3.680196 +step:4129 train loss:3.728651 +step:4130 train loss:3.656965 +step:4131 train loss:3.692543 +step:4132 train loss:3.709378 +step:4133 train loss:3.661415 +step:4134 train loss:3.714844 +step:4135 train loss:3.648749 +step:4136 train loss:3.668256 +step:4137 train loss:3.643111 +step:4138 train loss:3.647683 +step:4139 train loss:3.694988 +step:4140 train loss:3.652600 +step:4141 train loss:3.617770 +step:4142 train loss:3.661590 +step:4143 train loss:3.699122 +step:4144 train loss:3.653432 +step:4145 train loss:3.618462 +step:4146 train loss:3.686862 +step:4147 train loss:3.662368 +step:4148 train loss:3.656025 
+step:4149 train loss:3.736674 +step:4150 train loss:3.698752 +step:4151 train loss:3.682324 +step:4152 train loss:3.703228 +step:4153 train loss:3.709516 +step:4154 train loss:3.717448 +step:4155 train loss:3.742966 +step:4156 train loss:3.614081 +step:4157 train loss:3.635726 +step:4158 train loss:3.695919 +step:4159 train loss:3.593725 +step:4160 train loss:3.687186 +step:4161 train loss:3.688612 +step:4162 train loss:3.594286 +step:4163 train loss:3.678966 +step:4164 train loss:3.626107 +step:4165 train loss:3.625463 +step:4166 train loss:3.690992 +step:4167 train loss:3.689969 +step:4168 train loss:3.681664 +step:4169 train loss:3.707690 +step:4170 train loss:3.826283 +step:4171 train loss:3.675053 +step:4172 train loss:3.694481 +step:4173 train loss:3.692791 +step:4174 train loss:3.652996 +step:4175 train loss:3.745098 +step:4176 train loss:3.666078 +step:4177 train loss:3.691668 +step:4178 train loss:3.670035 +step:4179 train loss:3.626019 +step:4180 train loss:3.621680 +step:4181 train loss:3.672582 +step:4182 train loss:3.656706 +step:4183 train loss:3.591422 +step:4184 train loss:3.663936 +step:4185 train loss:3.730748 +step:4186 train loss:3.708106 +step:4187 train loss:3.714369 +step:4188 train loss:3.689473 +step:4189 train loss:3.649232 +step:4190 train loss:3.693082 +step:4191 train loss:3.640358 +step:4192 train loss:3.728369 +step:4193 train loss:3.637011 +step:4194 train loss:3.620774 +step:4195 train loss:3.616149 +step:4196 train loss:3.686715 +step:4197 train loss:3.698749 +step:4198 train loss:3.622708 +step:4199 train loss:3.705691 +step:4200 train loss:3.668109 +step:4201 train loss:3.647049 +step:4202 train loss:3.665815 +step:4203 train loss:3.672448 +step:4204 train loss:3.666779 +step:4205 train loss:3.680073 +step:4206 train loss:3.700357 +step:4207 train loss:3.698826 +step:4208 train loss:3.663885 +step:4209 train loss:3.727926 +step:4210 train loss:3.757386 +step:4211 train loss:3.639144 +step:4212 train loss:3.680174 +step:4213 train loss:3.632905 +step:4214 train loss:3.639077 +step:4215 train loss:3.653295 +step:4216 train loss:3.628890 +step:4217 train loss:3.651278 +step:4218 train loss:3.692974 +step:4219 train loss:3.691472 +step:4220 train loss:3.766819 +step:4221 train loss:3.654880 +step:4222 train loss:3.716251 +step:4223 train loss:3.636141 +step:4224 train loss:3.711633 +step:4225 train loss:3.637895 +step:4226 train loss:3.695353 +step:4227 train loss:3.671664 +step:4228 train loss:3.644087 +step:4229 train loss:3.655798 +step:4230 train loss:3.638354 +step:4231 train loss:3.626447 +step:4232 train loss:3.675434 +step:4233 train loss:3.583913 +step:4234 train loss:3.666561 +step:4235 train loss:3.744305 +step:4236 train loss:3.711614 +step:4237 train loss:3.694746 +step:4238 train loss:3.705066 +step:4239 train loss:3.755791 +step:4240 train loss:3.662553 +step:4241 train loss:3.592569 +step:4242 train loss:3.709041 +step:4243 train loss:3.709274 +step:4244 train loss:3.722484 +step:4245 train loss:3.778483 +step:4246 train loss:3.652141 +step:4247 train loss:3.710022 +step:4248 train loss:3.661433 +step:4249 train loss:3.668279 +step:4250 validation loss:3.595405 +step:4250 train loss:3.648800 +step:4251 train loss:3.744673 +step:4252 train loss:3.654387 +step:4253 train loss:3.644893 +step:4254 train loss:3.656317 +step:4255 train loss:3.637609 +step:4256 train loss:3.654402 +step:4257 train loss:3.711298 +step:4258 train loss:3.571401 +step:4259 train loss:3.636549 +step:4260 train loss:3.702911 +step:4261 train loss:3.686452 +step:4262 
train loss:3.829936 +step:4263 train loss:3.756955 +step:4264 train loss:3.698208 +step:4265 train loss:3.690192 +step:4266 train loss:3.686965 +step:4267 train loss:3.687098 +step:4268 train loss:3.634284 +step:4269 train loss:3.727862 +step:4270 train loss:3.708288 +step:4271 train loss:3.624143 +step:4272 train loss:3.677505 +step:4273 train loss:3.654202 +step:4274 train loss:3.639386 +step:4275 train loss:3.661542 +step:4276 train loss:3.627735 +step:4277 train loss:3.762481 +step:4278 train loss:3.611402 +step:4279 train loss:3.641484 +step:4280 train loss:3.724185 +step:4281 train loss:3.707091 +step:4282 train loss:3.772703 +step:4283 train loss:3.626295 +step:4284 train loss:3.654523 +step:4285 train loss:3.658431 +step:4286 train loss:3.723954 +step:4287 train loss:3.722820 +step:4288 train loss:3.701114 +step:4289 train loss:3.655128 +step:4290 train loss:3.665136 +step:4291 train loss:3.623488 +step:4292 train loss:3.667920 +step:4293 train loss:3.677954 +step:4294 train loss:3.666728 +step:4295 train loss:3.601012 +step:4296 train loss:3.673007 +step:4297 train loss:3.654298 +step:4298 train loss:3.665731 +step:4299 train loss:3.662894 +step:4300 train loss:3.778626 +step:4301 train loss:3.596496 +step:4302 train loss:3.735233 +step:4303 train loss:3.614382 +step:4304 train loss:3.619738 +step:4305 train loss:3.639565 +step:4306 train loss:3.716260 +step:4307 train loss:3.629732 +step:4308 train loss:3.627309 +step:4309 train loss:3.699658 +step:4310 train loss:3.634067 +step:4311 train loss:3.692347 +step:4312 train loss:3.684531 +step:4313 train loss:3.677854 +step:4314 train loss:3.624566 +step:4315 train loss:3.658353 +step:4316 train loss:3.605518 +step:4317 train loss:3.661160 +step:4318 train loss:3.701234 +step:4319 train loss:3.651762 +step:4320 train loss:3.711744 +step:4321 train loss:3.693826 +step:4322 train loss:3.649044 +step:4323 train loss:3.586162 +step:4324 train loss:3.679563 +step:4325 train loss:3.654843 +step:4326 train loss:3.647048 +step:4327 train loss:3.753653 +step:4328 train loss:3.662502 +step:4329 train loss:3.618654 +step:4330 train loss:3.663478 +step:4331 train loss:3.679658 +step:4332 train loss:3.706607 +step:4333 train loss:3.668817 +step:4334 train loss:3.683116 +step:4335 train loss:3.680448 +step:4336 train loss:3.693690 +step:4337 train loss:3.657419 +step:4338 train loss:3.782298 +step:4339 train loss:3.681803 +step:4340 train loss:3.687968 +step:4341 train loss:3.655547 +step:4342 train loss:3.670534 +step:4343 train loss:3.790033 +step:4344 train loss:3.679972 +step:4345 train loss:3.697186 +step:4346 train loss:3.711385 +step:4347 train loss:3.720611 +step:4348 train loss:3.631057 +step:4349 train loss:3.718628 +step:4350 train loss:3.654389 +step:4351 train loss:3.609811 +step:4352 train loss:3.684092 +step:4353 train loss:3.631817 +step:4354 train loss:3.684653 +step:4355 train loss:3.649936 +step:4356 train loss:3.673429 +step:4357 train loss:3.653939 +step:4358 train loss:3.750122 +step:4359 train loss:3.700804 +step:4360 train loss:3.615092 +step:4361 train loss:3.663136 +step:4362 train loss:3.682481 +step:4363 train loss:3.700024 +step:4364 train loss:3.667028 +step:4365 train loss:3.649213 +step:4366 train loss:3.694828 +step:4367 train loss:3.709705 +step:4368 train loss:3.683453 +step:4369 train loss:3.553509 +step:4370 train loss:3.682218 +step:4371 train loss:3.594932 +step:4372 train loss:3.744024 +step:4373 train loss:3.681227 +step:4374 train loss:3.648410 +step:4375 train loss:3.695279 +step:4376 train loss:3.704654 
+step:4377 train loss:3.639920 +step:4378 train loss:3.649925 +step:4379 train loss:3.731750 +step:4380 train loss:3.714182 +step:4381 train loss:3.614565 +step:4382 train loss:3.661537 +step:4383 train loss:3.689580 +step:4384 train loss:3.687850 +step:4385 train loss:3.612290 +step:4386 train loss:3.670376 +step:4387 train loss:3.639420 +step:4388 train loss:3.657388 +step:4389 train loss:3.688529 +step:4390 train loss:3.726941 +step:4391 train loss:3.653489 +step:4392 train loss:3.727044 +step:4393 train loss:3.687227 +step:4394 train loss:3.622115 +step:4395 train loss:3.681731 +step:4396 train loss:3.654549 +step:4397 train loss:3.697856 +step:4398 train loss:3.646043 +step:4399 train loss:3.639085 +step:4400 train loss:3.643855 +step:4401 train loss:3.701689 +step:4402 train loss:3.700021 +step:4403 train loss:3.651380 +step:4404 train loss:3.683463 +step:4405 train loss:3.604394 +step:4406 train loss:3.682332 +step:4407 train loss:3.618111 +step:4408 train loss:3.713687 +step:4409 train loss:3.669942 +step:4410 train loss:3.676732 +step:4411 train loss:3.636490 +step:4412 train loss:3.749522 +step:4413 train loss:3.647316 +step:4414 train loss:3.654823 +step:4415 train loss:3.640165 +step:4416 train loss:3.634636 +step:4417 train loss:3.625654 +step:4418 train loss:3.699662 +step:4419 train loss:3.668945 +step:4420 train loss:3.676705 +step:4421 train loss:3.703603 +step:4422 train loss:3.718518 +step:4423 train loss:3.677598 +step:4424 train loss:3.663613 +step:4425 train loss:3.625797 +step:4426 train loss:3.699123 +step:4427 train loss:3.661769 +step:4428 train loss:3.598025 +step:4429 train loss:3.662126 +step:4430 train loss:3.699280 +step:4431 train loss:3.692878 +step:4432 train loss:3.599249 +step:4433 train loss:3.652580 +step:4434 train loss:3.652638 +step:4435 train loss:3.681075 +step:4436 train loss:3.617698 +step:4437 train loss:3.694811 +step:4438 train loss:3.663631 +step:4439 train loss:3.668710 +step:4440 train loss:3.667762 +step:4441 train loss:3.669466 +step:4442 train loss:3.721220 +step:4443 train loss:3.651827 +step:4444 train loss:3.736338 +step:4445 train loss:3.700663 +step:4446 train loss:3.632231 +step:4447 train loss:3.680686 +step:4448 train loss:3.699685 +step:4449 train loss:3.635420 +step:4450 train loss:3.655071 +step:4451 train loss:3.708119 +step:4452 train loss:3.761814 +step:4453 train loss:3.694613 +step:4454 train loss:3.664358 +step:4455 train loss:3.711688 +step:4456 train loss:3.655378 +step:4457 train loss:3.656682 +step:4458 train loss:3.667605 +step:4459 train loss:3.701237 +step:4460 train loss:3.611993 +step:4461 train loss:3.586925 +step:4462 train loss:3.641829 +step:4463 train loss:3.661836 +step:4464 train loss:3.633544 +step:4465 train loss:3.665732 +step:4466 train loss:3.764385 +step:4467 train loss:3.643573 +step:4468 train loss:3.636829 +step:4469 train loss:3.629550 +step:4470 train loss:3.604746 +step:4471 train loss:3.668065 +step:4472 train loss:3.588992 +step:4473 train loss:3.676506 +step:4474 train loss:3.702721 +step:4475 train loss:3.666317 +step:4476 train loss:3.625865 +step:4477 train loss:3.609724 +step:4478 train loss:3.671330 +step:4479 train loss:3.772846 +step:4480 train loss:3.607716 +step:4481 train loss:3.678414 +step:4482 train loss:3.638847 +step:4483 train loss:3.635085 +step:4484 train loss:3.680285 +step:4485 train loss:3.643076 +step:4486 train loss:3.741602 +step:4487 train loss:3.640066 +step:4488 train loss:3.636397 +step:4489 train loss:3.593740 +step:4490 train loss:3.676962 +step:4491 train 
loss:3.626598 +step:4492 train loss:3.659777 +step:4493 train loss:3.645640 +step:4494 train loss:3.640348 +step:4495 train loss:3.706544 +step:4496 train loss:3.648169 +step:4497 train loss:3.730139 +step:4498 train loss:3.624203 +step:4499 train loss:3.673280 +step:4500 validation loss:3.579478 total_sharp:6.7510e-03 L1_sharp:4.9339e-02 L2_sharp:2.9570e-02 L3_sharp:5.0581e-02 L4_sharp:2.5584e-02 L5_sharp:3.1501e-02 L6_sharp:4.2438e-02 L7_sharp:4.6036e-02 L8_sharp:4.2112e-02 L9_sharp:3.2422e-02 L10_sharp:2.5524e-02 L11_sharp:2.4090e-02 L12_sharp:3.4246e-02 total_fnorm:1.3565e+00 total_l1_linf:8.1306e+03 total_spectral:1.3565e+00 L1_fnorm:6.1582e-02 L2_fnorm:5.8938e-02 L3_fnorm:5.8098e-02 L4_fnorm:5.9830e-02 L5_fnorm:6.0836e-02 L6_fnorm:6.1510e-02 L7_fnorm:6.1429e-02 L8_fnorm:6.1356e-02 L9_fnorm:6.1258e-02 L10_fnorm:6.1347e-02 L11_fnorm:6.1306e-02 L12_fnorm:6.1172e-02 L1_l1linf:3.3103e-01 L2_l1linf:3.6800e-01 L3_l1linf:3.7408e-01 L4_l1linf:3.6468e-01 L5_l1linf:3.7503e-01 L6_l1linf:3.6392e-01 L7_l1linf:3.4846e-01 L8_l1linf:3.2671e-01 L9_l1linf:3.5190e-01 L10_l1linf:3.6403e-01 L11_l1linf:3.7358e-01 L12_l1linf:3.3802e-01 L1_spectral:7.4054e-03 L2_spectral:8.3059e-03 L3_spectral:8.3243e-03 L4_spectral:8.1406e-03 L5_spectral:8.3570e-03 L6_spectral:8.0945e-03 L7_spectral:7.7820e-03 L8_spectral:7.3104e-03 L9_spectral:7.8403e-03 L10_spectral:8.1225e-03 L11_spectral:8.4525e-03 L12_spectral:7.7246e-03 ip_v_neg_g:6.2149e-03 cos_v_neg_g:1.2944e-03 v_norm:1.3565e+00 g_norm:3.5395e+00 hv_norm:9.5925e-01 cos_v_hv:9.5469e-03 hg_norm:3.6856e+02 cos_g_hg:4.3025e-01 v_par:4.3312e-05 v_perp:1.3565e+00 L1_cos_v_neg_g:8.4700e-03 L1_v_norm:6.1582e-02 L2_cos_v_neg_g:6.7764e-03 L2_v_norm:5.8938e-02 L3_cos_v_neg_g:6.2397e-03 L3_v_norm:5.8098e-02 L4_cos_v_neg_g:6.2020e-03 L4_v_norm:5.9830e-02 L5_cos_v_neg_g:8.2307e-03 L5_v_norm:6.0836e-02 L6_cos_v_neg_g:9.0322e-03 L6_v_norm:6.1510e-02 L7_cos_v_neg_g:8.2585e-03 L7_v_norm:6.1429e-02 L8_cos_v_neg_g:8.3761e-03 L8_v_norm:6.1356e-02 L9_cos_v_neg_g:8.0853e-03 L9_v_norm:6.1258e-02 L10_cos_v_neg_g:7.2466e-03 L10_v_norm:6.1347e-02 L11_cos_v_neg_g:7.3106e-03 L11_v_norm:6.1306e-02 L12_cos_v_neg_g:6.9973e-03 L12_v_norm:6.1172e-02 +step:4500 train loss:3.581856 +step:4501 train loss:3.643060 +step:4502 train loss:3.767788 +step:4503 train loss:3.667801 +step:4504 train loss:3.681738 +step:4505 train loss:3.663623 +step:4506 train loss:3.634799 +step:4507 train loss:3.710137 +step:4508 train loss:3.645822 +step:4509 train loss:3.643177 +step:4510 train loss:3.677002 +step:4511 train loss:3.632709 +step:4512 train loss:3.652954 +step:4513 train loss:3.713420 +step:4514 train loss:3.619372 +step:4515 train loss:3.734418 +step:4516 train loss:3.706692 +step:4517 train loss:3.662298 +step:4518 train loss:3.602948 +step:4519 train loss:3.638282 +step:4520 train loss:3.650962 +step:4521 train loss:3.590570 +step:4522 train loss:3.647390 +step:4523 train loss:3.692122 +step:4524 train loss:3.675892 +step:4525 train loss:3.598751 +step:4526 train loss:3.639362 +step:4527 train loss:3.626434 +step:4528 train loss:3.656166 +step:4529 train loss:3.655222 +step:4530 train loss:3.748449 +step:4531 train loss:3.639059 +step:4532 train loss:3.661244 +step:4533 train loss:3.635557 +step:4534 train loss:3.727375 +step:4535 train loss:3.625789 +step:4536 train loss:3.697176 +step:4537 train loss:3.680333 +step:4538 train loss:3.658214 +step:4539 train loss:3.679798 +step:4540 train loss:3.656314 +step:4541 train loss:3.623516 +step:4542 train loss:3.674053 +step:4543 train loss:3.758709 +step:4544 
train loss:3.701077 +step:4545 train loss:3.643413 +step:4546 train loss:3.735726 +step:4547 train loss:3.693403 +step:4548 train loss:3.698906 +step:4549 train loss:3.653229 +step:4550 train loss:3.621081 +step:4551 train loss:3.639068 +step:4552 train loss:3.640826 +step:4553 train loss:3.724087 +step:4554 train loss:3.617217 +step:4555 train loss:3.729136 +step:4556 train loss:3.664218 +step:4557 train loss:3.595426 +step:4558 train loss:3.680533 +step:4559 train loss:3.691934 +step:4560 train loss:3.628027 +step:4561 train loss:3.615306 +step:4562 train loss:3.658119 +step:4563 train loss:3.609732 +step:4564 train loss:3.635941 +step:4565 train loss:3.635501 +step:4566 train loss:3.609841 +step:4567 train loss:3.635267 +step:4568 train loss:3.635031 +step:4569 train loss:3.620429 +step:4570 train loss:3.670079 +step:4571 train loss:3.647718 +step:4572 train loss:3.643028 +step:4573 train loss:3.650902 +step:4574 train loss:3.796332 +step:4575 train loss:3.626362 +step:4576 train loss:3.616803 +step:4577 train loss:3.657672 +step:4578 train loss:3.696670 +step:4579 train loss:3.648139 +step:4580 train loss:3.708995 +step:4581 train loss:3.646073 +step:4582 train loss:3.642044 +step:4583 train loss:3.648233 +step:4584 train loss:3.619936 +step:4585 train loss:3.697909 +step:4586 train loss:3.687096 +step:4587 train loss:3.586933 +step:4588 train loss:3.629696 +step:4589 train loss:3.706380 +step:4590 train loss:3.675485 +step:4591 train loss:3.613198 +step:4592 train loss:3.698992 +step:4593 train loss:3.617880 +step:4594 train loss:3.648876 +step:4595 train loss:3.672594 +step:4596 train loss:3.613211 +step:4597 train loss:3.747551 +step:4598 train loss:3.668036 +step:4599 train loss:3.619115 +step:4600 train loss:3.626626 +step:4601 train loss:3.650020 +step:4602 train loss:3.600951 +step:4603 train loss:3.615819 +step:4604 train loss:3.721750 +step:4605 train loss:3.640545 +step:4606 train loss:3.667584 +step:4607 train loss:3.648400 +step:4608 train loss:3.683270 +step:4609 train loss:3.641933 +step:4610 train loss:3.684097 +step:4611 train loss:3.712016 +step:4612 train loss:3.708242 +step:4613 train loss:3.688588 +step:4614 train loss:3.682209 +step:4615 train loss:3.622987 +step:4616 train loss:3.606939 +step:4617 train loss:3.650184 +step:4618 train loss:3.667629 +step:4619 train loss:3.627428 +step:4620 train loss:3.642023 +step:4621 train loss:3.644842 +step:4622 train loss:3.582864 +step:4623 train loss:3.690338 +step:4624 train loss:3.675667 +step:4625 train loss:3.632903 +step:4626 train loss:3.675023 +step:4627 train loss:3.644740 +step:4628 train loss:3.632712 +step:4629 train loss:3.669051 +step:4630 train loss:3.726425 +step:4631 train loss:3.730069 +step:4632 train loss:3.622998 +step:4633 train loss:3.636635 +step:4634 train loss:3.709909 +step:4635 train loss:3.675343 +step:4636 train loss:3.689396 +step:4637 train loss:3.624917 +step:4638 train loss:3.634436 +step:4639 train loss:3.630266 +step:4640 train loss:3.638373 +step:4641 train loss:3.644574 +step:4642 train loss:3.677345 +step:4643 train loss:3.639360 +step:4644 train loss:3.662187 +step:4645 train loss:3.676997 +step:4646 train loss:3.633020 +step:4647 train loss:3.590539 +step:4648 train loss:3.697784 +step:4649 train loss:3.711019 +step:4650 train loss:3.656111 +step:4651 train loss:3.658301 +step:4652 train loss:3.648260 +step:4653 train loss:3.705547 +step:4654 train loss:3.700169 +step:4655 train loss:3.604022 +step:4656 train loss:3.636303 +step:4657 train loss:3.691073 +step:4658 train loss:3.647506 
+step:4659 train loss:3.659054 +step:4660 train loss:3.703414 +step:4661 train loss:3.621096 +step:4662 train loss:3.635483 +step:4663 train loss:3.649368 +step:4664 train loss:3.699495 +step:4665 train loss:3.694811 +step:4666 train loss:3.690531 +step:4667 train loss:3.684274 +step:4668 train loss:3.645988 +step:4669 train loss:3.657758 +step:4670 train loss:3.688576 +step:4671 train loss:3.701064 +step:4672 train loss:3.562624 +step:4673 train loss:3.597677 +step:4674 train loss:3.725020 +step:4675 train loss:3.631169 +step:4676 train loss:3.592443 +step:4677 train loss:3.595621 +step:4678 train loss:3.568287 +step:4679 train loss:3.667840 +step:4680 train loss:3.607449 +step:4681 train loss:3.657882 +step:4682 train loss:3.607318 +step:4683 train loss:3.576236 +step:4684 train loss:3.693864 +step:4685 train loss:3.630386 +step:4686 train loss:3.640831 +step:4687 train loss:3.677155 +step:4688 train loss:3.608242 +step:4689 train loss:3.682903 +step:4690 train loss:3.624780 +step:4691 train loss:3.660684 +step:4692 train loss:3.588001 +step:4693 train loss:3.627290 +step:4694 train loss:3.668675 +step:4695 train loss:3.688235 +step:4696 train loss:3.675283 +step:4697 train loss:3.587610 +step:4698 train loss:3.606072 +step:4699 train loss:3.656538 +step:4700 train loss:3.626035 +step:4701 train loss:3.634252 +step:4702 train loss:3.587896 +step:4703 train loss:3.669102 +step:4704 train loss:3.658827 +step:4705 train loss:3.601409 +step:4706 train loss:3.608977 +step:4707 train loss:3.597101 +step:4708 train loss:3.664095 +step:4709 train loss:3.608805 +step:4710 train loss:3.624883 +step:4711 train loss:3.685667 +step:4712 train loss:3.584935 +step:4713 train loss:3.687890 +step:4714 train loss:3.586610 +step:4715 train loss:3.678109 +step:4716 train loss:3.646080 +step:4717 train loss:3.577695 +step:4718 train loss:3.667655 +step:4719 train loss:3.593134 +step:4720 train loss:3.691787 +step:4721 train loss:3.647084 +step:4722 train loss:3.702518 +step:4723 train loss:3.597687 +step:4724 train loss:3.646807 +step:4725 train loss:3.582643 +step:4726 train loss:3.630116 +step:4727 train loss:3.635439 +step:4728 train loss:3.642902 +step:4729 train loss:3.672537 +step:4730 train loss:3.570627 +step:4731 train loss:3.633355 +step:4732 train loss:3.585853 +step:4733 train loss:3.520988 +step:4734 train loss:3.658327 +step:4735 train loss:3.610587 +step:4736 train loss:3.651873 +step:4737 train loss:3.534279 +step:4738 train loss:3.679129 +step:4739 train loss:3.558560 +step:4740 train loss:3.667438 +step:4741 train loss:3.636206 +step:4742 train loss:3.600462 +step:4743 train loss:3.592893 +step:4744 train loss:3.639853 +step:4745 train loss:3.658650 +step:4746 train loss:3.698363 +step:4747 train loss:3.659914 +step:4748 train loss:3.560256 +step:4749 train loss:3.626537 +step:4750 validation loss:3.568348 +step:4750 train loss:3.572910 +step:4751 train loss:3.668089 +step:4752 train loss:3.599655 +step:4753 train loss:3.707442 +step:4754 train loss:3.574790 +step:4755 train loss:3.619893 +step:4756 train loss:3.691344 +step:4757 train loss:3.617067 +step:4758 train loss:3.632371 +step:4759 train loss:3.633739 +step:4760 train loss:3.661998 +step:4761 train loss:3.581778 +step:4762 train loss:3.613588 +step:4763 train loss:3.635835 +step:4764 train loss:3.697443 +step:4765 train loss:3.590208 +step:4766 train loss:3.611559 +step:4767 train loss:3.564098 +step:4768 train loss:3.622433 +step:4769 train loss:3.646987 +step:4770 train loss:3.607228 +step:4771 train loss:3.618615 +step:4772 
train loss:3.592855 +step:4773 train loss:3.626835 +step:4774 train loss:3.569615 +step:4775 train loss:3.703001 +step:4776 train loss:3.566547 +step:4777 train loss:3.642150 +step:4778 train loss:3.580062 +step:4779 train loss:3.629642 +step:4780 train loss:3.566045 +step:4781 train loss:3.575202 +step:4782 train loss:3.679346 +step:4783 train loss:3.669398 +step:4784 train loss:3.629583 +step:4785 train loss:3.626512 +step:4786 train loss:3.737806 +step:4787 train loss:3.571842 +step:4788 train loss:3.593871 +step:4789 train loss:3.620004 +step:4790 train loss:3.670469 +step:4791 train loss:3.637083 +step:4792 train loss:3.678648 +step:4793 train loss:3.594803 +step:4794 train loss:3.670739 +step:4795 train loss:3.616969 +step:4796 train loss:3.608957 +step:4797 train loss:3.614598 +step:4798 train loss:3.622890 +step:4799 train loss:3.619756 +step:4800 train loss:3.651315 +step:4801 train loss:3.641617 +step:4802 train loss:3.680718 +step:4803 train loss:3.662863 +step:4804 train loss:3.621987 +step:4805 train loss:3.615544 +step:4806 train loss:3.593312 +step:4807 train loss:3.702696 +step:4808 train loss:3.572945 +step:4809 train loss:3.677416 +step:4810 train loss:3.617820 +step:4811 train loss:3.636178 +step:4812 train loss:3.611563 +step:4813 train loss:3.569201 +step:4814 train loss:3.561588 +step:4815 train loss:3.555094 +step:4816 train loss:3.620834 +step:4817 train loss:3.558642 +step:4818 train loss:3.626657 +step:4819 train loss:3.619880 +step:4820 train loss:3.873395 +step:4821 train loss:3.645750 +step:4822 train loss:3.655045 +step:4823 train loss:3.585828 +step:4824 train loss:3.595815 +step:4825 train loss:3.574521 +step:4826 train loss:3.662588 +step:4827 train loss:3.610500 +step:4828 train loss:3.550934 +step:4829 train loss:3.655014 +step:4830 train loss:3.597032 +step:4831 train loss:3.744970 +step:4832 train loss:3.615131 +step:4833 train loss:3.650953 +step:4834 train loss:3.554187 +step:4835 train loss:3.643677 +step:4836 train loss:3.623631 +step:4837 train loss:3.653372 +step:4838 train loss:3.591294 +step:4839 train loss:3.659692 +step:4840 train loss:3.564431 +step:4841 train loss:3.662125 +step:4842 train loss:3.574920 +step:4843 train loss:3.655443 +step:4844 train loss:3.655021 +step:4845 train loss:3.593436 +step:4846 train loss:3.607182 +step:4847 train loss:3.593727 +step:4848 train loss:3.616485 +step:4849 train loss:3.571616 +step:4850 train loss:3.578526 +step:4851 train loss:3.575232 +step:4852 train loss:3.653641 +step:4853 train loss:3.629297 +step:4854 train loss:3.606747 +step:4855 train loss:3.670533 +step:4856 train loss:3.640900 +step:4857 train loss:3.648098 +step:4858 train loss:3.731645 +step:4859 train loss:3.577524 +step:4860 train loss:3.652250 +step:4861 train loss:3.622740 +step:4862 train loss:3.658216 +step:4863 train loss:3.593382 +step:4864 train loss:3.604488 +step:4865 train loss:3.597720 +step:4866 train loss:3.644292 +step:4867 train loss:3.610660 +step:4868 train loss:3.629050 +step:4869 train loss:3.577562 +step:4870 train loss:3.609576 +step:4871 train loss:3.693360 +step:4872 train loss:3.638066 +step:4873 train loss:3.633629 +step:4874 train loss:3.606790 +step:4875 train loss:3.573104 +step:4876 train loss:3.585891 +step:4877 train loss:3.587070 +step:4878 train loss:3.626893 +step:4879 train loss:3.588899 +step:4880 train loss:3.615983 +step:4881 train loss:3.559375 +step:4882 train loss:3.762987 +step:4883 train loss:3.571170 +step:4884 train loss:3.603390 +step:4885 train loss:3.575407 +step:4886 train loss:3.655890 
+step:4887 train loss:3.605960 +step:4888 train loss:3.616863 +step:4889 train loss:3.605119 +step:4890 train loss:3.652743 +step:4891 train loss:3.585715 +step:4892 train loss:3.594744 +step:4893 train loss:3.638024 +step:4894 train loss:3.575006 +step:4895 train loss:3.605042 +step:4896 train loss:3.590343 +step:4897 train loss:3.661800 +step:4898 train loss:3.612570 +step:4899 train loss:3.593516 +step:4900 train loss:3.642333 +step:4901 train loss:3.588023 +step:4902 train loss:3.583648 +step:4903 train loss:3.603678 +step:4904 train loss:3.617197 +step:4905 train loss:3.613322 +step:4906 train loss:3.615289 +step:4907 train loss:3.686261 +step:4908 train loss:3.593071 +step:4909 train loss:3.598625 +step:4910 train loss:3.621325 +step:4911 train loss:3.674057 +step:4912 train loss:3.647751 +step:4913 train loss:3.625386 +step:4914 train loss:3.617736 +step:4915 train loss:3.599626 +step:4916 train loss:3.539020 +step:4917 train loss:3.564482 +step:4918 train loss:3.598902 +step:4919 train loss:3.587313 +step:4920 train loss:3.592025 +step:4921 train loss:3.750958 +step:4922 train loss:3.645907 +step:4923 train loss:3.660385 +step:4924 train loss:3.661122 +step:4925 train loss:3.594829 +step:4926 train loss:3.588793 +step:4927 train loss:3.616855 +step:4928 train loss:3.656040 +step:4929 train loss:3.610414 +step:4930 train loss:3.593293 +step:4931 train loss:3.584826 +step:4932 train loss:3.595790 +step:4933 train loss:3.588773 +step:4934 train loss:3.655887 +step:4935 train loss:3.642423 +step:4936 train loss:3.606291 +step:4937 train loss:3.714895 +step:4938 train loss:3.699540 +step:4939 train loss:3.567091 +step:4940 train loss:3.645349 +step:4941 train loss:3.549079 +step:4942 train loss:3.588900 +step:4943 train loss:3.592130 +step:4944 train loss:3.591749 +step:4945 train loss:3.639540 +step:4946 train loss:3.610637 +step:4947 train loss:3.597117 +step:4948 train loss:3.632247 +step:4949 train loss:3.541144 +step:4950 train loss:3.622848 +step:4951 train loss:3.670609 +step:4952 train loss:3.611474 +step:4953 train loss:3.644583 +step:4954 train loss:3.548795 +step:4955 train loss:3.624249 +step:4956 train loss:3.652856 +step:4957 train loss:3.648921 +step:4958 train loss:3.562326 +step:4959 train loss:3.679535 +step:4960 train loss:3.606884 +step:4961 train loss:3.625044 +step:4962 train loss:3.588885 +step:4963 train loss:3.634083 +step:4964 train loss:3.584386 +step:4965 train loss:3.737775 +step:4966 train loss:3.584625 +step:4967 train loss:3.693762 +step:4968 train loss:3.584077 +step:4969 train loss:3.626832 +step:4970 train loss:3.615773 +step:4971 train loss:3.568060 +step:4972 train loss:3.614465 +step:4973 train loss:3.617959 +step:4974 train loss:3.609604 +step:4975 train loss:3.692120 +step:4976 train loss:3.672061 +step:4977 train loss:3.618114 +step:4978 train loss:3.606767 +step:4979 train loss:3.603030 +step:4980 train loss:3.712738 +step:4981 train loss:3.550333 +step:4982 train loss:3.633821 +step:4983 train loss:3.552404 +step:4984 train loss:3.741224 +step:4985 train loss:3.639291 +step:4986 train loss:3.582476 +step:4987 train loss:3.602482 +step:4988 train loss:3.799591 +step:4989 train loss:3.602654 +step:4990 train loss:3.599455 +step:4991 train loss:3.611455 +step:4992 train loss:3.599376 +step:4993 train loss:3.575071 +step:4994 train loss:3.686860 +step:4995 train loss:3.610224 +step:4996 train loss:3.696645 +step:4997 train loss:3.596751 +step:4998 train loss:3.602119 +step:4999 train loss:3.585290 +step:5000 validation loss:3.553995 
total_sharp:4.5990e-03 L1_sharp:6.9188e-02 L2_sharp:2.7052e-02 L3_sharp:3.6962e-02 L4_sharp:1.9756e-02 L5_sharp:2.5134e-02 L6_sharp:2.5805e-02 L7_sharp:2.9246e-02 L8_sharp:2.9872e-02 L9_sharp:2.1690e-02 L10_sharp:1.7171e-02 L11_sharp:1.6135e-02 L12_sharp:2.3607e-02 total_fnorm:1.3466e+00 total_l1_linf:8.0796e+03 total_spectral:1.3466e+00 L1_fnorm:6.1084e-02 L2_fnorm:5.8651e-02 L3_fnorm:5.7834e-02 L4_fnorm:5.9708e-02 L5_fnorm:6.0604e-02 L6_fnorm:6.1192e-02 L7_fnorm:6.1090e-02 L8_fnorm:6.1113e-02 L9_fnorm:6.1125e-02 L10_fnorm:6.1271e-02 L11_fnorm:6.1249e-02 L12_fnorm:6.1194e-02 L1_l1linf:2.7491e-01 L2_l1linf:3.2783e-01 L3_l1linf:3.3144e-01 L4_l1linf:3.3016e-01 L5_l1linf:3.2665e-01 L6_l1linf:3.0840e-01 L7_l1linf:2.8482e-01 L8_l1linf:2.8321e-01 L9_l1linf:3.0130e-01 L10_l1linf:3.4620e-01 L11_l1linf:3.5295e-01 L12_l1linf:3.4335e-01 L1_spectral:6.3830e-03 L2_spectral:7.3956e-03 L3_spectral:7.4744e-03 L4_spectral:7.4886e-03 L5_spectral:7.3190e-03 L6_spectral:6.9610e-03 L7_spectral:6.4805e-03 L8_spectral:6.4569e-03 L9_spectral:6.8154e-03 L10_spectral:7.7713e-03 L11_spectral:7.9543e-03 L12_spectral:7.8481e-03 ip_v_neg_g:5.0622e-03 cos_v_neg_g:1.0821e-03 v_norm:1.3466e+00 g_norm:3.4741e+00 hv_norm:7.6344e-01 cos_v_hv:8.1118e-03 hg_norm:2.8875e+02 cos_g_hg:4.6296e-01 v_par:4.1580e-05 v_perp:1.3466e+00 L1_cos_v_neg_g:8.7483e-03 L1_v_norm:6.1084e-02 L2_cos_v_neg_g:7.9606e-03 L2_v_norm:5.8651e-02 L3_cos_v_neg_g:8.8110e-03 L3_v_norm:5.7834e-02 L4_cos_v_neg_g:5.7642e-03 L4_v_norm:5.9708e-02 L5_cos_v_neg_g:5.2725e-03 L5_v_norm:6.0604e-02 L6_cos_v_neg_g:5.8809e-03 L6_v_norm:6.1192e-02 L7_cos_v_neg_g:5.1978e-03 L7_v_norm:6.1090e-02 L8_cos_v_neg_g:5.7289e-03 L8_v_norm:6.1113e-02 L9_cos_v_neg_g:4.8776e-03 L9_v_norm:6.1125e-02 L10_cos_v_neg_g:5.6377e-03 L10_v_norm:6.1271e-02 L11_cos_v_neg_g:5.4946e-03 L11_v_norm:6.1249e-02 L12_cos_v_neg_g:5.9038e-03 L12_v_norm:6.1194e-02 +step:5000 train loss:3.699951 +step:5001 train loss:3.566523 +step:5002 train loss:3.621953 +step:5003 train loss:3.617904 +step:5004 train loss:3.610293 +step:5005 train loss:3.605635 +step:5006 train loss:3.648237 +step:5007 train loss:3.651593 +step:5008 train loss:3.586427 +step:5009 train loss:3.635452 +step:5010 train loss:3.584127 +step:5011 train loss:3.613852 +step:5012 train loss:3.587486 +step:5013 train loss:3.689498 +step:5014 train loss:3.603943 +step:5015 train loss:3.679215 +step:5016 train loss:3.605981 +step:5017 train loss:3.652381 +step:5018 train loss:3.570652 +step:5019 train loss:3.605433 +step:5020 train loss:3.598083 +step:5021 train loss:3.612574 +step:5022 train loss:3.648535 +step:5023 train loss:3.616988 +step:5024 train loss:3.669505 +step:5025 train loss:3.552712 +step:5026 train loss:3.678658 +step:5027 train loss:3.610198 +step:5028 train loss:3.678900 +step:5029 train loss:3.573620 +step:5030 train loss:3.614244 +step:5031 train loss:3.600277 +step:5032 train loss:3.627553 +step:5033 train loss:3.612956 +step:5034 train loss:3.607164 +step:5035 train loss:3.694650 +step:5036 train loss:3.641849 +step:5037 train loss:3.592478 +step:5038 train loss:3.643966 +step:5039 train loss:3.657150 +step:5040 train loss:3.619856 +step:5041 train loss:3.635448 +step:5042 train loss:3.539901 +step:5043 train loss:3.680976 +step:5044 train loss:3.597604 +step:5045 train loss:3.650871 +step:5046 train loss:3.571774 +step:5047 train loss:3.647193 +step:5048 train loss:3.561736 +step:5049 train loss:3.696002 +step:5050 train loss:3.583924 +step:5051 train loss:3.627888 +step:5052 train loss:3.526021 +step:5053 train loss:3.708292 
+step:5054 train loss:3.598330 +step:5055 train loss:3.623843 +step:5056 train loss:3.658464 +step:5057 train loss:3.589346 +step:5058 train loss:3.619877 +step:5059 train loss:3.580312 +step:5060 train loss:3.627148 +step:5061 train loss:3.624125 +step:5062 train loss:3.592251 +step:5063 train loss:3.587980 +step:5064 train loss:3.595522 +step:5065 train loss:3.579937 +step:5066 train loss:3.638681 +step:5067 train loss:3.623337 +step:5068 train loss:3.606892 +step:5069 train loss:3.578835 +step:5070 train loss:3.608466 +step:5071 train loss:3.676649 +step:5072 train loss:3.568383 +step:5073 train loss:3.578049 +step:5074 train loss:3.526166 +step:5075 train loss:3.594206 +step:5076 train loss:3.525269 +step:5077 train loss:3.586312 +step:5078 train loss:3.606953 +step:5079 train loss:3.631033 +step:5080 train loss:3.607094 +step:5081 train loss:3.617212 +step:5082 train loss:3.607739 +step:5083 train loss:3.660095 +step:5084 train loss:3.641495 +step:5085 train loss:3.603065 +step:5086 train loss:3.680084 +step:5087 train loss:3.663346 +step:5088 train loss:3.583878 +step:5089 train loss:3.650696 +step:5090 train loss:3.596030 +step:5091 train loss:3.597593 +step:5092 train loss:3.697066 +step:5093 train loss:3.578005 +step:5094 train loss:3.574965 +step:5095 train loss:3.625569 +step:5096 train loss:3.594442 +step:5097 train loss:3.603331 +step:5098 train loss:3.607186 +step:5099 train loss:3.568434 +step:5100 train loss:3.581600 +step:5101 train loss:3.774453 +step:5102 train loss:3.620195 +step:5103 train loss:3.628312 +step:5104 train loss:3.677908 +step:5105 train loss:3.609536 +step:5106 train loss:3.571336 +step:5107 train loss:3.591581 +step:5108 train loss:3.583714 +step:5109 train loss:3.662680 +step:5110 train loss:3.575946 +step:5111 train loss:3.667817 +step:5112 train loss:3.578599 +step:5113 train loss:3.559762 +step:5114 train loss:3.606253 +step:5115 train loss:3.567711 +step:5116 train loss:3.623996 +step:5117 train loss:3.568028 +step:5118 train loss:3.596976 +step:5119 train loss:3.579466 +step:5120 train loss:3.621101 +step:5121 train loss:3.571098 +step:5122 train loss:3.582018 +step:5123 train loss:3.566501 +step:5124 train loss:3.528134 +step:5125 train loss:3.638213 +step:5126 train loss:3.624723 +step:5127 train loss:3.626673 +step:5128 train loss:3.640950 +step:5129 train loss:3.567203 +step:5130 train loss:3.580601 +step:5131 train loss:3.519261 +step:5132 train loss:3.639813 +step:5133 train loss:3.608115 +step:5134 train loss:3.609959 +step:5135 train loss:3.562394 +step:5136 train loss:3.630029 +step:5137 train loss:3.628190 +step:5138 train loss:3.605688 +step:5139 train loss:3.643326 +step:5140 train loss:3.616595 +step:5141 train loss:3.645924 +step:5142 train loss:3.594321 +step:5143 train loss:3.622519 +step:5144 train loss:3.618197 +step:5145 train loss:3.562525 +step:5146 train loss:3.556144 +step:5147 train loss:3.633876 +step:5148 train loss:3.560006 +step:5149 train loss:3.633642 +step:5150 train loss:3.608691 +step:5151 train loss:3.575117 +step:5152 train loss:3.621765 +step:5153 train loss:3.592366 +step:5154 train loss:3.606074 +step:5155 train loss:3.609373 +step:5156 train loss:3.589479 +step:5157 train loss:3.592531 +step:5158 train loss:3.615328 +step:5159 train loss:3.648735 +step:5160 train loss:3.720720 +step:5161 train loss:3.647495 +step:5162 train loss:3.664453 +step:5163 train loss:3.578841 +step:5164 train loss:3.645117 +step:5165 train loss:3.657847 +step:5166 train loss:3.598372 +step:5167 train loss:3.693605 +step:5168 train 
loss:3.608932 +step:5169 train loss:3.641916 +step:5170 train loss:3.619119 +step:5171 train loss:3.662370 +step:5172 train loss:3.582182 +step:5173 train loss:3.648347 +step:5174 train loss:3.582202 +step:5175 train loss:3.614614 +step:5176 train loss:3.602643 +step:5177 train loss:3.600440 +step:5178 train loss:3.666190 +step:5179 train loss:3.577813 +step:5180 train loss:3.658830 +step:5181 train loss:3.602698 +step:5182 train loss:3.661691 +step:5183 train loss:3.590742 +step:5184 train loss:3.572267 +step:5185 train loss:3.595541 +step:5186 train loss:3.653126 +step:5187 train loss:3.646944 +step:5188 train loss:3.580972 +step:5189 train loss:3.624475 +step:5190 train loss:3.607578 +step:5191 train loss:3.587028 +step:5192 train loss:3.570806 +step:5193 train loss:3.657074 +step:5194 train loss:3.608305 +step:5195 train loss:3.580459 +step:5196 train loss:3.650004 +step:5197 train loss:3.698207 +step:5198 train loss:3.608502 +step:5199 train loss:3.594079 +step:5200 train loss:3.618444 +step:5201 train loss:3.608447 +step:5202 train loss:3.611898 +step:5203 train loss:3.616964 +step:5204 train loss:3.587282 +step:5205 train loss:3.630030 +step:5206 train loss:3.567825 +step:5207 train loss:3.570120 +step:5208 train loss:3.633839 +step:5209 train loss:3.652179 +step:5210 train loss:3.555988 +step:5211 train loss:3.600148 +step:5212 train loss:3.618213 +step:5213 train loss:3.592721 +step:5214 train loss:3.640313 +step:5215 train loss:3.751093 +step:5216 train loss:3.603730 +step:5217 train loss:3.581460 +step:5218 train loss:3.587111 +step:5219 train loss:3.649176 +step:5220 train loss:3.568703 +step:5221 train loss:3.569269 +step:5222 train loss:3.651279 +step:5223 train loss:3.643720 +step:5224 train loss:3.542650 +step:5225 train loss:3.690357 +step:5226 train loss:3.604064 +step:5227 train loss:3.676167 +step:5228 train loss:3.649674 +step:5229 train loss:3.586878 +step:5230 train loss:3.603065 +step:5231 train loss:3.548021 +step:5232 train loss:3.670722 +step:5233 train loss:3.632728 +step:5234 train loss:3.635550 +step:5235 train loss:3.585405 +step:5236 train loss:3.660802 +step:5237 train loss:3.713074 +step:5238 train loss:3.615807 +step:5239 train loss:3.677001 +step:5240 train loss:3.560946 +step:5241 train loss:3.619921 +step:5242 train loss:3.590071 +step:5243 train loss:3.593794 +step:5244 train loss:3.594384 +step:5245 train loss:3.637010 +step:5246 train loss:3.679864 +step:5247 train loss:3.608895 +step:5248 train loss:3.579112 +step:5249 train loss:3.636473 +step:5250 validation loss:3.539094 +step:5250 train loss:3.606648 +step:5251 train loss:3.670543 +step:5252 train loss:3.560458 +step:5253 train loss:3.711441 +step:5254 train loss:3.585912 +step:5255 train loss:3.658966 +step:5256 train loss:3.572325 +step:5257 train loss:3.626677 +step:5258 train loss:3.624470 +step:5259 train loss:3.611187 +step:5260 train loss:3.604921 +step:5261 train loss:3.595432 +step:5262 train loss:3.635157 +step:5263 train loss:3.620821 +step:5264 train loss:3.574344 +step:5265 train loss:3.654106 +step:5266 train loss:3.571634 +step:5267 train loss:3.582607 +step:5268 train loss:3.564409 +step:5269 train loss:3.568301 +step:5270 train loss:3.616403 +step:5271 train loss:3.542557 +step:5272 train loss:3.634054 +step:5273 train loss:3.544725 +step:5274 train loss:3.593025 +step:5275 train loss:3.606809 +step:5276 train loss:3.732197 +step:5277 train loss:3.631115 +step:5278 train loss:3.578868 +step:5279 train loss:3.627173 +step:5280 train loss:3.604491 +step:5281 train loss:3.597189 
+step:5282 train loss:3.570086 +step:5283 train loss:3.569537 +step:5284 train loss:3.577325 +step:5285 train loss:3.646190 +step:5286 train loss:3.553935 +step:5287 train loss:3.655021 +step:5288 train loss:3.630442 +step:5289 train loss:3.600523 +step:5290 train loss:3.652094 +step:5291 train loss:3.606650 +step:5292 train loss:3.623861 +step:5293 train loss:3.592994 +step:5294 train loss:3.582326 +step:5295 train loss:3.588496 +step:5296 train loss:3.578588 +step:5297 train loss:3.602283 +step:5298 train loss:3.546957 +step:5299 train loss:3.638802 +step:5300 train loss:3.588503 +step:5301 train loss:3.658349 +step:5302 train loss:3.664840 +step:5303 train loss:3.524083 +step:5304 train loss:3.557990 +step:5305 train loss:3.537419 +step:5306 train loss:3.568813 +step:5307 train loss:3.572659 +step:5308 train loss:3.664647 +step:5309 train loss:3.616596 +step:5310 train loss:3.600781 +step:5311 train loss:3.671422 +step:5312 train loss:3.549244 +step:5313 train loss:3.640432 +step:5314 train loss:3.634929 +step:5315 train loss:3.595554 +step:5316 train loss:3.626480 +step:5317 train loss:3.644585 +step:5318 train loss:3.600420 +step:5319 train loss:3.625745 +step:5320 train loss:3.577001 +step:5321 train loss:3.700271 +step:5322 train loss:3.608967 +step:5323 train loss:3.610072 +step:5324 train loss:3.554275 +step:5325 train loss:3.637136 +step:5326 train loss:3.627613 +step:5327 train loss:3.519690 +step:5328 train loss:3.655135 +step:5329 train loss:3.620256 +step:5330 train loss:3.616236 +step:5331 train loss:3.668375 +step:5332 train loss:3.591979 +step:5333 train loss:3.653455 +step:5334 train loss:3.629400 +step:5335 train loss:3.686853 +step:5336 train loss:3.724658 +step:5337 train loss:3.558304 +step:5338 train loss:3.565541 +step:5339 train loss:3.589762 +step:5340 train loss:3.614047 +step:5341 train loss:3.628396 +step:5342 train loss:3.527588 +step:5343 train loss:3.687158 +step:5344 train loss:3.570935 +step:5345 train loss:3.570741 +step:5346 train loss:3.572021 +step:5347 train loss:3.594916 +step:5348 train loss:3.639562 +step:5349 train loss:3.576233 +step:5350 train loss:3.618712 +step:5351 train loss:3.692858 +step:5352 train loss:3.731287 +step:5353 train loss:3.641678 +step:5354 train loss:3.611386 +step:5355 train loss:3.576869 +step:5356 train loss:3.602455 +step:5357 train loss:3.579106 +step:5358 train loss:3.602026 +step:5359 train loss:3.615449 +step:5360 train loss:3.585571 +step:5361 train loss:3.588669 +step:5362 train loss:3.572277 +step:5363 train loss:3.569315 +step:5364 train loss:3.570092 +step:5365 train loss:3.603856 +step:5366 train loss:3.633497 +step:5367 train loss:3.564702 +step:5368 train loss:3.628247 +step:5369 train loss:3.646106 +step:5370 train loss:3.544575 +step:5371 train loss:3.600756 +step:5372 train loss:3.618361 +step:5373 train loss:3.661232 +step:5374 train loss:3.542262 +step:5375 train loss:3.588495 +step:5376 train loss:3.655805 +step:5377 train loss:3.591681 +step:5378 train loss:3.566997 +step:5379 train loss:3.570546 +step:5380 train loss:3.604499 +step:5381 train loss:3.646268 +step:5382 train loss:3.545444 +step:5383 train loss:3.619412 +step:5384 train loss:3.628906 +step:5385 train loss:3.623817 +step:5386 train loss:3.608444 +step:5387 train loss:3.613839 +step:5388 train loss:3.626192 +step:5389 train loss:3.555533 +step:5390 train loss:3.585511 +step:5391 train loss:3.524980 +step:5392 train loss:3.589493 +step:5393 train loss:3.584126 +step:5394 train loss:3.574218 +step:5395 train loss:3.647993 +step:5396 train 
loss:3.612271 +step:5397 train loss:3.633183 +step:5398 train loss:3.629584 +step:5399 train loss:3.661673 +step:5400 train loss:3.666781 +step:5401 train loss:3.628482 +step:5402 train loss:3.731402 +step:5403 train loss:3.640557 +step:5404 train loss:3.612962 +step:5405 train loss:3.683957 +step:5406 train loss:3.643605 +step:5407 train loss:3.573062 +step:5408 train loss:3.716512 +step:5409 train loss:3.557493 +step:5410 train loss:3.622985 +step:5411 train loss:3.608349 +step:5412 train loss:3.581576 +step:5413 train loss:3.634631 +step:5414 train loss:3.609754 +step:5415 train loss:3.589803 +step:5416 train loss:3.582488 +step:5417 train loss:3.652138 +step:5418 train loss:3.668133 +step:5419 train loss:3.569788 +step:5420 train loss:3.632497 +step:5421 train loss:3.601446 +step:5422 train loss:3.643523 +step:5423 train loss:3.623183 +step:5424 train loss:3.528313 +step:5425 train loss:3.593187 +step:5426 train loss:3.679834 +step:5427 train loss:3.571337 +step:5428 train loss:3.607791 +step:5429 train loss:3.536730 +step:5430 train loss:3.576520 +step:5431 train loss:3.637909 +step:5432 train loss:3.614480 +step:5433 train loss:3.620124 +step:5434 train loss:3.568477 +step:5435 train loss:3.565912 +step:5436 train loss:3.571201 +step:5437 train loss:3.606124 +step:5438 train loss:3.587607 +step:5439 train loss:3.592433 +step:5440 train loss:3.636086 +step:5441 train loss:3.656615 +step:5442 train loss:3.576383 +step:5443 train loss:3.577619 +step:5444 train loss:3.523047 +step:5445 train loss:3.609540 +step:5446 train loss:3.578770 +step:5447 train loss:3.615705 +step:5448 train loss:3.673671 +step:5449 train loss:3.561864 +step:5450 train loss:3.596931 +step:5451 train loss:3.590706 +step:5452 train loss:3.604638 +step:5453 train loss:3.662637 +step:5454 train loss:3.587219 +step:5455 train loss:3.572910 +step:5456 train loss:3.713249 +step:5457 train loss:3.590812 +step:5458 train loss:3.627960 +step:5459 train loss:3.571422 +step:5460 train loss:3.585759 +step:5461 train loss:3.591660 +step:5462 train loss:3.592119 +step:5463 train loss:3.602352 +step:5464 train loss:3.604785 +step:5465 train loss:3.547755 +step:5466 train loss:3.623134 +step:5467 train loss:3.607942 +step:5468 train loss:3.614843 +step:5469 train loss:3.707765 +step:5470 train loss:3.601263 +step:5471 train loss:3.674274 +step:5472 train loss:3.621014 +step:5473 train loss:3.523218 +step:5474 train loss:3.859509 +step:5475 train loss:3.534549 +step:5476 train loss:3.610711 +step:5477 train loss:3.613411 +step:5478 train loss:3.611478 +step:5479 train loss:3.754671 +step:5480 train loss:3.599309 +step:5481 train loss:3.662117 +step:5482 train loss:3.575620 +step:5483 train loss:3.611303 +step:5484 train loss:3.650609 +step:5485 train loss:3.567159 +step:5486 train loss:3.611586 +step:5487 train loss:3.612919 +step:5488 train loss:3.525517 +step:5489 train loss:3.630577 +step:5490 train loss:3.576413 +step:5491 train loss:3.676806 +step:5492 train loss:3.607333 +step:5493 train loss:3.534916 +step:5494 train loss:3.591589 +step:5495 train loss:3.568806 +step:5496 train loss:3.566851 +step:5497 train loss:3.685725 +step:5498 train loss:3.552298 +step:5499 train loss:3.690568 +step:5500 validation loss:3.531385 total_sharp:3.8270e-03 L1_sharp:4.1925e-02 L2_sharp:1.6392e-02 L3_sharp:1.9367e-02 L4_sharp:1.4297e-02 L5_sharp:1.6634e-02 L6_sharp:2.1886e-02 L7_sharp:2.2794e-02 L8_sharp:2.4993e-02 L9_sharp:1.8922e-02 L10_sharp:1.3515e-02 L11_sharp:1.3633e-02 L12_sharp:1.8538e-02 total_fnorm:1.3462e+00 
total_l1_linf:8.0831e+03 total_spectral:1.3462e+00 L1_fnorm:6.1298e-02 L2_fnorm:5.8953e-02 L3_fnorm:5.8668e-02 L4_fnorm:6.0002e-02 L5_fnorm:6.0894e-02 L6_fnorm:6.1368e-02 L7_fnorm:6.1266e-02 L8_fnorm:6.1245e-02 L9_fnorm:6.1342e-02 L10_fnorm:6.1375e-02 L11_fnorm:6.1363e-02 L12_fnorm:6.1387e-02 L1_l1linf:3.2426e-01 L2_l1linf:3.4226e-01 L3_l1linf:3.4896e-01 L4_l1linf:3.6429e-01 L5_l1linf:3.4193e-01 L6_l1linf:3.4975e-01 L7_l1linf:3.1848e-01 L8_l1linf:3.0666e-01 L9_l1linf:3.4311e-01 L10_l1linf:3.6788e-01 L11_l1linf:3.8472e-01 L12_l1linf:3.7911e-01 L1_spectral:7.2702e-03 L2_spectral:7.7814e-03 L3_spectral:7.8368e-03 L4_spectral:8.1543e-03 L5_spectral:7.6507e-03 L6_spectral:7.8478e-03 L7_spectral:7.1675e-03 L8_spectral:6.8740e-03 L9_spectral:7.6918e-03 L10_spectral:8.2098e-03 L11_spectral:8.6324e-03 L12_spectral:8.4781e-03 ip_v_neg_g:-6.5909e-04 cos_v_neg_g:-9.0732e-05 v_norm:1.3462e+00 g_norm:5.3961e+00 hv_norm:7.9959e-01 cos_v_hv:6.4431e-03 hg_norm:9.8450e+02 cos_g_hg:6.9004e-01 v_par:2.2989e-05 v_perp:1.3462e+00 L1_cos_v_neg_g:2.0415e-03 L1_v_norm:6.1298e-02 L2_cos_v_neg_g:1.9045e-03 L2_v_norm:5.8953e-02 L3_cos_v_neg_g:8.7834e-04 L3_v_norm:5.8668e-02 L4_cos_v_neg_g:8.8093e-04 L4_v_norm:6.0002e-02 L5_cos_v_neg_g:2.2412e-03 L5_v_norm:6.0894e-02 L6_cos_v_neg_g:-4.9570e-04 L6_v_norm:6.1368e-02 L7_cos_v_neg_g:-1.6233e-03 L7_v_norm:6.1266e-02 L8_cos_v_neg_g:-1.8084e-03 L8_v_norm:6.1245e-02 L9_cos_v_neg_g:-2.7747e-03 L9_v_norm:6.1342e-02 L10_cos_v_neg_g:-1.3748e-03 L10_v_norm:6.1375e-02 L11_cos_v_neg_g:-9.7452e-04 L11_v_norm:6.1363e-02 L12_cos_v_neg_g:-1.3886e-03 L12_v_norm:6.1387e-02 +step:5500 train loss:3.606231 +step:5501 train loss:3.677482 +step:5502 train loss:3.625484 +step:5503 train loss:3.591340 +step:5504 train loss:3.637591 +step:5505 train loss:3.600773 +step:5506 train loss:3.642101 +step:5507 train loss:3.629642 +step:5508 train loss:3.652483 +step:5509 train loss:3.662679 +step:5510 train loss:3.637456 +step:5511 train loss:3.628635 +step:5512 train loss:3.751745 +step:5513 train loss:3.552013 +step:5514 train loss:3.611343 +step:5515 train loss:3.639879 +step:5516 train loss:3.662618 +step:5517 train loss:3.621813 +step:5518 train loss:3.648215 +step:5519 train loss:3.682490 +step:5520 train loss:3.589811 +step:5521 train loss:3.602251 +step:5522 train loss:3.567923 +step:5523 train loss:3.614655 +step:5524 train loss:3.661644 +step:5525 train loss:3.571022 +step:5526 train loss:3.582362 +step:5527 train loss:3.605408 +step:5528 train loss:3.711428 +step:5529 train loss:3.669946 +step:5530 train loss:3.640089 +step:5531 train loss:3.576221 +step:5532 train loss:3.601390 +step:5533 train loss:3.635771 +step:5534 train loss:3.549912 +step:5535 train loss:3.602373 +step:5536 train loss:3.539585 +step:5537 train loss:3.586015 +step:5538 train loss:3.579820 +step:5539 train loss:3.527660 +step:5540 train loss:3.749709 +step:5541 train loss:3.558550 +step:5542 train loss:3.608519 +step:5543 train loss:3.598509 +step:5544 train loss:3.583251 +step:5545 train loss:3.579027 +step:5546 train loss:3.616462 +step:5547 train loss:3.546828 +step:5548 train loss:3.589866 +step:5549 train loss:3.594404 +step:5550 train loss:3.615530 +step:5551 train loss:3.623466 +step:5552 train loss:3.577663 +step:5553 train loss:3.606158 +step:5554 train loss:3.580307 +step:5555 train loss:3.587551 +step:5556 train loss:3.603733 +step:5557 train loss:3.669713 +step:5558 train loss:3.590386 +step:5559 train loss:3.595538 +step:5560 train loss:3.587336 +step:5561 train loss:3.624985 +step:5562 train loss:3.579398 
+step:5563 train loss:3.558406 +step:5564 train loss:3.594579 +step:5565 train loss:3.661334 +step:5566 train loss:3.563299 +step:5567 train loss:3.682533 +step:5568 train loss:3.803258 +step:5569 train loss:3.590833 +step:5570 train loss:3.520741 +step:5571 train loss:3.610121 +step:5572 train loss:3.551198 +step:5573 train loss:3.539690 +step:5574 train loss:3.507838 +step:5575 train loss:3.604704 +step:5576 train loss:3.588785 +step:5577 train loss:3.595803 +step:5578 train loss:3.623236 +step:5579 train loss:3.580651 +step:5580 train loss:3.605005 +step:5581 train loss:3.625714 +step:5582 train loss:3.604107 +step:5583 train loss:3.616693 +step:5584 train loss:3.734349 +step:5585 train loss:3.640013 +step:5586 train loss:3.572344 +step:5587 train loss:3.605870 +step:5588 train loss:3.623850 +step:5589 train loss:3.621975 +step:5590 train loss:3.683068 +step:5591 train loss:3.549528 +step:5592 train loss:3.722615 +step:5593 train loss:3.599854 +step:5594 train loss:3.605674 +step:5595 train loss:3.598066 +step:5596 train loss:3.546239 +step:5597 train loss:3.565843 +step:5598 train loss:3.570599 +step:5599 train loss:3.576653 +step:5600 train loss:3.618439 +step:5601 train loss:3.641575 +step:5602 train loss:3.577078 +step:5603 train loss:3.618720 +step:5604 train loss:3.615149 +step:5605 train loss:3.586948 +step:5606 train loss:3.591757 +step:5607 train loss:3.621857 +step:5608 train loss:3.565605 +step:5609 train loss:3.614500 +step:5610 train loss:3.573072 +step:5611 train loss:3.613165 +step:5612 train loss:3.642705 +step:5613 train loss:3.604204 +step:5614 train loss:3.567813 +step:5615 train loss:3.669401 +step:5616 train loss:3.568001 +step:5617 train loss:3.655555 +step:5618 train loss:3.639919 +step:5619 train loss:3.594095 +step:5620 train loss:3.593628 +step:5621 train loss:3.669832 +step:5622 train loss:3.552063 +step:5623 train loss:3.590263 +step:5624 train loss:3.579422 +step:5625 train loss:3.614753 +step:5626 train loss:3.606373 +step:5627 train loss:3.580878 +step:5628 train loss:3.620311 +step:5629 train loss:3.599888 +step:5630 train loss:3.530153 +step:5631 train loss:3.572240 +step:5632 train loss:3.615441 +step:5633 train loss:3.608201 +step:5634 train loss:3.560781 +step:5635 train loss:3.600904 +step:5636 train loss:3.577095 +step:5637 train loss:3.718028 +step:5638 train loss:3.625751 +step:5639 train loss:3.604506 +step:5640 train loss:3.607519 +step:5641 train loss:3.648374 +step:5642 train loss:3.581634 +step:5643 train loss:3.598404 +step:5644 train loss:3.681316 +step:5645 train loss:3.637597 +step:5646 train loss:3.635648 +step:5647 train loss:3.624183 +step:5648 train loss:3.615303 +step:5649 train loss:3.526942 +step:5650 train loss:3.532076 +step:5651 train loss:3.607566 +step:5652 train loss:3.608617 +step:5653 train loss:3.575965 +step:5654 train loss:3.705992 +step:5655 train loss:3.565959 +step:5656 train loss:3.590178 +step:5657 train loss:3.659308 +step:5658 train loss:3.559919 +step:5659 train loss:3.597771 +step:5660 train loss:3.647162 +step:5661 train loss:3.587241 +step:5662 train loss:3.625382 +step:5663 train loss:3.516435 +step:5664 train loss:3.490026 +step:5665 train loss:3.611908 +step:5666 train loss:3.614892 +step:5667 train loss:3.649195 +step:5668 train loss:3.578321 +step:5669 train loss:3.593189 +step:5670 train loss:3.593551 +step:5671 train loss:3.577095 +step:5672 train loss:3.629116 +step:5673 train loss:3.595429 +step:5674 train loss:3.668821 +step:5675 train loss:3.581686 +step:5676 train loss:3.729556 +step:5677 train 
loss:3.625863 +step:5678 train loss:3.600702 +step:5679 train loss:3.590223 +step:5680 train loss:3.626119 +step:5681 train loss:3.592900 +step:5682 train loss:3.606378 +step:5683 train loss:3.565491 +step:5684 train loss:3.574609 +step:5685 train loss:3.618739 +step:5686 train loss:3.634940 +step:5687 train loss:3.582762 +step:5688 train loss:3.672072 +step:5689 train loss:3.575990 +step:5690 train loss:3.729277 +step:5691 train loss:3.558346 +step:5692 train loss:3.547124 +step:5693 train loss:3.553200 +step:5694 train loss:3.573251 +step:5695 train loss:3.588349 +step:5696 train loss:3.639353 +step:5697 train loss:3.566349 +step:5698 train loss:3.585136 +step:5699 train loss:3.598508 +step:5700 train loss:3.594021 +step:5701 train loss:3.587296 +step:5702 train loss:3.657807 +step:5703 train loss:3.555823 +step:5704 train loss:3.599335 +step:5705 train loss:3.608389 +step:5706 train loss:3.632222 +step:5707 train loss:3.548152 +step:5708 train loss:3.632612 +step:5709 train loss:3.638827 +step:5710 train loss:3.627395 +step:5711 train loss:3.650381 +step:5712 train loss:3.632057 +step:5713 train loss:3.557369 +step:5714 train loss:3.641213 +step:5715 train loss:3.599962 +step:5716 train loss:3.603610 +step:5717 train loss:3.629798 +step:5718 train loss:3.574102 +step:5719 train loss:3.646310 +step:5720 train loss:3.617182 +step:5721 train loss:3.549488 +step:5722 train loss:3.557759 +step:5723 train loss:3.638429 +step:5724 train loss:3.557457 +step:5725 train loss:3.630984 +step:5726 train loss:3.623623 +step:5727 train loss:3.580695 +step:5728 train loss:3.584664 +step:5729 train loss:3.585677 +step:5730 train loss:3.655590 +step:5731 train loss:3.526102 +step:5732 train loss:3.585831 +step:5733 train loss:3.573714 +step:5734 train loss:3.591082 +step:5735 train loss:3.581351 +step:5736 train loss:3.585193 +step:5737 train loss:3.606430 +step:5738 train loss:3.571023 +step:5739 train loss:3.582628 +step:5740 train loss:3.622431 +step:5741 train loss:3.598684 +step:5742 train loss:3.650272 +step:5743 train loss:3.617826 +step:5744 train loss:3.574569 +step:5745 train loss:3.580065 +step:5746 train loss:3.607810 +step:5747 train loss:3.594340 +step:5748 train loss:3.640989 +step:5749 train loss:3.597814 +step:5750 validation loss:3.525610 +step:5750 train loss:3.604460 +step:5751 train loss:3.618880 +step:5752 train loss:3.605528 +step:5753 train loss:3.574805 +step:5754 train loss:3.583320 +step:5755 train loss:3.599795 +step:5756 train loss:3.587096 +step:5757 train loss:3.649492 +step:5758 train loss:3.583555 +step:5759 train loss:3.546273 +step:5760 train loss:3.627924 +step:5761 train loss:3.622279 +step:5762 train loss:3.582297 +step:5763 train loss:3.608158 +step:5764 train loss:3.571602 +step:5765 train loss:3.691242 +step:5766 train loss:3.597465 +step:5767 train loss:3.632625 +step:5768 train loss:3.571014 +step:5769 train loss:3.693004 +step:5770 train loss:3.614551 +step:5771 train loss:3.639598 +step:5772 train loss:3.593445 +step:5773 train loss:3.573531 +step:5774 train loss:3.579710 +step:5775 train loss:3.651337 +step:5776 train loss:3.637966 +step:5777 train loss:3.554267 +step:5778 train loss:3.638574 +step:5779 train loss:3.601955 +step:5780 train loss:3.571414 +step:5781 train loss:3.638100 +step:5782 train loss:3.596704 +step:5783 train loss:3.555785 +step:5784 train loss:3.660136 +step:5785 train loss:3.648753 +step:5786 train loss:3.558508 +step:5787 train loss:3.606864 +step:5788 train loss:3.615049 +step:5789 train loss:3.559997 +step:5790 train loss:3.658870 
+step:5791 train loss:3.588076 +step:5792 train loss:3.859359 +step:5793 train loss:3.631248 +step:5794 train loss:3.649590 +step:5795 train loss:3.642071 +step:5796 train loss:3.624172 +step:5797 train loss:3.604625 +step:5798 train loss:3.603606 +step:5799 train loss:3.573544 +step:5800 train loss:3.732674 +step:5801 train loss:3.607136 +step:5802 train loss:3.593684 +step:5803 train loss:3.607019 +step:5804 train loss:3.626501 +step:5805 train loss:3.587051 +step:5806 train loss:3.628940 +step:5807 train loss:3.551923 +step:5808 train loss:3.582785 +step:5809 train loss:3.595706 +step:5810 train loss:3.568635 +step:5811 train loss:3.583601 +step:5812 train loss:3.563280 +step:5813 train loss:3.576235 +step:5814 train loss:3.567911 +step:5815 train loss:3.573423 +step:5816 train loss:3.631756 +step:5817 train loss:3.646481 +step:5818 train loss:3.617619 +step:5819 train loss:3.669209 +step:5820 train loss:3.614365 +step:5821 train loss:3.601603 +step:5822 train loss:3.619040 +step:5823 train loss:3.621011 +step:5824 train loss:3.571907 +step:5825 train loss:3.665850 +step:5826 train loss:3.580080 +step:5827 train loss:3.546179 +step:5828 train loss:3.531007 +step:5829 train loss:3.596640 +step:5830 train loss:3.568868 +step:5831 train loss:3.542511 +step:5832 train loss:3.654233 +step:5833 train loss:3.632812 +step:5834 train loss:3.617880 +step:5835 train loss:3.567852 +step:5836 train loss:3.530708 +step:5837 train loss:3.651887 +step:5838 train loss:3.633615 +step:5839 train loss:3.609337 +step:5840 train loss:3.690536 +step:5841 train loss:3.615099 +step:5842 train loss:3.626625 +step:5843 train loss:3.572075 +step:5844 train loss:3.642897 +step:5845 train loss:3.549664 +step:5846 train loss:3.597822 +step:5847 train loss:3.624417 +step:5848 train loss:3.692971 +step:5849 train loss:3.586731 +step:5850 train loss:3.615720 +step:5851 train loss:3.581099 +step:5852 train loss:3.668405 +step:5853 train loss:3.761463 +step:5854 train loss:3.550652 +step:5855 train loss:3.609957 +step:5856 train loss:3.582958 +step:5857 train loss:3.593744 +step:5858 train loss:3.566614 +step:5859 train loss:3.571764 +step:5860 train loss:3.674421 +step:5861 train loss:3.559082 +step:5862 train loss:3.672077 +step:5863 train loss:3.612081 +step:5864 train loss:3.598922 +step:5865 train loss:3.602772 +step:5866 train loss:3.596376 +step:5867 train loss:3.677649 +step:5868 train loss:3.599415 +step:5869 train loss:3.624879 +step:5870 train loss:3.600649 +step:5871 train loss:3.583635 +step:5872 train loss:3.611644 +step:5873 train loss:3.589635 +step:5874 train loss:3.670773 +step:5875 train loss:3.600228 +step:5876 train loss:3.580588 +step:5877 train loss:3.589102 +step:5878 train loss:3.588408 +step:5879 train loss:3.560277 +step:5880 train loss:3.757981 +step:5881 train loss:3.597006 +step:5882 train loss:3.567701 +step:5883 train loss:3.569280 +step:5884 train loss:3.587750 +step:5885 train loss:3.584296 +step:5886 train loss:3.603014 +step:5887 train loss:3.604346 +step:5888 train loss:3.587264 +step:5889 train loss:3.565496 +step:5890 train loss:3.610438 +step:5891 train loss:3.556675 +step:5892 train loss:3.634860 +step:5893 train loss:3.559620 +step:5894 train loss:3.547567 +step:5895 train loss:3.554714 +step:5896 train loss:3.564831 +step:5897 train loss:3.633642 +step:5898 train loss:3.849263 +step:5899 train loss:3.582631 +step:5900 train loss:3.632165 +step:5901 train loss:3.583887 +step:5902 train loss:3.595453 +step:5903 train loss:3.586162 +step:5904 train loss:3.615643 +step:5905 train 
loss:3.722178 +step:5906 train loss:3.663102 +step:5907 train loss:3.604568 +step:5908 train loss:3.582837 +step:5909 train loss:3.574517 +step:5910 train loss:3.561510 +step:5911 train loss:3.577573 +step:5912 train loss:3.592890 +step:5913 train loss:3.615498 +step:5914 train loss:3.596090 +step:5915 train loss:3.719779 +step:5916 train loss:3.599983 +step:5917 train loss:3.573503 +step:5918 train loss:3.569194 +step:5919 train loss:3.595766 +step:5920 train loss:3.594887 +step:5921 train loss:3.567511 +step:5922 train loss:3.623211 +step:5923 train loss:3.617851 +step:5924 train loss:3.572786 +step:5925 train loss:3.695326 +step:5926 train loss:3.581170 +step:5927 train loss:3.559752 +step:5928 train loss:3.597482 +step:5929 train loss:3.615659 +step:5930 train loss:3.565630 +step:5931 train loss:3.549146 +step:5932 train loss:3.589076 +step:5933 train loss:3.643527 +step:5934 train loss:3.556585 +step:5935 train loss:3.584604 +step:5936 train loss:3.573334 +step:5937 train loss:3.553653 +step:5938 train loss:3.571822 +step:5939 train loss:3.549514 +step:5940 train loss:3.632657 +step:5941 train loss:3.566610 +step:5942 train loss:3.583159 +step:5943 train loss:3.587222 +step:5944 train loss:3.638920 +step:5945 train loss:3.571037 +step:5946 train loss:3.551686 +step:5947 train loss:3.565291 +step:5948 train loss:3.602880 +step:5949 train loss:3.647665 +step:5950 train loss:3.608169 +step:5951 train loss:3.610554 +step:5952 train loss:3.535091 +step:5953 train loss:3.574411 +step:5954 train loss:3.585134 +step:5955 train loss:3.591466 +step:5956 train loss:3.568740 +step:5957 train loss:3.536876 +step:5958 train loss:3.610539 +step:5959 train loss:3.570558 +step:5960 train loss:3.543561 +step:5961 train loss:3.568939 +step:5962 train loss:3.601101 +step:5963 train loss:3.636792 +step:5964 train loss:3.591512 +step:5965 train loss:3.609332 +step:5966 train loss:3.605509 +step:5967 train loss:3.569897 +step:5968 train loss:3.644426 +step:5969 train loss:3.586787 +step:5970 train loss:3.604664 +step:5971 train loss:3.553295 +step:5972 train loss:3.582391 +step:5973 train loss:3.571348 +step:5974 train loss:3.595313 +step:5975 train loss:3.566305 +step:5976 train loss:3.605340 +step:5977 train loss:3.564854 +step:5978 train loss:3.549112 +step:5979 train loss:3.583916 +step:5980 train loss:3.653307 +step:5981 train loss:3.547725 +step:5982 train loss:3.558684 +step:5983 train loss:3.627403 +step:5984 train loss:3.569843 +step:5985 train loss:3.612446 +step:5986 train loss:3.588587 +step:5987 train loss:3.572537 +step:5988 train loss:3.582789 +step:5989 train loss:3.600839 +step:5990 train loss:3.530703 +step:5991 train loss:3.595563 +step:5992 train loss:3.627941 +step:5993 train loss:3.578748 +step:5994 train loss:3.599317 +step:5995 train loss:3.491653 +step:5996 train loss:3.655169 +step:5997 train loss:3.637578 +step:5998 train loss:3.515378 +step:5999 train loss:3.540507 +step:6000 validation loss:3.512525 total_sharp:5.3467e-03 L1_sharp:6.3240e-02 L2_sharp:1.5559e-02 L3_sharp:3.6750e-02 L4_sharp:1.9377e-02 L5_sharp:2.3757e-02 L6_sharp:2.5189e-02 L7_sharp:3.1890e-02 L8_sharp:3.5969e-02 L9_sharp:2.6952e-02 L10_sharp:1.9797e-02 L11_sharp:2.0239e-02 L12_sharp:3.7403e-02 total_fnorm:1.3440e+00 total_l1_linf:8.0675e+03 total_spectral:1.3440e+00 L1_fnorm:6.1142e-02 L2_fnorm:5.9090e-02 L3_fnorm:5.8463e-02 L4_fnorm:5.9872e-02 L5_fnorm:6.0835e-02 L6_fnorm:6.1241e-02 L7_fnorm:6.1275e-02 L8_fnorm:6.1213e-02 L9_fnorm:6.1227e-02 L10_fnorm:6.1327e-02 L11_fnorm:6.1302e-02 L12_fnorm:6.1251e-02 
L1_l1linf:3.0513e-01 L2_l1linf:3.5084e-01 L3_l1linf:3.5853e-01 L4_l1linf:3.4556e-01 L5_l1linf:3.2428e-01 L6_l1linf:3.1132e-01 L7_l1linf:3.1352e-01 L8_l1linf:3.0404e-01 L9_l1linf:3.2478e-01 L10_l1linf:3.3561e-01 L11_l1linf:3.5973e-01 L12_l1linf:3.5545e-01 L1_spectral:6.8874e-03 L2_spectral:7.9334e-03 L3_spectral:8.0652e-03 L4_spectral:7.7431e-03 L5_spectral:7.3215e-03 L6_spectral:7.0021e-03 L7_spectral:7.0602e-03 L8_spectral:6.8446e-03 L9_spectral:7.2825e-03 L10_spectral:7.5405e-03 L11_spectral:8.0466e-03 L12_spectral:8.0063e-03 ip_v_neg_g:4.3389e-03 cos_v_neg_g:1.0173e-03 v_norm:1.3440e+00 g_norm:3.1734e+00 hv_norm:8.2833e-01 cos_v_hv:8.6753e-03 hg_norm:1.5199e+02 cos_g_hg:4.2354e-01 v_par:3.0151e-05 v_perp:1.3440e+00 L1_cos_v_neg_g:1.0327e-02 L1_v_norm:6.1142e-02 L2_cos_v_neg_g:7.3829e-03 L2_v_norm:5.9090e-02 L3_cos_v_neg_g:5.3942e-03 L3_v_norm:5.8463e-02 L4_cos_v_neg_g:6.1147e-03 L4_v_norm:5.9872e-02 L5_cos_v_neg_g:5.7528e-03 L5_v_norm:6.0835e-02 L6_cos_v_neg_g:6.3613e-03 L6_v_norm:6.1241e-02 L7_cos_v_neg_g:5.2715e-03 L7_v_norm:6.1275e-02 L8_cos_v_neg_g:5.8469e-03 L8_v_norm:6.1213e-02 L9_cos_v_neg_g:6.6803e-03 L9_v_norm:6.1227e-02 L10_cos_v_neg_g:5.6180e-03 L10_v_norm:6.1327e-02 L11_cos_v_neg_g:4.9234e-03 L11_v_norm:6.1302e-02 L12_cos_v_neg_g:3.1693e-03 L12_v_norm:6.1251e-02 +step:6000 train loss:3.592008 +step:6001 train loss:3.555957 +step:6002 train loss:3.583044 +step:6003 train loss:3.605812 +step:6004 train loss:3.554909 +step:6005 train loss:3.627608 +step:6006 train loss:3.534725 +step:6007 train loss:3.557756 +step:6008 train loss:3.570940 +step:6009 train loss:3.608668 +step:6010 train loss:3.598162 +step:6011 train loss:3.586805 +step:6012 train loss:3.554765 +step:6013 train loss:3.615764 +step:6014 train loss:3.633201 +step:6015 train loss:3.631480 +step:6016 train loss:3.599573 +step:6017 train loss:3.607927 +step:6018 train loss:3.545563 +step:6019 train loss:3.581935 +step:6020 train loss:3.568169 +step:6021 train loss:3.495738 +step:6022 train loss:3.611524 +step:6023 train loss:3.545119 +step:6024 train loss:3.622551 +step:6025 train loss:3.589024 +step:6026 train loss:3.559572 +step:6027 train loss:3.601258 +step:6028 train loss:3.517367 +step:6029 train loss:3.631535 +step:6030 train loss:3.600937 +step:6031 train loss:3.573928 +step:6032 train loss:3.532444 +step:6033 train loss:3.587980 +step:6034 train loss:3.614192 +step:6035 train loss:3.532890 +step:6036 train loss:3.507138 +step:6037 train loss:3.618406 +step:6038 train loss:3.624478 +step:6039 train loss:3.607629 +step:6040 train loss:3.567961 +step:6041 train loss:3.547619 +step:6042 train loss:3.526776 +step:6043 train loss:3.588021 +step:6044 train loss:3.708006 +step:6045 train loss:3.549675 +step:6046 train loss:3.560731 +step:6047 train loss:3.595609 +step:6048 train loss:3.602949 +step:6049 train loss:3.583848 +step:6050 train loss:3.551227 +step:6051 train loss:3.604755 +step:6052 train loss:3.575885 +step:6053 train loss:3.692021 +step:6054 train loss:3.731712 +step:6055 train loss:3.546948 +step:6056 train loss:3.536084 +step:6057 train loss:3.571574 +step:6058 train loss:3.601182 +step:6059 train loss:3.603703 +step:6060 train loss:3.609085 +step:6061 train loss:3.626670 +step:6062 train loss:3.576310 +step:6063 train loss:3.591297 +step:6064 train loss:3.589239 +step:6065 train loss:3.587785 +step:6066 train loss:3.575071 +step:6067 train loss:3.614496 +step:6068 train loss:3.556437 +step:6069 train loss:3.510600 +step:6070 train loss:3.661782 +step:6071 train loss:3.603080 +step:6072 train 
loss:3.545707 +step:6073 train loss:3.585077 +step:6074 train loss:3.670886 +step:6075 train loss:3.590264 +step:6076 train loss:3.598692 +step:6077 train loss:3.600522 +step:6078 train loss:3.537973 +step:6079 train loss:3.566382 +step:6080 train loss:3.573274 +step:6081 train loss:3.610797 +step:6082 train loss:3.560203 +step:6083 train loss:3.573356 +step:6084 train loss:3.636964 +step:6085 train loss:3.633916 +step:6086 train loss:3.535031 +step:6087 train loss:3.580676 +step:6088 train loss:3.564354 +step:6089 train loss:3.623263 +step:6090 train loss:3.625486 +step:6091 train loss:3.574788 +step:6092 train loss:3.535201 +step:6093 train loss:3.596299 +step:6094 train loss:3.510181 +step:6095 train loss:3.676548 +step:6096 train loss:3.545687 +step:6097 train loss:3.623550 +step:6098 train loss:3.598126 +step:6099 train loss:3.655465 +step:6100 train loss:3.649312 +step:6101 train loss:3.583420 +step:6102 train loss:3.700840 +step:6103 train loss:3.586286 +step:6104 train loss:3.697137 +step:6105 train loss:3.628092 +step:6106 train loss:3.567554 +step:6107 train loss:3.631810 +step:6108 train loss:3.595416 +step:6109 train loss:3.667110 +step:6110 train loss:3.597102 +step:6111 train loss:3.631741 +step:6112 train loss:3.570126 +step:6113 train loss:3.597002 +step:6114 train loss:3.569829 +step:6115 train loss:3.631584 +step:6116 train loss:3.572696 +step:6117 train loss:3.627156 +step:6118 train loss:3.608396 +step:6119 train loss:3.617773 +step:6120 train loss:3.760424 +step:6121 train loss:3.597448 +step:6122 train loss:3.608166 +step:6123 train loss:3.589823 +step:6124 train loss:3.563746 +step:6125 train loss:3.553895 +step:6126 train loss:3.574670 +step:6127 train loss:3.562130 +step:6128 train loss:3.520941 +step:6129 train loss:3.759906 +step:6130 train loss:3.547592 +step:6131 train loss:3.523345 +step:6132 train loss:3.596072 +step:6133 train loss:3.560838 +step:6134 train loss:3.588154 +step:6135 train loss:3.671576 +step:6136 train loss:3.692779 +step:6137 train loss:3.554878 +step:6138 train loss:3.610337 +step:6139 train loss:3.589843 +step:6140 train loss:3.588234 +step:6141 train loss:3.548577 +step:6142 train loss:3.612479 +step:6143 train loss:3.578592 +step:6144 train loss:3.598431 +step:6145 train loss:3.843126 +step:6146 train loss:3.680116 +step:6147 train loss:3.764527 +step:6148 train loss:3.532832 +step:6149 train loss:3.659501 +step:6150 train loss:3.612247 +step:6151 train loss:3.565407 +step:6152 train loss:3.561520 +step:6153 train loss:3.630796 +step:6154 train loss:3.715983 +step:6155 train loss:3.584204 +step:6156 train loss:3.674133 +step:6157 train loss:3.607022 +step:6158 train loss:3.600385 +step:6159 train loss:3.566100 +step:6160 train loss:3.731230 +step:6161 train loss:3.582750 +step:6162 train loss:3.600865 +step:6163 train loss:3.631952 +step:6164 train loss:3.546721 +step:6165 train loss:3.613909 +step:6166 train loss:3.607327 +step:6167 train loss:3.625064 +step:6168 train loss:3.593686 +step:6169 train loss:3.595854 +step:6170 train loss:3.598538 +step:6171 train loss:3.567178 +step:6172 train loss:3.555003 +step:6173 train loss:3.606117 +step:6174 train loss:3.535111 +step:6175 train loss:3.546131 +step:6176 train loss:3.529047 +step:6177 train loss:3.625392 +step:6178 train loss:3.569419 +step:6179 train loss:3.579448 +step:6180 train loss:3.584481 +step:6181 train loss:3.620149 +step:6182 train loss:3.500866 +step:6183 train loss:3.509932 +step:6184 train loss:3.628574 +step:6185 train loss:3.583687 +step:6186 train loss:3.544874 
+step:6187 train loss:3.585037 +step:6188 train loss:3.553125 +step:6189 train loss:3.592114 +step:6190 train loss:3.552299 +step:6191 train loss:3.586194 +step:6192 train loss:3.550798 +step:6193 train loss:3.619633 +step:6194 train loss:3.609035 +step:6195 train loss:3.589172 +step:6196 train loss:3.604716 +step:6197 train loss:3.628623 +step:6198 train loss:3.541561 +step:6199 train loss:3.565957 +step:6200 train loss:3.606768 +step:6201 train loss:3.650165 +step:6202 train loss:3.649951 +step:6203 train loss:3.650442 +step:6204 train loss:3.634635 +step:6205 train loss:3.571526 +step:6206 train loss:3.560024 +step:6207 train loss:3.617079 +step:6208 train loss:3.644968 +step:6209 train loss:3.612948 +step:6210 train loss:3.644355 +step:6211 train loss:3.562329 +step:6212 train loss:3.553820 +step:6213 train loss:3.568788 +step:6214 train loss:3.544568 +step:6215 train loss:3.719097 +step:6216 train loss:3.587184 +step:6217 train loss:3.647005 +step:6218 train loss:3.620634 +step:6219 train loss:3.635861 +step:6220 train loss:3.591331 +step:6221 train loss:3.556453 +step:6222 train loss:3.799411 +step:6223 train loss:3.556855 +step:6224 train loss:3.592422 +step:6225 train loss:3.569375 +step:6226 train loss:3.581750 +step:6227 train loss:3.583763 +step:6228 train loss:3.578941 +step:6229 train loss:3.620005 +step:6230 train loss:3.573895 +step:6231 train loss:3.686875 +step:6232 train loss:3.529184 +step:6233 train loss:3.568758 +step:6234 train loss:3.577919 +step:6235 train loss:3.605700 +step:6236 train loss:3.541354 +step:6237 train loss:3.566278 +step:6238 train loss:3.587860 +step:6239 train loss:3.575289 +step:6240 train loss:3.595329 +step:6241 train loss:3.581171 +step:6242 train loss:3.577610 +step:6243 train loss:3.613079 +step:6244 train loss:3.768210 +step:6245 train loss:3.565073 +step:6246 train loss:3.553930 +step:6247 train loss:3.547317 +step:6248 train loss:3.552072 +step:6249 train loss:3.491559 +step:6250 validation loss:3.503967 +step:6250 train loss:3.530320 +step:6251 train loss:3.547073 +step:6252 train loss:3.591553 +step:6253 train loss:3.601110 +step:6254 train loss:3.590822 +step:6255 train loss:3.555250 +step:6256 train loss:3.606442 +step:6257 train loss:3.605188 +step:6258 train loss:3.586103 +step:6259 train loss:3.591343 +step:6260 train loss:3.619102 +step:6261 train loss:3.641012 +step:6262 train loss:3.534153 +step:6263 train loss:3.566165 +step:6264 train loss:3.578622 +step:6265 train loss:3.564452 +step:6266 train loss:3.768840 +step:6267 train loss:3.571824 +step:6268 train loss:3.660000 +step:6269 train loss:3.534080 +step:6270 train loss:3.545775 +step:6271 train loss:3.595756 +step:6272 train loss:3.584586 +step:6273 train loss:3.783242 +step:6274 train loss:3.563288 +step:6275 train loss:3.595936 +step:6276 train loss:3.567712 +step:6277 train loss:3.550650 +step:6278 train loss:3.535790 +step:6279 train loss:3.590819 +step:6280 train loss:3.597522 +step:6281 train loss:3.530665 +step:6282 train loss:3.541489 +step:6283 train loss:3.631123 +step:6284 train loss:3.600049 +step:6285 train loss:3.598736 +step:6286 train loss:3.545915 +step:6287 train loss:3.573714 +step:6288 train loss:3.673685 +step:6289 train loss:3.536363 +step:6290 train loss:3.532465 +step:6291 train loss:3.565519 +step:6292 train loss:3.582857 +step:6293 train loss:3.572119 +step:6294 train loss:3.556229 +step:6295 train loss:3.579066 +step:6296 train loss:3.541781 +step:6297 train loss:3.669180 +step:6298 train loss:3.618213 +step:6299 train loss:3.509812 +step:6300 
train loss:3.591990 +step:6301 train loss:3.619300 +step:6302 train loss:3.602505 +step:6303 train loss:3.572023 +step:6304 train loss:3.586225 +step:6305 train loss:3.558541 +step:6306 train loss:3.569924 +step:6307 train loss:3.578646 +step:6308 train loss:3.553463 +step:6309 train loss:3.550024 +step:6310 train loss:3.606705 +step:6311 train loss:3.560669 +step:6312 train loss:3.599913 +step:6313 train loss:3.531327 +step:6314 train loss:3.555243 +step:6315 train loss:3.609343 +step:6316 train loss:3.531495 +step:6317 train loss:3.526441 +step:6318 train loss:3.638889 +step:6319 train loss:3.566898 +step:6320 train loss:3.583714 +step:6321 train loss:3.568935 +step:6322 train loss:3.571281 +step:6323 train loss:3.505754 +step:6324 train loss:3.510062 +step:6325 train loss:3.607658 +step:6326 train loss:3.526265 +step:6327 train loss:3.601435 +step:6328 train loss:3.581340 +step:6329 train loss:3.500769 +step:6330 train loss:3.529297 +step:6331 train loss:3.548117 +step:6332 train loss:3.679178 +step:6333 train loss:3.559711 +step:6334 train loss:3.535931 +step:6335 train loss:3.506589 +step:6336 train loss:3.539223 +step:6337 train loss:3.563890 +step:6338 train loss:3.518188 +step:6339 train loss:3.564245 +step:6340 train loss:3.538428 +step:6341 train loss:3.556944 +step:6342 train loss:3.554013 +step:6343 train loss:3.652269 +step:6344 train loss:3.504253 +step:6345 train loss:3.519496 +step:6346 train loss:3.598051 +step:6347 train loss:3.472867 +step:6348 train loss:3.569269 +step:6349 train loss:3.543813 +step:6350 train loss:3.518408 +step:6351 train loss:3.517623 +step:6352 train loss:3.535170 +step:6353 train loss:3.554971 +step:6354 train loss:3.568298 +step:6355 train loss:3.573927 +step:6356 train loss:3.588373 +step:6357 train loss:3.443726 +step:6358 train loss:3.534434 +step:6359 train loss:3.588692 +step:6360 train loss:3.502182 +step:6361 train loss:3.503242 +step:6362 train loss:3.542537 +step:6363 train loss:3.524167 +step:6364 train loss:3.509103 +step:6365 train loss:3.579042 +step:6366 train loss:3.592537 +step:6367 train loss:3.519796 +step:6368 train loss:3.563506 +step:6369 train loss:3.529940 +step:6370 train loss:3.580775 +step:6371 train loss:3.499176 +step:6372 train loss:3.527744 +step:6373 train loss:3.554428 +step:6374 train loss:3.584229 +step:6375 train loss:3.543639 +step:6376 train loss:3.568489 +step:6377 train loss:3.566393 +step:6378 train loss:3.516498 +step:6379 train loss:3.557972 +step:6380 train loss:3.598921 +step:6381 train loss:3.564970 +step:6382 train loss:3.518690 +step:6383 train loss:3.585483 +step:6384 train loss:3.556244 +step:6385 train loss:3.536968 +step:6386 train loss:3.568787 +step:6387 train loss:3.549496 +step:6388 train loss:3.591269 +step:6389 train loss:3.596612 +step:6390 train loss:3.547891 +step:6391 train loss:3.535163 +step:6392 train loss:3.519704 +step:6393 train loss:3.574862 +step:6394 train loss:3.561946 +step:6395 train loss:3.738781 +step:6396 train loss:3.563311 +step:6397 train loss:3.506876 +step:6398 train loss:3.577579 +step:6399 train loss:3.518805 +step:6400 train loss:3.593433 +step:6401 train loss:3.627296 +step:6402 train loss:3.560741 +step:6403 train loss:3.552443 +step:6404 train loss:3.531183 +step:6405 train loss:3.557059 +step:6406 train loss:3.564666 +step:6407 train loss:3.623227 +step:6408 train loss:3.511490 +step:6409 train loss:3.500954 +step:6410 train loss:3.630667 +step:6411 train loss:3.559505 +step:6412 train loss:3.565045 +step:6413 train loss:3.570989 +step:6414 train loss:3.517351 
+step:6415 train loss:3.578596 +step:6416 train loss:3.547866 +step:6417 train loss:3.517068 +step:6418 train loss:3.510880 +step:6419 train loss:3.595005 +step:6420 train loss:3.522127 +step:6421 train loss:3.547254 +step:6422 train loss:3.536521 +step:6423 train loss:3.545063 +step:6424 train loss:3.569137 +step:6425 train loss:3.564523 +step:6426 train loss:3.606328 +step:6427 train loss:3.568331 +step:6428 train loss:3.606261 +step:6429 train loss:3.569130 +step:6430 train loss:3.545124 +step:6431 train loss:3.520258 +step:6432 train loss:3.554487 +step:6433 train loss:3.568794 +step:6434 train loss:3.448268 +step:6435 train loss:3.627990 +step:6436 train loss:3.561804 +step:6437 train loss:3.526671 +step:6438 train loss:3.554417 +step:6439 train loss:3.527053 +step:6440 train loss:3.543865 +step:6441 train loss:3.538024 +step:6442 train loss:3.477427 +step:6443 train loss:3.533161 +step:6444 train loss:3.673342 +step:6445 train loss:3.577038 +step:6446 train loss:3.581567 +step:6447 train loss:3.562593 +step:6448 train loss:3.509209 +step:6449 train loss:3.536133 +step:6450 train loss:3.516975 +step:6451 train loss:3.508933 +step:6452 train loss:3.508025 +step:6453 train loss:3.553099 +step:6454 train loss:3.577011 +step:6455 train loss:3.565854 +step:6456 train loss:3.581474 +step:6457 train loss:3.561572 +step:6458 train loss:3.534244 +step:6459 train loss:3.514060 +step:6460 train loss:3.526116 +step:6461 train loss:3.524139 +step:6462 train loss:3.519067 +step:6463 train loss:3.616596 +step:6464 train loss:3.521558 +step:6465 train loss:3.564084 +step:6466 train loss:3.578784 +step:6467 train loss:3.505442 +step:6468 train loss:3.581913 +step:6469 train loss:3.493864 +step:6470 train loss:3.612751 +step:6471 train loss:3.523601 +step:6472 train loss:3.678068 +step:6473 train loss:3.562859 +step:6474 train loss:3.595492 +step:6475 train loss:3.538339 +step:6476 train loss:3.610760 +step:6477 train loss:3.540309 +step:6478 train loss:3.672572 +step:6479 train loss:3.585755 +step:6480 train loss:3.521667 +step:6481 train loss:3.578756 +step:6482 train loss:3.518640 +step:6483 train loss:3.581817 +step:6484 train loss:3.536254 +step:6485 train loss:3.598247 +step:6486 train loss:3.529884 +step:6487 train loss:3.530863 +step:6488 train loss:3.524327 +step:6489 train loss:3.529672 +step:6490 train loss:3.550257 +step:6491 train loss:3.521787 +step:6492 train loss:3.625350 +step:6493 train loss:3.531040 +step:6494 train loss:3.532747 +step:6495 train loss:3.532696 +step:6496 train loss:3.562147 +step:6497 train loss:3.582359 +step:6498 train loss:3.690079 +step:6499 train loss:3.660323 +step:6500 validation loss:3.493593 total_sharp:3.6991e-03 L1_sharp:3.6962e-02 L2_sharp:1.2206e-02 L3_sharp:2.4705e-02 L4_sharp:1.5331e-02 L5_sharp:1.6614e-02 L6_sharp:2.1851e-02 L7_sharp:2.4500e-02 L8_sharp:2.6626e-02 L9_sharp:1.9832e-02 L10_sharp:1.7073e-02 L11_sharp:1.4572e-02 L12_sharp:2.3654e-02 total_fnorm:1.3570e+00 total_l1_linf:8.1457e+03 total_spectral:1.3570e+00 L1_fnorm:6.0973e-02 L2_fnorm:5.8879e-02 L3_fnorm:5.8542e-02 L4_fnorm:5.9988e-02 L5_fnorm:6.0749e-02 L6_fnorm:6.1189e-02 L7_fnorm:6.1222e-02 L8_fnorm:6.1185e-02 L9_fnorm:6.1218e-02 L10_fnorm:6.1300e-02 L11_fnorm:6.1254e-02 L12_fnorm:6.1259e-02 L1_l1linf:2.8158e-01 L2_l1linf:3.3976e-01 L3_l1linf:3.3129e-01 L4_l1linf:3.4936e-01 L5_l1linf:3.1988e-01 L6_l1linf:3.0602e-01 L7_l1linf:2.9927e-01 L8_l1linf:2.9359e-01 L9_l1linf:3.0602e-01 L10_l1linf:3.5544e-01 L11_l1linf:3.6034e-01 L12_l1linf:3.5152e-01 L1_spectral:6.5173e-03 L2_spectral:7.6735e-03 
L3_spectral:7.5032e-03 L4_spectral:7.7888e-03 L5_spectral:7.2359e-03 L6_spectral:6.8628e-03 L7_spectral:6.7274e-03 L8_spectral:6.6785e-03 L9_spectral:6.9972e-03 L10_spectral:8.0539e-03 L11_spectral:8.1204e-03 L12_spectral:8.0729e-03 ip_v_neg_g:1.5459e-03 cos_v_neg_g:3.6413e-04 v_norm:1.3570e+00 g_norm:3.1286e+00 hv_norm:5.9007e-01 cos_v_hv:8.5069e-03 hg_norm:1.3247e+02 cos_g_hg:4.6863e-01 v_par:1.8165e-05 v_perp:1.3570e+00 L1_cos_v_neg_g:1.4663e-03 L1_v_norm:6.0973e-02 L2_cos_v_neg_g:2.1252e-03 L2_v_norm:5.8879e-02 L3_cos_v_neg_g:3.2193e-03 L3_v_norm:5.8542e-02 L4_cos_v_neg_g:1.1326e-03 L4_v_norm:5.9988e-02 L5_cos_v_neg_g:9.8266e-04 L5_v_norm:6.0749e-02 L6_cos_v_neg_g:2.3202e-03 L6_v_norm:6.1189e-02 L7_cos_v_neg_g:2.7549e-03 L7_v_norm:6.1222e-02 L8_cos_v_neg_g:3.2689e-03 L8_v_norm:6.1185e-02 L9_cos_v_neg_g:3.4027e-03 L9_v_norm:6.1218e-02 L10_cos_v_neg_g:3.6358e-03 L10_v_norm:6.1300e-02 L11_cos_v_neg_g:3.0050e-03 L11_v_norm:6.1254e-02 L12_cos_v_neg_g:1.7031e-03 L12_v_norm:6.1259e-02 +step:6500 train loss:3.506585 +step:6501 train loss:3.525424 +step:6502 train loss:3.544749 +step:6503 train loss:3.603698 +step:6504 train loss:3.551107 +step:6505 train loss:3.559790 +step:6506 train loss:3.519452 +step:6507 train loss:3.587872 +step:6508 train loss:3.553055 +step:6509 train loss:3.538012 +step:6510 train loss:3.545466 +step:6511 train loss:3.560582 +step:6512 train loss:3.502072 +step:6513 train loss:3.570719 +step:6514 train loss:3.442769 +step:6515 train loss:3.535757 +step:6516 train loss:3.585373 +step:6517 train loss:3.499435 +step:6518 train loss:3.539435 +step:6519 train loss:3.528689 +step:6520 train loss:3.616955 +step:6521 train loss:3.593537 +step:6522 train loss:3.604942 +step:6523 train loss:3.499498 +step:6524 train loss:3.583564 +step:6525 train loss:3.568459 +step:6526 train loss:3.511262 +step:6527 train loss:3.560171 +step:6528 train loss:3.579906 +step:6529 train loss:3.607171 +step:6530 train loss:3.513176 +step:6531 train loss:3.595100 +step:6532 train loss:3.519273 +step:6533 train loss:3.558002 +step:6534 train loss:3.564600 +step:6535 train loss:3.543259 +step:6536 train loss:3.673528 +step:6537 train loss:3.480106 +step:6538 train loss:3.591233 +step:6539 train loss:3.516310 +step:6540 train loss:3.627760 +step:6541 train loss:3.608649 +step:6542 train loss:3.564609 +step:6543 train loss:3.520882 +step:6544 train loss:3.502568 +step:6545 train loss:3.491464 +step:6546 train loss:3.553157 +step:6547 train loss:3.608573 +step:6548 train loss:3.550149 +step:6549 train loss:3.565566 +step:6550 train loss:3.675811 +step:6551 train loss:3.556493 +step:6552 train loss:3.550706 +step:6553 train loss:3.588573 +step:6554 train loss:3.480148 +step:6555 train loss:3.564299 +step:6556 train loss:3.436305 +step:6557 train loss:3.783179 +step:6558 train loss:3.614578 +step:6559 train loss:3.525095 +step:6560 train loss:3.565671 +step:6561 train loss:3.538730 +step:6562 train loss:3.559142 +step:6563 train loss:3.450953 +step:6564 train loss:3.556027 +step:6565 train loss:3.460781 +step:6566 train loss:3.572596 +step:6567 train loss:3.543988 +step:6568 train loss:3.589518 +step:6569 train loss:3.536900 +step:6570 train loss:3.574236 +step:6571 train loss:3.504669 +step:6572 train loss:3.580139 +step:6573 train loss:3.590693 +step:6574 train loss:3.578440 +step:6575 train loss:3.523615 +step:6576 train loss:3.514995 +step:6577 train loss:3.580998 +step:6578 train loss:3.452767 +step:6579 train loss:3.554621 +step:6580 train loss:3.511226 +step:6581 train loss:3.521835 +step:6582 
train loss:3.501209 +step:6583 train loss:3.602007 +step:6584 train loss:3.529800 +step:6585 train loss:3.567166 +step:6586 train loss:3.575609 +step:6587 train loss:3.582928 +step:6588 train loss:3.551429 +step:6589 train loss:3.582325 +step:6590 train loss:3.517866 +step:6591 train loss:3.571718 +step:6592 train loss:3.509428 +step:6593 train loss:3.521410 +step:6594 train loss:3.545280 +step:6595 train loss:3.531760 +step:6596 train loss:3.527005 +step:6597 train loss:3.552099 +step:6598 train loss:3.592738 +step:6599 train loss:3.485866 +step:6600 train loss:3.538360 +step:6601 train loss:3.598040 +step:6602 train loss:3.521811 +step:6603 train loss:3.551471 +step:6604 train loss:3.560719 +step:6605 train loss:3.540757 +step:6606 train loss:3.602130 +step:6607 train loss:3.522071 +step:6608 train loss:3.535933 +step:6609 train loss:3.504327 +step:6610 train loss:3.617395 +step:6611 train loss:3.539831 +step:6612 train loss:3.582519 +step:6613 train loss:3.499364 +step:6614 train loss:3.525954 +step:6615 train loss:3.526196 +step:6616 train loss:3.508728 +step:6617 train loss:3.550291 +step:6618 train loss:3.535575 +step:6619 train loss:3.506659 +step:6620 train loss:3.614219 +step:6621 train loss:3.490242 +step:6622 train loss:3.564221 +step:6623 train loss:3.493287 +step:6624 train loss:3.568015 +step:6625 train loss:3.609187 +step:6626 train loss:3.571928 +step:6627 train loss:3.522627 +step:6628 train loss:3.581042 +step:6629 train loss:3.483840 +step:6630 train loss:3.518199 +step:6631 train loss:3.556181 +step:6632 train loss:3.593485 +step:6633 train loss:3.544137 +step:6634 train loss:3.607739 +step:6635 train loss:3.508550 +step:6636 train loss:3.549851 +step:6637 train loss:3.516320 +step:6638 train loss:3.517805 +step:6639 train loss:3.528702 +step:6640 train loss:3.516050 +step:6641 train loss:3.531410 +step:6642 train loss:3.530954 +step:6643 train loss:3.610051 +step:6644 train loss:3.615176 +step:6645 train loss:3.489464 +step:6646 train loss:3.578861 +step:6647 train loss:3.535848 +step:6648 train loss:3.639092 +step:6649 train loss:3.566586 +step:6650 train loss:3.516022 +step:6651 train loss:3.562590 +step:6652 train loss:3.579084 +step:6653 train loss:3.520722 +step:6654 train loss:3.516990 +step:6655 train loss:3.556929 +step:6656 train loss:3.529523 +step:6657 train loss:3.554338 +step:6658 train loss:3.535886 +step:6659 train loss:3.689595 +step:6660 train loss:3.588454 +step:6661 train loss:3.512903 +step:6662 train loss:3.544809 +step:6663 train loss:3.479452 +step:6664 train loss:3.559532 +step:6665 train loss:3.568439 +step:6666 train loss:3.583544 +step:6667 train loss:3.497053 +step:6668 train loss:3.626554 +step:6669 train loss:3.508461 +step:6670 train loss:3.518034 +step:6671 train loss:3.598721 +step:6672 train loss:3.551709 +step:6673 train loss:3.560276 +step:6674 train loss:3.533951 +step:6675 train loss:3.549003 +step:6676 train loss:3.564745 +step:6677 train loss:3.516982 +step:6678 train loss:3.588221 +step:6679 train loss:3.623662 +step:6680 train loss:3.625437 +step:6681 train loss:3.576714 +step:6682 train loss:3.519942 +step:6683 train loss:3.543004 +step:6684 train loss:3.556509 +step:6685 train loss:3.567394 +step:6686 train loss:3.503226 +step:6687 train loss:3.519753 +step:6688 train loss:3.564044 +step:6689 train loss:3.572805 +step:6690 train loss:3.549584 +step:6691 train loss:3.580301 +step:6692 train loss:3.587114 +step:6693 train loss:3.620137 +step:6694 train loss:3.575537 +step:6695 train loss:3.547745 +step:6696 train loss:3.485028 
+step:6697 train loss:3.696555 +step:6698 train loss:3.546463 +step:6699 train loss:3.542637 +step:6700 train loss:3.554399 +step:6701 train loss:3.612043 +step:6702 train loss:3.501930 +step:6703 train loss:3.550247 +step:6704 train loss:3.535169 +step:6705 train loss:3.544718 +step:6706 train loss:3.521291 +step:6707 train loss:3.598193 +step:6708 train loss:3.549093 +step:6709 train loss:3.579207 +step:6710 train loss:3.568753 +step:6711 train loss:3.521216 +step:6712 train loss:3.509915 +step:6713 train loss:3.534869 +step:6714 train loss:3.578488 +step:6715 train loss:3.519935 +step:6716 train loss:3.598884 +step:6717 train loss:3.540774 +step:6718 train loss:3.564847 +step:6719 train loss:3.598226 +step:6720 train loss:3.527453 +step:6721 train loss:3.545498 +step:6722 train loss:3.520670 +step:6723 train loss:3.650222 +step:6724 train loss:3.506056 +step:6725 train loss:3.570263 +step:6726 train loss:3.522726 +step:6727 train loss:3.588156 +step:6728 train loss:3.682281 +step:6729 train loss:3.545288 +step:6730 train loss:3.540650 +step:6731 train loss:3.582299 +step:6732 train loss:3.457983 +step:6733 train loss:3.595220 +step:6734 train loss:3.528540 +step:6735 train loss:3.550881 +step:6736 train loss:3.549531 +step:6737 train loss:3.547116 +step:6738 train loss:3.578219 +step:6739 train loss:3.535089 +step:6740 train loss:3.486639 +step:6741 train loss:3.594497 +step:6742 train loss:3.555730 +step:6743 train loss:3.559818 +step:6744 train loss:3.450464 +step:6745 train loss:3.605290 +step:6746 train loss:3.536177 +step:6747 train loss:3.529365 +step:6748 train loss:3.600801 +step:6749 train loss:3.583812 +step:6750 validation loss:3.483971 +step:6750 train loss:3.504514 +step:6751 train loss:3.539079 +step:6752 train loss:3.542943 +step:6753 train loss:3.576534 +step:6754 train loss:3.557904 +step:6755 train loss:3.567876 +step:6756 train loss:3.507379 +step:6757 train loss:3.479293 +step:6758 train loss:3.654178 +step:6759 train loss:3.544655 +step:6760 train loss:3.602603 +step:6761 train loss:3.534454 +step:6762 train loss:3.558679 +step:6763 train loss:3.455719 +step:6764 train loss:3.536136 +step:6765 train loss:3.540265 +step:6766 train loss:3.534668 +step:6767 train loss:3.487138 +step:6768 train loss:3.492974 +step:6769 train loss:3.455528 +step:6770 train loss:3.543315 +step:6771 train loss:3.544287 +step:6772 train loss:3.553260 +step:6773 train loss:3.533376 +step:6774 train loss:3.546172 +step:6775 train loss:3.589928 +step:6776 train loss:3.545512 +step:6777 train loss:3.621538 +step:6778 train loss:3.505731 +step:6779 train loss:3.560142 +step:6780 train loss:3.492275 +step:6781 train loss:3.555420 +step:6782 train loss:3.468681 +step:6783 train loss:3.503459 +step:6784 train loss:3.528392 +step:6785 train loss:3.515687 +step:6786 train loss:3.531801 +step:6787 train loss:3.606368 +step:6788 train loss:3.545877 +step:6789 train loss:3.553602 +step:6790 train loss:3.551176 +step:6791 train loss:3.564347 +step:6792 train loss:3.562145 +step:6793 train loss:3.560902 +step:6794 train loss:3.530181 +step:6795 train loss:3.532532 +step:6796 train loss:3.537264 +step:6797 train loss:3.633647 +step:6798 train loss:3.537050 +step:6799 train loss:3.526884 +step:6800 train loss:3.495463 +step:6801 train loss:3.626388 +step:6802 train loss:3.575359 +step:6803 train loss:3.564464 +step:6804 train loss:3.594235 +step:6805 train loss:3.552005 +step:6806 train loss:3.489463 +step:6807 train loss:3.545491 +step:6808 train loss:3.531649 +step:6809 train loss:3.559406 +step:6810 
train loss:3.679335 +step:6811 train loss:3.583207 +step:6812 train loss:3.555534 +step:6813 train loss:3.569175 +step:6814 train loss:3.574779 +step:6815 train loss:3.622086 +step:6816 train loss:3.539125 +step:6817 train loss:3.563852 +step:6818 train loss:3.542650 +step:6819 train loss:3.524368 +step:6820 train loss:3.554478 +step:6821 train loss:3.517615 +step:6822 train loss:3.620942 +step:6823 train loss:3.600849 +step:6824 train loss:3.577634 +step:6825 train loss:3.525338 +step:6826 train loss:3.569503 +step:6827 train loss:3.554693 +step:6828 train loss:3.571906 +step:6829 train loss:3.558893 +step:6830 train loss:3.526185 +step:6831 train loss:3.486602 +step:6832 train loss:3.472649 +step:6833 train loss:3.489206 +step:6834 train loss:3.576972 +step:6835 train loss:3.547721 +step:6836 train loss:3.468367 +step:6837 train loss:3.533970 +step:6838 train loss:3.591053 +step:6839 train loss:3.676134 +step:6840 train loss:3.547885 +step:6841 train loss:3.506880 +step:6842 train loss:3.553266 +step:6843 train loss:3.660123 +step:6844 train loss:3.539158 +step:6845 train loss:3.592341 +step:6846 train loss:3.655682 +step:6847 train loss:3.586154 +step:6848 train loss:3.575397 +step:6849 train loss:3.599336 +step:6850 train loss:3.572852 +step:6851 train loss:3.499661 +step:6852 train loss:3.491218 +step:6853 train loss:3.481605 +step:6854 train loss:3.560639 +step:6855 train loss:3.532460 +step:6856 train loss:3.516875 +step:6857 train loss:3.567783 +step:6858 train loss:3.599710 +step:6859 train loss:3.506192 +step:6860 train loss:3.614871 +step:6861 train loss:3.642672 +step:6862 train loss:3.551812 +step:6863 train loss:3.547580 +step:6864 train loss:3.493865 +step:6865 train loss:3.567267 +step:6866 train loss:3.492592 +step:6867 train loss:3.670822 +step:6868 train loss:3.543753 +step:6869 train loss:3.577841 +step:6870 train loss:3.615010 +step:6871 train loss:3.530551 +step:6872 train loss:3.525801 +step:6873 train loss:3.545647 +step:6874 train loss:3.505493 +step:6875 train loss:3.510214 +step:6876 train loss:3.540711 +step:6877 train loss:3.582020 +step:6878 train loss:3.492818 +step:6879 train loss:3.540206 +step:6880 train loss:3.549759 +step:6881 train loss:3.512601 +step:6882 train loss:3.575369 +step:6883 train loss:3.557616 +step:6884 train loss:3.791133 +step:6885 train loss:3.559831 +step:6886 train loss:3.543264 +step:6887 train loss:3.480024 +step:6888 train loss:3.584610 +step:6889 train loss:3.465519 +step:6890 train loss:3.576451 +step:6891 train loss:3.583918 +step:6892 train loss:3.682705 +step:6893 train loss:3.516557 +step:6894 train loss:3.576080 +step:6895 train loss:3.577149 +step:6896 train loss:3.553271 +step:6897 train loss:3.506510 +step:6898 train loss:3.506353 +step:6899 train loss:3.595245 +step:6900 train loss:3.566756 +step:6901 train loss:3.518224 +step:6902 train loss:3.450034 +step:6903 train loss:3.495770 +step:6904 train loss:3.605809 +step:6905 train loss:3.640671 +step:6906 train loss:3.560316 +step:6907 train loss:3.575169 +step:6908 train loss:3.613695 +step:6909 train loss:3.605944 +step:6910 train loss:3.483218 +step:6911 train loss:3.610805 +step:6912 train loss:3.505404 +step:6913 train loss:3.540831 +step:6914 train loss:3.498439 +step:6915 train loss:3.525674 +step:6916 train loss:3.500481 +step:6917 train loss:3.626043 +step:6918 train loss:3.571718 +step:6919 train loss:3.565506 +step:6920 train loss:3.549857 +step:6921 train loss:3.616386 +step:6922 train loss:3.605933 +step:6923 train loss:3.472370 +step:6924 train loss:3.552265 
+step:6925 train loss:3.526639 +step:6926 train loss:3.564080 +step:6927 train loss:3.617621 +step:6928 train loss:3.502976 +step:6929 train loss:3.516877 +step:6930 train loss:3.550051 +step:6931 train loss:3.550740 +step:6932 train loss:3.783370 +step:6933 train loss:3.615359 +step:6934 train loss:3.555592 +step:6935 train loss:3.540099 +step:6936 train loss:3.578843 +step:6937 train loss:3.523285 +step:6938 train loss:3.588377 +step:6939 train loss:3.520862 +step:6940 train loss:3.576765 +step:6941 train loss:3.492741 +step:6942 train loss:3.582946 +step:6943 train loss:3.471729 +step:6944 train loss:3.565934 +step:6945 train loss:3.503163 +step:6946 train loss:3.594333 +step:6947 train loss:3.518758 +step:6948 train loss:3.513852 +step:6949 train loss:3.591378 +step:6950 train loss:3.579912 +step:6951 train loss:3.584584 +step:6952 train loss:3.515557 +step:6953 train loss:3.559820 +step:6954 train loss:3.621805 +step:6955 train loss:3.538215 +step:6956 train loss:3.574148 +step:6957 train loss:3.563663 +step:6958 train loss:3.524943 +step:6959 train loss:3.562023 +step:6960 train loss:3.530710 +step:6961 train loss:3.536028 +step:6962 train loss:3.518595 +step:6963 train loss:3.488633 +step:6964 train loss:3.531568 +step:6965 train loss:3.523460 +step:6966 train loss:3.568198 +step:6967 train loss:3.503543 +step:6968 train loss:3.543078 +step:6969 train loss:3.563594 +step:6970 train loss:3.536621 +step:6971 train loss:3.602488 +step:6972 train loss:3.550972 +step:6973 train loss:3.508801 +step:6974 train loss:3.636858 +step:6975 train loss:3.540921 +step:6976 train loss:3.515444 +step:6977 train loss:3.551758 +step:6978 train loss:3.544442 +step:6979 train loss:3.554797 +step:6980 train loss:3.530736 +step:6981 train loss:3.590655 +step:6982 train loss:3.544598 +step:6983 train loss:3.534296 +step:6984 train loss:3.651467 +step:6985 train loss:3.498374 +step:6986 train loss:3.488536 +step:6987 train loss:3.537360 +step:6988 train loss:3.542645 +step:6989 train loss:3.687963 +step:6990 train loss:3.551564 +step:6991 train loss:3.507560 +step:6992 train loss:3.555823 +step:6993 train loss:3.621933 +step:6994 train loss:3.569474 +step:6995 train loss:3.519901 +step:6996 train loss:3.522261 +step:6997 train loss:3.602593 +step:6998 train loss:3.501142 +step:6999 train loss:3.551413 +step:7000 validation loss:3.476736 total_sharp:4.3516e-03 L1_sharp:5.2344e-02 L2_sharp:1.8051e-02 L3_sharp:4.4377e-02 L4_sharp:1.6535e-02 L5_sharp:2.1020e-02 L6_sharp:2.7809e-02 L7_sharp:3.5681e-02 L8_sharp:3.5226e-02 L9_sharp:2.2030e-02 L10_sharp:1.6327e-02 L11_sharp:1.4911e-02 L12_sharp:3.1078e-02 total_fnorm:1.3535e+00 total_l1_linf:8.1251e+03 total_spectral:1.3535e+00 L1_fnorm:6.1173e-02 L2_fnorm:5.9248e-02 L3_fnorm:5.8625e-02 L4_fnorm:6.0177e-02 L5_fnorm:6.1064e-02 L6_fnorm:6.1526e-02 L7_fnorm:6.1513e-02 L8_fnorm:6.1485e-02 L9_fnorm:6.1327e-02 L10_fnorm:6.1509e-02 L11_fnorm:6.1474e-02 L12_fnorm:6.1430e-02 L1_l1linf:3.3375e-01 L2_l1linf:3.9130e-01 L3_l1linf:3.9159e-01 L4_l1linf:3.6307e-01 L5_l1linf:3.4576e-01 L6_l1linf:3.5866e-01 L7_l1linf:3.4119e-01 L8_l1linf:3.4491e-01 L9_l1linf:3.3686e-01 L10_l1linf:3.6812e-01 L11_l1linf:3.9189e-01 L12_l1linf:3.8690e-01 L1_spectral:7.5223e-03 L2_spectral:8.7879e-03 L3_spectral:8.7458e-03 L4_spectral:8.1899e-03 L5_spectral:7.8244e-03 L6_spectral:8.0308e-03 L7_spectral:7.6070e-03 L8_spectral:7.6870e-03 L9_spectral:7.5265e-03 L10_spectral:8.2647e-03 L11_spectral:8.7274e-03 L12_spectral:8.7459e-03 ip_v_neg_g:3.2319e-03 cos_v_neg_g:7.4267e-04 v_norm:1.3535e+00 
g_norm:3.2152e+00 hv_norm:6.8356e-01 cos_v_hv:8.6163e-03 hg_norm:1.4741e+02 cos_g_hg:4.5367e-01 v_par:2.0554e-05 v_perp:1.3535e+00 L1_cos_v_neg_g:7.4578e-03 L1_v_norm:6.1173e-02 L2_cos_v_neg_g:4.9169e-03 L2_v_norm:5.9248e-02 L3_cos_v_neg_g:6.3672e-03 L3_v_norm:5.8625e-02 L4_cos_v_neg_g:3.6403e-03 L4_v_norm:6.0177e-02 L5_cos_v_neg_g:4.3457e-03 L5_v_norm:6.1064e-02 L6_cos_v_neg_g:5.2838e-03 L6_v_norm:6.1526e-02 L7_cos_v_neg_g:4.0757e-03 L7_v_norm:6.1513e-02 L8_cos_v_neg_g:5.3550e-03 L8_v_norm:6.1485e-02 L9_cos_v_neg_g:4.4536e-03 L9_v_norm:6.1327e-02 L10_cos_v_neg_g:3.0818e-03 L10_v_norm:6.1509e-02 L11_cos_v_neg_g:2.1522e-03 L11_v_norm:6.1474e-02 L12_cos_v_neg_g:1.6510e-03 L12_v_norm:6.1430e-02 +step:7000 train loss:3.627714 +step:7001 train loss:3.555013 +step:7002 train loss:3.521815 +step:7003 train loss:3.547907 +step:7004 train loss:3.542977 +step:7005 train loss:3.525530 +step:7006 train loss:3.531347 +step:7007 train loss:3.581081 +step:7008 train loss:3.523786 +step:7009 train loss:3.563754 +step:7010 train loss:3.500490 +step:7011 train loss:3.556531 +step:7012 train loss:3.528887 +step:7013 train loss:3.602070 +step:7014 train loss:3.511187 +step:7015 train loss:3.569613 +step:7016 train loss:3.559453 +step:7017 train loss:3.524005 +step:7018 train loss:3.601639 +step:7019 train loss:3.529414 +step:7020 train loss:3.576127 +step:7021 train loss:3.520811 +step:7022 train loss:3.534871 +step:7023 train loss:3.553267 +step:7024 train loss:3.516001 +step:7025 train loss:3.565062 +step:7026 train loss:3.521911 +step:7027 train loss:3.584276 +step:7028 train loss:3.510268 +step:7029 train loss:3.498240 +step:7030 train loss:3.500337 +step:7031 train loss:3.555320 +step:7032 train loss:3.561176 +step:7033 train loss:3.537942 +step:7034 train loss:3.560523 +step:7035 train loss:3.609311 +step:7036 train loss:3.529300 +step:7037 train loss:3.555786 +step:7038 train loss:3.516794 +step:7039 train loss:3.572024 +step:7040 train loss:3.491240 +step:7041 train loss:3.582629 +step:7042 train loss:3.514614 +step:7043 train loss:3.487404 +step:7044 train loss:3.534586 +step:7045 train loss:3.533816 +step:7046 train loss:3.526988 +step:7047 train loss:3.565580 +step:7048 train loss:3.512468 +step:7049 train loss:3.522866 +step:7050 train loss:3.545204 +step:7051 train loss:3.563169 +step:7052 train loss:3.566766 +step:7053 train loss:3.528294 +step:7054 train loss:3.505514 +step:7055 train loss:3.573202 +step:7056 train loss:3.572992 +step:7057 train loss:3.498263 +step:7058 train loss:3.616055 +step:7059 train loss:3.524048 +step:7060 train loss:3.534221 +step:7061 train loss:3.508112 +step:7062 train loss:3.532740 +step:7063 train loss:3.590568 +step:7064 train loss:3.512815 +step:7065 train loss:3.569897 +step:7066 train loss:3.522693 +step:7067 train loss:3.559274 +step:7068 train loss:3.535256 +step:7069 train loss:3.497770 +step:7070 train loss:3.523782 +step:7071 train loss:3.494166 +step:7072 train loss:3.496566 +step:7073 train loss:3.491053 +step:7074 train loss:3.485168 +step:7075 train loss:3.506071 +step:7076 train loss:3.515208 +step:7077 train loss:3.525924 +step:7078 train loss:3.571862 +step:7079 train loss:3.582858 +step:7080 train loss:3.526855 +step:7081 train loss:3.547591 +step:7082 train loss:3.514688 +step:7083 train loss:3.545924 +step:7084 train loss:3.538994 +step:7085 train loss:3.497253 +step:7086 train loss:3.538388 +step:7087 train loss:3.514845 +step:7088 train loss:3.638039 +step:7089 train loss:3.530690 +step:7090 train loss:3.496402 +step:7091 train loss:3.508175 
+step:7092 train loss:3.487959 +step:7093 train loss:3.582541 +step:7094 train loss:3.503808 +step:7095 train loss:3.516286 +step:7096 train loss:3.537277 +step:7097 train loss:3.523785 +step:7098 train loss:3.548606 +step:7099 train loss:3.504449 +step:7100 train loss:3.537169 +step:7101 train loss:3.605190 +step:7102 train loss:3.495492 +step:7103 train loss:3.520767 +step:7104 train loss:3.551772 +step:7105 train loss:3.529448 +step:7106 train loss:3.517342 +step:7107 train loss:3.553607 +step:7108 train loss:3.620677 +step:7109 train loss:3.548690 +step:7110 train loss:3.576114 +step:7111 train loss:3.553032 +step:7112 train loss:3.543784 +step:7113 train loss:3.541277 +step:7114 train loss:3.555057 +step:7115 train loss:3.598841 +step:7116 train loss:3.527123 +step:7117 train loss:3.563771 +step:7118 train loss:3.577307 +step:7119 train loss:3.537613 +step:7120 train loss:3.594047 +step:7121 train loss:3.510000 +step:7122 train loss:3.511911 +step:7123 train loss:3.453047 +step:7124 train loss:3.607832 +step:7125 train loss:3.462416 +step:7126 train loss:3.628957 +step:7127 train loss:3.585620 +step:7128 train loss:3.531802 +step:7129 train loss:3.537874 +step:7130 train loss:3.527941 +step:7131 train loss:3.466933 +step:7132 train loss:3.510171 +step:7133 train loss:3.554969 +step:7134 train loss:3.490197 +step:7135 train loss:3.542501 +step:7136 train loss:3.525702 +step:7137 train loss:3.503818 +step:7138 train loss:3.492432 +step:7139 train loss:3.496842 +step:7140 train loss:3.531279 +step:7141 train loss:3.526157 +step:7142 train loss:3.525151 +step:7143 train loss:3.561207 +step:7144 train loss:3.510059 +step:7145 train loss:3.528440 +step:7146 train loss:3.537326 +step:7147 train loss:3.560642 +step:7148 train loss:3.564600 +step:7149 train loss:3.568348 +step:7150 train loss:3.543507 +step:7151 train loss:3.508229 +step:7152 train loss:3.479094 +step:7153 train loss:3.517661 +step:7154 train loss:3.533333 +step:7155 train loss:3.549161 +step:7156 train loss:3.517763 +step:7157 train loss:3.538525 +step:7158 train loss:3.497500 +step:7159 train loss:3.546719 +step:7160 train loss:3.558881 +step:7161 train loss:3.508307 +step:7162 train loss:3.556040 +step:7163 train loss:3.493669 +step:7164 train loss:3.528454 +step:7165 train loss:3.534875 +step:7166 train loss:3.591050 +step:7167 train loss:3.569849 +step:7168 train loss:3.545217 +step:7169 train loss:3.523978 +step:7170 train loss:3.552315 +step:7171 train loss:3.499334 +step:7172 train loss:3.667001 +step:7173 train loss:3.507183 +step:7174 train loss:3.551297 +step:7175 train loss:3.525189 +step:7176 train loss:3.533303 +step:7177 train loss:3.549564 +step:7178 train loss:3.550411 +step:7179 train loss:3.536363 +step:7180 train loss:3.536903 +step:7181 train loss:3.564740 +step:7182 train loss:3.516415 +step:7183 train loss:3.591488 +step:7184 train loss:3.679410 +step:7185 train loss:3.593676 +step:7186 train loss:3.532453 +step:7187 train loss:3.542208 +step:7188 train loss:3.530704 +step:7189 train loss:3.530860 +step:7190 train loss:3.532422 +step:7191 train loss:3.524535 +step:7192 train loss:3.558834 +step:7193 train loss:3.474734 +step:7194 train loss:3.537804 +step:7195 train loss:3.515393 +step:7196 train loss:3.563092 +step:7197 train loss:3.540380 +step:7198 train loss:3.595517 +step:7199 train loss:3.555246 +step:7200 train loss:3.547229 +step:7201 train loss:3.554908 +step:7202 train loss:3.533033 +step:7203 train loss:3.548247 +step:7204 train loss:3.517344 +step:7205 train loss:3.475168 +step:7206 train 
loss:3.503965 +step:7207 train loss:3.681459 +step:7208 train loss:3.511204 +step:7209 train loss:3.595763 +step:7210 train loss:3.531563 +step:7211 train loss:3.561793 +step:7212 train loss:3.644664 +step:7213 train loss:3.492123 +step:7214 train loss:3.562634 +step:7215 train loss:3.529935 +step:7216 train loss:3.581119 +step:7217 train loss:3.539969 +step:7218 train loss:3.624174 +step:7219 train loss:3.534970 +step:7220 train loss:3.613474 +step:7221 train loss:3.494996 +step:7222 train loss:3.576956 +step:7223 train loss:3.494981 +step:7224 train loss:3.557311 +step:7225 train loss:3.535509 +step:7226 train loss:3.503163 +step:7227 train loss:3.523535 +step:7228 train loss:3.511007 +step:7229 train loss:3.514802 +step:7230 train loss:3.500563 +step:7231 train loss:3.631975 +step:7232 train loss:3.504411 +step:7233 train loss:3.573505 +step:7234 train loss:3.561663 +step:7235 train loss:3.531090 +step:7236 train loss:3.572095 +step:7237 train loss:3.522825 +step:7238 train loss:3.560841 +step:7239 train loss:3.514221 +step:7240 train loss:3.513140 +step:7241 train loss:3.525427 +step:7242 train loss:3.507576 +step:7243 train loss:3.551437 +step:7244 train loss:3.528343 +step:7245 train loss:3.530994 +step:7246 train loss:3.572581 +step:7247 train loss:3.526675 +step:7248 train loss:3.565787 +step:7249 train loss:3.516402 +step:7250 validation loss:3.472445 +step:7250 train loss:3.538216 +step:7251 train loss:3.581961 +step:7252 train loss:3.496246 +step:7253 train loss:3.584961 +step:7254 train loss:3.522397 +step:7255 train loss:3.495035 +step:7256 train loss:3.534483 +step:7257 train loss:3.576670 +step:7258 train loss:3.533631 +step:7259 train loss:3.516963 +step:7260 train loss:3.601117 +step:7261 train loss:3.558185 +step:7262 train loss:3.514693 +step:7263 train loss:3.556127 +step:7264 train loss:3.542092 +step:7265 train loss:3.447723 +step:7266 train loss:3.570253 +step:7267 train loss:3.490098 +step:7268 train loss:3.554326 +step:7269 train loss:3.557973 +step:7270 train loss:3.512745 +step:7271 train loss:3.529326 +step:7272 train loss:3.534105 +step:7273 train loss:3.531593 +step:7274 train loss:3.510360 +step:7275 train loss:3.580939 +step:7276 train loss:3.485485 +step:7277 train loss:3.534894 +step:7278 train loss:3.506404 +step:7279 train loss:3.487440 +step:7280 train loss:3.557117 +step:7281 train loss:3.578967 +step:7282 train loss:3.575937 +step:7283 train loss:3.468817 +step:7284 train loss:3.510137 +step:7285 train loss:3.538646 +step:7286 train loss:3.670371 +step:7287 train loss:3.577575 +step:7288 train loss:3.535003 +step:7289 train loss:3.536735 +step:7290 train loss:3.587009 +step:7291 train loss:3.548275 +step:7292 train loss:3.615415 +step:7293 train loss:3.514935 +step:7294 train loss:3.600679 +step:7295 train loss:3.487842 +step:7296 train loss:3.486254 +step:7297 train loss:3.530025 +step:7298 train loss:3.506882 +step:7299 train loss:3.548147 +step:7300 train loss:3.534263 +step:7301 train loss:3.484487 +step:7302 train loss:3.629493 +step:7303 train loss:3.518409 +step:7304 train loss:3.465564 +step:7305 train loss:3.541005 +step:7306 train loss:3.569638 +step:7307 train loss:3.575200 +step:7308 train loss:3.524168 +step:7309 train loss:3.490448 +step:7310 train loss:3.521528 +step:7311 train loss:3.506097 +step:7312 train loss:3.543868 +step:7313 train loss:3.582935 +step:7314 train loss:3.477531 +step:7315 train loss:3.472938 +step:7316 train loss:3.618551 +step:7317 train loss:3.553470 +step:7318 train loss:3.491765 +step:7319 train loss:3.517828 
+step:7320 train loss:3.553092 +step:7321 train loss:3.580592 +step:7322 train loss:3.460949 +step:7323 train loss:3.515597 +step:7324 train loss:3.542845 +step:7325 train loss:3.506586 +step:7326 train loss:3.534163 +step:7327 train loss:3.510235 +step:7328 train loss:3.628448 +step:7329 train loss:3.471084 +step:7330 train loss:3.528159 +step:7331 train loss:3.522714 +step:7332 train loss:3.562204 +step:7333 train loss:3.543950 +step:7334 train loss:3.512596 +step:7335 train loss:3.510243 +step:7336 train loss:3.763609 +step:7337 train loss:3.550370 +step:7338 train loss:3.544970 +step:7339 train loss:3.556642 +step:7340 train loss:3.543572 +step:7341 train loss:3.534998 +step:7342 train loss:3.526479 +step:7343 train loss:3.539249 +step:7344 train loss:3.618247 +step:7345 train loss:3.478064 +step:7346 train loss:3.513561 +step:7347 train loss:3.508113 +step:7348 train loss:3.510599 +step:7349 train loss:3.612203 +step:7350 train loss:3.595566 +step:7351 train loss:3.529522 +step:7352 train loss:3.557981 +step:7353 train loss:3.540538 +step:7354 train loss:3.492309 +step:7355 train loss:3.671193 +step:7356 train loss:3.644598 +step:7357 train loss:3.564413 +step:7358 train loss:3.548208 +step:7359 train loss:3.514954 +step:7360 train loss:3.528621 +step:7361 train loss:3.479792 +step:7362 train loss:3.527158 +step:7363 train loss:3.538537 +step:7364 train loss:3.576097 +step:7365 train loss:3.557724 +step:7366 train loss:3.524877 +step:7367 train loss:3.597835 +step:7368 train loss:3.579470 +step:7369 train loss:3.571343 +step:7370 train loss:3.537350 +step:7371 train loss:3.494572 +step:7372 train loss:3.552027 +step:7373 train loss:3.574358 +step:7374 train loss:3.667366 +step:7375 train loss:3.491538 +step:7376 train loss:3.511917 +step:7377 train loss:3.556766 +step:7378 train loss:3.509065 +step:7379 train loss:3.633395 +step:7380 train loss:3.594913 +step:7381 train loss:3.558815 +step:7382 train loss:3.526620 +step:7383 train loss:3.618075 +step:7384 train loss:3.559898 +step:7385 train loss:3.517038 +step:7386 train loss:3.522910 +step:7387 train loss:3.565652 +step:7388 train loss:3.598410 +step:7389 train loss:3.541338 +step:7390 train loss:3.489176 +step:7391 train loss:3.520603 +step:7392 train loss:3.579112 +step:7393 train loss:3.544964 +step:7394 train loss:3.584194 +step:7395 train loss:3.472840 +step:7396 train loss:3.573051 +step:7397 train loss:3.503396 +step:7398 train loss:3.514489 +step:7399 train loss:3.563472 +step:7400 train loss:3.567055 +step:7401 train loss:3.483787 +step:7402 train loss:3.602257 +step:7403 train loss:3.485940 +step:7404 train loss:3.555396 +step:7405 train loss:3.678733 +step:7406 train loss:3.502771 +step:7407 train loss:3.553186 +step:7408 train loss:3.546761 +step:7409 train loss:3.524115 +step:7410 train loss:3.693017 +step:7411 train loss:3.535970 +step:7412 train loss:3.540949 +step:7413 train loss:3.590942 +step:7414 train loss:3.502063 +step:7415 train loss:3.560520 +step:7416 train loss:3.444015 +step:7417 train loss:3.564232 +step:7418 train loss:3.546448 +step:7419 train loss:3.516341 +step:7420 train loss:3.507152 +step:7421 train loss:3.540322 +step:7422 train loss:3.497881 +step:7423 train loss:3.636777 +step:7424 train loss:3.699949 +step:7425 train loss:3.588139 +step:7426 train loss:3.555336 +step:7427 train loss:3.526775 +step:7428 train loss:3.554868 +step:7429 train loss:3.562035 +step:7430 train loss:3.489492 +step:7431 train loss:3.496236 +step:7432 train loss:3.503200 +step:7433 train loss:3.599266 +step:7434 train 
loss:3.513556 +step:7435 train loss:3.598366 +step:7436 train loss:3.641768 +step:7437 train loss:3.462557 +step:7438 train loss:3.523260 +step:7439 train loss:3.533226 +step:7440 train loss:3.508487 +step:7441 train loss:3.474403 +step:7442 train loss:3.702421 +step:7443 train loss:3.525995 +step:7444 train loss:3.568529 +step:7445 train loss:3.498971 +step:7446 train loss:3.520632 +step:7447 train loss:3.445892 +step:7448 train loss:3.503494 +step:7449 train loss:3.516948 +step:7450 train loss:3.548729 +step:7451 train loss:3.582022 +step:7452 train loss:3.511859 +step:7453 train loss:3.536408 +step:7454 train loss:3.520160 +step:7455 train loss:3.532211 +step:7456 train loss:3.505139 +step:7457 train loss:3.514416 +step:7458 train loss:3.552041 +step:7459 train loss:3.529514 +step:7460 train loss:3.537631 +step:7461 train loss:3.573694 +step:7462 train loss:3.510194 +step:7463 train loss:3.573214 +step:7464 train loss:3.498025 +step:7465 train loss:3.506057 +step:7466 train loss:3.508550 +step:7467 train loss:3.516159 +step:7468 train loss:3.567339 +step:7469 train loss:3.500420 +step:7470 train loss:3.531707 +step:7471 train loss:3.519884 +step:7472 train loss:3.555771 +step:7473 train loss:3.494305 +step:7474 train loss:3.480060 +step:7475 train loss:3.512188 +step:7476 train loss:3.548317 +step:7477 train loss:3.521966 +step:7478 train loss:3.520177 +step:7479 train loss:3.531225 +step:7480 train loss:3.811601 +step:7481 train loss:3.462163 +step:7482 train loss:3.532459 +step:7483 train loss:3.527802 +step:7484 train loss:3.548474 +step:7485 train loss:3.532772 +step:7486 train loss:3.562245 +step:7487 train loss:3.554808 +step:7488 train loss:3.573953 +step:7489 train loss:3.571560 +step:7490 train loss:3.515269 +step:7491 train loss:3.536926 +step:7492 train loss:3.643070 +step:7493 train loss:3.619778 +step:7494 train loss:3.642210 +step:7495 train loss:3.513063 +step:7496 train loss:3.501466 +step:7497 train loss:3.600119 +step:7498 train loss:3.534062 +step:7499 train loss:3.574728 +step:7500 validation loss:3.467161 total_sharp:4.5715e-03 L1_sharp:2.2347e-01 L2_sharp:3.9525e-02 L3_sharp:3.4911e-02 L4_sharp:1.7442e-02 L5_sharp:1.9593e-02 L6_sharp:2.2353e-02 L7_sharp:2.3358e-02 L8_sharp:2.7815e-02 L9_sharp:2.0738e-02 L10_sharp:1.4936e-02 L11_sharp:1.4129e-02 L12_sharp:2.3052e-02 total_fnorm:1.3517e+00 total_l1_linf:8.1033e+03 total_spectral:1.3517e+00 L1_fnorm:6.0852e-02 L2_fnorm:5.8983e-02 L3_fnorm:5.8660e-02 L4_fnorm:6.0191e-02 L5_fnorm:6.1014e-02 L6_fnorm:6.1363e-02 L7_fnorm:6.1290e-02 L8_fnorm:6.1194e-02 L9_fnorm:6.1264e-02 L10_fnorm:6.1421e-02 L11_fnorm:6.1415e-02 L12_fnorm:6.1342e-02 L1_l1linf:3.8375e-01 L2_l1linf:4.0699e-01 L3_l1linf:3.8957e-01 L4_l1linf:3.7201e-01 L5_l1linf:3.5674e-01 L6_l1linf:3.3689e-01 L7_l1linf:2.9966e-01 L8_l1linf:3.0486e-01 L9_l1linf:3.4088e-01 L10_l1linf:3.6514e-01 L11_l1linf:3.8840e-01 L12_l1linf:3.7247e-01 L1_spectral:8.5176e-03 L2_spectral:9.0956e-03 L3_spectral:8.6947e-03 L4_spectral:8.4359e-03 L5_spectral:8.0091e-03 L6_spectral:7.5476e-03 L7_spectral:6.7584e-03 L8_spectral:6.8248e-03 L9_spectral:7.6790e-03 L10_spectral:8.2123e-03 L11_spectral:8.7044e-03 L12_spectral:8.3875e-03 ip_v_neg_g:4.5870e-03 cos_v_neg_g:1.0083e-03 v_norm:1.3517e+00 g_norm:3.3655e+00 hv_norm:8.2037e-01 cos_v_hv:7.5323e-03 hg_norm:2.0663e+02 cos_g_hg:4.6631e-01 v_par:3.0111e-05 v_perp:1.3517e+00 L1_cos_v_neg_g:1.2115e-02 L1_v_norm:6.0852e-02 L2_cos_v_neg_g:7.6022e-03 L2_v_norm:5.8983e-02 L3_cos_v_neg_g:6.7284e-03 L3_v_norm:5.8660e-02 L4_cos_v_neg_g:4.9887e-03 
L4_v_norm:6.0191e-02 L5_cos_v_neg_g:3.6302e-03 L5_v_norm:6.1014e-02 L6_cos_v_neg_g:3.6685e-03 L6_v_norm:6.1363e-02 L7_cos_v_neg_g:4.0219e-03 L7_v_norm:6.1290e-02 L8_cos_v_neg_g:5.3379e-03 L8_v_norm:6.1194e-02 L9_cos_v_neg_g:5.9085e-03 L9_v_norm:6.1264e-02 L10_cos_v_neg_g:6.5018e-03 L10_v_norm:6.1421e-02 L11_cos_v_neg_g:5.6955e-03 L11_v_norm:6.1415e-02 L12_cos_v_neg_g:5.6470e-03 L12_v_norm:6.1342e-02 +step:7500 train loss:3.518519 +step:7501 train loss:3.507111 +step:7502 train loss:3.500012 +step:7503 train loss:3.477759 +step:7504 train loss:3.501226 +step:7505 train loss:3.490712 +step:7506 train loss:3.552888 +step:7507 train loss:3.469383 +step:7508 train loss:3.538512 +step:7509 train loss:3.509975 +step:7510 train loss:3.539341 +step:7511 train loss:3.545985 +step:7512 train loss:3.808492 +step:7513 train loss:3.498845 +step:7514 train loss:3.526701 +step:7515 train loss:3.493609 +step:7516 train loss:3.505720 +step:7517 train loss:3.539765 +step:7518 train loss:3.517044 +step:7519 train loss:3.528476 +step:7520 train loss:3.595326 +step:7521 train loss:3.479808 +step:7522 train loss:3.534722 +step:7523 train loss:3.567842 +step:7524 train loss:3.515380 +step:7525 train loss:3.516777 +step:7526 train loss:3.465469 +step:7527 train loss:3.474695 +step:7528 train loss:3.574932 +step:7529 train loss:3.550377 +step:7530 train loss:3.497838 +step:7531 train loss:3.570452 +step:7532 train loss:3.562403 +step:7533 train loss:3.488074 +step:7534 train loss:3.550628 +step:7535 train loss:3.552634 +step:7536 train loss:3.587668 +step:7537 train loss:3.607100 +step:7538 train loss:3.633134 +step:7539 train loss:3.531955 +step:7540 train loss:3.518909 +step:7541 train loss:3.574041 +step:7542 train loss:3.533940 +step:7543 train loss:3.488330 +step:7544 train loss:3.533938 +step:7545 train loss:3.520432 +step:7546 train loss:3.476274 +step:7547 train loss:3.523581 +step:7548 train loss:3.535177 +step:7549 train loss:3.516986 +step:7550 train loss:3.516465 +step:7551 train loss:3.617159 +step:7552 train loss:3.530181 +step:7553 train loss:3.567409 +step:7554 train loss:3.491303 +step:7555 train loss:3.585240 +step:7556 train loss:3.485997 +step:7557 train loss:3.581364 +step:7558 train loss:3.568958 +step:7559 train loss:3.527209 +step:7560 train loss:3.621290 +step:7561 train loss:3.590672 +step:7562 train loss:3.495735 +step:7563 train loss:3.491196 +step:7564 train loss:3.545271 +step:7565 train loss:3.561496 +step:7566 train loss:3.555069 +step:7567 train loss:3.571913 +step:7568 train loss:3.514735 +step:7569 train loss:3.575346 +step:7570 train loss:3.558791 +step:7571 train loss:3.639403 +step:7572 train loss:3.488164 +step:7573 train loss:3.555821 +step:7574 train loss:3.521844 +step:7575 train loss:3.514142 +step:7576 train loss:3.522663 +step:7577 train loss:3.540204 +step:7578 train loss:3.595975 +step:7579 train loss:3.531691 +step:7580 train loss:3.520574 +step:7581 train loss:3.506083 +step:7582 train loss:3.562754 +step:7583 train loss:3.498807 +step:7584 train loss:3.481539 +step:7585 train loss:3.449660 +step:7586 train loss:3.487475 +step:7587 train loss:3.549430 +step:7588 train loss:3.677901 +step:7589 train loss:3.497602 +step:7590 train loss:3.568091 +step:7591 train loss:3.572417 +step:7592 train loss:3.527347 +step:7593 train loss:3.553482 +step:7594 train loss:3.551759 +step:7595 train loss:3.520966 +step:7596 train loss:3.572773 +step:7597 train loss:3.477498 +step:7598 train loss:3.539789 +step:7599 train loss:3.533509 +step:7600 train loss:3.491226 +step:7601 train 
loss:3.604287 +step:7602 train loss:3.543791 +step:7603 train loss:3.506995 +step:7604 train loss:3.648671 +step:7605 train loss:3.540810 +step:7606 train loss:3.573962 +step:7607 train loss:3.524908 +step:7608 train loss:3.537034 +step:7609 train loss:3.570672 +step:7610 train loss:3.530176 +step:7611 train loss:3.505907 +step:7612 train loss:3.451876 +step:7613 train loss:3.498896 +step:7614 train loss:3.566132 +step:7615 train loss:3.527454 +step:7616 train loss:3.595407 +step:7617 train loss:3.494204 +step:7618 train loss:3.581743 +step:7619 train loss:3.524166 +step:7620 train loss:3.510394 +step:7621 train loss:3.458079 +step:7622 train loss:3.733925 +step:7623 train loss:3.747182 +step:7624 train loss:3.561394 +step:7625 train loss:3.597689 +step:7626 train loss:3.516995 +step:7627 train loss:3.589137 +step:7628 train loss:3.470258 +step:7629 train loss:3.531006 +step:7630 train loss:3.543508 +step:7631 train loss:3.524023 +step:7632 train loss:3.575762 +step:7633 train loss:3.643613 +step:7634 train loss:3.604891 +step:7635 train loss:3.509484 +step:7636 train loss:3.535761 +step:7637 train loss:3.484966 +step:7638 train loss:3.594741 +step:7639 train loss:3.521683 +step:7640 train loss:3.503549 +step:7641 train loss:3.533550 +step:7642 train loss:3.869744 +step:7643 train loss:3.620355 +step:7644 train loss:3.546234 +step:7645 train loss:3.532299 +step:7646 train loss:3.520542 +step:7647 train loss:3.512640 +step:7648 train loss:3.548828 +step:7649 train loss:3.506276 +step:7650 train loss:3.554111 +step:7651 train loss:3.576095 +step:7652 train loss:3.457296 +step:7653 train loss:3.653300 +step:7654 train loss:3.510006 +step:7655 train loss:3.527035 +step:7656 train loss:3.503129 +step:7657 train loss:3.515869 +step:7658 train loss:3.471168 +step:7659 train loss:3.536168 +step:7660 train loss:3.469613 +step:7661 train loss:3.485040 +step:7662 train loss:3.486876 +step:7663 train loss:3.536854 +step:7664 train loss:3.494039 +step:7665 train loss:3.467837 +step:7666 train loss:3.575657 +step:7667 train loss:3.488118 +step:7668 train loss:3.598602 +step:7669 train loss:3.532812 +step:7670 train loss:3.488291 +step:7671 train loss:3.542991 +step:7672 train loss:3.563303 +step:7673 train loss:3.528730 +step:7674 train loss:3.566028 +step:7675 train loss:3.620514 +step:7676 train loss:3.587996 +step:7677 train loss:3.616255 +step:7678 train loss:3.553795 +step:7679 train loss:3.575797 +step:7680 train loss:3.582937 +step:7681 train loss:3.549742 +step:7682 train loss:3.522517 +step:7683 train loss:3.523178 +step:7684 train loss:3.496172 +step:7685 train loss:3.473588 +step:7686 train loss:3.594650 +step:7687 train loss:3.508729 +step:7688 train loss:3.478248 +step:7689 train loss:3.527582 +step:7690 train loss:3.491529 +step:7691 train loss:3.521036 +step:7692 train loss:3.553727 +step:7693 train loss:3.557509 +step:7694 train loss:3.607244 +step:7695 train loss:3.534256 +step:7696 train loss:3.508394 +step:7697 train loss:3.497540 +step:7698 train loss:3.555712 +step:7699 train loss:3.553519 +step:7700 train loss:3.451987 +step:7701 train loss:3.570564 +step:7702 train loss:3.511456 +step:7703 train loss:3.516904 +step:7704 train loss:3.567679 +step:7705 train loss:3.526054 +step:7706 train loss:3.461422 +step:7707 train loss:3.580639 +step:7708 train loss:3.518483 +step:7709 train loss:3.538742 +step:7710 train loss:3.601510 +step:7711 train loss:3.563759 +step:7712 train loss:3.507809 +step:7713 train loss:3.587527 +step:7714 train loss:3.535135 +step:7715 train loss:3.484564 
+step:7716 train loss:3.524283 +step:7717 train loss:3.549815 +step:7718 train loss:3.553967 +step:7719 train loss:3.511205 +step:7720 train loss:3.525307 +step:7721 train loss:3.566412 +step:7722 train loss:3.495355 +step:7723 train loss:3.866283 +step:7724 train loss:3.533481 +step:7725 train loss:3.431414 +step:7726 train loss:3.518563 +step:7727 train loss:3.544035 +step:7728 train loss:3.497024 +step:7729 train loss:3.508114 +step:7730 train loss:3.527686 +step:7731 train loss:3.558861 +step:7732 train loss:3.577611 +step:7733 train loss:3.490422 +step:7734 train loss:3.517805 +step:7735 train loss:3.600761 +step:7736 train loss:3.550236 +step:7737 train loss:3.566751 +step:7738 train loss:3.470256 +step:7739 train loss:3.545454 +step:7740 train loss:3.491333 +step:7741 train loss:3.529653 +step:7742 train loss:3.526999 +step:7743 train loss:3.480639 +step:7744 train loss:3.612008 +step:7745 train loss:3.498019 +step:7746 train loss:3.472117 +step:7747 train loss:3.566135 +step:7748 train loss:3.547060 +step:7749 train loss:3.471238 +step:7750 validation loss:3.458876 +step:7750 train loss:3.630689 +step:7751 train loss:3.514897 +step:7752 train loss:3.506278 +step:7753 train loss:3.509626 +step:7754 train loss:3.482544 +step:7755 train loss:3.548305 +step:7756 train loss:3.576530 +step:7757 train loss:3.526689 +step:7758 train loss:3.495696 +step:7759 train loss:3.524253 +step:7760 train loss:3.551059 +step:7761 train loss:3.543919 +step:7762 train loss:3.529811 +step:7763 train loss:3.512146 +step:7764 train loss:3.518906 +step:7765 train loss:3.473242 +step:7766 train loss:3.538208 +step:7767 train loss:3.541704 +step:7768 train loss:3.497009 +step:7769 train loss:3.560293 +step:7770 train loss:3.576806 +step:7771 train loss:3.552146 +step:7772 train loss:3.524757 +step:7773 train loss:3.583642 +step:7774 train loss:3.482885 +step:7775 train loss:3.468489 +step:7776 train loss:3.572089 +step:7777 train loss:3.527543 +step:7778 train loss:3.484986 +step:7779 train loss:3.529155 +step:7780 train loss:3.523791 +step:7781 train loss:3.532046 +step:7782 train loss:3.513811 +step:7783 train loss:3.497521 +step:7784 train loss:3.494482 +step:7785 train loss:3.536316 +step:7786 train loss:3.494262 +step:7787 train loss:3.573039 +step:7788 train loss:3.524606 +step:7789 train loss:3.460272 +step:7790 train loss:3.519819 +step:7791 train loss:3.553295 +step:7792 train loss:3.512925 +step:7793 train loss:3.535712 +step:7794 train loss:3.522720 +step:7795 train loss:3.552222 +step:7796 train loss:3.517925 +step:7797 train loss:3.536096 +step:7798 train loss:3.529588 +step:7799 train loss:3.518964 +step:7800 train loss:3.473766 +step:7801 train loss:3.539206 +step:7802 train loss:3.520952 +step:7803 train loss:3.570657 +step:7804 train loss:3.530453 +step:7805 train loss:3.528850 +step:7806 train loss:3.544925 +step:7807 train loss:3.615619 +step:7808 train loss:3.476629 +step:7809 train loss:3.452672 +step:7810 train loss:3.542587 +step:7811 train loss:3.474925 +step:7812 train loss:3.495798 +step:7813 train loss:3.581944 +step:7814 train loss:3.651459 +step:7815 train loss:3.466232 +step:7816 train loss:3.550348 +step:7817 train loss:3.581069 +step:7818 train loss:3.479246 +step:7819 train loss:3.532976 +step:7820 train loss:3.574209 +step:7821 train loss:3.505766 +step:7822 train loss:3.464472 +step:7823 train loss:3.525564 +step:7824 train loss:3.520445 +step:7825 train loss:3.504906 +step:7826 train loss:3.504198 +step:7827 train loss:3.542605 +step:7828 train loss:3.536191 +step:7829 
train loss:3.490210 +step:7830 train loss:3.499331 +step:7831 train loss:3.504991 +step:7832 train loss:3.569820 +step:7833 train loss:3.549566 +step:7834 train loss:3.511155 +step:7835 train loss:3.537689 +step:7836 train loss:3.645961 +step:7837 train loss:3.535163 +step:7838 train loss:3.504243 +step:7839 train loss:3.462614 +step:7840 train loss:3.478875 +step:7841 train loss:3.576511 +step:7842 train loss:3.560209 +step:7843 train loss:3.612831 +step:7844 train loss:3.542966 +step:7845 train loss:3.521399 +step:7846 train loss:3.632514 +step:7847 train loss:3.522982 +step:7848 train loss:3.533279 +step:7849 train loss:3.549085 +step:7850 train loss:3.516959 +step:7851 train loss:3.547790 +step:7852 train loss:3.519987 +step:7853 train loss:3.490966 +step:7854 train loss:3.521796 +step:7855 train loss:3.523450 +step:7856 train loss:3.526871 +step:7857 train loss:3.513575 +step:7858 train loss:3.522897 +step:7859 train loss:3.529451 +step:7860 train loss:3.567151 +step:7861 train loss:3.554033 +step:7862 train loss:3.495937 +step:7863 train loss:3.598973 +step:7864 train loss:3.440465 +step:7865 train loss:3.518243 +step:7866 train loss:3.492500 +step:7867 train loss:3.537960 +step:7868 train loss:3.515896 +step:7869 train loss:3.521044 +step:7870 train loss:3.439256 +step:7871 train loss:3.504963 +step:7872 train loss:3.496145 +step:7873 train loss:3.574842 +step:7874 train loss:3.515505 +step:7875 train loss:3.519541 +step:7876 train loss:3.539442 +step:7877 train loss:3.492661 +step:7878 train loss:3.531256 +step:7879 train loss:3.869331 +step:7880 train loss:3.522048 +step:7881 train loss:3.550198 +step:7882 train loss:3.628695 +step:7883 train loss:3.443204 +step:7884 train loss:3.536652 +step:7885 train loss:3.516454 +step:7886 train loss:3.516831 +step:7887 train loss:3.510564 +step:7888 train loss:3.542311 +step:7889 train loss:3.591783 +step:7890 train loss:3.495597 +step:7891 train loss:3.547399 +step:7892 train loss:3.516398 +step:7893 train loss:3.493653 +step:7894 train loss:3.513969 +step:7895 train loss:3.498425 +step:7896 train loss:3.498403 +step:7897 train loss:3.518379 +step:7898 train loss:3.530105 +step:7899 train loss:3.517162 +step:7900 train loss:3.489171 +step:7901 train loss:3.477041 +step:7902 train loss:3.627142 +step:7903 train loss:3.472518 +step:7904 train loss:3.522803 +step:7905 train loss:3.588697 +step:7906 train loss:3.486153 +step:7907 train loss:3.512644 +step:7908 train loss:3.565140 +step:7909 train loss:3.614707 +step:7910 train loss:3.496115 +step:7911 train loss:3.516793 +step:7912 train loss:3.520101 +step:7913 train loss:3.493937 +step:7914 train loss:3.528792 +step:7915 train loss:3.634548 +step:7916 train loss:3.503840 +step:7917 train loss:3.563491 +step:7918 train loss:3.505321 +step:7919 train loss:3.496606 +step:7920 train loss:3.534516 +step:7921 train loss:3.539906 +step:7922 train loss:3.514172 +step:7923 train loss:3.563259 +step:7924 train loss:3.522152 +step:7925 train loss:3.544940 +step:7926 train loss:3.450392 +step:7927 train loss:3.726056 +step:7928 train loss:3.554412 +step:7929 train loss:3.517130 +step:7930 train loss:3.477300 +step:7931 train loss:3.503624 +step:7932 train loss:3.526120 +step:7933 train loss:3.541013 +step:7934 train loss:3.632253 +step:7935 train loss:3.553503 +step:7936 train loss:3.527910 +step:7937 train loss:3.477368 +step:7938 train loss:3.490744 +step:7939 train loss:3.539811 +step:7940 train loss:3.522730 +step:7941 train loss:3.549300 +step:7942 train loss:3.539976 +step:7943 train loss:3.551958 
+step:7944 train loss:3.472959 +step:7945 train loss:3.576658 +step:7946 train loss:3.523283 +step:7947 train loss:3.537577 +step:7948 train loss:3.495119 +step:7949 train loss:3.545160 +step:7950 train loss:3.602692 +step:7951 train loss:3.568810 +step:7952 train loss:3.712209 +step:7953 train loss:3.606274 +step:7954 train loss:3.509390 +step:7955 train loss:3.497008 +step:7956 train loss:3.500879 +step:7957 train loss:3.577331 +step:7958 train loss:3.585416 +step:7959 train loss:3.541571 +step:7960 train loss:3.602614 +step:7961 train loss:3.512804 +step:7962 train loss:3.483861 +step:7963 train loss:3.521323 +step:7964 train loss:3.518696 +step:7965 train loss:3.528738 +step:7966 train loss:3.498769 +step:7967 train loss:3.525348 +step:7968 train loss:3.534311 +step:7969 train loss:3.491045 +step:7970 train loss:3.460316 +step:7971 train loss:3.543302 +step:7972 train loss:3.520813 +step:7973 train loss:3.492678 +step:7974 train loss:3.531837 +step:7975 train loss:3.518512 +step:7976 train loss:3.536849 +step:7977 train loss:3.569448 +step:7978 train loss:3.592680 +step:7979 train loss:3.537475 +step:7980 train loss:3.445088 +step:7981 train loss:3.483041 +step:7982 train loss:3.532780 +step:7983 train loss:3.547080 +step:7984 train loss:3.588151 +step:7985 train loss:3.515040 +step:7986 train loss:3.535335 +step:7987 train loss:3.588997 +step:7988 train loss:3.564082 +step:7989 train loss:3.468045 +step:7990 train loss:3.485803 +step:7991 train loss:3.498150 +step:7992 train loss:3.522929 +step:7993 train loss:3.504744 +step:7994 train loss:3.556581 +step:7995 train loss:3.558511 +step:7996 train loss:3.525072 +step:7997 train loss:3.544501 +step:7998 train loss:3.569831 +step:7999 train loss:3.499365 +step:8000 validation loss:3.451076 total_sharp:4.7727e-03 L1_sharp:1.2592e-01 L2_sharp:2.7014e-02 L3_sharp:2.7192e-02 L4_sharp:1.5311e-02 L5_sharp:1.8933e-02 L6_sharp:2.1149e-02 L7_sharp:2.7943e-02 L8_sharp:3.0738e-02 L9_sharp:2.3145e-02 L10_sharp:2.0133e-02 L11_sharp:2.0906e-02 L12_sharp:4.2178e-02 total_fnorm:1.3437e+00 total_l1_linf:8.0684e+03 total_spectral:1.3437e+00 L1_fnorm:6.1014e-02 L2_fnorm:5.8889e-02 L3_fnorm:5.8750e-02 L4_fnorm:6.0258e-02 L5_fnorm:6.0937e-02 L6_fnorm:6.1202e-02 L7_fnorm:6.1401e-02 L8_fnorm:6.1261e-02 L9_fnorm:6.1373e-02 L10_fnorm:6.1601e-02 L11_fnorm:6.1670e-02 L12_fnorm:6.1726e-02 L1_l1linf:3.6349e-01 L2_l1linf:3.7215e-01 L3_l1linf:3.5631e-01 L4_l1linf:3.5529e-01 L5_l1linf:3.4846e-01 L6_l1linf:3.1650e-01 L7_l1linf:3.2455e-01 L8_l1linf:3.2974e-01 L9_l1linf:3.4805e-01 L10_l1linf:3.9132e-01 L11_l1linf:4.1723e-01 L12_l1linf:4.5355e-01 L1_spectral:8.0764e-03 L2_spectral:8.3994e-03 L3_spectral:8.0113e-03 L4_spectral:7.9993e-03 L5_spectral:7.7761e-03 L6_spectral:7.1400e-03 L7_spectral:7.3309e-03 L8_spectral:7.3553e-03 L9_spectral:7.7888e-03 L10_spectral:8.7438e-03 L11_spectral:9.2755e-03 L12_spectral:9.9822e-03 ip_v_neg_g:4.2769e-03 cos_v_neg_g:1.0191e-03 v_norm:1.3437e+00 g_norm:3.1233e+00 hv_norm:8.2528e-01 cos_v_hv:7.7710e-03 hg_norm:1.7327e+02 cos_g_hg:4.5241e-01 v_par:3.4368e-05 v_perp:1.3437e+00 L1_cos_v_neg_g:5.9596e-03 L1_v_norm:6.1014e-02 L2_cos_v_neg_g:6.3930e-03 L2_v_norm:5.8889e-02 L3_cos_v_neg_g:6.2227e-03 L3_v_norm:5.8750e-02 L4_cos_v_neg_g:5.8084e-03 L4_v_norm:6.0258e-02 L5_cos_v_neg_g:4.3600e-03 L5_v_norm:6.0937e-02 L6_cos_v_neg_g:4.5632e-03 L6_v_norm:6.1202e-02 L7_cos_v_neg_g:6.8228e-03 L7_v_norm:6.1401e-02 L8_cos_v_neg_g:7.8133e-03 L8_v_norm:6.1261e-02 L9_cos_v_neg_g:6.2141e-03 L9_v_norm:6.1373e-02 L10_cos_v_neg_g:5.6231e-03 L10_v_norm:6.1601e-02 
L11_cos_v_neg_g:4.5891e-03 L11_v_norm:6.1670e-02 L12_cos_v_neg_g:5.4626e-03 L12_v_norm:6.1726e-02 +step:8000 train loss:3.567859 +step:8001 train loss:3.528484 +step:8002 train loss:3.548227 +step:8003 train loss:3.565985 +step:8004 train loss:3.541900 +step:8005 train loss:3.462844 +step:8006 train loss:3.541787 +step:8007 train loss:3.509109 +step:8008 train loss:3.534575 +step:8009 train loss:3.608041 +step:8010 train loss:3.821813 +step:8011 train loss:3.490519 +step:8012 train loss:3.568726 +step:8013 train loss:3.520507 +step:8014 train loss:3.534482 +step:8015 train loss:3.532447 +step:8016 train loss:3.521023 +step:8017 train loss:3.542009 +step:8018 train loss:3.503603 +step:8019 train loss:3.470627 +step:8020 train loss:3.509648 +step:8021 train loss:3.585937 +step:8022 train loss:3.502447 +step:8023 train loss:3.534495 +step:8024 train loss:3.389911 +step:8025 train loss:3.511694 +step:8026 train loss:3.519074 +step:8027 train loss:3.527371 +step:8028 train loss:3.583156 +step:8029 train loss:3.513489 +step:8030 train loss:3.471807 +step:8031 train loss:3.532609 +step:8032 train loss:3.517221 +step:8033 train loss:3.469203 +step:8034 train loss:3.504976 +step:8035 train loss:3.488593 +step:8036 train loss:3.486024 +step:8037 train loss:3.454700 +step:8038 train loss:3.468749 +step:8039 train loss:3.563508 +step:8040 train loss:3.497381 +step:8041 train loss:3.496325 +step:8042 train loss:3.529334 +step:8043 train loss:3.473833 +step:8044 train loss:3.487376 +step:8045 train loss:3.556660 +step:8046 train loss:3.482420 +step:8047 train loss:3.484284 +step:8048 train loss:3.515864 +step:8049 train loss:3.561918 +step:8050 train loss:3.503001 +step:8051 train loss:3.477927 +step:8052 train loss:3.540647 +step:8053 train loss:3.494529 +step:8054 train loss:3.530026 +step:8055 train loss:3.559447 +step:8056 train loss:3.528632 +step:8057 train loss:3.602846 +step:8058 train loss:3.509822 +step:8059 train loss:3.569198 +step:8060 train loss:3.538609 +step:8061 train loss:3.430593 +step:8062 train loss:3.560301 +step:8063 train loss:3.522134 +step:8064 train loss:3.484053 +step:8065 train loss:3.546865 +step:8066 train loss:3.505983 +step:8067 train loss:3.569180 +step:8068 train loss:3.497401 +step:8069 train loss:3.519618 +step:8070 train loss:3.486695 +step:8071 train loss:3.496115 +step:8072 train loss:3.536577 +step:8073 train loss:3.490270 +step:8074 train loss:3.501595 +step:8075 train loss:3.484943 +step:8076 train loss:3.536011 +step:8077 train loss:3.543717 +step:8078 train loss:3.485561 +step:8079 train loss:3.507190 +step:8080 train loss:3.494458 +step:8081 train loss:3.512818 +step:8082 train loss:3.529096 +step:8083 train loss:3.432508 +step:8084 train loss:3.567624 +step:8085 train loss:3.438426 +step:8086 train loss:3.567545 +step:8087 train loss:3.459565 +step:8088 train loss:3.510417 +step:8089 train loss:3.544473 +step:8090 train loss:3.566177 +step:8091 train loss:3.510589 +step:8092 train loss:3.492367 +step:8093 train loss:3.500394 +step:8094 train loss:3.501212 +step:8095 train loss:3.527122 +step:8096 train loss:3.526744 +step:8097 train loss:3.454690 +step:8098 train loss:3.470816 +step:8099 train loss:3.455783 +step:8100 train loss:3.513937 +step:8101 train loss:3.588870 +step:8102 train loss:3.526631 +step:8103 train loss:3.479138 +step:8104 train loss:3.528549 +step:8105 train loss:3.526708 +step:8106 train loss:3.490498 +step:8107 train loss:3.470881 +step:8108 train loss:3.487683 +step:8109 train loss:3.484036 +step:8110 train loss:3.548406 +step:8111 
train loss:3.470076 +step:8112 train loss:3.491100 +step:8113 train loss:3.478098 +step:8114 train loss:3.422160 +step:8115 train loss:3.477330 +step:8116 train loss:3.513515 +step:8117 train loss:3.484182 +step:8118 train loss:3.475989 +step:8119 train loss:3.519650 +step:8120 train loss:3.466114 +step:8121 train loss:3.523266 +step:8122 train loss:3.505800 +step:8123 train loss:3.514262 +step:8124 train loss:3.476699 +step:8125 train loss:3.459909 +step:8126 train loss:3.449166 +step:8127 train loss:3.545300 +step:8128 train loss:3.551847 +step:8129 train loss:3.470544 +step:8130 train loss:3.499848 +step:8131 train loss:3.470153 +step:8132 train loss:3.539366 +step:8133 train loss:3.462056 +step:8134 train loss:3.498318 +step:8135 train loss:3.489770 +step:8136 train loss:3.500374 +step:8137 train loss:3.562417 +step:8138 train loss:3.471819 +step:8139 train loss:3.544426 +step:8140 train loss:3.474038 +step:8141 train loss:3.496241 +step:8142 train loss:3.476568 +step:8143 train loss:3.528297 +step:8144 train loss:3.504787 +step:8145 train loss:3.474395 +step:8146 train loss:3.481332 +step:8147 train loss:3.505687 +step:8148 train loss:3.597653 +step:8149 train loss:3.506942 +step:8150 train loss:3.488773 +step:8151 train loss:3.481308 +step:8152 train loss:3.578390 +step:8153 train loss:3.454229 +step:8154 train loss:3.473569 +step:8155 train loss:3.497695 +step:8156 train loss:3.479715 +step:8157 train loss:3.501396 +step:8158 train loss:3.512095 +step:8159 train loss:3.528139 +step:8160 train loss:3.479711 +step:8161 train loss:3.523633 +step:8162 train loss:3.453788 +step:8163 train loss:3.514749 +step:8164 train loss:3.501196 +step:8165 train loss:3.551862 +step:8166 train loss:3.553746 +step:8167 train loss:3.457045 +step:8168 train loss:3.438947 +step:8169 train loss:3.487641 +step:8170 train loss:3.438553 +step:8171 train loss:3.498941 +step:8172 train loss:3.496121 +step:8173 train loss:3.497332 +step:8174 train loss:3.506735 +step:8175 train loss:3.469052 +step:8176 train loss:3.463230 +step:8177 train loss:3.510592 +step:8178 train loss:3.594590 +step:8179 train loss:3.502198 +step:8180 train loss:3.527370 +step:8181 train loss:3.525342 +step:8182 train loss:3.485584 +step:8183 train loss:3.472484 +step:8184 train loss:3.466108 +step:8185 train loss:3.505950 +step:8186 train loss:3.511180 +step:8187 train loss:3.518192 +step:8188 train loss:3.447701 +step:8189 train loss:3.596020 +step:8190 train loss:3.527124 +step:8191 train loss:3.531320 +step:8192 train loss:3.641767 +step:8193 train loss:3.512593 +step:8194 train loss:3.447075 +step:8195 train loss:3.545834 +step:8196 train loss:3.461580 +step:8197 train loss:3.490477 +step:8198 train loss:3.498183 +step:8199 train loss:3.500466 +step:8200 train loss:3.478221 +step:8201 train loss:3.593782 +step:8202 train loss:3.509722 +step:8203 train loss:3.528280 +step:8204 train loss:3.439890 +step:8205 train loss:3.446593 +step:8206 train loss:3.569826 +step:8207 train loss:3.494709 +step:8208 train loss:3.514990 +step:8209 train loss:3.558625 +step:8210 train loss:3.542535 +step:8211 train loss:3.475217 +step:8212 train loss:3.533154 +step:8213 train loss:3.543910 +step:8214 train loss:3.581564 +step:8215 train loss:3.554883 +step:8216 train loss:3.538420 +step:8217 train loss:3.516723 +step:8218 train loss:3.523981 +step:8219 train loss:3.660561 +step:8220 train loss:3.487775 +step:8221 train loss:3.510074 +step:8222 train loss:3.459606 +step:8223 train loss:3.480052 +step:8224 train loss:3.488592 +step:8225 train loss:3.542209 
+step:8226 train loss:3.468979 +step:8227 train loss:3.538233 +step:8228 train loss:3.425612 +step:8229 train loss:3.468216 +step:8230 train loss:3.485724 +step:8231 train loss:3.508633 +step:8232 train loss:3.509343 +step:8233 train loss:3.554328 +step:8234 train loss:3.547782 +step:8235 train loss:3.521516 +step:8236 train loss:3.506069 +step:8237 train loss:3.456593 +step:8238 train loss:3.708647 +step:8239 train loss:3.541924 +step:8240 train loss:3.488755 +step:8241 train loss:3.459414 +step:8242 train loss:3.497671 +step:8243 train loss:3.489897 +step:8244 train loss:3.501868 +step:8245 train loss:3.484398 +step:8246 train loss:3.552335 +step:8247 train loss:3.585196 +step:8248 train loss:3.501258 +step:8249 train loss:3.494036 +step:8250 validation loss:3.442545 +step:8250 train loss:3.482049 +step:8251 train loss:3.579375 +step:8252 train loss:3.514895 +step:8253 train loss:3.483793 +step:8254 train loss:3.454305 +step:8255 train loss:3.489306 +step:8256 train loss:3.468946 +step:8257 train loss:3.578094 +step:8258 train loss:3.498217 +step:8259 train loss:3.480873 +step:8260 train loss:3.482376 +step:8261 train loss:3.480269 +step:8262 train loss:3.494009 +step:8263 train loss:3.509231 +step:8264 train loss:3.472971 +step:8265 train loss:3.465432 +step:8266 train loss:3.473763 +step:8267 train loss:3.404811 +step:8268 train loss:3.530361 +step:8269 train loss:3.460948 +step:8270 train loss:3.514679 +step:8271 train loss:3.542087 +step:8272 train loss:3.566548 +step:8273 train loss:3.445630 +step:8274 train loss:3.506637 +step:8275 train loss:3.466632 +step:8276 train loss:3.502481 +step:8277 train loss:3.573429 +step:8278 train loss:3.588975 +step:8279 train loss:3.498868 +step:8280 train loss:3.487522 +step:8281 train loss:3.457287 +step:8282 train loss:3.514661 +step:8283 train loss:3.502301 +step:8284 train loss:3.486421 +step:8285 train loss:3.475766 +step:8286 train loss:3.588868 +step:8287 train loss:3.522501 +step:8288 train loss:3.495767 +step:8289 train loss:3.508903 +step:8290 train loss:3.447488 +step:8291 train loss:3.488495 +step:8292 train loss:3.518469 +step:8293 train loss:3.491908 +step:8294 train loss:3.462392 +step:8295 train loss:3.499603 +step:8296 train loss:3.565333 +step:8297 train loss:3.645284 +step:8298 train loss:3.467906 +step:8299 train loss:3.503388 +step:8300 train loss:3.513759 +step:8301 train loss:3.486594 +step:8302 train loss:3.543725 +step:8303 train loss:3.678674 +step:8304 train loss:3.484108 +step:8305 train loss:3.530352 +step:8306 train loss:3.507591 +step:8307 train loss:3.524250 +step:8308 train loss:3.523229 +step:8309 train loss:3.544874 +step:8310 train loss:3.462861 +step:8311 train loss:3.554501 +step:8312 train loss:3.546302 +step:8313 train loss:3.611056 +step:8314 train loss:3.479957 +step:8315 train loss:3.429281 +step:8316 train loss:3.488730 +step:8317 train loss:3.512211 +step:8318 train loss:3.501027 +step:8319 train loss:3.537889 +step:8320 train loss:3.559767 +step:8321 train loss:3.466873 +step:8322 train loss:3.483165 +step:8323 train loss:3.519834 +step:8324 train loss:3.494851 +step:8325 train loss:3.548786 +step:8326 train loss:3.516351 +step:8327 train loss:3.505305 +step:8328 train loss:3.576430 +step:8329 train loss:3.484157 +step:8330 train loss:3.527153 +step:8331 train loss:3.452379 +step:8332 train loss:3.554086 +step:8333 train loss:3.569715 +step:8334 train loss:3.437192 +step:8335 train loss:3.498844 +step:8336 train loss:3.592723 +step:8337 train loss:3.526331 +step:8338 train loss:3.491235 +step:8339 
train loss:3.471266 +step:8340 train loss:3.562465 +step:8341 train loss:3.461507 +step:8342 train loss:3.536130 +step:8343 train loss:3.447445 +step:8344 train loss:3.494679 +step:8345 train loss:3.528919 +step:8346 train loss:3.611018 +step:8347 train loss:3.499393 +step:8348 train loss:3.528533 +step:8349 train loss:3.500596 +step:8350 train loss:3.520322 +step:8351 train loss:3.461179 +step:8352 train loss:3.547765 +step:8353 train loss:3.504795 +step:8354 train loss:3.485223 +step:8355 train loss:3.484931 +step:8356 train loss:3.481073 +step:8357 train loss:3.494969 +step:8358 train loss:3.468968 +step:8359 train loss:3.464281 +step:8360 train loss:3.512044 +step:8361 train loss:3.525312 +step:8362 train loss:3.544793 +step:8363 train loss:3.542763 +step:8364 train loss:3.507323 +step:8365 train loss:3.653339 +step:8366 train loss:3.495745 +step:8367 train loss:3.469143 +step:8368 train loss:3.437370 +step:8369 train loss:3.470735 +step:8370 train loss:3.551035 +step:8371 train loss:3.522934 +step:8372 train loss:3.499296 +step:8373 train loss:3.511648 +step:8374 train loss:3.443877 +step:8375 train loss:3.507411 +step:8376 train loss:3.544374 +step:8377 train loss:3.371851 +step:8378 train loss:3.588311 +step:8379 train loss:3.449438 +step:8380 train loss:3.460565 +step:8381 train loss:3.464643 +step:8382 train loss:3.491273 +step:8383 train loss:3.450048 +step:8384 train loss:3.494418 +step:8385 train loss:3.504628 +step:8386 train loss:3.487034 +step:8387 train loss:3.647177 +step:8388 train loss:3.558307 +step:8389 train loss:3.536415 +step:8390 train loss:3.537792 +step:8391 train loss:3.469870 +step:8392 train loss:3.480109 +step:8393 train loss:3.433809 +step:8394 train loss:3.529914 +step:8395 train loss:3.532490 +step:8396 train loss:3.559470 +step:8397 train loss:3.490571 +step:8398 train loss:3.509161 +step:8399 train loss:3.477177 +step:8400 train loss:3.482065 +step:8401 train loss:3.486563 +step:8402 train loss:3.471002 +step:8403 train loss:3.487665 +step:8404 train loss:3.495064 +step:8405 train loss:3.446241 +step:8406 train loss:3.487121 +step:8407 train loss:3.530701 +step:8408 train loss:3.503108 +step:8409 train loss:3.425467 +step:8410 train loss:3.488055 +step:8411 train loss:3.514354 +step:8412 train loss:3.573765 +step:8413 train loss:3.549398 +step:8414 train loss:3.546434 +step:8415 train loss:3.466765 +step:8416 train loss:3.513556 +step:8417 train loss:3.428935 +step:8418 train loss:3.534526 +step:8419 train loss:3.489874 +step:8420 train loss:3.566703 +step:8421 train loss:3.481494 +step:8422 train loss:3.498755 +step:8423 train loss:3.515130 +step:8424 train loss:3.519520 +step:8425 train loss:3.578443 +step:8426 train loss:3.548854 +step:8427 train loss:3.467225 +step:8428 train loss:3.479698 +step:8429 train loss:3.541919 +step:8430 train loss:3.480684 +step:8431 train loss:3.487439 +step:8432 train loss:3.488169 +step:8433 train loss:3.461548 +step:8434 train loss:3.498408 +step:8435 train loss:3.418458 +step:8436 train loss:3.499005 +step:8437 train loss:3.543158 +step:8438 train loss:3.520031 +step:8439 train loss:3.462680 +step:8440 train loss:3.431729 +step:8441 train loss:3.488850 +step:8442 train loss:3.512752 +step:8443 train loss:3.470956 +step:8444 train loss:3.505053 +step:8445 train loss:3.452433 +step:8446 train loss:3.502404 +step:8447 train loss:3.515864 +step:8448 train loss:3.497694 +step:8449 train loss:3.488866 +step:8450 train loss:3.478848 +step:8451 train loss:3.509979 +step:8452 train loss:3.484900 +step:8453 train loss:3.465033 
+step:8454 train loss:3.512815 +step:8455 train loss:3.587205 +step:8456 train loss:3.562487 +step:8457 train loss:3.617837 +step:8458 train loss:3.507464 +step:8459 train loss:3.512078 +step:8460 train loss:3.441794 +step:8461 train loss:3.599236 +step:8462 train loss:3.469646 +step:8463 train loss:3.508780 +step:8464 train loss:3.521802 +step:8465 train loss:3.530155 +step:8466 train loss:3.501195 +step:8467 train loss:3.506254 +step:8468 train loss:3.759162 +step:8469 train loss:3.466581 +step:8470 train loss:3.462622 +step:8471 train loss:3.504907 +step:8472 train loss:3.526003 +step:8473 train loss:3.482316 +step:8474 train loss:3.607436 +step:8475 train loss:3.562668 +step:8476 train loss:3.512552 +step:8477 train loss:3.503034 +step:8478 train loss:3.482049 +step:8479 train loss:3.485028 +step:8480 train loss:3.577632 +step:8481 train loss:3.481709 +step:8482 train loss:3.478244 +step:8483 train loss:3.619666 +step:8484 train loss:3.501270 +step:8485 train loss:3.549811 +step:8486 train loss:3.460785 +step:8487 train loss:3.516815 +step:8488 train loss:3.460974 +step:8489 train loss:3.539866 +step:8490 train loss:3.527552 +step:8491 train loss:3.547019 +step:8492 train loss:3.499846 +step:8493 train loss:3.572452 +step:8494 train loss:3.437966 +step:8495 train loss:3.535250 +step:8496 train loss:3.478860 +step:8497 train loss:3.514599 +step:8498 train loss:3.528267 +step:8499 train loss:3.506227 +step:8500 validation loss:3.440930 total_sharp:3.5250e-03 L1_sharp:2.9637e-02 L2_sharp:1.5952e-02 L3_sharp:3.3248e-02 L4_sharp:1.6317e-02 L5_sharp:2.0134e-02 L6_sharp:2.0084e-02 L7_sharp:2.4150e-02 L8_sharp:2.5972e-02 L9_sharp:1.7892e-02 L10_sharp:1.4658e-02 L11_sharp:1.4408e-02 L12_sharp:2.8969e-02 total_fnorm:1.3479e+00 total_l1_linf:8.0783e+03 total_spectral:1.3479e+00 L1_fnorm:6.0887e-02 L2_fnorm:5.8849e-02 L3_fnorm:5.8811e-02 L4_fnorm:6.0139e-02 L5_fnorm:6.0823e-02 L6_fnorm:6.1151e-02 L7_fnorm:6.1210e-02 L8_fnorm:6.1130e-02 L9_fnorm:6.1137e-02 L10_fnorm:6.1323e-02 L11_fnorm:6.1162e-02 L12_fnorm:6.1304e-02 L1_l1linf:3.2299e-01 L2_l1linf:3.6156e-01 L3_l1linf:3.7973e-01 L4_l1linf:3.6225e-01 L5_l1linf:3.4212e-01 L6_l1linf:2.9664e-01 L7_l1linf:2.9508e-01 L8_l1linf:2.9409e-01 L9_l1linf:3.1622e-01 L10_l1linf:3.5550e-01 L11_l1linf:3.6792e-01 L12_l1linf:3.7057e-01 L1_spectral:7.3487e-03 L2_spectral:8.1552e-03 L3_spectral:8.4446e-03 L4_spectral:8.1727e-03 L5_spectral:7.6927e-03 L6_spectral:6.6476e-03 L7_spectral:6.6039e-03 L8_spectral:6.6288e-03 L9_spectral:7.1367e-03 L10_spectral:7.9845e-03 L11_spectral:8.2364e-03 L12_spectral:8.3285e-03 ip_v_neg_g:3.2121e-03 cos_v_neg_g:7.8275e-04 v_norm:1.3479e+00 g_norm:3.0445e+00 hv_norm:5.5049e-01 cos_v_hv:8.6310e-03 hg_norm:1.0735e+02 cos_g_hg:4.5590e-01 v_par:2.3607e-05 v_perp:1.3479e+00 L1_cos_v_neg_g:5.1558e-03 L1_v_norm:6.0887e-02 L2_cos_v_neg_g:4.6208e-03 L2_v_norm:5.8849e-02 L3_cos_v_neg_g:4.1262e-03 L3_v_norm:5.8811e-02 L4_cos_v_neg_g:4.6273e-03 L4_v_norm:6.0139e-02 L5_cos_v_neg_g:5.5440e-03 L5_v_norm:6.0823e-02 L6_cos_v_neg_g:5.3397e-03 L6_v_norm:6.1151e-02 L7_cos_v_neg_g:5.8620e-03 L7_v_norm:6.1210e-02 L8_cos_v_neg_g:5.6467e-03 L8_v_norm:6.1130e-02 L9_cos_v_neg_g:4.7074e-03 L9_v_norm:6.1137e-02 L10_cos_v_neg_g:5.1429e-03 L10_v_norm:6.1323e-02 L11_cos_v_neg_g:3.9587e-03 L11_v_norm:6.1162e-02 L12_cos_v_neg_g:2.2119e-03 L12_v_norm:6.1304e-02 +step:8500 train loss:3.501644 +step:8501 train loss:3.717231 +step:8502 train loss:3.732534 +step:8503 train loss:3.494008 +step:8504 train loss:3.490535 +step:8505 train loss:3.468243 +step:8506 train 
loss:3.539006 +step:8507 train loss:3.476053 +step:8508 train loss:3.511954 +step:8509 train loss:3.452762 +step:8510 train loss:3.475869 +step:8511 train loss:3.431455 +step:8512 train loss:3.531369 +step:8513 train loss:3.536857 +step:8514 train loss:3.483279 +step:8515 train loss:3.575786 +step:8516 train loss:3.493810 +step:8517 train loss:3.515578 +step:8518 train loss:3.407054 +step:8519 train loss:3.498850 +step:8520 train loss:3.467901 +step:8521 train loss:3.505651 +step:8522 train loss:3.403173 +step:8523 train loss:3.497493 +step:8524 train loss:3.486822 +step:8525 train loss:3.552996 +step:8526 train loss:3.531651 +step:8527 train loss:3.475275 +step:8528 train loss:3.559258 +step:8529 train loss:3.515889 +step:8530 train loss:3.549862 +step:8531 train loss:3.538462 +step:8532 train loss:3.575308 +step:8533 train loss:3.529639 +step:8534 train loss:3.529704 +step:8535 train loss:3.499864 +step:8536 train loss:3.590466 +step:8537 train loss:3.503820 +step:8538 train loss:3.572497 +step:8539 train loss:3.494740 +step:8540 train loss:3.521257 +step:8541 train loss:3.462113 +step:8542 train loss:3.529160 +step:8543 train loss:3.441461 +step:8544 train loss:3.439887 +step:8545 train loss:3.489732 +step:8546 train loss:3.442996 +step:8547 train loss:3.497408 +step:8548 train loss:3.467941 +step:8549 train loss:3.508280 +step:8550 train loss:3.464072 +step:8551 train loss:3.512498 +step:8552 train loss:3.514486 +step:8553 train loss:3.516207 +step:8554 train loss:3.493110 +step:8555 train loss:3.504703 +step:8556 train loss:3.583773 +step:8557 train loss:3.480596 +step:8558 train loss:3.518359 +step:8559 train loss:3.510591 +step:8560 train loss:3.490797 +step:8561 train loss:3.445153 +step:8562 train loss:3.475874 +step:8563 train loss:3.472087 +step:8564 train loss:3.541413 +step:8565 train loss:3.518468 +step:8566 train loss:3.537330 +step:8567 train loss:3.483646 +step:8568 train loss:3.501864 +step:8569 train loss:3.509336 +step:8570 train loss:3.453475 +step:8571 train loss:3.496847 +step:8572 train loss:3.511144 +step:8573 train loss:3.585644 +step:8574 train loss:3.515285 +step:8575 train loss:3.514742 +step:8576 train loss:3.547738 +step:8577 train loss:3.630054 +step:8578 train loss:3.540336 +step:8579 train loss:3.526591 +step:8580 train loss:3.461470 +step:8581 train loss:3.500905 +step:8582 train loss:3.506814 +step:8583 train loss:3.505838 +step:8584 train loss:3.494607 +step:8585 train loss:3.576158 +step:8586 train loss:3.493583 +step:8587 train loss:3.502954 +step:8588 train loss:3.549543 +step:8589 train loss:3.495663 +step:8590 train loss:3.488876 +step:8591 train loss:3.492080 +step:8592 train loss:3.450468 +step:8593 train loss:3.529621 +step:8594 train loss:3.553955 +step:8595 train loss:3.475781 +step:8596 train loss:3.517609 +step:8597 train loss:3.481329 +step:8598 train loss:3.535050 +step:8599 train loss:3.507388 +step:8600 train loss:3.512029 +step:8601 train loss:3.499194 +step:8602 train loss:3.473203 +step:8603 train loss:3.531175 +step:8604 train loss:3.477000 +step:8605 train loss:3.488561 +step:8606 train loss:3.500529 +step:8607 train loss:3.509024 +step:8608 train loss:3.551798 +step:8609 train loss:3.450789 +step:8610 train loss:3.521294 +step:8611 train loss:3.452902 +step:8612 train loss:3.532963 +step:8613 train loss:3.466239 +step:8614 train loss:3.527882 +step:8615 train loss:3.568678 +step:8616 train loss:3.451788 +step:8617 train loss:3.519142 +step:8618 train loss:3.498205 +step:8619 train loss:3.450768 +step:8620 train loss:3.491026 
+step:8621 train loss:3.524550 +step:8622 train loss:3.483053 +step:8623 train loss:3.493054 +step:8624 train loss:3.569789 +step:8625 train loss:3.490166 +step:8626 train loss:3.499321 +step:8627 train loss:3.494666 +step:8628 train loss:3.529544 +step:8629 train loss:3.436932 +step:8630 train loss:3.536210 +step:8631 train loss:3.479588 +step:8632 train loss:3.534744 +step:8633 train loss:3.482924 +step:8634 train loss:3.711927 +step:8635 train loss:3.509437 +step:8636 train loss:3.552754 +step:8637 train loss:3.480159 +step:8638 train loss:3.479307 +step:8639 train loss:3.536381 +step:8640 train loss:3.450362 +step:8641 train loss:3.548598 +step:8642 train loss:3.500282 +step:8643 train loss:3.609991 +step:8644 train loss:3.453967 +step:8645 train loss:3.523675 +step:8646 train loss:3.487695 +step:8647 train loss:3.512749 +step:8648 train loss:3.460516 +step:8649 train loss:3.543044 +step:8650 train loss:3.498457 +step:8651 train loss:3.511152 +step:8652 train loss:3.480406 +step:8653 train loss:3.513155 +step:8654 train loss:3.554263 +step:8655 train loss:3.486098 +step:8656 train loss:3.526749 +step:8657 train loss:3.529695 +step:8658 train loss:3.500228 +step:8659 train loss:3.493201 +step:8660 train loss:3.438463 +step:8661 train loss:3.497245 +step:8662 train loss:3.439420 +step:8663 train loss:3.513263 +step:8664 train loss:3.428460 +step:8665 train loss:3.450209 +step:8666 train loss:3.528708 +step:8667 train loss:3.420344 +step:8668 train loss:3.529767 +step:8669 train loss:3.565537 +step:8670 train loss:3.465995 +step:8671 train loss:3.464866 +step:8672 train loss:3.681136 +step:8673 train loss:3.445979 +step:8674 train loss:3.514814 +step:8675 train loss:3.556441 +step:8676 train loss:3.501097 +step:8677 train loss:3.523018 +step:8678 train loss:3.472535 +step:8679 train loss:3.530172 +step:8680 train loss:3.508465 +step:8681 train loss:3.511209 +step:8682 train loss:3.466529 +step:8683 train loss:3.483534 +step:8684 train loss:3.557801 +step:8685 train loss:3.503597 +step:8686 train loss:3.492681 +step:8687 train loss:3.448689 +step:8688 train loss:3.464788 +step:8689 train loss:3.536417 +step:8690 train loss:3.473289 +step:8691 train loss:3.551017 +step:8692 train loss:3.439106 +step:8693 train loss:3.529811 +step:8694 train loss:3.528241 +step:8695 train loss:3.514091 +step:8696 train loss:3.539175 +step:8697 train loss:3.492286 +step:8698 train loss:3.533108 +step:8699 train loss:3.483418 +step:8700 train loss:3.509614 +step:8701 train loss:3.472115 +step:8702 train loss:3.455863 +step:8703 train loss:3.471851 +step:8704 train loss:3.426182 +step:8705 train loss:3.506540 +step:8706 train loss:3.528359 +step:8707 train loss:3.523348 +step:8708 train loss:3.468787 +step:8709 train loss:3.532791 +step:8710 train loss:3.459660 +step:8711 train loss:3.513422 +step:8712 train loss:3.421918 +step:8713 train loss:3.497231 +step:8714 train loss:3.605046 +step:8715 train loss:3.460619 +step:8716 train loss:3.514255 +step:8717 train loss:3.484658 +step:8718 train loss:3.524377 +step:8719 train loss:3.491218 +step:8720 train loss:3.603671 +step:8721 train loss:3.495668 +step:8722 train loss:3.586585 +step:8723 train loss:3.458200 +step:8724 train loss:3.469953 +step:8725 train loss:3.499885 +step:8726 train loss:3.451868 +step:8727 train loss:3.531278 +step:8728 train loss:3.489525 +step:8729 train loss:3.494328 +step:8730 train loss:3.470804 +step:8731 train loss:3.475121 +step:8732 train loss:3.577636 +step:8733 train loss:3.498144 +step:8734 train loss:3.538130 +step:8735 train 
loss:3.604466 +step:8736 train loss:3.463432 +step:8737 train loss:3.490089 +step:8738 train loss:3.470814 +step:8739 train loss:3.530791 +step:8740 train loss:3.452758 +step:8741 train loss:3.507800 +step:8742 train loss:3.465270 +step:8743 train loss:3.502343 +step:8744 train loss:3.523747 +step:8745 train loss:3.564848 +step:8746 train loss:3.463012 +step:8747 train loss:3.565629 +step:8748 train loss:3.475841 +step:8749 train loss:3.515090 +step:8750 validation loss:3.432700 +step:8750 train loss:3.524462 +step:8751 train loss:3.563165 +step:8752 train loss:3.421429 +step:8753 train loss:3.469436 +step:8754 train loss:3.522318 +step:8755 train loss:3.502030 +step:8756 train loss:3.548270 +step:8757 train loss:3.460923 +step:8758 train loss:3.615289 +step:8759 train loss:3.463686 +step:8760 train loss:3.493788 +step:8761 train loss:3.572914 +step:8762 train loss:3.467692 +step:8763 train loss:3.440497 +step:8764 train loss:3.513483 +step:8765 train loss:3.579946 +step:8766 train loss:3.512485 +step:8767 train loss:3.471184 +step:8768 train loss:3.512432 +step:8769 train loss:3.484187 +step:8770 train loss:3.530215 +step:8771 train loss:3.502159 +step:8772 train loss:3.520514 +step:8773 train loss:3.481265 +step:8774 train loss:3.514614 +step:8775 train loss:3.515477 +step:8776 train loss:3.458324 +step:8777 train loss:3.495015 +step:8778 train loss:3.504149 +step:8779 train loss:3.525937 +step:8780 train loss:3.491086 +step:8781 train loss:3.494396 +step:8782 train loss:3.516698 +step:8783 train loss:3.494856 +step:8784 train loss:3.521754 +step:8785 train loss:3.507916 +step:8786 train loss:3.583009 +step:8787 train loss:3.525300 +step:8788 train loss:3.429421 +step:8789 train loss:3.527091 +step:8790 train loss:3.455424 +step:8791 train loss:3.505919 +step:8792 train loss:3.444354 +step:8793 train loss:3.534226 +step:8794 train loss:3.457441 +step:8795 train loss:3.527848 +step:8796 train loss:3.670861 +step:8797 train loss:3.417555 +step:8798 train loss:3.575484 +step:8799 train loss:3.491681 +step:8800 train loss:3.486758 +step:8801 train loss:3.505847 +step:8802 train loss:3.564310 +step:8803 train loss:3.522087 +step:8804 train loss:3.505277 +step:8805 train loss:3.523513 +step:8806 train loss:3.491462 +step:8807 train loss:3.484013 +step:8808 train loss:3.439368 +step:8809 train loss:3.563435 +step:8810 train loss:3.467630 +step:8811 train loss:3.455624 +step:8812 train loss:3.500073 +step:8813 train loss:3.409365 +step:8814 train loss:3.597631 +step:8815 train loss:3.444744 +step:8816 train loss:3.561365 +step:8817 train loss:3.498520 +step:8818 train loss:3.429442 +step:8819 train loss:3.548754 +step:8820 train loss:3.479476 +step:8821 train loss:3.503516 +step:8822 train loss:3.484870 +step:8823 train loss:3.499351 +step:8824 train loss:3.560295 +step:8825 train loss:3.536172 +step:8826 train loss:3.506991 +step:8827 train loss:3.467019 +step:8828 train loss:3.508438 +step:8829 train loss:3.485761 +step:8830 train loss:3.466185 +step:8831 train loss:3.541979 +step:8832 train loss:3.478689 +step:8833 train loss:3.512160 +step:8834 train loss:3.478095 +step:8835 train loss:3.415648 +step:8836 train loss:3.543826 +step:8837 train loss:3.444584 +step:8838 train loss:3.490000 +step:8839 train loss:3.476096 +step:8840 train loss:3.477430 +step:8841 train loss:3.491419 +step:8842 train loss:3.501514 +step:8843 train loss:3.514195 +step:8844 train loss:3.479578 +step:8845 train loss:3.501091 +step:8846 train loss:3.468505 +step:8847 train loss:3.505860 +step:8848 train loss:3.552086 
+step:8849 train loss:3.531747 +step:8850 train loss:3.525180 +step:8851 train loss:3.408358 +step:8852 train loss:3.511317 +step:8853 train loss:3.492248 +step:8854 train loss:3.462519 +step:8855 train loss:3.531032 +step:8856 train loss:3.523043 +step:8857 train loss:3.591489 +step:8858 train loss:3.457544 +step:8859 train loss:3.528940 +step:8860 train loss:3.488283 +step:8861 train loss:3.468663 +step:8862 train loss:3.471247 +step:8863 train loss:3.453444 +step:8864 train loss:3.520669 +step:8865 train loss:3.515364 +step:8866 train loss:3.396423 +step:8867 train loss:3.501321 +step:8868 train loss:3.527721 +step:8869 train loss:3.611455 +step:8870 train loss:3.490695 +step:8871 train loss:3.513792 +step:8872 train loss:3.498682 +step:8873 train loss:3.498742 +step:8874 train loss:3.551385 +step:8875 train loss:3.485707 +step:8876 train loss:3.523240 +step:8877 train loss:3.506457 +step:8878 train loss:3.555370 +step:8879 train loss:3.516319 +step:8880 train loss:3.463796 +step:8881 train loss:3.428516 +step:8882 train loss:3.499244 +step:8883 train loss:3.486029 +step:8884 train loss:3.574244 +step:8885 train loss:3.509337 +step:8886 train loss:3.511957 +step:8887 train loss:3.537001 +step:8888 train loss:3.499342 +step:8889 train loss:3.501111 +step:8890 train loss:3.493529 +step:8891 train loss:3.464982 +step:8892 train loss:3.547603 +step:8893 train loss:3.489267 +step:8894 train loss:3.505387 +step:8895 train loss:3.536281 +step:8896 train loss:3.454611 +step:8897 train loss:3.542324 +step:8898 train loss:3.477421 +step:8899 train loss:3.498701 +step:8900 train loss:3.464879 +step:8901 train loss:3.481995 +step:8902 train loss:3.521225 +step:8903 train loss:3.460991 +step:8904 train loss:3.512776 +step:8905 train loss:3.486792 +step:8906 train loss:3.477451 +step:8907 train loss:3.489201 +step:8908 train loss:3.553976 +step:8909 train loss:3.499603 +step:8910 train loss:3.461143 +step:8911 train loss:3.559609 +step:8912 train loss:3.455311 +step:8913 train loss:3.465278 +step:8914 train loss:3.563232 +step:8915 train loss:3.501956 +step:8916 train loss:3.532332 +step:8917 train loss:3.488752 +step:8918 train loss:3.493091 +step:8919 train loss:3.480421 +step:8920 train loss:3.505243 +step:8921 train loss:3.502462 +step:8922 train loss:3.482477 +step:8923 train loss:3.673826 +step:8924 train loss:3.562319 +step:8925 train loss:3.493118 +step:8926 train loss:3.503973 +step:8927 train loss:3.531464 +step:8928 train loss:3.485977 +step:8929 train loss:3.479761 +step:8930 train loss:3.537452 +step:8931 train loss:3.448224 +step:8932 train loss:3.551911 +step:8933 train loss:3.457847 +step:8934 train loss:3.497311 +step:8935 train loss:3.509923 +step:8936 train loss:3.546200 +step:8937 train loss:3.546187 +step:8938 train loss:3.485137 +step:8939 train loss:3.551657 +step:8940 train loss:3.506080 +step:8941 train loss:3.448927 +step:8942 train loss:3.527613 +step:8943 train loss:3.457269 +step:8944 train loss:3.507174 +step:8945 train loss:3.525724 +step:8946 train loss:3.370375 +step:8947 train loss:3.561837 +step:8948 train loss:3.410736 +step:8949 train loss:3.412646 +step:8950 train loss:3.458668 +step:8951 train loss:3.494774 +step:8952 train loss:3.515359 +step:8953 train loss:3.470000 +step:8954 train loss:3.575316 +step:8955 train loss:3.489264 +step:8956 train loss:3.516803 +step:8957 train loss:3.506150 +step:8958 train loss:3.483888 +step:8959 train loss:3.474606 +step:8960 train loss:3.441209 +step:8961 train loss:3.465349 +step:8962 train loss:3.518623 +step:8963 train 
loss:3.496273 +step:8964 train loss:3.480231 +step:8965 train loss:3.520194 +step:8966 train loss:3.478981 +step:8967 train loss:3.459695 +step:8968 train loss:3.444102 +step:8969 train loss:3.433611 +step:8970 train loss:3.513875 +step:8971 train loss:3.463721 +step:8972 train loss:3.664619 +step:8973 train loss:3.547689 +step:8974 train loss:3.507552 +step:8975 train loss:3.508582 +step:8976 train loss:3.473309 +step:8977 train loss:3.559008 +step:8978 train loss:3.543962 +step:8979 train loss:3.460897 +step:8980 train loss:3.554981 +step:8981 train loss:3.506081 +step:8982 train loss:3.482191 +step:8983 train loss:3.423453 +step:8984 train loss:3.548014 +step:8985 train loss:3.465757 +step:8986 train loss:3.502465 +step:8987 train loss:3.476063 +step:8988 train loss:3.525154 +step:8989 train loss:3.435302 +step:8990 train loss:3.574562 +step:8991 train loss:3.426455 +step:8992 train loss:3.485122 +step:8993 train loss:3.577693 +step:8994 train loss:3.478430 +step:8995 train loss:3.505459 +step:8996 train loss:3.476032 +step:8997 train loss:3.424252 +step:8998 train loss:3.429179 +step:8999 train loss:3.454364 +step:9000 validation loss:3.427271 total_sharp:3.7029e-03 L1_sharp:5.3068e-02 L2_sharp:1.4541e-02 L3_sharp:2.1386e-02 L4_sharp:1.2820e-02 L5_sharp:1.7147e-02 L6_sharp:1.9589e-02 L7_sharp:2.3083e-02 L8_sharp:2.4505e-02 L9_sharp:1.8415e-02 L10_sharp:1.3198e-02 L11_sharp:1.3828e-02 L12_sharp:2.3272e-02 total_fnorm:1.3457e+00 total_l1_linf:8.0758e+03 total_spectral:1.3457e+00 L1_fnorm:6.0941e-02 L2_fnorm:5.9046e-02 L3_fnorm:5.8992e-02 L4_fnorm:6.0369e-02 L5_fnorm:6.0917e-02 L6_fnorm:6.1191e-02 L7_fnorm:6.1383e-02 L8_fnorm:6.1196e-02 L9_fnorm:6.1272e-02 L10_fnorm:6.1466e-02 L11_fnorm:6.1365e-02 L12_fnorm:6.1327e-02 L1_l1linf:3.2678e-01 L2_l1linf:3.7077e-01 L3_l1linf:3.6383e-01 L4_l1linf:3.6188e-01 L5_l1linf:3.3452e-01 L6_l1linf:3.0949e-01 L7_l1linf:3.0771e-01 L8_l1linf:3.0624e-01 L9_l1linf:3.2356e-01 L10_l1linf:3.5346e-01 L11_l1linf:3.7070e-01 L12_l1linf:3.5866e-01 L1_spectral:7.3697e-03 L2_spectral:8.3828e-03 L3_spectral:8.1568e-03 L4_spectral:8.1180e-03 L5_spectral:7.5768e-03 L6_spectral:6.9158e-03 L7_spectral:6.9488e-03 L8_spectral:6.9298e-03 L9_spectral:7.2711e-03 L10_spectral:8.0035e-03 L11_spectral:8.2798e-03 L12_spectral:8.1309e-03 ip_v_neg_g:2.1456e-03 cos_v_neg_g:4.7692e-04 v_norm:1.3457e+00 g_norm:3.3431e+00 hv_norm:6.9179e-01 cos_v_hv:7.2031e-03 hg_norm:6.3440e+02 cos_g_hg:4.6487e-01 v_par:3.2813e-05 v_perp:1.3457e+00 L1_cos_v_neg_g:-1.0150e-04 L1_v_norm:6.0941e-02 L2_cos_v_neg_g:-7.8211e-04 L2_v_norm:5.9046e-02 L3_cos_v_neg_g:2.0548e-04 L3_v_norm:5.8992e-02 L4_cos_v_neg_g:8.9181e-04 L4_v_norm:6.0369e-02 L5_cos_v_neg_g:2.4475e-03 L5_v_norm:6.0917e-02 L6_cos_v_neg_g:2.7943e-03 L6_v_norm:6.1191e-02 L7_cos_v_neg_g:1.6171e-03 L7_v_norm:6.1383e-02 L8_cos_v_neg_g:3.0316e-03 L8_v_norm:6.1196e-02 L9_cos_v_neg_g:4.0867e-03 L9_v_norm:6.1272e-02 L10_cos_v_neg_g:3.5672e-03 L10_v_norm:6.1466e-02 L11_cos_v_neg_g:4.7669e-03 L11_v_norm:6.1365e-02 L12_cos_v_neg_g:6.4912e-03 L12_v_norm:6.1327e-02 +step:9000 train loss:3.540176 +step:9001 train loss:3.508937 +step:9002 train loss:3.513483 +step:9003 train loss:3.453971 +step:9004 train loss:3.454665 +step:9005 train loss:3.466448 +step:9006 train loss:3.469665 +step:9007 train loss:3.488635 +step:9008 train loss:3.445075 +step:9009 train loss:3.439649 +step:9010 train loss:3.476913 +step:9011 train loss:3.475786 +step:9012 train loss:3.586894 +step:9013 train loss:3.409167 +step:9014 train loss:3.484053 +step:9015 train loss:3.484368 
+step:9016 train loss:3.559795 +step:9017 train loss:3.502953 +step:9018 train loss:3.423308 +step:9019 train loss:3.508924 +step:9020 train loss:3.517430 +step:9021 train loss:3.477421 +step:9022 train loss:3.489020 +step:9023 train loss:3.486187 +step:9024 train loss:3.504976 +step:9025 train loss:3.490313 +step:9026 train loss:3.448595 +step:9027 train loss:3.493355 +step:9028 train loss:3.513042 +step:9029 train loss:3.531851 +step:9030 train loss:3.529913 +step:9031 train loss:3.494045 +step:9032 train loss:3.504159 +step:9033 train loss:3.489465 +step:9034 train loss:3.501280 +step:9035 train loss:3.502818 +step:9036 train loss:3.450987 +step:9037 train loss:3.445685 +step:9038 train loss:3.570540 +step:9039 train loss:3.473985 +step:9040 train loss:3.488210 +step:9041 train loss:3.537765 +step:9042 train loss:3.392113 +step:9043 train loss:3.487034 +step:9044 train loss:3.507381 +step:9045 train loss:3.452287 +step:9046 train loss:3.496444 +step:9047 train loss:3.488820 +step:9048 train loss:3.469246 +step:9049 train loss:3.502695 +step:9050 train loss:3.459075 +step:9051 train loss:3.495697 +step:9052 train loss:3.426394 +step:9053 train loss:3.551089 +step:9054 train loss:3.563382 +step:9055 train loss:3.486253 +step:9056 train loss:3.549490 +step:9057 train loss:3.402061 +step:9058 train loss:3.487540 +step:9059 train loss:3.562798 +step:9060 train loss:3.498821 +step:9061 train loss:3.521146 +step:9062 train loss:3.451418 +step:9063 train loss:3.585316 +step:9064 train loss:3.474227 +step:9065 train loss:3.482342 +step:9066 train loss:3.500859 +step:9067 train loss:3.465556 +step:9068 train loss:3.536689 +step:9069 train loss:3.494093 +step:9070 train loss:3.545330 +step:9071 train loss:3.480445 +step:9072 train loss:3.498168 +step:9073 train loss:3.463475 +step:9074 train loss:3.542563 +step:9075 train loss:3.488293 +step:9076 train loss:3.455680 +step:9077 train loss:3.532758 +step:9078 train loss:3.469351 +step:9079 train loss:3.515108 +step:9080 train loss:3.449124 +step:9081 train loss:3.487024 +step:9082 train loss:3.512849 +step:9083 train loss:3.543219 +step:9084 train loss:3.436281 +step:9085 train loss:3.504951 +step:9086 train loss:3.488697 +step:9087 train loss:3.435524 +step:9088 train loss:3.498123 +step:9089 train loss:3.513893 +step:9090 train loss:3.446095 +step:9091 train loss:3.548958 +step:9092 train loss:3.473250 +step:9093 train loss:3.470929 +step:9094 train loss:3.597861 +step:9095 train loss:3.465947 +step:9096 train loss:3.479997 +step:9097 train loss:3.463251 +step:9098 train loss:3.458091 +step:9099 train loss:3.582481 +step:9100 train loss:3.614772 +step:9101 train loss:3.533287 +step:9102 train loss:3.477011 +step:9103 train loss:3.482220 +step:9104 train loss:3.568178 +step:9105 train loss:3.431573 +step:9106 train loss:3.555153 +step:9107 train loss:3.491030 +step:9108 train loss:3.472558 +step:9109 train loss:3.497971 +step:9110 train loss:3.500917 +step:9111 train loss:3.481833 +step:9112 train loss:3.483019 +step:9113 train loss:3.511608 +step:9114 train loss:3.461184 +step:9115 train loss:3.487002 +step:9116 train loss:3.512799 +step:9117 train loss:3.521657 +step:9118 train loss:3.490294 +step:9119 train loss:3.413345 +step:9120 train loss:3.513218 +step:9121 train loss:3.542869 +step:9122 train loss:3.490074 +step:9123 train loss:3.508840 +step:9124 train loss:3.539290 +step:9125 train loss:3.488516 +step:9126 train loss:3.467548 +step:9127 train loss:3.499165 +step:9128 train loss:3.555723 +step:9129 train loss:3.510344 +step:9130 train 
loss:3.523392 +step:9131 train loss:3.504099 +step:9132 train loss:3.513371 +step:9133 train loss:3.499574 +step:9134 train loss:3.472793 +step:9135 train loss:3.502122 +step:9136 train loss:3.498507 +step:9137 train loss:3.552247 +step:9138 train loss:3.469973 +step:9139 train loss:3.546478 +step:9140 train loss:3.469018 +step:9141 train loss:3.445434 +step:9142 train loss:3.626017 +step:9143 train loss:3.452245 +step:9144 train loss:3.544122 +step:9145 train loss:3.553891 +step:9146 train loss:3.466657 +step:9147 train loss:3.542049 +step:9148 train loss:3.562046 +step:9149 train loss:3.468887 +step:9150 train loss:3.491605 +step:9151 train loss:3.552048 +step:9152 train loss:3.510183 +step:9153 train loss:3.475218 +step:9154 train loss:3.489871 +step:9155 train loss:3.456121 +step:9156 train loss:3.457658 +step:9157 train loss:3.477346 +step:9158 train loss:3.459033 +step:9159 train loss:3.543617 +step:9160 train loss:3.429007 +step:9161 train loss:3.457061 +step:9162 train loss:3.545293 +step:9163 train loss:3.490364 +step:9164 train loss:3.460684 +step:9165 train loss:3.455950 +step:9166 train loss:3.513650 +step:9167 train loss:3.456345 +step:9168 train loss:3.497318 +step:9169 train loss:3.436946 +step:9170 train loss:3.456404 +step:9171 train loss:3.523111 +step:9172 train loss:3.445286 +step:9173 train loss:3.567983 +step:9174 train loss:3.498258 +step:9175 train loss:3.474618 +step:9176 train loss:3.454468 +step:9177 train loss:3.504500 +step:9178 train loss:3.444849 +step:9179 train loss:3.406821 +step:9180 train loss:3.501366 +step:9181 train loss:3.508201 +step:9182 train loss:3.481667 +step:9183 train loss:3.487582 +step:9184 train loss:3.482934 +step:9185 train loss:3.497161 +step:9186 train loss:3.458322 +step:9187 train loss:3.533243 +step:9188 train loss:3.570606 +step:9189 train loss:3.490863 +step:9190 train loss:3.497904 +step:9191 train loss:3.491374 +step:9192 train loss:3.503443 +step:9193 train loss:3.500809 +step:9194 train loss:3.439874 +step:9195 train loss:3.429527 +step:9196 train loss:3.481068 +step:9197 train loss:3.440330 +step:9198 train loss:3.511867 +step:9199 train loss:3.460797 +step:9200 train loss:3.487595 +step:9201 train loss:3.521297 +step:9202 train loss:3.509878 +step:9203 train loss:3.466035 +step:9204 train loss:3.661947 +step:9205 train loss:3.578002 +step:9206 train loss:3.491066 +step:9207 train loss:3.543272 +step:9208 train loss:3.518471 +step:9209 train loss:3.542583 +step:9210 train loss:3.433211 +step:9211 train loss:3.460951 +step:9212 train loss:3.461626 +step:9213 train loss:3.526283 +step:9214 train loss:3.464888 +step:9215 train loss:3.533218 +step:9216 train loss:3.494164 +step:9217 train loss:3.437662 +step:9218 train loss:3.526258 +step:9219 train loss:3.486789 +step:9220 train loss:3.531692 +step:9221 train loss:3.583899 +step:9222 train loss:3.527106 +step:9223 train loss:3.695666 +step:9224 train loss:3.533407 +step:9225 train loss:3.465524 +step:9226 train loss:3.480482 +step:9227 train loss:3.498160 +step:9228 train loss:3.500306 +step:9229 train loss:3.459215 +step:9230 train loss:3.521541 +step:9231 train loss:3.405885 +step:9232 train loss:3.464408 +step:9233 train loss:3.486658 +step:9234 train loss:3.541195 +step:9235 train loss:3.544983 +step:9236 train loss:3.451681 +step:9237 train loss:3.515374 +step:9238 train loss:3.489151 +step:9239 train loss:3.478735 +step:9240 train loss:3.448973 +step:9241 train loss:3.480353 +step:9242 train loss:3.488944 +step:9243 train loss:3.486589 +step:9244 train loss:3.461455 
+step:9245 train loss:3.468077 +step:9246 train loss:3.467138 +step:9247 train loss:3.478572 +step:9248 train loss:3.487540 +step:9249 train loss:3.487577 +step:9250 validation loss:3.424826 +step:9250 train loss:3.525583 +step:9251 train loss:3.467973 +step:9252 train loss:3.535449 +step:9253 train loss:3.530836 +step:9254 train loss:3.459191 +step:9255 train loss:3.576946 +step:9256 train loss:3.458204 +step:9257 train loss:3.399091 +step:9258 train loss:3.478099 +step:9259 train loss:3.482170 +step:9260 train loss:3.578516 +step:9261 train loss:3.457847 +step:9262 train loss:3.529170 +step:9263 train loss:3.432558 +step:9264 train loss:3.577842 +step:9265 train loss:3.603903 +step:9266 train loss:3.534289 +step:9267 train loss:3.481328 +step:9268 train loss:3.474263 +step:9269 train loss:3.499785 +step:9270 train loss:3.423299 +step:9271 train loss:3.535088 +step:9272 train loss:3.475488 +step:9273 train loss:3.496848 +step:9274 train loss:3.498820 +step:9275 train loss:3.495813 +step:9276 train loss:3.523606 +step:9277 train loss:3.497885 +step:9278 train loss:3.511094 +step:9279 train loss:3.505067 +step:9280 train loss:3.502375 +step:9281 train loss:3.476751 +step:9282 train loss:3.598414 +step:9283 train loss:3.482527 +step:9284 train loss:3.447965 +step:9285 train loss:3.467354 +step:9286 train loss:3.522138 +step:9287 train loss:3.491613 +step:9288 train loss:3.499153 +step:9289 train loss:3.466512 +step:9290 train loss:3.497195 +step:9291 train loss:3.475733 +step:9292 train loss:3.512261 +step:9293 train loss:3.569784 +step:9294 train loss:3.492531 +step:9295 train loss:3.476966 +step:9296 train loss:3.428934 +step:9297 train loss:3.497417 +step:9298 train loss:3.440312 +step:9299 train loss:3.420688 +step:9300 train loss:3.528379 +step:9301 train loss:3.554935 +step:9302 train loss:3.491219 +step:9303 train loss:3.541190 +step:9304 train loss:3.461177 +step:9305 train loss:3.454865 +step:9306 train loss:3.457018 +step:9307 train loss:3.456640 +step:9308 train loss:3.431458 +step:9309 train loss:3.420268 +step:9310 train loss:3.476298 +step:9311 train loss:3.537771 +step:9312 train loss:3.489021 +step:9313 train loss:3.434236 +step:9314 train loss:3.463856 +step:9315 train loss:3.497545 +step:9316 train loss:3.480259 +step:9317 train loss:3.455217 +step:9318 train loss:3.542892 +step:9319 train loss:3.452807 +step:9320 train loss:3.473983 +step:9321 train loss:3.488062 +step:9322 train loss:3.494781 +step:9323 train loss:3.569854 +step:9324 train loss:3.512807 +step:9325 train loss:3.453590 +step:9326 train loss:3.529173 +step:9327 train loss:3.525457 +step:9328 train loss:3.528777 +step:9329 train loss:3.412986 +step:9330 train loss:3.583448 +step:9331 train loss:3.512128 +step:9332 train loss:3.535320 +step:9333 train loss:3.552809 +step:9334 train loss:3.488142 +step:9335 train loss:3.585617 +step:9336 train loss:3.543732 +step:9337 train loss:3.497344 +step:9338 train loss:3.552500 +step:9339 train loss:3.529252 +step:9340 train loss:3.489787 +step:9341 train loss:3.578462 +step:9342 train loss:3.474733 +step:9343 train loss:3.471084 +step:9344 train loss:3.471062 +step:9345 train loss:3.612051 +step:9346 train loss:3.450246 +step:9347 train loss:3.465539 +step:9348 train loss:3.491497 +step:9349 train loss:3.436274 +step:9350 train loss:3.511995 +step:9351 train loss:3.488047 +step:9352 train loss:3.473444 +step:9353 train loss:3.506541 +step:9354 train loss:3.473561 +step:9355 train loss:3.468986 +step:9356 train loss:3.514475 +step:9357 train loss:3.467189 +step:9358 
train loss:3.501841 +step:9359 train loss:3.442758 +step:9360 train loss:3.462550 +step:9361 train loss:3.458873 +step:9362 train loss:3.447478 +step:9363 train loss:3.513189 +step:9364 train loss:3.490727 +step:9365 train loss:3.495294 +step:9366 train loss:3.489974 +step:9367 train loss:3.503601 +step:9368 train loss:3.477096 +step:9369 train loss:3.477512 +step:9370 train loss:3.484365 +step:9371 train loss:3.506277 +step:9372 train loss:3.470414 +step:9373 train loss:3.453935 +step:9374 train loss:3.492814 +step:9375 train loss:3.504316 +step:9376 train loss:3.442413 +step:9377 train loss:3.518110 +step:9378 train loss:3.516044 +step:9379 train loss:3.543786 +step:9380 train loss:3.475297 +step:9381 train loss:3.483755 +step:9382 train loss:3.461269 +step:9383 train loss:3.455426 +step:9384 train loss:3.425574 +step:9385 train loss:3.499428 +step:9386 train loss:3.524549 +step:9387 train loss:3.502686 +step:9388 train loss:3.442260 +step:9389 train loss:3.457568 +step:9390 train loss:3.498693 +step:9391 train loss:3.506986 +step:9392 train loss:3.468463 +step:9393 train loss:3.460814 +step:9394 train loss:3.489175 +step:9395 train loss:3.484574 +step:9396 train loss:3.630055 +step:9397 train loss:3.519094 +step:9398 train loss:3.540164 +step:9399 train loss:3.493529 +step:9400 train loss:3.493884 +step:9401 train loss:3.487957 +step:9402 train loss:3.488799 +step:9403 train loss:3.423679 +step:9404 train loss:3.497157 +step:9405 train loss:3.457802 +step:9406 train loss:3.510647 +step:9407 train loss:3.453107 +step:9408 train loss:3.389672 +step:9409 train loss:3.455287 +step:9410 train loss:3.537125 +step:9411 train loss:3.498622 +step:9412 train loss:3.527606 +step:9413 train loss:3.546177 +step:9414 train loss:3.482809 +step:9415 train loss:3.475163 +step:9416 train loss:3.488999 +step:9417 train loss:3.444432 +step:9418 train loss:3.472665 +step:9419 train loss:3.440570 +step:9420 train loss:3.458318 +step:9421 train loss:3.507337 +step:9422 train loss:3.459442 +step:9423 train loss:3.525492 +step:9424 train loss:3.463296 +step:9425 train loss:3.505371 +step:9426 train loss:3.507660 +step:9427 train loss:3.481294 +step:9428 train loss:3.588450 +step:9429 train loss:3.477325 +step:9430 train loss:3.435884 +step:9431 train loss:3.524542 +step:9432 train loss:3.489352 +step:9433 train loss:3.527722 +step:9434 train loss:3.478453 +step:9435 train loss:3.505371 +step:9436 train loss:3.475340 +step:9437 train loss:3.489077 +step:9438 train loss:3.478773 +step:9439 train loss:3.481444 +step:9440 train loss:3.472701 +step:9441 train loss:3.483371 +step:9442 train loss:3.422937 +step:9443 train loss:3.477632 +step:9444 train loss:3.544277 +step:9445 train loss:3.474838 +step:9446 train loss:3.449745 +step:9447 train loss:3.520307 +step:9448 train loss:3.454887 +step:9449 train loss:3.477955 +step:9450 train loss:3.519113 +step:9451 train loss:3.434798 +step:9452 train loss:3.488406 +step:9453 train loss:3.467161 +step:9454 train loss:3.527303 +step:9455 train loss:3.510378 +step:9456 train loss:3.433589 +step:9457 train loss:3.481159 +step:9458 train loss:3.468551 +step:9459 train loss:3.460856 +step:9460 train loss:3.502819 +step:9461 train loss:3.531578 +step:9462 train loss:3.478164 +step:9463 train loss:3.509028 +step:9464 train loss:3.463883 +step:9465 train loss:3.553243 +step:9466 train loss:3.501557 +step:9467 train loss:3.525743 +step:9468 train loss:3.472655 +step:9469 train loss:3.459136 +step:9470 train loss:3.459727 +step:9471 train loss:3.498938 +step:9472 train loss:3.522839 
+step:9473 train loss:3.512810 +step:9474 train loss:3.456441 +step:9475 train loss:3.449345 +step:9476 train loss:3.669410 +step:9477 train loss:3.541502 +step:9478 train loss:3.515944 +step:9479 train loss:3.614335 +step:9480 train loss:3.461659 +step:9481 train loss:3.497149 +step:9482 train loss:3.522091 +step:9483 train loss:3.477803 +step:9484 train loss:3.509351 +step:9485 train loss:3.430346 +step:9486 train loss:3.467842 +step:9487 train loss:3.500441 +step:9488 train loss:3.453551 +step:9489 train loss:3.498619 +step:9490 train loss:3.464684 +step:9491 train loss:3.509347 +step:9492 train loss:3.528672 +step:9493 train loss:3.497479 +step:9494 train loss:3.508035 +step:9495 train loss:3.460994 +step:9496 train loss:3.522698 +step:9497 train loss:3.538101 +step:9498 train loss:3.484850 +step:9499 train loss:3.536222 +step:9500 validation loss:3.424662 total_sharp:3.6293e-03 L1_sharp:3.5196e-02 L2_sharp:1.7221e-02 L3_sharp:2.4223e-02 L4_sharp:1.4353e-02 L5_sharp:1.6781e-02 L6_sharp:2.1331e-02 L7_sharp:2.2441e-02 L8_sharp:2.7199e-02 L9_sharp:2.0860e-02 L10_sharp:1.5170e-02 L11_sharp:1.4148e-02 L12_sharp:2.7419e-02 total_fnorm:1.3420e+00 total_l1_linf:8.0505e+03 total_spectral:1.3420e+00 L1_fnorm:6.0937e-02 L2_fnorm:5.8991e-02 L3_fnorm:5.9055e-02 L4_fnorm:6.0321e-02 L5_fnorm:6.0919e-02 L6_fnorm:6.1302e-02 L7_fnorm:6.1288e-02 L8_fnorm:6.1290e-02 L9_fnorm:6.1348e-02 L10_fnorm:6.1481e-02 L11_fnorm:6.1477e-02 L12_fnorm:6.1404e-02 L1_l1linf:3.3693e-01 L2_l1linf:3.7784e-01 L3_l1linf:3.6759e-01 L4_l1linf:3.4394e-01 L5_l1linf:3.3395e-01 L6_l1linf:3.2454e-01 L7_l1linf:3.0146e-01 L8_l1linf:3.1094e-01 L9_l1linf:3.4579e-01 L10_l1linf:3.8741e-01 L11_l1linf:4.0110e-01 L12_l1linf:3.9111e-01 L1_spectral:7.6184e-03 L2_spectral:8.5630e-03 L3_spectral:8.3024e-03 L4_spectral:7.7739e-03 L5_spectral:7.5859e-03 L6_spectral:7.3485e-03 L7_spectral:6.7921e-03 L8_spectral:7.0482e-03 L9_spectral:7.7916e-03 L10_spectral:8.6379e-03 L11_spectral:9.0093e-03 L12_spectral:8.7809e-03 ip_v_neg_g:3.7021e-03 cos_v_neg_g:9.1465e-04 v_norm:1.3420e+00 g_norm:3.0161e+00 hv_norm:5.9950e-01 cos_v_hv:8.1243e-03 hg_norm:1.3934e+02 cos_g_hg:4.7304e-01 v_par:2.8399e-05 v_perp:1.3420e+00 L1_cos_v_neg_g:7.2552e-03 L1_v_norm:6.0937e-02 L2_cos_v_neg_g:7.5675e-03 L2_v_norm:5.8991e-02 L3_cos_v_neg_g:4.9756e-03 L3_v_norm:5.9055e-02 L4_cos_v_neg_g:5.5354e-03 L4_v_norm:6.0321e-02 L5_cos_v_neg_g:4.2054e-03 L5_v_norm:6.0919e-02 L6_cos_v_neg_g:5.0586e-03 L6_v_norm:6.1302e-02 L7_cos_v_neg_g:5.6566e-03 L7_v_norm:6.1288e-02 L8_cos_v_neg_g:7.4654e-03 L8_v_norm:6.1290e-02 L9_cos_v_neg_g:7.5921e-03 L9_v_norm:6.1348e-02 L10_cos_v_neg_g:5.7460e-03 L10_v_norm:6.1481e-02 L11_cos_v_neg_g:4.4953e-03 L11_v_norm:6.1477e-02 L12_cos_v_neg_g:3.4925e-03 L12_v_norm:6.1404e-02 +step:9500 train loss:3.525897 +step:9501 train loss:3.504225 +step:9502 train loss:3.476814 +step:9503 train loss:3.491588 +step:9504 train loss:3.445280 +step:9505 train loss:3.471675 +step:9506 train loss:3.485849 +step:9507 train loss:3.473647 +step:9508 train loss:3.668314 +step:9509 train loss:3.483607 +step:9510 train loss:3.472317 +step:9511 train loss:3.495958 +step:9512 train loss:3.529273 +step:9513 train loss:3.519098 +step:9514 train loss:3.488319 +step:9515 train loss:3.387275 +step:9516 train loss:3.488591 +step:9517 train loss:3.524885 +step:9518 train loss:3.499309 +step:9519 train loss:3.507417 +step:9520 train loss:3.399496 +step:9521 train loss:3.390399 +step:9522 train loss:3.510304 +step:9523 train loss:3.505333 +step:9524 train loss:3.509768 +step:9525 train 
loss:3.552876 +step:9526 train loss:3.569273 +step:9527 train loss:3.526856 +step:9528 train loss:3.457513 +step:9529 train loss:3.499066 +step:9530 train loss:3.547631 +step:9531 train loss:3.454219 +step:9532 train loss:3.504644 +step:9533 train loss:3.476229 +step:9534 train loss:3.556751 +step:9535 train loss:3.478221 +step:9536 train loss:3.458480 +step:9537 train loss:3.406618 +step:9538 train loss:3.421516 +step:9539 train loss:3.493906 +step:9540 train loss:3.413753 +step:9541 train loss:3.472579 +step:9542 train loss:3.599868 +step:9543 train loss:3.498284 +step:9544 train loss:3.537506 +step:9545 train loss:3.470650 +step:9546 train loss:3.497696 +step:9547 train loss:3.540505 +step:9548 train loss:3.479902 +step:9549 train loss:3.448054 +step:9550 train loss:3.478944 +step:9551 train loss:3.472963 +step:9552 train loss:3.498658 +step:9553 train loss:3.491908 +step:9554 train loss:3.536563 +step:9555 train loss:3.543053 +step:9556 train loss:3.449818 +step:9557 train loss:3.471674 +step:9558 train loss:3.534484 +step:9559 train loss:3.541042 +step:9560 train loss:3.452031 +step:9561 train loss:3.481044 +step:9562 train loss:3.518022 +step:9563 train loss:3.464636 +step:9564 train loss:3.501302 +step:9565 train loss:3.479881 +step:9566 train loss:3.452941 +step:9567 train loss:3.518209 +step:9568 train loss:3.489712 +step:9569 train loss:3.531408 +step:9570 train loss:3.424873 +step:9571 train loss:3.499355 +step:9572 train loss:3.442845 +step:9573 train loss:3.474691 +step:9574 train loss:3.449512 +step:9575 train loss:3.521412 +step:9576 train loss:3.411244 +step:9577 train loss:3.462857 +step:9578 train loss:3.467500 +step:9579 train loss:3.466010 +step:9580 train loss:3.530114 +step:9581 train loss:3.522046 +step:9582 train loss:3.484532 +step:9583 train loss:3.517910 +step:9584 train loss:3.454964 +step:9585 train loss:3.473650 +step:9586 train loss:3.524832 +step:9587 train loss:3.492071 +step:9588 train loss:3.480711 +step:9589 train loss:3.538313 +step:9590 train loss:3.501496 +step:9591 train loss:3.467875 +step:9592 train loss:3.488281 +step:9593 train loss:3.489337 +step:9594 train loss:3.505426 +step:9595 train loss:3.482515 +step:9596 train loss:3.568081 +step:9597 train loss:3.474369 +step:9598 train loss:3.437157 +step:9599 train loss:3.444927 +step:9600 train loss:3.529274 +step:9601 train loss:3.445652 +step:9602 train loss:3.531263 +step:9603 train loss:3.524642 +step:9604 train loss:3.405470 +step:9605 train loss:3.494693 +step:9606 train loss:3.548620 +step:9607 train loss:3.468971 +step:9608 train loss:3.475308 +step:9609 train loss:3.484935 +step:9610 train loss:3.528518 +step:9611 train loss:3.460817 +step:9612 train loss:3.471423 +step:9613 train loss:3.507443 +step:9614 train loss:3.478641 +step:9615 train loss:3.667315 +step:9616 train loss:3.479435 +step:9617 train loss:3.473349 +step:9618 train loss:3.420104 +step:9619 train loss:3.484311 +step:9620 train loss:3.540332 +step:9621 train loss:3.461351 +step:9622 train loss:3.474959 +step:9623 train loss:3.514841 +step:9624 train loss:3.503378 +step:9625 train loss:3.515325 +step:9626 train loss:3.488707 +step:9627 train loss:3.566926 +step:9628 train loss:3.533649 +step:9629 train loss:3.448985 +step:9630 train loss:3.505559 +step:9631 train loss:3.491302 +step:9632 train loss:3.462337 +step:9633 train loss:3.505223 +step:9634 train loss:3.570578 +step:9635 train loss:3.474094 +step:9636 train loss:3.421498 +step:9637 train loss:3.553196 +step:9638 train loss:3.435849 +step:9639 train loss:3.406817 
+step:9640 train loss:3.529033 +step:9641 train loss:3.501446 +step:9642 train loss:3.476878 +step:9643 train loss:3.483556 +step:9644 train loss:3.536433 +step:9645 train loss:3.464283 +step:9646 train loss:3.499938 +step:9647 train loss:3.512518 +step:9648 train loss:3.463247 +step:9649 train loss:3.434672 +step:9650 train loss:3.450319 +step:9651 train loss:3.543093 +step:9652 train loss:3.523932 +step:9653 train loss:3.467535 +step:9654 train loss:3.447133 +step:9655 train loss:3.443999 +step:9656 train loss:3.436465 +step:9657 train loss:3.464815 +step:9658 train loss:3.523028 +step:9659 train loss:3.628775 +step:9660 train loss:3.411999 +step:9661 train loss:3.430642 +step:9662 train loss:3.450500 +step:9663 train loss:3.493520 +step:9664 train loss:3.544065 +step:9665 train loss:3.386409 +step:9666 train loss:3.429205 +step:9667 train loss:3.566297 +step:9668 train loss:3.548762 +step:9669 train loss:3.563206 +step:9670 train loss:3.545280 +step:9671 train loss:3.543772 +step:9672 train loss:3.457026 +step:9673 train loss:3.479134 +step:9674 train loss:3.490752 +step:9675 train loss:3.490004 +step:9676 train loss:3.446783 +step:9677 train loss:3.455363 +step:9678 train loss:3.489650 +step:9679 train loss:3.480100 +step:9680 train loss:3.480014 +step:9681 train loss:3.464561 +step:9682 train loss:3.533646 +step:9683 train loss:3.506573 +step:9684 train loss:3.426691 +step:9685 train loss:3.509266 +step:9686 train loss:3.543816 +step:9687 train loss:3.450213 +step:9688 train loss:3.535422 +step:9689 train loss:3.635865 +step:9690 train loss:3.478714 +step:9691 train loss:3.465545 +step:9692 train loss:3.424479 +step:9693 train loss:3.426599 +step:9694 train loss:3.445486 +step:9695 train loss:3.551039 +step:9696 train loss:3.585701 +step:9697 train loss:3.495333 +step:9698 train loss:3.530118 +step:9699 train loss:3.493062 +step:9700 train loss:3.489601 +step:9701 train loss:3.539988 +step:9702 train loss:3.458007 +step:9703 train loss:3.480503 +step:9704 train loss:3.562323 +step:9705 train loss:3.460342 +step:9706 train loss:3.455650 +step:9707 train loss:3.502512 +step:9708 train loss:3.452912 +step:9709 train loss:3.474507 +step:9710 train loss:3.493503 +step:9711 train loss:3.465495 +step:9712 train loss:3.477355 +step:9713 train loss:3.527248 +step:9714 train loss:3.484101 +step:9715 train loss:3.504817 +step:9716 train loss:3.527825 +step:9717 train loss:3.445741 +step:9718 train loss:3.450690 +step:9719 train loss:3.533808 +step:9720 train loss:3.467742 +step:9721 train loss:3.457721 +step:9722 train loss:3.518558 +step:9723 train loss:3.467351 +step:9724 train loss:3.497925 +step:9725 train loss:3.547757 +step:9726 train loss:3.489899 +step:9727 train loss:3.465948 +step:9728 train loss:3.505663 +step:9729 train loss:3.532778 +step:9730 train loss:3.605053 +step:9731 train loss:3.522316 +step:9732 train loss:3.486034 +step:9733 train loss:3.526773 +step:9734 train loss:3.446617 +step:9735 train loss:3.553966 +step:9736 train loss:3.455010 +step:9737 train loss:3.514661 +step:9738 train loss:3.479705 +step:9739 train loss:3.553126 +step:9740 train loss:3.516646 +step:9741 train loss:3.458554 +step:9742 train loss:3.551712 +step:9743 train loss:3.425448 +step:9744 train loss:3.484808 +step:9745 train loss:3.444784 +step:9746 train loss:3.481174 +step:9747 train loss:3.471651 +step:9748 train loss:3.371396 +step:9749 train loss:3.468468 +step:9750 validation loss:3.416744 +step:9750 train loss:3.451314 +step:9751 train loss:3.589036 +step:9752 train loss:3.475727 +step:9753 
train loss:3.434532 +step:9754 train loss:3.464656 +step:9755 train loss:3.462645 +step:9756 train loss:3.462742 +step:9757 train loss:3.426533 +step:9758 train loss:3.421138 +step:9759 train loss:3.469233 +step:9760 train loss:3.413163 +step:9761 train loss:3.453725 +step:9762 train loss:3.451628 +step:9763 train loss:3.474566 +step:9764 train loss:3.459621 +step:9765 train loss:3.420632 +step:9766 train loss:3.510523 +step:9767 train loss:3.465949 +step:9768 train loss:3.478143 +step:9769 train loss:3.431225 +step:9770 train loss:3.431479 +step:9771 train loss:3.482932 +step:9772 train loss:3.494593 +step:9773 train loss:3.469838 +step:9774 train loss:3.441548 +step:9775 train loss:3.530940 +step:9776 train loss:3.527924 +step:9777 train loss:3.418057 +step:9778 train loss:3.427383 +step:9779 train loss:3.431025 +step:9780 train loss:3.429037 +step:9781 train loss:3.447328 +step:9782 train loss:3.525760 +step:9783 train loss:3.436233 +step:9784 train loss:3.462764 +step:9785 train loss:3.453815 +step:9786 train loss:3.490196 +step:9787 train loss:3.513471 +step:9788 train loss:3.441361 +step:9789 train loss:3.452668 +step:9790 train loss:3.412307 +step:9791 train loss:3.462755 +step:9792 train loss:3.479031 +step:9793 train loss:3.493332 +step:9794 train loss:3.472266 +step:9795 train loss:3.474919 +step:9796 train loss:3.459833 +step:9797 train loss:3.454209 +step:9798 train loss:3.471945 +step:9799 train loss:3.474474 +step:9800 train loss:3.545222 +step:9801 train loss:3.468758 +step:9802 train loss:3.526981 +step:9803 train loss:3.384546 +step:9804 train loss:3.480732 +step:9805 train loss:3.485234 +step:9806 train loss:3.459418 +step:9807 train loss:3.426476 +step:9808 train loss:3.341275 +step:9809 train loss:3.530046 +step:9810 train loss:3.485353 +step:9811 train loss:3.469459 +step:9812 train loss:3.443414 +step:9813 train loss:3.524089 +step:9814 train loss:3.514677 +step:9815 train loss:3.417876 +step:9816 train loss:3.422184 +step:9817 train loss:3.452134 +step:9818 train loss:3.479935 +step:9819 train loss:3.450308 +step:9820 train loss:3.520003 +step:9821 train loss:3.498211 +step:9822 train loss:3.472710 +step:9823 train loss:3.533530 +step:9824 train loss:3.435951 +step:9825 train loss:3.522626 +step:9826 train loss:3.518426 +step:9827 train loss:3.525143 +step:9828 train loss:3.439740 +step:9829 train loss:3.448078 +step:9830 train loss:3.437490 +step:9831 train loss:3.494016 +step:9832 train loss:3.503213 +step:9833 train loss:3.417145 +step:9834 train loss:3.468210 +step:9835 train loss:3.435315 +step:9836 train loss:3.498192 +step:9837 train loss:3.470270 +step:9838 train loss:3.508846 +step:9839 train loss:3.484132 +step:9840 train loss:3.453249 +step:9841 train loss:3.459862 +step:9842 train loss:3.521828 +step:9843 train loss:3.512150 +step:9844 train loss:3.462414 +step:9845 train loss:3.491136 +step:9846 train loss:3.429789 +step:9847 train loss:3.558499 +step:9848 train loss:3.482221 +step:9849 train loss:3.506996 +step:9850 train loss:3.426514 +step:9851 train loss:3.478947 +step:9852 train loss:3.443841 +step:9853 train loss:3.466884 +step:9854 train loss:3.477506 +step:9855 train loss:3.424298 +step:9856 train loss:3.429191 +step:9857 train loss:3.416980 +step:9858 train loss:3.481376 +step:9859 train loss:3.400341 +step:9860 train loss:3.640276 +step:9861 train loss:3.463018 +step:9862 train loss:3.431075 +step:9863 train loss:3.412774 +step:9864 train loss:3.539005 +step:9865 train loss:3.416116 +step:9866 train loss:3.456371 +step:9867 train loss:3.455621 
+step:9868 train loss:3.514592 +step:9869 train loss:3.479478 +step:9870 train loss:3.448951 +step:9871 train loss:3.490084 +step:9872 train loss:3.430290 +step:9873 train loss:3.484274 +step:9874 train loss:3.448971 +step:9875 train loss:3.453453 +step:9876 train loss:3.415224 +step:9877 train loss:3.465745 +step:9878 train loss:3.498406 +step:9879 train loss:3.501233 +step:9880 train loss:3.430955 +step:9881 train loss:3.485204 +step:9882 train loss:3.446454 +step:9883 train loss:3.454915 +step:9884 train loss:3.448180 +step:9885 train loss:3.512314 +step:9886 train loss:3.477256 +step:9887 train loss:3.478868 +step:9888 train loss:3.500705 +step:9889 train loss:3.534432 +step:9890 train loss:3.444730 +step:9891 train loss:3.449277 +step:9892 train loss:3.422883 +step:9893 train loss:3.542158 +step:9894 train loss:3.454174 +step:9895 train loss:3.388977 +step:9896 train loss:3.545205 +step:9897 train loss:3.420186 +step:9898 train loss:3.488647 +step:9899 train loss:3.468370 +step:9900 train loss:3.511838 +step:9901 train loss:3.436656 +step:9902 train loss:3.484673 +step:9903 train loss:3.452472 +step:9904 train loss:3.502754 +step:9905 train loss:3.405405 +step:9906 train loss:3.446589 +step:9907 train loss:3.454245 +step:9908 train loss:3.452215 +step:9909 train loss:3.468124 +step:9910 train loss:3.492018 +step:9911 train loss:3.575641 +step:9912 train loss:3.451059 +step:9913 train loss:3.456132 +step:9914 train loss:3.463459 +step:9915 train loss:3.462463 +step:9916 train loss:3.413391 +step:9917 train loss:3.450295 +step:9918 train loss:3.446124 +step:9919 train loss:3.609855 +step:9920 train loss:3.397294 +step:9921 train loss:3.490185 +step:9922 train loss:3.449313 +step:9923 train loss:3.505443 +step:9924 train loss:3.421303 +step:9925 train loss:3.478207 +step:9926 train loss:3.458041 +step:9927 train loss:3.502414 +step:9928 train loss:3.427429 +step:9929 train loss:3.465213 +step:9930 train loss:3.558183 +step:9931 train loss:3.521201 +step:9932 train loss:3.405660 +step:9933 train loss:3.502198 +step:9934 train loss:3.420171 +step:9935 train loss:3.537519 +step:9936 train loss:3.443527 +step:9937 train loss:3.470422 +step:9938 train loss:3.455442 +step:9939 train loss:3.521944 +step:9940 train loss:3.554613 +step:9941 train loss:3.430182 +step:9942 train loss:3.472849 +step:9943 train loss:3.601364 +step:9944 train loss:3.471213 +step:9945 train loss:3.493673 +step:9946 train loss:3.465157 +step:9947 train loss:3.413653 +step:9948 train loss:3.459712 +step:9949 train loss:3.353917 +step:9950 train loss:3.506302 +step:9951 train loss:3.422608 +step:9952 train loss:3.494430 +step:9953 train loss:3.457281 +step:9954 train loss:3.515660 +step:9955 train loss:3.486594 +step:9956 train loss:3.492532 +step:9957 train loss:3.467897 +step:9958 train loss:3.522274 +step:9959 train loss:3.422041 +step:9960 train loss:3.455608 +step:9961 train loss:3.462486 +step:9962 train loss:3.512394 +step:9963 train loss:3.403675 +step:9964 train loss:3.458144 +step:9965 train loss:3.461396 +step:9966 train loss:3.518438 +step:9967 train loss:3.433916 +step:9968 train loss:3.497592 +step:9969 train loss:3.413517 +step:9970 train loss:3.453483 +step:9971 train loss:3.496304 +step:9972 train loss:3.516610 +step:9973 train loss:3.495017 +step:9974 train loss:3.482730 +step:9975 train loss:3.451288 +step:9976 train loss:3.409277 +step:9977 train loss:3.461065 +step:9978 train loss:3.459180 +step:9979 train loss:3.470290 +step:9980 train loss:3.524758 +step:9981 train loss:3.432963 +step:9982 train 
loss:3.493752 +step:9983 train loss:3.413414 +step:9984 train loss:3.477640 +step:9985 train loss:3.419922 +step:9986 train loss:3.473946 +step:9987 train loss:3.516573 +step:9988 train loss:3.531626 +step:9989 train loss:3.424842 +step:9990 train loss:3.565118 +step:9991 train loss:3.410583 +step:9992 train loss:3.485856 +step:9993 train loss:3.477630 +step:9994 train loss:3.592600 +step:9995 train loss:3.531787 +step:9996 train loss:3.444028 +step:9997 train loss:3.483590 +step:9998 train loss:3.538058 +step:9999 train loss:3.504183 +step:10000 validation loss:3.411666 total_sharp:5.0910e-03 L1_sharp:5.1001e-01 L2_sharp:4.4768e-02 L3_sharp:2.6924e-02 L4_sharp:1.3245e-02 L5_sharp:1.5142e-02 L6_sharp:2.2602e-02 L7_sharp:2.3963e-02 L8_sharp:2.5397e-02 L9_sharp:1.9110e-02 L10_sharp:1.2869e-02 L11_sharp:1.3068e-02 L12_sharp:1.9421e-02 total_fnorm:1.3466e+00 total_l1_linf:8.0771e+03 total_spectral:1.3466e+00 L1_fnorm:6.0889e-02 L2_fnorm:5.8776e-02 L3_fnorm:5.8840e-02 L4_fnorm:6.0309e-02 L5_fnorm:6.0977e-02 L6_fnorm:6.1278e-02 L7_fnorm:6.1297e-02 L8_fnorm:6.1240e-02 L9_fnorm:6.1373e-02 L10_fnorm:6.1446e-02 L11_fnorm:6.1456e-02 L12_fnorm:6.1461e-02 L1_l1linf:3.6383e-01 L2_l1linf:3.9535e-01 L3_l1linf:3.8046e-01 L4_l1linf:3.6398e-01 L5_l1linf:3.4202e-01 L6_l1linf:3.2967e-01 L7_l1linf:3.1192e-01 L8_l1linf:3.1612e-01 L9_l1linf:3.4934e-01 L10_l1linf:3.7985e-01 L11_l1linf:4.0865e-01 L12_l1linf:4.0855e-01 L1_spectral:8.0598e-03 L2_spectral:8.8031e-03 L3_spectral:8.5591e-03 L4_spectral:8.2241e-03 L5_spectral:7.7161e-03 L6_spectral:7.4040e-03 L7_spectral:7.0738e-03 L8_spectral:7.1404e-03 L9_spectral:7.8385e-03 L10_spectral:8.4583e-03 L11_spectral:9.1589e-03 L12_spectral:9.2296e-03 ip_v_neg_g:5.6563e-03 cos_v_neg_g:1.1139e-03 v_norm:1.3466e+00 g_norm:3.7708e+00 hv_norm:1.4288e+00 cos_v_hv:4.7982e-03 hg_norm:8.0280e+02 cos_g_hg:5.5543e-01 v_par:4.3318e-05 v_perp:1.3466e+00 L1_cos_v_neg_g:1.9812e-02 L1_v_norm:6.0889e-02 L2_cos_v_neg_g:8.1739e-03 L2_v_norm:5.8776e-02 L3_cos_v_neg_g:6.0080e-03 L3_v_norm:5.8840e-02 L4_cos_v_neg_g:1.4229e-03 L4_v_norm:6.0309e-02 L5_cos_v_neg_g:3.9445e-03 L5_v_norm:6.0977e-02 L6_cos_v_neg_g:4.3540e-03 L6_v_norm:6.1278e-02 L7_cos_v_neg_g:5.8528e-03 L7_v_norm:6.1297e-02 L8_cos_v_neg_g:6.7817e-03 L8_v_norm:6.1240e-02 L9_cos_v_neg_g:5.4598e-03 L9_v_norm:6.1373e-02 L10_cos_v_neg_g:5.2068e-03 L10_v_norm:6.1446e-02 L11_cos_v_neg_g:4.7378e-03 L11_v_norm:6.1456e-02 L12_cos_v_neg_g:4.9679e-03 L12_v_norm:6.1461e-02 diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/config.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..bc5e8a4ca5706f8662d3bc89fe197b6040c4558a --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/muon_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 
1, + "device": "", + "compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.002, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "0f5464ae-6e24-4120-8982-94d183003d71", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..82f52915e1bb1e36564881d9bfeb526af5e36c6c --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.442337989807129, + "total_l1_linf_norm": 9800.013671875, + "total_spectral_norm": 1.4423376321792603, + "layer_1_update_fnorm": 0.12263660877943039, + "layer_1_max_l1_linf_norm": 0.20365744829177856, + "layer_1_max_spectral_norm": 0.004600452724844217, + "layer_2_update_fnorm": 0.11609659343957901, + "layer_2_max_l1_linf_norm": 0.22641710937023163, + "layer_2_max_spectral_norm": 0.005147495772689581, + "layer_3_update_fnorm": 0.1123834177851677, + "layer_3_max_l1_linf_norm": 0.23501461744308472, + "layer_3_max_spectral_norm": 0.005328433588147163, + "layer_4_update_fnorm": 0.11609119921922684, + "layer_4_max_l1_linf_norm": 0.24466411769390106, + "layer_4_max_spectral_norm": 0.0055507174693048, + "layer_5_update_fnorm": 0.11777809262275696, + "layer_5_max_l1_linf_norm": 0.24984949827194214, + "layer_5_max_spectral_norm": 0.0056511531583964825, + "layer_6_update_fnorm": 0.12054436653852463, + "layer_6_max_l1_linf_norm": 0.25467073917388916, + "layer_6_max_spectral_norm": 0.00579501036554575, + "layer_7_update_fnorm": 0.12052010744810104, + "layer_7_max_l1_linf_norm": 0.26074767112731934, + "layer_7_max_spectral_norm": 0.00590281980112195, + "layer_8_update_fnorm": 0.12095130980014801, + "layer_8_max_l1_linf_norm": 0.2604270577430725, + "layer_8_max_spectral_norm": 0.005933323409408331, + "layer_9_update_fnorm": 0.12039916962385178, + "layer_9_max_l1_linf_norm": 0.25616833567619324, + "layer_9_max_spectral_norm": 0.0057639447040855885, + "layer_10_update_fnorm": 0.12104736268520355, + "layer_10_max_l1_linf_norm": 0.2579573392868042, + "layer_10_max_spectral_norm": 0.005829866975545883, + "layer_11_update_fnorm": 0.12086397409439087, + "layer_11_max_l1_linf_norm": 0.24493266642093658, + "layer_11_max_spectral_norm": 0.005537967197597027, + "layer_12_update_fnorm": 0.12134017050266266, + "layer_12_max_l1_linf_norm": 0.23626314103603363, + "layer_12_max_spectral_norm": 0.0053604114800691605, + "total_sharpness": 0.01566031202673912, + "ip_v_neg_g": 0.013552725315093994, + "cos_v_neg_g": 0.003158723236992955, + "v_norm": 1.442337989807129, + "g_norm": 2.9747331142425537, + "hv_norm": 1.5152041912078857, + "cos_v_hv": 0.01490720920264721, + "hg_norm": 133.3668212890625, + "cos_g_hg": 0.4947303235530853, + "v_parallel_norm": 0.00016450016119051725, + "v_perp_norm": 1.442337989807129, + "layer_1_v_norm": 0.12263660877943039, + "layer_1_cos_v_neg_g": 0.01597469672560692, + "layer_2_v_norm": 0.11609659343957901, + "layer_2_cos_v_neg_g": 0.016627155244350433, + "layer_3_v_norm": 0.11238342523574829, + "layer_3_cos_v_neg_g": 
0.013769892044365406, + "layer_4_v_norm": 0.11609119921922684, + "layer_4_cos_v_neg_g": 0.0106252022087574, + "layer_5_v_norm": 0.11777809262275696, + "layer_5_cos_v_neg_g": 0.011100547388195992, + "layer_6_v_norm": 0.12054435908794403, + "layer_6_cos_v_neg_g": 0.00992168951779604, + "layer_7_v_norm": 0.12052010744810104, + "layer_7_cos_v_neg_g": 0.009985469281673431, + "layer_8_v_norm": 0.12095130980014801, + "layer_8_cos_v_neg_g": 0.009072770364582539, + "layer_9_v_norm": 0.12039916962385178, + "layer_9_cos_v_neg_g": 0.008026516065001488, + "layer_10_v_norm": 0.12104736268520355, + "layer_10_cos_v_neg_g": 0.007448551710695028, + "layer_11_v_norm": 0.12086396664381027, + "layer_11_cos_v_neg_g": 0.007244473788887262, + "layer_12_v_norm": 0.12134017050266266, + "layer_12_cos_v_neg_g": 0.00618296442553401, + "layer_1_sharpness": 0.09174272418022156, + "layer_2_sharpness": 0.05201098322868347, + "layer_3_sharpness": 0.037034377455711365, + "layer_4_sharpness": 0.020352229475975037, + "layer_5_sharpness": 0.021467503160238266, + "layer_6_sharpness": 0.020458977669477463, + "layer_7_sharpness": 0.022004421800374985, + "layer_8_sharpness": 0.018391676247119904, + "layer_9_sharpness": 0.014944246038794518, + "layer_10_sharpness": 0.010645076632499695, + "layer_11_sharpness": 0.010306211188435555, + "layer_12_sharpness": 0.01233464665710926 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_10000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..a1a84209697675dee813e3a9f5e044be4406d492 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3934193849563599, + "total_l1_linf_norm": 9503.9765625, + "total_spectral_norm": 1.3934193849563599, + "layer_1_update_fnorm": 0.11873168498277664, + "layer_1_max_l1_linf_norm": 0.3629944324493408, + "layer_1_max_spectral_norm": 0.008130147121846676, + "layer_2_update_fnorm": 0.11478327214717865, + "layer_2_max_l1_linf_norm": 0.4355461597442627, + "layer_2_max_spectral_norm": 0.00978228822350502, + "layer_3_update_fnorm": 0.11601391434669495, + "layer_3_max_l1_linf_norm": 0.42203766107559204, + "layer_3_max_spectral_norm": 0.009498844854533672, + "layer_4_update_fnorm": 0.11912824958562851, + "layer_4_max_l1_linf_norm": 0.40698206424713135, + "layer_4_max_spectral_norm": 0.00913885235786438, + "layer_5_update_fnorm": 0.12029707431793213, + "layer_5_max_l1_linf_norm": 0.38822734355926514, + "layer_5_max_spectral_norm": 0.008683568798005581, + "layer_6_update_fnorm": 0.12072779983282089, + "layer_6_max_l1_linf_norm": 0.34848278760910034, + "layer_6_max_spectral_norm": 0.007829409092664719, + "layer_7_update_fnorm": 0.1208762601017952, + "layer_7_max_l1_linf_norm": 0.33583277463912964, + "layer_7_max_spectral_norm": 0.0076044462621212006, + "layer_8_update_fnorm": 0.120804063975811, + "layer_8_max_l1_linf_norm": 0.35205161571502686, + "layer_8_max_spectral_norm": 0.007905395701527596, + "layer_9_update_fnorm": 0.12111726403236389, + "layer_9_max_l1_linf_norm": 0.3709571659564972, + "layer_9_max_spectral_norm": 0.008304087445139885, + "layer_10_update_fnorm": 0.12112202495336533, + "layer_10_max_l1_linf_norm": 0.40541064739227295, + "layer_10_max_spectral_norm": 0.009071026928722858, + "layer_11_update_fnorm": 0.12101420760154724, + 
"layer_11_max_l1_linf_norm": 0.4250592887401581, + "layer_11_max_spectral_norm": 0.009508651681244373, + "layer_12_update_fnorm": 0.12077637761831284, + "layer_12_max_l1_linf_norm": 0.4338495135307312, + "layer_12_max_spectral_norm": 0.0096677141264081, + "total_sharpness": 0.0034617874771356583, + "ip_v_neg_g": 0.0031622168608009815, + "cos_v_neg_g": 0.0009604421211406589, + "v_norm": 1.3934193849563599, + "g_norm": 2.362863302230835, + "hv_norm": 0.43752914667129517, + "cos_v_hv": 0.011024915613234043, + "hg_norm": 196.09396362304688, + "cos_g_hg": 0.601952314376831, + "v_parallel_norm": 4.113349496037699e-05, + "v_perp_norm": 1.3934193849563599, + "layer_1_v_norm": 0.11873168498277664, + "layer_1_cos_v_neg_g": 0.007131792139261961, + "layer_2_v_norm": 0.11478327214717865, + "layer_2_cos_v_neg_g": 0.0029688635841012, + "layer_3_v_norm": 0.11601390689611435, + "layer_3_cos_v_neg_g": 0.0011840838706120849, + "layer_4_v_norm": 0.11912824958562851, + "layer_4_cos_v_neg_g": 0.0016277891118079424, + "layer_5_v_norm": 0.12029707431793213, + "layer_5_cos_v_neg_g": 0.002328783506527543, + "layer_6_v_norm": 0.12072779983282089, + "layer_6_cos_v_neg_g": 0.0028093892615288496, + "layer_7_v_norm": 0.1208762601017952, + "layer_7_cos_v_neg_g": 0.003650908824056387, + "layer_8_v_norm": 0.120804063975811, + "layer_8_cos_v_neg_g": 0.003306867554783821, + "layer_9_v_norm": 0.12111726403236389, + "layer_9_cos_v_neg_g": 0.0032189330086112022, + "layer_10_v_norm": 0.12112202495336533, + "layer_10_cos_v_neg_g": 0.003092834260314703, + "layer_11_v_norm": 0.12101420760154724, + "layer_11_cos_v_neg_g": 0.002459758659824729, + "layer_12_v_norm": 0.12077637761831284, + "layer_12_cos_v_neg_g": 0.0028090858832001686, + "layer_1_sharpness": 0.030964739620685577, + "layer_2_sharpness": 0.00637122942134738, + "layer_3_sharpness": 0.0057418737560510635, + "layer_4_sharpness": 0.0031736588571220636, + "layer_5_sharpness": 0.004662598483264446, + "layer_6_sharpness": 0.006357492879033089, + "layer_7_sharpness": 0.006354173645377159, + "layer_8_sharpness": 0.006361222360283136, + "layer_9_sharpness": 0.004436349030584097, + "layer_10_sharpness": 0.0031720309052616358, + "layer_11_sharpness": 0.0032580525148659945, + "layer_12_sharpness": 0.005143929738551378 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..88a5fb2c4b48a0fd6d90bf8118d41e257149cd66 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.39031982421875, + "total_l1_linf_norm": 9503.884765625, + "total_spectral_norm": 1.3903197050094604, + "layer_1_update_fnorm": 0.12191233783960342, + "layer_1_max_l1_linf_norm": 0.20821711421012878, + "layer_1_max_spectral_norm": 0.00476621650159359, + "layer_2_update_fnorm": 0.11643651127815247, + "layer_2_max_l1_linf_norm": 0.24085161089897156, + "layer_2_max_spectral_norm": 0.005458193831145763, + "layer_3_update_fnorm": 0.11377480626106262, + "layer_3_max_l1_linf_norm": 0.26516222953796387, + "layer_3_max_spectral_norm": 0.006010747980326414, + "layer_4_update_fnorm": 0.11611823737621307, + "layer_4_max_l1_linf_norm": 0.2757660746574402, + "layer_4_max_spectral_norm": 0.006235236302018166, + "layer_5_update_fnorm": 0.11854568123817444, + 
"layer_5_max_l1_linf_norm": 0.2671010196208954, + "layer_5_max_spectral_norm": 0.006071803625673056, + "layer_6_update_fnorm": 0.12049483507871628, + "layer_6_max_l1_linf_norm": 0.24304428696632385, + "layer_6_max_spectral_norm": 0.005517137236893177, + "layer_7_update_fnorm": 0.12071441113948822, + "layer_7_max_l1_linf_norm": 0.2496008425951004, + "layer_7_max_spectral_norm": 0.00558937294408679, + "layer_8_update_fnorm": 0.12085752189159393, + "layer_8_max_l1_linf_norm": 0.2675707936286926, + "layer_8_max_spectral_norm": 0.0059629203751683235, + "layer_9_update_fnorm": 0.12032351642847061, + "layer_9_max_l1_linf_norm": 0.2768613398075104, + "layer_9_max_spectral_norm": 0.0061936154961586, + "layer_10_update_fnorm": 0.12055693566799164, + "layer_10_max_l1_linf_norm": 0.2959757149219513, + "layer_10_max_spectral_norm": 0.0066487728618085384, + "layer_11_update_fnorm": 0.12044191360473633, + "layer_11_max_l1_linf_norm": 0.30310705304145813, + "layer_11_max_spectral_norm": 0.006768836174160242, + "layer_12_update_fnorm": 0.1209128350019455, + "layer_12_max_l1_linf_norm": 0.2945045828819275, + "layer_12_max_spectral_norm": 0.006566117517650127, + "total_sharpness": 0.009706750512123108, + "ip_v_neg_g": 0.009742344729602337, + "cos_v_neg_g": 0.002646113745868206, + "v_norm": 1.39031982421875, + "g_norm": 2.6481359004974365, + "hv_norm": 0.8653162121772766, + "cos_v_hv": 0.01559602003544569, + "hg_norm": 95.64275360107422, + "cos_g_hg": 0.4728640615940094, + "v_parallel_norm": 9.857722761807963e-05, + "v_perp_norm": 1.39031982421875, + "layer_1_v_norm": 0.12191233783960342, + "layer_1_cos_v_neg_g": 0.008421973325312138, + "layer_2_v_norm": 0.11643651127815247, + "layer_2_cos_v_neg_g": 0.010380622930824757, + "layer_3_v_norm": 0.11377481371164322, + "layer_3_cos_v_neg_g": 0.012500499375164509, + "layer_4_v_norm": 0.11611823737621307, + "layer_4_cos_v_neg_g": 0.009644407778978348, + "layer_5_v_norm": 0.11854568123817444, + "layer_5_cos_v_neg_g": 0.009122621268033981, + "layer_6_v_norm": 0.12049483507871628, + "layer_6_cos_v_neg_g": 0.008468963205814362, + "layer_7_v_norm": 0.12071441113948822, + "layer_7_cos_v_neg_g": 0.009074158035218716, + "layer_8_v_norm": 0.12085752189159393, + "layer_8_cos_v_neg_g": 0.008453905582427979, + "layer_9_v_norm": 0.12032351642847061, + "layer_9_cos_v_neg_g": 0.008056734688580036, + "layer_10_v_norm": 0.12055693566799164, + "layer_10_cos_v_neg_g": 0.00729561410844326, + "layer_11_v_norm": 0.12044192105531693, + "layer_11_cos_v_neg_g": 0.0071402667090296745, + "layer_12_v_norm": 0.1209128350019455, + "layer_12_cos_v_neg_g": 0.006269804202020168, + "layer_1_sharpness": 0.028121447190642357, + "layer_2_sharpness": 0.013447660021483898, + "layer_3_sharpness": 0.023765815421938896, + "layer_4_sharpness": 0.011428321711719036, + "layer_5_sharpness": 0.012249937281012535, + "layer_6_sharpness": 0.012841090559959412, + "layer_7_sharpness": 0.015735195949673653, + "layer_8_sharpness": 0.014158771373331547, + "layer_9_sharpness": 0.011047779582440853, + "layer_10_sharpness": 0.00869667623192072, + "layer_11_sharpness": 0.008518815040588379, + "layer_12_sharpness": 0.012942887842655182 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..595119234813f2f9b713bb0c576dfd9e4a256330 --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3960384130477905, + "total_l1_linf_norm": 9527.7021484375, + "total_spectral_norm": 1.396038293838501, + "layer_1_update_fnorm": 0.12049461156129837, + "layer_1_max_l1_linf_norm": 0.3215913772583008, + "layer_1_max_spectral_norm": 0.0069645848125219345, + "layer_2_update_fnorm": 0.11343539506196976, + "layer_2_max_l1_linf_norm": 0.3015875220298767, + "layer_2_max_spectral_norm": 0.006809231359511614, + "layer_3_update_fnorm": 0.11292769014835358, + "layer_3_max_l1_linf_norm": 0.3008192181587219, + "layer_3_max_spectral_norm": 0.006783881690353155, + "layer_4_update_fnorm": 0.11680576950311661, + "layer_4_max_l1_linf_norm": 0.31727659702301025, + "layer_4_max_spectral_norm": 0.007143112365156412, + "layer_5_update_fnorm": 0.11913791298866272, + "layer_5_max_l1_linf_norm": 0.30871641635894775, + "layer_5_max_spectral_norm": 0.006895836442708969, + "layer_6_update_fnorm": 0.1208929568529129, + "layer_6_max_l1_linf_norm": 0.2845824062824249, + "layer_6_max_spectral_norm": 0.006420616991817951, + "layer_7_update_fnorm": 0.12077899277210236, + "layer_7_max_l1_linf_norm": 0.2702018618583679, + "layer_7_max_spectral_norm": 0.0061320532113313675, + "layer_8_update_fnorm": 0.12094547599554062, + "layer_8_max_l1_linf_norm": 0.2759842574596405, + "layer_8_max_spectral_norm": 0.006208918057382107, + "layer_9_update_fnorm": 0.12060337513685226, + "layer_9_max_l1_linf_norm": 0.3040481209754944, + "layer_9_max_spectral_norm": 0.006881605833768845, + "layer_10_update_fnorm": 0.12079441547393799, + "layer_10_max_l1_linf_norm": 0.3172762989997864, + "layer_10_max_spectral_norm": 0.007158751133829355, + "layer_11_update_fnorm": 0.12064186483621597, + "layer_11_max_l1_linf_norm": 0.31922489404678345, + "layer_11_max_spectral_norm": 0.007182380184531212, + "layer_12_update_fnorm": 0.12074372172355652, + "layer_12_max_l1_linf_norm": 0.3014964163303375, + "layer_12_max_spectral_norm": 0.006871117744594812, + "total_sharpness": 0.009650234133005142, + "ip_v_neg_g": 0.010688062757253647, + "cos_v_neg_g": 0.0028904348146170378, + "v_norm": 1.3960384130477905, + "g_norm": 2.6487345695495605, + "hv_norm": 1.020698070526123, + "cos_v_hv": 0.013198904693126678, + "hg_norm": 100.26532745361328, + "cos_g_hg": 0.5219594240188599, + "v_parallel_norm": 9.798991231946275e-05, + "v_perp_norm": 1.3960384130477905, + "layer_1_v_norm": 0.12049461156129837, + "layer_1_cos_v_neg_g": 0.019441401585936546, + "layer_2_v_norm": 0.11343539506196976, + "layer_2_cos_v_neg_g": 0.0179434884339571, + "layer_3_v_norm": 0.11292769014835358, + "layer_3_cos_v_neg_g": 0.0143122598528862, + "layer_4_v_norm": 0.11680576950311661, + "layer_4_cos_v_neg_g": 0.010263856500387192, + "layer_5_v_norm": 0.11913791298866272, + "layer_5_cos_v_neg_g": 0.009156913496553898, + "layer_6_v_norm": 0.1208929568529129, + "layer_6_cos_v_neg_g": 0.00725093949586153, + "layer_7_v_norm": 0.12077899277210236, + "layer_7_cos_v_neg_g": 0.007737330626696348, + "layer_8_v_norm": 0.12094547599554062, + "layer_8_cos_v_neg_g": 0.007888982072472572, + "layer_9_v_norm": 0.12060337513685226, + "layer_9_cos_v_neg_g": 0.007067989092320204, + "layer_10_v_norm": 0.12079441547393799, + "layer_10_cos_v_neg_g": 0.006350955460220575, + "layer_11_v_norm": 0.12064186483621597, + "layer_11_cos_v_neg_g": 0.005243760067969561, + "layer_12_v_norm": 0.12074372172355652, + "layer_12_cos_v_neg_g": 0.003999035805463791, + "layer_1_sharpness": 
0.1409282088279724, + "layer_2_sharpness": 0.034783583134412766, + "layer_3_sharpness": 0.02346683293581009, + "layer_4_sharpness": 0.009923761710524559, + "layer_5_sharpness": 0.01062801294028759, + "layer_6_sharpness": 0.010013067163527012, + "layer_7_sharpness": 0.0115007059648633, + "layer_8_sharpness": 0.010394956916570663, + "layer_9_sharpness": 0.008553454652428627, + "layer_10_sharpness": 0.007011385168880224, + "layer_11_sharpness": 0.0062308344058692455, + "layer_12_sharpness": 0.009918566793203354 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..0c7551206729507b26b0415fce2338c1df896fc2 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.386755108833313, + "total_l1_linf_norm": 9466.2265625, + "total_spectral_norm": 1.386755108833313, + "layer_1_update_fnorm": 0.12057319283485413, + "layer_1_max_l1_linf_norm": 0.2490990161895752, + "layer_1_max_spectral_norm": 0.005697226617485285, + "layer_2_update_fnorm": 0.1146765947341919, + "layer_2_max_l1_linf_norm": 0.31473004817962646, + "layer_2_max_spectral_norm": 0.007126018404960632, + "layer_3_update_fnorm": 0.11359533667564392, + "layer_3_max_l1_linf_norm": 0.3543294668197632, + "layer_3_max_spectral_norm": 0.007918884977698326, + "layer_4_update_fnorm": 0.11704486608505249, + "layer_4_max_l1_linf_norm": 0.34369510412216187, + "layer_4_max_spectral_norm": 0.007737243082374334, + "layer_5_update_fnorm": 0.11922923475503922, + "layer_5_max_l1_linf_norm": 0.3380557894706726, + "layer_5_max_spectral_norm": 0.007624263875186443, + "layer_6_update_fnorm": 0.12087862938642502, + "layer_6_max_l1_linf_norm": 0.2751423418521881, + "layer_6_max_spectral_norm": 0.0062981559894979, + "layer_7_update_fnorm": 0.12083695083856583, + "layer_7_max_l1_linf_norm": 0.26754599809646606, + "layer_7_max_spectral_norm": 0.00615166500210762, + "layer_8_update_fnorm": 0.12097715586423874, + "layer_8_max_l1_linf_norm": 0.2867982089519501, + "layer_8_max_spectral_norm": 0.006448798347264528, + "layer_9_update_fnorm": 0.12089467793703079, + "layer_9_max_l1_linf_norm": 0.3106441795825958, + "layer_9_max_spectral_norm": 0.00702094379812479, + "layer_10_update_fnorm": 0.12095629423856735, + "layer_10_max_l1_linf_norm": 0.32399702072143555, + "layer_10_max_spectral_norm": 0.0072980476543307304, + "layer_11_update_fnorm": 0.12075656652450562, + "layer_11_max_l1_linf_norm": 0.3377869725227356, + "layer_11_max_spectral_norm": 0.0075859567150473595, + "layer_12_update_fnorm": 0.12065048515796661, + "layer_12_max_l1_linf_norm": 0.32783663272857666, + "layer_12_max_spectral_norm": 0.007358675356954336, + "total_sharpness": 0.0071873851120471954, + "ip_v_neg_g": 0.006824745796620846, + "cos_v_neg_g": 0.0020265839993953705, + "v_norm": 1.386755108833313, + "g_norm": 2.428410530090332, + "hv_norm": 0.8458678722381592, + "cos_v_hv": 0.011783333495259285, + "hg_norm": 89.11267852783203, + "cos_g_hg": 0.4908714294433594, + "v_parallel_norm": 7.702779112150893e-05, + "v_perp_norm": 1.386755108833313, + "layer_1_v_norm": 0.12057319283485413, + "layer_1_cos_v_neg_g": 0.015279671177268028, + "layer_2_v_norm": 0.1146765947341919, + "layer_2_cos_v_neg_g": 0.01297107245773077, + "layer_3_v_norm": 0.11359533667564392, + 
"layer_3_cos_v_neg_g": 0.009414355270564556, + "layer_4_v_norm": 0.11704486608505249, + "layer_4_cos_v_neg_g": 0.006191493012011051, + "layer_5_v_norm": 0.11922923475503922, + "layer_5_cos_v_neg_g": 0.008024136535823345, + "layer_6_v_norm": 0.12087862938642502, + "layer_6_cos_v_neg_g": 0.006176746916025877, + "layer_7_v_norm": 0.12083695083856583, + "layer_7_cos_v_neg_g": 0.0042725736275315285, + "layer_8_v_norm": 0.12097715586423874, + "layer_8_cos_v_neg_g": 0.004307135473936796, + "layer_9_v_norm": 0.12089467793703079, + "layer_9_cos_v_neg_g": 0.0035565916914492846, + "layer_10_v_norm": 0.12095629423856735, + "layer_10_cos_v_neg_g": 0.0034988566767424345, + "layer_11_v_norm": 0.12075656652450562, + "layer_11_cos_v_neg_g": 0.003238501027226448, + "layer_12_v_norm": 0.12065048515796661, + "layer_12_cos_v_neg_g": 0.00268137757666409, + "layer_1_sharpness": 0.08204686641693115, + "layer_2_sharpness": 0.0210791677236557, + "layer_3_sharpness": 0.01838178187608719, + "layer_4_sharpness": 0.006827608682215214, + "layer_5_sharpness": 0.0105569614097476, + "layer_6_sharpness": 0.008008506149053574, + "layer_7_sharpness": 0.00988532230257988, + "layer_8_sharpness": 0.009643237106502056, + "layer_9_sharpness": 0.00721996882930398, + "layer_10_sharpness": 0.005477081518620253, + "layer_11_sharpness": 0.005773262120783329, + "layer_12_sharpness": 0.008779690600931644 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..5878206085628159102a3e88df58b568b1dc393c --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.388322114944458, + "total_l1_linf_norm": 9480.1953125, + "total_spectral_norm": 1.3883222341537476, + "layer_1_update_fnorm": 0.12043178081512451, + "layer_1_max_l1_linf_norm": 0.29083508253097534, + "layer_1_max_spectral_norm": 0.006614200305193663, + "layer_2_update_fnorm": 0.11473353952169418, + "layer_2_max_l1_linf_norm": 0.33323097229003906, + "layer_2_max_spectral_norm": 0.007611988577991724, + "layer_3_update_fnorm": 0.11444702744483948, + "layer_3_max_l1_linf_norm": 0.3576720952987671, + "layer_3_max_spectral_norm": 0.008030282333493233, + "layer_4_update_fnorm": 0.11746560782194138, + "layer_4_max_l1_linf_norm": 0.3529323637485504, + "layer_4_max_spectral_norm": 0.008055971935391426, + "layer_5_update_fnorm": 0.11984837055206299, + "layer_5_max_l1_linf_norm": 0.31694158911705017, + "layer_5_max_spectral_norm": 0.0071968454867601395, + "layer_6_update_fnorm": 0.12089188396930695, + "layer_6_max_l1_linf_norm": 0.30798619985580444, + "layer_6_max_spectral_norm": 0.006940705236047506, + "layer_7_update_fnorm": 0.12085162848234177, + "layer_7_max_l1_linf_norm": 0.2881452143192291, + "layer_7_max_spectral_norm": 0.006461009848862886, + "layer_8_update_fnorm": 0.12088926881551743, + "layer_8_max_l1_linf_norm": 0.31281623244285583, + "layer_8_max_spectral_norm": 0.007080764975398779, + "layer_9_update_fnorm": 0.12082577496767044, + "layer_9_max_l1_linf_norm": 0.3347492218017578, + "layer_9_max_spectral_norm": 0.007513252552598715, + "layer_10_update_fnorm": 0.1208336129784584, + "layer_10_max_l1_linf_norm": 0.3628090023994446, + "layer_10_max_spectral_norm": 0.008127075619995594, + "layer_11_update_fnorm": 0.12066914886236191, + 
"layer_11_max_l1_linf_norm": 0.39310556650161743, + "layer_11_max_spectral_norm": 0.008786392398178577, + "layer_12_update_fnorm": 0.12079586088657379, + "layer_12_max_l1_linf_norm": 0.3862379193305969, + "layer_12_max_spectral_norm": 0.008632242679595947, + "total_sharpness": 0.0058447872288525105, + "ip_v_neg_g": 0.007223112508654594, + "cos_v_neg_g": 0.0023925108835101128, + "v_norm": 1.388322114944458, + "g_norm": 2.1746041774749756, + "hv_norm": 0.5298210978507996, + "cos_v_hv": 0.015315447002649307, + "hg_norm": 43.5408821105957, + "cos_g_hg": 0.46323636174201965, + "v_parallel_norm": 7.32805419829674e-05, + "v_perp_norm": 1.388322114944458, + "layer_1_v_norm": 0.12043178081512451, + "layer_1_cos_v_neg_g": 0.007845447398722172, + "layer_2_v_norm": 0.11473353952169418, + "layer_2_cos_v_neg_g": 0.00783113855868578, + "layer_3_v_norm": 0.11444702744483948, + "layer_3_cos_v_neg_g": 0.0069548203609883785, + "layer_4_v_norm": 0.11746560782194138, + "layer_4_cos_v_neg_g": 0.006104995962232351, + "layer_5_v_norm": 0.11984837055206299, + "layer_5_cos_v_neg_g": 0.007757894229143858, + "layer_6_v_norm": 0.12089188396930695, + "layer_6_cos_v_neg_g": 0.006546244490891695, + "layer_7_v_norm": 0.12085162848234177, + "layer_7_cos_v_neg_g": 0.008649799972772598, + "layer_8_v_norm": 0.12088926881551743, + "layer_8_cos_v_neg_g": 0.010201247408986092, + "layer_9_v_norm": 0.12082577496767044, + "layer_9_cos_v_neg_g": 0.00953932385891676, + "layer_10_v_norm": 0.1208336129784584, + "layer_10_cos_v_neg_g": 0.00843886286020279, + "layer_11_v_norm": 0.12066914141178131, + "layer_11_cos_v_neg_g": 0.007069864775985479, + "layer_12_v_norm": 0.12079586088657379, + "layer_12_cos_v_neg_g": 0.006400503683835268, + "layer_1_sharpness": 0.020013822242617607, + "layer_2_sharpness": 0.007642875891178846, + "layer_3_sharpness": 0.013286100700497627, + "layer_4_sharpness": 0.0059237428940832615, + "layer_5_sharpness": 0.006880216300487518, + "layer_6_sharpness": 0.008678605780005455, + "layer_7_sharpness": 0.009478592313826084, + "layer_8_sharpness": 0.010093745775520802, + "layer_9_sharpness": 0.007175651378929615, + "layer_10_sharpness": 0.005897774826735258, + "layer_11_sharpness": 0.006162119098007679, + "layer_12_sharpness": 0.008452432230114937 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..e3c38dc508b70dd500579a7e97eb9f4b5cd38ccf --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3903766870498657, + "total_l1_linf_norm": 9491.01953125, + "total_spectral_norm": 1.3903768062591553, + "layer_1_update_fnorm": 0.12015053629875183, + "layer_1_max_l1_linf_norm": 0.28652554750442505, + "layer_1_max_spectral_norm": 0.006507040932774544, + "layer_2_update_fnorm": 0.114884153008461, + "layer_2_max_l1_linf_norm": 0.3530811071395874, + "layer_2_max_spectral_norm": 0.007909992709755898, + "layer_3_update_fnorm": 0.11528559029102325, + "layer_3_max_l1_linf_norm": 0.34597048163414, + "layer_3_max_spectral_norm": 0.007763296831399202, + "layer_4_update_fnorm": 0.11784238368272781, + "layer_4_max_l1_linf_norm": 0.35534679889678955, + "layer_4_max_spectral_norm": 0.008006207644939423, + "layer_5_update_fnorm": 0.12003372609615326, + "layer_5_max_l1_linf_norm": 
0.3322240710258484, + "layer_5_max_spectral_norm": 0.0074488315731287, + "layer_6_update_fnorm": 0.12104848772287369, + "layer_6_max_l1_linf_norm": 0.3151398003101349, + "layer_6_max_spectral_norm": 0.007064794655889273, + "layer_7_update_fnorm": 0.12100441753864288, + "layer_7_max_l1_linf_norm": 0.32183337211608887, + "layer_7_max_spectral_norm": 0.007204873953014612, + "layer_8_update_fnorm": 0.12098709493875504, + "layer_8_max_l1_linf_norm": 0.2961389720439911, + "layer_8_max_spectral_norm": 0.006662313360720873, + "layer_9_update_fnorm": 0.12109363824129105, + "layer_9_max_l1_linf_norm": 0.31098148226737976, + "layer_9_max_spectral_norm": 0.007041078992187977, + "layer_10_update_fnorm": 0.12117373198270798, + "layer_10_max_l1_linf_norm": 0.3378932476043701, + "layer_10_max_spectral_norm": 0.007629644125699997, + "layer_11_update_fnorm": 0.12104246020317078, + "layer_11_max_l1_linf_norm": 0.35854604840278625, + "layer_11_max_spectral_norm": 0.008057605475187302, + "layer_12_update_fnorm": 0.12080299109220505, + "layer_12_max_l1_linf_norm": 0.3522632122039795, + "layer_12_max_spectral_norm": 0.008006483316421509, + "total_sharpness": 0.005108126904815435, + "ip_v_neg_g": 0.004914544057101011, + "cos_v_neg_g": 0.0015492155216634274, + "v_norm": 1.3903766870498657, + "g_norm": 2.2815968990325928, + "hv_norm": 0.5645872950553894, + "cos_v_hv": 0.01257948949933052, + "hg_norm": 67.94917297363281, + "cos_g_hg": 0.5162755846977234, + "v_parallel_norm": 5.955780579824932e-05, + "v_perp_norm": 1.3903766870498657, + "layer_1_v_norm": 0.12015053629875183, + "layer_1_cos_v_neg_g": 0.006763132754713297, + "layer_2_v_norm": 0.114884153008461, + "layer_2_cos_v_neg_g": 0.0036740771029144526, + "layer_3_v_norm": 0.11528559029102325, + "layer_3_cos_v_neg_g": 0.006208780687302351, + "layer_4_v_norm": 0.11784238368272781, + "layer_4_cos_v_neg_g": 0.0048010521568357944, + "layer_5_v_norm": 0.12003372609615326, + "layer_5_cos_v_neg_g": 0.0054931133054196835, + "layer_6_v_norm": 0.12104848772287369, + "layer_6_cos_v_neg_g": 0.005520337726920843, + "layer_7_v_norm": 0.12100441753864288, + "layer_7_cos_v_neg_g": 0.005977441556751728, + "layer_8_v_norm": 0.12098710238933563, + "layer_8_cos_v_neg_g": 0.0052580274641513824, + "layer_9_v_norm": 0.12109363824129105, + "layer_9_cos_v_neg_g": 0.004606743808835745, + "layer_10_v_norm": 0.12117373198270798, + "layer_10_cos_v_neg_g": 0.005223201122134924, + "layer_11_v_norm": 0.12104245275259018, + "layer_11_cos_v_neg_g": 0.004172503016889095, + "layer_12_v_norm": 0.12080299109220505, + "layer_12_cos_v_neg_g": 0.0045301890932023525, + "layer_1_sharpness": 0.017634768038988113, + "layer_2_sharpness": 0.008737169206142426, + "layer_3_sharpness": 0.011105924844741821, + "layer_4_sharpness": 0.005705305840820074, + "layer_5_sharpness": 0.007508722133934498, + "layer_6_sharpness": 0.009524672292172909, + "layer_7_sharpness": 0.010649180971086025, + "layer_8_sharpness": 0.00958639569580555, + "layer_9_sharpness": 0.006703170482069254, + "layer_10_sharpness": 0.0050836228765547276, + "layer_11_sharpness": 0.004600046668201685, + "layer_12_sharpness": 0.006247430574148893 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..a27a06c0412b0f70ee3cb7627c8f368f89727076 --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3920555114746094, + "total_l1_linf_norm": 9494.4072265625, + "total_spectral_norm": 1.392055630683899, + "layer_1_update_fnorm": 0.11975640058517456, + "layer_1_max_l1_linf_norm": 0.3056122660636902, + "layer_1_max_spectral_norm": 0.0069249640218913555, + "layer_2_update_fnorm": 0.11467322707176208, + "layer_2_max_l1_linf_norm": 0.3497273921966553, + "layer_2_max_spectral_norm": 0.007870856672525406, + "layer_3_update_fnorm": 0.11471343040466309, + "layer_3_max_l1_linf_norm": 0.34562304615974426, + "layer_3_max_spectral_norm": 0.007847961969673634, + "layer_4_update_fnorm": 0.1154213398694992, + "layer_4_max_l1_linf_norm": 0.3811670243740082, + "layer_4_max_spectral_norm": 0.008537297137081623, + "layer_5_update_fnorm": 0.11933405697345734, + "layer_5_max_l1_linf_norm": 0.3515295088291168, + "layer_5_max_spectral_norm": 0.00788904633373022, + "layer_6_update_fnorm": 0.12043602764606476, + "layer_6_max_l1_linf_norm": 0.3267989456653595, + "layer_6_max_spectral_norm": 0.007329375948756933, + "layer_7_update_fnorm": 0.12030712515115738, + "layer_7_max_l1_linf_norm": 0.3235163688659668, + "layer_7_max_spectral_norm": 0.00731525057926774, + "layer_8_update_fnorm": 0.12066376954317093, + "layer_8_max_l1_linf_norm": 0.33231601119041443, + "layer_8_max_spectral_norm": 0.007547407411038876, + "layer_9_update_fnorm": 0.12099318206310272, + "layer_9_max_l1_linf_norm": 0.33895760774612427, + "layer_9_max_spectral_norm": 0.007630904670804739, + "layer_10_update_fnorm": 0.12108585983514786, + "layer_10_max_l1_linf_norm": 0.3637554347515106, + "layer_10_max_spectral_norm": 0.008175618946552277, + "layer_11_update_fnorm": 0.1209815964102745, + "layer_11_max_l1_linf_norm": 0.38150671124458313, + "layer_11_max_spectral_norm": 0.008522636257112026, + "layer_12_update_fnorm": 0.12078140676021576, + "layer_12_max_l1_linf_norm": 0.38959258794784546, + "layer_12_max_spectral_norm": 0.008707661181688309, + "total_sharpness": 0.004888023715466261, + "ip_v_neg_g": 0.006074761506170034, + "cos_v_neg_g": 0.0021020532585680485, + "v_norm": 1.3920555114746094, + "g_norm": 2.076007843017578, + "hv_norm": 0.5144767165184021, + "cos_v_hv": 0.013225867412984371, + "hg_norm": 42.97552490234375, + "cos_g_hg": 0.4706401228904724, + "v_parallel_norm": 6.867814954603091e-05, + "v_perp_norm": 1.3920555114746094, + "layer_1_v_norm": 0.11975640058517456, + "layer_1_cos_v_neg_g": 0.006070099305361509, + "layer_2_v_norm": 0.11467322707176208, + "layer_2_cos_v_neg_g": 0.006130794528871775, + "layer_3_v_norm": 0.11471343040466309, + "layer_3_cos_v_neg_g": 0.005148414056748152, + "layer_4_v_norm": 0.1154213398694992, + "layer_4_cos_v_neg_g": 0.004799142014235258, + "layer_5_v_norm": 0.11933405697345734, + "layer_5_cos_v_neg_g": 0.007083558943122625, + "layer_6_v_norm": 0.12043602764606476, + "layer_6_cos_v_neg_g": 0.008311822079122066, + "layer_7_v_norm": 0.12030712515115738, + "layer_7_cos_v_neg_g": 0.00965936854481697, + "layer_8_v_norm": 0.12066376209259033, + "layer_8_cos_v_neg_g": 0.010005791671574116, + "layer_9_v_norm": 0.12099318206310272, + "layer_9_cos_v_neg_g": 0.00815946888178587, + "layer_10_v_norm": 0.12108585983514786, + "layer_10_cos_v_neg_g": 0.006278790067881346, + "layer_11_v_norm": 0.1209815964102745, + "layer_11_cos_v_neg_g": 0.005630332976579666, + "layer_12_v_norm": 0.12078140676021576, + "layer_12_cos_v_neg_g": 0.003960751462727785, + "layer_1_sharpness": 
0.02030991204082966, + "layer_2_sharpness": 0.006238306872546673, + "layer_3_sharpness": 0.009027337655425072, + "layer_4_sharpness": 0.005829307716339827, + "layer_5_sharpness": 0.00772812869399786, + "layer_6_sharpness": 0.009112679399549961, + "layer_7_sharpness": 0.010907531715929508, + "layer_8_sharpness": 0.010094835422933102, + "layer_9_sharpness": 0.006775969639420509, + "layer_10_sharpness": 0.004677646327763796, + "layer_11_sharpness": 0.004333919379860163, + "layer_12_sharpness": 0.005552190355956554 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..c9e5158b0a5b6e20de7152ef1972bc1948963e3c --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.4017857313156128, + "total_l1_linf_norm": 9551.1611328125, + "total_spectral_norm": 1.4017854928970337, + "layer_1_update_fnorm": 0.12003980576992035, + "layer_1_max_l1_linf_norm": 0.36027631163597107, + "layer_1_max_spectral_norm": 0.008079849183559418, + "layer_2_update_fnorm": 0.11489316076040268, + "layer_2_max_l1_linf_norm": 0.425417423248291, + "layer_2_max_spectral_norm": 0.00958903320133686, + "layer_3_update_fnorm": 0.11494940519332886, + "layer_3_max_l1_linf_norm": 0.41368889808654785, + "layer_3_max_spectral_norm": 0.00927447248250246, + "layer_4_update_fnorm": 0.1178814098238945, + "layer_4_max_l1_linf_norm": 0.421927273273468, + "layer_4_max_spectral_norm": 0.009459546767175198, + "layer_5_update_fnorm": 0.11992441117763519, + "layer_5_max_l1_linf_norm": 0.39112138748168945, + "layer_5_max_spectral_norm": 0.008776438422501087, + "layer_6_update_fnorm": 0.12085039168596268, + "layer_6_max_l1_linf_norm": 0.3935129642486572, + "layer_6_max_spectral_norm": 0.008777901530265808, + "layer_7_update_fnorm": 0.1209183782339096, + "layer_7_max_l1_linf_norm": 0.35818588733673096, + "layer_7_max_spectral_norm": 0.008091317489743233, + "layer_8_update_fnorm": 0.12091628462076187, + "layer_8_max_l1_linf_norm": 0.345192551612854, + "layer_8_max_spectral_norm": 0.0077486722730100155, + "layer_9_update_fnorm": 0.12117428332567215, + "layer_9_max_l1_linf_norm": 0.371883362531662, + "layer_9_max_spectral_norm": 0.0082765594124794, + "layer_10_update_fnorm": 0.121226966381073, + "layer_10_max_l1_linf_norm": 0.39430832862854004, + "layer_10_max_spectral_norm": 0.00884715374559164, + "layer_11_update_fnorm": 0.12106654047966003, + "layer_11_max_l1_linf_norm": 0.4019828140735626, + "layer_11_max_spectral_norm": 0.008936998434364796, + "layer_12_update_fnorm": 0.12080886960029602, + "layer_12_max_l1_linf_norm": 0.3936893343925476, + "layer_12_max_spectral_norm": 0.008820705115795135, + "total_sharpness": 0.006004995200783014, + "ip_v_neg_g": 0.0073824552819132805, + "cos_v_neg_g": 0.0023809524718672037, + "v_norm": 1.4017857313156128, + "g_norm": 2.2119150161743164, + "hv_norm": 0.6089643836021423, + "cos_v_hv": 0.013823002576828003, + "hg_norm": 102.43179321289062, + "cos_g_hg": 0.5076903700828552, + "v_parallel_norm": 7.959518552524969e-05, + "v_perp_norm": 1.4017857313156128, + "layer_1_v_norm": 0.12003980576992035, + "layer_1_cos_v_neg_g": 0.008077317848801613, + "layer_2_v_norm": 0.11489316076040268, + "layer_2_cos_v_neg_g": 0.005393856205046177, + "layer_3_v_norm": 
0.11494940519332886, + "layer_3_cos_v_neg_g": 0.0044447025284171104, + "layer_4_v_norm": 0.1178814098238945, + "layer_4_cos_v_neg_g": 0.006648014299571514, + "layer_5_v_norm": 0.11992441117763519, + "layer_5_cos_v_neg_g": 0.008545158430933952, + "layer_6_v_norm": 0.12085039913654327, + "layer_6_cos_v_neg_g": 0.010897274129092693, + "layer_7_v_norm": 0.1209183782339096, + "layer_7_cos_v_neg_g": 0.010172798298299313, + "layer_8_v_norm": 0.12091629207134247, + "layer_8_cos_v_neg_g": 0.009190071374177933, + "layer_9_v_norm": 0.12117428332567215, + "layer_9_cos_v_neg_g": 0.008696562610566616, + "layer_10_v_norm": 0.121226966381073, + "layer_10_cos_v_neg_g": 0.007062343414872885, + "layer_11_v_norm": 0.12106653302907944, + "layer_11_cos_v_neg_g": 0.0058481646701693535, + "layer_12_v_norm": 0.12080886960029602, + "layer_12_cos_v_neg_g": 0.005453186109662056, + "layer_1_sharpness": 0.020411811769008636, + "layer_2_sharpness": 0.009476716630160809, + "layer_3_sharpness": 0.01274736225605011, + "layer_4_sharpness": 0.005835475400090218, + "layer_5_sharpness": 0.008186312392354012, + "layer_6_sharpness": 0.011576971970498562, + "layer_7_sharpness": 0.011842794716358185, + "layer_8_sharpness": 0.010617190971970558, + "layer_9_sharpness": 0.007458443287760019, + "layer_10_sharpness": 0.005375379230827093, + "layer_11_sharpness": 0.004748963750898838, + "layer_12_sharpness": 0.007712435442954302 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..f706c5851a7a160aebd36cedd2144996db689200 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.024059534072876, + "total_l1_linf_norm": 6968.45751953125, + "total_spectral_norm": 1.0240594148635864, + "layer_1_update_fnorm": 0.0875387042760849, + "layer_1_max_l1_linf_norm": 0.13008540868759155, + "layer_1_max_spectral_norm": 0.0029331515543162823, + "layer_2_update_fnorm": 0.0835018903017044, + "layer_2_max_l1_linf_norm": 0.15152126550674438, + "layer_2_max_spectral_norm": 0.00341817713342607, + "layer_3_update_fnorm": 0.08098108321428299, + "layer_3_max_l1_linf_norm": 0.15973889827728271, + "layer_3_max_spectral_norm": 0.003612050786614418, + "layer_4_update_fnorm": 0.08113166689872742, + "layer_4_max_l1_linf_norm": 0.167400524020195, + "layer_4_max_spectral_norm": 0.004166644066572189, + "layer_5_update_fnorm": 0.08175158500671387, + "layer_5_max_l1_linf_norm": 0.17818665504455566, + "layer_5_max_spectral_norm": 0.0043423655442893505, + "layer_6_update_fnorm": 0.0831085816025734, + "layer_6_max_l1_linf_norm": 0.18087486922740936, + "layer_6_max_spectral_norm": 0.005115171428769827, + "layer_7_update_fnorm": 0.08445357531309128, + "layer_7_max_l1_linf_norm": 0.18178465962409973, + "layer_7_max_spectral_norm": 0.006108148023486137, + "layer_8_update_fnorm": 0.08492308855056763, + "layer_8_max_l1_linf_norm": 0.18456456065177917, + "layer_8_max_spectral_norm": 0.005921355914324522, + "layer_9_update_fnorm": 0.08554906398057938, + "layer_9_max_l1_linf_norm": 0.18955571949481964, + "layer_9_max_spectral_norm": 0.006449580658227205, + "layer_10_update_fnorm": 0.08586900681257248, + "layer_10_max_l1_linf_norm": 0.200917050242424, + "layer_10_max_spectral_norm": 0.005497352220118046, + "layer_11_update_fnorm": 
0.08597494661808014, + "layer_11_max_l1_linf_norm": 0.21078073978424072, + "layer_11_max_spectral_norm": 0.004969645291566849, + "layer_12_update_fnorm": 0.08583135157823563, + "layer_12_max_l1_linf_norm": 0.21590228378772736, + "layer_12_max_spectral_norm": 0.005143942777067423, + "total_sharpness": 0.06497998535633087, + "ip_v_neg_g": 0.03088153339922428, + "cos_v_neg_g": 0.005502207204699516, + "v_norm": 1.024059534072876, + "g_norm": 5.480708599090576, + "hv_norm": 5.51997184753418, + "cos_v_hv": 0.012055018916726112, + "hg_norm": 2203.416748046875, + "cos_g_hg": 0.6691152453422546, + "v_parallel_norm": 0.00033781788079068065, + "v_perp_norm": 1.0240594148635864, + "layer_1_v_norm": 0.0875387042760849, + "layer_1_cos_v_neg_g": 0.029766157269477844, + "layer_2_v_norm": 0.0835018903017044, + "layer_2_cos_v_neg_g": 0.021830815821886063, + "layer_3_v_norm": 0.08098107576370239, + "layer_3_cos_v_neg_g": 0.02257741428911686, + "layer_4_v_norm": 0.08113166689872742, + "layer_4_cos_v_neg_g": 0.02290610410273075, + "layer_5_v_norm": 0.08175158500671387, + "layer_5_cos_v_neg_g": 0.020644208416342735, + "layer_6_v_norm": 0.0831085816025734, + "layer_6_cos_v_neg_g": 0.017780417576432228, + "layer_7_v_norm": 0.08445357531309128, + "layer_7_cos_v_neg_g": 0.015566316433250904, + "layer_8_v_norm": 0.08492308109998703, + "layer_8_cos_v_neg_g": 0.014035598374903202, + "layer_9_v_norm": 0.08554906398057938, + "layer_9_cos_v_neg_g": 0.014370578341186047, + "layer_10_v_norm": 0.08586900681257248, + "layer_10_cos_v_neg_g": 0.014727495610713959, + "layer_11_v_norm": 0.08597494661808014, + "layer_11_cos_v_neg_g": 0.016400471329689026, + "layer_12_v_norm": 0.08583135157823563, + "layer_12_cos_v_neg_g": 0.016715839505195618, + "layer_1_sharpness": 0.7513284087181091, + "layer_2_sharpness": 0.16380083560943604, + "layer_3_sharpness": 0.1532140076160431, + "layer_4_sharpness": 0.13604949414730072, + "layer_5_sharpness": 0.09085527807474136, + "layer_6_sharpness": 0.05888911709189415, + "layer_7_sharpness": 0.03851696103811264, + "layer_8_sharpness": 0.028067808598279953, + "layer_9_sharpness": 0.024570558220148087, + "layer_10_sharpness": 0.025983422994613647, + "layer_11_sharpness": 0.03946458548307419, + "layer_12_sharpness": 0.047090794891119 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..519b37eb817413c748d6f5101859af3460b52298 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3916497230529785, + "total_l1_linf_norm": 9500.2080078125, + "total_spectral_norm": 1.3916499614715576, + "layer_1_update_fnorm": 0.11947157979011536, + "layer_1_max_l1_linf_norm": 0.32993724942207336, + "layer_1_max_spectral_norm": 0.00745999114587903, + "layer_2_update_fnorm": 0.11385942250490189, + "layer_2_max_l1_linf_norm": 0.3605342507362366, + "layer_2_max_spectral_norm": 0.008117078803479671, + "layer_3_update_fnorm": 0.11556411534547806, + "layer_3_max_l1_linf_norm": 0.35889631509780884, + "layer_3_max_spectral_norm": 0.008125551976263523, + "layer_4_update_fnorm": 0.11853666603565216, + "layer_4_max_l1_linf_norm": 0.3662540912628174, + "layer_4_max_spectral_norm": 0.008281035348773003, + "layer_5_update_fnorm": 0.12041172385215759, + 
"layer_5_max_l1_linf_norm": 0.36245137453079224, + "layer_5_max_spectral_norm": 0.008162476122379303, + "layer_6_update_fnorm": 0.120918408036232, + "layer_6_max_l1_linf_norm": 0.3031497001647949, + "layer_6_max_spectral_norm": 0.006895783822983503, + "layer_7_update_fnorm": 0.12106084078550339, + "layer_7_max_l1_linf_norm": 0.2992340922355652, + "layer_7_max_spectral_norm": 0.006788396276533604, + "layer_8_update_fnorm": 0.12112981826066971, + "layer_8_max_l1_linf_norm": 0.3167220950126648, + "layer_8_max_spectral_norm": 0.0071922848001122475, + "layer_9_update_fnorm": 0.12136301398277283, + "layer_9_max_l1_linf_norm": 0.322446346282959, + "layer_9_max_spectral_norm": 0.007300200872123241, + "layer_10_update_fnorm": 0.12126998603343964, + "layer_10_max_l1_linf_norm": 0.3662033677101135, + "layer_10_max_spectral_norm": 0.00827429722994566, + "layer_11_update_fnorm": 0.12108169496059418, + "layer_11_max_l1_linf_norm": 0.3880816698074341, + "layer_11_max_spectral_norm": 0.008758372627198696, + "layer_12_update_fnorm": 0.12084392458200455, + "layer_12_max_l1_linf_norm": 0.38818439841270447, + "layer_12_max_spectral_norm": 0.008724556304514408, + "total_sharpness": 0.004110307898372412, + "ip_v_neg_g": 0.006181356497108936, + "cos_v_neg_g": 0.00205498025752604, + "v_norm": 1.3916497230529785, + "g_norm": 2.161454916000366, + "hv_norm": 0.5189279913902283, + "cos_v_hv": 0.011022934690117836, + "hg_norm": 79.00963592529297, + "cos_g_hg": 0.47209739685058594, + "v_parallel_norm": 8.75554615049623e-05, + "v_perp_norm": 1.3916497230529785, + "layer_1_v_norm": 0.11947157979011536, + "layer_1_cos_v_neg_g": 0.009130737744271755, + "layer_2_v_norm": 0.11385942250490189, + "layer_2_cos_v_neg_g": 0.009966571815311909, + "layer_3_v_norm": 0.11556411534547806, + "layer_3_cos_v_neg_g": 0.009088892489671707, + "layer_4_v_norm": 0.11853666603565216, + "layer_4_cos_v_neg_g": 0.006406743079423904, + "layer_5_v_norm": 0.12041172385215759, + "layer_5_cos_v_neg_g": 0.005976748652756214, + "layer_6_v_norm": 0.12091841548681259, + "layer_6_cos_v_neg_g": 0.004592789337038994, + "layer_7_v_norm": 0.12106084078550339, + "layer_7_cos_v_neg_g": 0.005488571710884571, + "layer_8_v_norm": 0.12112981826066971, + "layer_8_cos_v_neg_g": 0.005201186519116163, + "layer_9_v_norm": 0.12136301398277283, + "layer_9_cos_v_neg_g": 0.00499001843854785, + "layer_10_v_norm": 0.12126998603343964, + "layer_10_cos_v_neg_g": 0.005747716408222914, + "layer_11_v_norm": 0.12108168751001358, + "layer_11_cos_v_neg_g": 0.006276288535445929, + "layer_12_v_norm": 0.12084392458200455, + "layer_12_cos_v_neg_g": 0.004717416595667601, + "layer_1_sharpness": 0.044495679438114166, + "layer_2_sharpness": 0.010765225626528263, + "layer_3_sharpness": 0.012068754993379116, + "layer_4_sharpness": 0.0052455635741353035, + "layer_5_sharpness": 0.006903862114995718, + "layer_6_sharpness": 0.0066580320708453655, + "layer_7_sharpness": 0.00678950734436512, + "layer_8_sharpness": 0.006702620070427656, + "layer_9_sharpness": 0.0047286925837397575, + "layer_10_sharpness": 0.0037971530109643936, + "layer_11_sharpness": 0.0038227064069360495, + "layer_12_sharpness": 0.006683711428195238 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..9010dd2a52fd92fd031035d13769ee53629f5c5a --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3937652111053467, + "total_l1_linf_norm": 9516.16015625, + "total_spectral_norm": 1.3937653303146362, + "layer_1_update_fnorm": 0.11945544183254242, + "layer_1_max_l1_linf_norm": 0.3433893322944641, + "layer_1_max_spectral_norm": 0.0077045029029250145, + "layer_2_update_fnorm": 0.11494776606559753, + "layer_2_max_l1_linf_norm": 0.390403687953949, + "layer_2_max_spectral_norm": 0.008788470178842545, + "layer_3_update_fnorm": 0.11585360020399094, + "layer_3_max_l1_linf_norm": 0.3931695818901062, + "layer_3_max_spectral_norm": 0.00879832822829485, + "layer_4_update_fnorm": 0.11847053468227386, + "layer_4_max_l1_linf_norm": 0.38653451204299927, + "layer_4_max_spectral_norm": 0.008699148893356323, + "layer_5_update_fnorm": 0.12031663209199905, + "layer_5_max_l1_linf_norm": 0.37755322456359863, + "layer_5_max_spectral_norm": 0.008397972211241722, + "layer_6_update_fnorm": 0.1208576038479805, + "layer_6_max_l1_linf_norm": 0.3263280987739563, + "layer_6_max_spectral_norm": 0.007367028389126062, + "layer_7_update_fnorm": 0.1210939958691597, + "layer_7_max_l1_linf_norm": 0.35452336072921753, + "layer_7_max_spectral_norm": 0.007923847064375877, + "layer_8_update_fnorm": 0.12105784565210342, + "layer_8_max_l1_linf_norm": 0.34072345495224, + "layer_8_max_spectral_norm": 0.007710866164416075, + "layer_9_update_fnorm": 0.12119690328836441, + "layer_9_max_l1_linf_norm": 0.35725200176239014, + "layer_9_max_spectral_norm": 0.008077684789896011, + "layer_10_update_fnorm": 0.12138905376195908, + "layer_10_max_l1_linf_norm": 0.3803443908691406, + "layer_10_max_spectral_norm": 0.00854057352989912, + "layer_11_update_fnorm": 0.12115263938903809, + "layer_11_max_l1_linf_norm": 0.3966676592826843, + "layer_11_max_spectral_norm": 0.009022418409585953, + "layer_12_update_fnorm": 0.12074275314807892, + "layer_12_max_l1_linf_norm": 0.3858276605606079, + "layer_12_max_spectral_norm": 0.008697251789271832, + "total_sharpness": 0.0037470022216439247, + "ip_v_neg_g": 0.0009833737276494503, + "cos_v_neg_g": 0.00023463812249246985, + "v_norm": 1.3937652111053467, + "g_norm": 3.00697922706604, + "hv_norm": 0.5231309533119202, + "cos_v_hv": 0.009983046911656857, + "hg_norm": 181.55113220214844, + "cos_g_hg": 0.6561713218688965, + "v_parallel_norm": 5.5670312576694414e-05, + "v_perp_norm": 1.3937652111053467, + "layer_1_v_norm": 0.11945544183254242, + "layer_1_cos_v_neg_g": 0.003575284732505679, + "layer_2_v_norm": 0.11494776606559753, + "layer_2_cos_v_neg_g": 0.0026711663231253624, + "layer_3_v_norm": 0.11585360765457153, + "layer_3_cos_v_neg_g": 0.0016110340366140008, + "layer_4_v_norm": 0.11847053468227386, + "layer_4_cos_v_neg_g": 4.961433296557516e-05, + "layer_5_v_norm": 0.12031663209199905, + "layer_5_cos_v_neg_g": 8.85065965121612e-05, + "layer_6_v_norm": 0.1208576038479805, + "layer_6_cos_v_neg_g": 0.0003722893015947193, + "layer_7_v_norm": 0.1210939958691597, + "layer_7_cos_v_neg_g": -0.00037520710611715913, + "layer_8_v_norm": 0.12105785310268402, + "layer_8_cos_v_neg_g": 0.0001722454180708155, + "layer_9_v_norm": 0.12119690328836441, + "layer_9_cos_v_neg_g": 0.0011771456338465214, + "layer_10_v_norm": 0.12138905376195908, + "layer_10_cos_v_neg_g": 0.0025736994575709105, + "layer_11_v_norm": 0.12115264683961868, + "layer_11_cos_v_neg_g": 0.0017805533716455102, + "layer_12_v_norm": 0.12074275314807892, + "layer_12_cos_v_neg_g": -0.000149539191625081, + "layer_1_sharpness": 
0.015686407685279846, + "layer_2_sharpness": 0.005388983525335789, + "layer_3_sharpness": 0.008175576105713844, + "layer_4_sharpness": 0.004086577333509922, + "layer_5_sharpness": 0.004683319013565779, + "layer_6_sharpness": 0.006238817237317562, + "layer_7_sharpness": 0.007071455009281635, + "layer_8_sharpness": 0.007153231184929609, + "layer_9_sharpness": 0.004603833891451359, + "layer_10_sharpness": 0.0034809308126568794, + "layer_11_sharpness": 0.00324103026650846, + "layer_12_sharpness": 0.006286852993071079 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..036647cdaac8b9aab8bfe2729e30cbe8703629f7 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3919353485107422, + "total_l1_linf_norm": 9503.1494140625, + "total_spectral_norm": 1.3919353485107422, + "layer_1_update_fnorm": 0.11953055113554001, + "layer_1_max_l1_linf_norm": 0.3363906741142273, + "layer_1_max_spectral_norm": 0.00757202971726656, + "layer_2_update_fnorm": 0.1146869882941246, + "layer_2_max_l1_linf_norm": 0.40136992931365967, + "layer_2_max_spectral_norm": 0.009101320058107376, + "layer_3_update_fnorm": 0.11544439941644669, + "layer_3_max_l1_linf_norm": 0.3865848183631897, + "layer_3_max_spectral_norm": 0.00865932647138834, + "layer_4_update_fnorm": 0.11875123530626297, + "layer_4_max_l1_linf_norm": 0.3853166997432709, + "layer_4_max_spectral_norm": 0.00862550176680088, + "layer_5_update_fnorm": 0.12053387612104416, + "layer_5_max_l1_linf_norm": 0.3627626299858093, + "layer_5_max_spectral_norm": 0.008242465555667877, + "layer_6_update_fnorm": 0.12104126811027527, + "layer_6_max_l1_linf_norm": 0.31226205825805664, + "layer_6_max_spectral_norm": 0.006989551708102226, + "layer_7_update_fnorm": 0.12119685858488083, + "layer_7_max_l1_linf_norm": 0.3080490827560425, + "layer_7_max_spectral_norm": 0.006992910988628864, + "layer_8_update_fnorm": 0.12122198939323425, + "layer_8_max_l1_linf_norm": 0.3198082447052002, + "layer_8_max_spectral_norm": 0.007226794026792049, + "layer_9_update_fnorm": 0.12143830209970474, + "layer_9_max_l1_linf_norm": 0.33992141485214233, + "layer_9_max_spectral_norm": 0.007667140569537878, + "layer_10_update_fnorm": 0.12151141464710236, + "layer_10_max_l1_linf_norm": 0.3746245503425598, + "layer_10_max_spectral_norm": 0.008370166644454002, + "layer_11_update_fnorm": 0.12109116464853287, + "layer_11_max_l1_linf_norm": 0.39068877696990967, + "layer_11_max_spectral_norm": 0.008786659687757492, + "layer_12_update_fnorm": 0.12094306945800781, + "layer_12_max_l1_linf_norm": 0.4073559641838074, + "layer_12_max_spectral_norm": 0.009190804325044155, + "total_sharpness": 0.0044555338099598885, + "ip_v_neg_g": 0.004344220273196697, + "cos_v_neg_g": 0.0016121250810101628, + "v_norm": 1.3919353485107422, + "g_norm": 1.9359495639801025, + "hv_norm": 0.49799031019210815, + "cos_v_hv": 0.012453685514628887, + "hg_norm": 38.534847259521484, + "cos_g_hg": 0.46896085143089294, + "v_parallel_norm": 6.101371764088981e-05, + "v_perp_norm": 1.3919353485107422, + "layer_1_v_norm": 0.11953055113554001, + "layer_1_cos_v_neg_g": 0.008781228214502335, + "layer_2_v_norm": 0.1146869882941246, + "layer_2_cos_v_neg_g": 0.009205441921949387, + "layer_3_v_norm": 
0.11544439941644669, + "layer_3_cos_v_neg_g": 0.005863635800778866, + "layer_4_v_norm": 0.11875123530626297, + "layer_4_cos_v_neg_g": 0.004264662973582745, + "layer_5_v_norm": 0.12053387612104416, + "layer_5_cos_v_neg_g": 0.00421253964304924, + "layer_6_v_norm": 0.12104126811027527, + "layer_6_cos_v_neg_g": 0.004375983960926533, + "layer_7_v_norm": 0.12119685858488083, + "layer_7_cos_v_neg_g": 0.004220824223011732, + "layer_8_v_norm": 0.12122198194265366, + "layer_8_cos_v_neg_g": 0.00468954723328352, + "layer_9_v_norm": 0.12143830209970474, + "layer_9_cos_v_neg_g": 0.004824188072234392, + "layer_10_v_norm": 0.12151141464710236, + "layer_10_cos_v_neg_g": 0.0034306677989661694, + "layer_11_v_norm": 0.12109116464853287, + "layer_11_cos_v_neg_g": 0.0031750432681292295, + "layer_12_v_norm": 0.12094306945800781, + "layer_12_cos_v_neg_g": 0.0031043922062963247, + "layer_1_sharpness": 0.02836531773209572, + "layer_2_sharpness": 0.01169891469180584, + "layer_3_sharpness": 0.009540083818137646, + "layer_4_sharpness": 0.00445622717961669, + "layer_5_sharpness": 0.0053896368481218815, + "layer_6_sharpness": 0.006435759365558624, + "layer_7_sharpness": 0.00870172493159771, + "layer_8_sharpness": 0.0072601670399308205, + "layer_9_sharpness": 0.00513374712318182, + "layer_10_sharpness": 0.0038578712847083807, + "layer_11_sharpness": 0.004331751260906458, + "layer_12_sharpness": 0.008914370089769363 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..16f5272f095f7b99a72c38122915cdc8793c475c --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.4027210474014282, + "total_l1_linf_norm": 9565.84765625, + "total_spectral_norm": 1.4027211666107178, + "layer_1_update_fnorm": 0.11864397674798965, + "layer_1_max_l1_linf_norm": 0.3431243896484375, + "layer_1_max_spectral_norm": 0.007743805646896362, + "layer_2_update_fnorm": 0.11388504505157471, + "layer_2_max_l1_linf_norm": 0.39835217595100403, + "layer_2_max_spectral_norm": 0.008989098481833935, + "layer_3_update_fnorm": 0.11524701118469238, + "layer_3_max_l1_linf_norm": 0.4064284861087799, + "layer_3_max_spectral_norm": 0.009156590327620506, + "layer_4_update_fnorm": 0.11801286786794662, + "layer_4_max_l1_linf_norm": 0.391929030418396, + "layer_4_max_spectral_norm": 0.008824611082673073, + "layer_5_update_fnorm": 0.12024135142564774, + "layer_5_max_l1_linf_norm": 0.3822505474090576, + "layer_5_max_spectral_norm": 0.008636906743049622, + "layer_6_update_fnorm": 0.1208517774939537, + "layer_6_max_l1_linf_norm": 0.3348291218280792, + "layer_6_max_spectral_norm": 0.007448717951774597, + "layer_7_update_fnorm": 0.12111077457666397, + "layer_7_max_l1_linf_norm": 0.31545689702033997, + "layer_7_max_spectral_norm": 0.00711955176666379, + "layer_8_update_fnorm": 0.12109128385782242, + "layer_8_max_l1_linf_norm": 0.3423836827278137, + "layer_8_max_spectral_norm": 0.007673193700611591, + "layer_9_update_fnorm": 0.12129738181829453, + "layer_9_max_l1_linf_norm": 0.35601621866226196, + "layer_9_max_spectral_norm": 0.007982653565704823, + "layer_10_update_fnorm": 0.12136183679103851, + "layer_10_max_l1_linf_norm": 0.39737212657928467, + "layer_10_max_spectral_norm": 0.008847136981785297, + "layer_11_update_fnorm": 
0.12106968462467194, + "layer_11_max_l1_linf_norm": 0.40890151262283325, + "layer_11_max_spectral_norm": 0.009139400906860828, + "layer_12_update_fnorm": 0.12087436020374298, + "layer_12_max_l1_linf_norm": 0.42034655809402466, + "layer_12_max_spectral_norm": 0.009434894658625126, + "total_sharpness": 0.0040123616345226765, + "ip_v_neg_g": 0.003480616956949234, + "cos_v_neg_g": 0.0012865543831139803, + "v_norm": 1.4027210474014282, + "g_norm": 1.9286648035049438, + "hv_norm": 0.4815864562988281, + "cos_v_hv": 0.011686841025948524, + "hg_norm": 40.16593933105469, + "cos_g_hg": 0.48548388481140137, + "v_parallel_norm": 5.5660602811258286e-05, + "v_perp_norm": 1.4027210474014282, + "layer_1_v_norm": 0.11864397674798965, + "layer_1_cos_v_neg_g": 0.0072987074963748455, + "layer_2_v_norm": 0.11388504505157471, + "layer_2_cos_v_neg_g": 0.005569819360971451, + "layer_3_v_norm": 0.11524701118469238, + "layer_3_cos_v_neg_g": 0.0034571478608995676, + "layer_4_v_norm": 0.11801286786794662, + "layer_4_cos_v_neg_g": 0.002077657263725996, + "layer_5_v_norm": 0.12024135142564774, + "layer_5_cos_v_neg_g": 0.003246541367843747, + "layer_6_v_norm": 0.1208517774939537, + "layer_6_cos_v_neg_g": 0.003122796304523945, + "layer_7_v_norm": 0.12111077457666397, + "layer_7_cos_v_neg_g": 0.0034917714074254036, + "layer_8_v_norm": 0.12109128385782242, + "layer_8_cos_v_neg_g": 0.0044973003678023815, + "layer_9_v_norm": 0.12129738181829453, + "layer_9_cos_v_neg_g": 0.004740559030324221, + "layer_10_v_norm": 0.12136183679103851, + "layer_10_cos_v_neg_g": 0.0038408227264881134, + "layer_11_v_norm": 0.12106968462467194, + "layer_11_cos_v_neg_g": 0.0038327956572175026, + "layer_12_v_norm": 0.12087436020374298, + "layer_12_cos_v_neg_g": 0.004106846638023853, + "layer_1_sharpness": 0.02947089448571205, + "layer_2_sharpness": 0.011856192722916603, + "layer_3_sharpness": 0.007980497553944588, + "layer_4_sharpness": 0.0044267200864851475, + "layer_5_sharpness": 0.005644605029374361, + "layer_6_sharpness": 0.007368127349764109, + "layer_7_sharpness": 0.007424389477819204, + "layer_8_sharpness": 0.00721854018047452, + "layer_9_sharpness": 0.005504883825778961, + "layer_10_sharpness": 0.003671009559184313, + "layer_11_sharpness": 0.0035114579368382692, + "layer_12_sharpness": 0.004866105038672686 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..100ff482d4641420e9d964cfbae509c806e13525 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.399100422859192, + "total_l1_linf_norm": 9548.83984375, + "total_spectral_norm": 1.3991000652313232, + "layer_1_update_fnorm": 0.11916103959083557, + "layer_1_max_l1_linf_norm": 0.34962183237075806, + "layer_1_max_spectral_norm": 0.00789377000182867, + "layer_2_update_fnorm": 0.11510945856571198, + "layer_2_max_l1_linf_norm": 0.43320342898368835, + "layer_2_max_spectral_norm": 0.00975835882127285, + "layer_3_update_fnorm": 0.11569403111934662, + "layer_3_max_l1_linf_norm": 0.3978175222873688, + "layer_3_max_spectral_norm": 0.008916601538658142, + "layer_4_update_fnorm": 0.11913549900054932, + "layer_4_max_l1_linf_norm": 0.40717384219169617, + "layer_4_max_spectral_norm": 0.009153103455901146, + "layer_5_update_fnorm": 
0.12044713646173477, + "layer_5_max_l1_linf_norm": 0.3820914030075073, + "layer_5_max_spectral_norm": 0.008654913865029812, + "layer_6_update_fnorm": 0.12091267853975296, + "layer_6_max_l1_linf_norm": 0.3675647974014282, + "layer_6_max_spectral_norm": 0.00827607698738575, + "layer_7_update_fnorm": 0.12114371359348297, + "layer_7_max_l1_linf_norm": 0.36781588196754456, + "layer_7_max_spectral_norm": 0.008222092874348164, + "layer_8_update_fnorm": 0.1211831197142601, + "layer_8_max_l1_linf_norm": 0.37158656120300293, + "layer_8_max_spectral_norm": 0.008307069540023804, + "layer_9_update_fnorm": 0.12135327607393265, + "layer_9_max_l1_linf_norm": 0.3914939761161804, + "layer_9_max_spectral_norm": 0.00871028658002615, + "layer_10_update_fnorm": 0.1214115172624588, + "layer_10_max_l1_linf_norm": 0.39973676204681396, + "layer_10_max_spectral_norm": 0.008928460069000721, + "layer_11_update_fnorm": 0.12116232514381409, + "layer_11_max_l1_linf_norm": 0.4178508520126343, + "layer_11_max_spectral_norm": 0.009410165250301361, + "layer_12_update_fnorm": 0.1209292784333229, + "layer_12_max_l1_linf_norm": 0.4094509482383728, + "layer_12_max_spectral_norm": 0.009218872524797916, + "total_sharpness": 0.003934936132282019, + "ip_v_neg_g": 0.0026784739457070827, + "cos_v_neg_g": 0.0009825387969613075, + "v_norm": 1.399100422859192, + "g_norm": 1.9484479427337646, + "hv_norm": 0.45036423206329346, + "cos_v_hv": 0.012224262580275536, + "hg_norm": 43.55622100830078, + "cos_g_hg": 0.4228176176548004, + "v_parallel_norm": 3.838833799818531e-05, + "v_perp_norm": 1.399100422859192, + "layer_1_v_norm": 0.11916103959083557, + "layer_1_cos_v_neg_g": 0.004104257561266422, + "layer_2_v_norm": 0.11510945856571198, + "layer_2_cos_v_neg_g": 0.003468441776931286, + "layer_3_v_norm": 0.11569403111934662, + "layer_3_cos_v_neg_g": 0.0016848249360918999, + "layer_4_v_norm": 0.11913549900054932, + "layer_4_cos_v_neg_g": 0.002530478872358799, + "layer_5_v_norm": 0.12044713646173477, + "layer_5_cos_v_neg_g": 0.0032732028048485518, + "layer_6_v_norm": 0.12091268599033356, + "layer_6_cos_v_neg_g": 0.003030593041330576, + "layer_7_v_norm": 0.12114371359348297, + "layer_7_cos_v_neg_g": 0.0030913976952433586, + "layer_8_v_norm": 0.1211831271648407, + "layer_8_cos_v_neg_g": 0.00379530293866992, + "layer_9_v_norm": 0.12135327607393265, + "layer_9_cos_v_neg_g": 0.0036307028494775295, + "layer_10_v_norm": 0.1214115172624588, + "layer_10_cos_v_neg_g": 0.0031610887963324785, + "layer_11_v_norm": 0.12116232514381409, + "layer_11_cos_v_neg_g": 0.0024545772466808558, + "layer_12_v_norm": 0.1209292784333229, + "layer_12_cos_v_neg_g": 0.003313523717224598, + "layer_1_sharpness": 0.02104196697473526, + "layer_2_sharpness": 0.009484204463660717, + "layer_3_sharpness": 0.008750866167247295, + "layer_4_sharpness": 0.0038848496042191982, + "layer_5_sharpness": 0.005343254189938307, + "layer_6_sharpness": 0.00774514302611351, + "layer_7_sharpness": 0.009039588272571564, + "layer_8_sharpness": 0.007791365031152964, + "layer_9_sharpness": 0.0060195294208824635, + "layer_10_sharpness": 0.004200148396193981, + "layer_11_sharpness": 0.0035970488097518682, + "layer_12_sharpness": 0.004973652772605419 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..7d0284b55e3387bd856660b8c1e68d11b5f51d20 --- /dev/null 
+++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3996919393539429, + "total_l1_linf_norm": 9538.1103515625, + "total_spectral_norm": 1.3996920585632324, + "layer_1_update_fnorm": 0.1191386952996254, + "layer_1_max_l1_linf_norm": 0.38566601276397705, + "layer_1_max_spectral_norm": 0.008632676675915718, + "layer_2_update_fnorm": 0.11365550756454468, + "layer_2_max_l1_linf_norm": 0.43742281198501587, + "layer_2_max_spectral_norm": 0.009792453609406948, + "layer_3_update_fnorm": 0.11528091877698898, + "layer_3_max_l1_linf_norm": 0.416984885931015, + "layer_3_max_spectral_norm": 0.009448623284697533, + "layer_4_update_fnorm": 0.11873890459537506, + "layer_4_max_l1_linf_norm": 0.4043999910354614, + "layer_4_max_spectral_norm": 0.009119068272411823, + "layer_5_update_fnorm": 0.12039347738027573, + "layer_5_max_l1_linf_norm": 0.371408075094223, + "layer_5_max_spectral_norm": 0.008430716581642628, + "layer_6_update_fnorm": 0.12095030397176743, + "layer_6_max_l1_linf_norm": 0.3333413004875183, + "layer_6_max_spectral_norm": 0.007519219536334276, + "layer_7_update_fnorm": 0.12102397531270981, + "layer_7_max_l1_linf_norm": 0.3193460702896118, + "layer_7_max_spectral_norm": 0.0072971126064658165, + "layer_8_update_fnorm": 0.12104089558124542, + "layer_8_max_l1_linf_norm": 0.3351396322250366, + "layer_8_max_spectral_norm": 0.0075869387947022915, + "layer_9_update_fnorm": 0.12129154056310654, + "layer_9_max_l1_linf_norm": 0.37083733081817627, + "layer_9_max_spectral_norm": 0.008381078951060772, + "layer_10_update_fnorm": 0.12127503007650375, + "layer_10_max_l1_linf_norm": 0.40956124663352966, + "layer_10_max_spectral_norm": 0.009206736460328102, + "layer_11_update_fnorm": 0.12117550522089005, + "layer_11_max_l1_linf_norm": 0.43423739075660706, + "layer_11_max_spectral_norm": 0.009724650532007217, + "layer_12_update_fnorm": 0.12088846415281296, + "layer_12_max_l1_linf_norm": 0.40666913986206055, + "layer_12_max_spectral_norm": 0.009157655760645866, + "total_sharpness": 0.003990942146629095, + "ip_v_neg_g": 0.003955754451453686, + "cos_v_neg_g": 0.0013450939441099763, + "v_norm": 1.3996919393539429, + "g_norm": 2.101088047027588, + "hv_norm": 0.49919095635414124, + "cos_v_hv": 0.011190286837518215, + "hg_norm": 69.8932876586914, + "cos_g_hg": 0.47253578901290894, + "v_parallel_norm": 4.6515830035787076e-05, + "v_perp_norm": 1.3996919393539429, + "layer_1_v_norm": 0.1191386952996254, + "layer_1_cos_v_neg_g": 0.005959860514849424, + "layer_2_v_norm": 0.11365550756454468, + "layer_2_cos_v_neg_g": 0.004873186349868774, + "layer_3_v_norm": 0.11528091877698898, + "layer_3_cos_v_neg_g": 0.003639849368482828, + "layer_4_v_norm": 0.11873890459537506, + "layer_4_cos_v_neg_g": 0.0027587790973484516, + "layer_5_v_norm": 0.12039347738027573, + "layer_5_cos_v_neg_g": 0.003363850526511669, + "layer_6_v_norm": 0.12095030397176743, + "layer_6_cos_v_neg_g": 0.004293244332075119, + "layer_7_v_norm": 0.12102397531270981, + "layer_7_cos_v_neg_g": 0.0044240932911634445, + "layer_8_v_norm": 0.12104090303182602, + "layer_8_cos_v_neg_g": 0.0035020236391574144, + "layer_9_v_norm": 0.12129154056310654, + "layer_9_cos_v_neg_g": 0.005065759643912315, + "layer_10_v_norm": 0.12127503007650375, + "layer_10_cos_v_neg_g": 0.0053029051050543785, + "layer_11_v_norm": 0.12117550522089005, + "layer_11_cos_v_neg_g": 0.005445551592856646, + "layer_12_v_norm": 0.12088846415281296, + "layer_12_cos_v_neg_g": 0.0031374634709209204, + 
"layer_1_sharpness": 0.048701804131269455, + "layer_2_sharpness": 0.01584988459944725, + "layer_3_sharpness": 0.007213251665234566, + "layer_4_sharpness": 0.00334295816719532, + "layer_5_sharpness": 0.004735119640827179, + "layer_6_sharpness": 0.006268823519349098, + "layer_7_sharpness": 0.0068833595141768456, + "layer_8_sharpness": 0.006703446619212627, + "layer_9_sharpness": 0.005465415306389332, + "layer_10_sharpness": 0.004057785030454397, + "layer_11_sharpness": 0.00396969448775053, + "layer_12_sharpness": 0.004850110504776239 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_8000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..4b116a17e74005a2ceb0ec1a32d1455d43cea199 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3913013935089111, + "total_l1_linf_norm": 9502.947265625, + "total_spectral_norm": 1.3913013935089111, + "layer_1_update_fnorm": 0.1187751516699791, + "layer_1_max_l1_linf_norm": 0.3717349171638489, + "layer_1_max_spectral_norm": 0.00823015347123146, + "layer_2_update_fnorm": 0.11384511739015579, + "layer_2_max_l1_linf_norm": 0.42613470554351807, + "layer_2_max_spectral_norm": 0.009524649940431118, + "layer_3_update_fnorm": 0.11600232124328613, + "layer_3_max_l1_linf_norm": 0.4050753116607666, + "layer_3_max_spectral_norm": 0.009067642502486706, + "layer_4_update_fnorm": 0.11907826364040375, + "layer_4_max_l1_linf_norm": 0.38993626832962036, + "layer_4_max_spectral_norm": 0.008823251351714134, + "layer_5_update_fnorm": 0.12066284567117691, + "layer_5_max_l1_linf_norm": 0.37303194403648376, + "layer_5_max_spectral_norm": 0.008367343805730343, + "layer_6_update_fnorm": 0.12107522040605545, + "layer_6_max_l1_linf_norm": 0.33685949444770813, + "layer_6_max_spectral_norm": 0.007621916942298412, + "layer_7_update_fnorm": 0.12120741605758667, + "layer_7_max_l1_linf_norm": 0.32427671551704407, + "layer_7_max_spectral_norm": 0.007359953131526709, + "layer_8_update_fnorm": 0.12110377848148346, + "layer_8_max_l1_linf_norm": 0.3275020122528076, + "layer_8_max_spectral_norm": 0.007395140826702118, + "layer_9_update_fnorm": 0.12143334746360779, + "layer_9_max_l1_linf_norm": 0.365442156791687, + "layer_9_max_spectral_norm": 0.00822696927934885, + "layer_10_update_fnorm": 0.12151331454515457, + "layer_10_max_l1_linf_norm": 0.3880840241909027, + "layer_10_max_spectral_norm": 0.008753706701099873, + "layer_11_update_fnorm": 0.12120898067951202, + "layer_11_max_l1_linf_norm": 0.3950345814228058, + "layer_11_max_spectral_norm": 0.00882056076079607, + "layer_12_update_fnorm": 0.12090820074081421, + "layer_12_max_l1_linf_norm": 0.4039294719696045, + "layer_12_max_spectral_norm": 0.00908159650862217, + "total_sharpness": 0.0043948558159172535, + "ip_v_neg_g": 0.003925664816051722, + "cos_v_neg_g": 0.001455006655305624, + "v_norm": 1.3913013935089111, + "g_norm": 1.9392197132110596, + "hv_norm": 0.6628636717796326, + "cos_v_hv": 0.009224474430084229, + "hg_norm": 48.50783157348633, + "cos_g_hg": 0.4519699215888977, + "v_parallel_norm": 6.614681478822604e-05, + "v_perp_norm": 1.3913013935089111, + "layer_1_v_norm": 0.1187751516699791, + "layer_1_cos_v_neg_g": 0.004380767233669758, + "layer_2_v_norm": 0.11384511739015579, + "layer_2_cos_v_neg_g": 0.005142576526850462, + 
"layer_3_v_norm": 0.11600232869386673, + "layer_3_cos_v_neg_g": 0.0032864839304238558, + "layer_4_v_norm": 0.11907826364040375, + "layer_4_cos_v_neg_g": 0.003239269135519862, + "layer_5_v_norm": 0.12066284567117691, + "layer_5_cos_v_neg_g": 0.0042687696404755116, + "layer_6_v_norm": 0.12107521295547485, + "layer_6_cos_v_neg_g": 0.005516359582543373, + "layer_7_v_norm": 0.12120741605758667, + "layer_7_cos_v_neg_g": 0.006089703645557165, + "layer_8_v_norm": 0.12110377848148346, + "layer_8_cos_v_neg_g": 0.005874696187674999, + "layer_9_v_norm": 0.12143334746360779, + "layer_9_cos_v_neg_g": 0.005203329026699066, + "layer_10_v_norm": 0.12151331454515457, + "layer_10_cos_v_neg_g": 0.004326831083744764, + "layer_11_v_norm": 0.12120898067951202, + "layer_11_cos_v_neg_g": 0.002877421211451292, + "layer_12_v_norm": 0.12090820074081421, + "layer_12_cos_v_neg_g": 0.0036139346193522215, + "layer_1_sharpness": 0.06329308450222015, + "layer_2_sharpness": 0.018583493307232857, + "layer_3_sharpness": 0.007148578763008118, + "layer_4_sharpness": 0.003562025260180235, + "layer_5_sharpness": 0.00486033083871007, + "layer_6_sharpness": 0.006651054602116346, + "layer_7_sharpness": 0.0073991636745631695, + "layer_8_sharpness": 0.006949683651328087, + "layer_9_sharpness": 0.005551882088184357, + "layer_10_sharpness": 0.004287194926291704, + "layer_11_sharpness": 0.003970862366259098, + "layer_12_sharpness": 0.005316580645740032 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..c16cfad6da51308be93566afcdf6b0a1b21313ce --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.39240562915802, + "total_l1_linf_norm": 9489.5849609375, + "total_spectral_norm": 1.3924055099487305, + "layer_1_update_fnorm": 0.11848020553588867, + "layer_1_max_l1_linf_norm": 0.3523162007331848, + "layer_1_max_spectral_norm": 0.007943360134959221, + "layer_2_update_fnorm": 0.11459512263536453, + "layer_2_max_l1_linf_norm": 0.4210606813430786, + "layer_2_max_spectral_norm": 0.009531916119158268, + "layer_3_update_fnorm": 0.11568111926317215, + "layer_3_max_l1_linf_norm": 0.4085010886192322, + "layer_3_max_spectral_norm": 0.009228749200701714, + "layer_4_update_fnorm": 0.11866570264101028, + "layer_4_max_l1_linf_norm": 0.4057825207710266, + "layer_4_max_spectral_norm": 0.009165827184915543, + "layer_5_update_fnorm": 0.1201825961470604, + "layer_5_max_l1_linf_norm": 0.37225818634033203, + "layer_5_max_spectral_norm": 0.008410155773162842, + "layer_6_update_fnorm": 0.12077509611845016, + "layer_6_max_l1_linf_norm": 0.31303757429122925, + "layer_6_max_spectral_norm": 0.007083569653332233, + "layer_7_update_fnorm": 0.12099875509738922, + "layer_7_max_l1_linf_norm": 0.31115323305130005, + "layer_7_max_spectral_norm": 0.007023695390671492, + "layer_8_update_fnorm": 0.12081063538789749, + "layer_8_max_l1_linf_norm": 0.3108087182044983, + "layer_8_max_spectral_norm": 0.007030439097434282, + "layer_9_update_fnorm": 0.12098637223243713, + "layer_9_max_l1_linf_norm": 0.34247493743896484, + "layer_9_max_spectral_norm": 0.007704562041908503, + "layer_10_update_fnorm": 0.120755635201931, + "layer_10_max_l1_linf_norm": 0.35739877820014954, + "layer_10_max_spectral_norm": 0.008132180199027061, + 
"layer_11_update_fnorm": 0.12029874324798584, + "layer_11_max_l1_linf_norm": 0.3890983462333679, + "layer_11_max_spectral_norm": 0.008744323626160622, + "layer_12_update_fnorm": 0.12040060013532639, + "layer_12_max_l1_linf_norm": 0.39433982968330383, + "layer_12_max_spectral_norm": 0.008927211165428162, + "total_sharpness": 0.0030891625210642815, + "ip_v_neg_g": 0.003980831243097782, + "cos_v_neg_g": 0.001496251323260367, + "v_norm": 1.39240562915802, + "g_norm": 1.9107481241226196, + "hv_norm": 0.37739109992980957, + "cos_v_hv": 0.011397638358175755, + "hg_norm": 41.68259048461914, + "cos_g_hg": 0.43801525235176086, + "v_parallel_norm": 5.387462078942917e-05, + "v_perp_norm": 1.39240562915802, + "layer_1_v_norm": 0.11848020553588867, + "layer_1_cos_v_neg_g": 0.006893645040690899, + "layer_2_v_norm": 0.11459512263536453, + "layer_2_cos_v_neg_g": 0.006813620682805777, + "layer_3_v_norm": 0.11568111926317215, + "layer_3_cos_v_neg_g": 0.005235254764556885, + "layer_4_v_norm": 0.11866570264101028, + "layer_4_cos_v_neg_g": 0.004996230825781822, + "layer_5_v_norm": 0.1201825961470604, + "layer_5_cos_v_neg_g": 0.005051096901297569, + "layer_6_v_norm": 0.12077510356903076, + "layer_6_cos_v_neg_g": 0.005671556573361158, + "layer_7_v_norm": 0.12099875509738922, + "layer_7_cos_v_neg_g": 0.006516189780086279, + "layer_8_v_norm": 0.1208106279373169, + "layer_8_cos_v_neg_g": 0.004694104660302401, + "layer_9_v_norm": 0.12098637223243713, + "layer_9_cos_v_neg_g": 0.004141103941947222, + "layer_10_v_norm": 0.120755635201931, + "layer_10_cos_v_neg_g": 0.003821540856733918, + "layer_11_v_norm": 0.12029874324798584, + "layer_11_cos_v_neg_g": 0.0035274592228233814, + "layer_12_v_norm": 0.12040060013532639, + "layer_12_cos_v_neg_g": 0.0019487458048388362, + "layer_1_sharpness": 0.024635113775730133, + "layer_2_sharpness": 0.007271883077919483, + "layer_3_sharpness": 0.0067786057479679585, + "layer_4_sharpness": 0.003617251291871071, + "layer_5_sharpness": 0.0048772008158266544, + "layer_6_sharpness": 0.0056414175778627396, + "layer_7_sharpness": 0.006345354951918125, + "layer_8_sharpness": 0.005820178426802158, + "layer_9_sharpness": 0.004450635518878698, + "layer_10_sharpness": 0.003145243041217327, + "layer_11_sharpness": 0.0032931610476225615, + "layer_12_sharpness": 0.006503106094896793 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..028ca3007d3593e530bfd0d0f10d6a6e1a88a8d0 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3934886455535889, + "total_l1_linf_norm": 9512.2919921875, + "total_spectral_norm": 1.3934887647628784, + "layer_1_update_fnorm": 0.11885428428649902, + "layer_1_max_l1_linf_norm": 0.3460630178451538, + "layer_1_max_spectral_norm": 0.007779388222843409, + "layer_2_update_fnorm": 0.11483240872621536, + "layer_2_max_l1_linf_norm": 0.41705256700515747, + "layer_2_max_spectral_norm": 0.009396122768521309, + "layer_3_update_fnorm": 0.11640273034572601, + "layer_3_max_l1_linf_norm": 0.3947335481643677, + "layer_3_max_spectral_norm": 0.0088877584785223, + "layer_4_update_fnorm": 0.11931400746107101, + "layer_4_max_l1_linf_norm": 0.39057570695877075, + "layer_4_max_spectral_norm": 0.008762630634009838, + "layer_5_update_fnorm": 
0.12065888941287994, + "layer_5_max_l1_linf_norm": 0.38510915637016296, + "layer_5_max_spectral_norm": 0.008668482303619385, + "layer_6_update_fnorm": 0.12101320177316666, + "layer_6_max_l1_linf_norm": 0.32656124234199524, + "layer_6_max_spectral_norm": 0.007360933814197779, + "layer_7_update_fnorm": 0.12114347517490387, + "layer_7_max_l1_linf_norm": 0.3280564844608307, + "layer_7_max_spectral_norm": 0.007409839425235987, + "layer_8_update_fnorm": 0.12116032838821411, + "layer_8_max_l1_linf_norm": 0.3329790532588959, + "layer_8_max_spectral_norm": 0.00750385457649827, + "layer_9_update_fnorm": 0.1213715672492981, + "layer_9_max_l1_linf_norm": 0.34255003929138184, + "layer_9_max_spectral_norm": 0.007737861480563879, + "layer_10_update_fnorm": 0.1213308721780777, + "layer_10_max_l1_linf_norm": 0.36581090092658997, + "layer_10_max_spectral_norm": 0.008227289654314518, + "layer_11_update_fnorm": 0.1212213858962059, + "layer_11_max_l1_linf_norm": 0.3980831801891327, + "layer_11_max_spectral_norm": 0.008982870727777481, + "layer_12_update_fnorm": 0.12086758762598038, + "layer_12_max_l1_linf_norm": 0.3891545534133911, + "layer_12_max_spectral_norm": 0.008795271627604961, + "total_sharpness": 0.0030362613033503294, + "ip_v_neg_g": 0.0011896539945155382, + "cos_v_neg_g": 0.0004197694652248174, + "v_norm": 1.3934886455535889, + "g_norm": 2.0337913036346436, + "hv_norm": 0.4049612581729889, + "cos_v_hv": 0.010447901673614979, + "hg_norm": 63.017024993896484, + "cos_g_hg": 0.5090407133102417, + "v_parallel_norm": 4.497511690715328e-05, + "v_perp_norm": 1.3934886455535889, + "layer_1_v_norm": 0.11885428428649902, + "layer_1_cos_v_neg_g": 0.003379554720595479, + "layer_2_v_norm": 0.11483240872621536, + "layer_2_cos_v_neg_g": -0.0005087526515126228, + "layer_3_v_norm": 0.11640273034572601, + "layer_3_cos_v_neg_g": -0.00038828945253044367, + "layer_4_v_norm": 0.11931400746107101, + "layer_4_cos_v_neg_g": 9.333252819487825e-05, + "layer_5_v_norm": 0.12065888941287994, + "layer_5_cos_v_neg_g": 0.0007175183854997158, + "layer_6_v_norm": 0.12101320177316666, + "layer_6_cos_v_neg_g": 0.00044512454769574106, + "layer_7_v_norm": 0.12114347517490387, + "layer_7_cos_v_neg_g": 0.0010200500255450606, + "layer_8_v_norm": 0.12116032838821411, + "layer_8_cos_v_neg_g": 0.0016851156251505017, + "layer_9_v_norm": 0.1213715672492981, + "layer_9_cos_v_neg_g": 0.0015617192257195711, + "layer_10_v_norm": 0.1213308721780777, + "layer_10_cos_v_neg_g": 0.0017778831534087658, + "layer_11_v_norm": 0.1212213858962059, + "layer_11_cos_v_neg_g": 0.0017839598003774881, + "layer_12_v_norm": 0.12086758762598038, + "layer_12_cos_v_neg_g": 0.002004180336371064, + "layer_1_sharpness": 0.014213057234883308, + "layer_2_sharpness": 0.0069259414449334145, + "layer_3_sharpness": 0.004322534892708063, + "layer_4_sharpness": 0.003073323518037796, + "layer_5_sharpness": 0.004161357413977385, + "layer_6_sharpness": 0.0055887820199131966, + "layer_7_sharpness": 0.006151852663606405, + "layer_8_sharpness": 0.006095760501921177, + "layer_9_sharpness": 0.004671077709645033, + "layer_10_sharpness": 0.0032112703192979097, + "layer_11_sharpness": 0.0030280083883553743, + "layer_12_sharpness": 0.004355413373559713 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_9500.json new file mode 100644 index 
0000000000000000000000000000000000000000..1d6e93d8a82a1ea7c266511f2e38de33483b139f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.3887016773223877, + "total_l1_linf_norm": 9476.3525390625, + "total_spectral_norm": 1.3887016773223877, + "layer_1_update_fnorm": 0.11874881386756897, + "layer_1_max_l1_linf_norm": 0.3714001178741455, + "layer_1_max_spectral_norm": 0.00835979264229536, + "layer_2_update_fnorm": 0.11417823284864426, + "layer_2_max_l1_linf_norm": 0.42979028820991516, + "layer_2_max_spectral_norm": 0.009681878611445427, + "layer_3_update_fnorm": 0.11583879590034485, + "layer_3_max_l1_linf_norm": 0.42436522245407104, + "layer_3_max_spectral_norm": 0.009610029868781567, + "layer_4_update_fnorm": 0.11891139298677444, + "layer_4_max_l1_linf_norm": 0.3902786672115326, + "layer_4_max_spectral_norm": 0.00880628265440464, + "layer_5_update_fnorm": 0.12048746645450592, + "layer_5_max_l1_linf_norm": 0.38130125403404236, + "layer_5_max_spectral_norm": 0.008599397726356983, + "layer_6_update_fnorm": 0.12084488570690155, + "layer_6_max_l1_linf_norm": 0.3252246379852295, + "layer_6_max_spectral_norm": 0.00738135538995266, + "layer_7_update_fnorm": 0.1211259588599205, + "layer_7_max_l1_linf_norm": 0.32523247599601746, + "layer_7_max_spectral_norm": 0.007340597920119762, + "layer_8_update_fnorm": 0.1209598034620285, + "layer_8_max_l1_linf_norm": 0.3431336283683777, + "layer_8_max_spectral_norm": 0.007759544998407364, + "layer_9_update_fnorm": 0.1212630644440651, + "layer_9_max_l1_linf_norm": 0.3422362506389618, + "layer_9_max_spectral_norm": 0.007785012945532799, + "layer_10_update_fnorm": 0.12121430784463882, + "layer_10_max_l1_linf_norm": 0.3731996417045593, + "layer_10_max_spectral_norm": 0.008414813317358494, + "layer_11_update_fnorm": 0.12110427767038345, + "layer_11_max_l1_linf_norm": 0.4031912684440613, + "layer_11_max_spectral_norm": 0.009062562137842178, + "layer_12_update_fnorm": 0.12090426683425903, + "layer_12_max_l1_linf_norm": 0.4020001292228699, + "layer_12_max_spectral_norm": 0.009084822610020638, + "total_sharpness": 0.003573008580133319, + "ip_v_neg_g": 0.003741658292710781, + "cos_v_neg_g": 0.0014125361340120435, + "v_norm": 1.3887016773223877, + "g_norm": 1.9074606895446777, + "hv_norm": 0.4252277612686157, + "cos_v_hv": 0.011668671853840351, + "hg_norm": 42.406890869140625, + "cos_g_hg": 0.4829922616481781, + "v_parallel_norm": 5.912801861995831e-05, + "v_perp_norm": 1.3887016773223877, + "layer_1_v_norm": 0.11874881386756897, + "layer_1_cos_v_neg_g": 0.006662121973931789, + "layer_2_v_norm": 0.11417823284864426, + "layer_2_cos_v_neg_g": 0.006377467419952154, + "layer_3_v_norm": 0.11583879590034485, + "layer_3_cos_v_neg_g": 0.004463574383407831, + "layer_4_v_norm": 0.11891139298677444, + "layer_4_cos_v_neg_g": 0.0032220198772847652, + "layer_5_v_norm": 0.12048746645450592, + "layer_5_cos_v_neg_g": 0.0037297112867236137, + "layer_6_v_norm": 0.12084488570690155, + "layer_6_cos_v_neg_g": 0.004111425951123238, + "layer_7_v_norm": 0.1211259588599205, + "layer_7_cos_v_neg_g": 0.005289268214255571, + "layer_8_v_norm": 0.1209598034620285, + "layer_8_cos_v_neg_g": 0.005454505328088999, + "layer_9_v_norm": 0.1212630644440651, + "layer_9_cos_v_neg_g": 0.0048992750234901905, + "layer_10_v_norm": 0.12121430784463882, + "layer_10_cos_v_neg_g": 0.004451430402696133, + "layer_11_v_norm": 0.12110428512096405, + "layer_11_cos_v_neg_g": 0.0036046842578798532, + "layer_12_v_norm": 
0.12090426683425903, + "layer_12_cos_v_neg_g": 0.0023469922598451376, + "layer_1_sharpness": 0.019085779786109924, + "layer_2_sharpness": 0.00772481132298708, + "layer_3_sharpness": 0.006465226411819458, + "layer_4_sharpness": 0.0036101569421589375, + "layer_5_sharpness": 0.00485237967222929, + "layer_6_sharpness": 0.00612430926412344, + "layer_7_sharpness": 0.007060145493596792, + "layer_8_sharpness": 0.00743444450199604, + "layer_9_sharpness": 0.0050680264830589294, + "layer_10_sharpness": 0.003303473349660635, + "layer_11_sharpness": 0.003436378203332424, + "layer_12_sharpness": 0.004221777897328138 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/training_log.txt b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..3d58f5d722434ebb064fbd80897249f523d90b43 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.002_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct 
file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", 
"bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. 
+ """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. + # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? 
+ grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. 
Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. + loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. 
Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = <v, -g>; cos_v_neg_g = <v, -g> / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for 
group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + 
device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step, args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step, args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => it is excluded from the timing stats below + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logfile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nicely + if ddp: + destroy_process_group() +step:0 validation loss:11.026603 +step:0 train loss:11.019236 +step:1 train loss:11.024580 +step:2 train loss:11.017786 +step:3 train loss:11.015248 +step:4 train loss:11.009224 +step:5 train loss:11.002455 +step:6 train loss:10.991627 +step:7 train loss:10.982853 +step:8 train loss:10.971094 +step:9 train loss:10.957867 +step:10 train loss:10.941467 +step:11 train loss:10.929579 +step:12 train loss:10.906958 +step:13 train loss:10.890130 +step:14 train loss:10.866108 +step:15 train loss:10.845673 +step:16 train loss:10.822094 +step:17 train loss:10.799590 +step:18 train loss:10.772132 +step:19 train loss:10.744883 +step:20 train loss:10.712521 +step:21 train loss:10.686361 +step:22 train loss:10.647101 +step:23 train loss:10.620276 +step:24 train loss:10.577477 +step:25 train loss:10.551258 +step:26 train loss:10.506709 +step:27 train loss:10.467842 +step:28 train loss:10.434715 +step:29 train loss:10.394001 +step:30 train loss:10.352176 +step:31 train loss:10.303435 +step:32 train loss:10.254769 +step:33 train loss:10.213554 +step:34 train loss:10.175103 +step:35 train loss:10.116163 +step:36 train loss:10.070840 +step:37 train loss:10.009063
+step:38 train loss:9.971458 +step:39 train loss:9.911036 +step:40 train loss:9.862492 +step:41 train loss:9.797664 +step:42 train loss:9.759998 +step:43 train loss:9.677557 +step:44 train loss:9.632718 +step:45 train loss:9.568302 +step:46 train loss:9.524688 +step:47 train loss:9.460234 +step:48 train loss:9.396908 +step:49 train loss:9.325592 +step:50 train loss:9.257504 +step:51 train loss:9.189048 +step:52 train loss:9.145910 +step:53 train loss:9.078447 +step:54 train loss:9.017400 +step:55 train loss:8.942318 +step:56 train loss:8.877590 +step:57 train loss:8.827387 +step:58 train loss:8.741039 +step:59 train loss:8.688875 +step:60 train loss:8.621756 +step:61 train loss:8.564535 +step:62 train loss:8.499981 +step:63 train loss:8.474393 +step:64 train loss:8.362597 +step:65 train loss:8.314816 +step:66 train loss:8.268595 +step:67 train loss:8.216271 +step:68 train loss:8.158589 +step:69 train loss:8.091734 +step:70 train loss:8.035707 +step:71 train loss:7.966009 +step:72 train loss:7.952359 +step:73 train loss:7.886920 +step:74 train loss:7.865457 +step:75 train loss:7.790602 +step:76 train loss:7.823627 +step:77 train loss:7.739275 +step:78 train loss:7.583841 +step:79 train loss:7.646844 +step:80 train loss:7.616093 +step:81 train loss:7.639421 +step:82 train loss:7.609092 +step:83 train loss:7.566003 +step:84 train loss:7.527968 +step:85 train loss:7.500962 +step:86 train loss:7.477062 +step:87 train loss:7.447413 +step:88 train loss:7.450269 +step:89 train loss:7.412117 +step:90 train loss:7.444789 +step:91 train loss:7.442373 +step:92 train loss:7.434114 +step:93 train loss:7.375933 +step:94 train loss:7.350438 +step:95 train loss:7.295458 +step:96 train loss:7.375121 +step:97 train loss:7.314222 +step:98 train loss:7.302282 +step:99 train loss:7.257216 +step:100 train loss:7.306890 +step:101 train loss:7.193361 +step:102 train loss:7.181815 +step:103 train loss:7.163523 +step:104 train loss:7.196711 +step:105 train loss:7.237711 +step:106 train loss:7.172992 +step:107 train loss:7.125220 +step:108 train loss:7.132435 +step:109 train loss:7.161960 +step:110 train loss:7.076204 +step:111 train loss:7.079937 +step:112 train loss:7.061610 +step:113 train loss:7.017989 +step:114 train loss:7.073534 +step:115 train loss:7.008986 +step:116 train loss:6.979212 +step:117 train loss:6.909903 +step:118 train loss:6.967874 +step:119 train loss:6.904777 +step:120 train loss:6.910783 +step:121 train loss:6.820719 +step:122 train loss:6.909665 +step:123 train loss:6.818470 +step:124 train loss:6.800270 +step:125 train loss:6.767499 +step:126 train loss:6.857353 +step:127 train loss:6.765767 +step:128 train loss:6.796183 +step:129 train loss:6.768472 +step:130 train loss:6.804365 +step:131 train loss:6.737147 +step:132 train loss:6.645259 +step:133 train loss:6.710891 +step:134 train loss:6.673386 +step:135 train loss:6.577938 +step:136 train loss:6.615635 +step:137 train loss:6.613215 +step:138 train loss:6.545146 +step:139 train loss:6.621140 +step:140 train loss:6.526426 +step:141 train loss:6.625242 +step:142 train loss:6.566401 +step:143 train loss:6.573082 +step:144 train loss:6.542583 +step:145 train loss:6.473821 +step:146 train loss:6.486625 +step:147 train loss:6.537324 +step:148 train loss:6.542338 +step:149 train loss:6.486496 +step:150 train loss:6.494830 +step:151 train loss:6.400015 +step:152 train loss:6.435820 +step:153 train loss:6.414867 +step:154 train loss:6.495903 +step:155 train loss:6.468059 +step:156 train loss:6.496357 +step:157 train loss:6.399416 +step:158 train 
loss:6.383728 +step:159 train loss:6.417413 +step:160 train loss:6.394296 +step:161 train loss:6.387313 +step:162 train loss:6.357867 +step:163 train loss:6.371432 +step:164 train loss:6.382645 +step:165 train loss:6.386927 +step:166 train loss:6.336914 +step:167 train loss:6.338814 +step:168 train loss:6.311129 +step:169 train loss:6.260616 +step:170 train loss:6.226465 +step:171 train loss:6.343608 +step:172 train loss:6.272195 +step:173 train loss:6.317667 +step:174 train loss:6.318760 +step:175 train loss:6.280886 +step:176 train loss:6.232757 +step:177 train loss:6.276104 +step:178 train loss:6.279695 +step:179 train loss:6.236159 +step:180 train loss:6.220895 +step:181 train loss:6.253428 +step:182 train loss:6.185636 +step:183 train loss:6.275318 +step:184 train loss:6.242480 +step:185 train loss:6.159875 +step:186 train loss:6.302555 +step:187 train loss:6.239533 +step:188 train loss:6.060608 +step:189 train loss:6.219072 +step:190 train loss:6.210073 +step:191 train loss:6.130909 +step:192 train loss:6.045124 +step:193 train loss:6.196591 +step:194 train loss:6.213760 +step:195 train loss:6.206047 +step:196 train loss:6.175800 +step:197 train loss:6.171524 +step:198 train loss:6.115553 +step:199 train loss:6.191552 +step:200 train loss:6.229741 +step:201 train loss:6.161210 +step:202 train loss:6.164470 +step:203 train loss:6.121763 +step:204 train loss:6.156673 +step:205 train loss:6.008028 +step:206 train loss:6.143284 +step:207 train loss:6.118241 +step:208 train loss:6.061602 +step:209 train loss:6.053972 +step:210 train loss:6.054951 +step:211 train loss:6.124718 +step:212 train loss:6.078153 +step:213 train loss:6.089726 +step:214 train loss:6.074311 +step:215 train loss:6.094705 +step:216 train loss:6.040648 +step:217 train loss:6.051750 +step:218 train loss:6.024975 +step:219 train loss:6.002210 +step:220 train loss:6.044712 +step:221 train loss:5.998403 +step:222 train loss:6.036910 +step:223 train loss:6.059367 +step:224 train loss:6.043986 +step:225 train loss:5.982927 +step:226 train loss:5.986811 +step:227 train loss:6.047598 +step:228 train loss:6.014262 +step:229 train loss:6.080581 +step:230 train loss:5.948505 +step:231 train loss:6.004709 +step:232 train loss:5.988400 +step:233 train loss:5.963038 +step:234 train loss:5.957652 +step:235 train loss:6.038661 +step:236 train loss:5.987398 +step:237 train loss:6.020130 +step:238 train loss:6.018801 +step:239 train loss:5.929849 +step:240 train loss:6.001858 +step:241 train loss:6.033790 +step:242 train loss:6.013671 +step:243 train loss:5.925674 +step:244 train loss:5.954040 +step:245 train loss:5.934894 +step:246 train loss:5.934054 +step:247 train loss:5.924327 +step:248 train loss:5.885782 +step:249 train loss:5.944387 +step:250 validation loss:5.929126 +step:250 train loss:5.907756 +step:251 train loss:5.947447 +step:252 train loss:5.900949 +step:253 train loss:5.906796 +step:254 train loss:5.869533 +step:255 train loss:5.905956 +step:256 train loss:5.903941 +step:257 train loss:5.954009 +step:258 train loss:5.854620 +step:259 train loss:5.879152 +step:260 train loss:5.849466 +step:261 train loss:5.852642 +step:262 train loss:5.911499 +step:263 train loss:5.875836 +step:264 train loss:5.844789 +step:265 train loss:5.864686 +step:266 train loss:5.828648 +step:267 train loss:5.864713 +step:268 train loss:5.816113 +step:269 train loss:5.845649 +step:270 train loss:5.862305 +step:271 train loss:5.856183 +step:272 train loss:5.806086 +step:273 train loss:5.877666 +step:274 train loss:5.789065 +step:275 train 
loss:5.829462 +step:276 train loss:5.798591 +step:277 train loss:5.800965 +step:278 train loss:5.775480 +step:279 train loss:5.745768 +step:280 train loss:5.822749 +step:281 train loss:5.897341 +step:282 train loss:5.777075 +step:283 train loss:5.794195 +step:284 train loss:5.758664 +step:285 train loss:5.812307 +step:286 train loss:5.787498 +step:287 train loss:5.762237 +step:288 train loss:5.744523 +step:289 train loss:5.767781 +step:290 train loss:5.822463 +step:291 train loss:5.751047 +step:292 train loss:5.805127 +step:293 train loss:5.730834 +step:294 train loss:5.846071 +step:295 train loss:5.737273 +step:296 train loss:5.794670 +step:297 train loss:5.824731 +step:298 train loss:5.716547 +step:299 train loss:5.785352 +step:300 train loss:5.710490 +step:301 train loss:5.743570 +step:302 train loss:5.717987 +step:303 train loss:5.730410 +step:304 train loss:5.758085 +step:305 train loss:5.685432 +step:306 train loss:5.707586 +step:307 train loss:5.727688 +step:308 train loss:5.643910 +step:309 train loss:5.777818 +step:310 train loss:5.741990 +step:311 train loss:5.720016 +step:312 train loss:5.704143 +step:313 train loss:5.729944 +step:314 train loss:5.709214 +step:315 train loss:5.669983 +step:316 train loss:5.666242 +step:317 train loss:5.631907 +step:318 train loss:5.627882 +step:319 train loss:5.705493 +step:320 train loss:5.625101 +step:321 train loss:5.678669 +step:322 train loss:5.669612 +step:323 train loss:5.733074 +step:324 train loss:5.677799 +step:325 train loss:5.697969 +step:326 train loss:5.699463 +step:327 train loss:5.681533 +step:328 train loss:5.657270 +step:329 train loss:5.668005 +step:330 train loss:5.599572 +step:331 train loss:5.624224 +step:332 train loss:5.606959 +step:333 train loss:5.546675 +step:334 train loss:5.641263 +step:335 train loss:5.687005 +step:336 train loss:5.814991 +step:337 train loss:5.704161 +step:338 train loss:5.618327 +step:339 train loss:5.576610 +step:340 train loss:5.579971 +step:341 train loss:5.571514 +step:342 train loss:5.640918 +step:343 train loss:5.615126 +step:344 train loss:5.569956 +step:345 train loss:5.539072 +step:346 train loss:5.588754 +step:347 train loss:5.528695 +step:348 train loss:5.534243 +step:349 train loss:5.472468 +step:350 train loss:5.510569 +step:351 train loss:5.574662 +step:352 train loss:5.535403 +step:353 train loss:5.557772 +step:354 train loss:5.516115 +step:355 train loss:5.560203 +step:356 train loss:5.529747 +step:357 train loss:5.599311 +step:358 train loss:5.630393 +step:359 train loss:5.465508 +step:360 train loss:5.581963 +step:361 train loss:5.566026 +step:362 train loss:5.541644 +step:363 train loss:5.493799 +step:364 train loss:5.611041 +step:365 train loss:5.553945 +step:366 train loss:5.520515 +step:367 train loss:5.546834 +step:368 train loss:5.513525 +step:369 train loss:5.495025 +step:370 train loss:5.547218 +step:371 train loss:5.483528 +step:372 train loss:5.553702 +step:373 train loss:5.498636 +step:374 train loss:5.490418 +step:375 train loss:5.522209 +step:376 train loss:5.511797 +step:377 train loss:5.403596 +step:378 train loss:5.481346 +step:379 train loss:5.532611 +step:380 train loss:5.459384 +step:381 train loss:5.518443 +step:382 train loss:5.521783 +step:383 train loss:5.479942 +step:384 train loss:5.463094 +step:385 train loss:5.449271 +step:386 train loss:5.484187 +step:387 train loss:5.487714 +step:388 train loss:5.450529 +step:389 train loss:5.467496 +step:390 train loss:5.449965 +step:391 train loss:5.459119 +step:392 train loss:5.446370 +step:393 train loss:5.442915 
+step:394 train loss:5.486865 +step:395 train loss:5.414438 +step:396 train loss:5.379245 +step:397 train loss:5.448195 +step:398 train loss:5.440345 +step:399 train loss:5.439598 +step:400 train loss:5.405379 +step:401 train loss:5.445748 +step:402 train loss:5.422492 +step:403 train loss:5.427639 +step:404 train loss:5.402625 +step:405 train loss:5.401068 +step:406 train loss:5.435317 +step:407 train loss:5.416523 +step:408 train loss:5.489944 +step:409 train loss:5.403021 +step:410 train loss:5.381984 +step:411 train loss:5.365524 +step:412 train loss:5.456122 +step:413 train loss:5.341599 +step:414 train loss:5.420719 +step:415 train loss:5.386506 +step:416 train loss:5.395582 +step:417 train loss:5.418262 +step:418 train loss:5.361317 +step:419 train loss:5.350108 +step:420 train loss:5.338361 +step:421 train loss:5.330644 +step:422 train loss:5.331684 +step:423 train loss:5.338724 +step:424 train loss:5.310144 +step:425 train loss:5.390360 +step:426 train loss:5.375110 +step:427 train loss:5.306489 +step:428 train loss:5.376517 +step:429 train loss:5.282005 +step:430 train loss:5.318330 +step:431 train loss:5.355482 +step:432 train loss:5.370318 +step:433 train loss:5.358483 +step:434 train loss:5.317429 +step:435 train loss:5.369788 +step:436 train loss:5.391758 +step:437 train loss:5.346234 +step:438 train loss:5.308641 +step:439 train loss:5.298818 +step:440 train loss:5.339021 +step:441 train loss:5.281020 +step:442 train loss:5.278014 +step:443 train loss:5.296093 +step:444 train loss:5.327853 +step:445 train loss:5.333338 +step:446 train loss:5.269224 +step:447 train loss:5.284626 +step:448 train loss:5.349259 +step:449 train loss:5.303558 +step:450 train loss:5.291906 +step:451 train loss:5.273105 +step:452 train loss:5.350795 +step:453 train loss:5.273451 +step:454 train loss:5.236693 +step:455 train loss:5.321163 +step:456 train loss:5.282031 +step:457 train loss:5.257457 +step:458 train loss:5.275663 +step:459 train loss:5.226179 +step:460 train loss:5.325742 +step:461 train loss:5.285784 +step:462 train loss:5.180501 +step:463 train loss:5.234767 +step:464 train loss:5.286692 +step:465 train loss:5.244370 +step:466 train loss:5.266572 +step:467 train loss:5.227268 +step:468 train loss:5.274147 +step:469 train loss:5.245260 +step:470 train loss:5.206943 +step:471 train loss:5.303434 +step:472 train loss:5.177206 +step:473 train loss:5.256223 +step:474 train loss:5.237243 +step:475 train loss:5.259276 +step:476 train loss:5.227672 +step:477 train loss:5.171666 +step:478 train loss:5.192066 +step:479 train loss:5.186739 +step:480 train loss:5.208697 +step:481 train loss:5.220819 +step:482 train loss:5.145268 +step:483 train loss:5.220329 +step:484 train loss:5.174780 +step:485 train loss:5.149644 +step:486 train loss:5.213793 +step:487 train loss:5.184620 +step:488 train loss:5.178554 +step:489 train loss:5.174431 +step:490 train loss:5.155383 +step:491 train loss:5.172028 +step:492 train loss:5.169583 +step:493 train loss:5.168250 +step:494 train loss:5.180387 +step:495 train loss:5.127577 +step:496 train loss:5.224799 +step:497 train loss:5.099850 +step:498 train loss:5.205565 +step:499 train loss:5.173980 +step:500 validation loss:5.141858 total_sharp:6.4980e-02 L1_sharp:7.5133e-01 L2_sharp:1.6380e-01 L3_sharp:1.5321e-01 L4_sharp:1.3605e-01 L5_sharp:9.0855e-02 L6_sharp:5.8889e-02 L7_sharp:3.8517e-02 L8_sharp:2.8068e-02 L9_sharp:2.4571e-02 L10_sharp:2.5983e-02 L11_sharp:3.9465e-02 L12_sharp:4.7091e-02 total_fnorm:1.0241e+00 total_l1_linf:6.9685e+03 total_spectral:1.0241e+00 
L1_fnorm:8.7539e-02 L2_fnorm:8.3502e-02 L3_fnorm:8.0981e-02 L4_fnorm:8.1132e-02 L5_fnorm:8.1752e-02 L6_fnorm:8.3109e-02 L7_fnorm:8.4454e-02 L8_fnorm:8.4923e-02 L9_fnorm:8.5549e-02 L10_fnorm:8.5869e-02 L11_fnorm:8.5975e-02 L12_fnorm:8.5831e-02 L1_l1linf:1.3009e-01 L2_l1linf:1.5152e-01 L3_l1linf:1.5974e-01 L4_l1linf:1.6740e-01 L5_l1linf:1.7819e-01 L6_l1linf:1.8087e-01 L7_l1linf:1.8178e-01 L8_l1linf:1.8456e-01 L9_l1linf:1.8956e-01 L10_l1linf:2.0092e-01 L11_l1linf:2.1078e-01 L12_l1linf:2.1590e-01 L1_spectral:2.9332e-03 L2_spectral:3.4182e-03 L3_spectral:3.6121e-03 L4_spectral:4.1666e-03 L5_spectral:4.3424e-03 L6_spectral:5.1152e-03 L7_spectral:6.1081e-03 L8_spectral:5.9214e-03 L9_spectral:6.4496e-03 L10_spectral:5.4974e-03 L11_spectral:4.9696e-03 L12_spectral:5.1439e-03 ip_v_neg_g:3.0882e-02 cos_v_neg_g:5.5022e-03 v_norm:1.0241e+00 g_norm:5.4807e+00 hv_norm:5.5200e+00 cos_v_hv:1.2055e-02 hg_norm:2.2034e+03 cos_g_hg:6.6912e-01 v_par:3.3782e-04 v_perp:1.0241e+00 L1_cos_v_neg_g:2.9766e-02 L1_v_norm:8.7539e-02 L2_cos_v_neg_g:2.1831e-02 L2_v_norm:8.3502e-02 L3_cos_v_neg_g:2.2577e-02 L3_v_norm:8.0981e-02 L4_cos_v_neg_g:2.2906e-02 L4_v_norm:8.1132e-02 L5_cos_v_neg_g:2.0644e-02 L5_v_norm:8.1752e-02 L6_cos_v_neg_g:1.7780e-02 L6_v_norm:8.3109e-02 L7_cos_v_neg_g:1.5566e-02 L7_v_norm:8.4454e-02 L8_cos_v_neg_g:1.4036e-02 L8_v_norm:8.4923e-02 L9_cos_v_neg_g:1.4371e-02 L9_v_norm:8.5549e-02 L10_cos_v_neg_g:1.4727e-02 L10_v_norm:8.5869e-02 L11_cos_v_neg_g:1.6400e-02 L11_v_norm:8.5975e-02 L12_cos_v_neg_g:1.6716e-02 L12_v_norm:8.5831e-02 +step:500 train loss:5.167613 +step:501 train loss:5.133127 +step:502 train loss:5.174323 +step:503 train loss:5.095710 +step:504 train loss:5.178614 +step:505 train loss:5.116475 +step:506 train loss:5.114361 +step:507 train loss:5.123590 +step:508 train loss:5.146700 +step:509 train loss:5.139738 +step:510 train loss:5.077216 +step:511 train loss:5.082664 +step:512 train loss:5.076386 +step:513 train loss:5.093709 +step:514 train loss:5.170874 +step:515 train loss:5.113999 +step:516 train loss:5.180264 +step:517 train loss:5.108907 +step:518 train loss:5.091156 +step:519 train loss:5.150465 +step:520 train loss:5.086987 +step:521 train loss:5.090248 +step:522 train loss:5.109321 +step:523 train loss:5.105216 +step:524 train loss:5.048733 +step:525 train loss:5.049216 +step:526 train loss:5.070096 +step:527 train loss:5.068000 +step:528 train loss:5.063534 +step:529 train loss:5.090303 +step:530 train loss:5.048279 +step:531 train loss:5.078442 +step:532 train loss:5.035368 +step:533 train loss:4.998773 +step:534 train loss:5.074224 +step:535 train loss:5.063758 +step:536 train loss:5.120233 +step:537 train loss:5.006504 +step:538 train loss:4.966265 +step:539 train loss:5.070044 +step:540 train loss:5.101594 +step:541 train loss:5.004176 +step:542 train loss:5.026598 +step:543 train loss:5.031643 +step:544 train loss:5.042417 +step:545 train loss:5.019741 +step:546 train loss:4.985161 +step:547 train loss:5.011145 +step:548 train loss:4.959101 +step:549 train loss:5.013391 +step:550 train loss:4.995917 +step:551 train loss:4.995820 +step:552 train loss:5.099859 +step:553 train loss:5.054953 +step:554 train loss:4.997733 +step:555 train loss:5.053399 +step:556 train loss:5.001938 +step:557 train loss:4.970969 +step:558 train loss:4.939950 +step:559 train loss:5.002175 +step:560 train loss:5.050892 +step:561 train loss:4.917752 +step:562 train loss:4.910491 +step:563 train loss:4.984022 +step:564 train loss:4.943604 +step:565 train loss:4.965086 +step:566 train loss:4.976022 
+step:567 train loss:4.961907 +step:568 train loss:4.995237 +step:569 train loss:4.962892 +step:570 train loss:4.899190 +step:571 train loss:4.931969 +step:572 train loss:4.920030 +step:573 train loss:4.923492 +step:574 train loss:4.971913 +step:575 train loss:4.916822 +step:576 train loss:4.926929 +step:577 train loss:4.945065 +step:578 train loss:4.926347 +step:579 train loss:4.969098 +step:580 train loss:4.897584 +step:581 train loss:4.952384 +step:582 train loss:4.926213 +step:583 train loss:4.934248 +step:584 train loss:4.912786 +step:585 train loss:4.904228 +step:586 train loss:4.898576 +step:587 train loss:4.974588 +step:588 train loss:4.883348 +step:589 train loss:4.940100 +step:590 train loss:4.955812 +step:591 train loss:4.884137 +step:592 train loss:4.868177 +step:593 train loss:4.879345 +step:594 train loss:4.850390 +step:595 train loss:4.901026 +step:596 train loss:4.881842 +step:597 train loss:4.912559 +step:598 train loss:4.867241 +step:599 train loss:4.874174 +step:600 train loss:4.838543 +step:601 train loss:4.827262 +step:602 train loss:4.838392 +step:603 train loss:4.885952 +step:604 train loss:4.861234 +step:605 train loss:4.899874 +step:606 train loss:4.833225 +step:607 train loss:4.826978 +step:608 train loss:4.829396 +step:609 train loss:4.805108 +step:610 train loss:4.807473 +step:611 train loss:4.818491 +step:612 train loss:4.853832 +step:613 train loss:4.772531 +step:614 train loss:4.821893 +step:615 train loss:4.875265 +step:616 train loss:4.792220 +step:617 train loss:4.831857 +step:618 train loss:4.792433 +step:619 train loss:4.835473 +step:620 train loss:4.860608 +step:621 train loss:4.780012 +step:622 train loss:4.863593 +step:623 train loss:4.843091 +step:624 train loss:4.818048 +step:625 train loss:4.829660 +step:626 train loss:4.818738 +step:627 train loss:4.805033 +step:628 train loss:4.806916 +step:629 train loss:4.736918 +step:630 train loss:4.774024 +step:631 train loss:4.756256 +step:632 train loss:4.770710 +step:633 train loss:4.800328 +step:634 train loss:4.789251 +step:635 train loss:4.736537 +step:636 train loss:4.826014 +step:637 train loss:4.730753 +step:638 train loss:4.677372 +step:639 train loss:4.797602 +step:640 train loss:4.742867 +step:641 train loss:4.770860 +step:642 train loss:4.808372 +step:643 train loss:4.703529 +step:644 train loss:4.789707 +step:645 train loss:4.749887 +step:646 train loss:4.737019 +step:647 train loss:4.748475 +step:648 train loss:4.841780 +step:649 train loss:4.738234 +step:650 train loss:4.795733 +step:651 train loss:4.689681 +step:652 train loss:4.711300 +step:653 train loss:4.697817 +step:654 train loss:4.712063 +step:655 train loss:4.749206 +step:656 train loss:4.692219 +step:657 train loss:4.748868 +step:658 train loss:4.670200 +step:659 train loss:4.761938 +step:660 train loss:4.730448 +step:661 train loss:4.774920 +step:662 train loss:4.757313 +step:663 train loss:4.746657 +step:664 train loss:4.647585 +step:665 train loss:4.664279 +step:666 train loss:4.663861 +step:667 train loss:4.727052 +step:668 train loss:4.701890 +step:669 train loss:4.684103 +step:670 train loss:4.707703 +step:671 train loss:4.678210 +step:672 train loss:4.643229 +step:673 train loss:4.739167 +step:674 train loss:4.722587 +step:675 train loss:4.632082 +step:676 train loss:4.717250 +step:677 train loss:4.652712 +step:678 train loss:4.637798 +step:679 train loss:4.676704 +step:680 train loss:4.645363 +step:681 train loss:4.691203 +step:682 train loss:4.596566 +step:683 train loss:4.663171 +step:684 train loss:4.704023 +step:685 
train loss:4.644810 +step:686 train loss:4.733941 +step:687 train loss:4.673427 +step:688 train loss:4.604091 +step:689 train loss:4.635575 +step:690 train loss:4.614897 +step:691 train loss:4.635049 +step:692 train loss:4.643090 +step:693 train loss:4.635757 +step:694 train loss:4.635542 +step:695 train loss:4.587342 +step:696 train loss:4.541543 +step:697 train loss:4.675442 +step:698 train loss:4.583409 +step:699 train loss:4.598663 +step:700 train loss:4.659689 +step:701 train loss:4.562301 +step:702 train loss:4.619075 +step:703 train loss:4.578027 +step:704 train loss:4.516125 +step:705 train loss:4.582180 +step:706 train loss:4.461941 +step:707 train loss:4.536253 +step:708 train loss:4.626754 +step:709 train loss:4.586220 +step:710 train loss:4.561868 +step:711 train loss:4.596834 +step:712 train loss:4.566230 +step:713 train loss:4.530906 +step:714 train loss:4.604546 +step:715 train loss:4.494232 +step:716 train loss:4.655592 +step:717 train loss:4.528512 +step:718 train loss:4.598176 +step:719 train loss:4.557785 +step:720 train loss:4.526920 +step:721 train loss:4.567470 +step:722 train loss:4.548504 +step:723 train loss:4.591865 +step:724 train loss:4.582167 +step:725 train loss:4.513485 +step:726 train loss:4.525630 +step:727 train loss:4.561438 +step:728 train loss:4.531658 +step:729 train loss:4.490824 +step:730 train loss:4.566798 +step:731 train loss:4.599629 +step:732 train loss:4.555785 +step:733 train loss:4.540323 +step:734 train loss:4.546532 +step:735 train loss:4.616619 +step:736 train loss:4.530661 +step:737 train loss:4.529590 +step:738 train loss:4.573484 +step:739 train loss:4.507144 +step:740 train loss:4.540586 +step:741 train loss:4.620524 +step:742 train loss:4.511465 +step:743 train loss:4.491569 +step:744 train loss:4.519470 +step:745 train loss:4.439524 +step:746 train loss:4.487336 +step:747 train loss:4.499743 +step:748 train loss:4.475765 +step:749 train loss:4.507878 +step:750 validation loss:4.455858 +step:750 train loss:4.439450 +step:751 train loss:4.487066 +step:752 train loss:4.423968 +step:753 train loss:4.478825 +step:754 train loss:4.472086 +step:755 train loss:4.537028 +step:756 train loss:4.498268 +step:757 train loss:4.578440 +step:758 train loss:4.465930 +step:759 train loss:4.473464 +step:760 train loss:4.434391 +step:761 train loss:4.467881 +step:762 train loss:4.435402 +step:763 train loss:4.441321 +step:764 train loss:4.428803 +step:765 train loss:4.423706 +step:766 train loss:4.499950 +step:767 train loss:4.618834 +step:768 train loss:4.446984 +step:769 train loss:4.473616 +step:770 train loss:4.494643 +step:771 train loss:4.559715 +step:772 train loss:4.480090 +step:773 train loss:4.418767 +step:774 train loss:4.451005 +step:775 train loss:4.439600 +step:776 train loss:4.454644 +step:777 train loss:4.419347 +step:778 train loss:4.383433 +step:779 train loss:4.398705 +step:780 train loss:4.459806 +step:781 train loss:4.390987 +step:782 train loss:4.417747 +step:783 train loss:4.396229 +step:784 train loss:4.400800 +step:785 train loss:4.389342 +step:786 train loss:4.396481 +step:787 train loss:4.343361 +step:788 train loss:4.429956 +step:789 train loss:4.399621 +step:790 train loss:4.372562 +step:791 train loss:4.447576 +step:792 train loss:4.477177 +step:793 train loss:4.436511 +step:794 train loss:4.415955 +step:795 train loss:4.386657 +step:796 train loss:4.666867 +step:797 train loss:4.397889 +step:798 train loss:4.393852 +step:799 train loss:4.403133 +step:800 train loss:4.477491 +step:801 train loss:4.390643 +step:802 train 
loss:4.527625 +step:803 train loss:4.409738 +step:804 train loss:4.357451 +step:805 train loss:4.424065 +step:806 train loss:4.349938 +step:807 train loss:4.380788 +step:808 train loss:4.383090 +step:809 train loss:4.353994 +step:810 train loss:4.333006 +step:811 train loss:4.419576 +step:812 train loss:4.382891 +step:813 train loss:4.391439 +step:814 train loss:4.460761 +step:815 train loss:4.417661 +step:816 train loss:4.349370 +step:817 train loss:4.389433 +step:818 train loss:4.358500 +step:819 train loss:4.347140 +step:820 train loss:4.345108 +step:821 train loss:4.304381 +step:822 train loss:4.303134 +step:823 train loss:4.383621 +step:824 train loss:4.288049 +step:825 train loss:4.266750 +step:826 train loss:4.344474 +step:827 train loss:4.244333 +step:828 train loss:4.321317 +step:829 train loss:4.328465 +step:830 train loss:4.326995 +step:831 train loss:4.369799 +step:832 train loss:4.415622 +step:833 train loss:4.368015 +step:834 train loss:4.358819 +step:835 train loss:4.326823 +step:836 train loss:4.320694 +step:837 train loss:4.308102 +step:838 train loss:4.296072 +step:839 train loss:4.300431 +step:840 train loss:4.337088 +step:841 train loss:4.333060 +step:842 train loss:4.320367 +step:843 train loss:4.325367 +step:844 train loss:4.290976 +step:845 train loss:4.270199 +step:846 train loss:4.370326 +step:847 train loss:4.338699 +step:848 train loss:4.291498 +step:849 train loss:4.322101 +step:850 train loss:4.338941 +step:851 train loss:4.297122 +step:852 train loss:4.384975 +step:853 train loss:4.272294 +step:854 train loss:4.311863 +step:855 train loss:4.315397 +step:856 train loss:4.269677 +step:857 train loss:4.311691 +step:858 train loss:4.350839 +step:859 train loss:4.258949 +step:860 train loss:4.283875 +step:861 train loss:4.325706 +step:862 train loss:4.266525 +step:863 train loss:4.277883 +step:864 train loss:4.264782 +step:865 train loss:4.276193 +step:866 train loss:4.304327 +step:867 train loss:4.420430 +step:868 train loss:4.276432 +step:869 train loss:4.291715 +step:870 train loss:4.236584 +step:871 train loss:4.229538 +step:872 train loss:4.302278 +step:873 train loss:4.275506 +step:874 train loss:4.294668 +step:875 train loss:4.212699 +step:876 train loss:4.305937 +step:877 train loss:4.235978 +step:878 train loss:4.343611 +step:879 train loss:4.234374 +step:880 train loss:4.345459 +step:881 train loss:4.274592 +step:882 train loss:4.236148 +step:883 train loss:4.280951 +step:884 train loss:4.305762 +step:885 train loss:4.244504 +step:886 train loss:4.252692 +step:887 train loss:4.269550 +step:888 train loss:4.369697 +step:889 train loss:4.301203 +step:890 train loss:4.243194 +step:891 train loss:4.201490 +step:892 train loss:4.189628 +step:893 train loss:4.265618 +step:894 train loss:4.231616 +step:895 train loss:4.211325 +step:896 train loss:4.302726 +step:897 train loss:4.229459 +step:898 train loss:4.247905 +step:899 train loss:4.254566 +step:900 train loss:4.285279 +step:901 train loss:4.217698 +step:902 train loss:4.250255 +step:903 train loss:4.347783 +step:904 train loss:4.355592 +step:905 train loss:4.233083 +step:906 train loss:4.249216 +step:907 train loss:4.271511 +step:908 train loss:4.290221 +step:909 train loss:4.232909 +step:910 train loss:4.272268 +step:911 train loss:4.389352 +step:912 train loss:4.197147 +step:913 train loss:4.259810 +step:914 train loss:4.209982 +step:915 train loss:4.241374 +step:916 train loss:4.295041 +step:917 train loss:4.254907 +step:918 train loss:4.321808 +step:919 train loss:4.405727 +step:920 train loss:4.153383 
+step:921 train loss:4.277094 +step:922 train loss:4.245794 +step:923 train loss:4.166658 +step:924 train loss:4.214293 +step:925 train loss:4.171856 +step:926 train loss:4.273959 +step:927 train loss:4.176231 +step:928 train loss:4.253564 +step:929 train loss:4.227217 +step:930 train loss:4.224650 +step:931 train loss:4.262268 +step:932 train loss:4.204811 +step:933 train loss:4.249125 +step:934 train loss:4.284698 +step:935 train loss:4.267449 +step:936 train loss:4.238426 +step:937 train loss:4.235991 +step:938 train loss:4.232670 +step:939 train loss:4.135129 +step:940 train loss:4.233760 +step:941 train loss:4.180103 +step:942 train loss:4.167834 +step:943 train loss:4.266109 +step:944 train loss:4.217768 +step:945 train loss:4.218314 +step:946 train loss:4.235462 +step:947 train loss:4.390032 +step:948 train loss:4.192325 +step:949 train loss:4.232768 +step:950 train loss:4.167999 +step:951 train loss:4.211083 +step:952 train loss:4.259444 +step:953 train loss:4.189550 +step:954 train loss:4.225569 +step:955 train loss:4.160751 +step:956 train loss:4.189704 +step:957 train loss:4.185033 +step:958 train loss:4.260818 +step:959 train loss:4.191038 +step:960 train loss:4.287024 +step:961 train loss:4.230058 +step:962 train loss:4.194086 +step:963 train loss:4.176582 +step:964 train loss:4.211444 +step:965 train loss:4.136000 +step:966 train loss:4.149020 +step:967 train loss:4.209051 +step:968 train loss:4.212553 +step:969 train loss:4.165258 +step:970 train loss:4.216899 +step:971 train loss:4.196794 +step:972 train loss:4.116109 +step:973 train loss:4.219481 +step:974 train loss:4.155807 +step:975 train loss:4.239864 +step:976 train loss:4.197660 +step:977 train loss:4.189474 +step:978 train loss:4.187141 +step:979 train loss:4.176476 +step:980 train loss:4.180919 +step:981 train loss:4.155200 +step:982 train loss:4.172129 +step:983 train loss:4.179431 +step:984 train loss:4.205961 +step:985 train loss:4.170024 +step:986 train loss:4.198007 +step:987 train loss:4.227085 +step:988 train loss:4.206812 +step:989 train loss:4.178674 +step:990 train loss:4.169185 +step:991 train loss:4.091857 +step:992 train loss:4.160931 +step:993 train loss:4.184460 +step:994 train loss:4.118234 +step:995 train loss:4.135962 +step:996 train loss:4.179913 +step:997 train loss:4.138214 +step:998 train loss:4.136061 +step:999 train loss:4.179453 +step:1000 validation loss:4.109203 total_sharp:1.5660e-02 L1_sharp:9.1743e-02 L2_sharp:5.2011e-02 L3_sharp:3.7034e-02 L4_sharp:2.0352e-02 L5_sharp:2.1468e-02 L6_sharp:2.0459e-02 L7_sharp:2.2004e-02 L8_sharp:1.8392e-02 L9_sharp:1.4944e-02 L10_sharp:1.0645e-02 L11_sharp:1.0306e-02 L12_sharp:1.2335e-02 total_fnorm:1.4423e+00 total_l1_linf:9.8000e+03 total_spectral:1.4423e+00 L1_fnorm:1.2264e-01 L2_fnorm:1.1610e-01 L3_fnorm:1.1238e-01 L4_fnorm:1.1609e-01 L5_fnorm:1.1778e-01 L6_fnorm:1.2054e-01 L7_fnorm:1.2052e-01 L8_fnorm:1.2095e-01 L9_fnorm:1.2040e-01 L10_fnorm:1.2105e-01 L11_fnorm:1.2086e-01 L12_fnorm:1.2134e-01 L1_l1linf:2.0366e-01 L2_l1linf:2.2642e-01 L3_l1linf:2.3501e-01 L4_l1linf:2.4466e-01 L5_l1linf:2.4985e-01 L6_l1linf:2.5467e-01 L7_l1linf:2.6075e-01 L8_l1linf:2.6043e-01 L9_l1linf:2.5617e-01 L10_l1linf:2.5796e-01 L11_l1linf:2.4493e-01 L12_l1linf:2.3626e-01 L1_spectral:4.6005e-03 L2_spectral:5.1475e-03 L3_spectral:5.3284e-03 L4_spectral:5.5507e-03 L5_spectral:5.6512e-03 L6_spectral:5.7950e-03 L7_spectral:5.9028e-03 L8_spectral:5.9333e-03 L9_spectral:5.7639e-03 L10_spectral:5.8299e-03 L11_spectral:5.5380e-03 L12_spectral:5.3604e-03 ip_v_neg_g:1.3553e-02 
cos_v_neg_g:3.1587e-03 v_norm:1.4423e+00 g_norm:2.9747e+00 hv_norm:1.5152e+00 cos_v_hv:1.4907e-02 hg_norm:1.3337e+02 cos_g_hg:4.9473e-01 v_par:1.6450e-04 v_perp:1.4423e+00 L1_cos_v_neg_g:1.5975e-02 L1_v_norm:1.2264e-01 L2_cos_v_neg_g:1.6627e-02 L2_v_norm:1.1610e-01 L3_cos_v_neg_g:1.3770e-02 L3_v_norm:1.1238e-01 L4_cos_v_neg_g:1.0625e-02 L4_v_norm:1.1609e-01 L5_cos_v_neg_g:1.1101e-02 L5_v_norm:1.1778e-01 L6_cos_v_neg_g:9.9217e-03 L6_v_norm:1.2054e-01 L7_cos_v_neg_g:9.9855e-03 L7_v_norm:1.2052e-01 L8_cos_v_neg_g:9.0728e-03 L8_v_norm:1.2095e-01 L9_cos_v_neg_g:8.0265e-03 L9_v_norm:1.2040e-01 L10_cos_v_neg_g:7.4486e-03 L10_v_norm:1.2105e-01 L11_cos_v_neg_g:7.2445e-03 L11_v_norm:1.2086e-01 L12_cos_v_neg_g:6.1830e-03 L12_v_norm:1.2134e-01 +step:1000 train loss:4.186139 +step:1001 train loss:4.188141 +step:1002 train loss:4.176249 +step:1003 train loss:4.152069 +step:1004 train loss:4.130863 +step:1005 train loss:4.140474 +step:1006 train loss:4.227843 +step:1007 train loss:4.160676 +step:1008 train loss:4.150262 +step:1009 train loss:4.205611 +step:1010 train loss:4.170811 +step:1011 train loss:4.201632 +step:1012 train loss:4.143916 +step:1013 train loss:4.119367 +step:1014 train loss:4.118954 +step:1015 train loss:4.152020 +step:1016 train loss:4.170704 +step:1017 train loss:4.118909 +step:1018 train loss:4.178276 +step:1019 train loss:4.119635 +step:1020 train loss:4.124877 +step:1021 train loss:4.219174 +step:1022 train loss:4.119610 +step:1023 train loss:4.128236 +step:1024 train loss:4.211510 +step:1025 train loss:4.170711 +step:1026 train loss:4.109664 +step:1027 train loss:4.146029 +step:1028 train loss:4.153510 +step:1029 train loss:4.100662 +step:1030 train loss:4.191257 +step:1031 train loss:4.173938 +step:1032 train loss:4.137575 +step:1033 train loss:4.103415 +step:1034 train loss:4.166788 +step:1035 train loss:4.169219 +step:1036 train loss:4.086627 +step:1037 train loss:4.140201 +step:1038 train loss:4.165306 +step:1039 train loss:4.307095 +step:1040 train loss:4.140578 +step:1041 train loss:4.121963 +step:1042 train loss:4.141970 +step:1043 train loss:4.148009 +step:1044 train loss:4.131946 +step:1045 train loss:4.145277 +step:1046 train loss:4.086366 +step:1047 train loss:4.121495 +step:1048 train loss:4.113348 +step:1049 train loss:4.170983 +step:1050 train loss:4.132158 +step:1051 train loss:4.105969 +step:1052 train loss:4.214223 +step:1053 train loss:4.112048 +step:1054 train loss:4.102210 +step:1055 train loss:4.173217 +step:1056 train loss:4.112964 +step:1057 train loss:4.016131 +step:1058 train loss:4.116659 +step:1059 train loss:4.101379 +step:1060 train loss:4.096590 +step:1061 train loss:4.146462 +step:1062 train loss:4.112970 +step:1063 train loss:4.117462 +step:1064 train loss:4.106784 +step:1065 train loss:4.116145 +step:1066 train loss:4.091279 +step:1067 train loss:4.120433 +step:1068 train loss:4.078346 +step:1069 train loss:4.099501 +step:1070 train loss:4.111857 +step:1071 train loss:4.123784 +step:1072 train loss:4.149596 +step:1073 train loss:4.063609 +step:1074 train loss:4.080629 +step:1075 train loss:4.082000 +step:1076 train loss:4.152154 +step:1077 train loss:4.079500 +step:1078 train loss:4.131980 +step:1079 train loss:4.176638 +step:1080 train loss:4.050960 +step:1081 train loss:4.121006 +step:1082 train loss:4.114981 +step:1083 train loss:4.077718 +step:1084 train loss:4.060357 +step:1085 train loss:4.120307 +step:1086 train loss:4.101311 +step:1087 train loss:4.092103 +step:1088 train loss:4.093755 +step:1089 train loss:4.101250 +step:1090 train 
loss:4.050953 +step:1091 train loss:4.041177 +step:1092 train loss:4.145538 +step:1093 train loss:4.030242 +step:1094 train loss:4.092840 +step:1095 train loss:4.139017 +step:1096 train loss:4.072880 +step:1097 train loss:4.073038 +step:1098 train loss:4.041165 +step:1099 train loss:4.095873 +step:1100 train loss:4.146792 +step:1101 train loss:4.133408 +step:1102 train loss:4.144227 +step:1103 train loss:4.064698 +step:1104 train loss:4.098414 +step:1105 train loss:4.151422 +step:1106 train loss:4.086020 +step:1107 train loss:4.212246 +step:1108 train loss:4.146717 +step:1109 train loss:4.120044 +step:1110 train loss:4.069796 +step:1111 train loss:4.120816 +step:1112 train loss:4.030707 +step:1113 train loss:4.025181 +step:1114 train loss:4.011006 +step:1115 train loss:4.052644 +step:1116 train loss:4.116358 +step:1117 train loss:4.141810 +step:1118 train loss:4.165295 +step:1119 train loss:4.085261 +step:1120 train loss:4.108300 +step:1121 train loss:4.093806 +step:1122 train loss:4.074384 +step:1123 train loss:4.180312 +step:1124 train loss:4.057936 +step:1125 train loss:4.071929 +step:1126 train loss:4.035856 +step:1127 train loss:4.060532 +step:1128 train loss:4.061388 +step:1129 train loss:4.115664 +step:1130 train loss:4.035561 +step:1131 train loss:4.126215 +step:1132 train loss:4.072338 +step:1133 train loss:4.082449 +step:1134 train loss:4.056064 +step:1135 train loss:4.101477 +step:1136 train loss:4.117535 +step:1137 train loss:4.039630 +step:1138 train loss:4.115393 +step:1139 train loss:4.064482 +step:1140 train loss:4.144966 +step:1141 train loss:4.100049 +step:1142 train loss:4.031104 +step:1143 train loss:4.110445 +step:1144 train loss:4.132698 +step:1145 train loss:4.080481 +step:1146 train loss:4.034243 +step:1147 train loss:4.048887 +step:1148 train loss:4.076828 +step:1149 train loss:4.125152 +step:1150 train loss:4.129155 +step:1151 train loss:4.136486 +step:1152 train loss:4.038274 +step:1153 train loss:4.039609 +step:1154 train loss:4.022469 +step:1155 train loss:4.124438 +step:1156 train loss:4.026252 +step:1157 train loss:4.053144 +step:1158 train loss:4.102355 +step:1159 train loss:4.107231 +step:1160 train loss:4.034530 +step:1161 train loss:4.122321 +step:1162 train loss:4.062927 +step:1163 train loss:4.047559 +step:1164 train loss:3.955993 +step:1165 train loss:4.093331 +step:1166 train loss:4.022288 +step:1167 train loss:4.027400 +step:1168 train loss:4.083595 +step:1169 train loss:4.045254 +step:1170 train loss:4.051237 +step:1171 train loss:4.076664 +step:1172 train loss:4.037292 +step:1173 train loss:4.069318 +step:1174 train loss:4.008020 +step:1175 train loss:4.043132 +step:1176 train loss:4.159837 +step:1177 train loss:4.004562 +step:1178 train loss:4.062743 +step:1179 train loss:4.012486 +step:1180 train loss:4.044239 +step:1181 train loss:4.031464 +step:1182 train loss:4.089601 +step:1183 train loss:4.068232 +step:1184 train loss:4.009439 +step:1185 train loss:4.042290 +step:1186 train loss:4.035267 +step:1187 train loss:4.008405 +step:1188 train loss:4.041893 +step:1189 train loss:3.973147 +step:1190 train loss:4.031390 +step:1191 train loss:4.092399 +step:1192 train loss:4.044055 +step:1193 train loss:4.044655 +step:1194 train loss:4.162141 +step:1195 train loss:4.138027 +step:1196 train loss:4.028397 +step:1197 train loss:4.050857 +step:1198 train loss:4.036365 +step:1199 train loss:4.035492 +step:1200 train loss:4.097353 +step:1201 train loss:4.069342 +step:1202 train loss:4.004155 +step:1203 train loss:3.997330 +step:1204 train loss:4.035830 
+step:1205 train loss:4.056756 +step:1206 train loss:3.985402 +step:1207 train loss:4.074504 +step:1208 train loss:4.047488 +step:1209 train loss:3.975332 +step:1210 train loss:4.072868 +step:1211 train loss:4.019382 +step:1212 train loss:4.044223 +step:1213 train loss:3.983532 +step:1214 train loss:4.059299 +step:1215 train loss:4.028616 +step:1216 train loss:4.035719 +step:1217 train loss:3.978562 +step:1218 train loss:4.047715 +step:1219 train loss:3.987119 +step:1220 train loss:4.014950 +step:1221 train loss:4.031888 +step:1222 train loss:4.073915 +step:1223 train loss:4.050261 +step:1224 train loss:4.020169 +step:1225 train loss:4.066267 +step:1226 train loss:4.008149 +step:1227 train loss:4.013470 +step:1228 train loss:4.022981 +step:1229 train loss:3.991269 +step:1230 train loss:3.988436 +step:1231 train loss:4.038477 +step:1232 train loss:3.998282 +step:1233 train loss:3.989633 +step:1234 train loss:4.076559 +step:1235 train loss:4.045122 +step:1236 train loss:3.958643 +step:1237 train loss:4.061769 +step:1238 train loss:4.008317 +step:1239 train loss:4.051267 +step:1240 train loss:3.964714 +step:1241 train loss:3.986157 +step:1242 train loss:4.017375 +step:1243 train loss:3.963594 +step:1244 train loss:4.083119 +step:1245 train loss:4.095571 +step:1246 train loss:4.026675 +step:1247 train loss:4.006742 +step:1248 train loss:4.035015 +step:1249 train loss:3.961819 +step:1250 validation loss:3.959102 +step:1250 train loss:3.980579 +step:1251 train loss:4.051635 +step:1252 train loss:4.001277 +step:1253 train loss:3.950616 +step:1254 train loss:3.986022 +step:1255 train loss:3.981553 +step:1256 train loss:4.030751 +step:1257 train loss:4.006545 +step:1258 train loss:4.058208 +step:1259 train loss:4.036389 +step:1260 train loss:3.948416 +step:1261 train loss:4.187355 +step:1262 train loss:4.028859 +step:1263 train loss:3.984546 +step:1264 train loss:3.999553 +step:1265 train loss:4.047836 +step:1266 train loss:3.996344 +step:1267 train loss:4.004490 +step:1268 train loss:4.012898 +step:1269 train loss:4.008683 +step:1270 train loss:3.935506 +step:1271 train loss:3.943781 +step:1272 train loss:3.973999 +step:1273 train loss:4.029381 +step:1274 train loss:3.993573 +step:1275 train loss:4.021628 +step:1276 train loss:4.019731 +step:1277 train loss:4.029604 +step:1278 train loss:3.973240 +step:1279 train loss:3.979620 +step:1280 train loss:3.997756 +step:1281 train loss:4.049727 +step:1282 train loss:3.978717 +step:1283 train loss:4.051723 +step:1284 train loss:3.994486 +step:1285 train loss:4.044600 +step:1286 train loss:3.942800 +step:1287 train loss:3.981032 +step:1288 train loss:4.011457 +step:1289 train loss:4.071192 +step:1290 train loss:4.025109 +step:1291 train loss:3.987939 +step:1292 train loss:3.970453 +step:1293 train loss:3.963898 +step:1294 train loss:4.011765 +step:1295 train loss:3.994012 +step:1296 train loss:4.038335 +step:1297 train loss:3.994159 +step:1298 train loss:4.011930 +step:1299 train loss:4.048540 +step:1300 train loss:3.970347 +step:1301 train loss:4.014194 +step:1302 train loss:3.972537 +step:1303 train loss:4.014747 +step:1304 train loss:4.042213 +step:1305 train loss:4.020201 +step:1306 train loss:4.011644 +step:1307 train loss:3.995189 +step:1308 train loss:3.949029 +step:1309 train loss:3.965079 +step:1310 train loss:3.954328 +step:1311 train loss:3.956851 +step:1312 train loss:4.034245 +step:1313 train loss:3.947637 +step:1314 train loss:3.954058 +step:1315 train loss:3.996008 +step:1316 train loss:3.973335 +step:1317 train loss:3.866323 +step:1318 
train loss:4.025955 +step:1319 train loss:4.059812 +step:1320 train loss:3.975364 +step:1321 train loss:3.956565 +step:1322 train loss:4.058607 +step:1323 train loss:4.007485 +step:1324 train loss:4.107251 +step:1325 train loss:3.986259 +step:1326 train loss:4.016866 +step:1327 train loss:4.033558 +step:1328 train loss:3.939492 +step:1329 train loss:3.968527 +step:1330 train loss:3.992461 +step:1331 train loss:3.860509 +step:1332 train loss:4.034018 +step:1333 train loss:4.004134 +step:1334 train loss:4.000397 +step:1335 train loss:4.021120 +step:1336 train loss:4.026664 +step:1337 train loss:3.998298 +step:1338 train loss:3.979854 +step:1339 train loss:4.052053 +step:1340 train loss:4.021418 +step:1341 train loss:3.997013 +step:1342 train loss:3.972084 +step:1343 train loss:3.961363 +step:1344 train loss:4.029925 +step:1345 train loss:3.983944 +step:1346 train loss:4.068890 +step:1347 train loss:3.990228 +step:1348 train loss:3.952909 +step:1349 train loss:3.900074 +step:1350 train loss:3.941124 +step:1351 train loss:4.004207 +step:1352 train loss:3.978745 +step:1353 train loss:3.960792 +step:1354 train loss:3.960948 +step:1355 train loss:4.031741 +step:1356 train loss:3.939378 +step:1357 train loss:3.969042 +step:1358 train loss:3.962680 +step:1359 train loss:3.963948 +step:1360 train loss:3.995270 +step:1361 train loss:4.115713 +step:1362 train loss:4.024673 +step:1363 train loss:3.914061 +step:1364 train loss:3.936513 +step:1365 train loss:3.928193 +step:1366 train loss:3.971088 +step:1367 train loss:3.903677 +step:1368 train loss:3.935771 +step:1369 train loss:3.974854 +step:1370 train loss:3.989424 +step:1371 train loss:3.952318 +step:1372 train loss:3.982537 +step:1373 train loss:4.022051 +step:1374 train loss:4.020256 +step:1375 train loss:3.969719 +step:1376 train loss:3.995925 +step:1377 train loss:3.982579 +step:1378 train loss:3.970993 +step:1379 train loss:3.945334 +step:1380 train loss:4.012618 +step:1381 train loss:3.965379 +step:1382 train loss:3.939240 +step:1383 train loss:3.931422 +step:1384 train loss:4.002361 +step:1385 train loss:3.907823 +step:1386 train loss:3.977059 +step:1387 train loss:3.977475 +step:1388 train loss:3.946060 +step:1389 train loss:3.915938 +step:1390 train loss:3.956298 +step:1391 train loss:3.986677 +step:1392 train loss:3.969876 +step:1393 train loss:4.012451 +step:1394 train loss:3.948626 +step:1395 train loss:3.985540 +step:1396 train loss:3.973585 +step:1397 train loss:3.991109 +step:1398 train loss:3.996183 +step:1399 train loss:3.967392 +step:1400 train loss:3.945118 +step:1401 train loss:3.936651 +step:1402 train loss:3.943542 +step:1403 train loss:3.904708 +step:1404 train loss:3.964757 +step:1405 train loss:3.926516 +step:1406 train loss:3.957431 +step:1407 train loss:3.950967 +step:1408 train loss:3.930475 +step:1409 train loss:3.920316 +step:1410 train loss:3.936759 +step:1411 train loss:3.973817 +step:1412 train loss:4.023230 +step:1413 train loss:3.945296 +step:1414 train loss:3.978142 +step:1415 train loss:3.936779 +step:1416 train loss:3.990460 +step:1417 train loss:3.957619 +step:1418 train loss:3.898111 +step:1419 train loss:3.913299 +step:1420 train loss:3.932648 +step:1421 train loss:3.973582 +step:1422 train loss:3.949914 +step:1423 train loss:4.047061 +step:1424 train loss:3.949171 +step:1425 train loss:3.909477 +step:1426 train loss:3.933443 +step:1427 train loss:3.921461 +step:1428 train loss:3.903974 +step:1429 train loss:3.927819 +step:1430 train loss:3.939372 +step:1431 train loss:3.960853 +step:1432 train loss:3.945558 
+step:1433 train loss:3.926919 +step:1434 train loss:3.902444 +step:1435 train loss:3.891695 +step:1436 train loss:3.967191 +step:1437 train loss:3.898218 +step:1438 train loss:3.895327 +step:1439 train loss:3.886173 +step:1440 train loss:3.922022 +step:1441 train loss:3.997351 +step:1442 train loss:3.959526 +step:1443 train loss:3.885728 +step:1444 train loss:3.900484 +step:1445 train loss:3.900146 +step:1446 train loss:3.935086 +step:1447 train loss:3.944668 +step:1448 train loss:3.908939 +step:1449 train loss:3.932679 +step:1450 train loss:3.950629 +step:1451 train loss:3.877403 +step:1452 train loss:3.932667 +step:1453 train loss:3.926743 +step:1454 train loss:3.916357 +step:1455 train loss:3.856259 +step:1456 train loss:3.930738 +step:1457 train loss:3.868011 +step:1458 train loss:4.004405 +step:1459 train loss:3.925321 +step:1460 train loss:3.895604 +step:1461 train loss:3.953107 +step:1462 train loss:3.962408 +step:1463 train loss:3.922874 +step:1464 train loss:3.904042 +step:1465 train loss:3.901418 +step:1466 train loss:3.865110 +step:1467 train loss:4.004464 +step:1468 train loss:3.887204 +step:1469 train loss:3.963012 +step:1470 train loss:3.898139 +step:1471 train loss:3.894512 +step:1472 train loss:3.900870 +step:1473 train loss:3.898525 +step:1474 train loss:3.842654 +step:1475 train loss:3.902108 +step:1476 train loss:3.986580 +step:1477 train loss:3.933111 +step:1478 train loss:3.868444 +step:1479 train loss:3.904509 +step:1480 train loss:3.898742 +step:1481 train loss:3.872987 +step:1482 train loss:3.937265 +step:1483 train loss:3.924980 +step:1484 train loss:3.960326 +step:1485 train loss:3.970430 +step:1486 train loss:3.903867 +step:1487 train loss:3.893477 +step:1488 train loss:3.896255 +step:1489 train loss:3.889746 +step:1490 train loss:3.946901 +step:1491 train loss:3.942563 +step:1492 train loss:3.932673 +step:1493 train loss:3.879674 +step:1494 train loss:3.911897 +step:1495 train loss:3.900603 +step:1496 train loss:3.865127 +step:1497 train loss:3.936848 +step:1498 train loss:3.847531 +step:1499 train loss:3.888298 +step:1500 validation loss:3.859764 total_sharp:9.7068e-03 L1_sharp:2.8121e-02 L2_sharp:1.3448e-02 L3_sharp:2.3766e-02 L4_sharp:1.1428e-02 L5_sharp:1.2250e-02 L6_sharp:1.2841e-02 L7_sharp:1.5735e-02 L8_sharp:1.4159e-02 L9_sharp:1.1048e-02 L10_sharp:8.6967e-03 L11_sharp:8.5188e-03 L12_sharp:1.2943e-02 total_fnorm:1.3903e+00 total_l1_linf:9.5039e+03 total_spectral:1.3903e+00 L1_fnorm:1.2191e-01 L2_fnorm:1.1644e-01 L3_fnorm:1.1377e-01 L4_fnorm:1.1612e-01 L5_fnorm:1.1855e-01 L6_fnorm:1.2049e-01 L7_fnorm:1.2071e-01 L8_fnorm:1.2086e-01 L9_fnorm:1.2032e-01 L10_fnorm:1.2056e-01 L11_fnorm:1.2044e-01 L12_fnorm:1.2091e-01 L1_l1linf:2.0822e-01 L2_l1linf:2.4085e-01 L3_l1linf:2.6516e-01 L4_l1linf:2.7577e-01 L5_l1linf:2.6710e-01 L6_l1linf:2.4304e-01 L7_l1linf:2.4960e-01 L8_l1linf:2.6757e-01 L9_l1linf:2.7686e-01 L10_l1linf:2.9598e-01 L11_l1linf:3.0311e-01 L12_l1linf:2.9450e-01 L1_spectral:4.7662e-03 L2_spectral:5.4582e-03 L3_spectral:6.0107e-03 L4_spectral:6.2352e-03 L5_spectral:6.0718e-03 L6_spectral:5.5171e-03 L7_spectral:5.5894e-03 L8_spectral:5.9629e-03 L9_spectral:6.1936e-03 L10_spectral:6.6488e-03 L11_spectral:6.7688e-03 L12_spectral:6.5661e-03 ip_v_neg_g:9.7423e-03 cos_v_neg_g:2.6461e-03 v_norm:1.3903e+00 g_norm:2.6481e+00 hv_norm:8.6532e-01 cos_v_hv:1.5596e-02 hg_norm:9.5643e+01 cos_g_hg:4.7286e-01 v_par:9.8577e-05 v_perp:1.3903e+00 L1_cos_v_neg_g:8.4220e-03 L1_v_norm:1.2191e-01 L2_cos_v_neg_g:1.0381e-02 L2_v_norm:1.1644e-01 L3_cos_v_neg_g:1.2500e-02 
L3_v_norm:1.1377e-01 L4_cos_v_neg_g:9.6444e-03 L4_v_norm:1.1612e-01 L5_cos_v_neg_g:9.1226e-03 L5_v_norm:1.1855e-01 L6_cos_v_neg_g:8.4690e-03 L6_v_norm:1.2049e-01 L7_cos_v_neg_g:9.0742e-03 L7_v_norm:1.2071e-01 L8_cos_v_neg_g:8.4539e-03 L8_v_norm:1.2086e-01 L9_cos_v_neg_g:8.0567e-03 L9_v_norm:1.2032e-01 L10_cos_v_neg_g:7.2956e-03 L10_v_norm:1.2056e-01 L11_cos_v_neg_g:7.1403e-03 L11_v_norm:1.2044e-01 L12_cos_v_neg_g:6.2698e-03 L12_v_norm:1.2091e-01 +step:1500 train loss:3.883508 +step:1501 train loss:3.915128 +step:1502 train loss:3.845073 +step:1503 train loss:3.905200 +step:1504 train loss:3.869579 +step:1505 train loss:3.845891 +step:1506 train loss:3.834726 +step:1507 train loss:3.847876 +step:1508 train loss:3.865530 +step:1509 train loss:3.914138 +step:1510 train loss:3.855974 +step:1511 train loss:3.882881 +step:1512 train loss:3.862108 +step:1513 train loss:3.927745 +step:1514 train loss:3.885601 +step:1515 train loss:3.939698 +step:1516 train loss:3.870582 +step:1517 train loss:3.876982 +step:1518 train loss:3.962505 +step:1519 train loss:3.920975 +step:1520 train loss:3.965684 +step:1521 train loss:3.859470 +step:1522 train loss:3.924181 +step:1523 train loss:3.919047 +step:1524 train loss:3.849713 +step:1525 train loss:3.927312 +step:1526 train loss:3.844530 +step:1527 train loss:3.896917 +step:1528 train loss:3.953074 +step:1529 train loss:3.905361 +step:1530 train loss:3.950399 +step:1531 train loss:3.869131 +step:1532 train loss:3.943802 +step:1533 train loss:3.913661 +step:1534 train loss:3.863192 +step:1535 train loss:3.916281 +step:1536 train loss:3.940546 +step:1537 train loss:3.888417 +step:1538 train loss:3.903125 +step:1539 train loss:3.887875 +step:1540 train loss:3.909313 +step:1541 train loss:3.870611 +step:1542 train loss:3.962500 +step:1543 train loss:3.993021 +step:1544 train loss:3.856538 +step:1545 train loss:3.841202 +step:1546 train loss:3.882978 +step:1547 train loss:3.867948 +step:1548 train loss:3.908462 +step:1549 train loss:3.835350 +step:1550 train loss:3.952468 +step:1551 train loss:3.884908 +step:1552 train loss:3.912364 +step:1553 train loss:3.922816 +step:1554 train loss:3.931690 +step:1555 train loss:3.886007 +step:1556 train loss:3.867845 +step:1557 train loss:3.880498 +step:1558 train loss:3.905562 +step:1559 train loss:3.867872 +step:1560 train loss:3.947732 +step:1561 train loss:3.923147 +step:1562 train loss:3.810496 +step:1563 train loss:3.784723 +step:1564 train loss:3.928332 +step:1565 train loss:3.899311 +step:1566 train loss:3.921499 +step:1567 train loss:3.915331 +step:1568 train loss:3.870947 +step:1569 train loss:3.864921 +step:1570 train loss:3.882576 +step:1571 train loss:3.856647 +step:1572 train loss:3.859443 +step:1573 train loss:3.906435 +step:1574 train loss:3.863778 +step:1575 train loss:3.883290 +step:1576 train loss:3.842789 +step:1577 train loss:3.869629 +step:1578 train loss:3.851795 +step:1579 train loss:3.925889 +step:1580 train loss:3.884372 +step:1581 train loss:3.918169 +step:1582 train loss:3.922074 +step:1583 train loss:3.887551 +step:1584 train loss:3.812049 +step:1585 train loss:3.894705 +step:1586 train loss:3.866168 +step:1587 train loss:3.877933 +step:1588 train loss:3.862621 +step:1589 train loss:3.911973 +step:1590 train loss:3.820713 +step:1591 train loss:3.874437 +step:1592 train loss:3.828844 +step:1593 train loss:3.864718 +step:1594 train loss:3.865209 +step:1595 train loss:3.865200 +step:1596 train loss:3.870432 +step:1597 train loss:3.800168 +step:1598 train loss:3.901612 +step:1599 train loss:3.910505 
+step:1600 train loss:3.786639 +step:1601 train loss:3.865617 +step:1602 train loss:3.926138 +step:1603 train loss:3.920506 +step:1604 train loss:3.841761 +step:1605 train loss:3.898530 +step:1606 train loss:3.941461 +step:1607 train loss:3.825524 +step:1608 train loss:3.860364 +step:1609 train loss:3.876559 +step:1610 train loss:3.933822 +step:1611 train loss:3.861536 +step:1612 train loss:3.784662 +step:1613 train loss:3.854656 +step:1614 train loss:3.968072 +step:1615 train loss:3.880697 +step:1616 train loss:3.885607 +step:1617 train loss:3.871880 +step:1618 train loss:3.874629 +step:1619 train loss:4.047727 +step:1620 train loss:3.839517 +step:1621 train loss:3.895004 +step:1622 train loss:3.819001 +step:1623 train loss:3.885341 +step:1624 train loss:3.855436 +step:1625 train loss:3.929097 +step:1626 train loss:3.818333 +step:1627 train loss:3.833592 +step:1628 train loss:3.849672 +step:1629 train loss:3.885682 +step:1630 train loss:3.898607 +step:1631 train loss:3.848032 +step:1632 train loss:3.821438 +step:1633 train loss:3.839397 +step:1634 train loss:3.897135 +step:1635 train loss:3.837961 +step:1636 train loss:3.822142 +step:1637 train loss:3.897636 +step:1638 train loss:3.999693 +step:1639 train loss:3.806782 +step:1640 train loss:3.887618 +step:1641 train loss:3.849025 +step:1642 train loss:3.948140 +step:1643 train loss:3.845486 +step:1644 train loss:3.855124 +step:1645 train loss:3.831878 +step:1646 train loss:3.913558 +step:1647 train loss:3.805203 +step:1648 train loss:3.868431 +step:1649 train loss:3.835366 +step:1650 train loss:3.847473 +step:1651 train loss:3.862271 +step:1652 train loss:3.883348 +step:1653 train loss:3.890836 +step:1654 train loss:3.883315 +step:1655 train loss:3.859602 +step:1656 train loss:3.852223 +step:1657 train loss:3.853304 +step:1658 train loss:3.826950 +step:1659 train loss:3.904260 +step:1660 train loss:3.804492 +step:1661 train loss:3.916488 +step:1662 train loss:3.851791 +step:1663 train loss:3.842335 +step:1664 train loss:3.940614 +step:1665 train loss:3.860388 +step:1666 train loss:3.871266 +step:1667 train loss:3.891329 +step:1668 train loss:3.864215 +step:1669 train loss:3.826185 +step:1670 train loss:3.880945 +step:1671 train loss:3.876900 +step:1672 train loss:3.871310 +step:1673 train loss:3.828968 +step:1674 train loss:3.829002 +step:1675 train loss:3.868999 +step:1676 train loss:4.136339 +step:1677 train loss:3.880685 +step:1678 train loss:3.790496 +step:1679 train loss:3.917910 +step:1680 train loss:3.842676 +step:1681 train loss:3.898887 +step:1682 train loss:3.854998 +step:1683 train loss:3.847686 +step:1684 train loss:3.799627 +step:1685 train loss:3.863364 +step:1686 train loss:3.847150 +step:1687 train loss:3.861488 +step:1688 train loss:3.841481 +step:1689 train loss:3.830580 +step:1690 train loss:3.853535 +step:1691 train loss:3.845457 +step:1692 train loss:3.861589 +step:1693 train loss:3.832146 +step:1694 train loss:3.786244 +step:1695 train loss:3.810029 +step:1696 train loss:3.818193 +step:1697 train loss:3.864344 +step:1698 train loss:3.859769 +step:1699 train loss:3.816446 +step:1700 train loss:3.895642 +step:1701 train loss:3.839623 +step:1702 train loss:3.830796 +step:1703 train loss:3.850427 +step:1704 train loss:3.856740 +step:1705 train loss:3.873239 +step:1706 train loss:3.878875 +step:1707 train loss:3.879709 +step:1708 train loss:3.802329 +step:1709 train loss:3.903844 +step:1710 train loss:3.817709 +step:1711 train loss:3.822894 +step:1712 train loss:3.850724 +step:1713 train loss:3.813848 +step:1714 train 
loss:4.183484 +step:1715 train loss:3.826537 +step:1716 train loss:3.816166 +step:1717 train loss:3.815667 +step:1718 train loss:3.892982 +step:1719 train loss:3.801675 +step:1720 train loss:3.887563 +step:1721 train loss:3.826175 +step:1722 train loss:3.797950 +step:1723 train loss:3.896408 +step:1724 train loss:3.847169 +step:1725 train loss:3.842641 +step:1726 train loss:3.843548 +step:1727 train loss:3.879307 +step:1728 train loss:3.885993 +step:1729 train loss:3.805506 +step:1730 train loss:3.882992 +step:1731 train loss:3.810481 +step:1732 train loss:3.826088 +step:1733 train loss:3.809432 +step:1734 train loss:3.866159 +step:1735 train loss:3.923874 +step:1736 train loss:3.836155 +step:1737 train loss:3.867478 +step:1738 train loss:3.821245 +step:1739 train loss:3.893563 +step:1740 train loss:3.881166 +step:1741 train loss:3.937964 +step:1742 train loss:3.919336 +step:1743 train loss:3.815997 +step:1744 train loss:3.828468 +step:1745 train loss:3.814176 +step:1746 train loss:3.800789 +step:1747 train loss:3.835203 +step:1748 train loss:3.772146 +step:1749 train loss:3.816975 +step:1750 validation loss:3.790247 +step:1750 train loss:3.851481 +step:1751 train loss:3.864573 +step:1752 train loss:3.840863 +step:1753 train loss:3.858165 +step:1754 train loss:3.850361 +step:1755 train loss:3.845599 +step:1756 train loss:3.868778 +step:1757 train loss:3.872167 +step:1758 train loss:3.791496 +step:1759 train loss:3.883842 +step:1760 train loss:3.834321 +step:1761 train loss:3.812989 +step:1762 train loss:3.810546 +step:1763 train loss:3.814145 +step:1764 train loss:4.107858 +step:1765 train loss:3.817024 +step:1766 train loss:3.909752 +step:1767 train loss:3.820613 +step:1768 train loss:3.801245 +step:1769 train loss:3.823984 +step:1770 train loss:3.836509 +step:1771 train loss:3.810871 +step:1772 train loss:3.920112 +step:1773 train loss:3.845132 +step:1774 train loss:3.846327 +step:1775 train loss:3.958848 +step:1776 train loss:3.833156 +step:1777 train loss:3.823475 +step:1778 train loss:3.879848 +step:1779 train loss:3.816438 +step:1780 train loss:3.868126 +step:1781 train loss:3.867292 +step:1782 train loss:3.901719 +step:1783 train loss:3.829339 +step:1784 train loss:3.922529 +step:1785 train loss:3.824547 +step:1786 train loss:3.821991 +step:1787 train loss:3.822602 +step:1788 train loss:3.843707 +step:1789 train loss:3.800236 +step:1790 train loss:3.814461 +step:1791 train loss:3.892282 +step:1792 train loss:3.892460 +step:1793 train loss:3.810745 +step:1794 train loss:3.853430 +step:1795 train loss:3.806392 +step:1796 train loss:3.792008 +step:1797 train loss:3.853169 +step:1798 train loss:3.798409 +step:1799 train loss:3.850729 +step:1800 train loss:3.880921 +step:1801 train loss:3.866876 +step:1802 train loss:3.876380 +step:1803 train loss:3.864257 +step:1804 train loss:3.864652 +step:1805 train loss:3.850751 +step:1806 train loss:3.863289 +step:1807 train loss:3.789725 +step:1808 train loss:3.854312 +step:1809 train loss:3.842200 +step:1810 train loss:3.831700 +step:1811 train loss:3.849222 +step:1812 train loss:3.830172 +step:1813 train loss:3.844423 +step:1814 train loss:3.911716 +step:1815 train loss:3.847642 +step:1816 train loss:3.800433 +step:1817 train loss:3.794236 +step:1818 train loss:3.850298 +step:1819 train loss:3.822736 +step:1820 train loss:3.856192 +step:1821 train loss:3.821497 +step:1822 train loss:3.800083 +step:1823 train loss:3.792423 +step:1824 train loss:3.867024 +step:1825 train loss:3.783743 +step:1826 train loss:3.825771 +step:1827 train loss:3.793559 
+step:1828 train loss:3.843872 +step:1829 train loss:3.809814 +step:1830 train loss:4.005424 +step:1831 train loss:3.758372 +step:1832 train loss:3.809536 +step:1833 train loss:3.855217 +step:1834 train loss:3.806573 +step:1835 train loss:3.809026 +step:1836 train loss:3.847235 +step:1837 train loss:3.774015 +step:1838 train loss:3.871540 +step:1839 train loss:3.853940 +step:1840 train loss:3.821769 +step:1841 train loss:3.848217 +step:1842 train loss:3.822221 +step:1843 train loss:3.768956 +step:1844 train loss:3.834898 +step:1845 train loss:3.802824 +step:1846 train loss:3.862216 +step:1847 train loss:3.907960 +step:1848 train loss:3.711828 +step:1849 train loss:3.801556 +step:1850 train loss:3.777147 +step:1851 train loss:3.818987 +step:1852 train loss:3.802318 +step:1853 train loss:3.862438 +step:1854 train loss:3.823519 +step:1855 train loss:3.812434 +step:1856 train loss:3.812588 +step:1857 train loss:3.819012 +step:1858 train loss:3.865959 +step:1859 train loss:3.809992 +step:1860 train loss:3.787185 +step:1861 train loss:3.799398 +step:1862 train loss:3.842292 +step:1863 train loss:3.879343 +step:1864 train loss:3.780891 +step:1865 train loss:3.797990 +step:1866 train loss:3.799206 +step:1867 train loss:3.837130 +step:1868 train loss:3.884289 +step:1869 train loss:3.802282 +step:1870 train loss:3.829893 +step:1871 train loss:3.771330 +step:1872 train loss:3.838622 +step:1873 train loss:3.898170 +step:1874 train loss:3.765524 +step:1875 train loss:3.837969 +step:1876 train loss:3.801812 +step:1877 train loss:3.845164 +step:1878 train loss:3.766530 +step:1879 train loss:3.830594 +step:1880 train loss:3.904195 +step:1881 train loss:3.836777 +step:1882 train loss:3.853453 +step:1883 train loss:3.873842 +step:1884 train loss:3.886758 +step:1885 train loss:3.838814 +step:1886 train loss:3.772024 +step:1887 train loss:3.784754 +step:1888 train loss:3.791704 +step:1889 train loss:3.807203 +step:1890 train loss:3.806549 +step:1891 train loss:3.746749 +step:1892 train loss:3.839762 +step:1893 train loss:3.763447 +step:1894 train loss:3.783784 +step:1895 train loss:3.814634 +step:1896 train loss:3.869026 +step:1897 train loss:3.756579 +step:1898 train loss:3.810188 +step:1899 train loss:3.825148 +step:1900 train loss:3.778178 +step:1901 train loss:3.857134 +step:1902 train loss:3.843659 +step:1903 train loss:3.787698 +step:1904 train loss:3.774319 +step:1905 train loss:3.772482 +step:1906 train loss:3.832903 +step:1907 train loss:3.776361 +step:1908 train loss:3.794526 +step:1909 train loss:3.887529 +step:1910 train loss:3.774641 +step:1911 train loss:3.783677 +step:1912 train loss:3.836484 +step:1913 train loss:3.771596 +step:1914 train loss:3.811776 +step:1915 train loss:3.770456 +step:1916 train loss:3.827104 +step:1917 train loss:3.806717 +step:1918 train loss:3.716252 +step:1919 train loss:3.871597 +step:1920 train loss:3.972974 +step:1921 train loss:3.757454 +step:1922 train loss:3.735700 +step:1923 train loss:3.831420 +step:1924 train loss:3.871983 +step:1925 train loss:3.813703 +step:1926 train loss:3.752914 +step:1927 train loss:3.835658 +step:1928 train loss:3.751132 +step:1929 train loss:3.777105 +step:1930 train loss:3.848690 +step:1931 train loss:3.759914 +step:1932 train loss:3.809927 +step:1933 train loss:3.808204 +step:1934 train loss:3.883180 +step:1935 train loss:3.831698 +step:1936 train loss:3.801974 +step:1937 train loss:3.741282 +step:1938 train loss:4.109450 +step:1939 train loss:3.847917 +step:1940 train loss:3.822773 +step:1941 train loss:3.834020 +step:1942 train 
loss:3.823240 +step:1943 train loss:3.818131 +step:1944 train loss:3.777638 +step:1945 train loss:3.779683 +step:1946 train loss:3.805110 +step:1947 train loss:3.827065 +step:1948 train loss:3.736939 +step:1949 train loss:3.843852 +step:1950 train loss:3.784871 +step:1951 train loss:3.808872 +step:1952 train loss:3.831171 +step:1953 train loss:3.762579 +step:1954 train loss:3.801342 +step:1955 train loss:3.751069 +step:1956 train loss:3.837120 +step:1957 train loss:3.860708 +step:1958 train loss:3.876632 +step:1959 train loss:3.746236 +step:1960 train loss:3.784180 +step:1961 train loss:3.820969 +step:1962 train loss:3.813756 +step:1963 train loss:3.789685 +step:1964 train loss:3.826456 +step:1965 train loss:3.859899 +step:1966 train loss:3.767883 +step:1967 train loss:3.827187 +step:1968 train loss:3.765321 +step:1969 train loss:3.784628 +step:1970 train loss:3.848828 +step:1971 train loss:3.745687 +step:1972 train loss:3.859223 +step:1973 train loss:3.754726 +step:1974 train loss:3.798768 +step:1975 train loss:3.756976 +step:1976 train loss:3.784092 +step:1977 train loss:3.825800 +step:1978 train loss:3.767169 +step:1979 train loss:3.747826 +step:1980 train loss:3.788998 +step:1981 train loss:3.766751 +step:1982 train loss:3.850144 +step:1983 train loss:3.793960 +step:1984 train loss:3.834744 +step:1985 train loss:3.819722 +step:1986 train loss:3.811793 +step:1987 train loss:3.765204 +step:1988 train loss:3.794966 +step:1989 train loss:3.935819 +step:1990 train loss:3.767124 +step:1991 train loss:3.761387 +step:1992 train loss:3.771858 +step:1993 train loss:3.803071 +step:1994 train loss:3.797671 +step:1995 train loss:3.746704 +step:1996 train loss:3.804573 +step:1997 train loss:3.809994 +step:1998 train loss:3.756121 +step:1999 train loss:3.874655 +step:2000 validation loss:3.739947 total_sharp:9.6502e-03 L1_sharp:1.4093e-01 L2_sharp:3.4784e-02 L3_sharp:2.3467e-02 L4_sharp:9.9238e-03 L5_sharp:1.0628e-02 L6_sharp:1.0013e-02 L7_sharp:1.1501e-02 L8_sharp:1.0395e-02 L9_sharp:8.5535e-03 L10_sharp:7.0114e-03 L11_sharp:6.2308e-03 L12_sharp:9.9186e-03 total_fnorm:1.3960e+00 total_l1_linf:9.5277e+03 total_spectral:1.3960e+00 L1_fnorm:1.2049e-01 L2_fnorm:1.1344e-01 L3_fnorm:1.1293e-01 L4_fnorm:1.1681e-01 L5_fnorm:1.1914e-01 L6_fnorm:1.2089e-01 L7_fnorm:1.2078e-01 L8_fnorm:1.2095e-01 L9_fnorm:1.2060e-01 L10_fnorm:1.2079e-01 L11_fnorm:1.2064e-01 L12_fnorm:1.2074e-01 L1_l1linf:3.2159e-01 L2_l1linf:3.0159e-01 L3_l1linf:3.0082e-01 L4_l1linf:3.1728e-01 L5_l1linf:3.0872e-01 L6_l1linf:2.8458e-01 L7_l1linf:2.7020e-01 L8_l1linf:2.7598e-01 L9_l1linf:3.0405e-01 L10_l1linf:3.1728e-01 L11_l1linf:3.1922e-01 L12_l1linf:3.0150e-01 L1_spectral:6.9646e-03 L2_spectral:6.8092e-03 L3_spectral:6.7839e-03 L4_spectral:7.1431e-03 L5_spectral:6.8958e-03 L6_spectral:6.4206e-03 L7_spectral:6.1321e-03 L8_spectral:6.2089e-03 L9_spectral:6.8816e-03 L10_spectral:7.1588e-03 L11_spectral:7.1824e-03 L12_spectral:6.8711e-03 ip_v_neg_g:1.0688e-02 cos_v_neg_g:2.8904e-03 v_norm:1.3960e+00 g_norm:2.6487e+00 hv_norm:1.0207e+00 cos_v_hv:1.3199e-02 hg_norm:1.0027e+02 cos_g_hg:5.2196e-01 v_par:9.7990e-05 v_perp:1.3960e+00 L1_cos_v_neg_g:1.9441e-02 L1_v_norm:1.2049e-01 L2_cos_v_neg_g:1.7943e-02 L2_v_norm:1.1344e-01 L3_cos_v_neg_g:1.4312e-02 L3_v_norm:1.1293e-01 L4_cos_v_neg_g:1.0264e-02 L4_v_norm:1.1681e-01 L5_cos_v_neg_g:9.1569e-03 L5_v_norm:1.1914e-01 L6_cos_v_neg_g:7.2509e-03 L6_v_norm:1.2089e-01 L7_cos_v_neg_g:7.7373e-03 L7_v_norm:1.2078e-01 L8_cos_v_neg_g:7.8890e-03 L8_v_norm:1.2095e-01 L9_cos_v_neg_g:7.0680e-03 L9_v_norm:1.2060e-01 
L10_cos_v_neg_g:6.3510e-03 L10_v_norm:1.2079e-01 L11_cos_v_neg_g:5.2438e-03 L11_v_norm:1.2064e-01 L12_cos_v_neg_g:3.9990e-03 L12_v_norm:1.2074e-01 +step:2000 train loss:3.836008 +step:2001 train loss:3.761049 +step:2002 train loss:3.859964 +step:2003 train loss:3.906258 +step:2004 train loss:3.779189 +step:2005 train loss:3.874985 +step:2006 train loss:3.759615 +step:2007 train loss:3.839566 +step:2008 train loss:3.782693 +step:2009 train loss:3.781133 +step:2010 train loss:3.909697 +step:2011 train loss:3.763492 +step:2012 train loss:3.790538 +step:2013 train loss:3.801544 +step:2014 train loss:3.701297 +step:2015 train loss:3.818115 +step:2016 train loss:3.798781 +step:2017 train loss:3.800529 +step:2018 train loss:3.764514 +step:2019 train loss:3.795184 +step:2020 train loss:3.803015 +step:2021 train loss:3.766606 +step:2022 train loss:3.811128 +step:2023 train loss:3.788603 +step:2024 train loss:3.842592 +step:2025 train loss:3.780159 +step:2026 train loss:3.760333 +step:2027 train loss:3.790231 +step:2028 train loss:3.719881 +step:2029 train loss:3.753009 +step:2030 train loss:3.755450 +step:2031 train loss:3.716344 +step:2032 train loss:3.770505 +step:2033 train loss:3.764846 +step:2034 train loss:3.764619 +step:2035 train loss:3.804723 +step:2036 train loss:3.794805 +step:2037 train loss:3.781631 +step:2038 train loss:3.777906 +step:2039 train loss:3.774596 +step:2040 train loss:3.799072 +step:2041 train loss:3.799703 +step:2042 train loss:3.732497 +step:2043 train loss:3.889994 +step:2044 train loss:3.755454 +step:2045 train loss:3.770956 +step:2046 train loss:3.781365 +step:2047 train loss:3.760184 +step:2048 train loss:3.800238 +step:2049 train loss:3.756271 +step:2050 train loss:3.781393 +step:2051 train loss:3.741580 +step:2052 train loss:3.795353 +step:2053 train loss:3.791330 +step:2054 train loss:3.767454 +step:2055 train loss:3.760918 +step:2056 train loss:3.809317 +step:2057 train loss:3.816299 +step:2058 train loss:3.779282 +step:2059 train loss:3.861378 +step:2060 train loss:3.805680 +step:2061 train loss:3.762825 +step:2062 train loss:3.790352 +step:2063 train loss:3.691926 +step:2064 train loss:3.812419 +step:2065 train loss:3.819714 +step:2066 train loss:3.677727 +step:2067 train loss:3.727678 +step:2068 train loss:3.831663 +step:2069 train loss:3.768003 +step:2070 train loss:3.769443 +step:2071 train loss:3.810719 +step:2072 train loss:3.740240 +step:2073 train loss:3.795130 +step:2074 train loss:3.773804 +step:2075 train loss:3.853780 +step:2076 train loss:3.794312 +step:2077 train loss:3.810852 +step:2078 train loss:3.767338 +step:2079 train loss:3.919257 +step:2080 train loss:3.737931 +step:2081 train loss:3.848741 +step:2082 train loss:3.777875 +step:2083 train loss:3.766469 +step:2084 train loss:3.742824 +step:2085 train loss:3.788000 +step:2086 train loss:3.800841 +step:2087 train loss:3.842964 +step:2088 train loss:3.707840 +step:2089 train loss:3.735877 +step:2090 train loss:3.772650 +step:2091 train loss:3.790622 +step:2092 train loss:3.770555 +step:2093 train loss:3.761481 +step:2094 train loss:3.800063 +step:2095 train loss:3.741201 +step:2096 train loss:3.732925 +step:2097 train loss:3.769390 +step:2098 train loss:3.768186 +step:2099 train loss:3.750482 +step:2100 train loss:3.811628 +step:2101 train loss:3.804619 +step:2102 train loss:3.770221 +step:2103 train loss:3.788567 +step:2104 train loss:3.767914 +step:2105 train loss:3.773159 +step:2106 train loss:3.770758 +step:2107 train loss:3.836033 +step:2108 train loss:3.752197 +step:2109 train 
loss:3.714259 +step:2110 train loss:3.811475 +step:2111 train loss:3.755959 +step:2112 train loss:3.820607 +step:2113 train loss:3.752474 +step:2114 train loss:3.759311 +step:2115 train loss:3.812702 +step:2116 train loss:3.741598 +step:2117 train loss:3.761591 +step:2118 train loss:3.751144 +step:2119 train loss:3.686429 +step:2120 train loss:3.770759 +step:2121 train loss:3.766279 +step:2122 train loss:3.773960 +step:2123 train loss:3.829791 +step:2124 train loss:3.837221 +step:2125 train loss:3.738009 +step:2126 train loss:3.743473 +step:2127 train loss:3.734321 +step:2128 train loss:3.730357 +step:2129 train loss:3.758008 +step:2130 train loss:3.764609 +step:2131 train loss:3.783471 +step:2132 train loss:3.714503 +step:2133 train loss:3.823442 +step:2134 train loss:3.772451 +step:2135 train loss:3.731655 +step:2136 train loss:3.825371 +step:2137 train loss:3.788138 +step:2138 train loss:3.746353 +step:2139 train loss:3.747441 +step:2140 train loss:3.751038 +step:2141 train loss:3.800612 +step:2142 train loss:3.771869 +step:2143 train loss:3.692696 +step:2144 train loss:3.799671 +step:2145 train loss:3.770135 +step:2146 train loss:3.806867 +step:2147 train loss:3.912963 +step:2148 train loss:3.713725 +step:2149 train loss:3.723524 +step:2150 train loss:3.748226 +step:2151 train loss:3.785940 +step:2152 train loss:3.778931 +step:2153 train loss:3.816523 +step:2154 train loss:3.738058 +step:2155 train loss:3.817688 +step:2156 train loss:3.742764 +step:2157 train loss:3.816025 +step:2158 train loss:3.854764 +step:2159 train loss:3.782374 +step:2160 train loss:3.852379 +step:2161 train loss:3.753181 +step:2162 train loss:3.759341 +step:2163 train loss:3.733847 +step:2164 train loss:3.755990 +step:2165 train loss:3.735710 +step:2166 train loss:3.851897 +step:2167 train loss:3.761481 +step:2168 train loss:3.771123 +step:2169 train loss:3.722810 +step:2170 train loss:3.872531 +step:2171 train loss:3.827983 +step:2172 train loss:3.762434 +step:2173 train loss:3.757164 +step:2174 train loss:3.817962 +step:2175 train loss:3.747231 +step:2176 train loss:3.828891 +step:2177 train loss:3.799608 +step:2178 train loss:3.725823 +step:2179 train loss:3.795036 +step:2180 train loss:3.811448 +step:2181 train loss:3.740492 +step:2182 train loss:3.788659 +step:2183 train loss:3.780189 +step:2184 train loss:3.734105 +step:2185 train loss:3.713954 +step:2186 train loss:3.754463 +step:2187 train loss:3.767384 +step:2188 train loss:3.811585 +step:2189 train loss:3.706093 +step:2190 train loss:3.751393 +step:2191 train loss:3.805077 +step:2192 train loss:3.735552 +step:2193 train loss:3.705825 +step:2194 train loss:3.708808 +step:2195 train loss:3.735554 +step:2196 train loss:3.743978 +step:2197 train loss:3.723822 +step:2198 train loss:3.750456 +step:2199 train loss:3.815409 +step:2200 train loss:3.748932 +step:2201 train loss:3.757024 +step:2202 train loss:3.718487 +step:2203 train loss:3.739126 +step:2204 train loss:3.772400 +step:2205 train loss:3.752529 +step:2206 train loss:3.755778 +step:2207 train loss:3.746526 +step:2208 train loss:3.728282 +step:2209 train loss:4.008353 +step:2210 train loss:3.777349 +step:2211 train loss:3.769165 +step:2212 train loss:3.746283 +step:2213 train loss:3.823534 +step:2214 train loss:3.819609 +step:2215 train loss:3.742350 +step:2216 train loss:3.707116 +step:2217 train loss:3.738745 +step:2218 train loss:3.741714 +step:2219 train loss:3.776301 +step:2220 train loss:3.716625 +step:2221 train loss:3.750637 +step:2222 train loss:3.762488 +step:2223 train loss:3.806468 
+step:2224 train loss:3.776061 +step:2225 train loss:3.719346 +step:2226 train loss:3.784225 +step:2227 train loss:3.787189 +step:2228 train loss:3.781665 +step:2229 train loss:3.721818 +step:2230 train loss:3.849663 +step:2231 train loss:3.764935 +step:2232 train loss:3.759787 +step:2233 train loss:3.804976 +step:2234 train loss:3.699448 +step:2235 train loss:3.792036 +step:2236 train loss:3.730093 +step:2237 train loss:3.867703 +step:2238 train loss:3.665289 +step:2239 train loss:3.747378 +step:2240 train loss:3.759638 +step:2241 train loss:3.674158 +step:2242 train loss:3.821870 +step:2243 train loss:3.851612 +step:2244 train loss:3.730973 +step:2245 train loss:3.733261 +step:2246 train loss:3.695714 +step:2247 train loss:3.705043 +step:2248 train loss:3.759988 +step:2249 train loss:3.738292 +step:2250 validation loss:3.695081 +step:2250 train loss:3.756568 +step:2251 train loss:3.717667 +step:2252 train loss:3.720297 +step:2253 train loss:3.748210 +step:2254 train loss:3.754024 +step:2255 train loss:3.706263 +step:2256 train loss:3.760792 +step:2257 train loss:3.747596 +step:2258 train loss:3.740662 +step:2259 train loss:3.759179 +step:2260 train loss:3.706254 +step:2261 train loss:3.788108 +step:2262 train loss:3.810376 +step:2263 train loss:3.762205 +step:2264 train loss:3.879095 +step:2265 train loss:3.725174 +step:2266 train loss:3.768321 +step:2267 train loss:3.725938 +step:2268 train loss:3.734533 +step:2269 train loss:3.732538 +step:2270 train loss:3.724333 +step:2271 train loss:3.741241 +step:2272 train loss:3.775096 +step:2273 train loss:3.699553 +step:2274 train loss:3.729246 +step:2275 train loss:3.683757 +step:2276 train loss:3.759093 +step:2277 train loss:3.770851 +step:2278 train loss:3.751720 +step:2279 train loss:3.735783 +step:2280 train loss:3.642448 +step:2281 train loss:3.789484 +step:2282 train loss:3.720320 +step:2283 train loss:3.701253 +step:2284 train loss:3.718515 +step:2285 train loss:3.773390 +step:2286 train loss:3.736841 +step:2287 train loss:3.770262 +step:2288 train loss:3.742752 +step:2289 train loss:3.741746 +step:2290 train loss:3.749114 +step:2291 train loss:3.734343 +step:2292 train loss:3.775798 +step:2293 train loss:3.751993 +step:2294 train loss:3.752146 +step:2295 train loss:3.805415 +step:2296 train loss:3.738914 +step:2297 train loss:3.712741 +step:2298 train loss:3.771611 +step:2299 train loss:3.745949 +step:2300 train loss:3.662326 +step:2301 train loss:3.760154 +step:2302 train loss:3.770042 +step:2303 train loss:3.743207 +step:2304 train loss:3.733067 +step:2305 train loss:3.775557 +step:2306 train loss:3.764931 +step:2307 train loss:3.748284 +step:2308 train loss:3.762361 +step:2309 train loss:3.719464 +step:2310 train loss:3.707127 +step:2311 train loss:3.691649 +step:2312 train loss:3.763946 +step:2313 train loss:3.675605 +step:2314 train loss:3.752202 +step:2315 train loss:3.763630 +step:2316 train loss:3.805700 +step:2317 train loss:3.670587 +step:2318 train loss:3.718474 +step:2319 train loss:3.769722 +step:2320 train loss:3.738158 +step:2321 train loss:3.707212 +step:2322 train loss:3.726172 +step:2323 train loss:3.718127 +step:2324 train loss:3.751229 +step:2325 train loss:3.686857 +step:2326 train loss:3.721427 +step:2327 train loss:3.831601 +step:2328 train loss:3.778233 +step:2329 train loss:3.734925 +step:2330 train loss:3.695850 +step:2331 train loss:3.736129 +step:2332 train loss:3.660770 +step:2333 train loss:3.724771 +step:2334 train loss:3.702644 +step:2335 train loss:3.691297 +step:2336 train loss:3.940376 +step:2337 
train loss:3.712252 +step:2338 train loss:3.757037 +step:2339 train loss:3.752876 +step:2340 train loss:3.770352 +step:2341 train loss:3.757881 +step:2342 train loss:3.710904 +step:2343 train loss:3.732413 +step:2344 train loss:3.777037 +step:2345 train loss:3.730230 +step:2346 train loss:3.759531 +step:2347 train loss:3.685683 +step:2348 train loss:3.740801 +step:2349 train loss:3.691217 +step:2350 train loss:3.751201 +step:2351 train loss:3.755780 +step:2352 train loss:3.757667 +step:2353 train loss:3.720701 +step:2354 train loss:3.763947 +step:2355 train loss:3.754734 +step:2356 train loss:3.791828 +step:2357 train loss:3.697847 +step:2358 train loss:3.713066 +step:2359 train loss:3.734050 +step:2360 train loss:3.761870 +step:2361 train loss:3.792779 +step:2362 train loss:3.625701 +step:2363 train loss:3.816264 +step:2364 train loss:3.764235 +step:2365 train loss:3.732474 +step:2366 train loss:3.683501 +step:2367 train loss:3.755323 +step:2368 train loss:3.743335 +step:2369 train loss:3.735214 +step:2370 train loss:3.747083 +step:2371 train loss:3.803170 +step:2372 train loss:3.657053 +step:2373 train loss:3.796349 +step:2374 train loss:3.779483 +step:2375 train loss:3.759896 +step:2376 train loss:3.755343 +step:2377 train loss:3.698335 +step:2378 train loss:3.745430 +step:2379 train loss:3.732070 +step:2380 train loss:3.791782 +step:2381 train loss:3.889685 +step:2382 train loss:3.672544 +step:2383 train loss:3.720487 +step:2384 train loss:3.751683 +step:2385 train loss:3.651788 +step:2386 train loss:3.809039 +step:2387 train loss:3.688947 +step:2388 train loss:3.741889 +step:2389 train loss:3.763747 +step:2390 train loss:3.713536 +step:2391 train loss:3.736738 +step:2392 train loss:3.763218 +step:2393 train loss:3.716051 +step:2394 train loss:3.741937 +step:2395 train loss:3.734004 +step:2396 train loss:3.736647 +step:2397 train loss:3.711752 +step:2398 train loss:3.771283 +step:2399 train loss:3.731036 +step:2400 train loss:3.709732 +step:2401 train loss:3.754232 +step:2402 train loss:3.701912 +step:2403 train loss:3.749236 +step:2404 train loss:3.710948 +step:2405 train loss:3.713039 +step:2406 train loss:3.736320 +step:2407 train loss:3.682163 +step:2408 train loss:3.725671 +step:2409 train loss:3.716662 +step:2410 train loss:3.712875 +step:2411 train loss:3.788837 +step:2412 train loss:3.770522 +step:2413 train loss:3.815724 +step:2414 train loss:3.704331 +step:2415 train loss:3.696456 +step:2416 train loss:3.711355 +step:2417 train loss:3.749665 +step:2418 train loss:3.766549 +step:2419 train loss:3.697419 +step:2420 train loss:3.715714 +step:2421 train loss:3.746696 +step:2422 train loss:3.800836 +step:2423 train loss:3.730911 +step:2424 train loss:3.696289 +step:2425 train loss:3.757947 +step:2426 train loss:3.698523 +step:2427 train loss:3.720519 +step:2428 train loss:3.805237 +step:2429 train loss:3.753540 +step:2430 train loss:3.848160 +step:2431 train loss:3.757374 +step:2432 train loss:3.729023 +step:2433 train loss:3.704566 +step:2434 train loss:3.693953 +step:2435 train loss:3.750672 +step:2436 train loss:3.708869 +step:2437 train loss:3.738493 +step:2438 train loss:3.783029 +step:2439 train loss:3.767295 +step:2440 train loss:3.709446 +step:2441 train loss:3.745540 +step:2442 train loss:3.738613 +step:2443 train loss:3.697111 +step:2444 train loss:3.735367 +step:2445 train loss:3.734225 +step:2446 train loss:3.703215 +step:2447 train loss:3.687527 +step:2448 train loss:3.737238 +step:2449 train loss:3.766284 +step:2450 train loss:3.724337 +step:2451 train loss:3.650628 
+step:2452 train loss:3.745830 +step:2453 train loss:3.715645 +step:2454 train loss:3.713657 +step:2455 train loss:3.767220 +step:2456 train loss:3.719026 +step:2457 train loss:3.779006 +step:2458 train loss:3.756346 +step:2459 train loss:3.729927 +step:2460 train loss:3.740223 +step:2461 train loss:3.765477 +step:2462 train loss:3.739386 +step:2463 train loss:3.712490 +step:2464 train loss:3.727350 +step:2465 train loss:3.807593 +step:2466 train loss:3.889378 +step:2467 train loss:3.798443 +step:2468 train loss:3.688878 +step:2469 train loss:3.763973 +step:2470 train loss:3.807534 +step:2471 train loss:3.806909 +step:2472 train loss:3.784328 +step:2473 train loss:3.724550 +step:2474 train loss:3.685811 +step:2475 train loss:3.741647 +step:2476 train loss:3.815962 +step:2477 train loss:3.731362 +step:2478 train loss:3.686405 +step:2479 train loss:3.729692 +step:2480 train loss:3.722391 +step:2481 train loss:3.917144 +step:2482 train loss:3.721139 +step:2483 train loss:3.751272 +step:2484 train loss:3.703161 +step:2485 train loss:3.694961 +step:2486 train loss:3.727052 +step:2487 train loss:3.762311 +step:2488 train loss:3.671529 +step:2489 train loss:3.787923 +step:2490 train loss:3.705609 +step:2491 train loss:3.718930 +step:2492 train loss:3.760737 +step:2493 train loss:3.799756 +step:2494 train loss:3.719012 +step:2495 train loss:3.753134 +step:2496 train loss:3.726694 +step:2497 train loss:3.744624 +step:2498 train loss:3.751241 +step:2499 train loss:3.742730 +step:2500 validation loss:3.663682 total_sharp:7.1874e-03 L1_sharp:8.2047e-02 L2_sharp:2.1079e-02 L3_sharp:1.8382e-02 L4_sharp:6.8276e-03 L5_sharp:1.0557e-02 L6_sharp:8.0085e-03 L7_sharp:9.8853e-03 L8_sharp:9.6432e-03 L9_sharp:7.2200e-03 L10_sharp:5.4771e-03 L11_sharp:5.7733e-03 L12_sharp:8.7797e-03 total_fnorm:1.3868e+00 total_l1_linf:9.4662e+03 total_spectral:1.3868e+00 L1_fnorm:1.2057e-01 L2_fnorm:1.1468e-01 L3_fnorm:1.1360e-01 L4_fnorm:1.1704e-01 L5_fnorm:1.1923e-01 L6_fnorm:1.2088e-01 L7_fnorm:1.2084e-01 L8_fnorm:1.2098e-01 L9_fnorm:1.2089e-01 L10_fnorm:1.2096e-01 L11_fnorm:1.2076e-01 L12_fnorm:1.2065e-01 L1_l1linf:2.4910e-01 L2_l1linf:3.1473e-01 L3_l1linf:3.5433e-01 L4_l1linf:3.4370e-01 L5_l1linf:3.3806e-01 L6_l1linf:2.7514e-01 L7_l1linf:2.6755e-01 L8_l1linf:2.8680e-01 L9_l1linf:3.1064e-01 L10_l1linf:3.2400e-01 L11_l1linf:3.3779e-01 L12_l1linf:3.2784e-01 L1_spectral:5.6972e-03 L2_spectral:7.1260e-03 L3_spectral:7.9189e-03 L4_spectral:7.7372e-03 L5_spectral:7.6243e-03 L6_spectral:6.2982e-03 L7_spectral:6.1517e-03 L8_spectral:6.4488e-03 L9_spectral:7.0209e-03 L10_spectral:7.2980e-03 L11_spectral:7.5860e-03 L12_spectral:7.3587e-03 ip_v_neg_g:6.8247e-03 cos_v_neg_g:2.0266e-03 v_norm:1.3868e+00 g_norm:2.4284e+00 hv_norm:8.4587e-01 cos_v_hv:1.1783e-02 hg_norm:8.9113e+01 cos_g_hg:4.9087e-01 v_par:7.7028e-05 v_perp:1.3868e+00 L1_cos_v_neg_g:1.5280e-02 L1_v_norm:1.2057e-01 L2_cos_v_neg_g:1.2971e-02 L2_v_norm:1.1468e-01 L3_cos_v_neg_g:9.4144e-03 L3_v_norm:1.1360e-01 L4_cos_v_neg_g:6.1915e-03 L4_v_norm:1.1704e-01 L5_cos_v_neg_g:8.0241e-03 L5_v_norm:1.1923e-01 L6_cos_v_neg_g:6.1767e-03 L6_v_norm:1.2088e-01 L7_cos_v_neg_g:4.2726e-03 L7_v_norm:1.2084e-01 L8_cos_v_neg_g:4.3071e-03 L8_v_norm:1.2098e-01 L9_cos_v_neg_g:3.5566e-03 L9_v_norm:1.2089e-01 L10_cos_v_neg_g:3.4989e-03 L10_v_norm:1.2096e-01 L11_cos_v_neg_g:3.2385e-03 L11_v_norm:1.2076e-01 L12_cos_v_neg_g:2.6814e-03 L12_v_norm:1.2065e-01 +step:2500 train loss:3.688633 +step:2501 train loss:3.754765 +step:2502 train loss:3.741613 +step:2503 train loss:3.665016 +step:2504 train 
loss:3.706002 +step:2505 train loss:3.721866 +step:2506 train loss:3.689718 +step:2507 train loss:3.719996 +step:2508 train loss:3.663099 +step:2509 train loss:3.689447 +step:2510 train loss:3.681767 +step:2511 train loss:3.726953 +step:2512 train loss:3.772909 +step:2513 train loss:3.721826 +step:2514 train loss:3.705482 +step:2515 train loss:3.849274 +step:2516 train loss:3.726016 +step:2517 train loss:3.792913 +step:2518 train loss:3.754207 +step:2519 train loss:3.730898 +step:2520 train loss:3.735271 +step:2521 train loss:3.710162 +step:2522 train loss:3.747105 +step:2523 train loss:3.664780 +step:2524 train loss:3.723410 +step:2525 train loss:3.713494 +step:2526 train loss:3.765963 +step:2527 train loss:3.758486 +step:2528 train loss:3.742726 +step:2529 train loss:3.756961 +step:2530 train loss:3.735156 +step:2531 train loss:3.674924 +step:2532 train loss:3.777440 +step:2533 train loss:3.673354 +step:2534 train loss:3.765213 +step:2535 train loss:3.717676 +step:2536 train loss:3.642939 +step:2537 train loss:3.756026 +step:2538 train loss:3.734033 +step:2539 train loss:3.754768 +step:2540 train loss:3.689910 +step:2541 train loss:3.718853 +step:2542 train loss:3.729364 +step:2543 train loss:3.717918 +step:2544 train loss:3.706580 +step:2545 train loss:3.692000 +step:2546 train loss:3.659621 +step:2547 train loss:3.707955 +step:2548 train loss:3.731477 +step:2549 train loss:3.735105 +step:2550 train loss:3.863129 +step:2551 train loss:3.938845 +step:2552 train loss:3.671192 +step:2553 train loss:3.703928 +step:2554 train loss:3.849350 +step:2555 train loss:3.734345 +step:2556 train loss:3.656807 +step:2557 train loss:3.754717 +step:2558 train loss:3.746179 +step:2559 train loss:3.701163 +step:2560 train loss:3.688130 +step:2561 train loss:3.781703 +step:2562 train loss:3.736806 +step:2563 train loss:3.671662 +step:2564 train loss:3.737961 +step:2565 train loss:3.720963 +step:2566 train loss:3.700535 +step:2567 train loss:3.678954 +step:2568 train loss:3.733625 +step:2569 train loss:3.744426 +step:2570 train loss:3.692849 +step:2571 train loss:3.778313 +step:2572 train loss:3.738374 +step:2573 train loss:3.667693 +step:2574 train loss:3.722495 +step:2575 train loss:3.764736 +step:2576 train loss:3.716598 +step:2577 train loss:3.681922 +step:2578 train loss:3.718272 +step:2579 train loss:3.695097 +step:2580 train loss:3.668145 +step:2581 train loss:3.683253 +step:2582 train loss:3.689230 +step:2583 train loss:3.713742 +step:2584 train loss:3.731577 +step:2585 train loss:3.691899 +step:2586 train loss:3.716253 +step:2587 train loss:3.647662 +step:2588 train loss:3.682220 +step:2589 train loss:3.760909 +step:2590 train loss:3.682267 +step:2591 train loss:3.741773 +step:2592 train loss:3.789931 +step:2593 train loss:3.744725 +step:2594 train loss:3.708537 +step:2595 train loss:3.714362 +step:2596 train loss:3.754328 +step:2597 train loss:3.639564 +step:2598 train loss:3.795618 +step:2599 train loss:3.742415 +step:2600 train loss:3.774539 +step:2601 train loss:3.710018 +step:2602 train loss:3.739773 +step:2603 train loss:3.735707 +step:2604 train loss:3.655760 +step:2605 train loss:3.787702 +step:2606 train loss:3.731180 +step:2607 train loss:3.688972 +step:2608 train loss:3.666733 +step:2609 train loss:3.689572 +step:2610 train loss:3.715723 +step:2611 train loss:3.752590 +step:2612 train loss:3.716525 +step:2613 train loss:3.688877 +step:2614 train loss:3.678932 +step:2615 train loss:3.674071 +step:2616 train loss:3.751342 +step:2617 train loss:3.713482 +step:2618 train loss:3.676909 
+step:2619 train loss:3.695055 +step:2620 train loss:3.686520 +step:2621 train loss:3.701261 +step:2622 train loss:3.774100 +step:2623 train loss:3.648890 +step:2624 train loss:3.662519 +step:2625 train loss:3.735011 +step:2626 train loss:3.727211 +step:2627 train loss:3.707383 +step:2628 train loss:3.766453 +step:2629 train loss:3.708308 +step:2630 train loss:3.701452 +step:2631 train loss:3.733371 +step:2632 train loss:3.698662 +step:2633 train loss:3.683180 +step:2634 train loss:3.730511 +step:2635 train loss:3.711325 +step:2636 train loss:3.764462 +step:2637 train loss:3.713165 +step:2638 train loss:3.700320 +step:2639 train loss:3.749166 +step:2640 train loss:3.668640 +step:2641 train loss:3.725845 +step:2642 train loss:3.648038 +step:2643 train loss:3.646355 +step:2644 train loss:3.738300 +step:2645 train loss:3.679432 +step:2646 train loss:3.710348 +step:2647 train loss:3.729580 +step:2648 train loss:3.762463 +step:2649 train loss:3.676363 +step:2650 train loss:3.665952 +step:2651 train loss:3.706191 +step:2652 train loss:3.680942 +step:2653 train loss:3.750092 +step:2654 train loss:3.703925 +step:2655 train loss:3.692826 +step:2656 train loss:3.713852 +step:2657 train loss:3.739472 +step:2658 train loss:3.747880 +step:2659 train loss:3.724925 +step:2660 train loss:3.712607 +step:2661 train loss:3.759696 +step:2662 train loss:3.733858 +step:2663 train loss:3.710935 +step:2664 train loss:3.721151 +step:2665 train loss:3.671703 +step:2666 train loss:3.700977 +step:2667 train loss:3.707385 +step:2668 train loss:3.687325 +step:2669 train loss:3.689304 +step:2670 train loss:3.716509 +step:2671 train loss:3.690321 +step:2672 train loss:3.713852 +step:2673 train loss:3.645792 +step:2674 train loss:3.742848 +step:2675 train loss:3.713618 +step:2676 train loss:3.733329 +step:2677 train loss:3.713584 +step:2678 train loss:3.701057 +step:2679 train loss:3.682846 +step:2680 train loss:3.666268 +step:2681 train loss:3.638830 +step:2682 train loss:3.725991 +step:2683 train loss:3.697567 +step:2684 train loss:3.730514 +step:2685 train loss:3.647037 +step:2686 train loss:3.658181 +step:2687 train loss:3.737148 +step:2688 train loss:3.750204 +step:2689 train loss:3.654796 +step:2690 train loss:3.744559 +step:2691 train loss:3.710836 +step:2692 train loss:3.736586 +step:2693 train loss:3.790879 +step:2694 train loss:3.691156 +step:2695 train loss:3.709131 +step:2696 train loss:3.712199 +step:2697 train loss:3.706079 +step:2698 train loss:3.714819 +step:2699 train loss:3.732483 +step:2700 train loss:3.703058 +step:2701 train loss:3.772408 +step:2702 train loss:3.707380 +step:2703 train loss:3.669573 +step:2704 train loss:3.749177 +step:2705 train loss:3.728085 +step:2706 train loss:3.661188 +step:2707 train loss:3.624935 +step:2708 train loss:3.725145 +step:2709 train loss:3.704273 +step:2710 train loss:3.708801 +step:2711 train loss:3.676689 +step:2712 train loss:3.742146 +step:2713 train loss:3.740793 +step:2714 train loss:3.685025 +step:2715 train loss:3.680021 +step:2716 train loss:3.750813 +step:2717 train loss:3.712549 +step:2718 train loss:3.710896 +step:2719 train loss:3.707769 +step:2720 train loss:3.676052 +step:2721 train loss:3.753548 +step:2722 train loss:3.682775 +step:2723 train loss:3.672993 +step:2724 train loss:3.692804 +step:2725 train loss:3.694832 +step:2726 train loss:3.667802 +step:2727 train loss:3.725598 +step:2728 train loss:3.664415 +step:2729 train loss:3.795604 +step:2730 train loss:3.741197 +step:2731 train loss:3.778514 +step:2732 train loss:3.687260 +step:2733 train 
loss:3.682993 +step:2734 train loss:3.733085 +step:2735 train loss:3.730201 +step:2736 train loss:3.654746 +step:2737 train loss:3.708005 +step:2738 train loss:3.766316 +step:2739 train loss:3.686499 +step:2740 train loss:3.684350 +step:2741 train loss:3.673334 +step:2742 train loss:3.600227 +step:2743 train loss:3.702409 +step:2744 train loss:3.727172 +step:2745 train loss:3.679060 +step:2746 train loss:3.697617 +step:2747 train loss:3.681111 +step:2748 train loss:3.644389 +step:2749 train loss:3.706117 +step:2750 validation loss:3.636747 +step:2750 train loss:3.716244 +step:2751 train loss:3.742698 +step:2752 train loss:3.724188 +step:2753 train loss:3.716276 +step:2754 train loss:3.655067 +step:2755 train loss:3.722403 +step:2756 train loss:3.696900 +step:2757 train loss:3.687231 +step:2758 train loss:3.714119 +step:2759 train loss:3.723241 +step:2760 train loss:3.635786 +step:2761 train loss:3.648158 +step:2762 train loss:3.666164 +step:2763 train loss:3.683577 +step:2764 train loss:3.629263 +step:2765 train loss:3.677973 +step:2766 train loss:3.770328 +step:2767 train loss:3.640588 +step:2768 train loss:3.706402 +step:2769 train loss:3.676273 +step:2770 train loss:3.695358 +step:2771 train loss:3.720623 +step:2772 train loss:3.685694 +step:2773 train loss:3.687104 +step:2774 train loss:3.680781 +step:2775 train loss:3.693582 +step:2776 train loss:3.648335 +step:2777 train loss:3.681663 +step:2778 train loss:3.690637 +step:2779 train loss:3.717709 +step:2780 train loss:3.687913 +step:2781 train loss:3.674262 +step:2782 train loss:3.663117 +step:2783 train loss:3.691122 +step:2784 train loss:3.700654 +step:2785 train loss:3.775242 +step:2786 train loss:3.739928 +step:2787 train loss:3.696536 +step:2788 train loss:3.693059 +step:2789 train loss:3.690134 +step:2790 train loss:3.627367 +step:2791 train loss:3.727811 +step:2792 train loss:3.717541 +step:2793 train loss:3.681853 +step:2794 train loss:3.691824 +step:2795 train loss:3.706906 +step:2796 train loss:3.698448 +step:2797 train loss:3.745226 +step:2798 train loss:3.731025 +step:2799 train loss:3.639295 +step:2800 train loss:3.683701 +step:2801 train loss:3.721065 +step:2802 train loss:3.746233 +step:2803 train loss:3.721898 +step:2804 train loss:3.653506 +step:2805 train loss:3.695154 +step:2806 train loss:3.686382 +step:2807 train loss:3.717846 +step:2808 train loss:3.655874 +step:2809 train loss:3.729442 +step:2810 train loss:3.715523 +step:2811 train loss:3.705163 +step:2812 train loss:3.754857 +step:2813 train loss:3.720489 +step:2814 train loss:3.709155 +step:2815 train loss:3.724254 +step:2816 train loss:3.728303 +step:2817 train loss:3.659884 +step:2818 train loss:3.766649 +step:2819 train loss:3.690982 +step:2820 train loss:3.687070 +step:2821 train loss:3.664120 +step:2822 train loss:3.708732 +step:2823 train loss:3.659364 +step:2824 train loss:3.558648 +step:2825 train loss:3.701150 +step:2826 train loss:3.697873 +step:2827 train loss:3.727975 +step:2828 train loss:3.718114 +step:2829 train loss:3.703138 +step:2830 train loss:3.734367 +step:2831 train loss:3.675956 +step:2832 train loss:3.648276 +step:2833 train loss:3.705501 +step:2834 train loss:3.654512 +step:2835 train loss:3.691101 +step:2836 train loss:3.698187 +step:2837 train loss:3.697453 +step:2838 train loss:3.639397 +step:2839 train loss:3.735874 +step:2840 train loss:3.694932 +step:2841 train loss:3.776339 +step:2842 train loss:3.720927 +step:2843 train loss:3.711066 +step:2844 train loss:3.739747 +step:2845 train loss:3.692719 +step:2846 train loss:3.644360 
+step:2847 train loss:3.737318 +step:2848 train loss:3.692083 +step:2849 train loss:3.682087 +step:2850 train loss:3.739843 +step:2851 train loss:3.694885 +step:2852 train loss:3.776834 +step:2853 train loss:3.687609 +step:2854 train loss:3.635879 +step:2855 train loss:3.709039 +step:2856 train loss:3.627211 +step:2857 train loss:3.738890 +step:2858 train loss:3.693975 +step:2859 train loss:3.679085 +step:2860 train loss:3.672631 +step:2861 train loss:3.651086 +step:2862 train loss:3.684348 +step:2863 train loss:3.667310 +step:2864 train loss:3.672800 +step:2865 train loss:3.751634 +step:2866 train loss:3.760832 +step:2867 train loss:3.701912 +step:2868 train loss:3.701656 +step:2869 train loss:3.662073 +step:2870 train loss:3.745448 +step:2871 train loss:3.747131 +step:2872 train loss:3.708093 +step:2873 train loss:3.715480 +step:2874 train loss:3.691985 +step:2875 train loss:3.647334 +step:2876 train loss:3.690386 +step:2877 train loss:3.676287 +step:2878 train loss:3.686635 +step:2879 train loss:3.659232 +step:2880 train loss:3.672248 +step:2881 train loss:3.668051 +step:2882 train loss:3.599209 +step:2883 train loss:3.685888 +step:2884 train loss:3.757692 +step:2885 train loss:3.651751 +step:2886 train loss:3.699443 +step:2887 train loss:3.725379 +step:2888 train loss:3.697058 +step:2889 train loss:3.681448 +step:2890 train loss:3.654983 +step:2891 train loss:3.693651 +step:2892 train loss:3.699728 +step:2893 train loss:3.680170 +step:2894 train loss:3.654216 +step:2895 train loss:3.705113 +step:2896 train loss:3.748863 +step:2897 train loss:3.725849 +step:2898 train loss:3.865493 +step:2899 train loss:3.615393 +step:2900 train loss:3.691477 +step:2901 train loss:3.641421 +step:2902 train loss:3.642145 +step:2903 train loss:3.658538 +step:2904 train loss:3.685079 +step:2905 train loss:3.744826 +step:2906 train loss:3.718176 +step:2907 train loss:3.891536 +step:2908 train loss:3.635932 +step:2909 train loss:3.716135 +step:2910 train loss:3.687693 +step:2911 train loss:3.715568 +step:2912 train loss:3.672345 +step:2913 train loss:3.707392 +step:2914 train loss:3.734462 +step:2915 train loss:3.731156 +step:2916 train loss:3.685941 +step:2917 train loss:3.725237 +step:2918 train loss:3.714188 +step:2919 train loss:3.660149 +step:2920 train loss:3.711902 +step:2921 train loss:3.665910 +step:2922 train loss:3.689354 +step:2923 train loss:3.756436 +step:2924 train loss:3.692699 +step:2925 train loss:3.645302 +step:2926 train loss:3.737327 +step:2927 train loss:3.644432 +step:2928 train loss:3.613384 +step:2929 train loss:3.630324 +step:2930 train loss:3.649841 +step:2931 train loss:3.805811 +step:2932 train loss:3.722551 +step:2933 train loss:3.687502 +step:2934 train loss:3.682691 +step:2935 train loss:3.703163 +step:2936 train loss:3.650981 +step:2937 train loss:3.671751 +step:2938 train loss:3.688927 +step:2939 train loss:3.764492 +step:2940 train loss:3.662221 +step:2941 train loss:3.699198 +step:2942 train loss:3.659886 +step:2943 train loss:3.940057 +step:2944 train loss:3.763981 +step:2945 train loss:3.721873 +step:2946 train loss:3.733788 +step:2947 train loss:3.693660 +step:2948 train loss:3.649921 +step:2949 train loss:3.736663 +step:2950 train loss:3.696902 +step:2951 train loss:3.589455 +step:2952 train loss:3.663197 +step:2953 train loss:3.572178 +step:2954 train loss:3.666764 +step:2955 train loss:3.731799 +step:2956 train loss:3.678770 +step:2957 train loss:3.686450 +step:2958 train loss:3.637918 +step:2959 train loss:3.661869 +step:2960 train loss:3.753909 +step:2961 train 
loss:3.616613 +step:2962 train loss:3.694775 +step:2963 train loss:3.691748 +step:2964 train loss:3.671259 +step:2965 train loss:3.699175 +step:2966 train loss:3.671843 +step:2967 train loss:3.670751 +step:2968 train loss:3.647771 +step:2969 train loss:3.653003 +step:2970 train loss:3.722048 +step:2971 train loss:3.652592 +step:2972 train loss:3.635364 +step:2973 train loss:3.630641 +step:2974 train loss:3.670728 +step:2975 train loss:3.635904 +step:2976 train loss:3.676836 +step:2977 train loss:3.665864 +step:2978 train loss:3.749503 +step:2979 train loss:3.729814 +step:2980 train loss:3.742000 +step:2981 train loss:3.690602 +step:2982 train loss:3.679572 +step:2983 train loss:3.634831 +step:2984 train loss:3.608457 +step:2985 train loss:3.721746 +step:2986 train loss:3.617606 +step:2987 train loss:3.744872 +step:2988 train loss:3.672158 +step:2989 train loss:3.701796 +step:2990 train loss:3.653298 +step:2991 train loss:3.723751 +step:2992 train loss:3.716664 +step:2993 train loss:3.680327 +step:2994 train loss:3.671140 +step:2995 train loss:3.741111 +step:2996 train loss:3.666821 +step:2997 train loss:3.572345 +step:2998 train loss:3.689720 +step:2999 train loss:3.733868 +step:3000 validation loss:3.609245 total_sharp:5.8448e-03 L1_sharp:2.0014e-02 L2_sharp:7.6429e-03 L3_sharp:1.3286e-02 L4_sharp:5.9237e-03 L5_sharp:6.8802e-03 L6_sharp:8.6786e-03 L7_sharp:9.4786e-03 L8_sharp:1.0094e-02 L9_sharp:7.1757e-03 L10_sharp:5.8978e-03 L11_sharp:6.1621e-03 L12_sharp:8.4524e-03 total_fnorm:1.3883e+00 total_l1_linf:9.4802e+03 total_spectral:1.3883e+00 L1_fnorm:1.2043e-01 L2_fnorm:1.1473e-01 L3_fnorm:1.1445e-01 L4_fnorm:1.1747e-01 L5_fnorm:1.1985e-01 L6_fnorm:1.2089e-01 L7_fnorm:1.2085e-01 L8_fnorm:1.2089e-01 L9_fnorm:1.2083e-01 L10_fnorm:1.2083e-01 L11_fnorm:1.2067e-01 L12_fnorm:1.2080e-01 L1_l1linf:2.9084e-01 L2_l1linf:3.3323e-01 L3_l1linf:3.5767e-01 L4_l1linf:3.5293e-01 L5_l1linf:3.1694e-01 L6_l1linf:3.0799e-01 L7_l1linf:2.8815e-01 L8_l1linf:3.1282e-01 L9_l1linf:3.3475e-01 L10_l1linf:3.6281e-01 L11_l1linf:3.9311e-01 L12_l1linf:3.8624e-01 L1_spectral:6.6142e-03 L2_spectral:7.6120e-03 L3_spectral:8.0303e-03 L4_spectral:8.0560e-03 L5_spectral:7.1968e-03 L6_spectral:6.9407e-03 L7_spectral:6.4610e-03 L8_spectral:7.0808e-03 L9_spectral:7.5133e-03 L10_spectral:8.1271e-03 L11_spectral:8.7864e-03 L12_spectral:8.6322e-03 ip_v_neg_g:7.2231e-03 cos_v_neg_g:2.3925e-03 v_norm:1.3883e+00 g_norm:2.1746e+00 hv_norm:5.2982e-01 cos_v_hv:1.5315e-02 hg_norm:4.3541e+01 cos_g_hg:4.6324e-01 v_par:7.3281e-05 v_perp:1.3883e+00 L1_cos_v_neg_g:7.8454e-03 L1_v_norm:1.2043e-01 L2_cos_v_neg_g:7.8311e-03 L2_v_norm:1.1473e-01 L3_cos_v_neg_g:6.9548e-03 L3_v_norm:1.1445e-01 L4_cos_v_neg_g:6.1050e-03 L4_v_norm:1.1747e-01 L5_cos_v_neg_g:7.7579e-03 L5_v_norm:1.1985e-01 L6_cos_v_neg_g:6.5462e-03 L6_v_norm:1.2089e-01 L7_cos_v_neg_g:8.6498e-03 L7_v_norm:1.2085e-01 L8_cos_v_neg_g:1.0201e-02 L8_v_norm:1.2089e-01 L9_cos_v_neg_g:9.5393e-03 L9_v_norm:1.2083e-01 L10_cos_v_neg_g:8.4389e-03 L10_v_norm:1.2083e-01 L11_cos_v_neg_g:7.0699e-03 L11_v_norm:1.2067e-01 L12_cos_v_neg_g:6.4005e-03 L12_v_norm:1.2080e-01 +step:3000 train loss:3.625022 +step:3001 train loss:3.673069 +step:3002 train loss:3.672027 +step:3003 train loss:3.670471 +step:3004 train loss:3.696773 +step:3005 train loss:3.591656 +step:3006 train loss:3.644991 +step:3007 train loss:3.677989 +step:3008 train loss:3.723642 +step:3009 train loss:3.677850 +step:3010 train loss:3.700448 +step:3011 train loss:3.682009 +step:3012 train loss:3.659667 +step:3013 train loss:3.702256 +step:3014 
train loss:3.662396 +step:3015 train loss:3.658604 +step:3016 train loss:3.675972 +step:3017 train loss:3.701733 +step:3018 train loss:3.630766 +step:3019 train loss:3.667732 +step:3020 train loss:3.691083 +step:3021 train loss:3.652315 +step:3022 train loss:3.743687 +step:3023 train loss:3.693474 +step:3024 train loss:3.678967 +step:3025 train loss:3.693965 +step:3026 train loss:3.660066 +step:3027 train loss:3.640726 +step:3028 train loss:3.691844 +step:3029 train loss:3.680373 +step:3030 train loss:3.653327 +step:3031 train loss:3.636046 +step:3032 train loss:3.628407 +step:3033 train loss:3.650106 +step:3034 train loss:3.696827 +step:3035 train loss:3.674984 +step:3036 train loss:3.639297 +step:3037 train loss:3.601101 +step:3038 train loss:3.715431 +step:3039 train loss:3.595416 +step:3040 train loss:3.579882 +step:3041 train loss:3.709092 +step:3042 train loss:3.645145 +step:3043 train loss:3.705749 +step:3044 train loss:3.602642 +step:3045 train loss:3.648408 +step:3046 train loss:3.625127 +step:3047 train loss:3.650100 +step:3048 train loss:3.621364 +step:3049 train loss:3.696580 +step:3050 train loss:3.584404 +step:3051 train loss:3.598921 +step:3052 train loss:3.619236 +step:3053 train loss:3.693925 +step:3054 train loss:3.762337 +step:3055 train loss:3.604643 +step:3056 train loss:3.630645 +step:3057 train loss:3.668030 +step:3058 train loss:3.617243 +step:3059 train loss:3.642808 +step:3060 train loss:3.643925 +step:3061 train loss:3.626926 +step:3062 train loss:3.678886 +step:3063 train loss:3.662812 +step:3064 train loss:3.688581 +step:3065 train loss:3.703124 +step:3066 train loss:3.602087 +step:3067 train loss:3.650174 +step:3068 train loss:3.701303 +step:3069 train loss:3.717846 +step:3070 train loss:3.644767 +step:3071 train loss:3.665422 +step:3072 train loss:3.664677 +step:3073 train loss:3.700807 +step:3074 train loss:3.636597 +step:3075 train loss:3.675011 +step:3076 train loss:3.609447 +step:3077 train loss:3.607846 +step:3078 train loss:3.638971 +step:3079 train loss:3.687349 +step:3080 train loss:3.678346 +step:3081 train loss:3.719170 +step:3082 train loss:3.695758 +step:3083 train loss:3.622889 +step:3084 train loss:3.707753 +step:3085 train loss:3.633213 +step:3086 train loss:3.696862 +step:3087 train loss:3.663574 +step:3088 train loss:3.743747 +step:3089 train loss:3.621373 +step:3090 train loss:3.690887 +step:3091 train loss:3.619470 +step:3092 train loss:3.636229 +step:3093 train loss:3.662562 +step:3094 train loss:3.648190 +step:3095 train loss:3.731996 +step:3096 train loss:3.659694 +step:3097 train loss:3.670180 +step:3098 train loss:3.650550 +step:3099 train loss:3.658369 +step:3100 train loss:3.684365 +step:3101 train loss:3.766870 +step:3102 train loss:3.689147 +step:3103 train loss:3.613889 +step:3104 train loss:3.697478 +step:3105 train loss:3.668944 +step:3106 train loss:3.664802 +step:3107 train loss:3.647947 +step:3108 train loss:3.620343 +step:3109 train loss:3.679852 +step:3110 train loss:3.604468 +step:3111 train loss:3.642982 +step:3112 train loss:3.577621 +step:3113 train loss:3.699112 +step:3114 train loss:3.613440 +step:3115 train loss:3.653571 +step:3116 train loss:3.535817 +step:3117 train loss:3.548065 +step:3118 train loss:3.653527 +step:3119 train loss:3.658512 +step:3120 train loss:3.662207 +step:3121 train loss:3.602662 +step:3122 train loss:3.689842 +step:3123 train loss:3.606317 +step:3124 train loss:3.671612 +step:3125 train loss:3.682009 +step:3126 train loss:3.789792 +step:3127 train loss:3.637723 +step:3128 train loss:3.664438 
+step:3129 train loss:3.646166 +step:3130 train loss:3.625965 +step:3131 train loss:3.703250 +step:3132 train loss:3.686932 +step:3133 train loss:3.659579 +step:3134 train loss:3.553723 +step:3135 train loss:3.650626 +step:3136 train loss:3.618635 +step:3137 train loss:3.757963 +step:3138 train loss:3.655632 +step:3139 train loss:3.636091 +step:3140 train loss:3.657176 +step:3141 train loss:3.661665 +step:3142 train loss:3.596903 +step:3143 train loss:3.678968 +step:3144 train loss:3.629241 +step:3145 train loss:3.615286 +step:3146 train loss:3.630583 +step:3147 train loss:3.739439 +step:3148 train loss:3.642324 +step:3149 train loss:3.699929 +step:3150 train loss:3.685567 +step:3151 train loss:3.651664 +step:3152 train loss:3.651862 +step:3153 train loss:3.609290 +step:3154 train loss:3.693752 +step:3155 train loss:3.632162 +step:3156 train loss:3.685374 +step:3157 train loss:3.687368 +step:3158 train loss:3.659596 +step:3159 train loss:3.597756 +step:3160 train loss:3.647429 +step:3161 train loss:3.621716 +step:3162 train loss:3.674236 +step:3163 train loss:3.658697 +step:3164 train loss:3.635375 +step:3165 train loss:3.652917 +step:3166 train loss:3.690987 +step:3167 train loss:3.649461 +step:3168 train loss:3.727402 +step:3169 train loss:3.643802 +step:3170 train loss:3.624789 +step:3171 train loss:3.616503 +step:3172 train loss:3.620268 +step:3173 train loss:3.566889 +step:3174 train loss:3.678794 +step:3175 train loss:3.646022 +step:3176 train loss:3.655107 +step:3177 train loss:3.624881 +step:3178 train loss:3.600990 +step:3179 train loss:3.682044 +step:3180 train loss:3.612021 +step:3181 train loss:3.692121 +step:3182 train loss:3.695900 +step:3183 train loss:3.639402 +step:3184 train loss:3.638951 +step:3185 train loss:3.696168 +step:3186 train loss:3.655523 +step:3187 train loss:3.673087 +step:3188 train loss:3.716319 +step:3189 train loss:3.659950 +step:3190 train loss:3.615714 +step:3191 train loss:3.619859 +step:3192 train loss:3.587667 +step:3193 train loss:3.663429 +step:3194 train loss:3.629732 +step:3195 train loss:3.612125 +step:3196 train loss:3.661969 +step:3197 train loss:3.626487 +step:3198 train loss:3.669082 +step:3199 train loss:3.641666 +step:3200 train loss:3.648127 +step:3201 train loss:3.609854 +step:3202 train loss:3.676426 +step:3203 train loss:3.736633 +step:3204 train loss:3.700920 +step:3205 train loss:3.546596 +step:3206 train loss:3.828584 +step:3207 train loss:3.584487 +step:3208 train loss:3.652946 +step:3209 train loss:3.643878 +step:3210 train loss:3.625196 +step:3211 train loss:3.654456 +step:3212 train loss:3.664700 +step:3213 train loss:3.603240 +step:3214 train loss:3.708884 +step:3215 train loss:3.713361 +step:3216 train loss:3.584700 +step:3217 train loss:3.665646 +step:3218 train loss:3.704303 +step:3219 train loss:3.621409 +step:3220 train loss:3.691085 +step:3221 train loss:3.603436 +step:3222 train loss:3.648309 +step:3223 train loss:3.664428 +step:3224 train loss:3.673310 +step:3225 train loss:3.598792 +step:3226 train loss:3.635006 +step:3227 train loss:3.661553 +step:3228 train loss:3.657097 +step:3229 train loss:3.690056 +step:3230 train loss:3.702292 +step:3231 train loss:3.642154 +step:3232 train loss:3.651721 +step:3233 train loss:3.623472 +step:3234 train loss:3.613500 +step:3235 train loss:3.611942 +step:3236 train loss:3.633399 +step:3237 train loss:3.634612 +step:3238 train loss:3.646601 +step:3239 train loss:3.552381 +step:3240 train loss:3.665517 +step:3241 train loss:3.661611 +step:3242 train loss:3.719251 +step:3243 train 
loss:3.657011 +step:3244 train loss:3.674898 +step:3245 train loss:3.578754 +step:3246 train loss:3.705129 +step:3247 train loss:3.649997 +step:3248 train loss:3.670167 +step:3249 train loss:3.614737 +step:3250 validation loss:3.582214 +step:3250 train loss:3.614393 +step:3251 train loss:3.727745 +step:3252 train loss:3.653641 +step:3253 train loss:3.656764 +step:3254 train loss:3.722739 +step:3255 train loss:3.665351 +step:3256 train loss:3.659395 +step:3257 train loss:3.644227 +step:3258 train loss:3.573330 +step:3259 train loss:3.553567 +step:3260 train loss:3.666483 +step:3261 train loss:3.645514 +step:3262 train loss:3.636203 +step:3263 train loss:3.620157 +step:3264 train loss:3.732829 +step:3265 train loss:3.640705 +step:3266 train loss:3.670556 +step:3267 train loss:3.631930 +step:3268 train loss:3.635672 +step:3269 train loss:3.648829 +step:3270 train loss:3.679002 +step:3271 train loss:3.640319 +step:3272 train loss:3.623912 +step:3273 train loss:3.627519 +step:3274 train loss:3.762565 +step:3275 train loss:3.635292 +step:3276 train loss:3.702836 +step:3277 train loss:3.641051 +step:3278 train loss:3.617433 +step:3279 train loss:3.641716 +step:3280 train loss:3.671945 +step:3281 train loss:3.597361 +step:3282 train loss:3.669735 +step:3283 train loss:3.636619 +step:3284 train loss:3.603841 +step:3285 train loss:3.618782 +step:3286 train loss:3.649721 +step:3287 train loss:3.586131 +step:3288 train loss:3.671318 +step:3289 train loss:3.613168 +step:3290 train loss:3.644729 +step:3291 train loss:3.600968 +step:3292 train loss:3.627351 +step:3293 train loss:3.669197 +step:3294 train loss:3.685496 +step:3295 train loss:3.592889 +step:3296 train loss:3.653338 +step:3297 train loss:3.609758 +step:3298 train loss:3.612450 +step:3299 train loss:3.739458 +step:3300 train loss:3.585071 +step:3301 train loss:3.656188 +step:3302 train loss:3.634216 +step:3303 train loss:3.643254 +step:3304 train loss:3.611037 +step:3305 train loss:3.700316 +step:3306 train loss:3.634670 +step:3307 train loss:3.655188 +step:3308 train loss:3.613927 +step:3309 train loss:3.669221 +step:3310 train loss:3.587941 +step:3311 train loss:3.642015 +step:3312 train loss:3.610992 +step:3313 train loss:3.646603 +step:3314 train loss:3.643360 +step:3315 train loss:3.721286 +step:3316 train loss:3.572980 +step:3317 train loss:3.662194 +step:3318 train loss:3.675593 +step:3319 train loss:3.602167 +step:3320 train loss:3.764548 +step:3321 train loss:3.665900 +step:3322 train loss:3.666028 +step:3323 train loss:3.770850 +step:3324 train loss:3.690797 +step:3325 train loss:3.659128 +step:3326 train loss:3.651776 +step:3327 train loss:3.663153 +step:3328 train loss:3.644301 +step:3329 train loss:3.644686 +step:3330 train loss:3.634513 +step:3331 train loss:3.679597 +step:3332 train loss:3.701648 +step:3333 train loss:3.669359 +step:3334 train loss:3.599995 +step:3335 train loss:3.616174 +step:3336 train loss:3.647382 +step:3337 train loss:3.648228 +step:3338 train loss:3.636767 +step:3339 train loss:3.631224 +step:3340 train loss:3.668738 +step:3341 train loss:3.617490 +step:3342 train loss:3.665276 +step:3343 train loss:3.602618 +step:3344 train loss:3.664547 +step:3345 train loss:3.612692 +step:3346 train loss:3.624891 +step:3347 train loss:3.631797 +step:3348 train loss:3.646693 +step:3349 train loss:3.635796 +step:3350 train loss:3.661036 +step:3351 train loss:3.714960 +step:3352 train loss:3.656998 +step:3353 train loss:3.755145 +step:3354 train loss:3.602893 +step:3355 train loss:3.705515 +step:3356 train loss:3.656699 
+step:3357 train loss:3.667961 +step:3358 train loss:3.606609 +step:3359 train loss:3.639915 +step:3360 train loss:3.630780 +step:3361 train loss:3.634772 +step:3362 train loss:3.623667 +step:3363 train loss:3.624505 +step:3364 train loss:3.604327 +step:3365 train loss:3.641967 +step:3366 train loss:3.675159 +step:3367 train loss:3.629092 +step:3368 train loss:3.723504 +step:3369 train loss:3.634715 +step:3370 train loss:3.718046 +step:3371 train loss:3.680338 +step:3372 train loss:3.644348 +step:3373 train loss:3.655788 +step:3374 train loss:3.704356 +step:3375 train loss:3.636849 +step:3376 train loss:3.643457 +step:3377 train loss:3.632390 +step:3378 train loss:3.608333 +step:3379 train loss:3.686474 +step:3380 train loss:3.667506 +step:3381 train loss:3.650091 +step:3382 train loss:3.670391 +step:3383 train loss:3.677819 +step:3384 train loss:3.609818 +step:3385 train loss:3.656371 +step:3386 train loss:3.638020 +step:3387 train loss:3.713436 +step:3388 train loss:3.613977 +step:3389 train loss:3.822738 +step:3390 train loss:3.547020 +step:3391 train loss:3.631557 +step:3392 train loss:3.615525 +step:3393 train loss:3.646426 +step:3394 train loss:3.605910 +step:3395 train loss:3.676346 +step:3396 train loss:3.588128 +step:3397 train loss:3.668291 +step:3398 train loss:3.634147 +step:3399 train loss:3.653487 +step:3400 train loss:3.600636 +step:3401 train loss:3.638345 +step:3402 train loss:3.798630 +step:3403 train loss:3.686027 +step:3404 train loss:3.800881 +step:3405 train loss:3.657501 +step:3406 train loss:3.630598 +step:3407 train loss:3.631524 +step:3408 train loss:3.609752 +step:3409 train loss:3.576641 +step:3410 train loss:3.610748 +step:3411 train loss:3.679502 +step:3412 train loss:3.600423 +step:3413 train loss:3.594270 +step:3414 train loss:3.631523 +step:3415 train loss:3.606556 +step:3416 train loss:3.611400 +step:3417 train loss:3.693159 +step:3418 train loss:3.689584 +step:3419 train loss:3.649366 +step:3420 train loss:3.626785 +step:3421 train loss:3.657772 +step:3422 train loss:3.673357 +step:3423 train loss:3.694512 +step:3424 train loss:3.575394 +step:3425 train loss:3.600881 +step:3426 train loss:3.593759 +step:3427 train loss:3.659294 +step:3428 train loss:3.576232 +step:3429 train loss:3.643413 +step:3430 train loss:3.610307 +step:3431 train loss:3.663717 +step:3432 train loss:3.648624 +step:3433 train loss:3.610039 +step:3434 train loss:3.692919 +step:3435 train loss:3.632721 +step:3436 train loss:3.725769 +step:3437 train loss:3.552861 +step:3438 train loss:3.659131 +step:3439 train loss:3.634747 +step:3440 train loss:3.730066 +step:3441 train loss:3.622833 +step:3442 train loss:3.690251 +step:3443 train loss:3.622046 +step:3444 train loss:3.642812 +step:3445 train loss:3.688498 +step:3446 train loss:3.594706 +step:3447 train loss:3.670288 +step:3448 train loss:3.624375 +step:3449 train loss:3.656180 +step:3450 train loss:3.560445 +step:3451 train loss:3.680142 +step:3452 train loss:3.629912 +step:3453 train loss:3.683049 +step:3454 train loss:3.708472 +step:3455 train loss:3.765082 +step:3456 train loss:3.703568 +step:3457 train loss:3.700551 +step:3458 train loss:3.626421 +step:3459 train loss:3.634927 +step:3460 train loss:3.580869 +step:3461 train loss:3.643568 +step:3462 train loss:3.644996 +step:3463 train loss:3.617576 +step:3464 train loss:3.672072 +step:3465 train loss:3.600105 +step:3466 train loss:3.670332 +step:3467 train loss:3.626041 +step:3468 train loss:3.641143 +step:3469 train loss:3.650749 +step:3470 train loss:3.633610 +step:3471 train 
loss:3.673214 +step:3472 train loss:3.556942 +step:3473 train loss:3.684893 +step:3474 train loss:3.575262 +step:3475 train loss:3.662161 +step:3476 train loss:3.627472 +step:3477 train loss:3.651113 +step:3478 train loss:3.624276 +step:3479 train loss:3.654603 +step:3480 train loss:3.675284 +step:3481 train loss:3.653376 +step:3482 train loss:3.637462 +step:3483 train loss:3.776383 +step:3484 train loss:3.619243 +step:3485 train loss:3.606355 +step:3486 train loss:3.660515 +step:3487 train loss:3.699125 +step:3488 train loss:3.605280 +step:3489 train loss:3.656505 +step:3490 train loss:3.621772 +step:3491 train loss:3.666174 +step:3492 train loss:3.697417 +step:3493 train loss:3.669576 +step:3494 train loss:3.663573 +step:3495 train loss:3.641487 +step:3496 train loss:3.608017 +step:3497 train loss:3.718189 +step:3498 train loss:3.663650 +step:3499 train loss:3.595035 +step:3500 validation loss:3.564330 total_sharp:5.1081e-03 L1_sharp:1.7635e-02 L2_sharp:8.7372e-03 L3_sharp:1.1106e-02 L4_sharp:5.7053e-03 L5_sharp:7.5087e-03 L6_sharp:9.5247e-03 L7_sharp:1.0649e-02 L8_sharp:9.5864e-03 L9_sharp:6.7032e-03 L10_sharp:5.0836e-03 L11_sharp:4.6000e-03 L12_sharp:6.2474e-03 total_fnorm:1.3904e+00 total_l1_linf:9.4910e+03 total_spectral:1.3904e+00 L1_fnorm:1.2015e-01 L2_fnorm:1.1488e-01 L3_fnorm:1.1529e-01 L4_fnorm:1.1784e-01 L5_fnorm:1.2003e-01 L6_fnorm:1.2105e-01 L7_fnorm:1.2100e-01 L8_fnorm:1.2099e-01 L9_fnorm:1.2109e-01 L10_fnorm:1.2117e-01 L11_fnorm:1.2104e-01 L12_fnorm:1.2080e-01 L1_l1linf:2.8653e-01 L2_l1linf:3.5308e-01 L3_l1linf:3.4597e-01 L4_l1linf:3.5535e-01 L5_l1linf:3.3222e-01 L6_l1linf:3.1514e-01 L7_l1linf:3.2183e-01 L8_l1linf:2.9614e-01 L9_l1linf:3.1098e-01 L10_l1linf:3.3789e-01 L11_l1linf:3.5855e-01 L12_l1linf:3.5226e-01 L1_spectral:6.5070e-03 L2_spectral:7.9100e-03 L3_spectral:7.7633e-03 L4_spectral:8.0062e-03 L5_spectral:7.4488e-03 L6_spectral:7.0648e-03 L7_spectral:7.2049e-03 L8_spectral:6.6623e-03 L9_spectral:7.0411e-03 L10_spectral:7.6296e-03 L11_spectral:8.0576e-03 L12_spectral:8.0065e-03 ip_v_neg_g:4.9145e-03 cos_v_neg_g:1.5492e-03 v_norm:1.3904e+00 g_norm:2.2816e+00 hv_norm:5.6459e-01 cos_v_hv:1.2579e-02 hg_norm:6.7949e+01 cos_g_hg:5.1628e-01 v_par:5.9558e-05 v_perp:1.3904e+00 L1_cos_v_neg_g:6.7631e-03 L1_v_norm:1.2015e-01 L2_cos_v_neg_g:3.6741e-03 L2_v_norm:1.1488e-01 L3_cos_v_neg_g:6.2088e-03 L3_v_norm:1.1529e-01 L4_cos_v_neg_g:4.8011e-03 L4_v_norm:1.1784e-01 L5_cos_v_neg_g:5.4931e-03 L5_v_norm:1.2003e-01 L6_cos_v_neg_g:5.5203e-03 L6_v_norm:1.2105e-01 L7_cos_v_neg_g:5.9774e-03 L7_v_norm:1.2100e-01 L8_cos_v_neg_g:5.2580e-03 L8_v_norm:1.2099e-01 L9_cos_v_neg_g:4.6067e-03 L9_v_norm:1.2109e-01 L10_cos_v_neg_g:5.2232e-03 L10_v_norm:1.2117e-01 L11_cos_v_neg_g:4.1725e-03 L11_v_norm:1.2104e-01 L12_cos_v_neg_g:4.5302e-03 L12_v_norm:1.2080e-01 +step:3500 train loss:3.615439 +step:3501 train loss:3.742529 +step:3502 train loss:3.723767 +step:3503 train loss:3.671621 +step:3504 train loss:3.627999 +step:3505 train loss:3.638829 +step:3506 train loss:3.542616 +step:3507 train loss:3.659067 +step:3508 train loss:3.600172 +step:3509 train loss:3.673167 +step:3510 train loss:3.601961 +step:3511 train loss:3.641697 +step:3512 train loss:3.776359 +step:3513 train loss:3.599734 +step:3514 train loss:3.613561 +step:3515 train loss:3.864734 +step:3516 train loss:3.657513 +step:3517 train loss:3.616868 +step:3518 train loss:3.621180 +step:3519 train loss:3.613612 +step:3520 train loss:3.645502 +step:3521 train loss:3.633592 +step:3522 train loss:3.549001 +step:3523 train loss:3.649231 +step:3524 
train loss:3.631781 +step:3525 train loss:3.623242 +step:3526 train loss:3.644736 +step:3527 train loss:3.595788 +step:3528 train loss:3.647592 +step:3529 train loss:3.625672 +step:3530 train loss:3.621762 +step:3531 train loss:3.610635 +step:3532 train loss:3.798996 +step:3533 train loss:3.618077 +step:3534 train loss:3.634805 +step:3535 train loss:3.611882 +step:3536 train loss:3.607615 +step:3537 train loss:3.622531 +step:3538 train loss:3.649339 +step:3539 train loss:3.599984 +step:3540 train loss:3.663577 +step:3541 train loss:3.631645 +step:3542 train loss:3.639879 +step:3543 train loss:3.564011 +step:3544 train loss:3.579212 +step:3545 train loss:3.585174 +step:3546 train loss:3.650175 +step:3547 train loss:3.658474 +step:3548 train loss:3.633189 +step:3549 train loss:3.631155 +step:3550 train loss:3.619467 +step:3551 train loss:3.647542 +step:3552 train loss:3.542888 +step:3553 train loss:3.661879 +step:3554 train loss:3.657040 +step:3555 train loss:3.639386 +step:3556 train loss:3.667405 +step:3557 train loss:3.652368 +step:3558 train loss:3.627249 +step:3559 train loss:3.573599 +step:3560 train loss:3.666000 +step:3561 train loss:3.658498 +step:3562 train loss:3.830504 +step:3563 train loss:3.688792 +step:3564 train loss:3.648466 +step:3565 train loss:3.649472 +step:3566 train loss:3.624853 +step:3567 train loss:3.563318 +step:3568 train loss:3.594039 +step:3569 train loss:3.676538 +step:3570 train loss:3.699969 +step:3571 train loss:3.678150 +step:3572 train loss:3.669646 +step:3573 train loss:3.627777 +step:3574 train loss:3.627611 +step:3575 train loss:3.617034 +step:3576 train loss:3.598451 +step:3577 train loss:3.611359 +step:3578 train loss:3.692850 +step:3579 train loss:3.600661 +step:3580 train loss:3.684589 +step:3581 train loss:3.625170 +step:3582 train loss:3.683877 +step:3583 train loss:3.617640 +step:3584 train loss:3.597382 +step:3585 train loss:3.639159 +step:3586 train loss:3.594013 +step:3587 train loss:3.686083 +step:3588 train loss:3.820883 +step:3589 train loss:3.648016 +step:3590 train loss:3.631217 +step:3591 train loss:3.642005 +step:3592 train loss:3.600241 +step:3593 train loss:3.576778 +step:3594 train loss:3.629585 +step:3595 train loss:3.600987 +step:3596 train loss:3.680894 +step:3597 train loss:3.655946 +step:3598 train loss:3.609866 +step:3599 train loss:3.661457 +step:3600 train loss:3.596839 +step:3601 train loss:3.618951 +step:3602 train loss:3.602814 +step:3603 train loss:3.620497 +step:3604 train loss:3.645058 +step:3605 train loss:3.753561 +step:3606 train loss:3.649028 +step:3607 train loss:3.633416 +step:3608 train loss:3.650376 +step:3609 train loss:3.633520 +step:3610 train loss:3.602825 +step:3611 train loss:3.605260 +step:3612 train loss:3.674948 +step:3613 train loss:3.642905 +step:3614 train loss:3.591699 +step:3615 train loss:3.627854 +step:3616 train loss:3.582508 +step:3617 train loss:3.657639 +step:3618 train loss:3.612154 +step:3619 train loss:3.601288 +step:3620 train loss:3.615805 +step:3621 train loss:3.577204 +step:3622 train loss:3.685234 +step:3623 train loss:3.673444 +step:3624 train loss:3.646094 +step:3625 train loss:3.623170 +step:3626 train loss:3.633751 +step:3627 train loss:3.630758 +step:3628 train loss:3.615526 +step:3629 train loss:3.620920 +step:3630 train loss:3.699875 +step:3631 train loss:3.629286 +step:3632 train loss:3.655388 +step:3633 train loss:3.617152 +step:3634 train loss:3.618945 +step:3635 train loss:3.611296 +step:3636 train loss:3.681306 +step:3637 train loss:3.757107 +step:3638 train loss:3.671925 
+step:3639 train loss:3.658917 +step:3640 train loss:3.667505 +step:3641 train loss:3.704921 +step:3642 train loss:3.601374 +step:3643 train loss:3.768123 +step:3644 train loss:3.661819 +step:3645 train loss:3.631238 +step:3646 train loss:3.752112 +step:3647 train loss:3.643643 +step:3648 train loss:3.636140 +step:3649 train loss:3.585163 +step:3650 train loss:3.625169 +step:3651 train loss:3.623951 +step:3652 train loss:3.608643 +step:3653 train loss:3.552150 +step:3654 train loss:3.604146 +step:3655 train loss:3.595709 +step:3656 train loss:3.628774 +step:3657 train loss:3.646758 +step:3658 train loss:3.642132 +step:3659 train loss:3.627552 +step:3660 train loss:3.601947 +step:3661 train loss:3.628641 +step:3662 train loss:3.601348 +step:3663 train loss:3.637849 +step:3664 train loss:3.594169 +step:3665 train loss:3.637594 +step:3666 train loss:3.674427 +step:3667 train loss:3.765766 +step:3668 train loss:3.646501 +step:3669 train loss:3.604237 +step:3670 train loss:3.653570 +step:3671 train loss:3.611743 +step:3672 train loss:3.648229 +step:3673 train loss:3.630037 +step:3674 train loss:3.646062 +step:3675 train loss:3.660116 +step:3676 train loss:3.622011 +step:3677 train loss:3.582538 +step:3678 train loss:3.644138 +step:3679 train loss:3.546198 +step:3680 train loss:3.648653 +step:3681 train loss:3.681506 +step:3682 train loss:3.662413 +step:3683 train loss:3.605706 +step:3684 train loss:3.601898 +step:3685 train loss:3.635045 +step:3686 train loss:3.658723 +step:3687 train loss:3.615516 +step:3688 train loss:3.589170 +step:3689 train loss:3.623071 +step:3690 train loss:3.615330 +step:3691 train loss:3.597308 +step:3692 train loss:3.658951 +step:3693 train loss:3.787688 +step:3694 train loss:3.604092 +step:3695 train loss:3.660240 +step:3696 train loss:3.623072 +step:3697 train loss:3.617972 +step:3698 train loss:3.554875 +step:3699 train loss:3.583526 +step:3700 train loss:3.611448 +step:3701 train loss:3.631867 +step:3702 train loss:3.652157 +step:3703 train loss:3.610331 +step:3704 train loss:3.655257 +step:3705 train loss:3.633461 +step:3706 train loss:3.589655 +step:3707 train loss:3.640080 +step:3708 train loss:3.618052 +step:3709 train loss:3.538154 +step:3710 train loss:3.664006 +step:3711 train loss:3.609654 +step:3712 train loss:3.647897 +step:3713 train loss:3.600295 +step:3714 train loss:3.617740 +step:3715 train loss:3.734529 +step:3716 train loss:3.641654 +step:3717 train loss:3.618185 +step:3718 train loss:3.619514 +step:3719 train loss:3.616918 +step:3720 train loss:3.625110 +step:3721 train loss:3.685273 +step:3722 train loss:3.695786 +step:3723 train loss:3.582948 +step:3724 train loss:3.638579 +step:3725 train loss:3.619810 +step:3726 train loss:3.638896 +step:3727 train loss:3.712203 +step:3728 train loss:3.677162 +step:3729 train loss:3.573497 +step:3730 train loss:3.592183 +step:3731 train loss:3.616182 +step:3732 train loss:3.767831 +step:3733 train loss:3.628250 +step:3734 train loss:3.628676 +step:3735 train loss:3.568836 +step:3736 train loss:3.625249 +step:3737 train loss:3.675205 +step:3738 train loss:3.700919 +step:3739 train loss:3.614112 +step:3740 train loss:3.518581 +step:3741 train loss:3.723411 +step:3742 train loss:3.635305 +step:3743 train loss:3.611856 +step:3744 train loss:3.605903 +step:3745 train loss:3.623976 +step:3746 train loss:3.589064 +step:3747 train loss:3.608670 +step:3748 train loss:3.650515 +step:3749 train loss:3.633438 +step:3750 validation loss:3.549334 +step:3750 train loss:3.643424 +step:3751 train loss:3.733038 +step:3752 
train loss:3.677363 +step:3753 train loss:3.583907 +step:3754 train loss:3.638142 +step:3755 train loss:3.814492 +step:3756 train loss:3.594503 +step:3757 train loss:3.589566 +step:3758 train loss:3.619595 +step:3759 train loss:3.566510 +step:3760 train loss:3.562774 +step:3761 train loss:3.617967 +step:3762 train loss:3.609008 +step:3763 train loss:3.612480 +step:3764 train loss:3.600704 +step:3765 train loss:3.601704 +step:3766 train loss:3.571884 +step:3767 train loss:3.653382 +step:3768 train loss:3.597202 +step:3769 train loss:3.861164 +step:3770 train loss:3.649597 +step:3771 train loss:3.656363 +step:3772 train loss:3.616120 +step:3773 train loss:3.610255 +step:3774 train loss:3.612057 +step:3775 train loss:3.610486 +step:3776 train loss:3.608427 +step:3777 train loss:3.572162 +step:3778 train loss:3.589946 +step:3779 train loss:3.572388 +step:3780 train loss:3.654625 +step:3781 train loss:3.618649 +step:3782 train loss:3.540427 +step:3783 train loss:3.647277 +step:3784 train loss:3.656424 +step:3785 train loss:3.565808 +step:3786 train loss:3.675653 +step:3787 train loss:3.584704 +step:3788 train loss:3.598902 +step:3789 train loss:3.506864 +step:3790 train loss:3.622454 +step:3791 train loss:3.641883 +step:3792 train loss:3.613199 +step:3793 train loss:3.614524 +step:3794 train loss:3.640902 +step:3795 train loss:3.609727 +step:3796 train loss:3.627883 +step:3797 train loss:3.603682 +step:3798 train loss:3.613091 +step:3799 train loss:3.619298 +step:3800 train loss:3.527820 +step:3801 train loss:3.645507 +step:3802 train loss:3.571677 +step:3803 train loss:3.653405 +step:3804 train loss:3.664785 +step:3805 train loss:3.625820 +step:3806 train loss:3.642876 +step:3807 train loss:3.659869 +step:3808 train loss:3.619211 +step:3809 train loss:3.631581 +step:3810 train loss:3.631546 +step:3811 train loss:3.618461 +step:3812 train loss:3.618095 +step:3813 train loss:3.576436 +step:3814 train loss:3.619344 +step:3815 train loss:3.620438 +step:3816 train loss:3.637896 +step:3817 train loss:3.657542 +step:3818 train loss:3.629862 +step:3819 train loss:3.642641 +step:3820 train loss:3.641745 +step:3821 train loss:3.597883 +step:3822 train loss:3.681383 +step:3823 train loss:3.575288 +step:3824 train loss:3.590656 +step:3825 train loss:3.595002 +step:3826 train loss:3.694513 +step:3827 train loss:3.686246 +step:3828 train loss:3.573345 +step:3829 train loss:3.595581 +step:3830 train loss:3.654331 +step:3831 train loss:3.589388 +step:3832 train loss:3.648483 +step:3833 train loss:3.592507 +step:3834 train loss:3.555468 +step:3835 train loss:3.598613 +step:3836 train loss:3.574535 +step:3837 train loss:3.641864 +step:3838 train loss:3.596759 +step:3839 train loss:3.641893 +step:3840 train loss:3.650016 +step:3841 train loss:3.603367 +step:3842 train loss:3.628993 +step:3843 train loss:3.643653 +step:3844 train loss:3.617127 +step:3845 train loss:3.639314 +step:3846 train loss:3.678515 +step:3847 train loss:3.576919 +step:3848 train loss:3.583809 +step:3849 train loss:3.601267 +step:3850 train loss:3.618276 +step:3851 train loss:3.760641 +step:3852 train loss:3.736671 +step:3853 train loss:3.632848 +step:3854 train loss:3.594817 +step:3855 train loss:3.646083 +step:3856 train loss:3.569202 +step:3857 train loss:3.630921 +step:3858 train loss:3.545017 +step:3859 train loss:3.590018 +step:3860 train loss:3.657054 +step:3861 train loss:3.633137 +step:3862 train loss:3.569653 +step:3863 train loss:3.618172 +step:3864 train loss:3.590960 +step:3865 train loss:3.627137 +step:3866 train loss:3.646781 
+step:3867 train loss:3.644429 +step:3868 train loss:3.591585 +step:3869 train loss:3.591922 +step:3870 train loss:3.568919 +step:3871 train loss:3.565546 +step:3872 train loss:3.698747 +step:3873 train loss:3.624630 +step:3874 train loss:3.634801 +step:3875 train loss:3.751849 +step:3876 train loss:3.617627 +step:3877 train loss:3.643939 +step:3878 train loss:3.670143 +step:3879 train loss:3.658010 +step:3880 train loss:3.743826 +step:3881 train loss:3.560965 +step:3882 train loss:3.597460 +step:3883 train loss:3.609499 +step:3884 train loss:3.602750 +step:3885 train loss:3.619101 +step:3886 train loss:3.678956 +step:3887 train loss:3.655804 +step:3888 train loss:3.620422 +step:3889 train loss:3.593715 +step:3890 train loss:3.630195 +step:3891 train loss:3.641740 +step:3892 train loss:3.553703 +step:3893 train loss:3.661301 +step:3894 train loss:3.610208 +step:3895 train loss:3.625433 +step:3896 train loss:3.618124 +step:3897 train loss:3.586606 +step:3898 train loss:3.645644 +step:3899 train loss:3.683236 +step:3900 train loss:3.638469 +step:3901 train loss:3.656284 +step:3902 train loss:3.581305 +step:3903 train loss:3.600434 +step:3904 train loss:3.632315 +step:3905 train loss:3.568831 +step:3906 train loss:3.601146 +step:3907 train loss:3.635260 +step:3908 train loss:3.713531 +step:3909 train loss:3.606525 +step:3910 train loss:3.631571 +step:3911 train loss:3.646736 +step:3912 train loss:3.598030 +step:3913 train loss:3.608471 +step:3914 train loss:3.629077 +step:3915 train loss:3.599487 +step:3916 train loss:3.636028 +step:3917 train loss:3.679049 +step:3918 train loss:3.651477 +step:3919 train loss:3.635100 +step:3920 train loss:3.604955 +step:3921 train loss:3.645604 +step:3922 train loss:3.648037 +step:3923 train loss:3.636657 +step:3924 train loss:3.575317 +step:3925 train loss:3.777637 +step:3926 train loss:3.619967 +step:3927 train loss:3.595971 +step:3928 train loss:3.678459 +step:3929 train loss:3.741401 +step:3930 train loss:3.633851 +step:3931 train loss:3.566767 +step:3932 train loss:3.621222 +step:3933 train loss:3.638545 +step:3934 train loss:3.587807 +step:3935 train loss:3.569979 +step:3936 train loss:3.658629 +step:3937 train loss:3.619065 +step:3938 train loss:3.628736 +step:3939 train loss:3.655883 +step:3940 train loss:3.604859 +step:3941 train loss:3.686394 +step:3942 train loss:3.649620 +step:3943 train loss:3.637264 +step:3944 train loss:3.683403 +step:3945 train loss:3.594663 +step:3946 train loss:3.537170 +step:3947 train loss:3.667514 +step:3948 train loss:3.635880 +step:3949 train loss:3.802622 +step:3950 train loss:3.604016 +step:3951 train loss:3.543548 +step:3952 train loss:3.490909 +step:3953 train loss:3.570101 +step:3954 train loss:3.617122 +step:3955 train loss:3.645644 +step:3956 train loss:3.604236 +step:3957 train loss:3.654930 +step:3958 train loss:3.634918 +step:3959 train loss:3.668386 +step:3960 train loss:3.593626 +step:3961 train loss:3.621884 +step:3962 train loss:3.624883 +step:3963 train loss:3.599995 +step:3964 train loss:3.580770 +step:3965 train loss:3.632154 +step:3966 train loss:3.585423 +step:3967 train loss:3.636030 +step:3968 train loss:3.650426 +step:3969 train loss:3.560818 +step:3970 train loss:3.670141 +step:3971 train loss:3.587199 +step:3972 train loss:3.617852 +step:3973 train loss:3.578383 +step:3974 train loss:3.673819 +step:3975 train loss:3.621915 +step:3976 train loss:3.579916 +step:3977 train loss:3.639372 +step:3978 train loss:3.601951 +step:3979 train loss:3.590388 +step:3980 train loss:3.661696 +step:3981 train 
loss:3.594601 +step:3982 train loss:3.613853 +step:3983 train loss:3.598569 +step:3984 train loss:3.632185 +step:3985 train loss:3.608362 +step:3986 train loss:3.622883 +step:3987 train loss:3.631622 +step:3988 train loss:3.583148 +step:3989 train loss:3.640924 +step:3990 train loss:3.635834 +step:3991 train loss:3.650744 +step:3992 train loss:3.604407 +step:3993 train loss:3.637476 +step:3994 train loss:3.589156 +step:3995 train loss:3.641555 +step:3996 train loss:3.560779 +step:3997 train loss:3.636613 +step:3998 train loss:3.516911 +step:3999 train loss:3.675641 +step:4000 validation loss:3.535620 total_sharp:4.8880e-03 L1_sharp:2.0310e-02 L2_sharp:6.2383e-03 L3_sharp:9.0273e-03 L4_sharp:5.8293e-03 L5_sharp:7.7281e-03 L6_sharp:9.1127e-03 L7_sharp:1.0908e-02 L8_sharp:1.0095e-02 L9_sharp:6.7760e-03 L10_sharp:4.6776e-03 L11_sharp:4.3339e-03 L12_sharp:5.5522e-03 total_fnorm:1.3921e+00 total_l1_linf:9.4944e+03 total_spectral:1.3921e+00 L1_fnorm:1.1976e-01 L2_fnorm:1.1467e-01 L3_fnorm:1.1471e-01 L4_fnorm:1.1542e-01 L5_fnorm:1.1933e-01 L6_fnorm:1.2044e-01 L7_fnorm:1.2031e-01 L8_fnorm:1.2066e-01 L9_fnorm:1.2099e-01 L10_fnorm:1.2109e-01 L11_fnorm:1.2098e-01 L12_fnorm:1.2078e-01 L1_l1linf:3.0561e-01 L2_l1linf:3.4973e-01 L3_l1linf:3.4562e-01 L4_l1linf:3.8117e-01 L5_l1linf:3.5153e-01 L6_l1linf:3.2680e-01 L7_l1linf:3.2352e-01 L8_l1linf:3.3232e-01 L9_l1linf:3.3896e-01 L10_l1linf:3.6376e-01 L11_l1linf:3.8151e-01 L12_l1linf:3.8959e-01 L1_spectral:6.9250e-03 L2_spectral:7.8709e-03 L3_spectral:7.8480e-03 L4_spectral:8.5373e-03 L5_spectral:7.8890e-03 L6_spectral:7.3294e-03 L7_spectral:7.3153e-03 L8_spectral:7.5474e-03 L9_spectral:7.6309e-03 L10_spectral:8.1756e-03 L11_spectral:8.5226e-03 L12_spectral:8.7077e-03 ip_v_neg_g:6.0748e-03 cos_v_neg_g:2.1021e-03 v_norm:1.3921e+00 g_norm:2.0760e+00 hv_norm:5.1448e-01 cos_v_hv:1.3226e-02 hg_norm:4.2976e+01 cos_g_hg:4.7064e-01 v_par:6.8678e-05 v_perp:1.3921e+00 L1_cos_v_neg_g:6.0701e-03 L1_v_norm:1.1976e-01 L2_cos_v_neg_g:6.1308e-03 L2_v_norm:1.1467e-01 L3_cos_v_neg_g:5.1484e-03 L3_v_norm:1.1471e-01 L4_cos_v_neg_g:4.7991e-03 L4_v_norm:1.1542e-01 L5_cos_v_neg_g:7.0836e-03 L5_v_norm:1.1933e-01 L6_cos_v_neg_g:8.3118e-03 L6_v_norm:1.2044e-01 L7_cos_v_neg_g:9.6594e-03 L7_v_norm:1.2031e-01 L8_cos_v_neg_g:1.0006e-02 L8_v_norm:1.2066e-01 L9_cos_v_neg_g:8.1595e-03 L9_v_norm:1.2099e-01 L10_cos_v_neg_g:6.2788e-03 L10_v_norm:1.2109e-01 L11_cos_v_neg_g:5.6303e-03 L11_v_norm:1.2098e-01 L12_cos_v_neg_g:3.9608e-03 L12_v_norm:1.2078e-01 +step:4000 train loss:3.554107 +step:4001 train loss:3.630145 +step:4002 train loss:3.610276 +step:4003 train loss:3.646156 +step:4004 train loss:3.550719 +step:4005 train loss:3.647928 +step:4006 train loss:3.653002 +step:4007 train loss:3.576118 +step:4008 train loss:3.533170 +step:4009 train loss:3.616402 +step:4010 train loss:3.590803 +step:4011 train loss:3.600842 +step:4012 train loss:3.614136 +step:4013 train loss:3.592336 +step:4014 train loss:3.604330 +step:4015 train loss:3.595188 +step:4016 train loss:3.606591 +step:4017 train loss:3.569999 +step:4018 train loss:3.514195 +step:4019 train loss:3.564485 +step:4020 train loss:3.629838 +step:4021 train loss:3.575999 +step:4022 train loss:3.580183 +step:4023 train loss:3.594483 +step:4024 train loss:3.506780 +step:4025 train loss:3.630332 +step:4026 train loss:3.618689 +step:4027 train loss:3.628845 +step:4028 train loss:3.640437 +step:4029 train loss:3.674593 +step:4030 train loss:3.590013 +step:4031 train loss:3.630908 +step:4032 train loss:3.588233 +step:4033 train loss:3.622894 +step:4034 
train loss:3.633953 +step:4035 train loss:3.616422 +step:4036 train loss:3.611736 +step:4037 train loss:3.629166 +step:4038 train loss:3.547901 +step:4039 train loss:3.602723 +step:4040 train loss:3.583390 +step:4041 train loss:3.577075 +step:4042 train loss:3.595719 +step:4043 train loss:3.581697 +step:4044 train loss:3.616443 +step:4045 train loss:3.620607 +step:4046 train loss:3.578429 +step:4047 train loss:3.605879 +step:4048 train loss:3.615728 +step:4049 train loss:3.580451 +step:4050 train loss:3.683821 +step:4051 train loss:3.596073 +step:4052 train loss:3.618158 +step:4053 train loss:3.665962 +step:4054 train loss:3.635775 +step:4055 train loss:3.654367 +step:4056 train loss:3.650392 +step:4057 train loss:3.585759 +step:4058 train loss:3.566891 +step:4059 train loss:3.652543 +step:4060 train loss:3.593121 +step:4061 train loss:3.563591 +step:4062 train loss:3.675747 +step:4063 train loss:3.627050 +step:4064 train loss:3.595578 +step:4065 train loss:3.580960 +step:4066 train loss:3.609048 +step:4067 train loss:3.634041 +step:4068 train loss:3.598354 +step:4069 train loss:3.657442 +step:4070 train loss:3.575501 +step:4071 train loss:3.547301 +step:4072 train loss:3.622698 +step:4073 train loss:3.559643 +step:4074 train loss:3.611248 +step:4075 train loss:3.674688 +step:4076 train loss:3.534511 +step:4077 train loss:3.611453 +step:4078 train loss:3.710332 +step:4079 train loss:3.653287 +step:4080 train loss:3.597204 +step:4081 train loss:3.567220 +step:4082 train loss:3.619583 +step:4083 train loss:3.557759 +step:4084 train loss:3.575382 +step:4085 train loss:3.817130 +step:4086 train loss:3.578596 +step:4087 train loss:3.622075 +step:4088 train loss:3.609412 +step:4089 train loss:3.597527 +step:4090 train loss:3.619410 +step:4091 train loss:3.642439 +step:4092 train loss:3.565574 +step:4093 train loss:3.593906 +step:4094 train loss:3.611712 +step:4095 train loss:3.568832 +step:4096 train loss:3.602528 +step:4097 train loss:3.606756 +step:4098 train loss:3.578734 +step:4099 train loss:3.575127 +step:4100 train loss:3.629716 +step:4101 train loss:3.556624 +step:4102 train loss:3.594693 +step:4103 train loss:3.798502 +step:4104 train loss:3.610828 +step:4105 train loss:3.578780 +step:4106 train loss:3.645414 +step:4107 train loss:3.569149 +step:4108 train loss:3.573194 +step:4109 train loss:3.625657 +step:4110 train loss:3.636885 +step:4111 train loss:3.609031 +step:4112 train loss:3.628415 +step:4113 train loss:3.591034 +step:4114 train loss:3.536933 +step:4115 train loss:3.573266 +step:4116 train loss:3.558914 +step:4117 train loss:3.575723 +step:4118 train loss:3.629857 +step:4119 train loss:3.654277 +step:4120 train loss:3.574522 +step:4121 train loss:3.566952 +step:4122 train loss:3.639665 +step:4123 train loss:3.648323 +step:4124 train loss:3.624391 +step:4125 train loss:3.666543 +step:4126 train loss:3.597932 +step:4127 train loss:3.617712 +step:4128 train loss:3.610215 +step:4129 train loss:3.658262 +step:4130 train loss:3.587602 +step:4131 train loss:3.621448 +step:4132 train loss:3.635870 +step:4133 train loss:3.589777 +step:4134 train loss:3.642359 +step:4135 train loss:3.576952 +step:4136 train loss:3.598613 +step:4137 train loss:3.570184 +step:4138 train loss:3.576224 +step:4139 train loss:3.628213 +step:4140 train loss:3.581143 +step:4141 train loss:3.544447 +step:4142 train loss:3.590393 +step:4143 train loss:3.627311 +step:4144 train loss:3.580574 +step:4145 train loss:3.546325 +step:4146 train loss:3.615732 +step:4147 train loss:3.589659 +step:4148 train loss:3.581667 
+step:4149 train loss:3.662538 +step:4150 train loss:3.627711 +step:4151 train loss:3.609558 +step:4152 train loss:3.630462 +step:4153 train loss:3.637322 +step:4154 train loss:3.645710 +step:4155 train loss:3.668531 +step:4156 train loss:3.543741 +step:4157 train loss:3.565032 +step:4158 train loss:3.621142 +step:4159 train loss:3.523428 +step:4160 train loss:3.614579 +step:4161 train loss:3.613815 +step:4162 train loss:3.526046 +step:4163 train loss:3.609622 +step:4164 train loss:3.552058 +step:4165 train loss:3.555722 +step:4166 train loss:3.622849 +step:4167 train loss:3.617241 +step:4168 train loss:3.608568 +step:4169 train loss:3.642149 +step:4170 train loss:3.758430 +step:4171 train loss:3.604877 +step:4172 train loss:3.623649 +step:4173 train loss:3.622451 +step:4174 train loss:3.585966 +step:4175 train loss:3.671579 +step:4176 train loss:3.596666 +step:4177 train loss:3.619248 +step:4178 train loss:3.596428 +step:4179 train loss:3.554260 +step:4180 train loss:3.549806 +step:4181 train loss:3.599863 +step:4182 train loss:3.585921 +step:4183 train loss:3.520126 +step:4184 train loss:3.590807 +step:4185 train loss:3.658703 +step:4186 train loss:3.634216 +step:4187 train loss:3.642764 +step:4188 train loss:3.618814 +step:4189 train loss:3.579035 +step:4190 train loss:3.618279 +step:4191 train loss:3.567678 +step:4192 train loss:3.658138 +step:4193 train loss:3.565052 +step:4194 train loss:3.550475 +step:4195 train loss:3.547627 +step:4196 train loss:3.612538 +step:4197 train loss:3.628597 +step:4198 train loss:3.553137 +step:4199 train loss:3.634009 +step:4200 train loss:3.595762 +step:4201 train loss:3.576213 +step:4202 train loss:3.594254 +step:4203 train loss:3.602540 +step:4204 train loss:3.596087 +step:4205 train loss:3.607575 +step:4206 train loss:3.627136 +step:4207 train loss:3.628770 +step:4208 train loss:3.591025 +step:4209 train loss:3.657010 +step:4210 train loss:3.687096 +step:4211 train loss:3.566807 +step:4212 train loss:3.610610 +step:4213 train loss:3.561745 +step:4214 train loss:3.569121 +step:4215 train loss:3.584875 +step:4216 train loss:3.557028 +step:4217 train loss:3.581969 +step:4218 train loss:3.621608 +step:4219 train loss:3.616690 +step:4220 train loss:3.696468 +step:4221 train loss:3.583239 +step:4222 train loss:3.646312 +step:4223 train loss:3.564886 +step:4224 train loss:3.641136 +step:4225 train loss:3.566763 +step:4226 train loss:3.624110 +step:4227 train loss:3.599796 +step:4228 train loss:3.574963 +step:4229 train loss:3.585109 +step:4230 train loss:3.567502 +step:4231 train loss:3.555866 +step:4232 train loss:3.606481 +step:4233 train loss:3.513987 +step:4234 train loss:3.597545 +step:4235 train loss:3.675878 +step:4236 train loss:3.644449 +step:4237 train loss:3.624794 +step:4238 train loss:3.635609 +step:4239 train loss:3.685838 +step:4240 train loss:3.592994 +step:4241 train loss:3.521960 +step:4242 train loss:3.640074 +step:4243 train loss:3.639950 +step:4244 train loss:3.653772 +step:4245 train loss:3.707450 +step:4246 train loss:3.581749 +step:4247 train loss:3.640297 +step:4248 train loss:3.590174 +step:4249 train loss:3.599695 +step:4250 validation loss:3.520571 +step:4250 train loss:3.575932 +step:4251 train loss:3.672388 +step:4252 train loss:3.581063 +step:4253 train loss:3.576478 +step:4254 train loss:3.582741 +step:4255 train loss:3.568814 +step:4256 train loss:3.584576 +step:4257 train loss:3.639635 +step:4258 train loss:3.500577 +step:4259 train loss:3.567874 +step:4260 train loss:3.632613 +step:4261 train loss:3.614928 +step:4262 
train loss:3.759814 +step:4263 train loss:3.683664 +step:4264 train loss:3.626832 +step:4265 train loss:3.621325 +step:4266 train loss:3.617379 +step:4267 train loss:3.617849 +step:4268 train loss:3.561920 +step:4269 train loss:3.657682 +step:4270 train loss:3.637581 +step:4271 train loss:3.553363 +step:4272 train loss:3.605038 +step:4273 train loss:3.582779 +step:4274 train loss:3.570395 +step:4275 train loss:3.590364 +step:4276 train loss:3.558490 +step:4277 train loss:3.695123 +step:4278 train loss:3.544047 +step:4279 train loss:3.569288 +step:4280 train loss:3.653283 +step:4281 train loss:3.638473 +step:4282 train loss:3.702423 +step:4283 train loss:3.560128 +step:4284 train loss:3.586347 +step:4285 train loss:3.589824 +step:4286 train loss:3.652430 +step:4287 train loss:3.653445 +step:4288 train loss:3.631023 +step:4289 train loss:3.586905 +step:4290 train loss:3.592370 +step:4291 train loss:3.553178 +step:4292 train loss:3.597791 +step:4293 train loss:3.610280 +step:4294 train loss:3.595849 +step:4295 train loss:3.531644 +step:4296 train loss:3.605000 +step:4297 train loss:3.584013 +step:4298 train loss:3.597656 +step:4299 train loss:3.593368 +step:4300 train loss:3.711462 +step:4301 train loss:3.528848 +step:4302 train loss:3.666815 +step:4303 train loss:3.546643 +step:4304 train loss:3.550776 +step:4305 train loss:3.570527 +step:4306 train loss:3.644452 +step:4307 train loss:3.556801 +step:4308 train loss:3.558353 +step:4309 train loss:3.628855 +step:4310 train loss:3.566408 +step:4311 train loss:3.621599 +step:4312 train loss:3.616490 +step:4313 train loss:3.605974 +step:4314 train loss:3.554499 +step:4315 train loss:3.585108 +step:4316 train loss:3.532227 +step:4317 train loss:3.589907 +step:4318 train loss:3.630541 +step:4319 train loss:3.581501 +step:4320 train loss:3.640857 +step:4321 train loss:3.623003 +step:4322 train loss:3.577967 +step:4323 train loss:3.517229 +step:4324 train loss:3.607211 +step:4325 train loss:3.586164 +step:4326 train loss:3.576093 +step:4327 train loss:3.682644 +step:4328 train loss:3.592360 +step:4329 train loss:3.548589 +step:4330 train loss:3.593318 +step:4331 train loss:3.607997 +step:4332 train loss:3.638497 +step:4333 train loss:3.601075 +step:4334 train loss:3.610968 +step:4335 train loss:3.611877 +step:4336 train loss:3.624923 +step:4337 train loss:3.586023 +step:4338 train loss:3.707978 +step:4339 train loss:3.611743 +step:4340 train loss:3.620687 +step:4341 train loss:3.588649 +step:4342 train loss:3.602660 +step:4343 train loss:3.720263 +step:4344 train loss:3.610098 +step:4345 train loss:3.627082 +step:4346 train loss:3.638469 +step:4347 train loss:3.651757 +step:4348 train loss:3.563226 +step:4349 train loss:3.646158 +step:4350 train loss:3.585727 +step:4351 train loss:3.542044 +step:4352 train loss:3.614126 +step:4353 train loss:3.561635 +step:4354 train loss:3.620199 +step:4355 train loss:3.577559 +step:4356 train loss:3.601625 +step:4357 train loss:3.586727 +step:4358 train loss:3.681430 +step:4359 train loss:3.628024 +step:4360 train loss:3.545225 +step:4361 train loss:3.594149 +step:4362 train loss:3.614641 +step:4363 train loss:3.630585 +step:4364 train loss:3.597486 +step:4365 train loss:3.577142 +step:4366 train loss:3.625274 +step:4367 train loss:3.639766 +step:4368 train loss:3.614803 +step:4369 train loss:3.488777 +step:4370 train loss:3.612078 +step:4371 train loss:3.524781 +step:4372 train loss:3.673481 +step:4373 train loss:3.609883 +step:4374 train loss:3.580182 +step:4375 train loss:3.626216 +step:4376 train loss:3.636079 
+step:4377 train loss:3.569386 +step:4378 train loss:3.581765 +step:4379 train loss:3.665641 +step:4380 train loss:3.642132 +step:4381 train loss:3.547565 +step:4382 train loss:3.593456 +step:4383 train loss:3.620078 +step:4384 train loss:3.618240 +step:4385 train loss:3.543872 +step:4386 train loss:3.599146 +step:4387 train loss:3.569155 +step:4388 train loss:3.586867 +step:4389 train loss:3.615888 +step:4390 train loss:3.657360 +step:4391 train loss:3.584551 +step:4392 train loss:3.656721 +step:4393 train loss:3.616247 +step:4394 train loss:3.553397 +step:4395 train loss:3.608835 +step:4396 train loss:3.585022 +step:4397 train loss:3.628369 +step:4398 train loss:3.575325 +step:4399 train loss:3.568104 +step:4400 train loss:3.573687 +step:4401 train loss:3.632679 +step:4402 train loss:3.629158 +step:4403 train loss:3.582981 +step:4404 train loss:3.613995 +step:4405 train loss:3.534005 +step:4406 train loss:3.613040 +step:4407 train loss:3.548211 +step:4408 train loss:3.639683 +step:4409 train loss:3.601333 +step:4410 train loss:3.607793 +step:4411 train loss:3.565751 +step:4412 train loss:3.687586 +step:4413 train loss:3.577116 +step:4414 train loss:3.585098 +step:4415 train loss:3.570830 +step:4416 train loss:3.563768 +step:4417 train loss:3.555625 +step:4418 train loss:3.628809 +step:4419 train loss:3.597900 +step:4420 train loss:3.607087 +step:4421 train loss:3.633436 +step:4422 train loss:3.649829 +step:4423 train loss:3.608268 +step:4424 train loss:3.590436 +step:4425 train loss:3.553689 +step:4426 train loss:3.632326 +step:4427 train loss:3.593688 +step:4428 train loss:3.532107 +step:4429 train loss:3.593965 +step:4430 train loss:3.629424 +step:4431 train loss:3.624122 +step:4432 train loss:3.528425 +step:4433 train loss:3.584951 +step:4434 train loss:3.580060 +step:4435 train loss:3.610813 +step:4436 train loss:3.547649 +step:4437 train loss:3.625108 +step:4438 train loss:3.594464 +step:4439 train loss:3.598317 +step:4440 train loss:3.598604 +step:4441 train loss:3.600703 +step:4442 train loss:3.654471 +step:4443 train loss:3.584769 +step:4444 train loss:3.671026 +step:4445 train loss:3.634111 +step:4446 train loss:3.567332 +step:4447 train loss:3.613692 +step:4448 train loss:3.635804 +step:4449 train loss:3.568817 +step:4450 train loss:3.586223 +step:4451 train loss:3.640920 +step:4452 train loss:3.698689 +step:4453 train loss:3.624642 +step:4454 train loss:3.598810 +step:4455 train loss:3.642592 +step:4456 train loss:3.589514 +step:4457 train loss:3.584552 +step:4458 train loss:3.599874 +step:4459 train loss:3.635653 +step:4460 train loss:3.543629 +step:4461 train loss:3.515934 +step:4462 train loss:3.577624 +step:4463 train loss:3.593058 +step:4464 train loss:3.566628 +step:4465 train loss:3.598569 +step:4466 train loss:3.695011 +step:4467 train loss:3.572568 +step:4468 train loss:3.570675 +step:4469 train loss:3.560278 +step:4470 train loss:3.538797 +step:4471 train loss:3.599981 +step:4472 train loss:3.525009 +step:4473 train loss:3.609606 +step:4474 train loss:3.633969 +step:4475 train loss:3.596498 +step:4476 train loss:3.555952 +step:4477 train loss:3.545257 +step:4478 train loss:3.602587 +step:4479 train loss:3.701436 +step:4480 train loss:3.539765 +step:4481 train loss:3.611557 +step:4482 train loss:3.571461 +step:4483 train loss:3.567521 +step:4484 train loss:3.610121 +step:4485 train loss:3.574193 +step:4486 train loss:3.675489 +step:4487 train loss:3.572017 +step:4488 train loss:3.568822 +step:4489 train loss:3.525389 +step:4490 train loss:3.608150 +step:4491 train 
loss:3.558970 +step:4492 train loss:3.591039 +step:4493 train loss:3.579009 +step:4494 train loss:3.573074 +step:4495 train loss:3.638087 +step:4496 train loss:3.580447 +step:4497 train loss:3.665391 +step:4498 train loss:3.557604 +step:4499 train loss:3.605719 +step:4500 validation loss:3.509172 total_sharp:6.0050e-03 L1_sharp:2.0412e-02 L2_sharp:9.4767e-03 L3_sharp:1.2747e-02 L4_sharp:5.8355e-03 L5_sharp:8.1863e-03 L6_sharp:1.1577e-02 L7_sharp:1.1843e-02 L8_sharp:1.0617e-02 L9_sharp:7.4584e-03 L10_sharp:5.3754e-03 L11_sharp:4.7490e-03 L12_sharp:7.7124e-03 total_fnorm:1.4018e+00 total_l1_linf:9.5512e+03 total_spectral:1.4018e+00 L1_fnorm:1.2004e-01 L2_fnorm:1.1489e-01 L3_fnorm:1.1495e-01 L4_fnorm:1.1788e-01 L5_fnorm:1.1992e-01 L6_fnorm:1.2085e-01 L7_fnorm:1.2092e-01 L8_fnorm:1.2092e-01 L9_fnorm:1.2117e-01 L10_fnorm:1.2123e-01 L11_fnorm:1.2107e-01 L12_fnorm:1.2081e-01 L1_l1linf:3.6028e-01 L2_l1linf:4.2542e-01 L3_l1linf:4.1369e-01 L4_l1linf:4.2193e-01 L5_l1linf:3.9112e-01 L6_l1linf:3.9351e-01 L7_l1linf:3.5819e-01 L8_l1linf:3.4519e-01 L9_l1linf:3.7188e-01 L10_l1linf:3.9431e-01 L11_l1linf:4.0198e-01 L12_l1linf:3.9369e-01 L1_spectral:8.0798e-03 L2_spectral:9.5890e-03 L3_spectral:9.2745e-03 L4_spectral:9.4595e-03 L5_spectral:8.7764e-03 L6_spectral:8.7779e-03 L7_spectral:8.0913e-03 L8_spectral:7.7487e-03 L9_spectral:8.2766e-03 L10_spectral:8.8472e-03 L11_spectral:8.9370e-03 L12_spectral:8.8207e-03 ip_v_neg_g:7.3825e-03 cos_v_neg_g:2.3810e-03 v_norm:1.4018e+00 g_norm:2.2119e+00 hv_norm:6.0896e-01 cos_v_hv:1.3823e-02 hg_norm:1.0243e+02 cos_g_hg:5.0769e-01 v_par:7.9595e-05 v_perp:1.4018e+00 L1_cos_v_neg_g:8.0773e-03 L1_v_norm:1.2004e-01 L2_cos_v_neg_g:5.3939e-03 L2_v_norm:1.1489e-01 L3_cos_v_neg_g:4.4447e-03 L3_v_norm:1.1495e-01 L4_cos_v_neg_g:6.6480e-03 L4_v_norm:1.1788e-01 L5_cos_v_neg_g:8.5452e-03 L5_v_norm:1.1992e-01 L6_cos_v_neg_g:1.0897e-02 L6_v_norm:1.2085e-01 L7_cos_v_neg_g:1.0173e-02 L7_v_norm:1.2092e-01 L8_cos_v_neg_g:9.1901e-03 L8_v_norm:1.2092e-01 L9_cos_v_neg_g:8.6966e-03 L9_v_norm:1.2117e-01 L10_cos_v_neg_g:7.0623e-03 L10_v_norm:1.2123e-01 L11_cos_v_neg_g:5.8482e-03 L11_v_norm:1.2107e-01 L12_cos_v_neg_g:5.4532e-03 L12_v_norm:1.2081e-01 +step:4500 train loss:3.515922 +step:4501 train loss:3.574697 +step:4502 train loss:3.699191 +step:4503 train loss:3.599588 +step:4504 train loss:3.613223 +step:4505 train loss:3.590794 +step:4506 train loss:3.565469 +step:4507 train loss:3.640280 +step:4508 train loss:3.578717 +step:4509 train loss:3.574913 +step:4510 train loss:3.609463 +step:4511 train loss:3.559627 +step:4512 train loss:3.584826 +step:4513 train loss:3.645225 +step:4514 train loss:3.550429 +step:4515 train loss:3.669135 +step:4516 train loss:3.639024 +step:4517 train loss:3.592480 +step:4518 train loss:3.533773 +step:4519 train loss:3.569999 +step:4520 train loss:3.580811 +step:4521 train loss:3.523955 +step:4522 train loss:3.577254 +step:4523 train loss:3.624211 +step:4524 train loss:3.608123 +step:4525 train loss:3.529052 +step:4526 train loss:3.571797 +step:4527 train loss:3.559072 +step:4528 train loss:3.591108 +step:4529 train loss:3.585073 +step:4530 train loss:3.682541 +step:4531 train loss:3.569900 +step:4532 train loss:3.591863 +step:4533 train loss:3.567730 +step:4534 train loss:3.660398 +step:4535 train loss:3.557526 +step:4536 train loss:3.630754 +step:4537 train loss:3.614219 +step:4538 train loss:3.589467 +step:4539 train loss:3.611116 +step:4540 train loss:3.585554 +step:4541 train loss:3.556393 +step:4542 train loss:3.603000 +step:4543 train loss:3.690686 +step:4544 
train loss:3.632537 +step:4545 train loss:3.572238 +step:4546 train loss:3.667844 +step:4547 train loss:3.622297 +step:4548 train loss:3.630087 +step:4549 train loss:3.586265 +step:4550 train loss:3.553305 +step:4551 train loss:3.570141 +step:4552 train loss:3.574064 +step:4553 train loss:3.654785 +step:4554 train loss:3.550880 +step:4555 train loss:3.660635 +step:4556 train loss:3.596508 +step:4557 train loss:3.527640 +step:4558 train loss:3.612513 +step:4559 train loss:3.622256 +step:4560 train loss:3.559967 +step:4561 train loss:3.547840 +step:4562 train loss:3.590490 +step:4563 train loss:3.539994 +step:4564 train loss:3.569173 +step:4565 train loss:3.567753 +step:4566 train loss:3.542737 +step:4567 train loss:3.568132 +step:4568 train loss:3.568470 +step:4569 train loss:3.551946 +step:4570 train loss:3.602720 +step:4571 train loss:3.581915 +step:4572 train loss:3.574260 +step:4573 train loss:3.582969 +step:4574 train loss:3.728165 +step:4575 train loss:3.561333 +step:4576 train loss:3.549013 +step:4577 train loss:3.589215 +step:4578 train loss:3.629438 +step:4579 train loss:3.580331 +step:4580 train loss:3.639814 +step:4581 train loss:3.579200 +step:4582 train loss:3.573120 +step:4583 train loss:3.579329 +step:4584 train loss:3.550964 +step:4585 train loss:3.629861 +step:4586 train loss:3.618736 +step:4587 train loss:3.519444 +step:4588 train loss:3.562535 +step:4589 train loss:3.635354 +step:4590 train loss:3.606159 +step:4591 train loss:3.545445 +step:4592 train loss:3.633543 +step:4593 train loss:3.551777 +step:4594 train loss:3.582922 +step:4595 train loss:3.606872 +step:4596 train loss:3.544270 +step:4597 train loss:3.679453 +step:4598 train loss:3.598879 +step:4599 train loss:3.553540 +step:4600 train loss:3.560070 +step:4601 train loss:3.582901 +step:4602 train loss:3.532321 +step:4603 train loss:3.544728 +step:4604 train loss:3.654441 +step:4605 train loss:3.573468 +step:4606 train loss:3.600846 +step:4607 train loss:3.581444 +step:4608 train loss:3.614109 +step:4609 train loss:3.574670 +step:4610 train loss:3.615656 +step:4611 train loss:3.644372 +step:4612 train loss:3.639835 +step:4613 train loss:3.620267 +step:4614 train loss:3.615221 +step:4615 train loss:3.557559 +step:4616 train loss:3.538101 +step:4617 train loss:3.582609 +step:4618 train loss:3.598752 +step:4619 train loss:3.557333 +step:4620 train loss:3.576466 +step:4621 train loss:3.577023 +step:4622 train loss:3.517311 +step:4623 train loss:3.621445 +step:4624 train loss:3.606118 +step:4625 train loss:3.565568 +step:4626 train loss:3.607661 +step:4627 train loss:3.576951 +step:4628 train loss:3.565938 +step:4629 train loss:3.603061 +step:4630 train loss:3.659327 +step:4631 train loss:3.662109 +step:4632 train loss:3.557971 +step:4633 train loss:3.570298 +step:4634 train loss:3.641289 +step:4635 train loss:3.606824 +step:4636 train loss:3.622844 +step:4637 train loss:3.558971 +step:4638 train loss:3.561585 +step:4639 train loss:3.561664 +step:4640 train loss:3.571462 +step:4641 train loss:3.576876 +step:4642 train loss:3.609932 +step:4643 train loss:3.570026 +step:4644 train loss:3.595024 +step:4645 train loss:3.608191 +step:4646 train loss:3.564864 +step:4647 train loss:3.523522 +step:4648 train loss:3.626388 +step:4649 train loss:3.640050 +step:4650 train loss:3.589660 +step:4651 train loss:3.592257 +step:4652 train loss:3.577979 +step:4653 train loss:3.637213 +step:4654 train loss:3.632236 +step:4655 train loss:3.537646 +step:4656 train loss:3.571315 +step:4657 train loss:3.622938 +step:4658 train loss:3.578314 
+step:4659 train loss:3.593088 +step:4660 train loss:3.634952 +step:4661 train loss:3.552022 +step:4662 train loss:3.566805 +step:4663 train loss:3.578827 +step:4664 train loss:3.631022 +step:4665 train loss:3.625404 +step:4666 train loss:3.623478 +step:4667 train loss:3.617810 +step:4668 train loss:3.579091 +step:4669 train loss:3.589868 +step:4670 train loss:3.620119 +step:4671 train loss:3.614936 +step:4672 train loss:3.493867 +step:4673 train loss:3.528097 +step:4674 train loss:3.657308 +step:4675 train loss:3.563355 +step:4676 train loss:3.524192 +step:4677 train loss:3.530042 +step:4678 train loss:3.501461 +step:4679 train loss:3.599590 +step:4680 train loss:3.539360 +step:4681 train loss:3.587420 +step:4682 train loss:3.540580 +step:4683 train loss:3.508599 +step:4684 train loss:3.624164 +step:4685 train loss:3.562451 +step:4686 train loss:3.572963 +step:4687 train loss:3.612481 +step:4688 train loss:3.542147 +step:4689 train loss:3.614823 +step:4690 train loss:3.560144 +step:4691 train loss:3.590518 +step:4692 train loss:3.523650 +step:4693 train loss:3.560143 +step:4694 train loss:3.600402 +step:4695 train loss:3.624878 +step:4696 train loss:3.606019 +step:4697 train loss:3.521966 +step:4698 train loss:3.539006 +step:4699 train loss:3.589807 +step:4700 train loss:3.560810 +step:4701 train loss:3.567489 +step:4702 train loss:3.523003 +step:4703 train loss:3.604493 +step:4704 train loss:3.591489 +step:4705 train loss:3.532537 +step:4706 train loss:3.542949 +step:4707 train loss:3.529543 +step:4708 train loss:3.595629 +step:4709 train loss:3.544501 +step:4710 train loss:3.555773 +step:4711 train loss:3.618553 +step:4712 train loss:3.519563 +step:4713 train loss:3.621591 +step:4714 train loss:3.521523 +step:4715 train loss:3.610549 +step:4716 train loss:3.580441 +step:4717 train loss:3.507175 +step:4718 train loss:3.601860 +step:4719 train loss:3.527660 +step:4720 train loss:3.625718 +step:4721 train loss:3.577403 +step:4722 train loss:3.635575 +step:4723 train loss:3.531435 +step:4724 train loss:3.582023 +step:4725 train loss:3.518304 +step:4726 train loss:3.563781 +step:4727 train loss:3.568410 +step:4728 train loss:3.576381 +step:4729 train loss:3.605329 +step:4730 train loss:3.505911 +step:4731 train loss:3.565916 +step:4732 train loss:3.521992 +step:4733 train loss:3.451804 +step:4734 train loss:3.590204 +step:4735 train loss:3.541479 +step:4736 train loss:3.585230 +step:4737 train loss:3.465569 +step:4738 train loss:3.615625 +step:4739 train loss:3.492649 +step:4740 train loss:3.600810 +step:4741 train loss:3.570176 +step:4742 train loss:3.530099 +step:4743 train loss:3.529784 +step:4744 train loss:3.571549 +step:4745 train loss:3.591608 +step:4746 train loss:3.628817 +step:4747 train loss:3.593130 +step:4748 train loss:3.489651 +step:4749 train loss:3.561480 +step:4750 validation loss:3.495255 +step:4750 train loss:3.508853 +step:4751 train loss:3.599446 +step:4752 train loss:3.533883 +step:4753 train loss:3.637975 +step:4754 train loss:3.508295 +step:4755 train loss:3.550868 +step:4756 train loss:3.627226 +step:4757 train loss:3.549335 +step:4758 train loss:3.569208 +step:4759 train loss:3.563078 +step:4760 train loss:3.596521 +step:4761 train loss:3.514561 +step:4762 train loss:3.545360 +step:4763 train loss:3.568733 +step:4764 train loss:3.630121 +step:4765 train loss:3.525221 +step:4766 train loss:3.541476 +step:4767 train loss:3.498042 +step:4768 train loss:3.551870 +step:4769 train loss:3.582989 +step:4770 train loss:3.538506 +step:4771 train loss:3.553893 +step:4772 
train loss:3.523815 +step:4773 train loss:3.560694 +step:4774 train loss:3.506656 +step:4775 train loss:3.635251 +step:4776 train loss:3.500133 +step:4777 train loss:3.573656 +step:4778 train loss:3.514500 +step:4779 train loss:3.561641 +step:4780 train loss:3.500146 +step:4781 train loss:3.506807 +step:4782 train loss:3.614564 +step:4783 train loss:3.601748 +step:4784 train loss:3.565850 +step:4785 train loss:3.558247 +step:4786 train loss:3.672199 +step:4787 train loss:3.504890 +step:4788 train loss:3.527482 +step:4789 train loss:3.551081 +step:4790 train loss:3.606106 +step:4791 train loss:3.570044 +step:4792 train loss:3.613223 +step:4793 train loss:3.529070 +step:4794 train loss:3.602931 +step:4795 train loss:3.552089 +step:4796 train loss:3.541233 +step:4797 train loss:3.549941 +step:4798 train loss:3.555132 +step:4799 train loss:3.554762 +step:4800 train loss:3.583079 +step:4801 train loss:3.576548 +step:4802 train loss:3.613810 +step:4803 train loss:3.597719 +step:4804 train loss:3.555724 +step:4805 train loss:3.549625 +step:4806 train loss:3.527815 +step:4807 train loss:3.635444 +step:4808 train loss:3.507770 +step:4809 train loss:3.608389 +step:4810 train loss:3.551210 +step:4811 train loss:3.567445 +step:4812 train loss:3.546562 +step:4813 train loss:3.500862 +step:4814 train loss:3.498781 +step:4815 train loss:3.487865 +step:4816 train loss:3.555626 +step:4817 train loss:3.492086 +step:4818 train loss:3.559894 +step:4819 train loss:3.552946 +step:4820 train loss:3.809737 +step:4821 train loss:3.580282 +step:4822 train loss:3.589519 +step:4823 train loss:3.522686 +step:4824 train loss:3.529144 +step:4825 train loss:3.510158 +step:4826 train loss:3.596082 +step:4827 train loss:3.545368 +step:4828 train loss:3.484383 +step:4829 train loss:3.590817 +step:4830 train loss:3.531447 +step:4831 train loss:3.679137 +step:4832 train loss:3.546550 +step:4833 train loss:3.585766 +step:4834 train loss:3.487856 +step:4835 train loss:3.579064 +step:4836 train loss:3.558696 +step:4837 train loss:3.589016 +step:4838 train loss:3.527051 +step:4839 train loss:3.592430 +step:4840 train loss:3.499813 +step:4841 train loss:3.595059 +step:4842 train loss:3.510632 +step:4843 train loss:3.586413 +step:4844 train loss:3.588995 +step:4845 train loss:3.526011 +step:4846 train loss:3.544033 +step:4847 train loss:3.525768 +step:4848 train loss:3.552664 +step:4849 train loss:3.505358 +step:4850 train loss:3.513927 +step:4851 train loss:3.506362 +step:4852 train loss:3.590051 +step:4853 train loss:3.561566 +step:4854 train loss:3.543681 +step:4855 train loss:3.604426 +step:4856 train loss:3.576550 +step:4857 train loss:3.580600 +step:4858 train loss:3.662701 +step:4859 train loss:3.508471 +step:4860 train loss:3.585706 +step:4861 train loss:3.557264 +step:4862 train loss:3.591082 +step:4863 train loss:3.527449 +step:4864 train loss:3.538015 +step:4865 train loss:3.533235 +step:4866 train loss:3.575596 +step:4867 train loss:3.546264 +step:4868 train loss:3.562793 +step:4869 train loss:3.514183 +step:4870 train loss:3.544629 +step:4871 train loss:3.627634 +step:4872 train loss:3.570091 +step:4873 train loss:3.572251 +step:4874 train loss:3.538464 +step:4875 train loss:3.507766 +step:4876 train loss:3.519630 +step:4877 train loss:3.523508 +step:4878 train loss:3.560000 +step:4879 train loss:3.523562 +step:4880 train loss:3.547874 +step:4881 train loss:3.494108 +step:4882 train loss:3.694752 +step:4883 train loss:3.508072 +step:4884 train loss:3.535000 +step:4885 train loss:3.509612 +step:4886 train loss:3.587244 
+step:4887 train loss:3.540997 +step:4888 train loss:3.549715 +step:4889 train loss:3.540659 +step:4890 train loss:3.583640 +step:4891 train loss:3.521664 +step:4892 train loss:3.526503 +step:4893 train loss:3.570550 +step:4894 train loss:3.508945 +step:4895 train loss:3.540679 +step:4896 train loss:3.519969 +step:4897 train loss:3.596770 +step:4898 train loss:3.546042 +step:4899 train loss:3.530452 +step:4900 train loss:3.574385 +step:4901 train loss:3.523821 +step:4902 train loss:3.517776 +step:4903 train loss:3.538087 +step:4904 train loss:3.550454 +step:4905 train loss:3.548795 +step:4906 train loss:3.549559 +step:4907 train loss:3.622481 +step:4908 train loss:3.527528 +step:4909 train loss:3.535604 +step:4910 train loss:3.553695 +step:4911 train loss:3.610601 +step:4912 train loss:3.580999 +step:4913 train loss:3.564627 +step:4914 train loss:3.550075 +step:4915 train loss:3.536695 +step:4916 train loss:3.473471 +step:4917 train loss:3.502048 +step:4918 train loss:3.533351 +step:4919 train loss:3.521945 +step:4920 train loss:3.521370 +step:4921 train loss:3.686853 +step:4922 train loss:3.576633 +step:4923 train loss:3.593683 +step:4924 train loss:3.592539 +step:4925 train loss:3.527760 +step:4926 train loss:3.518938 +step:4927 train loss:3.551621 +step:4928 train loss:3.591141 +step:4929 train loss:3.544844 +step:4930 train loss:3.525146 +step:4931 train loss:3.518508 +step:4932 train loss:3.531745 +step:4933 train loss:3.523224 +step:4934 train loss:3.589504 +step:4935 train loss:3.576114 +step:4936 train loss:3.538616 +step:4937 train loss:3.651028 +step:4938 train loss:3.636663 +step:4939 train loss:3.501048 +step:4940 train loss:3.583087 +step:4941 train loss:3.483066 +step:4942 train loss:3.524767 +step:4943 train loss:3.525548 +step:4944 train loss:3.528190 +step:4945 train loss:3.573056 +step:4946 train loss:3.547130 +step:4947 train loss:3.531994 +step:4948 train loss:3.571220 +step:4949 train loss:3.475497 +step:4950 train loss:3.555803 +step:4951 train loss:3.605441 +step:4952 train loss:3.548592 +step:4953 train loss:3.579230 +step:4954 train loss:3.484424 +step:4955 train loss:3.558607 +step:4956 train loss:3.587753 +step:4957 train loss:3.582774 +step:4958 train loss:3.497763 +step:4959 train loss:3.612800 +step:4960 train loss:3.543210 +step:4961 train loss:3.559910 +step:4962 train loss:3.522815 +step:4963 train loss:3.569693 +step:4964 train loss:3.520576 +step:4965 train loss:3.671755 +step:4966 train loss:3.520855 +step:4967 train loss:3.629038 +step:4968 train loss:3.519441 +step:4969 train loss:3.560018 +step:4970 train loss:3.552600 +step:4971 train loss:3.507288 +step:4972 train loss:3.545309 +step:4973 train loss:3.556121 +step:4974 train loss:3.542010 +step:4975 train loss:3.631670 +step:4976 train loss:3.606388 +step:4977 train loss:3.554723 +step:4978 train loss:3.541858 +step:4979 train loss:3.539948 +step:4980 train loss:3.648792 +step:4981 train loss:3.486387 +step:4982 train loss:3.565996 +step:4983 train loss:3.492342 +step:4984 train loss:3.673263 +step:4985 train loss:3.573091 +step:4986 train loss:3.516500 +step:4987 train loss:3.537279 +step:4988 train loss:3.735738 +step:4989 train loss:3.540833 +step:4990 train loss:3.531400 +step:4991 train loss:3.546760 +step:4992 train loss:3.533254 +step:4993 train loss:3.509554 +step:4994 train loss:3.623147 +step:4995 train loss:3.544224 +step:4996 train loss:3.631587 +step:4997 train loss:3.532522 +step:4998 train loss:3.534409 +step:4999 train loss:3.520954 +step:5000 validation loss:3.488338 
total_sharp:4.1103e-03 L1_sharp:4.4496e-02 L2_sharp:1.0765e-02 L3_sharp:1.2069e-02 L4_sharp:5.2456e-03 L5_sharp:6.9039e-03 L6_sharp:6.6580e-03 L7_sharp:6.7895e-03 L8_sharp:6.7026e-03 L9_sharp:4.7287e-03 L10_sharp:3.7972e-03 L11_sharp:3.8227e-03 L12_sharp:6.6837e-03 total_fnorm:1.3916e+00 total_l1_linf:9.5002e+03 total_spectral:1.3916e+00 L1_fnorm:1.1947e-01 L2_fnorm:1.1386e-01 L3_fnorm:1.1556e-01 L4_fnorm:1.1854e-01 L5_fnorm:1.2041e-01 L6_fnorm:1.2092e-01 L7_fnorm:1.2106e-01 L8_fnorm:1.2113e-01 L9_fnorm:1.2136e-01 L10_fnorm:1.2127e-01 L11_fnorm:1.2108e-01 L12_fnorm:1.2084e-01 L1_l1linf:3.2994e-01 L2_l1linf:3.6053e-01 L3_l1linf:3.5890e-01 L4_l1linf:3.6625e-01 L5_l1linf:3.6245e-01 L6_l1linf:3.0315e-01 L7_l1linf:2.9923e-01 L8_l1linf:3.1672e-01 L9_l1linf:3.2245e-01 L10_l1linf:3.6620e-01 L11_l1linf:3.8808e-01 L12_l1linf:3.8818e-01 L1_spectral:7.4600e-03 L2_spectral:8.1171e-03 L3_spectral:8.1256e-03 L4_spectral:8.2810e-03 L5_spectral:8.1625e-03 L6_spectral:6.8958e-03 L7_spectral:6.7884e-03 L8_spectral:7.1923e-03 L9_spectral:7.3002e-03 L10_spectral:8.2743e-03 L11_spectral:8.7584e-03 L12_spectral:8.7246e-03 ip_v_neg_g:6.1814e-03 cos_v_neg_g:2.0550e-03 v_norm:1.3916e+00 g_norm:2.1615e+00 hv_norm:5.1893e-01 cos_v_hv:1.1023e-02 hg_norm:7.9010e+01 cos_g_hg:4.7210e-01 v_par:8.7555e-05 v_perp:1.3916e+00 L1_cos_v_neg_g:9.1307e-03 L1_v_norm:1.1947e-01 L2_cos_v_neg_g:9.9666e-03 L2_v_norm:1.1386e-01 L3_cos_v_neg_g:9.0889e-03 L3_v_norm:1.1556e-01 L4_cos_v_neg_g:6.4067e-03 L4_v_norm:1.1854e-01 L5_cos_v_neg_g:5.9767e-03 L5_v_norm:1.2041e-01 L6_cos_v_neg_g:4.5928e-03 L6_v_norm:1.2092e-01 L7_cos_v_neg_g:5.4886e-03 L7_v_norm:1.2106e-01 L8_cos_v_neg_g:5.2012e-03 L8_v_norm:1.2113e-01 L9_cos_v_neg_g:4.9900e-03 L9_v_norm:1.2136e-01 L10_cos_v_neg_g:5.7477e-03 L10_v_norm:1.2127e-01 L11_cos_v_neg_g:6.2763e-03 L11_v_norm:1.2108e-01 L12_cos_v_neg_g:4.7174e-03 L12_v_norm:1.2084e-01 +step:5000 train loss:3.633399 +step:5001 train loss:3.500565 +step:5002 train loss:3.556461 +step:5003 train loss:3.550352 +step:5004 train loss:3.543809 +step:5005 train loss:3.539932 +step:5006 train loss:3.584277 +step:5007 train loss:3.584959 +step:5008 train loss:3.522641 +step:5009 train loss:3.568682 +step:5010 train loss:3.520507 +step:5011 train loss:3.547538 +step:5012 train loss:3.524450 +step:5013 train loss:3.624324 +step:5014 train loss:3.537124 +step:5015 train loss:3.613639 +step:5016 train loss:3.542450 +step:5017 train loss:3.589996 +step:5018 train loss:3.506213 +step:5019 train loss:3.541353 +step:5020 train loss:3.534605 +step:5021 train loss:3.549033 +step:5022 train loss:3.580639 +step:5023 train loss:3.553810 +step:5024 train loss:3.603565 +step:5025 train loss:3.489555 +step:5026 train loss:3.612968 +step:5027 train loss:3.547819 +step:5028 train loss:3.615024 +step:5029 train loss:3.509934 +step:5030 train loss:3.549735 +step:5031 train loss:3.536470 +step:5032 train loss:3.564349 +step:5033 train loss:3.548751 +step:5034 train loss:3.543478 +step:5035 train loss:3.627959 +step:5036 train loss:3.581352 +step:5037 train loss:3.527243 +step:5038 train loss:3.580317 +step:5039 train loss:3.593489 +step:5040 train loss:3.557076 +step:5041 train loss:3.571769 +step:5042 train loss:3.475780 +step:5043 train loss:3.615423 +step:5044 train loss:3.535833 +step:5045 train loss:3.584904 +step:5046 train loss:3.504885 +step:5047 train loss:3.580223 +step:5048 train loss:3.496413 +step:5049 train loss:3.632314 +step:5050 train loss:3.520655 +step:5051 train loss:3.566311 +step:5052 train loss:3.460331 +step:5053 train loss:3.646036 
+step:5054 train loss:3.531911 +step:5055 train loss:3.559944 +step:5056 train loss:3.591026 +step:5057 train loss:3.522933 +step:5058 train loss:3.554339 +step:5059 train loss:3.517667 +step:5060 train loss:3.563344 +step:5061 train loss:3.557779 +step:5062 train loss:3.527731 +step:5063 train loss:3.522731 +step:5064 train loss:3.531809 +step:5065 train loss:3.514064 +step:5066 train loss:3.579193 +step:5067 train loss:3.557420 +step:5068 train loss:3.542983 +step:5069 train loss:3.513955 +step:5070 train loss:3.543972 +step:5071 train loss:3.614830 +step:5072 train loss:3.504740 +step:5073 train loss:3.514492 +step:5074 train loss:3.462153 +step:5075 train loss:3.532248 +step:5076 train loss:3.461321 +step:5077 train loss:3.524132 +step:5078 train loss:3.533950 +step:5079 train loss:3.562060 +step:5080 train loss:3.541420 +step:5081 train loss:3.550226 +step:5082 train loss:3.540255 +step:5083 train loss:3.597229 +step:5084 train loss:3.577284 +step:5085 train loss:3.538147 +step:5086 train loss:3.614740 +step:5087 train loss:3.599536 +step:5088 train loss:3.518998 +step:5089 train loss:3.585205 +step:5090 train loss:3.532660 +step:5091 train loss:3.534168 +step:5092 train loss:3.631293 +step:5093 train loss:3.513346 +step:5094 train loss:3.511601 +step:5095 train loss:3.561901 +step:5096 train loss:3.532428 +step:5097 train loss:3.538084 +step:5098 train loss:3.543414 +step:5099 train loss:3.504673 +step:5100 train loss:3.519210 +step:5101 train loss:3.708335 +step:5102 train loss:3.556056 +step:5103 train loss:3.565653 +step:5104 train loss:3.615345 +step:5105 train loss:3.547689 +step:5106 train loss:3.507362 +step:5107 train loss:3.527610 +step:5108 train loss:3.519419 +step:5109 train loss:3.599871 +step:5110 train loss:3.510029 +step:5111 train loss:3.605588 +step:5112 train loss:3.512131 +step:5113 train loss:3.499671 +step:5114 train loss:3.539374 +step:5115 train loss:3.506430 +step:5116 train loss:3.561234 +step:5117 train loss:3.508596 +step:5118 train loss:3.532674 +step:5119 train loss:3.515592 +step:5120 train loss:3.559979 +step:5121 train loss:3.506704 +step:5122 train loss:3.518603 +step:5123 train loss:3.500937 +step:5124 train loss:3.467884 +step:5125 train loss:3.574364 +step:5126 train loss:3.559695 +step:5127 train loss:3.564025 +step:5128 train loss:3.576133 +step:5129 train loss:3.506761 +step:5130 train loss:3.516009 +step:5131 train loss:3.456962 +step:5132 train loss:3.573952 +step:5133 train loss:3.542219 +step:5134 train loss:3.544663 +step:5135 train loss:3.499979 +step:5136 train loss:3.566190 +step:5137 train loss:3.563256 +step:5138 train loss:3.544307 +step:5139 train loss:3.577226 +step:5140 train loss:3.554378 +step:5141 train loss:3.581806 +step:5142 train loss:3.534363 +step:5143 train loss:3.558075 +step:5144 train loss:3.553724 +step:5145 train loss:3.497173 +step:5146 train loss:3.493157 +step:5147 train loss:3.569156 +step:5148 train loss:3.492743 +step:5149 train loss:3.570724 +step:5150 train loss:3.545650 +step:5151 train loss:3.513586 +step:5152 train loss:3.557862 +step:5153 train loss:3.531355 +step:5154 train loss:3.544148 +step:5155 train loss:3.548638 +step:5156 train loss:3.527196 +step:5157 train loss:3.528043 +step:5158 train loss:3.549103 +step:5159 train loss:3.588269 +step:5160 train loss:3.655203 +step:5161 train loss:3.584087 +step:5162 train loss:3.600042 +step:5163 train loss:3.514946 +step:5164 train loss:3.580822 +step:5165 train loss:3.592716 +step:5166 train loss:3.529126 +step:5167 train loss:3.628220 +step:5168 train 
loss:3.546878 +step:5169 train loss:3.575418 +step:5170 train loss:3.556615 +step:5171 train loss:3.599532 +step:5172 train loss:3.520344 +step:5173 train loss:3.582607 +step:5174 train loss:3.516281 +step:5175 train loss:3.551615 +step:5176 train loss:3.539076 +step:5177 train loss:3.540582 +step:5178 train loss:3.602616 +step:5179 train loss:3.514277 +step:5180 train loss:3.596030 +step:5181 train loss:3.538383 +step:5182 train loss:3.599411 +step:5183 train loss:3.526678 +step:5184 train loss:3.510805 +step:5185 train loss:3.530113 +step:5186 train loss:3.590571 +step:5187 train loss:3.585052 +step:5188 train loss:3.516524 +step:5189 train loss:3.560695 +step:5190 train loss:3.541230 +step:5191 train loss:3.525082 +step:5192 train loss:3.507417 +step:5193 train loss:3.594108 +step:5194 train loss:3.544386 +step:5195 train loss:3.517405 +step:5196 train loss:3.585109 +step:5197 train loss:3.631026 +step:5198 train loss:3.544116 +step:5199 train loss:3.529459 +step:5200 train loss:3.557647 +step:5201 train loss:3.542325 +step:5202 train loss:3.552677 +step:5203 train loss:3.551468 +step:5204 train loss:3.525591 +step:5205 train loss:3.569311 +step:5206 train loss:3.504411 +step:5207 train loss:3.510127 +step:5208 train loss:3.569172 +step:5209 train loss:3.589407 +step:5210 train loss:3.490804 +step:5211 train loss:3.539343 +step:5212 train loss:3.553334 +step:5213 train loss:3.530202 +step:5214 train loss:3.576516 +step:5215 train loss:3.691532 +step:5216 train loss:3.540052 +step:5217 train loss:3.520206 +step:5218 train loss:3.524170 +step:5219 train loss:3.586265 +step:5220 train loss:3.505489 +step:5221 train loss:3.506844 +step:5222 train loss:3.588334 +step:5223 train loss:3.584925 +step:5224 train loss:3.480260 +step:5225 train loss:3.628821 +step:5226 train loss:3.542712 +step:5227 train loss:3.614712 +step:5228 train loss:3.585116 +step:5229 train loss:3.526544 +step:5230 train loss:3.538924 +step:5231 train loss:3.487202 +step:5232 train loss:3.611706 +step:5233 train loss:3.571097 +step:5234 train loss:3.574874 +step:5235 train loss:3.521218 +step:5236 train loss:3.599685 +step:5237 train loss:3.650510 +step:5238 train loss:3.551622 +step:5239 train loss:3.613541 +step:5240 train loss:3.496361 +step:5241 train loss:3.556804 +step:5242 train loss:3.526750 +step:5243 train loss:3.531736 +step:5244 train loss:3.531200 +step:5245 train loss:3.573417 +step:5246 train loss:3.617322 +step:5247 train loss:3.546344 +step:5248 train loss:3.517368 +step:5249 train loss:3.573202 +step:5250 validation loss:3.474311 +step:5250 train loss:3.545259 +step:5251 train loss:3.607451 +step:5252 train loss:3.497071 +step:5253 train loss:3.648869 +step:5254 train loss:3.523300 +step:5255 train loss:3.594226 +step:5256 train loss:3.507806 +step:5257 train loss:3.564167 +step:5258 train loss:3.560537 +step:5259 train loss:3.544870 +step:5260 train loss:3.543681 +step:5261 train loss:3.533114 +step:5262 train loss:3.574367 +step:5263 train loss:3.557307 +step:5264 train loss:3.511821 +step:5265 train loss:3.590919 +step:5266 train loss:3.508222 +step:5267 train loss:3.518802 +step:5268 train loss:3.499941 +step:5269 train loss:3.504324 +step:5270 train loss:3.554608 +step:5271 train loss:3.478903 +step:5272 train loss:3.575057 +step:5273 train loss:3.483340 +step:5274 train loss:3.532038 +step:5275 train loss:3.543138 +step:5276 train loss:3.668030 +step:5277 train loss:3.569706 +step:5278 train loss:3.516972 +step:5279 train loss:3.565204 +step:5280 train loss:3.539557 +step:5281 train loss:3.537886 
+step:5282 train loss:3.506661 +step:5283 train loss:3.510506 +step:5284 train loss:3.518363 +step:5285 train loss:3.582003 +step:5286 train loss:3.492870 +step:5287 train loss:3.592822 +step:5288 train loss:3.567597 +step:5289 train loss:3.538776 +step:5290 train loss:3.589204 +step:5291 train loss:3.544419 +step:5292 train loss:3.560300 +step:5293 train loss:3.531251 +step:5294 train loss:3.518349 +step:5295 train loss:3.528239 +step:5296 train loss:3.518423 +step:5297 train loss:3.538867 +step:5298 train loss:3.485183 +step:5299 train loss:3.575165 +step:5300 train loss:3.527642 +step:5301 train loss:3.596014 +step:5302 train loss:3.600666 +step:5303 train loss:3.462528 +step:5304 train loss:3.490287 +step:5305 train loss:3.475598 +step:5306 train loss:3.505615 +step:5307 train loss:3.511818 +step:5308 train loss:3.605698 +step:5309 train loss:3.552145 +step:5310 train loss:3.540218 +step:5311 train loss:3.605103 +step:5312 train loss:3.488616 +step:5313 train loss:3.579137 +step:5314 train loss:3.574918 +step:5315 train loss:3.533163 +step:5316 train loss:3.562761 +step:5317 train loss:3.580734 +step:5318 train loss:3.536626 +step:5319 train loss:3.561791 +step:5320 train loss:3.515619 +step:5321 train loss:3.637440 +step:5322 train loss:3.547045 +step:5323 train loss:3.549764 +step:5324 train loss:3.491996 +step:5325 train loss:3.575801 +step:5326 train loss:3.562241 +step:5327 train loss:3.454407 +step:5328 train loss:3.592290 +step:5329 train loss:3.555151 +step:5330 train loss:3.555482 +step:5331 train loss:3.606485 +step:5332 train loss:3.528756 +step:5333 train loss:3.591455 +step:5334 train loss:3.568337 +step:5335 train loss:3.629655 +step:5336 train loss:3.662624 +step:5337 train loss:3.494993 +step:5338 train loss:3.502411 +step:5339 train loss:3.527842 +step:5340 train loss:3.550496 +step:5341 train loss:3.563019 +step:5342 train loss:3.466298 +step:5343 train loss:3.626288 +step:5344 train loss:3.506062 +step:5345 train loss:3.511193 +step:5346 train loss:3.508579 +step:5347 train loss:3.534333 +step:5348 train loss:3.578793 +step:5349 train loss:3.512908 +step:5350 train loss:3.557425 +step:5351 train loss:3.627875 +step:5352 train loss:3.673688 +step:5353 train loss:3.578788 +step:5354 train loss:3.548115 +step:5355 train loss:3.517210 +step:5356 train loss:3.537077 +step:5357 train loss:3.521033 +step:5358 train loss:3.540207 +step:5359 train loss:3.552398 +step:5360 train loss:3.524574 +step:5361 train loss:3.527486 +step:5362 train loss:3.510073 +step:5363 train loss:3.507082 +step:5364 train loss:3.508124 +step:5365 train loss:3.544030 +step:5366 train loss:3.570766 +step:5367 train loss:3.499510 +step:5368 train loss:3.566163 +step:5369 train loss:3.584589 +step:5370 train loss:3.482411 +step:5371 train loss:3.533766 +step:5372 train loss:3.562136 +step:5373 train loss:3.597331 +step:5374 train loss:3.481898 +step:5375 train loss:3.526059 +step:5376 train loss:3.592666 +step:5377 train loss:3.531847 +step:5378 train loss:3.505752 +step:5379 train loss:3.506981 +step:5380 train loss:3.542712 +step:5381 train loss:3.582682 +step:5382 train loss:3.486539 +step:5383 train loss:3.567728 +step:5384 train loss:3.566330 +step:5385 train loss:3.563047 +step:5386 train loss:3.546071 +step:5387 train loss:3.554837 +step:5388 train loss:3.560111 +step:5389 train loss:3.494394 +step:5390 train loss:3.522777 +step:5391 train loss:3.463399 +step:5392 train loss:3.528018 +step:5393 train loss:3.517880 +step:5394 train loss:3.514884 +step:5395 train loss:3.587458 +step:5396 train 
loss:3.551327 +step:5397 train loss:3.573026 +step:5398 train loss:3.568294 +step:5399 train loss:3.601706 +step:5400 train loss:3.603876 +step:5401 train loss:3.563543 +step:5402 train loss:3.672151 +step:5403 train loss:3.579032 +step:5404 train loss:3.554348 +step:5405 train loss:3.623199 +step:5406 train loss:3.581424 +step:5407 train loss:3.512744 +step:5408 train loss:3.655899 +step:5409 train loss:3.497504 +step:5410 train loss:3.560101 +step:5411 train loss:3.546859 +step:5412 train loss:3.522632 +step:5413 train loss:3.572716 +step:5414 train loss:3.550859 +step:5415 train loss:3.529631 +step:5416 train loss:3.521585 +step:5417 train loss:3.592700 +step:5418 train loss:3.604584 +step:5419 train loss:3.510928 +step:5420 train loss:3.570652 +step:5421 train loss:3.539640 +step:5422 train loss:3.586353 +step:5423 train loss:3.561577 +step:5424 train loss:3.465377 +step:5425 train loss:3.529556 +step:5426 train loss:3.620035 +step:5427 train loss:3.510898 +step:5428 train loss:3.546181 +step:5429 train loss:3.487587 +step:5430 train loss:3.516758 +step:5431 train loss:3.576581 +step:5432 train loss:3.555089 +step:5433 train loss:3.559179 +step:5434 train loss:3.509260 +step:5435 train loss:3.506218 +step:5436 train loss:3.507948 +step:5437 train loss:3.548843 +step:5438 train loss:3.525654 +step:5439 train loss:3.532654 +step:5440 train loss:3.575951 +step:5441 train loss:3.599965 +step:5442 train loss:3.516800 +step:5443 train loss:3.518094 +step:5444 train loss:3.459785 +step:5445 train loss:3.549589 +step:5446 train loss:3.517897 +step:5447 train loss:3.555906 +step:5448 train loss:3.611326 +step:5449 train loss:3.502148 +step:5450 train loss:3.536242 +step:5451 train loss:3.530287 +step:5452 train loss:3.544551 +step:5453 train loss:3.600259 +step:5454 train loss:3.525191 +step:5455 train loss:3.510469 +step:5456 train loss:3.652511 +step:5457 train loss:3.533791 +step:5458 train loss:3.564592 +step:5459 train loss:3.510545 +step:5460 train loss:3.523794 +step:5461 train loss:3.530638 +step:5462 train loss:3.530880 +step:5463 train loss:3.541809 +step:5464 train loss:3.544576 +step:5465 train loss:3.488070 +step:5466 train loss:3.562403 +step:5467 train loss:3.545462 +step:5468 train loss:3.553063 +step:5469 train loss:3.646527 +step:5470 train loss:3.540755 +step:5471 train loss:3.614519 +step:5472 train loss:3.559839 +step:5473 train loss:3.460905 +step:5474 train loss:3.798559 +step:5475 train loss:3.474521 +step:5476 train loss:3.553847 +step:5477 train loss:3.550658 +step:5478 train loss:3.552175 +step:5479 train loss:3.691836 +step:5480 train loss:3.536973 +step:5481 train loss:3.599555 +step:5482 train loss:3.513441 +step:5483 train loss:3.551290 +step:5484 train loss:3.590099 +step:5485 train loss:3.506461 +step:5486 train loss:3.554490 +step:5487 train loss:3.551705 +step:5488 train loss:3.467663 +step:5489 train loss:3.568833 +step:5490 train loss:3.516590 +step:5491 train loss:3.620648 +step:5492 train loss:3.546038 +step:5493 train loss:3.476058 +step:5494 train loss:3.530374 +step:5495 train loss:3.507601 +step:5496 train loss:3.505705 +step:5497 train loss:3.623044 +step:5498 train loss:3.491541 +step:5499 train loss:3.631819 +step:5500 validation loss:3.467008 total_sharp:3.7470e-03 L1_sharp:1.5686e-02 L2_sharp:5.3890e-03 L3_sharp:8.1756e-03 L4_sharp:4.0866e-03 L5_sharp:4.6833e-03 L6_sharp:6.2388e-03 L7_sharp:7.0715e-03 L8_sharp:7.1532e-03 L9_sharp:4.6038e-03 L10_sharp:3.4809e-03 L11_sharp:3.2410e-03 L12_sharp:6.2869e-03 total_fnorm:1.3938e+00 
total_l1_linf:9.5162e+03 total_spectral:1.3938e+00 L1_fnorm:1.1946e-01 L2_fnorm:1.1495e-01 L3_fnorm:1.1585e-01 L4_fnorm:1.1847e-01 L5_fnorm:1.2032e-01 L6_fnorm:1.2086e-01 L7_fnorm:1.2109e-01 L8_fnorm:1.2106e-01 L9_fnorm:1.2120e-01 L10_fnorm:1.2139e-01 L11_fnorm:1.2115e-01 L12_fnorm:1.2074e-01 L1_l1linf:3.4339e-01 L2_l1linf:3.9040e-01 L3_l1linf:3.9317e-01 L4_l1linf:3.8653e-01 L5_l1linf:3.7755e-01 L6_l1linf:3.2633e-01 L7_l1linf:3.5452e-01 L8_l1linf:3.4072e-01 L9_l1linf:3.5725e-01 L10_l1linf:3.8034e-01 L11_l1linf:3.9667e-01 L12_l1linf:3.8583e-01 L1_spectral:7.7045e-03 L2_spectral:8.7885e-03 L3_spectral:8.7983e-03 L4_spectral:8.6991e-03 L5_spectral:8.3980e-03 L6_spectral:7.3670e-03 L7_spectral:7.9238e-03 L8_spectral:7.7109e-03 L9_spectral:8.0777e-03 L10_spectral:8.5406e-03 L11_spectral:9.0224e-03 L12_spectral:8.6973e-03 ip_v_neg_g:9.8337e-04 cos_v_neg_g:2.3464e-04 v_norm:1.3938e+00 g_norm:3.0070e+00 hv_norm:5.2313e-01 cos_v_hv:9.9830e-03 hg_norm:1.8155e+02 cos_g_hg:6.5617e-01 v_par:5.5670e-05 v_perp:1.3938e+00 L1_cos_v_neg_g:3.5753e-03 L1_v_norm:1.1946e-01 L2_cos_v_neg_g:2.6712e-03 L2_v_norm:1.1495e-01 L3_cos_v_neg_g:1.6110e-03 L3_v_norm:1.1585e-01 L4_cos_v_neg_g:4.9614e-05 L4_v_norm:1.1847e-01 L5_cos_v_neg_g:8.8507e-05 L5_v_norm:1.2032e-01 L6_cos_v_neg_g:3.7229e-04 L6_v_norm:1.2086e-01 L7_cos_v_neg_g:-3.7521e-04 L7_v_norm:1.2109e-01 L8_cos_v_neg_g:1.7225e-04 L8_v_norm:1.2106e-01 L9_cos_v_neg_g:1.1771e-03 L9_v_norm:1.2120e-01 L10_cos_v_neg_g:2.5737e-03 L10_v_norm:1.2139e-01 L11_cos_v_neg_g:1.7806e-03 L11_v_norm:1.2115e-01 L12_cos_v_neg_g:-1.4954e-04 L12_v_norm:1.2074e-01 +step:5500 train loss:3.541405 +step:5501 train loss:3.621597 +step:5502 train loss:3.564365 +step:5503 train loss:3.529745 +step:5504 train loss:3.577753 +step:5505 train loss:3.538661 +step:5506 train loss:3.585324 +step:5507 train loss:3.564289 +step:5508 train loss:3.592820 +step:5509 train loss:3.605678 +step:5510 train loss:3.572379 +step:5511 train loss:3.567904 +step:5512 train loss:3.692893 +step:5513 train loss:3.490149 +step:5514 train loss:3.551736 +step:5515 train loss:3.577562 +step:5516 train loss:3.602734 +step:5517 train loss:3.560351 +step:5518 train loss:3.588856 +step:5519 train loss:3.624266 +step:5520 train loss:3.530249 +step:5521 train loss:3.539792 +step:5522 train loss:3.509684 +step:5523 train loss:3.557033 +step:5524 train loss:3.599743 +step:5525 train loss:3.507684 +step:5526 train loss:3.520254 +step:5527 train loss:3.547571 +step:5528 train loss:3.649791 +step:5529 train loss:3.610396 +step:5530 train loss:3.578453 +step:5531 train loss:3.513619 +step:5532 train loss:3.541993 +step:5533 train loss:3.572526 +step:5534 train loss:3.491986 +step:5535 train loss:3.541738 +step:5536 train loss:3.481211 +step:5537 train loss:3.525705 +step:5538 train loss:3.522950 +step:5539 train loss:3.465135 +step:5540 train loss:3.690392 +step:5541 train loss:3.497346 +step:5542 train loss:3.549175 +step:5543 train loss:3.540609 +step:5544 train loss:3.524599 +step:5545 train loss:3.518732 +step:5546 train loss:3.552399 +step:5547 train loss:3.486803 +step:5548 train loss:3.530602 +step:5549 train loss:3.532462 +step:5550 train loss:3.557201 +step:5551 train loss:3.559982 +step:5552 train loss:3.517504 +step:5553 train loss:3.548918 +step:5554 train loss:3.518091 +step:5555 train loss:3.525329 +step:5556 train loss:3.543809 +step:5557 train loss:3.606323 +step:5558 train loss:3.526847 +step:5559 train loss:3.536463 +step:5560 train loss:3.528685 +step:5561 train loss:3.563856 +step:5562 train loss:3.515954 
+step:5563 train loss:3.500222 +step:5564 train loss:3.535814 +step:5565 train loss:3.599963 +step:5566 train loss:3.501309 +step:5567 train loss:3.623956 +step:5568 train loss:3.742547 +step:5569 train loss:3.528919 +step:5570 train loss:3.462442 +step:5571 train loss:3.549925 +step:5572 train loss:3.488743 +step:5573 train loss:3.479098 +step:5574 train loss:3.445999 +step:5575 train loss:3.545404 +step:5576 train loss:3.528634 +step:5577 train loss:3.534196 +step:5578 train loss:3.564815 +step:5579 train loss:3.521266 +step:5580 train loss:3.542912 +step:5581 train loss:3.564975 +step:5582 train loss:3.542543 +step:5583 train loss:3.555433 +step:5584 train loss:3.674982 +step:5585 train loss:3.577579 +step:5586 train loss:3.514301 +step:5587 train loss:3.544794 +step:5588 train loss:3.562748 +step:5589 train loss:3.561476 +step:5590 train loss:3.618885 +step:5591 train loss:3.488051 +step:5592 train loss:3.669600 +step:5593 train loss:3.537945 +step:5594 train loss:3.546346 +step:5595 train loss:3.538014 +step:5596 train loss:3.485970 +step:5597 train loss:3.503635 +step:5598 train loss:3.512506 +step:5599 train loss:3.511416 +step:5600 train loss:3.559903 +step:5601 train loss:3.582693 +step:5602 train loss:3.517353 +step:5603 train loss:3.559683 +step:5604 train loss:3.554528 +step:5605 train loss:3.526054 +step:5606 train loss:3.532222 +step:5607 train loss:3.558876 +step:5608 train loss:3.505485 +step:5609 train loss:3.554765 +step:5610 train loss:3.513350 +step:5611 train loss:3.553555 +step:5612 train loss:3.581139 +step:5613 train loss:3.542783 +step:5614 train loss:3.507333 +step:5615 train loss:3.607553 +step:5616 train loss:3.507421 +step:5617 train loss:3.595593 +step:5618 train loss:3.578892 +step:5619 train loss:3.533682 +step:5620 train loss:3.534965 +step:5621 train loss:3.612335 +step:5622 train loss:3.493650 +step:5623 train loss:3.531250 +step:5624 train loss:3.519839 +step:5625 train loss:3.554834 +step:5626 train loss:3.548028 +step:5627 train loss:3.519736 +step:5628 train loss:3.562622 +step:5629 train loss:3.542120 +step:5630 train loss:3.471932 +step:5631 train loss:3.513457 +step:5632 train loss:3.556695 +step:5633 train loss:3.546190 +step:5634 train loss:3.503568 +step:5635 train loss:3.539460 +step:5636 train loss:3.518280 +step:5637 train loss:3.659783 +step:5638 train loss:3.565134 +step:5639 train loss:3.547103 +step:5640 train loss:3.547416 +step:5641 train loss:3.589931 +step:5642 train loss:3.521544 +step:5643 train loss:3.540647 +step:5644 train loss:3.619870 +step:5645 train loss:3.577878 +step:5646 train loss:3.575848 +step:5647 train loss:3.563965 +step:5648 train loss:3.555493 +step:5649 train loss:3.468701 +step:5650 train loss:3.474428 +step:5651 train loss:3.547011 +step:5652 train loss:3.547863 +step:5653 train loss:3.517364 +step:5654 train loss:3.645395 +step:5655 train loss:3.506415 +step:5656 train loss:3.531764 +step:5657 train loss:3.597651 +step:5658 train loss:3.500593 +step:5659 train loss:3.539485 +step:5660 train loss:3.586417 +step:5661 train loss:3.529221 +step:5662 train loss:3.570174 +step:5663 train loss:3.456128 +step:5664 train loss:3.428795 +step:5665 train loss:3.551797 +step:5666 train loss:3.555445 +step:5667 train loss:3.589503 +step:5668 train loss:3.521653 +step:5669 train loss:3.533254 +step:5670 train loss:3.534765 +step:5671 train loss:3.516963 +step:5672 train loss:3.569780 +step:5673 train loss:3.536124 +step:5674 train loss:3.606322 +step:5675 train loss:3.523155 +step:5676 train loss:3.669728 +step:5677 train 
loss:3.563271 +step:5678 train loss:3.544173 +step:5679 train loss:3.530182 +step:5680 train loss:3.565815 +step:5681 train loss:3.533560 +step:5682 train loss:3.543470 +step:5683 train loss:3.503365 +step:5684 train loss:3.515003 +step:5685 train loss:3.559847 +step:5686 train loss:3.575012 +step:5687 train loss:3.522630 +step:5688 train loss:3.610738 +step:5689 train loss:3.515977 +step:5690 train loss:3.665742 +step:5691 train loss:3.499086 +step:5692 train loss:3.490452 +step:5693 train loss:3.491871 +step:5694 train loss:3.513055 +step:5695 train loss:3.530804 +step:5696 train loss:3.580038 +step:5697 train loss:3.506674 +step:5698 train loss:3.524064 +step:5699 train loss:3.538494 +step:5700 train loss:3.535551 +step:5701 train loss:3.530220 +step:5702 train loss:3.597414 +step:5703 train loss:3.496032 +step:5704 train loss:3.540991 +step:5705 train loss:3.547604 +step:5706 train loss:3.572554 +step:5707 train loss:3.489276 +step:5708 train loss:3.575984 +step:5709 train loss:3.578138 +step:5710 train loss:3.570615 +step:5711 train loss:3.588009 +step:5712 train loss:3.574355 +step:5713 train loss:3.498882 +step:5714 train loss:3.580302 +step:5715 train loss:3.539008 +step:5716 train loss:3.545533 +step:5717 train loss:3.568842 +step:5718 train loss:3.513967 +step:5719 train loss:3.588105 +step:5720 train loss:3.556867 +step:5721 train loss:3.489323 +step:5722 train loss:3.501970 +step:5723 train loss:3.578911 +step:5724 train loss:3.500200 +step:5725 train loss:3.571430 +step:5726 train loss:3.565638 +step:5727 train loss:3.523505 +step:5728 train loss:3.524157 +step:5729 train loss:3.523396 +step:5730 train loss:3.597927 +step:5731 train loss:3.464553 +step:5732 train loss:3.525073 +step:5733 train loss:3.517395 +step:5734 train loss:3.530544 +step:5735 train loss:3.523510 +step:5736 train loss:3.527032 +step:5737 train loss:3.546813 +step:5738 train loss:3.512146 +step:5739 train loss:3.523726 +step:5740 train loss:3.563277 +step:5741 train loss:3.540091 +step:5742 train loss:3.591268 +step:5743 train loss:3.558290 +step:5744 train loss:3.517039 +step:5745 train loss:3.517780 +step:5746 train loss:3.548485 +step:5747 train loss:3.537589 +step:5748 train loss:3.581232 +step:5749 train loss:3.537402 +step:5750 validation loss:3.460303 +step:5750 train loss:3.544897 +step:5751 train loss:3.558264 +step:5752 train loss:3.546587 +step:5753 train loss:3.516258 +step:5754 train loss:3.522922 +step:5755 train loss:3.539256 +step:5756 train loss:3.527609 +step:5757 train loss:3.589941 +step:5758 train loss:3.524356 +step:5759 train loss:3.487714 +step:5760 train loss:3.569048 +step:5761 train loss:3.562555 +step:5762 train loss:3.523362 +step:5763 train loss:3.551091 +step:5764 train loss:3.511584 +step:5765 train loss:3.630821 +step:5766 train loss:3.539180 +step:5767 train loss:3.575652 +step:5768 train loss:3.512370 +step:5769 train loss:3.632488 +step:5770 train loss:3.555131 +step:5771 train loss:3.583894 +step:5772 train loss:3.531649 +step:5773 train loss:3.515172 +step:5774 train loss:3.520920 +step:5775 train loss:3.592320 +step:5776 train loss:3.575454 +step:5777 train loss:3.496876 +step:5778 train loss:3.584055 +step:5779 train loss:3.538380 +step:5780 train loss:3.513139 +step:5781 train loss:3.576337 +step:5782 train loss:3.535664 +step:5783 train loss:3.494213 +step:5784 train loss:3.601047 +step:5785 train loss:3.587036 +step:5786 train loss:3.500995 +step:5787 train loss:3.547318 +step:5788 train loss:3.558237 +step:5789 train loss:3.500640 +step:5790 train loss:3.602660 
+step:5791 train loss:3.526882 +step:5792 train loss:3.804969 +step:5793 train loss:3.574139 +step:5794 train loss:3.593019 +step:5795 train loss:3.583229 +step:5796 train loss:3.564523 +step:5797 train loss:3.545609 +step:5798 train loss:3.544198 +step:5799 train loss:3.517156 +step:5800 train loss:3.671654 +step:5801 train loss:3.551026 +step:5802 train loss:3.535786 +step:5803 train loss:3.548509 +step:5804 train loss:3.568528 +step:5805 train loss:3.530437 +step:5806 train loss:3.568529 +step:5807 train loss:3.490796 +step:5808 train loss:3.523385 +step:5809 train loss:3.535987 +step:5810 train loss:3.508132 +step:5811 train loss:3.524130 +step:5812 train loss:3.502944 +step:5813 train loss:3.514936 +step:5814 train loss:3.509919 +step:5815 train loss:3.516483 +step:5816 train loss:3.573406 +step:5817 train loss:3.587739 +step:5818 train loss:3.558053 +step:5819 train loss:3.609133 +step:5820 train loss:3.549318 +step:5821 train loss:3.543712 +step:5822 train loss:3.559846 +step:5823 train loss:3.563580 +step:5824 train loss:3.514122 +step:5825 train loss:3.607358 +step:5826 train loss:3.520092 +step:5827 train loss:3.486609 +step:5828 train loss:3.475070 +step:5829 train loss:3.536727 +step:5830 train loss:3.509427 +step:5831 train loss:3.482330 +step:5832 train loss:3.595990 +step:5833 train loss:3.573619 +step:5834 train loss:3.558123 +step:5835 train loss:3.510659 +step:5836 train loss:3.473572 +step:5837 train loss:3.596290 +step:5838 train loss:3.573508 +step:5839 train loss:3.551516 +step:5840 train loss:3.632726 +step:5841 train loss:3.552924 +step:5842 train loss:3.570885 +step:5843 train loss:3.510017 +step:5844 train loss:3.581111 +step:5845 train loss:3.491684 +step:5846 train loss:3.541211 +step:5847 train loss:3.566234 +step:5848 train loss:3.632066 +step:5849 train loss:3.529342 +step:5850 train loss:3.557641 +step:5851 train loss:3.519283 +step:5852 train loss:3.611811 +step:5853 train loss:3.703442 +step:5854 train loss:3.490448 +step:5855 train loss:3.552311 +step:5856 train loss:3.523400 +step:5857 train loss:3.533652 +step:5858 train loss:3.508275 +step:5859 train loss:3.515639 +step:5860 train loss:3.616865 +step:5861 train loss:3.500415 +step:5862 train loss:3.613525 +step:5863 train loss:3.552081 +step:5864 train loss:3.539652 +step:5865 train loss:3.543404 +step:5866 train loss:3.535989 +step:5867 train loss:3.621315 +step:5868 train loss:3.540471 +step:5869 train loss:3.566287 +step:5870 train loss:3.543571 +step:5871 train loss:3.523362 +step:5872 train loss:3.552005 +step:5873 train loss:3.532427 +step:5874 train loss:3.612174 +step:5875 train loss:3.540001 +step:5876 train loss:3.523133 +step:5877 train loss:3.531188 +step:5878 train loss:3.530540 +step:5879 train loss:3.502224 +step:5880 train loss:3.702762 +step:5881 train loss:3.535364 +step:5882 train loss:3.509844 +step:5883 train loss:3.508024 +step:5884 train loss:3.528133 +step:5885 train loss:3.526684 +step:5886 train loss:3.545862 +step:5887 train loss:3.545535 +step:5888 train loss:3.523916 +step:5889 train loss:3.503910 +step:5890 train loss:3.550470 +step:5891 train loss:3.498114 +step:5892 train loss:3.578359 +step:5893 train loss:3.501453 +step:5894 train loss:3.488251 +step:5895 train loss:3.498476 +step:5896 train loss:3.503599 +step:5897 train loss:3.575288 +step:5898 train loss:3.792857 +step:5899 train loss:3.522931 +step:5900 train loss:3.572924 +step:5901 train loss:3.526681 +step:5902 train loss:3.534003 +step:5903 train loss:3.524826 +step:5904 train loss:3.557358 +step:5905 train 
loss:3.662364 +step:5906 train loss:3.601929 +step:5907 train loss:3.546059 +step:5908 train loss:3.521841 +step:5909 train loss:3.517142 +step:5910 train loss:3.503009 +step:5911 train loss:3.520914 +step:5912 train loss:3.545904 +step:5913 train loss:3.558646 +step:5914 train loss:3.538033 +step:5915 train loss:3.662639 +step:5916 train loss:3.542258 +step:5917 train loss:3.516211 +step:5918 train loss:3.513945 +step:5919 train loss:3.540053 +step:5920 train loss:3.538520 +step:5921 train loss:3.508155 +step:5922 train loss:3.564070 +step:5923 train loss:3.559203 +step:5924 train loss:3.517529 +step:5925 train loss:3.639274 +step:5926 train loss:3.520951 +step:5927 train loss:3.500829 +step:5928 train loss:3.539123 +step:5929 train loss:3.556111 +step:5930 train loss:3.507181 +step:5931 train loss:3.491271 +step:5932 train loss:3.531879 +step:5933 train loss:3.586413 +step:5934 train loss:3.498958 +step:5935 train loss:3.527576 +step:5936 train loss:3.515499 +step:5937 train loss:3.495372 +step:5938 train loss:3.513032 +step:5939 train loss:3.489889 +step:5940 train loss:3.572908 +step:5941 train loss:3.509431 +step:5942 train loss:3.523837 +step:5943 train loss:3.529222 +step:5944 train loss:3.582436 +step:5945 train loss:3.515162 +step:5946 train loss:3.495539 +step:5947 train loss:3.508651 +step:5948 train loss:3.546875 +step:5949 train loss:3.590494 +step:5950 train loss:3.550703 +step:5951 train loss:3.552708 +step:5952 train loss:3.475570 +step:5953 train loss:3.515487 +step:5954 train loss:3.528200 +step:5955 train loss:3.535720 +step:5956 train loss:3.511724 +step:5957 train loss:3.479037 +step:5958 train loss:3.553875 +step:5959 train loss:3.511956 +step:5960 train loss:3.485286 +step:5961 train loss:3.514249 +step:5962 train loss:3.544671 +step:5963 train loss:3.578096 +step:5964 train loss:3.536129 +step:5965 train loss:3.553133 +step:5966 train loss:3.546116 +step:5967 train loss:3.514613 +step:5968 train loss:3.588912 +step:5969 train loss:3.527039 +step:5970 train loss:3.545401 +step:5971 train loss:3.496337 +step:5972 train loss:3.524693 +step:5973 train loss:3.514157 +step:5974 train loss:3.537633 +step:5975 train loss:3.508294 +step:5976 train loss:3.549865 +step:5977 train loss:3.506140 +step:5978 train loss:3.491225 +step:5979 train loss:3.526835 +step:5980 train loss:3.598623 +step:5981 train loss:3.490513 +step:5982 train loss:3.503872 +step:5983 train loss:3.569297 +step:5984 train loss:3.509826 +step:5985 train loss:3.555755 +step:5986 train loss:3.531651 +step:5987 train loss:3.516354 +step:5988 train loss:3.524129 +step:5989 train loss:3.544297 +step:5990 train loss:3.474734 +step:5991 train loss:3.536790 +step:5992 train loss:3.570593 +step:5993 train loss:3.520823 +step:5994 train loss:3.541797 +step:5995 train loss:3.432139 +step:5996 train loss:3.598642 +step:5997 train loss:3.578048 +step:5998 train loss:3.456282 +step:5999 train loss:3.483740 +step:6000 validation loss:3.449323 total_sharp:4.4555e-03 L1_sharp:2.8365e-02 L2_sharp:1.1699e-02 L3_sharp:9.5401e-03 L4_sharp:4.4562e-03 L5_sharp:5.3896e-03 L6_sharp:6.4358e-03 L7_sharp:8.7017e-03 L8_sharp:7.2602e-03 L9_sharp:5.1337e-03 L10_sharp:3.8579e-03 L11_sharp:4.3318e-03 L12_sharp:8.9144e-03 total_fnorm:1.3919e+00 total_l1_linf:9.5031e+03 total_spectral:1.3919e+00 L1_fnorm:1.1953e-01 L2_fnorm:1.1469e-01 L3_fnorm:1.1544e-01 L4_fnorm:1.1875e-01 L5_fnorm:1.2053e-01 L6_fnorm:1.2104e-01 L7_fnorm:1.2120e-01 L8_fnorm:1.2122e-01 L9_fnorm:1.2144e-01 L10_fnorm:1.2151e-01 L11_fnorm:1.2109e-01 L12_fnorm:1.2094e-01 
L1_l1linf:3.3639e-01 L2_l1linf:4.0137e-01 L3_l1linf:3.8658e-01 L4_l1linf:3.8532e-01 L5_l1linf:3.6276e-01 L6_l1linf:3.1226e-01 L7_l1linf:3.0805e-01 L8_l1linf:3.1981e-01 L9_l1linf:3.3992e-01 L10_l1linf:3.7462e-01 L11_l1linf:3.9069e-01 L12_l1linf:4.0736e-01 L1_spectral:7.5720e-03 L2_spectral:9.1013e-03 L3_spectral:8.6593e-03 L4_spectral:8.6255e-03 L5_spectral:8.2425e-03 L6_spectral:6.9896e-03 L7_spectral:6.9929e-03 L8_spectral:7.2268e-03 L9_spectral:7.6671e-03 L10_spectral:8.3702e-03 L11_spectral:8.7867e-03 L12_spectral:9.1908e-03 ip_v_neg_g:4.3442e-03 cos_v_neg_g:1.6121e-03 v_norm:1.3919e+00 g_norm:1.9359e+00 hv_norm:4.9799e-01 cos_v_hv:1.2454e-02 hg_norm:3.8535e+01 cos_g_hg:4.6896e-01 v_par:6.1014e-05 v_perp:1.3919e+00 L1_cos_v_neg_g:8.7812e-03 L1_v_norm:1.1953e-01 L2_cos_v_neg_g:9.2054e-03 L2_v_norm:1.1469e-01 L3_cos_v_neg_g:5.8636e-03 L3_v_norm:1.1544e-01 L4_cos_v_neg_g:4.2647e-03 L4_v_norm:1.1875e-01 L5_cos_v_neg_g:4.2125e-03 L5_v_norm:1.2053e-01 L6_cos_v_neg_g:4.3760e-03 L6_v_norm:1.2104e-01 L7_cos_v_neg_g:4.2208e-03 L7_v_norm:1.2120e-01 L8_cos_v_neg_g:4.6895e-03 L8_v_norm:1.2122e-01 L9_cos_v_neg_g:4.8242e-03 L9_v_norm:1.2144e-01 L10_cos_v_neg_g:3.4307e-03 L10_v_norm:1.2151e-01 L11_cos_v_neg_g:3.1750e-03 L11_v_norm:1.2109e-01 L12_cos_v_neg_g:3.1044e-03 L12_v_norm:1.2094e-01 +step:6000 train loss:3.532308 +step:6001 train loss:3.498331 +step:6002 train loss:3.526855 +step:6003 train loss:3.547314 +step:6004 train loss:3.500979 +step:6005 train loss:3.572028 +step:6006 train loss:3.477443 +step:6007 train loss:3.497895 +step:6008 train loss:3.514191 +step:6009 train loss:3.549742 +step:6010 train loss:3.539215 +step:6011 train loss:3.528728 +step:6012 train loss:3.495535 +step:6013 train loss:3.556732 +step:6014 train loss:3.573497 +step:6015 train loss:3.572772 +step:6016 train loss:3.539037 +step:6017 train loss:3.550764 +step:6018 train loss:3.486936 +step:6019 train loss:3.525119 +step:6020 train loss:3.512077 +step:6021 train loss:3.439661 +step:6022 train loss:3.554146 +step:6023 train loss:3.487251 +step:6024 train loss:3.565283 +step:6025 train loss:3.531528 +step:6026 train loss:3.503088 +step:6027 train loss:3.540774 +step:6028 train loss:3.457898 +step:6029 train loss:3.576808 +step:6030 train loss:3.540816 +step:6031 train loss:3.519412 +step:6032 train loss:3.478835 +step:6033 train loss:3.529459 +step:6034 train loss:3.558382 +step:6035 train loss:3.474057 +step:6036 train loss:3.448794 +step:6037 train loss:3.563156 +step:6038 train loss:3.566709 +step:6039 train loss:3.553417 +step:6040 train loss:3.511277 +step:6041 train loss:3.489258 +step:6042 train loss:3.466981 +step:6043 train loss:3.531818 +step:6044 train loss:3.647230 +step:6045 train loss:3.491323 +step:6046 train loss:3.502456 +step:6047 train loss:3.536160 +step:6048 train loss:3.549732 +step:6049 train loss:3.524599 +step:6050 train loss:3.495470 +step:6051 train loss:3.546231 +step:6052 train loss:3.518117 +step:6053 train loss:3.636894 +step:6054 train loss:3.676722 +step:6055 train loss:3.488581 +step:6056 train loss:3.482045 +step:6057 train loss:3.515094 +step:6058 train loss:3.544995 +step:6059 train loss:3.546369 +step:6060 train loss:3.553386 +step:6061 train loss:3.566853 +step:6062 train loss:3.522758 +step:6063 train loss:3.533350 +step:6064 train loss:3.529670 +step:6065 train loss:3.532996 +step:6066 train loss:3.517521 +step:6067 train loss:3.557808 +step:6068 train loss:3.499743 +step:6069 train loss:3.456146 +step:6070 train loss:3.608041 +step:6071 train loss:3.547864 +step:6072 train 
loss:3.488568 +step:6073 train loss:3.530897 +step:6074 train loss:3.612879 +step:6075 train loss:3.531948 +step:6076 train loss:3.544950 +step:6077 train loss:3.543423 +step:6078 train loss:3.480462 +step:6079 train loss:3.510286 +step:6080 train loss:3.515738 +step:6081 train loss:3.553980 +step:6082 train loss:3.502139 +step:6083 train loss:3.517842 +step:6084 train loss:3.581574 +step:6085 train loss:3.577452 +step:6086 train loss:3.478381 +step:6087 train loss:3.521095 +step:6088 train loss:3.507493 +step:6089 train loss:3.564146 +step:6090 train loss:3.567622 +step:6091 train loss:3.516604 +step:6092 train loss:3.476099 +step:6093 train loss:3.539926 +step:6094 train loss:3.453479 +step:6095 train loss:3.621755 +step:6096 train loss:3.490089 +step:6097 train loss:3.567210 +step:6098 train loss:3.539902 +step:6099 train loss:3.598754 +step:6100 train loss:3.592789 +step:6101 train loss:3.526128 +step:6102 train loss:3.644526 +step:6103 train loss:3.526287 +step:6104 train loss:3.641062 +step:6105 train loss:3.573803 +step:6106 train loss:3.512460 +step:6107 train loss:3.574084 +step:6108 train loss:3.537640 +step:6109 train loss:3.609564 +step:6110 train loss:3.540060 +step:6111 train loss:3.576716 +step:6112 train loss:3.515405 +step:6113 train loss:3.541683 +step:6114 train loss:3.514005 +step:6115 train loss:3.572426 +step:6116 train loss:3.515821 +step:6117 train loss:3.569891 +step:6118 train loss:3.551558 +step:6119 train loss:3.560585 +step:6120 train loss:3.706597 +step:6121 train loss:3.541459 +step:6122 train loss:3.550417 +step:6123 train loss:3.531258 +step:6124 train loss:3.507325 +step:6125 train loss:3.498420 +step:6126 train loss:3.515852 +step:6127 train loss:3.507932 +step:6128 train loss:3.484666 +step:6129 train loss:3.701652 +step:6130 train loss:3.487662 +step:6131 train loss:3.467368 +step:6132 train loss:3.540574 +step:6133 train loss:3.504147 +step:6134 train loss:3.533209 +step:6135 train loss:3.615513 +step:6136 train loss:3.635920 +step:6137 train loss:3.498520 +step:6138 train loss:3.550986 +step:6139 train loss:3.532576 +step:6140 train loss:3.533213 +step:6141 train loss:3.490411 +step:6142 train loss:3.555400 +step:6143 train loss:3.523251 +step:6144 train loss:3.542615 +step:6145 train loss:3.790726 +step:6146 train loss:3.625920 +step:6147 train loss:3.712523 +step:6148 train loss:3.477077 +step:6149 train loss:3.605656 +step:6150 train loss:3.556112 +step:6151 train loss:3.511972 +step:6152 train loss:3.508432 +step:6153 train loss:3.577392 +step:6154 train loss:3.660181 +step:6155 train loss:3.526904 +step:6156 train loss:3.625291 +step:6157 train loss:3.552478 +step:6158 train loss:3.541740 +step:6159 train loss:3.509531 +step:6160 train loss:3.676278 +step:6161 train loss:3.526608 +step:6162 train loss:3.545639 +step:6163 train loss:3.577391 +step:6164 train loss:3.492193 +step:6165 train loss:3.558198 +step:6166 train loss:3.553004 +step:6167 train loss:3.568633 +step:6168 train loss:3.545697 +step:6169 train loss:3.537455 +step:6170 train loss:3.541804 +step:6171 train loss:3.510695 +step:6172 train loss:3.500269 +step:6173 train loss:3.548330 +step:6174 train loss:3.477917 +step:6175 train loss:3.488639 +step:6176 train loss:3.474286 +step:6177 train loss:3.567889 +step:6178 train loss:3.514871 +step:6179 train loss:3.520998 +step:6180 train loss:3.529446 +step:6181 train loss:3.564275 +step:6182 train loss:3.444476 +step:6183 train loss:3.454868 +step:6184 train loss:3.570062 +step:6185 train loss:3.526932 +step:6186 train loss:3.486791 
+step:6187 train loss:3.529525 +step:6188 train loss:3.496663 +step:6189 train loss:3.537601 +step:6190 train loss:3.496956 +step:6191 train loss:3.528660 +step:6192 train loss:3.497772 +step:6193 train loss:3.562089 +step:6194 train loss:3.554194 +step:6195 train loss:3.536621 +step:6196 train loss:3.549211 +step:6197 train loss:3.572087 +step:6198 train loss:3.485098 +step:6199 train loss:3.509548 +step:6200 train loss:3.551489 +step:6201 train loss:3.592152 +step:6202 train loss:3.594242 +step:6203 train loss:3.593066 +step:6204 train loss:3.578891 +step:6205 train loss:3.516070 +step:6206 train loss:3.501955 +step:6207 train loss:3.561489 +step:6208 train loss:3.588020 +step:6209 train loss:3.557813 +step:6210 train loss:3.587098 +step:6211 train loss:3.507802 +step:6212 train loss:3.496091 +step:6213 train loss:3.512774 +step:6214 train loss:3.488341 +step:6215 train loss:3.662709 +step:6216 train loss:3.534647 +step:6217 train loss:3.589166 +step:6218 train loss:3.568399 +step:6219 train loss:3.580991 +step:6220 train loss:3.533098 +step:6221 train loss:3.499080 +step:6222 train loss:3.744066 +step:6223 train loss:3.499608 +step:6224 train loss:3.533257 +step:6225 train loss:3.514453 +step:6226 train loss:3.524925 +step:6227 train loss:3.527508 +step:6228 train loss:3.522707 +step:6229 train loss:3.561732 +step:6230 train loss:3.520016 +step:6231 train loss:3.626951 +step:6232 train loss:3.473021 +step:6233 train loss:3.513776 +step:6234 train loss:3.519969 +step:6235 train loss:3.548576 +step:6236 train loss:3.483677 +step:6237 train loss:3.509960 +step:6238 train loss:3.532438 +step:6239 train loss:3.519039 +step:6240 train loss:3.539976 +step:6241 train loss:3.523512 +step:6242 train loss:3.520105 +step:6243 train loss:3.557429 +step:6244 train loss:3.710823 +step:6245 train loss:3.510197 +step:6246 train loss:3.498379 +step:6247 train loss:3.487666 +step:6248 train loss:3.496799 +step:6249 train loss:3.437559 +step:6250 validation loss:3.445912 +step:6250 train loss:3.471984 +step:6251 train loss:3.490515 +step:6252 train loss:3.531640 +step:6253 train loss:3.541826 +step:6254 train loss:3.533561 +step:6255 train loss:3.499567 +step:6256 train loss:3.549855 +step:6257 train loss:3.549670 +step:6258 train loss:3.529680 +step:6259 train loss:3.535677 +step:6260 train loss:3.563111 +step:6261 train loss:3.584616 +step:6262 train loss:3.476954 +step:6263 train loss:3.512017 +step:6264 train loss:3.519261 +step:6265 train loss:3.510469 +step:6266 train loss:3.714923 +step:6267 train loss:3.516012 +step:6268 train loss:3.601805 +step:6269 train loss:3.475173 +step:6270 train loss:3.490471 +step:6271 train loss:3.539321 +step:6272 train loss:3.529786 +step:6273 train loss:3.728632 +step:6274 train loss:3.506380 +step:6275 train loss:3.541460 +step:6276 train loss:3.511657 +step:6277 train loss:3.496003 +step:6278 train loss:3.480503 +step:6279 train loss:3.537284 +step:6280 train loss:3.541473 +step:6281 train loss:3.476643 +step:6282 train loss:3.487629 +step:6283 train loss:3.576266 +step:6284 train loss:3.543808 +step:6285 train loss:3.542404 +step:6286 train loss:3.489373 +step:6287 train loss:3.520240 +step:6288 train loss:3.618395 +step:6289 train loss:3.481067 +step:6290 train loss:3.479547 +step:6291 train loss:3.508784 +step:6292 train loss:3.528547 +step:6293 train loss:3.513434 +step:6294 train loss:3.501220 +step:6295 train loss:3.524365 +step:6296 train loss:3.487020 +step:6297 train loss:3.614345 +step:6298 train loss:3.561476 +step:6299 train loss:3.455929 +step:6300 
train loss:3.534593 +step:6301 train loss:3.561102 +step:6302 train loss:3.545659 +step:6303 train loss:3.514070 +step:6304 train loss:3.532308 +step:6305 train loss:3.502500 +step:6306 train loss:3.512927 +step:6307 train loss:3.521373 +step:6308 train loss:3.498049 +step:6309 train loss:3.497504 +step:6310 train loss:3.549933 +step:6311 train loss:3.505569 +step:6312 train loss:3.541686 +step:6313 train loss:3.475679 +step:6314 train loss:3.501127 +step:6315 train loss:3.554118 +step:6316 train loss:3.477864 +step:6317 train loss:3.467185 +step:6318 train loss:3.582042 +step:6319 train loss:3.512204 +step:6320 train loss:3.527671 +step:6321 train loss:3.512120 +step:6322 train loss:3.514575 +step:6323 train loss:3.445176 +step:6324 train loss:3.457688 +step:6325 train loss:3.552703 +step:6326 train loss:3.468795 +step:6327 train loss:3.545122 +step:6328 train loss:3.523780 +step:6329 train loss:3.446140 +step:6330 train loss:3.472929 +step:6331 train loss:3.491045 +step:6332 train loss:3.625389 +step:6333 train loss:3.501115 +step:6334 train loss:3.478840 +step:6335 train loss:3.452882 +step:6336 train loss:3.482944 +step:6337 train loss:3.509033 +step:6338 train loss:3.460886 +step:6339 train loss:3.506306 +step:6340 train loss:3.488240 +step:6341 train loss:3.502684 +step:6342 train loss:3.499447 +step:6343 train loss:3.595737 +step:6344 train loss:3.448817 +step:6345 train loss:3.464777 +step:6346 train loss:3.542530 +step:6347 train loss:3.417717 +step:6348 train loss:3.511634 +step:6349 train loss:3.490240 +step:6350 train loss:3.462732 +step:6351 train loss:3.461005 +step:6352 train loss:3.478993 +step:6353 train loss:3.496934 +step:6354 train loss:3.510526 +step:6355 train loss:3.520447 +step:6356 train loss:3.531435 +step:6357 train loss:3.387845 +step:6358 train loss:3.477782 +step:6359 train loss:3.533437 +step:6360 train loss:3.444235 +step:6361 train loss:3.444657 +step:6362 train loss:3.488269 +step:6363 train loss:3.469295 +step:6364 train loss:3.452354 +step:6365 train loss:3.528589 +step:6366 train loss:3.537269 +step:6367 train loss:3.469277 +step:6368 train loss:3.507475 +step:6369 train loss:3.474664 +step:6370 train loss:3.528166 +step:6371 train loss:3.444010 +step:6372 train loss:3.473388 +step:6373 train loss:3.497884 +step:6374 train loss:3.529742 +step:6375 train loss:3.489242 +step:6376 train loss:3.513008 +step:6377 train loss:3.512933 +step:6378 train loss:3.458027 +step:6379 train loss:3.503417 +step:6380 train loss:3.546046 +step:6381 train loss:3.507697 +step:6382 train loss:3.464255 +step:6383 train loss:3.524716 +step:6384 train loss:3.502931 +step:6385 train loss:3.482616 +step:6386 train loss:3.512620 +step:6387 train loss:3.494291 +step:6388 train loss:3.533171 +step:6389 train loss:3.543793 +step:6390 train loss:3.491822 +step:6391 train loss:3.481188 +step:6392 train loss:3.465367 +step:6393 train loss:3.516829 +step:6394 train loss:3.509948 +step:6395 train loss:3.685292 +step:6396 train loss:3.508514 +step:6397 train loss:3.451452 +step:6398 train loss:3.521317 +step:6399 train loss:3.463289 +step:6400 train loss:3.537745 +step:6401 train loss:3.574772 +step:6402 train loss:3.502955 +step:6403 train loss:3.498302 +step:6404 train loss:3.476720 +step:6405 train loss:3.504868 +step:6406 train loss:3.509521 +step:6407 train loss:3.566437 +step:6408 train loss:3.459775 +step:6409 train loss:3.442696 +step:6410 train loss:3.578439 +step:6411 train loss:3.503479 +step:6412 train loss:3.512348 +step:6413 train loss:3.514167 +step:6414 train loss:3.462581 
+step:6415 train loss:3.527332 +step:6416 train loss:3.490339 +step:6417 train loss:3.463656 +step:6418 train loss:3.454789 +step:6419 train loss:3.538179 +step:6420 train loss:3.466527 +step:6421 train loss:3.492164 +step:6422 train loss:3.481587 +step:6423 train loss:3.488521 +step:6424 train loss:3.515461 +step:6425 train loss:3.510435 +step:6426 train loss:3.551545 +step:6427 train loss:3.515520 +step:6428 train loss:3.546727 +step:6429 train loss:3.515129 +step:6430 train loss:3.488803 +step:6431 train loss:3.465825 +step:6432 train loss:3.499297 +step:6433 train loss:3.511536 +step:6434 train loss:3.403922 +step:6435 train loss:3.576356 +step:6436 train loss:3.506361 +step:6437 train loss:3.470543 +step:6438 train loss:3.501964 +step:6439 train loss:3.471825 +step:6440 train loss:3.489292 +step:6441 train loss:3.481828 +step:6442 train loss:3.422404 +step:6443 train loss:3.481405 +step:6444 train loss:3.617694 +step:6445 train loss:3.523053 +step:6446 train loss:3.524975 +step:6447 train loss:3.508241 +step:6448 train loss:3.455669 +step:6449 train loss:3.480464 +step:6450 train loss:3.463960 +step:6451 train loss:3.450888 +step:6452 train loss:3.456356 +step:6453 train loss:3.496319 +step:6454 train loss:3.522188 +step:6455 train loss:3.511835 +step:6456 train loss:3.528912 +step:6457 train loss:3.505562 +step:6458 train loss:3.479909 +step:6459 train loss:3.459559 +step:6460 train loss:3.469025 +step:6461 train loss:3.470979 +step:6462 train loss:3.463798 +step:6463 train loss:3.561571 +step:6464 train loss:3.467206 +step:6465 train loss:3.508756 +step:6466 train loss:3.523997 +step:6467 train loss:3.449246 +step:6468 train loss:3.526855 +step:6469 train loss:3.438562 +step:6470 train loss:3.559860 +step:6471 train loss:3.465533 +step:6472 train loss:3.624759 +step:6473 train loss:3.506793 +step:6474 train loss:3.541458 +step:6475 train loss:3.486155 +step:6476 train loss:3.556214 +step:6477 train loss:3.485634 +step:6478 train loss:3.616703 +step:6479 train loss:3.532338 +step:6480 train loss:3.466738 +step:6481 train loss:3.524356 +step:6482 train loss:3.462288 +step:6483 train loss:3.526622 +step:6484 train loss:3.482263 +step:6485 train loss:3.544138 +step:6486 train loss:3.477026 +step:6487 train loss:3.473126 +step:6488 train loss:3.470050 +step:6489 train loss:3.473152 +step:6490 train loss:3.497360 +step:6491 train loss:3.468932 +step:6492 train loss:3.570406 +step:6493 train loss:3.476757 +step:6494 train loss:3.477650 +step:6495 train loss:3.477173 +step:6496 train loss:3.506703 +step:6497 train loss:3.526361 +step:6498 train loss:3.634497 +step:6499 train loss:3.605474 +step:6500 validation loss:3.435987 total_sharp:4.0124e-03 L1_sharp:2.9471e-02 L2_sharp:1.1856e-02 L3_sharp:7.9805e-03 L4_sharp:4.4267e-03 L5_sharp:5.6446e-03 L6_sharp:7.3681e-03 L7_sharp:7.4244e-03 L8_sharp:7.2185e-03 L9_sharp:5.5049e-03 L10_sharp:3.6710e-03 L11_sharp:3.5115e-03 L12_sharp:4.8661e-03 total_fnorm:1.4027e+00 total_l1_linf:9.5658e+03 total_spectral:1.4027e+00 L1_fnorm:1.1864e-01 L2_fnorm:1.1389e-01 L3_fnorm:1.1525e-01 L4_fnorm:1.1801e-01 L5_fnorm:1.2024e-01 L6_fnorm:1.2085e-01 L7_fnorm:1.2111e-01 L8_fnorm:1.2109e-01 L9_fnorm:1.2130e-01 L10_fnorm:1.2136e-01 L11_fnorm:1.2107e-01 L12_fnorm:1.2087e-01 L1_l1linf:3.4312e-01 L2_l1linf:3.9835e-01 L3_l1linf:4.0643e-01 L4_l1linf:3.9193e-01 L5_l1linf:3.8225e-01 L6_l1linf:3.3483e-01 L7_l1linf:3.1546e-01 L8_l1linf:3.4238e-01 L9_l1linf:3.5602e-01 L10_l1linf:3.9737e-01 L11_l1linf:4.0890e-01 L12_l1linf:4.2035e-01 L1_spectral:7.7438e-03 L2_spectral:8.9891e-03 
L3_spectral:9.1566e-03 L4_spectral:8.8246e-03 L5_spectral:8.6369e-03 L6_spectral:7.4487e-03 L7_spectral:7.1196e-03 L8_spectral:7.6732e-03 L9_spectral:7.9827e-03 L10_spectral:8.8471e-03 L11_spectral:9.1394e-03 L12_spectral:9.4349e-03 ip_v_neg_g:3.4806e-03 cos_v_neg_g:1.2866e-03 v_norm:1.4027e+00 g_norm:1.9287e+00 hv_norm:4.8159e-01 cos_v_hv:1.1687e-02 hg_norm:4.0166e+01 cos_g_hg:4.8548e-01 v_par:5.5661e-05 v_perp:1.4027e+00 L1_cos_v_neg_g:7.2987e-03 L1_v_norm:1.1864e-01 L2_cos_v_neg_g:5.5698e-03 L2_v_norm:1.1389e-01 L3_cos_v_neg_g:3.4571e-03 L3_v_norm:1.1525e-01 L4_cos_v_neg_g:2.0777e-03 L4_v_norm:1.1801e-01 L5_cos_v_neg_g:3.2465e-03 L5_v_norm:1.2024e-01 L6_cos_v_neg_g:3.1228e-03 L6_v_norm:1.2085e-01 L7_cos_v_neg_g:3.4918e-03 L7_v_norm:1.2111e-01 L8_cos_v_neg_g:4.4973e-03 L8_v_norm:1.2109e-01 L9_cos_v_neg_g:4.7406e-03 L9_v_norm:1.2130e-01 L10_cos_v_neg_g:3.8408e-03 L10_v_norm:1.2136e-01 L11_cos_v_neg_g:3.8328e-03 L11_v_norm:1.2107e-01 L12_cos_v_neg_g:4.1068e-03 L12_v_norm:1.2087e-01 +step:6500 train loss:3.452725 +step:6501 train loss:3.469403 +step:6502 train loss:3.490874 +step:6503 train loss:3.547115 +step:6504 train loss:3.496099 +step:6505 train loss:3.503856 +step:6506 train loss:3.463959 +step:6507 train loss:3.529998 +step:6508 train loss:3.500277 +step:6509 train loss:3.481862 +step:6510 train loss:3.491893 +step:6511 train loss:3.505233 +step:6512 train loss:3.447150 +step:6513 train loss:3.516576 +step:6514 train loss:3.390307 +step:6515 train loss:3.482170 +step:6516 train loss:3.530723 +step:6517 train loss:3.444249 +step:6518 train loss:3.484890 +step:6519 train loss:3.474250 +step:6520 train loss:3.563060 +step:6521 train loss:3.540579 +step:6522 train loss:3.549908 +step:6523 train loss:3.445498 +step:6524 train loss:3.529425 +step:6525 train loss:3.515987 +step:6526 train loss:3.451295 +step:6527 train loss:3.507340 +step:6528 train loss:3.526981 +step:6529 train loss:3.555037 +step:6530 train loss:3.460983 +step:6531 train loss:3.535557 +step:6532 train loss:3.466660 +step:6533 train loss:3.504497 +step:6534 train loss:3.513040 +step:6535 train loss:3.486704 +step:6536 train loss:3.619565 +step:6537 train loss:3.426448 +step:6538 train loss:3.537497 +step:6539 train loss:3.462227 +step:6540 train loss:3.574558 +step:6541 train loss:3.555099 +step:6542 train loss:3.511338 +step:6543 train loss:3.466185 +step:6544 train loss:3.446407 +step:6545 train loss:3.437525 +step:6546 train loss:3.497453 +step:6547 train loss:3.555893 +step:6548 train loss:3.494153 +step:6549 train loss:3.510680 +step:6550 train loss:3.620886 +step:6551 train loss:3.503188 +step:6552 train loss:3.493785 +step:6553 train loss:3.534034 +step:6554 train loss:3.424469 +step:6555 train loss:3.511381 +step:6556 train loss:3.383083 +step:6557 train loss:3.730058 +step:6558 train loss:3.561039 +step:6559 train loss:3.471642 +step:6560 train loss:3.510929 +step:6561 train loss:3.483160 +step:6562 train loss:3.506886 +step:6563 train loss:3.397004 +step:6564 train loss:3.501646 +step:6565 train loss:3.406636 +step:6566 train loss:3.517623 +step:6567 train loss:3.486777 +step:6568 train loss:3.535376 +step:6569 train loss:3.481263 +step:6570 train loss:3.519721 +step:6571 train loss:3.449885 +step:6572 train loss:3.525134 +step:6573 train loss:3.537286 +step:6574 train loss:3.522878 +step:6575 train loss:3.470809 +step:6576 train loss:3.459583 +step:6577 train loss:3.528641 +step:6578 train loss:3.397569 +step:6579 train loss:3.500033 +step:6580 train loss:3.453979 +step:6581 train loss:3.468335 +step:6582 
train loss:3.444322 +step:6583 train loss:3.547616 +step:6584 train loss:3.476665 +step:6585 train loss:3.513129 +step:6586 train loss:3.521242 +step:6587 train loss:3.529303 +step:6588 train loss:3.498116 +step:6589 train loss:3.526382 +step:6590 train loss:3.462481 +step:6591 train loss:3.514883 +step:6592 train loss:3.458282 +step:6593 train loss:3.465649 +step:6594 train loss:3.491950 +step:6595 train loss:3.473621 +step:6596 train loss:3.473032 +step:6597 train loss:3.496692 +step:6598 train loss:3.538612 +step:6599 train loss:3.427643 +step:6600 train loss:3.487484 +step:6601 train loss:3.543718 +step:6602 train loss:3.470357 +step:6603 train loss:3.494756 +step:6604 train loss:3.507841 +step:6605 train loss:3.488756 +step:6606 train loss:3.549344 +step:6607 train loss:3.466780 +step:6608 train loss:3.480825 +step:6609 train loss:3.452698 +step:6610 train loss:3.563332 +step:6611 train loss:3.485810 +step:6612 train loss:3.530466 +step:6613 train loss:3.445734 +step:6614 train loss:3.473519 +step:6615 train loss:3.473720 +step:6616 train loss:3.454795 +step:6617 train loss:3.492523 +step:6618 train loss:3.481891 +step:6619 train loss:3.456215 +step:6620 train loss:3.559089 +step:6621 train loss:3.434479 +step:6622 train loss:3.510919 +step:6623 train loss:3.439654 +step:6624 train loss:3.513045 +step:6625 train loss:3.556669 +step:6626 train loss:3.513734 +step:6627 train loss:3.467005 +step:6628 train loss:3.527170 +step:6629 train loss:3.428515 +step:6630 train loss:3.465117 +step:6631 train loss:3.500145 +step:6632 train loss:3.541796 +step:6633 train loss:3.490386 +step:6634 train loss:3.553902 +step:6635 train loss:3.453090 +step:6636 train loss:3.496307 +step:6637 train loss:3.462873 +step:6638 train loss:3.462548 +step:6639 train loss:3.474670 +step:6640 train loss:3.463306 +step:6641 train loss:3.475091 +step:6642 train loss:3.475596 +step:6643 train loss:3.559748 +step:6644 train loss:3.561764 +step:6645 train loss:3.433695 +step:6646 train loss:3.524724 +step:6647 train loss:3.479928 +step:6648 train loss:3.583986 +step:6649 train loss:3.514396 +step:6650 train loss:3.462269 +step:6651 train loss:3.509409 +step:6652 train loss:3.523191 +step:6653 train loss:3.467182 +step:6654 train loss:3.461457 +step:6655 train loss:3.503094 +step:6656 train loss:3.475404 +step:6657 train loss:3.500611 +step:6658 train loss:3.482882 +step:6659 train loss:3.632676 +step:6660 train loss:3.535014 +step:6661 train loss:3.459787 +step:6662 train loss:3.492799 +step:6663 train loss:3.424573 +step:6664 train loss:3.505217 +step:6665 train loss:3.515700 +step:6666 train loss:3.526484 +step:6667 train loss:3.442909 +step:6668 train loss:3.570662 +step:6669 train loss:3.454286 +step:6670 train loss:3.464475 +step:6671 train loss:3.546925 +step:6672 train loss:3.497363 +step:6673 train loss:3.507833 +step:6674 train loss:3.479554 +step:6675 train loss:3.497156 +step:6676 train loss:3.507859 +step:6677 train loss:3.465008 +step:6678 train loss:3.533453 +step:6679 train loss:3.570051 +step:6680 train loss:3.571162 +step:6681 train loss:3.523678 +step:6682 train loss:3.465907 +step:6683 train loss:3.489464 +step:6684 train loss:3.501633 +step:6685 train loss:3.513367 +step:6686 train loss:3.446717 +step:6687 train loss:3.464044 +step:6688 train loss:3.510720 +step:6689 train loss:3.518718 +step:6690 train loss:3.493685 +step:6691 train loss:3.526889 +step:6692 train loss:3.533567 +step:6693 train loss:3.565721 +step:6694 train loss:3.520464 +step:6695 train loss:3.494339 +step:6696 train loss:3.431717 
+step:6697 train loss:3.643632 +step:6698 train loss:3.491004 +step:6699 train loss:3.487917 +step:6700 train loss:3.502061 +step:6701 train loss:3.559910 +step:6702 train loss:3.446866 +step:6703 train loss:3.496312 +step:6704 train loss:3.479724 +step:6705 train loss:3.492411 +step:6706 train loss:3.468054 +step:6707 train loss:3.545409 +step:6708 train loss:3.497308 +step:6709 train loss:3.527028 +step:6710 train loss:3.515239 +step:6711 train loss:3.467225 +step:6712 train loss:3.455616 +step:6713 train loss:3.480863 +step:6714 train loss:3.524387 +step:6715 train loss:3.466386 +step:6716 train loss:3.546188 +step:6717 train loss:3.484603 +step:6718 train loss:3.509772 +step:6719 train loss:3.542219 +step:6720 train loss:3.474432 +step:6721 train loss:3.491768 +step:6722 train loss:3.467196 +step:6723 train loss:3.594875 +step:6724 train loss:3.453763 +step:6725 train loss:3.512110 +step:6726 train loss:3.467780 +step:6727 train loss:3.533252 +step:6728 train loss:3.630481 +step:6729 train loss:3.491503 +step:6730 train loss:3.485811 +step:6731 train loss:3.529507 +step:6732 train loss:3.404846 +step:6733 train loss:3.539932 +step:6734 train loss:3.470945 +step:6735 train loss:3.496308 +step:6736 train loss:3.493618 +step:6737 train loss:3.493895 +step:6738 train loss:3.522699 +step:6739 train loss:3.480946 +step:6740 train loss:3.430841 +step:6741 train loss:3.542935 +step:6742 train loss:3.500734 +step:6743 train loss:3.506814 +step:6744 train loss:3.397595 +step:6745 train loss:3.555648 +step:6746 train loss:3.479215 +step:6747 train loss:3.478499 +step:6748 train loss:3.548230 +step:6749 train loss:3.531177 +step:6750 validation loss:3.425557 +step:6750 train loss:3.449177 +step:6751 train loss:3.485943 +step:6752 train loss:3.487536 +step:6753 train loss:3.525605 +step:6754 train loss:3.502354 +step:6755 train loss:3.517061 +step:6756 train loss:3.455854 +step:6757 train loss:3.426435 +step:6758 train loss:3.600084 +step:6759 train loss:3.493413 +step:6760 train loss:3.548123 +step:6761 train loss:3.480268 +step:6762 train loss:3.500865 +step:6763 train loss:3.403424 +step:6764 train loss:3.482041 +step:6765 train loss:3.488533 +step:6766 train loss:3.482991 +step:6767 train loss:3.438127 +step:6768 train loss:3.439293 +step:6769 train loss:3.403930 +step:6770 train loss:3.488789 +step:6771 train loss:3.490248 +step:6772 train loss:3.502130 +step:6773 train loss:3.479562 +step:6774 train loss:3.495606 +step:6775 train loss:3.536398 +step:6776 train loss:3.492790 +step:6777 train loss:3.566245 +step:6778 train loss:3.455506 +step:6779 train loss:3.505084 +step:6780 train loss:3.438480 +step:6781 train loss:3.500222 +step:6782 train loss:3.416540 +step:6783 train loss:3.447935 +step:6784 train loss:3.476804 +step:6785 train loss:3.460682 +step:6786 train loss:3.478345 +step:6787 train loss:3.552871 +step:6788 train loss:3.493580 +step:6789 train loss:3.500177 +step:6790 train loss:3.501144 +step:6791 train loss:3.510184 +step:6792 train loss:3.510681 +step:6793 train loss:3.507785 +step:6794 train loss:3.475810 +step:6795 train loss:3.477711 +step:6796 train loss:3.482931 +step:6797 train loss:3.578983 +step:6798 train loss:3.482907 +step:6799 train loss:3.473665 +step:6800 train loss:3.439364 +step:6801 train loss:3.575054 +step:6802 train loss:3.523057 +step:6803 train loss:3.512035 +step:6804 train loss:3.539802 +step:6805 train loss:3.500633 +step:6806 train loss:3.436152 +step:6807 train loss:3.491275 +step:6808 train loss:3.478066 +step:6809 train loss:3.504277 +step:6810 
train loss:3.628284 +step:6811 train loss:3.531803 +step:6812 train loss:3.500435 +step:6813 train loss:3.515932 +step:6814 train loss:3.522871 +step:6815 train loss:3.568881 +step:6816 train loss:3.484812 +step:6817 train loss:3.513091 +step:6818 train loss:3.487561 +step:6819 train loss:3.473850 +step:6820 train loss:3.500918 +step:6821 train loss:3.464730 +step:6822 train loss:3.565217 +step:6823 train loss:3.547940 +step:6824 train loss:3.523627 +step:6825 train loss:3.474291 +step:6826 train loss:3.515072 +step:6827 train loss:3.504556 +step:6828 train loss:3.518676 +step:6829 train loss:3.505704 +step:6830 train loss:3.472480 +step:6831 train loss:3.432363 +step:6832 train loss:3.422055 +step:6833 train loss:3.435524 +step:6834 train loss:3.524417 +step:6835 train loss:3.496125 +step:6836 train loss:3.413934 +step:6837 train loss:3.482047 +step:6838 train loss:3.538111 +step:6839 train loss:3.622743 +step:6840 train loss:3.497679 +step:6841 train loss:3.450250 +step:6842 train loss:3.500142 +step:6843 train loss:3.606280 +step:6844 train loss:3.486586 +step:6845 train loss:3.539666 +step:6846 train loss:3.601520 +step:6847 train loss:3.533820 +step:6848 train loss:3.521062 +step:6849 train loss:3.547691 +step:6850 train loss:3.517874 +step:6851 train loss:3.445410 +step:6852 train loss:3.438503 +step:6853 train loss:3.429772 +step:6854 train loss:3.506588 +step:6855 train loss:3.478643 +step:6856 train loss:3.463410 +step:6857 train loss:3.514918 +step:6858 train loss:3.546455 +step:6859 train loss:3.455287 +step:6860 train loss:3.564199 +step:6861 train loss:3.590181 +step:6862 train loss:3.497479 +step:6863 train loss:3.495818 +step:6864 train loss:3.441061 +step:6865 train loss:3.511409 +step:6866 train loss:3.439605 +step:6867 train loss:3.619736 +step:6868 train loss:3.492514 +step:6869 train loss:3.522799 +step:6870 train loss:3.562915 +step:6871 train loss:3.480528 +step:6872 train loss:3.471586 +step:6873 train loss:3.493638 +step:6874 train loss:3.454465 +step:6875 train loss:3.455626 +step:6876 train loss:3.486366 +step:6877 train loss:3.528666 +step:6878 train loss:3.442457 +step:6879 train loss:3.487800 +step:6880 train loss:3.497464 +step:6881 train loss:3.459496 +step:6882 train loss:3.527867 +step:6883 train loss:3.542046 +step:6884 train loss:3.739043 +step:6885 train loss:3.507635 +step:6886 train loss:3.489291 +step:6887 train loss:3.429006 +step:6888 train loss:3.532420 +step:6889 train loss:3.413119 +step:6890 train loss:3.524389 +step:6891 train loss:3.531339 +step:6892 train loss:3.629638 +step:6893 train loss:3.464682 +step:6894 train loss:3.523103 +step:6895 train loss:3.523638 +step:6896 train loss:3.500210 +step:6897 train loss:3.454242 +step:6898 train loss:3.456239 +step:6899 train loss:3.544093 +step:6900 train loss:3.515165 +step:6901 train loss:3.466907 +step:6902 train loss:3.398765 +step:6903 train loss:3.443139 +step:6904 train loss:3.550525 +step:6905 train loss:3.590718 +step:6906 train loss:3.507433 +step:6907 train loss:3.525837 +step:6908 train loss:3.558398 +step:6909 train loss:3.554204 +step:6910 train loss:3.430391 +step:6911 train loss:3.560344 +step:6912 train loss:3.452795 +step:6913 train loss:3.489578 +step:6914 train loss:3.445343 +step:6915 train loss:3.476243 +step:6916 train loss:3.448658 +step:6917 train loss:3.572802 +step:6918 train loss:3.519552 +step:6919 train loss:3.513622 +step:6920 train loss:3.497712 +step:6921 train loss:3.564018 +step:6922 train loss:3.553159 +step:6923 train loss:3.416753 +step:6924 train loss:3.499138 
+step:6925 train loss:3.474429 +step:6926 train loss:3.513215 +step:6927 train loss:3.566880 +step:6928 train loss:3.452686 +step:6929 train loss:3.464024 +step:6930 train loss:3.499389 +step:6931 train loss:3.497579 +step:6932 train loss:3.732677 +step:6933 train loss:3.561362 +step:6934 train loss:3.500246 +step:6935 train loss:3.486283 +step:6936 train loss:3.527224 +step:6937 train loss:3.472541 +step:6938 train loss:3.535208 +step:6939 train loss:3.470049 +step:6940 train loss:3.525735 +step:6941 train loss:3.439432 +step:6942 train loss:3.528759 +step:6943 train loss:3.420402 +step:6944 train loss:3.510789 +step:6945 train loss:3.451959 +step:6946 train loss:3.544089 +step:6947 train loss:3.467762 +step:6948 train loss:3.460317 +step:6949 train loss:3.535069 +step:6950 train loss:3.527007 +step:6951 train loss:3.528959 +step:6952 train loss:3.463482 +step:6953 train loss:3.507678 +step:6954 train loss:3.572847 +step:6955 train loss:3.486293 +step:6956 train loss:3.522282 +step:6957 train loss:3.509851 +step:6958 train loss:3.472230 +step:6959 train loss:3.508962 +step:6960 train loss:3.474766 +step:6961 train loss:3.485927 +step:6962 train loss:3.464655 +step:6963 train loss:3.437351 +step:6964 train loss:3.476815 +step:6965 train loss:3.471505 +step:6966 train loss:3.512865 +step:6967 train loss:3.452311 +step:6968 train loss:3.491194 +step:6969 train loss:3.511562 +step:6970 train loss:3.486142 +step:6971 train loss:3.552403 +step:6972 train loss:3.498590 +step:6973 train loss:3.455159 +step:6974 train loss:3.584850 +step:6975 train loss:3.487366 +step:6976 train loss:3.463412 +step:6977 train loss:3.498038 +step:6978 train loss:3.490879 +step:6979 train loss:3.503527 +step:6980 train loss:3.477901 +step:6981 train loss:3.538893 +step:6982 train loss:3.491023 +step:6983 train loss:3.481653 +step:6984 train loss:3.600998 +step:6985 train loss:3.443196 +step:6986 train loss:3.436779 +step:6987 train loss:3.486753 +step:6988 train loss:3.491145 +step:6989 train loss:3.635765 +step:6990 train loss:3.500456 +step:6991 train loss:3.455388 +step:6992 train loss:3.504655 +step:6993 train loss:3.570855 +step:6994 train loss:3.517012 +step:6995 train loss:3.467985 +step:6996 train loss:3.464782 +step:6997 train loss:3.550844 +step:6998 train loss:3.448238 +step:6999 train loss:3.498494 +step:7000 validation loss:3.420084 total_sharp:3.9349e-03 L1_sharp:2.1042e-02 L2_sharp:9.4842e-03 L3_sharp:8.7509e-03 L4_sharp:3.8848e-03 L5_sharp:5.3433e-03 L6_sharp:7.7451e-03 L7_sharp:9.0396e-03 L8_sharp:7.7914e-03 L9_sharp:6.0195e-03 L10_sharp:4.2001e-03 L11_sharp:3.5970e-03 L12_sharp:4.9737e-03 total_fnorm:1.3991e+00 total_l1_linf:9.5488e+03 total_spectral:1.3991e+00 L1_fnorm:1.1916e-01 L2_fnorm:1.1511e-01 L3_fnorm:1.1569e-01 L4_fnorm:1.1914e-01 L5_fnorm:1.2045e-01 L6_fnorm:1.2091e-01 L7_fnorm:1.2114e-01 L8_fnorm:1.2118e-01 L9_fnorm:1.2135e-01 L10_fnorm:1.2141e-01 L11_fnorm:1.2116e-01 L12_fnorm:1.2093e-01 L1_l1linf:3.4962e-01 L2_l1linf:4.3320e-01 L3_l1linf:3.9782e-01 L4_l1linf:4.0717e-01 L5_l1linf:3.8209e-01 L6_l1linf:3.6756e-01 L7_l1linf:3.6782e-01 L8_l1linf:3.7159e-01 L9_l1linf:3.9149e-01 L10_l1linf:3.9974e-01 L11_l1linf:4.1785e-01 L12_l1linf:4.0945e-01 L1_spectral:7.8938e-03 L2_spectral:9.7584e-03 L3_spectral:8.9166e-03 L4_spectral:9.1531e-03 L5_spectral:8.6549e-03 L6_spectral:8.2761e-03 L7_spectral:8.2221e-03 L8_spectral:8.3071e-03 L9_spectral:8.7103e-03 L10_spectral:8.9285e-03 L11_spectral:9.4102e-03 L12_spectral:9.2189e-03 ip_v_neg_g:2.6785e-03 cos_v_neg_g:9.8254e-04 v_norm:1.3991e+00 
g_norm:1.9484e+00 hv_norm:4.5036e-01 cos_v_hv:1.2224e-02 hg_norm:4.3556e+01 cos_g_hg:4.2282e-01 v_par:3.8388e-05 v_perp:1.3991e+00 L1_cos_v_neg_g:4.1043e-03 L1_v_norm:1.1916e-01 L2_cos_v_neg_g:3.4684e-03 L2_v_norm:1.1511e-01 L3_cos_v_neg_g:1.6848e-03 L3_v_norm:1.1569e-01 L4_cos_v_neg_g:2.5305e-03 L4_v_norm:1.1914e-01 L5_cos_v_neg_g:3.2732e-03 L5_v_norm:1.2045e-01 L6_cos_v_neg_g:3.0306e-03 L6_v_norm:1.2091e-01 L7_cos_v_neg_g:3.0914e-03 L7_v_norm:1.2114e-01 L8_cos_v_neg_g:3.7953e-03 L8_v_norm:1.2118e-01 L9_cos_v_neg_g:3.6307e-03 L9_v_norm:1.2135e-01 L10_cos_v_neg_g:3.1611e-03 L10_v_norm:1.2141e-01 L11_cos_v_neg_g:2.4546e-03 L11_v_norm:1.2116e-01 L12_cos_v_neg_g:3.3135e-03 L12_v_norm:1.2093e-01 +step:7000 train loss:3.574945 +step:7001 train loss:3.484960 +step:7002 train loss:3.469257 +step:7003 train loss:3.494641 +step:7004 train loss:3.490731 +step:7005 train loss:3.471527 +step:7006 train loss:3.479680 +step:7007 train loss:3.530124 +step:7008 train loss:3.473536 +step:7009 train loss:3.511363 +step:7010 train loss:3.449327 +step:7011 train loss:3.502329 +step:7012 train loss:3.473582 +step:7013 train loss:3.550367 +step:7014 train loss:3.457222 +step:7015 train loss:3.518519 +step:7016 train loss:3.504535 +step:7017 train loss:3.472065 +step:7018 train loss:3.550910 +step:7019 train loss:3.473896 +step:7020 train loss:3.523727 +step:7021 train loss:3.466922 +step:7022 train loss:3.485478 +step:7023 train loss:3.500709 +step:7024 train loss:3.461606 +step:7025 train loss:3.512792 +step:7026 train loss:3.469039 +step:7027 train loss:3.533250 +step:7028 train loss:3.457776 +step:7029 train loss:3.446974 +step:7030 train loss:3.449470 +step:7031 train loss:3.504040 +step:7032 train loss:3.507582 +step:7033 train loss:3.486809 +step:7034 train loss:3.508015 +step:7035 train loss:3.556191 +step:7036 train loss:3.479048 +step:7037 train loss:3.503201 +step:7038 train loss:3.465378 +step:7039 train loss:3.519441 +step:7040 train loss:3.438432 +step:7041 train loss:3.530653 +step:7042 train loss:3.461530 +step:7043 train loss:3.433825 +step:7044 train loss:3.481494 +step:7045 train loss:3.482090 +step:7046 train loss:3.476111 +step:7047 train loss:3.512537 +step:7048 train loss:3.461411 +step:7049 train loss:3.472214 +step:7050 train loss:3.494694 +step:7051 train loss:3.511254 +step:7052 train loss:3.514263 +step:7053 train loss:3.474990 +step:7054 train loss:3.453410 +step:7055 train loss:3.523384 +step:7056 train loss:3.516898 +step:7057 train loss:3.448415 +step:7058 train loss:3.566123 +step:7059 train loss:3.471616 +step:7060 train loss:3.481881 +step:7061 train loss:3.455073 +step:7062 train loss:3.480634 +step:7063 train loss:3.539476 +step:7064 train loss:3.459678 +step:7065 train loss:3.510908 +step:7066 train loss:3.469386 +step:7067 train loss:3.509074 +step:7068 train loss:3.482579 +step:7069 train loss:3.446083 +step:7070 train loss:3.470055 +step:7071 train loss:3.441507 +step:7072 train loss:3.440545 +step:7073 train loss:3.438525 +step:7074 train loss:3.435112 +step:7075 train loss:3.454507 +step:7076 train loss:3.462733 +step:7077 train loss:3.471108 +step:7078 train loss:3.519392 +step:7079 train loss:3.530019 +step:7080 train loss:3.473758 +step:7081 train loss:3.496045 +step:7082 train loss:3.461265 +step:7083 train loss:3.493372 +step:7084 train loss:3.485423 +step:7085 train loss:3.448405 +step:7086 train loss:3.484321 +step:7087 train loss:3.463260 +step:7088 train loss:3.585440 +step:7089 train loss:3.476229 +step:7090 train loss:3.442394 +step:7091 train loss:3.455263 
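(Editor's note, not part of the logged output.) The `hv_norm`, `cos_v_hv`, `hg_norm`, and `*_sharp` fields in the validation records above are Hessian-vector-product style quantities. As a point of reference only, the sketch below shows one common way such an HVP is obtained with `torch.autograd` (Pearlmutter's trick); the names `model`, `loss_fn`, `batch`, and `v` are placeholders and this is not the repository's actual implementation.

```python
# Illustrative sketch only: a generic Hessian-vector product H v via double backward.
# Quantities like cos_v_hv are then typically the cosine between the flattened v and H v,
# and a Rayleigh-quotient sharpness along v is <v, H v> / <v, v>.
import torch

def hessian_vector_product(model, loss_fn, batch, v):
    """Return H v for the loss Hessian w.r.t. model parameters.
    `v` is a list of tensors shaped like the trainable parameters."""
    params = [p for p in model.parameters() if p.requires_grad]
    loss = loss_fn(model, batch)
    # First backward pass with create_graph=True so the gradient stays differentiable.
    grads = torch.autograd.grad(loss, params, create_graph=True)
    # Inner product <g, v>, then differentiate again to get H v.
    dot = sum((g * vi).sum() for g, vi in zip(grads, v))
    return torch.autograd.grad(dot, params)
```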
+step:7092 train loss:3.435058 +step:7093 train loss:3.530495 +step:7094 train loss:3.451776 +step:7095 train loss:3.466726 +step:7096 train loss:3.482218 +step:7097 train loss:3.472598 +step:7098 train loss:3.495662 +step:7099 train loss:3.450433 +step:7100 train loss:3.484703 +step:7101 train loss:3.553530 +step:7102 train loss:3.443431 +step:7103 train loss:3.470099 +step:7104 train loss:3.500866 +step:7105 train loss:3.480195 +step:7106 train loss:3.465868 +step:7107 train loss:3.502535 +step:7108 train loss:3.569258 +step:7109 train loss:3.498345 +step:7110 train loss:3.522398 +step:7111 train loss:3.502827 +step:7112 train loss:3.494940 +step:7113 train loss:3.489750 +step:7114 train loss:3.503376 +step:7115 train loss:3.543932 +step:7116 train loss:3.475399 +step:7117 train loss:3.511404 +step:7118 train loss:3.523646 +step:7119 train loss:3.486252 +step:7120 train loss:3.539850 +step:7121 train loss:3.459517 +step:7122 train loss:3.459321 +step:7123 train loss:3.400025 +step:7124 train loss:3.557009 +step:7125 train loss:3.410758 +step:7126 train loss:3.574943 +step:7127 train loss:3.534680 +step:7128 train loss:3.481429 +step:7129 train loss:3.485661 +step:7130 train loss:3.476496 +step:7131 train loss:3.415867 +step:7132 train loss:3.457839 +step:7133 train loss:3.506810 +step:7134 train loss:3.436108 +step:7135 train loss:3.491556 +step:7136 train loss:3.473031 +step:7137 train loss:3.454271 +step:7138 train loss:3.440989 +step:7139 train loss:3.445537 +step:7140 train loss:3.479295 +step:7141 train loss:3.473946 +step:7142 train loss:3.473735 +step:7143 train loss:3.509654 +step:7144 train loss:3.458091 +step:7145 train loss:3.478794 +step:7146 train loss:3.487490 +step:7147 train loss:3.508673 +step:7148 train loss:3.510647 +step:7149 train loss:3.520101 +step:7150 train loss:3.491013 +step:7151 train loss:3.456243 +step:7152 train loss:3.428958 +step:7153 train loss:3.464371 +step:7154 train loss:3.480724 +step:7155 train loss:3.496054 +step:7156 train loss:3.469459 +step:7157 train loss:3.486069 +step:7158 train loss:3.442998 +step:7159 train loss:3.500429 +step:7160 train loss:3.505939 +step:7161 train loss:3.458171 +step:7162 train loss:3.505265 +step:7163 train loss:3.441141 +step:7164 train loss:3.477693 +step:7165 train loss:3.483347 +step:7166 train loss:3.539900 +step:7167 train loss:3.515773 +step:7168 train loss:3.492645 +step:7169 train loss:3.473780 +step:7170 train loss:3.500451 +step:7171 train loss:3.450848 +step:7172 train loss:3.613960 +step:7173 train loss:3.457171 +step:7174 train loss:3.501324 +step:7175 train loss:3.475442 +step:7176 train loss:3.485163 +step:7177 train loss:3.501807 +step:7178 train loss:3.499511 +step:7179 train loss:3.485983 +step:7180 train loss:3.485764 +step:7181 train loss:3.515887 +step:7182 train loss:3.464437 +step:7183 train loss:3.539896 +step:7184 train loss:3.628032 +step:7185 train loss:3.541703 +step:7186 train loss:3.481533 +step:7187 train loss:3.492800 +step:7188 train loss:3.479059 +step:7189 train loss:3.478088 +step:7190 train loss:3.481677 +step:7191 train loss:3.475072 +step:7192 train loss:3.503606 +step:7193 train loss:3.423528 +step:7194 train loss:3.486316 +step:7195 train loss:3.464236 +step:7196 train loss:3.511887 +step:7197 train loss:3.487025 +step:7198 train loss:3.546832 +step:7199 train loss:3.503865 +step:7200 train loss:3.494970 +step:7201 train loss:3.504764 +step:7202 train loss:3.480455 +step:7203 train loss:3.496806 +step:7204 train loss:3.464927 +step:7205 train loss:3.425010 +step:7206 train 
loss:3.452115 +step:7207 train loss:3.627494 +step:7208 train loss:3.461494 +step:7209 train loss:3.541181 +step:7210 train loss:3.479738 +step:7211 train loss:3.509020 +step:7212 train loss:3.591575 +step:7213 train loss:3.441262 +step:7214 train loss:3.510434 +step:7215 train loss:3.479601 +step:7216 train loss:3.528224 +step:7217 train loss:3.487668 +step:7218 train loss:3.575571 +step:7219 train loss:3.484275 +step:7220 train loss:3.563229 +step:7221 train loss:3.442910 +step:7222 train loss:3.525284 +step:7223 train loss:3.445078 +step:7224 train loss:3.505764 +step:7225 train loss:3.486279 +step:7226 train loss:3.453494 +step:7227 train loss:3.471747 +step:7228 train loss:3.461288 +step:7229 train loss:3.463208 +step:7230 train loss:3.450804 +step:7231 train loss:3.582094 +step:7232 train loss:3.451606 +step:7233 train loss:3.520118 +step:7234 train loss:3.508487 +step:7235 train loss:3.480265 +step:7236 train loss:3.518182 +step:7237 train loss:3.468989 +step:7238 train loss:3.510732 +step:7239 train loss:3.461700 +step:7240 train loss:3.461175 +step:7241 train loss:3.473765 +step:7242 train loss:3.458587 +step:7243 train loss:3.499964 +step:7244 train loss:3.474395 +step:7245 train loss:3.480145 +step:7246 train loss:3.520628 +step:7247 train loss:3.474521 +step:7248 train loss:3.515418 +step:7249 train loss:3.462767 +step:7250 validation loss:3.414400 +step:7250 train loss:3.488008 +step:7251 train loss:3.532496 +step:7252 train loss:3.443377 +step:7253 train loss:3.534614 +step:7254 train loss:3.470362 +step:7255 train loss:3.446977 +step:7256 train loss:3.484507 +step:7257 train loss:3.523726 +step:7258 train loss:3.481794 +step:7259 train loss:3.463332 +step:7260 train loss:3.549722 +step:7261 train loss:3.507494 +step:7262 train loss:3.462666 +step:7263 train loss:3.507342 +step:7264 train loss:3.489437 +step:7265 train loss:3.396263 +step:7266 train loss:3.518405 +step:7267 train loss:3.437931 +step:7268 train loss:3.500719 +step:7269 train loss:3.506866 +step:7270 train loss:3.463473 +step:7271 train loss:3.479887 +step:7272 train loss:3.482910 +step:7273 train loss:3.483183 +step:7274 train loss:3.458825 +step:7275 train loss:3.530009 +step:7276 train loss:3.435622 +step:7277 train loss:3.481957 +step:7278 train loss:3.456142 +step:7279 train loss:3.436078 +step:7280 train loss:3.503932 +step:7281 train loss:3.530084 +step:7282 train loss:3.527071 +step:7283 train loss:3.419521 +step:7284 train loss:3.459684 +step:7285 train loss:3.486900 +step:7286 train loss:3.620109 +step:7287 train loss:3.528637 +step:7288 train loss:3.480159 +step:7289 train loss:3.489272 +step:7290 train loss:3.533930 +step:7291 train loss:3.496908 +step:7292 train loss:3.564089 +step:7293 train loss:3.464109 +step:7294 train loss:3.549683 +step:7295 train loss:3.435603 +step:7296 train loss:3.436621 +step:7297 train loss:3.480802 +step:7298 train loss:3.457173 +step:7299 train loss:3.497636 +step:7300 train loss:3.481647 +step:7301 train loss:3.434192 +step:7302 train loss:3.576641 +step:7303 train loss:3.468383 +step:7304 train loss:3.413557 +step:7305 train loss:3.488863 +step:7306 train loss:3.520042 +step:7307 train loss:3.523561 +step:7308 train loss:3.474684 +step:7309 train loss:3.441057 +step:7310 train loss:3.470582 +step:7311 train loss:3.453562 +step:7312 train loss:3.495275 +step:7313 train loss:3.531680 +step:7314 train loss:3.427294 +step:7315 train loss:3.421467 +step:7316 train loss:3.566433 +step:7317 train loss:3.501311 +step:7318 train loss:3.440462 +step:7319 train loss:3.468957 
+step:7320 train loss:3.502100 +step:7321 train loss:3.527752 +step:7322 train loss:3.408430 +step:7323 train loss:3.463569 +step:7324 train loss:3.491288 +step:7325 train loss:3.456280 +step:7326 train loss:3.481537 +step:7327 train loss:3.460327 +step:7328 train loss:3.573701 +step:7329 train loss:3.421007 +step:7330 train loss:3.475754 +step:7331 train loss:3.469953 +step:7332 train loss:3.514981 +step:7333 train loss:3.493225 +step:7334 train loss:3.460920 +step:7335 train loss:3.460504 +step:7336 train loss:3.711041 +step:7337 train loss:3.498458 +step:7338 train loss:3.494437 +step:7339 train loss:3.505644 +step:7340 train loss:3.495811 +step:7341 train loss:3.481441 +step:7342 train loss:3.475389 +step:7343 train loss:3.490439 +step:7344 train loss:3.566891 +step:7345 train loss:3.427139 +step:7346 train loss:3.461461 +step:7347 train loss:3.456880 +step:7348 train loss:3.460864 +step:7349 train loss:3.559326 +step:7350 train loss:3.545488 +step:7351 train loss:3.480978 +step:7352 train loss:3.508240 +step:7353 train loss:3.493137 +step:7354 train loss:3.439940 +step:7355 train loss:3.620944 +step:7356 train loss:3.592041 +step:7357 train loss:3.515212 +step:7358 train loss:3.494028 +step:7359 train loss:3.465699 +step:7360 train loss:3.474913 +step:7361 train loss:3.428276 +step:7362 train loss:3.478951 +step:7363 train loss:3.489248 +step:7364 train loss:3.525633 +step:7365 train loss:3.508096 +step:7366 train loss:3.471876 +step:7367 train loss:3.549848 +step:7368 train loss:3.528401 +step:7369 train loss:3.520954 +step:7370 train loss:3.484892 +step:7371 train loss:3.443573 +step:7372 train loss:3.502851 +step:7373 train loss:3.523844 +step:7374 train loss:3.617301 +step:7375 train loss:3.441682 +step:7376 train loss:3.459356 +step:7377 train loss:3.508353 +step:7378 train loss:3.457688 +step:7379 train loss:3.581379 +step:7380 train loss:3.546689 +step:7381 train loss:3.506996 +step:7382 train loss:3.475090 +step:7383 train loss:3.568885 +step:7384 train loss:3.509514 +step:7385 train loss:3.467897 +step:7386 train loss:3.470961 +step:7387 train loss:3.516211 +step:7388 train loss:3.548143 +step:7389 train loss:3.492759 +step:7390 train loss:3.431478 +step:7391 train loss:3.468371 +step:7392 train loss:3.528434 +step:7393 train loss:3.494068 +step:7394 train loss:3.531269 +step:7395 train loss:3.422438 +step:7396 train loss:3.522267 +step:7397 train loss:3.451025 +step:7398 train loss:3.466035 +step:7399 train loss:3.513208 +step:7400 train loss:3.515827 +step:7401 train loss:3.433318 +step:7402 train loss:3.551904 +step:7403 train loss:3.435796 +step:7404 train loss:3.505736 +step:7405 train loss:3.629323 +step:7406 train loss:3.454144 +step:7407 train loss:3.502551 +step:7408 train loss:3.500638 +step:7409 train loss:3.474109 +step:7410 train loss:3.642130 +step:7411 train loss:3.484422 +step:7412 train loss:3.487661 +step:7413 train loss:3.543130 +step:7414 train loss:3.452709 +step:7415 train loss:3.509784 +step:7416 train loss:3.391605 +step:7417 train loss:3.514170 +step:7418 train loss:3.494131 +step:7419 train loss:3.465443 +step:7420 train loss:3.458247 +step:7421 train loss:3.490045 +step:7422 train loss:3.449494 +step:7423 train loss:3.585895 +step:7424 train loss:3.649852 +step:7425 train loss:3.536587 +step:7426 train loss:3.504934 +step:7427 train loss:3.474852 +step:7428 train loss:3.493006 +step:7429 train loss:3.511994 +step:7430 train loss:3.438961 +step:7431 train loss:3.443528 +step:7432 train loss:3.452190 +step:7433 train loss:3.550265 +step:7434 train 
loss:3.461616 +step:7435 train loss:3.547980 +step:7436 train loss:3.591209 +step:7437 train loss:3.411605 +step:7438 train loss:3.472601 +step:7439 train loss:3.481767 +step:7440 train loss:3.456307 +step:7441 train loss:3.425129 +step:7442 train loss:3.653461 +step:7443 train loss:3.476043 +step:7444 train loss:3.516522 +step:7445 train loss:3.446864 +step:7446 train loss:3.469765 +step:7447 train loss:3.394424 +step:7448 train loss:3.455983 +step:7449 train loss:3.464603 +step:7450 train loss:3.495940 +step:7451 train loss:3.532306 +step:7452 train loss:3.458578 +step:7453 train loss:3.484053 +step:7454 train loss:3.468938 +step:7455 train loss:3.479376 +step:7456 train loss:3.454575 +step:7457 train loss:3.465233 +step:7458 train loss:3.500288 +step:7459 train loss:3.480283 +step:7460 train loss:3.488432 +step:7461 train loss:3.523260 +step:7462 train loss:3.461176 +step:7463 train loss:3.522047 +step:7464 train loss:3.446401 +step:7465 train loss:3.454238 +step:7466 train loss:3.456629 +step:7467 train loss:3.465208 +step:7468 train loss:3.518361 +step:7469 train loss:3.447868 +step:7470 train loss:3.481981 +step:7471 train loss:3.470160 +step:7472 train loss:3.502922 +step:7473 train loss:3.444732 +step:7474 train loss:3.431775 +step:7475 train loss:3.459399 +step:7476 train loss:3.497099 +step:7477 train loss:3.471152 +step:7478 train loss:3.465814 +step:7479 train loss:3.482677 +step:7480 train loss:3.762370 +step:7481 train loss:3.410795 +step:7482 train loss:3.483131 +step:7483 train loss:3.478426 +step:7484 train loss:3.498985 +step:7485 train loss:3.484861 +step:7486 train loss:3.510178 +step:7487 train loss:3.504297 +step:7488 train loss:3.523922 +step:7489 train loss:3.520235 +step:7490 train loss:3.465639 +step:7491 train loss:3.487957 +step:7492 train loss:3.596407 +step:7493 train loss:3.570278 +step:7494 train loss:3.593868 +step:7495 train loss:3.461742 +step:7496 train loss:3.451767 +step:7497 train loss:3.550403 +step:7498 train loss:3.483585 +step:7499 train loss:3.524524 +step:7500 validation loss:3.411339 total_sharp:3.9909e-03 L1_sharp:4.8702e-02 L2_sharp:1.5850e-02 L3_sharp:7.2133e-03 L4_sharp:3.3430e-03 L5_sharp:4.7351e-03 L6_sharp:6.2688e-03 L7_sharp:6.8834e-03 L8_sharp:6.7034e-03 L9_sharp:5.4654e-03 L10_sharp:4.0578e-03 L11_sharp:3.9697e-03 L12_sharp:4.8501e-03 total_fnorm:1.3997e+00 total_l1_linf:9.5381e+03 total_spectral:1.3997e+00 L1_fnorm:1.1914e-01 L2_fnorm:1.1366e-01 L3_fnorm:1.1528e-01 L4_fnorm:1.1874e-01 L5_fnorm:1.2039e-01 L6_fnorm:1.2095e-01 L7_fnorm:1.2102e-01 L8_fnorm:1.2104e-01 L9_fnorm:1.2129e-01 L10_fnorm:1.2128e-01 L11_fnorm:1.2118e-01 L12_fnorm:1.2089e-01 L1_l1linf:3.8567e-01 L2_l1linf:4.3742e-01 L3_l1linf:4.1698e-01 L4_l1linf:4.0440e-01 L5_l1linf:3.7141e-01 L6_l1linf:3.3334e-01 L7_l1linf:3.1935e-01 L8_l1linf:3.3514e-01 L9_l1linf:3.7084e-01 L10_l1linf:4.0956e-01 L11_l1linf:4.3424e-01 L12_l1linf:4.0667e-01 L1_spectral:8.6327e-03 L2_spectral:9.7925e-03 L3_spectral:9.4486e-03 L4_spectral:9.1191e-03 L5_spectral:8.4307e-03 L6_spectral:7.5192e-03 L7_spectral:7.2971e-03 L8_spectral:7.5869e-03 L9_spectral:8.3811e-03 L10_spectral:9.2067e-03 L11_spectral:9.7247e-03 L12_spectral:9.1577e-03 ip_v_neg_g:3.9558e-03 cos_v_neg_g:1.3451e-03 v_norm:1.3997e+00 g_norm:2.1011e+00 hv_norm:4.9919e-01 cos_v_hv:1.1190e-02 hg_norm:6.9893e+01 cos_g_hg:4.7254e-01 v_par:4.6516e-05 v_perp:1.3997e+00 L1_cos_v_neg_g:5.9599e-03 L1_v_norm:1.1914e-01 L2_cos_v_neg_g:4.8732e-03 L2_v_norm:1.1366e-01 L3_cos_v_neg_g:3.6398e-03 L3_v_norm:1.1528e-01 L4_cos_v_neg_g:2.7588e-03 
L4_v_norm:1.1874e-01 L5_cos_v_neg_g:3.3639e-03 L5_v_norm:1.2039e-01 L6_cos_v_neg_g:4.2932e-03 L6_v_norm:1.2095e-01 L7_cos_v_neg_g:4.4241e-03 L7_v_norm:1.2102e-01 L8_cos_v_neg_g:3.5020e-03 L8_v_norm:1.2104e-01 L9_cos_v_neg_g:5.0658e-03 L9_v_norm:1.2129e-01 L10_cos_v_neg_g:5.3029e-03 L10_v_norm:1.2128e-01 L11_cos_v_neg_g:5.4456e-03 L11_v_norm:1.2118e-01 L12_cos_v_neg_g:3.1375e-03 L12_v_norm:1.2089e-01 +step:7500 train loss:3.468894 +step:7501 train loss:3.457961 +step:7502 train loss:3.448646 +step:7503 train loss:3.427374 +step:7504 train loss:3.451311 +step:7505 train loss:3.443605 +step:7506 train loss:3.502479 +step:7507 train loss:3.419435 +step:7508 train loss:3.488099 +step:7509 train loss:3.459374 +step:7510 train loss:3.489698 +step:7511 train loss:3.494143 +step:7512 train loss:3.760120 +step:7513 train loss:3.445535 +step:7514 train loss:3.479738 +step:7515 train loss:3.442809 +step:7516 train loss:3.457674 +step:7517 train loss:3.492203 +step:7518 train loss:3.463282 +step:7519 train loss:3.478929 +step:7520 train loss:3.543097 +step:7521 train loss:3.428725 +step:7522 train loss:3.486805 +step:7523 train loss:3.517631 +step:7524 train loss:3.465766 +step:7525 train loss:3.468001 +step:7526 train loss:3.414967 +step:7527 train loss:3.424769 +step:7528 train loss:3.524024 +step:7529 train loss:3.499933 +step:7530 train loss:3.447295 +step:7531 train loss:3.523828 +step:7532 train loss:3.511493 +step:7533 train loss:3.437793 +step:7534 train loss:3.501710 +step:7535 train loss:3.504573 +step:7536 train loss:3.536102 +step:7537 train loss:3.558938 +step:7538 train loss:3.581836 +step:7539 train loss:3.481878 +step:7540 train loss:3.465843 +step:7541 train loss:3.523255 +step:7542 train loss:3.482398 +step:7543 train loss:3.437892 +step:7544 train loss:3.483628 +step:7545 train loss:3.471320 +step:7546 train loss:3.425663 +step:7547 train loss:3.472682 +step:7548 train loss:3.485587 +step:7549 train loss:3.467104 +step:7550 train loss:3.468575 +step:7551 train loss:3.566395 +step:7552 train loss:3.480555 +step:7553 train loss:3.519524 +step:7554 train loss:3.439861 +step:7555 train loss:3.534521 +step:7556 train loss:3.435653 +step:7557 train loss:3.532348 +step:7558 train loss:3.518778 +step:7559 train loss:3.477950 +step:7560 train loss:3.572671 +step:7561 train loss:3.540185 +step:7562 train loss:3.447845 +step:7563 train loss:3.442626 +step:7564 train loss:3.494459 +step:7565 train loss:3.510758 +step:7566 train loss:3.504802 +step:7567 train loss:3.520918 +step:7568 train loss:3.464905 +step:7569 train loss:3.526241 +step:7570 train loss:3.507974 +step:7571 train loss:3.588071 +step:7572 train loss:3.442263 +step:7573 train loss:3.507315 +step:7574 train loss:3.469541 +step:7575 train loss:3.467724 +step:7576 train loss:3.471195 +step:7577 train loss:3.487790 +step:7578 train loss:3.548669 +step:7579 train loss:3.480891 +step:7580 train loss:3.470048 +step:7581 train loss:3.457599 +step:7582 train loss:3.512983 +step:7583 train loss:3.449530 +step:7584 train loss:3.431298 +step:7585 train loss:3.399270 +step:7586 train loss:3.436231 +step:7587 train loss:3.499718 +step:7588 train loss:3.630445 +step:7589 train loss:3.448386 +step:7590 train loss:3.516875 +step:7591 train loss:3.523387 +step:7592 train loss:3.479633 +step:7593 train loss:3.504411 +step:7594 train loss:3.502230 +step:7595 train loss:3.472080 +step:7596 train loss:3.523471 +step:7597 train loss:3.426613 +step:7598 train loss:3.491822 +step:7599 train loss:3.481241 +step:7600 train loss:3.443104 +step:7601 train 
loss:3.555970 +step:7602 train loss:3.493497 +step:7603 train loss:3.456901 +step:7604 train loss:3.600776 +step:7605 train loss:3.488866 +step:7606 train loss:3.521304 +step:7607 train loss:3.475888 +step:7608 train loss:3.487478 +step:7609 train loss:3.523068 +step:7610 train loss:3.479234 +step:7611 train loss:3.456200 +step:7612 train loss:3.400309 +step:7613 train loss:3.449107 +step:7614 train loss:3.518087 +step:7615 train loss:3.477436 +step:7616 train loss:3.544121 +step:7617 train loss:3.444909 +step:7618 train loss:3.530727 +step:7619 train loss:3.473960 +step:7620 train loss:3.459115 +step:7621 train loss:3.408499 +step:7622 train loss:3.685763 +step:7623 train loss:3.696760 +step:7624 train loss:3.510363 +step:7625 train loss:3.549688 +step:7626 train loss:3.466830 +step:7627 train loss:3.537731 +step:7628 train loss:3.419855 +step:7629 train loss:3.482969 +step:7630 train loss:3.491251 +step:7631 train loss:3.475112 +step:7632 train loss:3.526637 +step:7633 train loss:3.594323 +step:7634 train loss:3.553032 +step:7635 train loss:3.461503 +step:7636 train loss:3.484915 +step:7637 train loss:3.433167 +step:7638 train loss:3.546411 +step:7639 train loss:3.473240 +step:7640 train loss:3.453147 +step:7641 train loss:3.485595 +step:7642 train loss:3.819547 +step:7643 train loss:3.572022 +step:7644 train loss:3.498561 +step:7645 train loss:3.481064 +step:7646 train loss:3.468819 +step:7647 train loss:3.463264 +step:7648 train loss:3.495919 +step:7649 train loss:3.454257 +step:7650 train loss:3.506243 +step:7651 train loss:3.527457 +step:7652 train loss:3.406954 +step:7653 train loss:3.603101 +step:7654 train loss:3.459342 +step:7655 train loss:3.479084 +step:7656 train loss:3.456682 +step:7657 train loss:3.465211 +step:7658 train loss:3.423505 +step:7659 train loss:3.487895 +step:7660 train loss:3.419399 +step:7661 train loss:3.435012 +step:7662 train loss:3.440589 +step:7663 train loss:3.484949 +step:7664 train loss:3.444765 +step:7665 train loss:3.420390 +step:7666 train loss:3.529003 +step:7667 train loss:3.441267 +step:7668 train loss:3.551305 +step:7669 train loss:3.483706 +step:7670 train loss:3.439986 +step:7671 train loss:3.492356 +step:7672 train loss:3.514455 +step:7673 train loss:3.478615 +step:7674 train loss:3.516045 +step:7675 train loss:3.571292 +step:7676 train loss:3.541014 +step:7677 train loss:3.565269 +step:7678 train loss:3.507006 +step:7679 train loss:3.529787 +step:7680 train loss:3.533015 +step:7681 train loss:3.500818 +step:7682 train loss:3.470806 +step:7683 train loss:3.472502 +step:7684 train loss:3.445725 +step:7685 train loss:3.425077 +step:7686 train loss:3.544086 +step:7687 train loss:3.460215 +step:7688 train loss:3.426949 +step:7689 train loss:3.477178 +step:7690 train loss:3.443619 +step:7691 train loss:3.472061 +step:7692 train loss:3.504907 +step:7693 train loss:3.505481 +step:7694 train loss:3.557362 +step:7695 train loss:3.483885 +step:7696 train loss:3.459452 +step:7697 train loss:3.447317 +step:7698 train loss:3.509435 +step:7699 train loss:3.504274 +step:7700 train loss:3.405456 +step:7701 train loss:3.521167 +step:7702 train loss:3.464633 +step:7703 train loss:3.466388 +step:7704 train loss:3.519639 +step:7705 train loss:3.476665 +step:7706 train loss:3.412088 +step:7707 train loss:3.533530 +step:7708 train loss:3.469658 +step:7709 train loss:3.489472 +step:7710 train loss:3.552917 +step:7711 train loss:3.512567 +step:7712 train loss:3.461011 +step:7713 train loss:3.538615 +step:7714 train loss:3.483812 +step:7715 train loss:3.435506 
+step:7716 train loss:3.477339 +step:7717 train loss:3.500210 +step:7718 train loss:3.505270 +step:7719 train loss:3.462874 +step:7720 train loss:3.475685 +step:7721 train loss:3.517349 +step:7722 train loss:3.448095 +step:7723 train loss:3.819837 +step:7724 train loss:3.485744 +step:7725 train loss:3.386501 +step:7726 train loss:3.467245 +step:7727 train loss:3.496958 +step:7728 train loss:3.445977 +step:7729 train loss:3.455557 +step:7730 train loss:3.479870 +step:7731 train loss:3.509121 +step:7732 train loss:3.530057 +step:7733 train loss:3.440577 +step:7734 train loss:3.465961 +step:7735 train loss:3.552240 +step:7736 train loss:3.501114 +step:7737 train loss:3.516807 +step:7738 train loss:3.419420 +step:7739 train loss:3.496339 +step:7740 train loss:3.444556 +step:7741 train loss:3.479093 +step:7742 train loss:3.480794 +step:7743 train loss:3.433242 +step:7744 train loss:3.560686 +step:7745 train loss:3.446150 +step:7746 train loss:3.418855 +step:7747 train loss:3.517424 +step:7748 train loss:3.499033 +step:7749 train loss:3.421232 +step:7750 validation loss:3.408098 +step:7750 train loss:3.582253 +step:7751 train loss:3.465866 +step:7752 train loss:3.456423 +step:7753 train loss:3.459690 +step:7754 train loss:3.433570 +step:7755 train loss:3.501446 +step:7756 train loss:3.527527 +step:7757 train loss:3.475806 +step:7758 train loss:3.446226 +step:7759 train loss:3.475736 +step:7760 train loss:3.502029 +step:7761 train loss:3.494884 +step:7762 train loss:3.479840 +step:7763 train loss:3.461758 +step:7764 train loss:3.470006 +step:7765 train loss:3.423542 +step:7766 train loss:3.489710 +step:7767 train loss:3.494210 +step:7768 train loss:3.447398 +step:7769 train loss:3.511663 +step:7770 train loss:3.528389 +step:7771 train loss:3.503359 +step:7772 train loss:3.476658 +step:7773 train loss:3.534321 +step:7774 train loss:3.432305 +step:7775 train loss:3.420053 +step:7776 train loss:3.526361 +step:7777 train loss:3.478896 +step:7778 train loss:3.435745 +step:7779 train loss:3.479250 +step:7780 train loss:3.474790 +step:7781 train loss:3.482187 +step:7782 train loss:3.468408 +step:7783 train loss:3.451783 +step:7784 train loss:3.448084 +step:7785 train loss:3.489280 +step:7786 train loss:3.445833 +step:7787 train loss:3.524146 +step:7788 train loss:3.477946 +step:7789 train loss:3.411786 +step:7790 train loss:3.473571 +step:7791 train loss:3.503452 +step:7792 train loss:3.463176 +step:7793 train loss:3.486726 +step:7794 train loss:3.473266 +step:7795 train loss:3.503653 +step:7796 train loss:3.468049 +step:7797 train loss:3.488907 +step:7798 train loss:3.480784 +step:7799 train loss:3.469644 +step:7800 train loss:3.425839 +step:7801 train loss:3.489652 +step:7802 train loss:3.471738 +step:7803 train loss:3.521425 +step:7804 train loss:3.483024 +step:7805 train loss:3.478573 +step:7806 train loss:3.498487 +step:7807 train loss:3.568271 +step:7808 train loss:3.428221 +step:7809 train loss:3.406531 +step:7810 train loss:3.492337 +step:7811 train loss:3.427207 +step:7812 train loss:3.445280 +step:7813 train loss:3.534482 +step:7814 train loss:3.606681 +step:7815 train loss:3.416691 +step:7816 train loss:3.502531 +step:7817 train loss:3.532599 +step:7818 train loss:3.430580 +step:7819 train loss:3.483053 +step:7820 train loss:3.523448 +step:7821 train loss:3.456525 +step:7822 train loss:3.416833 +step:7823 train loss:3.488853 +step:7824 train loss:3.469368 +step:7825 train loss:3.457398 +step:7826 train loss:3.455922 +step:7827 train loss:3.493237 +step:7828 train loss:3.484952 +step:7829 
train loss:3.442840 +step:7830 train loss:3.452215 +step:7831 train loss:3.453142 +step:7832 train loss:3.523415 +step:7833 train loss:3.499245 +step:7834 train loss:3.466255 +step:7835 train loss:3.490149 +step:7836 train loss:3.599751 +step:7837 train loss:3.487617 +step:7838 train loss:3.456129 +step:7839 train loss:3.412701 +step:7840 train loss:3.430277 +step:7841 train loss:3.527576 +step:7842 train loss:3.513272 +step:7843 train loss:3.567341 +step:7844 train loss:3.494836 +step:7845 train loss:3.471394 +step:7846 train loss:3.586473 +step:7847 train loss:3.473692 +step:7848 train loss:3.485341 +step:7849 train loss:3.499818 +step:7850 train loss:3.470953 +step:7851 train loss:3.498917 +step:7852 train loss:3.471733 +step:7853 train loss:3.443403 +step:7854 train loss:3.475817 +step:7855 train loss:3.473869 +step:7856 train loss:3.478466 +step:7857 train loss:3.467221 +step:7858 train loss:3.474176 +step:7859 train loss:3.481851 +step:7860 train loss:3.517010 +step:7861 train loss:3.505508 +step:7862 train loss:3.448525 +step:7863 train loss:3.551210 +step:7864 train loss:3.392328 +step:7865 train loss:3.473225 +step:7866 train loss:3.443392 +step:7867 train loss:3.492660 +step:7868 train loss:3.469294 +step:7869 train loss:3.471178 +step:7870 train loss:3.388368 +step:7871 train loss:3.456263 +step:7872 train loss:3.446244 +step:7873 train loss:3.524132 +step:7874 train loss:3.466837 +step:7875 train loss:3.474134 +step:7876 train loss:3.490264 +step:7877 train loss:3.444509 +step:7878 train loss:3.482166 +step:7879 train loss:3.822460 +step:7880 train loss:3.473753 +step:7881 train loss:3.503226 +step:7882 train loss:3.579012 +step:7883 train loss:3.394755 +step:7884 train loss:3.485321 +step:7885 train loss:3.468570 +step:7886 train loss:3.467430 +step:7887 train loss:3.463130 +step:7888 train loss:3.493984 +step:7889 train loss:3.541253 +step:7890 train loss:3.448410 +step:7891 train loss:3.497830 +step:7892 train loss:3.468676 +step:7893 train loss:3.443691 +step:7894 train loss:3.465411 +step:7895 train loss:3.448975 +step:7896 train loss:3.449794 +step:7897 train loss:3.473724 +step:7898 train loss:3.480947 +step:7899 train loss:3.468952 +step:7900 train loss:3.438598 +step:7901 train loss:3.430412 +step:7902 train loss:3.576213 +step:7903 train loss:3.422009 +step:7904 train loss:3.473014 +step:7905 train loss:3.540709 +step:7906 train loss:3.436989 +step:7907 train loss:3.463040 +step:7908 train loss:3.515259 +step:7909 train loss:3.566214 +step:7910 train loss:3.444892 +step:7911 train loss:3.469053 +step:7912 train loss:3.471578 +step:7913 train loss:3.444141 +step:7914 train loss:3.482260 +step:7915 train loss:3.584068 +step:7916 train loss:3.456614 +step:7917 train loss:3.515067 +step:7918 train loss:3.456573 +step:7919 train loss:3.446389 +step:7920 train loss:3.486911 +step:7921 train loss:3.491621 +step:7922 train loss:3.466468 +step:7923 train loss:3.515507 +step:7924 train loss:3.475759 +step:7925 train loss:3.496458 +step:7926 train loss:3.399613 +step:7927 train loss:3.679544 +step:7928 train loss:3.507097 +step:7929 train loss:3.469619 +step:7930 train loss:3.430131 +step:7931 train loss:3.454339 +step:7932 train loss:3.478285 +step:7933 train loss:3.490806 +step:7934 train loss:3.585341 +step:7935 train loss:3.506459 +step:7936 train loss:3.478238 +step:7937 train loss:3.429127 +step:7938 train loss:3.441656 +step:7939 train loss:3.489586 +step:7940 train loss:3.474138 +step:7941 train loss:3.499261 +step:7942 train loss:3.491971 +step:7943 train loss:3.502598 
+step:7944 train loss:3.423742 +step:7945 train loss:3.525619 +step:7946 train loss:3.475718 +step:7947 train loss:3.488521 +step:7948 train loss:3.447633 +step:7949 train loss:3.499004 +step:7950 train loss:3.552916 +step:7951 train loss:3.517889 +step:7952 train loss:3.662437 +step:7953 train loss:3.558540 +step:7954 train loss:3.462325 +step:7955 train loss:3.446839 +step:7956 train loss:3.452762 +step:7957 train loss:3.527563 +step:7958 train loss:3.538724 +step:7959 train loss:3.491825 +step:7960 train loss:3.554278 +step:7961 train loss:3.463839 +step:7962 train loss:3.435444 +step:7963 train loss:3.475154 +step:7964 train loss:3.469638 +step:7965 train loss:3.480602 +step:7966 train loss:3.451823 +step:7967 train loss:3.476380 +step:7968 train loss:3.483888 +step:7969 train loss:3.442338 +step:7970 train loss:3.411093 +step:7971 train loss:3.496330 +step:7972 train loss:3.472403 +step:7973 train loss:3.444033 +step:7974 train loss:3.483294 +step:7975 train loss:3.469459 +step:7976 train loss:3.490373 +step:7977 train loss:3.521214 +step:7978 train loss:3.543636 +step:7979 train loss:3.491436 +step:7980 train loss:3.395507 +step:7981 train loss:3.436240 +step:7982 train loss:3.485388 +step:7983 train loss:3.498547 +step:7984 train loss:3.537892 +step:7985 train loss:3.466729 +step:7986 train loss:3.488108 +step:7987 train loss:3.541842 +step:7988 train loss:3.516129 +step:7989 train loss:3.420685 +step:7990 train loss:3.436156 +step:7991 train loss:3.449811 +step:7992 train loss:3.475942 +step:7993 train loss:3.456838 +step:7994 train loss:3.512049 +step:7995 train loss:3.512682 +step:7996 train loss:3.477141 +step:7997 train loss:3.495862 +step:7998 train loss:3.520962 +step:7999 train loss:3.449728 +step:8000 validation loss:3.400776 total_sharp:4.3949e-03 L1_sharp:6.3293e-02 L2_sharp:1.8583e-02 L3_sharp:7.1486e-03 L4_sharp:3.5620e-03 L5_sharp:4.8603e-03 L6_sharp:6.6511e-03 L7_sharp:7.3992e-03 L8_sharp:6.9497e-03 L9_sharp:5.5519e-03 L10_sharp:4.2872e-03 L11_sharp:3.9709e-03 L12_sharp:5.3166e-03 total_fnorm:1.3913e+00 total_l1_linf:9.5029e+03 total_spectral:1.3913e+00 L1_fnorm:1.1878e-01 L2_fnorm:1.1385e-01 L3_fnorm:1.1600e-01 L4_fnorm:1.1908e-01 L5_fnorm:1.2066e-01 L6_fnorm:1.2108e-01 L7_fnorm:1.2121e-01 L8_fnorm:1.2110e-01 L9_fnorm:1.2143e-01 L10_fnorm:1.2151e-01 L11_fnorm:1.2121e-01 L12_fnorm:1.2091e-01 L1_l1linf:3.7173e-01 L2_l1linf:4.2613e-01 L3_l1linf:4.0508e-01 L4_l1linf:3.8994e-01 L5_l1linf:3.7303e-01 L6_l1linf:3.3686e-01 L7_l1linf:3.2428e-01 L8_l1linf:3.2750e-01 L9_l1linf:3.6544e-01 L10_l1linf:3.8808e-01 L11_l1linf:3.9503e-01 L12_l1linf:4.0393e-01 L1_spectral:8.2302e-03 L2_spectral:9.5246e-03 L3_spectral:9.0676e-03 L4_spectral:8.8233e-03 L5_spectral:8.3673e-03 L6_spectral:7.6219e-03 L7_spectral:7.3600e-03 L8_spectral:7.3951e-03 L9_spectral:8.2270e-03 L10_spectral:8.7537e-03 L11_spectral:8.8206e-03 L12_spectral:9.0816e-03 ip_v_neg_g:3.9257e-03 cos_v_neg_g:1.4550e-03 v_norm:1.3913e+00 g_norm:1.9392e+00 hv_norm:6.6286e-01 cos_v_hv:9.2245e-03 hg_norm:4.8508e+01 cos_g_hg:4.5197e-01 v_par:6.6147e-05 v_perp:1.3913e+00 L1_cos_v_neg_g:4.3808e-03 L1_v_norm:1.1878e-01 L2_cos_v_neg_g:5.1426e-03 L2_v_norm:1.1385e-01 L3_cos_v_neg_g:3.2865e-03 L3_v_norm:1.1600e-01 L4_cos_v_neg_g:3.2393e-03 L4_v_norm:1.1908e-01 L5_cos_v_neg_g:4.2688e-03 L5_v_norm:1.2066e-01 L6_cos_v_neg_g:5.5164e-03 L6_v_norm:1.2108e-01 L7_cos_v_neg_g:6.0897e-03 L7_v_norm:1.2121e-01 L8_cos_v_neg_g:5.8747e-03 L8_v_norm:1.2110e-01 L9_cos_v_neg_g:5.2033e-03 L9_v_norm:1.2143e-01 L10_cos_v_neg_g:4.3268e-03 L10_v_norm:1.2151e-01 
L11_cos_v_neg_g:2.8774e-03 L11_v_norm:1.2121e-01 L12_cos_v_neg_g:3.6139e-03 L12_v_norm:1.2091e-01 +step:8000 train loss:3.520811 +step:8001 train loss:3.481232 +step:8002 train loss:3.500571 +step:8003 train loss:3.516910 +step:8004 train loss:3.494237 +step:8005 train loss:3.416003 +step:8006 train loss:3.491907 +step:8007 train loss:3.462469 +step:8008 train loss:3.487029 +step:8009 train loss:3.563176 +step:8010 train loss:3.780987 +step:8011 train loss:3.440977 +step:8012 train loss:3.521962 +step:8013 train loss:3.474210 +step:8014 train loss:3.488617 +step:8015 train loss:3.485752 +step:8016 train loss:3.475010 +step:8017 train loss:3.494719 +step:8018 train loss:3.457810 +step:8019 train loss:3.426324 +step:8020 train loss:3.465417 +step:8021 train loss:3.539488 +step:8022 train loss:3.457659 +step:8023 train loss:3.490086 +step:8024 train loss:3.356126 +step:8025 train loss:3.463896 +step:8026 train loss:3.474589 +step:8027 train loss:3.481415 +step:8028 train loss:3.536508 +step:8029 train loss:3.464659 +step:8030 train loss:3.427486 +step:8031 train loss:3.485847 +step:8032 train loss:3.469454 +step:8033 train loss:3.422056 +step:8034 train loss:3.457801 +step:8035 train loss:3.445225 +step:8036 train loss:3.435811 +step:8037 train loss:3.407496 +step:8038 train loss:3.421695 +step:8039 train loss:3.515958 +step:8040 train loss:3.448728 +step:8041 train loss:3.446781 +step:8042 train loss:3.482024 +step:8043 train loss:3.426892 +step:8044 train loss:3.439829 +step:8045 train loss:3.509233 +step:8046 train loss:3.432946 +step:8047 train loss:3.436056 +step:8048 train loss:3.469801 +step:8049 train loss:3.514087 +step:8050 train loss:3.457334 +step:8051 train loss:3.431529 +step:8052 train loss:3.493088 +step:8053 train loss:3.447141 +step:8054 train loss:3.482768 +step:8055 train loss:3.513510 +step:8056 train loss:3.477750 +step:8057 train loss:3.555791 +step:8058 train loss:3.461115 +step:8059 train loss:3.519492 +step:8060 train loss:3.493348 +step:8061 train loss:3.377521 +step:8062 train loss:3.510178 +step:8063 train loss:3.475742 +step:8064 train loss:3.434887 +step:8065 train loss:3.500163 +step:8066 train loss:3.460056 +step:8067 train loss:3.522163 +step:8068 train loss:3.449966 +step:8069 train loss:3.474610 +step:8070 train loss:3.439435 +step:8071 train loss:3.449477 +step:8072 train loss:3.492277 +step:8073 train loss:3.445097 +step:8074 train loss:3.454150 +step:8075 train loss:3.445039 +step:8076 train loss:3.487988 +step:8077 train loss:3.497439 +step:8078 train loss:3.437838 +step:8079 train loss:3.460221 +step:8080 train loss:3.446681 +step:8081 train loss:3.463917 +step:8082 train loss:3.485277 +step:8083 train loss:3.379565 +step:8084 train loss:3.520483 +step:8085 train loss:3.393063 +step:8086 train loss:3.519063 +step:8087 train loss:3.415392 +step:8088 train loss:3.460010 +step:8089 train loss:3.498167 +step:8090 train loss:3.521487 +step:8091 train loss:3.464101 +step:8092 train loss:3.444747 +step:8093 train loss:3.453144 +step:8094 train loss:3.453295 +step:8095 train loss:3.480183 +step:8096 train loss:3.481847 +step:8097 train loss:3.406770 +step:8098 train loss:3.422474 +step:8099 train loss:3.414412 +step:8100 train loss:3.463929 +step:8101 train loss:3.544168 +step:8102 train loss:3.479159 +step:8103 train loss:3.431189 +step:8104 train loss:3.483813 +step:8105 train loss:3.477362 +step:8106 train loss:3.444901 +step:8107 train loss:3.422231 +step:8108 train loss:3.442078 +step:8109 train loss:3.434741 +step:8110 train loss:3.499074 +step:8111 
train loss:3.422412 +step:8112 train loss:3.441260 +step:8113 train loss:3.432491 +step:8114 train loss:3.375415 +step:8115 train loss:3.432287 +step:8116 train loss:3.465516 +step:8117 train loss:3.439344 +step:8118 train loss:3.428193 +step:8119 train loss:3.471889 +step:8120 train loss:3.419528 +step:8121 train loss:3.476759 +step:8122 train loss:3.461536 +step:8123 train loss:3.466547 +step:8124 train loss:3.430203 +step:8125 train loss:3.409708 +step:8126 train loss:3.407201 +step:8127 train loss:3.497024 +step:8128 train loss:3.502999 +step:8129 train loss:3.426247 +step:8130 train loss:3.451645 +step:8131 train loss:3.423254 +step:8132 train loss:3.491785 +step:8133 train loss:3.416126 +step:8134 train loss:3.451850 +step:8135 train loss:3.445179 +step:8136 train loss:3.453672 +step:8137 train loss:3.516204 +step:8138 train loss:3.424392 +step:8139 train loss:3.499142 +step:8140 train loss:3.426579 +step:8141 train loss:3.448448 +step:8142 train loss:3.431211 +step:8143 train loss:3.482399 +step:8144 train loss:3.459169 +step:8145 train loss:3.425141 +step:8146 train loss:3.436506 +step:8147 train loss:3.459629 +step:8148 train loss:3.553581 +step:8149 train loss:3.461364 +step:8150 train loss:3.440589 +step:8151 train loss:3.435418 +step:8152 train loss:3.528718 +step:8153 train loss:3.407480 +step:8154 train loss:3.424355 +step:8155 train loss:3.449689 +step:8156 train loss:3.432387 +step:8157 train loss:3.451960 +step:8158 train loss:3.464417 +step:8159 train loss:3.481890 +step:8160 train loss:3.433454 +step:8161 train loss:3.475463 +step:8162 train loss:3.406578 +step:8163 train loss:3.466690 +step:8164 train loss:3.454543 +step:8165 train loss:3.504306 +step:8166 train loss:3.506366 +step:8167 train loss:3.411532 +step:8168 train loss:3.392850 +step:8169 train loss:3.441645 +step:8170 train loss:3.392023 +step:8171 train loss:3.451555 +step:8172 train loss:3.447918 +step:8173 train loss:3.451299 +step:8174 train loss:3.459171 +step:8175 train loss:3.420470 +step:8176 train loss:3.416086 +step:8177 train loss:3.463001 +step:8178 train loss:3.549142 +step:8179 train loss:3.458113 +step:8180 train loss:3.479908 +step:8181 train loss:3.477987 +step:8182 train loss:3.438350 +step:8183 train loss:3.424832 +step:8184 train loss:3.419049 +step:8185 train loss:3.458738 +step:8186 train loss:3.462335 +step:8187 train loss:3.471389 +step:8188 train loss:3.399675 +step:8189 train loss:3.545809 +step:8190 train loss:3.480234 +step:8191 train loss:3.483310 +step:8192 train loss:3.596240 +step:8193 train loss:3.464217 +step:8194 train loss:3.401244 +step:8195 train loss:3.496130 +step:8196 train loss:3.414971 +step:8197 train loss:3.441012 +step:8198 train loss:3.451003 +step:8199 train loss:3.450435 +step:8200 train loss:3.432553 +step:8201 train loss:3.545653 +step:8202 train loss:3.462584 +step:8203 train loss:3.482019 +step:8204 train loss:3.391738 +step:8205 train loss:3.398748 +step:8206 train loss:3.523358 +step:8207 train loss:3.449160 +step:8208 train loss:3.469135 +step:8209 train loss:3.511155 +step:8210 train loss:3.495953 +step:8211 train loss:3.426987 +step:8212 train loss:3.485024 +step:8213 train loss:3.496068 +step:8214 train loss:3.532891 +step:8215 train loss:3.510479 +step:8216 train loss:3.489670 +step:8217 train loss:3.469335 +step:8218 train loss:3.479174 +step:8219 train loss:3.612635 +step:8220 train loss:3.439326 +step:8221 train loss:3.462682 +step:8222 train loss:3.413599 +step:8223 train loss:3.434105 +step:8224 train loss:3.442340 +step:8225 train loss:3.494217 
+step:8226 train loss:3.423637 +step:8227 train loss:3.493361 +step:8228 train loss:3.381868 +step:8229 train loss:3.419733 +step:8230 train loss:3.439976 +step:8231 train loss:3.461682 +step:8232 train loss:3.462713 +step:8233 train loss:3.505722 +step:8234 train loss:3.504740 +step:8235 train loss:3.470874 +step:8236 train loss:3.458762 +step:8237 train loss:3.409618 +step:8238 train loss:3.662370 +step:8239 train loss:3.496975 +step:8240 train loss:3.440240 +step:8241 train loss:3.413345 +step:8242 train loss:3.452256 +step:8243 train loss:3.442456 +step:8244 train loss:3.454098 +step:8245 train loss:3.439374 +step:8246 train loss:3.504866 +step:8247 train loss:3.538956 +step:8248 train loss:3.455199 +step:8249 train loss:3.445865 +step:8250 validation loss:3.392523 +step:8250 train loss:3.435962 +step:8251 train loss:3.532312 +step:8252 train loss:3.471377 +step:8253 train loss:3.435765 +step:8254 train loss:3.410624 +step:8255 train loss:3.440878 +step:8256 train loss:3.424274 +step:8257 train loss:3.531242 +step:8258 train loss:3.450339 +step:8259 train loss:3.433711 +step:8260 train loss:3.435460 +step:8261 train loss:3.432272 +step:8262 train loss:3.448486 +step:8263 train loss:3.461403 +step:8264 train loss:3.426793 +step:8265 train loss:3.417446 +step:8266 train loss:3.427128 +step:8267 train loss:3.358382 +step:8268 train loss:3.483582 +step:8269 train loss:3.414068 +step:8270 train loss:3.467861 +step:8271 train loss:3.494714 +step:8272 train loss:3.521316 +step:8273 train loss:3.396663 +step:8274 train loss:3.460339 +step:8275 train loss:3.420793 +step:8276 train loss:3.456343 +step:8277 train loss:3.526013 +step:8278 train loss:3.539919 +step:8279 train loss:3.455232 +step:8280 train loss:3.439341 +step:8281 train loss:3.408792 +step:8282 train loss:3.468376 +step:8283 train loss:3.455807 +step:8284 train loss:3.437217 +step:8285 train loss:3.432341 +step:8286 train loss:3.534457 +step:8287 train loss:3.477307 +step:8288 train loss:3.447735 +step:8289 train loss:3.461670 +step:8290 train loss:3.399741 +step:8291 train loss:3.442587 +step:8292 train loss:3.468836 +step:8293 train loss:3.447844 +step:8294 train loss:3.414954 +step:8295 train loss:3.454387 +step:8296 train loss:3.519285 +step:8297 train loss:3.599001 +step:8298 train loss:3.421899 +step:8299 train loss:3.457962 +step:8300 train loss:3.467716 +step:8301 train loss:3.439263 +step:8302 train loss:3.496396 +step:8303 train loss:3.633157 +step:8304 train loss:3.437642 +step:8305 train loss:3.484322 +step:8306 train loss:3.460476 +step:8307 train loss:3.477674 +step:8308 train loss:3.475188 +step:8309 train loss:3.499413 +step:8310 train loss:3.414319 +step:8311 train loss:3.507672 +step:8312 train loss:3.497642 +step:8313 train loss:3.562245 +step:8314 train loss:3.434102 +step:8315 train loss:3.384167 +step:8316 train loss:3.440824 +step:8317 train loss:3.464303 +step:8318 train loss:3.454321 +step:8319 train loss:3.491683 +step:8320 train loss:3.513822 +step:8321 train loss:3.418786 +step:8322 train loss:3.434229 +step:8323 train loss:3.470205 +step:8324 train loss:3.448060 +step:8325 train loss:3.501728 +step:8326 train loss:3.472171 +step:8327 train loss:3.458051 +step:8328 train loss:3.532860 +step:8329 train loss:3.436302 +step:8330 train loss:3.480318 +step:8331 train loss:3.405092 +step:8332 train loss:3.508569 +step:8333 train loss:3.522712 +step:8334 train loss:3.391473 +step:8335 train loss:3.451344 +step:8336 train loss:3.546147 +step:8337 train loss:3.477623 +step:8338 train loss:3.445210 +step:8339 
train loss:3.423399 +step:8340 train loss:3.516184 +step:8341 train loss:3.414574 +step:8342 train loss:3.489984 +step:8343 train loss:3.401916 +step:8344 train loss:3.448678 +step:8345 train loss:3.482526 +step:8346 train loss:3.566699 +step:8347 train loss:3.451590 +step:8348 train loss:3.481249 +step:8349 train loss:3.451962 +step:8350 train loss:3.475032 +step:8351 train loss:3.413661 +step:8352 train loss:3.502128 +step:8353 train loss:3.456673 +step:8354 train loss:3.437868 +step:8355 train loss:3.435198 +step:8356 train loss:3.434712 +step:8357 train loss:3.447587 +step:8358 train loss:3.423070 +step:8359 train loss:3.416034 +step:8360 train loss:3.466938 +step:8361 train loss:3.477255 +step:8362 train loss:3.498924 +step:8363 train loss:3.494431 +step:8364 train loss:3.461519 +step:8365 train loss:3.605558 +step:8366 train loss:3.449537 +step:8367 train loss:3.421850 +step:8368 train loss:3.390385 +step:8369 train loss:3.421161 +step:8370 train loss:3.503510 +step:8371 train loss:3.475024 +step:8372 train loss:3.451734 +step:8373 train loss:3.462580 +step:8374 train loss:3.396388 +step:8375 train loss:3.457797 +step:8376 train loss:3.498891 +step:8377 train loss:3.324183 +step:8378 train loss:3.541492 +step:8379 train loss:3.405347 +step:8380 train loss:3.414099 +step:8381 train loss:3.417217 +step:8382 train loss:3.443800 +step:8383 train loss:3.404584 +step:8384 train loss:3.447820 +step:8385 train loss:3.458230 +step:8386 train loss:3.440995 +step:8387 train loss:3.600801 +step:8388 train loss:3.511495 +step:8389 train loss:3.492312 +step:8390 train loss:3.492486 +step:8391 train loss:3.420796 +step:8392 train loss:3.431735 +step:8393 train loss:3.386045 +step:8394 train loss:3.483894 +step:8395 train loss:3.483868 +step:8396 train loss:3.510128 +step:8397 train loss:3.443328 +step:8398 train loss:3.461995 +step:8399 train loss:3.428800 +step:8400 train loss:3.433680 +step:8401 train loss:3.440858 +step:8402 train loss:3.421549 +step:8403 train loss:3.445239 +step:8404 train loss:3.444783 +step:8405 train loss:3.400944 +step:8406 train loss:3.443394 +step:8407 train loss:3.483035 +step:8408 train loss:3.453617 +step:8409 train loss:3.378720 +step:8410 train loss:3.440560 +step:8411 train loss:3.468274 +step:8412 train loss:3.528568 +step:8413 train loss:3.503954 +step:8414 train loss:3.496764 +step:8415 train loss:3.421389 +step:8416 train loss:3.466450 +step:8417 train loss:3.383142 +step:8418 train loss:3.487979 +step:8419 train loss:3.443079 +step:8420 train loss:3.520216 +step:8421 train loss:3.436950 +step:8422 train loss:3.454418 +step:8423 train loss:3.472041 +step:8424 train loss:3.474182 +step:8425 train loss:3.533728 +step:8426 train loss:3.503385 +step:8427 train loss:3.420735 +step:8428 train loss:3.434334 +step:8429 train loss:3.494640 +step:8430 train loss:3.435293 +step:8431 train loss:3.440832 +step:8432 train loss:3.441568 +step:8433 train loss:3.416921 +step:8434 train loss:3.453919 +step:8435 train loss:3.370718 +step:8436 train loss:3.455459 +step:8437 train loss:3.493605 +step:8438 train loss:3.475643 +step:8439 train loss:3.413218 +step:8440 train loss:3.385749 +step:8441 train loss:3.440413 +step:8442 train loss:3.467067 +step:8443 train loss:3.422598 +step:8444 train loss:3.457525 +step:8445 train loss:3.406363 +step:8446 train loss:3.457072 +step:8447 train loss:3.466637 +step:8448 train loss:3.450155 +step:8449 train loss:3.443080 +step:8450 train loss:3.433170 +step:8451 train loss:3.463003 +step:8452 train loss:3.436817 +step:8453 train loss:3.419205 
+step:8454 train loss:3.467847 +step:8455 train loss:3.539051 +step:8456 train loss:3.518679 +step:8457 train loss:3.572238 +step:8458 train loss:3.458805 +step:8459 train loss:3.466245 +step:8460 train loss:3.397859 +step:8461 train loss:3.554878 +step:8462 train loss:3.425213 +step:8463 train loss:3.460821 +step:8464 train loss:3.477288 +step:8465 train loss:3.482388 +step:8466 train loss:3.457331 +step:8467 train loss:3.458181 +step:8468 train loss:3.712843 +step:8469 train loss:3.421123 +step:8470 train loss:3.415968 +step:8471 train loss:3.457504 +step:8472 train loss:3.479989 +step:8473 train loss:3.431431 +step:8474 train loss:3.560467 +step:8475 train loss:3.517070 +step:8476 train loss:3.466289 +step:8477 train loss:3.453347 +step:8478 train loss:3.437421 +step:8479 train loss:3.437992 +step:8480 train loss:3.514394 +step:8481 train loss:3.433205 +step:8482 train loss:3.429306 +step:8483 train loss:3.573268 +step:8484 train loss:3.458924 +step:8485 train loss:3.502424 +step:8486 train loss:3.414901 +step:8487 train loss:3.468800 +step:8488 train loss:3.415768 +step:8489 train loss:3.494108 +step:8490 train loss:3.479572 +step:8491 train loss:3.500622 +step:8492 train loss:3.454962 +step:8493 train loss:3.526892 +step:8494 train loss:3.389630 +step:8495 train loss:3.488196 +step:8496 train loss:3.431900 +step:8497 train loss:3.467374 +step:8498 train loss:3.481574 +step:8499 train loss:3.461963 +step:8500 validation loss:3.392198 total_sharp:3.0892e-03 L1_sharp:2.4635e-02 L2_sharp:7.2719e-03 L3_sharp:6.7786e-03 L4_sharp:3.6173e-03 L5_sharp:4.8772e-03 L6_sharp:5.6414e-03 L7_sharp:6.3454e-03 L8_sharp:5.8202e-03 L9_sharp:4.4506e-03 L10_sharp:3.1452e-03 L11_sharp:3.2932e-03 L12_sharp:6.5031e-03 total_fnorm:1.3924e+00 total_l1_linf:9.4896e+03 total_spectral:1.3924e+00 L1_fnorm:1.1848e-01 L2_fnorm:1.1460e-01 L3_fnorm:1.1568e-01 L4_fnorm:1.1867e-01 L5_fnorm:1.2018e-01 L6_fnorm:1.2078e-01 L7_fnorm:1.2100e-01 L8_fnorm:1.2081e-01 L9_fnorm:1.2099e-01 L10_fnorm:1.2076e-01 L11_fnorm:1.2030e-01 L12_fnorm:1.2040e-01 L1_l1linf:3.5232e-01 L2_l1linf:4.2106e-01 L3_l1linf:4.0850e-01 L4_l1linf:4.0578e-01 L5_l1linf:3.7226e-01 L6_l1linf:3.1304e-01 L7_l1linf:3.1115e-01 L8_l1linf:3.1081e-01 L9_l1linf:3.4247e-01 L10_l1linf:3.5740e-01 L11_l1linf:3.8910e-01 L12_l1linf:3.9434e-01 L1_spectral:7.9434e-03 L2_spectral:9.5319e-03 L3_spectral:9.2287e-03 L4_spectral:9.1658e-03 L5_spectral:8.4102e-03 L6_spectral:7.0836e-03 L7_spectral:7.0237e-03 L8_spectral:7.0304e-03 L9_spectral:7.7046e-03 L10_spectral:8.1322e-03 L11_spectral:8.7443e-03 L12_spectral:8.9272e-03 ip_v_neg_g:3.9808e-03 cos_v_neg_g:1.4963e-03 v_norm:1.3924e+00 g_norm:1.9107e+00 hv_norm:3.7739e-01 cos_v_hv:1.1398e-02 hg_norm:4.1683e+01 cos_g_hg:4.3802e-01 v_par:5.3875e-05 v_perp:1.3924e+00 L1_cos_v_neg_g:6.8936e-03 L1_v_norm:1.1848e-01 L2_cos_v_neg_g:6.8136e-03 L2_v_norm:1.1460e-01 L3_cos_v_neg_g:5.2353e-03 L3_v_norm:1.1568e-01 L4_cos_v_neg_g:4.9962e-03 L4_v_norm:1.1867e-01 L5_cos_v_neg_g:5.0511e-03 L5_v_norm:1.2018e-01 L6_cos_v_neg_g:5.6716e-03 L6_v_norm:1.2078e-01 L7_cos_v_neg_g:6.5162e-03 L7_v_norm:1.2100e-01 L8_cos_v_neg_g:4.6941e-03 L8_v_norm:1.2081e-01 L9_cos_v_neg_g:4.1411e-03 L9_v_norm:1.2099e-01 L10_cos_v_neg_g:3.8215e-03 L10_v_norm:1.2076e-01 L11_cos_v_neg_g:3.5275e-03 L11_v_norm:1.2030e-01 L12_cos_v_neg_g:1.9487e-03 L12_v_norm:1.2040e-01 +step:8500 train loss:3.453313 +step:8501 train loss:3.678055 +step:8502 train loss:3.685420 +step:8503 train loss:3.450261 +step:8504 train loss:3.442145 +step:8505 train loss:3.422629 +step:8506 train 
loss:3.492112 +step:8507 train loss:3.432141 +step:8508 train loss:3.466435 +step:8509 train loss:3.404455 +step:8510 train loss:3.430004 +step:8511 train loss:3.386105 +step:8512 train loss:3.484744 +step:8513 train loss:3.488188 +step:8514 train loss:3.438229 +step:8515 train loss:3.529756 +step:8516 train loss:3.448621 +step:8517 train loss:3.467586 +step:8518 train loss:3.362759 +step:8519 train loss:3.453839 +step:8520 train loss:3.422518 +step:8521 train loss:3.459290 +step:8522 train loss:3.354292 +step:8523 train loss:3.448526 +step:8524 train loss:3.440119 +step:8525 train loss:3.506407 +step:8526 train loss:3.486565 +step:8527 train loss:3.430359 +step:8528 train loss:3.513159 +step:8529 train loss:3.469384 +step:8530 train loss:3.501949 +step:8531 train loss:3.491549 +step:8532 train loss:3.532809 +step:8533 train loss:3.484028 +step:8534 train loss:3.480295 +step:8535 train loss:3.454699 +step:8536 train loss:3.543376 +step:8537 train loss:3.457453 +step:8538 train loss:3.527490 +step:8539 train loss:3.449733 +step:8540 train loss:3.476529 +step:8541 train loss:3.416093 +step:8542 train loss:3.484121 +step:8543 train loss:3.397331 +step:8544 train loss:3.396307 +step:8545 train loss:3.442359 +step:8546 train loss:3.395875 +step:8547 train loss:3.448884 +step:8548 train loss:3.421312 +step:8549 train loss:3.463829 +step:8550 train loss:3.416868 +step:8551 train loss:3.466508 +step:8552 train loss:3.467940 +step:8553 train loss:3.472636 +step:8554 train loss:3.443137 +step:8555 train loss:3.461456 +step:8556 train loss:3.537207 +step:8557 train loss:3.437420 +step:8558 train loss:3.471163 +step:8559 train loss:3.466238 +step:8560 train loss:3.443075 +step:8561 train loss:3.400602 +step:8562 train loss:3.426323 +step:8563 train loss:3.428299 +step:8564 train loss:3.497798 +step:8565 train loss:3.471668 +step:8566 train loss:3.493291 +step:8567 train loss:3.436657 +step:8568 train loss:3.455532 +step:8569 train loss:3.463631 +step:8570 train loss:3.408319 +step:8571 train loss:3.450807 +step:8572 train loss:3.468358 +step:8573 train loss:3.538628 +step:8574 train loss:3.472261 +step:8575 train loss:3.468592 +step:8576 train loss:3.504055 +step:8577 train loss:3.585479 +step:8578 train loss:3.495724 +step:8579 train loss:3.480333 +step:8580 train loss:3.415967 +step:8581 train loss:3.457508 +step:8582 train loss:3.462724 +step:8583 train loss:3.456647 +step:8584 train loss:3.448591 +step:8585 train loss:3.530317 +step:8586 train loss:3.448247 +step:8587 train loss:3.459595 +step:8588 train loss:3.501668 +step:8589 train loss:3.450686 +step:8590 train loss:3.444464 +step:8591 train loss:3.446391 +step:8592 train loss:3.406050 +step:8593 train loss:3.484184 +step:8594 train loss:3.508940 +step:8595 train loss:3.427245 +step:8596 train loss:3.475385 +step:8597 train loss:3.434467 +step:8598 train loss:3.488257 +step:8599 train loss:3.447665 +step:8600 train loss:3.465828 +step:8601 train loss:3.452723 +step:8602 train loss:3.425555 +step:8603 train loss:3.484894 +step:8604 train loss:3.429911 +step:8605 train loss:3.444676 +step:8606 train loss:3.453171 +step:8607 train loss:3.464172 +step:8608 train loss:3.505717 +step:8609 train loss:3.403316 +step:8610 train loss:3.476430 +step:8611 train loss:3.408586 +step:8612 train loss:3.484939 +step:8613 train loss:3.419347 +step:8614 train loss:3.482528 +step:8615 train loss:3.524199 +step:8616 train loss:3.406113 +step:8617 train loss:3.474018 +step:8618 train loss:3.451793 +step:8619 train loss:3.404465 +step:8620 train loss:3.445952 
+step:8621 train loss:3.479226 +step:8622 train loss:3.435503 +step:8623 train loss:3.451102 +step:8624 train loss:3.524501 +step:8625 train loss:3.446368 +step:8626 train loss:3.452839 +step:8627 train loss:3.450021 +step:8628 train loss:3.481287 +step:8629 train loss:3.390603 +step:8630 train loss:3.489697 +step:8631 train loss:3.432073 +step:8632 train loss:3.487999 +step:8633 train loss:3.435941 +step:8634 train loss:3.667187 +step:8635 train loss:3.462489 +step:8636 train loss:3.507648 +step:8637 train loss:3.433171 +step:8638 train loss:3.434795 +step:8639 train loss:3.490575 +step:8640 train loss:3.405230 +step:8641 train loss:3.504383 +step:8642 train loss:3.455347 +step:8643 train loss:3.563468 +step:8644 train loss:3.408704 +step:8645 train loss:3.481297 +step:8646 train loss:3.440773 +step:8647 train loss:3.468145 +step:8648 train loss:3.415530 +step:8649 train loss:3.497939 +step:8650 train loss:3.453170 +step:8651 train loss:3.464810 +step:8652 train loss:3.434701 +step:8653 train loss:3.467353 +step:8654 train loss:3.511367 +step:8655 train loss:3.440555 +step:8656 train loss:3.483940 +step:8657 train loss:3.486192 +step:8658 train loss:3.455366 +step:8659 train loss:3.447690 +step:8660 train loss:3.393719 +step:8661 train loss:3.451119 +step:8662 train loss:3.396905 +step:8663 train loss:3.467628 +step:8664 train loss:3.382145 +step:8665 train loss:3.405308 +step:8666 train loss:3.481135 +step:8667 train loss:3.374703 +step:8668 train loss:3.481616 +step:8669 train loss:3.520417 +step:8670 train loss:3.418740 +step:8671 train loss:3.417799 +step:8672 train loss:3.637402 +step:8673 train loss:3.401381 +step:8674 train loss:3.470147 +step:8675 train loss:3.510032 +step:8676 train loss:3.454314 +step:8677 train loss:3.478132 +step:8678 train loss:3.426746 +step:8679 train loss:3.484666 +step:8680 train loss:3.461781 +step:8681 train loss:3.466835 +step:8682 train loss:3.421429 +step:8683 train loss:3.439764 +step:8684 train loss:3.511124 +step:8685 train loss:3.457277 +step:8686 train loss:3.447120 +step:8687 train loss:3.402816 +step:8688 train loss:3.419882 +step:8689 train loss:3.489271 +step:8690 train loss:3.429217 +step:8691 train loss:3.505900 +step:8692 train loss:3.394819 +step:8693 train loss:3.482970 +step:8694 train loss:3.483902 +step:8695 train loss:3.468575 +step:8696 train loss:3.492146 +step:8697 train loss:3.450411 +step:8698 train loss:3.485246 +step:8699 train loss:3.436812 +step:8700 train loss:3.462441 +step:8701 train loss:3.426595 +step:8702 train loss:3.414400 +step:8703 train loss:3.424696 +step:8704 train loss:3.381892 +step:8705 train loss:3.460416 +step:8706 train loss:3.481671 +step:8707 train loss:3.477415 +step:8708 train loss:3.423940 +step:8709 train loss:3.484453 +step:8710 train loss:3.413222 +step:8711 train loss:3.467532 +step:8712 train loss:3.374972 +step:8713 train loss:3.453396 +step:8714 train loss:3.559277 +step:8715 train loss:3.415521 +step:8716 train loss:3.467773 +step:8717 train loss:3.441185 +step:8718 train loss:3.477880 +step:8719 train loss:3.445827 +step:8720 train loss:3.556912 +step:8721 train loss:3.448699 +step:8722 train loss:3.542871 +step:8723 train loss:3.412580 +step:8724 train loss:3.426011 +step:8725 train loss:3.453311 +step:8726 train loss:3.407654 +step:8727 train loss:3.485951 +step:8728 train loss:3.443521 +step:8729 train loss:3.446788 +step:8730 train loss:3.427684 +step:8731 train loss:3.429161 +step:8732 train loss:3.531384 +step:8733 train loss:3.453075 +step:8734 train loss:3.492397 +step:8735 train 
loss:3.560873 +step:8736 train loss:3.420143 +step:8737 train loss:3.445946 +step:8738 train loss:3.425123 +step:8739 train loss:3.486112 +step:8740 train loss:3.407638 +step:8741 train loss:3.460416 +step:8742 train loss:3.416964 +step:8743 train loss:3.456689 +step:8744 train loss:3.476955 +step:8745 train loss:3.517819 +step:8746 train loss:3.416888 +step:8747 train loss:3.522764 +step:8748 train loss:3.432669 +step:8749 train loss:3.469228 +step:8750 validation loss:3.384695 +step:8750 train loss:3.479933 +step:8751 train loss:3.518761 +step:8752 train loss:3.375890 +step:8753 train loss:3.422859 +step:8754 train loss:3.475636 +step:8755 train loss:3.457963 +step:8756 train loss:3.501276 +step:8757 train loss:3.415001 +step:8758 train loss:3.570910 +step:8759 train loss:3.418306 +step:8760 train loss:3.451095 +step:8761 train loss:3.527627 +step:8762 train loss:3.422064 +step:8763 train loss:3.395552 +step:8764 train loss:3.470403 +step:8765 train loss:3.536564 +step:8766 train loss:3.467223 +step:8767 train loss:3.427366 +step:8768 train loss:3.464423 +step:8769 train loss:3.437534 +step:8770 train loss:3.483453 +step:8771 train loss:3.456922 +step:8772 train loss:3.475633 +step:8773 train loss:3.436652 +step:8774 train loss:3.471063 +step:8775 train loss:3.468469 +step:8776 train loss:3.415340 +step:8777 train loss:3.449971 +step:8778 train loss:3.461143 +step:8779 train loss:3.481821 +step:8780 train loss:3.446207 +step:8781 train loss:3.451922 +step:8782 train loss:3.471119 +step:8783 train loss:3.449831 +step:8784 train loss:3.476719 +step:8785 train loss:3.460245 +step:8786 train loss:3.538548 +step:8787 train loss:3.479418 +step:8788 train loss:3.384535 +step:8789 train loss:3.481783 +step:8790 train loss:3.410261 +step:8791 train loss:3.459839 +step:8792 train loss:3.399129 +step:8793 train loss:3.489844 +step:8794 train loss:3.410216 +step:8795 train loss:3.482129 +step:8796 train loss:3.624377 +step:8797 train loss:3.372861 +step:8798 train loss:3.527070 +step:8799 train loss:3.447499 +step:8800 train loss:3.439969 +step:8801 train loss:3.460170 +step:8802 train loss:3.518576 +step:8803 train loss:3.477142 +step:8804 train loss:3.459714 +step:8805 train loss:3.477533 +step:8806 train loss:3.447387 +step:8807 train loss:3.438157 +step:8808 train loss:3.392734 +step:8809 train loss:3.522956 +step:8810 train loss:3.421590 +step:8811 train loss:3.412591 +step:8812 train loss:3.455442 +step:8813 train loss:3.365285 +step:8814 train loss:3.554235 +step:8815 train loss:3.398899 +step:8816 train loss:3.516214 +step:8817 train loss:3.454899 +step:8818 train loss:3.385497 +step:8819 train loss:3.502300 +step:8820 train loss:3.433354 +step:8821 train loss:3.459866 +step:8822 train loss:3.437594 +step:8823 train loss:3.454485 +step:8824 train loss:3.515254 +step:8825 train loss:3.491398 +step:8826 train loss:3.458844 +step:8827 train loss:3.419993 +step:8828 train loss:3.462665 +step:8829 train loss:3.442127 +step:8830 train loss:3.420036 +step:8831 train loss:3.497451 +step:8832 train loss:3.433186 +step:8833 train loss:3.468635 +step:8834 train loss:3.431808 +step:8835 train loss:3.369674 +step:8836 train loss:3.497865 +step:8837 train loss:3.401809 +step:8838 train loss:3.443991 +step:8839 train loss:3.429641 +step:8840 train loss:3.434114 +step:8841 train loss:3.445399 +step:8842 train loss:3.455397 +step:8843 train loss:3.468505 +step:8844 train loss:3.432713 +step:8845 train loss:3.456185 +step:8846 train loss:3.421922 +step:8847 train loss:3.460449 +step:8848 train loss:3.507000 
+step:8849 train loss:3.488006 +step:8850 train loss:3.480368 +step:8851 train loss:3.363583 +step:8852 train loss:3.464692 +step:8853 train loss:3.444930 +step:8854 train loss:3.416300 +step:8855 train loss:3.486516 +step:8856 train loss:3.476798 +step:8857 train loss:3.544780 +step:8858 train loss:3.411130 +step:8859 train loss:3.483561 +step:8860 train loss:3.441774 +step:8861 train loss:3.424200 +step:8862 train loss:3.424015 +step:8863 train loss:3.406771 +step:8864 train loss:3.476206 +step:8865 train loss:3.468866 +step:8866 train loss:3.352055 +step:8867 train loss:3.454863 +step:8868 train loss:3.481707 +step:8869 train loss:3.564913 +step:8870 train loss:3.446331 +step:8871 train loss:3.468765 +step:8872 train loss:3.452959 +step:8873 train loss:3.452962 +step:8874 train loss:3.507196 +step:8875 train loss:3.440706 +step:8876 train loss:3.479361 +step:8877 train loss:3.461744 +step:8878 train loss:3.510557 +step:8879 train loss:3.471452 +step:8880 train loss:3.416314 +step:8881 train loss:3.385054 +step:8882 train loss:3.456081 +step:8883 train loss:3.440762 +step:8884 train loss:3.531460 +step:8885 train loss:3.463463 +step:8886 train loss:3.469826 +step:8887 train loss:3.493476 +step:8888 train loss:3.454334 +step:8889 train loss:3.455718 +step:8890 train loss:3.448868 +step:8891 train loss:3.421053 +step:8892 train loss:3.502873 +step:8893 train loss:3.445436 +step:8894 train loss:3.463141 +step:8895 train loss:3.490977 +step:8896 train loss:3.408007 +step:8897 train loss:3.499762 +step:8898 train loss:3.433106 +step:8899 train loss:3.453013 +step:8900 train loss:3.421457 +step:8901 train loss:3.437402 +step:8902 train loss:3.478080 +step:8903 train loss:3.415300 +step:8904 train loss:3.469364 +step:8905 train loss:3.442398 +step:8906 train loss:3.432735 +step:8907 train loss:3.445700 +step:8908 train loss:3.510671 +step:8909 train loss:3.452006 +step:8910 train loss:3.416574 +step:8911 train loss:3.513567 +step:8912 train loss:3.410070 +step:8913 train loss:3.422097 +step:8914 train loss:3.517914 +step:8915 train loss:3.457623 +step:8916 train loss:3.486728 +step:8917 train loss:3.444611 +step:8918 train loss:3.447396 +step:8919 train loss:3.436792 +step:8920 train loss:3.462534 +step:8921 train loss:3.459195 +step:8922 train loss:3.437223 +step:8923 train loss:3.625918 +step:8924 train loss:3.519512 +step:8925 train loss:3.448364 +step:8926 train loss:3.458462 +step:8927 train loss:3.488855 +step:8928 train loss:3.441852 +step:8929 train loss:3.438444 +step:8930 train loss:3.492738 +step:8931 train loss:3.402066 +step:8932 train loss:3.507453 +step:8933 train loss:3.414932 +step:8934 train loss:3.454954 +step:8935 train loss:3.466024 +step:8936 train loss:3.501663 +step:8937 train loss:3.499897 +step:8938 train loss:3.439317 +step:8939 train loss:3.507893 +step:8940 train loss:3.461881 +step:8941 train loss:3.404311 +step:8942 train loss:3.481565 +step:8943 train loss:3.414578 +step:8944 train loss:3.464743 +step:8945 train loss:3.482416 +step:8946 train loss:3.329538 +step:8947 train loss:3.517832 +step:8948 train loss:3.365631 +step:8949 train loss:3.369396 +step:8950 train loss:3.413051 +step:8951 train loss:3.450456 +step:8952 train loss:3.469296 +step:8953 train loss:3.425667 +step:8954 train loss:3.530318 +step:8955 train loss:3.443472 +step:8956 train loss:3.472979 +step:8957 train loss:3.461977 +step:8958 train loss:3.440021 +step:8959 train loss:3.430575 +step:8960 train loss:3.396316 +step:8961 train loss:3.422266 +step:8962 train loss:3.473003 +step:8963 train 
loss:3.453941 +step:8964 train loss:3.435564 +step:8965 train loss:3.475419 +step:8966 train loss:3.437809 +step:8967 train loss:3.416727 +step:8968 train loss:3.399431 +step:8969 train loss:3.387764 +step:8970 train loss:3.468945 +step:8971 train loss:3.418068 +step:8972 train loss:3.617472 +step:8973 train loss:3.503969 +step:8974 train loss:3.463532 +step:8975 train loss:3.465043 +step:8976 train loss:3.428477 +step:8977 train loss:3.512757 +step:8978 train loss:3.498589 +step:8979 train loss:3.414150 +step:8980 train loss:3.510424 +step:8981 train loss:3.461615 +step:8982 train loss:3.433683 +step:8983 train loss:3.378490 +step:8984 train loss:3.504013 +step:8985 train loss:3.422160 +step:8986 train loss:3.455279 +step:8987 train loss:3.430971 +step:8988 train loss:3.481713 +step:8989 train loss:3.390469 +step:8990 train loss:3.531242 +step:8991 train loss:3.382657 +step:8992 train loss:3.439777 +step:8993 train loss:3.532884 +step:8994 train loss:3.436057 +step:8995 train loss:3.460552 +step:8996 train loss:3.431079 +step:8997 train loss:3.382341 +step:8998 train loss:3.385323 +step:8999 train loss:3.409720 +step:9000 validation loss:3.381233 total_sharp:3.0363e-03 L1_sharp:1.4213e-02 L2_sharp:6.9259e-03 L3_sharp:4.3225e-03 L4_sharp:3.0733e-03 L5_sharp:4.1614e-03 L6_sharp:5.5888e-03 L7_sharp:6.1519e-03 L8_sharp:6.0958e-03 L9_sharp:4.6711e-03 L10_sharp:3.2113e-03 L11_sharp:3.0280e-03 L12_sharp:4.3554e-03 total_fnorm:1.3935e+00 total_l1_linf:9.5123e+03 total_spectral:1.3935e+00 L1_fnorm:1.1885e-01 L2_fnorm:1.1483e-01 L3_fnorm:1.1640e-01 L4_fnorm:1.1931e-01 L5_fnorm:1.2066e-01 L6_fnorm:1.2101e-01 L7_fnorm:1.2114e-01 L8_fnorm:1.2116e-01 L9_fnorm:1.2137e-01 L10_fnorm:1.2133e-01 L11_fnorm:1.2122e-01 L12_fnorm:1.2087e-01 L1_l1linf:3.4606e-01 L2_l1linf:4.1705e-01 L3_l1linf:3.9473e-01 L4_l1linf:3.9058e-01 L5_l1linf:3.8511e-01 L6_l1linf:3.2656e-01 L7_l1linf:3.2806e-01 L8_l1linf:3.3298e-01 L9_l1linf:3.4255e-01 L10_l1linf:3.6581e-01 L11_l1linf:3.9808e-01 L12_l1linf:3.8915e-01 L1_spectral:7.7794e-03 L2_spectral:9.3961e-03 L3_spectral:8.8878e-03 L4_spectral:8.7626e-03 L5_spectral:8.6685e-03 L6_spectral:7.3609e-03 L7_spectral:7.4098e-03 L8_spectral:7.5039e-03 L9_spectral:7.7379e-03 L10_spectral:8.2273e-03 L11_spectral:8.9829e-03 L12_spectral:8.7953e-03 ip_v_neg_g:1.1897e-03 cos_v_neg_g:4.1977e-04 v_norm:1.3935e+00 g_norm:2.0338e+00 hv_norm:4.0496e-01 cos_v_hv:1.0448e-02 hg_norm:6.3017e+01 cos_g_hg:5.0904e-01 v_par:4.4975e-05 v_perp:1.3935e+00 L1_cos_v_neg_g:3.3796e-03 L1_v_norm:1.1885e-01 L2_cos_v_neg_g:-5.0875e-04 L2_v_norm:1.1483e-01 L3_cos_v_neg_g:-3.8829e-04 L3_v_norm:1.1640e-01 L4_cos_v_neg_g:9.3333e-05 L4_v_norm:1.1931e-01 L5_cos_v_neg_g:7.1752e-04 L5_v_norm:1.2066e-01 L6_cos_v_neg_g:4.4512e-04 L6_v_norm:1.2101e-01 L7_cos_v_neg_g:1.0201e-03 L7_v_norm:1.2114e-01 L8_cos_v_neg_g:1.6851e-03 L8_v_norm:1.2116e-01 L9_cos_v_neg_g:1.5617e-03 L9_v_norm:1.2137e-01 L10_cos_v_neg_g:1.7779e-03 L10_v_norm:1.2133e-01 L11_cos_v_neg_g:1.7840e-03 L11_v_norm:1.2122e-01 L12_cos_v_neg_g:2.0042e-03 L12_v_norm:1.2087e-01 +step:9000 train loss:3.496119 +step:9001 train loss:3.461961 +step:9002 train loss:3.472039 +step:9003 train loss:3.408986 +step:9004 train loss:3.409889 +step:9005 train loss:3.424118 +step:9006 train loss:3.425243 +step:9007 train loss:3.445724 +step:9008 train loss:3.398714 +step:9009 train loss:3.395506 +step:9010 train loss:3.433171 +step:9011 train loss:3.427114 +step:9012 train loss:3.543155 +step:9013 train loss:3.368324 +step:9014 train loss:3.440274 +step:9015 train loss:3.438953 
+step:9016 train loss:3.517352 +step:9017 train loss:3.459088 +step:9018 train loss:3.379453 +step:9019 train loss:3.466116 +step:9020 train loss:3.473452 +step:9021 train loss:3.430967 +step:9022 train loss:3.444555 +step:9023 train loss:3.440449 +step:9024 train loss:3.463795 +step:9025 train loss:3.445566 +step:9026 train loss:3.403426 +step:9027 train loss:3.446917 +step:9028 train loss:3.468987 +step:9029 train loss:3.486642 +step:9030 train loss:3.485488 +step:9031 train loss:3.449594 +step:9032 train loss:3.460809 +step:9033 train loss:3.444262 +step:9034 train loss:3.454157 +step:9035 train loss:3.458265 +step:9036 train loss:3.407557 +step:9037 train loss:3.403957 +step:9038 train loss:3.526638 +step:9039 train loss:3.429281 +step:9040 train loss:3.444669 +step:9041 train loss:3.492522 +step:9042 train loss:3.348500 +step:9043 train loss:3.442940 +step:9044 train loss:3.461658 +step:9045 train loss:3.409122 +step:9046 train loss:3.452362 +step:9047 train loss:3.446837 +step:9048 train loss:3.426990 +step:9049 train loss:3.458553 +step:9050 train loss:3.413922 +step:9051 train loss:3.458340 +step:9052 train loss:3.380800 +step:9053 train loss:3.509914 +step:9054 train loss:3.517633 +step:9055 train loss:3.441704 +step:9056 train loss:3.503875 +step:9057 train loss:3.356120 +step:9058 train loss:3.446594 +step:9059 train loss:3.518967 +step:9060 train loss:3.451090 +step:9061 train loss:3.477506 +step:9062 train loss:3.407936 +step:9063 train loss:3.542542 +step:9064 train loss:3.429837 +step:9065 train loss:3.439373 +step:9066 train loss:3.458430 +step:9067 train loss:3.421370 +step:9068 train loss:3.493010 +step:9069 train loss:3.451066 +step:9070 train loss:3.499313 +step:9071 train loss:3.437497 +step:9072 train loss:3.457360 +step:9073 train loss:3.416835 +step:9074 train loss:3.498000 +step:9075 train loss:3.444326 +step:9076 train loss:3.412657 +step:9077 train loss:3.488510 +step:9078 train loss:3.425971 +step:9079 train loss:3.473042 +step:9080 train loss:3.404580 +step:9081 train loss:3.442923 +step:9082 train loss:3.471860 +step:9083 train loss:3.497410 +step:9084 train loss:3.389529 +step:9085 train loss:3.461441 +step:9086 train loss:3.443577 +step:9087 train loss:3.392824 +step:9088 train loss:3.452415 +step:9089 train loss:3.468362 +step:9090 train loss:3.403974 +step:9091 train loss:3.501949 +step:9092 train loss:3.428818 +step:9093 train loss:3.427430 +step:9094 train loss:3.553427 +step:9095 train loss:3.423138 +step:9096 train loss:3.436324 +step:9097 train loss:3.423542 +step:9098 train loss:3.414301 +step:9099 train loss:3.540443 +step:9100 train loss:3.570500 +step:9101 train loss:3.488338 +step:9102 train loss:3.429739 +step:9103 train loss:3.438075 +step:9104 train loss:3.523770 +step:9105 train loss:3.385055 +step:9106 train loss:3.513831 +step:9107 train loss:3.445168 +step:9108 train loss:3.430542 +step:9109 train loss:3.456167 +step:9110 train loss:3.456036 +step:9111 train loss:3.439010 +step:9112 train loss:3.438344 +step:9113 train loss:3.471490 +step:9114 train loss:3.417032 +step:9115 train loss:3.443660 +step:9116 train loss:3.468987 +step:9117 train loss:3.480584 +step:9118 train loss:3.449130 +step:9119 train loss:3.369070 +step:9120 train loss:3.469333 +step:9121 train loss:3.497617 +step:9122 train loss:3.446060 +step:9123 train loss:3.466192 +step:9124 train loss:3.494292 +step:9125 train loss:3.448138 +step:9126 train loss:3.423511 +step:9127 train loss:3.457638 +step:9128 train loss:3.510788 +step:9129 train loss:3.467892 +step:9130 train 
loss:3.479646 +step:9131 train loss:3.462619 +step:9132 train loss:3.466928 +step:9133 train loss:3.458903 +step:9134 train loss:3.426970 +step:9135 train loss:3.457233 +step:9136 train loss:3.455520 +step:9137 train loss:3.508796 +step:9138 train loss:3.426690 +step:9139 train loss:3.502369 +step:9140 train loss:3.424872 +step:9141 train loss:3.402636 +step:9142 train loss:3.581292 +step:9143 train loss:3.409105 +step:9144 train loss:3.502964 +step:9145 train loss:3.507765 +step:9146 train loss:3.422673 +step:9147 train loss:3.497031 +step:9148 train loss:3.514699 +step:9149 train loss:3.424373 +step:9150 train loss:3.450040 +step:9151 train loss:3.506724 +step:9152 train loss:3.465455 +step:9153 train loss:3.433001 +step:9154 train loss:3.444913 +step:9155 train loss:3.410833 +step:9156 train loss:3.412865 +step:9157 train loss:3.433797 +step:9158 train loss:3.413337 +step:9159 train loss:3.502697 +step:9160 train loss:3.386880 +step:9161 train loss:3.415648 +step:9162 train loss:3.499737 +step:9163 train loss:3.445156 +step:9164 train loss:3.418760 +step:9165 train loss:3.410720 +step:9166 train loss:3.472335 +step:9167 train loss:3.413901 +step:9168 train loss:3.455239 +step:9169 train loss:3.392563 +step:9170 train loss:3.411639 +step:9171 train loss:3.478074 +step:9172 train loss:3.401693 +step:9173 train loss:3.524181 +step:9174 train loss:3.451792 +step:9175 train loss:3.430371 +step:9176 train loss:3.411417 +step:9177 train loss:3.456048 +step:9178 train loss:3.402875 +step:9179 train loss:3.362843 +step:9180 train loss:3.457714 +step:9181 train loss:3.463590 +step:9182 train loss:3.436850 +step:9183 train loss:3.443882 +step:9184 train loss:3.439876 +step:9185 train loss:3.454282 +step:9186 train loss:3.416825 +step:9187 train loss:3.488516 +step:9188 train loss:3.525250 +step:9189 train loss:3.448811 +step:9190 train loss:3.453749 +step:9191 train loss:3.444275 +step:9192 train loss:3.459441 +step:9193 train loss:3.459980 +step:9194 train loss:3.393342 +step:9195 train loss:3.387205 +step:9196 train loss:3.436206 +step:9197 train loss:3.396423 +step:9198 train loss:3.468512 +step:9199 train loss:3.416840 +step:9200 train loss:3.445322 +step:9201 train loss:3.478948 +step:9202 train loss:3.466708 +step:9203 train loss:3.421647 +step:9204 train loss:3.620088 +step:9205 train loss:3.533310 +step:9206 train loss:3.449390 +step:9207 train loss:3.500752 +step:9208 train loss:3.478816 +step:9209 train loss:3.498270 +step:9210 train loss:3.390479 +step:9211 train loss:3.418191 +step:9212 train loss:3.418861 +step:9213 train loss:3.479116 +step:9214 train loss:3.421294 +step:9215 train loss:3.489820 +step:9216 train loss:3.448719 +step:9217 train loss:3.391528 +step:9218 train loss:3.484967 +step:9219 train loss:3.441491 +step:9220 train loss:3.488740 +step:9221 train loss:3.538614 +step:9222 train loss:3.484442 +step:9223 train loss:3.652460 +step:9224 train loss:3.489354 +step:9225 train loss:3.422495 +step:9226 train loss:3.440960 +step:9227 train loss:3.454182 +step:9228 train loss:3.457303 +step:9229 train loss:3.413974 +step:9230 train loss:3.476306 +step:9231 train loss:3.361797 +step:9232 train loss:3.421120 +step:9233 train loss:3.442302 +step:9234 train loss:3.496037 +step:9235 train loss:3.503134 +step:9236 train loss:3.407748 +step:9237 train loss:3.472161 +step:9238 train loss:3.442346 +step:9239 train loss:3.435578 +step:9240 train loss:3.403931 +step:9241 train loss:3.434504 +step:9242 train loss:3.446240 +step:9243 train loss:3.444289 +step:9244 train loss:3.418369 
+step:9245 train loss:3.425131 +step:9246 train loss:3.424802 +step:9247 train loss:3.434578 +step:9248 train loss:3.445195 +step:9249 train loss:3.442361 +step:9250 validation loss:3.378325 +step:9250 train loss:3.483078 +step:9251 train loss:3.424320 +step:9252 train loss:3.493218 +step:9253 train loss:3.485832 +step:9254 train loss:3.416485 +step:9255 train loss:3.534455 +step:9256 train loss:3.413301 +step:9257 train loss:3.355606 +step:9258 train loss:3.435952 +step:9259 train loss:3.436457 +step:9260 train loss:3.534371 +step:9261 train loss:3.415541 +step:9262 train loss:3.487458 +step:9263 train loss:3.389075 +step:9264 train loss:3.532288 +step:9265 train loss:3.562417 +step:9266 train loss:3.489668 +step:9267 train loss:3.436476 +step:9268 train loss:3.430800 +step:9269 train loss:3.458107 +step:9270 train loss:3.382319 +step:9271 train loss:3.491163 +step:9272 train loss:3.430351 +step:9273 train loss:3.455390 +step:9274 train loss:3.455128 +step:9275 train loss:3.450112 +step:9276 train loss:3.481920 +step:9277 train loss:3.454202 +step:9278 train loss:3.466385 +step:9279 train loss:3.464307 +step:9280 train loss:3.459571 +step:9281 train loss:3.434086 +step:9282 train loss:3.554033 +step:9283 train loss:3.438177 +step:9284 train loss:3.401660 +step:9285 train loss:3.426147 +step:9286 train loss:3.475582 +step:9287 train loss:3.449642 +step:9288 train loss:3.455014 +step:9289 train loss:3.424934 +step:9290 train loss:3.454049 +step:9291 train loss:3.431887 +step:9292 train loss:3.467784 +step:9293 train loss:3.526952 +step:9294 train loss:3.449547 +step:9295 train loss:3.432445 +step:9296 train loss:3.386784 +step:9297 train loss:3.455925 +step:9298 train loss:3.396289 +step:9299 train loss:3.377925 +step:9300 train loss:3.482322 +step:9301 train loss:3.511014 +step:9302 train loss:3.451102 +step:9303 train loss:3.499484 +step:9304 train loss:3.417675 +step:9305 train loss:3.412235 +step:9306 train loss:3.413802 +step:9307 train loss:3.412498 +step:9308 train loss:3.390696 +step:9309 train loss:3.376425 +step:9310 train loss:3.432448 +step:9311 train loss:3.495493 +step:9312 train loss:3.444656 +step:9313 train loss:3.391062 +step:9314 train loss:3.422720 +step:9315 train loss:3.453321 +step:9316 train loss:3.438708 +step:9317 train loss:3.414700 +step:9318 train loss:3.498989 +step:9319 train loss:3.409184 +step:9320 train loss:3.430790 +step:9321 train loss:3.448329 +step:9322 train loss:3.452082 +step:9323 train loss:3.529833 +step:9324 train loss:3.466727 +step:9325 train loss:3.409781 +step:9326 train loss:3.486748 +step:9327 train loss:3.481163 +step:9328 train loss:3.483411 +step:9329 train loss:3.369915 +step:9330 train loss:3.539959 +step:9331 train loss:3.467929 +step:9332 train loss:3.492303 +step:9333 train loss:3.510732 +step:9334 train loss:3.445607 +step:9335 train loss:3.542553 +step:9336 train loss:3.498455 +step:9337 train loss:3.452187 +step:9338 train loss:3.508259 +step:9339 train loss:3.485970 +step:9340 train loss:3.445549 +step:9341 train loss:3.537534 +step:9342 train loss:3.429240 +step:9343 train loss:3.426364 +step:9344 train loss:3.428422 +step:9345 train loss:3.571487 +step:9346 train loss:3.409038 +step:9347 train loss:3.424853 +step:9348 train loss:3.448525 +step:9349 train loss:3.392802 +step:9350 train loss:3.468877 +step:9351 train loss:3.444709 +step:9352 train loss:3.430126 +step:9353 train loss:3.463928 +step:9354 train loss:3.430121 +step:9355 train loss:3.425535 +step:9356 train loss:3.474253 +step:9357 train loss:3.423640 +step:9358 
train loss:3.456542 +step:9359 train loss:3.401279 +step:9360 train loss:3.416443 +step:9361 train loss:3.417800 +step:9362 train loss:3.404598 +step:9363 train loss:3.468975 +step:9364 train loss:3.445852 +step:9365 train loss:3.452888 +step:9366 train loss:3.447896 +step:9367 train loss:3.459255 +step:9368 train loss:3.435290 +step:9369 train loss:3.433481 +step:9370 train loss:3.439046 +step:9371 train loss:3.462847 +step:9372 train loss:3.427588 +step:9373 train loss:3.412159 +step:9374 train loss:3.449586 +step:9375 train loss:3.458855 +step:9376 train loss:3.399808 +step:9377 train loss:3.474493 +step:9378 train loss:3.473557 +step:9379 train loss:3.501971 +step:9380 train loss:3.433142 +step:9381 train loss:3.440609 +step:9382 train loss:3.417225 +step:9383 train loss:3.412553 +step:9384 train loss:3.379903 +step:9385 train loss:3.454960 +step:9386 train loss:3.483097 +step:9387 train loss:3.459535 +step:9388 train loss:3.399168 +step:9389 train loss:3.414737 +step:9390 train loss:3.456215 +step:9391 train loss:3.462607 +step:9392 train loss:3.424598 +step:9393 train loss:3.417523 +step:9394 train loss:3.447349 +step:9395 train loss:3.441679 +step:9396 train loss:3.590710 +step:9397 train loss:3.477258 +step:9398 train loss:3.497324 +step:9399 train loss:3.450387 +step:9400 train loss:3.452763 +step:9401 train loss:3.444105 +step:9402 train loss:3.445343 +step:9403 train loss:3.378693 +step:9404 train loss:3.453713 +step:9405 train loss:3.413806 +step:9406 train loss:3.469364 +step:9407 train loss:3.406818 +step:9408 train loss:3.347682 +step:9409 train loss:3.412425 +step:9410 train loss:3.493545 +step:9411 train loss:3.453459 +step:9412 train loss:3.486236 +step:9413 train loss:3.501547 +step:9414 train loss:3.438045 +step:9415 train loss:3.434088 +step:9416 train loss:3.446325 +step:9417 train loss:3.402428 +step:9418 train loss:3.429206 +step:9419 train loss:3.397178 +step:9420 train loss:3.415914 +step:9421 train loss:3.464900 +step:9422 train loss:3.417119 +step:9423 train loss:3.481085 +step:9424 train loss:3.419105 +step:9425 train loss:3.461250 +step:9426 train loss:3.464704 +step:9427 train loss:3.438990 +step:9428 train loss:3.545315 +step:9429 train loss:3.437760 +step:9430 train loss:3.394493 +step:9431 train loss:3.479390 +step:9432 train loss:3.443811 +step:9433 train loss:3.484544 +step:9434 train loss:3.436965 +step:9435 train loss:3.459815 +step:9436 train loss:3.433160 +step:9437 train loss:3.444112 +step:9438 train loss:3.437067 +step:9439 train loss:3.438449 +step:9440 train loss:3.430598 +step:9441 train loss:3.439284 +step:9442 train loss:3.379464 +step:9443 train loss:3.434960 +step:9444 train loss:3.500718 +step:9445 train loss:3.431813 +step:9446 train loss:3.407523 +step:9447 train loss:3.473887 +step:9448 train loss:3.412016 +step:9449 train loss:3.434229 +step:9450 train loss:3.477761 +step:9451 train loss:3.393037 +step:9452 train loss:3.443738 +step:9453 train loss:3.421004 +step:9454 train loss:3.484940 +step:9455 train loss:3.464694 +step:9456 train loss:3.388791 +step:9457 train loss:3.438401 +step:9458 train loss:3.424571 +step:9459 train loss:3.417790 +step:9460 train loss:3.457716 +step:9461 train loss:3.487202 +step:9462 train loss:3.437258 +step:9463 train loss:3.465140 +step:9464 train loss:3.419794 +step:9465 train loss:3.513561 +step:9466 train loss:3.459970 +step:9467 train loss:3.484321 +step:9468 train loss:3.431098 +step:9469 train loss:3.418603 +step:9470 train loss:3.416590 +step:9471 train loss:3.457576 +step:9472 train loss:3.481965 
+step:9473 train loss:3.469830 +step:9474 train loss:3.414790 +step:9475 train loss:3.408397 +step:9476 train loss:3.625640 +step:9477 train loss:3.497187 +step:9478 train loss:3.475258 +step:9479 train loss:3.571237 +step:9480 train loss:3.418193 +step:9481 train loss:3.453909 +step:9482 train loss:3.477839 +step:9483 train loss:3.437222 +step:9484 train loss:3.468374 +step:9485 train loss:3.385686 +step:9486 train loss:3.424605 +step:9487 train loss:3.458709 +step:9488 train loss:3.409648 +step:9489 train loss:3.455985 +step:9490 train loss:3.421906 +step:9491 train loss:3.465379 +step:9492 train loss:3.487259 +step:9493 train loss:3.456435 +step:9494 train loss:3.465993 +step:9495 train loss:3.421062 +step:9496 train loss:3.479676 +step:9497 train loss:3.496309 +step:9498 train loss:3.442391 +step:9499 train loss:3.493041 +step:9500 validation loss:3.377473 total_sharp:3.5730e-03 L1_sharp:1.9086e-02 L2_sharp:7.7248e-03 L3_sharp:6.4652e-03 L4_sharp:3.6102e-03 L5_sharp:4.8524e-03 L6_sharp:6.1243e-03 L7_sharp:7.0601e-03 L8_sharp:7.4344e-03 L9_sharp:5.0680e-03 L10_sharp:3.3035e-03 L11_sharp:3.4364e-03 L12_sharp:4.2218e-03 total_fnorm:1.3887e+00 total_l1_linf:9.4764e+03 total_spectral:1.3887e+00 L1_fnorm:1.1875e-01 L2_fnorm:1.1418e-01 L3_fnorm:1.1584e-01 L4_fnorm:1.1891e-01 L5_fnorm:1.2049e-01 L6_fnorm:1.2084e-01 L7_fnorm:1.2113e-01 L8_fnorm:1.2096e-01 L9_fnorm:1.2126e-01 L10_fnorm:1.2121e-01 L11_fnorm:1.2110e-01 L12_fnorm:1.2090e-01 L1_l1linf:3.7140e-01 L2_l1linf:4.2979e-01 L3_l1linf:4.2437e-01 L4_l1linf:3.9028e-01 L5_l1linf:3.8130e-01 L6_l1linf:3.2522e-01 L7_l1linf:3.2523e-01 L8_l1linf:3.4313e-01 L9_l1linf:3.4224e-01 L10_l1linf:3.7320e-01 L11_l1linf:4.0319e-01 L12_l1linf:4.0200e-01 L1_spectral:8.3598e-03 L2_spectral:9.6819e-03 L3_spectral:9.6100e-03 L4_spectral:8.8063e-03 L5_spectral:8.5994e-03 L6_spectral:7.3814e-03 L7_spectral:7.3406e-03 L8_spectral:7.7595e-03 L9_spectral:7.7850e-03 L10_spectral:8.4148e-03 L11_spectral:9.0626e-03 L12_spectral:9.0848e-03 ip_v_neg_g:3.7417e-03 cos_v_neg_g:1.4125e-03 v_norm:1.3887e+00 g_norm:1.9075e+00 hv_norm:4.2523e-01 cos_v_hv:1.1669e-02 hg_norm:4.2407e+01 cos_g_hg:4.8299e-01 v_par:5.9128e-05 v_perp:1.3887e+00 L1_cos_v_neg_g:6.6621e-03 L1_v_norm:1.1875e-01 L2_cos_v_neg_g:6.3775e-03 L2_v_norm:1.1418e-01 L3_cos_v_neg_g:4.4636e-03 L3_v_norm:1.1584e-01 L4_cos_v_neg_g:3.2220e-03 L4_v_norm:1.1891e-01 L5_cos_v_neg_g:3.7297e-03 L5_v_norm:1.2049e-01 L6_cos_v_neg_g:4.1114e-03 L6_v_norm:1.2084e-01 L7_cos_v_neg_g:5.2893e-03 L7_v_norm:1.2113e-01 L8_cos_v_neg_g:5.4545e-03 L8_v_norm:1.2096e-01 L9_cos_v_neg_g:4.8993e-03 L9_v_norm:1.2126e-01 L10_cos_v_neg_g:4.4514e-03 L10_v_norm:1.2121e-01 L11_cos_v_neg_g:3.6047e-03 L11_v_norm:1.2110e-01 L12_cos_v_neg_g:2.3470e-03 L12_v_norm:1.2090e-01 +step:9500 train loss:3.481819 +step:9501 train loss:3.463711 +step:9502 train loss:3.435218 +step:9503 train loss:3.449522 +step:9504 train loss:3.401949 +step:9505 train loss:3.430107 +step:9506 train loss:3.446388 +step:9507 train loss:3.431289 +step:9508 train loss:3.626753 +step:9509 train loss:3.444612 +step:9510 train loss:3.428051 +step:9511 train loss:3.456831 +step:9512 train loss:3.485894 +step:9513 train loss:3.473472 +step:9514 train loss:3.444815 +step:9515 train loss:3.345041 +step:9516 train loss:3.445261 +step:9517 train loss:3.482380 +step:9518 train loss:3.456177 +step:9519 train loss:3.468879 +step:9520 train loss:3.354602 +step:9521 train loss:3.349309 +step:9522 train loss:3.467921 +step:9523 train loss:3.464178 +step:9524 train loss:3.465782 +step:9525 train 
loss:3.509150 +step:9526 train loss:3.525651 +step:9527 train loss:3.482036 +step:9528 train loss:3.413011 +step:9529 train loss:3.458559 +step:9530 train loss:3.503180 +step:9531 train loss:3.412056 +step:9532 train loss:3.459888 +step:9533 train loss:3.434285 +step:9534 train loss:3.514471 +step:9535 train loss:3.440535 +step:9536 train loss:3.417491 +step:9537 train loss:3.364175 +step:9538 train loss:3.380477 +step:9539 train loss:3.451027 +step:9540 train loss:3.368890 +step:9541 train loss:3.429521 +step:9542 train loss:3.557876 +step:9543 train loss:3.455855 +step:9544 train loss:3.496271 +step:9545 train loss:3.427747 +step:9546 train loss:3.454999 +step:9547 train loss:3.497027 +step:9548 train loss:3.439075 +step:9549 train loss:3.405592 +step:9550 train loss:3.435181 +step:9551 train loss:3.430915 +step:9552 train loss:3.457081 +step:9553 train loss:3.448263 +step:9554 train loss:3.494614 +step:9555 train loss:3.500026 +step:9556 train loss:3.408641 +step:9557 train loss:3.429148 +step:9558 train loss:3.491435 +step:9559 train loss:3.495576 +step:9560 train loss:3.412363 +step:9561 train loss:3.438619 +step:9562 train loss:3.476346 +step:9563 train loss:3.423724 +step:9564 train loss:3.459933 +step:9565 train loss:3.435334 +step:9566 train loss:3.406633 +step:9567 train loss:3.476294 +step:9568 train loss:3.446007 +step:9569 train loss:3.488225 +step:9570 train loss:3.380699 +step:9571 train loss:3.455963 +step:9572 train loss:3.401444 +step:9573 train loss:3.431132 +step:9574 train loss:3.408137 +step:9575 train loss:3.480355 +step:9576 train loss:3.372211 +step:9577 train loss:3.420783 +step:9578 train loss:3.424459 +step:9579 train loss:3.423007 +step:9580 train loss:3.485951 +step:9581 train loss:3.479780 +step:9582 train loss:3.448959 +step:9583 train loss:3.474580 +step:9584 train loss:3.410905 +step:9585 train loss:3.429957 +step:9586 train loss:3.484135 +step:9587 train loss:3.449941 +step:9588 train loss:3.436258 +step:9589 train loss:3.495237 +step:9590 train loss:3.459191 +step:9591 train loss:3.426429 +step:9592 train loss:3.446318 +step:9593 train loss:3.447391 +step:9594 train loss:3.463010 +step:9595 train loss:3.440487 +step:9596 train loss:3.527376 +step:9597 train loss:3.430524 +step:9598 train loss:3.394409 +step:9599 train loss:3.400291 +step:9600 train loss:3.484744 +step:9601 train loss:3.405477 +step:9602 train loss:3.490095 +step:9603 train loss:3.482233 +step:9604 train loss:3.364725 +step:9605 train loss:3.451397 +step:9606 train loss:3.506494 +step:9607 train loss:3.426406 +step:9608 train loss:3.434584 +step:9609 train loss:3.443878 +step:9610 train loss:3.484786 +step:9611 train loss:3.417041 +step:9612 train loss:3.429527 +step:9613 train loss:3.465443 +step:9614 train loss:3.435489 +step:9615 train loss:3.625531 +step:9616 train loss:3.438293 +step:9617 train loss:3.424483 +step:9618 train loss:3.377235 +step:9619 train loss:3.442129 +step:9620 train loss:3.497353 +step:9621 train loss:3.420061 +step:9622 train loss:3.434942 +step:9623 train loss:3.470341 +step:9624 train loss:3.460192 +step:9625 train loss:3.473746 +step:9626 train loss:3.444337 +step:9627 train loss:3.524379 +step:9628 train loss:3.490121 +step:9629 train loss:3.404967 +step:9630 train loss:3.465653 +step:9631 train loss:3.448553 +step:9632 train loss:3.418249 +step:9633 train loss:3.462703 +step:9634 train loss:3.529764 +step:9635 train loss:3.429680 +step:9636 train loss:3.381424 +step:9637 train loss:3.509870 +step:9638 train loss:3.391409 +step:9639 train loss:3.363482 
+step:9640 train loss:3.486360 +step:9641 train loss:3.459594 +step:9642 train loss:3.437691 +step:9643 train loss:3.438686 +step:9644 train loss:3.494001 +step:9645 train loss:3.419905 +step:9646 train loss:3.460209 +step:9647 train loss:3.471516 +step:9648 train loss:3.420367 +step:9649 train loss:3.392569 +step:9650 train loss:3.409506 +step:9651 train loss:3.501160 +step:9652 train loss:3.480433 +step:9653 train loss:3.423752 +step:9654 train loss:3.405667 +step:9655 train loss:3.403369 +step:9656 train loss:3.395812 +step:9657 train loss:3.419307 +step:9658 train loss:3.477541 +step:9659 train loss:3.586726 +step:9660 train loss:3.371163 +step:9661 train loss:3.387391 +step:9662 train loss:3.408289 +step:9663 train loss:3.450808 +step:9664 train loss:3.501459 +step:9665 train loss:3.342104 +step:9666 train loss:3.387673 +step:9667 train loss:3.525333 +step:9668 train loss:3.504765 +step:9669 train loss:3.523305 +step:9670 train loss:3.502401 +step:9671 train loss:3.500912 +step:9672 train loss:3.415473 +step:9673 train loss:3.436916 +step:9674 train loss:3.445414 +step:9675 train loss:3.443984 +step:9676 train loss:3.403452 +step:9677 train loss:3.409893 +step:9678 train loss:3.446242 +step:9679 train loss:3.437383 +step:9680 train loss:3.437545 +step:9681 train loss:3.422114 +step:9682 train loss:3.491048 +step:9683 train loss:3.463216 +step:9684 train loss:3.381860 +step:9685 train loss:3.467028 +step:9686 train loss:3.500202 +step:9687 train loss:3.407373 +step:9688 train loss:3.493463 +step:9689 train loss:3.592107 +step:9690 train loss:3.436014 +step:9691 train loss:3.423753 +step:9692 train loss:3.385662 +step:9693 train loss:3.383034 +step:9694 train loss:3.402855 +step:9695 train loss:3.510447 +step:9696 train loss:3.543530 +step:9697 train loss:3.454686 +step:9698 train loss:3.487920 +step:9699 train loss:3.451627 +step:9700 train loss:3.447943 +step:9701 train loss:3.498742 +step:9702 train loss:3.415746 +step:9703 train loss:3.438139 +step:9704 train loss:3.518197 +step:9705 train loss:3.417398 +step:9706 train loss:3.414345 +step:9707 train loss:3.459677 +step:9708 train loss:3.409992 +step:9709 train loss:3.434346 +step:9710 train loss:3.451854 +step:9711 train loss:3.425097 +step:9712 train loss:3.435267 +step:9713 train loss:3.488472 +step:9714 train loss:3.443442 +step:9715 train loss:3.463250 +step:9716 train loss:3.483890 +step:9717 train loss:3.403514 +step:9718 train loss:3.409108 +step:9719 train loss:3.495095 +step:9720 train loss:3.425539 +step:9721 train loss:3.416271 +step:9722 train loss:3.478745 +step:9723 train loss:3.425173 +step:9724 train loss:3.452835 +step:9725 train loss:3.504572 +step:9726 train loss:3.447924 +step:9727 train loss:3.425773 +step:9728 train loss:3.463391 +step:9729 train loss:3.491483 +step:9730 train loss:3.564233 +step:9731 train loss:3.482048 +step:9732 train loss:3.442127 +step:9733 train loss:3.484787 +step:9734 train loss:3.407855 +step:9735 train loss:3.510324 +step:9736 train loss:3.414900 +step:9737 train loss:3.472230 +step:9738 train loss:3.440051 +step:9739 train loss:3.511297 +step:9740 train loss:3.476553 +step:9741 train loss:3.416580 +step:9742 train loss:3.509717 +step:9743 train loss:3.380443 +step:9744 train loss:3.445377 +step:9745 train loss:3.402978 +step:9746 train loss:3.438043 +step:9747 train loss:3.431263 +step:9748 train loss:3.327762 +step:9749 train loss:3.430135 +step:9750 validation loss:3.370155 +step:9750 train loss:3.411382 +step:9751 train loss:3.547911 +step:9752 train loss:3.436880 +step:9753 
train loss:3.389940 +step:9754 train loss:3.423772 +step:9755 train loss:3.420387 +step:9756 train loss:3.421052 +step:9757 train loss:3.386683 +step:9758 train loss:3.377831 +step:9759 train loss:3.427699 +step:9760 train loss:3.369160 +step:9761 train loss:3.413435 +step:9762 train loss:3.410728 +step:9763 train loss:3.430031 +step:9764 train loss:3.417680 +step:9765 train loss:3.379066 +step:9766 train loss:3.468638 +step:9767 train loss:3.424820 +step:9768 train loss:3.437529 +step:9769 train loss:3.391780 +step:9770 train loss:3.387880 +step:9771 train loss:3.439176 +step:9772 train loss:3.450665 +step:9773 train loss:3.428432 +step:9774 train loss:3.400133 +step:9775 train loss:3.490049 +step:9776 train loss:3.486816 +step:9777 train loss:3.373372 +step:9778 train loss:3.384068 +step:9779 train loss:3.387418 +step:9780 train loss:3.387756 +step:9781 train loss:3.406582 +step:9782 train loss:3.482310 +step:9783 train loss:3.394344 +step:9784 train loss:3.420046 +step:9785 train loss:3.414431 +step:9786 train loss:3.447106 +step:9787 train loss:3.474340 +step:9788 train loss:3.399054 +step:9789 train loss:3.408654 +step:9790 train loss:3.369143 +step:9791 train loss:3.421635 +step:9792 train loss:3.434076 +step:9793 train loss:3.451630 +step:9794 train loss:3.427692 +step:9795 train loss:3.431625 +step:9796 train loss:3.418200 +step:9797 train loss:3.413955 +step:9798 train loss:3.430230 +step:9799 train loss:3.430865 +step:9800 train loss:3.502901 +step:9801 train loss:3.425657 +step:9802 train loss:3.483028 +step:9803 train loss:3.342111 +step:9804 train loss:3.437543 +step:9805 train loss:3.443788 +step:9806 train loss:3.416093 +step:9807 train loss:3.385320 +step:9808 train loss:3.302806 +step:9809 train loss:3.488326 +step:9810 train loss:3.444045 +step:9811 train loss:3.426067 +step:9812 train loss:3.403777 +step:9813 train loss:3.480374 +step:9814 train loss:3.474344 +step:9815 train loss:3.376309 +step:9816 train loss:3.381260 +step:9817 train loss:3.412296 +step:9818 train loss:3.436423 +step:9819 train loss:3.408629 +step:9820 train loss:3.476625 +step:9821 train loss:3.455068 +step:9822 train loss:3.428721 +step:9823 train loss:3.491354 +step:9824 train loss:3.394164 +step:9825 train loss:3.481812 +step:9826 train loss:3.472822 +step:9827 train loss:3.483008 +step:9828 train loss:3.396936 +step:9829 train loss:3.405183 +step:9830 train loss:3.395505 +step:9831 train loss:3.450814 +step:9832 train loss:3.463757 +step:9833 train loss:3.373796 +step:9834 train loss:3.426785 +step:9835 train loss:3.391200 +step:9836 train loss:3.460674 +step:9837 train loss:3.427434 +step:9838 train loss:3.465696 +step:9839 train loss:3.440487 +step:9840 train loss:3.412332 +step:9841 train loss:3.418685 +step:9842 train loss:3.478450 +step:9843 train loss:3.472956 +step:9844 train loss:3.419631 +step:9845 train loss:3.452421 +step:9846 train loss:3.385826 +step:9847 train loss:3.516132 +step:9848 train loss:3.438962 +step:9849 train loss:3.463227 +step:9850 train loss:3.384404 +step:9851 train loss:3.436822 +step:9852 train loss:3.403488 +step:9853 train loss:3.423112 +step:9854 train loss:3.437381 +step:9855 train loss:3.385398 +step:9856 train loss:3.386874 +step:9857 train loss:3.374935 +step:9858 train loss:3.439991 +step:9859 train loss:3.358929 +step:9860 train loss:3.597461 +step:9861 train loss:3.422799 +step:9862 train loss:3.388529 +step:9863 train loss:3.371059 +step:9864 train loss:3.496200 +step:9865 train loss:3.376169 +step:9866 train loss:3.414635 +step:9867 train loss:3.412197 
+step:9868 train loss:3.471490 +step:9869 train loss:3.438089 +step:9870 train loss:3.405016 +step:9871 train loss:3.448980 +step:9872 train loss:3.387578 +step:9873 train loss:3.444482 +step:9874 train loss:3.405922 +step:9875 train loss:3.409214 +step:9876 train loss:3.372309 +step:9877 train loss:3.425090 +step:9878 train loss:3.457009 +step:9879 train loss:3.456733 +step:9880 train loss:3.389312 +step:9881 train loss:3.444543 +step:9882 train loss:3.403225 +step:9883 train loss:3.415050 +step:9884 train loss:3.406334 +step:9885 train loss:3.471134 +step:9886 train loss:3.436838 +step:9887 train loss:3.437708 +step:9888 train loss:3.461115 +step:9889 train loss:3.493470 +step:9890 train loss:3.404266 +step:9891 train loss:3.408523 +step:9892 train loss:3.379973 +step:9893 train loss:3.500276 +step:9894 train loss:3.412806 +step:9895 train loss:3.347749 +step:9896 train loss:3.502532 +step:9897 train loss:3.377344 +step:9898 train loss:3.448471 +step:9899 train loss:3.426610 +step:9900 train loss:3.472303 +step:9901 train loss:3.395094 +step:9902 train loss:3.441505 +step:9903 train loss:3.411943 +step:9904 train loss:3.462507 +step:9905 train loss:3.365663 +step:9906 train loss:3.405567 +step:9907 train loss:3.412916 +step:9908 train loss:3.410052 +step:9909 train loss:3.427779 +step:9910 train loss:3.449501 +step:9911 train loss:3.534966 +step:9912 train loss:3.409702 +step:9913 train loss:3.413818 +step:9914 train loss:3.420972 +step:9915 train loss:3.422549 +step:9916 train loss:3.371019 +step:9917 train loss:3.409346 +step:9918 train loss:3.403719 +step:9919 train loss:3.569025 +step:9920 train loss:3.357728 +step:9921 train loss:3.447827 +step:9922 train loss:3.407345 +step:9923 train loss:3.464394 +step:9924 train loss:3.380463 +step:9925 train loss:3.436013 +step:9926 train loss:3.415519 +step:9927 train loss:3.460332 +step:9928 train loss:3.387479 +step:9929 train loss:3.423162 +step:9930 train loss:3.515521 +step:9931 train loss:3.480229 +step:9932 train loss:3.365786 +step:9933 train loss:3.461163 +step:9934 train loss:3.381438 +step:9935 train loss:3.493858 +step:9936 train loss:3.401590 +step:9937 train loss:3.428154 +step:9938 train loss:3.414234 +step:9939 train loss:3.480736 +step:9940 train loss:3.514869 +step:9941 train loss:3.387211 +step:9942 train loss:3.433489 +step:9943 train loss:3.556676 +step:9944 train loss:3.431314 +step:9945 train loss:3.450021 +step:9946 train loss:3.425239 +step:9947 train loss:3.370790 +step:9948 train loss:3.417441 +step:9949 train loss:3.311037 +step:9950 train loss:3.465555 +step:9951 train loss:3.382054 +step:9952 train loss:3.451560 +step:9953 train loss:3.414864 +step:9954 train loss:3.471628 +step:9955 train loss:3.446263 +step:9956 train loss:3.448801 +step:9957 train loss:3.426522 +step:9958 train loss:3.480778 +step:9959 train loss:3.379787 +step:9960 train loss:3.411391 +step:9961 train loss:3.421347 +step:9962 train loss:3.470721 +step:9963 train loss:3.360121 +step:9964 train loss:3.416444 +step:9965 train loss:3.420650 +step:9966 train loss:3.477066 +step:9967 train loss:3.389338 +step:9968 train loss:3.455506 +step:9969 train loss:3.370692 +step:9970 train loss:3.413310 +step:9971 train loss:3.452757 +step:9972 train loss:3.474627 +step:9973 train loss:3.451147 +step:9974 train loss:3.439961 +step:9975 train loss:3.410286 +step:9976 train loss:3.370646 +step:9977 train loss:3.419238 +step:9978 train loss:3.418705 +step:9979 train loss:3.426609 +step:9980 train loss:3.483567 +step:9981 train loss:3.391665 +step:9982 train 
loss:3.450239 +step:9983 train loss:3.370235 +step:9984 train loss:3.435454 +step:9985 train loss:3.380304 +step:9986 train loss:3.432121 +step:9987 train loss:3.475765 +step:9988 train loss:3.489110 +step:9989 train loss:3.382216 +step:9990 train loss:3.522282 +step:9991 train loss:3.371589 +step:9992 train loss:3.443928 +step:9993 train loss:3.435457 +step:9994 train loss:3.550244 +step:9995 train loss:3.489250 +step:9996 train loss:3.401854 +step:9997 train loss:3.444652 +step:9998 train loss:3.495563 +step:9999 train loss:3.464809 +step:10000 validation loss:3.367266 total_sharp:3.4618e-03 L1_sharp:3.0965e-02 L2_sharp:6.3712e-03 L3_sharp:5.7419e-03 L4_sharp:3.1737e-03 L5_sharp:4.6626e-03 L6_sharp:6.3575e-03 L7_sharp:6.3542e-03 L8_sharp:6.3612e-03 L9_sharp:4.4363e-03 L10_sharp:3.1720e-03 L11_sharp:3.2581e-03 L12_sharp:5.1439e-03 total_fnorm:1.3934e+00 total_l1_linf:9.5040e+03 total_spectral:1.3934e+00 L1_fnorm:1.1873e-01 L2_fnorm:1.1478e-01 L3_fnorm:1.1601e-01 L4_fnorm:1.1913e-01 L5_fnorm:1.2030e-01 L6_fnorm:1.2073e-01 L7_fnorm:1.2088e-01 L8_fnorm:1.2080e-01 L9_fnorm:1.2112e-01 L10_fnorm:1.2112e-01 L11_fnorm:1.2101e-01 L12_fnorm:1.2078e-01 L1_l1linf:3.6299e-01 L2_l1linf:4.3555e-01 L3_l1linf:4.2204e-01 L4_l1linf:4.0698e-01 L5_l1linf:3.8823e-01 L6_l1linf:3.4848e-01 L7_l1linf:3.3583e-01 L8_l1linf:3.5205e-01 L9_l1linf:3.7096e-01 L10_l1linf:4.0541e-01 L11_l1linf:4.2506e-01 L12_l1linf:4.3385e-01 L1_spectral:8.1301e-03 L2_spectral:9.7823e-03 L3_spectral:9.4988e-03 L4_spectral:9.1389e-03 L5_spectral:8.6836e-03 L6_spectral:7.8294e-03 L7_spectral:7.6044e-03 L8_spectral:7.9054e-03 L9_spectral:8.3041e-03 L10_spectral:9.0710e-03 L11_spectral:9.5087e-03 L12_spectral:9.6677e-03 ip_v_neg_g:3.1622e-03 cos_v_neg_g:9.6044e-04 v_norm:1.3934e+00 g_norm:2.3629e+00 hv_norm:4.3753e-01 cos_v_hv:1.1025e-02 hg_norm:1.9609e+02 cos_g_hg:6.0195e-01 v_par:4.1133e-05 v_perp:1.3934e+00 L1_cos_v_neg_g:7.1318e-03 L1_v_norm:1.1873e-01 L2_cos_v_neg_g:2.9689e-03 L2_v_norm:1.1478e-01 L3_cos_v_neg_g:1.1841e-03 L3_v_norm:1.1601e-01 L4_cos_v_neg_g:1.6278e-03 L4_v_norm:1.1913e-01 L5_cos_v_neg_g:2.3288e-03 L5_v_norm:1.2030e-01 L6_cos_v_neg_g:2.8094e-03 L6_v_norm:1.2073e-01 L7_cos_v_neg_g:3.6509e-03 L7_v_norm:1.2088e-01 L8_cos_v_neg_g:3.3069e-03 L8_v_norm:1.2080e-01 L9_cos_v_neg_g:3.2189e-03 L9_v_norm:1.2112e-01 L10_cos_v_neg_g:3.0928e-03 L10_v_norm:1.2112e-01 L11_cos_v_neg_g:2.4598e-03 L11_v_norm:1.2101e-01 L12_cos_v_neg_g:2.8091e-03 L12_v_norm:1.2078e-01 diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/config.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..d0e2f3c87afd87507126ae6bf765c4205d24a92f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/muon_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 
1, + "device": "", + "compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.005, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "ec1414b6-304e-4e3c-8220-ba71dfe39c83", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..36e7fbdb542c90a88ccf73e89122269ad2592fd4 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6726946830749512, + "total_l1_linf_norm": 13735.3388671875, + "total_spectral_norm": 1.6726946830749512, + "layer_1_update_fnorm": 0.3026123344898224, + "layer_1_max_l1_linf_norm": 0.22811609506607056, + "layer_1_max_spectral_norm": 0.006020676344633102, + "layer_2_update_fnorm": 0.28296035528182983, + "layer_2_max_l1_linf_norm": 0.24369460344314575, + "layer_2_max_spectral_norm": 0.006023065187036991, + "layer_3_update_fnorm": 0.2794894278049469, + "layer_3_max_l1_linf_norm": 0.2667970061302185, + "layer_3_max_spectral_norm": 0.006031177006661892, + "layer_4_update_fnorm": 0.28770342469215393, + "layer_4_max_l1_linf_norm": 0.26892566680908203, + "layer_4_max_spectral_norm": 0.006053514312952757, + "layer_5_update_fnorm": 0.29464972019195557, + "layer_5_max_l1_linf_norm": 0.27514010667800903, + "layer_5_max_spectral_norm": 0.006131299305707216, + "layer_6_update_fnorm": 0.3007885813713074, + "layer_6_max_l1_linf_norm": 0.2559514045715332, + "layer_6_max_spectral_norm": 0.00602443004027009, + "layer_7_update_fnorm": 0.29955101013183594, + "layer_7_max_l1_linf_norm": 0.25266605615615845, + "layer_7_max_spectral_norm": 0.006021031178534031, + "layer_8_update_fnorm": 0.30061963200569153, + "layer_8_max_l1_linf_norm": 0.26340264081954956, + "layer_8_max_spectral_norm": 0.006021407432854176, + "layer_9_update_fnorm": 0.30026358366012573, + "layer_9_max_l1_linf_norm": 0.278839111328125, + "layer_9_max_spectral_norm": 0.006273087114095688, + "layer_10_update_fnorm": 0.30048054456710815, + "layer_10_max_l1_linf_norm": 0.29206782579421997, + "layer_10_max_spectral_norm": 0.006532613653689623, + "layer_11_update_fnorm": 0.3007068634033203, + "layer_11_max_l1_linf_norm": 0.2863784432411194, + "layer_11_max_spectral_norm": 0.0064334268681705, + "layer_12_update_fnorm": 0.30063241720199585, + "layer_12_max_l1_linf_norm": 0.26725682616233826, + "layer_12_max_spectral_norm": 0.0061074611730873585, + "total_sharpness": 0.012715586461126804, + "ip_v_neg_g": 0.01707802340388298, + "cos_v_neg_g": 0.007257204968482256, + "v_norm": 1.6726946830749512, + "g_norm": 1.4068621397018433, + "hv_norm": 0.7841928601264954, + "cos_v_hv": 0.027122531086206436, + "hg_norm": 15.777036666870117, + "cos_g_hg": 0.4880801737308502, + "v_parallel_norm": 0.0003148750984109938, + "v_perp_norm": 1.6726946830749512, + "layer_1_v_norm": 0.3026123344898224, + "layer_1_cos_v_neg_g": 0.01517773699015379, + "layer_2_v_norm": 0.28296035528182983, + "layer_2_cos_v_neg_g": 0.018456604331731796, + "layer_3_v_norm": 0.2794894278049469, + "layer_3_cos_v_neg_g": 
0.015885239467024803, + "layer_4_v_norm": 0.28770342469215393, + "layer_4_cos_v_neg_g": 0.01297811884433031, + "layer_5_v_norm": 0.29464972019195557, + "layer_5_cos_v_neg_g": 0.013984745368361473, + "layer_6_v_norm": 0.3007885813713074, + "layer_6_cos_v_neg_g": 0.010459820739924908, + "layer_7_v_norm": 0.29955101013183594, + "layer_7_cos_v_neg_g": 0.01082964800298214, + "layer_8_v_norm": 0.30061960220336914, + "layer_8_cos_v_neg_g": 0.010099191218614578, + "layer_9_v_norm": 0.30026358366012573, + "layer_9_cos_v_neg_g": 0.009541366249322891, + "layer_10_v_norm": 0.30048054456710815, + "layer_10_cos_v_neg_g": 0.009173575788736343, + "layer_11_v_norm": 0.3007068634033203, + "layer_11_cos_v_neg_g": 0.007492777891457081, + "layer_12_v_norm": 0.30063241720199585, + "layer_12_cos_v_neg_g": 0.006996097043156624, + "layer_1_sharpness": 0.025023628026247025, + "layer_2_sharpness": 0.008170676417648792, + "layer_3_sharpness": 0.006770872510969639, + "layer_4_sharpness": 0.0030170551035553217, + "layer_5_sharpness": 0.004641372244805098, + "layer_6_sharpness": 0.003987653646618128, + "layer_7_sharpness": 0.004667080007493496, + "layer_8_sharpness": 0.0036143455654382706, + "layer_9_sharpness": 0.0029585619922727346, + "layer_10_sharpness": 0.002459780080243945, + "layer_11_sharpness": 0.0019079571356996894, + "layer_12_sharpness": 0.002245368668809533 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_10000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..2b848ab521fb90c080b0bd5fba2b036efdbd42c1 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6760320663452148, + "total_l1_linf_norm": 13724.8720703125, + "total_spectral_norm": 1.676032543182373, + "layer_1_update_fnorm": 0.289219468832016, + "layer_1_max_l1_linf_norm": 0.4143562912940979, + "layer_1_max_spectral_norm": 0.009284124709665775, + "layer_2_update_fnorm": 0.27585265040397644, + "layer_2_max_l1_linf_norm": 0.4540875554084778, + "layer_2_max_spectral_norm": 0.01033052895218134, + "layer_3_update_fnorm": 0.28328296542167664, + "layer_3_max_l1_linf_norm": 0.4533880650997162, + "layer_3_max_spectral_norm": 0.010190515778958797, + "layer_4_update_fnorm": 0.2960273027420044, + "layer_4_max_l1_linf_norm": 0.43050652742385864, + "layer_4_max_spectral_norm": 0.009824290871620178, + "layer_5_update_fnorm": 0.29780322313308716, + "layer_5_max_l1_linf_norm": 0.3980412483215332, + "layer_5_max_spectral_norm": 0.009014609269797802, + "layer_6_update_fnorm": 0.29995566606521606, + "layer_6_max_l1_linf_norm": 0.3606691062450409, + "layer_6_max_spectral_norm": 0.008122021332383156, + "layer_7_update_fnorm": 0.2990431785583496, + "layer_7_max_l1_linf_norm": 0.3477681279182434, + "layer_7_max_spectral_norm": 0.007858455181121826, + "layer_8_update_fnorm": 0.2986529469490051, + "layer_8_max_l1_linf_norm": 0.345426082611084, + "layer_8_max_spectral_norm": 0.00778499711304903, + "layer_9_update_fnorm": 0.2999328672885895, + "layer_9_max_l1_linf_norm": 0.3733224868774414, + "layer_9_max_spectral_norm": 0.00839787907898426, + "layer_10_update_fnorm": 0.2998869717121124, + "layer_10_max_l1_linf_norm": 0.41639530658721924, + "layer_10_max_spectral_norm": 0.009320289827883244, + "layer_11_update_fnorm": 0.2987898886203766, + "layer_11_max_l1_linf_norm": 
0.4206274747848511, + "layer_11_max_spectral_norm": 0.009412549436092377, + "layer_12_update_fnorm": 0.29989296197891235, + "layer_12_max_l1_linf_norm": 0.407012939453125, + "layer_12_max_spectral_norm": 0.009110567159950733, + "total_sharpness": 0.0025199418887495995, + "ip_v_neg_g": 0.002632502233609557, + "cos_v_neg_g": 0.0009010769426822662, + "v_norm": 1.6760320663452148, + "g_norm": 1.743109107017517, + "hv_norm": 0.2942299544811249, + "cos_v_hv": 0.014354430139064789, + "hg_norm": 108.15245819091797, + "cos_g_hg": 0.679604709148407, + "v_parallel_norm": 8.945649460656568e-05, + "v_perp_norm": 1.6760320663452148, + "layer_1_v_norm": 0.289219468832016, + "layer_1_cos_v_neg_g": 0.0008904398418962955, + "layer_2_v_norm": 0.27585265040397644, + "layer_2_cos_v_neg_g": 0.002731259912252426, + "layer_3_v_norm": 0.28328296542167664, + "layer_3_cos_v_neg_g": 0.00028831796953454614, + "layer_4_v_norm": 0.2960273027420044, + "layer_4_cos_v_neg_g": 8.995517418952659e-05, + "layer_5_v_norm": 0.29780322313308716, + "layer_5_cos_v_neg_g": 0.0007986558484844863, + "layer_6_v_norm": 0.29995566606521606, + "layer_6_cos_v_neg_g": 0.001896349131129682, + "layer_7_v_norm": 0.2990431785583496, + "layer_7_cos_v_neg_g": 0.0031688741873949766, + "layer_8_v_norm": 0.2986529469490051, + "layer_8_cos_v_neg_g": 0.0034939460456371307, + "layer_9_v_norm": 0.2999328672885895, + "layer_9_cos_v_neg_g": 0.0024033007211983204, + "layer_10_v_norm": 0.2998869717121124, + "layer_10_cos_v_neg_g": 0.002852946985512972, + "layer_11_v_norm": 0.2987898886203766, + "layer_11_cos_v_neg_g": 0.0018696757033467293, + "layer_12_v_norm": 0.29989296197891235, + "layer_12_cos_v_neg_g": 0.0020559339318424463, + "layer_1_sharpness": 0.003988733980804682, + "layer_2_sharpness": 0.0019625225104391575, + "layer_3_sharpness": 0.001248479587957263, + "layer_4_sharpness": 0.0007682578871026635, + "layer_5_sharpness": 0.0008328699041157961, + "layer_6_sharpness": 0.001243469538167119, + "layer_7_sharpness": 0.0014348287368193269, + "layer_8_sharpness": 0.0013030500849708915, + "layer_9_sharpness": 0.0008906646398827434, + "layer_10_sharpness": 0.0007468760595656931, + "layer_11_sharpness": 0.0005955218803137541, + "layer_12_sharpness": 0.0006491349195130169 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..3a4aca84583757f7c4e19fa1c0c86a96aade644f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.68062162399292, + "total_l1_linf_norm": 13791.921875, + "total_spectral_norm": 1.680621862411499, + "layer_1_update_fnorm": 0.2998645007610321, + "layer_1_max_l1_linf_norm": 0.23548345267772675, + "layer_1_max_spectral_norm": 0.0060256244614720345, + "layer_2_update_fnorm": 0.28221338987350464, + "layer_2_max_l1_linf_norm": 0.28871768712997437, + "layer_2_max_spectral_norm": 0.006599967367947102, + "layer_3_update_fnorm": 0.2820245325565338, + "layer_3_max_l1_linf_norm": 0.3115086555480957, + "layer_3_max_spectral_norm": 0.006991853471845388, + "layer_4_update_fnorm": 0.29105445742607117, + "layer_4_max_l1_linf_norm": 0.32937732338905334, + "layer_4_max_spectral_norm": 0.007425585296005011, + "layer_5_update_fnorm": 0.29803475737571716, + "layer_5_max_l1_linf_norm": 0.30115804076194763, + 
"layer_5_max_spectral_norm": 0.006801828276365995, + "layer_6_update_fnorm": 0.3019030690193176, + "layer_6_max_l1_linf_norm": 0.2813659906387329, + "layer_6_max_spectral_norm": 0.006314295809715986, + "layer_7_update_fnorm": 0.3012295067310333, + "layer_7_max_l1_linf_norm": 0.26955798268318176, + "layer_7_max_spectral_norm": 0.006134973838925362, + "layer_8_update_fnorm": 0.30207589268684387, + "layer_8_max_l1_linf_norm": 0.28214871883392334, + "layer_8_max_spectral_norm": 0.006375467404723167, + "layer_9_update_fnorm": 0.3022812306880951, + "layer_9_max_l1_linf_norm": 0.3132374882698059, + "layer_9_max_spectral_norm": 0.007093587890267372, + "layer_10_update_fnorm": 0.30217403173446655, + "layer_10_max_l1_linf_norm": 0.32134073972702026, + "layer_10_max_spectral_norm": 0.007234443444758654, + "layer_11_update_fnorm": 0.3017808794975281, + "layer_11_max_l1_linf_norm": 0.33898383378982544, + "layer_11_max_spectral_norm": 0.007605855353176594, + "layer_12_update_fnorm": 0.3009849786758423, + "layer_12_max_l1_linf_norm": 0.3636099696159363, + "layer_12_max_spectral_norm": 0.008129606954753399, + "total_sharpness": 0.00695132277905941, + "ip_v_neg_g": 0.006458363961428404, + "cos_v_neg_g": 0.002844388596713543, + "v_norm": 1.68062162399292, + "g_norm": 1.3510257005691528, + "hv_norm": 0.531374454498291, + "cos_v_hv": 0.02198551781475544, + "hg_norm": 16.169204711914062, + "cos_g_hg": 0.4818097651004791, + "v_parallel_norm": 0.00014795552124269307, + "v_perp_norm": 1.68062162399292, + "layer_1_v_norm": 0.2998645007610321, + "layer_1_cos_v_neg_g": 0.004402841906994581, + "layer_2_v_norm": 0.28221338987350464, + "layer_2_cos_v_neg_g": 0.005887063220143318, + "layer_3_v_norm": 0.2820245623588562, + "layer_3_cos_v_neg_g": 0.005266295280307531, + "layer_4_v_norm": 0.29105445742607117, + "layer_4_cos_v_neg_g": 0.0055917915888130665, + "layer_5_v_norm": 0.29803475737571716, + "layer_5_cos_v_neg_g": 0.0047582704573869705, + "layer_6_v_norm": 0.3019030690193176, + "layer_6_cos_v_neg_g": 0.004684004932641983, + "layer_7_v_norm": 0.3012295067310333, + "layer_7_cos_v_neg_g": 0.005647499579936266, + "layer_8_v_norm": 0.30207589268684387, + "layer_8_cos_v_neg_g": 0.005071135237812996, + "layer_9_v_norm": 0.3022812306880951, + "layer_9_cos_v_neg_g": 0.0053250486962497234, + "layer_10_v_norm": 0.30217403173446655, + "layer_10_cos_v_neg_g": 0.004637318197637796, + "layer_11_v_norm": 0.3017808794975281, + "layer_11_cos_v_neg_g": 0.004156548995524645, + "layer_12_v_norm": 0.3009849786758423, + "layer_12_cos_v_neg_g": 0.003663081908598542, + "layer_1_sharpness": 0.011952630244195461, + "layer_2_sharpness": 0.0036765255499631166, + "layer_3_sharpness": 0.0046227797865867615, + "layer_4_sharpness": 0.0018264170503243804, + "layer_5_sharpness": 0.0020312510896474123, + "layer_6_sharpness": 0.0025013138074427843, + "layer_7_sharpness": 0.0030840698163956404, + "layer_8_sharpness": 0.0023195992689579725, + "layer_9_sharpness": 0.0019228237215429544, + "layer_10_sharpness": 0.0016028756508603692, + "layer_11_sharpness": 0.0011578970588743687, + "layer_12_sharpness": 0.002170252613723278 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..cbd8c4f17db99d0e9e7a94dfb8dadda5f86bab7c --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6858078241348267, + "total_l1_linf_norm": 13828.4140625, + "total_spectral_norm": 1.6858079433441162, + "layer_1_update_fnorm": 0.29584622383117676, + "layer_1_max_l1_linf_norm": 0.2748941481113434, + "layer_1_max_spectral_norm": 0.006149244494736195, + "layer_2_update_fnorm": 0.2811521589756012, + "layer_2_max_l1_linf_norm": 0.34086495637893677, + "layer_2_max_spectral_norm": 0.007754709105938673, + "layer_3_update_fnorm": 0.2825034558773041, + "layer_3_max_l1_linf_norm": 0.35281726717948914, + "layer_3_max_spectral_norm": 0.008012714795768261, + "layer_4_update_fnorm": 0.29269635677337646, + "layer_4_max_l1_linf_norm": 0.3539912700653076, + "layer_4_max_spectral_norm": 0.007977878674864769, + "layer_5_update_fnorm": 0.2990380823612213, + "layer_5_max_l1_linf_norm": 0.3264126181602478, + "layer_5_max_spectral_norm": 0.007358702365309, + "layer_6_update_fnorm": 0.3022935390472412, + "layer_6_max_l1_linf_norm": 0.30492404103279114, + "layer_6_max_spectral_norm": 0.006909629795700312, + "layer_7_update_fnorm": 0.3016984760761261, + "layer_7_max_l1_linf_norm": 0.2949162721633911, + "layer_7_max_spectral_norm": 0.006702152080833912, + "layer_8_update_fnorm": 0.30274808406829834, + "layer_8_max_l1_linf_norm": 0.2968639135360718, + "layer_8_max_spectral_norm": 0.006673179566860199, + "layer_9_update_fnorm": 0.3031488358974457, + "layer_9_max_l1_linf_norm": 0.3207797408103943, + "layer_9_max_spectral_norm": 0.007307659834623337, + "layer_10_update_fnorm": 0.3027692437171936, + "layer_10_max_l1_linf_norm": 0.33634284138679504, + "layer_10_max_spectral_norm": 0.007630223874002695, + "layer_11_update_fnorm": 0.30237463116645813, + "layer_11_max_l1_linf_norm": 0.3486211597919464, + "layer_11_max_spectral_norm": 0.007849185727536678, + "layer_12_update_fnorm": 0.3012404143810272, + "layer_12_max_l1_linf_norm": 0.34948933124542236, + "layer_12_max_spectral_norm": 0.007886004634201527, + "total_sharpness": 0.005719804670661688, + "ip_v_neg_g": 0.01049647107720375, + "cos_v_neg_g": 0.004225302487611771, + "v_norm": 1.6858078241348267, + "g_norm": 1.4735926389694214, + "hv_norm": 0.5184061527252197, + "cos_v_hv": 0.018600264564156532, + "hg_norm": 28.476390838623047, + "cos_g_hg": 0.5378149151802063, + "v_parallel_norm": 0.0002624971675686538, + "v_perp_norm": 1.6858078241348267, + "layer_1_v_norm": 0.29584622383117676, + "layer_1_cos_v_neg_g": 0.008046582341194153, + "layer_2_v_norm": 0.2811521589756012, + "layer_2_cos_v_neg_g": 0.011853684671223164, + "layer_3_v_norm": 0.2825034558773041, + "layer_3_cos_v_neg_g": 0.012041131034493446, + "layer_4_v_norm": 0.29269635677337646, + "layer_4_cos_v_neg_g": 0.007936738431453705, + "layer_5_v_norm": 0.2990380823612213, + "layer_5_cos_v_neg_g": 0.007573592942208052, + "layer_6_v_norm": 0.3022935092449188, + "layer_6_cos_v_neg_g": 0.007485376670956612, + "layer_7_v_norm": 0.3016984760761261, + "layer_7_cos_v_neg_g": 0.008199975825846195, + "layer_8_v_norm": 0.3027481138706207, + "layer_8_cos_v_neg_g": 0.006033802404999733, + "layer_9_v_norm": 0.3031488358974457, + "layer_9_cos_v_neg_g": 0.007122628390789032, + "layer_10_v_norm": 0.3027692437171936, + "layer_10_cos_v_neg_g": 0.005793216172605753, + "layer_11_v_norm": 0.30237463116645813, + "layer_11_cos_v_neg_g": 0.00456062238663435, + "layer_12_v_norm": 0.3012404143810272, + "layer_12_cos_v_neg_g": 0.004065134562551975, + "layer_1_sharpness": 0.008941647596657276, + 
"layer_2_sharpness": 0.0026145982556045055, + "layer_3_sharpness": 0.003934082575142384, + "layer_4_sharpness": 0.0015783077105879784, + "layer_5_sharpness": 0.001774743665009737, + "layer_6_sharpness": 0.0023150620982050896, + "layer_7_sharpness": 0.002787887118756771, + "layer_8_sharpness": 0.0019141512457281351, + "layer_9_sharpness": 0.0014890740858390927, + "layer_10_sharpness": 0.0012186648091301322, + "layer_11_sharpness": 0.0008616297855041921, + "layer_12_sharpness": 0.0016069416888058186 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..a0390302166f944a04a8d75d66ccedab2d79de8f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6770299673080444, + "total_l1_linf_norm": 13753.2451171875, + "total_spectral_norm": 1.677030324935913, + "layer_1_update_fnorm": 0.2927528917789459, + "layer_1_max_l1_linf_norm": 0.2774389982223511, + "layer_1_max_spectral_norm": 0.0062430971302092075, + "layer_2_update_fnorm": 0.2780028283596039, + "layer_2_max_l1_linf_norm": 0.351435124874115, + "layer_2_max_spectral_norm": 0.008090106770396233, + "layer_3_update_fnorm": 0.2829143702983856, + "layer_3_max_l1_linf_norm": 0.36157339811325073, + "layer_3_max_spectral_norm": 0.008200112730264664, + "layer_4_update_fnorm": 0.29395750164985657, + "layer_4_max_l1_linf_norm": 0.36731356382369995, + "layer_4_max_spectral_norm": 0.008301104418933392, + "layer_5_update_fnorm": 0.2999507188796997, + "layer_5_max_l1_linf_norm": 0.3456880748271942, + "layer_5_max_spectral_norm": 0.007814877666532993, + "layer_6_update_fnorm": 0.3021719455718994, + "layer_6_max_l1_linf_norm": 0.3053210377693176, + "layer_6_max_spectral_norm": 0.006902433466166258, + "layer_7_update_fnorm": 0.3022869825363159, + "layer_7_max_l1_linf_norm": 0.2945108413696289, + "layer_7_max_spectral_norm": 0.006678189150989056, + "layer_8_update_fnorm": 0.30282077193260193, + "layer_8_max_l1_linf_norm": 0.30393874645233154, + "layer_8_max_spectral_norm": 0.006851191632449627, + "layer_9_update_fnorm": 0.30321717262268066, + "layer_9_max_l1_linf_norm": 0.33321261405944824, + "layer_9_max_spectral_norm": 0.007532194722443819, + "layer_10_update_fnorm": 0.30289703607559204, + "layer_10_max_l1_linf_norm": 0.35331153869628906, + "layer_10_max_spectral_norm": 0.007997200824320316, + "layer_11_update_fnorm": 0.3023490607738495, + "layer_11_max_l1_linf_norm": 0.3694442808628082, + "layer_11_max_spectral_norm": 0.008287940174341202, + "layer_12_update_fnorm": 0.3011946976184845, + "layer_12_max_l1_linf_norm": 0.3543272912502289, + "layer_12_max_spectral_norm": 0.007999640889465809, + "total_sharpness": 0.005501474719494581, + "ip_v_neg_g": 0.005335766822099686, + "cos_v_neg_g": 0.002431270433589816, + "v_norm": 1.6770299673080444, + "g_norm": 1.308647632598877, + "hv_norm": 0.5716167092323303, + "cos_v_hv": 0.01614042930305004, + "hg_norm": 20.183300018310547, + "cos_g_hg": 0.4669390618801117, + "v_parallel_norm": 0.00025609892327338457, + "v_perp_norm": 1.6770299673080444, + "layer_1_v_norm": 0.2927528917789459, + "layer_1_cos_v_neg_g": 0.00772059103474021, + "layer_2_v_norm": 0.2780028283596039, + "layer_2_cos_v_neg_g": 0.011668347753584385, + "layer_3_v_norm": 0.2829143702983856, + 
"layer_3_cos_v_neg_g": 0.004294469486922026, + "layer_4_v_norm": 0.29395750164985657, + "layer_4_cos_v_neg_g": 0.0018464751774445176, + "layer_5_v_norm": 0.2999507188796997, + "layer_5_cos_v_neg_g": 0.0013973698951303959, + "layer_6_v_norm": 0.3021719455718994, + "layer_6_cos_v_neg_g": 0.0024311495944857597, + "layer_7_v_norm": 0.3022869825363159, + "layer_7_cos_v_neg_g": 0.0022508581168949604, + "layer_8_v_norm": 0.30282077193260193, + "layer_8_cos_v_neg_g": 0.0042330678552389145, + "layer_9_v_norm": 0.30321717262268066, + "layer_9_cos_v_neg_g": 0.0030436860397458076, + "layer_10_v_norm": 0.30289703607559204, + "layer_10_cos_v_neg_g": 0.003124726004898548, + "layer_11_v_norm": 0.3023490607738495, + "layer_11_cos_v_neg_g": 0.0033186988439410925, + "layer_12_v_norm": 0.3011946976184845, + "layer_12_cos_v_neg_g": 0.0023325516376644373, + "layer_1_sharpness": 0.019598616287112236, + "layer_2_sharpness": 0.00822217483073473, + "layer_3_sharpness": 0.0020726241637021303, + "layer_4_sharpness": 0.0009799941908568144, + "layer_5_sharpness": 0.0014387950068339705, + "layer_6_sharpness": 0.0015685007674619555, + "layer_7_sharpness": 0.001911264844238758, + "layer_8_sharpness": 0.00175916135776788, + "layer_9_sharpness": 0.0012620305642485619, + "layer_10_sharpness": 0.0011526457965373993, + "layer_11_sharpness": 0.0008980800048448145, + "layer_12_sharpness": 0.0012896200641989708 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..41dc858813ffc4bdad75312b1590831f942869af --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.67535400390625, + "total_l1_linf_norm": 13737.2080078125, + "total_spectral_norm": 1.6753541231155396, + "layer_1_update_fnorm": 0.2917540967464447, + "layer_1_max_l1_linf_norm": 0.33773642778396606, + "layer_1_max_spectral_norm": 0.007605543825775385, + "layer_2_update_fnorm": 0.27577388286590576, + "layer_2_max_l1_linf_norm": 0.4186429977416992, + "layer_2_max_spectral_norm": 0.009448407217860222, + "layer_3_update_fnorm": 0.27920347452163696, + "layer_3_max_l1_linf_norm": 0.4298652410507202, + "layer_3_max_spectral_norm": 0.009681865572929382, + "layer_4_update_fnorm": 0.2924424707889557, + "layer_4_max_l1_linf_norm": 0.42531096935272217, + "layer_4_max_spectral_norm": 0.009579869918525219, + "layer_5_update_fnorm": 0.298777312040329, + "layer_5_max_l1_linf_norm": 0.4056794047355652, + "layer_5_max_spectral_norm": 0.009120985865592957, + "layer_6_update_fnorm": 0.30162712931632996, + "layer_6_max_l1_linf_norm": 0.3449541926383972, + "layer_6_max_spectral_norm": 0.007845770567655563, + "layer_7_update_fnorm": 0.30130070447921753, + "layer_7_max_l1_linf_norm": 0.3382348120212555, + "layer_7_max_spectral_norm": 0.007661609444767237, + "layer_8_update_fnorm": 0.30194684863090515, + "layer_8_max_l1_linf_norm": 0.35043245553970337, + "layer_8_max_spectral_norm": 0.00786412600427866, + "layer_9_update_fnorm": 0.30229130387306213, + "layer_9_max_l1_linf_norm": 0.38366788625717163, + "layer_9_max_spectral_norm": 0.00860679056495428, + "layer_10_update_fnorm": 0.3019353449344635, + "layer_10_max_l1_linf_norm": 0.3950939178466797, + "layer_10_max_spectral_norm": 0.008917638100683689, + "layer_11_update_fnorm": 
0.3009946942329407, + "layer_11_max_l1_linf_norm": 0.4313807785511017, + "layer_11_max_spectral_norm": 0.009581520222127438, + "layer_12_update_fnorm": 0.3005189001560211, + "layer_12_max_l1_linf_norm": 0.43568772077560425, + "layer_12_max_spectral_norm": 0.00972250010818243, + "total_sharpness": 0.005328829400241375, + "ip_v_neg_g": 0.008169010281562805, + "cos_v_neg_g": 0.003955512773245573, + "v_norm": 1.67535400390625, + "g_norm": 1.2327076196670532, + "hv_norm": 0.44211551547050476, + "cos_v_hv": 0.020193083211779594, + "hg_norm": 15.019104957580566, + "cos_g_hg": 0.4764101803302765, + "v_parallel_norm": 0.00021958538854960352, + "v_perp_norm": 1.67535400390625, + "layer_1_v_norm": 0.2917540967464447, + "layer_1_cos_v_neg_g": 0.0064651877619326115, + "layer_2_v_norm": 0.27577388286590576, + "layer_2_cos_v_neg_g": 0.008343691937625408, + "layer_3_v_norm": 0.27920347452163696, + "layer_3_cos_v_neg_g": 0.007019665092229843, + "layer_4_v_norm": 0.2924424707889557, + "layer_4_cos_v_neg_g": 0.0059329550713300705, + "layer_5_v_norm": 0.298777312040329, + "layer_5_cos_v_neg_g": 0.007364274002611637, + "layer_6_v_norm": 0.30162715911865234, + "layer_6_cos_v_neg_g": 0.007045167963951826, + "layer_7_v_norm": 0.30130070447921753, + "layer_7_cos_v_neg_g": 0.007901443168520927, + "layer_8_v_norm": 0.30194684863090515, + "layer_8_cos_v_neg_g": 0.007965032011270523, + "layer_9_v_norm": 0.30229130387306213, + "layer_9_cos_v_neg_g": 0.007652865257114172, + "layer_10_v_norm": 0.3019353449344635, + "layer_10_cos_v_neg_g": 0.006738627795130014, + "layer_11_v_norm": 0.3009946644306183, + "layer_11_cos_v_neg_g": 0.005866017192602158, + "layer_12_v_norm": 0.3005189001560211, + "layer_12_cos_v_neg_g": 0.004380326718091965, + "layer_1_sharpness": 0.008375117555260658, + "layer_2_sharpness": 0.004294922109693289, + "layer_3_sharpness": 0.0029476184863597155, + "layer_4_sharpness": 0.0012556432047858834, + "layer_5_sharpness": 0.00200625229626894, + "layer_6_sharpness": 0.001822268939577043, + "layer_7_sharpness": 0.002241611946374178, + "layer_8_sharpness": 0.00199494743719697, + "layer_9_sharpness": 0.0014993721852079034, + "layer_10_sharpness": 0.0011519407853484154, + "layer_11_sharpness": 0.0010837373556569219, + "layer_12_sharpness": 0.001376016647554934 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..b325a98a1c699f9c3cfe36aa822e68e3559576ab --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6799665689468384, + "total_l1_linf_norm": 13774.654296875, + "total_spectral_norm": 1.6799665689468384, + "layer_1_update_fnorm": 0.2900700271129608, + "layer_1_max_l1_linf_norm": 0.31542158126831055, + "layer_1_max_spectral_norm": 0.007107717916369438, + "layer_2_update_fnorm": 0.2779799997806549, + "layer_2_max_l1_linf_norm": 0.4050278663635254, + "layer_2_max_spectral_norm": 0.009204025380313396, + "layer_3_update_fnorm": 0.28400495648384094, + "layer_3_max_l1_linf_norm": 0.40430545806884766, + "layer_3_max_spectral_norm": 0.009035578928887844, + "layer_4_update_fnorm": 0.2944790720939636, + "layer_4_max_l1_linf_norm": 0.39912205934524536, + "layer_4_max_spectral_norm": 0.009078476577997208, + "layer_5_update_fnorm": 0.2993203401565552, + 
"layer_5_max_l1_linf_norm": 0.3558601140975952, + "layer_5_max_spectral_norm": 0.00805068202316761, + "layer_6_update_fnorm": 0.3020659387111664, + "layer_6_max_l1_linf_norm": 0.33570700883865356, + "layer_6_max_spectral_norm": 0.007541676051914692, + "layer_7_update_fnorm": 0.3018574118614197, + "layer_7_max_l1_linf_norm": 0.31104522943496704, + "layer_7_max_spectral_norm": 0.007115058600902557, + "layer_8_update_fnorm": 0.30280837416648865, + "layer_8_max_l1_linf_norm": 0.3257785141468048, + "layer_8_max_spectral_norm": 0.007364185992628336, + "layer_9_update_fnorm": 0.3033907115459442, + "layer_9_max_l1_linf_norm": 0.34501874446868896, + "layer_9_max_spectral_norm": 0.007749522104859352, + "layer_10_update_fnorm": 0.3031407296657562, + "layer_10_max_l1_linf_norm": 0.3674389123916626, + "layer_10_max_spectral_norm": 0.008314932696521282, + "layer_11_update_fnorm": 0.3025466203689575, + "layer_11_max_l1_linf_norm": 0.3919559717178345, + "layer_11_max_spectral_norm": 0.008802128955721855, + "layer_12_update_fnorm": 0.30141791701316833, + "layer_12_max_l1_linf_norm": 0.3899288773536682, + "layer_12_max_spectral_norm": 0.008782343938946724, + "total_sharpness": 0.0038671738002449274, + "ip_v_neg_g": 0.005540305748581886, + "cos_v_neg_g": 0.002387851011008024, + "v_norm": 1.6799665689468384, + "g_norm": 1.3811023235321045, + "hv_norm": 0.39447280764579773, + "cos_v_hv": 0.016469379886984825, + "hg_norm": 27.311735153198242, + "cos_g_hg": 0.5481104254722595, + "v_parallel_norm": 0.00013590675371233374, + "v_perp_norm": 1.6799665689468384, + "layer_1_v_norm": 0.2900700271129608, + "layer_1_cos_v_neg_g": 0.00306720775552094, + "layer_2_v_norm": 0.2779799997806549, + "layer_2_cos_v_neg_g": 0.004743209108710289, + "layer_3_v_norm": 0.28400495648384094, + "layer_3_cos_v_neg_g": 0.005398737732321024, + "layer_4_v_norm": 0.2944790720939636, + "layer_4_cos_v_neg_g": 0.005170074291527271, + "layer_5_v_norm": 0.2993203401565552, + "layer_5_cos_v_neg_g": 0.005184372421354055, + "layer_6_v_norm": 0.302065908908844, + "layer_6_cos_v_neg_g": 0.005088026635348797, + "layer_7_v_norm": 0.3018574118614197, + "layer_7_cos_v_neg_g": 0.005509260576218367, + "layer_8_v_norm": 0.30280837416648865, + "layer_8_cos_v_neg_g": 0.005516787525266409, + "layer_9_v_norm": 0.3033907115459442, + "layer_9_cos_v_neg_g": 0.0037835121620446444, + "layer_10_v_norm": 0.3031407296657562, + "layer_10_cos_v_neg_g": 0.002362401457503438, + "layer_11_v_norm": 0.3025466203689575, + "layer_11_cos_v_neg_g": 0.003727609058842063, + "layer_12_v_norm": 0.30141791701316833, + "layer_12_cos_v_neg_g": 0.0026088447775691748, + "layer_1_sharpness": 0.0065336693078279495, + "layer_2_sharpness": 0.0027322587557137012, + "layer_3_sharpness": 0.0016767795896157622, + "layer_4_sharpness": 0.0009273593313992023, + "layer_5_sharpness": 0.0012381812557578087, + "layer_6_sharpness": 0.0015214059967547655, + "layer_7_sharpness": 0.00196224101819098, + "layer_8_sharpness": 0.0016429265961050987, + "layer_9_sharpness": 0.001197782694362104, + "layer_10_sharpness": 0.0009136561420746148, + "layer_11_sharpness": 0.0008941382402554154, + "layer_12_sharpness": 0.0010220687836408615 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..d19015bbe390783a681e73ea7aa3fe06695639d7 --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6803971529006958, + "total_l1_linf_norm": 13773.537109375, + "total_spectral_norm": 1.6803972721099854, + "layer_1_update_fnorm": 0.2893134355545044, + "layer_1_max_l1_linf_norm": 0.3246147036552429, + "layer_1_max_spectral_norm": 0.007332623470574617, + "layer_2_update_fnorm": 0.2779557704925537, + "layer_2_max_l1_linf_norm": 0.40848928689956665, + "layer_2_max_spectral_norm": 0.009232263080775738, + "layer_3_update_fnorm": 0.28360408544540405, + "layer_3_max_l1_linf_norm": 0.42699310183525085, + "layer_3_max_spectral_norm": 0.009599908255040646, + "layer_4_update_fnorm": 0.29509419202804565, + "layer_4_max_l1_linf_norm": 0.4109346270561218, + "layer_4_max_spectral_norm": 0.009280423633754253, + "layer_5_update_fnorm": 0.2991867661476135, + "layer_5_max_l1_linf_norm": 0.384209543466568, + "layer_5_max_spectral_norm": 0.008689756505191326, + "layer_6_update_fnorm": 0.3016708791255951, + "layer_6_max_l1_linf_norm": 0.36775776743888855, + "layer_6_max_spectral_norm": 0.00828601699322462, + "layer_7_update_fnorm": 0.3006936013698578, + "layer_7_max_l1_linf_norm": 0.33737093210220337, + "layer_7_max_spectral_norm": 0.007653859909623861, + "layer_8_update_fnorm": 0.30219125747680664, + "layer_8_max_l1_linf_norm": 0.3401179611682892, + "layer_8_max_spectral_norm": 0.0076969158835709095, + "layer_9_update_fnorm": 0.30243897438049316, + "layer_9_max_l1_linf_norm": 0.39057549834251404, + "layer_9_max_spectral_norm": 0.008707794360816479, + "layer_10_update_fnorm": 0.302219420671463, + "layer_10_max_l1_linf_norm": 0.41062843799591064, + "layer_10_max_spectral_norm": 0.009213750250637531, + "layer_11_update_fnorm": 0.30154740810394287, + "layer_11_max_l1_linf_norm": 0.4208826720714569, + "layer_11_max_spectral_norm": 0.00939331017434597, + "layer_12_update_fnorm": 0.3009088933467865, + "layer_12_max_l1_linf_norm": 0.40090492367744446, + "layer_12_max_spectral_norm": 0.009011885151267052, + "total_sharpness": 0.004209571518003941, + "ip_v_neg_g": 0.006615778431296349, + "cos_v_neg_g": 0.003163557732477784, + "v_norm": 1.6803971529006958, + "g_norm": 1.2444952726364136, + "hv_norm": 0.395862340927124, + "cos_v_hv": 0.017869221046566963, + "hg_norm": 19.055713653564453, + "cos_g_hg": 0.4840710461139679, + "v_parallel_norm": 0.00020569474145304412, + "v_perp_norm": 1.6803971529006958, + "layer_1_v_norm": 0.2893134355545044, + "layer_1_cos_v_neg_g": 0.00440519955009222, + "layer_2_v_norm": 0.2779557704925537, + "layer_2_cos_v_neg_g": 0.003819480538368225, + "layer_3_v_norm": 0.28360408544540405, + "layer_3_cos_v_neg_g": 0.0043738833628594875, + "layer_4_v_norm": 0.29509419202804565, + "layer_4_cos_v_neg_g": 0.0039978185668587685, + "layer_5_v_norm": 0.2991867661476135, + "layer_5_cos_v_neg_g": 0.005736066959798336, + "layer_6_v_norm": 0.3016708791255951, + "layer_6_cos_v_neg_g": 0.00824756920337677, + "layer_7_v_norm": 0.3006936013698578, + "layer_7_cos_v_neg_g": 0.00786744523793459, + "layer_8_v_norm": 0.30219125747680664, + "layer_8_cos_v_neg_g": 0.006743970792740583, + "layer_9_v_norm": 0.30243897438049316, + "layer_9_cos_v_neg_g": 0.006812092382460833, + "layer_10_v_norm": 0.302219420671463, + "layer_10_cos_v_neg_g": 0.006195249501615763, + "layer_11_v_norm": 0.30154740810394287, + "layer_11_cos_v_neg_g": 0.005077246576547623, + "layer_12_v_norm": 0.3009088933467865, + "layer_12_cos_v_neg_g": 0.0023299423046410084, + "layer_1_sharpness": 0.009329002350568771, 
+ "layer_2_sharpness": 0.002959455130621791, + "layer_3_sharpness": 0.0018077883869409561, + "layer_4_sharpness": 0.0010653819190338254, + "layer_5_sharpness": 0.0014122584834694862, + "layer_6_sharpness": 0.0018908310448750854, + "layer_7_sharpness": 0.0020141780842095613, + "layer_8_sharpness": 0.001755964825861156, + "layer_9_sharpness": 0.0012881426373496652, + "layer_10_sharpness": 0.0010649107862263918, + "layer_11_sharpness": 0.0008066374575719237, + "layer_12_sharpness": 0.001002867124043405 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..b1d7f19756ed60cfc99eb705c0e03d8afca1a64f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6865862607955933, + "total_l1_linf_norm": 13808.333984375, + "total_spectral_norm": 1.6865860223770142, + "layer_1_update_fnorm": 0.28883302211761475, + "layer_1_max_l1_linf_norm": 0.34529757499694824, + "layer_1_max_spectral_norm": 0.0077999066561460495, + "layer_2_update_fnorm": 0.2771398425102234, + "layer_2_max_l1_linf_norm": 0.43409186601638794, + "layer_2_max_spectral_norm": 0.009863479994237423, + "layer_3_update_fnorm": 0.2823765277862549, + "layer_3_max_l1_linf_norm": 0.4447169601917267, + "layer_3_max_spectral_norm": 0.009986251592636108, + "layer_4_update_fnorm": 0.2948041558265686, + "layer_4_max_l1_linf_norm": 0.44020795822143555, + "layer_4_max_spectral_norm": 0.009944015182554722, + "layer_5_update_fnorm": 0.29837900400161743, + "layer_5_max_l1_linf_norm": 0.4200724959373474, + "layer_5_max_spectral_norm": 0.009419825859367847, + "layer_6_update_fnorm": 0.3011830747127533, + "layer_6_max_l1_linf_norm": 0.3882898688316345, + "layer_6_max_spectral_norm": 0.008638793602585793, + "layer_7_update_fnorm": 0.3002490997314453, + "layer_7_max_l1_linf_norm": 0.3645334839820862, + "layer_7_max_spectral_norm": 0.008170336484909058, + "layer_8_update_fnorm": 0.3014933466911316, + "layer_8_max_l1_linf_norm": 0.36973243951797485, + "layer_8_max_spectral_norm": 0.008285505697131157, + "layer_9_update_fnorm": 0.30231592059135437, + "layer_9_max_l1_linf_norm": 0.4003726840019226, + "layer_9_max_spectral_norm": 0.008915030397474766, + "layer_10_update_fnorm": 0.30206388235092163, + "layer_10_max_l1_linf_norm": 0.4264007806777954, + "layer_10_max_spectral_norm": 0.009471183642745018, + "layer_11_update_fnorm": 0.30107152462005615, + "layer_11_max_l1_linf_norm": 0.445889413356781, + "layer_11_max_spectral_norm": 0.009990768507122993, + "layer_12_update_fnorm": 0.30100688338279724, + "layer_12_max_l1_linf_norm": 0.4357953667640686, + "layer_12_max_spectral_norm": 0.009777085855603218, + "total_sharpness": 0.005190946161746979, + "ip_v_neg_g": 0.009000995196402073, + "cos_v_neg_g": 0.004012677352875471, + "v_norm": 1.6865862607955933, + "g_norm": 1.3299880027770996, + "hv_norm": 0.5261932611465454, + "cos_v_hv": 0.01663833297789097, + "hg_norm": 35.317691802978516, + "cos_g_hg": 0.5217198133468628, + "v_parallel_norm": 0.00041270037763752043, + "v_perp_norm": 1.6865862607955933, + "layer_1_v_norm": 0.28883302211761475, + "layer_1_cos_v_neg_g": 0.008508274331688881, + "layer_2_v_norm": 0.2771398425102234, + "layer_2_cos_v_neg_g": 0.007307915017008781, + "layer_3_v_norm": 0.2823765277862549, + 
"layer_3_cos_v_neg_g": 0.004851449280977249, + "layer_4_v_norm": 0.2948041558265686, + "layer_4_cos_v_neg_g": 0.004416591953486204, + "layer_5_v_norm": 0.29837900400161743, + "layer_5_cos_v_neg_g": 0.005596446804702282, + "layer_6_v_norm": 0.3011830747127533, + "layer_6_cos_v_neg_g": 0.0060484157875180244, + "layer_7_v_norm": 0.3002490997314453, + "layer_7_cos_v_neg_g": 0.006696341559290886, + "layer_8_v_norm": 0.3014933466911316, + "layer_8_cos_v_neg_g": 0.0072257197462022305, + "layer_9_v_norm": 0.30231592059135437, + "layer_9_cos_v_neg_g": 0.007390200160443783, + "layer_10_v_norm": 0.30206388235092163, + "layer_10_cos_v_neg_g": 0.00765313720330596, + "layer_11_v_norm": 0.30107152462005615, + "layer_11_cos_v_neg_g": 0.006122366525232792, + "layer_12_v_norm": 0.30100688338279724, + "layer_12_cos_v_neg_g": 0.0064650243148207664, + "layer_1_sharpness": 0.01134196575731039, + "layer_2_sharpness": 0.0019763167947530746, + "layer_3_sharpness": 0.002038783859461546, + "layer_4_sharpness": 0.0010109881404787302, + "layer_5_sharpness": 0.001634273212403059, + "layer_6_sharpness": 0.00222810753621161, + "layer_7_sharpness": 0.0022580577060580254, + "layer_8_sharpness": 0.001889948733150959, + "layer_9_sharpness": 0.0014018346555531025, + "layer_10_sharpness": 0.0012336332583799958, + "layer_11_sharpness": 0.0011252063559368253, + "layer_12_sharpness": 0.002141854027286172 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..30c542955d34675b1864a867517edcfc38a3b018 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.2612969875335693, + "total_l1_linf_norm": 10328.03125, + "total_spectral_norm": 1.2612968683242798, + "layer_1_update_fnorm": 0.21803784370422363, + "layer_1_max_l1_linf_norm": 0.1580549031496048, + "layer_1_max_spectral_norm": 0.00430212402716279, + "layer_2_update_fnorm": 0.2089129388332367, + "layer_2_max_l1_linf_norm": 0.15823298692703247, + "layer_2_max_spectral_norm": 0.004304011818021536, + "layer_3_update_fnorm": 0.1999025046825409, + "layer_3_max_l1_linf_norm": 0.16089817881584167, + "layer_3_max_spectral_norm": 0.004303285386413336, + "layer_4_update_fnorm": 0.2029707431793213, + "layer_4_max_l1_linf_norm": 0.17939937114715576, + "layer_4_max_spectral_norm": 0.0043039871379733086, + "layer_5_update_fnorm": 0.20872707664966583, + "layer_5_max_l1_linf_norm": 0.18814635276794434, + "layer_5_max_spectral_norm": 0.004301008302718401, + "layer_6_update_fnorm": 0.21295861899852753, + "layer_6_max_l1_linf_norm": 0.19174805283546448, + "layer_6_max_spectral_norm": 0.005017674993723631, + "layer_7_update_fnorm": 0.21395234763622284, + "layer_7_max_l1_linf_norm": 0.20308949053287506, + "layer_7_max_spectral_norm": 0.004581116139888763, + "layer_8_update_fnorm": 0.21392761170864105, + "layer_8_max_l1_linf_norm": 0.19540633261203766, + "layer_8_max_spectral_norm": 0.005105909425765276, + "layer_9_update_fnorm": 0.21439437568187714, + "layer_9_max_l1_linf_norm": 0.19563573598861694, + "layer_9_max_spectral_norm": 0.0048327757976949215, + "layer_10_update_fnorm": 0.2151058465242386, + "layer_10_max_l1_linf_norm": 0.19776269793510437, + "layer_10_max_spectral_norm": 0.004832572769373655, + "layer_11_update_fnorm": 0.21485556662082672, 
+ "layer_11_max_l1_linf_norm": 0.20026551187038422, + "layer_11_max_spectral_norm": 0.004704924765974283, + "layer_12_update_fnorm": 0.21563245356082916, + "layer_12_max_l1_linf_norm": 0.20129579305648804, + "layer_12_max_spectral_norm": 0.00492657907307148, + "total_sharpness": 0.04579206928610802, + "ip_v_neg_g": 0.030118731781840324, + "cos_v_neg_g": 0.009102080017328262, + "v_norm": 1.2612969875335693, + "g_norm": 2.623485565185547, + "hv_norm": 2.0189671516418457, + "cos_v_hv": 0.02860739827156067, + "hg_norm": 140.25851440429688, + "cos_g_hg": 0.5976855158805847, + "v_parallel_norm": 0.0005005295970477164, + "v_perp_norm": 1.2612968683242798, + "layer_1_v_norm": 0.21803784370422363, + "layer_1_cos_v_neg_g": 0.01772666722536087, + "layer_2_v_norm": 0.2089129388332367, + "layer_2_cos_v_neg_g": 0.022091105580329895, + "layer_3_v_norm": 0.1999025195837021, + "layer_3_cos_v_neg_g": 0.023596150800585747, + "layer_4_v_norm": 0.2029707431793213, + "layer_4_cos_v_neg_g": 0.019811859354376793, + "layer_5_v_norm": 0.20872707664966583, + "layer_5_cos_v_neg_g": 0.017645740881562233, + "layer_6_v_norm": 0.21295861899852753, + "layer_6_cos_v_neg_g": 0.016257204115390778, + "layer_7_v_norm": 0.21395234763622284, + "layer_7_cos_v_neg_g": 0.014989199116826057, + "layer_8_v_norm": 0.21392759680747986, + "layer_8_cos_v_neg_g": 0.014341731555759907, + "layer_9_v_norm": 0.21439437568187714, + "layer_9_cos_v_neg_g": 0.013524185866117477, + "layer_10_v_norm": 0.2151058465242386, + "layer_10_cos_v_neg_g": 0.01234656572341919, + "layer_11_v_norm": 0.21485556662082672, + "layer_11_cos_v_neg_g": 0.010146071203052998, + "layer_12_v_norm": 0.21563245356082916, + "layer_12_cos_v_neg_g": 0.0074520111083984375, + "layer_1_sharpness": 0.046686235815286636, + "layer_2_sharpness": 0.028133738785982132, + "layer_3_sharpness": 0.03146366775035858, + "layer_4_sharpness": 0.0198702160269022, + "layer_5_sharpness": 0.01606205478310585, + "layer_6_sharpness": 0.014065158553421497, + "layer_7_sharpness": 0.01306331716477871, + "layer_8_sharpness": 0.010268387384712696, + "layer_9_sharpness": 0.007897360250353813, + "layer_10_sharpness": 0.005886594764888287, + "layer_11_sharpness": 0.0060278549790382385, + "layer_12_sharpness": 0.0048588174395263195 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..b27c5adc02ef4f5c5de394692f5d6d526c456d21 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6800581216812134, + "total_l1_linf_norm": 13771.6455078125, + "total_spectral_norm": 1.6800577640533447, + "layer_1_update_fnorm": 0.2894347608089447, + "layer_1_max_l1_linf_norm": 0.3288252353668213, + "layer_1_max_spectral_norm": 0.007440411485731602, + "layer_2_update_fnorm": 0.2777908444404602, + "layer_2_max_l1_linf_norm": 0.4274154603481293, + "layer_2_max_spectral_norm": 0.009711596183478832, + "layer_3_update_fnorm": 0.2830236852169037, + "layer_3_max_l1_linf_norm": 0.43336331844329834, + "layer_3_max_spectral_norm": 0.009812426753342152, + "layer_4_update_fnorm": 0.2960376739501953, + "layer_4_max_l1_linf_norm": 0.414797842502594, + "layer_4_max_spectral_norm": 0.009391259402036667, + "layer_5_update_fnorm": 0.299685537815094, + "layer_5_max_l1_linf_norm": 
0.3875378966331482, + "layer_5_max_spectral_norm": 0.008710472844541073, + "layer_6_update_fnorm": 0.30176305770874023, + "layer_6_max_l1_linf_norm": 0.36813583970069885, + "layer_6_max_spectral_norm": 0.00835298839956522, + "layer_7_update_fnorm": 0.30146339535713196, + "layer_7_max_l1_linf_norm": 0.3277202248573303, + "layer_7_max_spectral_norm": 0.007386131677776575, + "layer_8_update_fnorm": 0.30231261253356934, + "layer_8_max_l1_linf_norm": 0.3526875376701355, + "layer_8_max_spectral_norm": 0.007949866354465485, + "layer_9_update_fnorm": 0.3025071918964386, + "layer_9_max_l1_linf_norm": 0.38653695583343506, + "layer_9_max_spectral_norm": 0.008693866431713104, + "layer_10_update_fnorm": 0.30241432785987854, + "layer_10_max_l1_linf_norm": 0.41720789670944214, + "layer_10_max_spectral_norm": 0.0093926303088665, + "layer_11_update_fnorm": 0.3012026846408844, + "layer_11_max_l1_linf_norm": 0.41920915246009827, + "layer_11_max_spectral_norm": 0.009421946480870247, + "layer_12_update_fnorm": 0.30067718029022217, + "layer_12_max_l1_linf_norm": 0.41386526823043823, + "layer_12_max_spectral_norm": 0.009319356642663479, + "total_sharpness": 0.0033995015546679497, + "ip_v_neg_g": 0.0047140708193182945, + "cos_v_neg_g": 0.0021946607157588005, + "v_norm": 1.6800581216812134, + "g_norm": 1.2785108089447021, + "hv_norm": 0.4159895181655884, + "cos_v_hv": 0.013729576021432877, + "hg_norm": 39.72566604614258, + "cos_g_hg": 0.42578476667404175, + "v_parallel_norm": 0.0001792576804291457, + "v_perp_norm": 1.6800581216812134, + "layer_1_v_norm": 0.2894347608089447, + "layer_1_cos_v_neg_g": 0.0016941033536568284, + "layer_2_v_norm": 0.2777908444404602, + "layer_2_cos_v_neg_g": 0.007565642241388559, + "layer_3_v_norm": 0.2830236852169037, + "layer_3_cos_v_neg_g": 0.004611937329173088, + "layer_4_v_norm": 0.2960376739501953, + "layer_4_cos_v_neg_g": 0.003373602405190468, + "layer_5_v_norm": 0.299685537815094, + "layer_5_cos_v_neg_g": 0.0036700109485536814, + "layer_6_v_norm": 0.30176305770874023, + "layer_6_cos_v_neg_g": 0.003504424821585417, + "layer_7_v_norm": 0.30146339535713196, + "layer_7_cos_v_neg_g": 0.003714681603014469, + "layer_8_v_norm": 0.30231261253356934, + "layer_8_cos_v_neg_g": 0.002771155908703804, + "layer_9_v_norm": 0.3025071918964386, + "layer_9_cos_v_neg_g": 0.004303340800106525, + "layer_10_v_norm": 0.30241432785987854, + "layer_10_cos_v_neg_g": 0.004492052365094423, + "layer_11_v_norm": 0.3012026846408844, + "layer_11_cos_v_neg_g": 0.004949558526277542, + "layer_12_v_norm": 0.30067718029022217, + "layer_12_cos_v_neg_g": 0.003380637615919113, + "layer_1_sharpness": 0.008501428179442883, + "layer_2_sharpness": 0.0031454041600227356, + "layer_3_sharpness": 0.0019958647899329662, + "layer_4_sharpness": 0.0009925088379532099, + "layer_5_sharpness": 0.0012251706793904305, + "layer_6_sharpness": 0.0014603687450289726, + "layer_7_sharpness": 0.0016246529994532466, + "layer_8_sharpness": 0.0013669076142832637, + "layer_9_sharpness": 0.0010003484785556793, + "layer_10_sharpness": 0.0008405160624533892, + "layer_11_sharpness": 0.0007140312809497118, + "layer_12_sharpness": 0.0008147711050696671 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..5a248c2b0e07ef0ec1038faca71ec8ec7d900396 --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6798874139785767, + "total_l1_linf_norm": 13772.9560546875, + "total_spectral_norm": 1.679887294769287, + "layer_1_update_fnorm": 0.28933292627334595, + "layer_1_max_l1_linf_norm": 0.36538851261138916, + "layer_1_max_spectral_norm": 0.008192680776119232, + "layer_2_update_fnorm": 0.27884024381637573, + "layer_2_max_l1_linf_norm": 0.4363764822483063, + "layer_2_max_spectral_norm": 0.0098800677806139, + "layer_3_update_fnorm": 0.2837774157524109, + "layer_3_max_l1_linf_norm": 0.43480151891708374, + "layer_3_max_spectral_norm": 0.00977413821965456, + "layer_4_update_fnorm": 0.2962064743041992, + "layer_4_max_l1_linf_norm": 0.407387375831604, + "layer_4_max_spectral_norm": 0.009215710684657097, + "layer_5_update_fnorm": 0.29903644323349, + "layer_5_max_l1_linf_norm": 0.4257458448410034, + "layer_5_max_spectral_norm": 0.009471224620938301, + "layer_6_update_fnorm": 0.30126091837882996, + "layer_6_max_l1_linf_norm": 0.37939614057540894, + "layer_6_max_spectral_norm": 0.008511083200573921, + "layer_7_update_fnorm": 0.3003627061843872, + "layer_7_max_l1_linf_norm": 0.34801703691482544, + "layer_7_max_spectral_norm": 0.007854005321860313, + "layer_8_update_fnorm": 0.3013656735420227, + "layer_8_max_l1_linf_norm": 0.3552524745464325, + "layer_8_max_spectral_norm": 0.007941586896777153, + "layer_9_update_fnorm": 0.3024035692214966, + "layer_9_max_l1_linf_norm": 0.3809528946876526, + "layer_9_max_spectral_norm": 0.00857993122190237, + "layer_10_update_fnorm": 0.3021979331970215, + "layer_10_max_l1_linf_norm": 0.410645067691803, + "layer_10_max_spectral_norm": 0.009205283597111702, + "layer_11_update_fnorm": 0.3013128936290741, + "layer_11_max_l1_linf_norm": 0.4266958236694336, + "layer_11_max_spectral_norm": 0.009548547677695751, + "layer_12_update_fnorm": 0.3009929060935974, + "layer_12_max_l1_linf_norm": 0.4509740173816681, + "layer_12_max_spectral_norm": 0.01008856575936079, + "total_sharpness": 0.003261786652728915, + "ip_v_neg_g": 0.004924457985907793, + "cos_v_neg_g": 0.0014919121749699116, + "v_norm": 1.6798874139785767, + "g_norm": 1.964875340461731, + "hv_norm": 0.4229571223258972, + "cos_v_hv": 0.012955059297382832, + "hg_norm": 63.133872985839844, + "cos_g_hg": 0.6744507551193237, + "v_parallel_norm": 0.00018019181152340025, + "v_perp_norm": 1.6798874139785767, + "layer_1_v_norm": 0.28933292627334595, + "layer_1_cos_v_neg_g": 0.0030121372547000647, + "layer_2_v_norm": 0.27884024381637573, + "layer_2_cos_v_neg_g": 0.004985040053725243, + "layer_3_v_norm": 0.2837774157524109, + "layer_3_cos_v_neg_g": 0.0026307764928787947, + "layer_4_v_norm": 0.2962064743041992, + "layer_4_cos_v_neg_g": 0.002464906545355916, + "layer_5_v_norm": 0.29903644323349, + "layer_5_cos_v_neg_g": 0.002943093655630946, + "layer_6_v_norm": 0.30126091837882996, + "layer_6_cos_v_neg_g": 0.004514062777161598, + "layer_7_v_norm": 0.3003627061843872, + "layer_7_cos_v_neg_g": 0.0036477455869317055, + "layer_8_v_norm": 0.3013656735420227, + "layer_8_cos_v_neg_g": 0.0035563551355153322, + "layer_9_v_norm": 0.3024035692214966, + "layer_9_cos_v_neg_g": 0.0025139176286756992, + "layer_10_v_norm": 0.3021979331970215, + "layer_10_cos_v_neg_g": 0.0028217865619808435, + "layer_11_v_norm": 0.3013128936290741, + "layer_11_cos_v_neg_g": 0.0025914169382303953, + "layer_12_v_norm": 0.3009929060935974, + "layer_12_cos_v_neg_g": 0.0003229255089536309, + "layer_1_sharpness": 0.007954441010951996, + 
"layer_2_sharpness": 0.0018772511975839734, + "layer_3_sharpness": 0.0013606734573841095, + "layer_4_sharpness": 0.0006797846872359514, + "layer_5_sharpness": 0.0010386245558038354, + "layer_6_sharpness": 0.0013036223826929927, + "layer_7_sharpness": 0.0013657608069479465, + "layer_8_sharpness": 0.0011642549652606249, + "layer_9_sharpness": 0.0008180254371836782, + "layer_10_sharpness": 0.0007688437472097576, + "layer_11_sharpness": 0.0007725566392764449, + "layer_12_sharpness": 0.002188400598242879 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..f33285c386bd66c918e56cf757426e048f728002 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.678519606590271, + "total_l1_linf_norm": 13761.248046875, + "total_spectral_norm": 1.678519368171692, + "layer_1_update_fnorm": 0.2898517847061157, + "layer_1_max_l1_linf_norm": 0.3608231544494629, + "layer_1_max_spectral_norm": 0.008124357089400291, + "layer_2_update_fnorm": 0.2745054364204407, + "layer_2_max_l1_linf_norm": 0.4583747088909149, + "layer_2_max_spectral_norm": 0.010261435993015766, + "layer_3_update_fnorm": 0.28350237011909485, + "layer_3_max_l1_linf_norm": 0.4424080550670624, + "layer_3_max_spectral_norm": 0.009947039186954498, + "layer_4_update_fnorm": 0.2964487671852112, + "layer_4_max_l1_linf_norm": 0.4216509461402893, + "layer_4_max_spectral_norm": 0.009548837319016457, + "layer_5_update_fnorm": 0.29936906695365906, + "layer_5_max_l1_linf_norm": 0.3846971392631531, + "layer_5_max_spectral_norm": 0.008673523552715778, + "layer_6_update_fnorm": 0.3018142580986023, + "layer_6_max_l1_linf_norm": 0.3581574559211731, + "layer_6_max_spectral_norm": 0.008103650994598866, + "layer_7_update_fnorm": 0.3013627529144287, + "layer_7_max_l1_linf_norm": 0.33844542503356934, + "layer_7_max_spectral_norm": 0.007682709489017725, + "layer_8_update_fnorm": 0.30206024646759033, + "layer_8_max_l1_linf_norm": 0.3439417779445648, + "layer_8_max_spectral_norm": 0.007731082383543253, + "layer_9_update_fnorm": 0.3029087781906128, + "layer_9_max_l1_linf_norm": 0.36936652660369873, + "layer_9_max_spectral_norm": 0.00828539114445448, + "layer_10_update_fnorm": 0.3025663197040558, + "layer_10_max_l1_linf_norm": 0.40949755907058716, + "layer_10_max_spectral_norm": 0.009169877506792545, + "layer_11_update_fnorm": 0.3013612627983093, + "layer_11_max_l1_linf_norm": 0.4243374466896057, + "layer_11_max_spectral_norm": 0.009483092464506626, + "layer_12_update_fnorm": 0.3011234998703003, + "layer_12_max_l1_linf_norm": 0.4266883432865143, + "layer_12_max_spectral_norm": 0.009627428837120533, + "total_sharpness": 0.0034608629066497087, + "ip_v_neg_g": 0.005831761285662651, + "cos_v_neg_g": 0.002897911239415407, + "v_norm": 1.678519606590271, + "g_norm": 1.1989145278930664, + "hv_norm": 0.3463670611381531, + "cos_v_hv": 0.01677159033715725, + "hg_norm": 18.752029418945312, + "cos_g_hg": 0.48145967721939087, + "v_parallel_norm": 0.00016790784138720483, + "v_perp_norm": 1.678519606590271, + "layer_1_v_norm": 0.2898517847061157, + "layer_1_cos_v_neg_g": 0.0038963770493865013, + "layer_2_v_norm": 0.2745054364204407, + "layer_2_cos_v_neg_g": 0.004498784895986319, + "layer_3_v_norm": 0.28350237011909485, + 
"layer_3_cos_v_neg_g": 0.004504429176449776, + "layer_4_v_norm": 0.2964487671852112, + "layer_4_cos_v_neg_g": 0.004271024372428656, + "layer_5_v_norm": 0.29936906695365906, + "layer_5_cos_v_neg_g": 0.004106815904378891, + "layer_6_v_norm": 0.3018142580986023, + "layer_6_cos_v_neg_g": 0.004692427348345518, + "layer_7_v_norm": 0.3013627529144287, + "layer_7_cos_v_neg_g": 0.005682287737727165, + "layer_8_v_norm": 0.30206024646759033, + "layer_8_cos_v_neg_g": 0.006326798815280199, + "layer_9_v_norm": 0.3029087781906128, + "layer_9_cos_v_neg_g": 0.0070889913477003574, + "layer_10_v_norm": 0.3025663197040558, + "layer_10_cos_v_neg_g": 0.00593836372718215, + "layer_11_v_norm": 0.3013612627983093, + "layer_11_cos_v_neg_g": 0.005187251605093479, + "layer_12_v_norm": 0.3011234998703003, + "layer_12_cos_v_neg_g": 0.003846924751996994, + "layer_1_sharpness": 0.005010079126805067, + "layer_2_sharpness": 0.0037926228251308203, + "layer_3_sharpness": 0.002009921008720994, + "layer_4_sharpness": 0.0008520336705259979, + "layer_5_sharpness": 0.0010201249970123172, + "layer_6_sharpness": 0.0013129706494510174, + "layer_7_sharpness": 0.0015116238500922918, + "layer_8_sharpness": 0.0015869197668507695, + "layer_9_sharpness": 0.00121365359518677, + "layer_10_sharpness": 0.0009676633053459227, + "layer_11_sharpness": 0.0008534832741133869, + "layer_12_sharpness": 0.0009669134160503745 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..59a79c1c484da749257d713d6904eb11ac1c4ef6 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.684403657913208, + "total_l1_linf_norm": 13794.314453125, + "total_spectral_norm": 1.6844041347503662, + "layer_1_update_fnorm": 0.2846275269985199, + "layer_1_max_l1_linf_norm": 0.35805392265319824, + "layer_1_max_spectral_norm": 0.008106009103357792, + "layer_2_update_fnorm": 0.27733296155929565, + "layer_2_max_l1_linf_norm": 0.4073001742362976, + "layer_2_max_spectral_norm": 0.009335927665233612, + "layer_3_update_fnorm": 0.2807132303714752, + "layer_3_max_l1_linf_norm": 0.41533148288726807, + "layer_3_max_spectral_norm": 0.00933126825839281, + "layer_4_update_fnorm": 0.28259044885635376, + "layer_4_max_l1_linf_norm": 0.41209954023361206, + "layer_4_max_spectral_norm": 0.009270666167140007, + "layer_5_update_fnorm": 0.2980172634124756, + "layer_5_max_l1_linf_norm": 0.39141637086868286, + "layer_5_max_spectral_norm": 0.008862973190844059, + "layer_6_update_fnorm": 0.2999271750450134, + "layer_6_max_l1_linf_norm": 0.36705249547958374, + "layer_6_max_spectral_norm": 0.008264685980975628, + "layer_7_update_fnorm": 0.2998150587081909, + "layer_7_max_l1_linf_norm": 0.3552338480949402, + "layer_7_max_spectral_norm": 0.007969782687723637, + "layer_8_update_fnorm": 0.30115169286727905, + "layer_8_max_l1_linf_norm": 0.33641517162323, + "layer_8_max_spectral_norm": 0.0076596359722316265, + "layer_9_update_fnorm": 0.30189749598503113, + "layer_9_max_l1_linf_norm": 0.3665822148323059, + "layer_9_max_spectral_norm": 0.008279582485556602, + "layer_10_update_fnorm": 0.30177852511405945, + "layer_10_max_l1_linf_norm": 0.3968650698661804, + "layer_10_max_spectral_norm": 0.008936122059822083, + "layer_11_update_fnorm": 0.3008907437324524, + 
"layer_11_max_l1_linf_norm": 0.4171295166015625, + "layer_11_max_spectral_norm": 0.009355607442557812, + "layer_12_update_fnorm": 0.3007761538028717, + "layer_12_max_l1_linf_norm": 0.4259588420391083, + "layer_12_max_spectral_norm": 0.0096461595967412, + "total_sharpness": 0.0028404220938682556, + "ip_v_neg_g": 0.003253393340855837, + "cos_v_neg_g": 0.0016234739450737834, + "v_norm": 1.684403657913208, + "g_norm": 1.189720869064331, + "hv_norm": 0.2937246561050415, + "cos_v_hv": 0.01628878340125084, + "hg_norm": 14.609124183654785, + "cos_g_hg": 0.5017565488815308, + "v_parallel_norm": 0.00011366645048838109, + "v_perp_norm": 1.684403657913208, + "layer_1_v_norm": 0.2846275269985199, + "layer_1_cos_v_neg_g": 0.002023454988375306, + "layer_2_v_norm": 0.27733296155929565, + "layer_2_cos_v_neg_g": 0.0022174078039824963, + "layer_3_v_norm": 0.2807132601737976, + "layer_3_cos_v_neg_g": 0.002047085203230381, + "layer_4_v_norm": 0.28259044885635376, + "layer_4_cos_v_neg_g": 0.0023896002676337957, + "layer_5_v_norm": 0.2980172634124756, + "layer_5_cos_v_neg_g": 0.0024605782236903906, + "layer_6_v_norm": 0.2999271750450134, + "layer_6_cos_v_neg_g": 0.003209353657439351, + "layer_7_v_norm": 0.2998150587081909, + "layer_7_cos_v_neg_g": 0.004645492881536484, + "layer_8_v_norm": 0.30115169286727905, + "layer_8_cos_v_neg_g": 0.003919673152267933, + "layer_9_v_norm": 0.30189749598503113, + "layer_9_cos_v_neg_g": 0.0027666818350553513, + "layer_10_v_norm": 0.30177852511405945, + "layer_10_cos_v_neg_g": 0.002957940800115466, + "layer_11_v_norm": 0.3008907437324524, + "layer_11_cos_v_neg_g": 0.002984004095196724, + "layer_12_v_norm": 0.3007761538028717, + "layer_12_cos_v_neg_g": 0.0027289839927107096, + "layer_1_sharpness": 0.0037414701655507088, + "layer_2_sharpness": 0.0009252516319975257, + "layer_3_sharpness": 0.0015471415827050805, + "layer_4_sharpness": 0.0008111953502520919, + "layer_5_sharpness": 0.0009728923323564231, + "layer_6_sharpness": 0.001864225952886045, + "layer_7_sharpness": 0.0020194989629089832, + "layer_8_sharpness": 0.0014507519081234932, + "layer_9_sharpness": 0.0009818911785259843, + "layer_10_sharpness": 0.0007686725584790111, + "layer_11_sharpness": 0.0005923407152295113, + "layer_12_sharpness": 0.0006490534287877381 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..c014327cb82ea643c3c934e7bd63512df74bd7df --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.684679388999939, + "total_l1_linf_norm": 13801.755859375, + "total_spectral_norm": 1.6846792697906494, + "layer_1_update_fnorm": 0.2881244719028473, + "layer_1_max_l1_linf_norm": 0.37702304124832153, + "layer_1_max_spectral_norm": 0.008453655056655407, + "layer_2_update_fnorm": 0.27196794748306274, + "layer_2_max_l1_linf_norm": 0.4603807330131531, + "layer_2_max_spectral_norm": 0.010361204855144024, + "layer_3_update_fnorm": 0.2822690010070801, + "layer_3_max_l1_linf_norm": 0.45186543464660645, + "layer_3_max_spectral_norm": 0.010165765881538391, + "layer_4_update_fnorm": 0.2969304025173187, + "layer_4_max_l1_linf_norm": 0.42137783765792847, + "layer_4_max_spectral_norm": 0.009451871737837791, + "layer_5_update_fnorm": 0.2993125915527344, + 
"layer_5_max_l1_linf_norm": 0.40075117349624634, + "layer_5_max_spectral_norm": 0.008999386802315712, + "layer_6_update_fnorm": 0.30127665400505066, + "layer_6_max_l1_linf_norm": 0.35504475235939026, + "layer_6_max_spectral_norm": 0.008068902418017387, + "layer_7_update_fnorm": 0.3004707992076874, + "layer_7_max_l1_linf_norm": 0.3525756895542145, + "layer_7_max_spectral_norm": 0.007908727042376995, + "layer_8_update_fnorm": 0.30148205161094666, + "layer_8_max_l1_linf_norm": 0.3455016016960144, + "layer_8_max_spectral_norm": 0.007779007311910391, + "layer_9_update_fnorm": 0.30208033323287964, + "layer_9_max_l1_linf_norm": 0.38228005170822144, + "layer_9_max_spectral_norm": 0.008561239577829838, + "layer_10_update_fnorm": 0.30189722776412964, + "layer_10_max_l1_linf_norm": 0.41900986433029175, + "layer_10_max_spectral_norm": 0.009340048767626286, + "layer_11_update_fnorm": 0.3009566068649292, + "layer_11_max_l1_linf_norm": 0.44062626361846924, + "layer_11_max_spectral_norm": 0.009776169434189796, + "layer_12_update_fnorm": 0.30054646730422974, + "layer_12_max_l1_linf_norm": 0.4581787586212158, + "layer_12_max_spectral_norm": 0.010186522267758846, + "total_sharpness": 0.00350221642293036, + "ip_v_neg_g": 0.0033943597227334976, + "cos_v_neg_g": 0.001637325040064752, + "v_norm": 1.684679388999939, + "g_norm": 1.230568289756775, + "hv_norm": 0.40216192603111267, + "cos_v_hv": 0.01467098668217659, + "hg_norm": 17.399805068969727, + "cos_g_hg": 0.48941561579704285, + "v_parallel_norm": 0.00020168875926174223, + "v_perp_norm": 1.684679388999939, + "layer_1_v_norm": 0.2881244719028473, + "layer_1_cos_v_neg_g": 0.003359097521752119, + "layer_2_v_norm": 0.27196794748306274, + "layer_2_cos_v_neg_g": 0.0029568902682513, + "layer_3_v_norm": 0.2822690010070801, + "layer_3_cos_v_neg_g": 0.0014498940436169505, + "layer_4_v_norm": 0.2969304025173187, + "layer_4_cos_v_neg_g": 0.001914297346957028, + "layer_5_v_norm": 0.2993125915527344, + "layer_5_cos_v_neg_g": 0.001873733359389007, + "layer_6_v_norm": 0.30127665400505066, + "layer_6_cos_v_neg_g": 0.0025777213741093874, + "layer_7_v_norm": 0.3004707992076874, + "layer_7_cos_v_neg_g": 0.003117963904514909, + "layer_8_v_norm": 0.30148208141326904, + "layer_8_cos_v_neg_g": 0.00293160998262465, + "layer_9_v_norm": 0.30208033323287964, + "layer_9_cos_v_neg_g": 0.0027524742763489485, + "layer_10_v_norm": 0.30189722776412964, + "layer_10_cos_v_neg_g": 0.0026921106036752462, + "layer_11_v_norm": 0.3009566068649292, + "layer_11_cos_v_neg_g": 0.003171474440023303, + "layer_12_v_norm": 0.30054646730422974, + "layer_12_cos_v_neg_g": 0.004707786254584789, + "layer_1_sharpness": 0.011312141083180904, + "layer_2_sharpness": 0.008344891481101513, + "layer_3_sharpness": 0.0024316213093698025, + "layer_4_sharpness": 0.0008385967812500894, + "layer_5_sharpness": 0.0009094311972148716, + "layer_6_sharpness": 0.0011521305423229933, + "layer_7_sharpness": 0.0013084782985970378, + "layer_8_sharpness": 0.0012249043211340904, + "layer_9_sharpness": 0.0009505603229627013, + "layer_10_sharpness": 0.0008583575836382806, + "layer_11_sharpness": 0.0007455934537574649, + "layer_12_sharpness": 0.0013555519981309772 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..c3a0c7daacd115fc8e7e63b329eb87cd812b7935 --- /dev/null +++ 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6834239959716797, + "total_l1_linf_norm": 13781.3173828125, + "total_spectral_norm": 1.6834243535995483, + "layer_1_update_fnorm": 0.28816527128219604, + "layer_1_max_l1_linf_norm": 0.4050653576850891, + "layer_1_max_spectral_norm": 0.00907869078218937, + "layer_2_update_fnorm": 0.27651089429855347, + "layer_2_max_l1_linf_norm": 0.4761848449707031, + "layer_2_max_spectral_norm": 0.010751890949904919, + "layer_3_update_fnorm": 0.2827869653701782, + "layer_3_max_l1_linf_norm": 0.4725579619407654, + "layer_3_max_spectral_norm": 0.010534304194152355, + "layer_4_update_fnorm": 0.2959630489349365, + "layer_4_max_l1_linf_norm": 0.45316818356513977, + "layer_4_max_spectral_norm": 0.01018119603395462, + "layer_5_update_fnorm": 0.2980165183544159, + "layer_5_max_l1_linf_norm": 0.42048728466033936, + "layer_5_max_spectral_norm": 0.009359827265143394, + "layer_6_update_fnorm": 0.3009589910507202, + "layer_6_max_l1_linf_norm": 0.39674368500709534, + "layer_6_max_spectral_norm": 0.008835975080728531, + "layer_7_update_fnorm": 0.29987385869026184, + "layer_7_max_l1_linf_norm": 0.38385987281799316, + "layer_7_max_spectral_norm": 0.008624101057648659, + "layer_8_update_fnorm": 0.30017194151878357, + "layer_8_max_l1_linf_norm": 0.3574502468109131, + "layer_8_max_spectral_norm": 0.008024713024497032, + "layer_9_update_fnorm": 0.3013535439968109, + "layer_9_max_l1_linf_norm": 0.3842964470386505, + "layer_9_max_spectral_norm": 0.00865199975669384, + "layer_10_update_fnorm": 0.30174311995506287, + "layer_10_max_l1_linf_norm": 0.4095586836338043, + "layer_10_max_spectral_norm": 0.00922673661261797, + "layer_11_update_fnorm": 0.3005237579345703, + "layer_11_max_l1_linf_norm": 0.4330816864967346, + "layer_11_max_spectral_norm": 0.009708012454211712, + "layer_12_update_fnorm": 0.30073803663253784, + "layer_12_max_l1_linf_norm": 0.4202900230884552, + "layer_12_max_spectral_norm": 0.009502192959189415, + "total_sharpness": 0.0029890311416238546, + "ip_v_neg_g": 0.004993450362235308, + "cos_v_neg_g": 0.0022297592367976904, + "v_norm": 1.6834239959716797, + "g_norm": 1.330298900604248, + "hv_norm": 0.31839218735694885, + "cos_v_hv": 0.015803802758455276, + "hg_norm": 25.646617889404297, + "cos_g_hg": 0.5435435175895691, + "v_parallel_norm": 0.00016261477139778435, + "v_perp_norm": 1.6834239959716797, + "layer_1_v_norm": 0.28816527128219604, + "layer_1_cos_v_neg_g": 0.0033421532716602087, + "layer_2_v_norm": 0.27651089429855347, + "layer_2_cos_v_neg_g": 0.0048042829148471355, + "layer_3_v_norm": 0.28278693556785583, + "layer_3_cos_v_neg_g": 0.0049441661685705185, + "layer_4_v_norm": 0.2959630489349365, + "layer_4_cos_v_neg_g": 0.0034177436027675867, + "layer_5_v_norm": 0.2980165183544159, + "layer_5_cos_v_neg_g": 0.003236396936699748, + "layer_6_v_norm": 0.3009589910507202, + "layer_6_cos_v_neg_g": 0.0030986506026238203, + "layer_7_v_norm": 0.29987385869026184, + "layer_7_cos_v_neg_g": 0.004225989803671837, + "layer_8_v_norm": 0.30017194151878357, + "layer_8_cos_v_neg_g": 0.003786020213738084, + "layer_9_v_norm": 0.3013535439968109, + "layer_9_cos_v_neg_g": 0.004547345917671919, + "layer_10_v_norm": 0.30174311995506287, + "layer_10_cos_v_neg_g": 0.0039038527756929398, + "layer_11_v_norm": 0.3005237281322479, + "layer_11_cos_v_neg_g": 0.0028409359510987997, + "layer_12_v_norm": 0.30073803663253784, + "layer_12_cos_v_neg_g": 0.0029148308094590902, + "layer_1_sharpness": 
0.006470326334238052, + "layer_2_sharpness": 0.0012423779116943479, + "layer_3_sharpness": 0.0017994643421843648, + "layer_4_sharpness": 0.0009051274973899126, + "layer_5_sharpness": 0.0012034793617203832, + "layer_6_sharpness": 0.0015545734204351902, + "layer_7_sharpness": 0.0017041511600837111, + "layer_8_sharpness": 0.0014148701447993517, + "layer_9_sharpness": 0.0010898065520450473, + "layer_10_sharpness": 0.0008112937211990356, + "layer_11_sharpness": 0.0006184378289617598, + "layer_12_sharpness": 0.0007706937612965703 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_8000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..1d3f6eb2421f9335c89c27cd414b0d6873d5fddb --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6767820119857788, + "total_l1_linf_norm": 13740.4453125, + "total_spectral_norm": 1.6767820119857788, + "layer_1_update_fnorm": 0.28921443223953247, + "layer_1_max_l1_linf_norm": 0.37440577149391174, + "layer_1_max_spectral_norm": 0.0083831287920475, + "layer_2_update_fnorm": 0.27694493532180786, + "layer_2_max_l1_linf_norm": 0.47199565172195435, + "layer_2_max_spectral_norm": 0.010689565911889076, + "layer_3_update_fnorm": 0.2858898937702179, + "layer_3_max_l1_linf_norm": 0.44570815563201904, + "layer_3_max_spectral_norm": 0.009980732575058937, + "layer_4_update_fnorm": 0.2971956729888916, + "layer_4_max_l1_linf_norm": 0.41746607422828674, + "layer_4_max_spectral_norm": 0.009386075660586357, + "layer_5_update_fnorm": 0.2986457943916321, + "layer_5_max_l1_linf_norm": 0.3893362283706665, + "layer_5_max_spectral_norm": 0.008832946419715881, + "layer_6_update_fnorm": 0.30109938979148865, + "layer_6_max_l1_linf_norm": 0.3499668538570404, + "layer_6_max_spectral_norm": 0.007830602116882801, + "layer_7_update_fnorm": 0.2998653054237366, + "layer_7_max_l1_linf_norm": 0.33975812792778015, + "layer_7_max_spectral_norm": 0.00766085647046566, + "layer_8_update_fnorm": 0.30075109004974365, + "layer_8_max_l1_linf_norm": 0.322198748588562, + "layer_8_max_spectral_norm": 0.007323940750211477, + "layer_9_update_fnorm": 0.30189770460128784, + "layer_9_max_l1_linf_norm": 0.3563545048236847, + "layer_9_max_spectral_norm": 0.008031840436160564, + "layer_10_update_fnorm": 0.30186763405799866, + "layer_10_max_l1_linf_norm": 0.4034164547920227, + "layer_10_max_spectral_norm": 0.009086254984140396, + "layer_11_update_fnorm": 0.3011518716812134, + "layer_11_max_l1_linf_norm": 0.43568670749664307, + "layer_11_max_spectral_norm": 0.009770335629582405, + "layer_12_update_fnorm": 0.30104807019233704, + "layer_12_max_l1_linf_norm": 0.4215284287929535, + "layer_12_max_spectral_norm": 0.009499749168753624, + "total_sharpness": 0.0028225763235241175, + "ip_v_neg_g": 0.0038010289426892996, + "cos_v_neg_g": 0.0018622542265802622, + "v_norm": 1.6767820119857788, + "g_norm": 1.217266321182251, + "hv_norm": 0.30181851983070374, + "cos_v_hv": 0.01568109728395939, + "hg_norm": 20.376873016357422, + "cos_g_hg": 0.4891733229160309, + "v_parallel_norm": 0.00016327605408150703, + "v_perp_norm": 1.6767820119857788, + "layer_1_v_norm": 0.28921443223953247, + "layer_1_cos_v_neg_g": 0.0029761537443846464, + "layer_2_v_norm": 0.27694493532180786, + "layer_2_cos_v_neg_g": 0.0024052257649600506, + "layer_3_v_norm": 
0.2858898639678955, + "layer_3_cos_v_neg_g": 0.002549388911575079, + "layer_4_v_norm": 0.2971956729888916, + "layer_4_cos_v_neg_g": 0.002077313605695963, + "layer_5_v_norm": 0.2986457943916321, + "layer_5_cos_v_neg_g": 0.002915403340011835, + "layer_6_v_norm": 0.30109938979148865, + "layer_6_cos_v_neg_g": 0.003675895743072033, + "layer_7_v_norm": 0.2998653054237366, + "layer_7_cos_v_neg_g": 0.003891055239364505, + "layer_8_v_norm": 0.30075109004974365, + "layer_8_cos_v_neg_g": 0.0035872338339686394, + "layer_9_v_norm": 0.30189770460128784, + "layer_9_cos_v_neg_g": 0.003510397393256426, + "layer_10_v_norm": 0.30186763405799866, + "layer_10_cos_v_neg_g": 0.0035006057005375624, + "layer_11_v_norm": 0.3011518716812134, + "layer_11_cos_v_neg_g": 0.003343605902045965, + "layer_12_v_norm": 0.30104807019233704, + "layer_12_cos_v_neg_g": 0.002421698532998562, + "layer_1_sharpness": 0.0046788668259978294, + "layer_2_sharpness": 0.0018025727476924658, + "layer_3_sharpness": 0.0009620534256100655, + "layer_4_sharpness": 0.000767753750551492, + "layer_5_sharpness": 0.001085914671421051, + "layer_6_sharpness": 0.0012592431157827377, + "layer_7_sharpness": 0.0016967403935268521, + "layer_8_sharpness": 0.0013066946994513273, + "layer_9_sharpness": 0.0009497144492343068, + "layer_10_sharpness": 0.0007768027717247605, + "layer_11_sharpness": 0.0007224493310786784, + "layer_12_sharpness": 0.0007114096079021692 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..034477c1635ec744654c6e920b058c524a30e11b --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6707284450531006, + "total_l1_linf_norm": 13681.251953125, + "total_spectral_norm": 1.6707288026809692, + "layer_1_update_fnorm": 0.2880213260650635, + "layer_1_max_l1_linf_norm": 0.37434661388397217, + "layer_1_max_spectral_norm": 0.008493367582559586, + "layer_2_update_fnorm": 0.2757885456085205, + "layer_2_max_l1_linf_norm": 0.43311285972595215, + "layer_2_max_spectral_norm": 0.00987685564905405, + "layer_3_update_fnorm": 0.2836778163909912, + "layer_3_max_l1_linf_norm": 0.43319737911224365, + "layer_3_max_spectral_norm": 0.009789912961423397, + "layer_4_update_fnorm": 0.2957802712917328, + "layer_4_max_l1_linf_norm": 0.4165491759777069, + "layer_4_max_spectral_norm": 0.009382293559610844, + "layer_5_update_fnorm": 0.29814502596855164, + "layer_5_max_l1_linf_norm": 0.387614369392395, + "layer_5_max_spectral_norm": 0.008785855956375599, + "layer_6_update_fnorm": 0.3000638782978058, + "layer_6_max_l1_linf_norm": 0.336273729801178, + "layer_6_max_spectral_norm": 0.007593904621899128, + "layer_7_update_fnorm": 0.29912465810775757, + "layer_7_max_l1_linf_norm": 0.33627447485923767, + "layer_7_max_spectral_norm": 0.007582439575344324, + "layer_8_update_fnorm": 0.2994702458381653, + "layer_8_max_l1_linf_norm": 0.3199980556964874, + "layer_8_max_spectral_norm": 0.0072807930409908295, + "layer_9_update_fnorm": 0.3000714182853699, + "layer_9_max_l1_linf_norm": 0.3561807870864868, + "layer_9_max_spectral_norm": 0.008070295676589012, + "layer_10_update_fnorm": 0.2997229993343353, + "layer_10_max_l1_linf_norm": 0.40606480836868286, + "layer_10_max_spectral_norm": 0.009194250218570232, + 
"layer_11_update_fnorm": 0.2985643446445465, + "layer_11_max_l1_linf_norm": 0.4161742627620697, + "layer_11_max_spectral_norm": 0.009306827560067177, + "layer_12_update_fnorm": 0.29976505041122437, + "layer_12_max_l1_linf_norm": 0.4283764064311981, + "layer_12_max_spectral_norm": 0.009604264982044697, + "total_sharpness": 0.0029733730480074883, + "ip_v_neg_g": 0.00414699874818325, + "cos_v_neg_g": 0.002076920820400119, + "v_norm": 1.6707284450531006, + "g_norm": 1.195110559463501, + "hv_norm": 0.3115757405757904, + "cos_v_hv": 0.01594379171729088, + "hg_norm": 17.52161407470703, + "cos_g_hg": 0.4637271463871002, + "v_parallel_norm": 0.00014256648137234151, + "v_perp_norm": 1.6707284450531006, + "layer_1_v_norm": 0.2880213260650635, + "layer_1_cos_v_neg_g": 0.003416365012526512, + "layer_2_v_norm": 0.2757885456085205, + "layer_2_cos_v_neg_g": 0.004035592544823885, + "layer_3_v_norm": 0.2836778163909912, + "layer_3_cos_v_neg_g": 0.0030422795098274946, + "layer_4_v_norm": 0.2957802712917328, + "layer_4_cos_v_neg_g": 0.003248795634135604, + "layer_5_v_norm": 0.29814502596855164, + "layer_5_cos_v_neg_g": 0.00296667474322021, + "layer_6_v_norm": 0.3000638782978058, + "layer_6_cos_v_neg_g": 0.003289044601842761, + "layer_7_v_norm": 0.29912465810775757, + "layer_7_cos_v_neg_g": 0.0037719099782407284, + "layer_8_v_norm": 0.2994702458381653, + "layer_8_cos_v_neg_g": 0.0037834213580936193, + "layer_9_v_norm": 0.3000714182853699, + "layer_9_cos_v_neg_g": 0.003702537389472127, + "layer_10_v_norm": 0.2997229993343353, + "layer_10_cos_v_neg_g": 0.003958395216614008, + "layer_11_v_norm": 0.2985643446445465, + "layer_11_cos_v_neg_g": 0.004153385758399963, + "layer_12_v_norm": 0.29976505041122437, + "layer_12_cos_v_neg_g": 0.003288630163297057, + "layer_1_sharpness": 0.004301551263779402, + "layer_2_sharpness": 0.0013063319493085146, + "layer_3_sharpness": 0.001555620227009058, + "layer_4_sharpness": 0.0008087796741165221, + "layer_5_sharpness": 0.0011209063231945038, + "layer_6_sharpness": 0.001209781737998128, + "layer_7_sharpness": 0.0015374793438240886, + "layer_8_sharpness": 0.0014785241801291704, + "layer_9_sharpness": 0.0011800715001299977, + "layer_10_sharpness": 0.0007752167875878513, + "layer_11_sharpness": 0.0008400346850976348, + "layer_12_sharpness": 0.0023083353880792856 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..a24510897c29d2903b0b97c068dabf474bf0528b --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6786047220230103, + "total_l1_linf_norm": 13750.984375, + "total_spectral_norm": 1.6786047220230103, + "layer_1_update_fnorm": 0.29025980830192566, + "layer_1_max_l1_linf_norm": 0.38288015127182007, + "layer_1_max_spectral_norm": 0.008529930375516415, + "layer_2_update_fnorm": 0.2785215973854065, + "layer_2_max_l1_linf_norm": 0.4587157964706421, + "layer_2_max_spectral_norm": 0.010330001823604107, + "layer_3_update_fnorm": 0.2844948470592499, + "layer_3_max_l1_linf_norm": 0.45412135124206543, + "layer_3_max_spectral_norm": 0.010126782581210136, + "layer_4_update_fnorm": 0.29688844084739685, + "layer_4_max_l1_linf_norm": 0.42940065264701843, + "layer_4_max_spectral_norm": 0.009574780240654945, + "layer_5_update_fnorm": 
0.2979685962200165, + "layer_5_max_l1_linf_norm": 0.38744980096817017, + "layer_5_max_spectral_norm": 0.008716857060790062, + "layer_6_update_fnorm": 0.3002115786075592, + "layer_6_max_l1_linf_norm": 0.3587138056755066, + "layer_6_max_spectral_norm": 0.008044716902077198, + "layer_7_update_fnorm": 0.29939624667167664, + "layer_7_max_l1_linf_norm": 0.3523457944393158, + "layer_7_max_spectral_norm": 0.007874330505728722, + "layer_8_update_fnorm": 0.29997867345809937, + "layer_8_max_l1_linf_norm": 0.3459678292274475, + "layer_8_max_spectral_norm": 0.00778607465326786, + "layer_9_update_fnorm": 0.30064618587493896, + "layer_9_max_l1_linf_norm": 0.36870840191841125, + "layer_9_max_spectral_norm": 0.008321287110447884, + "layer_10_update_fnorm": 0.30064821243286133, + "layer_10_max_l1_linf_norm": 0.4080708622932434, + "layer_10_max_spectral_norm": 0.009099503979086876, + "layer_11_update_fnorm": 0.30012500286102295, + "layer_11_max_l1_linf_norm": 0.41763436794281006, + "layer_11_max_spectral_norm": 0.009326892904937267, + "layer_12_update_fnorm": 0.30050286650657654, + "layer_12_max_l1_linf_norm": 0.41347092390060425, + "layer_12_max_spectral_norm": 0.009393343701958656, + "total_sharpness": 0.0030309478752315044, + "ip_v_neg_g": 0.0031828025821596384, + "cos_v_neg_g": 0.0014750631526112556, + "v_norm": 1.6786047220230103, + "g_norm": 1.2854365110397339, + "hv_norm": 0.31862005591392517, + "cos_v_hv": 0.015968121588230133, + "hg_norm": 36.51346969604492, + "cos_g_hg": 0.5072816610336304, + "v_parallel_norm": 0.0001435411104466766, + "v_perp_norm": 1.6786047220230103, + "layer_1_v_norm": 0.29025980830192566, + "layer_1_cos_v_neg_g": 0.00055866310140118, + "layer_2_v_norm": 0.2785215973854065, + "layer_2_cos_v_neg_g": 0.00043092446867376566, + "layer_3_v_norm": 0.2844948470592499, + "layer_3_cos_v_neg_g": 0.00253425189293921, + "layer_4_v_norm": 0.29688844084739685, + "layer_4_cos_v_neg_g": 0.0022731239441782236, + "layer_5_v_norm": 0.2979685962200165, + "layer_5_cos_v_neg_g": 0.003429584437981248, + "layer_6_v_norm": 0.3002115488052368, + "layer_6_cos_v_neg_g": 0.002562165493145585, + "layer_7_v_norm": 0.29939624667167664, + "layer_7_cos_v_neg_g": 0.0035403487272560596, + "layer_8_v_norm": 0.29997867345809937, + "layer_8_cos_v_neg_g": 0.002682307967916131, + "layer_9_v_norm": 0.30064618587493896, + "layer_9_cos_v_neg_g": 0.0035831767600029707, + "layer_10_v_norm": 0.30064821243286133, + "layer_10_cos_v_neg_g": 0.004435560200363398, + "layer_11_v_norm": 0.30012500286102295, + "layer_11_cos_v_neg_g": 0.0028899575117975473, + "layer_12_v_norm": 0.30050286650657654, + "layer_12_cos_v_neg_g": 0.0022605268750339746, + "layer_1_sharpness": 0.0043657212518155575, + "layer_2_sharpness": 0.0013501867651939392, + "layer_3_sharpness": 0.0014101597480475903, + "layer_4_sharpness": 0.0009887367486953735, + "layer_5_sharpness": 0.0011277215089648962, + "layer_6_sharpness": 0.0014957459643483162, + "layer_7_sharpness": 0.0019339816644787788, + "layer_8_sharpness": 0.0014633116079494357, + "layer_9_sharpness": 0.0010060780914500356, + "layer_10_sharpness": 0.0007755572441965342, + "layer_11_sharpness": 0.0006606337265111506, + "layer_12_sharpness": 0.0007552144816145301 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..34fee37783be3fb0937a156b2ca00bb6f511dece 
--- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.6738524436950684, + "total_l1_linf_norm": 13707.3330078125, + "total_spectral_norm": 1.6738520860671997, + "layer_1_update_fnorm": 0.28758394718170166, + "layer_1_max_l1_linf_norm": 0.39730972051620483, + "layer_1_max_spectral_norm": 0.008930455893278122, + "layer_2_update_fnorm": 0.2720586955547333, + "layer_2_max_l1_linf_norm": 0.4609520733356476, + "layer_2_max_spectral_norm": 0.010386163368821144, + "layer_3_update_fnorm": 0.2815016508102417, + "layer_3_max_l1_linf_norm": 0.4395688474178314, + "layer_3_max_spectral_norm": 0.009905066341161728, + "layer_4_update_fnorm": 0.29649320244789124, + "layer_4_max_l1_linf_norm": 0.41826242208480835, + "layer_4_max_spectral_norm": 0.009398362599313259, + "layer_5_update_fnorm": 0.29751870036125183, + "layer_5_max_l1_linf_norm": 0.3953048586845398, + "layer_5_max_spectral_norm": 0.008901497349143028, + "layer_6_update_fnorm": 0.3004131019115448, + "layer_6_max_l1_linf_norm": 0.35739976167678833, + "layer_6_max_spectral_norm": 0.008079303428530693, + "layer_7_update_fnorm": 0.2986774444580078, + "layer_7_max_l1_linf_norm": 0.3510003685951233, + "layer_7_max_spectral_norm": 0.007917990908026695, + "layer_8_update_fnorm": 0.29917582869529724, + "layer_8_max_l1_linf_norm": 0.3586661219596863, + "layer_8_max_spectral_norm": 0.00809071771800518, + "layer_9_update_fnorm": 0.30075958371162415, + "layer_9_max_l1_linf_norm": 0.36936426162719727, + "layer_9_max_spectral_norm": 0.008270484395325184, + "layer_10_update_fnorm": 0.30065494775772095, + "layer_10_max_l1_linf_norm": 0.3960523009300232, + "layer_10_max_spectral_norm": 0.00887957215309143, + "layer_11_update_fnorm": 0.29979977011680603, + "layer_11_max_l1_linf_norm": 0.41254422068595886, + "layer_11_max_spectral_norm": 0.009274525567889214, + "layer_12_update_fnorm": 0.30049797892570496, + "layer_12_max_l1_linf_norm": 0.4102597236633301, + "layer_12_max_spectral_norm": 0.009354508481919765, + "total_sharpness": 0.0038369130343198776, + "ip_v_neg_g": 0.0054199714213609695, + "cos_v_neg_g": 0.002538269152864814, + "v_norm": 1.6738524436950684, + "g_norm": 1.2756811380386353, + "hv_norm": 0.46389490365982056, + "cos_v_hv": 0.013844570145010948, + "hg_norm": 25.407461166381836, + "cos_g_hg": 0.5151644349098206, + "v_parallel_norm": 0.00030082924058660865, + "v_perp_norm": 1.6738524436950684, + "layer_1_v_norm": 0.28758394718170166, + "layer_1_cos_v_neg_g": 0.006771881133317947, + "layer_2_v_norm": 0.2720586955547333, + "layer_2_cos_v_neg_g": 0.003819806035608053, + "layer_3_v_norm": 0.2815016508102417, + "layer_3_cos_v_neg_g": 0.003910621628165245, + "layer_4_v_norm": 0.29649320244789124, + "layer_4_cos_v_neg_g": 0.003514478448778391, + "layer_5_v_norm": 0.29751870036125183, + "layer_5_cos_v_neg_g": 0.0034316470846533775, + "layer_6_v_norm": 0.3004131019115448, + "layer_6_cos_v_neg_g": 0.0030447733588516712, + "layer_7_v_norm": 0.2986774444580078, + "layer_7_cos_v_neg_g": 0.004087100736796856, + "layer_8_v_norm": 0.29917582869529724, + "layer_8_cos_v_neg_g": 0.004079838749021292, + "layer_9_v_norm": 0.30075958371162415, + "layer_9_cos_v_neg_g": 0.0038190477062016726, + "layer_10_v_norm": 0.30065494775772095, + "layer_10_cos_v_neg_g": 0.003831969341263175, + "layer_11_v_norm": 0.29979977011680603, + "layer_11_cos_v_neg_g": 0.003177722217515111, + "layer_12_v_norm": 0.30049797892570496, + "layer_12_cos_v_neg_g": 0.0030994995031505823, + 
"layer_1_sharpness": 0.01083033811300993, + "layer_2_sharpness": 0.006130389869213104, + "layer_3_sharpness": 0.0022764469031244516, + "layer_4_sharpness": 0.0009574946598149836, + "layer_5_sharpness": 0.0013947751140221953, + "layer_6_sharpness": 0.0015840217238292098, + "layer_7_sharpness": 0.0020444656256586313, + "layer_8_sharpness": 0.0015263009117916226, + "layer_9_sharpness": 0.0011272589908912778, + "layer_10_sharpness": 0.0008358224877156317, + "layer_11_sharpness": 0.0006001672009006143, + "layer_12_sharpness": 0.0006414544186554849 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/training_log.txt b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..e659236b39cfc83416dbf2695ff6e932dfe73f84 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.005_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding 
changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else 
write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. 
+ """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. + # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? 
+ grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. 
Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. + loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. 
Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = ; cos_v_neg_g = / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for 
group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
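# Join all metric groups into a single space-separated string for the training log. +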
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
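+ # Worked example of the gradient-accumulation arithmetic set up earlier in this block
+ # (illustrative, using the argparse defaults): B=4, T=64, world_size=1 gives
+ # tokens_per_fwdbwd = 4 * 64 * 1 = 256, so the default --total_batch_size 256 yields
+ # grad_accum_steps = 256 // 256 = 1 (no accumulation); any other --total_batch_size must
+ # remain a multiple of B * T * world_size or the assert above fires.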
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameter that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't.
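+ # Concretely (illustration for a GPT-2 style model): Linear/attention weight matrices and the
+ # wte/wpe embedding tables have dim >= 2 and fall into decay_params, while LayerNorm
+ # weights/biases and Linear biases have dim < 2 and fall into nodecay_params; with the
+ # default --weight_decay 0.0 the two groups end up behaving identically anyway.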
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + 
device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
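+ # Concretely (illustration): with the default --num_iterations 10 the loop runs step = 0..10;
+ # step 10 sets last_step, performs the final validation/sampling above (when those are enabled),
+ # and breaks here without taking an 11th optimizer step.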
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
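+ # Sketch of what this snapshot is for (assuming, as the surrounding code suggests, that the
+ # sharpness analysis uses the realized update as its probe direction v): after the
+ # optimizer.step() calls below we form
+ #   last_training_update[i] = params_before[i] - params_after[i]
+ # i.e. the actual parameter change applied this step, which is what the v_norm / cos_v_neg_g /
+ # total_fnorm entries in the sharpness logs are computed from.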
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026602 +step:0 train loss:11.019228 +step:1 train loss:11.024086 +step:2 train loss:11.016348 +step:3 train loss:11.012337 +step:4 train loss:11.004464 +step:5 train loss:10.995119 +step:6 train loss:10.981576 +step:7 train loss:10.969131 +step:8 train loss:10.954198 +step:9 train loss:10.935842 +step:10 train loss:10.915127 +step:11 train loss:10.898175 +step:12 train loss:10.869276 +step:13 train loss:10.845726 +step:14 train loss:10.815995 +step:15 train loss:10.788208 +step:16 train loss:10.757727 +step:17 train loss:10.727579 +step:18 train loss:10.693663 +step:19 train loss:10.657505 +step:20 train loss:10.617252 +step:21 train loss:10.582929 +step:22 train loss:10.530613 +step:23 train loss:10.497788 +step:24 train loss:10.443578 +step:25 train loss:10.410450 +step:26 train loss:10.356606 +step:27 train loss:10.306190 +step:28 train loss:10.267368 +step:29 train loss:10.217352 +step:30 train loss:10.168247 +step:31 train loss:10.106798 +step:32 train loss:10.048084 +step:33 train loss:9.998808 +step:34 train loss:9.956496 +step:35 train loss:9.883986 +step:36 train loss:9.831174 +step:37 train loss:9.758967 +step:38 
train loss:9.719498 +step:39 train loss:9.647981 +step:40 train loss:9.592960 +step:41 train loss:9.515919 +step:42 train loss:9.475842 +step:43 train loss:9.378466 +step:44 train loss:9.325150 +step:45 train loss:9.259472 +step:46 train loss:9.209506 +step:47 train loss:9.142371 +step:48 train loss:9.067808 +step:49 train loss:8.990666 +step:50 train loss:8.912474 +step:51 train loss:8.845129 +step:52 train loss:8.799441 +step:53 train loss:8.732457 +step:54 train loss:8.664772 +step:55 train loss:8.589218 +step:56 train loss:8.517214 +step:57 train loss:8.471676 +step:58 train loss:8.378853 +step:59 train loss:8.333941 +step:60 train loss:8.262003 +step:61 train loss:8.208105 +step:62 train loss:8.143102 +step:63 train loss:8.125093 +step:64 train loss:8.011005 +step:65 train loss:7.978058 +step:66 train loss:7.931424 +step:67 train loss:7.893657 +step:68 train loss:7.832014 +step:69 train loss:7.774698 +step:70 train loss:7.714579 +step:71 train loss:7.665654 +step:72 train loss:7.658670 +step:73 train loss:7.596319 +step:74 train loss:7.582424 +step:75 train loss:7.512040 +step:76 train loss:7.561900 +step:77 train loss:7.477777 +step:78 train loss:7.279371 +step:79 train loss:7.383029 +step:80 train loss:7.342869 +step:81 train loss:7.383143 +step:82 train loss:7.341402 +step:83 train loss:7.299847 +step:84 train loss:7.249929 +step:85 train loss:7.221681 +step:86 train loss:7.197398 +step:87 train loss:7.156265 +step:88 train loss:7.153668 +step:89 train loss:7.111061 +step:90 train loss:7.143303 +step:91 train loss:7.138770 +step:92 train loss:7.128780 +step:93 train loss:7.062614 +step:94 train loss:7.030995 +step:95 train loss:6.967295 +step:96 train loss:7.059029 +step:97 train loss:6.990199 +step:98 train loss:6.972780 +step:99 train loss:6.937540 +step:100 train loss:6.968010 +step:101 train loss:6.861890 +step:102 train loss:6.858318 +step:103 train loss:6.839957 +step:104 train loss:6.868383 +step:105 train loss:6.919828 +step:106 train loss:6.856204 +step:107 train loss:6.804029 +step:108 train loss:6.818516 +step:109 train loss:6.852078 +step:110 train loss:6.765973 +step:111 train loss:6.772657 +step:112 train loss:6.759972 +step:113 train loss:6.712753 +step:114 train loss:6.774099 +step:115 train loss:6.713443 +step:116 train loss:6.691156 +step:117 train loss:6.624358 +step:118 train loss:6.680637 +step:119 train loss:6.626886 +step:120 train loss:6.638788 +step:121 train loss:6.550662 +step:122 train loss:6.650716 +step:123 train loss:6.563142 +step:124 train loss:6.544559 +step:125 train loss:6.521261 +step:126 train loss:6.618026 +step:127 train loss:6.527010 +step:128 train loss:6.569211 +step:129 train loss:6.549956 +step:130 train loss:6.576417 +step:131 train loss:6.517119 +step:132 train loss:6.439255 +step:133 train loss:6.500162 +step:134 train loss:6.476457 +step:135 train loss:6.381364 +step:136 train loss:6.426245 +step:137 train loss:6.427529 +step:138 train loss:6.367210 +step:139 train loss:6.444179 +step:140 train loss:6.356586 +step:141 train loss:6.455825 +step:142 train loss:6.401188 +step:143 train loss:6.411540 +step:144 train loss:6.385943 +step:145 train loss:6.319873 +step:146 train loss:6.336608 +step:147 train loss:6.388125 +step:148 train loss:6.395613 +step:149 train loss:6.347729 +step:150 train loss:6.353592 +step:151 train loss:6.263986 +step:152 train loss:6.299027 +step:153 train loss:6.281651 +step:154 train loss:6.359832 +step:155 train loss:6.337213 +step:156 train loss:6.360777 +step:157 train loss:6.270782 +step:158 train 
loss:6.256885 +step:159 train loss:6.286835 +step:160 train loss:6.269251 +step:161 train loss:6.259961 +step:162 train loss:6.230788 +step:163 train loss:6.242210 +step:164 train loss:6.252306 +step:165 train loss:6.258807 +step:166 train loss:6.210470 +step:167 train loss:6.213088 +step:168 train loss:6.186661 +step:169 train loss:6.138595 +step:170 train loss:6.101625 +step:171 train loss:6.220932 +step:172 train loss:6.151893 +step:173 train loss:6.200199 +step:174 train loss:6.199237 +step:175 train loss:6.161350 +step:176 train loss:6.114670 +step:177 train loss:6.158362 +step:178 train loss:6.160546 +step:179 train loss:6.120059 +step:180 train loss:6.101428 +step:181 train loss:6.137790 +step:182 train loss:6.070337 +step:183 train loss:6.157285 +step:184 train loss:6.122469 +step:185 train loss:6.046896 +step:186 train loss:6.184776 +step:187 train loss:6.123646 +step:188 train loss:5.950385 +step:189 train loss:6.104900 +step:190 train loss:6.098101 +step:191 train loss:6.021617 +step:192 train loss:5.938824 +step:193 train loss:6.092547 +step:194 train loss:6.106352 +step:195 train loss:6.096942 +step:196 train loss:6.067390 +step:197 train loss:6.061714 +step:198 train loss:6.010220 +step:199 train loss:6.080680 +step:200 train loss:6.123713 +step:201 train loss:6.056018 +step:202 train loss:6.056520 +step:203 train loss:6.013958 +step:204 train loss:6.044253 +step:205 train loss:5.896599 +step:206 train loss:6.032490 +step:207 train loss:6.011646 +step:208 train loss:5.954509 +step:209 train loss:5.942719 +step:210 train loss:5.951668 +step:211 train loss:6.015851 +step:212 train loss:5.973595 +step:213 train loss:5.984146 +step:214 train loss:5.964733 +step:215 train loss:5.985221 +step:216 train loss:5.932530 +step:217 train loss:5.946425 +step:218 train loss:5.916278 +step:219 train loss:5.894296 +step:220 train loss:5.929541 +step:221 train loss:5.883285 +step:222 train loss:5.923285 +step:223 train loss:5.946280 +step:224 train loss:5.930301 +step:225 train loss:5.864781 +step:226 train loss:5.866422 +step:227 train loss:5.931367 +step:228 train loss:5.894382 +step:229 train loss:5.959041 +step:230 train loss:5.827265 +step:231 train loss:5.886144 +step:232 train loss:5.874988 +step:233 train loss:5.848721 +step:234 train loss:5.840225 +step:235 train loss:5.922190 +step:236 train loss:5.867459 +step:237 train loss:5.901213 +step:238 train loss:5.898061 +step:239 train loss:5.806444 +step:240 train loss:5.880591 +step:241 train loss:5.916488 +step:242 train loss:5.894790 +step:243 train loss:5.804384 +step:244 train loss:5.828930 +step:245 train loss:5.803519 +step:246 train loss:5.804872 +step:247 train loss:5.797952 +step:248 train loss:5.751303 +step:249 train loss:5.814649 +step:250 validation loss:5.802169 +step:250 train loss:5.781376 +step:251 train loss:5.819576 +step:252 train loss:5.772327 +step:253 train loss:5.774280 +step:254 train loss:5.739250 +step:255 train loss:5.780350 +step:256 train loss:5.772516 +step:257 train loss:5.828009 +step:258 train loss:5.731591 +step:259 train loss:5.754069 +step:260 train loss:5.716203 +step:261 train loss:5.725365 +step:262 train loss:5.780925 +step:263 train loss:5.753993 +step:264 train loss:5.715389 +step:265 train loss:5.735734 +step:266 train loss:5.703839 +step:267 train loss:5.739875 +step:268 train loss:5.682993 +step:269 train loss:5.706569 +step:270 train loss:5.725969 +step:271 train loss:5.726025 +step:272 train loss:5.669638 +step:273 train loss:5.743877 +step:274 train loss:5.653297 +step:275 train 
loss:5.691648 +step:276 train loss:5.665783 +step:277 train loss:5.662617 +step:278 train loss:5.641353 +step:279 train loss:5.615889 +step:280 train loss:5.685552 +step:281 train loss:5.756541 +step:282 train loss:5.634465 +step:283 train loss:5.657070 +step:284 train loss:5.618613 +step:285 train loss:5.670255 +step:286 train loss:5.651254 +step:287 train loss:5.628352 +step:288 train loss:5.602377 +step:289 train loss:5.626344 +step:290 train loss:5.681707 +step:291 train loss:5.608004 +step:292 train loss:5.661560 +step:293 train loss:5.587069 +step:294 train loss:5.699302 +step:295 train loss:5.603703 +step:296 train loss:5.646424 +step:297 train loss:5.680210 +step:298 train loss:5.572406 +step:299 train loss:5.633126 +step:300 train loss:5.561433 +step:301 train loss:5.584491 +step:302 train loss:5.563487 +step:303 train loss:5.574686 +step:304 train loss:5.600249 +step:305 train loss:5.530698 +step:306 train loss:5.552739 +step:307 train loss:5.565697 +step:308 train loss:5.491271 +step:309 train loss:5.621333 +step:310 train loss:5.592159 +step:311 train loss:5.571938 +step:312 train loss:5.554582 +step:313 train loss:5.572939 +step:314 train loss:5.547492 +step:315 train loss:5.510838 +step:316 train loss:5.502529 +step:317 train loss:5.479472 +step:318 train loss:5.465786 +step:319 train loss:5.549949 +step:320 train loss:5.470922 +step:321 train loss:5.516737 +step:322 train loss:5.510798 +step:323 train loss:5.572740 +step:324 train loss:5.514907 +step:325 train loss:5.536067 +step:326 train loss:5.539663 +step:327 train loss:5.531651 +step:328 train loss:5.487849 +step:329 train loss:5.506623 +step:330 train loss:5.433472 +step:331 train loss:5.458993 +step:332 train loss:5.441643 +step:333 train loss:5.374601 +step:334 train loss:5.471110 +step:335 train loss:5.518757 +step:336 train loss:5.660280 +step:337 train loss:5.539038 +step:338 train loss:5.460616 +step:339 train loss:5.418957 +step:340 train loss:5.415308 +step:341 train loss:5.410034 +step:342 train loss:5.473917 +step:343 train loss:5.452428 +step:344 train loss:5.403795 +step:345 train loss:5.371351 +step:346 train loss:5.429203 +step:347 train loss:5.364138 +step:348 train loss:5.369055 +step:349 train loss:5.304291 +step:350 train loss:5.338339 +step:351 train loss:5.398765 +step:352 train loss:5.358855 +step:353 train loss:5.385190 +step:354 train loss:5.337311 +step:355 train loss:5.381373 +step:356 train loss:5.340466 +step:357 train loss:5.421270 +step:358 train loss:5.447580 +step:359 train loss:5.280559 +step:360 train loss:5.399390 +step:361 train loss:5.378200 +step:362 train loss:5.350108 +step:363 train loss:5.297090 +step:364 train loss:5.419264 +step:365 train loss:5.361283 +step:366 train loss:5.333201 +step:367 train loss:5.358005 +step:368 train loss:5.322769 +step:369 train loss:5.303472 +step:370 train loss:5.349041 +step:371 train loss:5.289365 +step:372 train loss:5.361002 +step:373 train loss:5.295390 +step:374 train loss:5.290603 +step:375 train loss:5.318533 +step:376 train loss:5.308202 +step:377 train loss:5.200086 +step:378 train loss:5.269654 +step:379 train loss:5.316001 +step:380 train loss:5.244720 +step:381 train loss:5.307675 +step:382 train loss:5.318190 +step:383 train loss:5.262256 +step:384 train loss:5.256931 +step:385 train loss:5.236852 +step:386 train loss:5.269500 +step:387 train loss:5.274011 +step:388 train loss:5.233799 +step:389 train loss:5.240766 +step:390 train loss:5.218273 +step:391 train loss:5.234645 +step:392 train loss:5.208372 +step:393 train loss:5.207661 
+step:394 train loss:5.256429 +step:395 train loss:5.178571 +step:396 train loss:5.151880 +step:397 train loss:5.209080 +step:398 train loss:5.201838 +step:399 train loss:5.213253 +step:400 train loss:5.167651 +step:401 train loss:5.220789 +step:402 train loss:5.180962 +step:403 train loss:5.189685 +step:404 train loss:5.162002 +step:405 train loss:5.159092 +step:406 train loss:5.197459 +step:407 train loss:5.173344 +step:408 train loss:5.247082 +step:409 train loss:5.160491 +step:410 train loss:5.133469 +step:411 train loss:5.111343 +step:412 train loss:5.211399 +step:413 train loss:5.087123 +step:414 train loss:5.180175 +step:415 train loss:5.136743 +step:416 train loss:5.138461 +step:417 train loss:5.166382 +step:418 train loss:5.120070 +step:419 train loss:5.101535 +step:420 train loss:5.071490 +step:421 train loss:5.074713 +step:422 train loss:5.063704 +step:423 train loss:5.074190 +step:424 train loss:5.042734 +step:425 train loss:5.124379 +step:426 train loss:5.102208 +step:427 train loss:5.024511 +step:428 train loss:5.091795 +step:429 train loss:4.995838 +step:430 train loss:5.041379 +step:431 train loss:5.085612 +step:432 train loss:5.087305 +step:433 train loss:5.068369 +step:434 train loss:5.030100 +step:435 train loss:5.081081 +step:436 train loss:5.099891 +step:437 train loss:5.058413 +step:438 train loss:5.017437 +step:439 train loss:5.004178 +step:440 train loss:5.055656 +step:441 train loss:4.988489 +step:442 train loss:4.979976 +step:443 train loss:5.007533 +step:444 train loss:5.051554 +step:445 train loss:5.039472 +step:446 train loss:4.971377 +step:447 train loss:4.983878 +step:448 train loss:5.051971 +step:449 train loss:5.001219 +step:450 train loss:4.982512 +step:451 train loss:4.968688 +step:452 train loss:5.058115 +step:453 train loss:4.973125 +step:454 train loss:4.922872 +step:455 train loss:5.001027 +step:456 train loss:4.954895 +step:457 train loss:4.946907 +step:458 train loss:4.965116 +step:459 train loss:4.914293 +step:460 train loss:5.015441 +step:461 train loss:4.968972 +step:462 train loss:4.856572 +step:463 train loss:4.916245 +step:464 train loss:4.973531 +step:465 train loss:4.935933 +step:466 train loss:4.957680 +step:467 train loss:4.910002 +step:468 train loss:4.957810 +step:469 train loss:4.934464 +step:470 train loss:4.887516 +step:471 train loss:4.989156 +step:472 train loss:4.854931 +step:473 train loss:4.932392 +step:474 train loss:4.909646 +step:475 train loss:4.952824 +step:476 train loss:4.912153 +step:477 train loss:4.836476 +step:478 train loss:4.869830 +step:479 train loss:4.857688 +step:480 train loss:4.889274 +step:481 train loss:4.900807 +step:482 train loss:4.812082 +step:483 train loss:4.894024 +step:484 train loss:4.842017 +step:485 train loss:4.806518 +step:486 train loss:4.875227 +step:487 train loss:4.842203 +step:488 train loss:4.840448 +step:489 train loss:4.840476 +step:490 train loss:4.811921 +step:491 train loss:4.830100 +step:492 train loss:4.828579 +step:493 train loss:4.821297 +step:494 train loss:4.831754 +step:495 train loss:4.774312 +step:496 train loss:4.869296 +step:497 train loss:4.746135 +step:498 train loss:4.873489 +step:499 train loss:4.833169 +step:500 validation loss:4.779185 total_sharp:4.5792e-02 L1_sharp:4.6686e-02 L2_sharp:2.8134e-02 L3_sharp:3.1464e-02 L4_sharp:1.9870e-02 L5_sharp:1.6062e-02 L6_sharp:1.4065e-02 L7_sharp:1.3063e-02 L8_sharp:1.0268e-02 L9_sharp:7.8974e-03 L10_sharp:5.8866e-03 L11_sharp:6.0279e-03 L12_sharp:4.8588e-03 total_fnorm:1.2613e+00 total_l1_linf:1.0328e+04 total_spectral:1.2613e+00 
L1_fnorm:2.1804e-01 L2_fnorm:2.0891e-01 L3_fnorm:1.9990e-01 L4_fnorm:2.0297e-01 L5_fnorm:2.0873e-01 L6_fnorm:2.1296e-01 L7_fnorm:2.1395e-01 L8_fnorm:2.1393e-01 L9_fnorm:2.1439e-01 L10_fnorm:2.1511e-01 L11_fnorm:2.1486e-01 L12_fnorm:2.1563e-01 L1_l1linf:1.5805e-01 L2_l1linf:1.5823e-01 L3_l1linf:1.6090e-01 L4_l1linf:1.7940e-01 L5_l1linf:1.8815e-01 L6_l1linf:1.9175e-01 L7_l1linf:2.0309e-01 L8_l1linf:1.9541e-01 L9_l1linf:1.9564e-01 L10_l1linf:1.9776e-01 L11_l1linf:2.0027e-01 L12_l1linf:2.0130e-01 L1_spectral:4.3021e-03 L2_spectral:4.3040e-03 L3_spectral:4.3033e-03 L4_spectral:4.3040e-03 L5_spectral:4.3010e-03 L6_spectral:5.0177e-03 L7_spectral:4.5811e-03 L8_spectral:5.1059e-03 L9_spectral:4.8328e-03 L10_spectral:4.8326e-03 L11_spectral:4.7049e-03 L12_spectral:4.9266e-03 ip_v_neg_g:3.0119e-02 cos_v_neg_g:9.1021e-03 v_norm:1.2613e+00 g_norm:2.6235e+00 hv_norm:2.0190e+00 cos_v_hv:2.8607e-02 hg_norm:1.4026e+02 cos_g_hg:5.9769e-01 v_par:5.0053e-04 v_perp:1.2613e+00 L1_cos_v_neg_g:1.7727e-02 L1_v_norm:2.1804e-01 L2_cos_v_neg_g:2.2091e-02 L2_v_norm:2.0891e-01 L3_cos_v_neg_g:2.3596e-02 L3_v_norm:1.9990e-01 L4_cos_v_neg_g:1.9812e-02 L4_v_norm:2.0297e-01 L5_cos_v_neg_g:1.7646e-02 L5_v_norm:2.0873e-01 L6_cos_v_neg_g:1.6257e-02 L6_v_norm:2.1296e-01 L7_cos_v_neg_g:1.4989e-02 L7_v_norm:2.1395e-01 L8_cos_v_neg_g:1.4342e-02 L8_v_norm:2.1393e-01 L9_cos_v_neg_g:1.3524e-02 L9_v_norm:2.1439e-01 L10_cos_v_neg_g:1.2347e-02 L10_v_norm:2.1511e-01 L11_cos_v_neg_g:1.0146e-02 L11_v_norm:2.1486e-01 L12_cos_v_neg_g:7.4520e-03 L12_v_norm:2.1563e-01 +step:500 train loss:4.821034 +step:501 train loss:4.790823 +step:502 train loss:4.837508 +step:503 train loss:4.760966 +step:504 train loss:4.836542 +step:505 train loss:4.772800 +step:506 train loss:4.764660 +step:507 train loss:4.773656 +step:508 train loss:4.799601 +step:509 train loss:4.789816 +step:510 train loss:4.716300 +step:511 train loss:4.721706 +step:512 train loss:4.710835 +step:513 train loss:4.734587 +step:514 train loss:4.821822 +step:515 train loss:4.752635 +step:516 train loss:4.825489 +step:517 train loss:4.726069 +step:518 train loss:4.724598 +step:519 train loss:4.772995 +step:520 train loss:4.722580 +step:521 train loss:4.713624 +step:522 train loss:4.733639 +step:523 train loss:4.743222 +step:524 train loss:4.672413 +step:525 train loss:4.684518 +step:526 train loss:4.718887 +step:527 train loss:4.696188 +step:528 train loss:4.688627 +step:529 train loss:4.731392 +step:530 train loss:4.673443 +step:531 train loss:4.702776 +step:532 train loss:4.649534 +step:533 train loss:4.642466 +step:534 train loss:4.701333 +step:535 train loss:4.702378 +step:536 train loss:4.750116 +step:537 train loss:4.622970 +step:538 train loss:4.591458 +step:539 train loss:4.713057 +step:540 train loss:4.726098 +step:541 train loss:4.635078 +step:542 train loss:4.629643 +step:543 train loss:4.657848 +step:544 train loss:4.653712 +step:545 train loss:4.633031 +step:546 train loss:4.594389 +step:547 train loss:4.625890 +step:548 train loss:4.513075 +step:549 train loss:4.618246 +step:550 train loss:4.599520 +step:551 train loss:4.596828 +step:552 train loss:4.687514 +step:553 train loss:4.656298 +step:554 train loss:4.609954 +step:555 train loss:4.644081 +step:556 train loss:4.594974 +step:557 train loss:4.559859 +step:558 train loss:4.545509 +step:559 train loss:4.597851 +step:560 train loss:4.668180 +step:561 train loss:4.536347 +step:562 train loss:4.522996 +step:563 train loss:4.604764 +step:564 train loss:4.541264 +step:565 train loss:4.573850 +step:566 train loss:4.572245 
+step:567 train loss:4.576493 +step:568 train loss:4.626394 +step:569 train loss:4.575082 +step:570 train loss:4.502101 +step:571 train loss:4.540795 +step:572 train loss:4.507251 +step:573 train loss:4.544530 +step:574 train loss:4.604383 +step:575 train loss:4.523477 +step:576 train loss:4.549288 +step:577 train loss:4.542288 +step:578 train loss:4.547990 +step:579 train loss:4.578504 +step:580 train loss:4.507169 +step:581 train loss:4.559060 +step:582 train loss:4.533910 +step:583 train loss:4.553459 +step:584 train loss:4.523705 +step:585 train loss:4.494015 +step:586 train loss:4.513077 +step:587 train loss:4.580008 +step:588 train loss:4.485136 +step:589 train loss:4.542462 +step:590 train loss:4.571659 +step:591 train loss:4.476299 +step:592 train loss:4.478086 +step:593 train loss:4.474386 +step:594 train loss:4.443656 +step:595 train loss:4.524692 +step:596 train loss:4.494888 +step:597 train loss:4.499692 +step:598 train loss:4.465162 +step:599 train loss:4.493708 +step:600 train loss:4.432924 +step:601 train loss:4.442992 +step:602 train loss:4.455878 +step:603 train loss:4.479244 +step:604 train loss:4.484685 +step:605 train loss:4.509575 +step:606 train loss:4.459149 +step:607 train loss:4.442331 +step:608 train loss:4.462417 +step:609 train loss:4.433678 +step:610 train loss:4.422247 +step:611 train loss:4.439103 +step:612 train loss:4.454713 +step:613 train loss:4.376755 +step:614 train loss:4.428073 +step:615 train loss:4.484868 +step:616 train loss:4.409162 +step:617 train loss:4.448565 +step:618 train loss:4.398612 +step:619 train loss:4.452814 +step:620 train loss:4.481575 +step:621 train loss:4.391755 +step:622 train loss:4.489464 +step:623 train loss:4.459536 +step:624 train loss:4.415429 +step:625 train loss:4.451626 +step:626 train loss:4.421719 +step:627 train loss:4.433991 +step:628 train loss:4.433403 +step:629 train loss:4.361839 +step:630 train loss:4.417625 +step:631 train loss:4.371050 +step:632 train loss:4.388631 +step:633 train loss:4.410056 +step:634 train loss:4.404127 +step:635 train loss:4.373815 +step:636 train loss:4.446114 +step:637 train loss:4.361718 +step:638 train loss:4.303236 +step:639 train loss:4.424772 +step:640 train loss:4.375475 +step:641 train loss:4.396046 +step:642 train loss:4.441528 +step:643 train loss:4.323984 +step:644 train loss:4.432858 +step:645 train loss:4.372085 +step:646 train loss:4.384582 +step:647 train loss:4.391543 +step:648 train loss:4.473291 +step:649 train loss:4.381637 +step:650 train loss:4.394354 +step:651 train loss:4.337955 +step:652 train loss:4.339338 +step:653 train loss:4.333408 +step:654 train loss:4.356359 +step:655 train loss:4.390563 +step:656 train loss:4.344715 +step:657 train loss:4.391398 +step:658 train loss:4.315859 +step:659 train loss:4.403385 +step:660 train loss:4.385230 +step:661 train loss:4.417562 +step:662 train loss:4.405762 +step:663 train loss:4.402700 +step:664 train loss:4.303943 +step:665 train loss:4.315801 +step:666 train loss:4.327399 +step:667 train loss:4.371120 +step:668 train loss:4.366862 +step:669 train loss:4.350467 +step:670 train loss:4.376195 +step:671 train loss:4.340550 +step:672 train loss:4.303555 +step:673 train loss:4.396988 +step:674 train loss:4.351612 +step:675 train loss:4.300111 +step:676 train loss:4.385896 +step:677 train loss:4.320533 +step:678 train loss:4.304317 +step:679 train loss:4.333149 +step:680 train loss:4.317763 +step:681 train loss:4.363154 +step:682 train loss:4.262282 +step:683 train loss:4.335078 +step:684 train loss:4.389076 +step:685 
train loss:4.325274 +step:686 train loss:4.386643 +step:687 train loss:4.361018 +step:688 train loss:4.284065 +step:689 train loss:4.302415 +step:690 train loss:4.278238 +step:691 train loss:4.321344 +step:692 train loss:4.321045 +step:693 train loss:4.310017 +step:694 train loss:4.316516 +step:695 train loss:4.280464 +step:696 train loss:4.222617 +step:697 train loss:4.363360 +step:698 train loss:4.254779 +step:699 train loss:4.291586 +step:700 train loss:4.330788 +step:701 train loss:4.249928 +step:702 train loss:4.311593 +step:703 train loss:4.272560 +step:704 train loss:4.208322 +step:705 train loss:4.304266 +step:706 train loss:4.179103 +step:707 train loss:4.238107 +step:708 train loss:4.326038 +step:709 train loss:4.292746 +step:710 train loss:4.277403 +step:711 train loss:4.274681 +step:712 train loss:4.267287 +step:713 train loss:4.228042 +step:714 train loss:4.300878 +step:715 train loss:4.193080 +step:716 train loss:4.358536 +step:717 train loss:4.237883 +step:718 train loss:4.317199 +step:719 train loss:4.287413 +step:720 train loss:4.247451 +step:721 train loss:4.309508 +step:722 train loss:4.262735 +step:723 train loss:4.297900 +step:724 train loss:4.297655 +step:725 train loss:4.212010 +step:726 train loss:4.243827 +step:727 train loss:4.272797 +step:728 train loss:4.246228 +step:729 train loss:4.226566 +step:730 train loss:4.284422 +step:731 train loss:4.329149 +step:732 train loss:4.281773 +step:733 train loss:4.262949 +step:734 train loss:4.272956 +step:735 train loss:4.345066 +step:736 train loss:4.252029 +step:737 train loss:4.252311 +step:738 train loss:4.303991 +step:739 train loss:4.224917 +step:740 train loss:4.275445 +step:741 train loss:4.360169 +step:742 train loss:4.237711 +step:743 train loss:4.221522 +step:744 train loss:4.246818 +step:745 train loss:4.157741 +step:746 train loss:4.222462 +step:747 train loss:4.236102 +step:748 train loss:4.220804 +step:749 train loss:4.250063 +step:750 validation loss:4.189315 +step:750 train loss:4.182868 +step:751 train loss:4.248222 +step:752 train loss:4.173086 +step:753 train loss:4.239438 +step:754 train loss:4.231454 +step:755 train loss:4.295615 +step:756 train loss:4.256313 +step:757 train loss:4.319431 +step:758 train loss:4.217778 +step:759 train loss:4.231762 +step:760 train loss:4.191420 +step:761 train loss:4.224682 +step:762 train loss:4.192626 +step:763 train loss:4.201283 +step:764 train loss:4.188992 +step:765 train loss:4.193038 +step:766 train loss:4.261188 +step:767 train loss:4.382053 +step:768 train loss:4.215225 +step:769 train loss:4.232797 +step:770 train loss:4.269654 +step:771 train loss:4.332268 +step:772 train loss:4.243601 +step:773 train loss:4.188447 +step:774 train loss:4.211078 +step:775 train loss:4.204949 +step:776 train loss:4.225900 +step:777 train loss:4.197388 +step:778 train loss:4.141575 +step:779 train loss:4.174358 +step:780 train loss:4.239573 +step:781 train loss:4.177069 +step:782 train loss:4.192402 +step:783 train loss:4.171757 +step:784 train loss:4.177076 +step:785 train loss:4.166615 +step:786 train loss:4.168433 +step:787 train loss:4.121415 +step:788 train loss:4.210022 +step:789 train loss:4.181553 +step:790 train loss:4.154850 +step:791 train loss:4.229356 +step:792 train loss:4.262527 +step:793 train loss:4.217086 +step:794 train loss:4.201485 +step:795 train loss:4.171943 +step:796 train loss:4.463049 +step:797 train loss:4.187246 +step:798 train loss:4.167991 +step:799 train loss:4.183519 +step:800 train loss:4.244960 +step:801 train loss:4.176277 +step:802 train 
loss:4.315728 +step:803 train loss:4.196154 +step:804 train loss:4.146028 +step:805 train loss:4.209644 +step:806 train loss:4.153155 +step:807 train loss:4.170959 +step:808 train loss:4.179293 +step:809 train loss:4.140693 +step:810 train loss:4.125391 +step:811 train loss:4.208009 +step:812 train loss:4.174671 +step:813 train loss:4.189726 +step:814 train loss:4.263437 +step:815 train loss:4.219872 +step:816 train loss:4.142822 +step:817 train loss:4.193808 +step:818 train loss:4.156196 +step:819 train loss:4.148939 +step:820 train loss:4.146057 +step:821 train loss:4.108284 +step:822 train loss:4.104987 +step:823 train loss:4.187681 +step:824 train loss:4.090595 +step:825 train loss:4.080195 +step:826 train loss:4.153361 +step:827 train loss:4.049604 +step:828 train loss:4.127153 +step:829 train loss:4.130173 +step:830 train loss:4.130568 +step:831 train loss:4.172621 +step:832 train loss:4.215442 +step:833 train loss:4.171649 +step:834 train loss:4.161874 +step:835 train loss:4.127739 +step:836 train loss:4.127841 +step:837 train loss:4.120020 +step:838 train loss:4.100067 +step:839 train loss:4.108654 +step:840 train loss:4.142566 +step:841 train loss:4.142118 +step:842 train loss:4.134181 +step:843 train loss:4.142322 +step:844 train loss:4.097108 +step:845 train loss:4.081073 +step:846 train loss:4.186502 +step:847 train loss:4.152388 +step:848 train loss:4.107250 +step:849 train loss:4.129828 +step:850 train loss:4.154738 +step:851 train loss:4.110775 +step:852 train loss:4.205648 +step:853 train loss:4.092883 +step:854 train loss:4.121558 +step:855 train loss:4.125072 +step:856 train loss:4.081793 +step:857 train loss:4.132860 +step:858 train loss:4.167370 +step:859 train loss:4.067713 +step:860 train loss:4.095694 +step:861 train loss:4.140794 +step:862 train loss:4.080019 +step:863 train loss:4.093434 +step:864 train loss:4.081389 +step:865 train loss:4.095322 +step:866 train loss:4.125327 +step:867 train loss:4.249476 +step:868 train loss:4.095445 +step:869 train loss:4.111206 +step:870 train loss:4.052454 +step:871 train loss:4.046069 +step:872 train loss:4.125973 +step:873 train loss:4.091220 +step:874 train loss:4.115624 +step:875 train loss:4.038534 +step:876 train loss:4.119886 +step:877 train loss:4.059287 +step:878 train loss:4.162027 +step:879 train loss:4.051615 +step:880 train loss:4.167953 +step:881 train loss:4.086347 +step:882 train loss:4.053828 +step:883 train loss:4.106694 +step:884 train loss:4.129227 +step:885 train loss:4.062833 +step:886 train loss:4.081162 +step:887 train loss:4.086177 +step:888 train loss:4.195446 +step:889 train loss:4.124010 +step:890 train loss:4.067755 +step:891 train loss:4.028034 +step:892 train loss:4.020392 +step:893 train loss:4.095323 +step:894 train loss:4.057065 +step:895 train loss:4.039167 +step:896 train loss:4.121902 +step:897 train loss:4.054566 +step:898 train loss:4.081274 +step:899 train loss:4.085536 +step:900 train loss:4.115972 +step:901 train loss:4.042183 +step:902 train loss:4.078929 +step:903 train loss:4.182489 +step:904 train loss:4.185006 +step:905 train loss:4.057158 +step:906 train loss:4.076398 +step:907 train loss:4.103852 +step:908 train loss:4.116136 +step:909 train loss:4.058533 +step:910 train loss:4.104106 +step:911 train loss:4.219987 +step:912 train loss:4.023739 +step:913 train loss:4.082809 +step:914 train loss:4.035724 +step:915 train loss:4.073202 +step:916 train loss:4.121026 +step:917 train loss:4.085172 +step:918 train loss:4.155118 +step:919 train loss:4.245038 +step:920 train loss:3.982054 
+step:921 train loss:4.110760 +step:922 train loss:4.079563 +step:923 train loss:4.000212 +step:924 train loss:4.050479 +step:925 train loss:4.011513 +step:926 train loss:4.110894 +step:927 train loss:4.012724 +step:928 train loss:4.093726 +step:929 train loss:4.065289 +step:930 train loss:4.067686 +step:931 train loss:4.101559 +step:932 train loss:4.040062 +step:933 train loss:4.085813 +step:934 train loss:4.122083 +step:935 train loss:4.111646 +step:936 train loss:4.078610 +step:937 train loss:4.079032 +step:938 train loss:4.072779 +step:939 train loss:3.973738 +step:940 train loss:4.072760 +step:941 train loss:4.016549 +step:942 train loss:3.999814 +step:943 train loss:4.102400 +step:944 train loss:4.055411 +step:945 train loss:4.057811 +step:946 train loss:4.071490 +step:947 train loss:4.237720 +step:948 train loss:4.029840 +step:949 train loss:4.071783 +step:950 train loss:4.004972 +step:951 train loss:4.052217 +step:952 train loss:4.094886 +step:953 train loss:4.026279 +step:954 train loss:4.073124 +step:955 train loss:4.007690 +step:956 train loss:4.034535 +step:957 train loss:4.030936 +step:958 train loss:4.106656 +step:959 train loss:4.038445 +step:960 train loss:4.136425 +step:961 train loss:4.075086 +step:962 train loss:4.038836 +step:963 train loss:4.021955 +step:964 train loss:4.050270 +step:965 train loss:3.975178 +step:966 train loss:3.991912 +step:967 train loss:4.053720 +step:968 train loss:4.056333 +step:969 train loss:4.008138 +step:970 train loss:4.058959 +step:971 train loss:4.040991 +step:972 train loss:3.959180 +step:973 train loss:4.065388 +step:974 train loss:3.996382 +step:975 train loss:4.095365 +step:976 train loss:4.042966 +step:977 train loss:4.032714 +step:978 train loss:4.032937 +step:979 train loss:4.022956 +step:980 train loss:4.026671 +step:981 train loss:4.003244 +step:982 train loss:4.019507 +step:983 train loss:4.030581 +step:984 train loss:4.049501 +step:985 train loss:4.016376 +step:986 train loss:4.048023 +step:987 train loss:4.076957 +step:988 train loss:4.057969 +step:989 train loss:4.028106 +step:990 train loss:4.026124 +step:991 train loss:3.940259 +step:992 train loss:4.011631 +step:993 train loss:4.039066 +step:994 train loss:3.970062 +step:995 train loss:3.985633 +step:996 train loss:4.030748 +step:997 train loss:3.992986 +step:998 train loss:3.988915 +step:999 train loss:4.032317 +step:1000 validation loss:3.959555 total_sharp:1.2716e-02 L1_sharp:2.5024e-02 L2_sharp:8.1707e-03 L3_sharp:6.7709e-03 L4_sharp:3.0171e-03 L5_sharp:4.6414e-03 L6_sharp:3.9877e-03 L7_sharp:4.6671e-03 L8_sharp:3.6143e-03 L9_sharp:2.9586e-03 L10_sharp:2.4598e-03 L11_sharp:1.9080e-03 L12_sharp:2.2454e-03 total_fnorm:1.6727e+00 total_l1_linf:1.3735e+04 total_spectral:1.6727e+00 L1_fnorm:3.0261e-01 L2_fnorm:2.8296e-01 L3_fnorm:2.7949e-01 L4_fnorm:2.8770e-01 L5_fnorm:2.9465e-01 L6_fnorm:3.0079e-01 L7_fnorm:2.9955e-01 L8_fnorm:3.0062e-01 L9_fnorm:3.0026e-01 L10_fnorm:3.0048e-01 L11_fnorm:3.0071e-01 L12_fnorm:3.0063e-01 L1_l1linf:2.2812e-01 L2_l1linf:2.4369e-01 L3_l1linf:2.6680e-01 L4_l1linf:2.6893e-01 L5_l1linf:2.7514e-01 L6_l1linf:2.5595e-01 L7_l1linf:2.5267e-01 L8_l1linf:2.6340e-01 L9_l1linf:2.7884e-01 L10_l1linf:2.9207e-01 L11_l1linf:2.8638e-01 L12_l1linf:2.6726e-01 L1_spectral:6.0207e-03 L2_spectral:6.0231e-03 L3_spectral:6.0312e-03 L4_spectral:6.0535e-03 L5_spectral:6.1313e-03 L6_spectral:6.0244e-03 L7_spectral:6.0210e-03 L8_spectral:6.0214e-03 L9_spectral:6.2731e-03 L10_spectral:6.5326e-03 L11_spectral:6.4334e-03 L12_spectral:6.1075e-03 ip_v_neg_g:1.7078e-02 
cos_v_neg_g:7.2572e-03 v_norm:1.6727e+00 g_norm:1.4069e+00 hv_norm:7.8419e-01 cos_v_hv:2.7123e-02 hg_norm:1.5777e+01 cos_g_hg:4.8808e-01 v_par:3.1488e-04 v_perp:1.6727e+00 L1_cos_v_neg_g:1.5178e-02 L1_v_norm:3.0261e-01 L2_cos_v_neg_g:1.8457e-02 L2_v_norm:2.8296e-01 L3_cos_v_neg_g:1.5885e-02 L3_v_norm:2.7949e-01 L4_cos_v_neg_g:1.2978e-02 L4_v_norm:2.8770e-01 L5_cos_v_neg_g:1.3985e-02 L5_v_norm:2.9465e-01 L6_cos_v_neg_g:1.0460e-02 L6_v_norm:3.0079e-01 L7_cos_v_neg_g:1.0830e-02 L7_v_norm:2.9955e-01 L8_cos_v_neg_g:1.0099e-02 L8_v_norm:3.0062e-01 L9_cos_v_neg_g:9.5414e-03 L9_v_norm:3.0026e-01 L10_cos_v_neg_g:9.1736e-03 L10_v_norm:3.0048e-01 L11_cos_v_neg_g:7.4928e-03 L11_v_norm:3.0071e-01 L12_cos_v_neg_g:6.9961e-03 L12_v_norm:3.0063e-01 +step:1000 train loss:4.035928 +step:1001 train loss:4.039236 +step:1002 train loss:4.030735 +step:1003 train loss:4.000095 +step:1004 train loss:3.977890 +step:1005 train loss:3.989731 +step:1006 train loss:4.076118 +step:1007 train loss:4.013678 +step:1008 train loss:4.000308 +step:1009 train loss:4.061746 +step:1010 train loss:4.023807 +step:1011 train loss:4.051767 +step:1012 train loss:3.996404 +step:1013 train loss:3.971467 +step:1014 train loss:3.975356 +step:1015 train loss:4.008789 +step:1016 train loss:4.029127 +step:1017 train loss:3.976953 +step:1018 train loss:4.033806 +step:1019 train loss:3.982211 +step:1020 train loss:3.978326 +step:1021 train loss:4.075153 +step:1022 train loss:3.972502 +step:1023 train loss:3.982969 +step:1024 train loss:4.066309 +step:1025 train loss:4.029016 +step:1026 train loss:3.962529 +step:1027 train loss:4.001000 +step:1028 train loss:4.006984 +step:1029 train loss:3.958082 +step:1030 train loss:4.051059 +step:1031 train loss:4.032305 +step:1032 train loss:4.000355 +step:1033 train loss:3.962660 +step:1034 train loss:4.021449 +step:1035 train loss:4.028992 +step:1036 train loss:3.945400 +step:1037 train loss:4.003900 +step:1038 train loss:4.022735 +step:1039 train loss:4.177598 +step:1040 train loss:4.000044 +step:1041 train loss:3.979127 +step:1042 train loss:4.004021 +step:1043 train loss:4.010807 +step:1044 train loss:3.990859 +step:1045 train loss:4.008170 +step:1046 train loss:3.945081 +step:1047 train loss:3.982714 +step:1048 train loss:3.973010 +step:1049 train loss:4.031343 +step:1050 train loss:3.994481 +step:1051 train loss:3.963726 +step:1052 train loss:4.077298 +step:1053 train loss:3.970343 +step:1054 train loss:3.959390 +step:1055 train loss:4.035377 +step:1056 train loss:3.973791 +step:1057 train loss:3.875270 +step:1058 train loss:3.977094 +step:1059 train loss:3.959568 +step:1060 train loss:3.961245 +step:1061 train loss:4.009150 +step:1062 train loss:3.969983 +step:1063 train loss:3.978413 +step:1064 train loss:3.968753 +step:1065 train loss:3.977529 +step:1066 train loss:3.951795 +step:1067 train loss:3.985511 +step:1068 train loss:3.942941 +step:1069 train loss:3.961107 +step:1070 train loss:3.975464 +step:1071 train loss:3.988317 +step:1072 train loss:4.012473 +step:1073 train loss:3.928611 +step:1074 train loss:3.943835 +step:1075 train loss:3.941932 +step:1076 train loss:4.015225 +step:1077 train loss:3.940919 +step:1078 train loss:3.998822 +step:1079 train loss:4.037037 +step:1080 train loss:3.912215 +step:1081 train loss:3.987364 +step:1082 train loss:3.977592 +step:1083 train loss:3.941078 +step:1084 train loss:3.925604 +step:1085 train loss:3.987083 +step:1086 train loss:3.962539 +step:1087 train loss:3.959193 +step:1088 train loss:3.959552 +step:1089 train loss:3.968020 +step:1090 train 
loss:3.916263 +step:1091 train loss:3.908250 +step:1092 train loss:4.009253 +step:1093 train loss:3.896929 +step:1094 train loss:3.958715 +step:1095 train loss:4.007676 +step:1096 train loss:3.933889 +step:1097 train loss:3.939416 +step:1098 train loss:3.909244 +step:1099 train loss:3.962314 +step:1100 train loss:4.016622 +step:1101 train loss:3.999083 +step:1102 train loss:4.006827 +step:1103 train loss:3.931372 +step:1104 train loss:3.967598 +step:1105 train loss:4.016253 +step:1106 train loss:3.955316 +step:1107 train loss:4.079567 +step:1108 train loss:4.011305 +step:1109 train loss:3.993485 +step:1110 train loss:3.937689 +step:1111 train loss:3.988877 +step:1112 train loss:3.896778 +step:1113 train loss:3.892067 +step:1114 train loss:3.878171 +step:1115 train loss:3.919104 +step:1116 train loss:3.980083 +step:1117 train loss:4.017085 +step:1118 train loss:4.033056 +step:1119 train loss:3.951065 +step:1120 train loss:3.977949 +step:1121 train loss:3.955405 +step:1122 train loss:3.938676 +step:1123 train loss:4.043794 +step:1124 train loss:3.923730 +step:1125 train loss:3.934117 +step:1126 train loss:3.902664 +step:1127 train loss:3.928136 +step:1128 train loss:3.926453 +step:1129 train loss:3.981956 +step:1130 train loss:3.904506 +step:1131 train loss:3.996755 +step:1132 train loss:3.941707 +step:1133 train loss:3.951205 +step:1134 train loss:3.927341 +step:1135 train loss:3.970714 +step:1136 train loss:3.989770 +step:1137 train loss:3.913843 +step:1138 train loss:3.986079 +step:1139 train loss:3.934680 +step:1140 train loss:4.010321 +step:1141 train loss:3.968825 +step:1142 train loss:3.900345 +step:1143 train loss:3.978622 +step:1144 train loss:4.003297 +step:1145 train loss:3.948417 +step:1146 train loss:3.906234 +step:1147 train loss:3.918371 +step:1148 train loss:3.951369 +step:1149 train loss:3.998787 +step:1150 train loss:4.001046 +step:1151 train loss:4.012140 +step:1152 train loss:3.909734 +step:1153 train loss:3.912188 +step:1154 train loss:3.895898 +step:1155 train loss:3.998138 +step:1156 train loss:3.901862 +step:1157 train loss:3.928295 +step:1158 train loss:3.979767 +step:1159 train loss:3.983680 +step:1160 train loss:3.908064 +step:1161 train loss:3.995955 +step:1162 train loss:3.937390 +step:1163 train loss:3.922603 +step:1164 train loss:3.832241 +step:1165 train loss:3.966817 +step:1166 train loss:3.895570 +step:1167 train loss:3.901760 +step:1168 train loss:3.959602 +step:1169 train loss:3.920350 +step:1170 train loss:3.925405 +step:1171 train loss:3.952109 +step:1172 train loss:3.906551 +step:1173 train loss:3.944284 +step:1174 train loss:3.882604 +step:1175 train loss:3.918639 +step:1176 train loss:4.038293 +step:1177 train loss:3.878060 +step:1178 train loss:3.938457 +step:1179 train loss:3.885312 +step:1180 train loss:3.921814 +step:1181 train loss:3.909949 +step:1182 train loss:3.964014 +step:1183 train loss:3.940997 +step:1184 train loss:3.885219 +step:1185 train loss:3.914918 +step:1186 train loss:3.912585 +step:1187 train loss:3.885017 +step:1188 train loss:3.918320 +step:1189 train loss:3.856102 +step:1190 train loss:3.910703 +step:1191 train loss:3.968428 +step:1192 train loss:3.921113 +step:1193 train loss:3.922998 +step:1194 train loss:4.038693 +step:1195 train loss:4.016161 +step:1196 train loss:3.900980 +step:1197 train loss:3.931278 +step:1198 train loss:3.916436 +step:1199 train loss:3.910783 +step:1200 train loss:3.977862 +step:1201 train loss:3.947348 +step:1202 train loss:3.881524 +step:1203 train loss:3.877538 +step:1204 train loss:3.912950 
+step:1205 train loss:3.937072 +step:1206 train loss:3.863637 +step:1207 train loss:3.952213 +step:1208 train loss:3.920570 +step:1209 train loss:3.855712 +step:1210 train loss:3.953757 +step:1211 train loss:3.894385 +step:1212 train loss:3.924972 +step:1213 train loss:3.865047 +step:1214 train loss:3.935957 +step:1215 train loss:3.906810 +step:1216 train loss:3.916171 +step:1217 train loss:3.854219 +step:1218 train loss:3.926782 +step:1219 train loss:3.864625 +step:1220 train loss:3.893263 +step:1221 train loss:3.908489 +step:1222 train loss:3.956650 +step:1223 train loss:3.928429 +step:1224 train loss:3.903421 +step:1225 train loss:3.945657 +step:1226 train loss:3.889728 +step:1227 train loss:3.894700 +step:1228 train loss:3.901995 +step:1229 train loss:3.871747 +step:1230 train loss:3.865337 +step:1231 train loss:3.920083 +step:1232 train loss:3.874800 +step:1233 train loss:3.870302 +step:1234 train loss:3.954974 +step:1235 train loss:3.922124 +step:1236 train loss:3.836797 +step:1237 train loss:3.943282 +step:1238 train loss:3.886870 +step:1239 train loss:3.928359 +step:1240 train loss:3.850613 +step:1241 train loss:3.866485 +step:1242 train loss:3.897851 +step:1243 train loss:3.842908 +step:1244 train loss:3.966776 +step:1245 train loss:3.979447 +step:1246 train loss:3.909125 +step:1247 train loss:3.888557 +step:1248 train loss:3.916466 +step:1249 train loss:3.843163 +step:1250 validation loss:3.841639 +step:1250 train loss:3.864717 +step:1251 train loss:3.933259 +step:1252 train loss:3.881696 +step:1253 train loss:3.832501 +step:1254 train loss:3.871233 +step:1255 train loss:3.860978 +step:1256 train loss:3.910101 +step:1257 train loss:3.887141 +step:1258 train loss:3.943366 +step:1259 train loss:3.915568 +step:1260 train loss:3.831309 +step:1261 train loss:4.074004 +step:1262 train loss:3.910938 +step:1263 train loss:3.861924 +step:1264 train loss:3.884519 +step:1265 train loss:3.928149 +step:1266 train loss:3.876337 +step:1267 train loss:3.884733 +step:1268 train loss:3.894952 +step:1269 train loss:3.892794 +step:1270 train loss:3.815246 +step:1271 train loss:3.824314 +step:1272 train loss:3.859028 +step:1273 train loss:3.910564 +step:1274 train loss:3.878927 +step:1275 train loss:3.908874 +step:1276 train loss:3.904307 +step:1277 train loss:3.912838 +step:1278 train loss:3.855174 +step:1279 train loss:3.864534 +step:1280 train loss:3.882180 +step:1281 train loss:3.930233 +step:1282 train loss:3.868104 +step:1283 train loss:3.933601 +step:1284 train loss:3.877198 +step:1285 train loss:3.926876 +step:1286 train loss:3.827057 +step:1287 train loss:3.862063 +step:1288 train loss:3.892697 +step:1289 train loss:3.959500 +step:1290 train loss:3.913583 +step:1291 train loss:3.873999 +step:1292 train loss:3.857319 +step:1293 train loss:3.853215 +step:1294 train loss:3.899443 +step:1295 train loss:3.884027 +step:1296 train loss:3.925642 +step:1297 train loss:3.881583 +step:1298 train loss:3.900810 +step:1299 train loss:3.938315 +step:1300 train loss:3.859621 +step:1301 train loss:3.902719 +step:1302 train loss:3.859129 +step:1303 train loss:3.901736 +step:1304 train loss:3.930593 +step:1305 train loss:3.904230 +step:1306 train loss:3.899953 +step:1307 train loss:3.880706 +step:1308 train loss:3.837096 +step:1309 train loss:3.853463 +step:1310 train loss:3.841385 +step:1311 train loss:3.843549 +step:1312 train loss:3.921041 +step:1313 train loss:3.834475 +step:1314 train loss:3.842341 +step:1315 train loss:3.881900 +step:1316 train loss:3.859708 +step:1317 train loss:3.751459 +step:1318 
train loss:3.915407 +step:1319 train loss:3.948362 +step:1320 train loss:3.860224 +step:1321 train loss:3.848398 +step:1322 train loss:3.950881 +step:1323 train loss:3.896165 +step:1324 train loss:3.998425 +step:1325 train loss:3.871310 +step:1326 train loss:3.903516 +step:1327 train loss:3.923434 +step:1328 train loss:3.825950 +step:1329 train loss:3.856089 +step:1330 train loss:3.881557 +step:1331 train loss:3.777745 +step:1332 train loss:3.918209 +step:1333 train loss:3.893528 +step:1334 train loss:3.887393 +step:1335 train loss:3.912244 +step:1336 train loss:3.915215 +step:1337 train loss:3.888075 +step:1338 train loss:3.869187 +step:1339 train loss:3.943854 +step:1340 train loss:3.910537 +step:1341 train loss:3.887165 +step:1342 train loss:3.861560 +step:1343 train loss:3.854346 +step:1344 train loss:3.918155 +step:1345 train loss:3.872796 +step:1346 train loss:3.958265 +step:1347 train loss:3.888341 +step:1348 train loss:3.842935 +step:1349 train loss:3.789472 +step:1350 train loss:3.836063 +step:1351 train loss:3.898456 +step:1352 train loss:3.871333 +step:1353 train loss:3.851676 +step:1354 train loss:3.853845 +step:1355 train loss:3.925025 +step:1356 train loss:3.827971 +step:1357 train loss:3.861389 +step:1358 train loss:3.854061 +step:1359 train loss:3.854097 +step:1360 train loss:3.886452 +step:1361 train loss:4.011082 +step:1362 train loss:3.918515 +step:1363 train loss:3.800956 +step:1364 train loss:3.828528 +step:1365 train loss:3.818774 +step:1366 train loss:3.859610 +step:1367 train loss:3.792678 +step:1368 train loss:3.826651 +step:1369 train loss:3.863528 +step:1370 train loss:3.882548 +step:1371 train loss:3.845497 +step:1372 train loss:3.881935 +step:1373 train loss:3.914884 +step:1374 train loss:3.911340 +step:1375 train loss:3.859072 +step:1376 train loss:3.889327 +step:1377 train loss:3.884246 +step:1378 train loss:3.860557 +step:1379 train loss:3.839967 +step:1380 train loss:3.907636 +step:1381 train loss:3.862551 +step:1382 train loss:3.839287 +step:1383 train loss:3.825781 +step:1384 train loss:3.891538 +step:1385 train loss:3.800305 +step:1386 train loss:3.871634 +step:1387 train loss:3.871988 +step:1388 train loss:3.839939 +step:1389 train loss:3.809193 +step:1390 train loss:3.850748 +step:1391 train loss:3.881998 +step:1392 train loss:3.862773 +step:1393 train loss:3.905700 +step:1394 train loss:3.844468 +step:1395 train loss:3.882937 +step:1396 train loss:3.868232 +step:1397 train loss:3.887601 +step:1398 train loss:3.889944 +step:1399 train loss:3.859233 +step:1400 train loss:3.838004 +step:1401 train loss:3.834200 +step:1402 train loss:3.838584 +step:1403 train loss:3.801001 +step:1404 train loss:3.860234 +step:1405 train loss:3.820189 +step:1406 train loss:3.854647 +step:1407 train loss:3.849516 +step:1408 train loss:3.824461 +step:1409 train loss:3.815054 +step:1410 train loss:3.832139 +step:1411 train loss:3.867931 +step:1412 train loss:3.915779 +step:1413 train loss:3.840408 +step:1414 train loss:3.875194 +step:1415 train loss:3.835017 +step:1416 train loss:3.888301 +step:1417 train loss:3.854061 +step:1418 train loss:3.793994 +step:1419 train loss:3.809336 +step:1420 train loss:3.830342 +step:1421 train loss:3.873394 +step:1422 train loss:3.844066 +step:1423 train loss:3.943679 +step:1424 train loss:3.844088 +step:1425 train loss:3.804815 +step:1426 train loss:3.828478 +step:1427 train loss:3.813912 +step:1428 train loss:3.799704 +step:1429 train loss:3.821301 +step:1430 train loss:3.839082 +step:1431 train loss:3.855953 +step:1432 train loss:3.840706 
+step:1433 train loss:3.823889 +step:1434 train loss:3.799169 +step:1435 train loss:3.785926 +step:1436 train loss:3.865582 +step:1437 train loss:3.793929 +step:1438 train loss:3.787553 +step:1439 train loss:3.785283 +step:1440 train loss:3.819417 +step:1441 train loss:3.897768 +step:1442 train loss:3.857623 +step:1443 train loss:3.784067 +step:1444 train loss:3.799174 +step:1445 train loss:3.794721 +step:1446 train loss:3.839080 +step:1447 train loss:3.844247 +step:1448 train loss:3.806698 +step:1449 train loss:3.830493 +step:1450 train loss:3.846280 +step:1451 train loss:3.776201 +step:1452 train loss:3.824873 +step:1453 train loss:3.828966 +step:1454 train loss:3.812211 +step:1455 train loss:3.758953 +step:1456 train loss:3.829191 +step:1457 train loss:3.766290 +step:1458 train loss:3.905673 +step:1459 train loss:3.827092 +step:1460 train loss:3.793645 +step:1461 train loss:3.853786 +step:1462 train loss:3.865400 +step:1463 train loss:3.820679 +step:1464 train loss:3.802246 +step:1465 train loss:3.797578 +step:1466 train loss:3.763946 +step:1467 train loss:3.902714 +step:1468 train loss:3.786561 +step:1469 train loss:3.858270 +step:1470 train loss:3.800564 +step:1471 train loss:3.793521 +step:1472 train loss:3.799547 +step:1473 train loss:3.793754 +step:1474 train loss:3.742795 +step:1475 train loss:3.800378 +step:1476 train loss:3.884563 +step:1477 train loss:3.835090 +step:1478 train loss:3.767516 +step:1479 train loss:3.804329 +step:1480 train loss:3.796848 +step:1481 train loss:3.772944 +step:1482 train loss:3.836081 +step:1483 train loss:3.825678 +step:1484 train loss:3.858972 +step:1485 train loss:3.871843 +step:1486 train loss:3.799845 +step:1487 train loss:3.793393 +step:1488 train loss:3.794076 +step:1489 train loss:3.788133 +step:1490 train loss:3.844516 +step:1491 train loss:3.846319 +step:1492 train loss:3.826750 +step:1493 train loss:3.778758 +step:1494 train loss:3.811242 +step:1495 train loss:3.803608 +step:1496 train loss:3.762024 +step:1497 train loss:3.838780 +step:1498 train loss:3.744746 +step:1499 train loss:3.788776 +step:1500 validation loss:3.760568 total_sharp:6.9513e-03 L1_sharp:1.1953e-02 L2_sharp:3.6765e-03 L3_sharp:4.6228e-03 L4_sharp:1.8264e-03 L5_sharp:2.0313e-03 L6_sharp:2.5013e-03 L7_sharp:3.0841e-03 L8_sharp:2.3196e-03 L9_sharp:1.9228e-03 L10_sharp:1.6029e-03 L11_sharp:1.1579e-03 L12_sharp:2.1703e-03 total_fnorm:1.6806e+00 total_l1_linf:1.3792e+04 total_spectral:1.6806e+00 L1_fnorm:2.9986e-01 L2_fnorm:2.8221e-01 L3_fnorm:2.8202e-01 L4_fnorm:2.9105e-01 L5_fnorm:2.9803e-01 L6_fnorm:3.0190e-01 L7_fnorm:3.0123e-01 L8_fnorm:3.0208e-01 L9_fnorm:3.0228e-01 L10_fnorm:3.0217e-01 L11_fnorm:3.0178e-01 L12_fnorm:3.0098e-01 L1_l1linf:2.3548e-01 L2_l1linf:2.8872e-01 L3_l1linf:3.1151e-01 L4_l1linf:3.2938e-01 L5_l1linf:3.0116e-01 L6_l1linf:2.8137e-01 L7_l1linf:2.6956e-01 L8_l1linf:2.8215e-01 L9_l1linf:3.1324e-01 L10_l1linf:3.2134e-01 L11_l1linf:3.3898e-01 L12_l1linf:3.6361e-01 L1_spectral:6.0256e-03 L2_spectral:6.6000e-03 L3_spectral:6.9919e-03 L4_spectral:7.4256e-03 L5_spectral:6.8018e-03 L6_spectral:6.3143e-03 L7_spectral:6.1350e-03 L8_spectral:6.3755e-03 L9_spectral:7.0936e-03 L10_spectral:7.2344e-03 L11_spectral:7.6059e-03 L12_spectral:8.1296e-03 ip_v_neg_g:6.4584e-03 cos_v_neg_g:2.8444e-03 v_norm:1.6806e+00 g_norm:1.3510e+00 hv_norm:5.3137e-01 cos_v_hv:2.1986e-02 hg_norm:1.6169e+01 cos_g_hg:4.8181e-01 v_par:1.4796e-04 v_perp:1.6806e+00 L1_cos_v_neg_g:4.4028e-03 L1_v_norm:2.9986e-01 L2_cos_v_neg_g:5.8871e-03 L2_v_norm:2.8221e-01 L3_cos_v_neg_g:5.2663e-03 
L3_v_norm:2.8202e-01 L4_cos_v_neg_g:5.5918e-03 L4_v_norm:2.9105e-01 L5_cos_v_neg_g:4.7583e-03 L5_v_norm:2.9803e-01 L6_cos_v_neg_g:4.6840e-03 L6_v_norm:3.0190e-01 L7_cos_v_neg_g:5.6475e-03 L7_v_norm:3.0123e-01 L8_cos_v_neg_g:5.0711e-03 L8_v_norm:3.0208e-01 L9_cos_v_neg_g:5.3250e-03 L9_v_norm:3.0228e-01 L10_cos_v_neg_g:4.6373e-03 L10_v_norm:3.0217e-01 L11_cos_v_neg_g:4.1565e-03 L11_v_norm:3.0178e-01 L12_cos_v_neg_g:3.6631e-03 L12_v_norm:3.0098e-01 +step:1500 train loss:3.780659 +step:1501 train loss:3.814560 +step:1502 train loss:3.744149 +step:1503 train loss:3.803924 +step:1504 train loss:3.769987 +step:1505 train loss:3.746527 +step:1506 train loss:3.735725 +step:1507 train loss:3.745817 +step:1508 train loss:3.769215 +step:1509 train loss:3.810431 +step:1510 train loss:3.762012 +step:1511 train loss:3.787022 +step:1512 train loss:3.761499 +step:1513 train loss:3.827628 +step:1514 train loss:3.787575 +step:1515 train loss:3.836171 +step:1516 train loss:3.775928 +step:1517 train loss:3.775354 +step:1518 train loss:3.865391 +step:1519 train loss:3.818970 +step:1520 train loss:3.867100 +step:1521 train loss:3.759743 +step:1522 train loss:3.824023 +step:1523 train loss:3.819152 +step:1524 train loss:3.751326 +step:1525 train loss:3.827815 +step:1526 train loss:3.749253 +step:1527 train loss:3.796923 +step:1528 train loss:3.858546 +step:1529 train loss:3.806481 +step:1530 train loss:3.852374 +step:1531 train loss:3.770659 +step:1532 train loss:3.845737 +step:1533 train loss:3.812921 +step:1534 train loss:3.768528 +step:1535 train loss:3.811841 +step:1536 train loss:3.845719 +step:1537 train loss:3.788075 +step:1538 train loss:3.808191 +step:1539 train loss:3.789932 +step:1540 train loss:3.815731 +step:1541 train loss:3.770735 +step:1542 train loss:3.866200 +step:1543 train loss:3.898310 +step:1544 train loss:3.760250 +step:1545 train loss:3.744317 +step:1546 train loss:3.788060 +step:1547 train loss:3.769038 +step:1548 train loss:3.809346 +step:1549 train loss:3.733178 +step:1550 train loss:3.856452 +step:1551 train loss:3.786187 +step:1552 train loss:3.815547 +step:1553 train loss:3.824739 +step:1554 train loss:3.835353 +step:1555 train loss:3.789307 +step:1556 train loss:3.771878 +step:1557 train loss:3.785430 +step:1558 train loss:3.807735 +step:1559 train loss:3.774845 +step:1560 train loss:3.849113 +step:1561 train loss:3.825346 +step:1562 train loss:3.714447 +step:1563 train loss:3.692907 +step:1564 train loss:3.834448 +step:1565 train loss:3.803761 +step:1566 train loss:3.824811 +step:1567 train loss:3.818823 +step:1568 train loss:3.776032 +step:1569 train loss:3.767823 +step:1570 train loss:3.790043 +step:1571 train loss:3.752885 +step:1572 train loss:3.765859 +step:1573 train loss:3.809717 +step:1574 train loss:3.767623 +step:1575 train loss:3.786105 +step:1576 train loss:3.747125 +step:1577 train loss:3.771075 +step:1578 train loss:3.756381 +step:1579 train loss:3.832653 +step:1580 train loss:3.788857 +step:1581 train loss:3.824435 +step:1582 train loss:3.829839 +step:1583 train loss:3.793900 +step:1584 train loss:3.715315 +step:1585 train loss:3.800976 +step:1586 train loss:3.771823 +step:1587 train loss:3.783457 +step:1588 train loss:3.769023 +step:1589 train loss:3.818665 +step:1590 train loss:3.725621 +step:1591 train loss:3.776391 +step:1592 train loss:3.733393 +step:1593 train loss:3.768928 +step:1594 train loss:3.772780 +step:1595 train loss:3.771002 +step:1596 train loss:3.775062 +step:1597 train loss:3.702276 +step:1598 train loss:3.802930 +step:1599 train loss:3.817091 
+step:1600 train loss:3.692463 +step:1601 train loss:3.771451 +step:1602 train loss:3.831549 +step:1603 train loss:3.825948 +step:1604 train loss:3.747652 +step:1605 train loss:3.806965 +step:1606 train loss:3.848245 +step:1607 train loss:3.730766 +step:1608 train loss:3.756800 +step:1609 train loss:3.784587 +step:1610 train loss:3.841137 +step:1611 train loss:3.767372 +step:1612 train loss:3.688334 +step:1613 train loss:3.762275 +step:1614 train loss:3.866993 +step:1615 train loss:3.790779 +step:1616 train loss:3.791127 +step:1617 train loss:3.781217 +step:1618 train loss:3.777547 +step:1619 train loss:3.957767 +step:1620 train loss:3.746690 +step:1621 train loss:3.801341 +step:1622 train loss:3.726900 +step:1623 train loss:3.791724 +step:1624 train loss:3.762394 +step:1625 train loss:3.843107 +step:1626 train loss:3.724034 +step:1627 train loss:3.739645 +step:1628 train loss:3.757926 +step:1629 train loss:3.795644 +step:1630 train loss:3.804776 +step:1631 train loss:3.759090 +step:1632 train loss:3.728354 +step:1633 train loss:3.750737 +step:1634 train loss:3.807797 +step:1635 train loss:3.746807 +step:1636 train loss:3.734061 +step:1637 train loss:3.808359 +step:1638 train loss:3.910465 +step:1639 train loss:3.713555 +step:1640 train loss:3.798390 +step:1641 train loss:3.758693 +step:1642 train loss:3.858959 +step:1643 train loss:3.756346 +step:1644 train loss:3.761834 +step:1645 train loss:3.740336 +step:1646 train loss:3.822886 +step:1647 train loss:3.714046 +step:1648 train loss:3.779088 +step:1649 train loss:3.740412 +step:1650 train loss:3.758975 +step:1651 train loss:3.770083 +step:1652 train loss:3.793570 +step:1653 train loss:3.798229 +step:1654 train loss:3.794152 +step:1655 train loss:3.767412 +step:1656 train loss:3.762140 +step:1657 train loss:3.764486 +step:1658 train loss:3.736081 +step:1659 train loss:3.811186 +step:1660 train loss:3.714828 +step:1661 train loss:3.830463 +step:1662 train loss:3.759709 +step:1663 train loss:3.753565 +step:1664 train loss:3.847990 +step:1665 train loss:3.768507 +step:1666 train loss:3.780223 +step:1667 train loss:3.802085 +step:1668 train loss:3.773576 +step:1669 train loss:3.731856 +step:1670 train loss:3.790701 +step:1671 train loss:3.789549 +step:1672 train loss:3.783349 +step:1673 train loss:3.739334 +step:1674 train loss:3.739152 +step:1675 train loss:3.778131 +step:1676 train loss:4.048704 +step:1677 train loss:3.788090 +step:1678 train loss:3.704927 +step:1679 train loss:3.828561 +step:1680 train loss:3.752476 +step:1681 train loss:3.807874 +step:1682 train loss:3.768965 +step:1683 train loss:3.759153 +step:1684 train loss:3.715679 +step:1685 train loss:3.776870 +step:1686 train loss:3.761417 +step:1687 train loss:3.778682 +step:1688 train loss:3.752840 +step:1689 train loss:3.743943 +step:1690 train loss:3.768509 +step:1691 train loss:3.759018 +step:1692 train loss:3.771597 +step:1693 train loss:3.751417 +step:1694 train loss:3.699313 +step:1695 train loss:3.725228 +step:1696 train loss:3.730889 +step:1697 train loss:3.780211 +step:1698 train loss:3.771324 +step:1699 train loss:3.730072 +step:1700 train loss:3.811339 +step:1701 train loss:3.751740 +step:1702 train loss:3.743740 +step:1703 train loss:3.760146 +step:1704 train loss:3.768773 +step:1705 train loss:3.786833 +step:1706 train loss:3.786407 +step:1707 train loss:3.790098 +step:1708 train loss:3.710839 +step:1709 train loss:3.816174 +step:1710 train loss:3.731192 +step:1711 train loss:3.737316 +step:1712 train loss:3.763031 +step:1713 train loss:3.726113 +step:1714 train 
loss:4.099425 +step:1715 train loss:3.739512 +step:1716 train loss:3.728623 +step:1717 train loss:3.729279 +step:1718 train loss:3.809660 +step:1719 train loss:3.711256 +step:1720 train loss:3.803643 +step:1721 train loss:3.740565 +step:1722 train loss:3.713466 +step:1723 train loss:3.810817 +step:1724 train loss:3.759514 +step:1725 train loss:3.758746 +step:1726 train loss:3.754901 +step:1727 train loss:3.793423 +step:1728 train loss:3.799960 +step:1729 train loss:3.722007 +step:1730 train loss:3.796227 +step:1731 train loss:3.723098 +step:1732 train loss:3.738899 +step:1733 train loss:3.722640 +step:1734 train loss:3.781367 +step:1735 train loss:3.833875 +step:1736 train loss:3.752032 +step:1737 train loss:3.777446 +step:1738 train loss:3.735086 +step:1739 train loss:3.808442 +step:1740 train loss:3.791907 +step:1741 train loss:3.852356 +step:1742 train loss:3.829213 +step:1743 train loss:3.726742 +step:1744 train loss:3.738640 +step:1745 train loss:3.728261 +step:1746 train loss:3.712966 +step:1747 train loss:3.752073 +step:1748 train loss:3.685844 +step:1749 train loss:3.731061 +step:1750 validation loss:3.705110 +step:1750 train loss:3.768853 +step:1751 train loss:3.778731 +step:1752 train loss:3.756678 +step:1753 train loss:3.770983 +step:1754 train loss:3.768886 +step:1755 train loss:3.763346 +step:1756 train loss:3.781105 +step:1757 train loss:3.789794 +step:1758 train loss:3.702889 +step:1759 train loss:3.798723 +step:1760 train loss:3.746918 +step:1761 train loss:3.730303 +step:1762 train loss:3.722147 +step:1763 train loss:3.727110 +step:1764 train loss:4.027656 +step:1765 train loss:3.730315 +step:1766 train loss:3.825649 +step:1767 train loss:3.733561 +step:1768 train loss:3.716461 +step:1769 train loss:3.736852 +step:1770 train loss:3.751008 +step:1771 train loss:3.724603 +step:1772 train loss:3.833338 +step:1773 train loss:3.758720 +step:1774 train loss:3.760231 +step:1775 train loss:3.874785 +step:1776 train loss:3.749114 +step:1777 train loss:3.739349 +step:1778 train loss:3.794321 +step:1779 train loss:3.737569 +step:1780 train loss:3.785048 +step:1781 train loss:3.782519 +step:1782 train loss:3.820620 +step:1783 train loss:3.740893 +step:1784 train loss:3.839484 +step:1785 train loss:3.739283 +step:1786 train loss:3.739008 +step:1787 train loss:3.735986 +step:1788 train loss:3.759362 +step:1789 train loss:3.717150 +step:1790 train loss:3.729285 +step:1791 train loss:3.812155 +step:1792 train loss:3.806690 +step:1793 train loss:3.726887 +step:1794 train loss:3.767291 +step:1795 train loss:3.721979 +step:1796 train loss:3.709183 +step:1797 train loss:3.767119 +step:1798 train loss:3.713346 +step:1799 train loss:3.764737 +step:1800 train loss:3.799392 +step:1801 train loss:3.783551 +step:1802 train loss:3.791016 +step:1803 train loss:3.780534 +step:1804 train loss:3.778082 +step:1805 train loss:3.768416 +step:1806 train loss:3.778214 +step:1807 train loss:3.706825 +step:1808 train loss:3.767934 +step:1809 train loss:3.758470 +step:1810 train loss:3.745171 +step:1811 train loss:3.763291 +step:1812 train loss:3.745119 +step:1813 train loss:3.758929 +step:1814 train loss:3.828645 +step:1815 train loss:3.763900 +step:1816 train loss:3.719369 +step:1817 train loss:3.711308 +step:1818 train loss:3.765322 +step:1819 train loss:3.738765 +step:1820 train loss:3.769896 +step:1821 train loss:3.736609 +step:1822 train loss:3.716463 +step:1823 train loss:3.708161 +step:1824 train loss:3.784408 +step:1825 train loss:3.700399 +step:1826 train loss:3.741985 +step:1827 train loss:3.707823 
+step:1828 train loss:3.762705 +step:1829 train loss:3.724435 +step:1830 train loss:3.926512 +step:1831 train loss:3.674448 +step:1832 train loss:3.729828 +step:1833 train loss:3.771014 +step:1834 train loss:3.722088 +step:1835 train loss:3.723373 +step:1836 train loss:3.762240 +step:1837 train loss:3.689198 +step:1838 train loss:3.788296 +step:1839 train loss:3.771191 +step:1840 train loss:3.737343 +step:1841 train loss:3.766391 +step:1842 train loss:3.737292 +step:1843 train loss:3.684633 +step:1844 train loss:3.754380 +step:1845 train loss:3.718715 +step:1846 train loss:3.782830 +step:1847 train loss:3.824516 +step:1848 train loss:3.633631 +step:1849 train loss:3.715935 +step:1850 train loss:3.694486 +step:1851 train loss:3.737431 +step:1852 train loss:3.722431 +step:1853 train loss:3.778848 +step:1854 train loss:3.740932 +step:1855 train loss:3.731248 +step:1856 train loss:3.726140 +step:1857 train loss:3.737200 +step:1858 train loss:3.782241 +step:1859 train loss:3.726873 +step:1860 train loss:3.705905 +step:1861 train loss:3.717456 +step:1862 train loss:3.760752 +step:1863 train loss:3.799024 +step:1864 train loss:3.696496 +step:1865 train loss:3.716963 +step:1866 train loss:3.715629 +step:1867 train loss:3.756129 +step:1868 train loss:3.801195 +step:1869 train loss:3.723444 +step:1870 train loss:3.747482 +step:1871 train loss:3.687145 +step:1872 train loss:3.758714 +step:1873 train loss:3.820045 +step:1874 train loss:3.686538 +step:1875 train loss:3.754933 +step:1876 train loss:3.723356 +step:1877 train loss:3.764798 +step:1878 train loss:3.686407 +step:1879 train loss:3.751184 +step:1880 train loss:3.824835 +step:1881 train loss:3.756043 +step:1882 train loss:3.771266 +step:1883 train loss:3.794932 +step:1884 train loss:3.811954 +step:1885 train loss:3.756392 +step:1886 train loss:3.691915 +step:1887 train loss:3.701480 +step:1888 train loss:3.711438 +step:1889 train loss:3.731670 +step:1890 train loss:3.725961 +step:1891 train loss:3.667943 +step:1892 train loss:3.754065 +step:1893 train loss:3.682142 +step:1894 train loss:3.700558 +step:1895 train loss:3.736030 +step:1896 train loss:3.782529 +step:1897 train loss:3.676550 +step:1898 train loss:3.727772 +step:1899 train loss:3.743216 +step:1900 train loss:3.690354 +step:1901 train loss:3.780550 +step:1902 train loss:3.763707 +step:1903 train loss:3.707534 +step:1904 train loss:3.687966 +step:1905 train loss:3.689410 +step:1906 train loss:3.753958 +step:1907 train loss:3.693198 +step:1908 train loss:3.713037 +step:1909 train loss:3.806187 +step:1910 train loss:3.692931 +step:1911 train loss:3.702626 +step:1912 train loss:3.754250 +step:1913 train loss:3.692566 +step:1914 train loss:3.730496 +step:1915 train loss:3.689569 +step:1916 train loss:3.743908 +step:1917 train loss:3.724519 +step:1918 train loss:3.636201 +step:1919 train loss:3.791520 +step:1920 train loss:3.891766 +step:1921 train loss:3.678985 +step:1922 train loss:3.658501 +step:1923 train loss:3.747720 +step:1924 train loss:3.790506 +step:1925 train loss:3.732884 +step:1926 train loss:3.674410 +step:1927 train loss:3.756782 +step:1928 train loss:3.672390 +step:1929 train loss:3.697315 +step:1930 train loss:3.768018 +step:1931 train loss:3.680705 +step:1932 train loss:3.732371 +step:1933 train loss:3.729666 +step:1934 train loss:3.803719 +step:1935 train loss:3.750781 +step:1936 train loss:3.725421 +step:1937 train loss:3.659263 +step:1938 train loss:4.036638 +step:1939 train loss:3.768641 +step:1940 train loss:3.742732 +step:1941 train loss:3.755745 +step:1942 train 
loss:3.743278 +step:1943 train loss:3.741905 +step:1944 train loss:3.697228 +step:1945 train loss:3.700515 +step:1946 train loss:3.727389 +step:1947 train loss:3.746408 +step:1948 train loss:3.661559 +step:1949 train loss:3.768346 +step:1950 train loss:3.705067 +step:1951 train loss:3.731935 +step:1952 train loss:3.752017 +step:1953 train loss:3.685928 +step:1954 train loss:3.722795 +step:1955 train loss:3.678424 +step:1956 train loss:3.761029 +step:1957 train loss:3.781528 +step:1958 train loss:3.799962 +step:1959 train loss:3.667482 +step:1960 train loss:3.705705 +step:1961 train loss:3.743513 +step:1962 train loss:3.731336 +step:1963 train loss:3.708596 +step:1964 train loss:3.750694 +step:1965 train loss:3.779792 +step:1966 train loss:3.689436 +step:1967 train loss:3.745735 +step:1968 train loss:3.686453 +step:1969 train loss:3.707408 +step:1970 train loss:3.766937 +step:1971 train loss:3.667311 +step:1972 train loss:3.782447 +step:1973 train loss:3.677849 +step:1974 train loss:3.720642 +step:1975 train loss:3.677951 +step:1976 train loss:3.706441 +step:1977 train loss:3.749739 +step:1978 train loss:3.688914 +step:1979 train loss:3.669621 +step:1980 train loss:3.711623 +step:1981 train loss:3.691715 +step:1982 train loss:3.769010 +step:1983 train loss:3.716244 +step:1984 train loss:3.758170 +step:1985 train loss:3.740644 +step:1986 train loss:3.734187 +step:1987 train loss:3.688785 +step:1988 train loss:3.714338 +step:1989 train loss:3.862001 +step:1990 train loss:3.687261 +step:1991 train loss:3.684686 +step:1992 train loss:3.693988 +step:1993 train loss:3.722505 +step:1994 train loss:3.725393 +step:1995 train loss:3.667651 +step:1996 train loss:3.728132 +step:1997 train loss:3.731754 +step:1998 train loss:3.679262 +step:1999 train loss:3.799235 +step:2000 validation loss:3.659181 total_sharp:5.7198e-03 L1_sharp:8.9416e-03 L2_sharp:2.6146e-03 L3_sharp:3.9341e-03 L4_sharp:1.5783e-03 L5_sharp:1.7747e-03 L6_sharp:2.3151e-03 L7_sharp:2.7879e-03 L8_sharp:1.9142e-03 L9_sharp:1.4891e-03 L10_sharp:1.2187e-03 L11_sharp:8.6163e-04 L12_sharp:1.6069e-03 total_fnorm:1.6858e+00 total_l1_linf:1.3828e+04 total_spectral:1.6858e+00 L1_fnorm:2.9585e-01 L2_fnorm:2.8115e-01 L3_fnorm:2.8250e-01 L4_fnorm:2.9270e-01 L5_fnorm:2.9904e-01 L6_fnorm:3.0229e-01 L7_fnorm:3.0170e-01 L8_fnorm:3.0275e-01 L9_fnorm:3.0315e-01 L10_fnorm:3.0277e-01 L11_fnorm:3.0237e-01 L12_fnorm:3.0124e-01 L1_l1linf:2.7489e-01 L2_l1linf:3.4086e-01 L3_l1linf:3.5282e-01 L4_l1linf:3.5399e-01 L5_l1linf:3.2641e-01 L6_l1linf:3.0492e-01 L7_l1linf:2.9492e-01 L8_l1linf:2.9686e-01 L9_l1linf:3.2078e-01 L10_l1linf:3.3634e-01 L11_l1linf:3.4862e-01 L12_l1linf:3.4949e-01 L1_spectral:6.1492e-03 L2_spectral:7.7547e-03 L3_spectral:8.0127e-03 L4_spectral:7.9779e-03 L5_spectral:7.3587e-03 L6_spectral:6.9096e-03 L7_spectral:6.7022e-03 L8_spectral:6.6732e-03 L9_spectral:7.3077e-03 L10_spectral:7.6302e-03 L11_spectral:7.8492e-03 L12_spectral:7.8860e-03 ip_v_neg_g:1.0496e-02 cos_v_neg_g:4.2253e-03 v_norm:1.6858e+00 g_norm:1.4736e+00 hv_norm:5.1841e-01 cos_v_hv:1.8600e-02 hg_norm:2.8476e+01 cos_g_hg:5.3781e-01 v_par:2.6250e-04 v_perp:1.6858e+00 L1_cos_v_neg_g:8.0466e-03 L1_v_norm:2.9585e-01 L2_cos_v_neg_g:1.1854e-02 L2_v_norm:2.8115e-01 L3_cos_v_neg_g:1.2041e-02 L3_v_norm:2.8250e-01 L4_cos_v_neg_g:7.9367e-03 L4_v_norm:2.9270e-01 L5_cos_v_neg_g:7.5736e-03 L5_v_norm:2.9904e-01 L6_cos_v_neg_g:7.4854e-03 L6_v_norm:3.0229e-01 L7_cos_v_neg_g:8.2000e-03 L7_v_norm:3.0170e-01 L8_cos_v_neg_g:6.0338e-03 L8_v_norm:3.0275e-01 L9_cos_v_neg_g:7.1226e-03 L9_v_norm:3.0315e-01 
L10_cos_v_neg_g:5.7932e-03 L10_v_norm:3.0277e-01 L11_cos_v_neg_g:4.5606e-03 L11_v_norm:3.0237e-01 L12_cos_v_neg_g:4.0651e-03 L12_v_norm:3.0124e-01 +step:2000 train loss:3.754484 +step:2001 train loss:3.686876 +step:2002 train loss:3.784853 +step:2003 train loss:3.832838 +step:2004 train loss:3.703472 +step:2005 train loss:3.802558 +step:2006 train loss:3.683073 +step:2007 train loss:3.761338 +step:2008 train loss:3.709517 +step:2009 train loss:3.704319 +step:2010 train loss:3.833560 +step:2011 train loss:3.688234 +step:2012 train loss:3.717046 +step:2013 train loss:3.725305 +step:2014 train loss:3.632828 +step:2015 train loss:3.740626 +step:2016 train loss:3.720952 +step:2017 train loss:3.725135 +step:2018 train loss:3.687550 +step:2019 train loss:3.719479 +step:2020 train loss:3.729054 +step:2021 train loss:3.686743 +step:2022 train loss:3.733887 +step:2023 train loss:3.712961 +step:2024 train loss:3.769803 +step:2025 train loss:3.702266 +step:2026 train loss:3.683385 +step:2027 train loss:3.714903 +step:2028 train loss:3.642622 +step:2029 train loss:3.678768 +step:2030 train loss:3.677678 +step:2031 train loss:3.639690 +step:2032 train loss:3.694976 +step:2033 train loss:3.685892 +step:2034 train loss:3.688209 +step:2035 train loss:3.726726 +step:2036 train loss:3.719938 +step:2037 train loss:3.706753 +step:2038 train loss:3.700823 +step:2039 train loss:3.697957 +step:2040 train loss:3.724999 +step:2041 train loss:3.722864 +step:2042 train loss:3.654687 +step:2043 train loss:3.809364 +step:2044 train loss:3.678701 +step:2045 train loss:3.693428 +step:2046 train loss:3.704950 +step:2047 train loss:3.686517 +step:2048 train loss:3.722756 +step:2049 train loss:3.680926 +step:2050 train loss:3.704742 +step:2051 train loss:3.664492 +step:2052 train loss:3.721486 +step:2053 train loss:3.713880 +step:2054 train loss:3.694180 +step:2055 train loss:3.689450 +step:2056 train loss:3.732021 +step:2057 train loss:3.741212 +step:2058 train loss:3.703105 +step:2059 train loss:3.786072 +step:2060 train loss:3.729702 +step:2061 train loss:3.688768 +step:2062 train loss:3.716351 +step:2063 train loss:3.617248 +step:2064 train loss:3.737316 +step:2065 train loss:3.743343 +step:2066 train loss:3.602198 +step:2067 train loss:3.651740 +step:2068 train loss:3.757237 +step:2069 train loss:3.695698 +step:2070 train loss:3.698613 +step:2071 train loss:3.734920 +step:2072 train loss:3.668422 +step:2073 train loss:3.720522 +step:2074 train loss:3.697860 +step:2075 train loss:3.782257 +step:2076 train loss:3.718741 +step:2077 train loss:3.738971 +step:2078 train loss:3.697187 +step:2079 train loss:3.843396 +step:2080 train loss:3.666616 +step:2081 train loss:3.772093 +step:2082 train loss:3.704489 +step:2083 train loss:3.693289 +step:2084 train loss:3.671426 +step:2085 train loss:3.712749 +step:2086 train loss:3.732340 +step:2087 train loss:3.769299 +step:2088 train loss:3.638508 +step:2089 train loss:3.659772 +step:2090 train loss:3.699745 +step:2091 train loss:3.717717 +step:2092 train loss:3.698996 +step:2093 train loss:3.691091 +step:2094 train loss:3.727279 +step:2095 train loss:3.666896 +step:2096 train loss:3.657944 +step:2097 train loss:3.695821 +step:2098 train loss:3.693200 +step:2099 train loss:3.680963 +step:2100 train loss:3.735781 +step:2101 train loss:3.734047 +step:2102 train loss:3.696743 +step:2103 train loss:3.713468 +step:2104 train loss:3.695289 +step:2105 train loss:3.698905 +step:2106 train loss:3.696830 +step:2107 train loss:3.761550 +step:2108 train loss:3.677347 +step:2109 train 
loss:3.641553 +step:2110 train loss:3.734511 +step:2111 train loss:3.681117 +step:2112 train loss:3.747059 +step:2113 train loss:3.679589 +step:2114 train loss:3.688468 +step:2115 train loss:3.739950 +step:2116 train loss:3.671529 +step:2117 train loss:3.692582 +step:2118 train loss:3.679253 +step:2119 train loss:3.611065 +step:2120 train loss:3.695694 +step:2121 train loss:3.691333 +step:2122 train loss:3.702698 +step:2123 train loss:3.754066 +step:2124 train loss:3.764176 +step:2125 train loss:3.663565 +step:2126 train loss:3.669578 +step:2127 train loss:3.658786 +step:2128 train loss:3.655802 +step:2129 train loss:3.684485 +step:2130 train loss:3.690134 +step:2131 train loss:3.710469 +step:2132 train loss:3.640051 +step:2133 train loss:3.748981 +step:2134 train loss:3.701614 +step:2135 train loss:3.659861 +step:2136 train loss:3.754284 +step:2137 train loss:3.714512 +step:2138 train loss:3.675435 +step:2139 train loss:3.675540 +step:2140 train loss:3.673159 +step:2141 train loss:3.730498 +step:2142 train loss:3.696104 +step:2143 train loss:3.619160 +step:2144 train loss:3.728621 +step:2145 train loss:3.696833 +step:2146 train loss:3.732847 +step:2147 train loss:3.842719 +step:2148 train loss:3.641383 +step:2149 train loss:3.649538 +step:2150 train loss:3.673520 +step:2151 train loss:3.711501 +step:2152 train loss:3.710059 +step:2153 train loss:3.746470 +step:2154 train loss:3.668118 +step:2155 train loss:3.747679 +step:2156 train loss:3.671318 +step:2157 train loss:3.743801 +step:2158 train loss:3.785227 +step:2159 train loss:3.713623 +step:2160 train loss:3.780118 +step:2161 train loss:3.681061 +step:2162 train loss:3.686015 +step:2163 train loss:3.659725 +step:2164 train loss:3.682937 +step:2165 train loss:3.665936 +step:2166 train loss:3.781687 +step:2167 train loss:3.689363 +step:2168 train loss:3.700046 +step:2169 train loss:3.650537 +step:2170 train loss:3.802352 +step:2171 train loss:3.754890 +step:2172 train loss:3.689906 +step:2173 train loss:3.688324 +step:2174 train loss:3.747350 +step:2175 train loss:3.672977 +step:2176 train loss:3.762638 +step:2177 train loss:3.729058 +step:2178 train loss:3.655246 +step:2179 train loss:3.724041 +step:2180 train loss:3.744994 +step:2181 train loss:3.668685 +step:2182 train loss:3.716972 +step:2183 train loss:3.710833 +step:2184 train loss:3.662457 +step:2185 train loss:3.643925 +step:2186 train loss:3.684155 +step:2187 train loss:3.695367 +step:2188 train loss:3.741653 +step:2189 train loss:3.633575 +step:2190 train loss:3.679515 +step:2191 train loss:3.733498 +step:2192 train loss:3.665192 +step:2193 train loss:3.634005 +step:2194 train loss:3.636800 +step:2195 train loss:3.665152 +step:2196 train loss:3.670060 +step:2197 train loss:3.654260 +step:2198 train loss:3.679775 +step:2199 train loss:3.742292 +step:2200 train loss:3.680429 +step:2201 train loss:3.687564 +step:2202 train loss:3.645037 +step:2203 train loss:3.667323 +step:2204 train loss:3.698220 +step:2205 train loss:3.681046 +step:2206 train loss:3.684067 +step:2207 train loss:3.678275 +step:2208 train loss:3.657050 +step:2209 train loss:3.938542 +step:2210 train loss:3.710079 +step:2211 train loss:3.697377 +step:2212 train loss:3.676986 +step:2213 train loss:3.748892 +step:2214 train loss:3.746701 +step:2215 train loss:3.673047 +step:2216 train loss:3.636297 +step:2217 train loss:3.672776 +step:2218 train loss:3.669858 +step:2219 train loss:3.705130 +step:2220 train loss:3.644984 +step:2221 train loss:3.680008 +step:2222 train loss:3.692850 +step:2223 train loss:3.737354 
+step:2224 train loss:3.705861 +step:2225 train loss:3.648292 +step:2226 train loss:3.716988 +step:2227 train loss:3.714803 +step:2228 train loss:3.714796 +step:2229 train loss:3.654814 +step:2230 train loss:3.781047 +step:2231 train loss:3.697003 +step:2232 train loss:3.690327 +step:2233 train loss:3.737258 +step:2234 train loss:3.633716 +step:2235 train loss:3.722906 +step:2236 train loss:3.661587 +step:2237 train loss:3.800539 +step:2238 train loss:3.595519 +step:2239 train loss:3.680754 +step:2240 train loss:3.689839 +step:2241 train loss:3.604108 +step:2242 train loss:3.754382 +step:2243 train loss:3.781086 +step:2244 train loss:3.659827 +step:2245 train loss:3.666604 +step:2246 train loss:3.626733 +step:2247 train loss:3.636465 +step:2248 train loss:3.691316 +step:2249 train loss:3.668919 +step:2250 validation loss:3.625085 +step:2250 train loss:3.693697 +step:2251 train loss:3.648088 +step:2252 train loss:3.648832 +step:2253 train loss:3.678520 +step:2254 train loss:3.686489 +step:2255 train loss:3.638735 +step:2256 train loss:3.693800 +step:2257 train loss:3.676718 +step:2258 train loss:3.671178 +step:2259 train loss:3.690476 +step:2260 train loss:3.633863 +step:2261 train loss:3.716915 +step:2262 train loss:3.742005 +step:2263 train loss:3.693297 +step:2264 train loss:3.810816 +step:2265 train loss:3.656002 +step:2266 train loss:3.697130 +step:2267 train loss:3.656517 +step:2268 train loss:3.664967 +step:2269 train loss:3.661951 +step:2270 train loss:3.656484 +step:2271 train loss:3.673197 +step:2272 train loss:3.706288 +step:2273 train loss:3.626027 +step:2274 train loss:3.661490 +step:2275 train loss:3.611235 +step:2276 train loss:3.688608 +step:2277 train loss:3.705270 +step:2278 train loss:3.681242 +step:2279 train loss:3.664411 +step:2280 train loss:3.571614 +step:2281 train loss:3.719243 +step:2282 train loss:3.648884 +step:2283 train loss:3.630896 +step:2284 train loss:3.649109 +step:2285 train loss:3.707036 +step:2286 train loss:3.665875 +step:2287 train loss:3.701884 +step:2288 train loss:3.673111 +step:2289 train loss:3.674477 +step:2290 train loss:3.680805 +step:2291 train loss:3.664366 +step:2292 train loss:3.711226 +step:2293 train loss:3.683802 +step:2294 train loss:3.682081 +step:2295 train loss:3.738574 +step:2296 train loss:3.671025 +step:2297 train loss:3.644174 +step:2298 train loss:3.705256 +step:2299 train loss:3.678333 +step:2300 train loss:3.593625 +step:2301 train loss:3.693588 +step:2302 train loss:3.702049 +step:2303 train loss:3.676856 +step:2304 train loss:3.665772 +step:2305 train loss:3.706086 +step:2306 train loss:3.696669 +step:2307 train loss:3.677657 +step:2308 train loss:3.693897 +step:2309 train loss:3.650462 +step:2310 train loss:3.639740 +step:2311 train loss:3.623393 +step:2312 train loss:3.694326 +step:2313 train loss:3.608828 +step:2314 train loss:3.682677 +step:2315 train loss:3.697124 +step:2316 train loss:3.739710 +step:2317 train loss:3.603016 +step:2318 train loss:3.650657 +step:2319 train loss:3.702929 +step:2320 train loss:3.668850 +step:2321 train loss:3.637819 +step:2322 train loss:3.660845 +step:2323 train loss:3.651111 +step:2324 train loss:3.683430 +step:2325 train loss:3.619312 +step:2326 train loss:3.655317 +step:2327 train loss:3.766976 +step:2328 train loss:3.709876 +step:2329 train loss:3.669549 +step:2330 train loss:3.629622 +step:2331 train loss:3.669783 +step:2332 train loss:3.595633 +step:2333 train loss:3.653887 +step:2334 train loss:3.634824 +step:2335 train loss:3.622972 +step:2336 train loss:3.872281 +step:2337 
train loss:3.647073 +step:2338 train loss:3.690968 +step:2339 train loss:3.685145 +step:2340 train loss:3.706047 +step:2341 train loss:3.691405 +step:2342 train loss:3.644776 +step:2343 train loss:3.668136 +step:2344 train loss:3.713096 +step:2345 train loss:3.662794 +step:2346 train loss:3.690255 +step:2347 train loss:3.620647 +step:2348 train loss:3.673635 +step:2349 train loss:3.623735 +step:2350 train loss:3.684212 +step:2351 train loss:3.686299 +step:2352 train loss:3.691080 +step:2353 train loss:3.658022 +step:2354 train loss:3.697173 +step:2355 train loss:3.689964 +step:2356 train loss:3.725534 +step:2357 train loss:3.629674 +step:2358 train loss:3.645288 +step:2359 train loss:3.666475 +step:2360 train loss:3.694170 +step:2361 train loss:3.723676 +step:2362 train loss:3.562105 +step:2363 train loss:3.750306 +step:2364 train loss:3.699248 +step:2365 train loss:3.665600 +step:2366 train loss:3.614979 +step:2367 train loss:3.689527 +step:2368 train loss:3.673618 +step:2369 train loss:3.674956 +step:2370 train loss:3.679500 +step:2371 train loss:3.738473 +step:2372 train loss:3.592479 +step:2373 train loss:3.731917 +step:2374 train loss:3.713357 +step:2375 train loss:3.696907 +step:2376 train loss:3.690823 +step:2377 train loss:3.627758 +step:2378 train loss:3.681086 +step:2379 train loss:3.664564 +step:2380 train loss:3.725432 +step:2381 train loss:3.825192 +step:2382 train loss:3.609310 +step:2383 train loss:3.651708 +step:2384 train loss:3.685330 +step:2385 train loss:3.586612 +step:2386 train loss:3.743923 +step:2387 train loss:3.622216 +step:2388 train loss:3.676093 +step:2389 train loss:3.697128 +step:2390 train loss:3.648124 +step:2391 train loss:3.673999 +step:2392 train loss:3.697571 +step:2393 train loss:3.648468 +step:2394 train loss:3.677668 +step:2395 train loss:3.668333 +step:2396 train loss:3.671663 +step:2397 train loss:3.644192 +step:2398 train loss:3.708405 +step:2399 train loss:3.672025 +step:2400 train loss:3.641490 +step:2401 train loss:3.688875 +step:2402 train loss:3.636918 +step:2403 train loss:3.684172 +step:2404 train loss:3.647040 +step:2405 train loss:3.647237 +step:2406 train loss:3.672039 +step:2407 train loss:3.616407 +step:2408 train loss:3.660648 +step:2409 train loss:3.651299 +step:2410 train loss:3.647004 +step:2411 train loss:3.724699 +step:2412 train loss:3.704445 +step:2413 train loss:3.754668 +step:2414 train loss:3.640510 +step:2415 train loss:3.629618 +step:2416 train loss:3.646408 +step:2417 train loss:3.685768 +step:2418 train loss:3.702163 +step:2419 train loss:3.630522 +step:2420 train loss:3.650613 +step:2421 train loss:3.681098 +step:2422 train loss:3.729448 +step:2423 train loss:3.667589 +step:2424 train loss:3.628685 +step:2425 train loss:3.693915 +step:2426 train loss:3.633743 +step:2427 train loss:3.651184 +step:2428 train loss:3.740951 +step:2429 train loss:3.686922 +step:2430 train loss:3.784254 +step:2431 train loss:3.692206 +step:2432 train loss:3.666544 +step:2433 train loss:3.637842 +step:2434 train loss:3.628157 +step:2435 train loss:3.688661 +step:2436 train loss:3.640611 +step:2437 train loss:3.674622 +step:2438 train loss:3.721766 +step:2439 train loss:3.705442 +step:2440 train loss:3.642948 +step:2441 train loss:3.682964 +step:2442 train loss:3.675070 +step:2443 train loss:3.630074 +step:2444 train loss:3.671268 +step:2445 train loss:3.669960 +step:2446 train loss:3.636666 +step:2447 train loss:3.621282 +step:2448 train loss:3.673474 +step:2449 train loss:3.701163 +step:2450 train loss:3.659629 +step:2451 train loss:3.589165 
+step:2452 train loss:3.682376 +step:2453 train loss:3.653198 +step:2454 train loss:3.650391 +step:2455 train loss:3.701698 +step:2456 train loss:3.655537 +step:2457 train loss:3.718002 +step:2458 train loss:3.690271 +step:2459 train loss:3.665080 +step:2460 train loss:3.679427 +step:2461 train loss:3.703265 +step:2462 train loss:3.673110 +step:2463 train loss:3.650659 +step:2464 train loss:3.662833 +step:2465 train loss:3.744356 +step:2466 train loss:3.827894 +step:2467 train loss:3.732240 +step:2468 train loss:3.624045 +step:2469 train loss:3.702227 +step:2470 train loss:3.741247 +step:2471 train loss:3.740886 +step:2472 train loss:3.712935 +step:2473 train loss:3.662719 +step:2474 train loss:3.623430 +step:2475 train loss:3.676397 +step:2476 train loss:3.751229 +step:2477 train loss:3.669133 +step:2478 train loss:3.624162 +step:2479 train loss:3.663609 +step:2480 train loss:3.658707 +step:2481 train loss:3.859336 +step:2482 train loss:3.656884 +step:2483 train loss:3.686334 +step:2484 train loss:3.639029 +step:2485 train loss:3.630679 +step:2486 train loss:3.662121 +step:2487 train loss:3.699049 +step:2488 train loss:3.608510 +step:2489 train loss:3.717318 +step:2490 train loss:3.641621 +step:2491 train loss:3.657438 +step:2492 train loss:3.698520 +step:2493 train loss:3.735232 +step:2494 train loss:3.655378 +step:2495 train loss:3.688405 +step:2496 train loss:3.662399 +step:2497 train loss:3.681226 +step:2498 train loss:3.688076 +step:2499 train loss:3.678827 +step:2500 validation loss:3.595403 total_sharp:5.5015e-03 L1_sharp:1.9599e-02 L2_sharp:8.2222e-03 L3_sharp:2.0726e-03 L4_sharp:9.7999e-04 L5_sharp:1.4388e-03 L6_sharp:1.5685e-03 L7_sharp:1.9113e-03 L8_sharp:1.7592e-03 L9_sharp:1.2620e-03 L10_sharp:1.1526e-03 L11_sharp:8.9808e-04 L12_sharp:1.2896e-03 total_fnorm:1.6770e+00 total_l1_linf:1.3753e+04 total_spectral:1.6770e+00 L1_fnorm:2.9275e-01 L2_fnorm:2.7800e-01 L3_fnorm:2.8291e-01 L4_fnorm:2.9396e-01 L5_fnorm:2.9995e-01 L6_fnorm:3.0217e-01 L7_fnorm:3.0229e-01 L8_fnorm:3.0282e-01 L9_fnorm:3.0322e-01 L10_fnorm:3.0290e-01 L11_fnorm:3.0235e-01 L12_fnorm:3.0119e-01 L1_l1linf:2.7744e-01 L2_l1linf:3.5144e-01 L3_l1linf:3.6157e-01 L4_l1linf:3.6731e-01 L5_l1linf:3.4569e-01 L6_l1linf:3.0532e-01 L7_l1linf:2.9451e-01 L8_l1linf:3.0394e-01 L9_l1linf:3.3321e-01 L10_l1linf:3.5331e-01 L11_l1linf:3.6944e-01 L12_l1linf:3.5433e-01 L1_spectral:6.2431e-03 L2_spectral:8.0901e-03 L3_spectral:8.2001e-03 L4_spectral:8.3011e-03 L5_spectral:7.8149e-03 L6_spectral:6.9024e-03 L7_spectral:6.6782e-03 L8_spectral:6.8512e-03 L9_spectral:7.5322e-03 L10_spectral:7.9972e-03 L11_spectral:8.2879e-03 L12_spectral:7.9996e-03 ip_v_neg_g:5.3358e-03 cos_v_neg_g:2.4313e-03 v_norm:1.6770e+00 g_norm:1.3086e+00 hv_norm:5.7162e-01 cos_v_hv:1.6140e-02 hg_norm:2.0183e+01 cos_g_hg:4.6694e-01 v_par:2.5610e-04 v_perp:1.6770e+00 L1_cos_v_neg_g:7.7206e-03 L1_v_norm:2.9275e-01 L2_cos_v_neg_g:1.1668e-02 L2_v_norm:2.7800e-01 L3_cos_v_neg_g:4.2945e-03 L3_v_norm:2.8291e-01 L4_cos_v_neg_g:1.8465e-03 L4_v_norm:2.9396e-01 L5_cos_v_neg_g:1.3974e-03 L5_v_norm:2.9995e-01 L6_cos_v_neg_g:2.4311e-03 L6_v_norm:3.0217e-01 L7_cos_v_neg_g:2.2509e-03 L7_v_norm:3.0229e-01 L8_cos_v_neg_g:4.2331e-03 L8_v_norm:3.0282e-01 L9_cos_v_neg_g:3.0437e-03 L9_v_norm:3.0322e-01 L10_cos_v_neg_g:3.1247e-03 L10_v_norm:3.0290e-01 L11_cos_v_neg_g:3.3187e-03 L11_v_norm:3.0235e-01 L12_cos_v_neg_g:2.3326e-03 L12_v_norm:3.0119e-01 +step:2500 train loss:3.625251 +step:2501 train loss:3.694941 +step:2502 train loss:3.678352 +step:2503 train loss:3.603452 +step:2504 train 
loss:3.643956 +step:2505 train loss:3.660305 +step:2506 train loss:3.626563 +step:2507 train loss:3.659273 +step:2508 train loss:3.603474 +step:2509 train loss:3.628990 +step:2510 train loss:3.617749 +step:2511 train loss:3.663568 +step:2512 train loss:3.709118 +step:2513 train loss:3.658480 +step:2514 train loss:3.644231 +step:2515 train loss:3.775794 +step:2516 train loss:3.658633 +step:2517 train loss:3.724355 +step:2518 train loss:3.687244 +step:2519 train loss:3.667222 +step:2520 train loss:3.673190 +step:2521 train loss:3.648777 +step:2522 train loss:3.682900 +step:2523 train loss:3.601225 +step:2524 train loss:3.659220 +step:2525 train loss:3.652612 +step:2526 train loss:3.703927 +step:2527 train loss:3.700417 +step:2528 train loss:3.680502 +step:2529 train loss:3.698332 +step:2530 train loss:3.674340 +step:2531 train loss:3.612595 +step:2532 train loss:3.715265 +step:2533 train loss:3.611053 +step:2534 train loss:3.705214 +step:2535 train loss:3.656609 +step:2536 train loss:3.580670 +step:2537 train loss:3.693707 +step:2538 train loss:3.671804 +step:2539 train loss:3.694910 +step:2540 train loss:3.624841 +step:2541 train loss:3.659461 +step:2542 train loss:3.668151 +step:2543 train loss:3.654144 +step:2544 train loss:3.645734 +step:2545 train loss:3.630963 +step:2546 train loss:3.597173 +step:2547 train loss:3.645093 +step:2548 train loss:3.666269 +step:2549 train loss:3.670951 +step:2550 train loss:3.799948 +step:2551 train loss:3.878128 +step:2552 train loss:3.614457 +step:2553 train loss:3.641080 +step:2554 train loss:3.786544 +step:2555 train loss:3.671188 +step:2556 train loss:3.594058 +step:2557 train loss:3.693245 +step:2558 train loss:3.687463 +step:2559 train loss:3.640633 +step:2560 train loss:3.634515 +step:2561 train loss:3.717982 +step:2562 train loss:3.674947 +step:2563 train loss:3.610731 +step:2564 train loss:3.677742 +step:2565 train loss:3.657552 +step:2566 train loss:3.638415 +step:2567 train loss:3.618973 +step:2568 train loss:3.666456 +step:2569 train loss:3.682765 +step:2570 train loss:3.630991 +step:2571 train loss:3.719828 +step:2572 train loss:3.674034 +step:2573 train loss:3.606733 +step:2574 train loss:3.659666 +step:2575 train loss:3.703182 +step:2576 train loss:3.652606 +step:2577 train loss:3.619666 +step:2578 train loss:3.656411 +step:2579 train loss:3.633296 +step:2580 train loss:3.607161 +step:2581 train loss:3.620423 +step:2582 train loss:3.627746 +step:2583 train loss:3.652723 +step:2584 train loss:3.671230 +step:2585 train loss:3.631672 +step:2586 train loss:3.655335 +step:2587 train loss:3.584698 +step:2588 train loss:3.621466 +step:2589 train loss:3.698131 +step:2590 train loss:3.619332 +step:2591 train loss:3.683172 +step:2592 train loss:3.727341 +step:2593 train loss:3.683681 +step:2594 train loss:3.647389 +step:2595 train loss:3.651523 +step:2596 train loss:3.693236 +step:2597 train loss:3.574189 +step:2598 train loss:3.734719 +step:2599 train loss:3.682379 +step:2600 train loss:3.712784 +step:2601 train loss:3.650907 +step:2602 train loss:3.676015 +step:2603 train loss:3.675680 +step:2604 train loss:3.596322 +step:2605 train loss:3.730554 +step:2606 train loss:3.667971 +step:2607 train loss:3.627168 +step:2608 train loss:3.606676 +step:2609 train loss:3.628152 +step:2610 train loss:3.651269 +step:2611 train loss:3.691748 +step:2612 train loss:3.653489 +step:2613 train loss:3.627398 +step:2614 train loss:3.616188 +step:2615 train loss:3.610871 +step:2616 train loss:3.692247 +step:2617 train loss:3.650594 +step:2618 train loss:3.615844 
+step:2619 train loss:3.631207 +step:2620 train loss:3.631461 +step:2621 train loss:3.637034 +step:2622 train loss:3.713676 +step:2623 train loss:3.584574 +step:2624 train loss:3.602320 +step:2625 train loss:3.672258 +step:2626 train loss:3.665070 +step:2627 train loss:3.642809 +step:2628 train loss:3.703266 +step:2629 train loss:3.643384 +step:2630 train loss:3.641411 +step:2631 train loss:3.670859 +step:2632 train loss:3.637077 +step:2633 train loss:3.622836 +step:2634 train loss:3.668290 +step:2635 train loss:3.654171 +step:2636 train loss:3.701584 +step:2637 train loss:3.656819 +step:2638 train loss:3.639339 +step:2639 train loss:3.688756 +step:2640 train loss:3.607327 +step:2641 train loss:3.665775 +step:2642 train loss:3.588480 +step:2643 train loss:3.585391 +step:2644 train loss:3.675483 +step:2645 train loss:3.620042 +step:2646 train loss:3.648578 +step:2647 train loss:3.669705 +step:2648 train loss:3.702647 +step:2649 train loss:3.616321 +step:2650 train loss:3.604325 +step:2651 train loss:3.645617 +step:2652 train loss:3.621833 +step:2653 train loss:3.688171 +step:2654 train loss:3.642938 +step:2655 train loss:3.629815 +step:2656 train loss:3.652094 +step:2657 train loss:3.679512 +step:2658 train loss:3.686496 +step:2659 train loss:3.664443 +step:2660 train loss:3.651701 +step:2661 train loss:3.698910 +step:2662 train loss:3.672745 +step:2663 train loss:3.651645 +step:2664 train loss:3.659071 +step:2665 train loss:3.610296 +step:2666 train loss:3.641282 +step:2667 train loss:3.650829 +step:2668 train loss:3.625364 +step:2669 train loss:3.637121 +step:2670 train loss:3.657240 +step:2671 train loss:3.632058 +step:2672 train loss:3.655846 +step:2673 train loss:3.586456 +step:2674 train loss:3.683721 +step:2675 train loss:3.652346 +step:2676 train loss:3.674402 +step:2677 train loss:3.651610 +step:2678 train loss:3.641767 +step:2679 train loss:3.623289 +step:2680 train loss:3.603903 +step:2681 train loss:3.578835 +step:2682 train loss:3.667221 +step:2683 train loss:3.638107 +step:2684 train loss:3.669929 +step:2685 train loss:3.587909 +step:2686 train loss:3.599879 +step:2687 train loss:3.677685 +step:2688 train loss:3.690683 +step:2689 train loss:3.593521 +step:2690 train loss:3.684349 +step:2691 train loss:3.651642 +step:2692 train loss:3.676715 +step:2693 train loss:3.728658 +step:2694 train loss:3.629900 +step:2695 train loss:3.648740 +step:2696 train loss:3.652452 +step:2697 train loss:3.644663 +step:2698 train loss:3.658088 +step:2699 train loss:3.674244 +step:2700 train loss:3.641683 +step:2701 train loss:3.711949 +step:2702 train loss:3.647714 +step:2703 train loss:3.606310 +step:2704 train loss:3.685194 +step:2705 train loss:3.666542 +step:2706 train loss:3.601139 +step:2707 train loss:3.567798 +step:2708 train loss:3.665790 +step:2709 train loss:3.643319 +step:2710 train loss:3.647078 +step:2711 train loss:3.616899 +step:2712 train loss:3.683118 +step:2713 train loss:3.679289 +step:2714 train loss:3.624659 +step:2715 train loss:3.619891 +step:2716 train loss:3.691987 +step:2717 train loss:3.652646 +step:2718 train loss:3.652015 +step:2719 train loss:3.646915 +step:2720 train loss:3.615929 +step:2721 train loss:3.694976 +step:2722 train loss:3.621143 +step:2723 train loss:3.614488 +step:2724 train loss:3.633976 +step:2725 train loss:3.634073 +step:2726 train loss:3.607981 +step:2727 train loss:3.666824 +step:2728 train loss:3.604390 +step:2729 train loss:3.734993 +step:2730 train loss:3.680665 +step:2731 train loss:3.719030 +step:2732 train loss:3.626096 +step:2733 train 
loss:3.620532 +step:2734 train loss:3.674414 +step:2735 train loss:3.671131 +step:2736 train loss:3.593431 +step:2737 train loss:3.648249 +step:2738 train loss:3.709470 +step:2739 train loss:3.625449 +step:2740 train loss:3.626814 +step:2741 train loss:3.611329 +step:2742 train loss:3.542498 +step:2743 train loss:3.643560 +step:2744 train loss:3.665050 +step:2745 train loss:3.621284 +step:2746 train loss:3.634438 +step:2747 train loss:3.619498 +step:2748 train loss:3.583386 +step:2749 train loss:3.651386 +step:2750 validation loss:3.571074 +step:2750 train loss:3.656546 +step:2751 train loss:3.681443 +step:2752 train loss:3.662848 +step:2753 train loss:3.653683 +step:2754 train loss:3.595396 +step:2755 train loss:3.663618 +step:2756 train loss:3.637121 +step:2757 train loss:3.627246 +step:2758 train loss:3.655099 +step:2759 train loss:3.664709 +step:2760 train loss:3.576654 +step:2761 train loss:3.588599 +step:2762 train loss:3.605705 +step:2763 train loss:3.624568 +step:2764 train loss:3.568272 +step:2765 train loss:3.620605 +step:2766 train loss:3.711802 +step:2767 train loss:3.582719 +step:2768 train loss:3.646045 +step:2769 train loss:3.619236 +step:2770 train loss:3.633701 +step:2771 train loss:3.659071 +step:2772 train loss:3.626030 +step:2773 train loss:3.629229 +step:2774 train loss:3.621975 +step:2775 train loss:3.634776 +step:2776 train loss:3.589542 +step:2777 train loss:3.622106 +step:2778 train loss:3.628902 +step:2779 train loss:3.658567 +step:2780 train loss:3.630995 +step:2781 train loss:3.616449 +step:2782 train loss:3.604674 +step:2783 train loss:3.632246 +step:2784 train loss:3.642331 +step:2785 train loss:3.716875 +step:2786 train loss:3.682462 +step:2787 train loss:3.638488 +step:2788 train loss:3.636315 +step:2789 train loss:3.631334 +step:2790 train loss:3.571685 +step:2791 train loss:3.668633 +step:2792 train loss:3.659902 +step:2793 train loss:3.624477 +step:2794 train loss:3.633790 +step:2795 train loss:3.648010 +step:2796 train loss:3.638473 +step:2797 train loss:3.687758 +step:2798 train loss:3.671559 +step:2799 train loss:3.581203 +step:2800 train loss:3.624358 +step:2801 train loss:3.661585 +step:2802 train loss:3.687560 +step:2803 train loss:3.662051 +step:2804 train loss:3.594537 +step:2805 train loss:3.638675 +step:2806 train loss:3.627857 +step:2807 train loss:3.658659 +step:2808 train loss:3.597816 +step:2809 train loss:3.671836 +step:2810 train loss:3.656811 +step:2811 train loss:3.647093 +step:2812 train loss:3.700447 +step:2813 train loss:3.664741 +step:2814 train loss:3.650604 +step:2815 train loss:3.665250 +step:2816 train loss:3.669183 +step:2817 train loss:3.603094 +step:2818 train loss:3.709884 +step:2819 train loss:3.633090 +step:2820 train loss:3.629954 +step:2821 train loss:3.607573 +step:2822 train loss:3.651623 +step:2823 train loss:3.600600 +step:2824 train loss:3.502046 +step:2825 train loss:3.641263 +step:2826 train loss:3.638643 +step:2827 train loss:3.668076 +step:2828 train loss:3.664916 +step:2829 train loss:3.644846 +step:2830 train loss:3.675423 +step:2831 train loss:3.618202 +step:2832 train loss:3.588452 +step:2833 train loss:3.648383 +step:2834 train loss:3.596107 +step:2835 train loss:3.634481 +step:2836 train loss:3.638402 +step:2837 train loss:3.639949 +step:2838 train loss:3.581548 +step:2839 train loss:3.680709 +step:2840 train loss:3.637610 +step:2841 train loss:3.717933 +step:2842 train loss:3.660916 +step:2843 train loss:3.653460 +step:2844 train loss:3.680373 +step:2845 train loss:3.633922 +step:2846 train loss:3.586600 
+step:2847 train loss:3.680844 +step:2848 train loss:3.634237 +step:2849 train loss:3.623139 +step:2850 train loss:3.679688 +step:2851 train loss:3.635610 +step:2852 train loss:3.717203 +step:2853 train loss:3.629969 +step:2854 train loss:3.582219 +step:2855 train loss:3.650651 +step:2856 train loss:3.571830 +step:2857 train loss:3.680574 +step:2858 train loss:3.637467 +step:2859 train loss:3.615566 +step:2860 train loss:3.613107 +step:2861 train loss:3.593948 +step:2862 train loss:3.627057 +step:2863 train loss:3.609423 +step:2864 train loss:3.615444 +step:2865 train loss:3.692592 +step:2866 train loss:3.702792 +step:2867 train loss:3.643758 +step:2868 train loss:3.644526 +step:2869 train loss:3.602247 +step:2870 train loss:3.690269 +step:2871 train loss:3.689357 +step:2872 train loss:3.651762 +step:2873 train loss:3.654568 +step:2874 train loss:3.636920 +step:2875 train loss:3.587921 +step:2876 train loss:3.629323 +step:2877 train loss:3.613596 +step:2878 train loss:3.627405 +step:2879 train loss:3.597540 +step:2880 train loss:3.614679 +step:2881 train loss:3.609272 +step:2882 train loss:3.544458 +step:2883 train loss:3.624732 +step:2884 train loss:3.702949 +step:2885 train loss:3.593437 +step:2886 train loss:3.646406 +step:2887 train loss:3.667378 +step:2888 train loss:3.638083 +step:2889 train loss:3.624453 +step:2890 train loss:3.597039 +step:2891 train loss:3.638237 +step:2892 train loss:3.639116 +step:2893 train loss:3.624159 +step:2894 train loss:3.596928 +step:2895 train loss:3.648953 +step:2896 train loss:3.689063 +step:2897 train loss:3.670823 +step:2898 train loss:3.807666 +step:2899 train loss:3.560188 +step:2900 train loss:3.633523 +step:2901 train loss:3.585428 +step:2902 train loss:3.585598 +step:2903 train loss:3.605281 +step:2904 train loss:3.628189 +step:2905 train loss:3.687736 +step:2906 train loss:3.662373 +step:2907 train loss:3.836991 +step:2908 train loss:3.579008 +step:2909 train loss:3.659924 +step:2910 train loss:3.632486 +step:2911 train loss:3.662985 +step:2912 train loss:3.614968 +step:2913 train loss:3.652105 +step:2914 train loss:3.682499 +step:2915 train loss:3.673809 +step:2916 train loss:3.627697 +step:2917 train loss:3.670078 +step:2918 train loss:3.659080 +step:2919 train loss:3.604348 +step:2920 train loss:3.656121 +step:2921 train loss:3.609659 +step:2922 train loss:3.633543 +step:2923 train loss:3.700853 +step:2924 train loss:3.634408 +step:2925 train loss:3.589291 +step:2926 train loss:3.679427 +step:2927 train loss:3.589453 +step:2928 train loss:3.558369 +step:2929 train loss:3.575449 +step:2930 train loss:3.592764 +step:2931 train loss:3.749712 +step:2932 train loss:3.667582 +step:2933 train loss:3.630524 +step:2934 train loss:3.628206 +step:2935 train loss:3.647647 +step:2936 train loss:3.593062 +step:2937 train loss:3.616174 +step:2938 train loss:3.635597 +step:2939 train loss:3.710265 +step:2940 train loss:3.609001 +step:2941 train loss:3.645296 +step:2942 train loss:3.603164 +step:2943 train loss:3.885486 +step:2944 train loss:3.710068 +step:2945 train loss:3.665889 +step:2946 train loss:3.677804 +step:2947 train loss:3.638983 +step:2948 train loss:3.593986 +step:2949 train loss:3.680695 +step:2950 train loss:3.640638 +step:2951 train loss:3.535997 +step:2952 train loss:3.608216 +step:2953 train loss:3.516361 +step:2954 train loss:3.610822 +step:2955 train loss:3.677503 +step:2956 train loss:3.624213 +step:2957 train loss:3.631544 +step:2958 train loss:3.581358 +step:2959 train loss:3.605723 +step:2960 train loss:3.697055 +step:2961 train 
loss:3.559440 +step:2962 train loss:3.636903 +step:2963 train loss:3.635472 +step:2964 train loss:3.613839 +step:2965 train loss:3.641823 +step:2966 train loss:3.618503 +step:2967 train loss:3.614798 +step:2968 train loss:3.591363 +step:2969 train loss:3.597502 +step:2970 train loss:3.665200 +step:2971 train loss:3.596169 +step:2972 train loss:3.577751 +step:2973 train loss:3.573877 +step:2974 train loss:3.613321 +step:2975 train loss:3.579210 +step:2976 train loss:3.619012 +step:2977 train loss:3.608963 +step:2978 train loss:3.692331 +step:2979 train loss:3.671947 +step:2980 train loss:3.687165 +step:2981 train loss:3.631061 +step:2982 train loss:3.625300 +step:2983 train loss:3.575660 +step:2984 train loss:3.554458 +step:2985 train loss:3.665886 +step:2986 train loss:3.560340 +step:2987 train loss:3.689290 +step:2988 train loss:3.617590 +step:2989 train loss:3.646223 +step:2990 train loss:3.597118 +step:2991 train loss:3.665179 +step:2992 train loss:3.659243 +step:2993 train loss:3.624335 +step:2994 train loss:3.616401 +step:2995 train loss:3.685154 +step:2996 train loss:3.613134 +step:2997 train loss:3.517996 +step:2998 train loss:3.636622 +step:2999 train loss:3.680866 +step:3000 validation loss:3.552338 total_sharp:5.3288e-03 L1_sharp:8.3751e-03 L2_sharp:4.2949e-03 L3_sharp:2.9476e-03 L4_sharp:1.2556e-03 L5_sharp:2.0063e-03 L6_sharp:1.8223e-03 L7_sharp:2.2416e-03 L8_sharp:1.9949e-03 L9_sharp:1.4994e-03 L10_sharp:1.1519e-03 L11_sharp:1.0837e-03 L12_sharp:1.3760e-03 total_fnorm:1.6754e+00 total_l1_linf:1.3737e+04 total_spectral:1.6754e+00 L1_fnorm:2.9175e-01 L2_fnorm:2.7577e-01 L3_fnorm:2.7920e-01 L4_fnorm:2.9244e-01 L5_fnorm:2.9878e-01 L6_fnorm:3.0163e-01 L7_fnorm:3.0130e-01 L8_fnorm:3.0195e-01 L9_fnorm:3.0229e-01 L10_fnorm:3.0194e-01 L11_fnorm:3.0099e-01 L12_fnorm:3.0052e-01 L1_l1linf:3.3774e-01 L2_l1linf:4.1864e-01 L3_l1linf:4.2987e-01 L4_l1linf:4.2531e-01 L5_l1linf:4.0568e-01 L6_l1linf:3.4495e-01 L7_l1linf:3.3823e-01 L8_l1linf:3.5043e-01 L9_l1linf:3.8367e-01 L10_l1linf:3.9509e-01 L11_l1linf:4.3138e-01 L12_l1linf:4.3569e-01 L1_spectral:7.6055e-03 L2_spectral:9.4484e-03 L3_spectral:9.6819e-03 L4_spectral:9.5799e-03 L5_spectral:9.1210e-03 L6_spectral:7.8458e-03 L7_spectral:7.6616e-03 L8_spectral:7.8641e-03 L9_spectral:8.6068e-03 L10_spectral:8.9176e-03 L11_spectral:9.5815e-03 L12_spectral:9.7225e-03 ip_v_neg_g:8.1690e-03 cos_v_neg_g:3.9555e-03 v_norm:1.6754e+00 g_norm:1.2327e+00 hv_norm:4.4212e-01 cos_v_hv:2.0193e-02 hg_norm:1.5019e+01 cos_g_hg:4.7641e-01 v_par:2.1959e-04 v_perp:1.6754e+00 L1_cos_v_neg_g:6.4652e-03 L1_v_norm:2.9175e-01 L2_cos_v_neg_g:8.3437e-03 L2_v_norm:2.7577e-01 L3_cos_v_neg_g:7.0197e-03 L3_v_norm:2.7920e-01 L4_cos_v_neg_g:5.9330e-03 L4_v_norm:2.9244e-01 L5_cos_v_neg_g:7.3643e-03 L5_v_norm:2.9878e-01 L6_cos_v_neg_g:7.0452e-03 L6_v_norm:3.0163e-01 L7_cos_v_neg_g:7.9014e-03 L7_v_norm:3.0130e-01 L8_cos_v_neg_g:7.9650e-03 L8_v_norm:3.0195e-01 L9_cos_v_neg_g:7.6529e-03 L9_v_norm:3.0229e-01 L10_cos_v_neg_g:6.7386e-03 L10_v_norm:3.0194e-01 L11_cos_v_neg_g:5.8660e-03 L11_v_norm:3.0099e-01 L12_cos_v_neg_g:4.3803e-03 L12_v_norm:3.0052e-01 +step:3000 train loss:3.570775 +step:3001 train loss:3.620186 +step:3002 train loss:3.615638 +step:3003 train loss:3.612538 +step:3004 train loss:3.644556 +step:3005 train loss:3.541524 +step:3006 train loss:3.589024 +step:3007 train loss:3.621821 +step:3008 train loss:3.669501 +step:3009 train loss:3.621580 +step:3010 train loss:3.645851 +step:3011 train loss:3.624649 +step:3012 train loss:3.604994 +step:3013 train loss:3.647398 +step:3014 
train loss:3.606690 +step:3015 train loss:3.608632 +step:3016 train loss:3.620395 +step:3017 train loss:3.649898 +step:3018 train loss:3.577814 +step:3019 train loss:3.615155 +step:3020 train loss:3.634727 +step:3021 train loss:3.594752 +step:3022 train loss:3.689109 +step:3023 train loss:3.638301 +step:3024 train loss:3.621354 +step:3025 train loss:3.638342 +step:3026 train loss:3.605278 +step:3027 train loss:3.588168 +step:3028 train loss:3.636257 +step:3029 train loss:3.624191 +step:3030 train loss:3.597836 +step:3031 train loss:3.578756 +step:3032 train loss:3.573219 +step:3033 train loss:3.592623 +step:3034 train loss:3.640972 +step:3035 train loss:3.619388 +step:3036 train loss:3.584579 +step:3037 train loss:3.543996 +step:3038 train loss:3.660352 +step:3039 train loss:3.543028 +step:3040 train loss:3.524368 +step:3041 train loss:3.654392 +step:3042 train loss:3.587489 +step:3043 train loss:3.650492 +step:3044 train loss:3.546868 +step:3045 train loss:3.592992 +step:3046 train loss:3.570349 +step:3047 train loss:3.590380 +step:3048 train loss:3.568745 +step:3049 train loss:3.638250 +step:3050 train loss:3.530054 +step:3051 train loss:3.540676 +step:3052 train loss:3.563037 +step:3053 train loss:3.637867 +step:3054 train loss:3.709805 +step:3055 train loss:3.549292 +step:3056 train loss:3.573009 +step:3057 train loss:3.613966 +step:3058 train loss:3.564495 +step:3059 train loss:3.587939 +step:3060 train loss:3.592113 +step:3061 train loss:3.567836 +step:3062 train loss:3.625877 +step:3063 train loss:3.609601 +step:3064 train loss:3.639334 +step:3065 train loss:3.652117 +step:3066 train loss:3.543842 +step:3067 train loss:3.595211 +step:3068 train loss:3.645494 +step:3069 train loss:3.665313 +step:3070 train loss:3.588624 +step:3071 train loss:3.613389 +step:3072 train loss:3.610874 +step:3073 train loss:3.647240 +step:3074 train loss:3.582134 +step:3075 train loss:3.621514 +step:3076 train loss:3.554492 +step:3077 train loss:3.554344 +step:3078 train loss:3.587570 +step:3079 train loss:3.632934 +step:3080 train loss:3.624327 +step:3081 train loss:3.661575 +step:3082 train loss:3.642776 +step:3083 train loss:3.569568 +step:3084 train loss:3.653636 +step:3085 train loss:3.578402 +step:3086 train loss:3.644518 +step:3087 train loss:3.609262 +step:3088 train loss:3.688108 +step:3089 train loss:3.564882 +step:3090 train loss:3.635614 +step:3091 train loss:3.568975 +step:3092 train loss:3.578948 +step:3093 train loss:3.608800 +step:3094 train loss:3.590967 +step:3095 train loss:3.679238 +step:3096 train loss:3.603975 +step:3097 train loss:3.612947 +step:3098 train loss:3.596783 +step:3099 train loss:3.604419 +step:3100 train loss:3.632256 +step:3101 train loss:3.712090 +step:3102 train loss:3.636607 +step:3103 train loss:3.561940 +step:3104 train loss:3.642942 +step:3105 train loss:3.615786 +step:3106 train loss:3.609806 +step:3107 train loss:3.596103 +step:3108 train loss:3.566324 +step:3109 train loss:3.624097 +step:3110 train loss:3.550969 +step:3111 train loss:3.588199 +step:3112 train loss:3.521595 +step:3113 train loss:3.645941 +step:3114 train loss:3.558689 +step:3115 train loss:3.602597 +step:3116 train loss:3.484556 +step:3117 train loss:3.490033 +step:3118 train loss:3.600787 +step:3119 train loss:3.603374 +step:3120 train loss:3.607710 +step:3121 train loss:3.548579 +step:3122 train loss:3.635033 +step:3123 train loss:3.549532 +step:3124 train loss:3.616381 +step:3125 train loss:3.626380 +step:3126 train loss:3.733444 +step:3127 train loss:3.583460 +step:3128 train loss:3.609175 
+step:3129 train loss:3.593332 +step:3130 train loss:3.566608 +step:3131 train loss:3.648602 +step:3132 train loss:3.627694 +step:3133 train loss:3.606126 +step:3134 train loss:3.495339 +step:3135 train loss:3.598762 +step:3136 train loss:3.562599 +step:3137 train loss:3.703499 +step:3138 train loss:3.602130 +step:3139 train loss:3.582836 +step:3140 train loss:3.601757 +step:3141 train loss:3.605422 +step:3142 train loss:3.541516 +step:3143 train loss:3.626031 +step:3144 train loss:3.576015 +step:3145 train loss:3.561338 +step:3146 train loss:3.575881 +step:3147 train loss:3.682380 +step:3148 train loss:3.591141 +step:3149 train loss:3.644360 +step:3150 train loss:3.630832 +step:3151 train loss:3.596438 +step:3152 train loss:3.596555 +step:3153 train loss:3.552699 +step:3154 train loss:3.641243 +step:3155 train loss:3.576596 +step:3156 train loss:3.629674 +step:3157 train loss:3.630541 +step:3158 train loss:3.606045 +step:3159 train loss:3.543253 +step:3160 train loss:3.594018 +step:3161 train loss:3.570152 +step:3162 train loss:3.618069 +step:3163 train loss:3.606774 +step:3164 train loss:3.581232 +step:3165 train loss:3.601039 +step:3166 train loss:3.636404 +step:3167 train loss:3.596050 +step:3168 train loss:3.671395 +step:3169 train loss:3.589074 +step:3170 train loss:3.570847 +step:3171 train loss:3.562186 +step:3172 train loss:3.567149 +step:3173 train loss:3.514364 +step:3174 train loss:3.622051 +step:3175 train loss:3.590321 +step:3176 train loss:3.605119 +step:3177 train loss:3.571814 +step:3178 train loss:3.549697 +step:3179 train loss:3.627030 +step:3180 train loss:3.557938 +step:3181 train loss:3.639601 +step:3182 train loss:3.643575 +step:3183 train loss:3.586530 +step:3184 train loss:3.585964 +step:3185 train loss:3.644605 +step:3186 train loss:3.604408 +step:3187 train loss:3.620222 +step:3188 train loss:3.667032 +step:3189 train loss:3.601580 +step:3190 train loss:3.564770 +step:3191 train loss:3.562912 +step:3192 train loss:3.535080 +step:3193 train loss:3.606821 +step:3194 train loss:3.578292 +step:3195 train loss:3.558213 +step:3196 train loss:3.610713 +step:3197 train loss:3.572714 +step:3198 train loss:3.614946 +step:3199 train loss:3.586357 +step:3200 train loss:3.595940 +step:3201 train loss:3.556794 +step:3202 train loss:3.625215 +step:3203 train loss:3.682634 +step:3204 train loss:3.649554 +step:3205 train loss:3.495303 +step:3206 train loss:3.774787 +step:3207 train loss:3.529150 +step:3208 train loss:3.598866 +step:3209 train loss:3.588567 +step:3210 train loss:3.572730 +step:3211 train loss:3.602839 +step:3212 train loss:3.612362 +step:3213 train loss:3.550787 +step:3214 train loss:3.656883 +step:3215 train loss:3.662442 +step:3216 train loss:3.527477 +step:3217 train loss:3.613385 +step:3218 train loss:3.647860 +step:3219 train loss:3.567989 +step:3220 train loss:3.637924 +step:3221 train loss:3.552061 +step:3222 train loss:3.597435 +step:3223 train loss:3.612194 +step:3224 train loss:3.624225 +step:3225 train loss:3.548012 +step:3226 train loss:3.579533 +step:3227 train loss:3.608301 +step:3228 train loss:3.601307 +step:3229 train loss:3.635233 +step:3230 train loss:3.649468 +step:3231 train loss:3.592782 +step:3232 train loss:3.598945 +step:3233 train loss:3.570762 +step:3234 train loss:3.559316 +step:3235 train loss:3.559969 +step:3236 train loss:3.580301 +step:3237 train loss:3.583675 +step:3238 train loss:3.597916 +step:3239 train loss:3.499096 +step:3240 train loss:3.610067 +step:3241 train loss:3.609789 +step:3242 train loss:3.666445 +step:3243 train 
loss:3.603896 +step:3244 train loss:3.625497 +step:3245 train loss:3.527083 +step:3246 train loss:3.653113 +step:3247 train loss:3.597794 +step:3248 train loss:3.619723 +step:3249 train loss:3.562869 +step:3250 validation loss:3.527781 +step:3250 train loss:3.560717 +step:3251 train loss:3.675494 +step:3252 train loss:3.602649 +step:3253 train loss:3.605601 +step:3254 train loss:3.672303 +step:3255 train loss:3.617341 +step:3256 train loss:3.606559 +step:3257 train loss:3.591458 +step:3258 train loss:3.521327 +step:3259 train loss:3.503887 +step:3260 train loss:3.616106 +step:3261 train loss:3.593061 +step:3262 train loss:3.584431 +step:3263 train loss:3.571608 +step:3264 train loss:3.680414 +step:3265 train loss:3.588135 +step:3266 train loss:3.621437 +step:3267 train loss:3.580992 +step:3268 train loss:3.585970 +step:3269 train loss:3.599139 +step:3270 train loss:3.627311 +step:3271 train loss:3.587730 +step:3272 train loss:3.574096 +step:3273 train loss:3.573995 +step:3274 train loss:3.713080 +step:3275 train loss:3.582765 +step:3276 train loss:3.653195 +step:3277 train loss:3.590402 +step:3278 train loss:3.566604 +step:3279 train loss:3.587504 +step:3280 train loss:3.619616 +step:3281 train loss:3.542521 +step:3282 train loss:3.617100 +step:3283 train loss:3.585501 +step:3284 train loss:3.553903 +step:3285 train loss:3.565819 +step:3286 train loss:3.597089 +step:3287 train loss:3.532388 +step:3288 train loss:3.620420 +step:3289 train loss:3.559617 +step:3290 train loss:3.591464 +step:3291 train loss:3.547287 +step:3292 train loss:3.578632 +step:3293 train loss:3.618027 +step:3294 train loss:3.636666 +step:3295 train loss:3.540029 +step:3296 train loss:3.602783 +step:3297 train loss:3.558253 +step:3298 train loss:3.562972 +step:3299 train loss:3.687431 +step:3300 train loss:3.537232 +step:3301 train loss:3.607244 +step:3302 train loss:3.587523 +step:3303 train loss:3.592527 +step:3304 train loss:3.563190 +step:3305 train loss:3.651375 +step:3306 train loss:3.586482 +step:3307 train loss:3.603002 +step:3308 train loss:3.563012 +step:3309 train loss:3.618213 +step:3310 train loss:3.536685 +step:3311 train loss:3.592236 +step:3312 train loss:3.560884 +step:3313 train loss:3.595446 +step:3314 train loss:3.593095 +step:3315 train loss:3.670944 +step:3316 train loss:3.525669 +step:3317 train loss:3.608501 +step:3318 train loss:3.626889 +step:3319 train loss:3.547593 +step:3320 train loss:3.716634 +step:3321 train loss:3.612289 +step:3322 train loss:3.616793 +step:3323 train loss:3.719648 +step:3324 train loss:3.637892 +step:3325 train loss:3.607371 +step:3326 train loss:3.598604 +step:3327 train loss:3.613420 +step:3328 train loss:3.595762 +step:3329 train loss:3.593789 +step:3330 train loss:3.584975 +step:3331 train loss:3.631342 +step:3332 train loss:3.651008 +step:3333 train loss:3.617433 +step:3334 train loss:3.546875 +step:3335 train loss:3.570044 +step:3336 train loss:3.596649 +step:3337 train loss:3.597812 +step:3338 train loss:3.584571 +step:3339 train loss:3.582642 +step:3340 train loss:3.620643 +step:3341 train loss:3.560638 +step:3342 train loss:3.613150 +step:3343 train loss:3.551562 +step:3344 train loss:3.608833 +step:3345 train loss:3.561212 +step:3346 train loss:3.572629 +step:3347 train loss:3.574026 +step:3348 train loss:3.601014 +step:3349 train loss:3.581355 +step:3350 train loss:3.607254 +step:3351 train loss:3.663213 +step:3352 train loss:3.604881 +step:3353 train loss:3.704725 +step:3354 train loss:3.550674 +step:3355 train loss:3.655586 +step:3356 train loss:3.604064 
+step:3357 train loss:3.615974 +step:3358 train loss:3.553311 +step:3359 train loss:3.584512 +step:3360 train loss:3.580208 +step:3361 train loss:3.582448 +step:3362 train loss:3.571921 +step:3363 train loss:3.575690 +step:3364 train loss:3.551774 +step:3365 train loss:3.594469 +step:3366 train loss:3.623934 +step:3367 train loss:3.579195 +step:3368 train loss:3.670884 +step:3369 train loss:3.584516 +step:3370 train loss:3.666320 +step:3371 train loss:3.630704 +step:3372 train loss:3.594090 +step:3373 train loss:3.607433 +step:3374 train loss:3.655926 +step:3375 train loss:3.586103 +step:3376 train loss:3.598404 +step:3377 train loss:3.580614 +step:3378 train loss:3.556461 +step:3379 train loss:3.634520 +step:3380 train loss:3.615099 +step:3381 train loss:3.598517 +step:3382 train loss:3.618794 +step:3383 train loss:3.623889 +step:3384 train loss:3.557905 +step:3385 train loss:3.603095 +step:3386 train loss:3.587462 +step:3387 train loss:3.661298 +step:3388 train loss:3.563052 +step:3389 train loss:3.778873 +step:3390 train loss:3.494108 +step:3391 train loss:3.580857 +step:3392 train loss:3.561917 +step:3393 train loss:3.595654 +step:3394 train loss:3.555455 +step:3395 train loss:3.624634 +step:3396 train loss:3.535993 +step:3397 train loss:3.617170 +step:3398 train loss:3.582685 +step:3399 train loss:3.603963 +step:3400 train loss:3.547820 +step:3401 train loss:3.589076 +step:3402 train loss:3.748904 +step:3403 train loss:3.636274 +step:3404 train loss:3.751366 +step:3405 train loss:3.610958 +step:3406 train loss:3.578605 +step:3407 train loss:3.580679 +step:3408 train loss:3.556567 +step:3409 train loss:3.524862 +step:3410 train loss:3.561745 +step:3411 train loss:3.628601 +step:3412 train loss:3.548512 +step:3413 train loss:3.544506 +step:3414 train loss:3.581503 +step:3415 train loss:3.556060 +step:3416 train loss:3.563658 +step:3417 train loss:3.639068 +step:3418 train loss:3.642038 +step:3419 train loss:3.598228 +step:3420 train loss:3.576497 +step:3421 train loss:3.605872 +step:3422 train loss:3.624863 +step:3423 train loss:3.642297 +step:3424 train loss:3.524539 +step:3425 train loss:3.547979 +step:3426 train loss:3.542024 +step:3427 train loss:3.606527 +step:3428 train loss:3.527221 +step:3429 train loss:3.591527 +step:3430 train loss:3.561150 +step:3431 train loss:3.614008 +step:3432 train loss:3.597785 +step:3433 train loss:3.558985 +step:3434 train loss:3.641317 +step:3435 train loss:3.582392 +step:3436 train loss:3.672060 +step:3437 train loss:3.498604 +step:3438 train loss:3.608545 +step:3439 train loss:3.581760 +step:3440 train loss:3.682191 +step:3441 train loss:3.573786 +step:3442 train loss:3.640776 +step:3443 train loss:3.574040 +step:3444 train loss:3.591172 +step:3445 train loss:3.639190 +step:3446 train loss:3.543403 +step:3447 train loss:3.619850 +step:3448 train loss:3.573459 +step:3449 train loss:3.600706 +step:3450 train loss:3.506965 +step:3451 train loss:3.627747 +step:3452 train loss:3.577489 +step:3453 train loss:3.636160 +step:3454 train loss:3.656796 +step:3455 train loss:3.707816 +step:3456 train loss:3.652410 +step:3457 train loss:3.651042 +step:3458 train loss:3.576358 +step:3459 train loss:3.585359 +step:3460 train loss:3.530877 +step:3461 train loss:3.593003 +step:3462 train loss:3.595535 +step:3463 train loss:3.566544 +step:3464 train loss:3.622582 +step:3465 train loss:3.549860 +step:3466 train loss:3.620154 +step:3467 train loss:3.574394 +step:3468 train loss:3.590530 +step:3469 train loss:3.598816 +step:3470 train loss:3.584090 +step:3471 train 
loss:3.625032 +step:3472 train loss:3.506644 +step:3473 train loss:3.636615 +step:3474 train loss:3.526389 +step:3475 train loss:3.613273 +step:3476 train loss:3.578367 +step:3477 train loss:3.603220 +step:3478 train loss:3.574102 +step:3479 train loss:3.606259 +step:3480 train loss:3.624870 +step:3481 train loss:3.602124 +step:3482 train loss:3.588912 +step:3483 train loss:3.727093 +step:3484 train loss:3.569877 +step:3485 train loss:3.556860 +step:3486 train loss:3.611043 +step:3487 train loss:3.651135 +step:3488 train loss:3.558782 +step:3489 train loss:3.608431 +step:3490 train loss:3.569977 +step:3491 train loss:3.616246 +step:3492 train loss:3.646977 +step:3493 train loss:3.620717 +step:3494 train loss:3.613153 +step:3495 train loss:3.594161 +step:3496 train loss:3.557257 +step:3497 train loss:3.669802 +step:3498 train loss:3.612017 +step:3499 train loss:3.544155 +step:3500 validation loss:3.514393 total_sharp:3.8672e-03 L1_sharp:6.5337e-03 L2_sharp:2.7323e-03 L3_sharp:1.6768e-03 L4_sharp:9.2736e-04 L5_sharp:1.2382e-03 L6_sharp:1.5214e-03 L7_sharp:1.9622e-03 L8_sharp:1.6429e-03 L9_sharp:1.1978e-03 L10_sharp:9.1366e-04 L11_sharp:8.9414e-04 L12_sharp:1.0221e-03 total_fnorm:1.6800e+00 total_l1_linf:1.3775e+04 total_spectral:1.6800e+00 L1_fnorm:2.9007e-01 L2_fnorm:2.7798e-01 L3_fnorm:2.8400e-01 L4_fnorm:2.9448e-01 L5_fnorm:2.9932e-01 L6_fnorm:3.0207e-01 L7_fnorm:3.0186e-01 L8_fnorm:3.0281e-01 L9_fnorm:3.0339e-01 L10_fnorm:3.0314e-01 L11_fnorm:3.0255e-01 L12_fnorm:3.0142e-01 L1_l1linf:3.1542e-01 L2_l1linf:4.0503e-01 L3_l1linf:4.0431e-01 L4_l1linf:3.9912e-01 L5_l1linf:3.5586e-01 L6_l1linf:3.3571e-01 L7_l1linf:3.1105e-01 L8_l1linf:3.2578e-01 L9_l1linf:3.4502e-01 L10_l1linf:3.6744e-01 L11_l1linf:3.9196e-01 L12_l1linf:3.8993e-01 L1_spectral:7.1077e-03 L2_spectral:9.2040e-03 L3_spectral:9.0356e-03 L4_spectral:9.0785e-03 L5_spectral:8.0507e-03 L6_spectral:7.5417e-03 L7_spectral:7.1151e-03 L8_spectral:7.3642e-03 L9_spectral:7.7495e-03 L10_spectral:8.3149e-03 L11_spectral:8.8021e-03 L12_spectral:8.7823e-03 ip_v_neg_g:5.5403e-03 cos_v_neg_g:2.3879e-03 v_norm:1.6800e+00 g_norm:1.3811e+00 hv_norm:3.9447e-01 cos_v_hv:1.6469e-02 hg_norm:2.7312e+01 cos_g_hg:5.4811e-01 v_par:1.3591e-04 v_perp:1.6800e+00 L1_cos_v_neg_g:3.0672e-03 L1_v_norm:2.9007e-01 L2_cos_v_neg_g:4.7432e-03 L2_v_norm:2.7798e-01 L3_cos_v_neg_g:5.3987e-03 L3_v_norm:2.8400e-01 L4_cos_v_neg_g:5.1701e-03 L4_v_norm:2.9448e-01 L5_cos_v_neg_g:5.1844e-03 L5_v_norm:2.9932e-01 L6_cos_v_neg_g:5.0880e-03 L6_v_norm:3.0207e-01 L7_cos_v_neg_g:5.5093e-03 L7_v_norm:3.0186e-01 L8_cos_v_neg_g:5.5168e-03 L8_v_norm:3.0281e-01 L9_cos_v_neg_g:3.7835e-03 L9_v_norm:3.0339e-01 L10_cos_v_neg_g:2.3624e-03 L10_v_norm:3.0314e-01 L11_cos_v_neg_g:3.7276e-03 L11_v_norm:3.0255e-01 L12_cos_v_neg_g:2.6088e-03 L12_v_norm:3.0142e-01 +step:3500 train loss:3.564669 +step:3501 train loss:3.691047 +step:3502 train loss:3.671486 +step:3503 train loss:3.622216 +step:3504 train loss:3.576839 +step:3505 train loss:3.589213 +step:3506 train loss:3.492992 +step:3507 train loss:3.611070 +step:3508 train loss:3.549751 +step:3509 train loss:3.622796 +step:3510 train loss:3.550934 +step:3511 train loss:3.592180 +step:3512 train loss:3.727481 +step:3513 train loss:3.546463 +step:3514 train loss:3.564232 +step:3515 train loss:3.817294 +step:3516 train loss:3.608158 +step:3517 train loss:3.565310 +step:3518 train loss:3.573474 +step:3519 train loss:3.562610 +step:3520 train loss:3.596974 +step:3521 train loss:3.584198 +step:3522 train loss:3.498529 +step:3523 train loss:3.601391 +step:3524 
train loss:3.581734 +step:3525 train loss:3.575212 +step:3526 train loss:3.595730 +step:3527 train loss:3.544924 +step:3528 train loss:3.597931 +step:3529 train loss:3.576577 +step:3530 train loss:3.574162 +step:3531 train loss:3.559457 +step:3532 train loss:3.752814 +step:3533 train loss:3.567215 +step:3534 train loss:3.585951 +step:3535 train loss:3.560800 +step:3536 train loss:3.558054 +step:3537 train loss:3.573467 +step:3538 train loss:3.598494 +step:3539 train loss:3.548215 +step:3540 train loss:3.614353 +step:3541 train loss:3.584767 +step:3542 train loss:3.591859 +step:3543 train loss:3.517800 +step:3544 train loss:3.530908 +step:3545 train loss:3.537920 +step:3546 train loss:3.602973 +step:3547 train loss:3.611202 +step:3548 train loss:3.586053 +step:3549 train loss:3.581196 +step:3550 train loss:3.574217 +step:3551 train loss:3.596239 +step:3552 train loss:3.494765 +step:3553 train loss:3.613688 +step:3554 train loss:3.609826 +step:3555 train loss:3.589478 +step:3556 train loss:3.616311 +step:3557 train loss:3.604848 +step:3558 train loss:3.578022 +step:3559 train loss:3.523936 +step:3560 train loss:3.618194 +step:3561 train loss:3.608099 +step:3562 train loss:3.786354 +step:3563 train loss:3.642832 +step:3564 train loss:3.600652 +step:3565 train loss:3.602223 +step:3566 train loss:3.575135 +step:3567 train loss:3.515035 +step:3568 train loss:3.544279 +step:3569 train loss:3.627925 +step:3570 train loss:3.655025 +step:3571 train loss:3.627928 +step:3572 train loss:3.620256 +step:3573 train loss:3.579271 +step:3574 train loss:3.580907 +step:3575 train loss:3.567622 +step:3576 train loss:3.549759 +step:3577 train loss:3.563158 +step:3578 train loss:3.642960 +step:3579 train loss:3.554099 +step:3580 train loss:3.635029 +step:3581 train loss:3.575876 +step:3582 train loss:3.632951 +step:3583 train loss:3.567296 +step:3584 train loss:3.546199 +step:3585 train loss:3.587532 +step:3586 train loss:3.545215 +step:3587 train loss:3.635220 +step:3588 train loss:3.770451 +step:3589 train loss:3.600301 +step:3590 train loss:3.580762 +step:3591 train loss:3.591424 +step:3592 train loss:3.552713 +step:3593 train loss:3.526487 +step:3594 train loss:3.581017 +step:3595 train loss:3.550075 +step:3596 train loss:3.632816 +step:3597 train loss:3.604927 +step:3598 train loss:3.559290 +step:3599 train loss:3.611834 +step:3600 train loss:3.546896 +step:3601 train loss:3.569430 +step:3602 train loss:3.553167 +step:3603 train loss:3.573306 +step:3604 train loss:3.595406 +step:3605 train loss:3.704142 +step:3606 train loss:3.602164 +step:3607 train loss:3.584956 +step:3608 train loss:3.600981 +step:3609 train loss:3.584212 +step:3610 train loss:3.554303 +step:3611 train loss:3.555567 +step:3612 train loss:3.626155 +step:3613 train loss:3.590680 +step:3614 train loss:3.551795 +step:3615 train loss:3.578519 +step:3616 train loss:3.534364 +step:3617 train loss:3.609539 +step:3618 train loss:3.561562 +step:3619 train loss:3.552665 +step:3620 train loss:3.570549 +step:3621 train loss:3.528298 +step:3622 train loss:3.636593 +step:3623 train loss:3.627165 +step:3624 train loss:3.598615 +step:3625 train loss:3.574268 +step:3626 train loss:3.588822 +step:3627 train loss:3.583704 +step:3628 train loss:3.565115 +step:3629 train loss:3.570635 +step:3630 train loss:3.654379 +step:3631 train loss:3.581864 +step:3632 train loss:3.607269 +step:3633 train loss:3.569512 +step:3634 train loss:3.574602 +step:3635 train loss:3.563140 +step:3636 train loss:3.636120 +step:3637 train loss:3.709915 +step:3638 train loss:3.626029 
+step:3639 train loss:3.611172 +step:3640 train loss:3.620887 +step:3641 train loss:3.656775 +step:3642 train loss:3.553367 +step:3643 train loss:3.724643 +step:3644 train loss:3.612211 +step:3645 train loss:3.589002 +step:3646 train loss:3.705441 +step:3647 train loss:3.592647 +step:3648 train loss:3.588418 +step:3649 train loss:3.535096 +step:3650 train loss:3.580229 +step:3651 train loss:3.575597 +step:3652 train loss:3.563508 +step:3653 train loss:3.503421 +step:3654 train loss:3.554837 +step:3655 train loss:3.550229 +step:3656 train loss:3.579971 +step:3657 train loss:3.600339 +step:3658 train loss:3.592978 +step:3659 train loss:3.577612 +step:3660 train loss:3.553416 +step:3661 train loss:3.577036 +step:3662 train loss:3.551749 +step:3663 train loss:3.587011 +step:3664 train loss:3.545880 +step:3665 train loss:3.589800 +step:3666 train loss:3.623984 +step:3667 train loss:3.719621 +step:3668 train loss:3.597175 +step:3669 train loss:3.554186 +step:3670 train loss:3.604657 +step:3671 train loss:3.563852 +step:3672 train loss:3.601398 +step:3673 train loss:3.582674 +step:3674 train loss:3.596260 +step:3675 train loss:3.610389 +step:3676 train loss:3.573359 +step:3677 train loss:3.532856 +step:3678 train loss:3.594662 +step:3679 train loss:3.499187 +step:3680 train loss:3.599696 +step:3681 train loss:3.632333 +step:3682 train loss:3.614188 +step:3683 train loss:3.555753 +step:3684 train loss:3.556194 +step:3685 train loss:3.587303 +step:3686 train loss:3.611391 +step:3687 train loss:3.570000 +step:3688 train loss:3.542296 +step:3689 train loss:3.574877 +step:3690 train loss:3.564182 +step:3691 train loss:3.549347 +step:3692 train loss:3.612819 +step:3693 train loss:3.746239 +step:3694 train loss:3.559511 +step:3695 train loss:3.610730 +step:3696 train loss:3.578262 +step:3697 train loss:3.570003 +step:3698 train loss:3.507957 +step:3699 train loss:3.534981 +step:3700 train loss:3.562965 +step:3701 train loss:3.583316 +step:3702 train loss:3.604490 +step:3703 train loss:3.562616 +step:3704 train loss:3.605706 +step:3705 train loss:3.587819 +step:3706 train loss:3.541454 +step:3707 train loss:3.591374 +step:3708 train loss:3.570420 +step:3709 train loss:3.491167 +step:3710 train loss:3.612378 +step:3711 train loss:3.560833 +step:3712 train loss:3.599852 +step:3713 train loss:3.552198 +step:3714 train loss:3.570414 +step:3715 train loss:3.686938 +step:3716 train loss:3.596341 +step:3717 train loss:3.568476 +step:3718 train loss:3.572165 +step:3719 train loss:3.568495 +step:3720 train loss:3.579143 +step:3721 train loss:3.634532 +step:3722 train loss:3.648607 +step:3723 train loss:3.535458 +step:3724 train loss:3.592425 +step:3725 train loss:3.572445 +step:3726 train loss:3.591020 +step:3727 train loss:3.665639 +step:3728 train loss:3.631249 +step:3729 train loss:3.525289 +step:3730 train loss:3.541803 +step:3731 train loss:3.567659 +step:3732 train loss:3.721948 +step:3733 train loss:3.581132 +step:3734 train loss:3.580259 +step:3735 train loss:3.521855 +step:3736 train loss:3.578154 +step:3737 train loss:3.627100 +step:3738 train loss:3.653332 +step:3739 train loss:3.566300 +step:3740 train loss:3.475673 +step:3741 train loss:3.675281 +step:3742 train loss:3.586128 +step:3743 train loss:3.564386 +step:3744 train loss:3.568977 +step:3745 train loss:3.577110 +step:3746 train loss:3.541130 +step:3747 train loss:3.561591 +step:3748 train loss:3.606033 +step:3749 train loss:3.585067 +step:3750 validation loss:3.500740 +step:3750 train loss:3.595111 +step:3751 train loss:3.687938 +step:3752 
train loss:3.626130 +step:3753 train loss:3.535880 +step:3754 train loss:3.588335 +step:3755 train loss:3.772365 +step:3756 train loss:3.545933 +step:3757 train loss:3.540673 +step:3758 train loss:3.575927 +step:3759 train loss:3.520339 +step:3760 train loss:3.516085 +step:3761 train loss:3.571046 +step:3762 train loss:3.562882 +step:3763 train loss:3.567938 +step:3764 train loss:3.552597 +step:3765 train loss:3.555979 +step:3766 train loss:3.525188 +step:3767 train loss:3.608138 +step:3768 train loss:3.550215 +step:3769 train loss:3.824354 +step:3770 train loss:3.604455 +step:3771 train loss:3.610526 +step:3772 train loss:3.572638 +step:3773 train loss:3.564123 +step:3774 train loss:3.566970 +step:3775 train loss:3.565326 +step:3776 train loss:3.560005 +step:3777 train loss:3.525189 +step:3778 train loss:3.543146 +step:3779 train loss:3.525612 +step:3780 train loss:3.610204 +step:3781 train loss:3.572150 +step:3782 train loss:3.494853 +step:3783 train loss:3.602329 +step:3784 train loss:3.612673 +step:3785 train loss:3.520219 +step:3786 train loss:3.627948 +step:3787 train loss:3.538142 +step:3788 train loss:3.555395 +step:3789 train loss:3.473448 +step:3790 train loss:3.579144 +step:3791 train loss:3.598586 +step:3792 train loss:3.567834 +step:3793 train loss:3.570266 +step:3794 train loss:3.598383 +step:3795 train loss:3.567429 +step:3796 train loss:3.583382 +step:3797 train loss:3.560361 +step:3798 train loss:3.567538 +step:3799 train loss:3.574495 +step:3800 train loss:3.484859 +step:3801 train loss:3.600026 +step:3802 train loss:3.526198 +step:3803 train loss:3.609846 +step:3804 train loss:3.621250 +step:3805 train loss:3.580528 +step:3806 train loss:3.596629 +step:3807 train loss:3.615376 +step:3808 train loss:3.575154 +step:3809 train loss:3.584890 +step:3810 train loss:3.586415 +step:3811 train loss:3.573688 +step:3812 train loss:3.571081 +step:3813 train loss:3.529899 +step:3814 train loss:3.578422 +step:3815 train loss:3.577753 +step:3816 train loss:3.592233 +step:3817 train loss:3.611475 +step:3818 train loss:3.583254 +step:3819 train loss:3.597851 +step:3820 train loss:3.595319 +step:3821 train loss:3.550469 +step:3822 train loss:3.639586 +step:3823 train loss:3.527712 +step:3824 train loss:3.545284 +step:3825 train loss:3.549516 +step:3826 train loss:3.678000 +step:3827 train loss:3.636769 +step:3828 train loss:3.528601 +step:3829 train loss:3.549836 +step:3830 train loss:3.605421 +step:3831 train loss:3.543329 +step:3832 train loss:3.601614 +step:3833 train loss:3.544014 +step:3834 train loss:3.510314 +step:3835 train loss:3.555130 +step:3836 train loss:3.527764 +step:3837 train loss:3.595028 +step:3838 train loss:3.547486 +step:3839 train loss:3.594497 +step:3840 train loss:3.604655 +step:3841 train loss:3.557292 +step:3842 train loss:3.581494 +step:3843 train loss:3.597021 +step:3844 train loss:3.572644 +step:3845 train loss:3.596274 +step:3846 train loss:3.631349 +step:3847 train loss:3.532103 +step:3848 train loss:3.537094 +step:3849 train loss:3.560692 +step:3850 train loss:3.571886 +step:3851 train loss:3.718566 +step:3852 train loss:3.691975 +step:3853 train loss:3.587886 +step:3854 train loss:3.552678 +step:3855 train loss:3.597757 +step:3856 train loss:3.523409 +step:3857 train loss:3.585839 +step:3858 train loss:3.498817 +step:3859 train loss:3.544734 +step:3860 train loss:3.613113 +step:3861 train loss:3.585822 +step:3862 train loss:3.523244 +step:3863 train loss:3.573502 +step:3864 train loss:3.548921 +step:3865 train loss:3.581908 +step:3866 train loss:3.601951 
+step:3867 train loss:3.596651 +step:3868 train loss:3.544140 +step:3869 train loss:3.547374 +step:3870 train loss:3.523116 +step:3871 train loss:3.531438 +step:3872 train loss:3.655542 +step:3873 train loss:3.577600 +step:3874 train loss:3.590064 +step:3875 train loss:3.702607 +step:3876 train loss:3.575101 +step:3877 train loss:3.601599 +step:3878 train loss:3.626440 +step:3879 train loss:3.614324 +step:3880 train loss:3.696999 +step:3881 train loss:3.518164 +step:3882 train loss:3.553581 +step:3883 train loss:3.567238 +step:3884 train loss:3.557155 +step:3885 train loss:3.575008 +step:3886 train loss:3.634766 +step:3887 train loss:3.613744 +step:3888 train loss:3.576783 +step:3889 train loss:3.547906 +step:3890 train loss:3.585403 +step:3891 train loss:3.595944 +step:3892 train loss:3.508205 +step:3893 train loss:3.615702 +step:3894 train loss:3.562694 +step:3895 train loss:3.582197 +step:3896 train loss:3.575124 +step:3897 train loss:3.542996 +step:3898 train loss:3.602973 +step:3899 train loss:3.638941 +step:3900 train loss:3.596135 +step:3901 train loss:3.610516 +step:3902 train loss:3.533791 +step:3903 train loss:3.553953 +step:3904 train loss:3.584152 +step:3905 train loss:3.522986 +step:3906 train loss:3.558295 +step:3907 train loss:3.590628 +step:3908 train loss:3.668834 +step:3909 train loss:3.559958 +step:3910 train loss:3.584604 +step:3911 train loss:3.601330 +step:3912 train loss:3.551591 +step:3913 train loss:3.565103 +step:3914 train loss:3.585734 +step:3915 train loss:3.555962 +step:3916 train loss:3.591109 +step:3917 train loss:3.637804 +step:3918 train loss:3.604459 +step:3919 train loss:3.582320 +step:3920 train loss:3.560626 +step:3921 train loss:3.600465 +step:3922 train loss:3.605381 +step:3923 train loss:3.589201 +step:3924 train loss:3.529645 +step:3925 train loss:3.733689 +step:3926 train loss:3.574592 +step:3927 train loss:3.552790 +step:3928 train loss:3.636248 +step:3929 train loss:3.703318 +step:3930 train loss:3.590842 +step:3931 train loss:3.522308 +step:3932 train loss:3.579359 +step:3933 train loss:3.596012 +step:3934 train loss:3.543441 +step:3935 train loss:3.523874 +step:3936 train loss:3.613898 +step:3937 train loss:3.574215 +step:3938 train loss:3.587025 +step:3939 train loss:3.610589 +step:3940 train loss:3.560708 +step:3941 train loss:3.643063 +step:3942 train loss:3.604458 +step:3943 train loss:3.591207 +step:3944 train loss:3.638838 +step:3945 train loss:3.548314 +step:3946 train loss:3.492625 +step:3947 train loss:3.624798 +step:3948 train loss:3.593823 +step:3949 train loss:3.761037 +step:3950 train loss:3.558305 +step:3951 train loss:3.505372 +step:3952 train loss:3.444969 +step:3953 train loss:3.525877 +step:3954 train loss:3.569080 +step:3955 train loss:3.599151 +step:3956 train loss:3.563780 +step:3957 train loss:3.605010 +step:3958 train loss:3.591226 +step:3959 train loss:3.623337 +step:3960 train loss:3.549635 +step:3961 train loss:3.577093 +step:3962 train loss:3.579299 +step:3963 train loss:3.555636 +step:3964 train loss:3.534774 +step:3965 train loss:3.586318 +step:3966 train loss:3.540646 +step:3967 train loss:3.595693 +step:3968 train loss:3.604800 +step:3969 train loss:3.515624 +step:3970 train loss:3.623338 +step:3971 train loss:3.542608 +step:3972 train loss:3.572568 +step:3973 train loss:3.533010 +step:3974 train loss:3.630662 +step:3975 train loss:3.576988 +step:3976 train loss:3.535049 +step:3977 train loss:3.593365 +step:3978 train loss:3.556957 +step:3979 train loss:3.546557 +step:3980 train loss:3.617899 +step:3981 train 
loss:3.549817 +step:3982 train loss:3.570887 +step:3983 train loss:3.552668 +step:3984 train loss:3.589446 +step:3985 train loss:3.563881 +step:3986 train loss:3.579666 +step:3987 train loss:3.588514 +step:3988 train loss:3.520978 +step:3989 train loss:3.596141 +step:3990 train loss:3.589903 +step:3991 train loss:3.606049 +step:3992 train loss:3.559326 +step:3993 train loss:3.590174 +step:3994 train loss:3.545917 +step:3995 train loss:3.596790 +step:3996 train loss:3.516037 +step:3997 train loss:3.592046 +step:3998 train loss:3.473561 +step:3999 train loss:3.632538 +step:4000 validation loss:3.491810 total_sharp:4.2096e-03 L1_sharp:9.3290e-03 L2_sharp:2.9595e-03 L3_sharp:1.8078e-03 L4_sharp:1.0654e-03 L5_sharp:1.4123e-03 L6_sharp:1.8908e-03 L7_sharp:2.0142e-03 L8_sharp:1.7560e-03 L9_sharp:1.2881e-03 L10_sharp:1.0649e-03 L11_sharp:8.0664e-04 L12_sharp:1.0029e-03 total_fnorm:1.6804e+00 total_l1_linf:1.3774e+04 total_spectral:1.6804e+00 L1_fnorm:2.8931e-01 L2_fnorm:2.7796e-01 L3_fnorm:2.8360e-01 L4_fnorm:2.9509e-01 L5_fnorm:2.9919e-01 L6_fnorm:3.0167e-01 L7_fnorm:3.0069e-01 L8_fnorm:3.0219e-01 L9_fnorm:3.0244e-01 L10_fnorm:3.0222e-01 L11_fnorm:3.0155e-01 L12_fnorm:3.0091e-01 L1_l1linf:3.2461e-01 L2_l1linf:4.0849e-01 L3_l1linf:4.2699e-01 L4_l1linf:4.1093e-01 L5_l1linf:3.8421e-01 L6_l1linf:3.6776e-01 L7_l1linf:3.3737e-01 L8_l1linf:3.4012e-01 L9_l1linf:3.9058e-01 L10_l1linf:4.1063e-01 L11_l1linf:4.2088e-01 L12_l1linf:4.0090e-01 L1_spectral:7.3326e-03 L2_spectral:9.2323e-03 L3_spectral:9.5999e-03 L4_spectral:9.2804e-03 L5_spectral:8.6898e-03 L6_spectral:8.2860e-03 L7_spectral:7.6539e-03 L8_spectral:7.6969e-03 L9_spectral:8.7078e-03 L10_spectral:9.2138e-03 L11_spectral:9.3933e-03 L12_spectral:9.0119e-03 ip_v_neg_g:6.6158e-03 cos_v_neg_g:3.1636e-03 v_norm:1.6804e+00 g_norm:1.2445e+00 hv_norm:3.9586e-01 cos_v_hv:1.7869e-02 hg_norm:1.9056e+01 cos_g_hg:4.8407e-01 v_par:2.0569e-04 v_perp:1.6804e+00 L1_cos_v_neg_g:4.4052e-03 L1_v_norm:2.8931e-01 L2_cos_v_neg_g:3.8195e-03 L2_v_norm:2.7796e-01 L3_cos_v_neg_g:4.3739e-03 L3_v_norm:2.8360e-01 L4_cos_v_neg_g:3.9978e-03 L4_v_norm:2.9509e-01 L5_cos_v_neg_g:5.7361e-03 L5_v_norm:2.9919e-01 L6_cos_v_neg_g:8.2476e-03 L6_v_norm:3.0167e-01 L7_cos_v_neg_g:7.8674e-03 L7_v_norm:3.0069e-01 L8_cos_v_neg_g:6.7440e-03 L8_v_norm:3.0219e-01 L9_cos_v_neg_g:6.8121e-03 L9_v_norm:3.0244e-01 L10_cos_v_neg_g:6.1952e-03 L10_v_norm:3.0222e-01 L11_cos_v_neg_g:5.0772e-03 L11_v_norm:3.0155e-01 L12_cos_v_neg_g:2.3299e-03 L12_v_norm:3.0091e-01 +step:4000 train loss:3.512366 +step:4001 train loss:3.585730 +step:4002 train loss:3.566249 +step:4003 train loss:3.601496 +step:4004 train loss:3.506781 +step:4005 train loss:3.604316 +step:4006 train loss:3.609348 +step:4007 train loss:3.531821 +step:4008 train loss:3.491988 +step:4009 train loss:3.571642 +step:4010 train loss:3.547755 +step:4011 train loss:3.555744 +step:4012 train loss:3.570559 +step:4013 train loss:3.546692 +step:4014 train loss:3.561633 +step:4015 train loss:3.549520 +step:4016 train loss:3.563668 +step:4017 train loss:3.525985 +step:4018 train loss:3.467688 +step:4019 train loss:3.521571 +step:4020 train loss:3.585202 +step:4021 train loss:3.530487 +step:4022 train loss:3.535005 +step:4023 train loss:3.547913 +step:4024 train loss:3.465533 +step:4025 train loss:3.585657 +step:4026 train loss:3.576127 +step:4027 train loss:3.583610 +step:4028 train loss:3.596339 +step:4029 train loss:3.630019 +step:4030 train loss:3.547718 +step:4031 train loss:3.583370 +step:4032 train loss:3.543137 +step:4033 train loss:3.579883 +step:4034 
train loss:3.590717 +step:4035 train loss:3.571846 +step:4036 train loss:3.565349 +step:4037 train loss:3.585117 +step:4038 train loss:3.503227 +step:4039 train loss:3.559920 +step:4040 train loss:3.538385 +step:4041 train loss:3.530049 +step:4042 train loss:3.553631 +step:4043 train loss:3.535089 +step:4044 train loss:3.573291 +step:4045 train loss:3.575648 +step:4046 train loss:3.534683 +step:4047 train loss:3.561541 +step:4048 train loss:3.573934 +step:4049 train loss:3.537699 +step:4050 train loss:3.638482 +step:4051 train loss:3.549918 +step:4052 train loss:3.573980 +step:4053 train loss:3.621107 +step:4054 train loss:3.593003 +step:4055 train loss:3.608897 +step:4056 train loss:3.607346 +step:4057 train loss:3.542951 +step:4058 train loss:3.525558 +step:4059 train loss:3.610087 +step:4060 train loss:3.549571 +step:4061 train loss:3.517987 +step:4062 train loss:3.632943 +step:4063 train loss:3.582259 +step:4064 train loss:3.554781 +step:4065 train loss:3.535615 +step:4066 train loss:3.563548 +step:4067 train loss:3.588553 +step:4068 train loss:3.555099 +step:4069 train loss:3.614118 +step:4070 train loss:3.531364 +step:4071 train loss:3.505550 +step:4072 train loss:3.580558 +step:4073 train loss:3.514511 +step:4074 train loss:3.564284 +step:4075 train loss:3.631310 +step:4076 train loss:3.491566 +step:4077 train loss:3.567461 +step:4078 train loss:3.666556 +step:4079 train loss:3.608557 +step:4080 train loss:3.554924 +step:4081 train loss:3.524010 +step:4082 train loss:3.575939 +step:4083 train loss:3.512746 +step:4084 train loss:3.529332 +step:4085 train loss:3.776639 +step:4086 train loss:3.534680 +step:4087 train loss:3.578127 +step:4088 train loss:3.565355 +step:4089 train loss:3.552217 +step:4090 train loss:3.576149 +step:4091 train loss:3.598202 +step:4092 train loss:3.519613 +step:4093 train loss:3.550876 +step:4094 train loss:3.567178 +step:4095 train loss:3.522911 +step:4096 train loss:3.559469 +step:4097 train loss:3.560634 +step:4098 train loss:3.533135 +step:4099 train loss:3.535158 +step:4100 train loss:3.587194 +step:4101 train loss:3.512089 +step:4102 train loss:3.551264 +step:4103 train loss:3.757843 +step:4104 train loss:3.567352 +step:4105 train loss:3.535814 +step:4106 train loss:3.600412 +step:4107 train loss:3.526406 +step:4108 train loss:3.529337 +step:4109 train loss:3.582119 +step:4110 train loss:3.592872 +step:4111 train loss:3.565179 +step:4112 train loss:3.586049 +step:4113 train loss:3.545161 +step:4114 train loss:3.494346 +step:4115 train loss:3.528668 +step:4116 train loss:3.513345 +step:4117 train loss:3.534611 +step:4118 train loss:3.585337 +step:4119 train loss:3.611532 +step:4120 train loss:3.532336 +step:4121 train loss:3.523243 +step:4122 train loss:3.595685 +step:4123 train loss:3.602849 +step:4124 train loss:3.581484 +step:4125 train loss:3.623061 +step:4126 train loss:3.557407 +step:4127 train loss:3.573318 +step:4128 train loss:3.567308 +step:4129 train loss:3.610138 +step:4130 train loss:3.544459 +step:4131 train loss:3.575803 +step:4132 train loss:3.594765 +step:4133 train loss:3.543568 +step:4134 train loss:3.597923 +step:4135 train loss:3.531686 +step:4136 train loss:3.556112 +step:4137 train loss:3.526818 +step:4138 train loss:3.534563 +step:4139 train loss:3.585098 +step:4140 train loss:3.536655 +step:4141 train loss:3.500713 +step:4142 train loss:3.547494 +step:4143 train loss:3.582756 +step:4144 train loss:3.535949 +step:4145 train loss:3.503114 +step:4146 train loss:3.573142 +step:4147 train loss:3.544897 +step:4148 train loss:3.537982 
+step:4149 train loss:3.618545 +step:4150 train loss:3.583774 +step:4151 train loss:3.565607 +step:4152 train loss:3.585371 +step:4153 train loss:3.592972 +step:4154 train loss:3.602342 +step:4155 train loss:3.624378 +step:4156 train loss:3.500495 +step:4157 train loss:3.523605 +step:4158 train loss:3.576176 +step:4159 train loss:3.481113 +step:4160 train loss:3.569494 +step:4161 train loss:3.567980 +step:4162 train loss:3.480581 +step:4163 train loss:3.565257 +step:4164 train loss:3.509584 +step:4165 train loss:3.513642 +step:4166 train loss:3.579129 +step:4167 train loss:3.567857 +step:4168 train loss:3.563580 +step:4169 train loss:3.599184 +step:4170 train loss:3.713227 +step:4171 train loss:3.561220 +step:4172 train loss:3.580855 +step:4173 train loss:3.578566 +step:4174 train loss:3.542328 +step:4175 train loss:3.629077 +step:4176 train loss:3.551872 +step:4177 train loss:3.577321 +step:4178 train loss:3.552325 +step:4179 train loss:3.510908 +step:4180 train loss:3.506977 +step:4181 train loss:3.554701 +step:4182 train loss:3.543353 +step:4183 train loss:3.479466 +step:4184 train loss:3.546383 +step:4185 train loss:3.617497 +step:4186 train loss:3.590197 +step:4187 train loss:3.598937 +step:4188 train loss:3.578393 +step:4189 train loss:3.534021 +step:4190 train loss:3.573004 +step:4191 train loss:3.524460 +step:4192 train loss:3.617577 +step:4193 train loss:3.520787 +step:4194 train loss:3.506520 +step:4195 train loss:3.504259 +step:4196 train loss:3.568465 +step:4197 train loss:3.583638 +step:4198 train loss:3.511242 +step:4199 train loss:3.592582 +step:4200 train loss:3.554131 +step:4201 train loss:3.533071 +step:4202 train loss:3.551981 +step:4203 train loss:3.559590 +step:4204 train loss:3.554924 +step:4205 train loss:3.563129 +step:4206 train loss:3.582549 +step:4207 train loss:3.586586 +step:4208 train loss:3.548928 +step:4209 train loss:3.615533 +step:4210 train loss:3.644253 +step:4211 train loss:3.526007 +step:4212 train loss:3.564653 +step:4213 train loss:3.517352 +step:4214 train loss:3.526499 +step:4215 train loss:3.542452 +step:4216 train loss:3.512884 +step:4217 train loss:3.539350 +step:4218 train loss:3.578008 +step:4219 train loss:3.584348 +step:4220 train loss:3.652400 +step:4221 train loss:3.539475 +step:4222 train loss:3.604108 +step:4223 train loss:3.520854 +step:4224 train loss:3.594393 +step:4225 train loss:3.526727 +step:4226 train loss:3.584172 +step:4227 train loss:3.554848 +step:4228 train loss:3.533392 +step:4229 train loss:3.543859 +step:4230 train loss:3.523866 +step:4231 train loss:3.513326 +step:4232 train loss:3.563473 +step:4233 train loss:3.473164 +step:4234 train loss:3.556061 +step:4235 train loss:3.634338 +step:4236 train loss:3.600973 +step:4237 train loss:3.581401 +step:4238 train loss:3.592092 +step:4239 train loss:3.643498 +step:4240 train loss:3.550600 +step:4241 train loss:3.476408 +step:4242 train loss:3.597655 +step:4243 train loss:3.594500 +step:4244 train loss:3.610764 +step:4245 train loss:3.663380 +step:4246 train loss:3.539617 +step:4247 train loss:3.597504 +step:4248 train loss:3.547912 +step:4249 train loss:3.553469 +step:4250 validation loss:3.478526 +step:4250 train loss:3.533578 +step:4251 train loss:3.629591 +step:4252 train loss:3.538867 +step:4253 train loss:3.534404 +step:4254 train loss:3.540689 +step:4255 train loss:3.524794 +step:4256 train loss:3.542436 +step:4257 train loss:3.595878 +step:4258 train loss:3.461285 +step:4259 train loss:3.524890 +step:4260 train loss:3.591772 +step:4261 train loss:3.572477 +step:4262 
train loss:3.718584 +step:4263 train loss:3.639640 +step:4264 train loss:3.584380 +step:4265 train loss:3.579516 +step:4266 train loss:3.572577 +step:4267 train loss:3.574157 +step:4268 train loss:3.521549 +step:4269 train loss:3.617352 +step:4270 train loss:3.597236 +step:4271 train loss:3.510761 +step:4272 train loss:3.563681 +step:4273 train loss:3.538171 +step:4274 train loss:3.527888 +step:4275 train loss:3.548142 +step:4276 train loss:3.513967 +step:4277 train loss:3.652915 +step:4278 train loss:3.502582 +step:4279 train loss:3.527924 +step:4280 train loss:3.610904 +step:4281 train loss:3.596030 +step:4282 train loss:3.661445 +step:4283 train loss:3.515913 +step:4284 train loss:3.543968 +step:4285 train loss:3.547087 +step:4286 train loss:3.609355 +step:4287 train loss:3.609723 +step:4288 train loss:3.588150 +step:4289 train loss:3.543551 +step:4290 train loss:3.551138 +step:4291 train loss:3.507699 +step:4292 train loss:3.556109 +step:4293 train loss:3.565133 +step:4294 train loss:3.553233 +step:4295 train loss:3.484994 +step:4296 train loss:3.560337 +step:4297 train loss:3.539387 +step:4298 train loss:3.553037 +step:4299 train loss:3.552104 +step:4300 train loss:3.668164 +step:4301 train loss:3.484073 +step:4302 train loss:3.624637 +step:4303 train loss:3.503091 +step:4304 train loss:3.510828 +step:4305 train loss:3.529280 +step:4306 train loss:3.602454 +step:4307 train loss:3.512983 +step:4308 train loss:3.515770 +step:4309 train loss:3.585333 +step:4310 train loss:3.525288 +step:4311 train loss:3.577303 +step:4312 train loss:3.575135 +step:4313 train loss:3.564531 +step:4314 train loss:3.512685 +step:4315 train loss:3.541613 +step:4316 train loss:3.492123 +step:4317 train loss:3.545747 +step:4318 train loss:3.589349 +step:4319 train loss:3.538644 +step:4320 train loss:3.598885 +step:4321 train loss:3.583187 +step:4322 train loss:3.534552 +step:4323 train loss:3.474568 +step:4324 train loss:3.565249 +step:4325 train loss:3.544302 +step:4326 train loss:3.534558 +step:4327 train loss:3.642909 +step:4328 train loss:3.551330 +step:4329 train loss:3.503555 +step:4330 train loss:3.553239 +step:4331 train loss:3.565434 +step:4332 train loss:3.595335 +step:4333 train loss:3.558463 +step:4334 train loss:3.566538 +step:4335 train loss:3.567559 +step:4336 train loss:3.581518 +step:4337 train loss:3.543614 +step:4338 train loss:3.663844 +step:4339 train loss:3.567814 +step:4340 train loss:3.577737 +step:4341 train loss:3.544389 +step:4342 train loss:3.559276 +step:4343 train loss:3.679204 +step:4344 train loss:3.569025 +step:4345 train loss:3.582803 +step:4346 train loss:3.596778 +step:4347 train loss:3.609191 +step:4348 train loss:3.520002 +step:4349 train loss:3.604324 +step:4350 train loss:3.544369 +step:4351 train loss:3.498887 +step:4352 train loss:3.571887 +step:4353 train loss:3.520111 +step:4354 train loss:3.568294 +step:4355 train loss:3.535941 +step:4356 train loss:3.560261 +step:4357 train loss:3.544120 +step:4358 train loss:3.637009 +step:4359 train loss:3.585873 +step:4360 train loss:3.501559 +step:4361 train loss:3.553231 +step:4362 train loss:3.571013 +step:4363 train loss:3.589737 +step:4364 train loss:3.552875 +step:4365 train loss:3.536212 +step:4366 train loss:3.583335 +step:4367 train loss:3.601024 +step:4368 train loss:3.571725 +step:4369 train loss:3.449575 +step:4370 train loss:3.566882 +step:4371 train loss:3.483802 +step:4372 train loss:3.632802 +step:4373 train loss:3.567589 +step:4374 train loss:3.537807 +step:4375 train loss:3.584587 +step:4376 train loss:3.594778 
+step:4377 train loss:3.525554 +step:4378 train loss:3.541458 +step:4379 train loss:3.623580 +step:4380 train loss:3.601206 +step:4381 train loss:3.505705 +step:4382 train loss:3.550480 +step:4383 train loss:3.578576 +step:4384 train loss:3.574921 +step:4385 train loss:3.502873 +step:4386 train loss:3.557537 +step:4387 train loss:3.527344 +step:4388 train loss:3.547563 +step:4389 train loss:3.575821 +step:4390 train loss:3.616050 +step:4391 train loss:3.540855 +step:4392 train loss:3.618059 +step:4393 train loss:3.572007 +step:4394 train loss:3.511520 +step:4395 train loss:3.568139 +step:4396 train loss:3.543880 +step:4397 train loss:3.587052 +step:4398 train loss:3.532538 +step:4399 train loss:3.525436 +step:4400 train loss:3.531331 +step:4401 train loss:3.591577 +step:4402 train loss:3.590394 +step:4403 train loss:3.541539 +step:4404 train loss:3.571108 +step:4405 train loss:3.494113 +step:4406 train loss:3.572412 +step:4407 train loss:3.509104 +step:4408 train loss:3.600325 +step:4409 train loss:3.561536 +step:4410 train loss:3.566606 +step:4411 train loss:3.523565 +step:4412 train loss:3.643443 +step:4413 train loss:3.533673 +step:4414 train loss:3.544092 +step:4415 train loss:3.528202 +step:4416 train loss:3.521791 +step:4417 train loss:3.514646 +step:4418 train loss:3.586521 +step:4419 train loss:3.555286 +step:4420 train loss:3.567288 +step:4421 train loss:3.591844 +step:4422 train loss:3.607399 +step:4423 train loss:3.567206 +step:4424 train loss:3.549772 +step:4425 train loss:3.510999 +step:4426 train loss:3.590050 +step:4427 train loss:3.549939 +step:4428 train loss:3.488770 +step:4429 train loss:3.553594 +step:4430 train loss:3.587344 +step:4431 train loss:3.582004 +step:4432 train loss:3.487421 +step:4433 train loss:3.542744 +step:4434 train loss:3.537105 +step:4435 train loss:3.569758 +step:4436 train loss:3.505297 +step:4437 train loss:3.581839 +step:4438 train loss:3.552198 +step:4439 train loss:3.556400 +step:4440 train loss:3.557221 +step:4441 train loss:3.557879 +step:4442 train loss:3.609567 +step:4443 train loss:3.544354 +step:4444 train loss:3.628521 +step:4445 train loss:3.591362 +step:4446 train loss:3.522810 +step:4447 train loss:3.564410 +step:4448 train loss:3.590110 +step:4449 train loss:3.525959 +step:4450 train loss:3.545393 +step:4451 train loss:3.593859 +step:4452 train loss:3.659306 +step:4453 train loss:3.582054 +step:4454 train loss:3.557273 +step:4455 train loss:3.600968 +step:4456 train loss:3.547180 +step:4457 train loss:3.544105 +step:4458 train loss:3.560235 +step:4459 train loss:3.593654 +step:4460 train loss:3.503204 +step:4461 train loss:3.475585 +step:4462 train loss:3.534931 +step:4463 train loss:3.553764 +step:4464 train loss:3.523965 +step:4465 train loss:3.556417 +step:4466 train loss:3.653741 +step:4467 train loss:3.530819 +step:4468 train loss:3.528344 +step:4469 train loss:3.519484 +step:4470 train loss:3.497709 +step:4471 train loss:3.555176 +step:4472 train loss:3.485354 +step:4473 train loss:3.569058 +step:4474 train loss:3.592485 +step:4475 train loss:3.558218 +step:4476 train loss:3.516388 +step:4477 train loss:3.503598 +step:4478 train loss:3.561969 +step:4479 train loss:3.657456 +step:4480 train loss:3.500382 +step:4481 train loss:3.570132 +step:4482 train loss:3.529226 +step:4483 train loss:3.525650 +step:4484 train loss:3.574370 +step:4485 train loss:3.532804 +step:4486 train loss:3.636155 +step:4487 train loss:3.531864 +step:4488 train loss:3.524587 +step:4489 train loss:3.484897 +step:4490 train loss:3.566508 +step:4491 train 
loss:3.517437 +step:4492 train loss:3.547807 +step:4493 train loss:3.536687 +step:4494 train loss:3.533669 +step:4495 train loss:3.595275 +step:4496 train loss:3.540462 +step:4497 train loss:3.623564 +step:4498 train loss:3.515924 +step:4499 train loss:3.563444 +step:4500 validation loss:3.464187 total_sharp:5.1909e-03 L1_sharp:1.1342e-02 L2_sharp:1.9763e-03 L3_sharp:2.0388e-03 L4_sharp:1.0110e-03 L5_sharp:1.6343e-03 L6_sharp:2.2281e-03 L7_sharp:2.2581e-03 L8_sharp:1.8899e-03 L9_sharp:1.4018e-03 L10_sharp:1.2336e-03 L11_sharp:1.1252e-03 L12_sharp:2.1419e-03 total_fnorm:1.6866e+00 total_l1_linf:1.3808e+04 total_spectral:1.6866e+00 L1_fnorm:2.8883e-01 L2_fnorm:2.7714e-01 L3_fnorm:2.8238e-01 L4_fnorm:2.9480e-01 L5_fnorm:2.9838e-01 L6_fnorm:3.0118e-01 L7_fnorm:3.0025e-01 L8_fnorm:3.0149e-01 L9_fnorm:3.0232e-01 L10_fnorm:3.0206e-01 L11_fnorm:3.0107e-01 L12_fnorm:3.0101e-01 L1_l1linf:3.4530e-01 L2_l1linf:4.3409e-01 L3_l1linf:4.4472e-01 L4_l1linf:4.4021e-01 L5_l1linf:4.2007e-01 L6_l1linf:3.8829e-01 L7_l1linf:3.6453e-01 L8_l1linf:3.6973e-01 L9_l1linf:4.0037e-01 L10_l1linf:4.2640e-01 L11_l1linf:4.4589e-01 L12_l1linf:4.3580e-01 L1_spectral:7.7999e-03 L2_spectral:9.8635e-03 L3_spectral:9.9863e-03 L4_spectral:9.9440e-03 L5_spectral:9.4198e-03 L6_spectral:8.6388e-03 L7_spectral:8.1703e-03 L8_spectral:8.2855e-03 L9_spectral:8.9150e-03 L10_spectral:9.4712e-03 L11_spectral:9.9908e-03 L12_spectral:9.7771e-03 ip_v_neg_g:9.0010e-03 cos_v_neg_g:4.0127e-03 v_norm:1.6866e+00 g_norm:1.3300e+00 hv_norm:5.2619e-01 cos_v_hv:1.6638e-02 hg_norm:3.5318e+01 cos_g_hg:5.2172e-01 v_par:4.1270e-04 v_perp:1.6866e+00 L1_cos_v_neg_g:8.5083e-03 L1_v_norm:2.8883e-01 L2_cos_v_neg_g:7.3079e-03 L2_v_norm:2.7714e-01 L3_cos_v_neg_g:4.8514e-03 L3_v_norm:2.8238e-01 L4_cos_v_neg_g:4.4166e-03 L4_v_norm:2.9480e-01 L5_cos_v_neg_g:5.5964e-03 L5_v_norm:2.9838e-01 L6_cos_v_neg_g:6.0484e-03 L6_v_norm:3.0118e-01 L7_cos_v_neg_g:6.6963e-03 L7_v_norm:3.0025e-01 L8_cos_v_neg_g:7.2257e-03 L8_v_norm:3.0149e-01 L9_cos_v_neg_g:7.3902e-03 L9_v_norm:3.0232e-01 L10_cos_v_neg_g:7.6531e-03 L10_v_norm:3.0206e-01 L11_cos_v_neg_g:6.1224e-03 L11_v_norm:3.0107e-01 L12_cos_v_neg_g:6.4650e-03 L12_v_norm:3.0101e-01 +step:4500 train loss:3.475099 +step:4501 train loss:3.534396 +step:4502 train loss:3.658034 +step:4503 train loss:3.560074 +step:4504 train loss:3.571429 +step:4505 train loss:3.549403 +step:4506 train loss:3.522822 +step:4507 train loss:3.600627 +step:4508 train loss:3.539004 +step:4509 train loss:3.533659 +step:4510 train loss:3.569651 +step:4511 train loss:3.516682 +step:4512 train loss:3.544359 +step:4513 train loss:3.603189 +step:4514 train loss:3.508172 +step:4515 train loss:3.629520 +step:4516 train loss:3.596378 +step:4517 train loss:3.554523 +step:4518 train loss:3.493048 +step:4519 train loss:3.530200 +step:4520 train loss:3.542264 +step:4521 train loss:3.484068 +step:4522 train loss:3.536826 +step:4523 train loss:3.584141 +step:4524 train loss:3.567736 +step:4525 train loss:3.487343 +step:4526 train loss:3.530032 +step:4527 train loss:3.516872 +step:4528 train loss:3.551141 +step:4529 train loss:3.543794 +step:4530 train loss:3.640752 +step:4531 train loss:3.529485 +step:4532 train loss:3.551482 +step:4533 train loss:3.527160 +step:4534 train loss:3.619621 +step:4535 train loss:3.516938 +step:4536 train loss:3.588215 +step:4537 train loss:3.575193 +step:4538 train loss:3.549524 +step:4539 train loss:3.569613 +step:4540 train loss:3.544362 +step:4541 train loss:3.517729 +step:4542 train loss:3.557451 +step:4543 train loss:3.650966 +step:4544 
train loss:3.591065 +step:4545 train loss:3.531651 +step:4546 train loss:3.626263 +step:4547 train loss:3.582492 +step:4548 train loss:3.585999 +step:4549 train loss:3.548535 +step:4550 train loss:3.509634 +step:4551 train loss:3.528506 +step:4552 train loss:3.531336 +step:4553 train loss:3.617733 +step:4554 train loss:3.508973 +step:4555 train loss:3.618743 +step:4556 train loss:3.555964 +step:4557 train loss:3.486407 +step:4558 train loss:3.571796 +step:4559 train loss:3.581494 +step:4560 train loss:3.518547 +step:4561 train loss:3.506581 +step:4562 train loss:3.549020 +step:4563 train loss:3.500812 +step:4564 train loss:3.530649 +step:4565 train loss:3.529101 +step:4566 train loss:3.501335 +step:4567 train loss:3.526974 +step:4568 train loss:3.529870 +step:4569 train loss:3.511152 +step:4570 train loss:3.560952 +step:4571 train loss:3.543885 +step:4572 train loss:3.531354 +step:4573 train loss:3.540364 +step:4574 train loss:3.691483 +step:4575 train loss:3.515712 +step:4576 train loss:3.508710 +step:4577 train loss:3.549041 +step:4578 train loss:3.589215 +step:4579 train loss:3.540616 +step:4580 train loss:3.596978 +step:4581 train loss:3.539061 +step:4582 train loss:3.529973 +step:4583 train loss:3.536659 +step:4584 train loss:3.511121 +step:4585 train loss:3.593052 +step:4586 train loss:3.577044 +step:4587 train loss:3.477814 +step:4588 train loss:3.523001 +step:4589 train loss:3.594646 +step:4590 train loss:3.565709 +step:4591 train loss:3.505806 +step:4592 train loss:3.593852 +step:4593 train loss:3.509936 +step:4594 train loss:3.541920 +step:4595 train loss:3.566842 +step:4596 train loss:3.499903 +step:4597 train loss:3.636936 +step:4598 train loss:3.557345 +step:4599 train loss:3.514517 +step:4600 train loss:3.520199 +step:4601 train loss:3.542122 +step:4602 train loss:3.492710 +step:4603 train loss:3.504158 +step:4604 train loss:3.612253 +step:4605 train loss:3.531743 +step:4606 train loss:3.562788 +step:4607 train loss:3.542635 +step:4608 train loss:3.574790 +step:4609 train loss:3.534214 +step:4610 train loss:3.575768 +step:4611 train loss:3.602303 +step:4612 train loss:3.601877 +step:4613 train loss:3.580621 +step:4614 train loss:3.576447 +step:4615 train loss:3.516745 +step:4616 train loss:3.497986 +step:4617 train loss:3.544039 +step:4618 train loss:3.559159 +step:4619 train loss:3.517366 +step:4620 train loss:3.534581 +step:4621 train loss:3.536287 +step:4622 train loss:3.477299 +step:4623 train loss:3.582385 +step:4624 train loss:3.570837 +step:4625 train loss:3.526824 +step:4626 train loss:3.567206 +step:4627 train loss:3.534868 +step:4628 train loss:3.524479 +step:4629 train loss:3.563312 +step:4630 train loss:3.620706 +step:4631 train loss:3.623060 +step:4632 train loss:3.517350 +step:4633 train loss:3.531053 +step:4634 train loss:3.603279 +step:4635 train loss:3.565770 +step:4636 train loss:3.583001 +step:4637 train loss:3.520242 +step:4638 train loss:3.524054 +step:4639 train loss:3.521074 +step:4640 train loss:3.530543 +step:4641 train loss:3.537189 +step:4642 train loss:3.568692 +step:4643 train loss:3.530190 +step:4644 train loss:3.554226 +step:4645 train loss:3.568635 +step:4646 train loss:3.524796 +step:4647 train loss:3.488029 +step:4648 train loss:3.587672 +step:4649 train loss:3.600173 +step:4650 train loss:3.550553 +step:4651 train loss:3.554508 +step:4652 train loss:3.541951 +step:4653 train loss:3.597788 +step:4654 train loss:3.594533 +step:4655 train loss:3.498845 +step:4656 train loss:3.531084 +step:4657 train loss:3.583545 +step:4658 train loss:3.538816 
+step:4659 train loss:3.552795 +step:4660 train loss:3.593583 +step:4661 train loss:3.512651 +step:4662 train loss:3.528656 +step:4663 train loss:3.548021 +step:4664 train loss:3.589161 +step:4665 train loss:3.589998 +step:4666 train loss:3.585419 +step:4667 train loss:3.577876 +step:4668 train loss:3.540788 +step:4669 train loss:3.550753 +step:4670 train loss:3.580519 +step:4671 train loss:3.582259 +step:4672 train loss:3.453409 +step:4673 train loss:3.487523 +step:4674 train loss:3.617329 +step:4675 train loss:3.526015 +step:4676 train loss:3.484022 +step:4677 train loss:3.486916 +step:4678 train loss:3.463395 +step:4679 train loss:3.559164 +step:4680 train loss:3.499488 +step:4681 train loss:3.548286 +step:4682 train loss:3.499143 +step:4683 train loss:3.469661 +step:4684 train loss:3.587418 +step:4685 train loss:3.523579 +step:4686 train loss:3.533633 +step:4687 train loss:3.573527 +step:4688 train loss:3.502070 +step:4689 train loss:3.577057 +step:4690 train loss:3.522188 +step:4691 train loss:3.552226 +step:4692 train loss:3.484800 +step:4693 train loss:3.520930 +step:4694 train loss:3.559551 +step:4695 train loss:3.585608 +step:4696 train loss:3.567030 +step:4697 train loss:3.481845 +step:4698 train loss:3.499352 +step:4699 train loss:3.549272 +step:4700 train loss:3.521712 +step:4701 train loss:3.526474 +step:4702 train loss:3.483986 +step:4703 train loss:3.564121 +step:4704 train loss:3.552465 +step:4705 train loss:3.494363 +step:4706 train loss:3.503274 +step:4707 train loss:3.490294 +step:4708 train loss:3.555191 +step:4709 train loss:3.506231 +step:4710 train loss:3.515767 +step:4711 train loss:3.580213 +step:4712 train loss:3.472950 +step:4713 train loss:3.580771 +step:4714 train loss:3.479383 +step:4715 train loss:3.569228 +step:4716 train loss:3.540601 +step:4717 train loss:3.466286 +step:4718 train loss:3.563642 +step:4719 train loss:3.485400 +step:4720 train loss:3.585488 +step:4721 train loss:3.538935 +step:4722 train loss:3.595685 +step:4723 train loss:3.491085 +step:4724 train loss:3.541119 +step:4725 train loss:3.479271 +step:4726 train loss:3.522546 +step:4727 train loss:3.531224 +step:4728 train loss:3.533082 +step:4729 train loss:3.567555 +step:4730 train loss:3.464381 +step:4731 train loss:3.523757 +step:4732 train loss:3.481618 +step:4733 train loss:3.411989 +step:4734 train loss:3.550214 +step:4735 train loss:3.500821 +step:4736 train loss:3.546410 +step:4737 train loss:3.424813 +step:4738 train loss:3.576024 +step:4739 train loss:3.453390 +step:4740 train loss:3.558901 +step:4741 train loss:3.529650 +step:4742 train loss:3.492789 +step:4743 train loss:3.492446 +step:4744 train loss:3.532487 +step:4745 train loss:3.556046 +step:4746 train loss:3.589202 +step:4747 train loss:3.552027 +step:4748 train loss:3.452926 +step:4749 train loss:3.521270 +step:4750 validation loss:3.454799 +step:4750 train loss:3.469316 +step:4751 train loss:3.559752 +step:4752 train loss:3.494801 +step:4753 train loss:3.598647 +step:4754 train loss:3.469510 +step:4755 train loss:3.507292 +step:4756 train loss:3.588356 +step:4757 train loss:3.508256 +step:4758 train loss:3.532734 +step:4759 train loss:3.524769 +step:4760 train loss:3.558052 +step:4761 train loss:3.475013 +step:4762 train loss:3.505521 +step:4763 train loss:3.529517 +step:4764 train loss:3.590967 +step:4765 train loss:3.487484 +step:4766 train loss:3.501725 +step:4767 train loss:3.458544 +step:4768 train loss:3.512609 +step:4769 train loss:3.543449 +step:4770 train loss:3.497238 +step:4771 train loss:3.514057 +step:4772 
train loss:3.485502 +step:4773 train loss:3.523074 +step:4774 train loss:3.468465 +step:4775 train loss:3.595602 +step:4776 train loss:3.462681 +step:4777 train loss:3.532318 +step:4778 train loss:3.475609 +step:4779 train loss:3.524016 +step:4780 train loss:3.461679 +step:4781 train loss:3.466216 +step:4782 train loss:3.579019 +step:4783 train loss:3.561611 +step:4784 train loss:3.525807 +step:4785 train loss:3.521969 +step:4786 train loss:3.633170 +step:4787 train loss:3.464240 +step:4788 train loss:3.488986 +step:4789 train loss:3.510813 +step:4790 train loss:3.567834 +step:4791 train loss:3.530514 +step:4792 train loss:3.572317 +step:4793 train loss:3.489130 +step:4794 train loss:3.561240 +step:4795 train loss:3.512439 +step:4796 train loss:3.501170 +step:4797 train loss:3.513067 +step:4798 train loss:3.515957 +step:4799 train loss:3.517033 +step:4800 train loss:3.544247 +step:4801 train loss:3.540148 +step:4802 train loss:3.574907 +step:4803 train loss:3.559349 +step:4804 train loss:3.514503 +step:4805 train loss:3.510754 +step:4806 train loss:3.489213 +step:4807 train loss:3.595408 +step:4808 train loss:3.467846 +step:4809 train loss:3.569421 +step:4810 train loss:3.511563 +step:4811 train loss:3.528022 +step:4812 train loss:3.505942 +step:4813 train loss:3.459826 +step:4814 train loss:3.459040 +step:4815 train loss:3.448172 +step:4816 train loss:3.519131 +step:4817 train loss:3.453089 +step:4818 train loss:3.518078 +step:4819 train loss:3.511630 +step:4820 train loss:3.771682 +step:4821 train loss:3.542356 +step:4822 train loss:3.550674 +step:4823 train loss:3.482081 +step:4824 train loss:3.489098 +step:4825 train loss:3.470310 +step:4826 train loss:3.555676 +step:4827 train loss:3.505474 +step:4828 train loss:3.444947 +step:4829 train loss:3.552039 +step:4830 train loss:3.489832 +step:4831 train loss:3.640935 +step:4832 train loss:3.507366 +step:4833 train loss:3.545414 +step:4834 train loss:3.449102 +step:4835 train loss:3.537103 +step:4836 train loss:3.517436 +step:4837 train loss:3.547323 +step:4838 train loss:3.489300 +step:4839 train loss:3.551396 +step:4840 train loss:3.459694 +step:4841 train loss:3.555051 +step:4842 train loss:3.472810 +step:4843 train loss:3.546207 +step:4844 train loss:3.551105 +step:4845 train loss:3.484950 +step:4846 train loss:3.505356 +step:4847 train loss:3.484745 +step:4848 train loss:3.515854 +step:4849 train loss:3.463351 +step:4850 train loss:3.478964 +step:4851 train loss:3.466540 +step:4852 train loss:3.552831 +step:4853 train loss:3.521055 +step:4854 train loss:3.502744 +step:4855 train loss:3.566246 +step:4856 train loss:3.536053 +step:4857 train loss:3.541168 +step:4858 train loss:3.625384 +step:4859 train loss:3.467157 +step:4860 train loss:3.547398 +step:4861 train loss:3.518072 +step:4862 train loss:3.551749 +step:4863 train loss:3.487713 +step:4864 train loss:3.498700 +step:4865 train loss:3.493836 +step:4866 train loss:3.536376 +step:4867 train loss:3.507622 +step:4868 train loss:3.523307 +step:4869 train loss:3.477737 +step:4870 train loss:3.503319 +step:4871 train loss:3.589231 +step:4872 train loss:3.529336 +step:4873 train loss:3.532568 +step:4874 train loss:3.497927 +step:4875 train loss:3.470232 +step:4876 train loss:3.478783 +step:4877 train loss:3.484805 +step:4878 train loss:3.521001 +step:4879 train loss:3.483769 +step:4880 train loss:3.505720 +step:4881 train loss:3.457067 +step:4882 train loss:3.654038 +step:4883 train loss:3.469537 +step:4884 train loss:3.493860 +step:4885 train loss:3.471165 +step:4886 train loss:3.547608 
+step:4887 train loss:3.501327 +step:4888 train loss:3.512394 +step:4889 train loss:3.507109 +step:4890 train loss:3.543096 +step:4891 train loss:3.483526 +step:4892 train loss:3.486081 +step:4893 train loss:3.533601 +step:4894 train loss:3.468234 +step:4895 train loss:3.503277 +step:4896 train loss:3.481758 +step:4897 train loss:3.560776 +step:4898 train loss:3.505084 +step:4899 train loss:3.492376 +step:4900 train loss:3.534521 +step:4901 train loss:3.493092 +step:4902 train loss:3.478791 +step:4903 train loss:3.501224 +step:4904 train loss:3.510123 +step:4905 train loss:3.512938 +step:4906 train loss:3.509601 +step:4907 train loss:3.585725 +step:4908 train loss:3.485852 +step:4909 train loss:3.497771 +step:4910 train loss:3.512621 +step:4911 train loss:3.572880 +step:4912 train loss:3.540788 +step:4913 train loss:3.525757 +step:4914 train loss:3.511276 +step:4915 train loss:3.498269 +step:4916 train loss:3.436231 +step:4917 train loss:3.464415 +step:4918 train loss:3.492975 +step:4919 train loss:3.486732 +step:4920 train loss:3.485149 +step:4921 train loss:3.649775 +step:4922 train loss:3.538942 +step:4923 train loss:3.556130 +step:4924 train loss:3.556146 +step:4925 train loss:3.489353 +step:4926 train loss:3.479591 +step:4927 train loss:3.514683 +step:4928 train loss:3.550598 +step:4929 train loss:3.509269 +step:4930 train loss:3.486851 +step:4931 train loss:3.481365 +step:4932 train loss:3.493191 +step:4933 train loss:3.483943 +step:4934 train loss:3.552696 +step:4935 train loss:3.537071 +step:4936 train loss:3.500101 +step:4937 train loss:3.611043 +step:4938 train loss:3.600922 +step:4939 train loss:3.465030 +step:4940 train loss:3.544802 +step:4941 train loss:3.444450 +step:4942 train loss:3.484267 +step:4943 train loss:3.485814 +step:4944 train loss:3.490477 +step:4945 train loss:3.535182 +step:4946 train loss:3.511748 +step:4947 train loss:3.492198 +step:4948 train loss:3.533323 +step:4949 train loss:3.438326 +step:4950 train loss:3.520943 +step:4951 train loss:3.567276 +step:4952 train loss:3.511924 +step:4953 train loss:3.541609 +step:4954 train loss:3.448432 +step:4955 train loss:3.521452 +step:4956 train loss:3.550022 +step:4957 train loss:3.542720 +step:4958 train loss:3.458847 +step:4959 train loss:3.573421 +step:4960 train loss:3.504396 +step:4961 train loss:3.521972 +step:4962 train loss:3.485390 +step:4963 train loss:3.530184 +step:4964 train loss:3.483392 +step:4965 train loss:3.633264 +step:4966 train loss:3.482563 +step:4967 train loss:3.592169 +step:4968 train loss:3.480579 +step:4969 train loss:3.524417 +step:4970 train loss:3.513387 +step:4971 train loss:3.470248 +step:4972 train loss:3.508222 +step:4973 train loss:3.516509 +step:4974 train loss:3.504270 +step:4975 train loss:3.596257 +step:4976 train loss:3.567488 +step:4977 train loss:3.515581 +step:4978 train loss:3.503453 +step:4979 train loss:3.504266 +step:4980 train loss:3.610822 +step:4981 train loss:3.447665 +step:4982 train loss:3.529370 +step:4983 train loss:3.454553 +step:4984 train loss:3.640216 +step:4985 train loss:3.536886 +step:4986 train loss:3.479342 +step:4987 train loss:3.499120 +step:4988 train loss:3.700327 +step:4989 train loss:3.503923 +step:4990 train loss:3.493176 +step:4991 train loss:3.510180 +step:4992 train loss:3.493558 +step:4993 train loss:3.472511 +step:4994 train loss:3.586724 +step:4995 train loss:3.506574 +step:4996 train loss:3.592480 +step:4997 train loss:3.495536 +step:4998 train loss:3.498396 +step:4999 train loss:3.484019 +step:5000 validation loss:3.449235 
total_sharp:3.3995e-03 L1_sharp:8.5014e-03 L2_sharp:3.1454e-03 L3_sharp:1.9959e-03 L4_sharp:9.9251e-04 L5_sharp:1.2252e-03 L6_sharp:1.4604e-03 L7_sharp:1.6247e-03 L8_sharp:1.3669e-03 L9_sharp:1.0003e-03 L10_sharp:8.4052e-04 L11_sharp:7.1403e-04 L12_sharp:8.1477e-04 total_fnorm:1.6801e+00 total_l1_linf:1.3772e+04 total_spectral:1.6801e+00 L1_fnorm:2.8943e-01 L2_fnorm:2.7779e-01 L3_fnorm:2.8302e-01 L4_fnorm:2.9604e-01 L5_fnorm:2.9969e-01 L6_fnorm:3.0176e-01 L7_fnorm:3.0146e-01 L8_fnorm:3.0231e-01 L9_fnorm:3.0251e-01 L10_fnorm:3.0241e-01 L11_fnorm:3.0120e-01 L12_fnorm:3.0068e-01 L1_l1linf:3.2883e-01 L2_l1linf:4.2742e-01 L3_l1linf:4.3336e-01 L4_l1linf:4.1480e-01 L5_l1linf:3.8754e-01 L6_l1linf:3.6814e-01 L7_l1linf:3.2772e-01 L8_l1linf:3.5269e-01 L9_l1linf:3.8654e-01 L10_l1linf:4.1721e-01 L11_l1linf:4.1921e-01 L12_l1linf:4.1387e-01 L1_spectral:7.4404e-03 L2_spectral:9.7116e-03 L3_spectral:9.8124e-03 L4_spectral:9.3913e-03 L5_spectral:8.7105e-03 L6_spectral:8.3530e-03 L7_spectral:7.3861e-03 L8_spectral:7.9499e-03 L9_spectral:8.6939e-03 L10_spectral:9.3926e-03 L11_spectral:9.4219e-03 L12_spectral:9.3194e-03 ip_v_neg_g:4.7141e-03 cos_v_neg_g:2.1947e-03 v_norm:1.6801e+00 g_norm:1.2785e+00 hv_norm:4.1599e-01 cos_v_hv:1.3730e-02 hg_norm:3.9726e+01 cos_g_hg:4.2578e-01 v_par:1.7926e-04 v_perp:1.6801e+00 L1_cos_v_neg_g:1.6941e-03 L1_v_norm:2.8943e-01 L2_cos_v_neg_g:7.5656e-03 L2_v_norm:2.7779e-01 L3_cos_v_neg_g:4.6119e-03 L3_v_norm:2.8302e-01 L4_cos_v_neg_g:3.3736e-03 L4_v_norm:2.9604e-01 L5_cos_v_neg_g:3.6700e-03 L5_v_norm:2.9969e-01 L6_cos_v_neg_g:3.5044e-03 L6_v_norm:3.0176e-01 L7_cos_v_neg_g:3.7147e-03 L7_v_norm:3.0146e-01 L8_cos_v_neg_g:2.7712e-03 L8_v_norm:3.0231e-01 L9_cos_v_neg_g:4.3033e-03 L9_v_norm:3.0251e-01 L10_cos_v_neg_g:4.4921e-03 L10_v_norm:3.0241e-01 L11_cos_v_neg_g:4.9496e-03 L11_v_norm:3.0120e-01 L12_cos_v_neg_g:3.3806e-03 L12_v_norm:3.0068e-01 +step:5000 train loss:3.596491 +step:5001 train loss:3.460897 +step:5002 train loss:3.520129 +step:5003 train loss:3.511210 +step:5004 train loss:3.506341 +step:5005 train loss:3.501663 +step:5006 train loss:3.546842 +step:5007 train loss:3.544978 +step:5008 train loss:3.486553 +step:5009 train loss:3.527182 +step:5010 train loss:3.484698 +step:5011 train loss:3.508010 +step:5012 train loss:3.483826 +step:5013 train loss:3.586685 +step:5014 train loss:3.500983 +step:5015 train loss:3.577972 +step:5016 train loss:3.504076 +step:5017 train loss:3.551790 +step:5018 train loss:3.469905 +step:5019 train loss:3.507006 +step:5020 train loss:3.496101 +step:5021 train loss:3.514450 +step:5022 train loss:3.542333 +step:5023 train loss:3.519481 +step:5024 train loss:3.562091 +step:5025 train loss:3.454295 +step:5026 train loss:3.574683 +step:5027 train loss:3.510899 +step:5028 train loss:3.576767 +step:5029 train loss:3.473142 +step:5030 train loss:3.509802 +step:5031 train loss:3.496402 +step:5032 train loss:3.529034 +step:5033 train loss:3.508668 +step:5034 train loss:3.506926 +step:5035 train loss:3.590266 +step:5036 train loss:3.540422 +step:5037 train loss:3.490897 +step:5038 train loss:3.543884 +step:5039 train loss:3.554151 +step:5040 train loss:3.517583 +step:5041 train loss:3.535507 +step:5042 train loss:3.436640 +step:5043 train loss:3.577228 +step:5044 train loss:3.499064 +step:5045 train loss:3.548401 +step:5046 train loss:3.467220 +step:5047 train loss:3.544440 +step:5048 train loss:3.460440 +step:5049 train loss:3.595765 +step:5050 train loss:3.478393 +step:5051 train loss:3.530334 +step:5052 train loss:3.424628 +step:5053 train loss:3.609473 
+step:5054 train loss:3.494693 +step:5055 train loss:3.522502 +step:5056 train loss:3.553040 +step:5057 train loss:3.486391 +step:5058 train loss:3.513309 +step:5059 train loss:3.480364 +step:5060 train loss:3.525947 +step:5061 train loss:3.520548 +step:5062 train loss:3.491829 +step:5063 train loss:3.484342 +step:5064 train loss:3.494307 +step:5065 train loss:3.477009 +step:5066 train loss:3.543096 +step:5067 train loss:3.520041 +step:5068 train loss:3.506236 +step:5069 train loss:3.475983 +step:5070 train loss:3.507468 +step:5071 train loss:3.576559 +step:5072 train loss:3.465802 +step:5073 train loss:3.477690 +step:5074 train loss:3.424353 +step:5075 train loss:3.496333 +step:5076 train loss:3.424555 +step:5077 train loss:3.487924 +step:5078 train loss:3.507608 +step:5079 train loss:3.522987 +step:5080 train loss:3.503672 +step:5081 train loss:3.512273 +step:5082 train loss:3.505548 +step:5083 train loss:3.559848 +step:5084 train loss:3.540333 +step:5085 train loss:3.502430 +step:5086 train loss:3.575248 +step:5087 train loss:3.561180 +step:5088 train loss:3.480244 +step:5089 train loss:3.549576 +step:5090 train loss:3.494485 +step:5091 train loss:3.498214 +step:5092 train loss:3.595402 +step:5093 train loss:3.477107 +step:5094 train loss:3.475829 +step:5095 train loss:3.528238 +step:5096 train loss:3.494966 +step:5097 train loss:3.501802 +step:5098 train loss:3.506168 +step:5099 train loss:3.469212 +step:5100 train loss:3.481794 +step:5101 train loss:3.673627 +step:5102 train loss:3.520204 +step:5103 train loss:3.529104 +step:5104 train loss:3.581620 +step:5105 train loss:3.510876 +step:5106 train loss:3.470847 +step:5107 train loss:3.490494 +step:5108 train loss:3.481805 +step:5109 train loss:3.564705 +step:5110 train loss:3.472160 +step:5111 train loss:3.570435 +step:5112 train loss:3.473444 +step:5113 train loss:3.463442 +step:5114 train loss:3.503146 +step:5115 train loss:3.470568 +step:5116 train loss:3.522120 +step:5117 train loss:3.469929 +step:5118 train loss:3.495114 +step:5119 train loss:3.478389 +step:5120 train loss:3.522033 +step:5121 train loss:3.468464 +step:5122 train loss:3.482514 +step:5123 train loss:3.465740 +step:5124 train loss:3.427516 +step:5125 train loss:3.535368 +step:5126 train loss:3.524040 +step:5127 train loss:3.527906 +step:5128 train loss:3.539192 +step:5129 train loss:3.466207 +step:5130 train loss:3.479525 +step:5131 train loss:3.420181 +step:5132 train loss:3.535201 +step:5133 train loss:3.505815 +step:5134 train loss:3.508414 +step:5135 train loss:3.464875 +step:5136 train loss:3.529472 +step:5137 train loss:3.526696 +step:5138 train loss:3.505722 +step:5139 train loss:3.541286 +step:5140 train loss:3.517987 +step:5141 train loss:3.543583 +step:5142 train loss:3.497674 +step:5143 train loss:3.519716 +step:5144 train loss:3.519805 +step:5145 train loss:3.461262 +step:5146 train loss:3.457688 +step:5147 train loss:3.533599 +step:5148 train loss:3.463316 +step:5149 train loss:3.534256 +step:5150 train loss:3.509272 +step:5151 train loss:3.479571 +step:5152 train loss:3.519823 +step:5153 train loss:3.493716 +step:5154 train loss:3.505068 +step:5155 train loss:3.513831 +step:5156 train loss:3.488920 +step:5157 train loss:3.492814 +step:5158 train loss:3.511205 +step:5159 train loss:3.550631 +step:5160 train loss:3.619514 +step:5161 train loss:3.546501 +step:5162 train loss:3.564069 +step:5163 train loss:3.475424 +step:5164 train loss:3.548026 +step:5165 train loss:3.554737 +step:5166 train loss:3.492253 +step:5167 train loss:3.591437 +step:5168 train 
loss:3.511735 +step:5169 train loss:3.536431 +step:5170 train loss:3.519754 +step:5171 train loss:3.561123 +step:5172 train loss:3.483677 +step:5173 train loss:3.548622 +step:5174 train loss:3.477895 +step:5175 train loss:3.516797 +step:5176 train loss:3.500972 +step:5177 train loss:3.505372 +step:5178 train loss:3.565974 +step:5179 train loss:3.477378 +step:5180 train loss:3.559293 +step:5181 train loss:3.502080 +step:5182 train loss:3.563019 +step:5183 train loss:3.491387 +step:5184 train loss:3.472596 +step:5185 train loss:3.496633 +step:5186 train loss:3.554170 +step:5187 train loss:3.546544 +step:5188 train loss:3.478602 +step:5189 train loss:3.524789 +step:5190 train loss:3.502925 +step:5191 train loss:3.489764 +step:5192 train loss:3.471710 +step:5193 train loss:3.560346 +step:5194 train loss:3.507195 +step:5195 train loss:3.480191 +step:5196 train loss:3.549688 +step:5197 train loss:3.605272 +step:5198 train loss:3.508725 +step:5199 train loss:3.490155 +step:5200 train loss:3.522593 +step:5201 train loss:3.504302 +step:5202 train loss:3.517115 +step:5203 train loss:3.514220 +step:5204 train loss:3.491495 +step:5205 train loss:3.533348 +step:5206 train loss:3.470080 +step:5207 train loss:3.474828 +step:5208 train loss:3.534316 +step:5209 train loss:3.549174 +step:5210 train loss:3.455900 +step:5211 train loss:3.504467 +step:5212 train loss:3.517169 +step:5213 train loss:3.495126 +step:5214 train loss:3.539760 +step:5215 train loss:3.655698 +step:5216 train loss:3.506522 +step:5217 train loss:3.484448 +step:5218 train loss:3.488203 +step:5219 train loss:3.552160 +step:5220 train loss:3.470508 +step:5221 train loss:3.470781 +step:5222 train loss:3.553060 +step:5223 train loss:3.543705 +step:5224 train loss:3.444648 +step:5225 train loss:3.594833 +step:5226 train loss:3.505810 +step:5227 train loss:3.582379 +step:5228 train loss:3.547627 +step:5229 train loss:3.490017 +step:5230 train loss:3.501303 +step:5231 train loss:3.453038 +step:5232 train loss:3.576502 +step:5233 train loss:3.533353 +step:5234 train loss:3.542626 +step:5235 train loss:3.485814 +step:5236 train loss:3.564210 +step:5237 train loss:3.613426 +step:5238 train loss:3.517901 +step:5239 train loss:3.577618 +step:5240 train loss:3.460638 +step:5241 train loss:3.520416 +step:5242 train loss:3.489125 +step:5243 train loss:3.495800 +step:5244 train loss:3.495674 +step:5245 train loss:3.540956 +step:5246 train loss:3.583853 +step:5247 train loss:3.508681 +step:5248 train loss:3.481799 +step:5249 train loss:3.538219 +step:5250 validation loss:3.434423 +step:5250 train loss:3.509474 +step:5251 train loss:3.570616 +step:5252 train loss:3.459287 +step:5253 train loss:3.611857 +step:5254 train loss:3.486745 +step:5255 train loss:3.560873 +step:5256 train loss:3.473118 +step:5257 train loss:3.530438 +step:5258 train loss:3.524902 +step:5259 train loss:3.510418 +step:5260 train loss:3.508942 +step:5261 train loss:3.495574 +step:5262 train loss:3.537874 +step:5263 train loss:3.521620 +step:5264 train loss:3.476533 +step:5265 train loss:3.554912 +step:5266 train loss:3.469107 +step:5267 train loss:3.482609 +step:5268 train loss:3.460389 +step:5269 train loss:3.470435 +step:5270 train loss:3.518127 +step:5271 train loss:3.444618 +step:5272 train loss:3.539615 +step:5273 train loss:3.444840 +step:5274 train loss:3.497357 +step:5275 train loss:3.507976 +step:5276 train loss:3.634946 +step:5277 train loss:3.533293 +step:5278 train loss:3.481043 +step:5279 train loss:3.531778 +step:5280 train loss:3.503046 +step:5281 train loss:3.500775 
+step:5282 train loss:3.473286 +step:5283 train loss:3.475762 +step:5284 train loss:3.481038 +step:5285 train loss:3.543511 +step:5286 train loss:3.456512 +step:5287 train loss:3.555222 +step:5288 train loss:3.534320 +step:5289 train loss:3.500508 +step:5290 train loss:3.555857 +step:5291 train loss:3.509303 +step:5292 train loss:3.526310 +step:5293 train loss:3.495321 +step:5294 train loss:3.483936 +step:5295 train loss:3.490585 +step:5296 train loss:3.483595 +step:5297 train loss:3.501706 +step:5298 train loss:3.448738 +step:5299 train loss:3.539136 +step:5300 train loss:3.488015 +step:5301 train loss:3.560119 +step:5302 train loss:3.562864 +step:5303 train loss:3.425787 +step:5304 train loss:3.454197 +step:5305 train loss:3.438722 +step:5306 train loss:3.469511 +step:5307 train loss:3.474618 +step:5308 train loss:3.570817 +step:5309 train loss:3.515334 +step:5310 train loss:3.504192 +step:5311 train loss:3.566903 +step:5312 train loss:3.453752 +step:5313 train loss:3.541594 +step:5314 train loss:3.534487 +step:5315 train loss:3.493519 +step:5316 train loss:3.526598 +step:5317 train loss:3.543092 +step:5318 train loss:3.500187 +step:5319 train loss:3.526198 +step:5320 train loss:3.480404 +step:5321 train loss:3.599464 +step:5322 train loss:3.512350 +step:5323 train loss:3.512864 +step:5324 train loss:3.456332 +step:5325 train loss:3.543633 +step:5326 train loss:3.526565 +step:5327 train loss:3.417793 +step:5328 train loss:3.557929 +step:5329 train loss:3.521603 +step:5330 train loss:3.520382 +step:5331 train loss:3.567464 +step:5332 train loss:3.492774 +step:5333 train loss:3.556862 +step:5334 train loss:3.531037 +step:5335 train loss:3.592860 +step:5336 train loss:3.626152 +step:5337 train loss:3.458858 +step:5338 train loss:3.467979 +step:5339 train loss:3.493140 +step:5340 train loss:3.516850 +step:5341 train loss:3.525872 +step:5342 train loss:3.430078 +step:5343 train loss:3.589221 +step:5344 train loss:3.470321 +step:5345 train loss:3.474764 +step:5346 train loss:3.472594 +step:5347 train loss:3.499466 +step:5348 train loss:3.542624 +step:5349 train loss:3.474241 +step:5350 train loss:3.521147 +step:5351 train loss:3.592717 +step:5352 train loss:3.639090 +step:5353 train loss:3.541916 +step:5354 train loss:3.516054 +step:5355 train loss:3.479447 +step:5356 train loss:3.500771 +step:5357 train loss:3.483503 +step:5358 train loss:3.503312 +step:5359 train loss:3.516427 +step:5360 train loss:3.491048 +step:5361 train loss:3.491230 +step:5362 train loss:3.474686 +step:5363 train loss:3.472161 +step:5364 train loss:3.471078 +step:5365 train loss:3.508937 +step:5366 train loss:3.531907 +step:5367 train loss:3.469812 +step:5368 train loss:3.529794 +step:5369 train loss:3.547663 +step:5370 train loss:3.447929 +step:5371 train loss:3.499496 +step:5372 train loss:3.526932 +step:5373 train loss:3.560730 +step:5374 train loss:3.447972 +step:5375 train loss:3.491572 +step:5376 train loss:3.553232 +step:5377 train loss:3.496773 +step:5378 train loss:3.471367 +step:5379 train loss:3.470018 +step:5380 train loss:3.508678 +step:5381 train loss:3.545916 +step:5382 train loss:3.453249 +step:5383 train loss:3.511126 +step:5384 train loss:3.530821 +step:5385 train loss:3.527637 +step:5386 train loss:3.512482 +step:5387 train loss:3.518266 +step:5388 train loss:3.524003 +step:5389 train loss:3.457209 +step:5390 train loss:3.487038 +step:5391 train loss:3.425143 +step:5392 train loss:3.493716 +step:5393 train loss:3.483357 +step:5394 train loss:3.477096 +step:5395 train loss:3.547423 +step:5396 train 
loss:3.517531 +step:5397 train loss:3.537146 +step:5398 train loss:3.531196 +step:5399 train loss:3.565332 +step:5400 train loss:3.569018 +step:5401 train loss:3.529661 +step:5402 train loss:3.636363 +step:5403 train loss:3.543094 +step:5404 train loss:3.517764 +step:5405 train loss:3.587085 +step:5406 train loss:3.545688 +step:5407 train loss:3.476303 +step:5408 train loss:3.620318 +step:5409 train loss:3.461165 +step:5410 train loss:3.523541 +step:5411 train loss:3.508240 +step:5412 train loss:3.486970 +step:5413 train loss:3.534932 +step:5414 train loss:3.513249 +step:5415 train loss:3.491713 +step:5416 train loss:3.485701 +step:5417 train loss:3.556708 +step:5418 train loss:3.569676 +step:5419 train loss:3.475877 +step:5420 train loss:3.532587 +step:5421 train loss:3.504463 +step:5422 train loss:3.552031 +step:5423 train loss:3.525136 +step:5424 train loss:3.429873 +step:5425 train loss:3.497289 +step:5426 train loss:3.583255 +step:5427 train loss:3.479122 +step:5428 train loss:3.510921 +step:5429 train loss:3.443090 +step:5430 train loss:3.480234 +step:5431 train loss:3.542323 +step:5432 train loss:3.520569 +step:5433 train loss:3.523315 +step:5434 train loss:3.473659 +step:5435 train loss:3.472517 +step:5436 train loss:3.472435 +step:5437 train loss:3.513721 +step:5438 train loss:3.491590 +step:5439 train loss:3.501367 +step:5440 train loss:3.541319 +step:5441 train loss:3.563272 +step:5442 train loss:3.478654 +step:5443 train loss:3.479975 +step:5444 train loss:3.424453 +step:5445 train loss:3.510940 +step:5446 train loss:3.482416 +step:5447 train loss:3.520033 +step:5448 train loss:3.576202 +step:5449 train loss:3.464160 +step:5450 train loss:3.500926 +step:5451 train loss:3.493649 +step:5452 train loss:3.509538 +step:5453 train loss:3.566133 +step:5454 train loss:3.489114 +step:5455 train loss:3.476591 +step:5456 train loss:3.616776 +step:5457 train loss:3.497758 +step:5458 train loss:3.528528 +step:5459 train loss:3.474307 +step:5460 train loss:3.487895 +step:5461 train loss:3.492619 +step:5462 train loss:3.497139 +step:5463 train loss:3.506753 +step:5464 train loss:3.509712 +step:5465 train loss:3.453167 +step:5466 train loss:3.526182 +step:5467 train loss:3.507975 +step:5468 train loss:3.519158 +step:5469 train loss:3.609829 +step:5470 train loss:3.505427 +step:5471 train loss:3.578426 +step:5472 train loss:3.522536 +step:5473 train loss:3.428321 +step:5474 train loss:3.763244 +step:5475 train loss:3.438630 +step:5476 train loss:3.517702 +step:5477 train loss:3.515728 +step:5478 train loss:3.515999 +step:5479 train loss:3.656371 +step:5480 train loss:3.502295 +step:5481 train loss:3.563763 +step:5482 train loss:3.477525 +step:5483 train loss:3.515704 +step:5484 train loss:3.551664 +step:5485 train loss:3.468823 +step:5486 train loss:3.519287 +step:5487 train loss:3.517647 +step:5488 train loss:3.430286 +step:5489 train loss:3.534562 +step:5490 train loss:3.480390 +step:5491 train loss:3.584677 +step:5492 train loss:3.511124 +step:5493 train loss:3.442251 +step:5494 train loss:3.496999 +step:5495 train loss:3.467601 +step:5496 train loss:3.472379 +step:5497 train loss:3.588709 +step:5498 train loss:3.458021 +step:5499 train loss:3.596480 +step:5500 validation loss:3.429970 total_sharp:3.2618e-03 L1_sharp:7.9544e-03 L2_sharp:1.8773e-03 L3_sharp:1.3607e-03 L4_sharp:6.7978e-04 L5_sharp:1.0386e-03 L6_sharp:1.3036e-03 L7_sharp:1.3658e-03 L8_sharp:1.1643e-03 L9_sharp:8.1803e-04 L10_sharp:7.6884e-04 L11_sharp:7.7256e-04 L12_sharp:2.1884e-03 total_fnorm:1.6799e+00 
total_l1_linf:1.3773e+04 total_spectral:1.6799e+00 L1_fnorm:2.8933e-01 L2_fnorm:2.7884e-01 L3_fnorm:2.8378e-01 L4_fnorm:2.9621e-01 L5_fnorm:2.9904e-01 L6_fnorm:3.0126e-01 L7_fnorm:3.0036e-01 L8_fnorm:3.0137e-01 L9_fnorm:3.0240e-01 L10_fnorm:3.0220e-01 L11_fnorm:3.0131e-01 L12_fnorm:3.0099e-01 L1_l1linf:3.6539e-01 L2_l1linf:4.3638e-01 L3_l1linf:4.3480e-01 L4_l1linf:4.0739e-01 L5_l1linf:4.2575e-01 L6_l1linf:3.7940e-01 L7_l1linf:3.4802e-01 L8_l1linf:3.5525e-01 L9_l1linf:3.8095e-01 L10_l1linf:4.1065e-01 L11_l1linf:4.2670e-01 L12_l1linf:4.5097e-01 L1_spectral:8.1927e-03 L2_spectral:9.8801e-03 L3_spectral:9.7741e-03 L4_spectral:9.2157e-03 L5_spectral:9.4712e-03 L6_spectral:8.5111e-03 L7_spectral:7.8540e-03 L8_spectral:7.9416e-03 L9_spectral:8.5799e-03 L10_spectral:9.2053e-03 L11_spectral:9.5485e-03 L12_spectral:1.0089e-02 ip_v_neg_g:4.9245e-03 cos_v_neg_g:1.4919e-03 v_norm:1.6799e+00 g_norm:1.9649e+00 hv_norm:4.2296e-01 cos_v_hv:1.2955e-02 hg_norm:6.3134e+01 cos_g_hg:6.7445e-01 v_par:1.8019e-04 v_perp:1.6799e+00 L1_cos_v_neg_g:3.0121e-03 L1_v_norm:2.8933e-01 L2_cos_v_neg_g:4.9850e-03 L2_v_norm:2.7884e-01 L3_cos_v_neg_g:2.6308e-03 L3_v_norm:2.8378e-01 L4_cos_v_neg_g:2.4649e-03 L4_v_norm:2.9621e-01 L5_cos_v_neg_g:2.9431e-03 L5_v_norm:2.9904e-01 L6_cos_v_neg_g:4.5141e-03 L6_v_norm:3.0126e-01 L7_cos_v_neg_g:3.6477e-03 L7_v_norm:3.0036e-01 L8_cos_v_neg_g:3.5564e-03 L8_v_norm:3.0137e-01 L9_cos_v_neg_g:2.5139e-03 L9_v_norm:3.0240e-01 L10_cos_v_neg_g:2.8218e-03 L10_v_norm:3.0220e-01 L11_cos_v_neg_g:2.5914e-03 L11_v_norm:3.0131e-01 L12_cos_v_neg_g:3.2293e-04 L12_v_norm:3.0099e-01 +step:5500 train loss:3.508512 +step:5501 train loss:3.583937 +step:5502 train loss:3.528451 +step:5503 train loss:3.496940 +step:5504 train loss:3.543367 +step:5505 train loss:3.503332 +step:5506 train loss:3.549145 +step:5507 train loss:3.530452 +step:5508 train loss:3.557685 +step:5509 train loss:3.572356 +step:5510 train loss:3.537317 +step:5511 train loss:3.532811 +step:5512 train loss:3.658485 +step:5513 train loss:3.455462 +step:5514 train loss:3.517764 +step:5515 train loss:3.543569 +step:5516 train loss:3.569215 +step:5517 train loss:3.524795 +step:5518 train loss:3.554714 +step:5519 train loss:3.588629 +step:5520 train loss:3.495801 +step:5521 train loss:3.504395 +step:5522 train loss:3.477487 +step:5523 train loss:3.520157 +step:5524 train loss:3.563263 +step:5525 train loss:3.473536 +step:5526 train loss:3.487271 +step:5527 train loss:3.515421 +step:5528 train loss:3.611418 +step:5529 train loss:3.579401 +step:5530 train loss:3.544036 +step:5531 train loss:3.477122 +step:5532 train loss:3.505595 +step:5533 train loss:3.536897 +step:5534 train loss:3.457209 +step:5535 train loss:3.506285 +step:5536 train loss:3.443427 +step:5537 train loss:3.493501 +step:5538 train loss:3.482035 +step:5539 train loss:3.430608 +step:5540 train loss:3.654904 +step:5541 train loss:3.464753 +step:5542 train loss:3.513939 +step:5543 train loss:3.505534 +step:5544 train loss:3.489523 +step:5545 train loss:3.482756 +step:5546 train loss:3.515448 +step:5547 train loss:3.451086 +step:5548 train loss:3.495075 +step:5549 train loss:3.498796 +step:5550 train loss:3.523478 +step:5551 train loss:3.523154 +step:5552 train loss:3.481957 +step:5553 train loss:3.514508 +step:5554 train loss:3.482648 +step:5555 train loss:3.490488 +step:5556 train loss:3.509695 +step:5557 train loss:3.569463 +step:5558 train loss:3.490249 +step:5559 train loss:3.503257 +step:5560 train loss:3.491324 +step:5561 train loss:3.529785 +step:5562 train loss:3.480569 
+step:5563 train loss:3.463453 +step:5564 train loss:3.499589 +step:5565 train loss:3.564574 +step:5566 train loss:3.467373 +step:5567 train loss:3.588697 +step:5568 train loss:3.704511 +step:5569 train loss:3.492117 +step:5570 train loss:3.427598 +step:5571 train loss:3.513775 +step:5572 train loss:3.452462 +step:5573 train loss:3.445026 +step:5574 train loss:3.410557 +step:5575 train loss:3.511665 +step:5576 train loss:3.494189 +step:5577 train loss:3.500004 +step:5578 train loss:3.530028 +step:5579 train loss:3.485051 +step:5580 train loss:3.509882 +step:5581 train loss:3.531554 +step:5582 train loss:3.509130 +step:5583 train loss:3.518618 +step:5584 train loss:3.639794 +step:5585 train loss:3.542506 +step:5586 train loss:3.480263 +step:5587 train loss:3.508648 +step:5588 train loss:3.526339 +step:5589 train loss:3.527413 +step:5590 train loss:3.584621 +step:5591 train loss:3.452958 +step:5592 train loss:3.641912 +step:5593 train loss:3.501899 +step:5594 train loss:3.511406 +step:5595 train loss:3.505180 +step:5596 train loss:3.451024 +step:5597 train loss:3.470342 +step:5598 train loss:3.478249 +step:5599 train loss:3.476774 +step:5600 train loss:3.524553 +step:5601 train loss:3.549746 +step:5602 train loss:3.481270 +step:5603 train loss:3.525564 +step:5604 train loss:3.519052 +step:5605 train loss:3.489772 +step:5606 train loss:3.498190 +step:5607 train loss:3.522106 +step:5608 train loss:3.471450 +step:5609 train loss:3.523937 +step:5610 train loss:3.480060 +step:5611 train loss:3.519421 +step:5612 train loss:3.547843 +step:5613 train loss:3.507694 +step:5614 train loss:3.474140 +step:5615 train loss:3.573904 +step:5616 train loss:3.470148 +step:5617 train loss:3.560821 +step:5618 train loss:3.545499 +step:5619 train loss:3.499638 +step:5620 train loss:3.499179 +step:5621 train loss:3.577382 +step:5622 train loss:3.458705 +step:5623 train loss:3.496489 +step:5624 train loss:3.483759 +step:5625 train loss:3.519924 +step:5626 train loss:3.514660 +step:5627 train loss:3.483929 +step:5628 train loss:3.528850 +step:5629 train loss:3.507372 +step:5630 train loss:3.438194 +step:5631 train loss:3.479056 +step:5632 train loss:3.523586 +step:5633 train loss:3.509304 +step:5634 train loss:3.471234 +step:5635 train loss:3.504620 +step:5636 train loss:3.484982 +step:5637 train loss:3.621095 +step:5638 train loss:3.531158 +step:5639 train loss:3.512606 +step:5640 train loss:3.513623 +step:5641 train loss:3.554349 +step:5642 train loss:3.487111 +step:5643 train loss:3.502517 +step:5644 train loss:3.587640 +step:5645 train loss:3.545441 +step:5646 train loss:3.538990 +step:5647 train loss:3.530446 +step:5648 train loss:3.519195 +step:5649 train loss:3.432684 +step:5650 train loss:3.437708 +step:5651 train loss:3.514086 +step:5652 train loss:3.513576 +step:5653 train loss:3.483092 +step:5654 train loss:3.610835 +step:5655 train loss:3.470350 +step:5656 train loss:3.497277 +step:5657 train loss:3.563512 +step:5658 train loss:3.466926 +step:5659 train loss:3.503479 +step:5660 train loss:3.552585 +step:5661 train loss:3.494619 +step:5662 train loss:3.532652 +step:5663 train loss:3.421118 +step:5664 train loss:3.394947 +step:5665 train loss:3.515998 +step:5666 train loss:3.519921 +step:5667 train loss:3.554822 +step:5668 train loss:3.485368 +step:5669 train loss:3.498218 +step:5670 train loss:3.503251 +step:5671 train loss:3.483675 +step:5672 train loss:3.537640 +step:5673 train loss:3.500538 +step:5674 train loss:3.571896 +step:5675 train loss:3.485869 +step:5676 train loss:3.636360 +step:5677 train 
loss:3.527931 +step:5678 train loss:3.511397 +step:5679 train loss:3.496387 +step:5680 train loss:3.529749 +step:5681 train loss:3.499958 +step:5682 train loss:3.510602 +step:5683 train loss:3.472240 +step:5684 train loss:3.484126 +step:5685 train loss:3.526040 +step:5686 train loss:3.542773 +step:5687 train loss:3.489560 +step:5688 train loss:3.575218 +step:5689 train loss:3.482530 +step:5690 train loss:3.633111 +step:5691 train loss:3.463495 +step:5692 train loss:3.457664 +step:5693 train loss:3.458855 +step:5694 train loss:3.478058 +step:5695 train loss:3.495433 +step:5696 train loss:3.543533 +step:5697 train loss:3.471902 +step:5698 train loss:3.489838 +step:5699 train loss:3.502876 +step:5700 train loss:3.500793 +step:5701 train loss:3.497674 +step:5702 train loss:3.561828 +step:5703 train loss:3.463080 +step:5704 train loss:3.507292 +step:5705 train loss:3.510811 +step:5706 train loss:3.538229 +step:5707 train loss:3.457834 +step:5708 train loss:3.540988 +step:5709 train loss:3.543502 +step:5710 train loss:3.535519 +step:5711 train loss:3.556545 +step:5712 train loss:3.539986 +step:5713 train loss:3.464793 +step:5714 train loss:3.547663 +step:5715 train loss:3.503747 +step:5716 train loss:3.513696 +step:5717 train loss:3.535624 +step:5718 train loss:3.478723 +step:5719 train loss:3.553013 +step:5720 train loss:3.520494 +step:5721 train loss:3.453293 +step:5722 train loss:3.467784 +step:5723 train loss:3.543898 +step:5724 train loss:3.465957 +step:5725 train loss:3.541008 +step:5726 train loss:3.529483 +step:5727 train loss:3.487766 +step:5728 train loss:3.490912 +step:5729 train loss:3.488264 +step:5730 train loss:3.567140 +step:5731 train loss:3.429319 +step:5732 train loss:3.490419 +step:5733 train loss:3.484307 +step:5734 train loss:3.498112 +step:5735 train loss:3.488811 +step:5736 train loss:3.493904 +step:5737 train loss:3.511181 +step:5738 train loss:3.478322 +step:5739 train loss:3.489373 +step:5740 train loss:3.530188 +step:5741 train loss:3.508959 +step:5742 train loss:3.557620 +step:5743 train loss:3.525038 +step:5744 train loss:3.484032 +step:5745 train loss:3.482140 +step:5746 train loss:3.513986 +step:5747 train loss:3.502563 +step:5748 train loss:3.548224 +step:5749 train loss:3.505271 +step:5750 validation loss:3.426525 +step:5750 train loss:3.511099 +step:5751 train loss:3.524053 +step:5752 train loss:3.511312 +step:5753 train loss:3.483332 +step:5754 train loss:3.487573 +step:5755 train loss:3.504355 +step:5756 train loss:3.497299 +step:5757 train loss:3.554609 +step:5758 train loss:3.489736 +step:5759 train loss:3.454533 +step:5760 train loss:3.534627 +step:5761 train loss:3.528091 +step:5762 train loss:3.487380 +step:5763 train loss:3.514524 +step:5764 train loss:3.477721 +step:5765 train loss:3.597539 +step:5766 train loss:3.507566 +step:5767 train loss:3.541921 +step:5768 train loss:3.478723 +step:5769 train loss:3.602858 +step:5770 train loss:3.523017 +step:5771 train loss:3.549452 +step:5772 train loss:3.499582 +step:5773 train loss:3.480082 +step:5774 train loss:3.488039 +step:5775 train loss:3.559863 +step:5776 train loss:3.544411 +step:5777 train loss:3.461661 +step:5778 train loss:3.548768 +step:5779 train loss:3.502661 +step:5780 train loss:3.481177 +step:5781 train loss:3.547085 +step:5782 train loss:3.500245 +step:5783 train loss:3.464614 +step:5784 train loss:3.566262 +step:5785 train loss:3.552923 +step:5786 train loss:3.467844 +step:5787 train loss:3.513752 +step:5788 train loss:3.523961 +step:5789 train loss:3.464046 +step:5790 train loss:3.566851 
+step:5791 train loss:3.494917 +step:5792 train loss:3.770955 +step:5793 train loss:3.539414 +step:5794 train loss:3.555388 +step:5795 train loss:3.550014 +step:5796 train loss:3.532262 +step:5797 train loss:3.512083 +step:5798 train loss:3.513266 +step:5799 train loss:3.484508 +step:5800 train loss:3.637444 +step:5801 train loss:3.514720 +step:5802 train loss:3.504848 +step:5803 train loss:3.513916 +step:5804 train loss:3.534296 +step:5805 train loss:3.499510 +step:5806 train loss:3.537115 +step:5807 train loss:3.460322 +step:5808 train loss:3.491056 +step:5809 train loss:3.501822 +step:5810 train loss:3.475054 +step:5811 train loss:3.494486 +step:5812 train loss:3.469358 +step:5813 train loss:3.482884 +step:5814 train loss:3.477509 +step:5815 train loss:3.481702 +step:5816 train loss:3.539595 +step:5817 train loss:3.553190 +step:5818 train loss:3.524786 +step:5819 train loss:3.576275 +step:5820 train loss:3.514417 +step:5821 train loss:3.511028 +step:5822 train loss:3.524901 +step:5823 train loss:3.529814 +step:5824 train loss:3.479254 +step:5825 train loss:3.575644 +step:5826 train loss:3.487159 +step:5827 train loss:3.454297 +step:5828 train loss:3.441067 +step:5829 train loss:3.503336 +step:5830 train loss:3.474748 +step:5831 train loss:3.452001 +step:5832 train loss:3.563703 +step:5833 train loss:3.542299 +step:5834 train loss:3.523781 +step:5835 train loss:3.474917 +step:5836 train loss:3.440857 +step:5837 train loss:3.562171 +step:5838 train loss:3.541152 +step:5839 train loss:3.516778 +step:5840 train loss:3.600833 +step:5841 train loss:3.519111 +step:5842 train loss:3.537079 +step:5843 train loss:3.477881 +step:5844 train loss:3.545612 +step:5845 train loss:3.458401 +step:5846 train loss:3.507397 +step:5847 train loss:3.533999 +step:5848 train loss:3.599015 +step:5849 train loss:3.495729 +step:5850 train loss:3.523849 +step:5851 train loss:3.486195 +step:5852 train loss:3.578688 +step:5853 train loss:3.667153 +step:5854 train loss:3.457056 +step:5855 train loss:3.520889 +step:5856 train loss:3.490504 +step:5857 train loss:3.501442 +step:5858 train loss:3.473743 +step:5859 train loss:3.482207 +step:5860 train loss:3.582158 +step:5861 train loss:3.468802 +step:5862 train loss:3.579990 +step:5863 train loss:3.519835 +step:5864 train loss:3.507422 +step:5865 train loss:3.509461 +step:5866 train loss:3.504181 +step:5867 train loss:3.587072 +step:5868 train loss:3.504034 +step:5869 train loss:3.532857 +step:5870 train loss:3.510333 +step:5871 train loss:3.488315 +step:5872 train loss:3.517682 +step:5873 train loss:3.501161 +step:5874 train loss:3.580093 +step:5875 train loss:3.505641 +step:5876 train loss:3.489523 +step:5877 train loss:3.496472 +step:5878 train loss:3.493260 +step:5879 train loss:3.466594 +step:5880 train loss:3.668489 +step:5881 train loss:3.503037 +step:5882 train loss:3.474868 +step:5883 train loss:3.480874 +step:5884 train loss:3.493720 +step:5885 train loss:3.492233 +step:5886 train loss:3.513705 +step:5887 train loss:3.509892 +step:5888 train loss:3.495752 +step:5889 train loss:3.471100 +step:5890 train loss:3.517805 +step:5891 train loss:3.463617 +step:5892 train loss:3.546626 +step:5893 train loss:3.465546 +step:5894 train loss:3.457683 +step:5895 train loss:3.468154 +step:5896 train loss:3.470505 +step:5897 train loss:3.542352 +step:5898 train loss:3.762431 +step:5899 train loss:3.488620 +step:5900 train loss:3.538903 +step:5901 train loss:3.499422 +step:5902 train loss:3.501745 +step:5903 train loss:3.492939 +step:5904 train loss:3.524026 +step:5905 train 
loss:3.632114 +step:5906 train loss:3.573384 +step:5907 train loss:3.513422 +step:5908 train loss:3.489662 +step:5909 train loss:3.484684 +step:5910 train loss:3.471305 +step:5911 train loss:3.488290 +step:5912 train loss:3.519320 +step:5913 train loss:3.525303 +step:5914 train loss:3.504784 +step:5915 train loss:3.627090 +step:5916 train loss:3.509285 +step:5917 train loss:3.479360 +step:5918 train loss:3.479784 +step:5919 train loss:3.503876 +step:5920 train loss:3.502956 +step:5921 train loss:3.474306 +step:5922 train loss:3.533319 +step:5923 train loss:3.524436 +step:5924 train loss:3.483919 +step:5925 train loss:3.604608 +step:5926 train loss:3.487510 +step:5927 train loss:3.467450 +step:5928 train loss:3.505077 +step:5929 train loss:3.524231 +step:5930 train loss:3.475700 +step:5931 train loss:3.457959 +step:5932 train loss:3.500782 +step:5933 train loss:3.555247 +step:5934 train loss:3.465693 +step:5935 train loss:3.492879 +step:5936 train loss:3.480471 +step:5937 train loss:3.460353 +step:5938 train loss:3.482259 +step:5939 train loss:3.457527 +step:5940 train loss:3.541214 +step:5941 train loss:3.476654 +step:5942 train loss:3.490462 +step:5943 train loss:3.496915 +step:5944 train loss:3.549843 +step:5945 train loss:3.483450 +step:5946 train loss:3.460039 +step:5947 train loss:3.474331 +step:5948 train loss:3.512983 +step:5949 train loss:3.559478 +step:5950 train loss:3.516080 +step:5951 train loss:3.519230 +step:5952 train loss:3.439548 +step:5953 train loss:3.481660 +step:5954 train loss:3.494838 +step:5955 train loss:3.498928 +step:5956 train loss:3.477745 +step:5957 train loss:3.447599 +step:5958 train loss:3.521459 +step:5959 train loss:3.478191 +step:5960 train loss:3.453866 +step:5961 train loss:3.479025 +step:5962 train loss:3.509202 +step:5963 train loss:3.544266 +step:5964 train loss:3.499601 +step:5965 train loss:3.518579 +step:5966 train loss:3.514389 +step:5967 train loss:3.478835 +step:5968 train loss:3.552961 +step:5969 train loss:3.491190 +step:5970 train loss:3.511849 +step:5971 train loss:3.461800 +step:5972 train loss:3.492673 +step:5973 train loss:3.477867 +step:5974 train loss:3.504717 +step:5975 train loss:3.476186 +step:5976 train loss:3.516235 +step:5977 train loss:3.472253 +step:5978 train loss:3.459199 +step:5979 train loss:3.492742 +step:5980 train loss:3.566731 +step:5981 train loss:3.454942 +step:5982 train loss:3.467456 +step:5983 train loss:3.535709 +step:5984 train loss:3.479294 +step:5985 train loss:3.523227 +step:5986 train loss:3.496052 +step:5987 train loss:3.481749 +step:5988 train loss:3.489781 +step:5989 train loss:3.509561 +step:5990 train loss:3.441559 +step:5991 train loss:3.503208 +step:5992 train loss:3.536736 +step:5993 train loss:3.487325 +step:5994 train loss:3.509025 +step:5995 train loss:3.399473 +step:5996 train loss:3.564524 +step:5997 train loss:3.547338 +step:5998 train loss:3.423842 +step:5999 train loss:3.452447 +step:6000 validation loss:3.417522 total_sharp:3.4609e-03 L1_sharp:5.0101e-03 L2_sharp:3.7926e-03 L3_sharp:2.0099e-03 L4_sharp:8.5203e-04 L5_sharp:1.0201e-03 L6_sharp:1.3130e-03 L7_sharp:1.5116e-03 L8_sharp:1.5869e-03 L9_sharp:1.2137e-03 L10_sharp:9.6766e-04 L11_sharp:8.5348e-04 L12_sharp:9.6691e-04 total_fnorm:1.6785e+00 total_l1_linf:1.3761e+04 total_spectral:1.6785e+00 L1_fnorm:2.8985e-01 L2_fnorm:2.7451e-01 L3_fnorm:2.8350e-01 L4_fnorm:2.9645e-01 L5_fnorm:2.9937e-01 L6_fnorm:3.0181e-01 L7_fnorm:3.0136e-01 L8_fnorm:3.0206e-01 L9_fnorm:3.0291e-01 L10_fnorm:3.0257e-01 L11_fnorm:3.0136e-01 L12_fnorm:3.0112e-01 
L1_l1linf:3.6082e-01 L2_l1linf:4.5837e-01 L3_l1linf:4.4241e-01 L4_l1linf:4.2165e-01 L5_l1linf:3.8470e-01 L6_l1linf:3.5816e-01 L7_l1linf:3.3845e-01 L8_l1linf:3.4394e-01 L9_l1linf:3.6937e-01 L10_l1linf:4.0950e-01 L11_l1linf:4.2434e-01 L12_l1linf:4.2669e-01 L1_spectral:8.1244e-03 L2_spectral:1.0261e-02 L3_spectral:9.9470e-03 L4_spectral:9.5488e-03 L5_spectral:8.6735e-03 L6_spectral:8.1037e-03 L7_spectral:7.6827e-03 L8_spectral:7.7311e-03 L9_spectral:8.2854e-03 L10_spectral:9.1699e-03 L11_spectral:9.4831e-03 L12_spectral:9.6274e-03 ip_v_neg_g:5.8318e-03 cos_v_neg_g:2.8979e-03 v_norm:1.6785e+00 g_norm:1.1989e+00 hv_norm:3.4637e-01 cos_v_hv:1.6772e-02 hg_norm:1.8752e+01 cos_g_hg:4.8146e-01 v_par:1.6791e-04 v_perp:1.6785e+00 L1_cos_v_neg_g:3.8964e-03 L1_v_norm:2.8985e-01 L2_cos_v_neg_g:4.4988e-03 L2_v_norm:2.7451e-01 L3_cos_v_neg_g:4.5044e-03 L3_v_norm:2.8350e-01 L4_cos_v_neg_g:4.2710e-03 L4_v_norm:2.9645e-01 L5_cos_v_neg_g:4.1068e-03 L5_v_norm:2.9937e-01 L6_cos_v_neg_g:4.6924e-03 L6_v_norm:3.0181e-01 L7_cos_v_neg_g:5.6823e-03 L7_v_norm:3.0136e-01 L8_cos_v_neg_g:6.3268e-03 L8_v_norm:3.0206e-01 L9_cos_v_neg_g:7.0890e-03 L9_v_norm:3.0291e-01 L10_cos_v_neg_g:5.9384e-03 L10_v_norm:3.0257e-01 L11_cos_v_neg_g:5.1873e-03 L11_v_norm:3.0136e-01 L12_cos_v_neg_g:3.8469e-03 L12_v_norm:3.0112e-01 +step:6000 train loss:3.501033 +step:6001 train loss:3.467057 +step:6002 train loss:3.494492 +step:6003 train loss:3.513180 +step:6004 train loss:3.463708 +step:6005 train loss:3.537601 +step:6006 train loss:3.445598 +step:6007 train loss:3.468237 +step:6008 train loss:3.479889 +step:6009 train loss:3.517156 +step:6010 train loss:3.506368 +step:6011 train loss:3.496199 +step:6012 train loss:3.462713 +step:6013 train loss:3.520819 +step:6014 train loss:3.539798 +step:6015 train loss:3.540696 +step:6016 train loss:3.507082 +step:6017 train loss:3.518051 +step:6018 train loss:3.456688 +step:6019 train loss:3.494971 +step:6020 train loss:3.479622 +step:6021 train loss:3.408595 +step:6022 train loss:3.523987 +step:6023 train loss:3.457440 +step:6024 train loss:3.533309 +step:6025 train loss:3.496972 +step:6026 train loss:3.471393 +step:6027 train loss:3.508179 +step:6028 train loss:3.425015 +step:6029 train loss:3.543824 +step:6030 train loss:3.507730 +step:6031 train loss:3.487851 +step:6032 train loss:3.448320 +step:6033 train loss:3.499234 +step:6034 train loss:3.527002 +step:6035 train loss:3.441566 +step:6036 train loss:3.415234 +step:6037 train loss:3.531386 +step:6038 train loss:3.535326 +step:6039 train loss:3.519697 +step:6040 train loss:3.477863 +step:6041 train loss:3.454649 +step:6042 train loss:3.438035 +step:6043 train loss:3.499759 +step:6044 train loss:3.613910 +step:6045 train loss:3.460183 +step:6046 train loss:3.472918 +step:6047 train loss:3.502391 +step:6048 train loss:3.516582 +step:6049 train loss:3.492968 +step:6050 train loss:3.463763 +step:6051 train loss:3.512924 +step:6052 train loss:3.486594 +step:6053 train loss:3.605018 +step:6054 train loss:3.643709 +step:6055 train loss:3.457133 +step:6056 train loss:3.449155 +step:6057 train loss:3.483010 +step:6058 train loss:3.511161 +step:6059 train loss:3.513410 +step:6060 train loss:3.520993 +step:6061 train loss:3.534670 +step:6062 train loss:3.490794 +step:6063 train loss:3.500518 +step:6064 train loss:3.497695 +step:6065 train loss:3.500354 +step:6066 train loss:3.485936 +step:6067 train loss:3.525187 +step:6068 train loss:3.466529 +step:6069 train loss:3.425568 +step:6070 train loss:3.576514 +step:6071 train loss:3.514090 +step:6072 train 
loss:3.456390 +step:6073 train loss:3.497743 +step:6074 train loss:3.580219 +step:6075 train loss:3.499380 +step:6076 train loss:3.509171 +step:6077 train loss:3.510547 +step:6078 train loss:3.445391 +step:6079 train loss:3.477691 +step:6080 train loss:3.481861 +step:6081 train loss:3.519093 +step:6082 train loss:3.469712 +step:6083 train loss:3.483414 +step:6084 train loss:3.548577 +step:6085 train loss:3.543060 +step:6086 train loss:3.444544 +step:6087 train loss:3.490664 +step:6088 train loss:3.475940 +step:6089 train loss:3.532463 +step:6090 train loss:3.535136 +step:6091 train loss:3.483616 +step:6092 train loss:3.444201 +step:6093 train loss:3.507041 +step:6094 train loss:3.420927 +step:6095 train loss:3.590587 +step:6096 train loss:3.458204 +step:6097 train loss:3.533777 +step:6098 train loss:3.505594 +step:6099 train loss:3.564942 +step:6100 train loss:3.559830 +step:6101 train loss:3.493213 +step:6102 train loss:3.609672 +step:6103 train loss:3.494238 +step:6104 train loss:3.608425 +step:6105 train loss:3.540023 +step:6106 train loss:3.479047 +step:6107 train loss:3.543090 +step:6108 train loss:3.505582 +step:6109 train loss:3.576005 +step:6110 train loss:3.509501 +step:6111 train loss:3.542104 +step:6112 train loss:3.482696 +step:6113 train loss:3.510680 +step:6114 train loss:3.478543 +step:6115 train loss:3.538478 +step:6116 train loss:3.482112 +step:6117 train loss:3.538217 +step:6118 train loss:3.520746 +step:6119 train loss:3.527450 +step:6120 train loss:3.673140 +step:6121 train loss:3.509207 +step:6122 train loss:3.518080 +step:6123 train loss:3.499752 +step:6124 train loss:3.474648 +step:6125 train loss:3.466706 +step:6126 train loss:3.482760 +step:6127 train loss:3.475057 +step:6128 train loss:3.442637 +step:6129 train loss:3.666057 +step:6130 train loss:3.454403 +step:6131 train loss:3.438209 +step:6132 train loss:3.506998 +step:6133 train loss:3.470925 +step:6134 train loss:3.502116 +step:6135 train loss:3.583276 +step:6136 train loss:3.602322 +step:6137 train loss:3.467544 +step:6138 train loss:3.522403 +step:6139 train loss:3.501686 +step:6140 train loss:3.499656 +step:6141 train loss:3.459434 +step:6142 train loss:3.527160 +step:6143 train loss:3.490815 +step:6144 train loss:3.510901 +step:6145 train loss:3.759201 +step:6146 train loss:3.592840 +step:6147 train loss:3.679780 +step:6148 train loss:3.443832 +step:6149 train loss:3.574452 +step:6150 train loss:3.522334 +step:6151 train loss:3.478401 +step:6152 train loss:3.479235 +step:6153 train loss:3.544093 +step:6154 train loss:3.627247 +step:6155 train loss:3.495983 +step:6156 train loss:3.593486 +step:6157 train loss:3.519789 +step:6158 train loss:3.511601 +step:6159 train loss:3.477689 +step:6160 train loss:3.642453 +step:6161 train loss:3.494531 +step:6162 train loss:3.511181 +step:6163 train loss:3.545414 +step:6164 train loss:3.457563 +step:6165 train loss:3.524101 +step:6166 train loss:3.520772 +step:6167 train loss:3.537600 +step:6168 train loss:3.520663 +step:6169 train loss:3.505946 +step:6170 train loss:3.509712 +step:6171 train loss:3.479135 +step:6172 train loss:3.466769 +step:6173 train loss:3.515887 +step:6174 train loss:3.443833 +step:6175 train loss:3.457863 +step:6176 train loss:3.440542 +step:6177 train loss:3.533867 +step:6178 train loss:3.481850 +step:6179 train loss:3.492331 +step:6180 train loss:3.494806 +step:6181 train loss:3.529746 +step:6182 train loss:3.412895 +step:6183 train loss:3.425023 +step:6184 train loss:3.539405 +step:6185 train loss:3.493463 +step:6186 train loss:3.453927 
+step:6187 train loss:3.498286 +step:6188 train loss:3.465851 +step:6189 train loss:3.503542 +step:6190 train loss:3.463622 +step:6191 train loss:3.495526 +step:6192 train loss:3.465743 +step:6193 train loss:3.528515 +step:6194 train loss:3.521557 +step:6195 train loss:3.503487 +step:6196 train loss:3.516492 +step:6197 train loss:3.540997 +step:6198 train loss:3.452519 +step:6199 train loss:3.476751 +step:6200 train loss:3.518469 +step:6201 train loss:3.560529 +step:6202 train loss:3.562055 +step:6203 train loss:3.563366 +step:6204 train loss:3.544776 +step:6205 train loss:3.481281 +step:6206 train loss:3.468843 +step:6207 train loss:3.529142 +step:6208 train loss:3.558476 +step:6209 train loss:3.523014 +step:6210 train loss:3.555377 +step:6211 train loss:3.474120 +step:6212 train loss:3.464901 +step:6213 train loss:3.478364 +step:6214 train loss:3.456421 +step:6215 train loss:3.630482 +step:6216 train loss:3.503210 +step:6217 train loss:3.555237 +step:6218 train loss:3.535568 +step:6219 train loss:3.548433 +step:6220 train loss:3.502521 +step:6221 train loss:3.466909 +step:6222 train loss:3.711458 +step:6223 train loss:3.467928 +step:6224 train loss:3.500180 +step:6225 train loss:3.481585 +step:6226 train loss:3.491266 +step:6227 train loss:3.495367 +step:6228 train loss:3.489847 +step:6229 train loss:3.529044 +step:6230 train loss:3.487152 +step:6231 train loss:3.594081 +step:6232 train loss:3.441854 +step:6233 train loss:3.479435 +step:6234 train loss:3.485793 +step:6235 train loss:3.517757 +step:6236 train loss:3.452286 +step:6237 train loss:3.477913 +step:6238 train loss:3.501325 +step:6239 train loss:3.487175 +step:6240 train loss:3.509013 +step:6241 train loss:3.490824 +step:6242 train loss:3.489537 +step:6243 train loss:3.526808 +step:6244 train loss:3.678858 +step:6245 train loss:3.477214 +step:6246 train loss:3.468607 +step:6247 train loss:3.457690 +step:6248 train loss:3.462045 +step:6249 train loss:3.405454 +step:6250 validation loss:3.410281 +step:6250 train loss:3.438039 +step:6251 train loss:3.459496 +step:6252 train loss:3.499239 +step:6253 train loss:3.510579 +step:6254 train loss:3.502917 +step:6255 train loss:3.466941 +step:6256 train loss:3.516749 +step:6257 train loss:3.518300 +step:6258 train loss:3.498806 +step:6259 train loss:3.505542 +step:6260 train loss:3.532392 +step:6261 train loss:3.553258 +step:6262 train loss:3.447710 +step:6263 train loss:3.479514 +step:6264 train loss:3.492038 +step:6265 train loss:3.478422 +step:6266 train loss:3.685680 +step:6267 train loss:3.481961 +step:6268 train loss:3.569515 +step:6269 train loss:3.444580 +step:6270 train loss:3.460912 +step:6271 train loss:3.505918 +step:6272 train loss:3.497794 +step:6273 train loss:3.697339 +step:6274 train loss:3.477079 +step:6275 train loss:3.509673 +step:6276 train loss:3.480194 +step:6277 train loss:3.462894 +step:6278 train loss:3.448283 +step:6279 train loss:3.505209 +step:6280 train loss:3.508524 +step:6281 train loss:3.444501 +step:6282 train loss:3.456271 +step:6283 train loss:3.543769 +step:6284 train loss:3.510295 +step:6285 train loss:3.510308 +step:6286 train loss:3.457438 +step:6287 train loss:3.488282 +step:6288 train loss:3.586330 +step:6289 train loss:3.446293 +step:6290 train loss:3.447775 +step:6291 train loss:3.478139 +step:6292 train loss:3.496389 +step:6293 train loss:3.482656 +step:6294 train loss:3.471525 +step:6295 train loss:3.490587 +step:6296 train loss:3.456515 +step:6297 train loss:3.582214 +step:6298 train loss:3.528394 +step:6299 train loss:3.424403 +step:6300 
train loss:3.500857 +step:6301 train loss:3.531337 +step:6302 train loss:3.514453 +step:6303 train loss:3.483726 +step:6304 train loss:3.501259 +step:6305 train loss:3.471344 +step:6306 train loss:3.482196 +step:6307 train loss:3.491688 +step:6308 train loss:3.466500 +step:6309 train loss:3.465921 +step:6310 train loss:3.521543 +step:6311 train loss:3.472025 +step:6312 train loss:3.513284 +step:6313 train loss:3.443423 +step:6314 train loss:3.468718 +step:6315 train loss:3.521069 +step:6316 train loss:3.447473 +step:6317 train loss:3.430925 +step:6318 train loss:3.552328 +step:6319 train loss:3.481273 +step:6320 train loss:3.492497 +step:6321 train loss:3.484950 +step:6322 train loss:3.483476 +step:6323 train loss:3.412819 +step:6324 train loss:3.425897 +step:6325 train loss:3.520670 +step:6326 train loss:3.437467 +step:6327 train loss:3.514256 +step:6328 train loss:3.490817 +step:6329 train loss:3.414267 +step:6330 train loss:3.440840 +step:6331 train loss:3.460078 +step:6332 train loss:3.593967 +step:6333 train loss:3.470108 +step:6334 train loss:3.448998 +step:6335 train loss:3.420387 +step:6336 train loss:3.448572 +step:6337 train loss:3.476037 +step:6338 train loss:3.428727 +step:6339 train loss:3.475207 +step:6340 train loss:3.454192 +step:6341 train loss:3.469961 +step:6342 train loss:3.468295 +step:6343 train loss:3.565836 +step:6344 train loss:3.417162 +step:6345 train loss:3.432990 +step:6346 train loss:3.508984 +step:6347 train loss:3.388561 +step:6348 train loss:3.479682 +step:6349 train loss:3.457767 +step:6350 train loss:3.430717 +step:6351 train loss:3.432117 +step:6352 train loss:3.448790 +step:6353 train loss:3.464288 +step:6354 train loss:3.479200 +step:6355 train loss:3.488970 +step:6356 train loss:3.501513 +step:6357 train loss:3.359306 +step:6358 train loss:3.443351 +step:6359 train loss:3.502042 +step:6360 train loss:3.411995 +step:6361 train loss:3.412694 +step:6362 train loss:3.458726 +step:6363 train loss:3.437117 +step:6364 train loss:3.419404 +step:6365 train loss:3.497274 +step:6366 train loss:3.504973 +step:6367 train loss:3.439283 +step:6368 train loss:3.475698 +step:6369 train loss:3.445210 +step:6370 train loss:3.494318 +step:6371 train loss:3.410344 +step:6372 train loss:3.442940 +step:6373 train loss:3.463710 +step:6374 train loss:3.497805 +step:6375 train loss:3.458006 +step:6376 train loss:3.478576 +step:6377 train loss:3.482286 +step:6378 train loss:3.424314 +step:6379 train loss:3.467379 +step:6380 train loss:3.512912 +step:6381 train loss:3.474631 +step:6382 train loss:3.433940 +step:6383 train loss:3.492202 +step:6384 train loss:3.472690 +step:6385 train loss:3.448803 +step:6386 train loss:3.480322 +step:6387 train loss:3.462407 +step:6388 train loss:3.499458 +step:6389 train loss:3.510469 +step:6390 train loss:3.458403 +step:6391 train loss:3.448320 +step:6392 train loss:3.431946 +step:6393 train loss:3.484867 +step:6394 train loss:3.478548 +step:6395 train loss:3.659136 +step:6396 train loss:3.475801 +step:6397 train loss:3.421353 +step:6398 train loss:3.492236 +step:6399 train loss:3.432474 +step:6400 train loss:3.506229 +step:6401 train loss:3.544568 +step:6402 train loss:3.470114 +step:6403 train loss:3.465851 +step:6404 train loss:3.444350 +step:6405 train loss:3.471513 +step:6406 train loss:3.478243 +step:6407 train loss:3.536475 +step:6408 train loss:3.428910 +step:6409 train loss:3.411049 +step:6410 train loss:3.548034 +step:6411 train loss:3.473582 +step:6412 train loss:3.476821 +step:6413 train loss:3.480762 +step:6414 train loss:3.431764 
+step:6415 train loss:3.494551 +step:6416 train loss:3.458659 +step:6417 train loss:3.431851 +step:6418 train loss:3.421350 +step:6419 train loss:3.504945 +step:6420 train loss:3.434129 +step:6421 train loss:3.459975 +step:6422 train loss:3.453064 +step:6423 train loss:3.458185 +step:6424 train loss:3.481217 +step:6425 train loss:3.477582 +step:6426 train loss:3.520490 +step:6427 train loss:3.484452 +step:6428 train loss:3.516003 +step:6429 train loss:3.483826 +step:6430 train loss:3.456676 +step:6431 train loss:3.436248 +step:6432 train loss:3.466701 +step:6433 train loss:3.478325 +step:6434 train loss:3.370792 +step:6435 train loss:3.549639 +step:6436 train loss:3.477365 +step:6437 train loss:3.437370 +step:6438 train loss:3.470830 +step:6439 train loss:3.439505 +step:6440 train loss:3.458452 +step:6441 train loss:3.449632 +step:6442 train loss:3.390529 +step:6443 train loss:3.451613 +step:6444 train loss:3.586284 +step:6445 train loss:3.492556 +step:6446 train loss:3.492837 +step:6447 train loss:3.475528 +step:6448 train loss:3.424365 +step:6449 train loss:3.448077 +step:6450 train loss:3.432321 +step:6451 train loss:3.420460 +step:6452 train loss:3.425561 +step:6453 train loss:3.465782 +step:6454 train loss:3.492257 +step:6455 train loss:3.480489 +step:6456 train loss:3.498914 +step:6457 train loss:3.474493 +step:6458 train loss:3.448535 +step:6459 train loss:3.430193 +step:6460 train loss:3.435562 +step:6461 train loss:3.439169 +step:6462 train loss:3.434034 +step:6463 train loss:3.531349 +step:6464 train loss:3.438458 +step:6465 train loss:3.477038 +step:6466 train loss:3.495468 +step:6467 train loss:3.417274 +step:6468 train loss:3.497988 +step:6469 train loss:3.403189 +step:6470 train loss:3.530592 +step:6471 train loss:3.434755 +step:6472 train loss:3.593818 +step:6473 train loss:3.477375 +step:6474 train loss:3.509398 +step:6475 train loss:3.455262 +step:6476 train loss:3.524590 +step:6477 train loss:3.458511 +step:6478 train loss:3.586354 +step:6479 train loss:3.499885 +step:6480 train loss:3.438558 +step:6481 train loss:3.494269 +step:6482 train loss:3.431550 +step:6483 train loss:3.494813 +step:6484 train loss:3.449772 +step:6485 train loss:3.510074 +step:6486 train loss:3.446881 +step:6487 train loss:3.440375 +step:6488 train loss:3.440707 +step:6489 train loss:3.439650 +step:6490 train loss:3.466680 +step:6491 train loss:3.439057 +step:6492 train loss:3.537743 +step:6493 train loss:3.443568 +step:6494 train loss:3.445212 +step:6495 train loss:3.448145 +step:6496 train loss:3.477283 +step:6497 train loss:3.493129 +step:6498 train loss:3.602252 +step:6499 train loss:3.577120 +step:6500 validation loss:3.401490 total_sharp:2.8404e-03 L1_sharp:3.7415e-03 L2_sharp:9.2525e-04 L3_sharp:1.5471e-03 L4_sharp:8.1120e-04 L5_sharp:9.7289e-04 L6_sharp:1.8642e-03 L7_sharp:2.0195e-03 L8_sharp:1.4508e-03 L9_sharp:9.8189e-04 L10_sharp:7.6867e-04 L11_sharp:5.9234e-04 L12_sharp:6.4905e-04 total_fnorm:1.6844e+00 total_l1_linf:1.3794e+04 total_spectral:1.6844e+00 L1_fnorm:2.8463e-01 L2_fnorm:2.7733e-01 L3_fnorm:2.8071e-01 L4_fnorm:2.8259e-01 L5_fnorm:2.9802e-01 L6_fnorm:2.9993e-01 L7_fnorm:2.9982e-01 L8_fnorm:3.0115e-01 L9_fnorm:3.0190e-01 L10_fnorm:3.0178e-01 L11_fnorm:3.0089e-01 L12_fnorm:3.0078e-01 L1_l1linf:3.5805e-01 L2_l1linf:4.0730e-01 L3_l1linf:4.1533e-01 L4_l1linf:4.1210e-01 L5_l1linf:3.9142e-01 L6_l1linf:3.6705e-01 L7_l1linf:3.5523e-01 L8_l1linf:3.3642e-01 L9_l1linf:3.6658e-01 L10_l1linf:3.9687e-01 L11_l1linf:4.1713e-01 L12_l1linf:4.2596e-01 L1_spectral:8.1060e-03 L2_spectral:9.3359e-03 
L3_spectral:9.3313e-03 L4_spectral:9.2707e-03 L5_spectral:8.8630e-03 L6_spectral:8.2647e-03 L7_spectral:7.9698e-03 L8_spectral:7.6596e-03 L9_spectral:8.2796e-03 L10_spectral:8.9361e-03 L11_spectral:9.3556e-03 L12_spectral:9.6462e-03 ip_v_neg_g:3.2534e-03 cos_v_neg_g:1.6235e-03 v_norm:1.6844e+00 g_norm:1.1897e+00 hv_norm:2.9372e-01 cos_v_hv:1.6289e-02 hg_norm:1.4609e+01 cos_g_hg:5.0176e-01 v_par:1.1367e-04 v_perp:1.6844e+00 L1_cos_v_neg_g:2.0235e-03 L1_v_norm:2.8463e-01 L2_cos_v_neg_g:2.2174e-03 L2_v_norm:2.7733e-01 L3_cos_v_neg_g:2.0471e-03 L3_v_norm:2.8071e-01 L4_cos_v_neg_g:2.3896e-03 L4_v_norm:2.8259e-01 L5_cos_v_neg_g:2.4606e-03 L5_v_norm:2.9802e-01 L6_cos_v_neg_g:3.2094e-03 L6_v_norm:2.9993e-01 L7_cos_v_neg_g:4.6455e-03 L7_v_norm:2.9982e-01 L8_cos_v_neg_g:3.9197e-03 L8_v_norm:3.0115e-01 L9_cos_v_neg_g:2.7667e-03 L9_v_norm:3.0190e-01 L10_cos_v_neg_g:2.9579e-03 L10_v_norm:3.0178e-01 L11_cos_v_neg_g:2.9840e-03 L11_v_norm:3.0089e-01 L12_cos_v_neg_g:2.7290e-03 L12_v_norm:3.0078e-01 +step:6500 train loss:3.419271 +step:6501 train loss:3.436136 +step:6502 train loss:3.458744 +step:6503 train loss:3.518522 +step:6504 train loss:3.464070 +step:6505 train loss:3.470512 +step:6506 train loss:3.431579 +step:6507 train loss:3.498498 +step:6508 train loss:3.471053 +step:6509 train loss:3.451596 +step:6510 train loss:3.461653 +step:6511 train loss:3.471515 +step:6512 train loss:3.417553 +step:6513 train loss:3.483984 +step:6514 train loss:3.358305 +step:6515 train loss:3.449077 +step:6516 train loss:3.496335 +step:6517 train loss:3.412621 +step:6518 train loss:3.454767 +step:6519 train loss:3.443365 +step:6520 train loss:3.530897 +step:6521 train loss:3.507678 +step:6522 train loss:3.517081 +step:6523 train loss:3.415570 +step:6524 train loss:3.498822 +step:6525 train loss:3.486580 +step:6526 train loss:3.418567 +step:6527 train loss:3.479188 +step:6528 train loss:3.497491 +step:6529 train loss:3.525307 +step:6530 train loss:3.430387 +step:6531 train loss:3.508616 +step:6532 train loss:3.434085 +step:6533 train loss:3.474931 +step:6534 train loss:3.479779 +step:6535 train loss:3.455836 +step:6536 train loss:3.592964 +step:6537 train loss:3.398447 +step:6538 train loss:3.506730 +step:6539 train loss:3.431389 +step:6540 train loss:3.543435 +step:6541 train loss:3.525520 +step:6542 train loss:3.478795 +step:6543 train loss:3.436099 +step:6544 train loss:3.415287 +step:6545 train loss:3.407945 +step:6546 train loss:3.463864 +step:6547 train loss:3.525813 +step:6548 train loss:3.461761 +step:6549 train loss:3.478885 +step:6550 train loss:3.590117 +step:6551 train loss:3.473286 +step:6552 train loss:3.463610 +step:6553 train loss:3.504540 +step:6554 train loss:3.393331 +step:6555 train loss:3.480224 +step:6556 train loss:3.351027 +step:6557 train loss:3.699060 +step:6558 train loss:3.531493 +step:6559 train loss:3.441919 +step:6560 train loss:3.481798 +step:6561 train loss:3.451432 +step:6562 train loss:3.475521 +step:6563 train loss:3.362677 +step:6564 train loss:3.470986 +step:6565 train loss:3.376272 +step:6566 train loss:3.486495 +step:6567 train loss:3.456602 +step:6568 train loss:3.501743 +step:6569 train loss:3.450271 +step:6570 train loss:3.490304 +step:6571 train loss:3.418447 +step:6572 train loss:3.494610 +step:6573 train loss:3.508826 +step:6574 train loss:3.492024 +step:6575 train loss:3.441417 +step:6576 train loss:3.425829 +step:6577 train loss:3.499544 +step:6578 train loss:3.367088 +step:6579 train loss:3.468982 +step:6580 train loss:3.422548 +step:6581 train loss:3.437123 +step:6582 
train loss:3.415175 +step:6583 train loss:3.516198 +step:6584 train loss:3.444858 +step:6585 train loss:3.482930 +step:6586 train loss:3.491747 +step:6587 train loss:3.498960 +step:6588 train loss:3.464720 +step:6589 train loss:3.495788 +step:6590 train loss:3.433808 +step:6591 train loss:3.485327 +step:6592 train loss:3.426924 +step:6593 train loss:3.435717 +step:6594 train loss:3.459757 +step:6595 train loss:3.441915 +step:6596 train loss:3.441403 +step:6597 train loss:3.463934 +step:6598 train loss:3.509264 +step:6599 train loss:3.396918 +step:6600 train loss:3.458683 +step:6601 train loss:3.511672 +step:6602 train loss:3.440394 +step:6603 train loss:3.463495 +step:6604 train loss:3.479982 +step:6605 train loss:3.456700 +step:6606 train loss:3.516273 +step:6607 train loss:3.433828 +step:6608 train loss:3.450138 +step:6609 train loss:3.420787 +step:6610 train loss:3.531137 +step:6611 train loss:3.454158 +step:6612 train loss:3.499777 +step:6613 train loss:3.412812 +step:6614 train loss:3.442094 +step:6615 train loss:3.441376 +step:6616 train loss:3.425151 +step:6617 train loss:3.461255 +step:6618 train loss:3.453809 +step:6619 train loss:3.425950 +step:6620 train loss:3.528392 +step:6621 train loss:3.403025 +step:6622 train loss:3.480150 +step:6623 train loss:3.407438 +step:6624 train loss:3.483678 +step:6625 train loss:3.526317 +step:6626 train loss:3.489103 +step:6627 train loss:3.434012 +step:6628 train loss:3.498071 +step:6629 train loss:3.396354 +step:6630 train loss:3.436182 +step:6631 train loss:3.471467 +step:6632 train loss:3.511658 +step:6633 train loss:3.462361 +step:6634 train loss:3.523379 +step:6635 train loss:3.422762 +step:6636 train loss:3.464117 +step:6637 train loss:3.433927 +step:6638 train loss:3.432779 +step:6639 train loss:3.445387 +step:6640 train loss:3.432911 +step:6641 train loss:3.444503 +step:6642 train loss:3.444312 +step:6643 train loss:3.529987 +step:6644 train loss:3.528809 +step:6645 train loss:3.404119 +step:6646 train loss:3.493147 +step:6647 train loss:3.450485 +step:6648 train loss:3.553807 +step:6649 train loss:3.484463 +step:6650 train loss:3.430897 +step:6651 train loss:3.479845 +step:6652 train loss:3.491157 +step:6653 train loss:3.439246 +step:6654 train loss:3.432044 +step:6655 train loss:3.476651 +step:6656 train loss:3.442466 +step:6657 train loss:3.469102 +step:6658 train loss:3.450663 +step:6659 train loss:3.604986 +step:6660 train loss:3.504833 +step:6661 train loss:3.430572 +step:6662 train loss:3.462835 +step:6663 train loss:3.394919 +step:6664 train loss:3.473505 +step:6665 train loss:3.484008 +step:6666 train loss:3.497457 +step:6667 train loss:3.412776 +step:6668 train loss:3.538892 +step:6669 train loss:3.424794 +step:6670 train loss:3.432292 +step:6671 train loss:3.518454 +step:6672 train loss:3.466010 +step:6673 train loss:3.477741 +step:6674 train loss:3.449552 +step:6675 train loss:3.470048 +step:6676 train loss:3.476918 +step:6677 train loss:3.434918 +step:6678 train loss:3.502614 +step:6679 train loss:3.541756 +step:6680 train loss:3.541466 +step:6681 train loss:3.494532 +step:6682 train loss:3.437079 +step:6683 train loss:3.457876 +step:6684 train loss:3.472866 +step:6685 train loss:3.482618 +step:6686 train loss:3.417824 +step:6687 train loss:3.434441 +step:6688 train loss:3.480842 +step:6689 train loss:3.489324 +step:6690 train loss:3.461438 +step:6691 train loss:3.499042 +step:6692 train loss:3.505260 +step:6693 train loss:3.536819 +step:6694 train loss:3.490144 +step:6695 train loss:3.465298 +step:6696 train loss:3.400480 
+step:6697 train loss:3.618580 +step:6698 train loss:3.460989 +step:6699 train loss:3.458219 +step:6700 train loss:3.469162 +step:6701 train loss:3.527857 +step:6702 train loss:3.418521 +step:6703 train loss:3.464372 +step:6704 train loss:3.450486 +step:6705 train loss:3.465640 +step:6706 train loss:3.439957 +step:6707 train loss:3.514981 +step:6708 train loss:3.467709 +step:6709 train loss:3.495970 +step:6710 train loss:3.485640 +step:6711 train loss:3.436822 +step:6712 train loss:3.423369 +step:6713 train loss:3.450918 +step:6714 train loss:3.494728 +step:6715 train loss:3.434913 +step:6716 train loss:3.516637 +step:6717 train loss:3.455697 +step:6718 train loss:3.479869 +step:6719 train loss:3.512719 +step:6720 train loss:3.443155 +step:6721 train loss:3.461449 +step:6722 train loss:3.439317 +step:6723 train loss:3.566510 +step:6724 train loss:3.425265 +step:6725 train loss:3.485087 +step:6726 train loss:3.438120 +step:6727 train loss:3.504741 +step:6728 train loss:3.599216 +step:6729 train loss:3.462384 +step:6730 train loss:3.457042 +step:6731 train loss:3.499681 +step:6732 train loss:3.374055 +step:6733 train loss:3.511425 +step:6734 train loss:3.443328 +step:6735 train loss:3.465233 +step:6736 train loss:3.466255 +step:6737 train loss:3.463875 +step:6738 train loss:3.493446 +step:6739 train loss:3.452206 +step:6740 train loss:3.400007 +step:6741 train loss:3.512775 +step:6742 train loss:3.471782 +step:6743 train loss:3.477280 +step:6744 train loss:3.365464 +step:6745 train loss:3.526686 +step:6746 train loss:3.447196 +step:6747 train loss:3.448965 +step:6748 train loss:3.517712 +step:6749 train loss:3.501218 +step:6750 validation loss:3.396489 +step:6750 train loss:3.418265 +step:6751 train loss:3.452137 +step:6752 train loss:3.454272 +step:6753 train loss:3.492855 +step:6754 train loss:3.470819 +step:6755 train loss:3.488372 +step:6756 train loss:3.425462 +step:6757 train loss:3.395294 +step:6758 train loss:3.567164 +step:6759 train loss:3.463190 +step:6760 train loss:3.518429 +step:6761 train loss:3.452369 +step:6762 train loss:3.470724 +step:6763 train loss:3.374153 +step:6764 train loss:3.452684 +step:6765 train loss:3.459806 +step:6766 train loss:3.455120 +step:6767 train loss:3.405042 +step:6768 train loss:3.410229 +step:6769 train loss:3.373356 +step:6770 train loss:3.458220 +step:6771 train loss:3.460636 +step:6772 train loss:3.474024 +step:6773 train loss:3.449109 +step:6774 train loss:3.469112 +step:6775 train loss:3.504993 +step:6776 train loss:3.462490 +step:6777 train loss:3.537189 +step:6778 train loss:3.426424 +step:6779 train loss:3.475646 +step:6780 train loss:3.407593 +step:6781 train loss:3.470878 +step:6782 train loss:3.387600 +step:6783 train loss:3.415968 +step:6784 train loss:3.447520 +step:6785 train loss:3.431430 +step:6786 train loss:3.448959 +step:6787 train loss:3.522479 +step:6788 train loss:3.465496 +step:6789 train loss:3.470763 +step:6790 train loss:3.468035 +step:6791 train loss:3.481764 +step:6792 train loss:3.481196 +step:6793 train loss:3.478183 +step:6794 train loss:3.448244 +step:6795 train loss:3.445765 +step:6796 train loss:3.455711 +step:6797 train loss:3.546971 +step:6798 train loss:3.450533 +step:6799 train loss:3.444150 +step:6800 train loss:3.411754 +step:6801 train loss:3.544449 +step:6802 train loss:3.493504 +step:6803 train loss:3.483985 +step:6804 train loss:3.509573 +step:6805 train loss:3.471904 +step:6806 train loss:3.405984 +step:6807 train loss:3.462825 +step:6808 train loss:3.449044 +step:6809 train loss:3.475268 +step:6810 
train loss:3.596555 +step:6811 train loss:3.501039 +step:6812 train loss:3.468042 +step:6813 train loss:3.487451 +step:6814 train loss:3.492155 +step:6815 train loss:3.540682 +step:6816 train loss:3.453461 +step:6817 train loss:3.483729 +step:6818 train loss:3.457619 +step:6819 train loss:3.443801 +step:6820 train loss:3.470232 +step:6821 train loss:3.434770 +step:6822 train loss:3.538764 +step:6823 train loss:3.518987 +step:6824 train loss:3.494885 +step:6825 train loss:3.444404 +step:6826 train loss:3.485320 +step:6827 train loss:3.475672 +step:6828 train loss:3.488577 +step:6829 train loss:3.472786 +step:6830 train loss:3.441694 +step:6831 train loss:3.406108 +step:6832 train loss:3.390395 +step:6833 train loss:3.404915 +step:6834 train loss:3.495175 +step:6835 train loss:3.464965 +step:6836 train loss:3.384848 +step:6837 train loss:3.453137 +step:6838 train loss:3.509573 +step:6839 train loss:3.594418 +step:6840 train loss:3.468301 +step:6841 train loss:3.416985 +step:6842 train loss:3.472224 +step:6843 train loss:3.577222 +step:6844 train loss:3.455536 +step:6845 train loss:3.508490 +step:6846 train loss:3.571929 +step:6847 train loss:3.504476 +step:6848 train loss:3.491323 +step:6849 train loss:3.519210 +step:6850 train loss:3.488448 +step:6851 train loss:3.418054 +step:6852 train loss:3.410542 +step:6853 train loss:3.402674 +step:6854 train loss:3.478096 +step:6855 train loss:3.448498 +step:6856 train loss:3.431946 +step:6857 train loss:3.487595 +step:6858 train loss:3.517568 +step:6859 train loss:3.424141 +step:6860 train loss:3.533894 +step:6861 train loss:3.562371 +step:6862 train loss:3.469933 +step:6863 train loss:3.464076 +step:6864 train loss:3.413351 +step:6865 train loss:3.480075 +step:6866 train loss:3.411410 +step:6867 train loss:3.586555 +step:6868 train loss:3.464999 +step:6869 train loss:3.493554 +step:6870 train loss:3.532934 +step:6871 train loss:3.451097 +step:6872 train loss:3.444777 +step:6873 train loss:3.462870 +step:6874 train loss:3.424328 +step:6875 train loss:3.427123 +step:6876 train loss:3.455932 +step:6877 train loss:3.499904 +step:6878 train loss:3.415374 +step:6879 train loss:3.458914 +step:6880 train loss:3.467765 +step:6881 train loss:3.429399 +step:6882 train loss:3.493772 +step:6883 train loss:3.474458 +step:6884 train loss:3.707852 +step:6885 train loss:3.476231 +step:6886 train loss:3.457686 +step:6887 train loss:3.401403 +step:6888 train loss:3.497805 +step:6889 train loss:3.382087 +step:6890 train loss:3.493869 +step:6891 train loss:3.501315 +step:6892 train loss:3.600304 +step:6893 train loss:3.433439 +step:6894 train loss:3.492657 +step:6895 train loss:3.491616 +step:6896 train loss:3.470836 +step:6897 train loss:3.423162 +step:6898 train loss:3.426217 +step:6899 train loss:3.511991 +step:6900 train loss:3.485980 +step:6901 train loss:3.433182 +step:6902 train loss:3.368619 +step:6903 train loss:3.410772 +step:6904 train loss:3.522840 +step:6905 train loss:3.562502 +step:6906 train loss:3.477380 +step:6907 train loss:3.495572 +step:6908 train loss:3.528898 +step:6909 train loss:3.524881 +step:6910 train loss:3.400186 +step:6911 train loss:3.534557 +step:6912 train loss:3.422324 +step:6913 train loss:3.461054 +step:6914 train loss:3.416355 +step:6915 train loss:3.446967 +step:6916 train loss:3.418652 +step:6917 train loss:3.546700 +step:6918 train loss:3.488909 +step:6919 train loss:3.485173 +step:6920 train loss:3.468974 +step:6921 train loss:3.532528 +step:6922 train loss:3.521934 +step:6923 train loss:3.389467 +step:6924 train loss:3.471245 
+step:6925 train loss:3.442617 +step:6926 train loss:3.484536 +step:6927 train loss:3.537349 +step:6928 train loss:3.422616 +step:6929 train loss:3.434993 +step:6930 train loss:3.470193 +step:6931 train loss:3.468248 +step:6932 train loss:3.711687 +step:6933 train loss:3.531839 +step:6934 train loss:3.475872 +step:6935 train loss:3.455442 +step:6936 train loss:3.495672 +step:6937 train loss:3.447953 +step:6938 train loss:3.502644 +step:6939 train loss:3.441735 +step:6940 train loss:3.494574 +step:6941 train loss:3.412663 +step:6942 train loss:3.500791 +step:6943 train loss:3.392603 +step:6944 train loss:3.481436 +step:6945 train loss:3.424474 +step:6946 train loss:3.514762 +step:6947 train loss:3.437509 +step:6948 train loss:3.432073 +step:6949 train loss:3.506032 +step:6950 train loss:3.497782 +step:6951 train loss:3.501240 +step:6952 train loss:3.431788 +step:6953 train loss:3.478879 +step:6954 train loss:3.539263 +step:6955 train loss:3.457885 +step:6956 train loss:3.490792 +step:6957 train loss:3.480325 +step:6958 train loss:3.442985 +step:6959 train loss:3.480723 +step:6960 train loss:3.446075 +step:6961 train loss:3.456419 +step:6962 train loss:3.435540 +step:6963 train loss:3.408859 +step:6964 train loss:3.446614 +step:6965 train loss:3.442783 +step:6966 train loss:3.482839 +step:6967 train loss:3.422565 +step:6968 train loss:3.464421 +step:6969 train loss:3.481399 +step:6970 train loss:3.457330 +step:6971 train loss:3.522378 +step:6972 train loss:3.469975 +step:6973 train loss:3.424025 +step:6974 train loss:3.556854 +step:6975 train loss:3.456659 +step:6976 train loss:3.434307 +step:6977 train loss:3.471899 +step:6978 train loss:3.461873 +step:6979 train loss:3.471766 +step:6980 train loss:3.447869 +step:6981 train loss:3.511194 +step:6982 train loss:3.463017 +step:6983 train loss:3.455256 +step:6984 train loss:3.572497 +step:6985 train loss:3.411171 +step:6986 train loss:3.408699 +step:6987 train loss:3.458345 +step:6988 train loss:3.463008 +step:6989 train loss:3.607509 +step:6990 train loss:3.470583 +step:6991 train loss:3.426031 +step:6992 train loss:3.474905 +step:6993 train loss:3.542231 +step:6994 train loss:3.488395 +step:6995 train loss:3.438229 +step:6996 train loss:3.438627 +step:6997 train loss:3.522867 +step:6998 train loss:3.417458 +step:6999 train loss:3.471462 +step:7000 validation loss:3.389170 total_sharp:3.5022e-03 L1_sharp:1.1312e-02 L2_sharp:8.3449e-03 L3_sharp:2.4316e-03 L4_sharp:8.3860e-04 L5_sharp:9.0943e-04 L6_sharp:1.1521e-03 L7_sharp:1.3085e-03 L8_sharp:1.2249e-03 L9_sharp:9.5056e-04 L10_sharp:8.5836e-04 L11_sharp:7.4559e-04 L12_sharp:1.3556e-03 total_fnorm:1.6847e+00 total_l1_linf:1.3802e+04 total_spectral:1.6847e+00 L1_fnorm:2.8812e-01 L2_fnorm:2.7197e-01 L3_fnorm:2.8227e-01 L4_fnorm:2.9693e-01 L5_fnorm:2.9931e-01 L6_fnorm:3.0128e-01 L7_fnorm:3.0047e-01 L8_fnorm:3.0148e-01 L9_fnorm:3.0208e-01 L10_fnorm:3.0190e-01 L11_fnorm:3.0096e-01 L12_fnorm:3.0055e-01 L1_l1linf:3.7702e-01 L2_l1linf:4.6038e-01 L3_l1linf:4.5187e-01 L4_l1linf:4.2138e-01 L5_l1linf:4.0075e-01 L6_l1linf:3.5504e-01 L7_l1linf:3.5258e-01 L8_l1linf:3.4550e-01 L9_l1linf:3.8228e-01 L10_l1linf:4.1901e-01 L11_l1linf:4.4063e-01 L12_l1linf:4.5818e-01 L1_spectral:8.4537e-03 L2_spectral:1.0361e-02 L3_spectral:1.0166e-02 L4_spectral:9.4519e-03 L5_spectral:8.9994e-03 L6_spectral:8.0689e-03 L7_spectral:7.9087e-03 L8_spectral:7.7790e-03 L9_spectral:8.5612e-03 L10_spectral:9.3400e-03 L11_spectral:9.7762e-03 L12_spectral:1.0187e-02 ip_v_neg_g:3.3944e-03 cos_v_neg_g:1.6373e-03 v_norm:1.6847e+00 
g_norm:1.2306e+00 hv_norm:4.0216e-01 cos_v_hv:1.4671e-02 hg_norm:1.7400e+01 cos_g_hg:4.8942e-01 v_par:2.0169e-04 v_perp:1.6847e+00 L1_cos_v_neg_g:3.3591e-03 L1_v_norm:2.8812e-01 L2_cos_v_neg_g:2.9569e-03 L2_v_norm:2.7197e-01 L3_cos_v_neg_g:1.4499e-03 L3_v_norm:2.8227e-01 L4_cos_v_neg_g:1.9143e-03 L4_v_norm:2.9693e-01 L5_cos_v_neg_g:1.8737e-03 L5_v_norm:2.9931e-01 L6_cos_v_neg_g:2.5777e-03 L6_v_norm:3.0128e-01 L7_cos_v_neg_g:3.1180e-03 L7_v_norm:3.0047e-01 L8_cos_v_neg_g:2.9316e-03 L8_v_norm:3.0148e-01 L9_cos_v_neg_g:2.7525e-03 L9_v_norm:3.0208e-01 L10_cos_v_neg_g:2.6921e-03 L10_v_norm:3.0190e-01 L11_cos_v_neg_g:3.1715e-03 L11_v_norm:3.0096e-01 L12_cos_v_neg_g:4.7078e-03 L12_v_norm:3.0055e-01 +step:7000 train loss:3.542629 +step:7001 train loss:3.457284 +step:7002 train loss:3.438926 +step:7003 train loss:3.466038 +step:7004 train loss:3.462197 +step:7005 train loss:3.442859 +step:7006 train loss:3.453581 +step:7007 train loss:3.500202 +step:7008 train loss:3.445993 +step:7009 train loss:3.481481 +step:7010 train loss:3.419659 +step:7011 train loss:3.472635 +step:7012 train loss:3.443933 +step:7013 train loss:3.522954 +step:7014 train loss:3.428145 +step:7015 train loss:3.490735 +step:7016 train loss:3.475905 +step:7017 train loss:3.443175 +step:7018 train loss:3.524508 +step:7019 train loss:3.445094 +step:7020 train loss:3.494799 +step:7021 train loss:3.438416 +step:7022 train loss:3.455209 +step:7023 train loss:3.469604 +step:7024 train loss:3.434569 +step:7025 train loss:3.482989 +step:7026 train loss:3.439034 +step:7027 train loss:3.502824 +step:7028 train loss:3.427272 +step:7029 train loss:3.418800 +step:7030 train loss:3.419010 +step:7031 train loss:3.475574 +step:7032 train loss:3.479173 +step:7033 train loss:3.455799 +step:7034 train loss:3.477802 +step:7035 train loss:3.527710 +step:7036 train loss:3.450186 +step:7037 train loss:3.473038 +step:7038 train loss:3.437401 +step:7039 train loss:3.489366 +step:7040 train loss:3.407472 +step:7041 train loss:3.502074 +step:7042 train loss:3.430567 +step:7043 train loss:3.404375 +step:7044 train loss:3.452376 +step:7045 train loss:3.454344 +step:7046 train loss:3.447401 +step:7047 train loss:3.481079 +step:7048 train loss:3.433212 +step:7049 train loss:3.442109 +step:7050 train loss:3.467975 +step:7051 train loss:3.483178 +step:7052 train loss:3.484749 +step:7053 train loss:3.447603 +step:7054 train loss:3.423274 +step:7055 train loss:3.494599 +step:7056 train loss:3.487839 +step:7057 train loss:3.417549 +step:7058 train loss:3.535797 +step:7059 train loss:3.450692 +step:7060 train loss:3.450773 +step:7061 train loss:3.425295 +step:7062 train loss:3.453087 +step:7063 train loss:3.509830 +step:7064 train loss:3.433412 +step:7065 train loss:3.487202 +step:7066 train loss:3.439839 +step:7067 train loss:3.480501 +step:7068 train loss:3.451723 +step:7069 train loss:3.417237 +step:7070 train loss:3.440406 +step:7071 train loss:3.414865 +step:7072 train loss:3.411792 +step:7073 train loss:3.410550 +step:7074 train loss:3.404318 +step:7075 train loss:3.425028 +step:7076 train loss:3.436789 +step:7077 train loss:3.442877 +step:7078 train loss:3.492272 +step:7079 train loss:3.500590 +step:7080 train loss:3.446982 +step:7081 train loss:3.466220 +step:7082 train loss:3.430391 +step:7083 train loss:3.464594 +step:7084 train loss:3.457193 +step:7085 train loss:3.420069 +step:7086 train loss:3.456031 +step:7087 train loss:3.432593 +step:7088 train loss:3.556526 +step:7089 train loss:3.446217 +step:7090 train loss:3.414850 +step:7091 train loss:3.425915 
+step:7092 train loss:3.407890 +step:7093 train loss:3.499896 +step:7094 train loss:3.420885 +step:7095 train loss:3.441154 +step:7096 train loss:3.454640 +step:7097 train loss:3.443594 +step:7098 train loss:3.468786 +step:7099 train loss:3.424551 +step:7100 train loss:3.455528 +step:7101 train loss:3.524426 +step:7102 train loss:3.416215 +step:7103 train loss:3.440241 +step:7104 train loss:3.474381 +step:7105 train loss:3.453400 +step:7106 train loss:3.436391 +step:7107 train loss:3.474861 +step:7108 train loss:3.539820 +step:7109 train loss:3.470942 +step:7110 train loss:3.495229 +step:7111 train loss:3.474838 +step:7112 train loss:3.466830 +step:7113 train loss:3.460348 +step:7114 train loss:3.479753 +step:7115 train loss:3.515588 +step:7116 train loss:3.443803 +step:7117 train loss:3.484504 +step:7118 train loss:3.496632 +step:7119 train loss:3.456729 +step:7120 train loss:3.509378 +step:7121 train loss:3.430875 +step:7122 train loss:3.431462 +step:7123 train loss:3.369776 +step:7124 train loss:3.530105 +step:7125 train loss:3.380699 +step:7126 train loss:3.548521 +step:7127 train loss:3.502171 +step:7128 train loss:3.452545 +step:7129 train loss:3.456854 +step:7130 train loss:3.447706 +step:7131 train loss:3.388839 +step:7132 train loss:3.428872 +step:7133 train loss:3.477600 +step:7134 train loss:3.404137 +step:7135 train loss:3.465217 +step:7136 train loss:3.445113 +step:7137 train loss:3.426099 +step:7138 train loss:3.412811 +step:7139 train loss:3.415765 +step:7140 train loss:3.451585 +step:7141 train loss:3.447850 +step:7142 train loss:3.446031 +step:7143 train loss:3.481181 +step:7144 train loss:3.429578 +step:7145 train loss:3.446451 +step:7146 train loss:3.456789 +step:7147 train loss:3.479547 +step:7148 train loss:3.480497 +step:7149 train loss:3.491982 +step:7150 train loss:3.464320 +step:7151 train loss:3.431093 +step:7152 train loss:3.399117 +step:7153 train loss:3.434633 +step:7154 train loss:3.455005 +step:7155 train loss:3.465323 +step:7156 train loss:3.440860 +step:7157 train loss:3.456701 +step:7158 train loss:3.417636 +step:7159 train loss:3.470572 +step:7160 train loss:3.478465 +step:7161 train loss:3.432098 +step:7162 train loss:3.476953 +step:7163 train loss:3.412038 +step:7164 train loss:3.449751 +step:7165 train loss:3.452638 +step:7166 train loss:3.509819 +step:7167 train loss:3.485843 +step:7168 train loss:3.464195 +step:7169 train loss:3.442935 +step:7170 train loss:3.471307 +step:7171 train loss:3.422970 +step:7172 train loss:3.586825 +step:7173 train loss:3.428211 +step:7174 train loss:3.473393 +step:7175 train loss:3.447080 +step:7176 train loss:3.455675 +step:7177 train loss:3.469771 +step:7178 train loss:3.472922 +step:7179 train loss:3.457503 +step:7180 train loss:3.458705 +step:7181 train loss:3.488459 +step:7182 train loss:3.435454 +step:7183 train loss:3.510952 +step:7184 train loss:3.601129 +step:7185 train loss:3.513005 +step:7186 train loss:3.454387 +step:7187 train loss:3.463583 +step:7188 train loss:3.449463 +step:7189 train loss:3.447650 +step:7190 train loss:3.452366 +step:7191 train loss:3.445722 +step:7192 train loss:3.472951 +step:7193 train loss:3.395442 +step:7194 train loss:3.455888 +step:7195 train loss:3.435688 +step:7196 train loss:3.482811 +step:7197 train loss:3.460674 +step:7198 train loss:3.520489 +step:7199 train loss:3.476864 +step:7200 train loss:3.467862 +step:7201 train loss:3.476493 +step:7202 train loss:3.451509 +step:7203 train loss:3.470223 +step:7204 train loss:3.435977 +step:7205 train loss:3.396162 +step:7206 train 
loss:3.424646 +step:7207 train loss:3.599987 +step:7208 train loss:3.434333 +step:7209 train loss:3.513418 +step:7210 train loss:3.451901 +step:7211 train loss:3.481584 +step:7212 train loss:3.563118 +step:7213 train loss:3.410706 +step:7214 train loss:3.481684 +step:7215 train loss:3.451323 +step:7216 train loss:3.499127 +step:7217 train loss:3.461963 +step:7218 train loss:3.544564 +step:7219 train loss:3.455434 +step:7220 train loss:3.536495 +step:7221 train loss:3.416472 +step:7222 train loss:3.497487 +step:7223 train loss:3.416118 +step:7224 train loss:3.475686 +step:7225 train loss:3.458540 +step:7226 train loss:3.422256 +step:7227 train loss:3.443657 +step:7228 train loss:3.430401 +step:7229 train loss:3.433758 +step:7230 train loss:3.422590 +step:7231 train loss:3.554469 +step:7232 train loss:3.423578 +step:7233 train loss:3.492483 +step:7234 train loss:3.480094 +step:7235 train loss:3.450406 +step:7236 train loss:3.488783 +step:7237 train loss:3.443540 +step:7238 train loss:3.482233 +step:7239 train loss:3.434361 +step:7240 train loss:3.434824 +step:7241 train loss:3.444674 +step:7242 train loss:3.428518 +step:7243 train loss:3.473669 +step:7244 train loss:3.444925 +step:7245 train loss:3.453379 +step:7246 train loss:3.490166 +step:7247 train loss:3.447949 +step:7248 train loss:3.488506 +step:7249 train loss:3.434491 +step:7250 validation loss:3.386146 +step:7250 train loss:3.457677 +step:7251 train loss:3.505176 +step:7252 train loss:3.414412 +step:7253 train loss:3.506700 +step:7254 train loss:3.441762 +step:7255 train loss:3.417439 +step:7256 train loss:3.456580 +step:7257 train loss:3.495420 +step:7258 train loss:3.453674 +step:7259 train loss:3.434995 +step:7260 train loss:3.520833 +step:7261 train loss:3.478858 +step:7262 train loss:3.434895 +step:7263 train loss:3.478831 +step:7264 train loss:3.459516 +step:7265 train loss:3.368451 +step:7266 train loss:3.490358 +step:7267 train loss:3.408082 +step:7268 train loss:3.472285 +step:7269 train loss:3.479378 +step:7270 train loss:3.433855 +step:7271 train loss:3.450612 +step:7272 train loss:3.455313 +step:7273 train loss:3.453196 +step:7274 train loss:3.429647 +step:7275 train loss:3.499914 +step:7276 train loss:3.408397 +step:7277 train loss:3.452838 +step:7278 train loss:3.427051 +step:7279 train loss:3.407612 +step:7280 train loss:3.476194 +step:7281 train loss:3.502418 +step:7282 train loss:3.498210 +step:7283 train loss:3.388781 +step:7284 train loss:3.431774 +step:7285 train loss:3.459025 +step:7286 train loss:3.593055 +step:7287 train loss:3.498739 +step:7288 train loss:3.452001 +step:7289 train loss:3.461470 +step:7290 train loss:3.504194 +step:7291 train loss:3.468178 +step:7292 train loss:3.535928 +step:7293 train loss:3.437171 +step:7294 train loss:3.522346 +step:7295 train loss:3.407601 +step:7296 train loss:3.407390 +step:7297 train loss:3.451684 +step:7298 train loss:3.428756 +step:7299 train loss:3.470444 +step:7300 train loss:3.453322 +step:7301 train loss:3.404068 +step:7302 train loss:3.551644 +step:7303 train loss:3.438430 +step:7304 train loss:3.387823 +step:7305 train loss:3.460302 +step:7306 train loss:3.491727 +step:7307 train loss:3.496253 +step:7308 train loss:3.445947 +step:7309 train loss:3.412026 +step:7310 train loss:3.443157 +step:7311 train loss:3.426770 +step:7312 train loss:3.469494 +step:7313 train loss:3.503579 +step:7314 train loss:3.399298 +step:7315 train loss:3.394120 +step:7316 train loss:3.539475 +step:7317 train loss:3.473279 +step:7318 train loss:3.411788 +step:7319 train loss:3.441733 
+step:7320 train loss:3.470748 +step:7321 train loss:3.497952 +step:7322 train loss:3.381327 +step:7323 train loss:3.434612 +step:7324 train loss:3.464201 +step:7325 train loss:3.426261 +step:7326 train loss:3.453470 +step:7327 train loss:3.429690 +step:7328 train loss:3.546684 +step:7329 train loss:3.390231 +step:7330 train loss:3.449234 +step:7331 train loss:3.439020 +step:7332 train loss:3.485823 +step:7333 train loss:3.464591 +step:7334 train loss:3.431960 +step:7335 train loss:3.433780 +step:7336 train loss:3.683925 +step:7337 train loss:3.469639 +step:7338 train loss:3.468153 +step:7339 train loss:3.475878 +step:7340 train loss:3.467120 +step:7341 train loss:3.453851 +step:7342 train loss:3.444501 +step:7343 train loss:3.460605 +step:7344 train loss:3.538198 +step:7345 train loss:3.399975 +step:7346 train loss:3.433967 +step:7347 train loss:3.426308 +step:7348 train loss:3.434101 +step:7349 train loss:3.532340 +step:7350 train loss:3.517248 +step:7351 train loss:3.454413 +step:7352 train loss:3.476296 +step:7353 train loss:3.465349 +step:7354 train loss:3.413529 +step:7355 train loss:3.592225 +step:7356 train loss:3.566071 +step:7357 train loss:3.486712 +step:7358 train loss:3.466028 +step:7359 train loss:3.438681 +step:7360 train loss:3.444438 +step:7361 train loss:3.402402 +step:7362 train loss:3.451753 +step:7363 train loss:3.461379 +step:7364 train loss:3.499978 +step:7365 train loss:3.479490 +step:7366 train loss:3.444273 +step:7367 train loss:3.522050 +step:7368 train loss:3.500726 +step:7369 train loss:3.493367 +step:7370 train loss:3.457465 +step:7371 train loss:3.414494 +step:7372 train loss:3.474181 +step:7373 train loss:3.494234 +step:7374 train loss:3.593040 +step:7375 train loss:3.415936 +step:7376 train loss:3.431303 +step:7377 train loss:3.477916 +step:7378 train loss:3.433500 +step:7379 train loss:3.556953 +step:7380 train loss:3.518981 +step:7381 train loss:3.478915 +step:7382 train loss:3.447085 +step:7383 train loss:3.541949 +step:7384 train loss:3.478477 +step:7385 train loss:3.437940 +step:7386 train loss:3.441445 +step:7387 train loss:3.489451 +step:7388 train loss:3.523761 +step:7389 train loss:3.465981 +step:7390 train loss:3.407657 +step:7391 train loss:3.440040 +step:7392 train loss:3.502182 +step:7393 train loss:3.467526 +step:7394 train loss:3.506262 +step:7395 train loss:3.393309 +step:7396 train loss:3.495640 +step:7397 train loss:3.422006 +step:7398 train loss:3.438123 +step:7399 train loss:3.483573 +step:7400 train loss:3.489032 +step:7401 train loss:3.406872 +step:7402 train loss:3.526064 +step:7403 train loss:3.410363 +step:7404 train loss:3.477573 +step:7405 train loss:3.601683 +step:7406 train loss:3.427468 +step:7407 train loss:3.472034 +step:7408 train loss:3.474665 +step:7409 train loss:3.444498 +step:7410 train loss:3.614031 +step:7411 train loss:3.458166 +step:7412 train loss:3.457022 +step:7413 train loss:3.515286 +step:7414 train loss:3.425545 +step:7415 train loss:3.479953 +step:7416 train loss:3.365413 +step:7417 train loss:3.488111 +step:7418 train loss:3.470165 +step:7419 train loss:3.438184 +step:7420 train loss:3.429476 +step:7421 train loss:3.462980 +step:7422 train loss:3.421444 +step:7423 train loss:3.558576 +step:7424 train loss:3.621459 +step:7425 train loss:3.510462 +step:7426 train loss:3.476073 +step:7427 train loss:3.446636 +step:7428 train loss:3.486247 +step:7429 train loss:3.485796 +step:7430 train loss:3.412066 +step:7431 train loss:3.417215 +step:7432 train loss:3.425537 +step:7433 train loss:3.525426 +step:7434 train 
loss:3.433710 +step:7435 train loss:3.521570 +step:7436 train loss:3.563607 +step:7437 train loss:3.385552 +step:7438 train loss:3.446310 +step:7439 train loss:3.454790 +step:7440 train loss:3.429803 +step:7441 train loss:3.399972 +step:7442 train loss:3.627327 +step:7443 train loss:3.446798 +step:7444 train loss:3.488593 +step:7445 train loss:3.422905 +step:7446 train loss:3.444676 +step:7447 train loss:3.369108 +step:7448 train loss:3.428731 +step:7449 train loss:3.436749 +step:7450 train loss:3.470241 +step:7451 train loss:3.506037 +step:7452 train loss:3.432324 +step:7453 train loss:3.456727 +step:7454 train loss:3.444343 +step:7455 train loss:3.452031 +step:7456 train loss:3.428459 +step:7457 train loss:3.437140 +step:7458 train loss:3.471254 +step:7459 train loss:3.452395 +step:7460 train loss:3.461154 +step:7461 train loss:3.496669 +step:7462 train loss:3.434506 +step:7463 train loss:3.497734 +step:7464 train loss:3.418041 +step:7465 train loss:3.427864 +step:7466 train loss:3.428086 +step:7467 train loss:3.439167 +step:7468 train loss:3.491885 +step:7469 train loss:3.420280 +step:7470 train loss:3.455166 +step:7471 train loss:3.444568 +step:7472 train loss:3.475238 +step:7473 train loss:3.418284 +step:7474 train loss:3.403831 +step:7475 train loss:3.429585 +step:7476 train loss:3.470874 +step:7477 train loss:3.444539 +step:7478 train loss:3.442867 +step:7479 train loss:3.457726 +step:7480 train loss:3.736815 +step:7481 train loss:3.385831 +step:7482 train loss:3.456385 +step:7483 train loss:3.453518 +step:7484 train loss:3.472655 +step:7485 train loss:3.458312 +step:7486 train loss:3.483001 +step:7487 train loss:3.474826 +step:7488 train loss:3.498963 +step:7489 train loss:3.494004 +step:7490 train loss:3.438169 +step:7491 train loss:3.462521 +step:7492 train loss:3.567613 +step:7493 train loss:3.543491 +step:7494 train loss:3.565468 +step:7495 train loss:3.436692 +step:7496 train loss:3.424150 +step:7497 train loss:3.524635 +step:7498 train loss:3.457371 +step:7499 train loss:3.495199 +step:7500 validation loss:3.384831 total_sharp:2.9890e-03 L1_sharp:6.4703e-03 L2_sharp:1.2424e-03 L3_sharp:1.7995e-03 L4_sharp:9.0513e-04 L5_sharp:1.2035e-03 L6_sharp:1.5546e-03 L7_sharp:1.7042e-03 L8_sharp:1.4149e-03 L9_sharp:1.0898e-03 L10_sharp:8.1129e-04 L11_sharp:6.1844e-04 L12_sharp:7.7069e-04 total_fnorm:1.6834e+00 total_l1_linf:1.3781e+04 total_spectral:1.6834e+00 L1_fnorm:2.8817e-01 L2_fnorm:2.7651e-01 L3_fnorm:2.8279e-01 L4_fnorm:2.9596e-01 L5_fnorm:2.9802e-01 L6_fnorm:3.0096e-01 L7_fnorm:2.9987e-01 L8_fnorm:3.0017e-01 L9_fnorm:3.0135e-01 L10_fnorm:3.0174e-01 L11_fnorm:3.0052e-01 L12_fnorm:3.0074e-01 L1_l1linf:4.0507e-01 L2_l1linf:4.7618e-01 L3_l1linf:4.7256e-01 L4_l1linf:4.5317e-01 L5_l1linf:4.2049e-01 L6_l1linf:3.9674e-01 L7_l1linf:3.8386e-01 L8_l1linf:3.5745e-01 L9_l1linf:3.8430e-01 L10_l1linf:4.0956e-01 L11_l1linf:4.3308e-01 L12_l1linf:4.2029e-01 L1_spectral:9.0787e-03 L2_spectral:1.0752e-02 L3_spectral:1.0534e-02 L4_spectral:1.0181e-02 L5_spectral:9.3598e-03 L6_spectral:8.8360e-03 L7_spectral:8.6241e-03 L8_spectral:8.0247e-03 L9_spectral:8.6520e-03 L10_spectral:9.2267e-03 L11_spectral:9.7080e-03 L12_spectral:9.5022e-03 ip_v_neg_g:4.9935e-03 cos_v_neg_g:2.2298e-03 v_norm:1.6834e+00 g_norm:1.3303e+00 hv_norm:3.1839e-01 cos_v_hv:1.5804e-02 hg_norm:2.5647e+01 cos_g_hg:5.4354e-01 v_par:1.6261e-04 v_perp:1.6834e+00 L1_cos_v_neg_g:3.3422e-03 L1_v_norm:2.8817e-01 L2_cos_v_neg_g:4.8043e-03 L2_v_norm:2.7651e-01 L3_cos_v_neg_g:4.9442e-03 L3_v_norm:2.8279e-01 L4_cos_v_neg_g:3.4177e-03 
L4_v_norm:2.9596e-01 L5_cos_v_neg_g:3.2364e-03 L5_v_norm:2.9802e-01 L6_cos_v_neg_g:3.0987e-03 L6_v_norm:3.0096e-01 L7_cos_v_neg_g:4.2260e-03 L7_v_norm:2.9987e-01 L8_cos_v_neg_g:3.7860e-03 L8_v_norm:3.0017e-01 L9_cos_v_neg_g:4.5473e-03 L9_v_norm:3.0135e-01 L10_cos_v_neg_g:3.9039e-03 L10_v_norm:3.0174e-01 L11_cos_v_neg_g:2.8409e-03 L11_v_norm:3.0052e-01 L12_cos_v_neg_g:2.9148e-03 L12_v_norm:3.0074e-01 +step:7500 train loss:3.441383 +step:7501 train loss:3.429658 +step:7502 train loss:3.423257 +step:7503 train loss:3.402186 +step:7504 train loss:3.422948 +step:7505 train loss:3.415762 +step:7506 train loss:3.474638 +step:7507 train loss:3.390833 +step:7508 train loss:3.462498 +step:7509 train loss:3.432591 +step:7510 train loss:3.461974 +step:7511 train loss:3.467186 +step:7512 train loss:3.735109 +step:7513 train loss:3.419337 +step:7514 train loss:3.461567 +step:7515 train loss:3.413971 +step:7516 train loss:3.428026 +step:7517 train loss:3.464511 +step:7518 train loss:3.436689 +step:7519 train loss:3.452663 +step:7520 train loss:3.515523 +step:7521 train loss:3.401713 +step:7522 train loss:3.458815 +step:7523 train loss:3.489281 +step:7524 train loss:3.439045 +step:7525 train loss:3.441683 +step:7526 train loss:3.388046 +step:7527 train loss:3.397739 +step:7528 train loss:3.496726 +step:7529 train loss:3.472594 +step:7530 train loss:3.421132 +step:7531 train loss:3.497834 +step:7532 train loss:3.485787 +step:7533 train loss:3.411855 +step:7534 train loss:3.475346 +step:7535 train loss:3.477265 +step:7536 train loss:3.509937 +step:7537 train loss:3.546139 +step:7538 train loss:3.555396 +step:7539 train loss:3.455556 +step:7540 train loss:3.439790 +step:7541 train loss:3.496041 +step:7542 train loss:3.457740 +step:7543 train loss:3.410871 +step:7544 train loss:3.457830 +step:7545 train loss:3.444767 +step:7546 train loss:3.400400 +step:7547 train loss:3.444890 +step:7548 train loss:3.460287 +step:7549 train loss:3.443399 +step:7550 train loss:3.439363 +step:7551 train loss:3.541482 +step:7552 train loss:3.454685 +step:7553 train loss:3.489670 +step:7554 train loss:3.413787 +step:7555 train loss:3.509057 +step:7556 train loss:3.408909 +step:7557 train loss:3.503947 +step:7558 train loss:3.492517 +step:7559 train loss:3.451764 +step:7560 train loss:3.546983 +step:7561 train loss:3.512676 +step:7562 train loss:3.423224 +step:7563 train loss:3.417425 +step:7564 train loss:3.468842 +step:7565 train loss:3.488120 +step:7566 train loss:3.477426 +step:7567 train loss:3.490067 +step:7568 train loss:3.438038 +step:7569 train loss:3.500496 +step:7570 train loss:3.479349 +step:7571 train loss:3.562181 +step:7572 train loss:3.413033 +step:7573 train loss:3.479709 +step:7574 train loss:3.442626 +step:7575 train loss:3.440203 +step:7576 train loss:3.445105 +step:7577 train loss:3.460703 +step:7578 train loss:3.520829 +step:7579 train loss:3.455243 +step:7580 train loss:3.442879 +step:7581 train loss:3.429530 +step:7582 train loss:3.487735 +step:7583 train loss:3.426998 +step:7584 train loss:3.405073 +step:7585 train loss:3.372626 +step:7586 train loss:3.410958 +step:7587 train loss:3.472120 +step:7588 train loss:3.604226 +step:7589 train loss:3.423175 +step:7590 train loss:3.489477 +step:7591 train loss:3.493304 +step:7592 train loss:3.455931 +step:7593 train loss:3.477681 +step:7594 train loss:3.476343 +step:7595 train loss:3.447191 +step:7596 train loss:3.496090 +step:7597 train loss:3.400899 +step:7598 train loss:3.465601 +step:7599 train loss:3.455049 +step:7600 train loss:3.416765 +step:7601 train 
loss:3.527389 +step:7602 train loss:3.468058 +step:7603 train loss:3.433367 +step:7604 train loss:3.577324 +step:7605 train loss:3.461054 +step:7606 train loss:3.497383 +step:7607 train loss:3.449317 +step:7608 train loss:3.459940 +step:7609 train loss:3.496845 +step:7610 train loss:3.452062 +step:7611 train loss:3.429752 +step:7612 train loss:3.373662 +step:7613 train loss:3.420732 +step:7614 train loss:3.491501 +step:7615 train loss:3.450954 +step:7616 train loss:3.518384 +step:7617 train loss:3.418134 +step:7618 train loss:3.505117 +step:7619 train loss:3.447373 +step:7620 train loss:3.432691 +step:7621 train loss:3.382552 +step:7622 train loss:3.659687 +step:7623 train loss:3.670497 +step:7624 train loss:3.484613 +step:7625 train loss:3.522847 +step:7626 train loss:3.441107 +step:7627 train loss:3.511103 +step:7628 train loss:3.393312 +step:7629 train loss:3.453816 +step:7630 train loss:3.465739 +step:7631 train loss:3.447598 +step:7632 train loss:3.500031 +step:7633 train loss:3.566126 +step:7634 train loss:3.527475 +step:7635 train loss:3.435040 +step:7636 train loss:3.460285 +step:7637 train loss:3.404985 +step:7638 train loss:3.517694 +step:7639 train loss:3.445367 +step:7640 train loss:3.427615 +step:7641 train loss:3.459242 +step:7642 train loss:3.799169 +step:7643 train loss:3.544328 +step:7644 train loss:3.469324 +step:7645 train loss:3.454206 +step:7646 train loss:3.444092 +step:7647 train loss:3.437238 +step:7648 train loss:3.466334 +step:7649 train loss:3.429284 +step:7650 train loss:3.480052 +step:7651 train loss:3.497465 +step:7652 train loss:3.381736 +step:7653 train loss:3.579459 +step:7654 train loss:3.430782 +step:7655 train loss:3.452724 +step:7656 train loss:3.427543 +step:7657 train loss:3.438844 +step:7658 train loss:3.395349 +step:7659 train loss:3.461901 +step:7660 train loss:3.393266 +step:7661 train loss:3.409194 +step:7662 train loss:3.411900 +step:7663 train loss:3.458873 +step:7664 train loss:3.418288 +step:7665 train loss:3.389702 +step:7666 train loss:3.499167 +step:7667 train loss:3.416738 +step:7668 train loss:3.525222 +step:7669 train loss:3.458957 +step:7670 train loss:3.412102 +step:7671 train loss:3.467151 +step:7672 train loss:3.486987 +step:7673 train loss:3.451453 +step:7674 train loss:3.487185 +step:7675 train loss:3.544330 +step:7676 train loss:3.513840 +step:7677 train loss:3.536093 +step:7678 train loss:3.482247 +step:7679 train loss:3.502442 +step:7680 train loss:3.507411 +step:7681 train loss:3.471922 +step:7682 train loss:3.445487 +step:7683 train loss:3.444818 +step:7684 train loss:3.419370 +step:7685 train loss:3.399119 +step:7686 train loss:3.520163 +step:7687 train loss:3.431866 +step:7688 train loss:3.399615 +step:7689 train loss:3.448992 +step:7690 train loss:3.418667 +step:7691 train loss:3.445202 +step:7692 train loss:3.479609 +step:7693 train loss:3.477853 +step:7694 train loss:3.534069 +step:7695 train loss:3.458161 +step:7696 train loss:3.434677 +step:7697 train loss:3.422071 +step:7698 train loss:3.481668 +step:7699 train loss:3.477467 +step:7700 train loss:3.377654 +step:7701 train loss:3.492439 +step:7702 train loss:3.435568 +step:7703 train loss:3.439252 +step:7704 train loss:3.491225 +step:7705 train loss:3.449440 +step:7706 train loss:3.385807 +step:7707 train loss:3.505280 +step:7708 train loss:3.443328 +step:7709 train loss:3.463373 +step:7710 train loss:3.524284 +step:7711 train loss:3.486106 +step:7712 train loss:3.434390 +step:7713 train loss:3.512586 +step:7714 train loss:3.457358 +step:7715 train loss:3.410648 
+step:7716 train loss:3.449790 +step:7717 train loss:3.473315 +step:7718 train loss:3.479536 +step:7719 train loss:3.435813 +step:7720 train loss:3.451228 +step:7721 train loss:3.489866 +step:7722 train loss:3.419372 +step:7723 train loss:3.795583 +step:7724 train loss:3.459049 +step:7725 train loss:3.365279 +step:7726 train loss:3.439969 +step:7727 train loss:3.471934 +step:7728 train loss:3.421388 +step:7729 train loss:3.426592 +step:7730 train loss:3.452276 +step:7731 train loss:3.481869 +step:7732 train loss:3.504950 +step:7733 train loss:3.413510 +step:7734 train loss:3.440700 +step:7735 train loss:3.526551 +step:7736 train loss:3.474168 +step:7737 train loss:3.490751 +step:7738 train loss:3.391360 +step:7739 train loss:3.471055 +step:7740 train loss:3.417997 +step:7741 train loss:3.454062 +step:7742 train loss:3.454551 +step:7743 train loss:3.405175 +step:7744 train loss:3.534455 +step:7745 train loss:3.418468 +step:7746 train loss:3.393941 +step:7747 train loss:3.491318 +step:7748 train loss:3.471307 +step:7749 train loss:3.396049 +step:7750 validation loss:3.378423 +step:7750 train loss:3.555030 +step:7751 train loss:3.440545 +step:7752 train loss:3.431406 +step:7753 train loss:3.431965 +step:7754 train loss:3.406866 +step:7755 train loss:3.473930 +step:7756 train loss:3.501390 +step:7757 train loss:3.448093 +step:7758 train loss:3.419557 +step:7759 train loss:3.450346 +step:7760 train loss:3.476032 +step:7761 train loss:3.467669 +step:7762 train loss:3.453246 +step:7763 train loss:3.438158 +step:7764 train loss:3.442429 +step:7765 train loss:3.396620 +step:7766 train loss:3.461741 +step:7767 train loss:3.464648 +step:7768 train loss:3.421268 +step:7769 train loss:3.484847 +step:7770 train loss:3.501641 +step:7771 train loss:3.476998 +step:7772 train loss:3.446984 +step:7773 train loss:3.507787 +step:7774 train loss:3.405996 +step:7775 train loss:3.393673 +step:7776 train loss:3.499400 +step:7777 train loss:3.451450 +step:7778 train loss:3.409564 +step:7779 train loss:3.451164 +step:7780 train loss:3.448217 +step:7781 train loss:3.455648 +step:7782 train loss:3.438843 +step:7783 train loss:3.422546 +step:7784 train loss:3.419189 +step:7785 train loss:3.460891 +step:7786 train loss:3.419897 +step:7787 train loss:3.498716 +step:7788 train loss:3.449404 +step:7789 train loss:3.385809 +step:7790 train loss:3.446688 +step:7791 train loss:3.476253 +step:7792 train loss:3.435507 +step:7793 train loss:3.460252 +step:7794 train loss:3.446707 +step:7795 train loss:3.476973 +step:7796 train loss:3.442964 +step:7797 train loss:3.460972 +step:7798 train loss:3.454034 +step:7799 train loss:3.442765 +step:7800 train loss:3.399841 +step:7801 train loss:3.461897 +step:7802 train loss:3.444547 +step:7803 train loss:3.493358 +step:7804 train loss:3.456161 +step:7805 train loss:3.451532 +step:7806 train loss:3.472794 +step:7807 train loss:3.544723 +step:7808 train loss:3.402402 +step:7809 train loss:3.379969 +step:7810 train loss:3.467309 +step:7811 train loss:3.401673 +step:7812 train loss:3.419025 +step:7813 train loss:3.506563 +step:7814 train loss:3.578866 +step:7815 train loss:3.391519 +step:7816 train loss:3.474486 +step:7817 train loss:3.504202 +step:7818 train loss:3.403827 +step:7819 train loss:3.454855 +step:7820 train loss:3.496880 +step:7821 train loss:3.432038 +step:7822 train loss:3.391316 +step:7823 train loss:3.524611 +step:7824 train loss:3.441276 +step:7825 train loss:3.431103 +step:7826 train loss:3.426217 +step:7827 train loss:3.466628 +step:7828 train loss:3.459651 +step:7829 
train loss:3.417652 +step:7830 train loss:3.424859 +step:7831 train loss:3.428283 +step:7832 train loss:3.496951 +step:7833 train loss:3.474858 +step:7834 train loss:3.438507 +step:7835 train loss:3.466046 +step:7836 train loss:3.570466 +step:7837 train loss:3.462424 +step:7838 train loss:3.429905 +step:7839 train loss:3.388063 +step:7840 train loss:3.404091 +step:7841 train loss:3.503304 +step:7842 train loss:3.484730 +step:7843 train loss:3.539680 +step:7844 train loss:3.467655 +step:7845 train loss:3.445819 +step:7846 train loss:3.561761 +step:7847 train loss:3.450228 +step:7848 train loss:3.460281 +step:7849 train loss:3.474466 +step:7850 train loss:3.444991 +step:7851 train loss:3.472496 +step:7852 train loss:3.444125 +step:7853 train loss:3.417438 +step:7854 train loss:3.450213 +step:7855 train loss:3.447742 +step:7856 train loss:3.451877 +step:7857 train loss:3.439861 +step:7858 train loss:3.449132 +step:7859 train loss:3.454912 +step:7860 train loss:3.489748 +step:7861 train loss:3.478882 +step:7862 train loss:3.423772 +step:7863 train loss:3.526691 +step:7864 train loss:3.365090 +step:7865 train loss:3.445335 +step:7866 train loss:3.417680 +step:7867 train loss:3.465119 +step:7868 train loss:3.443533 +step:7869 train loss:3.443958 +step:7870 train loss:3.363116 +step:7871 train loss:3.431118 +step:7872 train loss:3.420121 +step:7873 train loss:3.498296 +step:7874 train loss:3.442029 +step:7875 train loss:3.445160 +step:7876 train loss:3.466470 +step:7877 train loss:3.419409 +step:7878 train loss:3.456637 +step:7879 train loss:3.796090 +step:7880 train loss:3.448828 +step:7881 train loss:3.475961 +step:7882 train loss:3.554481 +step:7883 train loss:3.369257 +step:7884 train loss:3.460690 +step:7885 train loss:3.441931 +step:7886 train loss:3.443396 +step:7887 train loss:3.437138 +step:7888 train loss:3.466610 +step:7889 train loss:3.519406 +step:7890 train loss:3.423475 +step:7891 train loss:3.469205 +step:7892 train loss:3.443568 +step:7893 train loss:3.419214 +step:7894 train loss:3.442333 +step:7895 train loss:3.422916 +step:7896 train loss:3.424092 +step:7897 train loss:3.449363 +step:7898 train loss:3.457421 +step:7899 train loss:3.441587 +step:7900 train loss:3.415751 +step:7901 train loss:3.403345 +step:7902 train loss:3.551714 +step:7903 train loss:3.394927 +step:7904 train loss:3.447875 +step:7905 train loss:3.516507 +step:7906 train loss:3.409332 +step:7907 train loss:3.436855 +step:7908 train loss:3.490066 +step:7909 train loss:3.541105 +step:7910 train loss:3.421687 +step:7911 train loss:3.442041 +step:7912 train loss:3.447574 +step:7913 train loss:3.419930 +step:7914 train loss:3.457758 +step:7915 train loss:3.556374 +step:7916 train loss:3.430854 +step:7917 train loss:3.488694 +step:7918 train loss:3.431377 +step:7919 train loss:3.420209 +step:7920 train loss:3.461530 +step:7921 train loss:3.465134 +step:7922 train loss:3.440807 +step:7923 train loss:3.487250 +step:7924 train loss:3.448982 +step:7925 train loss:3.471816 +step:7926 train loss:3.374980 +step:7927 train loss:3.657200 +step:7928 train loss:3.483028 +step:7929 train loss:3.443785 +step:7930 train loss:3.401914 +step:7931 train loss:3.427642 +step:7932 train loss:3.452558 +step:7933 train loss:3.462633 +step:7934 train loss:3.558965 +step:7935 train loss:3.482711 +step:7936 train loss:3.452580 +step:7937 train loss:3.405568 +step:7938 train loss:3.412994 +step:7939 train loss:3.464034 +step:7940 train loss:3.448055 +step:7941 train loss:3.475655 +step:7942 train loss:3.466149 +step:7943 train loss:3.477699 
+step:7944 train loss:3.396617 +step:7945 train loss:3.498655 +step:7946 train loss:3.451110 +step:7947 train loss:3.461434 +step:7948 train loss:3.419618 +step:7949 train loss:3.476492 +step:7950 train loss:3.526268 +step:7951 train loss:3.496556 +step:7952 train loss:3.639714 +step:7953 train loss:3.534371 +step:7954 train loss:3.435880 +step:7955 train loss:3.419569 +step:7956 train loss:3.425222 +step:7957 train loss:3.501213 +step:7958 train loss:3.512688 +step:7959 train loss:3.465688 +step:7960 train loss:3.529500 +step:7961 train loss:3.439509 +step:7962 train loss:3.409998 +step:7963 train loss:3.447789 +step:7964 train loss:3.443413 +step:7965 train loss:3.453630 +step:7966 train loss:3.426091 +step:7967 train loss:3.451864 +step:7968 train loss:3.455553 +step:7969 train loss:3.417631 +step:7970 train loss:3.384152 +step:7971 train loss:3.470294 +step:7972 train loss:3.445813 +step:7973 train loss:3.418860 +step:7974 train loss:3.459471 +step:7975 train loss:3.443378 +step:7976 train loss:3.464119 +step:7977 train loss:3.494928 +step:7978 train loss:3.519626 +step:7979 train loss:3.464433 +step:7980 train loss:3.370434 +step:7981 train loss:3.408877 +step:7982 train loss:3.458929 +step:7983 train loss:3.474420 +step:7984 train loss:3.512577 +step:7985 train loss:3.441812 +step:7986 train loss:3.461190 +step:7987 train loss:3.515663 +step:7988 train loss:3.488392 +step:7989 train loss:3.393222 +step:7990 train loss:3.409621 +step:7991 train loss:3.424468 +step:7992 train loss:3.449989 +step:7993 train loss:3.431180 +step:7994 train loss:3.484396 +step:7995 train loss:3.483646 +step:7996 train loss:3.453314 +step:7997 train loss:3.470740 +step:7998 train loss:3.495980 +step:7999 train loss:3.422542 +step:8000 validation loss:3.374636 total_sharp:2.8226e-03 L1_sharp:4.6789e-03 L2_sharp:1.8026e-03 L3_sharp:9.6205e-04 L4_sharp:7.6775e-04 L5_sharp:1.0859e-03 L6_sharp:1.2592e-03 L7_sharp:1.6967e-03 L8_sharp:1.3067e-03 L9_sharp:9.4971e-04 L10_sharp:7.7680e-04 L11_sharp:7.2245e-04 L12_sharp:7.1141e-04 total_fnorm:1.6768e+00 total_l1_linf:1.3740e+04 total_spectral:1.6768e+00 L1_fnorm:2.8921e-01 L2_fnorm:2.7694e-01 L3_fnorm:2.8589e-01 L4_fnorm:2.9720e-01 L5_fnorm:2.9865e-01 L6_fnorm:3.0110e-01 L7_fnorm:2.9987e-01 L8_fnorm:3.0075e-01 L9_fnorm:3.0190e-01 L10_fnorm:3.0187e-01 L11_fnorm:3.0115e-01 L12_fnorm:3.0105e-01 L1_l1linf:3.7441e-01 L2_l1linf:4.7200e-01 L3_l1linf:4.4571e-01 L4_l1linf:4.1747e-01 L5_l1linf:3.8934e-01 L6_l1linf:3.4997e-01 L7_l1linf:3.3976e-01 L8_l1linf:3.2220e-01 L9_l1linf:3.5635e-01 L10_l1linf:4.0342e-01 L11_l1linf:4.3569e-01 L12_l1linf:4.2153e-01 L1_spectral:8.3831e-03 L2_spectral:1.0690e-02 L3_spectral:9.9807e-03 L4_spectral:9.3861e-03 L5_spectral:8.8329e-03 L6_spectral:7.8306e-03 L7_spectral:7.6609e-03 L8_spectral:7.3239e-03 L9_spectral:8.0318e-03 L10_spectral:9.0863e-03 L11_spectral:9.7703e-03 L12_spectral:9.4997e-03 ip_v_neg_g:3.8010e-03 cos_v_neg_g:1.8623e-03 v_norm:1.6768e+00 g_norm:1.2173e+00 hv_norm:3.0182e-01 cos_v_hv:1.5681e-02 hg_norm:2.0377e+01 cos_g_hg:4.8917e-01 v_par:1.6328e-04 v_perp:1.6768e+00 L1_cos_v_neg_g:2.9762e-03 L1_v_norm:2.8921e-01 L2_cos_v_neg_g:2.4052e-03 L2_v_norm:2.7694e-01 L3_cos_v_neg_g:2.5494e-03 L3_v_norm:2.8589e-01 L4_cos_v_neg_g:2.0773e-03 L4_v_norm:2.9720e-01 L5_cos_v_neg_g:2.9154e-03 L5_v_norm:2.9865e-01 L6_cos_v_neg_g:3.6759e-03 L6_v_norm:3.0110e-01 L7_cos_v_neg_g:3.8911e-03 L7_v_norm:2.9987e-01 L8_cos_v_neg_g:3.5872e-03 L8_v_norm:3.0075e-01 L9_cos_v_neg_g:3.5104e-03 L9_v_norm:3.0190e-01 L10_cos_v_neg_g:3.5006e-03 L10_v_norm:3.0187e-01 
L11_cos_v_neg_g:3.3436e-03 L11_v_norm:3.0115e-01 L12_cos_v_neg_g:2.4217e-03 L12_v_norm:3.0105e-01 +step:8000 train loss:3.494126 +step:8001 train loss:3.453762 +step:8002 train loss:3.473614 +step:8003 train loss:3.490295 +step:8004 train loss:3.464710 +step:8005 train loss:3.391988 +step:8006 train loss:3.465992 +step:8007 train loss:3.434995 +step:8008 train loss:3.461784 +step:8009 train loss:3.535454 +step:8010 train loss:3.747992 +step:8011 train loss:3.414331 +step:8012 train loss:3.498474 +step:8013 train loss:3.447673 +step:8014 train loss:3.462688 +step:8015 train loss:3.460385 +step:8016 train loss:3.449087 +step:8017 train loss:3.470509 +step:8018 train loss:3.433882 +step:8019 train loss:3.399432 +step:8020 train loss:3.441126 +step:8021 train loss:3.515785 +step:8022 train loss:3.432655 +step:8023 train loss:3.464203 +step:8024 train loss:3.334394 +step:8025 train loss:3.440046 +step:8026 train loss:3.449713 +step:8027 train loss:3.455127 +step:8028 train loss:3.511237 +step:8029 train loss:3.440788 +step:8030 train loss:3.403322 +step:8031 train loss:3.460742 +step:8032 train loss:3.443342 +step:8033 train loss:3.395285 +step:8034 train loss:3.430449 +step:8035 train loss:3.424294 +step:8036 train loss:3.412223 +step:8037 train loss:3.382468 +step:8038 train loss:3.397054 +step:8039 train loss:3.486299 +step:8040 train loss:3.424833 +step:8041 train loss:3.419910 +step:8042 train loss:3.456322 +step:8043 train loss:3.401089 +step:8044 train loss:3.412259 +step:8045 train loss:3.481950 +step:8046 train loss:3.408045 +step:8047 train loss:3.409305 +step:8048 train loss:3.445085 +step:8049 train loss:3.489004 +step:8050 train loss:3.428648 +step:8051 train loss:3.405684 +step:8052 train loss:3.470128 +step:8053 train loss:3.422249 +step:8054 train loss:3.455744 +step:8055 train loss:3.487732 +step:8056 train loss:3.453619 +step:8057 train loss:3.531665 +step:8058 train loss:3.434901 +step:8059 train loss:3.493067 +step:8060 train loss:3.467341 +step:8061 train loss:3.355921 +step:8062 train loss:3.483806 +step:8063 train loss:3.450847 +step:8064 train loss:3.409911 +step:8065 train loss:3.476029 +step:8066 train loss:3.435308 +step:8067 train loss:3.497353 +step:8068 train loss:3.426078 +step:8069 train loss:3.448739 +step:8070 train loss:3.413836 +step:8071 train loss:3.423871 +step:8072 train loss:3.465932 +step:8073 train loss:3.418852 +step:8074 train loss:3.429338 +step:8075 train loss:3.418937 +step:8076 train loss:3.463622 +step:8077 train loss:3.471437 +step:8078 train loss:3.412900 +step:8079 train loss:3.436200 +step:8080 train loss:3.424038 +step:8081 train loss:3.439219 +step:8082 train loss:3.457966 +step:8083 train loss:3.356165 +step:8084 train loss:3.493864 +step:8085 train loss:3.370221 +step:8086 train loss:3.491888 +step:8087 train loss:3.393663 +step:8088 train loss:3.433014 +step:8089 train loss:3.473548 +step:8090 train loss:3.494400 +step:8091 train loss:3.440259 +step:8092 train loss:3.418236 +step:8093 train loss:3.427353 +step:8094 train loss:3.432592 +step:8095 train loss:3.452887 +step:8096 train loss:3.457715 +step:8097 train loss:3.380769 +step:8098 train loss:3.397181 +step:8099 train loss:3.385082 +step:8100 train loss:3.440357 +step:8101 train loss:3.519413 +step:8102 train loss:3.453052 +step:8103 train loss:3.406862 +step:8104 train loss:3.457484 +step:8105 train loss:3.450586 +step:8106 train loss:3.417710 +step:8107 train loss:3.394965 +step:8108 train loss:3.414953 +step:8109 train loss:3.408812 +step:8110 train loss:3.474660 +step:8111 
train loss:3.396862 +step:8112 train loss:3.416912 +step:8113 train loss:3.405933 +step:8114 train loss:3.348987 +step:8115 train loss:3.403509 +step:8116 train loss:3.437903 +step:8117 train loss:3.413163 +step:8118 train loss:3.402438 +step:8119 train loss:3.445010 +step:8120 train loss:3.392770 +step:8121 train loss:3.452549 +step:8122 train loss:3.435295 +step:8123 train loss:3.438570 +step:8124 train loss:3.404648 +step:8125 train loss:3.384411 +step:8126 train loss:3.380781 +step:8127 train loss:3.470335 +step:8128 train loss:3.477658 +step:8129 train loss:3.400210 +step:8130 train loss:3.424775 +step:8131 train loss:3.397109 +step:8132 train loss:3.465687 +step:8133 train loss:3.390738 +step:8134 train loss:3.425822 +step:8135 train loss:3.418454 +step:8136 train loss:3.426080 +step:8137 train loss:3.491419 +step:8138 train loss:3.397380 +step:8139 train loss:3.472018 +step:8140 train loss:3.399213 +step:8141 train loss:3.426423 +step:8142 train loss:3.405523 +step:8143 train loss:3.456391 +step:8144 train loss:3.434275 +step:8145 train loss:3.397271 +step:8146 train loss:3.411124 +step:8147 train loss:3.432894 +step:8148 train loss:3.525655 +step:8149 train loss:3.435396 +step:8150 train loss:3.415863 +step:8151 train loss:3.408889 +step:8152 train loss:3.503723 +step:8153 train loss:3.382019 +step:8154 train loss:3.397439 +step:8155 train loss:3.427562 +step:8156 train loss:3.404204 +step:8157 train loss:3.427849 +step:8158 train loss:3.438545 +step:8159 train loss:3.455855 +step:8160 train loss:3.406262 +step:8161 train loss:3.449350 +step:8162 train loss:3.381992 +step:8163 train loss:3.442437 +step:8164 train loss:3.429034 +step:8165 train loss:3.477550 +step:8166 train loss:3.482963 +step:8167 train loss:3.385453 +step:8168 train loss:3.367735 +step:8169 train loss:3.415568 +step:8170 train loss:3.369862 +step:8171 train loss:3.424169 +step:8172 train loss:3.421698 +step:8173 train loss:3.426004 +step:8174 train loss:3.433238 +step:8175 train loss:3.392742 +step:8176 train loss:3.388105 +step:8177 train loss:3.436655 +step:8178 train loss:3.524252 +step:8179 train loss:3.429375 +step:8180 train loss:3.455131 +step:8181 train loss:3.450911 +step:8182 train loss:3.411444 +step:8183 train loss:3.401108 +step:8184 train loss:3.394329 +step:8185 train loss:3.431102 +step:8186 train loss:3.434710 +step:8187 train loss:3.443772 +step:8188 train loss:3.373804 +step:8189 train loss:3.520678 +step:8190 train loss:3.455901 +step:8191 train loss:3.453856 +step:8192 train loss:3.571334 +step:8193 train loss:3.438802 +step:8194 train loss:3.376155 +step:8195 train loss:3.469460 +step:8196 train loss:3.386833 +step:8197 train loss:3.414339 +step:8198 train loss:3.425104 +step:8199 train loss:3.424002 +step:8200 train loss:3.406224 +step:8201 train loss:3.518450 +step:8202 train loss:3.437956 +step:8203 train loss:3.454094 +step:8204 train loss:3.367544 +step:8205 train loss:3.371957 +step:8206 train loss:3.498638 +step:8207 train loss:3.422470 +step:8208 train loss:3.444463 +step:8209 train loss:3.485341 +step:8210 train loss:3.471395 +step:8211 train loss:3.402085 +step:8212 train loss:3.458656 +step:8213 train loss:3.470216 +step:8214 train loss:3.507933 +step:8215 train loss:3.484738 +step:8216 train loss:3.464811 +step:8217 train loss:3.445699 +step:8218 train loss:3.452426 +step:8219 train loss:3.585837 +step:8220 train loss:3.413091 +step:8221 train loss:3.437685 +step:8222 train loss:3.388054 +step:8223 train loss:3.410108 +step:8224 train loss:3.418361 +step:8225 train loss:3.468448 
+step:8226 train loss:3.398221 +step:8227 train loss:3.466756 +step:8228 train loss:3.356521 +step:8229 train loss:3.396684 +step:8230 train loss:3.414310 +step:8231 train loss:3.433844 +step:8232 train loss:3.437644 +step:8233 train loss:3.477999 +step:8234 train loss:3.479033 +step:8235 train loss:3.445565 +step:8236 train loss:3.434232 +step:8237 train loss:3.384349 +step:8238 train loss:3.637960 +step:8239 train loss:3.472980 +step:8240 train loss:3.414891 +step:8241 train loss:3.388414 +step:8242 train loss:3.426047 +step:8243 train loss:3.419086 +step:8244 train loss:3.426848 +step:8245 train loss:3.413580 +step:8246 train loss:3.476972 +step:8247 train loss:3.512734 +step:8248 train loss:3.429285 +step:8249 train loss:3.421772 +step:8250 validation loss:3.366267 +step:8250 train loss:3.410080 +step:8251 train loss:3.506650 +step:8252 train loss:3.443912 +step:8253 train loss:3.409258 +step:8254 train loss:3.383146 +step:8255 train loss:3.413584 +step:8256 train loss:3.400007 +step:8257 train loss:3.503970 +step:8258 train loss:3.426339 +step:8259 train loss:3.406447 +step:8260 train loss:3.409222 +step:8261 train loss:3.402506 +step:8262 train loss:3.422904 +step:8263 train loss:3.434040 +step:8264 train loss:3.401893 +step:8265 train loss:3.389566 +step:8266 train loss:3.402030 +step:8267 train loss:3.333630 +step:8268 train loss:3.456703 +step:8269 train loss:3.387575 +step:8270 train loss:3.444197 +step:8271 train loss:3.467673 +step:8272 train loss:3.496487 +step:8273 train loss:3.369056 +step:8274 train loss:3.433854 +step:8275 train loss:3.393521 +step:8276 train loss:3.430814 +step:8277 train loss:3.500774 +step:8278 train loss:3.514598 +step:8279 train loss:3.431864 +step:8280 train loss:3.413208 +step:8281 train loss:3.381386 +step:8282 train loss:3.439880 +step:8283 train loss:3.432813 +step:8284 train loss:3.411216 +step:8285 train loss:3.406486 +step:8286 train loss:3.511600 +step:8287 train loss:3.453400 +step:8288 train loss:3.421465 +step:8289 train loss:3.437753 +step:8290 train loss:3.372618 +step:8291 train loss:3.415956 +step:8292 train loss:3.442050 +step:8293 train loss:3.420578 +step:8294 train loss:3.389435 +step:8295 train loss:3.430793 +step:8296 train loss:3.492146 +step:8297 train loss:3.575290 +step:8298 train loss:3.394542 +step:8299 train loss:3.432723 +step:8300 train loss:3.440975 +step:8301 train loss:3.412357 +step:8302 train loss:3.470291 +step:8303 train loss:3.608618 +step:8304 train loss:3.412243 +step:8305 train loss:3.457530 +step:8306 train loss:3.436197 +step:8307 train loss:3.452582 +step:8308 train loss:3.450166 +step:8309 train loss:3.472969 +step:8310 train loss:3.389533 +step:8311 train loss:3.483375 +step:8312 train loss:3.474815 +step:8313 train loss:3.537592 +step:8314 train loss:3.408355 +step:8315 train loss:3.358822 +step:8316 train loss:3.415250 +step:8317 train loss:3.439376 +step:8318 train loss:3.428090 +step:8319 train loss:3.465882 +step:8320 train loss:3.488575 +step:8321 train loss:3.392681 +step:8322 train loss:3.408631 +step:8323 train loss:3.445235 +step:8324 train loss:3.421700 +step:8325 train loss:3.475897 +step:8326 train loss:3.446522 +step:8327 train loss:3.430701 +step:8328 train loss:3.506071 +step:8329 train loss:3.411381 +step:8330 train loss:3.456711 +step:8331 train loss:3.378498 +step:8332 train loss:3.483337 +step:8333 train loss:3.496614 +step:8334 train loss:3.367224 +step:8335 train loss:3.425074 +step:8336 train loss:3.521400 +step:8337 train loss:3.451815 +step:8338 train loss:3.419972 +step:8339 
train loss:3.398675 +step:8340 train loss:3.491382 +step:8341 train loss:3.389546 +step:8342 train loss:3.466415 +step:8343 train loss:3.376678 +step:8344 train loss:3.424397 +step:8345 train loss:3.456792 +step:8346 train loss:3.542141 +step:8347 train loss:3.426458 +step:8348 train loss:3.456487 +step:8349 train loss:3.426484 +step:8350 train loss:3.448681 +step:8351 train loss:3.388274 +step:8352 train loss:3.475983 +step:8353 train loss:3.427277 +step:8354 train loss:3.412836 +step:8355 train loss:3.411392 +step:8356 train loss:3.408587 +step:8357 train loss:3.421199 +step:8358 train loss:3.398594 +step:8359 train loss:3.391525 +step:8360 train loss:3.440319 +step:8361 train loss:3.452127 +step:8362 train loss:3.473144 +step:8363 train loss:3.468046 +step:8364 train loss:3.434036 +step:8365 train loss:3.579880 +step:8366 train loss:3.425024 +step:8367 train loss:3.395992 +step:8368 train loss:3.369608 +step:8369 train loss:3.394858 +step:8370 train loss:3.479409 +step:8371 train loss:3.448431 +step:8372 train loss:3.430358 +step:8373 train loss:3.436574 +step:8374 train loss:3.373866 +step:8375 train loss:3.433372 +step:8376 train loss:3.476454 +step:8377 train loss:3.300833 +step:8378 train loss:3.516805 +step:8379 train loss:3.381851 +step:8380 train loss:3.387755 +step:8381 train loss:3.393259 +step:8382 train loss:3.416126 +step:8383 train loss:3.380686 +step:8384 train loss:3.423046 +step:8385 train loss:3.432906 +step:8386 train loss:3.415727 +step:8387 train loss:3.573690 +step:8388 train loss:3.486703 +step:8389 train loss:3.464463 +step:8390 train loss:3.464759 +step:8391 train loss:3.395498 +step:8392 train loss:3.405313 +step:8393 train loss:3.358463 +step:8394 train loss:3.455924 +step:8395 train loss:3.459785 +step:8396 train loss:3.484804 +step:8397 train loss:3.419634 +step:8398 train loss:3.436112 +step:8399 train loss:3.404655 +step:8400 train loss:3.410074 +step:8401 train loss:3.417708 +step:8402 train loss:3.401176 +step:8403 train loss:3.416872 +step:8404 train loss:3.418836 +step:8405 train loss:3.376718 +step:8406 train loss:3.417259 +step:8407 train loss:3.459534 +step:8408 train loss:3.428029 +step:8409 train loss:3.352559 +step:8410 train loss:3.416551 +step:8411 train loss:3.443416 +step:8412 train loss:3.504088 +step:8413 train loss:3.479293 +step:8414 train loss:3.472299 +step:8415 train loss:3.394881 +step:8416 train loss:3.440949 +step:8417 train loss:3.357926 +step:8418 train loss:3.460546 +step:8419 train loss:3.418521 +step:8420 train loss:3.493023 +step:8421 train loss:3.411231 +step:8422 train loss:3.429039 +step:8423 train loss:3.443648 +step:8424 train loss:3.446833 +step:8425 train loss:3.506284 +step:8426 train loss:3.476437 +step:8427 train loss:3.396599 +step:8428 train loss:3.408464 +step:8429 train loss:3.470016 +step:8430 train loss:3.410089 +step:8431 train loss:3.414267 +step:8432 train loss:3.417369 +step:8433 train loss:3.392579 +step:8434 train loss:3.429565 +step:8435 train loss:3.347709 +step:8436 train loss:3.427919 +step:8437 train loss:3.470901 +step:8438 train loss:3.450495 +step:8439 train loss:3.388823 +step:8440 train loss:3.360231 +step:8441 train loss:3.415960 +step:8442 train loss:3.443064 +step:8443 train loss:3.400965 +step:8444 train loss:3.430084 +step:8445 train loss:3.380834 +step:8446 train loss:3.431917 +step:8447 train loss:3.444494 +step:8448 train loss:3.426426 +step:8449 train loss:3.416913 +step:8450 train loss:3.410385 +step:8451 train loss:3.435205 +step:8452 train loss:3.413334 +step:8453 train loss:3.393925 
+step:8454 train loss:3.442482 +step:8455 train loss:3.514920 +step:8456 train loss:3.495450 +step:8457 train loss:3.546032 +step:8458 train loss:3.436026 +step:8459 train loss:3.442993 +step:8460 train loss:3.373679 +step:8461 train loss:3.530602 +step:8462 train loss:3.399482 +step:8463 train loss:3.434592 +step:8464 train loss:3.451959 +step:8465 train loss:3.457752 +step:8466 train loss:3.433038 +step:8467 train loss:3.433555 +step:8468 train loss:3.690560 +step:8469 train loss:3.397104 +step:8470 train loss:3.389521 +step:8471 train loss:3.432491 +step:8472 train loss:3.456024 +step:8473 train loss:3.406375 +step:8474 train loss:3.537675 +step:8475 train loss:3.492545 +step:8476 train loss:3.443244 +step:8477 train loss:3.430699 +step:8478 train loss:3.413536 +step:8479 train loss:3.414539 +step:8480 train loss:3.479744 +step:8481 train loss:3.404697 +step:8482 train loss:3.403233 +step:8483 train loss:3.549546 +step:8484 train loss:3.432024 +step:8485 train loss:3.474301 +step:8486 train loss:3.388385 +step:8487 train loss:3.441729 +step:8488 train loss:3.388464 +step:8489 train loss:3.470200 +step:8490 train loss:3.456375 +step:8491 train loss:3.476498 +step:8492 train loss:3.428355 +step:8493 train loss:3.504660 +step:8494 train loss:3.366380 +step:8495 train loss:3.461913 +step:8496 train loss:3.407248 +step:8497 train loss:3.441499 +step:8498 train loss:3.458597 +step:8499 train loss:3.436368 +step:8500 validation loss:3.364953 total_sharp:2.9734e-03 L1_sharp:4.3016e-03 L2_sharp:1.3063e-03 L3_sharp:1.5556e-03 L4_sharp:8.0878e-04 L5_sharp:1.1209e-03 L6_sharp:1.2098e-03 L7_sharp:1.5375e-03 L8_sharp:1.4785e-03 L9_sharp:1.1801e-03 L10_sharp:7.7522e-04 L11_sharp:8.4003e-04 L12_sharp:2.3083e-03 total_fnorm:1.6707e+00 total_l1_linf:1.3681e+04 total_spectral:1.6707e+00 L1_fnorm:2.8802e-01 L2_fnorm:2.7579e-01 L3_fnorm:2.8368e-01 L4_fnorm:2.9578e-01 L5_fnorm:2.9815e-01 L6_fnorm:3.0006e-01 L7_fnorm:2.9912e-01 L8_fnorm:2.9947e-01 L9_fnorm:3.0007e-01 L10_fnorm:2.9972e-01 L11_fnorm:2.9856e-01 L12_fnorm:2.9977e-01 L1_l1linf:3.7435e-01 L2_l1linf:4.3311e-01 L3_l1linf:4.3320e-01 L4_l1linf:4.1655e-01 L5_l1linf:3.8761e-01 L6_l1linf:3.3627e-01 L7_l1linf:3.3627e-01 L8_l1linf:3.2000e-01 L9_l1linf:3.5618e-01 L10_l1linf:4.0606e-01 L11_l1linf:4.1617e-01 L12_l1linf:4.2838e-01 L1_spectral:8.4934e-03 L2_spectral:9.8769e-03 L3_spectral:9.7899e-03 L4_spectral:9.3823e-03 L5_spectral:8.7859e-03 L6_spectral:7.5939e-03 L7_spectral:7.5824e-03 L8_spectral:7.2808e-03 L9_spectral:8.0703e-03 L10_spectral:9.1943e-03 L11_spectral:9.3068e-03 L12_spectral:9.6043e-03 ip_v_neg_g:4.1470e-03 cos_v_neg_g:2.0769e-03 v_norm:1.6707e+00 g_norm:1.1951e+00 hv_norm:3.1158e-01 cos_v_hv:1.5944e-02 hg_norm:1.7522e+01 cos_g_hg:4.6373e-01 v_par:1.4257e-04 v_perp:1.6707e+00 L1_cos_v_neg_g:3.4164e-03 L1_v_norm:2.8802e-01 L2_cos_v_neg_g:4.0356e-03 L2_v_norm:2.7579e-01 L3_cos_v_neg_g:3.0423e-03 L3_v_norm:2.8368e-01 L4_cos_v_neg_g:3.2488e-03 L4_v_norm:2.9578e-01 L5_cos_v_neg_g:2.9667e-03 L5_v_norm:2.9815e-01 L6_cos_v_neg_g:3.2890e-03 L6_v_norm:3.0006e-01 L7_cos_v_neg_g:3.7719e-03 L7_v_norm:2.9912e-01 L8_cos_v_neg_g:3.7834e-03 L8_v_norm:2.9947e-01 L9_cos_v_neg_g:3.7025e-03 L9_v_norm:3.0007e-01 L10_cos_v_neg_g:3.9584e-03 L10_v_norm:2.9972e-01 L11_cos_v_neg_g:4.1534e-03 L11_v_norm:2.9856e-01 L12_cos_v_neg_g:3.2886e-03 L12_v_norm:2.9977e-01 +step:8500 train loss:3.425147 +step:8501 train loss:3.653775 +step:8502 train loss:3.662594 +step:8503 train loss:3.426642 +step:8504 train loss:3.418630 +step:8505 train loss:3.398515 +step:8506 train 
loss:3.467565 +step:8507 train loss:3.407756 +step:8508 train loss:3.443199 +step:8509 train loss:3.379083 +step:8510 train loss:3.406048 +step:8511 train loss:3.359777 +step:8512 train loss:3.461005 +step:8513 train loss:3.464123 +step:8514 train loss:3.416643 +step:8515 train loss:3.507964 +step:8516 train loss:3.425027 +step:8517 train loss:3.442814 +step:8518 train loss:3.337125 +step:8519 train loss:3.429211 +step:8520 train loss:3.395844 +step:8521 train loss:3.436025 +step:8522 train loss:3.328825 +step:8523 train loss:3.424609 +step:8524 train loss:3.414750 +step:8525 train loss:3.482824 +step:8526 train loss:3.463356 +step:8527 train loss:3.405391 +step:8528 train loss:3.488640 +step:8529 train loss:3.445896 +step:8530 train loss:3.476523 +step:8531 train loss:3.466043 +step:8532 train loss:3.507201 +step:8533 train loss:3.460218 +step:8534 train loss:3.456053 +step:8535 train loss:3.428001 +step:8536 train loss:3.520463 +step:8537 train loss:3.432769 +step:8538 train loss:3.502423 +step:8539 train loss:3.424159 +step:8540 train loss:3.450912 +step:8541 train loss:3.389885 +step:8542 train loss:3.458934 +step:8543 train loss:3.372136 +step:8544 train loss:3.371125 +step:8545 train loss:3.417029 +step:8546 train loss:3.372871 +step:8547 train loss:3.426686 +step:8548 train loss:3.397316 +step:8549 train loss:3.440300 +step:8550 train loss:3.392827 +step:8551 train loss:3.444053 +step:8552 train loss:3.439835 +step:8553 train loss:3.449251 +step:8554 train loss:3.418404 +step:8555 train loss:3.436449 +step:8556 train loss:3.510504 +step:8557 train loss:3.413490 +step:8558 train loss:3.447856 +step:8559 train loss:3.440465 +step:8560 train loss:3.422314 +step:8561 train loss:3.377290 +step:8562 train loss:3.401758 +step:8563 train loss:3.402580 +step:8564 train loss:3.472684 +step:8565 train loss:3.447201 +step:8566 train loss:3.468373 +step:8567 train loss:3.410545 +step:8568 train loss:3.433505 +step:8569 train loss:3.438124 +step:8570 train loss:3.384625 +step:8571 train loss:3.422621 +step:8572 train loss:3.442912 +step:8573 train loss:3.515262 +step:8574 train loss:3.446697 +step:8575 train loss:3.443851 +step:8576 train loss:3.481249 +step:8577 train loss:3.562480 +step:8578 train loss:3.471822 +step:8579 train loss:3.454470 +step:8580 train loss:3.390696 +step:8581 train loss:3.432330 +step:8582 train loss:3.439323 +step:8583 train loss:3.432435 +step:8584 train loss:3.424145 +step:8585 train loss:3.506798 +step:8586 train loss:3.421657 +step:8587 train loss:3.434207 +step:8588 train loss:3.475353 +step:8589 train loss:3.424884 +step:8590 train loss:3.418293 +step:8591 train loss:3.421807 +step:8592 train loss:3.380126 +step:8593 train loss:3.458624 +step:8594 train loss:3.484672 +step:8595 train loss:3.403018 +step:8596 train loss:3.450204 +step:8597 train loss:3.410213 +step:8598 train loss:3.463757 +step:8599 train loss:3.434520 +step:8600 train loss:3.440913 +step:8601 train loss:3.429238 +step:8602 train loss:3.400993 +step:8603 train loss:3.459414 +step:8604 train loss:3.405245 +step:8605 train loss:3.422134 +step:8606 train loss:3.428214 +step:8607 train loss:3.440902 +step:8608 train loss:3.481930 +step:8609 train loss:3.379172 +step:8610 train loss:3.451950 +step:8611 train loss:3.381281 +step:8612 train loss:3.460421 +step:8613 train loss:3.393823 +step:8614 train loss:3.457095 +step:8615 train loss:3.498920 +step:8616 train loss:3.381644 +step:8617 train loss:3.448407 +step:8618 train loss:3.425463 +step:8619 train loss:3.379548 +step:8620 train loss:3.422953 
+step:8621 train loss:3.455121 +step:8622 train loss:3.410262 +step:8623 train loss:3.426291 +step:8624 train loss:3.498174 +step:8625 train loss:3.420538 +step:8626 train loss:3.426839 +step:8627 train loss:3.424191 +step:8628 train loss:3.458868 +step:8629 train loss:3.367166 +step:8630 train loss:3.466284 +step:8631 train loss:3.407859 +step:8632 train loss:3.462476 +step:8633 train loss:3.410264 +step:8634 train loss:3.642896 +step:8635 train loss:3.439008 +step:8636 train loss:3.486102 +step:8637 train loss:3.410169 +step:8638 train loss:3.413132 +step:8639 train loss:3.466491 +step:8640 train loss:3.381056 +step:8641 train loss:3.478059 +step:8642 train loss:3.432015 +step:8643 train loss:3.542270 +step:8644 train loss:3.382954 +step:8645 train loss:3.456887 +step:8646 train loss:3.415085 +step:8647 train loss:3.445634 +step:8648 train loss:3.390986 +step:8649 train loss:3.475853 +step:8650 train loss:3.431380 +step:8651 train loss:3.440224 +step:8652 train loss:3.411353 +step:8653 train loss:3.440266 +step:8654 train loss:3.488079 +step:8655 train loss:3.413764 +step:8656 train loss:3.459447 +step:8657 train loss:3.460421 +step:8658 train loss:3.431878 +step:8659 train loss:3.422969 +step:8660 train loss:3.369196 +step:8661 train loss:3.427924 +step:8662 train loss:3.371652 +step:8663 train loss:3.445960 +step:8664 train loss:3.356524 +step:8665 train loss:3.381609 +step:8666 train loss:3.459248 +step:8667 train loss:3.351005 +step:8668 train loss:3.458443 +step:8669 train loss:3.498392 +step:8670 train loss:3.393988 +step:8671 train loss:3.394554 +step:8672 train loss:3.610916 +step:8673 train loss:3.376230 +step:8674 train loss:3.447616 +step:8675 train loss:3.484378 +step:8676 train loss:3.430380 +step:8677 train loss:3.454654 +step:8678 train loss:3.401909 +step:8679 train loss:3.459037 +step:8680 train loss:3.437145 +step:8681 train loss:3.444912 +step:8682 train loss:3.395698 +step:8683 train loss:3.419219 +step:8684 train loss:3.483715 +step:8685 train loss:3.434271 +step:8686 train loss:3.423851 +step:8687 train loss:3.377445 +step:8688 train loss:3.396484 +step:8689 train loss:3.465053 +step:8690 train loss:3.404825 +step:8691 train loss:3.480280 +step:8692 train loss:3.371108 +step:8693 train loss:3.460031 +step:8694 train loss:3.461798 +step:8695 train loss:3.444515 +step:8696 train loss:3.468997 +step:8697 train loss:3.425570 +step:8698 train loss:3.461358 +step:8699 train loss:3.415080 +step:8700 train loss:3.439534 +step:8701 train loss:3.402180 +step:8702 train loss:3.389937 +step:8703 train loss:3.401643 +step:8704 train loss:3.357180 +step:8705 train loss:3.436440 +step:8706 train loss:3.457734 +step:8707 train loss:3.454482 +step:8708 train loss:3.398890 +step:8709 train loss:3.460608 +step:8710 train loss:3.388576 +step:8711 train loss:3.442850 +step:8712 train loss:3.353157 +step:8713 train loss:3.428781 +step:8714 train loss:3.536453 +step:8715 train loss:3.392742 +step:8716 train loss:3.443686 +step:8717 train loss:3.416507 +step:8718 train loss:3.452598 +step:8719 train loss:3.420700 +step:8720 train loss:3.533120 +step:8721 train loss:3.423085 +step:8722 train loss:3.517642 +step:8723 train loss:3.386999 +step:8724 train loss:3.402915 +step:8725 train loss:3.427786 +step:8726 train loss:3.383904 +step:8727 train loss:3.461532 +step:8728 train loss:3.418849 +step:8729 train loss:3.422888 +step:8730 train loss:3.399167 +step:8731 train loss:3.404862 +step:8732 train loss:3.509407 +step:8733 train loss:3.427279 +step:8734 train loss:3.469955 +step:8735 train 
loss:3.535796 +step:8736 train loss:3.393855 +step:8737 train loss:3.421124 +step:8738 train loss:3.398463 +step:8739 train loss:3.463560 +step:8740 train loss:3.383792 +step:8741 train loss:3.439218 +step:8742 train loss:3.392100 +step:8743 train loss:3.432478 +step:8744 train loss:3.451934 +step:8745 train loss:3.493322 +step:8746 train loss:3.392289 +step:8747 train loss:3.498281 +step:8748 train loss:3.407043 +step:8749 train loss:3.445856 +step:8750 validation loss:3.356840 +step:8750 train loss:3.454152 +step:8751 train loss:3.495317 +step:8752 train loss:3.352834 +step:8753 train loss:3.399644 +step:8754 train loss:3.451356 +step:8755 train loss:3.433090 +step:8756 train loss:3.478288 +step:8757 train loss:3.391489 +step:8758 train loss:3.547935 +step:8759 train loss:3.393736 +step:8760 train loss:3.425468 +step:8761 train loss:3.501486 +step:8762 train loss:3.398146 +step:8763 train loss:3.369836 +step:8764 train loss:3.445947 +step:8765 train loss:3.511437 +step:8766 train loss:3.442963 +step:8767 train loss:3.401970 +step:8768 train loss:3.441490 +step:8769 train loss:3.413749 +step:8770 train loss:3.460012 +step:8771 train loss:3.432143 +step:8772 train loss:3.451666 +step:8773 train loss:3.412685 +step:8774 train loss:3.445696 +step:8775 train loss:3.444733 +step:8776 train loss:3.391548 +step:8777 train loss:3.426330 +step:8778 train loss:3.437507 +step:8779 train loss:3.456627 +step:8780 train loss:3.420951 +step:8781 train loss:3.427689 +step:8782 train loss:3.445605 +step:8783 train loss:3.428236 +step:8784 train loss:3.451332 +step:8785 train loss:3.437341 +step:8786 train loss:3.514840 +step:8787 train loss:3.455794 +step:8788 train loss:3.359828 +step:8789 train loss:3.457739 +step:8790 train loss:3.384930 +step:8791 train loss:3.438308 +step:8792 train loss:3.374905 +step:8793 train loss:3.466892 +step:8794 train loss:3.386139 +step:8795 train loss:3.459096 +step:8796 train loss:3.599296 +step:8797 train loss:3.348878 +step:8798 train loss:3.501973 +step:8799 train loss:3.422332 +step:8800 train loss:3.414217 +step:8801 train loss:3.436782 +step:8802 train loss:3.495530 +step:8803 train loss:3.453657 +step:8804 train loss:3.434858 +step:8805 train loss:3.451837 +step:8806 train loss:3.423070 +step:8807 train loss:3.415678 +step:8808 train loss:3.371155 +step:8809 train loss:3.497658 +step:8810 train loss:3.396747 +step:8811 train loss:3.386917 +step:8812 train loss:3.432419 +step:8813 train loss:3.340868 +step:8814 train loss:3.528997 +step:8815 train loss:3.372568 +step:8816 train loss:3.493327 +step:8817 train loss:3.428169 +step:8818 train loss:3.362535 +step:8819 train loss:3.478202 +step:8820 train loss:3.408863 +step:8821 train loss:3.434523 +step:8822 train loss:3.414897 +step:8823 train loss:3.430981 +step:8824 train loss:3.489307 +step:8825 train loss:3.467421 +step:8826 train loss:3.436667 +step:8827 train loss:3.395372 +step:8828 train loss:3.440520 +step:8829 train loss:3.418081 +step:8830 train loss:3.396031 +step:8831 train loss:3.471869 +step:8832 train loss:3.410763 +step:8833 train loss:3.443713 +step:8834 train loss:3.409525 +step:8835 train loss:3.346783 +step:8836 train loss:3.473547 +step:8837 train loss:3.378250 +step:8838 train loss:3.420479 +step:8839 train loss:3.404271 +step:8840 train loss:3.414110 +step:8841 train loss:3.421101 +step:8842 train loss:3.433339 +step:8843 train loss:3.443591 +step:8844 train loss:3.409708 +step:8845 train loss:3.433602 +step:8846 train loss:3.397140 +step:8847 train loss:3.437666 +step:8848 train loss:3.483860 
+step:8849 train loss:3.465403 +step:8850 train loss:3.457439 +step:8851 train loss:3.337348 +step:8852 train loss:3.439096 +step:8853 train loss:3.420247 +step:8854 train loss:3.393996 +step:8855 train loss:3.465001 +step:8856 train loss:3.454844 +step:8857 train loss:3.522531 +step:8858 train loss:3.385996 +step:8859 train loss:3.461529 +step:8860 train loss:3.418082 +step:8861 train loss:3.400770 +step:8862 train loss:3.399176 +step:8863 train loss:3.383591 +step:8864 train loss:3.454975 +step:8865 train loss:3.444680 +step:8866 train loss:3.330821 +step:8867 train loss:3.429085 +step:8868 train loss:3.460095 +step:8869 train loss:3.545019 +step:8870 train loss:3.420002 +step:8871 train loss:3.444673 +step:8872 train loss:3.426593 +step:8873 train loss:3.428811 +step:8874 train loss:3.482920 +step:8875 train loss:3.417152 +step:8876 train loss:3.455039 +step:8877 train loss:3.437563 +step:8878 train loss:3.485779 +step:8879 train loss:3.446545 +step:8880 train loss:3.394232 +step:8881 train loss:3.360242 +step:8882 train loss:3.429986 +step:8883 train loss:3.417136 +step:8884 train loss:3.507496 +step:8885 train loss:3.438438 +step:8886 train loss:3.444036 +step:8887 train loss:3.471054 +step:8888 train loss:3.430104 +step:8889 train loss:3.433203 +step:8890 train loss:3.422639 +step:8891 train loss:3.396200 +step:8892 train loss:3.481277 +step:8893 train loss:3.419386 +step:8894 train loss:3.438884 +step:8895 train loss:3.467301 +step:8896 train loss:3.385312 +step:8897 train loss:3.473830 +step:8898 train loss:3.408522 +step:8899 train loss:3.429359 +step:8900 train loss:3.396925 +step:8901 train loss:3.413929 +step:8902 train loss:3.453121 +step:8903 train loss:3.392449 +step:8904 train loss:3.444446 +step:8905 train loss:3.420410 +step:8906 train loss:3.410208 +step:8907 train loss:3.421506 +step:8908 train loss:3.486024 +step:8909 train loss:3.429454 +step:8910 train loss:3.391978 +step:8911 train loss:3.489177 +step:8912 train loss:3.384854 +step:8913 train loss:3.399394 +step:8914 train loss:3.488239 +step:8915 train loss:3.433244 +step:8916 train loss:3.460773 +step:8917 train loss:3.418362 +step:8918 train loss:3.420541 +step:8919 train loss:3.413205 +step:8920 train loss:3.440353 +step:8921 train loss:3.437180 +step:8922 train loss:3.414927 +step:8923 train loss:3.599665 +step:8924 train loss:3.491502 +step:8925 train loss:3.422376 +step:8926 train loss:3.433678 +step:8927 train loss:3.464585 +step:8928 train loss:3.418104 +step:8929 train loss:3.415795 +step:8930 train loss:3.470336 +step:8931 train loss:3.376470 +step:8932 train loss:3.482473 +step:8933 train loss:3.390301 +step:8934 train loss:3.430873 +step:8935 train loss:3.442787 +step:8936 train loss:3.479957 +step:8937 train loss:3.474967 +step:8938 train loss:3.415776 +step:8939 train loss:3.484232 +step:8940 train loss:3.435344 +step:8941 train loss:3.379537 +step:8942 train loss:3.457541 +step:8943 train loss:3.389458 +step:8944 train loss:3.441745 +step:8945 train loss:3.458982 +step:8946 train loss:3.307906 +step:8947 train loss:3.491712 +step:8948 train loss:3.342489 +step:8949 train loss:3.344705 +step:8950 train loss:3.386865 +step:8951 train loss:3.427739 +step:8952 train loss:3.444882 +step:8953 train loss:3.400379 +step:8954 train loss:3.508372 +step:8955 train loss:3.420724 +step:8956 train loss:3.449525 +step:8957 train loss:3.436925 +step:8958 train loss:3.416470 +step:8959 train loss:3.407863 +step:8960 train loss:3.375234 +step:8961 train loss:3.397682 +step:8962 train loss:3.450575 +step:8963 train 
loss:3.429697 +step:8964 train loss:3.411856 +step:8965 train loss:3.452451 +step:8966 train loss:3.414399 +step:8967 train loss:3.391813 +step:8968 train loss:3.375964 +step:8969 train loss:3.364555 +step:8970 train loss:3.444054 +step:8971 train loss:3.393306 +step:8972 train loss:3.596708 +step:8973 train loss:3.478881 +step:8974 train loss:3.438735 +step:8975 train loss:3.440732 +step:8976 train loss:3.402967 +step:8977 train loss:3.490211 +step:8978 train loss:3.473011 +step:8979 train loss:3.390076 +step:8980 train loss:3.488230 +step:8981 train loss:3.439192 +step:8982 train loss:3.414691 +step:8983 train loss:3.354575 +step:8984 train loss:3.482232 +step:8985 train loss:3.397985 +step:8986 train loss:3.431498 +step:8987 train loss:3.408196 +step:8988 train loss:3.457774 +step:8989 train loss:3.369700 +step:8990 train loss:3.507193 +step:8991 train loss:3.360119 +step:8992 train loss:3.416149 +step:8993 train loss:3.508827 +step:8994 train loss:3.412051 +step:8995 train loss:3.435215 +step:8996 train loss:3.407196 +step:8997 train loss:3.358701 +step:8998 train loss:3.359936 +step:8999 train loss:3.387592 +step:9000 validation loss:3.355500 total_sharp:3.0309e-03 L1_sharp:4.3657e-03 L2_sharp:1.3502e-03 L3_sharp:1.4102e-03 L4_sharp:9.8874e-04 L5_sharp:1.1277e-03 L6_sharp:1.4957e-03 L7_sharp:1.9340e-03 L8_sharp:1.4633e-03 L9_sharp:1.0061e-03 L10_sharp:7.7556e-04 L11_sharp:6.6063e-04 L12_sharp:7.5521e-04 total_fnorm:1.6786e+00 total_l1_linf:1.3751e+04 total_spectral:1.6786e+00 L1_fnorm:2.9026e-01 L2_fnorm:2.7852e-01 L3_fnorm:2.8449e-01 L4_fnorm:2.9689e-01 L5_fnorm:2.9797e-01 L6_fnorm:3.0021e-01 L7_fnorm:2.9940e-01 L8_fnorm:2.9998e-01 L9_fnorm:3.0065e-01 L10_fnorm:3.0065e-01 L11_fnorm:3.0013e-01 L12_fnorm:3.0050e-01 L1_l1linf:3.8288e-01 L2_l1linf:4.5872e-01 L3_l1linf:4.5412e-01 L4_l1linf:4.2940e-01 L5_l1linf:3.8745e-01 L6_l1linf:3.5871e-01 L7_l1linf:3.5235e-01 L8_l1linf:3.4597e-01 L9_l1linf:3.6871e-01 L10_l1linf:4.0807e-01 L11_l1linf:4.1763e-01 L12_l1linf:4.1347e-01 L1_spectral:8.5299e-03 L2_spectral:1.0330e-02 L3_spectral:1.0127e-02 L4_spectral:9.5748e-03 L5_spectral:8.7169e-03 L6_spectral:8.0447e-03 L7_spectral:7.8743e-03 L8_spectral:7.7861e-03 L9_spectral:8.3213e-03 L10_spectral:9.0995e-03 L11_spectral:9.3269e-03 L12_spectral:9.3933e-03 ip_v_neg_g:3.1828e-03 cos_v_neg_g:1.4751e-03 v_norm:1.6786e+00 g_norm:1.2854e+00 hv_norm:3.1862e-01 cos_v_hv:1.5968e-02 hg_norm:3.6513e+01 cos_g_hg:5.0728e-01 v_par:1.4354e-04 v_perp:1.6786e+00 L1_cos_v_neg_g:5.5866e-04 L1_v_norm:2.9026e-01 L2_cos_v_neg_g:4.3092e-04 L2_v_norm:2.7852e-01 L3_cos_v_neg_g:2.5343e-03 L3_v_norm:2.8449e-01 L4_cos_v_neg_g:2.2731e-03 L4_v_norm:2.9689e-01 L5_cos_v_neg_g:3.4296e-03 L5_v_norm:2.9797e-01 L6_cos_v_neg_g:2.5622e-03 L6_v_norm:3.0021e-01 L7_cos_v_neg_g:3.5403e-03 L7_v_norm:2.9940e-01 L8_cos_v_neg_g:2.6823e-03 L8_v_norm:2.9998e-01 L9_cos_v_neg_g:3.5832e-03 L9_v_norm:3.0065e-01 L10_cos_v_neg_g:4.4356e-03 L10_v_norm:3.0065e-01 L11_cos_v_neg_g:2.8900e-03 L11_v_norm:3.0013e-01 L12_cos_v_neg_g:2.2605e-03 L12_v_norm:3.0050e-01 +step:9000 train loss:3.468813 +step:9001 train loss:3.438735 +step:9002 train loss:3.448149 +step:9003 train loss:3.384615 +step:9004 train loss:3.386076 +step:9005 train loss:3.400426 +step:9006 train loss:3.400405 +step:9007 train loss:3.422003 +step:9008 train loss:3.372814 +step:9009 train loss:3.372494 +step:9010 train loss:3.409282 +step:9011 train loss:3.402781 +step:9012 train loss:3.518086 +step:9013 train loss:3.345292 +step:9014 train loss:3.418732 +step:9015 train loss:3.415206 +step:9016 
train loss:3.493389 +step:9017 train loss:3.432887 +step:9018 train loss:3.356385 +step:9019 train loss:3.443419 +step:9020 train loss:3.448208 +step:9021 train loss:3.407156 +step:9022 train loss:3.420674 +step:9023 train loss:3.416298 +step:9024 train loss:3.436826 +step:9025 train loss:3.420063 +step:9026 train loss:3.380584 +step:9027 train loss:3.424046 +step:9028 train loss:3.446115 +step:9029 train loss:3.463127 +step:9030 train loss:3.462981 +step:9031 train loss:3.425475 +step:9032 train loss:3.439018 +step:9033 train loss:3.420186 +step:9034 train loss:3.430796 +step:9035 train loss:3.436185 +step:9036 train loss:3.384635 +step:9037 train loss:3.379221 +step:9038 train loss:3.502492 +step:9039 train loss:3.405628 +step:9040 train loss:3.421291 +step:9041 train loss:3.468088 +step:9042 train loss:3.326643 +step:9043 train loss:3.421175 +step:9044 train loss:3.438192 +step:9045 train loss:3.384378 +step:9046 train loss:3.427432 +step:9047 train loss:3.423892 +step:9048 train loss:3.403741 +step:9049 train loss:3.435587 +step:9050 train loss:3.392900 +step:9051 train loss:3.429731 +step:9052 train loss:3.358579 +step:9053 train loss:3.487196 +step:9054 train loss:3.495451 +step:9055 train loss:3.419054 +step:9056 train loss:3.479851 +step:9057 train loss:3.334578 +step:9058 train loss:3.420885 +step:9059 train loss:3.493086 +step:9060 train loss:3.426972 +step:9061 train loss:3.454324 +step:9062 train loss:3.384268 +step:9063 train loss:3.518808 +step:9064 train loss:3.405692 +step:9065 train loss:3.415440 +step:9066 train loss:3.433556 +step:9067 train loss:3.398000 +step:9068 train loss:3.470158 +step:9069 train loss:3.428365 +step:9070 train loss:3.474905 +step:9071 train loss:3.414136 +step:9072 train loss:3.432967 +step:9073 train loss:3.391562 +step:9074 train loss:3.474659 +step:9075 train loss:3.418744 +step:9076 train loss:3.389037 +step:9077 train loss:3.460299 +step:9078 train loss:3.401654 +step:9079 train loss:3.450291 +step:9080 train loss:3.383109 +step:9081 train loss:3.418913 +step:9082 train loss:3.448935 +step:9083 train loss:3.472269 +step:9084 train loss:3.365878 +step:9085 train loss:3.437399 +step:9086 train loss:3.418813 +step:9087 train loss:3.369762 +step:9088 train loss:3.429006 +step:9089 train loss:3.444451 +step:9090 train loss:3.378084 +step:9091 train loss:3.476976 +step:9092 train loss:3.404437 +step:9093 train loss:3.403409 +step:9094 train loss:3.529282 +step:9095 train loss:3.398304 +step:9096 train loss:3.411839 +step:9097 train loss:3.399885 +step:9098 train loss:3.391887 +step:9099 train loss:3.514444 +step:9100 train loss:3.546331 +step:9101 train loss:3.466263 +step:9102 train loss:3.407658 +step:9103 train loss:3.415261 +step:9104 train loss:3.499823 +step:9105 train loss:3.360518 +step:9106 train loss:3.488698 +step:9107 train loss:3.422886 +step:9108 train loss:3.406285 +step:9109 train loss:3.433431 +step:9110 train loss:3.431095 +step:9111 train loss:3.416113 +step:9112 train loss:3.414926 +step:9113 train loss:3.451264 +step:9114 train loss:3.391640 +step:9115 train loss:3.419623 +step:9116 train loss:3.448770 +step:9117 train loss:3.454944 +step:9118 train loss:3.425461 +step:9119 train loss:3.342443 +step:9120 train loss:3.445176 +step:9121 train loss:3.474401 +step:9122 train loss:3.421382 +step:9123 train loss:3.443318 +step:9124 train loss:3.469014 +step:9125 train loss:3.424118 +step:9126 train loss:3.401079 +step:9127 train loss:3.430649 +step:9128 train loss:3.488909 +step:9129 train loss:3.442149 +step:9130 train loss:3.456722 
+step:9131 train loss:3.435584 +step:9132 train loss:3.444283 +step:9133 train loss:3.435807 +step:9134 train loss:3.404371 +step:9135 train loss:3.433811 +step:9136 train loss:3.433242 +step:9137 train loss:3.485388 +step:9138 train loss:3.400807 +step:9139 train loss:3.479611 +step:9140 train loss:3.401168 +step:9141 train loss:3.378502 +step:9142 train loss:3.558367 +step:9143 train loss:3.384501 +step:9144 train loss:3.480004 +step:9145 train loss:3.483368 +step:9146 train loss:3.399010 +step:9147 train loss:3.473351 +step:9148 train loss:3.490599 +step:9149 train loss:3.400974 +step:9150 train loss:3.428725 +step:9151 train loss:3.481988 +step:9152 train loss:3.440639 +step:9153 train loss:3.408358 +step:9154 train loss:3.425149 +step:9155 train loss:3.387305 +step:9156 train loss:3.391604 +step:9157 train loss:3.410370 +step:9158 train loss:3.389166 +step:9159 train loss:3.481430 +step:9160 train loss:3.362795 +step:9161 train loss:3.392080 +step:9162 train loss:3.476980 +step:9163 train loss:3.422831 +step:9164 train loss:3.395394 +step:9165 train loss:3.388331 +step:9166 train loss:3.446110 +step:9167 train loss:3.391997 +step:9168 train loss:3.429455 +step:9169 train loss:3.369254 +step:9170 train loss:3.388687 +step:9171 train loss:3.453915 +step:9172 train loss:3.377611 +step:9173 train loss:3.501316 +step:9174 train loss:3.428280 +step:9175 train loss:3.409193 +step:9176 train loss:3.387107 +step:9177 train loss:3.434885 +step:9178 train loss:3.381076 +step:9179 train loss:3.340660 +step:9180 train loss:3.434570 +step:9181 train loss:3.444537 +step:9182 train loss:3.412825 +step:9183 train loss:3.421636 +step:9184 train loss:3.417787 +step:9185 train loss:3.430355 +step:9186 train loss:3.391198 +step:9187 train loss:3.464622 +step:9188 train loss:3.500903 +step:9189 train loss:3.427603 +step:9190 train loss:3.429753 +step:9191 train loss:3.421525 +step:9192 train loss:3.436539 +step:9193 train loss:3.436892 +step:9194 train loss:3.370878 +step:9195 train loss:3.365122 +step:9196 train loss:3.412671 +step:9197 train loss:3.372105 +step:9198 train loss:3.444432 +step:9199 train loss:3.391450 +step:9200 train loss:3.420898 +step:9201 train loss:3.453950 +step:9202 train loss:3.443007 +step:9203 train loss:3.397681 +step:9204 train loss:3.597006 +step:9205 train loss:3.512931 +step:9206 train loss:3.425177 +step:9207 train loss:3.475511 +step:9208 train loss:3.453211 +step:9209 train loss:3.475871 +step:9210 train loss:3.367127 +step:9211 train loss:3.392534 +step:9212 train loss:3.396079 +step:9213 train loss:3.454770 +step:9214 train loss:3.398841 +step:9215 train loss:3.466964 +step:9216 train loss:3.425975 +step:9217 train loss:3.368883 +step:9218 train loss:3.460031 +step:9219 train loss:3.417761 +step:9220 train loss:3.464095 +step:9221 train loss:3.514593 +step:9222 train loss:3.461595 +step:9223 train loss:3.629944 +step:9224 train loss:3.466362 +step:9225 train loss:3.400389 +step:9226 train loss:3.416991 +step:9227 train loss:3.432128 +step:9228 train loss:3.435586 +step:9229 train loss:3.392018 +step:9230 train loss:3.453248 +step:9231 train loss:3.339047 +step:9232 train loss:3.398206 +step:9233 train loss:3.417393 +step:9234 train loss:3.472286 +step:9235 train loss:3.479942 +step:9236 train loss:3.384684 +step:9237 train loss:3.450632 +step:9238 train loss:3.421489 +step:9239 train loss:3.413160 +step:9240 train loss:3.381061 +step:9241 train loss:3.412534 +step:9242 train loss:3.422856 +step:9243 train loss:3.419848 +step:9244 train loss:3.394623 +step:9245 train 
loss:3.401847 +step:9246 train loss:3.399731 +step:9247 train loss:3.410150 +step:9248 train loss:3.422606 +step:9249 train loss:3.420462 +step:9250 validation loss:3.351114 +step:9250 train loss:3.457083 +step:9251 train loss:3.401272 +step:9252 train loss:3.469476 +step:9253 train loss:3.463349 +step:9254 train loss:3.393150 +step:9255 train loss:3.510325 +step:9256 train loss:3.389509 +step:9257 train loss:3.331113 +step:9258 train loss:3.413763 +step:9259 train loss:3.413878 +step:9260 train loss:3.511008 +step:9261 train loss:3.390562 +step:9262 train loss:3.464804 +step:9263 train loss:3.367863 +step:9264 train loss:3.507373 +step:9265 train loss:3.540757 +step:9266 train loss:3.466575 +step:9267 train loss:3.414207 +step:9268 train loss:3.408178 +step:9269 train loss:3.432592 +step:9270 train loss:3.357111 +step:9271 train loss:3.468152 +step:9272 train loss:3.407887 +step:9273 train loss:3.429612 +step:9274 train loss:3.431629 +step:9275 train loss:3.427455 +step:9276 train loss:3.456357 +step:9277 train loss:3.429031 +step:9278 train loss:3.441714 +step:9279 train loss:3.437980 +step:9280 train loss:3.437400 +step:9281 train loss:3.412439 +step:9282 train loss:3.531490 +step:9283 train loss:3.412496 +step:9284 train loss:3.380935 +step:9285 train loss:3.401730 +step:9286 train loss:3.452271 +step:9287 train loss:3.424666 +step:9288 train loss:3.431560 +step:9289 train loss:3.400340 +step:9290 train loss:3.431208 +step:9291 train loss:3.410475 +step:9292 train loss:3.444017 +step:9293 train loss:3.502478 +step:9294 train loss:3.426324 +step:9295 train loss:3.407863 +step:9296 train loss:3.362613 +step:9297 train loss:3.431910 +step:9298 train loss:3.373596 +step:9299 train loss:3.356960 +step:9300 train loss:3.460817 +step:9301 train loss:3.488326 +step:9302 train loss:3.428357 +step:9303 train loss:3.475886 +step:9304 train loss:3.394781 +step:9305 train loss:3.387850 +step:9306 train loss:3.389972 +step:9307 train loss:3.388311 +step:9308 train loss:3.367870 +step:9309 train loss:3.352462 +step:9310 train loss:3.408104 +step:9311 train loss:3.473125 +step:9312 train loss:3.421714 +step:9313 train loss:3.366472 +step:9314 train loss:3.399914 +step:9315 train loss:3.427739 +step:9316 train loss:3.414237 +step:9317 train loss:3.388594 +step:9318 train loss:3.477487 +step:9319 train loss:3.385833 +step:9320 train loss:3.408396 +step:9321 train loss:3.420996 +step:9322 train loss:3.428823 +step:9323 train loss:3.506864 +step:9324 train loss:3.445508 +step:9325 train loss:3.386433 +step:9326 train loss:3.464796 +step:9327 train loss:3.458881 +step:9328 train loss:3.458675 +step:9329 train loss:3.344857 +step:9330 train loss:3.517894 +step:9331 train loss:3.446888 +step:9332 train loss:3.468338 +step:9333 train loss:3.488280 +step:9334 train loss:3.421800 +step:9335 train loss:3.521107 +step:9336 train loss:3.475827 +step:9337 train loss:3.429067 +step:9338 train loss:3.485890 +step:9339 train loss:3.462945 +step:9340 train loss:3.422314 +step:9341 train loss:3.514923 +step:9342 train loss:3.406150 +step:9343 train loss:3.404651 +step:9344 train loss:3.405176 +step:9345 train loss:3.546772 +step:9346 train loss:3.386909 +step:9347 train loss:3.403154 +step:9348 train loss:3.426105 +step:9349 train loss:3.368703 +step:9350 train loss:3.446965 +step:9351 train loss:3.421145 +step:9352 train loss:3.406829 +step:9353 train loss:3.442048 +step:9354 train loss:3.407799 +step:9355 train loss:3.400787 +step:9356 train loss:3.453254 +step:9357 train loss:3.399519 +step:9358 train loss:3.433549 
+step:9359 train loss:3.377639 +step:9360 train loss:3.393142 +step:9361 train loss:3.394863 +step:9362 train loss:3.381150 +step:9363 train loss:3.446172 +step:9364 train loss:3.422626 +step:9365 train loss:3.429248 +step:9366 train loss:3.424695 +step:9367 train loss:3.438354 +step:9368 train loss:3.413072 +step:9369 train loss:3.411155 +step:9370 train loss:3.415981 +step:9371 train loss:3.441215 +step:9372 train loss:3.404060 +step:9373 train loss:3.386698 +step:9374 train loss:3.426898 +step:9375 train loss:3.437965 +step:9376 train loss:3.375921 +step:9377 train loss:3.450193 +step:9378 train loss:3.450228 +step:9379 train loss:3.478164 +step:9380 train loss:3.410024 +step:9381 train loss:3.414127 +step:9382 train loss:3.394500 +step:9383 train loss:3.385750 +step:9384 train loss:3.359021 +step:9385 train loss:3.433144 +step:9386 train loss:3.460071 +step:9387 train loss:3.439406 +step:9388 train loss:3.376663 +step:9389 train loss:3.391845 +step:9390 train loss:3.433110 +step:9391 train loss:3.439818 +step:9392 train loss:3.402334 +step:9393 train loss:3.395134 +step:9394 train loss:3.423790 +step:9395 train loss:3.418692 +step:9396 train loss:3.567998 +step:9397 train loss:3.455086 +step:9398 train loss:3.473462 +step:9399 train loss:3.427880 +step:9400 train loss:3.430018 +step:9401 train loss:3.417944 +step:9402 train loss:3.422622 +step:9403 train loss:3.356583 +step:9404 train loss:3.431875 +step:9405 train loss:3.393049 +step:9406 train loss:3.447777 +step:9407 train loss:3.384238 +step:9408 train loss:3.325074 +step:9409 train loss:3.388141 +step:9410 train loss:3.471355 +step:9411 train loss:3.430360 +step:9412 train loss:3.462748 +step:9413 train loss:3.476813 +step:9414 train loss:3.415550 +step:9415 train loss:3.410698 +step:9416 train loss:3.421831 +step:9417 train loss:3.380251 +step:9418 train loss:3.406789 +step:9419 train loss:3.375203 +step:9420 train loss:3.390942 +step:9421 train loss:3.442413 +step:9422 train loss:3.394036 +step:9423 train loss:3.458407 +step:9424 train loss:3.399321 +step:9425 train loss:3.438086 +step:9426 train loss:3.442909 +step:9427 train loss:3.415493 +step:9428 train loss:3.522247 +step:9429 train loss:3.410553 +step:9430 train loss:3.368883 +step:9431 train loss:3.456385 +step:9432 train loss:3.420616 +step:9433 train loss:3.461218 +step:9434 train loss:3.413419 +step:9435 train loss:3.436271 +step:9436 train loss:3.409848 +step:9437 train loss:3.420582 +step:9438 train loss:3.412999 +step:9439 train loss:3.414620 +step:9440 train loss:3.410300 +step:9441 train loss:3.413826 +step:9442 train loss:3.358203 +step:9443 train loss:3.410898 +step:9444 train loss:3.475969 +step:9445 train loss:3.408964 +step:9446 train loss:3.383674 +step:9447 train loss:3.454205 +step:9448 train loss:3.388692 +step:9449 train loss:3.409456 +step:9450 train loss:3.454039 +step:9451 train loss:3.369231 +step:9452 train loss:3.418206 +step:9453 train loss:3.400593 +step:9454 train loss:3.458645 +step:9455 train loss:3.441209 +step:9456 train loss:3.368784 +step:9457 train loss:3.414894 +step:9458 train loss:3.401338 +step:9459 train loss:3.397212 +step:9460 train loss:3.434747 +step:9461 train loss:3.463651 +step:9462 train loss:3.416418 +step:9463 train loss:3.442649 +step:9464 train loss:3.397383 +step:9465 train loss:3.489163 +step:9466 train loss:3.438769 +step:9467 train loss:3.460737 +step:9468 train loss:3.409906 +step:9469 train loss:3.394924 +step:9470 train loss:3.393843 +step:9471 train loss:3.434957 +step:9472 train loss:3.459394 +step:9473 train 
loss:3.448357 +step:9474 train loss:3.392095 +step:9475 train loss:3.383553 +step:9476 train loss:3.601422 +step:9477 train loss:3.475533 +step:9478 train loss:3.451368 +step:9479 train loss:3.548655 +step:9480 train loss:3.395217 +step:9481 train loss:3.432683 +step:9482 train loss:3.455261 +step:9483 train loss:3.414261 +step:9484 train loss:3.444253 +step:9485 train loss:3.362797 +step:9486 train loss:3.400429 +step:9487 train loss:3.432863 +step:9488 train loss:3.388139 +step:9489 train loss:3.434790 +step:9490 train loss:3.399872 +step:9491 train loss:3.441979 +step:9492 train loss:3.461350 +step:9493 train loss:3.434547 +step:9494 train loss:3.443569 +step:9495 train loss:3.397650 +step:9496 train loss:3.457300 +step:9497 train loss:3.471819 +step:9498 train loss:3.419703 +step:9499 train loss:3.471427 +step:9500 validation loss:3.354666 total_sharp:3.8369e-03 L1_sharp:1.0830e-02 L2_sharp:6.1304e-03 L3_sharp:2.2764e-03 L4_sharp:9.5749e-04 L5_sharp:1.3948e-03 L6_sharp:1.5840e-03 L7_sharp:2.0445e-03 L8_sharp:1.5263e-03 L9_sharp:1.1273e-03 L10_sharp:8.3582e-04 L11_sharp:6.0017e-04 L12_sharp:6.4145e-04 total_fnorm:1.6739e+00 total_l1_linf:1.3707e+04 total_spectral:1.6739e+00 L1_fnorm:2.8758e-01 L2_fnorm:2.7206e-01 L3_fnorm:2.8150e-01 L4_fnorm:2.9649e-01 L5_fnorm:2.9752e-01 L6_fnorm:3.0041e-01 L7_fnorm:2.9868e-01 L8_fnorm:2.9918e-01 L9_fnorm:3.0076e-01 L10_fnorm:3.0065e-01 L11_fnorm:2.9980e-01 L12_fnorm:3.0050e-01 L1_l1linf:3.9731e-01 L2_l1linf:4.6095e-01 L3_l1linf:4.3957e-01 L4_l1linf:4.1826e-01 L5_l1linf:3.9530e-01 L6_l1linf:3.5740e-01 L7_l1linf:3.5100e-01 L8_l1linf:3.5867e-01 L9_l1linf:3.6936e-01 L10_l1linf:3.9605e-01 L11_l1linf:4.1254e-01 L12_l1linf:4.1026e-01 L1_spectral:8.9305e-03 L2_spectral:1.0386e-02 L3_spectral:9.9051e-03 L4_spectral:9.3984e-03 L5_spectral:8.9015e-03 L6_spectral:8.0793e-03 L7_spectral:7.9180e-03 L8_spectral:8.0907e-03 L9_spectral:8.2705e-03 L10_spectral:8.8796e-03 L11_spectral:9.2745e-03 L12_spectral:9.3545e-03 ip_v_neg_g:5.4200e-03 cos_v_neg_g:2.5383e-03 v_norm:1.6739e+00 g_norm:1.2757e+00 hv_norm:4.6389e-01 cos_v_hv:1.3845e-02 hg_norm:2.5407e+01 cos_g_hg:5.1516e-01 v_par:3.0083e-04 v_perp:1.6739e+00 L1_cos_v_neg_g:6.7719e-03 L1_v_norm:2.8758e-01 L2_cos_v_neg_g:3.8198e-03 L2_v_norm:2.7206e-01 L3_cos_v_neg_g:3.9106e-03 L3_v_norm:2.8150e-01 L4_cos_v_neg_g:3.5145e-03 L4_v_norm:2.9649e-01 L5_cos_v_neg_g:3.4316e-03 L5_v_norm:2.9752e-01 L6_cos_v_neg_g:3.0448e-03 L6_v_norm:3.0041e-01 L7_cos_v_neg_g:4.0871e-03 L7_v_norm:2.9868e-01 L8_cos_v_neg_g:4.0798e-03 L8_v_norm:2.9918e-01 L9_cos_v_neg_g:3.8190e-03 L9_v_norm:3.0076e-01 L10_cos_v_neg_g:3.8320e-03 L10_v_norm:3.0065e-01 L11_cos_v_neg_g:3.1777e-03 L11_v_norm:2.9980e-01 L12_cos_v_neg_g:3.0995e-03 L12_v_norm:3.0050e-01 +step:9500 train loss:3.459481 +step:9501 train loss:3.437947 +step:9502 train loss:3.410865 +step:9503 train loss:3.427891 +step:9504 train loss:3.380169 +step:9505 train loss:3.406425 +step:9506 train loss:3.422602 +step:9507 train loss:3.407773 +step:9508 train loss:3.603189 +step:9509 train loss:3.420052 +step:9510 train loss:3.403707 +step:9511 train loss:3.432231 +step:9512 train loss:3.464025 +step:9513 train loss:3.451990 +step:9514 train loss:3.422218 +step:9515 train loss:3.322478 +step:9516 train loss:3.424261 +step:9517 train loss:3.459060 +step:9518 train loss:3.435300 +step:9519 train loss:3.445048 +step:9520 train loss:3.332947 +step:9521 train loss:3.326234 +step:9522 train loss:3.445227 +step:9523 train loss:3.439897 +step:9524 train loss:3.440097 +step:9525 train loss:3.486436 +step:9526 
train loss:3.502303 +step:9527 train loss:3.458372 +step:9528 train loss:3.391363 +step:9529 train loss:3.437161 +step:9530 train loss:3.480925 +step:9531 train loss:3.387919 +step:9532 train loss:3.438209 +step:9533 train loss:3.410719 +step:9534 train loss:3.491410 +step:9535 train loss:3.413618 +step:9536 train loss:3.393586 +step:9537 train loss:3.339759 +step:9538 train loss:3.358198 +step:9539 train loss:3.429408 +step:9540 train loss:3.346371 +step:9541 train loss:3.406070 +step:9542 train loss:3.535492 +step:9543 train loss:3.434125 +step:9544 train loss:3.472436 +step:9545 train loss:3.405484 +step:9546 train loss:3.431664 +step:9547 train loss:3.474845 +step:9548 train loss:3.417231 +step:9549 train loss:3.382871 +step:9550 train loss:3.411134 +step:9551 train loss:3.407498 +step:9552 train loss:3.433006 +step:9553 train loss:3.426757 +step:9554 train loss:3.472288 +step:9555 train loss:3.472054 +step:9556 train loss:3.385470 +step:9557 train loss:3.407863 +step:9558 train loss:3.469510 +step:9559 train loss:3.473838 +step:9560 train loss:3.391667 +step:9561 train loss:3.415335 +step:9562 train loss:3.453135 +step:9563 train loss:3.400856 +step:9564 train loss:3.435819 +step:9565 train loss:3.414811 +step:9566 train loss:3.384876 +step:9567 train loss:3.454523 +step:9568 train loss:3.422821 +step:9569 train loss:3.464622 +step:9570 train loss:3.359637 +step:9571 train loss:3.432732 +step:9572 train loss:3.378478 +step:9573 train loss:3.408093 +step:9574 train loss:3.385245 +step:9575 train loss:3.458558 +step:9576 train loss:3.347791 +step:9577 train loss:3.396655 +step:9578 train loss:3.402228 +step:9579 train loss:3.401476 +step:9580 train loss:3.464016 +step:9581 train loss:3.457657 +step:9582 train loss:3.420260 +step:9583 train loss:3.453465 +step:9584 train loss:3.387875 +step:9585 train loss:3.406398 +step:9586 train loss:3.464449 +step:9587 train loss:3.426964 +step:9588 train loss:3.415724 +step:9589 train loss:3.472893 +step:9590 train loss:3.438576 +step:9591 train loss:3.402588 +step:9592 train loss:3.423782 +step:9593 train loss:3.425732 +step:9594 train loss:3.441140 +step:9595 train loss:3.418280 +step:9596 train loss:3.502513 +step:9597 train loss:3.410041 +step:9598 train loss:3.370642 +step:9599 train loss:3.377553 +step:9600 train loss:3.462892 +step:9601 train loss:3.381247 +step:9602 train loss:3.466134 +step:9603 train loss:3.456534 +step:9604 train loss:3.338937 +step:9605 train loss:3.427358 +step:9606 train loss:3.485307 +step:9607 train loss:3.400784 +step:9608 train loss:3.410966 +step:9609 train loss:3.422097 +step:9610 train loss:3.463119 +step:9611 train loss:3.393935 +step:9612 train loss:3.407150 +step:9613 train loss:3.441898 +step:9614 train loss:3.413312 +step:9615 train loss:3.604545 +step:9616 train loss:3.414660 +step:9617 train loss:3.397268 +step:9618 train loss:3.354815 +step:9619 train loss:3.419268 +step:9620 train loss:3.472835 +step:9621 train loss:3.397795 +step:9622 train loss:3.413496 +step:9623 train loss:3.449945 +step:9624 train loss:3.436297 +step:9625 train loss:3.452241 +step:9626 train loss:3.422003 +step:9627 train loss:3.501568 +step:9628 train loss:3.468936 +step:9629 train loss:3.382786 +step:9630 train loss:3.440897 +step:9631 train loss:3.425646 +step:9632 train loss:3.396831 +step:9633 train loss:3.440672 +step:9634 train loss:3.508211 +step:9635 train loss:3.407508 +step:9636 train loss:3.358767 +step:9637 train loss:3.486892 +step:9638 train loss:3.368720 +step:9639 train loss:3.341255 +step:9640 train loss:3.464720 
+step:9641 train loss:3.436668 +step:9642 train loss:3.415858 +step:9643 train loss:3.417399 +step:9644 train loss:3.471904 +step:9645 train loss:3.397371 +step:9646 train loss:3.437819 +step:9647 train loss:3.447855 +step:9648 train loss:3.399127 +step:9649 train loss:3.368519 +step:9650 train loss:3.387735 +step:9651 train loss:3.475690 +step:9652 train loss:3.459378 +step:9653 train loss:3.402993 +step:9654 train loss:3.383269 +step:9655 train loss:3.379115 +step:9656 train loss:3.373434 +step:9657 train loss:3.398336 +step:9658 train loss:3.454617 +step:9659 train loss:3.565936 +step:9660 train loss:3.351811 +step:9661 train loss:3.365673 +step:9662 train loss:3.385106 +step:9663 train loss:3.429049 +step:9664 train loss:3.477435 +step:9665 train loss:3.321934 +step:9666 train loss:3.366396 +step:9667 train loss:3.501914 +step:9668 train loss:3.482650 +step:9669 train loss:3.499352 +step:9670 train loss:3.479755 +step:9671 train loss:3.479018 +step:9672 train loss:3.392515 +step:9673 train loss:3.414784 +step:9674 train loss:3.423320 +step:9675 train loss:3.423387 +step:9676 train loss:3.381203 +step:9677 train loss:3.389703 +step:9678 train loss:3.423810 +step:9679 train loss:3.417195 +step:9680 train loss:3.416366 +step:9681 train loss:3.400657 +step:9682 train loss:3.467699 +step:9683 train loss:3.442896 +step:9684 train loss:3.358921 +step:9685 train loss:3.447388 +step:9686 train loss:3.475841 +step:9687 train loss:3.384757 +step:9688 train loss:3.471806 +step:9689 train loss:3.570348 +step:9690 train loss:3.414282 +step:9691 train loss:3.399041 +step:9692 train loss:3.363425 +step:9693 train loss:3.358709 +step:9694 train loss:3.378546 +step:9695 train loss:3.489001 +step:9696 train loss:3.519128 +step:9697 train loss:3.429063 +step:9698 train loss:3.464461 +step:9699 train loss:3.427126 +step:9700 train loss:3.426545 +step:9701 train loss:3.475293 +step:9702 train loss:3.394468 +step:9703 train loss:3.417197 +step:9704 train loss:3.497123 +step:9705 train loss:3.394877 +step:9706 train loss:3.390929 +step:9707 train loss:3.436786 +step:9708 train loss:3.386404 +step:9709 train loss:3.409587 +step:9710 train loss:3.429440 +step:9711 train loss:3.401883 +step:9712 train loss:3.412301 +step:9713 train loss:3.464138 +step:9714 train loss:3.419705 +step:9715 train loss:3.438854 +step:9716 train loss:3.462169 +step:9717 train loss:3.382315 +step:9718 train loss:3.384551 +step:9719 train loss:3.471763 +step:9720 train loss:3.403490 +step:9721 train loss:3.393274 +step:9722 train loss:3.456910 +step:9723 train loss:3.403377 +step:9724 train loss:3.431569 +step:9725 train loss:3.481957 +step:9726 train loss:3.425641 +step:9727 train loss:3.402362 +step:9728 train loss:3.442366 +step:9729 train loss:3.468723 +step:9730 train loss:3.541676 +step:9731 train loss:3.458826 +step:9732 train loss:3.420199 +step:9733 train loss:3.461646 +step:9734 train loss:3.383915 +step:9735 train loss:3.488946 +step:9736 train loss:3.391239 +step:9737 train loss:3.449737 +step:9738 train loss:3.417343 +step:9739 train loss:3.490533 +step:9740 train loss:3.451393 +step:9741 train loss:3.392472 +step:9742 train loss:3.487671 +step:9743 train loss:3.357764 +step:9744 train loss:3.420457 +step:9745 train loss:3.378081 +step:9746 train loss:3.416528 +step:9747 train loss:3.409081 +step:9748 train loss:3.304971 +step:9749 train loss:3.407066 +step:9750 validation loss:3.347878 +step:9750 train loss:3.388173 +step:9751 train loss:3.524770 +step:9752 train loss:3.415079 +step:9753 train loss:3.365382 +step:9754 
train loss:3.398940 +step:9755 train loss:3.396188 +step:9756 train loss:3.398059 +step:9757 train loss:3.365565 +step:9758 train loss:3.355529 +step:9759 train loss:3.405769 +step:9760 train loss:3.347345 +step:9761 train loss:3.389515 +step:9762 train loss:3.386956 +step:9763 train loss:3.408048 +step:9764 train loss:3.394225 +step:9765 train loss:3.355256 +step:9766 train loss:3.445168 +step:9767 train loss:3.401500 +step:9768 train loss:3.416048 +step:9769 train loss:3.370935 +step:9770 train loss:3.366300 +step:9771 train loss:3.417071 +step:9772 train loss:3.426793 +step:9773 train loss:3.407784 +step:9774 train loss:3.376356 +step:9775 train loss:3.467347 +step:9776 train loss:3.465769 +step:9777 train loss:3.349655 +step:9778 train loss:3.362701 +step:9779 train loss:3.363074 +step:9780 train loss:3.363967 +step:9781 train loss:3.385765 +step:9782 train loss:3.461060 +step:9783 train loss:3.373096 +step:9784 train loss:3.395507 +step:9785 train loss:3.392753 +step:9786 train loss:3.423155 +step:9787 train loss:3.453400 +step:9788 train loss:3.376334 +step:9789 train loss:3.387980 +step:9790 train loss:3.346143 +step:9791 train loss:3.397296 +step:9792 train loss:3.412324 +step:9793 train loss:3.429336 +step:9794 train loss:3.405188 +step:9795 train loss:3.409862 +step:9796 train loss:3.394878 +step:9797 train loss:3.389947 +step:9798 train loss:3.408320 +step:9799 train loss:3.408265 +step:9800 train loss:3.480317 +step:9801 train loss:3.404224 +step:9802 train loss:3.461941 +step:9803 train loss:3.320447 +step:9804 train loss:3.414301 +step:9805 train loss:3.422852 +step:9806 train loss:3.392947 +step:9807 train loss:3.365273 +step:9808 train loss:3.283911 +step:9809 train loss:3.464200 +step:9810 train loss:3.420156 +step:9811 train loss:3.404047 +step:9812 train loss:3.381282 +step:9813 train loss:3.457982 +step:9814 train loss:3.450350 +step:9815 train loss:3.352463 +step:9816 train loss:3.359249 +step:9817 train loss:3.388515 +step:9818 train loss:3.415582 +step:9819 train loss:3.388661 +step:9820 train loss:3.451909 +step:9821 train loss:3.433711 +step:9822 train loss:3.408475 +step:9823 train loss:3.468925 +step:9824 train loss:3.370310 +step:9825 train loss:3.458727 +step:9826 train loss:3.452065 +step:9827 train loss:3.459500 +step:9828 train loss:3.376124 +step:9829 train loss:3.382572 +step:9830 train loss:3.371952 +step:9831 train loss:3.427433 +step:9832 train loss:3.439737 +step:9833 train loss:3.351074 +step:9834 train loss:3.406546 +step:9835 train loss:3.369043 +step:9836 train loss:3.438121 +step:9837 train loss:3.405917 +step:9838 train loss:3.442601 +step:9839 train loss:3.420193 +step:9840 train loss:3.390086 +step:9841 train loss:3.396612 +step:9842 train loss:3.456104 +step:9843 train loss:3.450337 +step:9844 train loss:3.397026 +step:9845 train loss:3.429464 +step:9846 train loss:3.362957 +step:9847 train loss:3.493834 +step:9848 train loss:3.415741 +step:9849 train loss:3.442227 +step:9850 train loss:3.361326 +step:9851 train loss:3.413067 +step:9852 train loss:3.379085 +step:9853 train loss:3.400104 +step:9854 train loss:3.416503 +step:9855 train loss:3.357841 +step:9856 train loss:3.364614 +step:9857 train loss:3.351869 +step:9858 train loss:3.416315 +step:9859 train loss:3.336059 +step:9860 train loss:3.576118 +step:9861 train loss:3.401380 +step:9862 train loss:3.366668 +step:9863 train loss:3.349897 +step:9864 train loss:3.472176 +step:9865 train loss:3.353184 +step:9866 train loss:3.391702 +step:9867 train loss:3.390746 +step:9868 train loss:3.448919 
+step:9869 train loss:3.414845 +step:9870 train loss:3.383440 +step:9871 train loss:3.426384 +step:9872 train loss:3.368270 +step:9873 train loss:3.419674 +step:9874 train loss:3.382525 +step:9875 train loss:3.389207 +step:9876 train loss:3.350132 +step:9877 train loss:3.402821 +step:9878 train loss:3.434170 +step:9879 train loss:3.434256 +step:9880 train loss:3.367826 +step:9881 train loss:3.420550 +step:9882 train loss:3.380370 +step:9883 train loss:3.391743 +step:9884 train loss:3.382217 +step:9885 train loss:3.449220 +step:9886 train loss:3.415217 +step:9887 train loss:3.412995 +step:9888 train loss:3.437911 +step:9889 train loss:3.467950 +step:9890 train loss:3.380898 +step:9891 train loss:3.382946 +step:9892 train loss:3.358653 +step:9893 train loss:3.476932 +step:9894 train loss:3.389247 +step:9895 train loss:3.324833 +step:9896 train loss:3.481511 +step:9897 train loss:3.355412 +step:9898 train loss:3.426360 +step:9899 train loss:3.405302 +step:9900 train loss:3.449092 +step:9901 train loss:3.371291 +step:9902 train loss:3.417744 +step:9903 train loss:3.388149 +step:9904 train loss:3.440398 +step:9905 train loss:3.343919 +step:9906 train loss:3.383258 +step:9907 train loss:3.392834 +step:9908 train loss:3.388256 +step:9909 train loss:3.404053 +step:9910 train loss:3.427578 +step:9911 train loss:3.513478 +step:9912 train loss:3.387165 +step:9913 train loss:3.391111 +step:9914 train loss:3.397032 +step:9915 train loss:3.402682 +step:9916 train loss:3.349807 +step:9917 train loss:3.389523 +step:9918 train loss:3.379596 +step:9919 train loss:3.546510 +step:9920 train loss:3.337873 +step:9921 train loss:3.425441 +step:9922 train loss:3.384643 +step:9923 train loss:3.441458 +step:9924 train loss:3.354898 +step:9925 train loss:3.416212 +step:9926 train loss:3.395825 +step:9927 train loss:3.437429 +step:9928 train loss:3.362787 +step:9929 train loss:3.401374 +step:9930 train loss:3.493887 +step:9931 train loss:3.456393 +step:9932 train loss:3.343136 +step:9933 train loss:3.438783 +step:9934 train loss:3.355131 +step:9935 train loss:3.472165 +step:9936 train loss:3.377984 +step:9937 train loss:3.404597 +step:9938 train loss:3.392544 +step:9939 train loss:3.457375 +step:9940 train loss:3.494312 +step:9941 train loss:3.365852 +step:9942 train loss:3.410790 +step:9943 train loss:3.535303 +step:9944 train loss:3.407222 +step:9945 train loss:3.427516 +step:9946 train loss:3.401851 +step:9947 train loss:3.349897 +step:9948 train loss:3.395877 +step:9949 train loss:3.290908 +step:9950 train loss:3.440298 +step:9951 train loss:3.360049 +step:9952 train loss:3.432460 +step:9953 train loss:3.392781 +step:9954 train loss:3.450275 +step:9955 train loss:3.428255 +step:9956 train loss:3.427459 +step:9957 train loss:3.407174 +step:9958 train loss:3.459867 +step:9959 train loss:3.359807 +step:9960 train loss:3.389062 +step:9961 train loss:3.399574 +step:9962 train loss:3.449178 +step:9963 train loss:3.338463 +step:9964 train loss:3.394141 +step:9965 train loss:3.398283 +step:9966 train loss:3.454639 +step:9967 train loss:3.371601 +step:9968 train loss:3.435207 +step:9969 train loss:3.348447 +step:9970 train loss:3.389694 +step:9971 train loss:3.432751 +step:9972 train loss:3.454965 +step:9973 train loss:3.430635 +step:9974 train loss:3.419135 +step:9975 train loss:3.388203 +step:9976 train loss:3.347166 +step:9977 train loss:3.398061 +step:9978 train loss:3.395184 +step:9979 train loss:3.407781 +step:9980 train loss:3.463246 +step:9981 train loss:3.372008 +step:9982 train loss:3.428869 +step:9983 train 
loss:3.348676 +step:9984 train loss:3.413230 +step:9985 train loss:3.356753 +step:9986 train loss:3.410982 +step:9987 train loss:3.454978 +step:9988 train loss:3.466421 +step:9989 train loss:3.362648 +step:9990 train loss:3.501485 +step:9991 train loss:3.351940 +step:9992 train loss:3.421110 +step:9993 train loss:3.414017 +step:9994 train loss:3.526705 +step:9995 train loss:3.467394 +step:9996 train loss:3.380599 +step:9997 train loss:3.423724 +step:9998 train loss:3.473239 +step:9999 train loss:3.441808 +step:10000 validation loss:3.344441 total_sharp:2.5199e-03 L1_sharp:3.9887e-03 L2_sharp:1.9625e-03 L3_sharp:1.2485e-03 L4_sharp:7.6826e-04 L5_sharp:8.3287e-04 L6_sharp:1.2435e-03 L7_sharp:1.4348e-03 L8_sharp:1.3031e-03 L9_sharp:8.9066e-04 L10_sharp:7.4688e-04 L11_sharp:5.9552e-04 L12_sharp:6.4913e-04 total_fnorm:1.6760e+00 total_l1_linf:1.3725e+04 total_spectral:1.6760e+00 L1_fnorm:2.8922e-01 L2_fnorm:2.7585e-01 L3_fnorm:2.8328e-01 L4_fnorm:2.9603e-01 L5_fnorm:2.9780e-01 L6_fnorm:2.9996e-01 L7_fnorm:2.9904e-01 L8_fnorm:2.9865e-01 L9_fnorm:2.9993e-01 L10_fnorm:2.9989e-01 L11_fnorm:2.9879e-01 L12_fnorm:2.9989e-01 L1_l1linf:4.1436e-01 L2_l1linf:4.5409e-01 L3_l1linf:4.5339e-01 L4_l1linf:4.3051e-01 L5_l1linf:3.9804e-01 L6_l1linf:3.6067e-01 L7_l1linf:3.4777e-01 L8_l1linf:3.4543e-01 L9_l1linf:3.7332e-01 L10_l1linf:4.1640e-01 L11_l1linf:4.2063e-01 L12_l1linf:4.0701e-01 L1_spectral:9.2841e-03 L2_spectral:1.0331e-02 L3_spectral:1.0191e-02 L4_spectral:9.8243e-03 L5_spectral:9.0146e-03 L6_spectral:8.1220e-03 L7_spectral:7.8585e-03 L8_spectral:7.7850e-03 L9_spectral:8.3979e-03 L10_spectral:9.3203e-03 L11_spectral:9.4125e-03 L12_spectral:9.1106e-03 ip_v_neg_g:2.6325e-03 cos_v_neg_g:9.0108e-04 v_norm:1.6760e+00 g_norm:1.7431e+00 hv_norm:2.9423e-01 cos_v_hv:1.4354e-02 hg_norm:1.0815e+02 cos_g_hg:6.7960e-01 v_par:8.9456e-05 v_perp:1.6760e+00 L1_cos_v_neg_g:8.9044e-04 L1_v_norm:2.8922e-01 L2_cos_v_neg_g:2.7313e-03 L2_v_norm:2.7585e-01 L3_cos_v_neg_g:2.8832e-04 L3_v_norm:2.8328e-01 L4_cos_v_neg_g:8.9955e-05 L4_v_norm:2.9603e-01 L5_cos_v_neg_g:7.9866e-04 L5_v_norm:2.9780e-01 L6_cos_v_neg_g:1.8963e-03 L6_v_norm:2.9996e-01 L7_cos_v_neg_g:3.1689e-03 L7_v_norm:2.9904e-01 L8_cos_v_neg_g:3.4939e-03 L8_v_norm:2.9865e-01 L9_cos_v_neg_g:2.4033e-03 L9_v_norm:2.9993e-01 L10_cos_v_neg_g:2.8529e-03 L10_v_norm:2.9989e-01 L11_cos_v_neg_g:1.8697e-03 L11_v_norm:2.9879e-01 L12_cos_v_neg_g:2.0559e-03 L12_v_norm:2.9989e-01 diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/config.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..1ec80f9ab3d58753ecdcdacca9e22b0b113458e1 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/muon_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 1, 
+ "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "4b87bcbf-3f9f-4d4c-9fa3-00d88d2a39ac", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..1366dba24dc7dba7ff984ee4e20e2334916ee781 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.441943645477295, + "total_l1_linf_norm": 20873.53515625, + "total_spectral_norm": 2.4419431686401367, + "layer_1_update_fnorm": 0.5982207655906677, + "layer_1_max_l1_linf_norm": 0.42037516832351685, + "layer_1_max_spectral_norm": 0.012056326493620872, + "layer_2_update_fnorm": 0.5569610595703125, + "layer_2_max_l1_linf_norm": 0.4066760540008545, + "layer_2_max_spectral_norm": 0.012044503353536129, + "layer_3_update_fnorm": 0.5577137470245361, + "layer_3_max_l1_linf_norm": 0.39989715814590454, + "layer_3_max_spectral_norm": 0.0120528694242239, + "layer_4_update_fnorm": 0.5811421871185303, + "layer_4_max_l1_linf_norm": 0.40194249153137207, + "layer_4_max_spectral_norm": 0.012044152244925499, + "layer_5_update_fnorm": 0.5938519835472107, + "layer_5_max_l1_linf_norm": 0.40624532103538513, + "layer_5_max_spectral_norm": 0.012043314054608345, + "layer_6_update_fnorm": 0.6040692329406738, + "layer_6_max_l1_linf_norm": 0.41186287999153137, + "layer_6_max_spectral_norm": 0.012048753909766674, + "layer_7_update_fnorm": 0.6030635833740234, + "layer_7_max_l1_linf_norm": 0.4146009683609009, + "layer_7_max_spectral_norm": 0.012050164863467216, + "layer_8_update_fnorm": 0.6037013530731201, + "layer_8_max_l1_linf_norm": 0.4116874635219574, + "layer_8_max_spectral_norm": 0.012045162729918957, + "layer_9_update_fnorm": 0.603941798210144, + "layer_9_max_l1_linf_norm": 0.40834638476371765, + "layer_9_max_spectral_norm": 0.012047918513417244, + "layer_10_update_fnorm": 0.6034194231033325, + "layer_10_max_l1_linf_norm": 0.4046260714530945, + "layer_10_max_spectral_norm": 0.012047518976032734, + "layer_11_update_fnorm": 0.6024457216262817, + "layer_11_max_l1_linf_norm": 0.40753304958343506, + "layer_11_max_spectral_norm": 0.012044500559568405, + "layer_12_update_fnorm": 0.6014623045921326, + "layer_12_max_l1_linf_norm": 0.4006231129169464, + "layer_12_max_spectral_norm": 0.01204380951821804, + "total_sharpness": 0.006050743628293276, + "ip_v_neg_g": 0.019536633044481277, + "cos_v_neg_g": 0.00825574155896902, + "v_norm": 2.441943645477295, + "g_norm": 0.9690763354301453, + "hv_norm": 0.5720171928405762, + "cos_v_hv": 0.025830646976828575, + "hg_norm": 8.254555702209473, + "cos_g_hg": 0.5377504229545593, + "v_parallel_norm": 0.0008581900619901717, + "v_perp_norm": 2.441943407058716, + "layer_1_v_norm": 0.5982207655906677, + "layer_1_cos_v_neg_g": 0.015945104882121086, + "layer_2_v_norm": 0.5569610595703125, + "layer_2_cos_v_neg_g": 0.021426277235150337, + "layer_3_v_norm": 0.5577137470245361, + "layer_3_cos_v_neg_g": 0.013310879468917847, + "layer_4_v_norm": 0.5811421871185303, + 
"layer_4_cos_v_neg_g": 0.008261003531515598, + "layer_5_v_norm": 0.5938519835472107, + "layer_5_cos_v_neg_g": 0.00912740919739008, + "layer_6_v_norm": 0.6040692329406738, + "layer_6_cos_v_neg_g": 0.008618269115686417, + "layer_7_v_norm": 0.6030635833740234, + "layer_7_cos_v_neg_g": 0.009500826708972454, + "layer_8_v_norm": 0.6037013530731201, + "layer_8_cos_v_neg_g": 0.008332762867212296, + "layer_9_v_norm": 0.603941798210144, + "layer_9_cos_v_neg_g": 0.0079946992918849, + "layer_10_v_norm": 0.6034194231033325, + "layer_10_cos_v_neg_g": 0.007472442463040352, + "layer_11_v_norm": 0.6024457216262817, + "layer_11_cos_v_neg_g": 0.0066243563778698444, + "layer_12_v_norm": 0.6014623045921326, + "layer_12_cos_v_neg_g": 0.006762363016605377, + "layer_1_sharpness": 0.010322081856429577, + "layer_2_sharpness": 0.002725347178056836, + "layer_3_sharpness": 0.0020494002383202314, + "layer_4_sharpness": 0.0007149361772462726, + "layer_5_sharpness": 0.0008164785685949028, + "layer_6_sharpness": 0.0009270109585486352, + "layer_7_sharpness": 0.0011681258911266923, + "layer_8_sharpness": 0.0008750042761676013, + "layer_9_sharpness": 0.0007499632192775607, + "layer_10_sharpness": 0.0005592852830886841, + "layer_11_sharpness": 0.000420823140302673, + "layer_12_sharpness": 0.0005053699715062976 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_10000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..6e6cbf52dbf64a4a282ece9a62d5906df3f0995b --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.419337511062622, + "total_l1_linf_norm": 20708.7265625, + "total_spectral_norm": 2.419337511062622, + "layer_1_update_fnorm": 0.5705554485321045, + "layer_1_max_l1_linf_norm": 0.4190414249897003, + "layer_1_max_spectral_norm": 0.012053393758833408, + "layer_2_update_fnorm": 0.5212397575378418, + "layer_2_max_l1_linf_norm": 0.4713152050971985, + "layer_2_max_spectral_norm": 0.012038574554026127, + "layer_3_update_fnorm": 0.5542007088661194, + "layer_3_max_l1_linf_norm": 0.4507489800453186, + "layer_3_max_spectral_norm": 0.01204498577862978, + "layer_4_update_fnorm": 0.585773766040802, + "layer_4_max_l1_linf_norm": 0.43740591406822205, + "layer_4_max_spectral_norm": 0.01204433012753725, + "layer_5_update_fnorm": 0.5918545722961426, + "layer_5_max_l1_linf_norm": 0.41061657667160034, + "layer_5_max_spectral_norm": 0.012045033276081085, + "layer_6_update_fnorm": 0.5994861721992493, + "layer_6_max_l1_linf_norm": 0.41604021191596985, + "layer_6_max_spectral_norm": 0.012063625268638134, + "layer_7_update_fnorm": 0.5935251116752625, + "layer_7_max_l1_linf_norm": 0.41069668531417847, + "layer_7_max_spectral_norm": 0.012046048417687416, + "layer_8_update_fnorm": 0.5919197201728821, + "layer_8_max_l1_linf_norm": 0.41195833683013916, + "layer_8_max_spectral_norm": 0.012042834423482418, + "layer_9_update_fnorm": 0.5921094417572021, + "layer_9_max_l1_linf_norm": 0.40397340059280396, + "layer_9_max_spectral_norm": 0.012043791823089123, + "layer_10_update_fnorm": 0.5990347266197205, + "layer_10_max_l1_linf_norm": 0.41707170009613037, + "layer_10_max_spectral_norm": 0.012043923139572144, + "layer_11_update_fnorm": 0.5951904058456421, + "layer_11_max_l1_linf_norm": 0.43735653162002563, + "layer_11_max_spectral_norm": 
0.012047059834003448, + "layer_12_update_fnorm": 0.6014290452003479, + "layer_12_max_l1_linf_norm": 0.4579993486404419, + "layer_12_max_spectral_norm": 0.012049110606312752, + "total_sharpness": 0.00172130495775491, + "ip_v_neg_g": 0.003359792288392782, + "cos_v_neg_g": 0.0009430105565115809, + "v_norm": 2.419337511062622, + "g_norm": 1.4726495742797852, + "hv_norm": 0.45219382643699646, + "cos_v_hv": 0.009209363721311092, + "hg_norm": 49.018856048583984, + "cos_g_hg": 0.5798954367637634, + "v_parallel_norm": 0.00024226527602877468, + "v_perp_norm": 2.419337511062622, + "layer_1_v_norm": 0.5705554485321045, + "layer_1_cos_v_neg_g": 0.0009148204117082059, + "layer_2_v_norm": 0.5212397575378418, + "layer_2_cos_v_neg_g": 0.004612368065863848, + "layer_3_v_norm": 0.5542007088661194, + "layer_3_cos_v_neg_g": 0.0012014656094834208, + "layer_4_v_norm": 0.585773766040802, + "layer_4_cos_v_neg_g": 0.0007513929158449173, + "layer_5_v_norm": 0.5918545722961426, + "layer_5_cos_v_neg_g": 0.0004020284104626626, + "layer_6_v_norm": 0.5994861721992493, + "layer_6_cos_v_neg_g": 0.0028220752719789743, + "layer_7_v_norm": 0.5935251116752625, + "layer_7_cos_v_neg_g": 0.001939226407557726, + "layer_8_v_norm": 0.5919197201728821, + "layer_8_cos_v_neg_g": 0.0023687644861638546, + "layer_9_v_norm": 0.5921094417572021, + "layer_9_cos_v_neg_g": 0.0016926754033192992, + "layer_10_v_norm": 0.5990347266197205, + "layer_10_cos_v_neg_g": 0.000835866027045995, + "layer_11_v_norm": 0.5951904058456421, + "layer_11_cos_v_neg_g": 0.0007594825583510101, + "layer_12_v_norm": 0.6014290452003479, + "layer_12_cos_v_neg_g": 0.002543605864048004, + "layer_1_sharpness": 0.003085949458181858, + "layer_2_sharpness": 0.0011253991397097707, + "layer_3_sharpness": 0.0012807281455025077, + "layer_4_sharpness": 0.0002121733414242044, + "layer_5_sharpness": 0.0002675975556485355, + "layer_6_sharpness": 0.0003150966949760914, + "layer_7_sharpness": 0.00047357683070003986, + "layer_8_sharpness": 0.00035575879155658185, + "layer_9_sharpness": 0.0004131734895054251, + "layer_10_sharpness": 0.00015388701285701245, + "layer_11_sharpness": 0.00013071359717287123, + "layer_12_sharpness": 0.00035039064823649824 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..57d3396065c4b25c3212d965ceb2528bc3dad3c5 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.4434566497802734, + "total_l1_linf_norm": 20905.8828125, + "total_spectral_norm": 2.4434566497802734, + "layer_1_update_fnorm": 0.5844366550445557, + "layer_1_max_l1_linf_norm": 0.4178054928779602, + "layer_1_max_spectral_norm": 0.012053708545863628, + "layer_2_update_fnorm": 0.551864504814148, + "layer_2_max_l1_linf_norm": 0.400747686624527, + "layer_2_max_spectral_norm": 0.012043062597513199, + "layer_3_update_fnorm": 0.5573415160179138, + "layer_3_max_l1_linf_norm": 0.39799386262893677, + "layer_3_max_spectral_norm": 0.01205423753708601, + "layer_4_update_fnorm": 0.5812740325927734, + "layer_4_max_l1_linf_norm": 0.4138936996459961, + "layer_4_max_spectral_norm": 0.012043938972055912, + "layer_5_update_fnorm": 0.5970751643180847, + "layer_5_max_l1_linf_norm": 0.4197348952293396, + "layer_5_max_spectral_norm": 0.012058382853865623, + 
"layer_6_update_fnorm": 0.6044831275939941, + "layer_6_max_l1_linf_norm": 0.4171249270439148, + "layer_6_max_spectral_norm": 0.012044349685311317, + "layer_7_update_fnorm": 0.6033587455749512, + "layer_7_max_l1_linf_norm": 0.4177984893321991, + "layer_7_max_spectral_norm": 0.012045043520629406, + "layer_8_update_fnorm": 0.6051233410835266, + "layer_8_max_l1_linf_norm": 0.41741064190864563, + "layer_8_max_spectral_norm": 0.012042977847158909, + "layer_9_update_fnorm": 0.6059123873710632, + "layer_9_max_l1_linf_norm": 0.4118971824645996, + "layer_9_max_spectral_norm": 0.012048913165926933, + "layer_10_update_fnorm": 0.605633020401001, + "layer_10_max_l1_linf_norm": 0.4095132350921631, + "layer_10_max_spectral_norm": 0.012041750364005566, + "layer_11_update_fnorm": 0.6035506129264832, + "layer_11_max_l1_linf_norm": 0.4074586033821106, + "layer_11_max_spectral_norm": 0.012042498216032982, + "layer_12_update_fnorm": 0.6019887328147888, + "layer_12_max_l1_linf_norm": 0.3984560966491699, + "layer_12_max_spectral_norm": 0.012047584168612957, + "total_sharpness": 0.004797876346856356, + "ip_v_neg_g": 0.01334784273058176, + "cos_v_neg_g": 0.005206580273807049, + "v_norm": 2.4434566497802734, + "g_norm": 1.0491893291473389, + "hv_norm": 0.6299502849578857, + "cos_v_hv": 0.018610041588544846, + "hg_norm": 15.318938255310059, + "cos_g_hg": 0.564041018486023, + "v_parallel_norm": 0.000889540882781148, + "v_perp_norm": 2.4434564113616943, + "layer_1_v_norm": 0.5844366550445557, + "layer_1_cos_v_neg_g": 0.007694230414927006, + "layer_2_v_norm": 0.551864504814148, + "layer_2_cos_v_neg_g": 0.008728964254260063, + "layer_3_v_norm": 0.5573415160179138, + "layer_3_cos_v_neg_g": 0.008325346745550632, + "layer_4_v_norm": 0.5812740325927734, + "layer_4_cos_v_neg_g": 0.008020630106329918, + "layer_5_v_norm": 0.5970751643180847, + "layer_5_cos_v_neg_g": 0.007891344837844372, + "layer_6_v_norm": 0.6044831275939941, + "layer_6_cos_v_neg_g": 0.008115970529615879, + "layer_7_v_norm": 0.6033587455749512, + "layer_7_cos_v_neg_g": 0.007651620078831911, + "layer_8_v_norm": 0.6051234006881714, + "layer_8_cos_v_neg_g": 0.00747267110273242, + "layer_9_v_norm": 0.6059123873710632, + "layer_9_cos_v_neg_g": 0.007818239741027355, + "layer_10_v_norm": 0.605633020401001, + "layer_10_cos_v_neg_g": 0.006687027867883444, + "layer_11_v_norm": 0.6035506129264832, + "layer_11_cos_v_neg_g": 0.005357426591217518, + "layer_12_v_norm": 0.6019887328147888, + "layer_12_cos_v_neg_g": 0.004226944409310818, + "layer_1_sharpness": 0.00588673772290349, + "layer_2_sharpness": 0.0008918477687984705, + "layer_3_sharpness": 0.0015729472506791353, + "layer_4_sharpness": 0.0005037725204601884, + "layer_5_sharpness": 0.0007508942508138716, + "layer_6_sharpness": 0.000944059866014868, + "layer_7_sharpness": 0.001200609840452671, + "layer_8_sharpness": 0.0007799127488397062, + "layer_9_sharpness": 0.000727023696526885, + "layer_10_sharpness": 0.0004603745474014431, + "layer_11_sharpness": 0.0003943649062421173, + "layer_12_sharpness": 0.00036250887205824256 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..b296f9e9b2c450f321b449ee5d1f03aa7e9ac62b --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 
2.4453482627868652, + "total_l1_linf_norm": 20933.29296875, + "total_spectral_norm": 2.445348024368286, + "layer_1_update_fnorm": 0.5657727718353271, + "layer_1_max_l1_linf_norm": 0.4168676435947418, + "layer_1_max_spectral_norm": 0.012057512067258358, + "layer_2_update_fnorm": 0.5437425971031189, + "layer_2_max_l1_linf_norm": 0.39284026622772217, + "layer_2_max_spectral_norm": 0.012040038593113422, + "layer_3_update_fnorm": 0.5618455410003662, + "layer_3_max_l1_linf_norm": 0.40426987409591675, + "layer_3_max_spectral_norm": 0.012050804682075977, + "layer_4_update_fnorm": 0.5866219401359558, + "layer_4_max_l1_linf_norm": 0.41964399814605713, + "layer_4_max_spectral_norm": 0.012049446813762188, + "layer_5_update_fnorm": 0.5997005701065063, + "layer_5_max_l1_linf_norm": 0.42091894149780273, + "layer_5_max_spectral_norm": 0.012050908990204334, + "layer_6_update_fnorm": 0.6054688692092896, + "layer_6_max_l1_linf_norm": 0.42308294773101807, + "layer_6_max_spectral_norm": 0.012044425122439861, + "layer_7_update_fnorm": 0.6041749715805054, + "layer_7_max_l1_linf_norm": 0.4182612895965576, + "layer_7_max_spectral_norm": 0.012042127549648285, + "layer_8_update_fnorm": 0.6054893732070923, + "layer_8_max_l1_linf_norm": 0.41975945234298706, + "layer_8_max_spectral_norm": 0.012040949426591396, + "layer_9_update_fnorm": 0.6062524318695068, + "layer_9_max_l1_linf_norm": 0.4131114184856415, + "layer_9_max_spectral_norm": 0.012045356445014477, + "layer_10_update_fnorm": 0.6067135334014893, + "layer_10_max_l1_linf_norm": 0.4090201258659363, + "layer_10_max_spectral_norm": 0.01204275619238615, + "layer_11_update_fnorm": 0.604987382888794, + "layer_11_max_l1_linf_norm": 0.4048396646976471, + "layer_11_max_spectral_norm": 0.01204700767993927, + "layer_12_update_fnorm": 0.6032988429069519, + "layer_12_max_l1_linf_norm": 0.3983832597732544, + "layer_12_max_spectral_norm": 0.012047932483255863, + "total_sharpness": 0.003842824138700962, + "ip_v_neg_g": 0.0108422189950943, + "cos_v_neg_g": 0.003368161153048277, + "v_norm": 2.4453482627868652, + "g_norm": 1.316390037536621, + "hv_norm": 0.733360767364502, + "cos_v_hv": 0.012813670560717583, + "hg_norm": 55.78310012817383, + "cos_g_hg": 0.5992521643638611, + "v_parallel_norm": 0.0009687276324257255, + "v_perp_norm": 2.445348024368286, + "layer_1_v_norm": 0.5657727718353271, + "layer_1_cos_v_neg_g": 0.005337289534509182, + "layer_2_v_norm": 0.5437425971031189, + "layer_2_cos_v_neg_g": 0.013486744835972786, + "layer_3_v_norm": 0.5618455410003662, + "layer_3_cos_v_neg_g": 0.010917634703218937, + "layer_4_v_norm": 0.5866219401359558, + "layer_4_cos_v_neg_g": 0.004994918126612902, + "layer_5_v_norm": 0.5997005701065063, + "layer_5_cos_v_neg_g": 0.0040383958257734776, + "layer_6_v_norm": 0.6054688692092896, + "layer_6_cos_v_neg_g": 0.004386619199067354, + "layer_7_v_norm": 0.6041749715805054, + "layer_7_cos_v_neg_g": 0.004631964024156332, + "layer_8_v_norm": 0.6054893732070923, + "layer_8_cos_v_neg_g": 0.005217049270868301, + "layer_9_v_norm": 0.6062524318695068, + "layer_9_cos_v_neg_g": 0.005254250951111317, + "layer_10_v_norm": 0.6067135334014893, + "layer_10_cos_v_neg_g": 0.0040833111852407455, + "layer_11_v_norm": 0.604987382888794, + "layer_11_cos_v_neg_g": 0.0026085898280143738, + "layer_12_v_norm": 0.6032988429069519, + "layer_12_cos_v_neg_g": 0.0018061341252177954, + "layer_1_sharpness": 0.00936721358448267, + "layer_2_sharpness": 0.001949771773070097, + "layer_3_sharpness": 0.0015377301024273038, + "layer_4_sharpness": 0.00039618430309928954, + 
"layer_5_sharpness": 0.000488678808324039, + "layer_6_sharpness": 0.00060622877208516, + "layer_7_sharpness": 0.0008127131150104105, + "layer_8_sharpness": 0.0006159317563287914, + "layer_9_sharpness": 0.0005794328753836453, + "layer_10_sharpness": 0.0003322807315271348, + "layer_11_sharpness": 0.00027194691938348114, + "layer_12_sharpness": 0.0003585427184589207 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..50f5754c57236da1056db0334fbf46ccaa90b416 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.4349687099456787, + "total_l1_linf_norm": 20820.01953125, + "total_spectral_norm": 2.434968948364258, + "layer_1_update_fnorm": 0.5679044723510742, + "layer_1_max_l1_linf_norm": 0.4196780025959015, + "layer_1_max_spectral_norm": 0.012064917013049126, + "layer_2_update_fnorm": 0.536899745464325, + "layer_2_max_l1_linf_norm": 0.3992200493812561, + "layer_2_max_spectral_norm": 0.012038647197186947, + "layer_3_update_fnorm": 0.5579364895820618, + "layer_3_max_l1_linf_norm": 0.40476030111312866, + "layer_3_max_spectral_norm": 0.012049246579408646, + "layer_4_update_fnorm": 0.5865669846534729, + "layer_4_max_l1_linf_norm": 0.41901832818984985, + "layer_4_max_spectral_norm": 0.01204492524266243, + "layer_5_update_fnorm": 0.597160279750824, + "layer_5_max_l1_linf_norm": 0.4204425811767578, + "layer_5_max_spectral_norm": 0.0120610436424613, + "layer_6_update_fnorm": 0.6034600734710693, + "layer_6_max_l1_linf_norm": 0.42028510570526123, + "layer_6_max_spectral_norm": 0.012048684060573578, + "layer_7_update_fnorm": 0.6022236347198486, + "layer_7_max_l1_linf_norm": 0.41942521929740906, + "layer_7_max_spectral_norm": 0.012045130133628845, + "layer_8_update_fnorm": 0.6039765477180481, + "layer_8_max_l1_linf_norm": 0.4167379140853882, + "layer_8_max_spectral_norm": 0.012043769471347332, + "layer_9_update_fnorm": 0.6048543453216553, + "layer_9_max_l1_linf_norm": 0.41001152992248535, + "layer_9_max_spectral_norm": 0.01204686425626278, + "layer_10_update_fnorm": 0.6059687733650208, + "layer_10_max_l1_linf_norm": 0.4111696481704712, + "layer_10_max_spectral_norm": 0.012042814865708351, + "layer_11_update_fnorm": 0.6034401655197144, + "layer_11_max_l1_linf_norm": 0.4033084511756897, + "layer_11_max_spectral_norm": 0.012043905444443226, + "layer_12_update_fnorm": 0.6029898524284363, + "layer_12_max_l1_linf_norm": 0.39718538522720337, + "layer_12_max_spectral_norm": 0.01204618252813816, + "total_sharpness": 0.004430905915796757, + "ip_v_neg_g": 0.010789543390274048, + "cos_v_neg_g": 0.003628515638411045, + "v_norm": 2.4349687099456787, + "g_norm": 1.2211828231811523, + "hv_norm": 0.8164884448051453, + "cos_v_hv": 0.0132140489295125, + "hg_norm": 40.68715286254883, + "cos_g_hg": 0.6158530712127686, + "v_parallel_norm": 0.0011753087164834142, + "v_perp_norm": 2.4349684715270996, + "layer_1_v_norm": 0.5679044723510742, + "layer_1_cos_v_neg_g": 0.00559503398835659, + "layer_2_v_norm": 0.536899745464325, + "layer_2_cos_v_neg_g": 0.007788308430463076, + "layer_3_v_norm": 0.5579364895820618, + "layer_3_cos_v_neg_g": 0.006637046579271555, + "layer_4_v_norm": 0.5865669846534729, + "layer_4_cos_v_neg_g": 0.005007637664675713, + "layer_5_v_norm": 
0.597160279750824, + "layer_5_cos_v_neg_g": 0.005885522812604904, + "layer_6_v_norm": 0.6034600734710693, + "layer_6_cos_v_neg_g": 0.006694467272609472, + "layer_7_v_norm": 0.6022236347198486, + "layer_7_cos_v_neg_g": 0.007154607214033604, + "layer_8_v_norm": 0.6039765477180481, + "layer_8_cos_v_neg_g": 0.0064122602343559265, + "layer_9_v_norm": 0.6048543453216553, + "layer_9_cos_v_neg_g": 0.0055609033443033695, + "layer_10_v_norm": 0.6059687733650208, + "layer_10_cos_v_neg_g": 0.004193246830254793, + "layer_11_v_norm": 0.6034401655197144, + "layer_11_cos_v_neg_g": 0.003961588256061077, + "layer_12_v_norm": 0.6029898524284363, + "layer_12_cos_v_neg_g": 0.0033898442052304745, + "layer_1_sharpness": 0.011656411923468113, + "layer_2_sharpness": 0.0019292649812996387, + "layer_3_sharpness": 0.0010117770871147513, + "layer_4_sharpness": 0.000327014597132802, + "layer_5_sharpness": 0.0004765292105730623, + "layer_6_sharpness": 0.0006154245347715914, + "layer_7_sharpness": 0.0007575353374704719, + "layer_8_sharpness": 0.0006946701323613524, + "layer_9_sharpness": 0.0005902625271119177, + "layer_10_sharpness": 0.0003568332176655531, + "layer_11_sharpness": 0.00030520057771354914, + "layer_12_sharpness": 0.0003512070106808096 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..406d365ea8bdf5668efbb53a0a8416c9ad5ca6c5 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.4313607215881348, + "total_l1_linf_norm": 20791.046875, + "total_spectral_norm": 2.4313604831695557, + "layer_1_update_fnorm": 0.5677780508995056, + "layer_1_max_l1_linf_norm": 0.41556334495544434, + "layer_1_max_spectral_norm": 0.012054613791406155, + "layer_2_update_fnorm": 0.5340173244476318, + "layer_2_max_l1_linf_norm": 0.45336630940437317, + "layer_2_max_spectral_norm": 0.012038192711770535, + "layer_3_update_fnorm": 0.5594301819801331, + "layer_3_max_l1_linf_norm": 0.4515935480594635, + "layer_3_max_spectral_norm": 0.012048485688865185, + "layer_4_update_fnorm": 0.586465060710907, + "layer_4_max_l1_linf_norm": 0.4413262605667114, + "layer_4_max_spectral_norm": 0.012047901749610901, + "layer_5_update_fnorm": 0.5960416197776794, + "layer_5_max_l1_linf_norm": 0.42063307762145996, + "layer_5_max_spectral_norm": 0.012067850679159164, + "layer_6_update_fnorm": 0.6017048358917236, + "layer_6_max_l1_linf_norm": 0.4190506935119629, + "layer_6_max_spectral_norm": 0.01204739324748516, + "layer_7_update_fnorm": 0.6010231375694275, + "layer_7_max_l1_linf_norm": 0.41489213705062866, + "layer_7_max_spectral_norm": 0.012044304050505161, + "layer_8_update_fnorm": 0.602375864982605, + "layer_8_max_l1_linf_norm": 0.4146728813648224, + "layer_8_max_spectral_norm": 0.012043259106576443, + "layer_9_update_fnorm": 0.6034315824508667, + "layer_9_max_l1_linf_norm": 0.41031572222709656, + "layer_9_max_spectral_norm": 0.012045903131365776, + "layer_10_update_fnorm": 0.6036614775657654, + "layer_10_max_l1_linf_norm": 0.40877947211265564, + "layer_10_max_spectral_norm": 0.012045292183756828, + "layer_11_update_fnorm": 0.6010358929634094, + "layer_11_max_l1_linf_norm": 0.4205423593521118, + "layer_11_max_spectral_norm": 0.012044617906212807, + "layer_12_update_fnorm": 0.6015504598617554, + 
"layer_12_max_l1_linf_norm": 0.4350413680076599, + "layer_12_max_spectral_norm": 0.012047667987644672, + "total_sharpness": 0.0026820432394742966, + "ip_v_neg_g": 0.009037829004228115, + "cos_v_neg_g": 0.0032110922038555145, + "v_norm": 2.4313607215881348, + "g_norm": 1.1576091051101685, + "hv_norm": 0.4332215487957001, + "cos_v_hv": 0.015052378177642822, + "hg_norm": 27.409900665283203, + "cos_g_hg": 0.6448336839675903, + "v_parallel_norm": 0.001122667221352458, + "v_perp_norm": 2.4313604831695557, + "layer_1_v_norm": 0.5677780508995056, + "layer_1_cos_v_neg_g": 0.005139129236340523, + "layer_2_v_norm": 0.5340173244476318, + "layer_2_cos_v_neg_g": 0.006805961485952139, + "layer_3_v_norm": 0.5594301819801331, + "layer_3_cos_v_neg_g": 0.004866764880716801, + "layer_4_v_norm": 0.586465060710907, + "layer_4_cos_v_neg_g": 0.00459000701084733, + "layer_5_v_norm": 0.5960416197776794, + "layer_5_cos_v_neg_g": 0.004843251314014196, + "layer_6_v_norm": 0.6017048358917236, + "layer_6_cos_v_neg_g": 0.005268794950097799, + "layer_7_v_norm": 0.6010231375694275, + "layer_7_cos_v_neg_g": 0.005679120775312185, + "layer_8_v_norm": 0.6023758053779602, + "layer_8_cos_v_neg_g": 0.005764295347034931, + "layer_9_v_norm": 0.6034315824508667, + "layer_9_cos_v_neg_g": 0.005909078288823366, + "layer_10_v_norm": 0.6036614775657654, + "layer_10_cos_v_neg_g": 0.0045285155065357685, + "layer_11_v_norm": 0.6010358929634094, + "layer_11_cos_v_neg_g": 0.0036319673527032137, + "layer_12_v_norm": 0.6015504598617554, + "layer_12_cos_v_neg_g": 0.004424213897436857, + "layer_1_sharpness": 0.005661888513714075, + "layer_2_sharpness": 0.0007242254796437919, + "layer_3_sharpness": 0.0005975045496597886, + "layer_4_sharpness": 0.0002700572076719254, + "layer_5_sharpness": 0.0004668642650358379, + "layer_6_sharpness": 0.0005353947053663433, + "layer_7_sharpness": 0.00048348188283853233, + "layer_8_sharpness": 0.0004365757922641933, + "layer_9_sharpness": 0.0004889425472356379, + "layer_10_sharpness": 0.0003288321604486555, + "layer_11_sharpness": 0.00029738221201114357, + "layer_12_sharpness": 0.00033731063012965024 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..cb94d21fa3bd0c848e414e266810a1a4a52d7b1c --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.434788703918457, + "total_l1_linf_norm": 20818.91015625, + "total_spectral_norm": 2.434788942337036, + "layer_1_update_fnorm": 0.5639520287513733, + "layer_1_max_l1_linf_norm": 0.41280657052993774, + "layer_1_max_spectral_norm": 0.012050259858369827, + "layer_2_update_fnorm": 0.5368168354034424, + "layer_2_max_l1_linf_norm": 0.4100024104118347, + "layer_2_max_spectral_norm": 0.01203940249979496, + "layer_3_update_fnorm": 0.558375358581543, + "layer_3_max_l1_linf_norm": 0.4085438549518585, + "layer_3_max_spectral_norm": 0.012048263102769852, + "layer_4_update_fnorm": 0.5869472622871399, + "layer_4_max_l1_linf_norm": 0.4274798631668091, + "layer_4_max_spectral_norm": 0.012046951800584793, + "layer_5_update_fnorm": 0.5975065231323242, + "layer_5_max_l1_linf_norm": 0.418942391872406, + "layer_5_max_spectral_norm": 0.012061866000294685, + "layer_6_update_fnorm": 0.6014206409454346, + "layer_6_max_l1_linf_norm": 
0.42228925228118896, + "layer_6_max_spectral_norm": 0.012044239789247513, + "layer_7_update_fnorm": 0.6019102334976196, + "layer_7_max_l1_linf_norm": 0.41933169960975647, + "layer_7_max_spectral_norm": 0.0120426369830966, + "layer_8_update_fnorm": 0.6042294502258301, + "layer_8_max_l1_linf_norm": 0.41695716977119446, + "layer_8_max_spectral_norm": 0.012040440924465656, + "layer_9_update_fnorm": 0.6049577593803406, + "layer_9_max_l1_linf_norm": 0.4126691222190857, + "layer_9_max_spectral_norm": 0.012044540606439114, + "layer_10_update_fnorm": 0.605787456035614, + "layer_10_max_l1_linf_norm": 0.4113471210002899, + "layer_10_max_spectral_norm": 0.012044929899275303, + "layer_11_update_fnorm": 0.6031877398490906, + "layer_11_max_l1_linf_norm": 0.4014873504638672, + "layer_11_max_spectral_norm": 0.012045823037624359, + "layer_12_update_fnorm": 0.6029811501502991, + "layer_12_max_l1_linf_norm": 0.3934409022331238, + "layer_12_max_spectral_norm": 0.01204389613121748, + "total_sharpness": 0.002259938744828105, + "ip_v_neg_g": 0.007868983782827854, + "cos_v_neg_g": 0.0026019348297268152, + "v_norm": 2.434788703918457, + "g_norm": 1.242112636566162, + "hv_norm": 0.40715739130973816, + "cos_v_hv": 0.013514362275600433, + "hg_norm": 32.50663757324219, + "cos_g_hg": 0.6375680565834045, + "v_parallel_norm": 0.0005178003921173513, + "v_perp_norm": 2.434788465499878, + "layer_1_v_norm": 0.5639520287513733, + "layer_1_cos_v_neg_g": 0.0024983868934214115, + "layer_2_v_norm": 0.5368168354034424, + "layer_2_cos_v_neg_g": 0.00460890494287014, + "layer_3_v_norm": 0.558375358581543, + "layer_3_cos_v_neg_g": 0.0066062756814062595, + "layer_4_v_norm": 0.5869472622871399, + "layer_4_cos_v_neg_g": 0.0041187903843820095, + "layer_5_v_norm": 0.5975065231323242, + "layer_5_cos_v_neg_g": 0.004561292473226786, + "layer_6_v_norm": 0.6014206409454346, + "layer_6_cos_v_neg_g": 0.005636345595121384, + "layer_7_v_norm": 0.6019102334976196, + "layer_7_cos_v_neg_g": 0.0050932359881699085, + "layer_8_v_norm": 0.6042293906211853, + "layer_8_cos_v_neg_g": 0.005896423477679491, + "layer_9_v_norm": 0.6049577593803406, + "layer_9_cos_v_neg_g": 0.0054015773348510265, + "layer_10_v_norm": 0.605787456035614, + "layer_10_cos_v_neg_g": 0.004125838167965412, + "layer_11_v_norm": 0.6031877398490906, + "layer_11_cos_v_neg_g": 0.004239597357809544, + "layer_12_v_norm": 0.6029811501502991, + "layer_12_cos_v_neg_g": 0.0035052811726927757, + "layer_1_sharpness": 0.004260365851223469, + "layer_2_sharpness": 0.0005117572145536542, + "layer_3_sharpness": 0.0012476112460717559, + "layer_4_sharpness": 0.0002847890427801758, + "layer_5_sharpness": 0.0004602347908075899, + "layer_6_sharpness": 0.0005565935280174017, + "layer_7_sharpness": 0.0005070957122370601, + "layer_8_sharpness": 0.00041053295717574656, + "layer_9_sharpness": 0.00038766811485402286, + "layer_10_sharpness": 0.0002508856705389917, + "layer_11_sharpness": 0.00023798133770469576, + "layer_12_sharpness": 0.000328689202433452 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..b6dce0d5fdbd80aae619dbf2d1748635bc0d263f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.42533278465271, + "total_l1_linf_norm": 20744.005859375, + 
"total_spectral_norm": 2.425332546234131, + "layer_1_update_fnorm": 0.5657628178596497, + "layer_1_max_l1_linf_norm": 0.4216233193874359, + "layer_1_max_spectral_norm": 0.012054084800183773, + "layer_2_update_fnorm": 0.52278733253479, + "layer_2_max_l1_linf_norm": 0.4239456057548523, + "layer_2_max_spectral_norm": 0.012038299813866615, + "layer_3_update_fnorm": 0.5490929484367371, + "layer_3_max_l1_linf_norm": 0.42455148696899414, + "layer_3_max_spectral_norm": 0.012047059834003448, + "layer_4_update_fnorm": 0.5739508271217346, + "layer_4_max_l1_linf_norm": 0.42444175481796265, + "layer_4_max_spectral_norm": 0.012048249132931232, + "layer_5_update_fnorm": 0.59583580493927, + "layer_5_max_l1_linf_norm": 0.4175761044025421, + "layer_5_max_spectral_norm": 0.012058030813932419, + "layer_6_update_fnorm": 0.601097583770752, + "layer_6_max_l1_linf_norm": 0.4186973571777344, + "layer_6_max_spectral_norm": 0.012043613940477371, + "layer_7_update_fnorm": 0.5993481278419495, + "layer_7_max_l1_linf_norm": 0.4158945381641388, + "layer_7_max_spectral_norm": 0.012042189948260784, + "layer_8_update_fnorm": 0.6019638180732727, + "layer_8_max_l1_linf_norm": 0.4162323474884033, + "layer_8_max_spectral_norm": 0.012046565301716328, + "layer_9_update_fnorm": 0.6033456921577454, + "layer_9_max_l1_linf_norm": 0.4117404818534851, + "layer_9_max_spectral_norm": 0.01204534899443388, + "layer_10_update_fnorm": 0.6046565771102905, + "layer_10_max_l1_linf_norm": 0.4109855592250824, + "layer_10_max_spectral_norm": 0.01204671524465084, + "layer_11_update_fnorm": 0.6023210287094116, + "layer_11_max_l1_linf_norm": 0.4001134932041168, + "layer_11_max_spectral_norm": 0.0120460856705904, + "layer_12_update_fnorm": 0.6022268533706665, + "layer_12_max_l1_linf_norm": 0.3957213759422302, + "layer_12_max_spectral_norm": 0.012044939212501049, + "total_sharpness": 0.002452113199979067, + "ip_v_neg_g": 0.00862074550241232, + "cos_v_neg_g": 0.0031208007130771875, + "v_norm": 2.42533278465271, + "g_norm": 1.1389573812484741, + "hv_norm": 0.507883608341217, + "cos_v_hv": 0.011709751561284065, + "hg_norm": 24.716325759887695, + "cos_g_hg": 0.6433723568916321, + "v_parallel_norm": 0.0007987510762177408, + "v_perp_norm": 2.42533278465271, + "layer_1_v_norm": 0.5657628178596497, + "layer_1_cos_v_neg_g": 0.0036208955571055412, + "layer_2_v_norm": 0.52278733253479, + "layer_2_cos_v_neg_g": 0.008439216762781143, + "layer_3_v_norm": 0.5490929484367371, + "layer_3_cos_v_neg_g": 0.0054260119795799255, + "layer_4_v_norm": 0.5739508271217346, + "layer_4_cos_v_neg_g": 0.0040939622558653355, + "layer_5_v_norm": 0.59583580493927, + "layer_5_cos_v_neg_g": 0.005350120831280947, + "layer_6_v_norm": 0.601097583770752, + "layer_6_cos_v_neg_g": 0.005576748866587877, + "layer_7_v_norm": 0.5993481278419495, + "layer_7_cos_v_neg_g": 0.007279590703547001, + "layer_8_v_norm": 0.6019638180732727, + "layer_8_cos_v_neg_g": 0.006163866724818945, + "layer_9_v_norm": 0.6033456921577454, + "layer_9_cos_v_neg_g": 0.005897180642932653, + "layer_10_v_norm": 0.6046565771102905, + "layer_10_cos_v_neg_g": 0.004934079945087433, + "layer_11_v_norm": 0.6023210883140564, + "layer_11_cos_v_neg_g": 0.003926401026546955, + "layer_12_v_norm": 0.6022268533706665, + "layer_12_cos_v_neg_g": 0.0024092593230307102, + "layer_1_sharpness": 0.005556917749345303, + "layer_2_sharpness": 0.001245215767994523, + "layer_3_sharpness": 0.0008703545900061727, + "layer_4_sharpness": 0.0002473023778293282, + "layer_5_sharpness": 0.00034840707667171955, + "layer_6_sharpness": 0.00046487076906487346, + 
"layer_7_sharpness": 0.0006338343373499811, + "layer_8_sharpness": 0.00036097754491493106, + "layer_9_sharpness": 0.00035287521313875914, + "layer_10_sharpness": 0.00026262152823619545, + "layer_11_sharpness": 0.00023068854352459311, + "layer_12_sharpness": 0.0002251627593068406 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..723b4f58eaaab49553065ad8267470d47f97d2c2 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.432978630065918, + "total_l1_linf_norm": 20811.8125, + "total_spectral_norm": 2.432978630065918, + "layer_1_update_fnorm": 0.5652143359184265, + "layer_1_max_l1_linf_norm": 0.41624560952186584, + "layer_1_max_spectral_norm": 0.012046011164784431, + "layer_2_update_fnorm": 0.5270700454711914, + "layer_2_max_l1_linf_norm": 0.4621405601501465, + "layer_2_max_spectral_norm": 0.01203936617821455, + "layer_3_update_fnorm": 0.5556107759475708, + "layer_3_max_l1_linf_norm": 0.4419584572315216, + "layer_3_max_spectral_norm": 0.012045590206980705, + "layer_4_update_fnorm": 0.5878942012786865, + "layer_4_max_l1_linf_norm": 0.4349671006202698, + "layer_4_max_spectral_norm": 0.012043680995702744, + "layer_5_update_fnorm": 0.5947811007499695, + "layer_5_max_l1_linf_norm": 0.42182692885398865, + "layer_5_max_spectral_norm": 0.012067004106938839, + "layer_6_update_fnorm": 0.5994574427604675, + "layer_6_max_l1_linf_norm": 0.41715168952941895, + "layer_6_max_spectral_norm": 0.012045202776789665, + "layer_7_update_fnorm": 0.5966697335243225, + "layer_7_max_l1_linf_norm": 0.4122794270515442, + "layer_7_max_spectral_norm": 0.012043697759509087, + "layer_8_update_fnorm": 0.5989546775817871, + "layer_8_max_l1_linf_norm": 0.4117352068424225, + "layer_8_max_spectral_norm": 0.012040742672979832, + "layer_9_update_fnorm": 0.6011717319488525, + "layer_9_max_l1_linf_norm": 0.40923941135406494, + "layer_9_max_spectral_norm": 0.012043648399412632, + "layer_10_update_fnorm": 0.6035980582237244, + "layer_10_max_l1_linf_norm": 0.41675639152526855, + "layer_10_max_spectral_norm": 0.012046748772263527, + "layer_11_update_fnorm": 0.5998793840408325, + "layer_11_max_l1_linf_norm": 0.4436434507369995, + "layer_11_max_spectral_norm": 0.012046083807945251, + "layer_12_update_fnorm": 0.6020789742469788, + "layer_12_max_l1_linf_norm": 0.43851301074028015, + "layer_12_max_spectral_norm": 0.012046153657138348, + "total_sharpness": 0.0027897332329303026, + "ip_v_neg_g": 0.009738009423017502, + "cos_v_neg_g": 0.003531664377078414, + "v_norm": 2.432978630065918, + "g_norm": 1.1333197355270386, + "hv_norm": 0.4696735739707947, + "cos_v_hv": 0.014451231807470322, + "hg_norm": 24.356483459472656, + "cos_g_hg": 0.5888661742210388, + "v_parallel_norm": 0.0005727345123887062, + "v_perp_norm": 2.432978630065918, + "layer_1_v_norm": 0.5652143359184265, + "layer_1_cos_v_neg_g": 0.0034039299935102463, + "layer_2_v_norm": 0.5270700454711914, + "layer_2_cos_v_neg_g": 0.011187384836375713, + "layer_3_v_norm": 0.5556107759475708, + "layer_3_cos_v_neg_g": 0.008629932068288326, + "layer_4_v_norm": 0.5878942012786865, + "layer_4_cos_v_neg_g": 0.004260963760316372, + "layer_5_v_norm": 0.5947811007499695, + "layer_5_cos_v_neg_g": 0.004365075845271349, + "layer_6_v_norm": 
0.5994574427604675, + "layer_6_cos_v_neg_g": 0.006311903707683086, + "layer_7_v_norm": 0.5966697335243225, + "layer_7_cos_v_neg_g": 0.007047183346003294, + "layer_8_v_norm": 0.5989546775817871, + "layer_8_cos_v_neg_g": 0.006491099018603563, + "layer_9_v_norm": 0.6011717319488525, + "layer_9_cos_v_neg_g": 0.0058696637861430645, + "layer_10_v_norm": 0.6035980582237244, + "layer_10_cos_v_neg_g": 0.007048633880913258, + "layer_11_v_norm": 0.5998793244361877, + "layer_11_cos_v_neg_g": 0.00504858186468482, + "layer_12_v_norm": 0.6020789742469788, + "layer_12_cos_v_neg_g": 0.005343061871826649, + "layer_1_sharpness": 0.004264789167791605, + "layer_2_sharpness": 0.0009037514100782573, + "layer_3_sharpness": 0.001232096808962524, + "layer_4_sharpness": 0.00029784865910187364, + "layer_5_sharpness": 0.0005287140375003219, + "layer_6_sharpness": 0.0008536654058843851, + "layer_7_sharpness": 0.0011063040001317859, + "layer_8_sharpness": 0.0006097579025663435, + "layer_9_sharpness": 0.0004340172163210809, + "layer_10_sharpness": 0.0003091046819463372, + "layer_11_sharpness": 0.0002382629318162799, + "layer_12_sharpness": 0.00027726375265046954 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..d11388931387281b6a33c6eeeb5105ae0982fe25 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 1.7678487300872803, + "total_l1_linf_norm": 15245.1171875, + "total_spectral_norm": 1.767849087715149, + "layer_1_update_fnorm": 0.4356909394264221, + "layer_1_max_l1_linf_norm": 0.31618356704711914, + "layer_1_max_spectral_norm": 0.008604738861322403, + "layer_2_update_fnorm": 0.4197026491165161, + "layer_2_max_l1_linf_norm": 0.3167575001716614, + "layer_2_max_spectral_norm": 0.008606168441474438, + "layer_3_update_fnorm": 0.39993637800216675, + "layer_3_max_l1_linf_norm": 0.3186039924621582, + "layer_3_max_spectral_norm": 0.0086020827293396, + "layer_4_update_fnorm": 0.4039881229400635, + "layer_4_max_l1_linf_norm": 0.30794239044189453, + "layer_4_max_spectral_norm": 0.008606797084212303, + "layer_5_update_fnorm": 0.41732096672058105, + "layer_5_max_l1_linf_norm": 0.3009956181049347, + "layer_5_max_spectral_norm": 0.008613208308815956, + "layer_6_update_fnorm": 0.4271490275859833, + "layer_6_max_l1_linf_norm": 0.2985674738883972, + "layer_6_max_spectral_norm": 0.00861387513577938, + "layer_7_update_fnorm": 0.42686766386032104, + "layer_7_max_l1_linf_norm": 0.2969742715358734, + "layer_7_max_spectral_norm": 0.008606615476310253, + "layer_8_update_fnorm": 0.4280712902545929, + "layer_8_max_l1_linf_norm": 0.2960582375526428, + "layer_8_max_spectral_norm": 0.008602564223110676, + "layer_9_update_fnorm": 0.4277261793613434, + "layer_9_max_l1_linf_norm": 0.2953993082046509, + "layer_9_max_spectral_norm": 0.008606617338955402, + "layer_10_update_fnorm": 0.42922523617744446, + "layer_10_max_l1_linf_norm": 0.2961620092391968, + "layer_10_max_spectral_norm": 0.008616081438958645, + "layer_11_update_fnorm": 0.4286241829395294, + "layer_11_max_l1_linf_norm": 0.2976972460746765, + "layer_11_max_spectral_norm": 0.00860962551087141, + "layer_12_update_fnorm": 0.4307894706726074, + "layer_12_max_l1_linf_norm": 0.2964174449443817, + "layer_12_max_spectral_norm": 
0.008606498129665852, + "total_sharpness": 0.02125171385705471, + "ip_v_neg_g": 0.0335845984518528, + "cos_v_neg_g": 0.013304752297699451, + "v_norm": 1.7678487300872803, + "g_norm": 1.4278686046600342, + "hv_norm": 0.987900972366333, + "cos_v_hv": 0.03802994266152382, + "hg_norm": 18.351945877075195, + "cos_g_hg": 0.546143651008606, + "v_parallel_norm": 0.0007634839857928455, + "v_perp_norm": 1.7678486108779907, + "layer_1_v_norm": 0.4356909394264221, + "layer_1_cos_v_neg_g": 0.01729176752269268, + "layer_2_v_norm": 0.4197026491165161, + "layer_2_cos_v_neg_g": 0.022509820759296417, + "layer_3_v_norm": 0.39993634819984436, + "layer_3_cos_v_neg_g": 0.02593958005309105, + "layer_4_v_norm": 0.4039881229400635, + "layer_4_cos_v_neg_g": 0.02143809199333191, + "layer_5_v_norm": 0.41732096672058105, + "layer_5_cos_v_neg_g": 0.018679089844226837, + "layer_6_v_norm": 0.4271490275859833, + "layer_6_cos_v_neg_g": 0.01590634696185589, + "layer_7_v_norm": 0.42686766386032104, + "layer_7_cos_v_neg_g": 0.016263091936707497, + "layer_8_v_norm": 0.4280712902545929, + "layer_8_cos_v_neg_g": 0.015331118367612362, + "layer_9_v_norm": 0.4277261793613434, + "layer_9_cos_v_neg_g": 0.016010291874408722, + "layer_10_v_norm": 0.42922523617744446, + "layer_10_cos_v_neg_g": 0.01505221612751484, + "layer_11_v_norm": 0.4286241829395294, + "layer_11_cos_v_neg_g": 0.013345428742468357, + "layer_12_v_norm": 0.4307894706726074, + "layer_12_cos_v_neg_g": 0.010360886342823505, + "layer_1_sharpness": 0.012664157897233963, + "layer_2_sharpness": 0.006135550793260336, + "layer_3_sharpness": 0.006557248067110777, + "layer_4_sharpness": 0.004253273829817772, + "layer_5_sharpness": 0.004336353857070208, + "layer_6_sharpness": 0.0038712210953235626, + "layer_7_sharpness": 0.003556885290890932, + "layer_8_sharpness": 0.0030695779714733362, + "layer_9_sharpness": 0.002827987540513277, + "layer_10_sharpness": 0.0020117424428462982, + "layer_11_sharpness": 0.0020442793611437082, + "layer_12_sharpness": 0.0014784836675971746 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..889e931afe4e0b6d54672b0dbf0280a73039e140 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.4297425746917725, + "total_l1_linf_norm": 20777.41015625, + "total_spectral_norm": 2.4297425746917725, + "layer_1_update_fnorm": 0.5649482011795044, + "layer_1_max_l1_linf_norm": 0.4174908995628357, + "layer_1_max_spectral_norm": 0.012049015611410141, + "layer_2_update_fnorm": 0.5261396169662476, + "layer_2_max_l1_linf_norm": 0.4529780149459839, + "layer_2_max_spectral_norm": 0.012040190398693085, + "layer_3_update_fnorm": 0.557852566242218, + "layer_3_max_l1_linf_norm": 0.4421996772289276, + "layer_3_max_spectral_norm": 0.012052919715642929, + "layer_4_update_fnorm": 0.5882068276405334, + "layer_4_max_l1_linf_norm": 0.4299513101577759, + "layer_4_max_spectral_norm": 0.012049730867147446, + "layer_5_update_fnorm": 0.5966721773147583, + "layer_5_max_l1_linf_norm": 0.41767430305480957, + "layer_5_max_spectral_norm": 0.012062533758580685, + "layer_6_update_fnorm": 0.6005644798278809, + "layer_6_max_l1_linf_norm": 0.42093127965927124, + "layer_6_max_spectral_norm": 0.012046536430716515, + "layer_7_update_fnorm": 
0.5993503928184509, + "layer_7_max_l1_linf_norm": 0.41639286279678345, + "layer_7_max_spectral_norm": 0.012042914517223835, + "layer_8_update_fnorm": 0.5999686121940613, + "layer_8_max_l1_linf_norm": 0.4160998463630676, + "layer_8_max_spectral_norm": 0.01204589195549488, + "layer_9_update_fnorm": 0.6012172102928162, + "layer_9_max_l1_linf_norm": 0.4088323712348938, + "layer_9_max_spectral_norm": 0.012047912925481796, + "layer_10_update_fnorm": 0.604317843914032, + "layer_10_max_l1_linf_norm": 0.411196231842041, + "layer_10_max_spectral_norm": 0.012045308947563171, + "layer_11_update_fnorm": 0.6014955043792725, + "layer_11_max_l1_linf_norm": 0.41749751567840576, + "layer_11_max_spectral_norm": 0.012043393217027187, + "layer_12_update_fnorm": 0.6017658114433289, + "layer_12_max_l1_linf_norm": 0.42600128054618835, + "layer_12_max_spectral_norm": 0.012045499868690968, + "total_sharpness": 0.0026777656748890877, + "ip_v_neg_g": 0.007406002841889858, + "cos_v_neg_g": 0.002384888706728816, + "v_norm": 2.4297425746917725, + "g_norm": 1.278072476387024, + "hv_norm": 1.0414559841156006, + "cos_v_hv": 0.006247294135391712, + "hg_norm": 251.14410400390625, + "cos_g_hg": 0.4091627299785614, + "v_parallel_norm": 0.0009849269408732653, + "v_perp_norm": 2.4297423362731934, + "layer_1_v_norm": 0.5649482011795044, + "layer_1_cos_v_neg_g": 0.0029325345531105995, + "layer_2_v_norm": 0.5261396169662476, + "layer_2_cos_v_neg_g": 0.004955165088176727, + "layer_3_v_norm": 0.557852566242218, + "layer_3_cos_v_neg_g": 0.005917960312217474, + "layer_4_v_norm": 0.5882068276405334, + "layer_4_cos_v_neg_g": 0.002270923461765051, + "layer_5_v_norm": 0.5966721773147583, + "layer_5_cos_v_neg_g": 0.0027141720056533813, + "layer_6_v_norm": 0.6005644798278809, + "layer_6_cos_v_neg_g": 0.00493030808866024, + "layer_7_v_norm": 0.5993503928184509, + "layer_7_cos_v_neg_g": 0.004165001213550568, + "layer_8_v_norm": 0.5999686121940613, + "layer_8_cos_v_neg_g": 0.004444187507033348, + "layer_9_v_norm": 0.6012172102928162, + "layer_9_cos_v_neg_g": 0.004584731534123421, + "layer_10_v_norm": 0.604317843914032, + "layer_10_cos_v_neg_g": 0.004978060722351074, + "layer_11_v_norm": 0.6014955043792725, + "layer_11_cos_v_neg_g": 0.004475614055991173, + "layer_12_v_norm": 0.6017658114433289, + "layer_12_cos_v_neg_g": 0.005180147010833025, + "layer_1_sharpness": 0.00918848067522049, + "layer_2_sharpness": 0.0013103601522743702, + "layer_3_sharpness": 0.0012314808554947376, + "layer_4_sharpness": 0.0002884412242565304, + "layer_5_sharpness": 0.00041409273399040103, + "layer_6_sharpness": 0.00047942373203113675, + "layer_7_sharpness": 0.000495860178489238, + "layer_8_sharpness": 0.00037812613300047815, + "layer_9_sharpness": 0.0003592634166125208, + "layer_10_sharpness": 0.00025118482881225646, + "layer_11_sharpness": 0.00021143874619156122, + "layer_12_sharpness": 0.00019534247985575348 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..feb60bbdb5e6e559559af46451fafbb0a20a8a73 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.428440570831299, + "total_l1_linf_norm": 20771.267578125, + "total_spectral_norm": 2.428441047668457, + "layer_1_update_fnorm": 0.5665768384933472, + 
"layer_1_max_l1_linf_norm": 0.4134964942932129, + "layer_1_max_spectral_norm": 0.012043207883834839, + "layer_2_update_fnorm": 0.5226163268089294, + "layer_2_max_l1_linf_norm": 0.4474501609802246, + "layer_2_max_spectral_norm": 0.012038339860737324, + "layer_3_update_fnorm": 0.5529356598854065, + "layer_3_max_l1_linf_norm": 0.43977659940719604, + "layer_3_max_spectral_norm": 0.012048379518091679, + "layer_4_update_fnorm": 0.587704598903656, + "layer_4_max_l1_linf_norm": 0.42179927229881287, + "layer_4_max_spectral_norm": 0.01204757858067751, + "layer_5_update_fnorm": 0.5976331233978271, + "layer_5_max_l1_linf_norm": 0.41991105675697327, + "layer_5_max_spectral_norm": 0.01207107212394476, + "layer_6_update_fnorm": 0.6004857420921326, + "layer_6_max_l1_linf_norm": 0.4206543564796448, + "layer_6_max_spectral_norm": 0.012045364826917648, + "layer_7_update_fnorm": 0.5985985398292542, + "layer_7_max_l1_linf_norm": 0.41507846117019653, + "layer_7_max_spectral_norm": 0.012048753909766674, + "layer_8_update_fnorm": 0.5993227362632751, + "layer_8_max_l1_linf_norm": 0.4102395176887512, + "layer_8_max_spectral_norm": 0.012043722905218601, + "layer_9_update_fnorm": 0.6010876297950745, + "layer_9_max_l1_linf_norm": 0.40838822722435, + "layer_9_max_spectral_norm": 0.012044067494571209, + "layer_10_update_fnorm": 0.6037977933883667, + "layer_10_max_l1_linf_norm": 0.4099755585193634, + "layer_10_max_spectral_norm": 0.012045125477015972, + "layer_11_update_fnorm": 0.6016437411308289, + "layer_11_max_l1_linf_norm": 0.4232614040374756, + "layer_11_max_spectral_norm": 0.012045001611113548, + "layer_12_update_fnorm": 0.6021469831466675, + "layer_12_max_l1_linf_norm": 0.42873865365982056, + "layer_12_max_spectral_norm": 0.012046584859490395, + "total_sharpness": 0.0017467576544731855, + "ip_v_neg_g": 0.006319540552794933, + "cos_v_neg_g": 0.0014168357010930777, + "v_norm": 2.428440570831299, + "g_norm": 1.8367013931274414, + "hv_norm": 0.5572466850280762, + "cos_v_hv": 0.007612242829054594, + "hg_norm": 88.70146179199219, + "cos_g_hg": 0.6404848098754883, + "v_parallel_norm": 0.0005837109056301415, + "v_perp_norm": 2.428440570831299, + "layer_1_v_norm": 0.5665768384933472, + "layer_1_cos_v_neg_g": 0.0014709628885611892, + "layer_2_v_norm": 0.5226163268089294, + "layer_2_cos_v_neg_g": 0.0035988488234579563, + "layer_3_v_norm": 0.5529357194900513, + "layer_3_cos_v_neg_g": 0.005535229574888945, + "layer_4_v_norm": 0.587704598903656, + "layer_4_cos_v_neg_g": 0.001555218710564077, + "layer_5_v_norm": 0.5976331233978271, + "layer_5_cos_v_neg_g": 0.002635397482663393, + "layer_6_v_norm": 0.6004857420921326, + "layer_6_cos_v_neg_g": 0.002972845220938325, + "layer_7_v_norm": 0.5985985398292542, + "layer_7_cos_v_neg_g": 0.0025911633856594563, + "layer_8_v_norm": 0.5993227362632751, + "layer_8_cos_v_neg_g": 0.0028163897804915905, + "layer_9_v_norm": 0.6010876297950745, + "layer_9_cos_v_neg_g": 0.002279219450429082, + "layer_10_v_norm": 0.6037977933883667, + "layer_10_cos_v_neg_g": 0.0026260660961270332, + "layer_11_v_norm": 0.6016437411308289, + "layer_11_cos_v_neg_g": 0.0002617840364109725, + "layer_12_v_norm": 0.6021469831466675, + "layer_12_cos_v_neg_g": 0.0013132919557392597, + "layer_1_sharpness": 0.00397831154987216, + "layer_2_sharpness": 0.0012609607074409723, + "layer_3_sharpness": 0.0019492190331220627, + "layer_4_sharpness": 0.0002544673625379801, + "layer_5_sharpness": 0.0003190103452652693, + "layer_6_sharpness": 0.0003638060879893601, + "layer_7_sharpness": 0.0004586248251143843, + "layer_8_sharpness": 
0.0002927794703282416, + "layer_9_sharpness": 0.0002428127481834963, + "layer_10_sharpness": 0.00019927667744923383, + "layer_11_sharpness": 0.00016836395661812276, + "layer_12_sharpness": 0.00013620017853099853 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..bcb39970403768bca639aa559b9d3f1e93f82538 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.431673765182495, + "total_l1_linf_norm": 20799.39453125, + "total_spectral_norm": 2.4316742420196533, + "layer_1_update_fnorm": 0.5684813261032104, + "layer_1_max_l1_linf_norm": 0.4107733964920044, + "layer_1_max_spectral_norm": 0.012049032375216484, + "layer_2_update_fnorm": 0.5313565135002136, + "layer_2_max_l1_linf_norm": 0.45467856526374817, + "layer_2_max_spectral_norm": 0.01203733216971159, + "layer_3_update_fnorm": 0.5630154609680176, + "layer_3_max_l1_linf_norm": 0.4416438937187195, + "layer_3_max_spectral_norm": 0.012047668918967247, + "layer_4_update_fnorm": 0.5904932618141174, + "layer_4_max_l1_linf_norm": 0.4281913936138153, + "layer_4_max_spectral_norm": 0.012051028199493885, + "layer_5_update_fnorm": 0.5964004993438721, + "layer_5_max_l1_linf_norm": 0.4209210276603699, + "layer_5_max_spectral_norm": 0.012063644826412201, + "layer_6_update_fnorm": 0.6003943681716919, + "layer_6_max_l1_linf_norm": 0.4179757237434387, + "layer_6_max_spectral_norm": 0.012044073082506657, + "layer_7_update_fnorm": 0.5966812968254089, + "layer_7_max_l1_linf_norm": 0.4157595634460449, + "layer_7_max_spectral_norm": 0.012044299393892288, + "layer_8_update_fnorm": 0.5977467894554138, + "layer_8_max_l1_linf_norm": 0.4127362370491028, + "layer_8_max_spectral_norm": 0.01204332709312439, + "layer_9_update_fnorm": 0.5999451279640198, + "layer_9_max_l1_linf_norm": 0.4091995060443878, + "layer_9_max_spectral_norm": 0.0120446952059865, + "layer_10_update_fnorm": 0.6034876108169556, + "layer_10_max_l1_linf_norm": 0.4112069606781006, + "layer_10_max_spectral_norm": 0.01204213872551918, + "layer_11_update_fnorm": 0.6006850600242615, + "layer_11_max_l1_linf_norm": 0.4205108880996704, + "layer_11_max_spectral_norm": 0.012048938311636448, + "layer_12_update_fnorm": 0.6023642420768738, + "layer_12_max_l1_linf_norm": 0.4245341420173645, + "layer_12_max_spectral_norm": 0.01204551849514246, + "total_sharpness": 0.001813058159314096, + "ip_v_neg_g": 0.006209338549524546, + "cos_v_neg_g": 0.0023578391410410404, + "v_norm": 2.431673765182495, + "g_norm": 1.082993507385254, + "hv_norm": 0.3420906364917755, + "cos_v_hv": 0.012887713499367237, + "hg_norm": 23.527307510375977, + "cos_g_hg": 0.6137417554855347, + "v_parallel_norm": 0.0006632654112763703, + "v_perp_norm": 2.431673765182495, + "layer_1_v_norm": 0.5684813261032104, + "layer_1_cos_v_neg_g": 0.0031852195970714092, + "layer_2_v_norm": 0.5313565135002136, + "layer_2_cos_v_neg_g": 0.0039542061276733875, + "layer_3_v_norm": 0.5630154609680176, + "layer_3_cos_v_neg_g": 0.0035808924585580826, + "layer_4_v_norm": 0.5904932618141174, + "layer_4_cos_v_neg_g": 0.002715323818847537, + "layer_5_v_norm": 0.5964004993438721, + "layer_5_cos_v_neg_g": 0.003343005897477269, + "layer_6_v_norm": 0.6003943681716919, + "layer_6_cos_v_neg_g": 0.0041345045901834965, + 
"layer_7_v_norm": 0.5966812968254089, + "layer_7_cos_v_neg_g": 0.003894374007359147, + "layer_8_v_norm": 0.597746729850769, + "layer_8_cos_v_neg_g": 0.0037367611657828093, + "layer_9_v_norm": 0.5999451279640198, + "layer_9_cos_v_neg_g": 0.004244982730597258, + "layer_10_v_norm": 0.6034876108169556, + "layer_10_cos_v_neg_g": 0.004155955743044615, + "layer_11_v_norm": 0.6006850600242615, + "layer_11_cos_v_neg_g": 0.003808587323874235, + "layer_12_v_norm": 0.6023642420768738, + "layer_12_cos_v_neg_g": 0.0036841221153736115, + "layer_1_sharpness": 0.004012936260551214, + "layer_2_sharpness": 0.0004614588397089392, + "layer_3_sharpness": 0.0004337928257882595, + "layer_4_sharpness": 0.00022543803788721561, + "layer_5_sharpness": 0.0003220424405299127, + "layer_6_sharpness": 0.000538409803994, + "layer_7_sharpness": 0.0006147149251773953, + "layer_8_sharpness": 0.0003748905146494508, + "layer_9_sharpness": 0.0003106884832959622, + "layer_10_sharpness": 0.00020360026974231005, + "layer_11_sharpness": 0.00018585531506687403, + "layer_12_sharpness": 0.00030646490631625056 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..7b011569b8788cf39aa379e4073f18dd17b2e8da --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.432643175125122, + "total_l1_linf_norm": 20821.7265625, + "total_spectral_norm": 2.4326436519622803, + "layer_1_update_fnorm": 0.5668557286262512, + "layer_1_max_l1_linf_norm": 0.412129282951355, + "layer_1_max_spectral_norm": 0.012047907337546349, + "layer_2_update_fnorm": 0.5269820094108582, + "layer_2_max_l1_linf_norm": 0.46367815136909485, + "layer_2_max_spectral_norm": 0.012041110545396805, + "layer_3_update_fnorm": 0.5586768984794617, + "layer_3_max_l1_linf_norm": 0.4608505368232727, + "layer_3_max_spectral_norm": 0.01204754039645195, + "layer_4_update_fnorm": 0.5889266133308411, + "layer_4_max_l1_linf_norm": 0.42950159311294556, + "layer_4_max_spectral_norm": 0.01204957626760006, + "layer_5_update_fnorm": 0.5964255928993225, + "layer_5_max_l1_linf_norm": 0.4198034703731537, + "layer_5_max_spectral_norm": 0.012064354494214058, + "layer_6_update_fnorm": 0.5992355346679688, + "layer_6_max_l1_linf_norm": 0.4173552989959717, + "layer_6_max_spectral_norm": 0.012044847011566162, + "layer_7_update_fnorm": 0.5964376926422119, + "layer_7_max_l1_linf_norm": 0.410559743642807, + "layer_7_max_spectral_norm": 0.012045339681208134, + "layer_8_update_fnorm": 0.5945637822151184, + "layer_8_max_l1_linf_norm": 0.4093118906021118, + "layer_8_max_spectral_norm": 0.012045365758240223, + "layer_9_update_fnorm": 0.5972907543182373, + "layer_9_max_l1_linf_norm": 0.41002941131591797, + "layer_9_max_spectral_norm": 0.012045011855661869, + "layer_10_update_fnorm": 0.6022812724113464, + "layer_10_max_l1_linf_norm": 0.41533857583999634, + "layer_10_max_spectral_norm": 0.01204167865216732, + "layer_11_update_fnorm": 0.599761426448822, + "layer_11_max_l1_linf_norm": 0.43086370825767517, + "layer_11_max_spectral_norm": 0.012041528709232807, + "layer_12_update_fnorm": 0.6015480160713196, + "layer_12_max_l1_linf_norm": 0.4199390411376953, + "layer_12_max_spectral_norm": 0.012046492658555508, + "total_sharpness": 0.0021606215741485357, + "ip_v_neg_g": 
0.003618203802034259, + "cos_v_neg_g": 0.0013909111730754375, + "v_norm": 2.432643175125122, + "g_norm": 1.0693385601043701, + "hv_norm": 0.3866870403289795, + "cos_v_hv": 0.013592441566288471, + "hg_norm": 18.915130615234375, + "cos_g_hg": 0.5943204164505005, + "v_parallel_norm": 0.0005579464486800134, + "v_perp_norm": 2.432642936706543, + "layer_1_v_norm": 0.5668557286262512, + "layer_1_cos_v_neg_g": 0.0020498433150351048, + "layer_2_v_norm": 0.5269820094108582, + "layer_2_cos_v_neg_g": -0.00019630586029961705, + "layer_3_v_norm": 0.5586768984794617, + "layer_3_cos_v_neg_g": 0.002571543911471963, + "layer_4_v_norm": 0.5889266133308411, + "layer_4_cos_v_neg_g": 0.0008317908504977822, + "layer_5_v_norm": 0.5964255928993225, + "layer_5_cos_v_neg_g": 0.0012415668461471796, + "layer_6_v_norm": 0.599235475063324, + "layer_6_cos_v_neg_g": 0.0021469604689627886, + "layer_7_v_norm": 0.5964376926422119, + "layer_7_cos_v_neg_g": 0.002418825402855873, + "layer_8_v_norm": 0.5945637822151184, + "layer_8_cos_v_neg_g": 0.003412810619920492, + "layer_9_v_norm": 0.5972907543182373, + "layer_9_cos_v_neg_g": 0.003572280751541257, + "layer_10_v_norm": 0.6022812724113464, + "layer_10_cos_v_neg_g": 0.0029324949719011784, + "layer_11_v_norm": 0.599761426448822, + "layer_11_cos_v_neg_g": 0.002313687466084957, + "layer_12_v_norm": 0.6015480160713196, + "layer_12_cos_v_neg_g": 0.0015426670433953404, + "layer_1_sharpness": 0.003157764906063676, + "layer_2_sharpness": 0.00038548067095689476, + "layer_3_sharpness": 0.0007326973136514425, + "layer_4_sharpness": 0.00025486332015134394, + "layer_5_sharpness": 0.00035970742464996874, + "layer_6_sharpness": 0.0006497753201983869, + "layer_7_sharpness": 0.0007924789097160101, + "layer_8_sharpness": 0.0005981667200103402, + "layer_9_sharpness": 0.0004658000252675265, + "layer_10_sharpness": 0.0002259477332700044, + "layer_11_sharpness": 0.00018134243146050721, + "layer_12_sharpness": 0.0002968655026052147 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..986d5ab670c61b8c59e69a067a134ae494554844 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.431442975997925, + "total_l1_linf_norm": 20816.412109375, + "total_spectral_norm": 2.431443214416504, + "layer_1_update_fnorm": 0.5702075958251953, + "layer_1_max_l1_linf_norm": 0.41462546586990356, + "layer_1_max_spectral_norm": 0.01205370295792818, + "layer_2_update_fnorm": 0.5245494842529297, + "layer_2_max_l1_linf_norm": 0.4789201617240906, + "layer_2_max_spectral_norm": 0.012038071639835835, + "layer_3_update_fnorm": 0.5561530590057373, + "layer_3_max_l1_linf_norm": 0.4510331153869629, + "layer_3_max_spectral_norm": 0.01204940676689148, + "layer_4_update_fnorm": 0.5892606973648071, + "layer_4_max_l1_linf_norm": 0.4405733048915863, + "layer_4_max_spectral_norm": 0.012050095945596695, + "layer_5_update_fnorm": 0.5954346060752869, + "layer_5_max_l1_linf_norm": 0.42952609062194824, + "layer_5_max_spectral_norm": 0.012064236216247082, + "layer_6_update_fnorm": 0.5975775718688965, + "layer_6_max_l1_linf_norm": 0.41548633575439453, + "layer_6_max_spectral_norm": 0.012043376453220844, + "layer_7_update_fnorm": 0.5961682796478271, + "layer_7_max_l1_linf_norm": 
0.41188985109329224, + "layer_7_max_spectral_norm": 0.012045520357787609, + "layer_8_update_fnorm": 0.5957028865814209, + "layer_8_max_l1_linf_norm": 0.410251259803772, + "layer_8_max_spectral_norm": 0.012042408809065819, + "layer_9_update_fnorm": 0.5985430479049683, + "layer_9_max_l1_linf_norm": 0.40897613763809204, + "layer_9_max_spectral_norm": 0.012047501280903816, + "layer_10_update_fnorm": 0.6031030416488647, + "layer_10_max_l1_linf_norm": 0.4138426184654236, + "layer_10_max_spectral_norm": 0.012045704759657383, + "layer_11_update_fnorm": 0.5997878909111023, + "layer_11_max_l1_linf_norm": 0.4317488372325897, + "layer_11_max_spectral_norm": 0.012043720111250877, + "layer_12_update_fnorm": 0.6018219590187073, + "layer_12_max_l1_linf_norm": 0.4416551887989044, + "layer_12_max_spectral_norm": 0.012045011855661869, + "total_sharpness": 0.0018227628897875547, + "ip_v_neg_g": 0.003905420657247305, + "cos_v_neg_g": 0.0014016219647601247, + "v_norm": 2.431442975997925, + "g_norm": 1.1459689140319824, + "hv_norm": 0.37847259640693665, + "cos_v_hv": 0.011710078455507755, + "hg_norm": 33.53908920288086, + "cos_g_hg": 0.5341759920120239, + "v_parallel_norm": 0.0003121512709185481, + "v_perp_norm": 2.431442975997925, + "layer_1_v_norm": 0.5702075958251953, + "layer_1_cos_v_neg_g": 0.0015889588976278901, + "layer_2_v_norm": 0.5245494842529297, + "layer_2_cos_v_neg_g": 0.005231854971498251, + "layer_3_v_norm": 0.5561530590057373, + "layer_3_cos_v_neg_g": 0.002545655472204089, + "layer_4_v_norm": 0.5892606973648071, + "layer_4_cos_v_neg_g": 0.00040625513065606356, + "layer_5_v_norm": 0.5954346060752869, + "layer_5_cos_v_neg_g": 0.0014543675351887941, + "layer_6_v_norm": 0.5975776314735413, + "layer_6_cos_v_neg_g": 0.002015527570620179, + "layer_7_v_norm": 0.5961682796478271, + "layer_7_cos_v_neg_g": 0.0032859116327017546, + "layer_8_v_norm": 0.5957028865814209, + "layer_8_cos_v_neg_g": 0.003090973012149334, + "layer_9_v_norm": 0.5985430479049683, + "layer_9_cos_v_neg_g": 0.0024058392737060785, + "layer_10_v_norm": 0.6031030416488647, + "layer_10_cos_v_neg_g": 0.0015647405525669456, + "layer_11_v_norm": 0.5997878909111023, + "layer_11_cos_v_neg_g": 0.0019386576022952795, + "layer_12_v_norm": 0.6018219590187073, + "layer_12_cos_v_neg_g": 0.0029116119258105755, + "layer_1_sharpness": 0.0036683299113065004, + "layer_2_sharpness": 0.0008825107361190021, + "layer_3_sharpness": 0.0008564161835238338, + "layer_4_sharpness": 0.00023081531981006265, + "layer_5_sharpness": 0.0003200978389941156, + "layer_6_sharpness": 0.0005477592349052429, + "layer_7_sharpness": 0.0006200150237418711, + "layer_8_sharpness": 0.00039270665729418397, + "layer_9_sharpness": 0.0003144170332234353, + "layer_10_sharpness": 0.0001906718680402264, + "layer_11_sharpness": 0.00015524955233559012, + "layer_12_sharpness": 0.00021914366516284645 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..b6a94d874af69a7123a2c2a4d50b674ba8465ea5 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.4310057163238525, + "total_l1_linf_norm": 20801.171875, + "total_spectral_norm": 2.4310059547424316, + "layer_1_update_fnorm": 0.5699465274810791, + "layer_1_max_l1_linf_norm": 0.41741305589675903, 
+ "layer_1_max_spectral_norm": 0.012055699713528156, + "layer_2_update_fnorm": 0.5284233689308167, + "layer_2_max_l1_linf_norm": 0.46773970127105713, + "layer_2_max_spectral_norm": 0.012037858366966248, + "layer_3_update_fnorm": 0.5614024996757507, + "layer_3_max_l1_linf_norm": 0.44435179233551025, + "layer_3_max_spectral_norm": 0.01205156184732914, + "layer_4_update_fnorm": 0.5891119837760925, + "layer_4_max_l1_linf_norm": 0.42381417751312256, + "layer_4_max_spectral_norm": 0.012048705480992794, + "layer_5_update_fnorm": 0.5948766469955444, + "layer_5_max_l1_linf_norm": 0.41703706979751587, + "layer_5_max_spectral_norm": 0.012062412686645985, + "layer_6_update_fnorm": 0.5988775491714478, + "layer_6_max_l1_linf_norm": 0.415007621049881, + "layer_6_max_spectral_norm": 0.012042687274515629, + "layer_7_update_fnorm": 0.5959721207618713, + "layer_7_max_l1_linf_norm": 0.4095315933227539, + "layer_7_max_spectral_norm": 0.012044921517372131, + "layer_8_update_fnorm": 0.5941336154937744, + "layer_8_max_l1_linf_norm": 0.41218802332878113, + "layer_8_max_spectral_norm": 0.01204431802034378, + "layer_9_update_fnorm": 0.5973284840583801, + "layer_9_max_l1_linf_norm": 0.40739941596984863, + "layer_9_max_spectral_norm": 0.012044685892760754, + "layer_10_update_fnorm": 0.6008760929107666, + "layer_10_max_l1_linf_norm": 0.40966886281967163, + "layer_10_max_spectral_norm": 0.012044990435242653, + "layer_11_update_fnorm": 0.5982616543769836, + "layer_11_max_l1_linf_norm": 0.43154239654541016, + "layer_11_max_spectral_norm": 0.012047328986227512, + "layer_12_update_fnorm": 0.6016743779182434, + "layer_12_max_l1_linf_norm": 0.4246343672275543, + "layer_12_max_spectral_norm": 0.012046036310493946, + "total_sharpness": 0.0018468470079824328, + "ip_v_neg_g": 0.005885468330234289, + "cos_v_neg_g": 0.0020007621496915817, + "v_norm": 2.4310057163238525, + "g_norm": 1.2100396156311035, + "hv_norm": 0.39047348499298096, + "cos_v_hv": 0.01149808056652546, + "hg_norm": 30.18389892578125, + "cos_g_hg": 0.6217653155326843, + "v_parallel_norm": 0.0005931585910730064, + "v_perp_norm": 2.4310054779052734, + "layer_1_v_norm": 0.5699465274810791, + "layer_1_cos_v_neg_g": 0.002300592605024576, + "layer_2_v_norm": 0.5284233689308167, + "layer_2_cos_v_neg_g": 0.0033588246442377567, + "layer_3_v_norm": 0.561402440071106, + "layer_3_cos_v_neg_g": 0.004172138404101133, + "layer_4_v_norm": 0.5891119837760925, + "layer_4_cos_v_neg_g": 0.0030412240885198116, + "layer_5_v_norm": 0.5948766469955444, + "layer_5_cos_v_neg_g": 0.002723257290199399, + "layer_6_v_norm": 0.5988775491714478, + "layer_6_cos_v_neg_g": 0.003628188045695424, + "layer_7_v_norm": 0.5959721207618713, + "layer_7_cos_v_neg_g": 0.003019071649760008, + "layer_8_v_norm": 0.5941336154937744, + "layer_8_cos_v_neg_g": 0.00419241888448596, + "layer_9_v_norm": 0.5973284840583801, + "layer_9_cos_v_neg_g": 0.004650614224374294, + "layer_10_v_norm": 0.6008760929107666, + "layer_10_cos_v_neg_g": 0.003777134930714965, + "layer_11_v_norm": 0.5982616543769836, + "layer_11_cos_v_neg_g": 0.0028248168528079987, + "layer_12_v_norm": 0.6016743779182434, + "layer_12_cos_v_neg_g": 0.0024470379576087, + "layer_1_sharpness": 0.0036680372431874275, + "layer_2_sharpness": 0.0005913436179980636, + "layer_3_sharpness": 0.0006718465592712164, + "layer_4_sharpness": 0.0002502521965652704, + "layer_5_sharpness": 0.0003487605135887861, + "layer_6_sharpness": 0.00047379598254337907, + "layer_7_sharpness": 0.0006353160133585334, + "layer_8_sharpness": 0.0004976221243850887, + "layer_9_sharpness": 
0.00037047284422442317, + "layer_10_sharpness": 0.00020152830984443426, + "layer_11_sharpness": 0.00014958891551941633, + "layer_12_sharpness": 0.0001427711104042828 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_8000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..38c44cecfe5f371a5451db4ec0e5bcd0c1088aba --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.428795576095581, + "total_l1_linf_norm": 20781.640625, + "total_spectral_norm": 2.428795099258423, + "layer_1_update_fnorm": 0.5703808665275574, + "layer_1_max_l1_linf_norm": 0.41538193821907043, + "layer_1_max_spectral_norm": 0.012051446363329887, + "layer_2_update_fnorm": 0.5229169726371765, + "layer_2_max_l1_linf_norm": 0.44553428888320923, + "layer_2_max_spectral_norm": 0.012039645574986935, + "layer_3_update_fnorm": 0.5617263913154602, + "layer_3_max_l1_linf_norm": 0.44740399718284607, + "layer_3_max_spectral_norm": 0.012048729695379734, + "layer_4_update_fnorm": 0.5899090766906738, + "layer_4_max_l1_linf_norm": 0.436093807220459, + "layer_4_max_spectral_norm": 0.012049383483827114, + "layer_5_update_fnorm": 0.597978413105011, + "layer_5_max_l1_linf_norm": 0.41895854473114014, + "layer_5_max_spectral_norm": 0.012057416141033173, + "layer_6_update_fnorm": 0.6003477573394775, + "layer_6_max_l1_linf_norm": 0.42004233598709106, + "layer_6_max_spectral_norm": 0.012045035138726234, + "layer_7_update_fnorm": 0.5979679226875305, + "layer_7_max_l1_linf_norm": 0.41806042194366455, + "layer_7_max_spectral_norm": 0.012043365277349949, + "layer_8_update_fnorm": 0.5962246060371399, + "layer_8_max_l1_linf_norm": 0.41386014223098755, + "layer_8_max_spectral_norm": 0.012042617425322533, + "layer_9_update_fnorm": 0.5987327694892883, + "layer_9_max_l1_linf_norm": 0.4104500114917755, + "layer_9_max_spectral_norm": 0.012043468654155731, + "layer_10_update_fnorm": 0.6029807925224304, + "layer_10_max_l1_linf_norm": 0.4343883991241455, + "layer_10_max_spectral_norm": 0.012044732458889484, + "layer_11_update_fnorm": 0.5996479392051697, + "layer_11_max_l1_linf_norm": 0.4278520941734314, + "layer_11_max_spectral_norm": 0.012044676579535007, + "layer_12_update_fnorm": 0.6026960611343384, + "layer_12_max_l1_linf_norm": 0.4364830553531647, + "layer_12_max_spectral_norm": 0.012045122683048248, + "total_sharpness": 0.0014702975749969482, + "ip_v_neg_g": 0.0023426173720508814, + "cos_v_neg_g": 0.0008500648546032608, + "v_norm": 2.428795576095581, + "g_norm": 1.1346406936645508, + "hv_norm": 0.42668893933296204, + "cos_v_hv": 0.00836921576410532, + "hg_norm": 27.332212448120117, + "cos_g_hg": 0.5323110818862915, + "v_parallel_norm": 0.00020630290964618325, + "v_perp_norm": 2.428795576095581, + "layer_1_v_norm": 0.5703808665275574, + "layer_1_cos_v_neg_g": 0.0003342190175317228, + "layer_2_v_norm": 0.5229169726371765, + "layer_2_cos_v_neg_g": 7.915732567198575e-05, + "layer_3_v_norm": 0.5617263913154602, + "layer_3_cos_v_neg_g": 0.0010659899562597275, + "layer_4_v_norm": 0.5899090766906738, + "layer_4_cos_v_neg_g": 0.0010174233466386795, + "layer_5_v_norm": 0.597978413105011, + "layer_5_cos_v_neg_g": 0.0013357455609366298, + "layer_6_v_norm": 0.6003478169441223, + "layer_6_cos_v_neg_g": 0.0014827665872871876, + "layer_7_v_norm": 0.5979679226875305, + 
"layer_7_cos_v_neg_g": 0.001985106850042939, + "layer_8_v_norm": 0.5962246060371399, + "layer_8_cos_v_neg_g": 0.0009686461417004466, + "layer_9_v_norm": 0.5987327694892883, + "layer_9_cos_v_neg_g": 0.0011521615087985992, + "layer_10_v_norm": 0.6029807925224304, + "layer_10_cos_v_neg_g": 0.0020442886743694544, + "layer_11_v_norm": 0.5996479392051697, + "layer_11_cos_v_neg_g": 0.002901452826336026, + "layer_12_v_norm": 0.6026960611343384, + "layer_12_cos_v_neg_g": 0.0020631076768040657, + "layer_1_sharpness": 0.0036044809967279434, + "layer_2_sharpness": 0.00038622322608716786, + "layer_3_sharpness": 0.0005470456671901047, + "layer_4_sharpness": 0.0002655873540788889, + "layer_5_sharpness": 0.00030391878681257367, + "layer_6_sharpness": 0.0004148903535678983, + "layer_7_sharpness": 0.0005535511882044375, + "layer_8_sharpness": 0.0003634760796558112, + "layer_9_sharpness": 0.0002918275131378323, + "layer_10_sharpness": 0.0001774468255462125, + "layer_11_sharpness": 0.00015432504005730152, + "layer_12_sharpness": 0.0001641820272197947 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..b84e4bc33f7286e311ed8c48f41554dcfd430303 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.4032206535339355, + "total_l1_linf_norm": 20553.001953125, + "total_spectral_norm": 2.4032208919525146, + "layer_1_update_fnorm": 0.5517145395278931, + "layer_1_max_l1_linf_norm": 0.4240044057369232, + "layer_1_max_spectral_norm": 0.012064870446920395, + "layer_2_update_fnorm": 0.5149884819984436, + "layer_2_max_l1_linf_norm": 0.42518556118011475, + "layer_2_max_spectral_norm": 0.01203796174377203, + "layer_3_update_fnorm": 0.5502603650093079, + "layer_3_max_l1_linf_norm": 0.4238394498825073, + "layer_3_max_spectral_norm": 0.012046613730490208, + "layer_4_update_fnorm": 0.5834248065948486, + "layer_4_max_l1_linf_norm": 0.42025864124298096, + "layer_4_max_spectral_norm": 0.012045836076140404, + "layer_5_update_fnorm": 0.5905757546424866, + "layer_5_max_l1_linf_norm": 0.4014524519443512, + "layer_5_max_spectral_norm": 0.012051212601363659, + "layer_6_update_fnorm": 0.5975830554962158, + "layer_6_max_l1_linf_norm": 0.40979135036468506, + "layer_6_max_spectral_norm": 0.012053766287863255, + "layer_7_update_fnorm": 0.5937884449958801, + "layer_7_max_l1_linf_norm": 0.4021172523498535, + "layer_7_max_spectral_norm": 0.012045467272400856, + "layer_8_update_fnorm": 0.5901637077331543, + "layer_8_max_l1_linf_norm": 0.4065481424331665, + "layer_8_max_spectral_norm": 0.012048405595123768, + "layer_9_update_fnorm": 0.5924262404441833, + "layer_9_max_l1_linf_norm": 0.4012448787689209, + "layer_9_max_spectral_norm": 0.012049604207277298, + "layer_10_update_fnorm": 0.5929188132286072, + "layer_10_max_l1_linf_norm": 0.40085500478744507, + "layer_10_max_spectral_norm": 0.01204752828925848, + "layer_11_update_fnorm": 0.5861798524856567, + "layer_11_max_l1_linf_norm": 0.4163603186607361, + "layer_11_max_spectral_norm": 0.012043604627251625, + "layer_12_update_fnorm": 0.5971269011497498, + "layer_12_max_l1_linf_norm": 0.44596460461616516, + "layer_12_max_spectral_norm": 0.012047083117067814, + "total_sharpness": 0.0015726719284430146, + "ip_v_neg_g": 0.003898865543305874, + 
"cos_v_neg_g": 0.0014923971612006426, + "v_norm": 2.4032206535339355, + "g_norm": 1.0870766639709473, + "hv_norm": 0.3298543095588684, + "cos_v_hv": 0.011458021588623524, + "hg_norm": 23.130992889404297, + "cos_g_hg": 0.4416569471359253, + "v_parallel_norm": 0.00041232386138290167, + "v_perp_norm": 2.4032206535339355, + "layer_1_v_norm": 0.5517145395278931, + "layer_1_cos_v_neg_g": 0.0019792160019278526, + "layer_2_v_norm": 0.5149884819984436, + "layer_2_cos_v_neg_g": 0.0012445765314623713, + "layer_3_v_norm": 0.5502603650093079, + "layer_3_cos_v_neg_g": 0.0021635862067341805, + "layer_4_v_norm": 0.5834248065948486, + "layer_4_cos_v_neg_g": 0.0021319356746971607, + "layer_5_v_norm": 0.5905757546424866, + "layer_5_cos_v_neg_g": 0.002286130329594016, + "layer_6_v_norm": 0.5975830554962158, + "layer_6_cos_v_neg_g": 0.0026613115333020687, + "layer_7_v_norm": 0.5937884449958801, + "layer_7_cos_v_neg_g": 0.003489159280434251, + "layer_8_v_norm": 0.5901637077331543, + "layer_8_cos_v_neg_g": 0.0031650026794523, + "layer_9_v_norm": 0.5924262404441833, + "layer_9_cos_v_neg_g": 0.0033371541649103165, + "layer_10_v_norm": 0.5929188132286072, + "layer_10_cos_v_neg_g": 0.002571498043835163, + "layer_11_v_norm": 0.586179792881012, + "layer_11_cos_v_neg_g": 0.0018833400681614876, + "layer_12_v_norm": 0.5971269011497498, + "layer_12_cos_v_neg_g": 0.0015785170253366232, + "layer_1_sharpness": 0.0033458829857409, + "layer_2_sharpness": 0.00052532838890329, + "layer_3_sharpness": 0.0005926228477619588, + "layer_4_sharpness": 0.00021130860841367394, + "layer_5_sharpness": 0.00030073971720412374, + "layer_6_sharpness": 0.00038439113995991647, + "layer_7_sharpness": 0.000532117672264576, + "layer_8_sharpness": 0.0004058400518260896, + "layer_9_sharpness": 0.0003618142509367317, + "layer_10_sharpness": 0.0002081815036945045, + "layer_11_sharpness": 0.00016590637096669525, + "layer_12_sharpness": 0.0002917600504588336 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..be6f6a6d0fc7ed21a7425e4598125f1206a0d7e5 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.427493095397949, + "total_l1_linf_norm": 20772.33203125, + "total_spectral_norm": 2.427493095397949, + "layer_1_update_fnorm": 0.5710433721542358, + "layer_1_max_l1_linf_norm": 0.42010658979415894, + "layer_1_max_spectral_norm": 0.01205573882907629, + "layer_2_update_fnorm": 0.5262197852134705, + "layer_2_max_l1_linf_norm": 0.4601765275001526, + "layer_2_max_spectral_norm": 0.0120387589558959, + "layer_3_update_fnorm": 0.5608795285224915, + "layer_3_max_l1_linf_norm": 0.4540257453918457, + "layer_3_max_spectral_norm": 0.012049003504216671, + "layer_4_update_fnorm": 0.5865930318832397, + "layer_4_max_l1_linf_norm": 0.4245821237564087, + "layer_4_max_spectral_norm": 0.012051264755427837, + "layer_5_update_fnorm": 0.5947005152702332, + "layer_5_max_l1_linf_norm": 0.4144882559776306, + "layer_5_max_spectral_norm": 0.012053994461894035, + "layer_6_update_fnorm": 0.5983116626739502, + "layer_6_max_l1_linf_norm": 0.4155172109603882, + "layer_6_max_spectral_norm": 0.012046068906784058, + "layer_7_update_fnorm": 0.5966641306877136, + "layer_7_max_l1_linf_norm": 0.4151296019554138, + 
"layer_7_max_spectral_norm": 0.012044030241668224, + "layer_8_update_fnorm": 0.5947635173797607, + "layer_8_max_l1_linf_norm": 0.41014623641967773, + "layer_8_max_spectral_norm": 0.012046797201037407, + "layer_9_update_fnorm": 0.5971493721008301, + "layer_9_max_l1_linf_norm": 0.4042530059814453, + "layer_9_max_spectral_norm": 0.012043672613799572, + "layer_10_update_fnorm": 0.6015668511390686, + "layer_10_max_l1_linf_norm": 0.41550564765930176, + "layer_10_max_spectral_norm": 0.012045561335980892, + "layer_11_update_fnorm": 0.5985661745071411, + "layer_11_max_l1_linf_norm": 0.41998594999313354, + "layer_11_max_spectral_norm": 0.012044458650052547, + "layer_12_update_fnorm": 0.6018497943878174, + "layer_12_max_l1_linf_norm": 0.43176940083503723, + "layer_12_max_spectral_norm": 0.012043633498251438, + "total_sharpness": 0.0015939592849463224, + "ip_v_neg_g": 0.0048612793907523155, + "cos_v_neg_g": 0.001698776031844318, + "v_norm": 2.427493095397949, + "g_norm": 1.1788443326950073, + "hv_norm": 0.36848390102386475, + "cos_v_hv": 0.01050066202878952, + "hg_norm": 25.688447952270508, + "cos_g_hg": 0.5711735486984253, + "v_parallel_norm": 0.0008020731620490551, + "v_perp_norm": 2.42749285697937, + "layer_1_v_norm": 0.5710433721542358, + "layer_1_cos_v_neg_g": 0.0024365619756281376, + "layer_2_v_norm": 0.5262197852134705, + "layer_2_cos_v_neg_g": 0.003641055431216955, + "layer_3_v_norm": 0.5608795285224915, + "layer_3_cos_v_neg_g": 0.003667427459731698, + "layer_4_v_norm": 0.5865930318832397, + "layer_4_cos_v_neg_g": 0.00046288545127026737, + "layer_5_v_norm": 0.5947005152702332, + "layer_5_cos_v_neg_g": 0.0017224422190338373, + "layer_6_v_norm": 0.5983116626739502, + "layer_6_cos_v_neg_g": 0.0016811754321679473, + "layer_7_v_norm": 0.5966641306877136, + "layer_7_cos_v_neg_g": 0.001576945185661316, + "layer_8_v_norm": 0.594763457775116, + "layer_8_cos_v_neg_g": 0.001606059493497014, + "layer_9_v_norm": 0.5971493721008301, + "layer_9_cos_v_neg_g": 0.003048309590667486, + "layer_10_v_norm": 0.6015668511390686, + "layer_10_cos_v_neg_g": 0.0030180809553712606, + "layer_11_v_norm": 0.5985661745071411, + "layer_11_cos_v_neg_g": 0.0029094647616147995, + "layer_12_v_norm": 0.6018497943878174, + "layer_12_cos_v_neg_g": 0.004357109311968088, + "layer_1_sharpness": 0.003043179865926504, + "layer_2_sharpness": 0.00035747705260291696, + "layer_3_sharpness": 0.0005338899791240692, + "layer_4_sharpness": 0.0003264735278207809, + "layer_5_sharpness": 0.00028101468342356384, + "layer_6_sharpness": 0.00043020257726311684, + "layer_7_sharpness": 0.0005163365276530385, + "layer_8_sharpness": 0.0003911748353857547, + "layer_9_sharpness": 0.0002835538180079311, + "layer_10_sharpness": 0.00016195111675187945, + "layer_11_sharpness": 0.00015948258806020021, + "layer_12_sharpness": 0.0002728686959017068 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..7f9dca2ae9e6fa64568d66c282aa1db90dfd605c --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 2.4234955310821533, + "total_l1_linf_norm": 20736.1328125, + "total_spectral_norm": 2.4234955310821533, + "layer_1_update_fnorm": 0.5715901255607605, + "layer_1_max_l1_linf_norm": 0.4140845835208893, + 
"layer_1_max_spectral_norm": 0.012055061757564545, + "layer_2_update_fnorm": 0.5289150476455688, + "layer_2_max_l1_linf_norm": 0.45234090089797974, + "layer_2_max_spectral_norm": 0.012037928216159344, + "layer_3_update_fnorm": 0.5568296313285828, + "layer_3_max_l1_linf_norm": 0.4237585663795471, + "layer_3_max_spectral_norm": 0.012048564851284027, + "layer_4_update_fnorm": 0.5894071459770203, + "layer_4_max_l1_linf_norm": 0.4276488423347473, + "layer_4_max_spectral_norm": 0.01204654946923256, + "layer_5_update_fnorm": 0.5953892469406128, + "layer_5_max_l1_linf_norm": 0.4123898148536682, + "layer_5_max_spectral_norm": 0.012048834003508091, + "layer_6_update_fnorm": 0.5992728471755981, + "layer_6_max_l1_linf_norm": 0.4144565463066101, + "layer_6_max_spectral_norm": 0.01206332165747881, + "layer_7_update_fnorm": 0.5943577289581299, + "layer_7_max_l1_linf_norm": 0.4098561108112335, + "layer_7_max_spectral_norm": 0.012045676819980145, + "layer_8_update_fnorm": 0.5930943489074707, + "layer_8_max_l1_linf_norm": 0.40870773792266846, + "layer_8_max_spectral_norm": 0.012040777131915092, + "layer_9_update_fnorm": 0.5948628783226013, + "layer_9_max_l1_linf_norm": 0.40820741653442383, + "layer_9_max_spectral_norm": 0.012047545053064823, + "layer_10_update_fnorm": 0.600639283657074, + "layer_10_max_l1_linf_norm": 0.40886616706848145, + "layer_10_max_spectral_norm": 0.012045036070048809, + "layer_11_update_fnorm": 0.597356379032135, + "layer_11_max_l1_linf_norm": 0.4166156053543091, + "layer_11_max_spectral_norm": 0.012043007649481297, + "layer_12_update_fnorm": 0.6023346781730652, + "layer_12_max_l1_linf_norm": 0.44075843691825867, + "layer_12_max_spectral_norm": 0.012042745016515255, + "total_sharpness": 0.0017728224629536271, + "ip_v_neg_g": 0.006636998616158962, + "cos_v_neg_g": 0.0024689638521522284, + "v_norm": 2.4234955310821533, + "g_norm": 1.1092125177383423, + "hv_norm": 0.4135291278362274, + "cos_v_hv": 0.0103896614164114, + "hg_norm": 17.882139205932617, + "cos_g_hg": 0.5788334012031555, + "v_parallel_norm": 0.0006044938345439732, + "v_perp_norm": 2.4234955310821533, + "layer_1_v_norm": 0.5715901255607605, + "layer_1_cos_v_neg_g": 0.0031497604213654995, + "layer_2_v_norm": 0.5289150476455688, + "layer_2_cos_v_neg_g": 0.0034619364887475967, + "layer_3_v_norm": 0.5568296313285828, + "layer_3_cos_v_neg_g": 0.004606952890753746, + "layer_4_v_norm": 0.5894071459770203, + "layer_4_cos_v_neg_g": 0.0029036570340394974, + "layer_5_v_norm": 0.5953892469406128, + "layer_5_cos_v_neg_g": 0.0037960419431328773, + "layer_6_v_norm": 0.5992728471755981, + "layer_6_cos_v_neg_g": 0.004536700434982777, + "layer_7_v_norm": 0.5943577289581299, + "layer_7_cos_v_neg_g": 0.005823183339089155, + "layer_8_v_norm": 0.5930943489074707, + "layer_8_cos_v_neg_g": 0.004067583940923214, + "layer_9_v_norm": 0.5948628783226013, + "layer_9_cos_v_neg_g": 0.0038832551799714565, + "layer_10_v_norm": 0.600639283657074, + "layer_10_cos_v_neg_g": 0.0024720586370676756, + "layer_11_v_norm": 0.597356379032135, + "layer_11_cos_v_neg_g": 0.002299686660990119, + "layer_12_v_norm": 0.6023346781730652, + "layer_12_cos_v_neg_g": 0.0018405659357085824, + "layer_1_sharpness": 0.002913978649303317, + "layer_2_sharpness": 0.00041703259921632707, + "layer_3_sharpness": 0.001174330827780068, + "layer_4_sharpness": 0.0002398189390078187, + "layer_5_sharpness": 0.00035384370130486786, + "layer_6_sharpness": 0.0005108721670694649, + "layer_7_sharpness": 0.0007122704992070794, + "layer_8_sharpness": 0.0004465309320949018, + "layer_9_sharpness": 
0.0003611411666497588, + "layer_10_sharpness": 0.000154650624608621, + "layer_11_sharpness": 0.000134824585984461, + "layer_12_sharpness": 0.00016525352839380503 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/training_log.txt b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..8b57c031dbcdf33cd5db567479137c3441db0d57 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.01_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + 
# first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + 
write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. 
+ # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. 
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
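# NOTE (editor's aside; a minimal standalone sketch, not part of the logged training script):
# the "total_sharpness" values written to the sharpness_step_*.json files above are the
# Rayleigh quotient of the training-loss Hessian along the optimizer's update direction,
# sharpness(v) = v^T H v / ||v||^2, computed with a double-backward Hessian-vector product
# rather than by materializing H. The toy model and variable names below are hypothetical;
# the logged script applies the same pattern with v = update_direction_v and averages the
# resulting scalars across DDP ranks via dist.all_reduce(..., op=ReduceOp.AVG).
import torch
import torch.nn.functional as F

model = torch.nn.Linear(16, 4)                         # stand-in for the GPT model
x, y = torch.randn(8, 16), torch.randint(0, 4, (8,))   # stand-in for one micro-batch

loss = F.cross_entropy(model(x), y)
params = [p for p in model.parameters() if p.requires_grad]

# First backward with create_graph=True so the gradients themselves can be differentiated.
grads = torch.autograd.grad(loss, params, create_graph=True)

# Probe direction v; here simply -grad, whereas the script uses its precomputed update_direction_v.
v = [-g.detach() for g in grads]

# Hessian-vector product: differentiating <g, v> w.r.t. the parameters yields H v.
g_dot_v = sum((g * vi).sum() for g, vi in zip(grads, v))
hv = torch.autograd.grad(g_dot_v, params)

# Directional sharpness = v^T H v / ||v||^2.
vhv = sum((h * vi).sum() for h, vi in zip(hv, v))
v_norm_sq = sum((vi * vi).sum() for vi in v)
print(f"directional sharpness: {(vhv / v_norm_sq).item():.6f}")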
+ loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = ; cos_v_neg_g = / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = 
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
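As a concrete instance of the batch-size bookkeeping above (values are the parser defaults; on multiple ranks tokens_per_fwdbwd scales with the world size):

# Worked example with the argparse defaults: batch_size=4, sequence_length=64, total_batch_size=256, one process.
B, T, world_size, total_batch_size = 4, 64, 1, 256
tokens_per_fwdbwd = B * T * world_size                    # 256 tokens per micro-batch across all ranks
assert total_batch_size % tokens_per_fwdbwd == 0
grad_accum_steps = total_batch_size // tokens_per_fwdbwd  # -> 1 micro-step per optimizer step
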
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
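To make the dim-based split below concrete: 1-D tensors include not only biases but also LayerNorm scales, so those fall into the no-decay group even though they are named "weight". A tiny illustration on a toy module (names are illustrative):

import torch

toy = torch.nn.Sequential(torch.nn.Linear(4, 4), torch.nn.LayerNorm(4))
decay   = [n for n, p in toy.named_parameters() if p.dim() >= 2]  # weight matrices -> weight decay
nodecay = [n for n, p in toy.named_parameters() if p.dim() < 2]   # biases and LayerNorm params -> no decay
print(decay)    # ['0.weight']
print(nodecay)  # ['0.bias', '1.weight', '1.bias']
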
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + 
device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
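For post-hoc analysis, the per-step JSON dumps written above can simply be reloaded; a small sketch (the glob pattern assumes the working directory is the output_dir, and the metrics picked out are just examples):

import glob
import json

for path in sorted(glob.glob("opt_*/sharpness_step_*.json")):
    with open(path) as f:
        metrics = json.load(f)
    print(path, metrics.get("total_sharpness"), metrics.get("cos_v_neg_g"))
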
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
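For intuition about what the captured difference of parameters represents: it is the step the optimizer actually applied, which for plain SGD equals lr * grad and for Adam or Muon is the preconditioned update. A toy check (names are illustrative):

import torch

p = torch.nn.Parameter(torch.randn(3))
opt = torch.optim.SGD([p], lr=0.1)
(p * p).sum().backward()
before = p.detach().clone()
opt.step()
update = before - p.detach()                 # same quantity as last_training_update below
assert torch.allclose(update, 0.1 * p.grad)  # for SGD the applied update is exactly lr * grad
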
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026603 +step:0 train loss:11.019229 +step:1 train loss:11.023246 +step:2 train loss:11.013976 +step:3 train loss:11.007523 +step:4 train loss:10.996517 +step:5 train loss:10.982954 +step:6 train loss:10.964798 +step:7 train loss:10.945922 +step:8 train loss:10.925972 +step:9 train loss:10.899259 +step:10 train loss:10.871614 +step:11 train loss:10.846461 +step:12 train loss:10.807505 +step:13 train loss:10.773035 +step:14 train loss:10.734206 +step:15 train loss:10.694715 +step:16 train loss:10.653508 +step:17 train loss:10.611848 +step:18 train loss:10.568583 +step:19 train loss:10.519562 +step:20 train loss:10.468546 +step:21 train loss:10.423265 +step:22 train loss:10.353336 +step:23 train loss:10.313039 +step:24 train loss:10.244669 +step:25 train loss:10.203314 +step:26 train loss:10.138659 +step:27 train loss:10.074628 +step:28 train loss:10.030472 +step:29 train loss:9.970016 +step:30 train loss:9.913362 +step:31 train loss:9.837315 +step:32 train loss:9.768187 +step:33 train loss:9.711132 +step:34 train loss:9.666184 +step:35 train loss:9.579037 +step:36 train loss:9.519515 +step:37 train loss:9.437042 +step:38 
train loss:9.398843 +step:39 train loss:9.317867 +step:40 train loss:9.259603 +step:41 train loss:9.172542 +step:42 train loss:9.134124 +step:43 train loss:9.025968 +step:44 train loss:8.967707 +step:45 train loss:8.908033 +step:46 train loss:8.855562 +step:47 train loss:8.794575 +step:48 train loss:8.710090 +step:49 train loss:8.635927 +step:50 train loss:8.550762 +step:51 train loss:8.496268 +step:52 train loss:8.447741 +step:53 train loss:8.389828 +step:54 train loss:8.320557 +step:55 train loss:8.251613 +step:56 train loss:8.172322 +step:57 train loss:8.140854 +step:58 train loss:8.045193 +step:59 train loss:8.015133 +step:60 train loss:7.942864 +step:61 train loss:7.891543 +step:62 train loss:7.830685 +step:63 train loss:7.826133 +step:64 train loss:7.704112 +step:65 train loss:7.691876 +step:66 train loss:7.642085 +step:67 train loss:7.624959 +step:68 train loss:7.550340 +step:69 train loss:7.504362 +step:70 train loss:7.439540 +step:71 train loss:7.406498 +step:72 train loss:7.400888 +step:73 train loss:7.337524 +step:74 train loss:7.331820 +step:75 train loss:7.258519 +step:76 train loss:7.326904 +step:77 train loss:7.242216 +step:78 train loss:7.015493 +step:79 train loss:7.152062 +step:80 train loss:7.110520 +step:81 train loss:7.171271 +step:82 train loss:7.127709 +step:83 train loss:7.085556 +step:84 train loss:7.037077 +step:85 train loss:7.009846 +step:86 train loss:6.990093 +step:87 train loss:6.951274 +step:88 train loss:6.949846 +step:89 train loss:6.899515 +step:90 train loss:6.941468 +step:91 train loss:6.938320 +step:92 train loss:6.932163 +step:93 train loss:6.869894 +step:94 train loss:6.831177 +step:95 train loss:6.769940 +step:96 train loss:6.867341 +step:97 train loss:6.803012 +step:98 train loss:6.783773 +step:99 train loss:6.752156 +step:100 train loss:6.772019 +step:101 train loss:6.679177 +step:102 train loss:6.684422 +step:103 train loss:6.668892 +step:104 train loss:6.695322 +step:105 train loss:6.750721 +step:106 train loss:6.689641 +step:107 train loss:6.640507 +step:108 train loss:6.658325 +step:109 train loss:6.691808 +step:110 train loss:6.614050 +step:111 train loss:6.624949 +step:112 train loss:6.615903 +step:113 train loss:6.569520 +step:114 train loss:6.633804 +step:115 train loss:6.576949 +step:116 train loss:6.559509 +step:117 train loss:6.496325 +step:118 train loss:6.550396 +step:119 train loss:6.501316 +step:120 train loss:6.514649 +step:121 train loss:6.431653 +step:122 train loss:6.532758 +step:123 train loss:6.449002 +step:124 train loss:6.433882 +step:125 train loss:6.409997 +step:126 train loss:6.505264 +step:127 train loss:6.423430 +step:128 train loss:6.470271 +step:129 train loss:6.446715 +step:130 train loss:6.471634 +step:131 train loss:6.418997 +step:132 train loss:6.344923 +step:133 train loss:6.399744 +step:134 train loss:6.383831 +step:135 train loss:6.288797 +step:136 train loss:6.334612 +step:137 train loss:6.339234 +step:138 train loss:6.281179 +step:139 train loss:6.356125 +step:140 train loss:6.270537 +step:141 train loss:6.366986 +step:142 train loss:6.313366 +step:143 train loss:6.325694 +step:144 train loss:6.300095 +step:145 train loss:6.233513 +step:146 train loss:6.246185 +step:147 train loss:6.297647 +step:148 train loss:6.310094 +step:149 train loss:6.261733 +step:150 train loss:6.264907 +step:151 train loss:6.177632 +step:152 train loss:6.216796 +step:153 train loss:6.197422 +step:154 train loss:6.271405 +step:155 train loss:6.258977 +step:156 train loss:6.276359 +step:157 train loss:6.189855 +step:158 train 
loss:6.177125 +step:159 train loss:6.203653 +step:160 train loss:6.189944 +step:161 train loss:6.182697 +step:162 train loss:6.150129 +step:163 train loss:6.165080 +step:164 train loss:6.174673 +step:165 train loss:6.185133 +step:166 train loss:6.135384 +step:167 train loss:6.138091 +step:168 train loss:6.116367 +step:169 train loss:6.065403 +step:170 train loss:6.025559 +step:171 train loss:6.147853 +step:172 train loss:6.079422 +step:173 train loss:6.132003 +step:174 train loss:6.126131 +step:175 train loss:6.088630 +step:176 train loss:6.044816 +step:177 train loss:6.086645 +step:178 train loss:6.086368 +step:179 train loss:6.044495 +step:180 train loss:6.025206 +step:181 train loss:6.066195 +step:182 train loss:5.997334 +step:183 train loss:6.085493 +step:184 train loss:6.050149 +step:185 train loss:5.976524 +step:186 train loss:6.112009 +step:187 train loss:6.050535 +step:188 train loss:5.885036 +step:189 train loss:6.028591 +step:190 train loss:6.024618 +step:191 train loss:5.948327 +step:192 train loss:5.869309 +step:193 train loss:6.019401 +step:194 train loss:6.028189 +step:195 train loss:6.018365 +step:196 train loss:5.993128 +step:197 train loss:5.980657 +step:198 train loss:5.929914 +step:199 train loss:6.001265 +step:200 train loss:6.046730 +step:201 train loss:5.976793 +step:202 train loss:5.977530 +step:203 train loss:5.935971 +step:204 train loss:5.961458 +step:205 train loss:5.812288 +step:206 train loss:5.946193 +step:207 train loss:5.928328 +step:208 train loss:5.869894 +step:209 train loss:5.861178 +step:210 train loss:5.869923 +step:211 train loss:5.932390 +step:212 train loss:5.890006 +step:213 train loss:5.900966 +step:214 train loss:5.879606 +step:215 train loss:5.907051 +step:216 train loss:5.847891 +step:217 train loss:5.861577 +step:218 train loss:5.838611 +step:219 train loss:5.805411 +step:220 train loss:5.847487 +step:221 train loss:5.804999 +step:222 train loss:5.840322 +step:223 train loss:5.859401 +step:224 train loss:5.846596 +step:225 train loss:5.778966 +step:226 train loss:5.784008 +step:227 train loss:5.840920 +step:228 train loss:5.807262 +step:229 train loss:5.870996 +step:230 train loss:5.742246 +step:231 train loss:5.800277 +step:232 train loss:5.786249 +step:233 train loss:5.762417 +step:234 train loss:5.752871 +step:235 train loss:5.833981 +step:236 train loss:5.778479 +step:237 train loss:5.819302 +step:238 train loss:5.809218 +step:239 train loss:5.721063 +step:240 train loss:5.796069 +step:241 train loss:5.828054 +step:242 train loss:5.800133 +step:243 train loss:5.717589 +step:244 train loss:5.741918 +step:245 train loss:5.716806 +step:246 train loss:5.721690 +step:247 train loss:5.711172 +step:248 train loss:5.665224 +step:249 train loss:5.721617 +step:250 validation loss:5.705631 +step:250 train loss:5.685350 +step:251 train loss:5.727264 +step:252 train loss:5.677325 +step:253 train loss:5.682323 +step:254 train loss:5.648971 +step:255 train loss:5.687772 +step:256 train loss:5.671203 +step:257 train loss:5.730277 +step:258 train loss:5.636398 +step:259 train loss:5.655824 +step:260 train loss:5.619520 +step:261 train loss:5.621047 +step:262 train loss:5.675804 +step:263 train loss:5.646500 +step:264 train loss:5.613153 +step:265 train loss:5.618343 +step:266 train loss:5.602737 +step:267 train loss:5.635744 +step:268 train loss:5.575254 +step:269 train loss:5.600738 +step:270 train loss:5.624706 +step:271 train loss:5.621744 +step:272 train loss:5.559898 +step:273 train loss:5.637947 +step:274 train loss:5.551826 +step:275 train 
loss:5.586391 +step:276 train loss:5.556001 +step:277 train loss:5.550734 +step:278 train loss:5.528674 +step:279 train loss:5.510240 +step:280 train loss:5.576169 +step:281 train loss:5.643780 +step:282 train loss:5.520745 +step:283 train loss:5.542022 +step:284 train loss:5.508890 +step:285 train loss:5.574836 +step:286 train loss:5.529024 +step:287 train loss:5.503061 +step:288 train loss:5.484123 +step:289 train loss:5.506017 +step:290 train loss:5.558299 +step:291 train loss:5.485160 +step:292 train loss:5.539423 +step:293 train loss:5.468658 +step:294 train loss:5.587602 +step:295 train loss:5.484875 +step:296 train loss:5.532090 +step:297 train loss:5.557048 +step:298 train loss:5.447012 +step:299 train loss:5.522452 +step:300 train loss:5.446280 +step:301 train loss:5.461930 +step:302 train loss:5.441823 +step:303 train loss:5.447550 +step:304 train loss:5.485247 +step:305 train loss:5.410774 +step:306 train loss:5.430436 +step:307 train loss:5.438476 +step:308 train loss:5.345307 +step:309 train loss:5.482869 +step:310 train loss:5.450577 +step:311 train loss:5.430901 +step:312 train loss:5.408209 +step:313 train loss:5.432114 +step:314 train loss:5.409325 +step:315 train loss:5.365929 +step:316 train loss:5.355297 +step:317 train loss:5.329844 +step:318 train loss:5.325911 +step:319 train loss:5.413511 +step:320 train loss:5.320755 +step:321 train loss:5.374280 +step:322 train loss:5.365150 +step:323 train loss:5.422722 +step:324 train loss:5.367159 +step:325 train loss:5.381617 +step:326 train loss:5.381762 +step:327 train loss:5.373551 +step:328 train loss:5.340555 +step:329 train loss:5.361913 +step:330 train loss:5.280067 +step:331 train loss:5.306563 +step:332 train loss:5.284772 +step:333 train loss:5.211365 +step:334 train loss:5.309904 +step:335 train loss:5.364870 +step:336 train loss:5.507230 +step:337 train loss:5.368031 +step:338 train loss:5.279204 +step:339 train loss:5.237510 +step:340 train loss:5.240592 +step:341 train loss:5.228092 +step:342 train loss:5.285880 +step:343 train loss:5.279265 +step:344 train loss:5.225988 +step:345 train loss:5.192920 +step:346 train loss:5.254157 +step:347 train loss:5.182014 +step:348 train loss:5.192056 +step:349 train loss:5.128885 +step:350 train loss:5.156950 +step:351 train loss:5.214146 +step:352 train loss:5.177619 +step:353 train loss:5.201313 +step:354 train loss:5.143856 +step:355 train loss:5.196185 +step:356 train loss:5.140555 +step:357 train loss:5.223453 +step:358 train loss:5.251863 +step:359 train loss:5.083838 +step:360 train loss:5.197565 +step:361 train loss:5.202087 +step:362 train loss:5.163384 +step:363 train loss:5.106997 +step:364 train loss:5.249968 +step:365 train loss:5.163653 +step:366 train loss:5.137449 +step:367 train loss:5.155416 +step:368 train loss:5.113268 +step:369 train loss:5.106080 +step:370 train loss:5.155302 +step:371 train loss:5.090750 +step:372 train loss:5.155787 +step:373 train loss:5.094591 +step:374 train loss:5.095722 +step:375 train loss:5.126985 +step:376 train loss:5.116822 +step:377 train loss:4.998301 +step:378 train loss:5.066299 +step:379 train loss:5.110099 +step:380 train loss:5.041321 +step:381 train loss:5.106046 +step:382 train loss:5.105078 +step:383 train loss:5.062185 +step:384 train loss:5.048885 +step:385 train loss:5.034039 +step:386 train loss:5.063898 +step:387 train loss:5.065507 +step:388 train loss:5.025043 +step:389 train loss:5.039000 +step:390 train loss:5.016458 +step:391 train loss:5.038577 +step:392 train loss:5.007616 +step:393 train loss:5.002634 
+step:394 train loss:5.056135 +step:395 train loss:4.968003 +step:396 train loss:4.939794 +step:397 train loss:4.992949 +step:398 train loss:5.004671 +step:399 train loss:4.998287 +step:400 train loss:4.946284 +step:401 train loss:5.024747 +step:402 train loss:4.975865 +step:403 train loss:4.973597 +step:404 train loss:4.949785 +step:405 train loss:4.941242 +step:406 train loss:4.998694 +step:407 train loss:4.954595 +step:408 train loss:5.043372 +step:409 train loss:4.945483 +step:410 train loss:4.927773 +step:411 train loss:4.902666 +step:412 train loss:4.997159 +step:413 train loss:4.872772 +step:414 train loss:4.962679 +step:415 train loss:4.920531 +step:416 train loss:4.926502 +step:417 train loss:4.949012 +step:418 train loss:4.898771 +step:419 train loss:4.886902 +step:420 train loss:4.849479 +step:421 train loss:4.855528 +step:422 train loss:4.840669 +step:423 train loss:4.867751 +step:424 train loss:4.840325 +step:425 train loss:4.920842 +step:426 train loss:4.883013 +step:427 train loss:4.817660 +step:428 train loss:4.881880 +step:429 train loss:4.776133 +step:430 train loss:4.825005 +step:431 train loss:4.852685 +step:432 train loss:4.861917 +step:433 train loss:4.839549 +step:434 train loss:4.799856 +step:435 train loss:4.862437 +step:436 train loss:4.884470 +step:437 train loss:4.832180 +step:438 train loss:4.795071 +step:439 train loss:4.772840 +step:440 train loss:4.823282 +step:441 train loss:4.750484 +step:442 train loss:4.756699 +step:443 train loss:4.768260 +step:444 train loss:4.823656 +step:445 train loss:4.813422 +step:446 train loss:4.753643 +step:447 train loss:4.751773 +step:448 train loss:4.823531 +step:449 train loss:4.776398 +step:450 train loss:4.754754 +step:451 train loss:4.742436 +step:452 train loss:4.845181 +step:453 train loss:4.756678 +step:454 train loss:4.696789 +step:455 train loss:4.764713 +step:456 train loss:4.715150 +step:457 train loss:4.705475 +step:458 train loss:4.736870 +step:459 train loss:4.691133 +step:460 train loss:4.791309 +step:461 train loss:4.726173 +step:462 train loss:4.619133 +step:463 train loss:4.669542 +step:464 train loss:4.735946 +step:465 train loss:4.700665 +step:466 train loss:4.717409 +step:467 train loss:4.655766 +step:468 train loss:4.719848 +step:469 train loss:4.685119 +step:470 train loss:4.642733 +step:471 train loss:4.726292 +step:472 train loss:4.633183 +step:473 train loss:4.702936 +step:474 train loss:4.656411 +step:475 train loss:4.716108 +step:476 train loss:4.671982 +step:477 train loss:4.586359 +step:478 train loss:4.648400 +step:479 train loss:4.610679 +step:480 train loss:4.639505 +step:481 train loss:4.679858 +step:482 train loss:4.569738 +step:483 train loss:4.661646 +step:484 train loss:4.615051 +step:485 train loss:4.563360 +step:486 train loss:4.617974 +step:487 train loss:4.606719 +step:488 train loss:4.613152 +step:489 train loss:4.600410 +step:490 train loss:4.553434 +step:491 train loss:4.596862 +step:492 train loss:4.586006 +step:493 train loss:4.574013 +step:494 train loss:4.596169 +step:495 train loss:4.515651 +step:496 train loss:4.609721 +step:497 train loss:4.512631 +step:498 train loss:4.618112 +step:499 train loss:4.596008 +step:500 validation loss:4.529927 total_sharp:2.1252e-02 L1_sharp:1.2664e-02 L2_sharp:6.1356e-03 L3_sharp:6.5572e-03 L4_sharp:4.2533e-03 L5_sharp:4.3364e-03 L6_sharp:3.8712e-03 L7_sharp:3.5569e-03 L8_sharp:3.0696e-03 L9_sharp:2.8280e-03 L10_sharp:2.0117e-03 L11_sharp:2.0443e-03 L12_sharp:1.4785e-03 total_fnorm:1.7678e+00 total_l1_linf:1.5245e+04 total_spectral:1.7678e+00 
L1_fnorm:4.3569e-01 L2_fnorm:4.1970e-01 L3_fnorm:3.9994e-01 L4_fnorm:4.0399e-01 L5_fnorm:4.1732e-01 L6_fnorm:4.2715e-01 L7_fnorm:4.2687e-01 L8_fnorm:4.2807e-01 L9_fnorm:4.2773e-01 L10_fnorm:4.2923e-01 L11_fnorm:4.2862e-01 L12_fnorm:4.3079e-01 L1_l1linf:3.1618e-01 L2_l1linf:3.1676e-01 L3_l1linf:3.1860e-01 L4_l1linf:3.0794e-01 L5_l1linf:3.0100e-01 L6_l1linf:2.9857e-01 L7_l1linf:2.9697e-01 L8_l1linf:2.9606e-01 L9_l1linf:2.9540e-01 L10_l1linf:2.9616e-01 L11_l1linf:2.9770e-01 L12_l1linf:2.9642e-01 L1_spectral:8.6047e-03 L2_spectral:8.6062e-03 L3_spectral:8.6021e-03 L4_spectral:8.6068e-03 L5_spectral:8.6132e-03 L6_spectral:8.6139e-03 L7_spectral:8.6066e-03 L8_spectral:8.6026e-03 L9_spectral:8.6066e-03 L10_spectral:8.6161e-03 L11_spectral:8.6096e-03 L12_spectral:8.6065e-03 ip_v_neg_g:3.3585e-02 cos_v_neg_g:1.3305e-02 v_norm:1.7678e+00 g_norm:1.4279e+00 hv_norm:9.8790e-01 cos_v_hv:3.8030e-02 hg_norm:1.8352e+01 cos_g_hg:5.4614e-01 v_par:7.6348e-04 v_perp:1.7678e+00 L1_cos_v_neg_g:1.7292e-02 L1_v_norm:4.3569e-01 L2_cos_v_neg_g:2.2510e-02 L2_v_norm:4.1970e-01 L3_cos_v_neg_g:2.5940e-02 L3_v_norm:3.9994e-01 L4_cos_v_neg_g:2.1438e-02 L4_v_norm:4.0399e-01 L5_cos_v_neg_g:1.8679e-02 L5_v_norm:4.1732e-01 L6_cos_v_neg_g:1.5906e-02 L6_v_norm:4.2715e-01 L7_cos_v_neg_g:1.6263e-02 L7_v_norm:4.2687e-01 L8_cos_v_neg_g:1.5331e-02 L8_v_norm:4.2807e-01 L9_cos_v_neg_g:1.6010e-02 L9_v_norm:4.2773e-01 L10_cos_v_neg_g:1.5052e-02 L10_v_norm:4.2923e-01 L11_cos_v_neg_g:1.3345e-02 L11_v_norm:4.2862e-01 L12_cos_v_neg_g:1.0361e-02 L12_v_norm:4.3079e-01 +step:500 train loss:4.585774 +step:501 train loss:4.552898 +step:502 train loss:4.589885 +step:503 train loss:4.528292 +step:504 train loss:4.603385 +step:505 train loss:4.533231 +step:506 train loss:4.521620 +step:507 train loss:4.530206 +step:508 train loss:4.570536 +step:509 train loss:4.553686 +step:510 train loss:4.487299 +step:511 train loss:4.496852 +step:512 train loss:4.482425 +step:513 train loss:4.519936 +step:514 train loss:4.603581 +step:515 train loss:4.522987 +step:516 train loss:4.603035 +step:517 train loss:4.502434 +step:518 train loss:4.507423 +step:519 train loss:4.540465 +step:520 train loss:4.495446 +step:521 train loss:4.494444 +step:522 train loss:4.516583 +step:523 train loss:4.537715 +step:524 train loss:4.456863 +step:525 train loss:4.466236 +step:526 train loss:4.522288 +step:527 train loss:4.485918 +step:528 train loss:4.480917 +step:529 train loss:4.524433 +step:530 train loss:4.456000 +step:531 train loss:4.494139 +step:532 train loss:4.440446 +step:533 train loss:4.447229 +step:534 train loss:4.496601 +step:535 train loss:4.498888 +step:536 train loss:4.539354 +step:537 train loss:4.417925 +step:538 train loss:4.403348 +step:539 train loss:4.523984 +step:540 train loss:4.538183 +step:541 train loss:4.442895 +step:542 train loss:4.418660 +step:543 train loss:4.462294 +step:544 train loss:4.464484 +step:545 train loss:4.443033 +step:546 train loss:4.415787 +step:547 train loss:4.453538 +step:548 train loss:4.309346 +step:549 train loss:4.439051 +step:550 train loss:4.420018 +step:551 train loss:4.426729 +step:552 train loss:4.504603 +step:553 train loss:4.468841 +step:554 train loss:4.428837 +step:555 train loss:4.459358 +step:556 train loss:4.418719 +step:557 train loss:4.380685 +step:558 train loss:4.384448 +step:559 train loss:4.433719 +step:560 train loss:4.507881 +step:561 train loss:4.371499 +step:562 train loss:4.357252 +step:563 train loss:4.435063 +step:564 train loss:4.377435 +step:565 train loss:4.418982 +step:566 train loss:4.407678 
+step:567 train loss:4.416990 +step:568 train loss:4.465635 +step:569 train loss:4.418180 +step:570 train loss:4.339180 +step:571 train loss:4.385275 +step:572 train loss:4.352329 +step:573 train loss:4.403064 +step:574 train loss:4.453388 +step:575 train loss:4.370169 +step:576 train loss:4.396924 +step:577 train loss:4.384867 +step:578 train loss:4.397363 +step:579 train loss:4.422568 +step:580 train loss:4.358767 +step:581 train loss:4.415462 +step:582 train loss:4.394075 +step:583 train loss:4.405058 +step:584 train loss:4.380354 +step:585 train loss:4.345093 +step:586 train loss:4.374183 +step:587 train loss:4.448219 +step:588 train loss:4.346427 +step:589 train loss:4.407672 +step:590 train loss:4.430894 +step:591 train loss:4.341816 +step:592 train loss:4.350601 +step:593 train loss:4.339705 +step:594 train loss:4.313519 +step:595 train loss:4.389187 +step:596 train loss:4.364014 +step:597 train loss:4.368622 +step:598 train loss:4.338223 +step:599 train loss:4.366749 +step:600 train loss:4.305394 +step:601 train loss:4.320711 +step:602 train loss:4.334715 +step:603 train loss:4.346145 +step:604 train loss:4.353661 +step:605 train loss:4.383833 +step:606 train loss:4.327964 +step:607 train loss:4.313307 +step:608 train loss:4.328615 +step:609 train loss:4.315608 +step:610 train loss:4.299355 +step:611 train loss:4.320153 +step:612 train loss:4.329054 +step:613 train loss:4.249555 +step:614 train loss:4.303177 +step:615 train loss:4.360986 +step:616 train loss:4.288632 +step:617 train loss:4.333735 +step:618 train loss:4.281583 +step:619 train loss:4.330021 +step:620 train loss:4.361440 +step:621 train loss:4.273263 +step:622 train loss:4.376596 +step:623 train loss:4.341825 +step:624 train loss:4.305121 +step:625 train loss:4.341567 +step:626 train loss:4.310045 +step:627 train loss:4.323484 +step:628 train loss:4.318591 +step:629 train loss:4.245982 +step:630 train loss:4.305617 +step:631 train loss:4.255227 +step:632 train loss:4.277299 +step:633 train loss:4.306014 +step:634 train loss:4.298878 +step:635 train loss:4.260052 +step:636 train loss:4.323858 +step:637 train loss:4.246564 +step:638 train loss:4.193217 +step:639 train loss:4.313833 +step:640 train loss:4.267366 +step:641 train loss:4.282732 +step:642 train loss:4.337482 +step:643 train loss:4.214314 +step:644 train loss:4.332748 +step:645 train loss:4.268130 +step:646 train loss:4.280184 +step:647 train loss:4.289485 +step:648 train loss:4.366272 +step:649 train loss:4.281342 +step:650 train loss:4.284400 +step:651 train loss:4.236230 +step:652 train loss:4.234138 +step:653 train loss:4.220045 +step:654 train loss:4.250388 +step:655 train loss:4.289375 +step:656 train loss:4.242040 +step:657 train loss:4.288783 +step:658 train loss:4.211747 +step:659 train loss:4.302356 +step:660 train loss:4.284859 +step:661 train loss:4.317078 +step:662 train loss:4.307345 +step:663 train loss:4.297113 +step:664 train loss:4.202842 +step:665 train loss:4.219526 +step:666 train loss:4.234816 +step:667 train loss:4.280506 +step:668 train loss:4.270669 +step:669 train loss:4.250472 +step:670 train loss:4.281322 +step:671 train loss:4.245147 +step:672 train loss:4.208301 +step:673 train loss:4.298905 +step:674 train loss:4.258149 +step:675 train loss:4.197976 +step:676 train loss:4.297354 +step:677 train loss:4.232351 +step:678 train loss:4.207238 +step:679 train loss:4.239925 +step:680 train loss:4.216156 +step:681 train loss:4.267607 +step:682 train loss:4.177334 +step:683 train loss:4.242445 +step:684 train loss:4.297495 +step:685 
train loss:4.232507 +step:686 train loss:4.292479 +step:687 train loss:4.266713 +step:688 train loss:4.193160 +step:689 train loss:4.207836 +step:690 train loss:4.192142 +step:691 train loss:4.228231 +step:692 train loss:4.225905 +step:693 train loss:4.209382 +step:694 train loss:4.229268 +step:695 train loss:4.190988 +step:696 train loss:4.138182 +step:697 train loss:4.275574 +step:698 train loss:4.165968 +step:699 train loss:4.202998 +step:700 train loss:4.244051 +step:701 train loss:4.160446 +step:702 train loss:4.220906 +step:703 train loss:4.177355 +step:704 train loss:4.114944 +step:705 train loss:4.217996 +step:706 train loss:4.087342 +step:707 train loss:4.153369 +step:708 train loss:4.243365 +step:709 train loss:4.200624 +step:710 train loss:4.197322 +step:711 train loss:4.188602 +step:712 train loss:4.180155 +step:713 train loss:4.145703 +step:714 train loss:4.215360 +step:715 train loss:4.101994 +step:716 train loss:4.274605 +step:717 train loss:4.153247 +step:718 train loss:4.227986 +step:719 train loss:4.205972 +step:720 train loss:4.165093 +step:721 train loss:4.231640 +step:722 train loss:4.181236 +step:723 train loss:4.213678 +step:724 train loss:4.218473 +step:725 train loss:4.134568 +step:726 train loss:4.164893 +step:727 train loss:4.191069 +step:728 train loss:4.171760 +step:729 train loss:4.143298 +step:730 train loss:4.210672 +step:731 train loss:4.253418 +step:732 train loss:4.202603 +step:733 train loss:4.186059 +step:734 train loss:4.201898 +step:735 train loss:4.267599 +step:736 train loss:4.171523 +step:737 train loss:4.175026 +step:738 train loss:4.226520 +step:739 train loss:4.148693 +step:740 train loss:4.200137 +step:741 train loss:4.282766 +step:742 train loss:4.160699 +step:743 train loss:4.145060 +step:744 train loss:4.175518 +step:745 train loss:4.085627 +step:746 train loss:4.148709 +step:747 train loss:4.159755 +step:748 train loss:4.148906 +step:749 train loss:4.174998 +step:750 validation loss:4.108386 +step:750 train loss:4.103442 +step:751 train loss:4.172759 +step:752 train loss:4.102982 +step:753 train loss:4.163268 +step:754 train loss:4.157953 +step:755 train loss:4.222014 +step:756 train loss:4.183686 +step:757 train loss:4.242377 +step:758 train loss:4.148207 +step:759 train loss:4.162420 +step:760 train loss:4.117638 +step:761 train loss:4.151200 +step:762 train loss:4.115355 +step:763 train loss:4.126862 +step:764 train loss:4.117970 +step:765 train loss:4.118182 +step:766 train loss:4.183479 +step:767 train loss:4.316219 +step:768 train loss:4.144509 +step:769 train loss:4.159748 +step:770 train loss:4.200550 +step:771 train loss:4.266404 +step:772 train loss:4.172510 +step:773 train loss:4.114182 +step:774 train loss:4.137728 +step:775 train loss:4.131999 +step:776 train loss:4.155566 +step:777 train loss:4.124028 +step:778 train loss:4.069862 +step:779 train loss:4.104513 +step:780 train loss:4.169363 +step:781 train loss:4.106789 +step:782 train loss:4.120590 +step:783 train loss:4.101069 +step:784 train loss:4.109801 +step:785 train loss:4.103448 +step:786 train loss:4.097015 +step:787 train loss:4.051908 +step:788 train loss:4.144958 +step:789 train loss:4.112936 +step:790 train loss:4.088296 +step:791 train loss:4.161595 +step:792 train loss:4.192608 +step:793 train loss:4.148983 +step:794 train loss:4.132856 +step:795 train loss:4.104427 +step:796 train loss:4.403927 +step:797 train loss:4.116012 +step:798 train loss:4.105826 +step:799 train loss:4.114019 +step:800 train loss:4.200778 +step:801 train loss:4.103792 +step:802 train 
loss:4.245875 +step:803 train loss:4.131763 +step:804 train loss:4.078671 +step:805 train loss:4.142408 +step:806 train loss:4.087129 +step:807 train loss:4.102342 +step:808 train loss:4.112734 +step:809 train loss:4.080820 +step:810 train loss:4.060952 +step:811 train loss:4.143137 +step:812 train loss:4.108229 +step:813 train loss:4.123587 +step:814 train loss:4.197377 +step:815 train loss:4.162910 +step:816 train loss:4.079054 +step:817 train loss:4.129586 +step:818 train loss:4.090578 +step:819 train loss:4.086707 +step:820 train loss:4.081038 +step:821 train loss:4.043580 +step:822 train loss:4.046406 +step:823 train loss:4.123183 +step:824 train loss:4.025504 +step:825 train loss:4.012349 +step:826 train loss:4.091731 +step:827 train loss:3.986081 +step:828 train loss:4.067245 +step:829 train loss:4.063597 +step:830 train loss:4.070295 +step:831 train loss:4.113623 +step:832 train loss:4.156504 +step:833 train loss:4.112904 +step:834 train loss:4.103158 +step:835 train loss:4.065198 +step:836 train loss:4.070228 +step:837 train loss:4.057990 +step:838 train loss:4.039255 +step:839 train loss:4.050943 +step:840 train loss:4.085379 +step:841 train loss:4.082271 +step:842 train loss:4.075242 +step:843 train loss:4.080337 +step:844 train loss:4.032207 +step:845 train loss:4.022273 +step:846 train loss:4.124362 +step:847 train loss:4.090425 +step:848 train loss:4.047215 +step:849 train loss:4.069607 +step:850 train loss:4.092517 +step:851 train loss:4.051697 +step:852 train loss:4.147768 +step:853 train loss:4.033173 +step:854 train loss:4.061967 +step:855 train loss:4.064031 +step:856 train loss:4.022819 +step:857 train loss:4.073367 +step:858 train loss:4.103498 +step:859 train loss:4.007064 +step:860 train loss:4.034873 +step:861 train loss:4.081039 +step:862 train loss:4.017602 +step:863 train loss:4.031501 +step:864 train loss:4.019556 +step:865 train loss:4.037346 +step:866 train loss:4.066443 +step:867 train loss:4.190739 +step:868 train loss:4.034567 +step:869 train loss:4.049510 +step:870 train loss:3.991880 +step:871 train loss:3.987559 +step:872 train loss:4.064713 +step:873 train loss:4.033387 +step:874 train loss:4.055140 +step:875 train loss:3.981134 +step:876 train loss:4.064558 +step:877 train loss:4.007230 +step:878 train loss:4.105136 +step:879 train loss:3.993156 +step:880 train loss:4.108380 +step:881 train loss:4.028563 +step:882 train loss:3.992543 +step:883 train loss:4.044201 +step:884 train loss:4.070032 +step:885 train loss:4.005341 +step:886 train loss:4.018623 +step:887 train loss:4.028938 +step:888 train loss:4.139503 +step:889 train loss:4.070729 +step:890 train loss:4.007999 +step:891 train loss:3.971387 +step:892 train loss:3.968994 +step:893 train loss:4.043179 +step:894 train loss:3.999788 +step:895 train loss:3.991089 +step:896 train loss:4.070422 +step:897 train loss:3.997806 +step:898 train loss:4.026817 +step:899 train loss:4.029060 +step:900 train loss:4.058955 +step:901 train loss:3.986449 +step:902 train loss:4.019365 +step:903 train loss:4.127034 +step:904 train loss:4.132484 +step:905 train loss:4.006406 +step:906 train loss:4.021684 +step:907 train loss:4.053998 +step:908 train loss:4.059157 +step:909 train loss:4.000900 +step:910 train loss:4.050011 +step:911 train loss:4.164371 +step:912 train loss:3.966823 +step:913 train loss:4.028718 +step:914 train loss:3.976451 +step:915 train loss:4.019580 +step:916 train loss:4.069799 +step:917 train loss:4.035275 +step:918 train loss:4.107902 +step:919 train loss:4.194755 +step:920 train loss:3.932091 
+step:921 train loss:4.058820 +step:922 train loss:4.025219 +step:923 train loss:3.944525 +step:924 train loss:3.994875 +step:925 train loss:3.956488 +step:926 train loss:4.059072 +step:927 train loss:3.958829 +step:928 train loss:4.041527 +step:929 train loss:4.010660 +step:930 train loss:4.017655 +step:931 train loss:4.049887 +step:932 train loss:3.987176 +step:933 train loss:4.034625 +step:934 train loss:4.072289 +step:935 train loss:4.060019 +step:936 train loss:4.025358 +step:937 train loss:4.026201 +step:938 train loss:4.020686 +step:939 train loss:3.925039 +step:940 train loss:4.020648 +step:941 train loss:3.963658 +step:942 train loss:3.947649 +step:943 train loss:4.053690 +step:944 train loss:4.007202 +step:945 train loss:4.007936 +step:946 train loss:4.021221 +step:947 train loss:4.190557 +step:948 train loss:3.978697 +step:949 train loss:4.020529 +step:950 train loss:3.956644 +step:951 train loss:4.000423 +step:952 train loss:4.046730 +step:953 train loss:3.974123 +step:954 train loss:4.020550 +step:955 train loss:3.953632 +step:956 train loss:3.983939 +step:957 train loss:3.981016 +step:958 train loss:4.058896 +step:959 train loss:3.988297 +step:960 train loss:4.087061 +step:961 train loss:4.024837 +step:962 train loss:3.983708 +step:963 train loss:3.972704 +step:964 train loss:4.001576 +step:965 train loss:3.926851 +step:966 train loss:3.942772 +step:967 train loss:4.001485 +step:968 train loss:4.006946 +step:969 train loss:3.957844 +step:970 train loss:4.006090 +step:971 train loss:3.996022 +step:972 train loss:3.906958 +step:973 train loss:4.015729 +step:974 train loss:3.951002 +step:975 train loss:4.042357 +step:976 train loss:3.993661 +step:977 train loss:3.982962 +step:978 train loss:3.983568 +step:979 train loss:3.973484 +step:980 train loss:3.976516 +step:981 train loss:3.949888 +step:982 train loss:3.967588 +step:983 train loss:3.978086 +step:984 train loss:3.999902 +step:985 train loss:3.966803 +step:986 train loss:3.999490 +step:987 train loss:4.031756 +step:988 train loss:4.010830 +step:989 train loss:3.979457 +step:990 train loss:3.970473 +step:991 train loss:3.887993 +step:992 train loss:3.963942 +step:993 train loss:3.988882 +step:994 train loss:3.921917 +step:995 train loss:3.937145 +step:996 train loss:3.979865 +step:997 train loss:3.943549 +step:998 train loss:3.941699 +step:999 train loss:3.981589 +step:1000 validation loss:3.911654 total_sharp:6.0507e-03 L1_sharp:1.0322e-02 L2_sharp:2.7253e-03 L3_sharp:2.0494e-03 L4_sharp:7.1494e-04 L5_sharp:8.1648e-04 L6_sharp:9.2701e-04 L7_sharp:1.1681e-03 L8_sharp:8.7500e-04 L9_sharp:7.4996e-04 L10_sharp:5.5929e-04 L11_sharp:4.2082e-04 L12_sharp:5.0537e-04 total_fnorm:2.4419e+00 total_l1_linf:2.0874e+04 total_spectral:2.4419e+00 L1_fnorm:5.9822e-01 L2_fnorm:5.5696e-01 L3_fnorm:5.5771e-01 L4_fnorm:5.8114e-01 L5_fnorm:5.9385e-01 L6_fnorm:6.0407e-01 L7_fnorm:6.0306e-01 L8_fnorm:6.0370e-01 L9_fnorm:6.0394e-01 L10_fnorm:6.0342e-01 L11_fnorm:6.0245e-01 L12_fnorm:6.0146e-01 L1_l1linf:4.2038e-01 L2_l1linf:4.0668e-01 L3_l1linf:3.9990e-01 L4_l1linf:4.0194e-01 L5_l1linf:4.0625e-01 L6_l1linf:4.1186e-01 L7_l1linf:4.1460e-01 L8_l1linf:4.1169e-01 L9_l1linf:4.0835e-01 L10_l1linf:4.0463e-01 L11_l1linf:4.0753e-01 L12_l1linf:4.0062e-01 L1_spectral:1.2056e-02 L2_spectral:1.2045e-02 L3_spectral:1.2053e-02 L4_spectral:1.2044e-02 L5_spectral:1.2043e-02 L6_spectral:1.2049e-02 L7_spectral:1.2050e-02 L8_spectral:1.2045e-02 L9_spectral:1.2048e-02 L10_spectral:1.2048e-02 L11_spectral:1.2045e-02 L12_spectral:1.2044e-02 ip_v_neg_g:1.9537e-02 
cos_v_neg_g:8.2557e-03 v_norm:2.4419e+00 g_norm:9.6908e-01 hv_norm:5.7202e-01 cos_v_hv:2.5831e-02 hg_norm:8.2546e+00 cos_g_hg:5.3775e-01 v_par:8.5819e-04 v_perp:2.4419e+00 L1_cos_v_neg_g:1.5945e-02 L1_v_norm:5.9822e-01 L2_cos_v_neg_g:2.1426e-02 L2_v_norm:5.5696e-01 L3_cos_v_neg_g:1.3311e-02 L3_v_norm:5.5771e-01 L4_cos_v_neg_g:8.2610e-03 L4_v_norm:5.8114e-01 L5_cos_v_neg_g:9.1274e-03 L5_v_norm:5.9385e-01 L6_cos_v_neg_g:8.6183e-03 L6_v_norm:6.0407e-01 L7_cos_v_neg_g:9.5008e-03 L7_v_norm:6.0306e-01 L8_cos_v_neg_g:8.3328e-03 L8_v_norm:6.0370e-01 L9_cos_v_neg_g:7.9947e-03 L9_v_norm:6.0394e-01 L10_cos_v_neg_g:7.4724e-03 L10_v_norm:6.0342e-01 L11_cos_v_neg_g:6.6244e-03 L11_v_norm:6.0245e-01 L12_cos_v_neg_g:6.7624e-03 L12_v_norm:6.0146e-01 +step:1000 train loss:3.988662 +step:1001 train loss:3.991374 +step:1002 train loss:3.985410 +step:1003 train loss:3.955045 +step:1004 train loss:3.930420 +step:1005 train loss:3.942608 +step:1006 train loss:4.033297 +step:1007 train loss:3.964589 +step:1008 train loss:3.951686 +step:1009 train loss:4.011199 +step:1010 train loss:3.979590 +step:1011 train loss:4.006993 +step:1012 train loss:3.947091 +step:1013 train loss:3.920647 +step:1014 train loss:3.928897 +step:1015 train loss:3.964524 +step:1016 train loss:3.983085 +step:1017 train loss:3.931294 +step:1018 train loss:3.984452 +step:1019 train loss:3.932884 +step:1020 train loss:3.931318 +step:1021 train loss:4.027991 +step:1022 train loss:3.922979 +step:1023 train loss:3.934830 +step:1024 train loss:4.020196 +step:1025 train loss:3.977363 +step:1026 train loss:3.915182 +step:1027 train loss:3.953851 +step:1028 train loss:3.961410 +step:1029 train loss:3.911122 +step:1030 train loss:4.004724 +step:1031 train loss:3.986524 +step:1032 train loss:3.952549 +step:1033 train loss:3.916403 +step:1034 train loss:3.974554 +step:1035 train loss:3.984478 +step:1036 train loss:3.900419 +step:1037 train loss:3.957403 +step:1038 train loss:3.977527 +step:1039 train loss:4.131813 +step:1040 train loss:3.957166 +step:1041 train loss:3.931687 +step:1042 train loss:3.958069 +step:1043 train loss:3.965559 +step:1044 train loss:3.946574 +step:1045 train loss:3.962775 +step:1046 train loss:3.901204 +step:1047 train loss:3.935611 +step:1048 train loss:3.930468 +step:1049 train loss:3.983354 +step:1050 train loss:3.950685 +step:1051 train loss:3.915890 +step:1052 train loss:4.037522 +step:1053 train loss:3.926190 +step:1054 train loss:3.913805 +step:1055 train loss:3.988148 +step:1056 train loss:3.928555 +step:1057 train loss:3.831403 +step:1058 train loss:3.931186 +step:1059 train loss:3.914534 +step:1060 train loss:3.917357 +step:1061 train loss:3.965672 +step:1062 train loss:3.925766 +step:1063 train loss:3.937419 +step:1064 train loss:3.925175 +step:1065 train loss:3.931037 +step:1066 train loss:3.906619 +step:1067 train loss:3.941474 +step:1068 train loss:3.898175 +step:1069 train loss:3.917862 +step:1070 train loss:3.932906 +step:1071 train loss:3.943047 +step:1072 train loss:3.967245 +step:1073 train loss:3.888216 +step:1074 train loss:3.899711 +step:1075 train loss:3.895243 +step:1076 train loss:3.971790 +step:1077 train loss:3.899539 +step:1078 train loss:3.952102 +step:1079 train loss:3.993699 +step:1080 train loss:3.870522 +step:1081 train loss:3.943341 +step:1082 train loss:3.932322 +step:1083 train loss:3.896645 +step:1084 train loss:3.883726 +step:1085 train loss:3.944717 +step:1086 train loss:3.918753 +step:1087 train loss:3.914608 +step:1088 train loss:3.916574 +step:1089 train loss:3.924995 +step:1090 train 
loss:3.874214 +step:1091 train loss:3.864719 +step:1092 train loss:3.965949 +step:1093 train loss:3.852569 +step:1094 train loss:3.913251 +step:1095 train loss:3.963611 +step:1096 train loss:3.894012 +step:1097 train loss:3.895844 +step:1098 train loss:3.868544 +step:1099 train loss:3.919977 +step:1100 train loss:3.972716 +step:1101 train loss:3.954042 +step:1102 train loss:3.968524 +step:1103 train loss:3.889747 +step:1104 train loss:3.923028 +step:1105 train loss:3.973210 +step:1106 train loss:3.913190 +step:1107 train loss:4.039846 +step:1108 train loss:3.970075 +step:1109 train loss:3.947177 +step:1110 train loss:3.894758 +step:1111 train loss:3.947788 +step:1112 train loss:3.854956 +step:1113 train loss:3.853958 +step:1114 train loss:3.833176 +step:1115 train loss:3.876966 +step:1116 train loss:3.938641 +step:1117 train loss:3.975772 +step:1118 train loss:3.992886 +step:1119 train loss:3.916110 +step:1120 train loss:3.939815 +step:1121 train loss:3.915799 +step:1122 train loss:3.895832 +step:1123 train loss:4.004762 +step:1124 train loss:3.885101 +step:1125 train loss:3.892425 +step:1126 train loss:3.858489 +step:1127 train loss:3.888820 +step:1128 train loss:3.885530 +step:1129 train loss:3.941977 +step:1130 train loss:3.866272 +step:1131 train loss:3.958389 +step:1132 train loss:3.900451 +step:1133 train loss:3.911798 +step:1134 train loss:3.887997 +step:1135 train loss:3.928985 +step:1136 train loss:3.949900 +step:1137 train loss:3.872346 +step:1138 train loss:3.946332 +step:1139 train loss:3.892097 +step:1140 train loss:3.980628 +step:1141 train loss:3.932804 +step:1142 train loss:3.864388 +step:1143 train loss:3.939705 +step:1144 train loss:3.967670 +step:1145 train loss:3.912258 +step:1146 train loss:3.867991 +step:1147 train loss:3.881294 +step:1148 train loss:3.915617 +step:1149 train loss:3.963281 +step:1150 train loss:3.961637 +step:1151 train loss:3.974375 +step:1152 train loss:3.871032 +step:1153 train loss:3.873792 +step:1154 train loss:3.859654 +step:1155 train loss:3.959848 +step:1156 train loss:3.861090 +step:1157 train loss:3.889912 +step:1158 train loss:3.942643 +step:1159 train loss:3.945529 +step:1160 train loss:3.870901 +step:1161 train loss:3.958549 +step:1162 train loss:3.894368 +step:1163 train loss:3.886366 +step:1164 train loss:3.793132 +step:1165 train loss:3.930197 +step:1166 train loss:3.859363 +step:1167 train loss:3.865552 +step:1168 train loss:3.921532 +step:1169 train loss:3.884118 +step:1170 train loss:3.891455 +step:1171 train loss:3.915826 +step:1172 train loss:3.867517 +step:1173 train loss:3.907054 +step:1174 train loss:3.844823 +step:1175 train loss:3.881913 +step:1176 train loss:4.000969 +step:1177 train loss:3.843624 +step:1178 train loss:3.900676 +step:1179 train loss:3.844520 +step:1180 train loss:3.886822 +step:1181 train loss:3.871564 +step:1182 train loss:3.930640 +step:1183 train loss:3.902109 +step:1184 train loss:3.847725 +step:1185 train loss:3.876198 +step:1186 train loss:3.875730 +step:1187 train loss:3.848202 +step:1188 train loss:3.887004 +step:1189 train loss:3.818193 +step:1190 train loss:3.874041 +step:1191 train loss:3.929803 +step:1192 train loss:3.882714 +step:1193 train loss:3.884931 +step:1194 train loss:4.004694 +step:1195 train loss:3.977362 +step:1196 train loss:3.867788 +step:1197 train loss:3.891641 +step:1198 train loss:3.880626 +step:1199 train loss:3.876451 +step:1200 train loss:3.943955 +step:1201 train loss:3.907466 +step:1202 train loss:3.845882 +step:1203 train loss:3.841827 +step:1204 train loss:3.878515 
+step:1205 train loss:3.905188 +step:1206 train loss:3.831612 +step:1207 train loss:3.916107 +step:1208 train loss:3.887000 +step:1209 train loss:3.820340 +step:1210 train loss:3.918418 +step:1211 train loss:3.860114 +step:1212 train loss:3.892253 +step:1213 train loss:3.828900 +step:1214 train loss:3.900558 +step:1215 train loss:3.871360 +step:1216 train loss:3.894209 +step:1217 train loss:3.819295 +step:1218 train loss:3.889350 +step:1219 train loss:3.829707 +step:1220 train loss:3.860447 +step:1221 train loss:3.871441 +step:1222 train loss:3.922335 +step:1223 train loss:3.894535 +step:1224 train loss:3.864399 +step:1225 train loss:3.912899 +step:1226 train loss:3.860845 +step:1227 train loss:3.860515 +step:1228 train loss:3.869306 +step:1229 train loss:3.836754 +step:1230 train loss:3.832030 +step:1231 train loss:3.887213 +step:1232 train loss:3.843887 +step:1233 train loss:3.835132 +step:1234 train loss:3.921466 +step:1235 train loss:3.890049 +step:1236 train loss:3.801985 +step:1237 train loss:3.909367 +step:1238 train loss:3.854369 +step:1239 train loss:3.894238 +step:1240 train loss:3.817907 +step:1241 train loss:3.834149 +step:1242 train loss:3.867185 +step:1243 train loss:3.812095 +step:1244 train loss:3.937261 +step:1245 train loss:3.948668 +step:1246 train loss:3.874671 +step:1247 train loss:3.856291 +step:1248 train loss:3.879896 +step:1249 train loss:3.807922 +step:1250 validation loss:3.810080 +step:1250 train loss:3.831194 +step:1251 train loss:3.900542 +step:1252 train loss:3.853661 +step:1253 train loss:3.801061 +step:1254 train loss:3.839683 +step:1255 train loss:3.830826 +step:1256 train loss:3.878505 +step:1257 train loss:3.854413 +step:1258 train loss:3.912005 +step:1259 train loss:3.886049 +step:1260 train loss:3.799183 +step:1261 train loss:4.042774 +step:1262 train loss:3.878457 +step:1263 train loss:3.830307 +step:1264 train loss:3.854884 +step:1265 train loss:3.894609 +step:1266 train loss:3.842920 +step:1267 train loss:3.853782 +step:1268 train loss:3.860088 +step:1269 train loss:3.860283 +step:1270 train loss:3.785457 +step:1271 train loss:3.791546 +step:1272 train loss:3.824046 +step:1273 train loss:3.881209 +step:1274 train loss:3.844710 +step:1275 train loss:3.873527 +step:1276 train loss:3.872501 +step:1277 train loss:3.880969 +step:1278 train loss:3.823039 +step:1279 train loss:3.834201 +step:1280 train loss:3.849818 +step:1281 train loss:3.898334 +step:1282 train loss:3.837868 +step:1283 train loss:3.903034 +step:1284 train loss:3.844473 +step:1285 train loss:3.893567 +step:1286 train loss:3.794948 +step:1287 train loss:3.831564 +step:1288 train loss:3.860749 +step:1289 train loss:3.926355 +step:1290 train loss:3.880383 +step:1291 train loss:3.840594 +step:1292 train loss:3.820974 +step:1293 train loss:3.821950 +step:1294 train loss:3.865111 +step:1295 train loss:3.851801 +step:1296 train loss:3.892545 +step:1297 train loss:3.850897 +step:1298 train loss:3.868539 +step:1299 train loss:3.903420 +step:1300 train loss:3.827488 +step:1301 train loss:3.871836 +step:1302 train loss:3.826505 +step:1303 train loss:3.868124 +step:1304 train loss:3.898262 +step:1305 train loss:3.872881 +step:1306 train loss:3.869174 +step:1307 train loss:3.851310 +step:1308 train loss:3.802386 +step:1309 train loss:3.821406 +step:1310 train loss:3.809144 +step:1311 train loss:3.813311 +step:1312 train loss:3.891332 +step:1313 train loss:3.800174 +step:1314 train loss:3.811088 +step:1315 train loss:3.852769 +step:1316 train loss:3.831414 +step:1317 train loss:3.720910 +step:1318 
train loss:3.888403 +step:1319 train loss:3.915169 +step:1320 train loss:3.828785 +step:1321 train loss:3.817269 +step:1322 train loss:3.918476 +step:1323 train loss:3.864309 +step:1324 train loss:3.971466 +step:1325 train loss:3.840173 +step:1326 train loss:3.873880 +step:1327 train loss:3.892814 +step:1328 train loss:3.797239 +step:1329 train loss:3.825183 +step:1330 train loss:3.852845 +step:1331 train loss:3.748527 +step:1332 train loss:3.886570 +step:1333 train loss:3.861207 +step:1334 train loss:3.854986 +step:1335 train loss:3.879773 +step:1336 train loss:3.885641 +step:1337 train loss:3.856715 +step:1338 train loss:3.836330 +step:1339 train loss:3.911466 +step:1340 train loss:3.882262 +step:1341 train loss:3.859752 +step:1342 train loss:3.828816 +step:1343 train loss:3.824442 +step:1344 train loss:3.883581 +step:1345 train loss:3.841429 +step:1346 train loss:3.931259 +step:1347 train loss:3.858361 +step:1348 train loss:3.810601 +step:1349 train loss:3.759685 +step:1350 train loss:3.806724 +step:1351 train loss:3.865403 +step:1352 train loss:3.841768 +step:1353 train loss:3.822303 +step:1354 train loss:3.823404 +step:1355 train loss:3.894039 +step:1356 train loss:3.800691 +step:1357 train loss:3.830441 +step:1358 train loss:3.823632 +step:1359 train loss:3.822788 +step:1360 train loss:3.856034 +step:1361 train loss:3.979507 +step:1362 train loss:3.884334 +step:1363 train loss:3.770890 +step:1364 train loss:3.800232 +step:1365 train loss:3.789673 +step:1366 train loss:3.832409 +step:1367 train loss:3.762924 +step:1368 train loss:3.796044 +step:1369 train loss:3.835643 +step:1370 train loss:3.854331 +step:1371 train loss:3.815800 +step:1372 train loss:3.853977 +step:1373 train loss:3.884685 +step:1374 train loss:3.880434 +step:1375 train loss:3.832490 +step:1376 train loss:3.860617 +step:1377 train loss:3.851042 +step:1378 train loss:3.834158 +step:1379 train loss:3.806100 +step:1380 train loss:3.878513 +step:1381 train loss:3.830232 +step:1382 train loss:3.806569 +step:1383 train loss:3.792728 +step:1384 train loss:3.865773 +step:1385 train loss:3.769635 +step:1386 train loss:3.842021 +step:1387 train loss:3.846684 +step:1388 train loss:3.812278 +step:1389 train loss:3.778704 +step:1390 train loss:3.819448 +step:1391 train loss:3.849275 +step:1392 train loss:3.832600 +step:1393 train loss:3.878526 +step:1394 train loss:3.815486 +step:1395 train loss:3.851674 +step:1396 train loss:3.836645 +step:1397 train loss:3.859190 +step:1398 train loss:3.860813 +step:1399 train loss:3.830222 +step:1400 train loss:3.807470 +step:1401 train loss:3.803230 +step:1402 train loss:3.808944 +step:1403 train loss:3.771050 +step:1404 train loss:3.829286 +step:1405 train loss:3.792346 +step:1406 train loss:3.826875 +step:1407 train loss:3.819106 +step:1408 train loss:3.793876 +step:1409 train loss:3.784871 +step:1410 train loss:3.801850 +step:1411 train loss:3.840541 +step:1412 train loss:3.885164 +step:1413 train loss:3.811605 +step:1414 train loss:3.845376 +step:1415 train loss:3.806327 +step:1416 train loss:3.856328 +step:1417 train loss:3.824785 +step:1418 train loss:3.767695 +step:1419 train loss:3.783913 +step:1420 train loss:3.799938 +step:1421 train loss:3.844458 +step:1422 train loss:3.817570 +step:1423 train loss:3.917528 +step:1424 train loss:3.817560 +step:1425 train loss:3.778926 +step:1426 train loss:3.800890 +step:1427 train loss:3.787349 +step:1428 train loss:3.771880 +step:1429 train loss:3.793512 +step:1430 train loss:3.811668 +step:1431 train loss:3.828312 +step:1432 train loss:3.812197 
+step:1433 train loss:3.796096 +step:1434 train loss:3.769914 +step:1435 train loss:3.759495 +step:1436 train loss:3.840490 +step:1437 train loss:3.767141 +step:1438 train loss:3.759503 +step:1439 train loss:3.754513 +step:1440 train loss:3.788638 +step:1441 train loss:3.874193 +step:1442 train loss:3.827069 +step:1443 train loss:3.756471 +step:1444 train loss:3.771133 +step:1445 train loss:3.763622 +step:1446 train loss:3.814196 +step:1447 train loss:3.815659 +step:1448 train loss:3.780120 +step:1449 train loss:3.803528 +step:1450 train loss:3.816342 +step:1451 train loss:3.751665 +step:1452 train loss:3.808027 +step:1453 train loss:3.801597 +step:1454 train loss:3.783775 +step:1455 train loss:3.729809 +step:1456 train loss:3.800007 +step:1457 train loss:3.739513 +step:1458 train loss:3.874700 +step:1459 train loss:3.799760 +step:1460 train loss:3.765657 +step:1461 train loss:3.825852 +step:1462 train loss:3.837202 +step:1463 train loss:3.792838 +step:1464 train loss:3.776502 +step:1465 train loss:3.771417 +step:1466 train loss:3.736745 +step:1467 train loss:3.876418 +step:1468 train loss:3.760313 +step:1469 train loss:3.831917 +step:1470 train loss:3.773749 +step:1471 train loss:3.766070 +step:1472 train loss:3.773021 +step:1473 train loss:3.767452 +step:1474 train loss:3.714146 +step:1475 train loss:3.774969 +step:1476 train loss:3.857587 +step:1477 train loss:3.805000 +step:1478 train loss:3.741161 +step:1479 train loss:3.776587 +step:1480 train loss:3.769992 +step:1481 train loss:3.745876 +step:1482 train loss:3.809770 +step:1483 train loss:3.798416 +step:1484 train loss:3.829376 +step:1485 train loss:3.844778 +step:1486 train loss:3.773138 +step:1487 train loss:3.770230 +step:1488 train loss:3.766412 +step:1489 train loss:3.761102 +step:1490 train loss:3.818949 +step:1491 train loss:3.818058 +step:1492 train loss:3.800487 +step:1493 train loss:3.752134 +step:1494 train loss:3.782426 +step:1495 train loss:3.775691 +step:1496 train loss:3.737010 +step:1497 train loss:3.811605 +step:1498 train loss:3.721309 +step:1499 train loss:3.762453 +step:1500 validation loss:3.730608 total_sharp:4.7979e-03 L1_sharp:5.8867e-03 L2_sharp:8.9185e-04 L3_sharp:1.5729e-03 L4_sharp:5.0377e-04 L5_sharp:7.5089e-04 L6_sharp:9.4406e-04 L7_sharp:1.2006e-03 L8_sharp:7.7991e-04 L9_sharp:7.2702e-04 L10_sharp:4.6037e-04 L11_sharp:3.9436e-04 L12_sharp:3.6251e-04 total_fnorm:2.4435e+00 total_l1_linf:2.0906e+04 total_spectral:2.4435e+00 L1_fnorm:5.8444e-01 L2_fnorm:5.5186e-01 L3_fnorm:5.5734e-01 L4_fnorm:5.8127e-01 L5_fnorm:5.9708e-01 L6_fnorm:6.0448e-01 L7_fnorm:6.0336e-01 L8_fnorm:6.0512e-01 L9_fnorm:6.0591e-01 L10_fnorm:6.0563e-01 L11_fnorm:6.0355e-01 L12_fnorm:6.0199e-01 L1_l1linf:4.1781e-01 L2_l1linf:4.0075e-01 L3_l1linf:3.9799e-01 L4_l1linf:4.1389e-01 L5_l1linf:4.1973e-01 L6_l1linf:4.1712e-01 L7_l1linf:4.1780e-01 L8_l1linf:4.1741e-01 L9_l1linf:4.1190e-01 L10_l1linf:4.0951e-01 L11_l1linf:4.0746e-01 L12_l1linf:3.9846e-01 L1_spectral:1.2054e-02 L2_spectral:1.2043e-02 L3_spectral:1.2054e-02 L4_spectral:1.2044e-02 L5_spectral:1.2058e-02 L6_spectral:1.2044e-02 L7_spectral:1.2045e-02 L8_spectral:1.2043e-02 L9_spectral:1.2049e-02 L10_spectral:1.2042e-02 L11_spectral:1.2042e-02 L12_spectral:1.2048e-02 ip_v_neg_g:1.3348e-02 cos_v_neg_g:5.2066e-03 v_norm:2.4435e+00 g_norm:1.0492e+00 hv_norm:6.2995e-01 cos_v_hv:1.8610e-02 hg_norm:1.5319e+01 cos_g_hg:5.6404e-01 v_par:8.8954e-04 v_perp:2.4435e+00 L1_cos_v_neg_g:7.6942e-03 L1_v_norm:5.8444e-01 L2_cos_v_neg_g:8.7290e-03 L2_v_norm:5.5186e-01 L3_cos_v_neg_g:8.3253e-03 
L3_v_norm:5.5734e-01 L4_cos_v_neg_g:8.0206e-03 L4_v_norm:5.8127e-01 L5_cos_v_neg_g:7.8913e-03 L5_v_norm:5.9708e-01 L6_cos_v_neg_g:8.1160e-03 L6_v_norm:6.0448e-01 L7_cos_v_neg_g:7.6516e-03 L7_v_norm:6.0336e-01 L8_cos_v_neg_g:7.4727e-03 L8_v_norm:6.0512e-01 L9_cos_v_neg_g:7.8182e-03 L9_v_norm:6.0591e-01 L10_cos_v_neg_g:6.6870e-03 L10_v_norm:6.0563e-01 L11_cos_v_neg_g:5.3574e-03 L11_v_norm:6.0355e-01 L12_cos_v_neg_g:4.2269e-03 L12_v_norm:6.0199e-01 +step:1500 train loss:3.752250 +step:1501 train loss:3.789527 +step:1502 train loss:3.718672 +step:1503 train loss:3.779966 +step:1504 train loss:3.746079 +step:1505 train loss:3.721510 +step:1506 train loss:3.709355 +step:1507 train loss:3.719079 +step:1508 train loss:3.740958 +step:1509 train loss:3.781429 +step:1510 train loss:3.733207 +step:1511 train loss:3.757052 +step:1512 train loss:3.733755 +step:1513 train loss:3.799294 +step:1514 train loss:3.759432 +step:1515 train loss:3.811003 +step:1516 train loss:3.745653 +step:1517 train loss:3.749830 +step:1518 train loss:3.836676 +step:1519 train loss:3.793746 +step:1520 train loss:3.843325 +step:1521 train loss:3.735416 +step:1522 train loss:3.797938 +step:1523 train loss:3.793200 +step:1524 train loss:3.726925 +step:1525 train loss:3.803874 +step:1526 train loss:3.722242 +step:1527 train loss:3.772060 +step:1528 train loss:3.831028 +step:1529 train loss:3.778998 +step:1530 train loss:3.826450 +step:1531 train loss:3.743425 +step:1532 train loss:3.817628 +step:1533 train loss:3.786504 +step:1534 train loss:3.743999 +step:1535 train loss:3.783286 +step:1536 train loss:3.823864 +step:1537 train loss:3.763469 +step:1538 train loss:3.785193 +step:1539 train loss:3.767356 +step:1540 train loss:3.789013 +step:1541 train loss:3.743756 +step:1542 train loss:3.840731 +step:1543 train loss:3.871305 +step:1544 train loss:3.732779 +step:1545 train loss:3.719649 +step:1546 train loss:3.762302 +step:1547 train loss:3.742177 +step:1548 train loss:3.784636 +step:1549 train loss:3.707994 +step:1550 train loss:3.832994 +step:1551 train loss:3.759304 +step:1552 train loss:3.789390 +step:1553 train loss:3.800195 +step:1554 train loss:3.808072 +step:1555 train loss:3.763252 +step:1556 train loss:3.747447 +step:1557 train loss:3.760286 +step:1558 train loss:3.785058 +step:1559 train loss:3.747257 +step:1560 train loss:3.824019 +step:1561 train loss:3.799660 +step:1562 train loss:3.690495 +step:1563 train loss:3.668409 +step:1564 train loss:3.809916 +step:1565 train loss:3.779336 +step:1566 train loss:3.800789 +step:1567 train loss:3.794794 +step:1568 train loss:3.752675 +step:1569 train loss:3.742709 +step:1570 train loss:3.765358 +step:1571 train loss:3.729185 +step:1572 train loss:3.740177 +step:1573 train loss:3.784481 +step:1574 train loss:3.742448 +step:1575 train loss:3.762511 +step:1576 train loss:3.721623 +step:1577 train loss:3.746712 +step:1578 train loss:3.733230 +step:1579 train loss:3.805624 +step:1580 train loss:3.764916 +step:1581 train loss:3.799173 +step:1582 train loss:3.805032 +step:1583 train loss:3.770593 +step:1584 train loss:3.691664 +step:1585 train loss:3.774205 +step:1586 train loss:3.744540 +step:1587 train loss:3.758687 +step:1588 train loss:3.742809 +step:1589 train loss:3.792653 +step:1590 train loss:3.700122 +step:1591 train loss:3.753121 +step:1592 train loss:3.710358 +step:1593 train loss:3.743759 +step:1594 train loss:3.748159 +step:1595 train loss:3.746410 +step:1596 train loss:3.750422 +step:1597 train loss:3.682258 +step:1598 train loss:3.778794 +step:1599 train loss:3.792235 
+step:1600 train loss:3.666727 +step:1601 train loss:3.746930 +step:1602 train loss:3.806430 +step:1603 train loss:3.801747 +step:1604 train loss:3.722991 +step:1605 train loss:3.782238 +step:1606 train loss:3.825686 +step:1607 train loss:3.705410 +step:1608 train loss:3.742096 +step:1609 train loss:3.760014 +step:1610 train loss:3.818308 +step:1611 train loss:3.743995 +step:1612 train loss:3.666291 +step:1613 train loss:3.738721 +step:1614 train loss:3.848269 +step:1615 train loss:3.765416 +step:1616 train loss:3.768643 +step:1617 train loss:3.757846 +step:1618 train loss:3.753020 +step:1619 train loss:3.935569 +step:1620 train loss:3.721367 +step:1621 train loss:3.778940 +step:1622 train loss:3.700293 +step:1623 train loss:3.768439 +step:1624 train loss:3.739098 +step:1625 train loss:3.818868 +step:1626 train loss:3.700472 +step:1627 train loss:3.718264 +step:1628 train loss:3.734002 +step:1629 train loss:3.772424 +step:1630 train loss:3.780924 +step:1631 train loss:3.734429 +step:1632 train loss:3.705141 +step:1633 train loss:3.726526 +step:1634 train loss:3.783214 +step:1635 train loss:3.722888 +step:1636 train loss:3.707387 +step:1637 train loss:3.783252 +step:1638 train loss:3.889943 +step:1639 train loss:3.688537 +step:1640 train loss:3.775000 +step:1641 train loss:3.735020 +step:1642 train loss:3.833960 +step:1643 train loss:3.733300 +step:1644 train loss:3.737875 +step:1645 train loss:3.717799 +step:1646 train loss:3.802336 +step:1647 train loss:3.692948 +step:1648 train loss:3.755011 +step:1649 train loss:3.715405 +step:1650 train loss:3.735940 +step:1651 train loss:3.747122 +step:1652 train loss:3.770298 +step:1653 train loss:3.771111 +step:1654 train loss:3.772406 +step:1655 train loss:3.741991 +step:1656 train loss:3.737113 +step:1657 train loss:3.743872 +step:1658 train loss:3.709373 +step:1659 train loss:3.789145 +step:1660 train loss:3.689796 +step:1661 train loss:3.806674 +step:1662 train loss:3.736330 +step:1663 train loss:3.732648 +step:1664 train loss:3.824603 +step:1665 train loss:3.746902 +step:1666 train loss:3.756918 +step:1667 train loss:3.779817 +step:1668 train loss:3.751186 +step:1669 train loss:3.709159 +step:1670 train loss:3.769591 +step:1671 train loss:3.764574 +step:1672 train loss:3.759121 +step:1673 train loss:3.717545 +step:1674 train loss:3.716131 +step:1675 train loss:3.754800 +step:1676 train loss:4.021365 +step:1677 train loss:3.764383 +step:1678 train loss:3.681723 +step:1679 train loss:3.805679 +step:1680 train loss:3.729671 +step:1681 train loss:3.785455 +step:1682 train loss:3.743891 +step:1683 train loss:3.730713 +step:1684 train loss:3.685503 +step:1685 train loss:3.751806 +step:1686 train loss:3.733248 +step:1687 train loss:3.749623 +step:1688 train loss:3.724911 +step:1689 train loss:3.720512 +step:1690 train loss:3.738431 +step:1691 train loss:3.734557 +step:1692 train loss:3.747380 +step:1693 train loss:3.725131 +step:1694 train loss:3.673416 +step:1695 train loss:3.699647 +step:1696 train loss:3.704181 +step:1697 train loss:3.755364 +step:1698 train loss:3.748382 +step:1699 train loss:3.705244 +step:1700 train loss:3.785949 +step:1701 train loss:3.727991 +step:1702 train loss:3.720921 +step:1703 train loss:3.737647 +step:1704 train loss:3.745797 +step:1705 train loss:3.761695 +step:1706 train loss:3.764172 +step:1707 train loss:3.768728 +step:1708 train loss:3.687539 +step:1709 train loss:3.793766 +step:1710 train loss:3.705605 +step:1711 train loss:3.713816 +step:1712 train loss:3.739327 +step:1713 train loss:3.700480 +step:1714 train 
loss:4.077880 +step:1715 train loss:3.715444 +step:1716 train loss:3.706042 +step:1717 train loss:3.706211 +step:1718 train loss:3.788085 +step:1719 train loss:3.683797 +step:1720 train loss:3.780071 +step:1721 train loss:3.718021 +step:1722 train loss:3.688896 +step:1723 train loss:3.787338 +step:1724 train loss:3.734300 +step:1725 train loss:3.736543 +step:1726 train loss:3.730386 +step:1727 train loss:3.770867 +step:1728 train loss:3.778296 +step:1729 train loss:3.692641 +step:1730 train loss:3.771839 +step:1731 train loss:3.700424 +step:1732 train loss:3.714979 +step:1733 train loss:3.698065 +step:1734 train loss:3.757425 +step:1735 train loss:3.810739 +step:1736 train loss:3.730221 +step:1737 train loss:3.750876 +step:1738 train loss:3.714430 +step:1739 train loss:3.786495 +step:1740 train loss:3.771731 +step:1741 train loss:3.831470 +step:1742 train loss:3.806990 +step:1743 train loss:3.703828 +step:1744 train loss:3.714590 +step:1745 train loss:3.708903 +step:1746 train loss:3.688848 +step:1747 train loss:3.730420 +step:1748 train loss:3.661536 +step:1749 train loss:3.709754 +step:1750 validation loss:3.684006 +step:1750 train loss:3.745443 +step:1751 train loss:3.752963 +step:1752 train loss:3.733256 +step:1753 train loss:3.747994 +step:1754 train loss:3.744244 +step:1755 train loss:3.738159 +step:1756 train loss:3.759042 +step:1757 train loss:3.766589 +step:1758 train loss:3.678385 +step:1759 train loss:3.773043 +step:1760 train loss:3.726425 +step:1761 train loss:3.707614 +step:1762 train loss:3.700536 +step:1763 train loss:3.702824 +step:1764 train loss:4.006252 +step:1765 train loss:3.708599 +step:1766 train loss:3.804127 +step:1767 train loss:3.712826 +step:1768 train loss:3.694638 +step:1769 train loss:3.717847 +step:1770 train loss:3.728101 +step:1771 train loss:3.702127 +step:1772 train loss:3.813066 +step:1773 train loss:3.736414 +step:1774 train loss:3.740084 +step:1775 train loss:3.852666 +step:1776 train loss:3.725994 +step:1777 train loss:3.716976 +step:1778 train loss:3.772397 +step:1779 train loss:3.714934 +step:1780 train loss:3.758912 +step:1781 train loss:3.760954 +step:1782 train loss:3.798434 +step:1783 train loss:3.719351 +step:1784 train loss:3.818994 +step:1785 train loss:3.719502 +step:1786 train loss:3.718051 +step:1787 train loss:3.715306 +step:1788 train loss:3.737643 +step:1789 train loss:3.694655 +step:1790 train loss:3.710425 +step:1791 train loss:3.792019 +step:1792 train loss:3.786798 +step:1793 train loss:3.704558 +step:1794 train loss:3.745398 +step:1795 train loss:3.700208 +step:1796 train loss:3.685941 +step:1797 train loss:3.745955 +step:1798 train loss:3.691735 +step:1799 train loss:3.741993 +step:1800 train loss:3.776879 +step:1801 train loss:3.762749 +step:1802 train loss:3.767428 +step:1803 train loss:3.757444 +step:1804 train loss:3.759578 +step:1805 train loss:3.749308 +step:1806 train loss:3.754957 +step:1807 train loss:3.684313 +step:1808 train loss:3.745736 +step:1809 train loss:3.736409 +step:1810 train loss:3.725732 +step:1811 train loss:3.740547 +step:1812 train loss:3.724379 +step:1813 train loss:3.737555 +step:1814 train loss:3.806180 +step:1815 train loss:3.742972 +step:1816 train loss:3.699265 +step:1817 train loss:3.695579 +step:1818 train loss:3.743836 +step:1819 train loss:3.719876 +step:1820 train loss:3.746059 +step:1821 train loss:3.715056 +step:1822 train loss:3.693100 +step:1823 train loss:3.688579 +step:1824 train loss:3.764417 +step:1825 train loss:3.680531 +step:1826 train loss:3.724496 +step:1827 train loss:3.688366 
+step:1828 train loss:3.743049 +step:1829 train loss:3.705802 +step:1830 train loss:3.911283 +step:1831 train loss:3.650556 +step:1832 train loss:3.712330 +step:1833 train loss:3.751461 +step:1834 train loss:3.704574 +step:1835 train loss:3.700942 +step:1836 train loss:3.743787 +step:1837 train loss:3.672489 +step:1838 train loss:3.767623 +step:1839 train loss:3.752999 +step:1840 train loss:3.717731 +step:1841 train loss:3.749713 +step:1842 train loss:3.715192 +step:1843 train loss:3.666411 +step:1844 train loss:3.736077 +step:1845 train loss:3.699290 +step:1846 train loss:3.762345 +step:1847 train loss:3.803272 +step:1848 train loss:3.611865 +step:1849 train loss:3.696991 +step:1850 train loss:3.677635 +step:1851 train loss:3.715528 +step:1852 train loss:3.705256 +step:1853 train loss:3.760491 +step:1854 train loss:3.721562 +step:1855 train loss:3.712075 +step:1856 train loss:3.708933 +step:1857 train loss:3.716940 +step:1858 train loss:3.762874 +step:1859 train loss:3.704097 +step:1860 train loss:3.688567 +step:1861 train loss:3.694538 +step:1862 train loss:3.740453 +step:1863 train loss:3.776999 +step:1864 train loss:3.676795 +step:1865 train loss:3.697123 +step:1866 train loss:3.698983 +step:1867 train loss:3.737932 +step:1868 train loss:3.781461 +step:1869 train loss:3.701238 +step:1870 train loss:3.727281 +step:1871 train loss:3.666603 +step:1872 train loss:3.737877 +step:1873 train loss:3.801334 +step:1874 train loss:3.669101 +step:1875 train loss:3.737640 +step:1876 train loss:3.702069 +step:1877 train loss:3.747276 +step:1878 train loss:3.666065 +step:1879 train loss:3.735314 +step:1880 train loss:3.808634 +step:1881 train loss:3.734948 +step:1882 train loss:3.751526 +step:1883 train loss:3.776637 +step:1884 train loss:3.789335 +step:1885 train loss:3.736633 +step:1886 train loss:3.674682 +step:1887 train loss:3.683979 +step:1888 train loss:3.691104 +step:1889 train loss:3.713953 +step:1890 train loss:3.703713 +step:1891 train loss:3.648296 +step:1892 train loss:3.735076 +step:1893 train loss:3.662654 +step:1894 train loss:3.683403 +step:1895 train loss:3.716303 +step:1896 train loss:3.765882 +step:1897 train loss:3.657562 +step:1898 train loss:3.707696 +step:1899 train loss:3.723673 +step:1900 train loss:3.671981 +step:1901 train loss:3.760219 +step:1902 train loss:3.742406 +step:1903 train loss:3.686178 +step:1904 train loss:3.672313 +step:1905 train loss:3.669913 +step:1906 train loss:3.734921 +step:1907 train loss:3.674882 +step:1908 train loss:3.693246 +step:1909 train loss:3.788188 +step:1910 train loss:3.674316 +step:1911 train loss:3.683856 +step:1912 train loss:3.738864 +step:1913 train loss:3.673948 +step:1914 train loss:3.712428 +step:1915 train loss:3.671072 +step:1916 train loss:3.727664 +step:1917 train loss:3.707218 +step:1918 train loss:3.616504 +step:1919 train loss:3.773048 +step:1920 train loss:3.873195 +step:1921 train loss:3.660799 +step:1922 train loss:3.640813 +step:1923 train loss:3.731603 +step:1924 train loss:3.770213 +step:1925 train loss:3.714573 +step:1926 train loss:3.658496 +step:1927 train loss:3.737417 +step:1928 train loss:3.653468 +step:1929 train loss:3.679970 +step:1930 train loss:3.750039 +step:1931 train loss:3.661567 +step:1932 train loss:3.711729 +step:1933 train loss:3.710078 +step:1934 train loss:3.783944 +step:1935 train loss:3.733944 +step:1936 train loss:3.707109 +step:1937 train loss:3.639578 +step:1938 train loss:4.021595 +step:1939 train loss:3.752511 +step:1940 train loss:3.724028 +step:1941 train loss:3.736987 +step:1942 train 
loss:3.723227 +step:1943 train loss:3.722192 +step:1944 train loss:3.677178 +step:1945 train loss:3.681064 +step:1946 train loss:3.708949 +step:1947 train loss:3.726999 +step:1948 train loss:3.642996 +step:1949 train loss:3.748038 +step:1950 train loss:3.683737 +step:1951 train loss:3.713237 +step:1952 train loss:3.730786 +step:1953 train loss:3.662364 +step:1954 train loss:3.703789 +step:1955 train loss:3.654559 +step:1956 train loss:3.740754 +step:1957 train loss:3.760594 +step:1958 train loss:3.781629 +step:1959 train loss:3.649200 +step:1960 train loss:3.686204 +step:1961 train loss:3.725093 +step:1962 train loss:3.712748 +step:1963 train loss:3.690217 +step:1964 train loss:3.731144 +step:1965 train loss:3.760002 +step:1966 train loss:3.672662 +step:1967 train loss:3.726783 +step:1968 train loss:3.669936 +step:1969 train loss:3.686898 +step:1970 train loss:3.749393 +step:1971 train loss:3.648869 +step:1972 train loss:3.763104 +step:1973 train loss:3.658164 +step:1974 train loss:3.703414 +step:1975 train loss:3.658573 +step:1976 train loss:3.690089 +step:1977 train loss:3.730630 +step:1978 train loss:3.668309 +step:1979 train loss:3.650939 +step:1980 train loss:3.691686 +step:1981 train loss:3.674316 +step:1982 train loss:3.750468 +step:1983 train loss:3.696806 +step:1984 train loss:3.738492 +step:1985 train loss:3.723109 +step:1986 train loss:3.714571 +step:1987 train loss:3.670810 +step:1988 train loss:3.695543 +step:1989 train loss:3.845123 +step:1990 train loss:3.667487 +step:1991 train loss:3.666784 +step:1992 train loss:3.675656 +step:1993 train loss:3.702715 +step:1994 train loss:3.705863 +step:1995 train loss:3.648726 +step:1996 train loss:3.707120 +step:1997 train loss:3.712337 +step:1998 train loss:3.660160 +step:1999 train loss:3.782302 +step:2000 validation loss:3.639174 total_sharp:3.8428e-03 L1_sharp:9.3672e-03 L2_sharp:1.9498e-03 L3_sharp:1.5377e-03 L4_sharp:3.9618e-04 L5_sharp:4.8868e-04 L6_sharp:6.0623e-04 L7_sharp:8.1271e-04 L8_sharp:6.1593e-04 L9_sharp:5.7943e-04 L10_sharp:3.3228e-04 L11_sharp:2.7195e-04 L12_sharp:3.5854e-04 total_fnorm:2.4453e+00 total_l1_linf:2.0933e+04 total_spectral:2.4453e+00 L1_fnorm:5.6577e-01 L2_fnorm:5.4374e-01 L3_fnorm:5.6185e-01 L4_fnorm:5.8662e-01 L5_fnorm:5.9970e-01 L6_fnorm:6.0547e-01 L7_fnorm:6.0417e-01 L8_fnorm:6.0549e-01 L9_fnorm:6.0625e-01 L10_fnorm:6.0671e-01 L11_fnorm:6.0499e-01 L12_fnorm:6.0330e-01 L1_l1linf:4.1687e-01 L2_l1linf:3.9284e-01 L3_l1linf:4.0427e-01 L4_l1linf:4.1964e-01 L5_l1linf:4.2092e-01 L6_l1linf:4.2308e-01 L7_l1linf:4.1826e-01 L8_l1linf:4.1976e-01 L9_l1linf:4.1311e-01 L10_l1linf:4.0902e-01 L11_l1linf:4.0484e-01 L12_l1linf:3.9838e-01 L1_spectral:1.2058e-02 L2_spectral:1.2040e-02 L3_spectral:1.2051e-02 L4_spectral:1.2049e-02 L5_spectral:1.2051e-02 L6_spectral:1.2044e-02 L7_spectral:1.2042e-02 L8_spectral:1.2041e-02 L9_spectral:1.2045e-02 L10_spectral:1.2043e-02 L11_spectral:1.2047e-02 L12_spectral:1.2048e-02 ip_v_neg_g:1.0842e-02 cos_v_neg_g:3.3682e-03 v_norm:2.4453e+00 g_norm:1.3164e+00 hv_norm:7.3336e-01 cos_v_hv:1.2814e-02 hg_norm:5.5783e+01 cos_g_hg:5.9925e-01 v_par:9.6873e-04 v_perp:2.4453e+00 L1_cos_v_neg_g:5.3373e-03 L1_v_norm:5.6577e-01 L2_cos_v_neg_g:1.3487e-02 L2_v_norm:5.4374e-01 L3_cos_v_neg_g:1.0918e-02 L3_v_norm:5.6185e-01 L4_cos_v_neg_g:4.9949e-03 L4_v_norm:5.8662e-01 L5_cos_v_neg_g:4.0384e-03 L5_v_norm:5.9970e-01 L6_cos_v_neg_g:4.3866e-03 L6_v_norm:6.0547e-01 L7_cos_v_neg_g:4.6320e-03 L7_v_norm:6.0417e-01 L8_cos_v_neg_g:5.2170e-03 L8_v_norm:6.0549e-01 L9_cos_v_neg_g:5.2543e-03 L9_v_norm:6.0625e-01 
L10_cos_v_neg_g:4.0833e-03 L10_v_norm:6.0671e-01 L11_cos_v_neg_g:2.6086e-03 L11_v_norm:6.0499e-01 L12_cos_v_neg_g:1.8061e-03 L12_v_norm:6.0330e-01 +step:2000 train loss:3.733729 +step:2001 train loss:3.669038 +step:2002 train loss:3.767635 +step:2003 train loss:3.812050 +step:2004 train loss:3.685381 +step:2005 train loss:3.782467 +step:2006 train loss:3.662798 +step:2007 train loss:3.740322 +step:2008 train loss:3.687862 +step:2009 train loss:3.686905 +step:2010 train loss:3.815770 +step:2011 train loss:3.669079 +step:2012 train loss:3.698355 +step:2013 train loss:3.706620 +step:2014 train loss:3.612768 +step:2015 train loss:3.723238 +step:2016 train loss:3.700876 +step:2017 train loss:3.706669 +step:2018 train loss:3.668710 +step:2019 train loss:3.701929 +step:2020 train loss:3.709824 +step:2021 train loss:3.666910 +step:2022 train loss:3.718382 +step:2023 train loss:3.693669 +step:2024 train loss:3.751019 +step:2025 train loss:3.682176 +step:2026 train loss:3.664559 +step:2027 train loss:3.697144 +step:2028 train loss:3.623242 +step:2029 train loss:3.660882 +step:2030 train loss:3.660550 +step:2031 train loss:3.622031 +step:2032 train loss:3.676343 +step:2033 train loss:3.671374 +step:2034 train loss:3.671860 +step:2035 train loss:3.710658 +step:2036 train loss:3.702923 +step:2037 train loss:3.689649 +step:2038 train loss:3.684017 +step:2039 train loss:3.680344 +step:2040 train loss:3.705862 +step:2041 train loss:3.705735 +step:2042 train loss:3.638414 +step:2043 train loss:3.792283 +step:2044 train loss:3.660727 +step:2045 train loss:3.676154 +step:2046 train loss:3.687036 +step:2047 train loss:3.667176 +step:2048 train loss:3.704789 +step:2049 train loss:3.662180 +step:2050 train loss:3.685679 +step:2051 train loss:3.649309 +step:2052 train loss:3.704909 +step:2053 train loss:3.696063 +step:2054 train loss:3.680200 +step:2055 train loss:3.668862 +step:2056 train loss:3.715400 +step:2057 train loss:3.721279 +step:2058 train loss:3.683981 +step:2059 train loss:3.767695 +step:2060 train loss:3.710218 +step:2061 train loss:3.670343 +step:2062 train loss:3.699640 +step:2063 train loss:3.597990 +step:2064 train loss:3.718425 +step:2065 train loss:3.725569 +step:2066 train loss:3.583009 +step:2067 train loss:3.634560 +step:2068 train loss:3.739324 +step:2069 train loss:3.676635 +step:2070 train loss:3.679459 +step:2071 train loss:3.715671 +step:2072 train loss:3.651174 +step:2073 train loss:3.703042 +step:2074 train loss:3.681666 +step:2075 train loss:3.766140 +step:2076 train loss:3.701807 +step:2077 train loss:3.721143 +step:2078 train loss:3.679145 +step:2079 train loss:3.826657 +step:2080 train loss:3.648504 +step:2081 train loss:3.752774 +step:2082 train loss:3.684951 +step:2083 train loss:3.674870 +step:2084 train loss:3.650426 +step:2085 train loss:3.697048 +step:2086 train loss:3.708993 +step:2087 train loss:3.752317 +step:2088 train loss:3.617601 +step:2089 train loss:3.642066 +step:2090 train loss:3.678607 +step:2091 train loss:3.699950 +step:2092 train loss:3.681602 +step:2093 train loss:3.670802 +step:2094 train loss:3.706358 +step:2095 train loss:3.651299 +step:2096 train loss:3.641148 +step:2097 train loss:3.676968 +step:2098 train loss:3.677991 +step:2099 train loss:3.667055 +step:2100 train loss:3.718446 +step:2101 train loss:3.715543 +step:2102 train loss:3.680710 +step:2103 train loss:3.697783 +step:2104 train loss:3.679938 +step:2105 train loss:3.683708 +step:2106 train loss:3.680736 +step:2107 train loss:3.743436 +step:2108 train loss:3.660959 +step:2109 train 
loss:3.625381 +step:2110 train loss:3.716873 +step:2111 train loss:3.665524 +step:2112 train loss:3.731456 +step:2113 train loss:3.661471 +step:2114 train loss:3.670002 +step:2115 train loss:3.723689 +step:2116 train loss:3.655634 +step:2117 train loss:3.679200 +step:2118 train loss:3.658003 +step:2119 train loss:3.595796 +step:2120 train loss:3.678689 +step:2121 train loss:3.674790 +step:2122 train loss:3.687526 +step:2123 train loss:3.736002 +step:2124 train loss:3.748191 +step:2125 train loss:3.646853 +step:2126 train loss:3.653270 +step:2127 train loss:3.642745 +step:2128 train loss:3.643757 +step:2129 train loss:3.669236 +step:2130 train loss:3.674287 +step:2131 train loss:3.693021 +step:2132 train loss:3.625464 +step:2133 train loss:3.735777 +step:2134 train loss:3.683959 +step:2135 train loss:3.640323 +step:2136 train loss:3.737890 +step:2137 train loss:3.697267 +step:2138 train loss:3.660964 +step:2139 train loss:3.657336 +step:2140 train loss:3.657025 +step:2141 train loss:3.714784 +step:2142 train loss:3.678846 +step:2143 train loss:3.602775 +step:2144 train loss:3.709586 +step:2145 train loss:3.681702 +step:2146 train loss:3.716685 +step:2147 train loss:3.824850 +step:2148 train loss:3.623019 +step:2149 train loss:3.636770 +step:2150 train loss:3.657218 +step:2151 train loss:3.693828 +step:2152 train loss:3.691767 +step:2153 train loss:3.728515 +step:2154 train loss:3.650388 +step:2155 train loss:3.729944 +step:2156 train loss:3.656903 +step:2157 train loss:3.727068 +step:2158 train loss:3.769728 +step:2159 train loss:3.697299 +step:2160 train loss:3.763412 +step:2161 train loss:3.665700 +step:2162 train loss:3.669583 +step:2163 train loss:3.644699 +step:2164 train loss:3.665630 +step:2165 train loss:3.648016 +step:2166 train loss:3.761302 +step:2167 train loss:3.672525 +step:2168 train loss:3.681098 +step:2169 train loss:3.635746 +step:2170 train loss:3.786482 +step:2171 train loss:3.739736 +step:2172 train loss:3.671944 +step:2173 train loss:3.670937 +step:2174 train loss:3.731788 +step:2175 train loss:3.658689 +step:2176 train loss:3.746876 +step:2177 train loss:3.711443 +step:2178 train loss:3.637928 +step:2179 train loss:3.707972 +step:2180 train loss:3.729254 +step:2181 train loss:3.651608 +step:2182 train loss:3.699496 +step:2183 train loss:3.695414 +step:2184 train loss:3.646331 +step:2185 train loss:3.628850 +step:2186 train loss:3.667524 +step:2187 train loss:3.683148 +step:2188 train loss:3.726532 +step:2189 train loss:3.618359 +step:2190 train loss:3.664149 +step:2191 train loss:3.716405 +step:2192 train loss:3.648645 +step:2193 train loss:3.619956 +step:2194 train loss:3.618597 +step:2195 train loss:3.650527 +step:2196 train loss:3.654588 +step:2197 train loss:3.639097 +step:2198 train loss:3.666312 +step:2199 train loss:3.727585 +step:2200 train loss:3.663113 +step:2201 train loss:3.672306 +step:2202 train loss:3.628022 +step:2203 train loss:3.648907 +step:2204 train loss:3.682302 +step:2205 train loss:3.665332 +step:2206 train loss:3.665231 +step:2207 train loss:3.664341 +step:2208 train loss:3.639239 +step:2209 train loss:3.926198 +step:2210 train loss:3.692002 +step:2211 train loss:3.682225 +step:2212 train loss:3.663544 +step:2213 train loss:3.735898 +step:2214 train loss:3.731115 +step:2215 train loss:3.657363 +step:2216 train loss:3.621271 +step:2217 train loss:3.654848 +step:2218 train loss:3.653938 +step:2219 train loss:3.688561 +step:2220 train loss:3.629163 +step:2221 train loss:3.662866 +step:2222 train loss:3.679563 +step:2223 train loss:3.722219 
+step:2224 train loss:3.686797 +step:2225 train loss:3.633710 +step:2226 train loss:3.700299 +step:2227 train loss:3.699626 +step:2228 train loss:3.698983 +step:2229 train loss:3.638278 +step:2230 train loss:3.763896 +step:2231 train loss:3.679464 +step:2232 train loss:3.676539 +step:2233 train loss:3.722181 +step:2234 train loss:3.616055 +step:2235 train loss:3.705109 +step:2236 train loss:3.645716 +step:2237 train loss:3.785007 +step:2238 train loss:3.578237 +step:2239 train loss:3.663177 +step:2240 train loss:3.674224 +step:2241 train loss:3.588290 +step:2242 train loss:3.739321 +step:2243 train loss:3.767289 +step:2244 train loss:3.643974 +step:2245 train loss:3.652758 +step:2246 train loss:3.612225 +step:2247 train loss:3.620192 +step:2248 train loss:3.678254 +step:2249 train loss:3.652838 +step:2250 validation loss:3.607107 +step:2250 train loss:3.673868 +step:2251 train loss:3.631989 +step:2252 train loss:3.632245 +step:2253 train loss:3.661308 +step:2254 train loss:3.670849 +step:2255 train loss:3.625082 +step:2256 train loss:3.677198 +step:2257 train loss:3.660229 +step:2258 train loss:3.656348 +step:2259 train loss:3.676494 +step:2260 train loss:3.620136 +step:2261 train loss:3.702256 +step:2262 train loss:3.726484 +step:2263 train loss:3.676633 +step:2264 train loss:3.794249 +step:2265 train loss:3.639370 +step:2266 train loss:3.681358 +step:2267 train loss:3.641738 +step:2268 train loss:3.650092 +step:2269 train loss:3.647264 +step:2270 train loss:3.643931 +step:2271 train loss:3.656842 +step:2272 train loss:3.693164 +step:2273 train loss:3.611539 +step:2274 train loss:3.644795 +step:2275 train loss:3.595454 +step:2276 train loss:3.673782 +step:2277 train loss:3.687148 +step:2278 train loss:3.665345 +step:2279 train loss:3.653451 +step:2280 train loss:3.556186 +step:2281 train loss:3.706171 +step:2282 train loss:3.633172 +step:2283 train loss:3.616981 +step:2284 train loss:3.630477 +step:2285 train loss:3.692531 +step:2286 train loss:3.651628 +step:2287 train loss:3.687350 +step:2288 train loss:3.659186 +step:2289 train loss:3.657175 +step:2290 train loss:3.664068 +step:2291 train loss:3.648888 +step:2292 train loss:3.698178 +step:2293 train loss:3.668968 +step:2294 train loss:3.668976 +step:2295 train loss:3.722347 +step:2296 train loss:3.656347 +step:2297 train loss:3.630382 +step:2298 train loss:3.690922 +step:2299 train loss:3.662444 +step:2300 train loss:3.579969 +step:2301 train loss:3.676645 +step:2302 train loss:3.689036 +step:2303 train loss:3.663535 +step:2304 train loss:3.649174 +step:2305 train loss:3.693792 +step:2306 train loss:3.681428 +step:2307 train loss:3.663050 +step:2308 train loss:3.680110 +step:2309 train loss:3.637209 +step:2310 train loss:3.624510 +step:2311 train loss:3.610159 +step:2312 train loss:3.678686 +step:2313 train loss:3.591980 +step:2314 train loss:3.665503 +step:2315 train loss:3.679802 +step:2316 train loss:3.726337 +step:2317 train loss:3.585204 +step:2318 train loss:3.636193 +step:2319 train loss:3.687730 +step:2320 train loss:3.651999 +step:2321 train loss:3.625072 +step:2322 train loss:3.643789 +step:2323 train loss:3.637295 +step:2324 train loss:3.670746 +step:2325 train loss:3.603163 +step:2326 train loss:3.641361 +step:2327 train loss:3.751835 +step:2328 train loss:3.695578 +step:2329 train loss:3.653560 +step:2330 train loss:3.614663 +step:2331 train loss:3.654482 +step:2332 train loss:3.580568 +step:2333 train loss:3.639862 +step:2334 train loss:3.618335 +step:2335 train loss:3.607592 +step:2336 train loss:3.858354 +step:2337 
train loss:3.630411 +step:2338 train loss:3.675261 +step:2339 train loss:3.668716 +step:2340 train loss:3.690225 +step:2341 train loss:3.677253 +step:2342 train loss:3.628407 +step:2343 train loss:3.653471 +step:2344 train loss:3.697558 +step:2345 train loss:3.646100 +step:2346 train loss:3.675761 +step:2347 train loss:3.605936 +step:2348 train loss:3.659509 +step:2349 train loss:3.609380 +step:2350 train loss:3.668620 +step:2351 train loss:3.670624 +step:2352 train loss:3.676329 +step:2353 train loss:3.640705 +step:2354 train loss:3.683651 +step:2355 train loss:3.675464 +step:2356 train loss:3.710453 +step:2357 train loss:3.615092 +step:2358 train loss:3.630739 +step:2359 train loss:3.654230 +step:2360 train loss:3.681150 +step:2361 train loss:3.706722 +step:2362 train loss:3.547128 +step:2363 train loss:3.737946 +step:2364 train loss:3.684682 +step:2365 train loss:3.651665 +step:2366 train loss:3.602738 +step:2367 train loss:3.676994 +step:2368 train loss:3.660261 +step:2369 train loss:3.658759 +step:2370 train loss:3.664629 +step:2371 train loss:3.722223 +step:2372 train loss:3.576254 +step:2373 train loss:3.715115 +step:2374 train loss:3.697947 +step:2375 train loss:3.681680 +step:2376 train loss:3.674017 +step:2377 train loss:3.615396 +step:2378 train loss:3.664551 +step:2379 train loss:3.651311 +step:2380 train loss:3.712151 +step:2381 train loss:3.812720 +step:2382 train loss:3.594338 +step:2383 train loss:3.638967 +step:2384 train loss:3.672889 +step:2385 train loss:3.574077 +step:2386 train loss:3.729774 +step:2387 train loss:3.606996 +step:2388 train loss:3.660179 +step:2389 train loss:3.682557 +step:2390 train loss:3.633121 +step:2391 train loss:3.660112 +step:2392 train loss:3.684521 +step:2393 train loss:3.635775 +step:2394 train loss:3.663032 +step:2395 train loss:3.654749 +step:2396 train loss:3.657147 +step:2397 train loss:3.629755 +step:2398 train loss:3.693825 +step:2399 train loss:3.654052 +step:2400 train loss:3.628191 +step:2401 train loss:3.672508 +step:2402 train loss:3.623722 +step:2403 train loss:3.670198 +step:2404 train loss:3.631215 +step:2405 train loss:3.633518 +step:2406 train loss:3.657689 +step:2407 train loss:3.603682 +step:2408 train loss:3.644852 +step:2409 train loss:3.638857 +step:2410 train loss:3.632629 +step:2411 train loss:3.711277 +step:2412 train loss:3.689494 +step:2413 train loss:3.742552 +step:2414 train loss:3.624448 +step:2415 train loss:3.615146 +step:2416 train loss:3.633819 +step:2417 train loss:3.671047 +step:2418 train loss:3.686825 +step:2419 train loss:3.620811 +step:2420 train loss:3.636745 +step:2421 train loss:3.664920 +step:2422 train loss:3.715975 +step:2423 train loss:3.654304 +step:2424 train loss:3.615285 +step:2425 train loss:3.679961 +step:2426 train loss:3.620573 +step:2427 train loss:3.638732 +step:2428 train loss:3.727203 +step:2429 train loss:3.675027 +step:2430 train loss:3.767109 +step:2431 train loss:3.678298 +step:2432 train loss:3.652423 +step:2433 train loss:3.623895 +step:2434 train loss:3.611636 +step:2435 train loss:3.672905 +step:2436 train loss:3.627038 +step:2437 train loss:3.658239 +step:2438 train loss:3.704671 +step:2439 train loss:3.687833 +step:2440 train loss:3.629615 +step:2441 train loss:3.667555 +step:2442 train loss:3.659897 +step:2443 train loss:3.613943 +step:2444 train loss:3.655271 +step:2445 train loss:3.654608 +step:2446 train loss:3.622863 +step:2447 train loss:3.606294 +step:2448 train loss:3.660668 +step:2449 train loss:3.684236 +step:2450 train loss:3.644870 +step:2451 train loss:3.575875 
+step:2452 train loss:3.667041 +step:2453 train loss:3.638551 +step:2454 train loss:3.636684 +step:2455 train loss:3.686924 +step:2456 train loss:3.638931 +step:2457 train loss:3.704702 +step:2458 train loss:3.678396 +step:2459 train loss:3.651656 +step:2460 train loss:3.665090 +step:2461 train loss:3.687668 +step:2462 train loss:3.659626 +step:2463 train loss:3.634793 +step:2464 train loss:3.647382 +step:2465 train loss:3.733849 +step:2466 train loss:3.817984 +step:2467 train loss:3.721806 +step:2468 train loss:3.608602 +step:2469 train loss:3.687047 +step:2470 train loss:3.727620 +step:2471 train loss:3.726488 +step:2472 train loss:3.705091 +step:2473 train loss:3.646364 +step:2474 train loss:3.609929 +step:2475 train loss:3.662222 +step:2476 train loss:3.737063 +step:2477 train loss:3.656379 +step:2478 train loss:3.610597 +step:2479 train loss:3.648924 +step:2480 train loss:3.645024 +step:2481 train loss:3.842994 +step:2482 train loss:3.646155 +step:2483 train loss:3.671077 +step:2484 train loss:3.626628 +step:2485 train loss:3.618667 +step:2486 train loss:3.649612 +step:2487 train loss:3.685447 +step:2488 train loss:3.596796 +step:2489 train loss:3.706043 +step:2490 train loss:3.628794 +step:2491 train loss:3.644125 +step:2492 train loss:3.683964 +step:2493 train loss:3.721731 +step:2494 train loss:3.640658 +step:2495 train loss:3.675194 +step:2496 train loss:3.648129 +step:2497 train loss:3.669049 +step:2498 train loss:3.675312 +step:2499 train loss:3.667475 +step:2500 validation loss:3.583135 total_sharp:4.4309e-03 L1_sharp:1.1656e-02 L2_sharp:1.9293e-03 L3_sharp:1.0118e-03 L4_sharp:3.2701e-04 L5_sharp:4.7653e-04 L6_sharp:6.1542e-04 L7_sharp:7.5754e-04 L8_sharp:6.9467e-04 L9_sharp:5.9026e-04 L10_sharp:3.5683e-04 L11_sharp:3.0520e-04 L12_sharp:3.5121e-04 total_fnorm:2.4350e+00 total_l1_linf:2.0820e+04 total_spectral:2.4350e+00 L1_fnorm:5.6790e-01 L2_fnorm:5.3690e-01 L3_fnorm:5.5794e-01 L4_fnorm:5.8657e-01 L5_fnorm:5.9716e-01 L6_fnorm:6.0346e-01 L7_fnorm:6.0222e-01 L8_fnorm:6.0398e-01 L9_fnorm:6.0485e-01 L10_fnorm:6.0597e-01 L11_fnorm:6.0344e-01 L12_fnorm:6.0299e-01 L1_l1linf:4.1968e-01 L2_l1linf:3.9922e-01 L3_l1linf:4.0476e-01 L4_l1linf:4.1902e-01 L5_l1linf:4.2044e-01 L6_l1linf:4.2029e-01 L7_l1linf:4.1943e-01 L8_l1linf:4.1674e-01 L9_l1linf:4.1001e-01 L10_l1linf:4.1117e-01 L11_l1linf:4.0331e-01 L12_l1linf:3.9719e-01 L1_spectral:1.2065e-02 L2_spectral:1.2039e-02 L3_spectral:1.2049e-02 L4_spectral:1.2045e-02 L5_spectral:1.2061e-02 L6_spectral:1.2049e-02 L7_spectral:1.2045e-02 L8_spectral:1.2044e-02 L9_spectral:1.2047e-02 L10_spectral:1.2043e-02 L11_spectral:1.2044e-02 L12_spectral:1.2046e-02 ip_v_neg_g:1.0790e-02 cos_v_neg_g:3.6285e-03 v_norm:2.4350e+00 g_norm:1.2212e+00 hv_norm:8.1649e-01 cos_v_hv:1.3214e-02 hg_norm:4.0687e+01 cos_g_hg:6.1585e-01 v_par:1.1753e-03 v_perp:2.4350e+00 L1_cos_v_neg_g:5.5950e-03 L1_v_norm:5.6790e-01 L2_cos_v_neg_g:7.7883e-03 L2_v_norm:5.3690e-01 L3_cos_v_neg_g:6.6370e-03 L3_v_norm:5.5794e-01 L4_cos_v_neg_g:5.0076e-03 L4_v_norm:5.8657e-01 L5_cos_v_neg_g:5.8855e-03 L5_v_norm:5.9716e-01 L6_cos_v_neg_g:6.6945e-03 L6_v_norm:6.0346e-01 L7_cos_v_neg_g:7.1546e-03 L7_v_norm:6.0222e-01 L8_cos_v_neg_g:6.4123e-03 L8_v_norm:6.0398e-01 L9_cos_v_neg_g:5.5609e-03 L9_v_norm:6.0485e-01 L10_cos_v_neg_g:4.1932e-03 L10_v_norm:6.0597e-01 L11_cos_v_neg_g:3.9616e-03 L11_v_norm:6.0344e-01 L12_cos_v_neg_g:3.3898e-03 L12_v_norm:6.0299e-01 +step:2500 train loss:3.613537 +step:2501 train loss:3.684621 +step:2502 train loss:3.666740 +step:2503 train loss:3.591390 +step:2504 train 
loss:3.630652 +step:2505 train loss:3.648199 +step:2506 train loss:3.613016 +step:2507 train loss:3.643837 +step:2508 train loss:3.592399 +step:2509 train loss:3.611410 +step:2510 train loss:3.606628 +step:2511 train loss:3.650635 +step:2512 train loss:3.697764 +step:2513 train loss:3.647885 +step:2514 train loss:3.631445 +step:2515 train loss:3.776384 +step:2516 train loss:3.645256 +step:2517 train loss:3.713087 +step:2518 train loss:3.675738 +step:2519 train loss:3.656310 +step:2520 train loss:3.660780 +step:2521 train loss:3.637182 +step:2522 train loss:3.671270 +step:2523 train loss:3.589833 +step:2524 train loss:3.649796 +step:2525 train loss:3.640876 +step:2526 train loss:3.691795 +step:2527 train loss:3.688307 +step:2528 train loss:3.668551 +step:2529 train loss:3.683272 +step:2530 train loss:3.662738 +step:2531 train loss:3.601043 +step:2532 train loss:3.702933 +step:2533 train loss:3.602467 +step:2534 train loss:3.690824 +step:2535 train loss:3.641530 +step:2536 train loss:3.567758 +step:2537 train loss:3.682063 +step:2538 train loss:3.657577 +step:2539 train loss:3.681095 +step:2540 train loss:3.614212 +step:2541 train loss:3.648521 +step:2542 train loss:3.657168 +step:2543 train loss:3.642638 +step:2544 train loss:3.632138 +step:2545 train loss:3.617610 +step:2546 train loss:3.583124 +step:2547 train loss:3.634528 +step:2548 train loss:3.657479 +step:2549 train loss:3.661982 +step:2550 train loss:3.792569 +step:2551 train loss:3.865207 +step:2552 train loss:3.599849 +step:2553 train loss:3.630558 +step:2554 train loss:3.775280 +step:2555 train loss:3.659583 +step:2556 train loss:3.582390 +step:2557 train loss:3.682291 +step:2558 train loss:3.672750 +step:2559 train loss:3.627423 +step:2560 train loss:3.619819 +step:2561 train loss:3.705381 +step:2562 train loss:3.664901 +step:2563 train loss:3.598421 +step:2564 train loss:3.665839 +step:2565 train loss:3.645942 +step:2566 train loss:3.627313 +step:2567 train loss:3.607602 +step:2568 train loss:3.657480 +step:2569 train loss:3.671143 +step:2570 train loss:3.619168 +step:2571 train loss:3.705440 +step:2572 train loss:3.663003 +step:2573 train loss:3.594844 +step:2574 train loss:3.650039 +step:2575 train loss:3.692657 +step:2576 train loss:3.641489 +step:2577 train loss:3.605451 +step:2578 train loss:3.646232 +step:2579 train loss:3.622058 +step:2580 train loss:3.596038 +step:2581 train loss:3.607531 +step:2582 train loss:3.614371 +step:2583 train loss:3.642431 +step:2584 train loss:3.658581 +step:2585 train loss:3.621178 +step:2586 train loss:3.642460 +step:2587 train loss:3.575733 +step:2588 train loss:3.610453 +step:2589 train loss:3.685340 +step:2590 train loss:3.607907 +step:2591 train loss:3.672711 +step:2592 train loss:3.717368 +step:2593 train loss:3.672559 +step:2594 train loss:3.633676 +step:2595 train loss:3.639818 +step:2596 train loss:3.677999 +step:2597 train loss:3.563714 +step:2598 train loss:3.723472 +step:2599 train loss:3.667962 +step:2600 train loss:3.701668 +step:2601 train loss:3.640492 +step:2602 train loss:3.665198 +step:2603 train loss:3.661807 +step:2604 train loss:3.585799 +step:2605 train loss:3.716322 +step:2606 train loss:3.659564 +step:2607 train loss:3.616042 +step:2608 train loss:3.594379 +step:2609 train loss:3.617739 +step:2610 train loss:3.641868 +step:2611 train loss:3.680350 +step:2612 train loss:3.647523 +step:2613 train loss:3.616273 +step:2614 train loss:3.607056 +step:2615 train loss:3.599113 +step:2616 train loss:3.679591 +step:2617 train loss:3.639974 +step:2618 train loss:3.606144 
+step:2619 train loss:3.621746 +step:2620 train loss:3.615019 +step:2621 train loss:3.627877 +step:2622 train loss:3.701197 +step:2623 train loss:3.574389 +step:2624 train loss:3.594883 +step:2625 train loss:3.659027 +step:2626 train loss:3.655789 +step:2627 train loss:3.633327 +step:2628 train loss:3.691238 +step:2629 train loss:3.632407 +step:2630 train loss:3.629992 +step:2631 train loss:3.658035 +step:2632 train loss:3.623613 +step:2633 train loss:3.611273 +step:2634 train loss:3.656346 +step:2635 train loss:3.644161 +step:2636 train loss:3.689682 +step:2637 train loss:3.645557 +step:2638 train loss:3.628005 +step:2639 train loss:3.677488 +step:2640 train loss:3.595094 +step:2641 train loss:3.653328 +step:2642 train loss:3.578302 +step:2643 train loss:3.573519 +step:2644 train loss:3.664552 +step:2645 train loss:3.610260 +step:2646 train loss:3.636998 +step:2647 train loss:3.660756 +step:2648 train loss:3.691328 +step:2649 train loss:3.606806 +step:2650 train loss:3.593250 +step:2651 train loss:3.634421 +step:2652 train loss:3.609274 +step:2653 train loss:3.675731 +step:2654 train loss:3.632833 +step:2655 train loss:3.620214 +step:2656 train loss:3.641976 +step:2657 train loss:3.668231 +step:2658 train loss:3.677111 +step:2659 train loss:3.653366 +step:2660 train loss:3.640278 +step:2661 train loss:3.691375 +step:2662 train loss:3.661618 +step:2663 train loss:3.641280 +step:2664 train loss:3.647339 +step:2665 train loss:3.600689 +step:2666 train loss:3.629998 +step:2667 train loss:3.637328 +step:2668 train loss:3.616156 +step:2669 train loss:3.620407 +step:2670 train loss:3.646828 +step:2671 train loss:3.621004 +step:2672 train loss:3.642718 +step:2673 train loss:3.573916 +step:2674 train loss:3.673000 +step:2675 train loss:3.640706 +step:2676 train loss:3.659012 +step:2677 train loss:3.640857 +step:2678 train loss:3.630505 +step:2679 train loss:3.610577 +step:2680 train loss:3.594093 +step:2681 train loss:3.567013 +step:2682 train loss:3.655304 +step:2683 train loss:3.628235 +step:2684 train loss:3.658921 +step:2685 train loss:3.574924 +step:2686 train loss:3.588663 +step:2687 train loss:3.666898 +step:2688 train loss:3.679959 +step:2689 train loss:3.582703 +step:2690 train loss:3.673554 +step:2691 train loss:3.641959 +step:2692 train loss:3.664632 +step:2693 train loss:3.718346 +step:2694 train loss:3.619798 +step:2695 train loss:3.634717 +step:2696 train loss:3.641265 +step:2697 train loss:3.634053 +step:2698 train loss:3.646855 +step:2699 train loss:3.662731 +step:2700 train loss:3.629175 +step:2701 train loss:3.701236 +step:2702 train loss:3.635239 +step:2703 train loss:3.599068 +step:2704 train loss:3.681000 +step:2705 train loss:3.655564 +step:2706 train loss:3.591556 +step:2707 train loss:3.557683 +step:2708 train loss:3.656690 +step:2709 train loss:3.632516 +step:2710 train loss:3.636381 +step:2711 train loss:3.607832 +step:2712 train loss:3.673059 +step:2713 train loss:3.667385 +step:2714 train loss:3.611456 +step:2715 train loss:3.609945 +step:2716 train loss:3.681347 +step:2717 train loss:3.641081 +step:2718 train loss:3.640625 +step:2719 train loss:3.639076 +step:2720 train loss:3.607686 +step:2721 train loss:3.682707 +step:2722 train loss:3.611032 +step:2723 train loss:3.603564 +step:2724 train loss:3.622007 +step:2725 train loss:3.623961 +step:2726 train loss:3.599258 +step:2727 train loss:3.658182 +step:2728 train loss:3.595408 +step:2729 train loss:3.723718 +step:2730 train loss:3.672299 +step:2731 train loss:3.708323 +step:2732 train loss:3.614006 +step:2733 train 
loss:3.610412 +step:2734 train loss:3.663980 +step:2735 train loss:3.660504 +step:2736 train loss:3.585655 +step:2737 train loss:3.640521 +step:2738 train loss:3.695532 +step:2739 train loss:3.617024 +step:2740 train loss:3.613974 +step:2741 train loss:3.603157 +step:2742 train loss:3.531676 +step:2743 train loss:3.632392 +step:2744 train loss:3.658478 +step:2745 train loss:3.611347 +step:2746 train loss:3.624092 +step:2747 train loss:3.607834 +step:2748 train loss:3.574803 +step:2749 train loss:3.642734 +step:2750 validation loss:3.559371 +step:2750 train loss:3.646066 +step:2751 train loss:3.668668 +step:2752 train loss:3.656462 +step:2753 train loss:3.644363 +step:2754 train loss:3.589870 +step:2755 train loss:3.652151 +step:2756 train loss:3.630396 +step:2757 train loss:3.617029 +step:2758 train loss:3.646488 +step:2759 train loss:3.654445 +step:2760 train loss:3.564836 +step:2761 train loss:3.580761 +step:2762 train loss:3.598181 +step:2763 train loss:3.613921 +step:2764 train loss:3.561875 +step:2765 train loss:3.611759 +step:2766 train loss:3.700044 +step:2767 train loss:3.570916 +step:2768 train loss:3.635523 +step:2769 train loss:3.606949 +step:2770 train loss:3.625887 +step:2771 train loss:3.649134 +step:2772 train loss:3.616235 +step:2773 train loss:3.619272 +step:2774 train loss:3.612533 +step:2775 train loss:3.624898 +step:2776 train loss:3.578421 +step:2777 train loss:3.612299 +step:2778 train loss:3.617712 +step:2779 train loss:3.651348 +step:2780 train loss:3.620269 +step:2781 train loss:3.605498 +step:2782 train loss:3.593201 +step:2783 train loss:3.622185 +step:2784 train loss:3.631692 +step:2785 train loss:3.705678 +step:2786 train loss:3.669789 +step:2787 train loss:3.625874 +step:2788 train loss:3.624585 +step:2789 train loss:3.621881 +step:2790 train loss:3.559116 +step:2791 train loss:3.658494 +step:2792 train loss:3.647516 +step:2793 train loss:3.610858 +step:2794 train loss:3.622712 +step:2795 train loss:3.636952 +step:2796 train loss:3.626335 +step:2797 train loss:3.678873 +step:2798 train loss:3.660905 +step:2799 train loss:3.570932 +step:2800 train loss:3.613958 +step:2801 train loss:3.648287 +step:2802 train loss:3.679361 +step:2803 train loss:3.652173 +step:2804 train loss:3.585157 +step:2805 train loss:3.627515 +step:2806 train loss:3.614774 +step:2807 train loss:3.646371 +step:2808 train loss:3.587812 +step:2809 train loss:3.662309 +step:2810 train loss:3.648871 +step:2811 train loss:3.636868 +step:2812 train loss:3.690290 +step:2813 train loss:3.652563 +step:2814 train loss:3.640641 +step:2815 train loss:3.654659 +step:2816 train loss:3.660720 +step:2817 train loss:3.592199 +step:2818 train loss:3.701989 +step:2819 train loss:3.623891 +step:2820 train loss:3.620497 +step:2821 train loss:3.596667 +step:2822 train loss:3.642045 +step:2823 train loss:3.591749 +step:2824 train loss:3.491897 +step:2825 train loss:3.632850 +step:2826 train loss:3.630953 +step:2827 train loss:3.658253 +step:2828 train loss:3.647025 +step:2829 train loss:3.635508 +step:2830 train loss:3.665956 +step:2831 train loss:3.607481 +step:2832 train loss:3.579043 +step:2833 train loss:3.636319 +step:2834 train loss:3.587175 +step:2835 train loss:3.620558 +step:2836 train loss:3.627818 +step:2837 train loss:3.627716 +step:2838 train loss:3.571517 +step:2839 train loss:3.670210 +step:2840 train loss:3.630139 +step:2841 train loss:3.707435 +step:2842 train loss:3.652174 +step:2843 train loss:3.645354 +step:2844 train loss:3.672406 +step:2845 train loss:3.624380 +step:2846 train loss:3.577151 
+step:2847 train loss:3.671124 +step:2848 train loss:3.627016 +step:2849 train loss:3.614341 +step:2850 train loss:3.668595 +step:2851 train loss:3.624135 +step:2852 train loss:3.707342 +step:2853 train loss:3.620079 +step:2854 train loss:3.575090 +step:2855 train loss:3.642551 +step:2856 train loss:3.559633 +step:2857 train loss:3.671276 +step:2858 train loss:3.626770 +step:2859 train loss:3.614051 +step:2860 train loss:3.602309 +step:2861 train loss:3.583508 +step:2862 train loss:3.618458 +step:2863 train loss:3.601161 +step:2864 train loss:3.606966 +step:2865 train loss:3.683904 +step:2866 train loss:3.691167 +step:2867 train loss:3.635727 +step:2868 train loss:3.634352 +step:2869 train loss:3.591958 +step:2870 train loss:3.679266 +step:2871 train loss:3.679014 +step:2872 train loss:3.645311 +step:2873 train loss:3.646078 +step:2874 train loss:3.625185 +step:2875 train loss:3.581237 +step:2876 train loss:3.623959 +step:2877 train loss:3.603815 +step:2878 train loss:3.620325 +step:2879 train loss:3.588045 +step:2880 train loss:3.605036 +step:2881 train loss:3.602954 +step:2882 train loss:3.535540 +step:2883 train loss:3.618240 +step:2884 train loss:3.694975 +step:2885 train loss:3.584774 +step:2886 train loss:3.636783 +step:2887 train loss:3.658554 +step:2888 train loss:3.630084 +step:2889 train loss:3.614188 +step:2890 train loss:3.588214 +step:2891 train loss:3.631639 +step:2892 train loss:3.631767 +step:2893 train loss:3.612486 +step:2894 train loss:3.587796 +step:2895 train loss:3.640006 +step:2896 train loss:3.682348 +step:2897 train loss:3.659886 +step:2898 train loss:3.802232 +step:2899 train loss:3.553820 +step:2900 train loss:3.622674 +step:2901 train loss:3.576362 +step:2902 train loss:3.578367 +step:2903 train loss:3.596139 +step:2904 train loss:3.619779 +step:2905 train loss:3.678847 +step:2906 train loss:3.652855 +step:2907 train loss:3.827419 +step:2908 train loss:3.571551 +step:2909 train loss:3.649940 +step:2910 train loss:3.621388 +step:2911 train loss:3.652115 +step:2912 train loss:3.606220 +step:2913 train loss:3.641838 +step:2914 train loss:3.670364 +step:2915 train loss:3.662822 +step:2916 train loss:3.619510 +step:2917 train loss:3.660877 +step:2918 train loss:3.647977 +step:2919 train loss:3.597466 +step:2920 train loss:3.645612 +step:2921 train loss:3.601792 +step:2922 train loss:3.626488 +step:2923 train loss:3.691339 +step:2924 train loss:3.627985 +step:2925 train loss:3.581393 +step:2926 train loss:3.671628 +step:2927 train loss:3.578043 +step:2928 train loss:3.549711 +step:2929 train loss:3.565478 +step:2930 train loss:3.585297 +step:2931 train loss:3.742239 +step:2932 train loss:3.659490 +step:2933 train loss:3.622396 +step:2934 train loss:3.617790 +step:2935 train loss:3.638686 +step:2936 train loss:3.586672 +step:2937 train loss:3.604925 +step:2938 train loss:3.625570 +step:2939 train loss:3.700037 +step:2940 train loss:3.600949 +step:2941 train loss:3.636384 +step:2942 train loss:3.595163 +step:2943 train loss:3.877161 +step:2944 train loss:3.701555 +step:2945 train loss:3.657069 +step:2946 train loss:3.670486 +step:2947 train loss:3.627137 +step:2948 train loss:3.583476 +step:2949 train loss:3.672830 +step:2950 train loss:3.630931 +step:2951 train loss:3.529186 +step:2952 train loss:3.597859 +step:2953 train loss:3.507051 +step:2954 train loss:3.603255 +step:2955 train loss:3.667676 +step:2956 train loss:3.615423 +step:2957 train loss:3.622455 +step:2958 train loss:3.574930 +step:2959 train loss:3.596477 +step:2960 train loss:3.688759 +step:2961 train 
loss:3.549592 +step:2962 train loss:3.628987 +step:2963 train loss:3.625489 +step:2964 train loss:3.603786 +step:2965 train loss:3.633596 +step:2966 train loss:3.607105 +step:2967 train loss:3.605702 +step:2968 train loss:3.579447 +step:2969 train loss:3.588956 +step:2970 train loss:3.655337 +step:2971 train loss:3.584432 +step:2972 train loss:3.571967 +step:2973 train loss:3.564601 +step:2974 train loss:3.603366 +step:2975 train loss:3.570154 +step:2976 train loss:3.611751 +step:2977 train loss:3.599863 +step:2978 train loss:3.683469 +step:2979 train loss:3.665413 +step:2980 train loss:3.681425 +step:2981 train loss:3.622651 +step:2982 train loss:3.617878 +step:2983 train loss:3.564760 +step:2984 train loss:3.545571 +step:2985 train loss:3.656800 +step:2986 train loss:3.551517 +step:2987 train loss:3.683314 +step:2988 train loss:3.608119 +step:2989 train loss:3.635684 +step:2990 train loss:3.586914 +step:2991 train loss:3.655992 +step:2992 train loss:3.651092 +step:2993 train loss:3.615386 +step:2994 train loss:3.606378 +step:2995 train loss:3.676400 +step:2996 train loss:3.601412 +step:2997 train loss:3.506122 +step:2998 train loss:3.625344 +step:2999 train loss:3.671508 +step:3000 validation loss:3.541645 total_sharp:2.6820e-03 L1_sharp:5.6619e-03 L2_sharp:7.2423e-04 L3_sharp:5.9750e-04 L4_sharp:2.7006e-04 L5_sharp:4.6686e-04 L6_sharp:5.3539e-04 L7_sharp:4.8348e-04 L8_sharp:4.3658e-04 L9_sharp:4.8894e-04 L10_sharp:3.2883e-04 L11_sharp:2.9738e-04 L12_sharp:3.3731e-04 total_fnorm:2.4314e+00 total_l1_linf:2.0791e+04 total_spectral:2.4314e+00 L1_fnorm:5.6778e-01 L2_fnorm:5.3402e-01 L3_fnorm:5.5943e-01 L4_fnorm:5.8647e-01 L5_fnorm:5.9604e-01 L6_fnorm:6.0170e-01 L7_fnorm:6.0102e-01 L8_fnorm:6.0238e-01 L9_fnorm:6.0343e-01 L10_fnorm:6.0366e-01 L11_fnorm:6.0104e-01 L12_fnorm:6.0155e-01 L1_l1linf:4.1556e-01 L2_l1linf:4.5337e-01 L3_l1linf:4.5159e-01 L4_l1linf:4.4133e-01 L5_l1linf:4.2063e-01 L6_l1linf:4.1905e-01 L7_l1linf:4.1489e-01 L8_l1linf:4.1467e-01 L9_l1linf:4.1032e-01 L10_l1linf:4.0878e-01 L11_l1linf:4.2054e-01 L12_l1linf:4.3504e-01 L1_spectral:1.2055e-02 L2_spectral:1.2038e-02 L3_spectral:1.2048e-02 L4_spectral:1.2048e-02 L5_spectral:1.2068e-02 L6_spectral:1.2047e-02 L7_spectral:1.2044e-02 L8_spectral:1.2043e-02 L9_spectral:1.2046e-02 L10_spectral:1.2045e-02 L11_spectral:1.2045e-02 L12_spectral:1.2048e-02 ip_v_neg_g:9.0378e-03 cos_v_neg_g:3.2111e-03 v_norm:2.4314e+00 g_norm:1.1576e+00 hv_norm:4.3322e-01 cos_v_hv:1.5052e-02 hg_norm:2.7410e+01 cos_g_hg:6.4483e-01 v_par:1.1227e-03 v_perp:2.4314e+00 L1_cos_v_neg_g:5.1391e-03 L1_v_norm:5.6778e-01 L2_cos_v_neg_g:6.8060e-03 L2_v_norm:5.3402e-01 L3_cos_v_neg_g:4.8668e-03 L3_v_norm:5.5943e-01 L4_cos_v_neg_g:4.5900e-03 L4_v_norm:5.8647e-01 L5_cos_v_neg_g:4.8433e-03 L5_v_norm:5.9604e-01 L6_cos_v_neg_g:5.2688e-03 L6_v_norm:6.0170e-01 L7_cos_v_neg_g:5.6791e-03 L7_v_norm:6.0102e-01 L8_cos_v_neg_g:5.7643e-03 L8_v_norm:6.0238e-01 L9_cos_v_neg_g:5.9091e-03 L9_v_norm:6.0343e-01 L10_cos_v_neg_g:4.5285e-03 L10_v_norm:6.0366e-01 L11_cos_v_neg_g:3.6320e-03 L11_v_norm:6.0104e-01 L12_cos_v_neg_g:4.4242e-03 L12_v_norm:6.0155e-01 +step:3000 train loss:3.560652 +step:3001 train loss:3.608034 +step:3002 train loss:3.605567 +step:3003 train loss:3.604864 +step:3004 train loss:3.633512 +step:3005 train loss:3.530605 +step:3006 train loss:3.578304 +step:3007 train loss:3.612621 +step:3008 train loss:3.659762 +step:3009 train loss:3.613805 +step:3010 train loss:3.638347 +step:3011 train loss:3.615817 +step:3012 train loss:3.595410 +step:3013 train loss:3.635844 +step:3014 
train loss:3.596435 +step:3015 train loss:3.595716 +step:3016 train loss:3.610569 +step:3017 train loss:3.636756 +step:3018 train loss:3.567536 +step:3019 train loss:3.603704 +step:3020 train loss:3.626795 +step:3021 train loss:3.585027 +step:3022 train loss:3.678085 +step:3023 train loss:3.626592 +step:3024 train loss:3.611839 +step:3025 train loss:3.628746 +step:3026 train loss:3.594466 +step:3027 train loss:3.577219 +step:3028 train loss:3.631478 +step:3029 train loss:3.617007 +step:3030 train loss:3.589308 +step:3031 train loss:3.570443 +step:3032 train loss:3.563738 +step:3033 train loss:3.586197 +step:3034 train loss:3.634642 +step:3035 train loss:3.610516 +step:3036 train loss:3.575457 +step:3037 train loss:3.536095 +step:3038 train loss:3.651113 +step:3039 train loss:3.536136 +step:3040 train loss:3.512606 +step:3041 train loss:3.644898 +step:3042 train loss:3.579987 +step:3043 train loss:3.642034 +step:3044 train loss:3.540394 +step:3045 train loss:3.583866 +step:3046 train loss:3.560685 +step:3047 train loss:3.582737 +step:3048 train loss:3.557909 +step:3049 train loss:3.630873 +step:3050 train loss:3.520539 +step:3051 train loss:3.534334 +step:3052 train loss:3.554226 +step:3053 train loss:3.631951 +step:3054 train loss:3.698977 +step:3055 train loss:3.541398 +step:3056 train loss:3.565701 +step:3057 train loss:3.606527 +step:3058 train loss:3.553494 +step:3059 train loss:3.575176 +step:3060 train loss:3.581800 +step:3061 train loss:3.557819 +step:3062 train loss:3.617328 +step:3063 train loss:3.598479 +step:3064 train loss:3.626449 +step:3065 train loss:3.642354 +step:3066 train loss:3.534174 +step:3067 train loss:3.585856 +step:3068 train loss:3.636003 +step:3069 train loss:3.656618 +step:3070 train loss:3.578172 +step:3071 train loss:3.604342 +step:3072 train loss:3.601566 +step:3073 train loss:3.638420 +step:3074 train loss:3.572367 +step:3075 train loss:3.612640 +step:3076 train loss:3.543483 +step:3077 train loss:3.545329 +step:3078 train loss:3.578241 +step:3079 train loss:3.622456 +step:3080 train loss:3.612956 +step:3081 train loss:3.651630 +step:3082 train loss:3.632236 +step:3083 train loss:3.560606 +step:3084 train loss:3.642124 +step:3085 train loss:3.570901 +step:3086 train loss:3.632740 +step:3087 train loss:3.602731 +step:3088 train loss:3.679982 +step:3089 train loss:3.556792 +step:3090 train loss:3.627747 +step:3091 train loss:3.559095 +step:3092 train loss:3.570107 +step:3093 train loss:3.600567 +step:3094 train loss:3.583304 +step:3095 train loss:3.667898 +step:3096 train loss:3.597252 +step:3097 train loss:3.604939 +step:3098 train loss:3.588014 +step:3099 train loss:3.592716 +step:3100 train loss:3.623565 +step:3101 train loss:3.705458 +step:3102 train loss:3.627846 +step:3103 train loss:3.553445 +step:3104 train loss:3.633572 +step:3105 train loss:3.606349 +step:3106 train loss:3.604335 +step:3107 train loss:3.587402 +step:3108 train loss:3.557320 +step:3109 train loss:3.617814 +step:3110 train loss:3.542212 +step:3111 train loss:3.578268 +step:3112 train loss:3.516949 +step:3113 train loss:3.637886 +step:3114 train loss:3.551088 +step:3115 train loss:3.592349 +step:3116 train loss:3.478287 +step:3117 train loss:3.484624 +step:3118 train loss:3.593878 +step:3119 train loss:3.595958 +step:3120 train loss:3.598403 +step:3121 train loss:3.539383 +step:3122 train loss:3.626649 +step:3123 train loss:3.541687 +step:3124 train loss:3.608193 +step:3125 train loss:3.619518 +step:3126 train loss:3.724243 +step:3127 train loss:3.575724 +step:3128 train loss:3.601094 
+step:3129 train loss:3.586793 +step:3130 train loss:3.563121 +step:3131 train loss:3.639904 +step:3132 train loss:3.621113 +step:3133 train loss:3.600867 +step:3134 train loss:3.486578 +step:3135 train loss:3.590228 +step:3136 train loss:3.554183 +step:3137 train loss:3.695192 +step:3138 train loss:3.593448 +step:3139 train loss:3.572634 +step:3140 train loss:3.592635 +step:3141 train loss:3.597883 +step:3142 train loss:3.534723 +step:3143 train loss:3.616628 +step:3144 train loss:3.568403 +step:3145 train loss:3.551605 +step:3146 train loss:3.570230 +step:3147 train loss:3.676222 +step:3148 train loss:3.581151 +step:3149 train loss:3.633926 +step:3150 train loss:3.624914 +step:3151 train loss:3.587928 +step:3152 train loss:3.589985 +step:3153 train loss:3.544554 +step:3154 train loss:3.632406 +step:3155 train loss:3.568741 +step:3156 train loss:3.622051 +step:3157 train loss:3.625166 +step:3158 train loss:3.600044 +step:3159 train loss:3.535846 +step:3160 train loss:3.585041 +step:3161 train loss:3.561692 +step:3162 train loss:3.612725 +step:3163 train loss:3.597954 +step:3164 train loss:3.574357 +step:3165 train loss:3.591065 +step:3166 train loss:3.626996 +step:3167 train loss:3.589738 +step:3168 train loss:3.667051 +step:3169 train loss:3.581205 +step:3170 train loss:3.561208 +step:3171 train loss:3.555862 +step:3172 train loss:3.557668 +step:3173 train loss:3.505554 +step:3174 train loss:3.617303 +step:3175 train loss:3.584069 +step:3176 train loss:3.596814 +step:3177 train loss:3.564690 +step:3178 train loss:3.540257 +step:3179 train loss:3.619175 +step:3180 train loss:3.548749 +step:3181 train loss:3.630972 +step:3182 train loss:3.634550 +step:3183 train loss:3.578527 +step:3184 train loss:3.575039 +step:3185 train loss:3.633661 +step:3186 train loss:3.593772 +step:3187 train loss:3.612575 +step:3188 train loss:3.657306 +step:3189 train loss:3.592660 +step:3190 train loss:3.556773 +step:3191 train loss:3.555473 +step:3192 train loss:3.525972 +step:3193 train loss:3.597857 +step:3194 train loss:3.568800 +step:3195 train loss:3.547730 +step:3196 train loss:3.602527 +step:3197 train loss:3.564150 +step:3198 train loss:3.598089 +step:3199 train loss:3.577342 +step:3200 train loss:3.585705 +step:3201 train loss:3.549123 +step:3202 train loss:3.617423 +step:3203 train loss:3.676752 +step:3204 train loss:3.642020 +step:3205 train loss:3.487231 +step:3206 train loss:3.767905 +step:3207 train loss:3.522634 +step:3208 train loss:3.592411 +step:3209 train loss:3.580851 +step:3210 train loss:3.564418 +step:3211 train loss:3.591809 +step:3212 train loss:3.602821 +step:3213 train loss:3.541674 +step:3214 train loss:3.648058 +step:3215 train loss:3.656110 +step:3216 train loss:3.518826 +step:3217 train loss:3.607325 +step:3218 train loss:3.640231 +step:3219 train loss:3.560730 +step:3220 train loss:3.629951 +step:3221 train loss:3.544508 +step:3222 train loss:3.590152 +step:3223 train loss:3.604732 +step:3224 train loss:3.616254 +step:3225 train loss:3.538725 +step:3226 train loss:3.573750 +step:3227 train loss:3.601667 +step:3228 train loss:3.596318 +step:3229 train loss:3.625509 +step:3230 train loss:3.642913 +step:3231 train loss:3.584768 +step:3232 train loss:3.590751 +step:3233 train loss:3.564622 +step:3234 train loss:3.550233 +step:3235 train loss:3.551773 +step:3236 train loss:3.571894 +step:3237 train loss:3.574862 +step:3238 train loss:3.597772 +step:3239 train loss:3.495148 +step:3240 train loss:3.604680 +step:3241 train loss:3.602395 +step:3242 train loss:3.662826 +step:3243 train 
loss:3.598481 +step:3244 train loss:3.619161 +step:3245 train loss:3.517276 +step:3246 train loss:3.645176 +step:3247 train loss:3.592159 +step:3248 train loss:3.610837 +step:3249 train loss:3.554234 +step:3250 validation loss:3.519388 +step:3250 train loss:3.552714 +step:3251 train loss:3.671088 +step:3252 train loss:3.592855 +step:3253 train loss:3.596396 +step:3254 train loss:3.664148 +step:3255 train loss:3.608664 +step:3256 train loss:3.599316 +step:3257 train loss:3.584315 +step:3258 train loss:3.512823 +step:3259 train loss:3.496187 +step:3260 train loss:3.607917 +step:3261 train loss:3.585131 +step:3262 train loss:3.576276 +step:3263 train loss:3.563922 +step:3264 train loss:3.670068 +step:3265 train loss:3.581076 +step:3266 train loss:3.610219 +step:3267 train loss:3.572490 +step:3268 train loss:3.577118 +step:3269 train loss:3.589481 +step:3270 train loss:3.617983 +step:3271 train loss:3.579945 +step:3272 train loss:3.564668 +step:3273 train loss:3.571948 +step:3274 train loss:3.706859 +step:3275 train loss:3.575731 +step:3276 train loss:3.645032 +step:3277 train loss:3.582923 +step:3278 train loss:3.561134 +step:3279 train loss:3.579586 +step:3280 train loss:3.612348 +step:3281 train loss:3.535507 +step:3282 train loss:3.611366 +step:3283 train loss:3.577588 +step:3284 train loss:3.545284 +step:3285 train loss:3.557749 +step:3286 train loss:3.590882 +step:3287 train loss:3.525043 +step:3288 train loss:3.612580 +step:3289 train loss:3.551610 +step:3290 train loss:3.585657 +step:3291 train loss:3.540467 +step:3292 train loss:3.568787 +step:3293 train loss:3.608191 +step:3294 train loss:3.628559 +step:3295 train loss:3.533256 +step:3296 train loss:3.595802 +step:3297 train loss:3.549775 +step:3298 train loss:3.554611 +step:3299 train loss:3.681623 +step:3300 train loss:3.524781 +step:3301 train loss:3.596612 +step:3302 train loss:3.578804 +step:3303 train loss:3.583894 +step:3304 train loss:3.555813 +step:3305 train loss:3.641981 +step:3306 train loss:3.575636 +step:3307 train loss:3.595127 +step:3308 train loss:3.554679 +step:3309 train loss:3.608941 +step:3310 train loss:3.531410 +step:3311 train loss:3.581453 +step:3312 train loss:3.553784 +step:3313 train loss:3.585340 +step:3314 train loss:3.583298 +step:3315 train loss:3.662200 +step:3316 train loss:3.516636 +step:3317 train loss:3.601635 +step:3318 train loss:3.620667 +step:3319 train loss:3.538648 +step:3320 train loss:3.708900 +step:3321 train loss:3.604774 +step:3322 train loss:3.609415 +step:3323 train loss:3.710095 +step:3324 train loss:3.631127 +step:3325 train loss:3.600390 +step:3326 train loss:3.593021 +step:3327 train loss:3.606328 +step:3328 train loss:3.588263 +step:3329 train loss:3.583814 +step:3330 train loss:3.575335 +step:3331 train loss:3.622342 +step:3332 train loss:3.642707 +step:3333 train loss:3.610338 +step:3334 train loss:3.538924 +step:3335 train loss:3.561449 +step:3336 train loss:3.586740 +step:3337 train loss:3.591679 +step:3338 train loss:3.574749 +step:3339 train loss:3.574229 +step:3340 train loss:3.612880 +step:3341 train loss:3.553425 +step:3342 train loss:3.603054 +step:3343 train loss:3.544461 +step:3344 train loss:3.602950 +step:3345 train loss:3.552980 +step:3346 train loss:3.565436 +step:3347 train loss:3.567744 +step:3348 train loss:3.586821 +step:3349 train loss:3.572478 +step:3350 train loss:3.600098 +step:3351 train loss:3.654739 +step:3352 train loss:3.596911 +step:3353 train loss:3.696594 +step:3354 train loss:3.542202 +step:3355 train loss:3.648098 +step:3356 train loss:3.596704 
+step:3357 train loss:3.606968 +step:3358 train loss:3.545914 +step:3359 train loss:3.575892 +step:3360 train loss:3.574348 +step:3361 train loss:3.572520 +step:3362 train loss:3.563804 +step:3363 train loss:3.567099 +step:3364 train loss:3.544367 +step:3365 train loss:3.586277 +step:3366 train loss:3.617492 +step:3367 train loss:3.572135 +step:3368 train loss:3.663241 +step:3369 train loss:3.578735 +step:3370 train loss:3.645651 +step:3371 train loss:3.623388 +step:3372 train loss:3.585314 +step:3373 train loss:3.597703 +step:3374 train loss:3.646987 +step:3375 train loss:3.579227 +step:3376 train loss:3.591696 +step:3377 train loss:3.573649 +step:3378 train loss:3.549377 +step:3379 train loss:3.629284 +step:3380 train loss:3.611411 +step:3381 train loss:3.590278 +step:3382 train loss:3.613132 +step:3383 train loss:3.617626 +step:3384 train loss:3.552429 +step:3385 train loss:3.598785 +step:3386 train loss:3.581867 +step:3387 train loss:3.654837 +step:3388 train loss:3.554847 +step:3389 train loss:3.771526 +step:3390 train loss:3.488697 +step:3391 train loss:3.571669 +step:3392 train loss:3.556546 +step:3393 train loss:3.587292 +step:3394 train loss:3.545604 +step:3395 train loss:3.618628 +step:3396 train loss:3.529372 +step:3397 train loss:3.610667 +step:3398 train loss:3.575746 +step:3399 train loss:3.596687 +step:3400 train loss:3.541964 +step:3401 train loss:3.579255 +step:3402 train loss:3.741560 +step:3403 train loss:3.629881 +step:3404 train loss:3.745119 +step:3405 train loss:3.603575 +step:3406 train loss:3.570034 +step:3407 train loss:3.572887 +step:3408 train loss:3.551413 +step:3409 train loss:3.519335 +step:3410 train loss:3.553428 +step:3411 train loss:3.621402 +step:3412 train loss:3.543199 +step:3413 train loss:3.537335 +step:3414 train loss:3.577196 +step:3415 train loss:3.546693 +step:3416 train loss:3.557366 +step:3417 train loss:3.632835 +step:3418 train loss:3.636221 +step:3419 train loss:3.591563 +step:3420 train loss:3.570568 +step:3421 train loss:3.601735 +step:3422 train loss:3.618833 +step:3423 train loss:3.634763 +step:3424 train loss:3.516080 +step:3425 train loss:3.539554 +step:3426 train loss:3.536695 +step:3427 train loss:3.597011 +step:3428 train loss:3.519104 +step:3429 train loss:3.586321 +step:3430 train loss:3.555419 +step:3431 train loss:3.611198 +step:3432 train loss:3.594204 +step:3433 train loss:3.551898 +step:3434 train loss:3.636037 +step:3435 train loss:3.576910 +step:3436 train loss:3.664918 +step:3437 train loss:3.497384 +step:3438 train loss:3.599795 +step:3439 train loss:3.577779 +step:3440 train loss:3.672881 +step:3441 train loss:3.568569 +step:3442 train loss:3.634417 +step:3443 train loss:3.566424 +step:3444 train loss:3.586030 +step:3445 train loss:3.631250 +step:3446 train loss:3.536354 +step:3447 train loss:3.608707 +step:3448 train loss:3.566471 +step:3449 train loss:3.595211 +step:3450 train loss:3.501026 +step:3451 train loss:3.623276 +step:3452 train loss:3.570558 +step:3453 train loss:3.627843 +step:3454 train loss:3.651253 +step:3455 train loss:3.702624 +step:3456 train loss:3.648345 +step:3457 train loss:3.645574 +step:3458 train loss:3.569001 +step:3459 train loss:3.574388 +step:3460 train loss:3.525211 +step:3461 train loss:3.585912 +step:3462 train loss:3.588794 +step:3463 train loss:3.559663 +step:3464 train loss:3.618515 +step:3465 train loss:3.544704 +step:3466 train loss:3.614331 +step:3467 train loss:3.569164 +step:3468 train loss:3.584077 +step:3469 train loss:3.593051 +step:3470 train loss:3.576466 +step:3471 train 
loss:3.617260 +step:3472 train loss:3.501153 +step:3473 train loss:3.629722 +step:3474 train loss:3.519609 +step:3475 train loss:3.606456 +step:3476 train loss:3.572968 +step:3477 train loss:3.595327 +step:3478 train loss:3.569182 +step:3479 train loss:3.597858 +step:3480 train loss:3.618389 +step:3481 train loss:3.594841 +step:3482 train loss:3.582632 +step:3483 train loss:3.722439 +step:3484 train loss:3.563788 +step:3485 train loss:3.551245 +step:3486 train loss:3.604934 +step:3487 train loss:3.642957 +step:3488 train loss:3.553158 +step:3489 train loss:3.601852 +step:3490 train loss:3.565120 +step:3491 train loss:3.610467 +step:3492 train loss:3.641052 +step:3493 train loss:3.614878 +step:3494 train loss:3.607766 +step:3495 train loss:3.586747 +step:3496 train loss:3.551150 +step:3497 train loss:3.663561 +step:3498 train loss:3.606485 +step:3499 train loss:3.536529 +step:3500 validation loss:3.507669 total_sharp:2.2599e-03 L1_sharp:4.2604e-03 L2_sharp:5.1176e-04 L3_sharp:1.2476e-03 L4_sharp:2.8479e-04 L5_sharp:4.6023e-04 L6_sharp:5.5659e-04 L7_sharp:5.0710e-04 L8_sharp:4.1053e-04 L9_sharp:3.8767e-04 L10_sharp:2.5089e-04 L11_sharp:2.3798e-04 L12_sharp:3.2869e-04 total_fnorm:2.4348e+00 total_l1_linf:2.0819e+04 total_spectral:2.4348e+00 L1_fnorm:5.6395e-01 L2_fnorm:5.3682e-01 L3_fnorm:5.5838e-01 L4_fnorm:5.8695e-01 L5_fnorm:5.9751e-01 L6_fnorm:6.0142e-01 L7_fnorm:6.0191e-01 L8_fnorm:6.0423e-01 L9_fnorm:6.0496e-01 L10_fnorm:6.0579e-01 L11_fnorm:6.0319e-01 L12_fnorm:6.0298e-01 L1_l1linf:4.1281e-01 L2_l1linf:4.1000e-01 L3_l1linf:4.0854e-01 L4_l1linf:4.2748e-01 L5_l1linf:4.1894e-01 L6_l1linf:4.2229e-01 L7_l1linf:4.1933e-01 L8_l1linf:4.1696e-01 L9_l1linf:4.1267e-01 L10_l1linf:4.1135e-01 L11_l1linf:4.0149e-01 L12_l1linf:3.9344e-01 L1_spectral:1.2050e-02 L2_spectral:1.2039e-02 L3_spectral:1.2048e-02 L4_spectral:1.2047e-02 L5_spectral:1.2062e-02 L6_spectral:1.2044e-02 L7_spectral:1.2043e-02 L8_spectral:1.2040e-02 L9_spectral:1.2045e-02 L10_spectral:1.2045e-02 L11_spectral:1.2046e-02 L12_spectral:1.2044e-02 ip_v_neg_g:7.8690e-03 cos_v_neg_g:2.6019e-03 v_norm:2.4348e+00 g_norm:1.2421e+00 hv_norm:4.0716e-01 cos_v_hv:1.3514e-02 hg_norm:3.2507e+01 cos_g_hg:6.3757e-01 v_par:5.1780e-04 v_perp:2.4348e+00 L1_cos_v_neg_g:2.4984e-03 L1_v_norm:5.6395e-01 L2_cos_v_neg_g:4.6089e-03 L2_v_norm:5.3682e-01 L3_cos_v_neg_g:6.6063e-03 L3_v_norm:5.5838e-01 L4_cos_v_neg_g:4.1188e-03 L4_v_norm:5.8695e-01 L5_cos_v_neg_g:4.5613e-03 L5_v_norm:5.9751e-01 L6_cos_v_neg_g:5.6363e-03 L6_v_norm:6.0142e-01 L7_cos_v_neg_g:5.0932e-03 L7_v_norm:6.0191e-01 L8_cos_v_neg_g:5.8964e-03 L8_v_norm:6.0423e-01 L9_cos_v_neg_g:5.4016e-03 L9_v_norm:6.0496e-01 L10_cos_v_neg_g:4.1258e-03 L10_v_norm:6.0579e-01 L11_cos_v_neg_g:4.2396e-03 L11_v_norm:6.0319e-01 L12_cos_v_neg_g:3.5053e-03 L12_v_norm:6.0298e-01 +step:3500 train loss:3.557792 +step:3501 train loss:3.684174 +step:3502 train loss:3.666179 +step:3503 train loss:3.617918 +step:3504 train loss:3.570181 +step:3505 train loss:3.583143 +step:3506 train loss:3.486969 +step:3507 train loss:3.604190 +step:3508 train loss:3.542024 +step:3509 train loss:3.617441 +step:3510 train loss:3.544935 +step:3511 train loss:3.585595 +step:3512 train loss:3.720879 +step:3513 train loss:3.541884 +step:3514 train loss:3.557317 +step:3515 train loss:3.811241 +step:3516 train loss:3.603112 +step:3517 train loss:3.559790 +step:3518 train loss:3.565770 +step:3519 train loss:3.555066 +step:3520 train loss:3.589893 +step:3521 train loss:3.576791 +step:3522 train loss:3.492595 +step:3523 train loss:3.593786 +step:3524 
train loss:3.576556 +step:3525 train loss:3.569394 +step:3526 train loss:3.588178 +step:3527 train loss:3.541887 +step:3528 train loss:3.591924 +step:3529 train loss:3.569144 +step:3530 train loss:3.568419 +step:3531 train loss:3.554214 +step:3532 train loss:3.750646 +step:3533 train loss:3.559362 +step:3534 train loss:3.581260 +step:3535 train loss:3.552895 +step:3536 train loss:3.551335 +step:3537 train loss:3.568479 +step:3538 train loss:3.591862 +step:3539 train loss:3.542467 +step:3540 train loss:3.609246 +step:3541 train loss:3.580309 +step:3542 train loss:3.585227 +step:3543 train loss:3.509835 +step:3544 train loss:3.525077 +step:3545 train loss:3.530646 +step:3546 train loss:3.596141 +step:3547 train loss:3.601291 +step:3548 train loss:3.578286 +step:3549 train loss:3.574530 +step:3550 train loss:3.567095 +step:3551 train loss:3.590673 +step:3552 train loss:3.490291 +step:3553 train loss:3.607279 +step:3554 train loss:3.605329 +step:3555 train loss:3.584518 +step:3556 train loss:3.609825 +step:3557 train loss:3.597749 +step:3558 train loss:3.570564 +step:3559 train loss:3.518047 +step:3560 train loss:3.610943 +step:3561 train loss:3.602787 +step:3562 train loss:3.777062 +step:3563 train loss:3.632885 +step:3564 train loss:3.594018 +step:3565 train loss:3.594598 +step:3566 train loss:3.570019 +step:3567 train loss:3.508544 +step:3568 train loss:3.538783 +step:3569 train loss:3.624434 +step:3570 train loss:3.646080 +step:3571 train loss:3.621121 +step:3572 train loss:3.613989 +step:3573 train loss:3.571437 +step:3574 train loss:3.571991 +step:3575 train loss:3.562208 +step:3576 train loss:3.544076 +step:3577 train loss:3.556947 +step:3578 train loss:3.636609 +step:3579 train loss:3.548120 +step:3580 train loss:3.629237 +step:3581 train loss:3.571760 +step:3582 train loss:3.627195 +step:3583 train loss:3.562178 +step:3584 train loss:3.539444 +step:3585 train loss:3.585630 +step:3586 train loss:3.539341 +step:3587 train loss:3.627896 +step:3588 train loss:3.766761 +step:3589 train loss:3.591716 +step:3590 train loss:3.574539 +step:3591 train loss:3.587186 +step:3592 train loss:3.545266 +step:3593 train loss:3.518244 +step:3594 train loss:3.574871 +step:3595 train loss:3.544858 +step:3596 train loss:3.631185 +step:3597 train loss:3.598652 +step:3598 train loss:3.555561 +step:3599 train loss:3.606622 +step:3600 train loss:3.541914 +step:3601 train loss:3.564246 +step:3602 train loss:3.546417 +step:3603 train loss:3.568530 +step:3604 train loss:3.587932 +step:3605 train loss:3.701274 +step:3606 train loss:3.596788 +step:3607 train loss:3.580324 +step:3608 train loss:3.596023 +step:3609 train loss:3.578123 +step:3610 train loss:3.550039 +step:3611 train loss:3.547781 +step:3612 train loss:3.623045 +step:3613 train loss:3.585783 +step:3614 train loss:3.543874 +step:3615 train loss:3.570370 +step:3616 train loss:3.534172 +step:3617 train loss:3.601675 +step:3618 train loss:3.556229 +step:3619 train loss:3.549308 +step:3620 train loss:3.567856 +step:3621 train loss:3.525209 +step:3622 train loss:3.631942 +step:3623 train loss:3.629366 +step:3624 train loss:3.595391 +step:3625 train loss:3.569983 +step:3626 train loss:3.583129 +step:3627 train loss:3.580280 +step:3628 train loss:3.562245 +step:3629 train loss:3.567265 +step:3630 train loss:3.649259 +step:3631 train loss:3.573849 +step:3632 train loss:3.600776 +step:3633 train loss:3.564402 +step:3634 train loss:3.564869 +step:3635 train loss:3.557943 +step:3636 train loss:3.629939 +step:3637 train loss:3.704769 +step:3638 train loss:3.625193 
+step:3639 train loss:3.605532 +step:3640 train loss:3.613631 +step:3641 train loss:3.650048 +step:3642 train loss:3.547702 +step:3643 train loss:3.721642 +step:3644 train loss:3.606920 +step:3645 train loss:3.582230 +step:3646 train loss:3.702400 +step:3647 train loss:3.588938 +step:3648 train loss:3.582870 +step:3649 train loss:3.530080 +step:3650 train loss:3.573755 +step:3651 train loss:3.570936 +step:3652 train loss:3.556327 +step:3653 train loss:3.496291 +step:3654 train loss:3.551330 +step:3655 train loss:3.543257 +step:3656 train loss:3.574099 +step:3657 train loss:3.594090 +step:3658 train loss:3.584859 +step:3659 train loss:3.570610 +step:3660 train loss:3.549872 +step:3661 train loss:3.573399 +step:3662 train loss:3.545092 +step:3663 train loss:3.581922 +step:3664 train loss:3.539872 +step:3665 train loss:3.584500 +step:3666 train loss:3.621843 +step:3667 train loss:3.715063 +step:3668 train loss:3.590245 +step:3669 train loss:3.550771 +step:3670 train loss:3.598730 +step:3671 train loss:3.559697 +step:3672 train loss:3.594205 +step:3673 train loss:3.576717 +step:3674 train loss:3.592521 +step:3675 train loss:3.604045 +step:3676 train loss:3.569646 +step:3677 train loss:3.528587 +step:3678 train loss:3.588537 +step:3679 train loss:3.493177 +step:3680 train loss:3.592695 +step:3681 train loss:3.627036 +step:3682 train loss:3.609279 +step:3683 train loss:3.549374 +step:3684 train loss:3.549162 +step:3685 train loss:3.580424 +step:3686 train loss:3.604351 +step:3687 train loss:3.563016 +step:3688 train loss:3.534449 +step:3689 train loss:3.566709 +step:3690 train loss:3.559291 +step:3691 train loss:3.540027 +step:3692 train loss:3.608108 +step:3693 train loss:3.739908 +step:3694 train loss:3.550891 +step:3695 train loss:3.606086 +step:3696 train loss:3.572747 +step:3697 train loss:3.565908 +step:3698 train loss:3.503736 +step:3699 train loss:3.530526 +step:3700 train loss:3.557558 +step:3701 train loss:3.577968 +step:3702 train loss:3.597131 +step:3703 train loss:3.556923 +step:3704 train loss:3.601973 +step:3705 train loss:3.580475 +step:3706 train loss:3.535729 +step:3707 train loss:3.584756 +step:3708 train loss:3.565863 +step:3709 train loss:3.485067 +step:3710 train loss:3.610316 +step:3711 train loss:3.556856 +step:3712 train loss:3.593316 +step:3713 train loss:3.545621 +step:3714 train loss:3.565038 +step:3715 train loss:3.682749 +step:3716 train loss:3.590117 +step:3717 train loss:3.568281 +step:3718 train loss:3.566193 +step:3719 train loss:3.563931 +step:3720 train loss:3.572453 +step:3721 train loss:3.630782 +step:3722 train loss:3.643826 +step:3723 train loss:3.529232 +step:3724 train loss:3.588113 +step:3725 train loss:3.564737 +step:3726 train loss:3.586407 +step:3727 train loss:3.658633 +step:3728 train loss:3.624403 +step:3729 train loss:3.521275 +step:3730 train loss:3.539523 +step:3731 train loss:3.563788 +step:3732 train loss:3.719690 +step:3733 train loss:3.576883 +step:3734 train loss:3.576072 +step:3735 train loss:3.516018 +step:3736 train loss:3.573597 +step:3737 train loss:3.621083 +step:3738 train loss:3.648913 +step:3739 train loss:3.560748 +step:3740 train loss:3.468136 +step:3741 train loss:3.671387 +step:3742 train loss:3.582583 +step:3743 train loss:3.559948 +step:3744 train loss:3.590664 +step:3745 train loss:3.576744 +step:3746 train loss:3.537767 +step:3747 train loss:3.559878 +step:3748 train loss:3.602099 +step:3749 train loss:3.584231 +step:3750 validation loss:3.496332 +step:3750 train loss:3.592586 +step:3751 train loss:3.682674 +step:3752 
train loss:3.626719 +step:3753 train loss:3.534144 +step:3754 train loss:3.584257 +step:3755 train loss:3.766958 +step:3756 train loss:3.541162 +step:3757 train loss:3.537056 +step:3758 train loss:3.569357 +step:3759 train loss:3.516504 +step:3760 train loss:3.511622 +step:3761 train loss:3.567185 +step:3762 train loss:3.558027 +step:3763 train loss:3.563773 +step:3764 train loss:3.547044 +step:3765 train loss:3.552812 +step:3766 train loss:3.520821 +step:3767 train loss:3.605184 +step:3768 train loss:3.543684 +step:3769 train loss:3.811351 +step:3770 train loss:3.598046 +step:3771 train loss:3.603991 +step:3772 train loss:3.566621 +step:3773 train loss:3.560191 +step:3774 train loss:3.560979 +step:3775 train loss:3.559528 +step:3776 train loss:3.555827 +step:3777 train loss:3.520028 +step:3778 train loss:3.536020 +step:3779 train loss:3.518574 +step:3780 train loss:3.604828 +step:3781 train loss:3.567346 +step:3782 train loss:3.487753 +step:3783 train loss:3.598541 +step:3784 train loss:3.604857 +step:3785 train loss:3.514868 +step:3786 train loss:3.621286 +step:3787 train loss:3.536080 +step:3788 train loss:3.547725 +step:3789 train loss:3.462377 +step:3790 train loss:3.570991 +step:3791 train loss:3.590930 +step:3792 train loss:3.562741 +step:3793 train loss:3.565343 +step:3794 train loss:3.588437 +step:3795 train loss:3.558666 +step:3796 train loss:3.575047 +step:3797 train loss:3.553234 +step:3798 train loss:3.560380 +step:3799 train loss:3.568960 +step:3800 train loss:3.477462 +step:3801 train loss:3.590636 +step:3802 train loss:3.518244 +step:3803 train loss:3.602928 +step:3804 train loss:3.612000 +step:3805 train loss:3.574520 +step:3806 train loss:3.592889 +step:3807 train loss:3.608750 +step:3808 train loss:3.569160 +step:3809 train loss:3.578148 +step:3810 train loss:3.580818 +step:3811 train loss:3.566520 +step:3812 train loss:3.566155 +step:3813 train loss:3.523556 +step:3814 train loss:3.568553 +step:3815 train loss:3.570707 +step:3816 train loss:3.586414 +step:3817 train loss:3.605028 +step:3818 train loss:3.577118 +step:3819 train loss:3.590500 +step:3820 train loss:3.587230 +step:3821 train loss:3.545070 +step:3822 train loss:3.633781 +step:3823 train loss:3.520519 +step:3824 train loss:3.539080 +step:3825 train loss:3.544177 +step:3826 train loss:3.662176 +step:3827 train loss:3.631253 +step:3828 train loss:3.524491 +step:3829 train loss:3.545846 +step:3830 train loss:3.602878 +step:3831 train loss:3.541548 +step:3832 train loss:3.597787 +step:3833 train loss:3.540204 +step:3834 train loss:3.506808 +step:3835 train loss:3.551234 +step:3836 train loss:3.524334 +step:3837 train loss:3.591826 +step:3838 train loss:3.543265 +step:3839 train loss:3.594955 +step:3840 train loss:3.600556 +step:3841 train loss:3.556537 +step:3842 train loss:3.578965 +step:3843 train loss:3.592659 +step:3844 train loss:3.567829 +step:3845 train loss:3.593790 +step:3846 train loss:3.629479 +step:3847 train loss:3.529878 +step:3848 train loss:3.533589 +step:3849 train loss:3.556632 +step:3850 train loss:3.567182 +step:3851 train loss:3.715868 +step:3852 train loss:3.691988 +step:3853 train loss:3.583809 +step:3854 train loss:3.546424 +step:3855 train loss:3.594051 +step:3856 train loss:3.519612 +step:3857 train loss:3.581444 +step:3858 train loss:3.495033 +step:3859 train loss:3.539776 +step:3860 train loss:3.610753 +step:3861 train loss:3.582695 +step:3862 train loss:3.520496 +step:3863 train loss:3.569703 +step:3864 train loss:3.543396 +step:3865 train loss:3.577592 +step:3866 train loss:3.598104 
+step:3867 train loss:3.595104 +step:3868 train loss:3.544582 +step:3869 train loss:3.542418 +step:3870 train loss:3.517642 +step:3871 train loss:3.517578 +step:3872 train loss:3.651075 +step:3873 train loss:3.574027 +step:3874 train loss:3.587808 +step:3875 train loss:3.696514 +step:3876 train loss:3.572998 +step:3877 train loss:3.594586 +step:3878 train loss:3.621993 +step:3879 train loss:3.610425 +step:3880 train loss:3.691710 +step:3881 train loss:3.510567 +step:3882 train loss:3.547226 +step:3883 train loss:3.561701 +step:3884 train loss:3.550647 +step:3885 train loss:3.570071 +step:3886 train loss:3.626893 +step:3887 train loss:3.607689 +step:3888 train loss:3.568706 +step:3889 train loss:3.542034 +step:3890 train loss:3.579245 +step:3891 train loss:3.590798 +step:3892 train loss:3.501451 +step:3893 train loss:3.609582 +step:3894 train loss:3.556342 +step:3895 train loss:3.576331 +step:3896 train loss:3.567606 +step:3897 train loss:3.539618 +step:3898 train loss:3.598325 +step:3899 train loss:3.635138 +step:3900 train loss:3.590473 +step:3901 train loss:3.604391 +step:3902 train loss:3.527642 +step:3903 train loss:3.547600 +step:3904 train loss:3.579940 +step:3905 train loss:3.518577 +step:3906 train loss:3.550250 +step:3907 train loss:3.587457 +step:3908 train loss:3.662053 +step:3909 train loss:3.558524 +step:3910 train loss:3.579286 +step:3911 train loss:3.595968 +step:3912 train loss:3.547000 +step:3913 train loss:3.557649 +step:3914 train loss:3.578624 +step:3915 train loss:3.551359 +step:3916 train loss:3.585991 +step:3917 train loss:3.634817 +step:3918 train loss:3.598147 +step:3919 train loss:3.581733 +step:3920 train loss:3.556012 +step:3921 train loss:3.593421 +step:3922 train loss:3.602307 +step:3923 train loss:3.586091 +step:3924 train loss:3.527491 +step:3925 train loss:3.733724 +step:3926 train loss:3.570431 +step:3927 train loss:3.547886 +step:3928 train loss:3.630337 +step:3929 train loss:3.697587 +step:3930 train loss:3.587494 +step:3931 train loss:3.515938 +step:3932 train loss:3.575166 +step:3933 train loss:3.590198 +step:3934 train loss:3.540668 +step:3935 train loss:3.520139 +step:3936 train loss:3.610147 +step:3937 train loss:3.567761 +step:3938 train loss:3.582214 +step:3939 train loss:3.603826 +step:3940 train loss:3.558002 +step:3941 train loss:3.637987 +step:3942 train loss:3.599253 +step:3943 train loss:3.587194 +step:3944 train loss:3.635358 +step:3945 train loss:3.544477 +step:3946 train loss:3.488095 +step:3947 train loss:3.620952 +step:3948 train loss:3.587507 +step:3949 train loss:3.756414 +step:3950 train loss:3.555156 +step:3951 train loss:3.505456 +step:3952 train loss:3.440550 +step:3953 train loss:3.521123 +step:3954 train loss:3.564999 +step:3955 train loss:3.596681 +step:3956 train loss:3.558873 +step:3957 train loss:3.606166 +step:3958 train loss:3.583910 +step:3959 train loss:3.618689 +step:3960 train loss:3.544997 +step:3961 train loss:3.571775 +step:3962 train loss:3.575530 +step:3963 train loss:3.550417 +step:3964 train loss:3.530953 +step:3965 train loss:3.582640 +step:3966 train loss:3.541578 +step:3967 train loss:3.588053 +step:3968 train loss:3.602957 +step:3969 train loss:3.512610 +step:3970 train loss:3.620020 +step:3971 train loss:3.538437 +step:3972 train loss:3.569712 +step:3973 train loss:3.528802 +step:3974 train loss:3.624922 +step:3975 train loss:3.572296 +step:3976 train loss:3.530393 +step:3977 train loss:3.589289 +step:3978 train loss:3.551901 +step:3979 train loss:3.541154 +step:3980 train loss:3.615096 +step:3981 train 
loss:3.544541 +step:3982 train loss:3.565645 +step:3983 train loss:3.547513 +step:3984 train loss:3.582690 +step:3985 train loss:3.557046 +step:3986 train loss:3.572404 +step:3987 train loss:3.583509 +step:3988 train loss:3.522167 +step:3989 train loss:3.590340 +step:3990 train loss:3.583632 +step:3991 train loss:3.602310 +step:3992 train loss:3.554173 +step:3993 train loss:3.586467 +step:3994 train loss:3.540674 +step:3995 train loss:3.592323 +step:3996 train loss:3.510880 +step:3997 train loss:3.588329 +step:3998 train loss:3.469420 +step:3999 train loss:3.627506 +step:4000 validation loss:3.484789 total_sharp:2.4521e-03 L1_sharp:5.5569e-03 L2_sharp:1.2452e-03 L3_sharp:8.7035e-04 L4_sharp:2.4730e-04 L5_sharp:3.4841e-04 L6_sharp:4.6487e-04 L7_sharp:6.3383e-04 L8_sharp:3.6098e-04 L9_sharp:3.5288e-04 L10_sharp:2.6262e-04 L11_sharp:2.3069e-04 L12_sharp:2.2516e-04 total_fnorm:2.4253e+00 total_l1_linf:2.0744e+04 total_spectral:2.4253e+00 L1_fnorm:5.6576e-01 L2_fnorm:5.2279e-01 L3_fnorm:5.4909e-01 L4_fnorm:5.7395e-01 L5_fnorm:5.9584e-01 L6_fnorm:6.0110e-01 L7_fnorm:5.9935e-01 L8_fnorm:6.0196e-01 L9_fnorm:6.0335e-01 L10_fnorm:6.0466e-01 L11_fnorm:6.0232e-01 L12_fnorm:6.0223e-01 L1_l1linf:4.2162e-01 L2_l1linf:4.2395e-01 L3_l1linf:4.2455e-01 L4_l1linf:4.2444e-01 L5_l1linf:4.1758e-01 L6_l1linf:4.1870e-01 L7_l1linf:4.1589e-01 L8_l1linf:4.1623e-01 L9_l1linf:4.1174e-01 L10_l1linf:4.1099e-01 L11_l1linf:4.0011e-01 L12_l1linf:3.9572e-01 L1_spectral:1.2054e-02 L2_spectral:1.2038e-02 L3_spectral:1.2047e-02 L4_spectral:1.2048e-02 L5_spectral:1.2058e-02 L6_spectral:1.2044e-02 L7_spectral:1.2042e-02 L8_spectral:1.2047e-02 L9_spectral:1.2045e-02 L10_spectral:1.2047e-02 L11_spectral:1.2046e-02 L12_spectral:1.2045e-02 ip_v_neg_g:8.6207e-03 cos_v_neg_g:3.1208e-03 v_norm:2.4253e+00 g_norm:1.1390e+00 hv_norm:5.0788e-01 cos_v_hv:1.1710e-02 hg_norm:2.4716e+01 cos_g_hg:6.4337e-01 v_par:7.9875e-04 v_perp:2.4253e+00 L1_cos_v_neg_g:3.6209e-03 L1_v_norm:5.6576e-01 L2_cos_v_neg_g:8.4392e-03 L2_v_norm:5.2279e-01 L3_cos_v_neg_g:5.4260e-03 L3_v_norm:5.4909e-01 L4_cos_v_neg_g:4.0940e-03 L4_v_norm:5.7395e-01 L5_cos_v_neg_g:5.3501e-03 L5_v_norm:5.9584e-01 L6_cos_v_neg_g:5.5767e-03 L6_v_norm:6.0110e-01 L7_cos_v_neg_g:7.2796e-03 L7_v_norm:5.9935e-01 L8_cos_v_neg_g:6.1639e-03 L8_v_norm:6.0196e-01 L9_cos_v_neg_g:5.8972e-03 L9_v_norm:6.0335e-01 L10_cos_v_neg_g:4.9341e-03 L10_v_norm:6.0466e-01 L11_cos_v_neg_g:3.9264e-03 L11_v_norm:6.0232e-01 L12_cos_v_neg_g:2.4093e-03 L12_v_norm:6.0223e-01 +step:4000 train loss:3.507357 +step:4001 train loss:3.582468 +step:4002 train loss:3.561270 +step:4003 train loss:3.596150 +step:4004 train loss:3.502492 +step:4005 train loss:3.598985 +step:4006 train loss:3.604661 +step:4007 train loss:3.526923 +step:4008 train loss:3.487500 +step:4009 train loss:3.567646 +step:4010 train loss:3.541887 +step:4011 train loss:3.551998 +step:4012 train loss:3.569033 +step:4013 train loss:3.544249 +step:4014 train loss:3.556769 +step:4015 train loss:3.545952 +step:4016 train loss:3.556970 +step:4017 train loss:3.526653 +step:4018 train loss:3.467086 +step:4019 train loss:3.515715 +step:4020 train loss:3.583147 +step:4021 train loss:3.527658 +step:4022 train loss:3.530459 +step:4023 train loss:3.544619 +step:4024 train loss:3.460124 +step:4025 train loss:3.580568 +step:4026 train loss:3.569442 +step:4027 train loss:3.578928 +step:4028 train loss:3.591262 +step:4029 train loss:3.626140 +step:4030 train loss:3.542854 +step:4031 train loss:3.582185 +step:4032 train loss:3.541778 +step:4033 train loss:3.575632 +step:4034 
train loss:3.588726 +step:4035 train loss:3.565745 +step:4036 train loss:3.565020 +step:4037 train loss:3.583851 +step:4038 train loss:3.500014 +step:4039 train loss:3.556057 +step:4040 train loss:3.537365 +step:4041 train loss:3.526600 +step:4042 train loss:3.549410 +step:4043 train loss:3.531110 +step:4044 train loss:3.567944 +step:4045 train loss:3.573133 +step:4046 train loss:3.528013 +step:4047 train loss:3.557341 +step:4048 train loss:3.569656 +step:4049 train loss:3.530454 +step:4050 train loss:3.635374 +step:4051 train loss:3.545145 +step:4052 train loss:3.570681 +step:4053 train loss:3.618263 +step:4054 train loss:3.589285 +step:4055 train loss:3.604722 +step:4056 train loss:3.601974 +step:4057 train loss:3.536189 +step:4058 train loss:3.520362 +step:4059 train loss:3.603994 +step:4060 train loss:3.542636 +step:4061 train loss:3.512253 +step:4062 train loss:3.627383 +step:4063 train loss:3.580282 +step:4064 train loss:3.547442 +step:4065 train loss:3.530493 +step:4066 train loss:3.558917 +step:4067 train loss:3.585977 +step:4068 train loss:3.551832 +step:4069 train loss:3.608801 +step:4070 train loss:3.526483 +step:4071 train loss:3.501145 +step:4072 train loss:3.576224 +step:4073 train loss:3.511105 +step:4074 train loss:3.560500 +step:4075 train loss:3.627636 +step:4076 train loss:3.487544 +step:4077 train loss:3.561065 +step:4078 train loss:3.662590 +step:4079 train loss:3.603595 +step:4080 train loss:3.548548 +step:4081 train loss:3.519023 +step:4082 train loss:3.571449 +step:4083 train loss:3.509490 +step:4084 train loss:3.523809 +step:4085 train loss:3.772732 +step:4086 train loss:3.530688 +step:4087 train loss:3.574535 +step:4088 train loss:3.560047 +step:4089 train loss:3.548657 +step:4090 train loss:3.569092 +step:4091 train loss:3.593512 +step:4092 train loss:3.513254 +step:4093 train loss:3.546339 +step:4094 train loss:3.561612 +step:4095 train loss:3.517833 +step:4096 train loss:3.552766 +step:4097 train loss:3.555037 +step:4098 train loss:3.529360 +step:4099 train loss:3.532443 +step:4100 train loss:3.581281 +step:4101 train loss:3.506894 +step:4102 train loss:3.547472 +step:4103 train loss:3.752350 +step:4104 train loss:3.561786 +step:4105 train loss:3.532325 +step:4106 train loss:3.598097 +step:4107 train loss:3.522330 +step:4108 train loss:3.525769 +step:4109 train loss:3.576830 +step:4110 train loss:3.589665 +step:4111 train loss:3.563441 +step:4112 train loss:3.581123 +step:4113 train loss:3.542449 +step:4114 train loss:3.490040 +step:4115 train loss:3.526302 +step:4116 train loss:3.507617 +step:4117 train loss:3.529423 +step:4118 train loss:3.582597 +step:4119 train loss:3.606293 +step:4120 train loss:3.525445 +step:4121 train loss:3.519917 +step:4122 train loss:3.593202 +step:4123 train loss:3.598750 +step:4124 train loss:3.578089 +step:4125 train loss:3.619242 +step:4126 train loss:3.552074 +step:4127 train loss:3.570588 +step:4128 train loss:3.560729 +step:4129 train loss:3.606983 +step:4130 train loss:3.538835 +step:4131 train loss:3.571990 +step:4132 train loss:3.588709 +step:4133 train loss:3.540844 +step:4134 train loss:3.593406 +step:4135 train loss:3.527902 +step:4136 train loss:3.551158 +step:4137 train loss:3.521142 +step:4138 train loss:3.529435 +step:4139 train loss:3.580575 +step:4140 train loss:3.533133 +step:4141 train loss:3.498371 +step:4142 train loss:3.543488 +step:4143 train loss:3.580420 +step:4144 train loss:3.532080 +step:4145 train loss:3.500584 +step:4146 train loss:3.567054 +step:4147 train loss:3.542068 +step:4148 train loss:3.535208 
+step:4149 train loss:3.616973 +step:4150 train loss:3.577654 +step:4151 train loss:3.560068 +step:4152 train loss:3.582986 +step:4153 train loss:3.589104 +step:4154 train loss:3.598397 +step:4155 train loss:3.620408 +step:4156 train loss:3.496066 +step:4157 train loss:3.520265 +step:4158 train loss:3.572446 +step:4159 train loss:3.477914 +step:4160 train loss:3.566077 +step:4161 train loss:3.564097 +step:4162 train loss:3.478844 +step:4163 train loss:3.560112 +step:4164 train loss:3.504424 +step:4165 train loss:3.509923 +step:4166 train loss:3.574427 +step:4167 train loss:3.565507 +step:4168 train loss:3.560992 +step:4169 train loss:3.596868 +step:4170 train loss:3.708980 +step:4171 train loss:3.554893 +step:4172 train loss:3.578950 +step:4173 train loss:3.574494 +step:4174 train loss:3.539134 +step:4175 train loss:3.622546 +step:4176 train loss:3.544971 +step:4177 train loss:3.570570 +step:4178 train loss:3.546339 +step:4179 train loss:3.508029 +step:4180 train loss:3.503278 +step:4181 train loss:3.552260 +step:4182 train loss:3.539909 +step:4183 train loss:3.475259 +step:4184 train loss:3.542280 +step:4185 train loss:3.612692 +step:4186 train loss:3.586638 +step:4187 train loss:3.595314 +step:4188 train loss:3.572122 +step:4189 train loss:3.532936 +step:4190 train loss:3.567677 +step:4191 train loss:3.520071 +step:4192 train loss:3.612931 +step:4193 train loss:3.515391 +step:4194 train loss:3.503466 +step:4195 train loss:3.498603 +step:4196 train loss:3.561581 +step:4197 train loss:3.580914 +step:4198 train loss:3.505019 +step:4199 train loss:3.587157 +step:4200 train loss:3.549313 +step:4201 train loss:3.527365 +step:4202 train loss:3.546706 +step:4203 train loss:3.555928 +step:4204 train loss:3.549381 +step:4205 train loss:3.560731 +step:4206 train loss:3.576850 +step:4207 train loss:3.580916 +step:4208 train loss:3.545396 +step:4209 train loss:3.611542 +step:4210 train loss:3.639622 +step:4211 train loss:3.520530 +step:4212 train loss:3.563961 +step:4213 train loss:3.511342 +step:4214 train loss:3.521395 +step:4215 train loss:3.538175 +step:4216 train loss:3.509520 +step:4217 train loss:3.535391 +step:4218 train loss:3.571848 +step:4219 train loss:3.581570 +step:4220 train loss:3.649848 +step:4221 train loss:3.537332 +step:4222 train loss:3.599541 +step:4223 train loss:3.520008 +step:4224 train loss:3.594269 +step:4225 train loss:3.522065 +step:4226 train loss:3.583009 +step:4227 train loss:3.552130 +step:4228 train loss:3.529178 +step:4229 train loss:3.539118 +step:4230 train loss:3.520497 +step:4231 train loss:3.510126 +step:4232 train loss:3.565418 +step:4233 train loss:3.469374 +step:4234 train loss:3.552406 +step:4235 train loss:3.630878 +step:4236 train loss:3.600135 +step:4237 train loss:3.578914 +step:4238 train loss:3.589851 +step:4239 train loss:3.642760 +step:4240 train loss:3.549087 +step:4241 train loss:3.474497 +step:4242 train loss:3.593859 +step:4243 train loss:3.591934 +step:4244 train loss:3.607655 +step:4245 train loss:3.660148 +step:4246 train loss:3.534657 +step:4247 train loss:3.592926 +step:4248 train loss:3.544497 +step:4249 train loss:3.552490 +step:4250 validation loss:3.470524 +step:4250 train loss:3.528796 +step:4251 train loss:3.627235 +step:4252 train loss:3.533656 +step:4253 train loss:3.530819 +step:4254 train loss:3.537090 +step:4255 train loss:3.522367 +step:4256 train loss:3.537202 +step:4257 train loss:3.592620 +step:4258 train loss:3.456690 +step:4259 train loss:3.520555 +step:4260 train loss:3.584247 +step:4261 train loss:3.568380 +step:4262 
train loss:3.710936 +step:4263 train loss:3.639389 +step:4264 train loss:3.581324 +step:4265 train loss:3.576716 +step:4266 train loss:3.570614 +step:4267 train loss:3.570960 +step:4268 train loss:3.519687 +step:4269 train loss:3.611482 +step:4270 train loss:3.592750 +step:4271 train loss:3.508336 +step:4272 train loss:3.559289 +step:4273 train loss:3.533788 +step:4274 train loss:3.525114 +step:4275 train loss:3.543837 +step:4276 train loss:3.510186 +step:4277 train loss:3.651746 +step:4278 train loss:3.496058 +step:4279 train loss:3.522685 +step:4280 train loss:3.605976 +step:4281 train loss:3.592406 +step:4282 train loss:3.656740 +step:4283 train loss:3.512147 +step:4284 train loss:3.541720 +step:4285 train loss:3.543868 +step:4286 train loss:3.605573 +step:4287 train loss:3.605736 +step:4288 train loss:3.586412 +step:4289 train loss:3.539155 +step:4290 train loss:3.545177 +step:4291 train loss:3.505429 +step:4292 train loss:3.553328 +step:4293 train loss:3.564302 +step:4294 train loss:3.547917 +step:4295 train loss:3.484247 +step:4296 train loss:3.556519 +step:4297 train loss:3.538429 +step:4298 train loss:3.549906 +step:4299 train loss:3.547081 +step:4300 train loss:3.662822 +step:4301 train loss:3.482016 +step:4302 train loss:3.620045 +step:4303 train loss:3.498800 +step:4304 train loss:3.506647 +step:4305 train loss:3.523444 +step:4306 train loss:3.597635 +step:4307 train loss:3.511070 +step:4308 train loss:3.511677 +step:4309 train loss:3.580365 +step:4310 train loss:3.520606 +step:4311 train loss:3.573714 +step:4312 train loss:3.569927 +step:4313 train loss:3.563741 +step:4314 train loss:3.508516 +step:4315 train loss:3.538763 +step:4316 train loss:3.488296 +step:4317 train loss:3.542991 +step:4318 train loss:3.585077 +step:4319 train loss:3.534090 +step:4320 train loss:3.595846 +step:4321 train loss:3.577904 +step:4322 train loss:3.529979 +step:4323 train loss:3.469158 +step:4324 train loss:3.562209 +step:4325 train loss:3.540117 +step:4326 train loss:3.535895 +step:4327 train loss:3.637604 +step:4328 train loss:3.547861 +step:4329 train loss:3.504308 +step:4330 train loss:3.550044 +step:4331 train loss:3.559131 +step:4332 train loss:3.593805 +step:4333 train loss:3.556306 +step:4334 train loss:3.564780 +step:4335 train loss:3.565941 +step:4336 train loss:3.580400 +step:4337 train loss:3.541786 +step:4338 train loss:3.661533 +step:4339 train loss:3.566085 +step:4340 train loss:3.573245 +step:4341 train loss:3.543439 +step:4342 train loss:3.558311 +step:4343 train loss:3.676132 +step:4344 train loss:3.566869 +step:4345 train loss:3.579366 +step:4346 train loss:3.592368 +step:4347 train loss:3.604588 +step:4348 train loss:3.519011 +step:4349 train loss:3.601217 +step:4350 train loss:3.543665 +step:4351 train loss:3.495280 +step:4352 train loss:3.569441 +step:4353 train loss:3.516381 +step:4354 train loss:3.573865 +step:4355 train loss:3.531528 +step:4356 train loss:3.556824 +step:4357 train loss:3.538425 +step:4358 train loss:3.636498 +step:4359 train loss:3.580442 +step:4360 train loss:3.499469 +step:4361 train loss:3.549881 +step:4362 train loss:3.568073 +step:4363 train loss:3.586725 +step:4364 train loss:3.550252 +step:4365 train loss:3.530888 +step:4366 train loss:3.580646 +step:4367 train loss:3.594890 +step:4368 train loss:3.570295 +step:4369 train loss:3.443357 +step:4370 train loss:3.566236 +step:4371 train loss:3.476806 +step:4372 train loss:3.626162 +step:4373 train loss:3.563637 +step:4374 train loss:3.533762 +step:4375 train loss:3.580266 +step:4376 train loss:3.588076 
+step:4377 train loss:3.523221 +step:4378 train loss:3.536793 +step:4379 train loss:3.618813 +step:4380 train loss:3.596504 +step:4381 train loss:3.502321 +step:4382 train loss:3.545588 +step:4383 train loss:3.574690 +step:4384 train loss:3.572158 +step:4385 train loss:3.499867 +step:4386 train loss:3.553051 +step:4387 train loss:3.523594 +step:4388 train loss:3.544746 +step:4389 train loss:3.570997 +step:4390 train loss:3.613354 +step:4391 train loss:3.539830 +step:4392 train loss:3.613472 +step:4393 train loss:3.569676 +step:4394 train loss:3.507618 +step:4395 train loss:3.565497 +step:4396 train loss:3.541809 +step:4397 train loss:3.584469 +step:4398 train loss:3.527327 +step:4399 train loss:3.522168 +step:4400 train loss:3.525942 +step:4401 train loss:3.588154 +step:4402 train loss:3.585348 +step:4403 train loss:3.538611 +step:4404 train loss:3.568496 +step:4405 train loss:3.490686 +step:4406 train loss:3.569017 +step:4407 train loss:3.506420 +step:4408 train loss:3.595200 +step:4409 train loss:3.556901 +step:4410 train loss:3.562414 +step:4411 train loss:3.519374 +step:4412 train loss:3.642551 +step:4413 train loss:3.533556 +step:4414 train loss:3.540155 +step:4415 train loss:3.526000 +step:4416 train loss:3.514923 +step:4417 train loss:3.511057 +step:4418 train loss:3.583238 +step:4419 train loss:3.553545 +step:4420 train loss:3.565392 +step:4421 train loss:3.588540 +step:4422 train loss:3.606653 +step:4423 train loss:3.562559 +step:4424 train loss:3.546924 +step:4425 train loss:3.508819 +step:4426 train loss:3.586462 +step:4427 train loss:3.549762 +step:4428 train loss:3.485404 +step:4429 train loss:3.549296 +step:4430 train loss:3.586658 +step:4431 train loss:3.580471 +step:4432 train loss:3.485288 +step:4433 train loss:3.540593 +step:4434 train loss:3.535116 +step:4435 train loss:3.567079 +step:4436 train loss:3.501729 +step:4437 train loss:3.580703 +step:4438 train loss:3.547973 +step:4439 train loss:3.554721 +step:4440 train loss:3.553400 +step:4441 train loss:3.554165 +step:4442 train loss:3.604401 +step:4443 train loss:3.542916 +step:4444 train loss:3.624914 +step:4445 train loss:3.589742 +step:4446 train loss:3.521364 +step:4447 train loss:3.567383 +step:4448 train loss:3.585949 +step:4449 train loss:3.522553 +step:4450 train loss:3.541571 +step:4451 train loss:3.593158 +step:4452 train loss:3.657907 +step:4453 train loss:3.577462 +step:4454 train loss:3.554299 +step:4455 train loss:3.596692 +step:4456 train loss:3.542022 +step:4457 train loss:3.542154 +step:4458 train loss:3.557841 +step:4459 train loss:3.591464 +step:4460 train loss:3.501436 +step:4461 train loss:3.473722 +step:4462 train loss:3.531737 +step:4463 train loss:3.549135 +step:4464 train loss:3.520948 +step:4465 train loss:3.553298 +step:4466 train loss:3.648715 +step:4467 train loss:3.529903 +step:4468 train loss:3.526683 +step:4469 train loss:3.513879 +step:4470 train loss:3.493598 +step:4471 train loss:3.553166 +step:4472 train loss:3.481951 +step:4473 train loss:3.565206 +step:4474 train loss:3.590001 +step:4475 train loss:3.554885 +step:4476 train loss:3.512732 +step:4477 train loss:3.500050 +step:4478 train loss:3.557499 +step:4479 train loss:3.656653 +step:4480 train loss:3.496151 +step:4481 train loss:3.568054 +step:4482 train loss:3.524409 +step:4483 train loss:3.523428 +step:4484 train loss:3.576046 +step:4485 train loss:3.530020 +step:4486 train loss:3.633888 +step:4487 train loss:3.528516 +step:4488 train loss:3.523561 +step:4489 train loss:3.480173 +step:4490 train loss:3.565382 +step:4491 train 
loss:3.513787 +step:4492 train loss:3.544886 +step:4493 train loss:3.533290 +step:4494 train loss:3.530869 +step:4495 train loss:3.591077 +step:4496 train loss:3.534851 +step:4497 train loss:3.621272 +step:4498 train loss:3.510783 +step:4499 train loss:3.558434 +step:4500 validation loss:3.461681 total_sharp:2.7897e-03 L1_sharp:4.2648e-03 L2_sharp:9.0375e-04 L3_sharp:1.2321e-03 L4_sharp:2.9785e-04 L5_sharp:5.2871e-04 L6_sharp:8.5367e-04 L7_sharp:1.1063e-03 L8_sharp:6.0976e-04 L9_sharp:4.3402e-04 L10_sharp:3.0910e-04 L11_sharp:2.3826e-04 L12_sharp:2.7726e-04 total_fnorm:2.4330e+00 total_l1_linf:2.0812e+04 total_spectral:2.4330e+00 L1_fnorm:5.6521e-01 L2_fnorm:5.2707e-01 L3_fnorm:5.5561e-01 L4_fnorm:5.8789e-01 L5_fnorm:5.9478e-01 L6_fnorm:5.9946e-01 L7_fnorm:5.9667e-01 L8_fnorm:5.9895e-01 L9_fnorm:6.0117e-01 L10_fnorm:6.0360e-01 L11_fnorm:5.9988e-01 L12_fnorm:6.0208e-01 L1_l1linf:4.1625e-01 L2_l1linf:4.6214e-01 L3_l1linf:4.4196e-01 L4_l1linf:4.3497e-01 L5_l1linf:4.2183e-01 L6_l1linf:4.1715e-01 L7_l1linf:4.1228e-01 L8_l1linf:4.1174e-01 L9_l1linf:4.0924e-01 L10_l1linf:4.1676e-01 L11_l1linf:4.4364e-01 L12_l1linf:4.3851e-01 L1_spectral:1.2046e-02 L2_spectral:1.2039e-02 L3_spectral:1.2046e-02 L4_spectral:1.2044e-02 L5_spectral:1.2067e-02 L6_spectral:1.2045e-02 L7_spectral:1.2044e-02 L8_spectral:1.2041e-02 L9_spectral:1.2044e-02 L10_spectral:1.2047e-02 L11_spectral:1.2046e-02 L12_spectral:1.2046e-02 ip_v_neg_g:9.7380e-03 cos_v_neg_g:3.5317e-03 v_norm:2.4330e+00 g_norm:1.1333e+00 hv_norm:4.6967e-01 cos_v_hv:1.4451e-02 hg_norm:2.4356e+01 cos_g_hg:5.8887e-01 v_par:5.7273e-04 v_perp:2.4330e+00 L1_cos_v_neg_g:3.4039e-03 L1_v_norm:5.6521e-01 L2_cos_v_neg_g:1.1187e-02 L2_v_norm:5.2707e-01 L3_cos_v_neg_g:8.6299e-03 L3_v_norm:5.5561e-01 L4_cos_v_neg_g:4.2610e-03 L4_v_norm:5.8789e-01 L5_cos_v_neg_g:4.3651e-03 L5_v_norm:5.9478e-01 L6_cos_v_neg_g:6.3119e-03 L6_v_norm:5.9946e-01 L7_cos_v_neg_g:7.0472e-03 L7_v_norm:5.9667e-01 L8_cos_v_neg_g:6.4911e-03 L8_v_norm:5.9895e-01 L9_cos_v_neg_g:5.8697e-03 L9_v_norm:6.0117e-01 L10_cos_v_neg_g:7.0486e-03 L10_v_norm:6.0360e-01 L11_cos_v_neg_g:5.0486e-03 L11_v_norm:5.9988e-01 L12_cos_v_neg_g:5.3431e-03 L12_v_norm:6.0208e-01 +step:4500 train loss:3.470109 +step:4501 train loss:3.530775 +step:4502 train loss:3.654881 +step:4503 train loss:3.556117 +step:4504 train loss:3.569616 +step:4505 train loss:3.546820 +step:4506 train loss:3.520520 +step:4507 train loss:3.598132 +step:4508 train loss:3.535234 +step:4509 train loss:3.529493 +step:4510 train loss:3.567030 +step:4511 train loss:3.516397 +step:4512 train loss:3.541638 +step:4513 train loss:3.597708 +step:4514 train loss:3.503884 +step:4515 train loss:3.626268 +step:4516 train loss:3.594758 +step:4517 train loss:3.548591 +step:4518 train loss:3.489727 +step:4519 train loss:3.526381 +step:4520 train loss:3.537213 +step:4521 train loss:3.479357 +step:4522 train loss:3.536298 +step:4523 train loss:3.579567 +step:4524 train loss:3.565465 +step:4525 train loss:3.485752 +step:4526 train loss:3.530543 +step:4527 train loss:3.514677 +step:4528 train loss:3.547098 +step:4529 train loss:3.541002 +step:4530 train loss:3.637210 +step:4531 train loss:3.526733 +step:4532 train loss:3.547378 +step:4533 train loss:3.521389 +step:4534 train loss:3.615298 +step:4535 train loss:3.514622 +step:4536 train loss:3.585380 +step:4537 train loss:3.569227 +step:4538 train loss:3.546764 +step:4539 train loss:3.567478 +step:4540 train loss:3.542188 +step:4541 train loss:3.514261 +step:4542 train loss:3.558516 +step:4543 train loss:3.647502 +step:4544 
train loss:3.588461 +step:4545 train loss:3.528072 +step:4546 train loss:3.621482 +step:4547 train loss:3.578029 +step:4548 train loss:3.584712 +step:4549 train loss:3.543009 +step:4550 train loss:3.511824 +step:4551 train loss:3.524855 +step:4552 train loss:3.529428 +step:4553 train loss:3.613476 +step:4554 train loss:3.506368 +step:4555 train loss:3.616946 +step:4556 train loss:3.553682 +step:4557 train loss:3.480968 +step:4558 train loss:3.566479 +step:4559 train loss:3.578452 +step:4560 train loss:3.514473 +step:4561 train loss:3.504108 +step:4562 train loss:3.546527 +step:4563 train loss:3.499255 +step:4564 train loss:3.526491 +step:4565 train loss:3.523624 +step:4566 train loss:3.500220 +step:4567 train loss:3.523764 +step:4568 train loss:3.527417 +step:4569 train loss:3.507421 +step:4570 train loss:3.558424 +step:4571 train loss:3.540230 +step:4572 train loss:3.529609 +step:4573 train loss:3.537536 +step:4574 train loss:3.687936 +step:4575 train loss:3.515819 +step:4576 train loss:3.507232 +step:4577 train loss:3.543828 +step:4578 train loss:3.586845 +step:4579 train loss:3.537253 +step:4580 train loss:3.595894 +step:4581 train loss:3.534895 +step:4582 train loss:3.527819 +step:4583 train loss:3.535722 +step:4584 train loss:3.506379 +step:4585 train loss:3.589557 +step:4586 train loss:3.575015 +step:4587 train loss:3.474572 +step:4588 train loss:3.519457 +step:4589 train loss:3.590816 +step:4590 train loss:3.563669 +step:4591 train loss:3.500762 +step:4592 train loss:3.590919 +step:4593 train loss:3.508360 +step:4594 train loss:3.540051 +step:4595 train loss:3.563678 +step:4596 train loss:3.500518 +step:4597 train loss:3.634364 +step:4598 train loss:3.554622 +step:4599 train loss:3.512883 +step:4600 train loss:3.516191 +step:4601 train loss:3.539758 +step:4602 train loss:3.490622 +step:4603 train loss:3.503255 +step:4604 train loss:3.609977 +step:4605 train loss:3.528019 +step:4606 train loss:3.557145 +step:4607 train loss:3.538295 +step:4608 train loss:3.568940 +step:4609 train loss:3.530141 +step:4610 train loss:3.569553 +step:4611 train loss:3.600281 +step:4612 train loss:3.594851 +step:4613 train loss:3.578714 +step:4614 train loss:3.575378 +step:4615 train loss:3.513635 +step:4616 train loss:3.495790 +step:4617 train loss:3.538831 +step:4618 train loss:3.557775 +step:4619 train loss:3.514616 +step:4620 train loss:3.531205 +step:4621 train loss:3.530840 +step:4622 train loss:3.476777 +step:4623 train loss:3.578727 +step:4624 train loss:3.563255 +step:4625 train loss:3.524383 +step:4626 train loss:3.563212 +step:4627 train loss:3.531259 +step:4628 train loss:3.521064 +step:4629 train loss:3.560936 +step:4630 train loss:3.618711 +step:4631 train loss:3.620459 +step:4632 train loss:3.514767 +step:4633 train loss:3.526989 +step:4634 train loss:3.602348 +step:4635 train loss:3.565895 +step:4636 train loss:3.580671 +step:4637 train loss:3.519217 +step:4638 train loss:3.521081 +step:4639 train loss:3.517170 +step:4640 train loss:3.531652 +step:4641 train loss:3.532956 +step:4642 train loss:3.568505 +step:4643 train loss:3.527952 +step:4644 train loss:3.553370 +step:4645 train loss:3.566099 +step:4646 train loss:3.522424 +step:4647 train loss:3.505230 +step:4648 train loss:3.585997 +step:4649 train loss:3.598048 +step:4650 train loss:3.548277 +step:4651 train loss:3.550169 +step:4652 train loss:3.536917 +step:4653 train loss:3.595006 +step:4654 train loss:3.591524 +step:4655 train loss:3.495665 +step:4656 train loss:3.531013 +step:4657 train loss:3.580196 +step:4658 train loss:3.536264 
+step:4659 train loss:3.549835 +step:4660 train loss:3.594063 +step:4661 train loss:3.511761 +step:4662 train loss:3.529731 +step:4663 train loss:3.552741 +step:4664 train loss:3.587976 +step:4665 train loss:3.587528 +step:4666 train loss:3.583597 +step:4667 train loss:3.577029 +step:4668 train loss:3.537185 +step:4669 train loss:3.546778 +step:4670 train loss:3.578617 +step:4671 train loss:3.589628 +step:4672 train loss:3.450284 +step:4673 train loss:3.485398 +step:4674 train loss:3.613748 +step:4675 train loss:3.523150 +step:4676 train loss:3.481071 +step:4677 train loss:3.486584 +step:4678 train loss:3.459859 +step:4679 train loss:3.555674 +step:4680 train loss:3.497512 +step:4681 train loss:3.546598 +step:4682 train loss:3.498651 +step:4683 train loss:3.468766 +step:4684 train loss:3.593310 +step:4685 train loss:3.522688 +step:4686 train loss:3.532314 +step:4687 train loss:3.573363 +step:4688 train loss:3.498869 +step:4689 train loss:3.572838 +step:4690 train loss:3.517216 +step:4691 train loss:3.550414 +step:4692 train loss:3.482189 +step:4693 train loss:3.517705 +step:4694 train loss:3.557516 +step:4695 train loss:3.581279 +step:4696 train loss:3.564006 +step:4697 train loss:3.480570 +step:4698 train loss:3.496359 +step:4699 train loss:3.546637 +step:4700 train loss:3.519212 +step:4701 train loss:3.526070 +step:4702 train loss:3.482242 +step:4703 train loss:3.561453 +step:4704 train loss:3.550556 +step:4705 train loss:3.491416 +step:4706 train loss:3.500036 +step:4707 train loss:3.487904 +step:4708 train loss:3.555170 +step:4709 train loss:3.500674 +step:4710 train loss:3.513137 +step:4711 train loss:3.579312 +step:4712 train loss:3.473336 +step:4713 train loss:3.579553 +step:4714 train loss:3.477771 +step:4715 train loss:3.565393 +step:4716 train loss:3.538484 +step:4717 train loss:3.463525 +step:4718 train loss:3.561282 +step:4719 train loss:3.482758 +step:4720 train loss:3.585240 +step:4721 train loss:3.537978 +step:4722 train loss:3.591911 +step:4723 train loss:3.489520 +step:4724 train loss:3.538881 +step:4725 train loss:3.475192 +step:4726 train loss:3.520050 +step:4727 train loss:3.529768 +step:4728 train loss:3.532576 +step:4729 train loss:3.564983 +step:4730 train loss:3.458480 +step:4731 train loss:3.523130 +step:4732 train loss:3.479934 +step:4733 train loss:3.408454 +step:4734 train loss:3.551068 +step:4735 train loss:3.499450 +step:4736 train loss:3.543570 +step:4737 train loss:3.423868 +step:4738 train loss:3.573941 +step:4739 train loss:3.450467 +step:4740 train loss:3.558597 +step:4741 train loss:3.526227 +step:4742 train loss:3.488350 +step:4743 train loss:3.490393 +step:4744 train loss:3.528627 +step:4745 train loss:3.552128 +step:4746 train loss:3.587544 +step:4747 train loss:3.551890 +step:4748 train loss:3.449056 +step:4749 train loss:3.518273 +step:4750 validation loss:3.450769 +step:4750 train loss:3.464822 +step:4751 train loss:3.557248 +step:4752 train loss:3.491702 +step:4753 train loss:3.595228 +step:4754 train loss:3.467706 +step:4755 train loss:3.505284 +step:4756 train loss:3.585546 +step:4757 train loss:3.504324 +step:4758 train loss:3.527762 +step:4759 train loss:3.521564 +step:4760 train loss:3.554615 +step:4761 train loss:3.472863 +step:4762 train loss:3.504200 +step:4763 train loss:3.525995 +step:4764 train loss:3.586654 +step:4765 train loss:3.484488 +step:4766 train loss:3.498783 +step:4767 train loss:3.455645 +step:4768 train loss:3.510402 +step:4769 train loss:3.540130 +step:4770 train loss:3.496383 +step:4771 train loss:3.510509 +step:4772 
train loss:3.482378 +step:4773 train loss:3.519567 +step:4774 train loss:3.464599 +step:4775 train loss:3.590675 +step:4776 train loss:3.459548 +step:4777 train loss:3.530277 +step:4778 train loss:3.474325 +step:4779 train loss:3.520487 +step:4780 train loss:3.458394 +step:4781 train loss:3.464416 +step:4782 train loss:3.573791 +step:4783 train loss:3.561090 +step:4784 train loss:3.522049 +step:4785 train loss:3.519416 +step:4786 train loss:3.630940 +step:4787 train loss:3.463760 +step:4788 train loss:3.486647 +step:4789 train loss:3.508255 +step:4790 train loss:3.563005 +step:4791 train loss:3.525469 +step:4792 train loss:3.568934 +step:4793 train loss:3.486733 +step:4794 train loss:3.560813 +step:4795 train loss:3.510135 +step:4796 train loss:3.499317 +step:4797 train loss:3.510414 +step:4798 train loss:3.512290 +step:4799 train loss:3.513175 +step:4800 train loss:3.540424 +step:4801 train loss:3.537553 +step:4802 train loss:3.574514 +step:4803 train loss:3.557746 +step:4804 train loss:3.514240 +step:4805 train loss:3.510297 +step:4806 train loss:3.487020 +step:4807 train loss:3.595414 +step:4808 train loss:3.465263 +step:4809 train loss:3.567534 +step:4810 train loss:3.510581 +step:4811 train loss:3.525084 +step:4812 train loss:3.504480 +step:4813 train loss:3.458297 +step:4814 train loss:3.459953 +step:4815 train loss:3.447105 +step:4816 train loss:3.518655 +step:4817 train loss:3.450358 +step:4818 train loss:3.518131 +step:4819 train loss:3.510639 +step:4820 train loss:3.768290 +step:4821 train loss:3.541141 +step:4822 train loss:3.545622 +step:4823 train loss:3.480851 +step:4824 train loss:3.486236 +step:4825 train loss:3.469152 +step:4826 train loss:3.552747 +step:4827 train loss:3.505955 +step:4828 train loss:3.441445 +step:4829 train loss:3.549800 +step:4830 train loss:3.490313 +step:4831 train loss:3.640408 +step:4832 train loss:3.505772 +step:4833 train loss:3.542121 +step:4834 train loss:3.447210 +step:4835 train loss:3.536611 +step:4836 train loss:3.518738 +step:4837 train loss:3.546019 +step:4838 train loss:3.485102 +step:4839 train loss:3.551851 +step:4840 train loss:3.457343 +step:4841 train loss:3.552585 +step:4842 train loss:3.470614 +step:4843 train loss:3.546694 +step:4844 train loss:3.550300 +step:4845 train loss:3.482428 +step:4846 train loss:3.501667 +step:4847 train loss:3.484668 +step:4848 train loss:3.514396 +step:4849 train loss:3.462441 +step:4850 train loss:3.475054 +step:4851 train loss:3.466897 +step:4852 train loss:3.550116 +step:4853 train loss:3.518996 +step:4854 train loss:3.501363 +step:4855 train loss:3.562641 +step:4856 train loss:3.534183 +step:4857 train loss:3.538799 +step:4858 train loss:3.622179 +step:4859 train loss:3.467431 +step:4860 train loss:3.544266 +step:4861 train loss:3.516581 +step:4862 train loss:3.549612 +step:4863 train loss:3.487194 +step:4864 train loss:3.498018 +step:4865 train loss:3.490016 +step:4866 train loss:3.536294 +step:4867 train loss:3.503677 +step:4868 train loss:3.522695 +step:4869 train loss:3.477492 +step:4870 train loss:3.500476 +step:4871 train loss:3.589240 +step:4872 train loss:3.527309 +step:4873 train loss:3.531583 +step:4874 train loss:3.496308 +step:4875 train loss:3.468564 +step:4876 train loss:3.477874 +step:4877 train loss:3.482644 +step:4878 train loss:3.517819 +step:4879 train loss:3.483877 +step:4880 train loss:3.503212 +step:4881 train loss:3.455391 +step:4882 train loss:3.653850 +step:4883 train loss:3.466484 +step:4884 train loss:3.493555 +step:4885 train loss:3.469989 +step:4886 train loss:3.547381 
+step:4887 train loss:3.500791 +step:4888 train loss:3.509162 +step:4889 train loss:3.505036 +step:4890 train loss:3.540606 +step:4891 train loss:3.480268 +step:4892 train loss:3.483652 +step:4893 train loss:3.532593 +step:4894 train loss:3.467449 +step:4895 train loss:3.501753 +step:4896 train loss:3.478860 +step:4897 train loss:3.557375 +step:4898 train loss:3.505934 +step:4899 train loss:3.490741 +step:4900 train loss:3.531031 +step:4901 train loss:3.493439 +step:4902 train loss:3.474709 +step:4903 train loss:3.499341 +step:4904 train loss:3.508671 +step:4905 train loss:3.509808 +step:4906 train loss:3.506931 +step:4907 train loss:3.581044 +step:4908 train loss:3.485041 +step:4909 train loss:3.493702 +step:4910 train loss:3.509887 +step:4911 train loss:3.568808 +step:4912 train loss:3.541198 +step:4913 train loss:3.524258 +step:4914 train loss:3.511185 +step:4915 train loss:3.494793 +step:4916 train loss:3.433764 +step:4917 train loss:3.460665 +step:4918 train loss:3.489902 +step:4919 train loss:3.483371 +step:4920 train loss:3.496211 +step:4921 train loss:3.649025 +step:4922 train loss:3.535821 +step:4923 train loss:3.552745 +step:4924 train loss:3.554337 +step:4925 train loss:3.487095 +step:4926 train loss:3.478637 +step:4927 train loss:3.513939 +step:4928 train loss:3.550142 +step:4929 train loss:3.505245 +step:4930 train loss:3.486499 +step:4931 train loss:3.480437 +step:4932 train loss:3.492011 +step:4933 train loss:3.483342 +step:4934 train loss:3.550126 +step:4935 train loss:3.535213 +step:4936 train loss:3.499578 +step:4937 train loss:3.610942 +step:4938 train loss:3.599257 +step:4939 train loss:3.464994 +step:4940 train loss:3.543138 +step:4941 train loss:3.443337 +step:4942 train loss:3.483065 +step:4943 train loss:3.484230 +step:4944 train loss:3.490303 +step:4945 train loss:3.534114 +step:4946 train loss:3.510575 +step:4947 train loss:3.490700 +step:4948 train loss:3.534173 +step:4949 train loss:3.437310 +step:4950 train loss:3.517704 +step:4951 train loss:3.566920 +step:4952 train loss:3.508891 +step:4953 train loss:3.538695 +step:4954 train loss:3.447311 +step:4955 train loss:3.518576 +step:4956 train loss:3.548325 +step:4957 train loss:3.542381 +step:4958 train loss:3.455456 +step:4959 train loss:3.573332 +step:4960 train loss:3.501841 +step:4961 train loss:3.518698 +step:4962 train loss:3.483343 +step:4963 train loss:3.529469 +step:4964 train loss:3.480711 +step:4965 train loss:3.631874 +step:4966 train loss:3.481497 +step:4967 train loss:3.589282 +step:4968 train loss:3.477327 +step:4969 train loss:3.523960 +step:4970 train loss:3.510895 +step:4971 train loss:3.469542 +step:4972 train loss:3.505698 +step:4973 train loss:3.514122 +step:4974 train loss:3.500755 +step:4975 train loss:3.594234 +step:4976 train loss:3.566665 +step:4977 train loss:3.512739 +step:4978 train loss:3.501641 +step:4979 train loss:3.501091 +step:4980 train loss:3.611171 +step:4981 train loss:3.444270 +step:4982 train loss:3.527441 +step:4983 train loss:3.452353 +step:4984 train loss:3.638548 +step:4985 train loss:3.533681 +step:4986 train loss:3.476274 +step:4987 train loss:3.496001 +step:4988 train loss:3.698948 +step:4989 train loss:3.503466 +step:4990 train loss:3.491000 +step:4991 train loss:3.506957 +step:4992 train loss:3.492688 +step:4993 train loss:3.471902 +step:4994 train loss:3.585702 +step:4995 train loss:3.506476 +step:4996 train loss:3.592082 +step:4997 train loss:3.493780 +step:4998 train loss:3.497479 +step:4999 train loss:3.482507 +step:5000 validation loss:3.445820 
total_sharp:2.6778e-03 L1_sharp:9.1885e-03 L2_sharp:1.3104e-03 L3_sharp:1.2315e-03 L4_sharp:2.8844e-04 L5_sharp:4.1409e-04 L6_sharp:4.7942e-04 L7_sharp:4.9586e-04 L8_sharp:3.7813e-04 L9_sharp:3.5926e-04 L10_sharp:2.5118e-04 L11_sharp:2.1144e-04 L12_sharp:1.9534e-04 total_fnorm:2.4297e+00 total_l1_linf:2.0777e+04 total_spectral:2.4297e+00 L1_fnorm:5.6495e-01 L2_fnorm:5.2614e-01 L3_fnorm:5.5785e-01 L4_fnorm:5.8821e-01 L5_fnorm:5.9667e-01 L6_fnorm:6.0056e-01 L7_fnorm:5.9935e-01 L8_fnorm:5.9997e-01 L9_fnorm:6.0122e-01 L10_fnorm:6.0432e-01 L11_fnorm:6.0150e-01 L12_fnorm:6.0177e-01 L1_l1linf:4.1749e-01 L2_l1linf:4.5298e-01 L3_l1linf:4.4220e-01 L4_l1linf:4.2995e-01 L5_l1linf:4.1767e-01 L6_l1linf:4.2093e-01 L7_l1linf:4.1639e-01 L8_l1linf:4.1610e-01 L9_l1linf:4.0883e-01 L10_l1linf:4.1120e-01 L11_l1linf:4.1750e-01 L12_l1linf:4.2600e-01 L1_spectral:1.2049e-02 L2_spectral:1.2040e-02 L3_spectral:1.2053e-02 L4_spectral:1.2050e-02 L5_spectral:1.2063e-02 L6_spectral:1.2047e-02 L7_spectral:1.2043e-02 L8_spectral:1.2046e-02 L9_spectral:1.2048e-02 L10_spectral:1.2045e-02 L11_spectral:1.2043e-02 L12_spectral:1.2045e-02 ip_v_neg_g:7.4060e-03 cos_v_neg_g:2.3849e-03 v_norm:2.4297e+00 g_norm:1.2781e+00 hv_norm:1.0415e+00 cos_v_hv:6.2473e-03 hg_norm:2.5114e+02 cos_g_hg:4.0916e-01 v_par:9.8493e-04 v_perp:2.4297e+00 L1_cos_v_neg_g:2.9325e-03 L1_v_norm:5.6495e-01 L2_cos_v_neg_g:4.9552e-03 L2_v_norm:5.2614e-01 L3_cos_v_neg_g:5.9180e-03 L3_v_norm:5.5785e-01 L4_cos_v_neg_g:2.2709e-03 L4_v_norm:5.8821e-01 L5_cos_v_neg_g:2.7142e-03 L5_v_norm:5.9667e-01 L6_cos_v_neg_g:4.9303e-03 L6_v_norm:6.0056e-01 L7_cos_v_neg_g:4.1650e-03 L7_v_norm:5.9935e-01 L8_cos_v_neg_g:4.4442e-03 L8_v_norm:5.9997e-01 L9_cos_v_neg_g:4.5847e-03 L9_v_norm:6.0122e-01 L10_cos_v_neg_g:4.9781e-03 L10_v_norm:6.0432e-01 L11_cos_v_neg_g:4.4756e-03 L11_v_norm:6.0150e-01 L12_cos_v_neg_g:5.1801e-03 L12_v_norm:6.0177e-01 +step:5000 train loss:3.595704 +step:5001 train loss:3.460623 +step:5002 train loss:3.516443 +step:5003 train loss:3.508345 +step:5004 train loss:3.503256 +step:5005 train loss:3.498776 +step:5006 train loss:3.544460 +step:5007 train loss:3.545267 +step:5008 train loss:3.484334 +step:5009 train loss:3.527265 +step:5010 train loss:3.480784 +step:5011 train loss:3.508605 +step:5012 train loss:3.484414 +step:5013 train loss:3.586452 +step:5014 train loss:3.500481 +step:5015 train loss:3.574563 +step:5016 train loss:3.503575 +step:5017 train loss:3.551191 +step:5018 train loss:3.466258 +step:5019 train loss:3.502110 +step:5020 train loss:3.494868 +step:5021 train loss:3.509221 +step:5022 train loss:3.541683 +step:5023 train loss:3.517303 +step:5024 train loss:3.562850 +step:5025 train loss:3.452589 +step:5026 train loss:3.574224 +step:5027 train loss:3.510128 +step:5028 train loss:3.575914 +step:5029 train loss:3.470736 +step:5030 train loss:3.509503 +step:5031 train loss:3.493912 +step:5032 train loss:3.526826 +step:5033 train loss:3.506958 +step:5034 train loss:3.507051 +step:5035 train loss:3.588880 +step:5036 train loss:3.541967 +step:5037 train loss:3.487947 +step:5038 train loss:3.542462 +step:5039 train loss:3.551591 +step:5040 train loss:3.514804 +step:5041 train loss:3.531847 +step:5042 train loss:3.434139 +step:5043 train loss:3.573906 +step:5044 train loss:3.496745 +step:5045 train loss:3.546334 +step:5046 train loss:3.465327 +step:5047 train loss:3.543115 +step:5048 train loss:3.457011 +step:5049 train loss:3.594929 +step:5050 train loss:3.477388 +step:5051 train loss:3.528454 +step:5052 train loss:3.422625 +step:5053 train loss:3.609530 
+step:5054 train loss:3.490432 +step:5055 train loss:3.520937 +step:5056 train loss:3.551754 +step:5057 train loss:3.485139 +step:5058 train loss:3.514290 +step:5059 train loss:3.478701 +step:5060 train loss:3.525822 +step:5061 train loss:3.516858 +step:5062 train loss:3.489638 +step:5063 train loss:3.483402 +step:5064 train loss:3.490956 +step:5065 train loss:3.474586 +step:5066 train loss:3.542653 +step:5067 train loss:3.517260 +step:5068 train loss:3.505823 +step:5069 train loss:3.476181 +step:5070 train loss:3.504766 +step:5071 train loss:3.575796 +step:5072 train loss:3.465315 +step:5073 train loss:3.473019 +step:5074 train loss:3.422190 +step:5075 train loss:3.493462 +step:5076 train loss:3.423666 +step:5077 train loss:3.485933 +step:5078 train loss:3.521197 +step:5079 train loss:3.522363 +step:5080 train loss:3.501235 +step:5081 train loss:3.512979 +step:5082 train loss:3.501080 +step:5083 train loss:3.559282 +step:5084 train loss:3.540704 +step:5085 train loss:3.500095 +step:5086 train loss:3.575031 +step:5087 train loss:3.560899 +step:5088 train loss:3.482648 +step:5089 train loss:3.548240 +step:5090 train loss:3.494680 +step:5091 train loss:3.495756 +step:5092 train loss:3.594956 +step:5093 train loss:3.475256 +step:5094 train loss:3.476233 +step:5095 train loss:3.523839 +step:5096 train loss:3.493958 +step:5097 train loss:3.499332 +step:5098 train loss:3.502990 +step:5099 train loss:3.467317 +step:5100 train loss:3.481604 +step:5101 train loss:3.678730 +step:5102 train loss:3.519417 +step:5103 train loss:3.528496 +step:5104 train loss:3.580793 +step:5105 train loss:3.509789 +step:5106 train loss:3.469404 +step:5107 train loss:3.487934 +step:5108 train loss:3.482203 +step:5109 train loss:3.562947 +step:5110 train loss:3.471409 +step:5111 train loss:3.568861 +step:5112 train loss:3.476390 +step:5113 train loss:3.460972 +step:5114 train loss:3.500399 +step:5115 train loss:3.467836 +step:5116 train loss:3.522376 +step:5117 train loss:3.468649 +step:5118 train loss:3.493807 +step:5119 train loss:3.475378 +step:5120 train loss:3.522415 +step:5121 train loss:3.467237 +step:5122 train loss:3.483710 +step:5123 train loss:3.465158 +step:5124 train loss:3.426303 +step:5125 train loss:3.533504 +step:5126 train loss:3.523535 +step:5127 train loss:3.527225 +step:5128 train loss:3.535871 +step:5129 train loss:3.468046 +step:5130 train loss:3.476897 +step:5131 train loss:3.418924 +step:5132 train loss:3.535671 +step:5133 train loss:3.504657 +step:5134 train loss:3.505140 +step:5135 train loss:3.463017 +step:5136 train loss:3.530167 +step:5137 train loss:3.524209 +step:5138 train loss:3.505057 +step:5139 train loss:3.539132 +step:5140 train loss:3.515532 +step:5141 train loss:3.540668 +step:5142 train loss:3.495158 +step:5143 train loss:3.520321 +step:5144 train loss:3.519974 +step:5145 train loss:3.460063 +step:5146 train loss:3.452660 +step:5147 train loss:3.531931 +step:5148 train loss:3.454918 +step:5149 train loss:3.533218 +step:5150 train loss:3.508191 +step:5151 train loss:3.477017 +step:5152 train loss:3.518856 +step:5153 train loss:3.490638 +step:5154 train loss:3.502965 +step:5155 train loss:3.512548 +step:5156 train loss:3.487343 +step:5157 train loss:3.490450 +step:5158 train loss:3.509918 +step:5159 train loss:3.547328 +step:5160 train loss:3.615577 +step:5161 train loss:3.542147 +step:5162 train loss:3.561124 +step:5163 train loss:3.473605 +step:5164 train loss:3.545682 +step:5165 train loss:3.553851 +step:5166 train loss:3.488724 +step:5167 train loss:3.589647 +step:5168 train 
loss:3.507297 +step:5169 train loss:3.537247 +step:5170 train loss:3.516553 +step:5171 train loss:3.561578 +step:5172 train loss:3.478912 +step:5173 train loss:3.548566 +step:5174 train loss:3.478158 +step:5175 train loss:3.513476 +step:5176 train loss:3.501555 +step:5177 train loss:3.505136 +step:5178 train loss:3.564579 +step:5179 train loss:3.476419 +step:5180 train loss:3.558810 +step:5181 train loss:3.499039 +step:5182 train loss:3.560212 +step:5183 train loss:3.489199 +step:5184 train loss:3.472017 +step:5185 train loss:3.491978 +step:5186 train loss:3.553189 +step:5187 train loss:3.543456 +step:5188 train loss:3.478094 +step:5189 train loss:3.524708 +step:5190 train loss:3.503540 +step:5191 train loss:3.488254 +step:5192 train loss:3.468095 +step:5193 train loss:3.555852 +step:5194 train loss:3.503407 +step:5195 train loss:3.477271 +step:5196 train loss:3.548233 +step:5197 train loss:3.608871 +step:5198 train loss:3.507147 +step:5199 train loss:3.491024 +step:5200 train loss:3.522355 +step:5201 train loss:3.506048 +step:5202 train loss:3.516657 +step:5203 train loss:3.512482 +step:5204 train loss:3.489804 +step:5205 train loss:3.532266 +step:5206 train loss:3.468808 +step:5207 train loss:3.472104 +step:5208 train loss:3.534935 +step:5209 train loss:3.551092 +step:5210 train loss:3.456452 +step:5211 train loss:3.500696 +step:5212 train loss:3.514925 +step:5213 train loss:3.489867 +step:5214 train loss:3.540693 +step:5215 train loss:3.655481 +step:5216 train loss:3.502579 +step:5217 train loss:3.483269 +step:5218 train loss:3.484862 +step:5219 train loss:3.550746 +step:5220 train loss:3.467307 +step:5221 train loss:3.468464 +step:5222 train loss:3.551254 +step:5223 train loss:3.542386 +step:5224 train loss:3.441252 +step:5225 train loss:3.589673 +step:5226 train loss:3.503969 +step:5227 train loss:3.576958 +step:5228 train loss:3.545797 +step:5229 train loss:3.488912 +step:5230 train loss:3.500680 +step:5231 train loss:3.451381 +step:5232 train loss:3.574227 +step:5233 train loss:3.530414 +step:5234 train loss:3.537736 +step:5235 train loss:3.482725 +step:5236 train loss:3.560664 +step:5237 train loss:3.611038 +step:5238 train loss:3.517816 +step:5239 train loss:3.574965 +step:5240 train loss:3.460214 +step:5241 train loss:3.519024 +step:5242 train loss:3.486468 +step:5243 train loss:3.496103 +step:5244 train loss:3.493371 +step:5245 train loss:3.536776 +step:5246 train loss:3.581291 +step:5247 train loss:3.508656 +step:5248 train loss:3.480241 +step:5249 train loss:3.537696 +step:5250 validation loss:3.434598 +step:5250 train loss:3.506507 +step:5251 train loss:3.571127 +step:5252 train loss:3.457011 +step:5253 train loss:3.610525 +step:5254 train loss:3.485827 +step:5255 train loss:3.559763 +step:5256 train loss:3.471136 +step:5257 train loss:3.530455 +step:5258 train loss:3.522044 +step:5259 train loss:3.510864 +step:5260 train loss:3.505760 +step:5261 train loss:3.493199 +step:5262 train loss:3.538502 +step:5263 train loss:3.519264 +step:5264 train loss:3.474572 +step:5265 train loss:3.551769 +step:5266 train loss:3.468250 +step:5267 train loss:3.481918 +step:5268 train loss:3.461616 +step:5269 train loss:3.467462 +step:5270 train loss:3.516213 +step:5271 train loss:3.441838 +step:5272 train loss:3.537771 +step:5273 train loss:3.441458 +step:5274 train loss:3.496018 +step:5275 train loss:3.506435 +step:5276 train loss:3.634519 +step:5277 train loss:3.532559 +step:5278 train loss:3.478860 +step:5279 train loss:3.530215 +step:5280 train loss:3.502246 +step:5281 train loss:3.497452 
+step:5282 train loss:3.469951 +step:5283 train loss:3.472763 +step:5284 train loss:3.482204 +step:5285 train loss:3.542107 +step:5286 train loss:3.455767 +step:5287 train loss:3.553541 +step:5288 train loss:3.531170 +step:5289 train loss:3.499829 +step:5290 train loss:3.553095 +step:5291 train loss:3.507973 +step:5292 train loss:3.523685 +step:5293 train loss:3.494202 +step:5294 train loss:3.480820 +step:5295 train loss:3.488637 +step:5296 train loss:3.483076 +step:5297 train loss:3.499651 +step:5298 train loss:3.447392 +step:5299 train loss:3.539186 +step:5300 train loss:3.488631 +step:5301 train loss:3.558233 +step:5302 train loss:3.561822 +step:5303 train loss:3.425797 +step:5304 train loss:3.455642 +step:5305 train loss:3.438536 +step:5306 train loss:3.466706 +step:5307 train loss:3.472551 +step:5308 train loss:3.568343 +step:5309 train loss:3.517011 +step:5310 train loss:3.503051 +step:5311 train loss:3.568320 +step:5312 train loss:3.452097 +step:5313 train loss:3.540508 +step:5314 train loss:3.535728 +step:5315 train loss:3.495713 +step:5316 train loss:3.525838 +step:5317 train loss:3.543845 +step:5318 train loss:3.500573 +step:5319 train loss:3.526070 +step:5320 train loss:3.477904 +step:5321 train loss:3.600293 +step:5322 train loss:3.509094 +step:5323 train loss:3.512266 +step:5324 train loss:3.454803 +step:5325 train loss:3.540730 +step:5326 train loss:3.526998 +step:5327 train loss:3.418023 +step:5328 train loss:3.555307 +step:5329 train loss:3.520933 +step:5330 train loss:3.522528 +step:5331 train loss:3.567361 +step:5332 train loss:3.492869 +step:5333 train loss:3.556348 +step:5334 train loss:3.529432 +step:5335 train loss:3.591868 +step:5336 train loss:3.623515 +step:5337 train loss:3.457400 +step:5338 train loss:3.466427 +step:5339 train loss:3.492452 +step:5340 train loss:3.514888 +step:5341 train loss:3.526068 +step:5342 train loss:3.427378 +step:5343 train loss:3.587564 +step:5344 train loss:3.469038 +step:5345 train loss:3.474244 +step:5346 train loss:3.472485 +step:5347 train loss:3.496223 +step:5348 train loss:3.539733 +step:5349 train loss:3.478513 +step:5350 train loss:3.518128 +step:5351 train loss:3.589766 +step:5352 train loss:3.636606 +step:5353 train loss:3.543056 +step:5354 train loss:3.512222 +step:5355 train loss:3.480247 +step:5356 train loss:3.499480 +step:5357 train loss:3.482736 +step:5358 train loss:3.502174 +step:5359 train loss:3.516015 +step:5360 train loss:3.487942 +step:5361 train loss:3.492651 +step:5362 train loss:3.475529 +step:5363 train loss:3.470682 +step:5364 train loss:3.470283 +step:5365 train loss:3.506898 +step:5366 train loss:3.531657 +step:5367 train loss:3.468075 +step:5368 train loss:3.531524 +step:5369 train loss:3.546868 +step:5370 train loss:3.447738 +step:5371 train loss:3.496691 +step:5372 train loss:3.526189 +step:5373 train loss:3.559564 +step:5374 train loss:3.444191 +step:5375 train loss:3.490542 +step:5376 train loss:3.554086 +step:5377 train loss:3.493366 +step:5378 train loss:3.469587 +step:5379 train loss:3.469567 +step:5380 train loss:3.507148 +step:5381 train loss:3.545036 +step:5382 train loss:3.451515 +step:5383 train loss:3.519657 +step:5384 train loss:3.529476 +step:5385 train loss:3.525095 +step:5386 train loss:3.508776 +step:5387 train loss:3.515518 +step:5388 train loss:3.524164 +step:5389 train loss:3.458032 +step:5390 train loss:3.488168 +step:5391 train loss:3.425141 +step:5392 train loss:3.493209 +step:5393 train loss:3.485789 +step:5394 train loss:3.478324 +step:5395 train loss:3.547424 +step:5396 train 
loss:3.514493 +step:5397 train loss:3.535253 +step:5398 train loss:3.529233 +step:5399 train loss:3.562509 +step:5400 train loss:3.569199 +step:5401 train loss:3.529019 +step:5402 train loss:3.636951 +step:5403 train loss:3.541516 +step:5404 train loss:3.518743 +step:5405 train loss:3.586880 +step:5406 train loss:3.544600 +step:5407 train loss:3.479001 +step:5408 train loss:3.619018 +step:5409 train loss:3.458995 +step:5410 train loss:3.522920 +step:5411 train loss:3.506819 +step:5412 train loss:3.487071 +step:5413 train loss:3.535257 +step:5414 train loss:3.512995 +step:5415 train loss:3.489721 +step:5416 train loss:3.483274 +step:5417 train loss:3.555799 +step:5418 train loss:3.566236 +step:5419 train loss:3.474107 +step:5420 train loss:3.533841 +step:5421 train loss:3.503640 +step:5422 train loss:3.550519 +step:5423 train loss:3.524117 +step:5424 train loss:3.426455 +step:5425 train loss:3.494424 +step:5426 train loss:3.582270 +step:5427 train loss:3.478287 +step:5428 train loss:3.511372 +step:5429 train loss:3.442393 +step:5430 train loss:3.480792 +step:5431 train loss:3.540313 +step:5432 train loss:3.517386 +step:5433 train loss:3.522994 +step:5434 train loss:3.473767 +step:5435 train loss:3.470145 +step:5436 train loss:3.471796 +step:5437 train loss:3.511379 +step:5438 train loss:3.492679 +step:5439 train loss:3.501515 +step:5440 train loss:3.542922 +step:5441 train loss:3.561474 +step:5442 train loss:3.479308 +step:5443 train loss:3.481613 +step:5444 train loss:3.422941 +step:5445 train loss:3.513236 +step:5446 train loss:3.478750 +step:5447 train loss:3.520120 +step:5448 train loss:3.575353 +step:5449 train loss:3.465451 +step:5450 train loss:3.502200 +step:5451 train loss:3.493273 +step:5452 train loss:3.510324 +step:5453 train loss:3.565418 +step:5454 train loss:3.489742 +step:5455 train loss:3.476986 +step:5456 train loss:3.616728 +step:5457 train loss:3.497421 +step:5458 train loss:3.526631 +step:5459 train loss:3.473866 +step:5460 train loss:3.490016 +step:5461 train loss:3.493611 +step:5462 train loss:3.495625 +step:5463 train loss:3.506822 +step:5464 train loss:3.508360 +step:5465 train loss:3.452656 +step:5466 train loss:3.524683 +step:5467 train loss:3.507629 +step:5468 train loss:3.515532 +step:5469 train loss:3.609515 +step:5470 train loss:3.504678 +step:5471 train loss:3.578454 +step:5472 train loss:3.522023 +step:5473 train loss:3.428373 +step:5474 train loss:3.762870 +step:5475 train loss:3.438800 +step:5476 train loss:3.517913 +step:5477 train loss:3.513954 +step:5478 train loss:3.516075 +step:5479 train loss:3.655974 +step:5480 train loss:3.499804 +step:5481 train loss:3.561769 +step:5482 train loss:3.476433 +step:5483 train loss:3.516569 +step:5484 train loss:3.553690 +step:5485 train loss:3.472394 +step:5486 train loss:3.519508 +step:5487 train loss:3.517709 +step:5488 train loss:3.428567 +step:5489 train loss:3.532745 +step:5490 train loss:3.480750 +step:5491 train loss:3.585443 +step:5492 train loss:3.508546 +step:5493 train loss:3.441636 +step:5494 train loss:3.492866 +step:5495 train loss:3.468221 +step:5496 train loss:3.468967 +step:5497 train loss:3.587090 +step:5498 train loss:3.457644 +step:5499 train loss:3.595727 +step:5500 validation loss:3.430755 total_sharp:1.7468e-03 L1_sharp:3.9783e-03 L2_sharp:1.2610e-03 L3_sharp:1.9492e-03 L4_sharp:2.5447e-04 L5_sharp:3.1901e-04 L6_sharp:3.6381e-04 L7_sharp:4.5862e-04 L8_sharp:2.9278e-04 L9_sharp:2.4281e-04 L10_sharp:1.9928e-04 L11_sharp:1.6836e-04 L12_sharp:1.3620e-04 total_fnorm:2.4284e+00 
total_l1_linf:2.0771e+04 total_spectral:2.4284e+00 L1_fnorm:5.6658e-01 L2_fnorm:5.2262e-01 L3_fnorm:5.5294e-01 L4_fnorm:5.8770e-01 L5_fnorm:5.9763e-01 L6_fnorm:6.0049e-01 L7_fnorm:5.9860e-01 L8_fnorm:5.9932e-01 L9_fnorm:6.0109e-01 L10_fnorm:6.0380e-01 L11_fnorm:6.0164e-01 L12_fnorm:6.0215e-01 L1_l1linf:4.1350e-01 L2_l1linf:4.4745e-01 L3_l1linf:4.3978e-01 L4_l1linf:4.2180e-01 L5_l1linf:4.1991e-01 L6_l1linf:4.2065e-01 L7_l1linf:4.1508e-01 L8_l1linf:4.1024e-01 L9_l1linf:4.0839e-01 L10_l1linf:4.0998e-01 L11_l1linf:4.2326e-01 L12_l1linf:4.2874e-01 L1_spectral:1.2043e-02 L2_spectral:1.2038e-02 L3_spectral:1.2048e-02 L4_spectral:1.2048e-02 L5_spectral:1.2071e-02 L6_spectral:1.2045e-02 L7_spectral:1.2049e-02 L8_spectral:1.2044e-02 L9_spectral:1.2044e-02 L10_spectral:1.2045e-02 L11_spectral:1.2045e-02 L12_spectral:1.2047e-02 ip_v_neg_g:6.3195e-03 cos_v_neg_g:1.4168e-03 v_norm:2.4284e+00 g_norm:1.8367e+00 hv_norm:5.5725e-01 cos_v_hv:7.6122e-03 hg_norm:8.8701e+01 cos_g_hg:6.4048e-01 v_par:5.8371e-04 v_perp:2.4284e+00 L1_cos_v_neg_g:1.4710e-03 L1_v_norm:5.6658e-01 L2_cos_v_neg_g:3.5988e-03 L2_v_norm:5.2262e-01 L3_cos_v_neg_g:5.5352e-03 L3_v_norm:5.5294e-01 L4_cos_v_neg_g:1.5552e-03 L4_v_norm:5.8770e-01 L5_cos_v_neg_g:2.6354e-03 L5_v_norm:5.9763e-01 L6_cos_v_neg_g:2.9728e-03 L6_v_norm:6.0049e-01 L7_cos_v_neg_g:2.5912e-03 L7_v_norm:5.9860e-01 L8_cos_v_neg_g:2.8164e-03 L8_v_norm:5.9932e-01 L9_cos_v_neg_g:2.2792e-03 L9_v_norm:6.0109e-01 L10_cos_v_neg_g:2.6261e-03 L10_v_norm:6.0380e-01 L11_cos_v_neg_g:2.6178e-04 L11_v_norm:6.0164e-01 L12_cos_v_neg_g:1.3133e-03 L12_v_norm:6.0215e-01 +step:5500 train loss:3.507043 +step:5501 train loss:3.584576 +step:5502 train loss:3.528038 +step:5503 train loss:3.497271 +step:5504 train loss:3.541376 +step:5505 train loss:3.504610 +step:5506 train loss:3.548149 +step:5507 train loss:3.529723 +step:5508 train loss:3.560056 +step:5509 train loss:3.568361 +step:5510 train loss:3.537016 +step:5511 train loss:3.530623 +step:5512 train loss:3.656830 +step:5513 train loss:3.455664 +step:5514 train loss:3.517554 +step:5515 train loss:3.542478 +step:5516 train loss:3.568534 +step:5517 train loss:3.524398 +step:5518 train loss:3.553434 +step:5519 train loss:3.587174 +step:5520 train loss:3.492786 +step:5521 train loss:3.502595 +step:5522 train loss:3.475915 +step:5523 train loss:3.520388 +step:5524 train loss:3.562336 +step:5525 train loss:3.472124 +step:5526 train loss:3.485667 +step:5527 train loss:3.512400 +step:5528 train loss:3.612715 +step:5529 train loss:3.576081 +step:5530 train loss:3.541601 +step:5531 train loss:3.478241 +step:5532 train loss:3.505165 +step:5533 train loss:3.536857 +step:5534 train loss:3.457576 +step:5535 train loss:3.506798 +step:5536 train loss:3.447392 +step:5537 train loss:3.491794 +step:5538 train loss:3.480198 +step:5539 train loss:3.430312 +step:5540 train loss:3.653851 +step:5541 train loss:3.462595 +step:5542 train loss:3.512517 +step:5543 train loss:3.504474 +step:5544 train loss:3.488629 +step:5545 train loss:3.482600 +step:5546 train loss:3.514743 +step:5547 train loss:3.450777 +step:5548 train loss:3.495693 +step:5549 train loss:3.500711 +step:5550 train loss:3.521196 +step:5551 train loss:3.524689 +step:5552 train loss:3.479872 +step:5553 train loss:3.515389 +step:5554 train loss:3.482015 +step:5555 train loss:3.488865 +step:5556 train loss:3.509320 +step:5557 train loss:3.570413 +step:5558 train loss:3.492016 +step:5559 train loss:3.502027 +step:5560 train loss:3.491279 +step:5561 train loss:3.529966 +step:5562 train loss:3.479523 
+step:5563 train loss:3.462760 +step:5564 train loss:3.498706 +step:5565 train loss:3.565920 +step:5566 train loss:3.465976 +step:5567 train loss:3.586689 +step:5568 train loss:3.703568 +step:5569 train loss:3.491944 +step:5570 train loss:3.427274 +step:5571 train loss:3.512901 +step:5572 train loss:3.453202 +step:5573 train loss:3.443677 +step:5574 train loss:3.408729 +step:5575 train loss:3.511378 +step:5576 train loss:3.493794 +step:5577 train loss:3.497630 +step:5578 train loss:3.528662 +step:5579 train loss:3.487378 +step:5580 train loss:3.507524 +step:5581 train loss:3.528557 +step:5582 train loss:3.505841 +step:5583 train loss:3.518734 +step:5584 train loss:3.639905 +step:5585 train loss:3.542470 +step:5586 train loss:3.479121 +step:5587 train loss:3.510018 +step:5588 train loss:3.523182 +step:5589 train loss:3.527547 +step:5590 train loss:3.583066 +step:5591 train loss:3.453363 +step:5592 train loss:3.645783 +step:5593 train loss:3.501544 +step:5594 train loss:3.510476 +step:5595 train loss:3.504445 +step:5596 train loss:3.452086 +step:5597 train loss:3.469459 +step:5598 train loss:3.479074 +step:5599 train loss:3.477764 +step:5600 train loss:3.523561 +step:5601 train loss:3.548002 +step:5602 train loss:3.483093 +step:5603 train loss:3.523138 +step:5604 train loss:3.518925 +step:5605 train loss:3.492069 +step:5606 train loss:3.496061 +step:5607 train loss:3.524002 +step:5608 train loss:3.468557 +step:5609 train loss:3.522448 +step:5610 train loss:3.477709 +step:5611 train loss:3.516961 +step:5612 train loss:3.547963 +step:5613 train loss:3.507083 +step:5614 train loss:3.475105 +step:5615 train loss:3.573214 +step:5616 train loss:3.471678 +step:5617 train loss:3.560515 +step:5618 train loss:3.545390 +step:5619 train loss:3.500185 +step:5620 train loss:3.501685 +step:5621 train loss:3.575241 +step:5622 train loss:3.458683 +step:5623 train loss:3.495457 +step:5624 train loss:3.485252 +step:5625 train loss:3.519807 +step:5626 train loss:3.513165 +step:5627 train loss:3.482833 +step:5628 train loss:3.528566 +step:5629 train loss:3.507068 +step:5630 train loss:3.436713 +step:5631 train loss:3.478917 +step:5632 train loss:3.521915 +step:5633 train loss:3.511611 +step:5634 train loss:3.471897 +step:5635 train loss:3.504430 +step:5636 train loss:3.484505 +step:5637 train loss:3.620325 +step:5638 train loss:3.529549 +step:5639 train loss:3.512083 +step:5640 train loss:3.511886 +step:5641 train loss:3.553663 +step:5642 train loss:3.489338 +step:5643 train loss:3.501940 +step:5644 train loss:3.587039 +step:5645 train loss:3.542203 +step:5646 train loss:3.539946 +step:5647 train loss:3.528286 +step:5648 train loss:3.518754 +step:5649 train loss:3.432697 +step:5650 train loss:3.437086 +step:5651 train loss:3.512658 +step:5652 train loss:3.511927 +step:5653 train loss:3.482461 +step:5654 train loss:3.609046 +step:5655 train loss:3.470895 +step:5656 train loss:3.497045 +step:5657 train loss:3.560942 +step:5658 train loss:3.467021 +step:5659 train loss:3.504030 +step:5660 train loss:3.549463 +step:5661 train loss:3.494419 +step:5662 train loss:3.533609 +step:5663 train loss:3.422745 +step:5664 train loss:3.395118 +step:5665 train loss:3.515776 +step:5666 train loss:3.518392 +step:5667 train loss:3.555047 +step:5668 train loss:3.484292 +step:5669 train loss:3.496130 +step:5670 train loss:3.500774 +step:5671 train loss:3.482151 +step:5672 train loss:3.534515 +step:5673 train loss:3.502479 +step:5674 train loss:3.568772 +step:5675 train loss:3.487966 +step:5676 train loss:3.635646 +step:5677 train 
loss:3.530148 +step:5678 train loss:3.510543 +step:5679 train loss:3.497266 +step:5680 train loss:3.530103 +step:5681 train loss:3.498580 +step:5682 train loss:3.510667 +step:5683 train loss:3.470844 +step:5684 train loss:3.481268 +step:5685 train loss:3.525492 +step:5686 train loss:3.541713 +step:5687 train loss:3.496660 +step:5688 train loss:3.574486 +step:5689 train loss:3.481012 +step:5690 train loss:3.637096 +step:5691 train loss:3.461882 +step:5692 train loss:3.458057 +step:5693 train loss:3.456860 +step:5694 train loss:3.478909 +step:5695 train loss:3.493658 +step:5696 train loss:3.542101 +step:5697 train loss:3.473040 +step:5698 train loss:3.488039 +step:5699 train loss:3.501410 +step:5700 train loss:3.499408 +step:5701 train loss:3.496728 +step:5702 train loss:3.561466 +step:5703 train loss:3.462064 +step:5704 train loss:3.503585 +step:5705 train loss:3.511293 +step:5706 train loss:3.533937 +step:5707 train loss:3.456266 +step:5708 train loss:3.540299 +step:5709 train loss:3.542647 +step:5710 train loss:3.537223 +step:5711 train loss:3.553809 +step:5712 train loss:3.537807 +step:5713 train loss:3.465207 +step:5714 train loss:3.546632 +step:5715 train loss:3.503773 +step:5716 train loss:3.510384 +step:5717 train loss:3.532101 +step:5718 train loss:3.480368 +step:5719 train loss:3.552688 +step:5720 train loss:3.521479 +step:5721 train loss:3.453454 +step:5722 train loss:3.467174 +step:5723 train loss:3.541240 +step:5724 train loss:3.463672 +step:5725 train loss:3.536765 +step:5726 train loss:3.528523 +step:5727 train loss:3.488124 +step:5728 train loss:3.492674 +step:5729 train loss:3.488878 +step:5730 train loss:3.567035 +step:5731 train loss:3.428679 +step:5732 train loss:3.490139 +step:5733 train loss:3.483418 +step:5734 train loss:3.498682 +step:5735 train loss:3.487503 +step:5736 train loss:3.493092 +step:5737 train loss:3.513460 +step:5738 train loss:3.477224 +step:5739 train loss:3.490092 +step:5740 train loss:3.528132 +step:5741 train loss:3.506965 +step:5742 train loss:3.556230 +step:5743 train loss:3.523062 +step:5744 train loss:3.483614 +step:5745 train loss:3.483008 +step:5746 train loss:3.514238 +step:5747 train loss:3.502748 +step:5748 train loss:3.546627 +step:5749 train loss:3.502317 +step:5750 validation loss:3.428410 +step:5750 train loss:3.509851 +step:5751 train loss:3.524054 +step:5752 train loss:3.509623 +step:5753 train loss:3.483709 +step:5754 train loss:3.489129 +step:5755 train loss:3.502357 +step:5756 train loss:3.495097 +step:5757 train loss:3.555480 +step:5758 train loss:3.490490 +step:5759 train loss:3.453928 +step:5760 train loss:3.533894 +step:5761 train loss:3.528614 +step:5762 train loss:3.487972 +step:5763 train loss:3.515047 +step:5764 train loss:3.476128 +step:5765 train loss:3.596583 +step:5766 train loss:3.505811 +step:5767 train loss:3.540887 +step:5768 train loss:3.476490 +step:5769 train loss:3.598973 +step:5770 train loss:3.522470 +step:5771 train loss:3.549365 +step:5772 train loss:3.497824 +step:5773 train loss:3.476605 +step:5774 train loss:3.486497 +step:5775 train loss:3.557574 +step:5776 train loss:3.541276 +step:5777 train loss:3.460109 +step:5778 train loss:3.557890 +step:5779 train loss:3.502337 +step:5780 train loss:3.480812 +step:5781 train loss:3.545121 +step:5782 train loss:3.501773 +step:5783 train loss:3.463238 +step:5784 train loss:3.566152 +step:5785 train loss:3.551843 +step:5786 train loss:3.465255 +step:5787 train loss:3.514267 +step:5788 train loss:3.522030 +step:5789 train loss:3.463290 +step:5790 train loss:3.567608 
+step:5791 train loss:3.492120 +step:5792 train loss:3.771984 +step:5793 train loss:3.538735 +step:5794 train loss:3.556146 +step:5795 train loss:3.549090 +step:5796 train loss:3.531729 +step:5797 train loss:3.511227 +step:5798 train loss:3.510553 +step:5799 train loss:3.483843 +step:5800 train loss:3.637484 +step:5801 train loss:3.513684 +step:5802 train loss:3.504614 +step:5803 train loss:3.512995 +step:5804 train loss:3.531977 +step:5805 train loss:3.499020 +step:5806 train loss:3.533505 +step:5807 train loss:3.456874 +step:5808 train loss:3.489231 +step:5809 train loss:3.502396 +step:5810 train loss:3.471518 +step:5811 train loss:3.491156 +step:5812 train loss:3.468979 +step:5813 train loss:3.479130 +step:5814 train loss:3.474444 +step:5815 train loss:3.479942 +step:5816 train loss:3.539312 +step:5817 train loss:3.550586 +step:5818 train loss:3.525280 +step:5819 train loss:3.575375 +step:5820 train loss:3.514003 +step:5821 train loss:3.508066 +step:5822 train loss:3.523969 +step:5823 train loss:3.528790 +step:5824 train loss:3.479537 +step:5825 train loss:3.574807 +step:5826 train loss:3.485934 +step:5827 train loss:3.452873 +step:5828 train loss:3.438807 +step:5829 train loss:3.501714 +step:5830 train loss:3.475472 +step:5831 train loss:3.448013 +step:5832 train loss:3.564340 +step:5833 train loss:3.538760 +step:5834 train loss:3.523791 +step:5835 train loss:3.472478 +step:5836 train loss:3.440219 +step:5837 train loss:3.560873 +step:5838 train loss:3.540776 +step:5839 train loss:3.516321 +step:5840 train loss:3.598907 +step:5841 train loss:3.520135 +step:5842 train loss:3.534650 +step:5843 train loss:3.477449 +step:5844 train loss:3.547338 +step:5845 train loss:3.455362 +step:5846 train loss:3.508245 +step:5847 train loss:3.531560 +step:5848 train loss:3.599109 +step:5849 train loss:3.497142 +step:5850 train loss:3.522899 +step:5851 train loss:3.487171 +step:5852 train loss:3.579356 +step:5853 train loss:3.668940 +step:5854 train loss:3.455856 +step:5855 train loss:3.516319 +step:5856 train loss:3.489893 +step:5857 train loss:3.501363 +step:5858 train loss:3.473547 +step:5859 train loss:3.483748 +step:5860 train loss:3.582467 +step:5861 train loss:3.467480 +step:5862 train loss:3.578220 +step:5863 train loss:3.518749 +step:5864 train loss:3.506067 +step:5865 train loss:3.508055 +step:5866 train loss:3.500846 +step:5867 train loss:3.588529 +step:5868 train loss:3.504649 +step:5869 train loss:3.531663 +step:5870 train loss:3.507267 +step:5871 train loss:3.487226 +step:5872 train loss:3.516116 +step:5873 train loss:3.497751 +step:5874 train loss:3.578685 +step:5875 train loss:3.505191 +step:5876 train loss:3.486791 +step:5877 train loss:3.495235 +step:5878 train loss:3.493625 +step:5879 train loss:3.465438 +step:5880 train loss:3.665830 +step:5881 train loss:3.499970 +step:5882 train loss:3.474696 +step:5883 train loss:3.477123 +step:5884 train loss:3.491842 +step:5885 train loss:3.490328 +step:5886 train loss:3.512391 +step:5887 train loss:3.510387 +step:5888 train loss:3.493098 +step:5889 train loss:3.469320 +step:5890 train loss:3.515010 +step:5891 train loss:3.461712 +step:5892 train loss:3.546133 +step:5893 train loss:3.464694 +step:5894 train loss:3.456554 +step:5895 train loss:3.466220 +step:5896 train loss:3.470463 +step:5897 train loss:3.539950 +step:5898 train loss:3.764083 +step:5899 train loss:3.488314 +step:5900 train loss:3.538810 +step:5901 train loss:3.498411 +step:5902 train loss:3.501109 +step:5903 train loss:3.492333 +step:5904 train loss:3.523911 +step:5905 train 
loss:3.628851 +step:5906 train loss:3.569860 +step:5907 train loss:3.514281 +step:5908 train loss:3.489419 +step:5909 train loss:3.484617 +step:5910 train loss:3.471295 +step:5911 train loss:3.488906 +step:5912 train loss:3.531391 +step:5913 train loss:3.523841 +step:5914 train loss:3.504567 +step:5915 train loss:3.631552 +step:5916 train loss:3.510531 +step:5917 train loss:3.479924 +step:5918 train loss:3.481392 +step:5919 train loss:3.507026 +step:5920 train loss:3.502831 +step:5921 train loss:3.475863 +step:5922 train loss:3.533627 +step:5923 train loss:3.524747 +step:5924 train loss:3.483071 +step:5925 train loss:3.607557 +step:5926 train loss:3.490257 +step:5927 train loss:3.469601 +step:5928 train loss:3.506038 +step:5929 train loss:3.522534 +step:5930 train loss:3.476579 +step:5931 train loss:3.458701 +step:5932 train loss:3.502611 +step:5933 train loss:3.554932 +step:5934 train loss:3.466030 +step:5935 train loss:3.494069 +step:5936 train loss:3.480118 +step:5937 train loss:3.459667 +step:5938 train loss:3.481330 +step:5939 train loss:3.457198 +step:5940 train loss:3.538509 +step:5941 train loss:3.476108 +step:5942 train loss:3.491745 +step:5943 train loss:3.494736 +step:5944 train loss:3.550013 +step:5945 train loss:3.485675 +step:5946 train loss:3.459146 +step:5947 train loss:3.473475 +step:5948 train loss:3.512329 +step:5949 train loss:3.558469 +step:5950 train loss:3.515849 +step:5951 train loss:3.519076 +step:5952 train loss:3.442601 +step:5953 train loss:3.482354 +step:5954 train loss:3.494145 +step:5955 train loss:3.502487 +step:5956 train loss:3.477355 +step:5957 train loss:3.446697 +step:5958 train loss:3.520346 +step:5959 train loss:3.476727 +step:5960 train loss:3.452281 +step:5961 train loss:3.479060 +step:5962 train loss:3.510240 +step:5963 train loss:3.543186 +step:5964 train loss:3.499106 +step:5965 train loss:3.520023 +step:5966 train loss:3.515167 +step:5967 train loss:3.480438 +step:5968 train loss:3.553601 +step:5969 train loss:3.493905 +step:5970 train loss:3.513423 +step:5971 train loss:3.462765 +step:5972 train loss:3.492182 +step:5973 train loss:3.478839 +step:5974 train loss:3.506005 +step:5975 train loss:3.476635 +step:5976 train loss:3.517438 +step:5977 train loss:3.474333 +step:5978 train loss:3.456851 +step:5979 train loss:3.492898 +step:5980 train loss:3.566821 +step:5981 train loss:3.455068 +step:5982 train loss:3.468600 +step:5983 train loss:3.535857 +step:5984 train loss:3.477426 +step:5985 train loss:3.523239 +step:5986 train loss:3.497154 +step:5987 train loss:3.482766 +step:5988 train loss:3.489344 +step:5989 train loss:3.510217 +step:5990 train loss:3.441895 +step:5991 train loss:3.502797 +step:5992 train loss:3.537797 +step:5993 train loss:3.487558 +step:5994 train loss:3.508091 +step:5995 train loss:3.400440 +step:5996 train loss:3.566732 +step:5997 train loss:3.546756 +step:5998 train loss:3.421968 +step:5999 train loss:3.452326 +step:6000 validation loss:3.418829 total_sharp:1.8131e-03 L1_sharp:4.0129e-03 L2_sharp:4.6146e-04 L3_sharp:4.3379e-04 L4_sharp:2.2544e-04 L5_sharp:3.2204e-04 L6_sharp:5.3841e-04 L7_sharp:6.1471e-04 L8_sharp:3.7489e-04 L9_sharp:3.1069e-04 L10_sharp:2.0360e-04 L11_sharp:1.8586e-04 L12_sharp:3.0646e-04 total_fnorm:2.4317e+00 total_l1_linf:2.0799e+04 total_spectral:2.4317e+00 L1_fnorm:5.6848e-01 L2_fnorm:5.3136e-01 L3_fnorm:5.6302e-01 L4_fnorm:5.9049e-01 L5_fnorm:5.9640e-01 L6_fnorm:6.0039e-01 L7_fnorm:5.9668e-01 L8_fnorm:5.9775e-01 L9_fnorm:5.9995e-01 L10_fnorm:6.0349e-01 L11_fnorm:6.0069e-01 L12_fnorm:6.0236e-01 
L1_l1linf:4.1077e-01 L2_l1linf:4.5468e-01 L3_l1linf:4.4164e-01 L4_l1linf:4.2819e-01 L5_l1linf:4.2092e-01 L6_l1linf:4.1798e-01 L7_l1linf:4.1576e-01 L8_l1linf:4.1274e-01 L9_l1linf:4.0920e-01 L10_l1linf:4.1121e-01 L11_l1linf:4.2051e-01 L12_l1linf:4.2453e-01 L1_spectral:1.2049e-02 L2_spectral:1.2037e-02 L3_spectral:1.2048e-02 L4_spectral:1.2051e-02 L5_spectral:1.2064e-02 L6_spectral:1.2044e-02 L7_spectral:1.2044e-02 L8_spectral:1.2043e-02 L9_spectral:1.2045e-02 L10_spectral:1.2042e-02 L11_spectral:1.2049e-02 L12_spectral:1.2046e-02 ip_v_neg_g:6.2093e-03 cos_v_neg_g:2.3578e-03 v_norm:2.4317e+00 g_norm:1.0830e+00 hv_norm:3.4209e-01 cos_v_hv:1.2888e-02 hg_norm:2.3527e+01 cos_g_hg:6.1374e-01 v_par:6.6327e-04 v_perp:2.4317e+00 L1_cos_v_neg_g:3.1852e-03 L1_v_norm:5.6848e-01 L2_cos_v_neg_g:3.9542e-03 L2_v_norm:5.3136e-01 L3_cos_v_neg_g:3.5809e-03 L3_v_norm:5.6302e-01 L4_cos_v_neg_g:2.7153e-03 L4_v_norm:5.9049e-01 L5_cos_v_neg_g:3.3430e-03 L5_v_norm:5.9640e-01 L6_cos_v_neg_g:4.1345e-03 L6_v_norm:6.0039e-01 L7_cos_v_neg_g:3.8944e-03 L7_v_norm:5.9668e-01 L8_cos_v_neg_g:3.7368e-03 L8_v_norm:5.9775e-01 L9_cos_v_neg_g:4.2450e-03 L9_v_norm:5.9995e-01 L10_cos_v_neg_g:4.1560e-03 L10_v_norm:6.0349e-01 L11_cos_v_neg_g:3.8086e-03 L11_v_norm:6.0069e-01 L12_cos_v_neg_g:3.6841e-03 L12_v_norm:6.0236e-01 +step:6000 train loss:3.499431 +step:6001 train loss:3.465611 +step:6002 train loss:3.492725 +step:6003 train loss:3.511632 +step:6004 train loss:3.465263 +step:6005 train loss:3.536597 +step:6006 train loss:3.443839 +step:6007 train loss:3.468688 +step:6008 train loss:3.480425 +step:6009 train loss:3.515105 +step:6010 train loss:3.503722 +step:6011 train loss:3.496884 +step:6012 train loss:3.464416 +step:6013 train loss:3.519403 +step:6014 train loss:3.540873 +step:6015 train loss:3.539323 +step:6016 train loss:3.507539 +step:6017 train loss:3.517582 +step:6018 train loss:3.454458 +step:6019 train loss:3.491688 +step:6020 train loss:3.478595 +step:6021 train loss:3.407086 +step:6022 train loss:3.519724 +step:6023 train loss:3.453004 +step:6024 train loss:3.532092 +step:6025 train loss:3.497212 +step:6026 train loss:3.469580 +step:6027 train loss:3.508657 +step:6028 train loss:3.426780 +step:6029 train loss:3.542509 +step:6030 train loss:3.508701 +step:6031 train loss:3.483014 +step:6032 train loss:3.447942 +step:6033 train loss:3.498741 +step:6034 train loss:3.524916 +step:6035 train loss:3.440415 +step:6036 train loss:3.416488 +step:6037 train loss:3.530163 +step:6038 train loss:3.536513 +step:6039 train loss:3.517812 +step:6040 train loss:3.477970 +step:6041 train loss:3.453990 +step:6042 train loss:3.438306 +step:6043 train loss:3.498263 +step:6044 train loss:3.614332 +step:6045 train loss:3.459480 +step:6046 train loss:3.470915 +step:6047 train loss:3.503633 +step:6048 train loss:3.514625 +step:6049 train loss:3.492760 +step:6050 train loss:3.463268 +step:6051 train loss:3.511077 +step:6052 train loss:3.483793 +step:6053 train loss:3.605338 +step:6054 train loss:3.643277 +step:6055 train loss:3.456040 +step:6056 train loss:3.449209 +step:6057 train loss:3.483046 +step:6058 train loss:3.509123 +step:6059 train loss:3.512514 +step:6060 train loss:3.521530 +step:6061 train loss:3.532701 +step:6062 train loss:3.487337 +step:6063 train loss:3.499639 +step:6064 train loss:3.495834 +step:6065 train loss:3.499156 +step:6066 train loss:3.482074 +step:6067 train loss:3.524617 +step:6068 train loss:3.466736 +step:6069 train loss:3.423207 +step:6070 train loss:3.574178 +step:6071 train loss:3.513821 +step:6072 train 
loss:3.453738 +step:6073 train loss:3.497616 +step:6074 train loss:3.579134 +step:6075 train loss:3.499310 +step:6076 train loss:3.507813 +step:6077 train loss:3.508266 +step:6078 train loss:3.445120 +step:6079 train loss:3.476439 +step:6080 train loss:3.483111 +step:6081 train loss:3.518257 +step:6082 train loss:3.468343 +step:6083 train loss:3.481885 +step:6084 train loss:3.549650 +step:6085 train loss:3.542257 +step:6086 train loss:3.442818 +step:6087 train loss:3.489638 +step:6088 train loss:3.473995 +step:6089 train loss:3.531904 +step:6090 train loss:3.534265 +step:6091 train loss:3.483171 +step:6092 train loss:3.441940 +step:6093 train loss:3.505404 +step:6094 train loss:3.421323 +step:6095 train loss:3.586485 +step:6096 train loss:3.456701 +step:6097 train loss:3.533190 +step:6098 train loss:3.501216 +step:6099 train loss:3.569062 +step:6100 train loss:3.559154 +step:6101 train loss:3.493098 +step:6102 train loss:3.608733 +step:6103 train loss:3.492737 +step:6104 train loss:3.606861 +step:6105 train loss:3.539876 +step:6106 train loss:3.479730 +step:6107 train loss:3.542527 +step:6108 train loss:3.504622 +step:6109 train loss:3.574128 +step:6110 train loss:3.508280 +step:6111 train loss:3.542659 +step:6112 train loss:3.482345 +step:6113 train loss:3.508743 +step:6114 train loss:3.479702 +step:6115 train loss:3.537498 +step:6116 train loss:3.479688 +step:6117 train loss:3.537293 +step:6118 train loss:3.518947 +step:6119 train loss:3.527975 +step:6120 train loss:3.674548 +step:6121 train loss:3.508066 +step:6122 train loss:3.517319 +step:6123 train loss:3.501203 +step:6124 train loss:3.473886 +step:6125 train loss:3.467837 +step:6126 train loss:3.481156 +step:6127 train loss:3.472829 +step:6128 train loss:3.432422 +step:6129 train loss:3.674971 +step:6130 train loss:3.453181 +step:6131 train loss:3.435690 +step:6132 train loss:3.503912 +step:6133 train loss:3.471029 +step:6134 train loss:3.501932 +step:6135 train loss:3.583407 +step:6136 train loss:3.601122 +step:6137 train loss:3.464885 +step:6138 train loss:3.518999 +step:6139 train loss:3.498597 +step:6140 train loss:3.498565 +step:6141 train loss:3.459442 +step:6142 train loss:3.523551 +step:6143 train loss:3.489722 +step:6144 train loss:3.509165 +step:6145 train loss:3.763928 +step:6146 train loss:3.592389 +step:6147 train loss:3.680036 +step:6148 train loss:3.442603 +step:6149 train loss:3.573143 +step:6150 train loss:3.521705 +step:6151 train loss:3.477642 +step:6152 train loss:3.477072 +step:6153 train loss:3.543944 +step:6154 train loss:3.627328 +step:6155 train loss:3.494802 +step:6156 train loss:3.589589 +step:6157 train loss:3.519828 +step:6158 train loss:3.508877 +step:6159 train loss:3.475847 +step:6160 train loss:3.644234 +step:6161 train loss:3.495111 +step:6162 train loss:3.510496 +step:6163 train loss:3.543508 +step:6164 train loss:3.459036 +step:6165 train loss:3.524060 +step:6166 train loss:3.519988 +step:6167 train loss:3.537984 +step:6168 train loss:3.506916 +step:6169 train loss:3.504368 +step:6170 train loss:3.508623 +step:6171 train loss:3.478627 +step:6172 train loss:3.466481 +step:6173 train loss:3.517105 +step:6174 train loss:3.442919 +step:6175 train loss:3.456137 +step:6176 train loss:3.440090 +step:6177 train loss:3.531325 +step:6178 train loss:3.482695 +step:6179 train loss:3.488006 +step:6180 train loss:3.494695 +step:6181 train loss:3.531811 +step:6182 train loss:3.411665 +step:6183 train loss:3.422700 +step:6184 train loss:3.536038 +step:6185 train loss:3.491657 +step:6186 train loss:3.452893 
+step:6187 train loss:3.496467 +step:6188 train loss:3.464525 +step:6189 train loss:3.502723 +step:6190 train loss:3.461902 +step:6191 train loss:3.491994 +step:6192 train loss:3.464282 +step:6193 train loss:3.528826 +step:6194 train loss:3.521327 +step:6195 train loss:3.503693 +step:6196 train loss:3.517788 +step:6197 train loss:3.537976 +step:6198 train loss:3.451689 +step:6199 train loss:3.476471 +step:6200 train loss:3.518030 +step:6201 train loss:3.561001 +step:6202 train loss:3.563144 +step:6203 train loss:3.562449 +step:6204 train loss:3.547881 +step:6205 train loss:3.482155 +step:6206 train loss:3.469952 +step:6207 train loss:3.528346 +step:6208 train loss:3.556383 +step:6209 train loss:3.521450 +step:6210 train loss:3.553651 +step:6211 train loss:3.472432 +step:6212 train loss:3.464814 +step:6213 train loss:3.478834 +step:6214 train loss:3.455394 +step:6215 train loss:3.633756 +step:6216 train loss:3.499539 +step:6217 train loss:3.558121 +step:6218 train loss:3.535116 +step:6219 train loss:3.549625 +step:6220 train loss:3.502709 +step:6221 train loss:3.466404 +step:6222 train loss:3.711698 +step:6223 train loss:3.469962 +step:6224 train loss:3.499365 +step:6225 train loss:3.481800 +step:6226 train loss:3.492755 +step:6227 train loss:3.493375 +step:6228 train loss:3.491516 +step:6229 train loss:3.530233 +step:6230 train loss:3.485766 +step:6231 train loss:3.595960 +step:6232 train loss:3.442623 +step:6233 train loss:3.479066 +step:6234 train loss:3.487512 +step:6235 train loss:3.516079 +step:6236 train loss:3.451955 +step:6237 train loss:3.473679 +step:6238 train loss:3.502182 +step:6239 train loss:3.485044 +step:6240 train loss:3.507576 +step:6241 train loss:3.490639 +step:6242 train loss:3.489956 +step:6243 train loss:3.525356 +step:6244 train loss:3.680308 +step:6245 train loss:3.474725 +step:6246 train loss:3.466111 +step:6247 train loss:3.459745 +step:6248 train loss:3.462973 +step:6249 train loss:3.401649 +step:6250 validation loss:3.410855 +step:6250 train loss:3.437021 +step:6251 train loss:3.458487 +step:6252 train loss:3.497361 +step:6253 train loss:3.509115 +step:6254 train loss:3.500984 +step:6255 train loss:3.467569 +step:6256 train loss:3.517155 +step:6257 train loss:3.516858 +step:6258 train loss:3.496614 +step:6259 train loss:3.505438 +step:6260 train loss:3.531527 +step:6261 train loss:3.550595 +step:6262 train loss:3.445071 +step:6263 train loss:3.480252 +step:6264 train loss:3.489398 +step:6265 train loss:3.479469 +step:6266 train loss:3.685716 +step:6267 train loss:3.484302 +step:6268 train loss:3.568112 +step:6269 train loss:3.446339 +step:6270 train loss:3.459724 +step:6271 train loss:3.506072 +step:6272 train loss:3.495653 +step:6273 train loss:3.697662 +step:6274 train loss:3.476495 +step:6275 train loss:3.511616 +step:6276 train loss:3.479506 +step:6277 train loss:3.466096 +step:6278 train loss:3.449200 +step:6279 train loss:3.505584 +step:6280 train loss:3.508323 +step:6281 train loss:3.443136 +step:6282 train loss:3.455267 +step:6283 train loss:3.541799 +step:6284 train loss:3.510234 +step:6285 train loss:3.509152 +step:6286 train loss:3.454660 +step:6287 train loss:3.487084 +step:6288 train loss:3.584486 +step:6289 train loss:3.447311 +step:6290 train loss:3.446184 +step:6291 train loss:3.477020 +step:6292 train loss:3.496349 +step:6293 train loss:3.483110 +step:6294 train loss:3.470360 +step:6295 train loss:3.492684 +step:6296 train loss:3.455582 +step:6297 train loss:3.584603 +step:6298 train loss:3.530174 +step:6299 train loss:3.421767 +step:6300 
train loss:3.504553 +step:6301 train loss:3.528338 +step:6302 train loss:3.515037 +step:6303 train loss:3.481395 +step:6304 train loss:3.501756 +step:6305 train loss:3.469540 +step:6306 train loss:3.481165 +step:6307 train loss:3.490360 +step:6308 train loss:3.464904 +step:6309 train loss:3.466391 +step:6310 train loss:3.517278 +step:6311 train loss:3.473839 +step:6312 train loss:3.511221 +step:6313 train loss:3.441948 +step:6314 train loss:3.469459 +step:6315 train loss:3.521827 +step:6316 train loss:3.446264 +step:6317 train loss:3.432304 +step:6318 train loss:3.550145 +step:6319 train loss:3.481225 +step:6320 train loss:3.493752 +step:6321 train loss:3.481640 +step:6322 train loss:3.481785 +step:6323 train loss:3.414437 +step:6324 train loss:3.424107 +step:6325 train loss:3.521702 +step:6326 train loss:3.437980 +step:6327 train loss:3.516176 +step:6328 train loss:3.490873 +step:6329 train loss:3.414104 +step:6330 train loss:3.439287 +step:6331 train loss:3.459873 +step:6332 train loss:3.597359 +step:6333 train loss:3.468790 +step:6334 train loss:3.446929 +step:6335 train loss:3.419925 +step:6336 train loss:3.449225 +step:6337 train loss:3.477667 +step:6338 train loss:3.429191 +step:6339 train loss:3.474864 +step:6340 train loss:3.452586 +step:6341 train loss:3.469138 +step:6342 train loss:3.467016 +step:6343 train loss:3.566000 +step:6344 train loss:3.415720 +step:6345 train loss:3.434685 +step:6346 train loss:3.506703 +step:6347 train loss:3.389267 +step:6348 train loss:3.479314 +step:6349 train loss:3.456792 +step:6350 train loss:3.432696 +step:6351 train loss:3.429813 +step:6352 train loss:3.450052 +step:6353 train loss:3.463486 +step:6354 train loss:3.479612 +step:6355 train loss:3.489416 +step:6356 train loss:3.499680 +step:6357 train loss:3.360642 +step:6358 train loss:3.447307 +step:6359 train loss:3.502451 +step:6360 train loss:3.412616 +step:6361 train loss:3.411048 +step:6362 train loss:3.458991 +step:6363 train loss:3.439324 +step:6364 train loss:3.418891 +step:6365 train loss:3.496039 +step:6366 train loss:3.503791 +step:6367 train loss:3.439071 +step:6368 train loss:3.475726 +step:6369 train loss:3.444772 +step:6370 train loss:3.494790 +step:6371 train loss:3.409997 +step:6372 train loss:3.442479 +step:6373 train loss:3.464920 +step:6374 train loss:3.496970 +step:6375 train loss:3.456761 +step:6376 train loss:3.478369 +step:6377 train loss:3.481391 +step:6378 train loss:3.425333 +step:6379 train loss:3.468623 +step:6380 train loss:3.513811 +step:6381 train loss:3.475525 +step:6382 train loss:3.435106 +step:6383 train loss:3.491883 +step:6384 train loss:3.473180 +step:6385 train loss:3.445628 +step:6386 train loss:3.480054 +step:6387 train loss:3.462899 +step:6388 train loss:3.499055 +step:6389 train loss:3.509564 +step:6390 train loss:3.457715 +step:6391 train loss:3.449344 +step:6392 train loss:3.433243 +step:6393 train loss:3.486402 +step:6394 train loss:3.477660 +step:6395 train loss:3.656594 +step:6396 train loss:3.474875 +step:6397 train loss:3.419567 +step:6398 train loss:3.490365 +step:6399 train loss:3.431945 +step:6400 train loss:3.503010 +step:6401 train loss:3.543096 +step:6402 train loss:3.469858 +step:6403 train loss:3.466865 +step:6404 train loss:3.444257 +step:6405 train loss:3.468853 +step:6406 train loss:3.477857 +step:6407 train loss:3.534521 +step:6408 train loss:3.427334 +step:6409 train loss:3.410498 +step:6410 train loss:3.547947 +step:6411 train loss:3.471057 +step:6412 train loss:3.477280 +step:6413 train loss:3.481294 +step:6414 train loss:3.429205 
+step:6415 train loss:3.495503 +step:6416 train loss:3.460634 +step:6417 train loss:3.432674 +step:6418 train loss:3.421203 +step:6419 train loss:3.503903 +step:6420 train loss:3.434392 +step:6421 train loss:3.459565 +step:6422 train loss:3.453045 +step:6423 train loss:3.457546 +step:6424 train loss:3.481359 +step:6425 train loss:3.477344 +step:6426 train loss:3.518771 +step:6427 train loss:3.483242 +step:6428 train loss:3.514871 +step:6429 train loss:3.484206 +step:6430 train loss:3.456753 +step:6431 train loss:3.439470 +step:6432 train loss:3.468031 +step:6433 train loss:3.479347 +step:6434 train loss:3.362266 +step:6435 train loss:3.545511 +step:6436 train loss:3.475952 +step:6437 train loss:3.438048 +step:6438 train loss:3.469941 +step:6439 train loss:3.438221 +step:6440 train loss:3.457490 +step:6441 train loss:3.450650 +step:6442 train loss:3.392457 +step:6443 train loss:3.450257 +step:6444 train loss:3.585207 +step:6445 train loss:3.492418 +step:6446 train loss:3.494072 +step:6447 train loss:3.476787 +step:6448 train loss:3.422021 +step:6449 train loss:3.445657 +step:6450 train loss:3.430896 +step:6451 train loss:3.420812 +step:6452 train loss:3.423360 +step:6453 train loss:3.462862 +step:6454 train loss:3.488829 +step:6455 train loss:3.477510 +step:6456 train loss:3.498899 +step:6457 train loss:3.474537 +step:6458 train loss:3.447128 +step:6459 train loss:3.428621 +step:6460 train loss:3.433426 +step:6461 train loss:3.439277 +step:6462 train loss:3.430857 +step:6463 train loss:3.526995 +step:6464 train loss:3.437003 +step:6465 train loss:3.474577 +step:6466 train loss:3.493885 +step:6467 train loss:3.416024 +step:6468 train loss:3.496758 +step:6469 train loss:3.402945 +step:6470 train loss:3.528267 +step:6471 train loss:3.434249 +step:6472 train loss:3.592623 +step:6473 train loss:3.475168 +step:6474 train loss:3.510028 +step:6475 train loss:3.453971 +step:6476 train loss:3.523095 +step:6477 train loss:3.459587 +step:6478 train loss:3.584680 +step:6479 train loss:3.500491 +step:6480 train loss:3.434705 +step:6481 train loss:3.491822 +step:6482 train loss:3.433026 +step:6483 train loss:3.491388 +step:6484 train loss:3.449976 +step:6485 train loss:3.512326 +step:6486 train loss:3.444534 +step:6487 train loss:3.437960 +step:6488 train loss:3.440072 +step:6489 train loss:3.437510 +step:6490 train loss:3.468321 +step:6491 train loss:3.439334 +step:6492 train loss:3.539003 +step:6493 train loss:3.444291 +step:6494 train loss:3.445071 +step:6495 train loss:3.447660 +step:6496 train loss:3.473976 +step:6497 train loss:3.494906 +step:6498 train loss:3.602816 +step:6499 train loss:3.577965 +step:6500 validation loss:3.402469 total_sharp:2.1606e-03 L1_sharp:3.1578e-03 L2_sharp:3.8548e-04 L3_sharp:7.3270e-04 L4_sharp:2.5486e-04 L5_sharp:3.5971e-04 L6_sharp:6.4978e-04 L7_sharp:7.9248e-04 L8_sharp:5.9817e-04 L9_sharp:4.6580e-04 L10_sharp:2.2595e-04 L11_sharp:1.8134e-04 L12_sharp:2.9687e-04 total_fnorm:2.4326e+00 total_l1_linf:2.0822e+04 total_spectral:2.4326e+00 L1_fnorm:5.6686e-01 L2_fnorm:5.2698e-01 L3_fnorm:5.5868e-01 L4_fnorm:5.8893e-01 L5_fnorm:5.9643e-01 L6_fnorm:5.9924e-01 L7_fnorm:5.9644e-01 L8_fnorm:5.9456e-01 L9_fnorm:5.9729e-01 L10_fnorm:6.0228e-01 L11_fnorm:5.9976e-01 L12_fnorm:6.0155e-01 L1_l1linf:4.1213e-01 L2_l1linf:4.6368e-01 L3_l1linf:4.6085e-01 L4_l1linf:4.2950e-01 L5_l1linf:4.1980e-01 L6_l1linf:4.1736e-01 L7_l1linf:4.1056e-01 L8_l1linf:4.0931e-01 L9_l1linf:4.1003e-01 L10_l1linf:4.1534e-01 L11_l1linf:4.3086e-01 L12_l1linf:4.1994e-01 L1_spectral:1.2048e-02 L2_spectral:1.2041e-02 
L3_spectral:1.2048e-02 L4_spectral:1.2050e-02 L5_spectral:1.2064e-02 L6_spectral:1.2045e-02 L7_spectral:1.2045e-02 L8_spectral:1.2045e-02 L9_spectral:1.2045e-02 L10_spectral:1.2042e-02 L11_spectral:1.2042e-02 L12_spectral:1.2046e-02 ip_v_neg_g:3.6182e-03 cos_v_neg_g:1.3909e-03 v_norm:2.4326e+00 g_norm:1.0693e+00 hv_norm:3.8669e-01 cos_v_hv:1.3592e-02 hg_norm:1.8915e+01 cos_g_hg:5.9432e-01 v_par:5.5795e-04 v_perp:2.4326e+00 L1_cos_v_neg_g:2.0498e-03 L1_v_norm:5.6686e-01 L2_cos_v_neg_g:-1.9631e-04 L2_v_norm:5.2698e-01 L3_cos_v_neg_g:2.5715e-03 L3_v_norm:5.5868e-01 L4_cos_v_neg_g:8.3179e-04 L4_v_norm:5.8893e-01 L5_cos_v_neg_g:1.2416e-03 L5_v_norm:5.9643e-01 L6_cos_v_neg_g:2.1470e-03 L6_v_norm:5.9924e-01 L7_cos_v_neg_g:2.4188e-03 L7_v_norm:5.9644e-01 L8_cos_v_neg_g:3.4128e-03 L8_v_norm:5.9456e-01 L9_cos_v_neg_g:3.5723e-03 L9_v_norm:5.9729e-01 L10_cos_v_neg_g:2.9325e-03 L10_v_norm:6.0228e-01 L11_cos_v_neg_g:2.3137e-03 L11_v_norm:5.9976e-01 L12_cos_v_neg_g:1.5427e-03 L12_v_norm:6.0155e-01 +step:6500 train loss:3.419024 +step:6501 train loss:3.436975 +step:6502 train loss:3.458282 +step:6503 train loss:3.516791 +step:6504 train loss:3.463775 +step:6505 train loss:3.471067 +step:6506 train loss:3.433424 +step:6507 train loss:3.498640 +step:6508 train loss:3.470253 +step:6509 train loss:3.449377 +step:6510 train loss:3.461819 +step:6511 train loss:3.471860 +step:6512 train loss:3.418202 +step:6513 train loss:3.485465 +step:6514 train loss:3.359265 +step:6515 train loss:3.449831 +step:6516 train loss:3.499871 +step:6517 train loss:3.413929 +step:6518 train loss:3.455848 +step:6519 train loss:3.445724 +step:6520 train loss:3.530121 +step:6521 train loss:3.508399 +step:6522 train loss:3.517005 +step:6523 train loss:3.415650 +step:6524 train loss:3.497381 +step:6525 train loss:3.487432 +step:6526 train loss:3.417892 +step:6527 train loss:3.476835 +step:6528 train loss:3.498290 +step:6529 train loss:3.524665 +step:6530 train loss:3.425972 +step:6531 train loss:3.505888 +step:6532 train loss:3.436258 +step:6533 train loss:3.472566 +step:6534 train loss:3.479534 +step:6535 train loss:3.455063 +step:6536 train loss:3.595009 +step:6537 train loss:3.400080 +step:6538 train loss:3.507217 +step:6539 train loss:3.429209 +step:6540 train loss:3.541248 +step:6541 train loss:3.524501 +step:6542 train loss:3.482034 +step:6543 train loss:3.432542 +step:6544 train loss:3.415413 +step:6545 train loss:3.407727 +step:6546 train loss:3.462764 +step:6547 train loss:3.524758 +step:6548 train loss:3.461637 +step:6549 train loss:3.479738 +step:6550 train loss:3.589273 +step:6551 train loss:3.474155 +step:6552 train loss:3.462715 +step:6553 train loss:3.504832 +step:6554 train loss:3.393311 +step:6555 train loss:3.482287 +step:6556 train loss:3.349983 +step:6557 train loss:3.702457 +step:6558 train loss:3.532236 +step:6559 train loss:3.441166 +step:6560 train loss:3.482684 +step:6561 train loss:3.450468 +step:6562 train loss:3.476107 +step:6563 train loss:3.365078 +step:6564 train loss:3.471584 +step:6565 train loss:3.376926 +step:6566 train loss:3.491338 +step:6567 train loss:3.454947 +step:6568 train loss:3.503965 +step:6569 train loss:3.451078 +step:6570 train loss:3.491206 +step:6571 train loss:3.417709 +step:6572 train loss:3.493471 +step:6573 train loss:3.509899 +step:6574 train loss:3.493558 +step:6575 train loss:3.440703 +step:6576 train loss:3.426872 +step:6577 train loss:3.498912 +step:6578 train loss:3.365403 +step:6579 train loss:3.470582 +step:6580 train loss:3.421773 +step:6581 train loss:3.437831 +step:6582 
train loss:3.412421 +step:6583 train loss:3.519030 +step:6584 train loss:3.447537 +step:6585 train loss:3.484699 +step:6586 train loss:3.491783 +step:6587 train loss:3.498855 +step:6588 train loss:3.467174 +step:6589 train loss:3.492647 +step:6590 train loss:3.432393 +step:6591 train loss:3.483388 +step:6592 train loss:3.425092 +step:6593 train loss:3.435894 +step:6594 train loss:3.461123 +step:6595 train loss:3.442482 +step:6596 train loss:3.441800 +step:6597 train loss:3.463413 +step:6598 train loss:3.507632 +step:6599 train loss:3.394031 +step:6600 train loss:3.457754 +step:6601 train loss:3.513313 +step:6602 train loss:3.437493 +step:6603 train loss:3.464587 +step:6604 train loss:3.476793 +step:6605 train loss:3.456537 +step:6606 train loss:3.514691 +step:6607 train loss:3.434547 +step:6608 train loss:3.449579 +step:6609 train loss:3.420796 +step:6610 train loss:3.531741 +step:6611 train loss:3.452819 +step:6612 train loss:3.499365 +step:6613 train loss:3.411763 +step:6614 train loss:3.440997 +step:6615 train loss:3.442147 +step:6616 train loss:3.426414 +step:6617 train loss:3.461779 +step:6618 train loss:3.452754 +step:6619 train loss:3.425891 +step:6620 train loss:3.528746 +step:6621 train loss:3.402233 +step:6622 train loss:3.477711 +step:6623 train loss:3.408261 +step:6624 train loss:3.482228 +step:6625 train loss:3.525973 +step:6626 train loss:3.490226 +step:6627 train loss:3.436563 +step:6628 train loss:3.496878 +step:6629 train loss:3.396248 +step:6630 train loss:3.434108 +step:6631 train loss:3.469884 +step:6632 train loss:3.510861 +step:6633 train loss:3.461695 +step:6634 train loss:3.523320 +step:6635 train loss:3.422734 +step:6636 train loss:3.464426 +step:6637 train loss:3.432272 +step:6638 train loss:3.432622 +step:6639 train loss:3.443507 +step:6640 train loss:3.434653 +step:6641 train loss:3.446658 +step:6642 train loss:3.445471 +step:6643 train loss:3.529851 +step:6644 train loss:3.528566 +step:6645 train loss:3.402171 +step:6646 train loss:3.494681 +step:6647 train loss:3.449005 +step:6648 train loss:3.555006 +step:6649 train loss:3.482421 +step:6650 train loss:3.427783 +step:6651 train loss:3.479460 +step:6652 train loss:3.491278 +step:6653 train loss:3.438890 +step:6654 train loss:3.429169 +step:6655 train loss:3.473786 +step:6656 train loss:3.440817 +step:6657 train loss:3.469913 +step:6658 train loss:3.449267 +step:6659 train loss:3.604513 +step:6660 train loss:3.505644 +step:6661 train loss:3.428967 +step:6662 train loss:3.462914 +step:6663 train loss:3.393407 +step:6664 train loss:3.471845 +step:6665 train loss:3.483701 +step:6666 train loss:3.499347 +step:6667 train loss:3.412335 +step:6668 train loss:3.538907 +step:6669 train loss:3.424857 +step:6670 train loss:3.432705 +step:6671 train loss:3.517349 +step:6672 train loss:3.463722 +step:6673 train loss:3.477684 +step:6674 train loss:3.451053 +step:6675 train loss:3.469392 +step:6676 train loss:3.477045 +step:6677 train loss:3.433723 +step:6678 train loss:3.503681 +step:6679 train loss:3.541519 +step:6680 train loss:3.541729 +step:6681 train loss:3.494973 +step:6682 train loss:3.434932 +step:6683 train loss:3.456070 +step:6684 train loss:3.472009 +step:6685 train loss:3.484037 +step:6686 train loss:3.418626 +step:6687 train loss:3.435281 +step:6688 train loss:3.481252 +step:6689 train loss:3.489297 +step:6690 train loss:3.461994 +step:6691 train loss:3.499084 +step:6692 train loss:3.505678 +step:6693 train loss:3.537035 +step:6694 train loss:3.491584 +step:6695 train loss:3.465632 +step:6696 train loss:3.400333 
+step:6697 train loss:3.617284 +step:6698 train loss:3.460724 +step:6699 train loss:3.457311 +step:6700 train loss:3.468693 +step:6701 train loss:3.529316 +step:6702 train loss:3.417027 +step:6703 train loss:3.465971 +step:6704 train loss:3.449226 +step:6705 train loss:3.463480 +step:6706 train loss:3.438728 +step:6707 train loss:3.515634 +step:6708 train loss:3.465774 +step:6709 train loss:3.495219 +step:6710 train loss:3.485524 +step:6711 train loss:3.437369 +step:6712 train loss:3.423951 +step:6713 train loss:3.451718 +step:6714 train loss:3.493282 +step:6715 train loss:3.434110 +step:6716 train loss:3.515682 +step:6717 train loss:3.454841 +step:6718 train loss:3.480100 +step:6719 train loss:3.511431 +step:6720 train loss:3.442120 +step:6721 train loss:3.459842 +step:6722 train loss:3.439456 +step:6723 train loss:3.564136 +step:6724 train loss:3.423522 +step:6725 train loss:3.482713 +step:6726 train loss:3.436971 +step:6727 train loss:3.504392 +step:6728 train loss:3.598490 +step:6729 train loss:3.462411 +step:6730 train loss:3.454411 +step:6731 train loss:3.497677 +step:6732 train loss:3.375075 +step:6733 train loss:3.510517 +step:6734 train loss:3.441880 +step:6735 train loss:3.466102 +step:6736 train loss:3.466866 +step:6737 train loss:3.463471 +step:6738 train loss:3.491802 +step:6739 train loss:3.451696 +step:6740 train loss:3.399359 +step:6741 train loss:3.513081 +step:6742 train loss:3.471249 +step:6743 train loss:3.477677 +step:6744 train loss:3.365952 +step:6745 train loss:3.524509 +step:6746 train loss:3.449323 +step:6747 train loss:3.447205 +step:6748 train loss:3.517483 +step:6749 train loss:3.502889 +step:6750 validation loss:3.397959 +step:6750 train loss:3.418187 +step:6751 train loss:3.455498 +step:6752 train loss:3.455058 +step:6753 train loss:3.493204 +step:6754 train loss:3.472659 +step:6755 train loss:3.485573 +step:6756 train loss:3.424880 +step:6757 train loss:3.394656 +step:6758 train loss:3.568343 +step:6759 train loss:3.462812 +step:6760 train loss:3.519062 +step:6761 train loss:3.451033 +step:6762 train loss:3.469561 +step:6763 train loss:3.372799 +step:6764 train loss:3.451928 +step:6765 train loss:3.459144 +step:6766 train loss:3.453995 +step:6767 train loss:3.406502 +step:6768 train loss:3.411376 +step:6769 train loss:3.374006 +step:6770 train loss:3.457596 +step:6771 train loss:3.460541 +step:6772 train loss:3.472319 +step:6773 train loss:3.449891 +step:6774 train loss:3.463718 +step:6775 train loss:3.505024 +step:6776 train loss:3.462698 +step:6777 train loss:3.537332 +step:6778 train loss:3.435410 +step:6779 train loss:3.475799 +step:6780 train loss:3.407803 +step:6781 train loss:3.470208 +step:6782 train loss:3.387005 +step:6783 train loss:3.417415 +step:6784 train loss:3.446953 +step:6785 train loss:3.430929 +step:6786 train loss:3.448421 +step:6787 train loss:3.521672 +step:6788 train loss:3.462689 +step:6789 train loss:3.468780 +step:6790 train loss:3.468644 +step:6791 train loss:3.480753 +step:6792 train loss:3.481237 +step:6793 train loss:3.477437 +step:6794 train loss:3.448233 +step:6795 train loss:3.446222 +step:6796 train loss:3.455233 +step:6797 train loss:3.547983 +step:6798 train loss:3.452744 +step:6799 train loss:3.442035 +step:6800 train loss:3.410016 +step:6801 train loss:3.544230 +step:6802 train loss:3.495006 +step:6803 train loss:3.485646 +step:6804 train loss:3.510617 +step:6805 train loss:3.474863 +step:6806 train loss:3.403968 +step:6807 train loss:3.464611 +step:6808 train loss:3.448103 +step:6809 train loss:3.474605 +step:6810 
train loss:3.598341 +step:6811 train loss:3.503032 +step:6812 train loss:3.470055 +step:6813 train loss:3.486969 +step:6814 train loss:3.494241 +step:6815 train loss:3.540563 +step:6816 train loss:3.453016 +step:6817 train loss:3.484378 +step:6818 train loss:3.456025 +step:6819 train loss:3.442325 +step:6820 train loss:3.471331 +step:6821 train loss:3.433728 +step:6822 train loss:3.535625 +step:6823 train loss:3.518971 +step:6824 train loss:3.493542 +step:6825 train loss:3.443989 +step:6826 train loss:3.485307 +step:6827 train loss:3.476253 +step:6828 train loss:3.487868 +step:6829 train loss:3.475479 +step:6830 train loss:3.441248 +step:6831 train loss:3.404522 +step:6832 train loss:3.391810 +step:6833 train loss:3.403901 +step:6834 train loss:3.494396 +step:6835 train loss:3.464500 +step:6836 train loss:3.382455 +step:6837 train loss:3.449583 +step:6838 train loss:3.512612 +step:6839 train loss:3.591656 +step:6840 train loss:3.468970 +step:6841 train loss:3.419289 +step:6842 train loss:3.471592 +step:6843 train loss:3.577446 +step:6844 train loss:3.457767 +step:6845 train loss:3.507635 +step:6846 train loss:3.570804 +step:6847 train loss:3.505824 +step:6848 train loss:3.489798 +step:6849 train loss:3.516787 +step:6850 train loss:3.487206 +step:6851 train loss:3.417961 +step:6852 train loss:3.411769 +step:6853 train loss:3.401963 +step:6854 train loss:3.477486 +step:6855 train loss:3.448575 +step:6856 train loss:3.431146 +step:6857 train loss:3.485768 +step:6858 train loss:3.516981 +step:6859 train loss:3.423541 +step:6860 train loss:3.534301 +step:6861 train loss:3.562209 +step:6862 train loss:3.470317 +step:6863 train loss:3.464102 +step:6864 train loss:3.413723 +step:6865 train loss:3.481186 +step:6866 train loss:3.410838 +step:6867 train loss:3.589756 +step:6868 train loss:3.464755 +step:6869 train loss:3.494898 +step:6870 train loss:3.532498 +step:6871 train loss:3.450850 +step:6872 train loss:3.442715 +step:6873 train loss:3.463579 +step:6874 train loss:3.423360 +step:6875 train loss:3.425071 +step:6876 train loss:3.456178 +step:6877 train loss:3.500012 +step:6878 train loss:3.414136 +step:6879 train loss:3.458685 +step:6880 train loss:3.466900 +step:6881 train loss:3.429751 +step:6882 train loss:3.498616 +step:6883 train loss:3.480236 +step:6884 train loss:3.708464 +step:6885 train loss:3.478729 +step:6886 train loss:3.458335 +step:6887 train loss:3.400136 +step:6888 train loss:3.499804 +step:6889 train loss:3.382113 +step:6890 train loss:3.492801 +step:6891 train loss:3.502862 +step:6892 train loss:3.601114 +step:6893 train loss:3.434097 +step:6894 train loss:3.494208 +step:6895 train loss:3.491198 +step:6896 train loss:3.470420 +step:6897 train loss:3.424662 +step:6898 train loss:3.425897 +step:6899 train loss:3.515177 +step:6900 train loss:3.487600 +step:6901 train loss:3.438164 +step:6902 train loss:3.368258 +step:6903 train loss:3.412053 +step:6904 train loss:3.523027 +step:6905 train loss:3.562807 +step:6906 train loss:3.476685 +step:6907 train loss:3.497724 +step:6908 train loss:3.528095 +step:6909 train loss:3.525916 +step:6910 train loss:3.400787 +step:6911 train loss:3.531465 +step:6912 train loss:3.422756 +step:6913 train loss:3.461637 +step:6914 train loss:3.414770 +step:6915 train loss:3.445846 +step:6916 train loss:3.420353 +step:6917 train loss:3.544373 +step:6918 train loss:3.490262 +step:6919 train loss:3.484853 +step:6920 train loss:3.468718 +step:6921 train loss:3.534997 +step:6922 train loss:3.524233 +step:6923 train loss:3.390602 +step:6924 train loss:3.471834 
+step:6925 train loss:3.442694 +step:6926 train loss:3.484808 +step:6927 train loss:3.536438 +step:6928 train loss:3.422814 +step:6929 train loss:3.435024 +step:6930 train loss:3.469011 +step:6931 train loss:3.467702 +step:6932 train loss:3.714459 +step:6933 train loss:3.532453 +step:6934 train loss:3.472584 +step:6935 train loss:3.457628 +step:6936 train loss:3.496419 +step:6937 train loss:3.445543 +step:6938 train loss:3.503670 +step:6939 train loss:3.440830 +step:6940 train loss:3.495944 +step:6941 train loss:3.412010 +step:6942 train loss:3.498331 +step:6943 train loss:3.392813 +step:6944 train loss:3.482844 +step:6945 train loss:3.427708 +step:6946 train loss:3.513082 +step:6947 train loss:3.439853 +step:6948 train loss:3.433297 +step:6949 train loss:3.506630 +step:6950 train loss:3.499854 +step:6951 train loss:3.499214 +step:6952 train loss:3.432682 +step:6953 train loss:3.477709 +step:6954 train loss:3.540714 +step:6955 train loss:3.457219 +step:6956 train loss:3.493576 +step:6957 train loss:3.481932 +step:6958 train loss:3.442385 +step:6959 train loss:3.483515 +step:6960 train loss:3.446167 +step:6961 train loss:3.456868 +step:6962 train loss:3.435949 +step:6963 train loss:3.409194 +step:6964 train loss:3.447401 +step:6965 train loss:3.444352 +step:6966 train loss:3.483047 +step:6967 train loss:3.423478 +step:6968 train loss:3.462651 +step:6969 train loss:3.485410 +step:6970 train loss:3.458361 +step:6971 train loss:3.525558 +step:6972 train loss:3.471099 +step:6973 train loss:3.426616 +step:6974 train loss:3.557509 +step:6975 train loss:3.457948 +step:6976 train loss:3.434715 +step:6977 train loss:3.469704 +step:6978 train loss:3.461672 +step:6979 train loss:3.472430 +step:6980 train loss:3.449516 +step:6981 train loss:3.512440 +step:6982 train loss:3.462994 +step:6983 train loss:3.455314 +step:6984 train loss:3.572077 +step:6985 train loss:3.413015 +step:6986 train loss:3.408168 +step:6987 train loss:3.457560 +step:6988 train loss:3.463280 +step:6989 train loss:3.607312 +step:6990 train loss:3.471097 +step:6991 train loss:3.425822 +step:6992 train loss:3.474486 +step:6993 train loss:3.543541 +step:6994 train loss:3.487905 +step:6995 train loss:3.438554 +step:6996 train loss:3.444699 +step:6997 train loss:3.519569 +step:6998 train loss:3.420173 +step:6999 train loss:3.472718 +step:7000 validation loss:3.393314 total_sharp:1.8228e-03 L1_sharp:3.6683e-03 L2_sharp:8.8251e-04 L3_sharp:8.5642e-04 L4_sharp:2.3082e-04 L5_sharp:3.2010e-04 L6_sharp:5.4776e-04 L7_sharp:6.2002e-04 L8_sharp:3.9271e-04 L9_sharp:3.1442e-04 L10_sharp:1.9067e-04 L11_sharp:1.5525e-04 L12_sharp:2.1914e-04 total_fnorm:2.4314e+00 total_l1_linf:2.0816e+04 total_spectral:2.4314e+00 L1_fnorm:5.7021e-01 L2_fnorm:5.2455e-01 L3_fnorm:5.5615e-01 L4_fnorm:5.8926e-01 L5_fnorm:5.9543e-01 L6_fnorm:5.9758e-01 L7_fnorm:5.9617e-01 L8_fnorm:5.9570e-01 L9_fnorm:5.9854e-01 L10_fnorm:6.0310e-01 L11_fnorm:5.9979e-01 L12_fnorm:6.0182e-01 L1_l1linf:4.1463e-01 L2_l1linf:4.7892e-01 L3_l1linf:4.5103e-01 L4_l1linf:4.4057e-01 L5_l1linf:4.2953e-01 L6_l1linf:4.1549e-01 L7_l1linf:4.1189e-01 L8_l1linf:4.1025e-01 L9_l1linf:4.0898e-01 L10_l1linf:4.1384e-01 L11_l1linf:4.3175e-01 L12_l1linf:4.4166e-01 L1_spectral:1.2054e-02 L2_spectral:1.2038e-02 L3_spectral:1.2049e-02 L4_spectral:1.2050e-02 L5_spectral:1.2064e-02 L6_spectral:1.2043e-02 L7_spectral:1.2046e-02 L8_spectral:1.2042e-02 L9_spectral:1.2048e-02 L10_spectral:1.2046e-02 L11_spectral:1.2044e-02 L12_spectral:1.2045e-02 ip_v_neg_g:3.9054e-03 cos_v_neg_g:1.4016e-03 v_norm:2.4314e+00 
g_norm:1.1460e+00 hv_norm:3.7847e-01 cos_v_hv:1.1710e-02 hg_norm:3.3539e+01 cos_g_hg:5.3418e-01 v_par:3.1215e-04 v_perp:2.4314e+00 L1_cos_v_neg_g:1.5890e-03 L1_v_norm:5.7021e-01 L2_cos_v_neg_g:5.2319e-03 L2_v_norm:5.2455e-01 L3_cos_v_neg_g:2.5457e-03 L3_v_norm:5.5615e-01 L4_cos_v_neg_g:4.0626e-04 L4_v_norm:5.8926e-01 L5_cos_v_neg_g:1.4544e-03 L5_v_norm:5.9543e-01 L6_cos_v_neg_g:2.0155e-03 L6_v_norm:5.9758e-01 L7_cos_v_neg_g:3.2859e-03 L7_v_norm:5.9617e-01 L8_cos_v_neg_g:3.0910e-03 L8_v_norm:5.9570e-01 L9_cos_v_neg_g:2.4058e-03 L9_v_norm:5.9854e-01 L10_cos_v_neg_g:1.5647e-03 L10_v_norm:6.0310e-01 L11_cos_v_neg_g:1.9387e-03 L11_v_norm:5.9979e-01 L12_cos_v_neg_g:2.9116e-03 L12_v_norm:6.0182e-01 +step:7000 train loss:3.544952 +step:7001 train loss:3.453454 +step:7002 train loss:3.440459 +step:7003 train loss:3.467838 +step:7004 train loss:3.461440 +step:7005 train loss:3.444823 +step:7006 train loss:3.452955 +step:7007 train loss:3.502766 +step:7008 train loss:3.444430 +step:7009 train loss:3.484923 +step:7010 train loss:3.420514 +step:7011 train loss:3.474632 +step:7012 train loss:3.443971 +step:7013 train loss:3.525241 +step:7014 train loss:3.428250 +step:7015 train loss:3.490471 +step:7016 train loss:3.476290 +step:7017 train loss:3.443246 +step:7018 train loss:3.525084 +step:7019 train loss:3.445995 +step:7020 train loss:3.498277 +step:7021 train loss:3.437224 +step:7022 train loss:3.456169 +step:7023 train loss:3.469905 +step:7024 train loss:3.435836 +step:7025 train loss:3.483977 +step:7026 train loss:3.441307 +step:7027 train loss:3.503181 +step:7028 train loss:3.430272 +step:7029 train loss:3.420818 +step:7030 train loss:3.419607 +step:7031 train loss:3.475904 +step:7032 train loss:3.481112 +step:7033 train loss:3.456953 +step:7034 train loss:3.478813 +step:7035 train loss:3.527439 +step:7036 train loss:3.449224 +step:7037 train loss:3.474000 +step:7038 train loss:3.438124 +step:7039 train loss:3.492274 +step:7040 train loss:3.410752 +step:7041 train loss:3.502959 +step:7042 train loss:3.431496 +step:7043 train loss:3.407817 +step:7044 train loss:3.450695 +step:7045 train loss:3.453258 +step:7046 train loss:3.448145 +step:7047 train loss:3.481780 +step:7048 train loss:3.432295 +step:7049 train loss:3.443235 +step:7050 train loss:3.466828 +step:7051 train loss:3.483333 +step:7052 train loss:3.485724 +step:7053 train loss:3.444800 +step:7054 train loss:3.423052 +step:7055 train loss:3.493900 +step:7056 train loss:3.488762 +step:7057 train loss:3.417651 +step:7058 train loss:3.537292 +step:7059 train loss:3.445847 +step:7060 train loss:3.453432 +step:7061 train loss:3.426337 +step:7062 train loss:3.453150 +step:7063 train loss:3.510495 +step:7064 train loss:3.432332 +step:7065 train loss:3.485318 +step:7066 train loss:3.443478 +step:7067 train loss:3.480876 +step:7068 train loss:3.453714 +step:7069 train loss:3.419365 +step:7070 train loss:3.441421 +step:7071 train loss:3.414674 +step:7072 train loss:3.415274 +step:7073 train loss:3.410336 +step:7074 train loss:3.409358 +step:7075 train loss:3.423616 +step:7076 train loss:3.437348 +step:7077 train loss:3.444835 +step:7078 train loss:3.491376 +step:7079 train loss:3.500611 +step:7080 train loss:3.451799 +step:7081 train loss:3.468203 +step:7082 train loss:3.433308 +step:7083 train loss:3.465081 +step:7084 train loss:3.458247 +step:7085 train loss:3.420257 +step:7086 train loss:3.455922 +step:7087 train loss:3.433848 +step:7088 train loss:3.558469 +step:7089 train loss:3.448197 +step:7090 train loss:3.415520 +step:7091 train loss:3.428130 
+step:7092 train loss:3.406591 +step:7093 train loss:3.501050 +step:7094 train loss:3.422168 +step:7095 train loss:3.441404 +step:7096 train loss:3.454893 +step:7097 train loss:3.445352 +step:7098 train loss:3.469441 +step:7099 train loss:3.422466 +step:7100 train loss:3.457726 +step:7101 train loss:3.523928 +step:7102 train loss:3.417267 +step:7103 train loss:3.443010 +step:7104 train loss:3.474499 +step:7105 train loss:3.452836 +step:7106 train loss:3.436854 +step:7107 train loss:3.473427 +step:7108 train loss:3.541021 +step:7109 train loss:3.470770 +step:7110 train loss:3.497216 +step:7111 train loss:3.475152 +step:7112 train loss:3.469955 +step:7113 train loss:3.460533 +step:7114 train loss:3.476678 +step:7115 train loss:3.516183 +step:7116 train loss:3.444672 +step:7117 train loss:3.485802 +step:7118 train loss:3.495718 +step:7119 train loss:3.458599 +step:7120 train loss:3.510549 +step:7121 train loss:3.431482 +step:7122 train loss:3.431128 +step:7123 train loss:3.372192 +step:7124 train loss:3.529587 +step:7125 train loss:3.381008 +step:7126 train loss:3.549263 +step:7127 train loss:3.504021 +step:7128 train loss:3.458165 +step:7129 train loss:3.458275 +step:7130 train loss:3.448066 +step:7131 train loss:3.390003 +step:7132 train loss:3.428843 +step:7133 train loss:3.478093 +step:7134 train loss:3.410698 +step:7135 train loss:3.465666 +step:7136 train loss:3.445128 +step:7137 train loss:3.425565 +step:7138 train loss:3.412771 +step:7139 train loss:3.417115 +step:7140 train loss:3.451137 +step:7141 train loss:3.447086 +step:7142 train loss:3.445967 +step:7143 train loss:3.481868 +step:7144 train loss:3.430455 +step:7145 train loss:3.448121 +step:7146 train loss:3.457958 +step:7147 train loss:3.481293 +step:7148 train loss:3.481525 +step:7149 train loss:3.491762 +step:7150 train loss:3.460793 +step:7151 train loss:3.429888 +step:7152 train loss:3.400106 +step:7153 train loss:3.435532 +step:7154 train loss:3.455738 +step:7155 train loss:3.466085 +step:7156 train loss:3.439787 +step:7157 train loss:3.456834 +step:7158 train loss:3.415162 +step:7159 train loss:3.471274 +step:7160 train loss:3.481973 +step:7161 train loss:3.431412 +step:7162 train loss:3.477066 +step:7163 train loss:3.413827 +step:7164 train loss:3.449504 +step:7165 train loss:3.453856 +step:7166 train loss:3.512462 +step:7167 train loss:3.488742 +step:7168 train loss:3.467380 +step:7169 train loss:3.446173 +step:7170 train loss:3.471704 +step:7171 train loss:3.423943 +step:7172 train loss:3.586153 +step:7173 train loss:3.429598 +step:7174 train loss:3.474378 +step:7175 train loss:3.446154 +step:7176 train loss:3.456618 +step:7177 train loss:3.472172 +step:7178 train loss:3.471524 +step:7179 train loss:3.458356 +step:7180 train loss:3.455750 +step:7181 train loss:3.489350 +step:7182 train loss:3.436802 +step:7183 train loss:3.509235 +step:7184 train loss:3.601406 +step:7185 train loss:3.513756 +step:7186 train loss:3.454310 +step:7187 train loss:3.464690 +step:7188 train loss:3.448870 +step:7189 train loss:3.450294 +step:7190 train loss:3.454661 +step:7191 train loss:3.447008 +step:7192 train loss:3.473461 +step:7193 train loss:3.397993 +step:7194 train loss:3.460911 +step:7195 train loss:3.437212 +step:7196 train loss:3.482037 +step:7197 train loss:3.460541 +step:7198 train loss:3.521455 +step:7199 train loss:3.475486 +step:7200 train loss:3.467912 +step:7201 train loss:3.477493 +step:7202 train loss:3.452383 +step:7203 train loss:3.472260 +step:7204 train loss:3.438009 +step:7205 train loss:3.397385 +step:7206 train 
loss:3.423992 +step:7207 train loss:3.601252 +step:7208 train loss:3.434417 +step:7209 train loss:3.514469 +step:7210 train loss:3.454719 +step:7211 train loss:3.483055 +step:7212 train loss:3.562782 +step:7213 train loss:3.412703 +step:7214 train loss:3.482399 +step:7215 train loss:3.452780 +step:7216 train loss:3.500630 +step:7217 train loss:3.461396 +step:7218 train loss:3.546936 +step:7219 train loss:3.455123 +step:7220 train loss:3.536527 +step:7221 train loss:3.416579 +step:7222 train loss:3.497184 +step:7223 train loss:3.415344 +step:7224 train loss:3.476034 +step:7225 train loss:3.457856 +step:7226 train loss:3.421964 +step:7227 train loss:3.443343 +step:7228 train loss:3.431936 +step:7229 train loss:3.433510 +step:7230 train loss:3.422652 +step:7231 train loss:3.554959 +step:7232 train loss:3.427966 +step:7233 train loss:3.491351 +step:7234 train loss:3.480207 +step:7235 train loss:3.450551 +step:7236 train loss:3.489600 +step:7237 train loss:3.442355 +step:7238 train loss:3.482586 +step:7239 train loss:3.433084 +step:7240 train loss:3.434229 +step:7241 train loss:3.445084 +step:7242 train loss:3.428984 +step:7243 train loss:3.474399 +step:7244 train loss:3.443697 +step:7245 train loss:3.454573 +step:7246 train loss:3.492712 +step:7247 train loss:3.446361 +step:7248 train loss:3.488791 +step:7249 train loss:3.435510 +step:7250 validation loss:3.389915 +step:7250 train loss:3.459231 +step:7251 train loss:3.505059 +step:7252 train loss:3.416116 +step:7253 train loss:3.506703 +step:7254 train loss:3.443821 +step:7255 train loss:3.418575 +step:7256 train loss:3.456297 +step:7257 train loss:3.494556 +step:7258 train loss:3.454324 +step:7259 train loss:3.434788 +step:7260 train loss:3.519966 +step:7261 train loss:3.477776 +step:7262 train loss:3.434008 +step:7263 train loss:3.478549 +step:7264 train loss:3.460250 +step:7265 train loss:3.366654 +step:7266 train loss:3.490582 +step:7267 train loss:3.411532 +step:7268 train loss:3.475408 +step:7269 train loss:3.479184 +step:7270 train loss:3.431848 +step:7271 train loss:3.451793 +step:7272 train loss:3.455715 +step:7273 train loss:3.454751 +step:7274 train loss:3.430774 +step:7275 train loss:3.499424 +step:7276 train loss:3.408005 +step:7277 train loss:3.453494 +step:7278 train loss:3.427204 +step:7279 train loss:3.407871 +step:7280 train loss:3.475783 +step:7281 train loss:3.501348 +step:7282 train loss:3.497204 +step:7283 train loss:3.388573 +step:7284 train loss:3.433594 +step:7285 train loss:3.459672 +step:7286 train loss:3.595399 +step:7287 train loss:3.499359 +step:7288 train loss:3.453092 +step:7289 train loss:3.461816 +step:7290 train loss:3.506379 +step:7291 train loss:3.470232 +step:7292 train loss:3.536490 +step:7293 train loss:3.438374 +step:7294 train loss:3.523122 +step:7295 train loss:3.407429 +step:7296 train loss:3.407718 +step:7297 train loss:3.453428 +step:7298 train loss:3.428686 +step:7299 train loss:3.472194 +step:7300 train loss:3.452614 +step:7301 train loss:3.403442 +step:7302 train loss:3.548977 +step:7303 train loss:3.439794 +step:7304 train loss:3.385586 +step:7305 train loss:3.459917 +step:7306 train loss:3.492620 +step:7307 train loss:3.496010 +step:7308 train loss:3.446929 +step:7309 train loss:3.414270 +step:7310 train loss:3.441865 +step:7311 train loss:3.426068 +step:7312 train loss:3.470166 +step:7313 train loss:3.501593 +step:7314 train loss:3.400606 +step:7315 train loss:3.395059 +step:7316 train loss:3.539348 +step:7317 train loss:3.475652 +step:7318 train loss:3.414049 +step:7319 train loss:3.441380 
+step:7320 train loss:3.476278 +step:7321 train loss:3.499863 +step:7322 train loss:3.382398 +step:7323 train loss:3.438043 +step:7324 train loss:3.464175 +step:7325 train loss:3.427991 +step:7326 train loss:3.453138 +step:7327 train loss:3.432680 +step:7328 train loss:3.550445 +step:7329 train loss:3.391023 +step:7330 train loss:3.449119 +step:7331 train loss:3.438348 +step:7332 train loss:3.486136 +step:7333 train loss:3.465443 +step:7334 train loss:3.431091 +step:7335 train loss:3.434365 +step:7336 train loss:3.685417 +step:7337 train loss:3.471142 +step:7338 train loss:3.468416 +step:7339 train loss:3.476963 +step:7340 train loss:3.468984 +step:7341 train loss:3.454521 +step:7342 train loss:3.442137 +step:7343 train loss:3.460623 +step:7344 train loss:3.537995 +step:7345 train loss:3.400931 +step:7346 train loss:3.434071 +step:7347 train loss:3.426470 +step:7348 train loss:3.434496 +step:7349 train loss:3.533498 +step:7350 train loss:3.517129 +step:7351 train loss:3.453647 +step:7352 train loss:3.477512 +step:7353 train loss:3.464160 +step:7354 train loss:3.414589 +step:7355 train loss:3.591810 +step:7356 train loss:3.568372 +step:7357 train loss:3.487102 +step:7358 train loss:3.467388 +step:7359 train loss:3.438503 +step:7360 train loss:3.446471 +step:7361 train loss:3.402829 +step:7362 train loss:3.451473 +step:7363 train loss:3.461617 +step:7364 train loss:3.499676 +step:7365 train loss:3.481323 +step:7366 train loss:3.443518 +step:7367 train loss:3.522744 +step:7368 train loss:3.502022 +step:7369 train loss:3.494386 +step:7370 train loss:3.458462 +step:7371 train loss:3.416967 +step:7372 train loss:3.474033 +step:7373 train loss:3.493189 +step:7374 train loss:3.593144 +step:7375 train loss:3.415705 +step:7376 train loss:3.430555 +step:7377 train loss:3.478270 +step:7378 train loss:3.434174 +step:7379 train loss:3.554609 +step:7380 train loss:3.520339 +step:7381 train loss:3.480271 +step:7382 train loss:3.448291 +step:7383 train loss:3.541861 +step:7384 train loss:3.480237 +step:7385 train loss:3.438499 +step:7386 train loss:3.440889 +step:7387 train loss:3.489897 +step:7388 train loss:3.521641 +step:7389 train loss:3.465616 +step:7390 train loss:3.405709 +step:7391 train loss:3.441666 +step:7392 train loss:3.502185 +step:7393 train loss:3.468192 +step:7394 train loss:3.505050 +step:7395 train loss:3.393335 +step:7396 train loss:3.498635 +step:7397 train loss:3.422318 +step:7398 train loss:3.440363 +step:7399 train loss:3.485595 +step:7400 train loss:3.491472 +step:7401 train loss:3.407905 +step:7402 train loss:3.523311 +step:7403 train loss:3.410125 +step:7404 train loss:3.475781 +step:7405 train loss:3.601712 +step:7406 train loss:3.426796 +step:7407 train loss:3.471956 +step:7408 train loss:3.473747 +step:7409 train loss:3.444611 +step:7410 train loss:3.613652 +step:7411 train loss:3.458320 +step:7412 train loss:3.458243 +step:7413 train loss:3.515604 +step:7414 train loss:3.425282 +step:7415 train loss:3.482879 +step:7416 train loss:3.362258 +step:7417 train loss:3.490325 +step:7418 train loss:3.467956 +step:7419 train loss:3.437858 +step:7420 train loss:3.431386 +step:7421 train loss:3.461307 +step:7422 train loss:3.422500 +step:7423 train loss:3.561212 +step:7424 train loss:3.623839 +step:7425 train loss:3.513228 +step:7426 train loss:3.477757 +step:7427 train loss:3.447212 +step:7428 train loss:3.469774 +step:7429 train loss:3.484602 +step:7430 train loss:3.411529 +step:7431 train loss:3.416474 +step:7432 train loss:3.424746 +step:7433 train loss:3.520551 +step:7434 train 
loss:3.436458 +step:7435 train loss:3.522812 +step:7436 train loss:3.564148 +step:7437 train loss:3.386702 +step:7438 train loss:3.443791 +step:7439 train loss:3.456418 +step:7440 train loss:3.431358 +step:7441 train loss:3.401381 +step:7442 train loss:3.627354 +step:7443 train loss:3.448227 +step:7444 train loss:3.490022 +step:7445 train loss:3.421628 +step:7446 train loss:3.444240 +step:7447 train loss:3.370257 +step:7448 train loss:3.426751 +step:7449 train loss:3.438286 +step:7450 train loss:3.474821 +step:7451 train loss:3.507054 +step:7452 train loss:3.430116 +step:7453 train loss:3.458254 +step:7454 train loss:3.445655 +step:7455 train loss:3.451854 +step:7456 train loss:3.427726 +step:7457 train loss:3.439034 +step:7458 train loss:3.472423 +step:7459 train loss:3.453654 +step:7460 train loss:3.462756 +step:7461 train loss:3.495012 +step:7462 train loss:3.434223 +step:7463 train loss:3.497077 +step:7464 train loss:3.419802 +step:7465 train loss:3.427911 +step:7466 train loss:3.429065 +step:7467 train loss:3.437801 +step:7468 train loss:3.492709 +step:7469 train loss:3.420869 +step:7470 train loss:3.454646 +step:7471 train loss:3.444282 +step:7472 train loss:3.474941 +step:7473 train loss:3.417694 +step:7474 train loss:3.404847 +step:7475 train loss:3.432765 +step:7476 train loss:3.473590 +step:7477 train loss:3.445334 +step:7478 train loss:3.441770 +step:7479 train loss:3.455364 +step:7480 train loss:3.737442 +step:7481 train loss:3.386105 +step:7482 train loss:3.454782 +step:7483 train loss:3.451836 +step:7484 train loss:3.474324 +step:7485 train loss:3.457962 +step:7486 train loss:3.483815 +step:7487 train loss:3.472911 +step:7488 train loss:3.499320 +step:7489 train loss:3.494202 +step:7490 train loss:3.438864 +step:7491 train loss:3.461496 +step:7492 train loss:3.568597 +step:7493 train loss:3.545531 +step:7494 train loss:3.568438 +step:7495 train loss:3.437495 +step:7496 train loss:3.424273 +step:7497 train loss:3.524726 +step:7498 train loss:3.457951 +step:7499 train loss:3.496674 +step:7500 validation loss:3.387120 total_sharp:1.8468e-03 L1_sharp:3.6680e-03 L2_sharp:5.9134e-04 L3_sharp:6.7185e-04 L4_sharp:2.5025e-04 L5_sharp:3.4876e-04 L6_sharp:4.7380e-04 L7_sharp:6.3532e-04 L8_sharp:4.9762e-04 L9_sharp:3.7047e-04 L10_sharp:2.0153e-04 L11_sharp:1.4959e-04 L12_sharp:1.4277e-04 total_fnorm:2.4310e+00 total_l1_linf:2.0801e+04 total_spectral:2.4310e+00 L1_fnorm:5.6995e-01 L2_fnorm:5.2842e-01 L3_fnorm:5.6140e-01 L4_fnorm:5.8911e-01 L5_fnorm:5.9488e-01 L6_fnorm:5.9888e-01 L7_fnorm:5.9597e-01 L8_fnorm:5.9413e-01 L9_fnorm:5.9733e-01 L10_fnorm:6.0088e-01 L11_fnorm:5.9826e-01 L12_fnorm:6.0167e-01 L1_l1linf:4.1741e-01 L2_l1linf:4.6774e-01 L3_l1linf:4.4435e-01 L4_l1linf:4.2381e-01 L5_l1linf:4.1704e-01 L6_l1linf:4.1501e-01 L7_l1linf:4.0953e-01 L8_l1linf:4.1219e-01 L9_l1linf:4.0740e-01 L10_l1linf:4.0967e-01 L11_l1linf:4.3154e-01 L12_l1linf:4.2463e-01 L1_spectral:1.2056e-02 L2_spectral:1.2038e-02 L3_spectral:1.2052e-02 L4_spectral:1.2049e-02 L5_spectral:1.2062e-02 L6_spectral:1.2043e-02 L7_spectral:1.2045e-02 L8_spectral:1.2044e-02 L9_spectral:1.2045e-02 L10_spectral:1.2045e-02 L11_spectral:1.2047e-02 L12_spectral:1.2046e-02 ip_v_neg_g:5.8855e-03 cos_v_neg_g:2.0008e-03 v_norm:2.4310e+00 g_norm:1.2100e+00 hv_norm:3.9047e-01 cos_v_hv:1.1498e-02 hg_norm:3.0184e+01 cos_g_hg:6.2177e-01 v_par:5.9316e-04 v_perp:2.4310e+00 L1_cos_v_neg_g:2.3006e-03 L1_v_norm:5.6995e-01 L2_cos_v_neg_g:3.3588e-03 L2_v_norm:5.2842e-01 L3_cos_v_neg_g:4.1721e-03 L3_v_norm:5.6140e-01 L4_cos_v_neg_g:3.0412e-03 
L4_v_norm:5.8911e-01 L5_cos_v_neg_g:2.7233e-03 L5_v_norm:5.9488e-01 L6_cos_v_neg_g:3.6282e-03 L6_v_norm:5.9888e-01 L7_cos_v_neg_g:3.0191e-03 L7_v_norm:5.9597e-01 L8_cos_v_neg_g:4.1924e-03 L8_v_norm:5.9413e-01 L9_cos_v_neg_g:4.6506e-03 L9_v_norm:5.9733e-01 L10_cos_v_neg_g:3.7771e-03 L10_v_norm:6.0088e-01 L11_cos_v_neg_g:2.8248e-03 L11_v_norm:5.9826e-01 L12_cos_v_neg_g:2.4470e-03 L12_v_norm:6.0167e-01 +step:7500 train loss:3.440582 +step:7501 train loss:3.432708 +step:7502 train loss:3.423872 +step:7503 train loss:3.399375 +step:7504 train loss:3.425148 +step:7505 train loss:3.414583 +step:7506 train loss:3.476597 +step:7507 train loss:3.392252 +step:7508 train loss:3.461168 +step:7509 train loss:3.433330 +step:7510 train loss:3.463829 +step:7511 train loss:3.467179 +step:7512 train loss:3.735172 +step:7513 train loss:3.420867 +step:7514 train loss:3.457284 +step:7515 train loss:3.414845 +step:7516 train loss:3.428294 +step:7517 train loss:3.465458 +step:7518 train loss:3.436415 +step:7519 train loss:3.453177 +step:7520 train loss:3.517224 +step:7521 train loss:3.401806 +step:7522 train loss:3.460204 +step:7523 train loss:3.489591 +step:7524 train loss:3.439250 +step:7525 train loss:3.439978 +step:7526 train loss:3.388740 +step:7527 train loss:3.398706 +step:7528 train loss:3.497566 +step:7529 train loss:3.473583 +step:7530 train loss:3.418578 +step:7531 train loss:3.497088 +step:7532 train loss:3.486114 +step:7533 train loss:3.412444 +step:7534 train loss:3.475717 +step:7535 train loss:3.477905 +step:7536 train loss:3.513021 +step:7537 train loss:3.539964 +step:7538 train loss:3.556318 +step:7539 train loss:3.455236 +step:7540 train loss:3.442666 +step:7541 train loss:3.496408 +step:7542 train loss:3.459238 +step:7543 train loss:3.412652 +step:7544 train loss:3.459129 +step:7545 train loss:3.443741 +step:7546 train loss:3.401067 +step:7547 train loss:3.445483 +step:7548 train loss:3.462642 +step:7549 train loss:3.441801 +step:7550 train loss:3.440440 +step:7551 train loss:3.541177 +step:7552 train loss:3.454987 +step:7553 train loss:3.488953 +step:7554 train loss:3.413595 +step:7555 train loss:3.506408 +step:7556 train loss:3.409125 +step:7557 train loss:3.503075 +step:7558 train loss:3.491758 +step:7559 train loss:3.450653 +step:7560 train loss:3.546099 +step:7561 train loss:3.513654 +step:7562 train loss:3.422890 +step:7563 train loss:3.415304 +step:7564 train loss:3.466568 +step:7565 train loss:3.487018 +step:7566 train loss:3.479358 +step:7567 train loss:3.491754 +step:7568 train loss:3.437893 +step:7569 train loss:3.499788 +step:7570 train loss:3.481661 +step:7571 train loss:3.562335 +step:7572 train loss:3.415052 +step:7573 train loss:3.479415 +step:7574 train loss:3.443432 +step:7575 train loss:3.439495 +step:7576 train loss:3.444473 +step:7577 train loss:3.462392 +step:7578 train loss:3.519685 +step:7579 train loss:3.453671 +step:7580 train loss:3.442445 +step:7581 train loss:3.430676 +step:7582 train loss:3.488366 +step:7583 train loss:3.424426 +step:7584 train loss:3.406192 +step:7585 train loss:3.371986 +step:7586 train loss:3.412271 +step:7587 train loss:3.471721 +step:7588 train loss:3.602411 +step:7589 train loss:3.423438 +step:7590 train loss:3.489631 +step:7591 train loss:3.493148 +step:7592 train loss:3.453815 +step:7593 train loss:3.478900 +step:7594 train loss:3.477356 +step:7595 train loss:3.446563 +step:7596 train loss:3.496539 +step:7597 train loss:3.402284 +step:7598 train loss:3.467941 +step:7599 train loss:3.454049 +step:7600 train loss:3.417005 +step:7601 train 
loss:3.529453 +step:7602 train loss:3.469748 +step:7603 train loss:3.433773 +step:7604 train loss:3.575402 +step:7605 train loss:3.461173 +step:7606 train loss:3.495890 +step:7607 train loss:3.449388 +step:7608 train loss:3.458089 +step:7609 train loss:3.496989 +step:7610 train loss:3.453271 +step:7611 train loss:3.429215 +step:7612 train loss:3.374873 +step:7613 train loss:3.425162 +step:7614 train loss:3.489918 +step:7615 train loss:3.450558 +step:7616 train loss:3.520529 +step:7617 train loss:3.417107 +step:7618 train loss:3.503264 +step:7619 train loss:3.448280 +step:7620 train loss:3.435205 +step:7621 train loss:3.382369 +step:7622 train loss:3.662196 +step:7623 train loss:3.672096 +step:7624 train loss:3.483948 +step:7625 train loss:3.524270 +step:7626 train loss:3.442165 +step:7627 train loss:3.512846 +step:7628 train loss:3.392267 +step:7629 train loss:3.454241 +step:7630 train loss:3.465103 +step:7631 train loss:3.449156 +step:7632 train loss:3.500609 +step:7633 train loss:3.565914 +step:7634 train loss:3.527635 +step:7635 train loss:3.436051 +step:7636 train loss:3.458623 +step:7637 train loss:3.406831 +step:7638 train loss:3.519501 +step:7639 train loss:3.446836 +step:7640 train loss:3.428958 +step:7641 train loss:3.459856 +step:7642 train loss:3.800212 +step:7643 train loss:3.546741 +step:7644 train loss:3.472992 +step:7645 train loss:3.454597 +step:7646 train loss:3.444678 +step:7647 train loss:3.437779 +step:7648 train loss:3.468818 +step:7649 train loss:3.431352 +step:7650 train loss:3.481557 +step:7651 train loss:3.499983 +step:7652 train loss:3.380818 +step:7653 train loss:3.586017 +step:7654 train loss:3.431619 +step:7655 train loss:3.455474 +step:7656 train loss:3.428905 +step:7657 train loss:3.439040 +step:7658 train loss:3.397228 +step:7659 train loss:3.461004 +step:7660 train loss:3.393262 +step:7661 train loss:3.409231 +step:7662 train loss:3.415232 +step:7663 train loss:3.460651 +step:7664 train loss:3.417987 +step:7665 train loss:3.392736 +step:7666 train loss:3.501369 +step:7667 train loss:3.418982 +step:7668 train loss:3.524393 +step:7669 train loss:3.458339 +step:7670 train loss:3.414141 +step:7671 train loss:3.467414 +step:7672 train loss:3.487032 +step:7673 train loss:3.454624 +step:7674 train loss:3.491766 +step:7675 train loss:3.546715 +step:7676 train loss:3.515351 +step:7677 train loss:3.539396 +step:7678 train loss:3.482056 +step:7679 train loss:3.502911 +step:7680 train loss:3.508570 +step:7681 train loss:3.475639 +step:7682 train loss:3.444497 +step:7683 train loss:3.447078 +step:7684 train loss:3.416967 +step:7685 train loss:3.400681 +step:7686 train loss:3.520145 +step:7687 train loss:3.433038 +step:7688 train loss:3.400739 +step:7689 train loss:3.449738 +step:7690 train loss:3.415681 +step:7691 train loss:3.445941 +step:7692 train loss:3.481108 +step:7693 train loss:3.479400 +step:7694 train loss:3.533305 +step:7695 train loss:3.459657 +step:7696 train loss:3.432627 +step:7697 train loss:3.423721 +step:7698 train loss:3.483021 +step:7699 train loss:3.479744 +step:7700 train loss:3.377218 +step:7701 train loss:3.498309 +step:7702 train loss:3.436749 +step:7703 train loss:3.439720 +step:7704 train loss:3.490461 +step:7705 train loss:3.449026 +step:7706 train loss:3.387761 +step:7707 train loss:3.507809 +step:7708 train loss:3.444890 +step:7709 train loss:3.463105 +step:7710 train loss:3.525551 +step:7711 train loss:3.487096 +step:7712 train loss:3.434214 +step:7713 train loss:3.511979 +step:7714 train loss:3.458592 +step:7715 train loss:3.409440 
+step:7716 train loss:3.450826 +step:7717 train loss:3.472783 +step:7718 train loss:3.477492 +step:7719 train loss:3.436125 +step:7720 train loss:3.451103 +step:7721 train loss:3.492354 +step:7722 train loss:3.422251 +step:7723 train loss:3.796644 +step:7724 train loss:3.459389 +step:7725 train loss:3.379395 +step:7726 train loss:3.440092 +step:7727 train loss:3.474664 +step:7728 train loss:3.423173 +step:7729 train loss:3.427963 +step:7730 train loss:3.455390 +step:7731 train loss:3.483265 +step:7732 train loss:3.507561 +step:7733 train loss:3.416212 +step:7734 train loss:3.443052 +step:7735 train loss:3.532007 +step:7736 train loss:3.476697 +step:7737 train loss:3.491790 +step:7738 train loss:3.394601 +step:7739 train loss:3.471831 +step:7740 train loss:3.417468 +step:7741 train loss:3.454753 +step:7742 train loss:3.456850 +step:7743 train loss:3.406718 +step:7744 train loss:3.534232 +step:7745 train loss:3.421337 +step:7746 train loss:3.396580 +step:7747 train loss:3.492265 +step:7748 train loss:3.473796 +step:7749 train loss:3.398074 +step:7750 validation loss:3.381855 +step:7750 train loss:3.555912 +step:7751 train loss:3.439213 +step:7752 train loss:3.432128 +step:7753 train loss:3.435002 +step:7754 train loss:3.410227 +step:7755 train loss:3.474451 +step:7756 train loss:3.501853 +step:7757 train loss:3.448841 +step:7758 train loss:3.420646 +step:7759 train loss:3.448536 +step:7760 train loss:3.476713 +step:7761 train loss:3.467610 +step:7762 train loss:3.454262 +step:7763 train loss:3.437572 +step:7764 train loss:3.443062 +step:7765 train loss:3.397406 +step:7766 train loss:3.462850 +step:7767 train loss:3.464722 +step:7768 train loss:3.421032 +step:7769 train loss:3.485898 +step:7770 train loss:3.502050 +step:7771 train loss:3.476699 +step:7772 train loss:3.450331 +step:7773 train loss:3.510145 +step:7774 train loss:3.406271 +step:7775 train loss:3.394375 +step:7776 train loss:3.501704 +step:7777 train loss:3.452929 +step:7778 train loss:3.411114 +step:7779 train loss:3.454151 +step:7780 train loss:3.447554 +step:7781 train loss:3.458220 +step:7782 train loss:3.441829 +step:7783 train loss:3.424867 +step:7784 train loss:3.419978 +step:7785 train loss:3.463500 +step:7786 train loss:3.419024 +step:7787 train loss:3.500643 +step:7788 train loss:3.449517 +step:7789 train loss:3.385434 +step:7790 train loss:3.448393 +step:7791 train loss:3.477364 +step:7792 train loss:3.437599 +step:7793 train loss:3.460299 +step:7794 train loss:3.447910 +step:7795 train loss:3.479504 +step:7796 train loss:3.445457 +step:7797 train loss:3.464540 +step:7798 train loss:3.453969 +step:7799 train loss:3.445141 +step:7800 train loss:3.400707 +step:7801 train loss:3.464924 +step:7802 train loss:3.446508 +step:7803 train loss:3.496495 +step:7804 train loss:3.456797 +step:7805 train loss:3.453447 +step:7806 train loss:3.473976 +step:7807 train loss:3.543892 +step:7808 train loss:3.404121 +step:7809 train loss:3.382181 +step:7810 train loss:3.468566 +step:7811 train loss:3.402601 +step:7812 train loss:3.421849 +step:7813 train loss:3.507885 +step:7814 train loss:3.579755 +step:7815 train loss:3.393092 +step:7816 train loss:3.476344 +step:7817 train loss:3.507578 +step:7818 train loss:3.405187 +step:7819 train loss:3.460343 +step:7820 train loss:3.496898 +step:7821 train loss:3.433114 +step:7822 train loss:3.392271 +step:7823 train loss:3.512536 +step:7824 train loss:3.444684 +step:7825 train loss:3.430695 +step:7826 train loss:3.427195 +step:7827 train loss:3.469590 +step:7828 train loss:3.460331 +step:7829 
train loss:3.419019 +step:7830 train loss:3.425180 +step:7831 train loss:3.428891 +step:7832 train loss:3.499440 +step:7833 train loss:3.476531 +step:7834 train loss:3.442121 +step:7835 train loss:3.467098 +step:7836 train loss:3.574897 +step:7837 train loss:3.460458 +step:7838 train loss:3.431828 +step:7839 train loss:3.391090 +step:7840 train loss:3.407434 +step:7841 train loss:3.504662 +step:7842 train loss:3.489124 +step:7843 train loss:3.542978 +step:7844 train loss:3.472180 +step:7845 train loss:3.449020 +step:7846 train loss:3.563480 +step:7847 train loss:3.452999 +step:7848 train loss:3.462189 +step:7849 train loss:3.476259 +step:7850 train loss:3.447084 +step:7851 train loss:3.474729 +step:7852 train loss:3.448586 +step:7853 train loss:3.421318 +step:7854 train loss:3.451130 +step:7855 train loss:3.449377 +step:7856 train loss:3.454145 +step:7857 train loss:3.443908 +step:7858 train loss:3.451128 +step:7859 train loss:3.457328 +step:7860 train loss:3.494752 +step:7861 train loss:3.481400 +step:7862 train loss:3.426413 +step:7863 train loss:3.527710 +step:7864 train loss:3.368921 +step:7865 train loss:3.448077 +step:7866 train loss:3.420391 +step:7867 train loss:3.468982 +step:7868 train loss:3.444536 +step:7869 train loss:3.448411 +step:7870 train loss:3.367055 +step:7871 train loss:3.432371 +step:7872 train loss:3.421466 +step:7873 train loss:3.500965 +step:7874 train loss:3.441802 +step:7875 train loss:3.447506 +step:7876 train loss:3.466991 +step:7877 train loss:3.421770 +step:7878 train loss:3.459200 +step:7879 train loss:3.799187 +step:7880 train loss:3.450848 +step:7881 train loss:3.476721 +step:7882 train loss:3.556507 +step:7883 train loss:3.372604 +step:7884 train loss:3.462533 +step:7885 train loss:3.443249 +step:7886 train loss:3.444375 +step:7887 train loss:3.440198 +step:7888 train loss:3.469676 +step:7889 train loss:3.519644 +step:7890 train loss:3.425194 +step:7891 train loss:3.473885 +step:7892 train loss:3.443763 +step:7893 train loss:3.419171 +step:7894 train loss:3.442641 +step:7895 train loss:3.424819 +step:7896 train loss:3.426025 +step:7897 train loss:3.450889 +step:7898 train loss:3.458537 +step:7899 train loss:3.443335 +step:7900 train loss:3.416636 +step:7901 train loss:3.405787 +step:7902 train loss:3.553291 +step:7903 train loss:3.397310 +step:7904 train loss:3.449748 +step:7905 train loss:3.517761 +step:7906 train loss:3.411019 +step:7907 train loss:3.439010 +step:7908 train loss:3.491036 +step:7909 train loss:3.545046 +step:7910 train loss:3.421809 +step:7911 train loss:3.443613 +step:7912 train loss:3.447873 +step:7913 train loss:3.421545 +step:7914 train loss:3.458186 +step:7915 train loss:3.559591 +step:7916 train loss:3.433550 +step:7917 train loss:3.489662 +step:7918 train loss:3.433028 +step:7919 train loss:3.420078 +step:7920 train loss:3.463218 +step:7921 train loss:3.465728 +step:7922 train loss:3.442632 +step:7923 train loss:3.490321 +step:7924 train loss:3.452545 +step:7925 train loss:3.473389 +step:7926 train loss:3.376010 +step:7927 train loss:3.661437 +step:7928 train loss:3.484041 +step:7929 train loss:3.445495 +step:7930 train loss:3.403639 +step:7931 train loss:3.429059 +step:7932 train loss:3.452615 +step:7933 train loss:3.464698 +step:7934 train loss:3.561487 +step:7935 train loss:3.482662 +step:7936 train loss:3.453889 +step:7937 train loss:3.405673 +step:7938 train loss:3.417586 +step:7939 train loss:3.466270 +step:7940 train loss:3.447673 +step:7941 train loss:3.477464 +step:7942 train loss:3.467080 +step:7943 train loss:3.478318 
+step:7944 train loss:3.398463 +step:7945 train loss:3.499792 +step:7946 train loss:3.455630 +step:7947 train loss:3.463813 +step:7948 train loss:3.422384 +step:7949 train loss:3.476553 +step:7950 train loss:3.531787 +step:7951 train loss:3.500275 +step:7952 train loss:3.644420 +step:7953 train loss:3.537007 +step:7954 train loss:3.438859 +step:7955 train loss:3.422954 +step:7956 train loss:3.429082 +step:7957 train loss:3.504728 +step:7958 train loss:3.516711 +step:7959 train loss:3.469883 +step:7960 train loss:3.531257 +step:7961 train loss:3.442183 +step:7962 train loss:3.413088 +step:7963 train loss:3.449930 +step:7964 train loss:3.445042 +step:7965 train loss:3.457729 +step:7966 train loss:3.427977 +step:7967 train loss:3.453677 +step:7968 train loss:3.460715 +step:7969 train loss:3.416336 +step:7970 train loss:3.385937 +step:7971 train loss:3.473259 +step:7972 train loss:3.447556 +step:7973 train loss:3.419666 +step:7974 train loss:3.461029 +step:7975 train loss:3.448050 +step:7976 train loss:3.465087 +step:7977 train loss:3.496838 +step:7978 train loss:3.520411 +step:7979 train loss:3.466586 +step:7980 train loss:3.370955 +step:7981 train loss:3.409890 +step:7982 train loss:3.458363 +step:7983 train loss:3.475000 +step:7984 train loss:3.516335 +step:7985 train loss:3.442327 +step:7986 train loss:3.464562 +step:7987 train loss:3.514799 +step:7988 train loss:3.491012 +step:7989 train loss:3.393572 +step:7990 train loss:3.413306 +step:7991 train loss:3.424802 +step:7992 train loss:3.451295 +step:7993 train loss:3.432388 +step:7994 train loss:3.483708 +step:7995 train loss:3.486086 +step:7996 train loss:3.451666 +step:7997 train loss:3.472478 +step:7998 train loss:3.493769 +step:7999 train loss:3.424169 +step:8000 validation loss:3.377477 total_sharp:1.4703e-03 L1_sharp:3.6045e-03 L2_sharp:3.8622e-04 L3_sharp:5.4705e-04 L4_sharp:2.6559e-04 L5_sharp:3.0392e-04 L6_sharp:4.1489e-04 L7_sharp:5.5355e-04 L8_sharp:3.6348e-04 L9_sharp:2.9183e-04 L10_sharp:1.7745e-04 L11_sharp:1.5433e-04 L12_sharp:1.6418e-04 total_fnorm:2.4288e+00 total_l1_linf:2.0782e+04 total_spectral:2.4288e+00 L1_fnorm:5.7038e-01 L2_fnorm:5.2292e-01 L3_fnorm:5.6173e-01 L4_fnorm:5.8991e-01 L5_fnorm:5.9798e-01 L6_fnorm:6.0035e-01 L7_fnorm:5.9797e-01 L8_fnorm:5.9622e-01 L9_fnorm:5.9873e-01 L10_fnorm:6.0298e-01 L11_fnorm:5.9965e-01 L12_fnorm:6.0270e-01 L1_l1linf:4.1538e-01 L2_l1linf:4.4553e-01 L3_l1linf:4.4740e-01 L4_l1linf:4.3609e-01 L5_l1linf:4.1896e-01 L6_l1linf:4.2004e-01 L7_l1linf:4.1806e-01 L8_l1linf:4.1386e-01 L9_l1linf:4.1045e-01 L10_l1linf:4.3439e-01 L11_l1linf:4.2785e-01 L12_l1linf:4.3648e-01 L1_spectral:1.2051e-02 L2_spectral:1.2040e-02 L3_spectral:1.2049e-02 L4_spectral:1.2049e-02 L5_spectral:1.2057e-02 L6_spectral:1.2045e-02 L7_spectral:1.2043e-02 L8_spectral:1.2043e-02 L9_spectral:1.2043e-02 L10_spectral:1.2045e-02 L11_spectral:1.2045e-02 L12_spectral:1.2045e-02 ip_v_neg_g:2.3426e-03 cos_v_neg_g:8.5006e-04 v_norm:2.4288e+00 g_norm:1.1346e+00 hv_norm:4.2669e-01 cos_v_hv:8.3692e-03 hg_norm:2.7332e+01 cos_g_hg:5.3231e-01 v_par:2.0630e-04 v_perp:2.4288e+00 L1_cos_v_neg_g:3.3422e-04 L1_v_norm:5.7038e-01 L2_cos_v_neg_g:7.9157e-05 L2_v_norm:5.2292e-01 L3_cos_v_neg_g:1.0660e-03 L3_v_norm:5.6173e-01 L4_cos_v_neg_g:1.0174e-03 L4_v_norm:5.8991e-01 L5_cos_v_neg_g:1.3357e-03 L5_v_norm:5.9798e-01 L6_cos_v_neg_g:1.4828e-03 L6_v_norm:6.0035e-01 L7_cos_v_neg_g:1.9851e-03 L7_v_norm:5.9797e-01 L8_cos_v_neg_g:9.6865e-04 L8_v_norm:5.9622e-01 L9_cos_v_neg_g:1.1522e-03 L9_v_norm:5.9873e-01 L10_cos_v_neg_g:2.0443e-03 L10_v_norm:6.0298e-01 
L11_cos_v_neg_g:2.9015e-03 L11_v_norm:5.9965e-01 L12_cos_v_neg_g:2.0631e-03 L12_v_norm:6.0270e-01 +step:8000 train loss:3.496061 +step:8001 train loss:3.456148 +step:8002 train loss:3.474550 +step:8003 train loss:3.492277 +step:8004 train loss:3.466500 +step:8005 train loss:3.392612 +step:8006 train loss:3.469040 +step:8007 train loss:3.436101 +step:8008 train loss:3.463763 +step:8009 train loss:3.537712 +step:8010 train loss:3.754316 +step:8011 train loss:3.417579 +step:8012 train loss:3.502292 +step:8013 train loss:3.450485 +step:8014 train loss:3.467527 +step:8015 train loss:3.461325 +step:8016 train loss:3.452003 +step:8017 train loss:3.472984 +step:8018 train loss:3.434908 +step:8019 train loss:3.402345 +step:8020 train loss:3.443278 +step:8021 train loss:3.515544 +step:8022 train loss:3.433308 +step:8023 train loss:3.466177 +step:8024 train loss:3.346725 +step:8025 train loss:3.442341 +step:8026 train loss:3.451146 +step:8027 train loss:3.459448 +step:8028 train loss:3.513597 +step:8029 train loss:3.445246 +step:8030 train loss:3.407134 +step:8031 train loss:3.463330 +step:8032 train loss:3.446789 +step:8033 train loss:3.397516 +step:8034 train loss:3.437381 +step:8035 train loss:3.425863 +step:8036 train loss:3.413500 +step:8037 train loss:3.385023 +step:8038 train loss:3.399499 +step:8039 train loss:3.490858 +step:8040 train loss:3.427083 +step:8041 train loss:3.423240 +step:8042 train loss:3.461878 +step:8043 train loss:3.404106 +step:8044 train loss:3.415004 +step:8045 train loss:3.485602 +step:8046 train loss:3.409406 +step:8047 train loss:3.415297 +step:8048 train loss:3.448129 +step:8049 train loss:3.492705 +step:8050 train loss:3.433042 +step:8051 train loss:3.408191 +step:8052 train loss:3.469428 +step:8053 train loss:3.426558 +step:8054 train loss:3.458205 +step:8055 train loss:3.490032 +step:8056 train loss:3.455365 +step:8057 train loss:3.531631 +step:8058 train loss:3.434784 +step:8059 train loss:3.494865 +step:8060 train loss:3.467376 +step:8061 train loss:3.356243 +step:8062 train loss:3.487926 +step:8063 train loss:3.449933 +step:8064 train loss:3.410175 +step:8065 train loss:3.476572 +step:8066 train loss:3.434253 +step:8067 train loss:3.499057 +step:8068 train loss:3.425039 +step:8069 train loss:3.450359 +step:8070 train loss:3.415442 +step:8071 train loss:3.425356 +step:8072 train loss:3.468228 +step:8073 train loss:3.421481 +step:8074 train loss:3.431063 +step:8075 train loss:3.417473 +step:8076 train loss:3.462219 +step:8077 train loss:3.471346 +step:8078 train loss:3.415216 +step:8079 train loss:3.436600 +step:8080 train loss:3.422764 +step:8081 train loss:3.441713 +step:8082 train loss:3.458771 +step:8083 train loss:3.357768 +step:8084 train loss:3.493942 +step:8085 train loss:3.370668 +step:8086 train loss:3.494997 +step:8087 train loss:3.391414 +step:8088 train loss:3.434546 +step:8089 train loss:3.476548 +step:8090 train loss:3.496522 +step:8091 train loss:3.442457 +step:8092 train loss:3.420378 +step:8093 train loss:3.427984 +step:8094 train loss:3.432084 +step:8095 train loss:3.455141 +step:8096 train loss:3.456565 +step:8097 train loss:3.385272 +step:8098 train loss:3.399164 +step:8099 train loss:3.386991 +step:8100 train loss:3.439554 +step:8101 train loss:3.521478 +step:8102 train loss:3.453092 +step:8103 train loss:3.407132 +step:8104 train loss:3.455894 +step:8105 train loss:3.454225 +step:8106 train loss:3.420903 +step:8107 train loss:3.397402 +step:8108 train loss:3.416103 +step:8109 train loss:3.410451 +step:8110 train loss:3.473587 +step:8111 
train loss:3.397180 +step:8112 train loss:3.417565 +step:8113 train loss:3.407066 +step:8114 train loss:3.350194 +step:8115 train loss:3.405147 +step:8116 train loss:3.440265 +step:8117 train loss:3.414897 +step:8118 train loss:3.406173 +step:8119 train loss:3.445616 +step:8120 train loss:3.394273 +step:8121 train loss:3.456115 +step:8122 train loss:3.435274 +step:8123 train loss:3.439857 +step:8124 train loss:3.404985 +step:8125 train loss:3.383759 +step:8126 train loss:3.382668 +step:8127 train loss:3.472080 +step:8128 train loss:3.478232 +step:8129 train loss:3.400668 +step:8130 train loss:3.425467 +step:8131 train loss:3.397307 +step:8132 train loss:3.464843 +step:8133 train loss:3.390999 +step:8134 train loss:3.425660 +step:8135 train loss:3.421770 +step:8136 train loss:3.426743 +step:8137 train loss:3.493855 +step:8138 train loss:3.399548 +step:8139 train loss:3.472811 +step:8140 train loss:3.401422 +step:8141 train loss:3.423913 +step:8142 train loss:3.408078 +step:8143 train loss:3.458309 +step:8144 train loss:3.432561 +step:8145 train loss:3.398698 +step:8146 train loss:3.412074 +step:8147 train loss:3.434132 +step:8148 train loss:3.528940 +step:8149 train loss:3.436600 +step:8150 train loss:3.417679 +step:8151 train loss:3.410845 +step:8152 train loss:3.504447 +step:8153 train loss:3.383175 +step:8154 train loss:3.400909 +step:8155 train loss:3.425991 +step:8156 train loss:3.406513 +step:8157 train loss:3.430658 +step:8158 train loss:3.440824 +step:8159 train loss:3.456721 +step:8160 train loss:3.410425 +step:8161 train loss:3.452137 +step:8162 train loss:3.384016 +step:8163 train loss:3.444057 +step:8164 train loss:3.432098 +step:8165 train loss:3.479804 +step:8166 train loss:3.483973 +step:8167 train loss:3.390183 +step:8168 train loss:3.369955 +step:8169 train loss:3.415384 +step:8170 train loss:3.369915 +step:8171 train loss:3.427842 +step:8172 train loss:3.421909 +step:8173 train loss:3.428821 +step:8174 train loss:3.433547 +step:8175 train loss:3.395722 +step:8176 train loss:3.389999 +step:8177 train loss:3.439087 +step:8178 train loss:3.524403 +step:8179 train loss:3.430987 +step:8180 train loss:3.456885 +step:8181 train loss:3.451780 +step:8182 train loss:3.412045 +step:8183 train loss:3.402282 +step:8184 train loss:3.393898 +step:8185 train loss:3.432866 +step:8186 train loss:3.438396 +step:8187 train loss:3.445291 +step:8188 train loss:3.375746 +step:8189 train loss:3.522512 +step:8190 train loss:3.455038 +step:8191 train loss:3.458557 +step:8192 train loss:3.574374 +step:8193 train loss:3.441803 +step:8194 train loss:3.374558 +step:8195 train loss:3.471320 +step:8196 train loss:3.388717 +step:8197 train loss:3.414402 +step:8198 train loss:3.424792 +step:8199 train loss:3.425246 +step:8200 train loss:3.409066 +step:8201 train loss:3.520327 +step:8202 train loss:3.438941 +step:8203 train loss:3.455826 +step:8204 train loss:3.367948 +step:8205 train loss:3.374286 +step:8206 train loss:3.499100 +step:8207 train loss:3.423645 +step:8208 train loss:3.444584 +step:8209 train loss:3.485446 +step:8210 train loss:3.471320 +step:8211 train loss:3.403753 +step:8212 train loss:3.459354 +step:8213 train loss:3.472427 +step:8214 train loss:3.510002 +step:8215 train loss:3.485068 +step:8216 train loss:3.466587 +step:8217 train loss:3.445905 +step:8218 train loss:3.454918 +step:8219 train loss:3.588090 +step:8220 train loss:3.416158 +step:8221 train loss:3.437054 +step:8222 train loss:3.388920 +step:8223 train loss:3.409285 +step:8224 train loss:3.419038 +step:8225 train loss:3.469172 
+step:8226 train loss:3.400037 +step:8227 train loss:3.468780 +step:8228 train loss:3.356342 +step:8229 train loss:3.396204 +step:8230 train loss:3.414512 +step:8231 train loss:3.437007 +step:8232 train loss:3.439045 +step:8233 train loss:3.481898 +step:8234 train loss:3.479677 +step:8235 train loss:3.444777 +step:8236 train loss:3.433659 +step:8237 train loss:3.385026 +step:8238 train loss:3.640092 +step:8239 train loss:3.473825 +step:8240 train loss:3.416576 +step:8241 train loss:3.390405 +step:8242 train loss:3.424668 +step:8243 train loss:3.419483 +step:8244 train loss:3.426368 +step:8245 train loss:3.415162 +step:8246 train loss:3.478441 +step:8247 train loss:3.513996 +step:8248 train loss:3.428571 +step:8249 train loss:3.422334 +step:8250 validation loss:3.370988 +step:8250 train loss:3.412014 +step:8251 train loss:3.507845 +step:8252 train loss:3.446957 +step:8253 train loss:3.409012 +step:8254 train loss:3.386282 +step:8255 train loss:3.414528 +step:8256 train loss:3.400133 +step:8257 train loss:3.502573 +step:8258 train loss:3.428576 +step:8259 train loss:3.409984 +step:8260 train loss:3.413468 +step:8261 train loss:3.405430 +step:8262 train loss:3.423773 +step:8263 train loss:3.435810 +step:8264 train loss:3.403216 +step:8265 train loss:3.391799 +step:8266 train loss:3.404631 +step:8267 train loss:3.336283 +step:8268 train loss:3.457951 +step:8269 train loss:3.389540 +step:8270 train loss:3.444129 +step:8271 train loss:3.469643 +step:8272 train loss:3.497551 +step:8273 train loss:3.372347 +step:8274 train loss:3.436091 +step:8275 train loss:3.396149 +step:8276 train loss:3.433417 +step:8277 train loss:3.504244 +step:8278 train loss:3.517137 +step:8279 train loss:3.435564 +step:8280 train loss:3.413809 +step:8281 train loss:3.382281 +step:8282 train loss:3.444128 +step:8283 train loss:3.435529 +step:8284 train loss:3.412462 +step:8285 train loss:3.409670 +step:8286 train loss:3.514360 +step:8287 train loss:3.455299 +step:8288 train loss:3.422843 +step:8289 train loss:3.438420 +step:8290 train loss:3.373777 +step:8291 train loss:3.417688 +step:8292 train loss:3.443045 +step:8293 train loss:3.423669 +step:8294 train loss:3.391046 +step:8295 train loss:3.431470 +step:8296 train loss:3.493946 +step:8297 train loss:3.580279 +step:8298 train loss:3.397203 +step:8299 train loss:3.434870 +step:8300 train loss:3.442702 +step:8301 train loss:3.415731 +step:8302 train loss:3.472974 +step:8303 train loss:3.608977 +step:8304 train loss:3.414551 +step:8305 train loss:3.459478 +step:8306 train loss:3.437432 +step:8307 train loss:3.453900 +step:8308 train loss:3.454060 +step:8309 train loss:3.475152 +step:8310 train loss:3.390782 +step:8311 train loss:3.485255 +step:8312 train loss:3.474086 +step:8313 train loss:3.539511 +step:8314 train loss:3.409785 +step:8315 train loss:3.361068 +step:8316 train loss:3.416590 +step:8317 train loss:3.442792 +step:8318 train loss:3.429655 +step:8319 train loss:3.467824 +step:8320 train loss:3.490335 +step:8321 train loss:3.395729 +step:8322 train loss:3.410167 +step:8323 train loss:3.447505 +step:8324 train loss:3.423554 +step:8325 train loss:3.478643 +step:8326 train loss:3.446344 +step:8327 train loss:3.432652 +step:8328 train loss:3.508366 +step:8329 train loss:3.413604 +step:8330 train loss:3.457248 +step:8331 train loss:3.382895 +step:8332 train loss:3.485365 +step:8333 train loss:3.499787 +step:8334 train loss:3.366830 +step:8335 train loss:3.427363 +step:8336 train loss:3.522899 +step:8337 train loss:3.452650 +step:8338 train loss:3.422143 +step:8339 
train loss:3.399345 +step:8340 train loss:3.491452 +step:8341 train loss:3.390238 +step:8342 train loss:3.466519 +step:8343 train loss:3.378708 +step:8344 train loss:3.424857 +step:8345 train loss:3.457702 +step:8346 train loss:3.543084 +step:8347 train loss:3.428880 +step:8348 train loss:3.460722 +step:8349 train loss:3.429507 +step:8350 train loss:3.452027 +step:8351 train loss:3.387811 +step:8352 train loss:3.477540 +step:8353 train loss:3.426903 +step:8354 train loss:3.414834 +step:8355 train loss:3.410140 +step:8356 train loss:3.410563 +step:8357 train loss:3.422871 +step:8358 train loss:3.400159 +step:8359 train loss:3.392954 +step:8360 train loss:3.441531 +step:8361 train loss:3.453203 +step:8362 train loss:3.474558 +step:8363 train loss:3.470090 +step:8364 train loss:3.436352 +step:8365 train loss:3.581077 +step:8366 train loss:3.426330 +step:8367 train loss:3.396505 +step:8368 train loss:3.368762 +step:8369 train loss:3.396255 +step:8370 train loss:3.482000 +step:8371 train loss:3.450673 +step:8372 train loss:3.431040 +step:8373 train loss:3.436800 +step:8374 train loss:3.375054 +step:8375 train loss:3.434010 +step:8376 train loss:3.476269 +step:8377 train loss:3.303895 +step:8378 train loss:3.518346 +step:8379 train loss:3.383621 +step:8380 train loss:3.387487 +step:8381 train loss:3.395397 +step:8382 train loss:3.416770 +step:8383 train loss:3.381829 +step:8384 train loss:3.423642 +step:8385 train loss:3.433881 +step:8386 train loss:3.418716 +step:8387 train loss:3.574935 +step:8388 train loss:3.486691 +step:8389 train loss:3.467100 +step:8390 train loss:3.468094 +step:8391 train loss:3.396152 +step:8392 train loss:3.407935 +step:8393 train loss:3.362674 +step:8394 train loss:3.456918 +step:8395 train loss:3.460201 +step:8396 train loss:3.484692 +step:8397 train loss:3.420515 +step:8398 train loss:3.437155 +step:8399 train loss:3.403524 +step:8400 train loss:3.410714 +step:8401 train loss:3.416795 +step:8402 train loss:3.400840 +step:8403 train loss:3.420467 +step:8404 train loss:3.419882 +step:8405 train loss:3.379027 +step:8406 train loss:3.416934 +step:8407 train loss:3.461078 +step:8408 train loss:3.430907 +step:8409 train loss:3.354350 +step:8410 train loss:3.418443 +step:8411 train loss:3.444542 +step:8412 train loss:3.506800 +step:8413 train loss:3.481541 +step:8414 train loss:3.472807 +step:8415 train loss:3.397419 +step:8416 train loss:3.440483 +step:8417 train loss:3.361892 +step:8418 train loss:3.463221 +step:8419 train loss:3.421147 +step:8420 train loss:3.495653 +step:8421 train loss:3.413661 +step:8422 train loss:3.429813 +step:8423 train loss:3.445987 +step:8424 train loss:3.451911 +step:8425 train loss:3.510363 +step:8426 train loss:3.477994 +step:8427 train loss:3.397923 +step:8428 train loss:3.412472 +step:8429 train loss:3.472854 +step:8430 train loss:3.412092 +step:8431 train loss:3.415447 +step:8432 train loss:3.417995 +step:8433 train loss:3.391215 +step:8434 train loss:3.430434 +step:8435 train loss:3.347546 +step:8436 train loss:3.428803 +step:8437 train loss:3.470823 +step:8438 train loss:3.449631 +step:8439 train loss:3.393156 +step:8440 train loss:3.360938 +step:8441 train loss:3.417553 +step:8442 train loss:3.444193 +step:8443 train loss:3.400798 +step:8444 train loss:3.432786 +step:8445 train loss:3.382893 +step:8446 train loss:3.434403 +step:8447 train loss:3.444181 +step:8448 train loss:3.427550 +step:8449 train loss:3.418537 +step:8450 train loss:3.411462 +step:8451 train loss:3.439239 +step:8452 train loss:3.413738 +step:8453 train loss:3.396745 
+step:8454 train loss:3.447453 +step:8455 train loss:3.516439 +step:8456 train loss:3.497002 +step:8457 train loss:3.547311 +step:8458 train loss:3.437905 +step:8459 train loss:3.443327 +step:8460 train loss:3.374921 +step:8461 train loss:3.531212 +step:8462 train loss:3.402184 +step:8463 train loss:3.434167 +step:8464 train loss:3.454162 +step:8465 train loss:3.460928 +step:8466 train loss:3.434898 +step:8467 train loss:3.435255 +step:8468 train loss:3.691895 +step:8469 train loss:3.398133 +step:8470 train loss:3.393511 +step:8471 train loss:3.433960 +step:8472 train loss:3.458387 +step:8473 train loss:3.409520 +step:8474 train loss:3.537208 +step:8475 train loss:3.491578 +step:8476 train loss:3.441753 +step:8477 train loss:3.430579 +step:8478 train loss:3.413121 +step:8479 train loss:3.416674 +step:8480 train loss:3.505022 +step:8481 train loss:3.407979 +step:8482 train loss:3.406987 +step:8483 train loss:3.550880 +step:8484 train loss:3.433082 +step:8485 train loss:3.479648 +step:8486 train loss:3.392928 +step:8487 train loss:3.446941 +step:8488 train loss:3.391386 +step:8489 train loss:3.474056 +step:8490 train loss:3.459781 +step:8491 train loss:3.479088 +step:8492 train loss:3.432126 +step:8493 train loss:3.507580 +step:8494 train loss:3.368908 +step:8495 train loss:3.465854 +step:8496 train loss:3.409115 +step:8497 train loss:3.444586 +step:8498 train loss:3.461347 +step:8499 train loss:3.440439 +step:8500 validation loss:3.370867 total_sharp:1.5727e-03 L1_sharp:3.3459e-03 L2_sharp:5.2533e-04 L3_sharp:5.9262e-04 L4_sharp:2.1131e-04 L5_sharp:3.0074e-04 L6_sharp:3.8439e-04 L7_sharp:5.3212e-04 L8_sharp:4.0584e-04 L9_sharp:3.6181e-04 L10_sharp:2.0818e-04 L11_sharp:1.6591e-04 L12_sharp:2.9176e-04 total_fnorm:2.4032e+00 total_l1_linf:2.0553e+04 total_spectral:2.4032e+00 L1_fnorm:5.5171e-01 L2_fnorm:5.1499e-01 L3_fnorm:5.5026e-01 L4_fnorm:5.8342e-01 L5_fnorm:5.9058e-01 L6_fnorm:5.9758e-01 L7_fnorm:5.9379e-01 L8_fnorm:5.9016e-01 L9_fnorm:5.9243e-01 L10_fnorm:5.9292e-01 L11_fnorm:5.8618e-01 L12_fnorm:5.9713e-01 L1_l1linf:4.2400e-01 L2_l1linf:4.2519e-01 L3_l1linf:4.2384e-01 L4_l1linf:4.2026e-01 L5_l1linf:4.0145e-01 L6_l1linf:4.0979e-01 L7_l1linf:4.0212e-01 L8_l1linf:4.0655e-01 L9_l1linf:4.0124e-01 L10_l1linf:4.0086e-01 L11_l1linf:4.1636e-01 L12_l1linf:4.4596e-01 L1_spectral:1.2065e-02 L2_spectral:1.2038e-02 L3_spectral:1.2047e-02 L4_spectral:1.2046e-02 L5_spectral:1.2051e-02 L6_spectral:1.2054e-02 L7_spectral:1.2045e-02 L8_spectral:1.2048e-02 L9_spectral:1.2050e-02 L10_spectral:1.2048e-02 L11_spectral:1.2044e-02 L12_spectral:1.2047e-02 ip_v_neg_g:3.8989e-03 cos_v_neg_g:1.4924e-03 v_norm:2.4032e+00 g_norm:1.0871e+00 hv_norm:3.2985e-01 cos_v_hv:1.1458e-02 hg_norm:2.3131e+01 cos_g_hg:4.4166e-01 v_par:4.1232e-04 v_perp:2.4032e+00 L1_cos_v_neg_g:1.9792e-03 L1_v_norm:5.5171e-01 L2_cos_v_neg_g:1.2446e-03 L2_v_norm:5.1499e-01 L3_cos_v_neg_g:2.1636e-03 L3_v_norm:5.5026e-01 L4_cos_v_neg_g:2.1319e-03 L4_v_norm:5.8342e-01 L5_cos_v_neg_g:2.2861e-03 L5_v_norm:5.9058e-01 L6_cos_v_neg_g:2.6613e-03 L6_v_norm:5.9758e-01 L7_cos_v_neg_g:3.4892e-03 L7_v_norm:5.9379e-01 L8_cos_v_neg_g:3.1650e-03 L8_v_norm:5.9016e-01 L9_cos_v_neg_g:3.3372e-03 L9_v_norm:5.9243e-01 L10_cos_v_neg_g:2.5715e-03 L10_v_norm:5.9292e-01 L11_cos_v_neg_g:1.8833e-03 L11_v_norm:5.8618e-01 L12_cos_v_neg_g:1.5785e-03 L12_v_norm:5.9713e-01 +step:8500 train loss:3.429768 +step:8501 train loss:3.655210 +step:8502 train loss:3.665971 +step:8503 train loss:3.427185 +step:8504 train loss:3.420120 +step:8505 train loss:3.400844 +step:8506 train 
loss:3.472067 +step:8507 train loss:3.411301 +step:8508 train loss:3.444171 +step:8509 train loss:3.381634 +step:8510 train loss:3.407030 +step:8511 train loss:3.364662 +step:8512 train loss:3.462386 +step:8513 train loss:3.466709 +step:8514 train loss:3.417968 +step:8515 train loss:3.507236 +step:8516 train loss:3.425510 +step:8517 train loss:3.446276 +step:8518 train loss:3.337988 +step:8519 train loss:3.431079 +step:8520 train loss:3.399030 +step:8521 train loss:3.436552 +step:8522 train loss:3.330518 +step:8523 train loss:3.426999 +step:8524 train loss:3.417561 +step:8525 train loss:3.486120 +step:8526 train loss:3.464637 +step:8527 train loss:3.406005 +step:8528 train loss:3.490865 +step:8529 train loss:3.448699 +step:8530 train loss:3.479795 +step:8531 train loss:3.469256 +step:8532 train loss:3.509541 +step:8533 train loss:3.461917 +step:8534 train loss:3.457520 +step:8535 train loss:3.431330 +step:8536 train loss:3.519542 +step:8537 train loss:3.433381 +step:8538 train loss:3.505190 +step:8539 train loss:3.425784 +step:8540 train loss:3.453226 +step:8541 train loss:3.392186 +step:8542 train loss:3.459676 +step:8543 train loss:3.373679 +step:8544 train loss:3.373783 +step:8545 train loss:3.419062 +step:8546 train loss:3.376007 +step:8547 train loss:3.426833 +step:8548 train loss:3.400843 +step:8549 train loss:3.441792 +step:8550 train loss:3.394456 +step:8551 train loss:3.443332 +step:8552 train loss:3.444102 +step:8553 train loss:3.449977 +step:8554 train loss:3.421705 +step:8555 train loss:3.438044 +step:8556 train loss:3.512784 +step:8557 train loss:3.413644 +step:8558 train loss:3.447611 +step:8559 train loss:3.440608 +step:8560 train loss:3.421432 +step:8561 train loss:3.378910 +step:8562 train loss:3.403621 +step:8563 train loss:3.403295 +step:8564 train loss:3.473883 +step:8565 train loss:3.448778 +step:8566 train loss:3.471117 +step:8567 train loss:3.412852 +step:8568 train loss:3.433177 +step:8569 train loss:3.438588 +step:8570 train loss:3.386054 +step:8571 train loss:3.427696 +step:8572 train loss:3.444239 +step:8573 train loss:3.517314 +step:8574 train loss:3.448645 +step:8575 train loss:3.445583 +step:8576 train loss:3.481746 +step:8577 train loss:3.563651 +step:8578 train loss:3.474719 +step:8579 train loss:3.459401 +step:8580 train loss:3.390309 +step:8581 train loss:3.433058 +step:8582 train loss:3.440078 +step:8583 train loss:3.435569 +step:8584 train loss:3.426995 +step:8585 train loss:3.511730 +step:8586 train loss:3.424905 +step:8587 train loss:3.436612 +step:8588 train loss:3.485035 +step:8589 train loss:3.432030 +step:8590 train loss:3.424875 +step:8591 train loss:3.427076 +step:8592 train loss:3.383353 +step:8593 train loss:3.462298 +step:8594 train loss:3.488090 +step:8595 train loss:3.405017 +step:8596 train loss:3.453427 +step:8597 train loss:3.415454 +step:8598 train loss:3.468468 +step:8599 train loss:3.433966 +step:8600 train loss:3.442240 +step:8601 train loss:3.432270 +step:8602 train loss:3.404529 +step:8603 train loss:3.463222 +step:8604 train loss:3.407102 +step:8605 train loss:3.422043 +step:8606 train loss:3.431336 +step:8607 train loss:3.442997 +step:8608 train loss:3.483520 +step:8609 train loss:3.381727 +step:8610 train loss:3.454484 +step:8611 train loss:3.384629 +step:8612 train loss:3.464247 +step:8613 train loss:3.397305 +step:8614 train loss:3.458928 +step:8615 train loss:3.501365 +step:8616 train loss:3.384579 +step:8617 train loss:3.450793 +step:8618 train loss:3.426952 +step:8619 train loss:3.381692 +step:8620 train loss:3.424063 
+step:8621 train loss:3.457332 +step:8622 train loss:3.412294 +step:8623 train loss:3.429042 +step:8624 train loss:3.498623 +step:8625 train loss:3.422719 +step:8626 train loss:3.429204 +step:8627 train loss:3.426593 +step:8628 train loss:3.460944 +step:8629 train loss:3.368420 +step:8630 train loss:3.469820 +step:8631 train loss:3.408370 +step:8632 train loss:3.466283 +step:8633 train loss:3.412693 +step:8634 train loss:3.644331 +step:8635 train loss:3.441580 +step:8636 train loss:3.485996 +step:8637 train loss:3.411296 +step:8638 train loss:3.413896 +step:8639 train loss:3.468957 +step:8640 train loss:3.382318 +step:8641 train loss:3.480258 +step:8642 train loss:3.433944 +step:8643 train loss:3.540578 +step:8644 train loss:3.384122 +step:8645 train loss:3.458879 +step:8646 train loss:3.418257 +step:8647 train loss:3.446563 +step:8648 train loss:3.393242 +step:8649 train loss:3.478137 +step:8650 train loss:3.431014 +step:8651 train loss:3.445092 +step:8652 train loss:3.411116 +step:8653 train loss:3.442566 +step:8654 train loss:3.487073 +step:8655 train loss:3.416780 +step:8656 train loss:3.460711 +step:8657 train loss:3.461403 +step:8658 train loss:3.433082 +step:8659 train loss:3.425039 +step:8660 train loss:3.372230 +step:8661 train loss:3.430209 +step:8662 train loss:3.374145 +step:8663 train loss:3.447320 +step:8664 train loss:3.360094 +step:8665 train loss:3.384252 +step:8666 train loss:3.459140 +step:8667 train loss:3.352570 +step:8668 train loss:3.459041 +step:8669 train loss:3.500483 +step:8670 train loss:3.396658 +step:8671 train loss:3.395899 +step:8672 train loss:3.615726 +step:8673 train loss:3.377516 +step:8674 train loss:3.448810 +step:8675 train loss:3.485645 +step:8676 train loss:3.432410 +step:8677 train loss:3.455230 +step:8678 train loss:3.404366 +step:8679 train loss:3.458827 +step:8680 train loss:3.439209 +step:8681 train loss:3.444391 +step:8682 train loss:3.397232 +step:8683 train loss:3.416107 +step:8684 train loss:3.488028 +step:8685 train loss:3.433804 +step:8686 train loss:3.425478 +step:8687 train loss:3.380001 +step:8688 train loss:3.398908 +step:8689 train loss:3.464839 +step:8690 train loss:3.405721 +step:8691 train loss:3.482848 +step:8692 train loss:3.372528 +step:8693 train loss:3.459756 +step:8694 train loss:3.461965 +step:8695 train loss:3.446406 +step:8696 train loss:3.471792 +step:8697 train loss:3.427597 +step:8698 train loss:3.462142 +step:8699 train loss:3.415555 +step:8700 train loss:3.440604 +step:8701 train loss:3.405697 +step:8702 train loss:3.390472 +step:8703 train loss:3.403067 +step:8704 train loss:3.359059 +step:8705 train loss:3.438246 +step:8706 train loss:3.460610 +step:8707 train loss:3.456785 +step:8708 train loss:3.400804 +step:8709 train loss:3.462177 +step:8710 train loss:3.391249 +step:8711 train loss:3.443830 +step:8712 train loss:3.353531 +step:8713 train loss:3.430698 +step:8714 train loss:3.536697 +step:8715 train loss:3.395442 +step:8716 train loss:3.445276 +step:8717 train loss:3.418069 +step:8718 train loss:3.456257 +step:8719 train loss:3.424592 +step:8720 train loss:3.534223 +step:8721 train loss:3.425317 +step:8722 train loss:3.522347 +step:8723 train loss:3.389748 +step:8724 train loss:3.403665 +step:8725 train loss:3.429199 +step:8726 train loss:3.384398 +step:8727 train loss:3.463107 +step:8728 train loss:3.419904 +step:8729 train loss:3.424771 +step:8730 train loss:3.403088 +step:8731 train loss:3.405416 +step:8732 train loss:3.508287 +step:8733 train loss:3.428091 +step:8734 train loss:3.469938 +step:8735 train 
loss:3.536666 +step:8736 train loss:3.396816 +step:8737 train loss:3.422439 +step:8738 train loss:3.401803 +step:8739 train loss:3.464207 +step:8740 train loss:3.384180 +step:8741 train loss:3.438284 +step:8742 train loss:3.394810 +step:8743 train loss:3.433596 +step:8744 train loss:3.455105 +step:8745 train loss:3.494835 +step:8746 train loss:3.394238 +step:8747 train loss:3.500495 +step:8748 train loss:3.408586 +step:8749 train loss:3.445318 +step:8750 validation loss:3.363908 +step:8750 train loss:3.453689 +step:8751 train loss:3.497607 +step:8752 train loss:3.353892 +step:8753 train loss:3.402721 +step:8754 train loss:3.453451 +step:8755 train loss:3.436820 +step:8756 train loss:3.480411 +step:8757 train loss:3.390760 +step:8758 train loss:3.549663 +step:8759 train loss:3.394741 +step:8760 train loss:3.428930 +step:8761 train loss:3.504274 +step:8762 train loss:3.401593 +step:8763 train loss:3.373510 +step:8764 train loss:3.445803 +step:8765 train loss:3.513709 +step:8766 train loss:3.444836 +step:8767 train loss:3.401859 +step:8768 train loss:3.443558 +step:8769 train loss:3.416202 +step:8770 train loss:3.462263 +step:8771 train loss:3.433820 +step:8772 train loss:3.455543 +step:8773 train loss:3.415295 +step:8774 train loss:3.448038 +step:8775 train loss:3.446400 +step:8776 train loss:3.393317 +step:8777 train loss:3.428782 +step:8778 train loss:3.438089 +step:8779 train loss:3.460024 +step:8780 train loss:3.421007 +step:8781 train loss:3.427017 +step:8782 train loss:3.447520 +step:8783 train loss:3.428349 +step:8784 train loss:3.451261 +step:8785 train loss:3.439405 +step:8786 train loss:3.518178 +step:8787 train loss:3.458391 +step:8788 train loss:3.359931 +step:8789 train loss:3.459249 +step:8790 train loss:3.388793 +step:8791 train loss:3.440389 +step:8792 train loss:3.377193 +step:8793 train loss:3.467764 +step:8794 train loss:3.386606 +step:8795 train loss:3.461001 +step:8796 train loss:3.603199 +step:8797 train loss:3.351031 +step:8798 train loss:3.504664 +step:8799 train loss:3.424915 +step:8800 train loss:3.419796 +step:8801 train loss:3.439292 +step:8802 train loss:3.497321 +step:8803 train loss:3.454872 +step:8804 train loss:3.438141 +step:8805 train loss:3.455371 +step:8806 train loss:3.426686 +step:8807 train loss:3.417265 +step:8808 train loss:3.371960 +step:8809 train loss:3.500599 +step:8810 train loss:3.398160 +step:8811 train loss:3.389578 +step:8812 train loss:3.433410 +step:8813 train loss:3.342361 +step:8814 train loss:3.530689 +step:8815 train loss:3.375586 +step:8816 train loss:3.495549 +step:8817 train loss:3.429594 +step:8818 train loss:3.364004 +step:8819 train loss:3.480610 +step:8820 train loss:3.408710 +step:8821 train loss:3.436341 +step:8822 train loss:3.415707 +step:8823 train loss:3.434364 +step:8824 train loss:3.490727 +step:8825 train loss:3.469362 +step:8826 train loss:3.439327 +step:8827 train loss:3.400769 +step:8828 train loss:3.442163 +step:8829 train loss:3.420408 +step:8830 train loss:3.398174 +step:8831 train loss:3.475893 +step:8832 train loss:3.411370 +step:8833 train loss:3.445400 +step:8834 train loss:3.408888 +step:8835 train loss:3.348347 +step:8836 train loss:3.476021 +step:8837 train loss:3.379893 +step:8838 train loss:3.423083 +step:8839 train loss:3.406385 +step:8840 train loss:3.412600 +step:8841 train loss:3.423700 +step:8842 train loss:3.437627 +step:8843 train loss:3.446473 +step:8844 train loss:3.411607 +step:8845 train loss:3.433425 +step:8846 train loss:3.400597 +step:8847 train loss:3.438793 +step:8848 train loss:3.484527 
+step:8849 train loss:3.465268 +step:8850 train loss:3.459452 +step:8851 train loss:3.340294 +step:8852 train loss:3.443593 +step:8853 train loss:3.424336 +step:8854 train loss:3.395638 +step:8855 train loss:3.464795 +step:8856 train loss:3.457856 +step:8857 train loss:3.522563 +step:8858 train loss:3.389731 +step:8859 train loss:3.464230 +step:8860 train loss:3.419675 +step:8861 train loss:3.401924 +step:8862 train loss:3.401298 +step:8863 train loss:3.385639 +step:8864 train loss:3.456193 +step:8865 train loss:3.446716 +step:8866 train loss:3.332175 +step:8867 train loss:3.432912 +step:8868 train loss:3.461390 +step:8869 train loss:3.546941 +step:8870 train loss:3.424128 +step:8871 train loss:3.448000 +step:8872 train loss:3.432017 +step:8873 train loss:3.429888 +step:8874 train loss:3.487534 +step:8875 train loss:3.417126 +step:8876 train loss:3.457130 +step:8877 train loss:3.439057 +step:8878 train loss:3.489877 +step:8879 train loss:3.448663 +step:8880 train loss:3.397806 +step:8881 train loss:3.362684 +step:8882 train loss:3.432861 +step:8883 train loss:3.418993 +step:8884 train loss:3.509576 +step:8885 train loss:3.440853 +step:8886 train loss:3.446027 +step:8887 train loss:3.471826 +step:8888 train loss:3.431633 +step:8889 train loss:3.434196 +step:8890 train loss:3.424384 +step:8891 train loss:3.398216 +step:8892 train loss:3.481268 +step:8893 train loss:3.421467 +step:8894 train loss:3.442489 +step:8895 train loss:3.468695 +step:8896 train loss:3.387280 +step:8897 train loss:3.476781 +step:8898 train loss:3.411181 +step:8899 train loss:3.432192 +step:8900 train loss:3.399119 +step:8901 train loss:3.414537 +step:8902 train loss:3.454835 +step:8903 train loss:3.393915 +step:8904 train loss:3.446761 +step:8905 train loss:3.422699 +step:8906 train loss:3.411413 +step:8907 train loss:3.420340 +step:8908 train loss:3.490435 +step:8909 train loss:3.429163 +step:8910 train loss:3.394434 +step:8911 train loss:3.491182 +step:8912 train loss:3.386995 +step:8913 train loss:3.397780 +step:8914 train loss:3.489947 +step:8915 train loss:3.435520 +step:8916 train loss:3.464098 +step:8917 train loss:3.420979 +step:8918 train loss:3.424901 +step:8919 train loss:3.417086 +step:8920 train loss:3.439741 +step:8921 train loss:3.436585 +step:8922 train loss:3.416232 +step:8923 train loss:3.600824 +step:8924 train loss:3.493689 +step:8925 train loss:3.424417 +step:8926 train loss:3.434935 +step:8927 train loss:3.465849 +step:8928 train loss:3.417624 +step:8929 train loss:3.417296 +step:8930 train loss:3.471390 +step:8931 train loss:3.377292 +step:8932 train loss:3.482980 +step:8933 train loss:3.392328 +step:8934 train loss:3.430616 +step:8935 train loss:3.446101 +step:8936 train loss:3.480435 +step:8937 train loss:3.474141 +step:8938 train loss:3.416613 +step:8939 train loss:3.483899 +step:8940 train loss:3.436648 +step:8941 train loss:3.381097 +step:8942 train loss:3.461139 +step:8943 train loss:3.390865 +step:8944 train loss:3.443033 +step:8945 train loss:3.459378 +step:8946 train loss:3.312682 +step:8947 train loss:3.494065 +step:8948 train loss:3.344476 +step:8949 train loss:3.347717 +step:8950 train loss:3.389746 +step:8951 train loss:3.429018 +step:8952 train loss:3.446231 +step:8953 train loss:3.402798 +step:8954 train loss:3.508115 +step:8955 train loss:3.423541 +step:8956 train loss:3.451668 +step:8957 train loss:3.441277 +step:8958 train loss:3.418927 +step:8959 train loss:3.410657 +step:8960 train loss:3.376518 +step:8961 train loss:3.400824 +step:8962 train loss:3.450089 +step:8963 train 
loss:3.428754 +step:8964 train loss:3.413306 +step:8965 train loss:3.454500 +step:8966 train loss:3.416276 +step:8967 train loss:3.393419 +step:8968 train loss:3.377704 +step:8969 train loss:3.368272 +step:8970 train loss:3.445204 +step:8971 train loss:3.394557 +step:8972 train loss:3.598654 +step:8973 train loss:3.484725 +step:8974 train loss:3.440599 +step:8975 train loss:3.441547 +step:8976 train loss:3.405424 +step:8977 train loss:3.491263 +step:8978 train loss:3.474128 +step:8979 train loss:3.391333 +step:8980 train loss:3.489491 +step:8981 train loss:3.440774 +step:8982 train loss:3.414804 +step:8983 train loss:3.356893 +step:8984 train loss:3.482148 +step:8985 train loss:3.400183 +step:8986 train loss:3.436335 +step:8987 train loss:3.409637 +step:8988 train loss:3.460476 +step:8989 train loss:3.369406 +step:8990 train loss:3.511048 +step:8991 train loss:3.362406 +step:8992 train loss:3.417531 +step:8993 train loss:3.509524 +step:8994 train loss:3.414932 +step:8995 train loss:3.439141 +step:8996 train loss:3.410283 +step:8997 train loss:3.359438 +step:8998 train loss:3.361071 +step:8999 train loss:3.387639 +step:9000 validation loss:3.361893 total_sharp:1.5940e-03 L1_sharp:3.0432e-03 L2_sharp:3.5748e-04 L3_sharp:5.3389e-04 L4_sharp:3.2647e-04 L5_sharp:2.8101e-04 L6_sharp:4.3020e-04 L7_sharp:5.1634e-04 L8_sharp:3.9117e-04 L9_sharp:2.8355e-04 L10_sharp:1.6195e-04 L11_sharp:1.5948e-04 L12_sharp:2.7287e-04 total_fnorm:2.4275e+00 total_l1_linf:2.0772e+04 total_spectral:2.4275e+00 L1_fnorm:5.7104e-01 L2_fnorm:5.2622e-01 L3_fnorm:5.6088e-01 L4_fnorm:5.8659e-01 L5_fnorm:5.9470e-01 L6_fnorm:5.9831e-01 L7_fnorm:5.9666e-01 L8_fnorm:5.9476e-01 L9_fnorm:5.9715e-01 L10_fnorm:6.0157e-01 L11_fnorm:5.9857e-01 L12_fnorm:6.0185e-01 L1_l1linf:4.2011e-01 L2_l1linf:4.6018e-01 L3_l1linf:4.5403e-01 L4_l1linf:4.2458e-01 L5_l1linf:4.1449e-01 L6_l1linf:4.1552e-01 L7_l1linf:4.1513e-01 L8_l1linf:4.1015e-01 L9_l1linf:4.0425e-01 L10_l1linf:4.1551e-01 L11_l1linf:4.1999e-01 L12_l1linf:4.3177e-01 L1_spectral:1.2056e-02 L2_spectral:1.2039e-02 L3_spectral:1.2049e-02 L4_spectral:1.2051e-02 L5_spectral:1.2054e-02 L6_spectral:1.2046e-02 L7_spectral:1.2044e-02 L8_spectral:1.2047e-02 L9_spectral:1.2044e-02 L10_spectral:1.2046e-02 L11_spectral:1.2044e-02 L12_spectral:1.2044e-02 ip_v_neg_g:4.8613e-03 cos_v_neg_g:1.6988e-03 v_norm:2.4275e+00 g_norm:1.1788e+00 hv_norm:3.6848e-01 cos_v_hv:1.0501e-02 hg_norm:2.5688e+01 cos_g_hg:5.7117e-01 v_par:8.0207e-04 v_perp:2.4275e+00 L1_cos_v_neg_g:2.4366e-03 L1_v_norm:5.7104e-01 L2_cos_v_neg_g:3.6411e-03 L2_v_norm:5.2622e-01 L3_cos_v_neg_g:3.6674e-03 L3_v_norm:5.6088e-01 L4_cos_v_neg_g:4.6289e-04 L4_v_norm:5.8659e-01 L5_cos_v_neg_g:1.7224e-03 L5_v_norm:5.9470e-01 L6_cos_v_neg_g:1.6812e-03 L6_v_norm:5.9831e-01 L7_cos_v_neg_g:1.5769e-03 L7_v_norm:5.9666e-01 L8_cos_v_neg_g:1.6061e-03 L8_v_norm:5.9476e-01 L9_cos_v_neg_g:3.0483e-03 L9_v_norm:5.9715e-01 L10_cos_v_neg_g:3.0181e-03 L10_v_norm:6.0157e-01 L11_cos_v_neg_g:2.9095e-03 L11_v_norm:5.9857e-01 L12_cos_v_neg_g:4.3571e-03 L12_v_norm:6.0185e-01 +step:9000 train loss:3.472249 +step:9001 train loss:3.440409 +step:9002 train loss:3.448871 +step:9003 train loss:3.387487 +step:9004 train loss:3.388961 +step:9005 train loss:3.401133 +step:9006 train loss:3.402678 +step:9007 train loss:3.423450 +step:9008 train loss:3.378010 +step:9009 train loss:3.372729 +step:9010 train loss:3.411383 +step:9011 train loss:3.405256 +step:9012 train loss:3.520933 +step:9013 train loss:3.347201 +step:9014 train loss:3.419628 +step:9015 train loss:3.415576 +step:9016 
train loss:3.495240 +step:9017 train loss:3.435698 +step:9018 train loss:3.357687 +step:9019 train loss:3.446406 +step:9020 train loss:3.450355 +step:9021 train loss:3.410993 +step:9022 train loss:3.421611 +step:9023 train loss:3.416825 +step:9024 train loss:3.441885 +step:9025 train loss:3.421379 +step:9026 train loss:3.381011 +step:9027 train loss:3.426171 +step:9028 train loss:3.446971 +step:9029 train loss:3.464834 +step:9030 train loss:3.463520 +step:9031 train loss:3.426826 +step:9032 train loss:3.439999 +step:9033 train loss:3.424518 +step:9034 train loss:3.433558 +step:9035 train loss:3.435930 +step:9036 train loss:3.386774 +step:9037 train loss:3.379989 +step:9038 train loss:3.505715 +step:9039 train loss:3.408073 +step:9040 train loss:3.423387 +step:9041 train loss:3.470454 +step:9042 train loss:3.328966 +step:9043 train loss:3.424597 +step:9044 train loss:3.440907 +step:9045 train loss:3.387793 +step:9046 train loss:3.432787 +step:9047 train loss:3.424205 +step:9048 train loss:3.404993 +step:9049 train loss:3.439324 +step:9050 train loss:3.393853 +step:9051 train loss:3.430610 +step:9052 train loss:3.359729 +step:9053 train loss:3.489509 +step:9054 train loss:3.497464 +step:9055 train loss:3.422223 +step:9056 train loss:3.481311 +step:9057 train loss:3.337823 +step:9058 train loss:3.422418 +step:9059 train loss:3.498684 +step:9060 train loss:3.428323 +step:9061 train loss:3.456638 +step:9062 train loss:3.386290 +step:9063 train loss:3.522165 +step:9064 train loss:3.412617 +step:9065 train loss:3.415991 +step:9066 train loss:3.435401 +step:9067 train loss:3.399265 +step:9068 train loss:3.473198 +step:9069 train loss:3.428303 +step:9070 train loss:3.479687 +step:9071 train loss:3.416692 +step:9072 train loss:3.434044 +step:9073 train loss:3.393917 +step:9074 train loss:3.477320 +step:9075 train loss:3.421164 +step:9076 train loss:3.388078 +step:9077 train loss:3.464154 +step:9078 train loss:3.403436 +step:9079 train loss:3.452451 +step:9080 train loss:3.383044 +step:9081 train loss:3.420473 +step:9082 train loss:3.450010 +step:9083 train loss:3.475252 +step:9084 train loss:3.369608 +step:9085 train loss:3.438241 +step:9086 train loss:3.419968 +step:9087 train loss:3.371559 +step:9088 train loss:3.431822 +step:9089 train loss:3.445071 +step:9090 train loss:3.379307 +step:9091 train loss:3.480297 +step:9092 train loss:3.405718 +step:9093 train loss:3.406564 +step:9094 train loss:3.533086 +step:9095 train loss:3.399690 +step:9096 train loss:3.415298 +step:9097 train loss:3.405523 +step:9098 train loss:3.394031 +step:9099 train loss:3.517245 +step:9100 train loss:3.548895 +step:9101 train loss:3.468381 +step:9102 train loss:3.409893 +step:9103 train loss:3.417933 +step:9104 train loss:3.502422 +step:9105 train loss:3.364290 +step:9106 train loss:3.491184 +step:9107 train loss:3.426949 +step:9108 train loss:3.407782 +step:9109 train loss:3.436229 +step:9110 train loss:3.435247 +step:9111 train loss:3.417972 +step:9112 train loss:3.417636 +step:9113 train loss:3.466274 +step:9114 train loss:3.393554 +step:9115 train loss:3.421254 +step:9116 train loss:3.450638 +step:9117 train loss:3.459043 +step:9118 train loss:3.426218 +step:9119 train loss:3.349596 +step:9120 train loss:3.451881 +step:9121 train loss:3.478956 +step:9122 train loss:3.425633 +step:9123 train loss:3.445368 +step:9124 train loss:3.474322 +step:9125 train loss:3.427239 +step:9126 train loss:3.404633 +step:9127 train loss:3.434699 +step:9128 train loss:3.490313 +step:9129 train loss:3.446741 +step:9130 train loss:3.457757 
+step:9131 train loss:3.439466 +step:9132 train loss:3.446016 +step:9133 train loss:3.437668 +step:9134 train loss:3.407913 +step:9135 train loss:3.438076 +step:9136 train loss:3.436121 +step:9137 train loss:3.487990 +step:9138 train loss:3.402478 +step:9139 train loss:3.481743 +step:9140 train loss:3.403809 +step:9141 train loss:3.381995 +step:9142 train loss:3.559701 +step:9143 train loss:3.386849 +step:9144 train loss:3.482625 +step:9145 train loss:3.487318 +step:9146 train loss:3.402243 +step:9147 train loss:3.476005 +step:9148 train loss:3.494634 +step:9149 train loss:3.404286 +step:9150 train loss:3.428978 +step:9151 train loss:3.487440 +step:9152 train loss:3.445052 +step:9153 train loss:3.413239 +step:9154 train loss:3.427620 +step:9155 train loss:3.388451 +step:9156 train loss:3.393098 +step:9157 train loss:3.410884 +step:9158 train loss:3.391195 +step:9159 train loss:3.483634 +step:9160 train loss:3.365475 +step:9161 train loss:3.395279 +step:9162 train loss:3.478984 +step:9163 train loss:3.424524 +step:9164 train loss:3.397622 +step:9165 train loss:3.390178 +step:9166 train loss:3.449615 +step:9167 train loss:3.393365 +step:9168 train loss:3.433420 +step:9169 train loss:3.374437 +step:9170 train loss:3.392102 +step:9171 train loss:3.457679 +step:9172 train loss:3.382099 +step:9173 train loss:3.501047 +step:9174 train loss:3.430659 +step:9175 train loss:3.409524 +step:9176 train loss:3.391729 +step:9177 train loss:3.434825 +step:9178 train loss:3.381429 +step:9179 train loss:3.342599 +step:9180 train loss:3.435897 +step:9181 train loss:3.447205 +step:9182 train loss:3.416359 +step:9183 train loss:3.423289 +step:9184 train loss:3.419555 +step:9185 train loss:3.428594 +step:9186 train loss:3.393145 +step:9187 train loss:3.466630 +step:9188 train loss:3.504920 +step:9189 train loss:3.427480 +step:9190 train loss:3.433612 +step:9191 train loss:3.422649 +step:9192 train loss:3.439051 +step:9193 train loss:3.436489 +step:9194 train loss:3.374197 +step:9195 train loss:3.367186 +step:9196 train loss:3.415689 +step:9197 train loss:3.373228 +step:9198 train loss:3.445552 +step:9199 train loss:3.395865 +step:9200 train loss:3.420510 +step:9201 train loss:3.455676 +step:9202 train loss:3.444041 +step:9203 train loss:3.401644 +step:9204 train loss:3.599050 +step:9205 train loss:3.513301 +step:9206 train loss:3.427879 +step:9207 train loss:3.479726 +step:9208 train loss:3.455686 +step:9209 train loss:3.476494 +step:9210 train loss:3.369179 +step:9211 train loss:3.400228 +step:9212 train loss:3.397467 +step:9213 train loss:3.457999 +step:9214 train loss:3.400798 +step:9215 train loss:3.469441 +step:9216 train loss:3.429472 +step:9217 train loss:3.370094 +step:9218 train loss:3.461200 +step:9219 train loss:3.421744 +step:9220 train loss:3.467310 +step:9221 train loss:3.517923 +step:9222 train loss:3.464406 +step:9223 train loss:3.635229 +step:9224 train loss:3.468097 +step:9225 train loss:3.402245 +step:9226 train loss:3.421396 +step:9227 train loss:3.435013 +step:9228 train loss:3.438394 +step:9229 train loss:3.392824 +step:9230 train loss:3.455894 +step:9231 train loss:3.342734 +step:9232 train loss:3.401386 +step:9233 train loss:3.420700 +step:9234 train loss:3.475551 +step:9235 train loss:3.480778 +step:9236 train loss:3.387085 +step:9237 train loss:3.450633 +step:9238 train loss:3.423561 +step:9239 train loss:3.415668 +step:9240 train loss:3.385245 +step:9241 train loss:3.414007 +step:9242 train loss:3.426772 +step:9243 train loss:3.425152 +step:9244 train loss:3.396337 +step:9245 train 
loss:3.403562 +step:9246 train loss:3.407268 +step:9247 train loss:3.411876 +step:9248 train loss:3.425136 +step:9249 train loss:3.422893 +step:9250 validation loss:3.357676 +step:9250 train loss:3.461107 +step:9251 train loss:3.405378 +step:9252 train loss:3.472134 +step:9253 train loss:3.464712 +step:9254 train loss:3.394662 +step:9255 train loss:3.513946 +step:9256 train loss:3.391842 +step:9257 train loss:3.335642 +step:9258 train loss:3.417561 +step:9259 train loss:3.416498 +step:9260 train loss:3.516294 +step:9261 train loss:3.394405 +step:9262 train loss:3.465883 +step:9263 train loss:3.368023 +step:9264 train loss:3.512434 +step:9265 train loss:3.540991 +step:9266 train loss:3.470933 +step:9267 train loss:3.418105 +step:9268 train loss:3.410955 +step:9269 train loss:3.438638 +step:9270 train loss:3.360115 +step:9271 train loss:3.471632 +step:9272 train loss:3.412350 +step:9273 train loss:3.433863 +step:9274 train loss:3.435000 +step:9275 train loss:3.430183 +step:9276 train loss:3.458639 +step:9277 train loss:3.432319 +step:9278 train loss:3.444516 +step:9279 train loss:3.442122 +step:9280 train loss:3.438545 +step:9281 train loss:3.417416 +step:9282 train loss:3.534868 +step:9283 train loss:3.414973 +step:9284 train loss:3.381393 +step:9285 train loss:3.405220 +step:9286 train loss:3.453674 +step:9287 train loss:3.428918 +step:9288 train loss:3.434157 +step:9289 train loss:3.403033 +step:9290 train loss:3.431687 +step:9291 train loss:3.410826 +step:9292 train loss:3.446340 +step:9293 train loss:3.504681 +step:9294 train loss:3.427411 +step:9295 train loss:3.412491 +step:9296 train loss:3.365766 +step:9297 train loss:3.433212 +step:9298 train loss:3.374109 +step:9299 train loss:3.359035 +step:9300 train loss:3.464797 +step:9301 train loss:3.490323 +step:9302 train loss:3.428938 +step:9303 train loss:3.476598 +step:9304 train loss:3.395339 +step:9305 train loss:3.392991 +step:9306 train loss:3.392315 +step:9307 train loss:3.393725 +step:9308 train loss:3.366793 +step:9309 train loss:3.357430 +step:9310 train loss:3.411418 +step:9311 train loss:3.475971 +step:9312 train loss:3.424750 +step:9313 train loss:3.370717 +step:9314 train loss:3.399408 +step:9315 train loss:3.430120 +step:9316 train loss:3.416220 +step:9317 train loss:3.393491 +step:9318 train loss:3.480999 +step:9319 train loss:3.390737 +step:9320 train loss:3.414156 +step:9321 train loss:3.420862 +step:9322 train loss:3.431053 +step:9323 train loss:3.507869 +step:9324 train loss:3.447964 +step:9325 train loss:3.388678 +step:9326 train loss:3.465570 +step:9327 train loss:3.463274 +step:9328 train loss:3.461953 +step:9329 train loss:3.347644 +step:9330 train loss:3.520986 +step:9331 train loss:3.448552 +step:9332 train loss:3.473410 +step:9333 train loss:3.489070 +step:9334 train loss:3.424575 +step:9335 train loss:3.523335 +step:9336 train loss:3.480300 +step:9337 train loss:3.431948 +step:9338 train loss:3.489425 +step:9339 train loss:3.464076 +step:9340 train loss:3.426035 +step:9341 train loss:3.517425 +step:9342 train loss:3.409878 +step:9343 train loss:3.407757 +step:9344 train loss:3.407488 +step:9345 train loss:3.554091 +step:9346 train loss:3.387999 +step:9347 train loss:3.403350 +step:9348 train loss:3.427748 +step:9349 train loss:3.369786 +step:9350 train loss:3.449524 +step:9351 train loss:3.421063 +step:9352 train loss:3.409153 +step:9353 train loss:3.444944 +step:9354 train loss:3.408831 +step:9355 train loss:3.402010 +step:9356 train loss:3.455767 +step:9357 train loss:3.402725 +step:9358 train loss:3.438168 
+step:9359 train loss:3.381047 +step:9360 train loss:3.395686 +step:9361 train loss:3.396491 +step:9362 train loss:3.386017 +step:9363 train loss:3.447495 +step:9364 train loss:3.426196 +step:9365 train loss:3.432866 +step:9366 train loss:3.425405 +step:9367 train loss:3.439516 +step:9368 train loss:3.413961 +step:9369 train loss:3.411927 +step:9370 train loss:3.419464 +step:9371 train loss:3.445314 +step:9372 train loss:3.406769 +step:9373 train loss:3.390750 +step:9374 train loss:3.430271 +step:9375 train loss:3.439713 +step:9376 train loss:3.377366 +step:9377 train loss:3.453860 +step:9378 train loss:3.450842 +step:9379 train loss:3.479712 +step:9380 train loss:3.413455 +step:9381 train loss:3.418642 +step:9382 train loss:3.396641 +step:9383 train loss:3.391373 +step:9384 train loss:3.360234 +step:9385 train loss:3.435003 +step:9386 train loss:3.462345 +step:9387 train loss:3.439408 +step:9388 train loss:3.379633 +step:9389 train loss:3.395106 +step:9390 train loss:3.434706 +step:9391 train loss:3.441336 +step:9392 train loss:3.403422 +step:9393 train loss:3.396429 +step:9394 train loss:3.426557 +step:9395 train loss:3.420010 +step:9396 train loss:3.569232 +step:9397 train loss:3.457314 +step:9398 train loss:3.476021 +step:9399 train loss:3.430439 +step:9400 train loss:3.431848 +step:9401 train loss:3.422119 +step:9402 train loss:3.426625 +step:9403 train loss:3.356904 +step:9404 train loss:3.434359 +step:9405 train loss:3.395470 +step:9406 train loss:3.450068 +step:9407 train loss:3.386322 +step:9408 train loss:3.329275 +step:9409 train loss:3.391944 +step:9410 train loss:3.471848 +step:9411 train loss:3.432675 +step:9412 train loss:3.466058 +step:9413 train loss:3.480368 +step:9414 train loss:3.415279 +step:9415 train loss:3.414084 +step:9416 train loss:3.424521 +step:9417 train loss:3.381677 +step:9418 train loss:3.408002 +step:9419 train loss:3.374875 +step:9420 train loss:3.395814 +step:9421 train loss:3.445487 +step:9422 train loss:3.396178 +step:9423 train loss:3.460171 +step:9424 train loss:3.399734 +step:9425 train loss:3.439588 +step:9426 train loss:3.445747 +step:9427 train loss:3.419782 +step:9428 train loss:3.525431 +step:9429 train loss:3.415031 +step:9430 train loss:3.370598 +step:9431 train loss:3.461204 +step:9432 train loss:3.422501 +step:9433 train loss:3.461641 +step:9434 train loss:3.417068 +step:9435 train loss:3.438503 +step:9436 train loss:3.411592 +step:9437 train loss:3.423846 +step:9438 train loss:3.415961 +step:9439 train loss:3.417093 +step:9440 train loss:3.410158 +step:9441 train loss:3.417786 +step:9442 train loss:3.360190 +step:9443 train loss:3.412200 +step:9444 train loss:3.480983 +step:9445 train loss:3.412928 +step:9446 train loss:3.386698 +step:9447 train loss:3.454882 +step:9448 train loss:3.392390 +step:9449 train loss:3.414594 +step:9450 train loss:3.454381 +step:9451 train loss:3.371047 +step:9452 train loss:3.421305 +step:9453 train loss:3.402792 +step:9454 train loss:3.463067 +step:9455 train loss:3.445417 +step:9456 train loss:3.367652 +step:9457 train loss:3.416770 +step:9458 train loss:3.402385 +step:9459 train loss:3.397973 +step:9460 train loss:3.437669 +step:9461 train loss:3.465218 +step:9462 train loss:3.418528 +step:9463 train loss:3.445847 +step:9464 train loss:3.400141 +step:9465 train loss:3.491233 +step:9466 train loss:3.440117 +step:9467 train loss:3.461934 +step:9468 train loss:3.411198 +step:9469 train loss:3.397247 +step:9470 train loss:3.394832 +step:9471 train loss:3.437074 +step:9472 train loss:3.458619 +step:9473 train 
loss:3.449417 +step:9474 train loss:3.393416 +step:9475 train loss:3.386391 +step:9476 train loss:3.605127 +step:9477 train loss:3.477658 +step:9478 train loss:3.452648 +step:9479 train loss:3.551501 +step:9480 train loss:3.397353 +step:9481 train loss:3.434584 +step:9482 train loss:3.458579 +step:9483 train loss:3.415375 +step:9484 train loss:3.447754 +step:9485 train loss:3.364094 +step:9486 train loss:3.403549 +step:9487 train loss:3.436288 +step:9488 train loss:3.392773 +step:9489 train loss:3.435912 +step:9490 train loss:3.403469 +step:9491 train loss:3.446630 +step:9492 train loss:3.462344 +step:9493 train loss:3.436885 +step:9494 train loss:3.446145 +step:9495 train loss:3.398890 +step:9496 train loss:3.460264 +step:9497 train loss:3.475744 +step:9498 train loss:3.419010 +step:9499 train loss:3.471820 +step:9500 validation loss:3.359903 total_sharp:1.7728e-03 L1_sharp:2.9140e-03 L2_sharp:4.1703e-04 L3_sharp:1.1743e-03 L4_sharp:2.3982e-04 L5_sharp:3.5384e-04 L6_sharp:5.1087e-04 L7_sharp:7.1227e-04 L8_sharp:4.4653e-04 L9_sharp:3.6114e-04 L10_sharp:1.5465e-04 L11_sharp:1.3482e-04 L12_sharp:1.6525e-04 total_fnorm:2.4235e+00 total_l1_linf:2.0736e+04 total_spectral:2.4235e+00 L1_fnorm:5.7159e-01 L2_fnorm:5.2892e-01 L3_fnorm:5.5683e-01 L4_fnorm:5.8941e-01 L5_fnorm:5.9539e-01 L6_fnorm:5.9927e-01 L7_fnorm:5.9436e-01 L8_fnorm:5.9309e-01 L9_fnorm:5.9486e-01 L10_fnorm:6.0064e-01 L11_fnorm:5.9736e-01 L12_fnorm:6.0233e-01 L1_l1linf:4.1408e-01 L2_l1linf:4.5234e-01 L3_l1linf:4.2376e-01 L4_l1linf:4.2765e-01 L5_l1linf:4.1239e-01 L6_l1linf:4.1446e-01 L7_l1linf:4.0986e-01 L8_l1linf:4.0871e-01 L9_l1linf:4.0821e-01 L10_l1linf:4.0887e-01 L11_l1linf:4.1662e-01 L12_l1linf:4.4076e-01 L1_spectral:1.2055e-02 L2_spectral:1.2038e-02 L3_spectral:1.2049e-02 L4_spectral:1.2047e-02 L5_spectral:1.2049e-02 L6_spectral:1.2063e-02 L7_spectral:1.2046e-02 L8_spectral:1.2041e-02 L9_spectral:1.2048e-02 L10_spectral:1.2045e-02 L11_spectral:1.2043e-02 L12_spectral:1.2043e-02 ip_v_neg_g:6.6370e-03 cos_v_neg_g:2.4690e-03 v_norm:2.4235e+00 g_norm:1.1092e+00 hv_norm:4.1353e-01 cos_v_hv:1.0390e-02 hg_norm:1.7882e+01 cos_g_hg:5.7883e-01 v_par:6.0449e-04 v_perp:2.4235e+00 L1_cos_v_neg_g:3.1498e-03 L1_v_norm:5.7159e-01 L2_cos_v_neg_g:3.4619e-03 L2_v_norm:5.2892e-01 L3_cos_v_neg_g:4.6070e-03 L3_v_norm:5.5683e-01 L4_cos_v_neg_g:2.9037e-03 L4_v_norm:5.8941e-01 L5_cos_v_neg_g:3.7960e-03 L5_v_norm:5.9539e-01 L6_cos_v_neg_g:4.5367e-03 L6_v_norm:5.9927e-01 L7_cos_v_neg_g:5.8232e-03 L7_v_norm:5.9436e-01 L8_cos_v_neg_g:4.0676e-03 L8_v_norm:5.9309e-01 L9_cos_v_neg_g:3.8833e-03 L9_v_norm:5.9486e-01 L10_cos_v_neg_g:2.4721e-03 L10_v_norm:6.0064e-01 L11_cos_v_neg_g:2.2997e-03 L11_v_norm:5.9736e-01 L12_cos_v_neg_g:1.8406e-03 L12_v_norm:6.0233e-01 +step:9500 train loss:3.462353 +step:9501 train loss:3.440960 +step:9502 train loss:3.411783 +step:9503 train loss:3.429596 +step:9504 train loss:3.383884 +step:9505 train loss:3.408038 +step:9506 train loss:3.425041 +step:9507 train loss:3.407428 +step:9508 train loss:3.605307 +step:9509 train loss:3.422167 +step:9510 train loss:3.406564 +step:9511 train loss:3.434828 +step:9512 train loss:3.468629 +step:9513 train loss:3.454235 +step:9514 train loss:3.423282 +step:9515 train loss:3.324147 +step:9516 train loss:3.426331 +step:9517 train loss:3.460752 +step:9518 train loss:3.436729 +step:9519 train loss:3.447232 +step:9520 train loss:3.333696 +step:9521 train loss:3.329507 +step:9522 train loss:3.447608 +step:9523 train loss:3.442562 +step:9524 train loss:3.443641 +step:9525 train loss:3.490807 +step:9526 
train loss:3.503485 +step:9527 train loss:3.460768 +step:9528 train loss:3.393962 +step:9529 train loss:3.438993 +step:9530 train loss:3.485201 +step:9531 train loss:3.390608 +step:9532 train loss:3.443039 +step:9533 train loss:3.412833 +step:9534 train loss:3.494140 +step:9535 train loss:3.417170 +step:9536 train loss:3.395548 +step:9537 train loss:3.341646 +step:9538 train loss:3.360054 +step:9539 train loss:3.430090 +step:9540 train loss:3.348048 +step:9541 train loss:3.407603 +step:9542 train loss:3.536836 +step:9543 train loss:3.436189 +step:9544 train loss:3.475554 +step:9545 train loss:3.407541 +step:9546 train loss:3.433726 +step:9547 train loss:3.475826 +step:9548 train loss:3.417764 +step:9549 train loss:3.383238 +step:9550 train loss:3.413266 +step:9551 train loss:3.410649 +step:9552 train loss:3.435178 +step:9553 train loss:3.428226 +step:9554 train loss:3.474724 +step:9555 train loss:3.480308 +step:9556 train loss:3.387951 +step:9557 train loss:3.408325 +step:9558 train loss:3.470006 +step:9559 train loss:3.477490 +step:9560 train loss:3.392746 +step:9561 train loss:3.419198 +step:9562 train loss:3.456156 +step:9563 train loss:3.404849 +step:9564 train loss:3.439370 +step:9565 train loss:3.420530 +step:9566 train loss:3.386478 +step:9567 train loss:3.457545 +step:9568 train loss:3.425094 +step:9569 train loss:3.467697 +step:9570 train loss:3.361929 +step:9571 train loss:3.435456 +step:9572 train loss:3.380435 +step:9573 train loss:3.408727 +step:9574 train loss:3.387025 +step:9575 train loss:3.461070 +step:9576 train loss:3.350331 +step:9577 train loss:3.400868 +step:9578 train loss:3.404041 +step:9579 train loss:3.402675 +step:9580 train loss:3.465325 +step:9581 train loss:3.460663 +step:9582 train loss:3.425157 +step:9583 train loss:3.455575 +step:9584 train loss:3.391004 +step:9585 train loss:3.411220 +step:9586 train loss:3.468075 +step:9587 train loss:3.431287 +step:9588 train loss:3.416612 +step:9589 train loss:3.476156 +step:9590 train loss:3.440033 +step:9591 train loss:3.405851 +step:9592 train loss:3.427164 +step:9593 train loss:3.427782 +step:9594 train loss:3.443235 +step:9595 train loss:3.420997 +step:9596 train loss:3.504383 +step:9597 train loss:3.411644 +step:9598 train loss:3.372966 +step:9599 train loss:3.382310 +step:9600 train loss:3.465051 +step:9601 train loss:3.383995 +step:9602 train loss:3.468247 +step:9603 train loss:3.460411 +step:9604 train loss:3.342713 +step:9605 train loss:3.430606 +step:9606 train loss:3.486851 +step:9607 train loss:3.403679 +step:9608 train loss:3.414552 +step:9609 train loss:3.423174 +step:9610 train loss:3.464865 +step:9611 train loss:3.396330 +step:9612 train loss:3.406855 +step:9613 train loss:3.445371 +step:9614 train loss:3.414672 +step:9615 train loss:3.605413 +step:9616 train loss:3.417526 +step:9617 train loss:3.402397 +step:9618 train loss:3.357688 +step:9619 train loss:3.421650 +step:9620 train loss:3.475708 +step:9621 train loss:3.399190 +step:9622 train loss:3.414756 +step:9623 train loss:3.453119 +step:9624 train loss:3.438661 +step:9625 train loss:3.451773 +step:9626 train loss:3.425905 +step:9627 train loss:3.503673 +step:9628 train loss:3.470877 +step:9629 train loss:3.384000 +step:9630 train loss:3.443491 +step:9631 train loss:3.430038 +step:9632 train loss:3.398108 +step:9633 train loss:3.442118 +step:9634 train loss:3.510580 +step:9635 train loss:3.409874 +step:9636 train loss:3.356947 +step:9637 train loss:3.490786 +step:9638 train loss:3.372397 +step:9639 train loss:3.345107 +step:9640 train loss:3.466377 
+step:9641 train loss:3.438488 +step:9642 train loss:3.417187 +step:9643 train loss:3.418063 +step:9644 train loss:3.475336 +step:9645 train loss:3.400758 +step:9646 train loss:3.440320 +step:9647 train loss:3.451397 +step:9648 train loss:3.401054 +step:9649 train loss:3.371869 +step:9650 train loss:3.391415 +step:9651 train loss:3.480746 +step:9652 train loss:3.462560 +step:9653 train loss:3.404376 +step:9654 train loss:3.383627 +step:9655 train loss:3.381300 +step:9656 train loss:3.374948 +step:9657 train loss:3.400884 +step:9658 train loss:3.458159 +step:9659 train loss:3.568569 +step:9660 train loss:3.351086 +step:9661 train loss:3.370659 +step:9662 train loss:3.387936 +step:9663 train loss:3.432595 +step:9664 train loss:3.479944 +step:9665 train loss:3.326406 +step:9666 train loss:3.367735 +step:9667 train loss:3.503709 +step:9668 train loss:3.481790 +step:9669 train loss:3.502234 +step:9670 train loss:3.480864 +step:9671 train loss:3.482941 +step:9672 train loss:3.394771 +step:9673 train loss:3.417237 +step:9674 train loss:3.427955 +step:9675 train loss:3.425085 +step:9676 train loss:3.382756 +step:9677 train loss:3.390390 +step:9678 train loss:3.425208 +step:9679 train loss:3.417851 +step:9680 train loss:3.418199 +step:9681 train loss:3.403441 +step:9682 train loss:3.470147 +step:9683 train loss:3.444896 +step:9684 train loss:3.361018 +step:9685 train loss:3.448480 +step:9686 train loss:3.478585 +step:9687 train loss:3.387061 +step:9688 train loss:3.474275 +step:9689 train loss:3.575030 +step:9690 train loss:3.414324 +step:9691 train loss:3.402775 +step:9692 train loss:3.370046 +step:9693 train loss:3.362721 +step:9694 train loss:3.383214 +step:9695 train loss:3.493439 +step:9696 train loss:3.523120 +step:9697 train loss:3.432335 +step:9698 train loss:3.468004 +step:9699 train loss:3.430806 +step:9700 train loss:3.429642 +step:9701 train loss:3.478721 +step:9702 train loss:3.395745 +step:9703 train loss:3.420584 +step:9704 train loss:3.502083 +step:9705 train loss:3.397098 +step:9706 train loss:3.394593 +step:9707 train loss:3.440003 +step:9708 train loss:3.389151 +step:9709 train loss:3.412006 +step:9710 train loss:3.432038 +step:9711 train loss:3.405764 +step:9712 train loss:3.415899 +step:9713 train loss:3.467407 +step:9714 train loss:3.420368 +step:9715 train loss:3.440705 +step:9716 train loss:3.464848 +step:9717 train loss:3.386149 +step:9718 train loss:3.388517 +step:9719 train loss:3.476251 +step:9720 train loss:3.404987 +step:9721 train loss:3.395864 +step:9722 train loss:3.458312 +step:9723 train loss:3.406570 +step:9724 train loss:3.433874 +step:9725 train loss:3.485280 +step:9726 train loss:3.428633 +step:9727 train loss:3.406868 +step:9728 train loss:3.442595 +step:9729 train loss:3.472604 +step:9730 train loss:3.545280 +step:9731 train loss:3.459193 +step:9732 train loss:3.422666 +step:9733 train loss:3.466214 +step:9734 train loss:3.388013 +step:9735 train loss:3.493788 +step:9736 train loss:3.394632 +step:9737 train loss:3.451564 +step:9738 train loss:3.417294 +step:9739 train loss:3.493412 +step:9740 train loss:3.454810 +step:9741 train loss:3.397450 +step:9742 train loss:3.491390 +step:9743 train loss:3.361694 +step:9744 train loss:3.421478 +step:9745 train loss:3.382967 +step:9746 train loss:3.417968 +step:9747 train loss:3.409595 +step:9748 train loss:3.308705 +step:9749 train loss:3.408050 +step:9750 validation loss:3.352247 +step:9750 train loss:3.389366 +step:9751 train loss:3.529956 +step:9752 train loss:3.415430 +step:9753 train loss:3.368687 +step:9754 
train loss:3.400887 +step:9755 train loss:3.402104 +step:9756 train loss:3.400843 +step:9757 train loss:3.367556 +step:9758 train loss:3.357518 +step:9759 train loss:3.406083 +step:9760 train loss:3.349216 +step:9761 train loss:3.393262 +step:9762 train loss:3.388880 +step:9763 train loss:3.408499 +step:9764 train loss:3.396808 +step:9765 train loss:3.356007 +step:9766 train loss:3.446803 +step:9767 train loss:3.404118 +step:9768 train loss:3.416983 +step:9769 train loss:3.370465 +step:9770 train loss:3.368089 +step:9771 train loss:3.420056 +step:9772 train loss:3.429036 +step:9773 train loss:3.407825 +step:9774 train loss:3.380173 +step:9775 train loss:3.470081 +step:9776 train loss:3.468806 +step:9777 train loss:3.353167 +step:9778 train loss:3.364987 +step:9779 train loss:3.367457 +step:9780 train loss:3.367910 +step:9781 train loss:3.387253 +step:9782 train loss:3.463809 +step:9783 train loss:3.374187 +step:9784 train loss:3.400289 +step:9785 train loss:3.397042 +step:9786 train loss:3.426677 +step:9787 train loss:3.454254 +step:9788 train loss:3.378227 +step:9789 train loss:3.391140 +step:9790 train loss:3.348580 +step:9791 train loss:3.399240 +step:9792 train loss:3.414723 +step:9793 train loss:3.430929 +step:9794 train loss:3.408740 +step:9795 train loss:3.411553 +step:9796 train loss:3.399401 +step:9797 train loss:3.392229 +step:9798 train loss:3.410974 +step:9799 train loss:3.408657 +step:9800 train loss:3.482810 +step:9801 train loss:3.407186 +step:9802 train loss:3.464303 +step:9803 train loss:3.322362 +step:9804 train loss:3.416540 +step:9805 train loss:3.424379 +step:9806 train loss:3.395037 +step:9807 train loss:3.369437 +step:9808 train loss:3.284270 +step:9809 train loss:3.466838 +step:9810 train loss:3.423460 +step:9811 train loss:3.408055 +step:9812 train loss:3.384518 +step:9813 train loss:3.461392 +step:9814 train loss:3.452487 +step:9815 train loss:3.357554 +step:9816 train loss:3.362875 +step:9817 train loss:3.391757 +step:9818 train loss:3.417495 +step:9819 train loss:3.389549 +step:9820 train loss:3.456202 +step:9821 train loss:3.436386 +step:9822 train loss:3.411306 +step:9823 train loss:3.471268 +step:9824 train loss:3.375364 +step:9825 train loss:3.461413 +step:9826 train loss:3.456706 +step:9827 train loss:3.465235 +step:9828 train loss:3.377658 +step:9829 train loss:3.383356 +step:9830 train loss:3.374128 +step:9831 train loss:3.430146 +step:9832 train loss:3.440657 +step:9833 train loss:3.354084 +step:9834 train loss:3.409290 +step:9835 train loss:3.371632 +step:9836 train loss:3.441809 +step:9837 train loss:3.408902 +step:9838 train loss:3.446667 +step:9839 train loss:3.422948 +step:9840 train loss:3.392858 +step:9841 train loss:3.400871 +step:9842 train loss:3.461133 +step:9843 train loss:3.454295 +step:9844 train loss:3.399680 +step:9845 train loss:3.432565 +step:9846 train loss:3.364716 +step:9847 train loss:3.497442 +step:9848 train loss:3.418958 +step:9849 train loss:3.441324 +step:9850 train loss:3.363654 +step:9851 train loss:3.416190 +step:9852 train loss:3.382880 +step:9853 train loss:3.402466 +step:9854 train loss:3.419649 +step:9855 train loss:3.360845 +step:9856 train loss:3.367344 +step:9857 train loss:3.353084 +step:9858 train loss:3.419062 +step:9859 train loss:3.337342 +step:9860 train loss:3.576319 +step:9861 train loss:3.402001 +step:9862 train loss:3.368750 +step:9863 train loss:3.351612 +step:9864 train loss:3.474570 +step:9865 train loss:3.356741 +step:9866 train loss:3.392709 +step:9867 train loss:3.393557 +step:9868 train loss:3.451887 
+step:9869 train loss:3.417526 +step:9870 train loss:3.384188 +step:9871 train loss:3.429413 +step:9872 train loss:3.367149 +step:9873 train loss:3.422847 +step:9874 train loss:3.386819 +step:9875 train loss:3.389792 +step:9876 train loss:3.353071 +step:9877 train loss:3.407690 +step:9878 train loss:3.435345 +step:9879 train loss:3.435971 +step:9880 train loss:3.370114 +step:9881 train loss:3.422629 +step:9882 train loss:3.382488 +step:9883 train loss:3.393011 +step:9884 train loss:3.387466 +step:9885 train loss:3.450177 +step:9886 train loss:3.417671 +step:9887 train loss:3.417603 +step:9888 train loss:3.440261 +step:9889 train loss:3.469668 +step:9890 train loss:3.382239 +step:9891 train loss:3.385949 +step:9892 train loss:3.358613 +step:9893 train loss:3.478408 +step:9894 train loss:3.391700 +step:9895 train loss:3.327332 +step:9896 train loss:3.482128 +step:9897 train loss:3.358184 +step:9898 train loss:3.429095 +step:9899 train loss:3.405782 +step:9900 train loss:3.451690 +step:9901 train loss:3.375345 +step:9902 train loss:3.419597 +step:9903 train loss:3.389641 +step:9904 train loss:3.441161 +step:9905 train loss:3.345716 +step:9906 train loss:3.384449 +step:9907 train loss:3.394466 +step:9908 train loss:3.389572 +step:9909 train loss:3.407715 +step:9910 train loss:3.429679 +step:9911 train loss:3.516381 +step:9912 train loss:3.389275 +step:9913 train loss:3.393915 +step:9914 train loss:3.399491 +step:9915 train loss:3.402560 +step:9916 train loss:3.351782 +step:9917 train loss:3.389983 +step:9918 train loss:3.382963 +step:9919 train loss:3.549819 +step:9920 train loss:3.335766 +step:9921 train loss:3.428990 +step:9922 train loss:3.385854 +step:9923 train loss:3.445511 +step:9924 train loss:3.357435 +step:9925 train loss:3.416913 +step:9926 train loss:3.396387 +step:9927 train loss:3.440515 +step:9928 train loss:3.365830 +step:9929 train loss:3.403504 +step:9930 train loss:3.497077 +step:9931 train loss:3.459444 +step:9932 train loss:3.344354 +step:9933 train loss:3.441701 +step:9934 train loss:3.360618 +step:9935 train loss:3.475153 +step:9936 train loss:3.380572 +step:9937 train loss:3.406847 +step:9938 train loss:3.396959 +step:9939 train loss:3.459305 +step:9940 train loss:3.493399 +step:9941 train loss:3.366889 +step:9942 train loss:3.415703 +step:9943 train loss:3.538393 +step:9944 train loss:3.410616 +step:9945 train loss:3.430835 +step:9946 train loss:3.405205 +step:9947 train loss:3.352191 +step:9948 train loss:3.399405 +step:9949 train loss:3.290393 +step:9950 train loss:3.444746 +step:9951 train loss:3.362975 +step:9952 train loss:3.431167 +step:9953 train loss:3.394610 +step:9954 train loss:3.452021 +step:9955 train loss:3.428452 +step:9956 train loss:3.429523 +step:9957 train loss:3.406603 +step:9958 train loss:3.461306 +step:9959 train loss:3.358164 +step:9960 train loss:3.393274 +step:9961 train loss:3.400387 +step:9962 train loss:3.451732 +step:9963 train loss:3.341178 +step:9964 train loss:3.394512 +step:9965 train loss:3.399207 +step:9966 train loss:3.458515 +step:9967 train loss:3.378574 +step:9968 train loss:3.437846 +step:9969 train loss:3.349190 +step:9970 train loss:3.391904 +step:9971 train loss:3.435669 +step:9972 train loss:3.456640 +step:9973 train loss:3.435292 +step:9974 train loss:3.423378 +step:9975 train loss:3.390891 +step:9976 train loss:3.349559 +step:9977 train loss:3.398107 +step:9978 train loss:3.397277 +step:9979 train loss:3.408874 +step:9980 train loss:3.462813 +step:9981 train loss:3.372547 +step:9982 train loss:3.432019 +step:9983 train 
loss:3.352895 +step:9984 train loss:3.413698 +step:9985 train loss:3.360056 +step:9986 train loss:3.414515 +step:9987 train loss:3.456586 +step:9988 train loss:3.471263 +step:9989 train loss:3.362717 +step:9990 train loss:3.503761 +step:9991 train loss:3.354240 +step:9992 train loss:3.422792 +step:9993 train loss:3.415021 +step:9994 train loss:3.528751 +step:9995 train loss:3.467617 +step:9996 train loss:3.383308 +step:9997 train loss:3.424879 +step:9998 train loss:3.474385 +step:9999 train loss:3.443795 +step:10000 validation loss:3.349420 total_sharp:1.7213e-03 L1_sharp:3.0859e-03 L2_sharp:1.1254e-03 L3_sharp:1.2807e-03 L4_sharp:2.1217e-04 L5_sharp:2.6760e-04 L6_sharp:3.1510e-04 L7_sharp:4.7358e-04 L8_sharp:3.5576e-04 L9_sharp:4.1317e-04 L10_sharp:1.5389e-04 L11_sharp:1.3071e-04 L12_sharp:3.5039e-04 total_fnorm:2.4193e+00 total_l1_linf:2.0709e+04 total_spectral:2.4193e+00 L1_fnorm:5.7056e-01 L2_fnorm:5.2124e-01 L3_fnorm:5.5420e-01 L4_fnorm:5.8577e-01 L5_fnorm:5.9185e-01 L6_fnorm:5.9949e-01 L7_fnorm:5.9353e-01 L8_fnorm:5.9192e-01 L9_fnorm:5.9211e-01 L10_fnorm:5.9903e-01 L11_fnorm:5.9519e-01 L12_fnorm:6.0143e-01 L1_l1linf:4.1904e-01 L2_l1linf:4.7132e-01 L3_l1linf:4.5075e-01 L4_l1linf:4.3741e-01 L5_l1linf:4.1062e-01 L6_l1linf:4.1604e-01 L7_l1linf:4.1070e-01 L8_l1linf:4.1196e-01 L9_l1linf:4.0397e-01 L10_l1linf:4.1707e-01 L11_l1linf:4.3736e-01 L12_l1linf:4.5800e-01 L1_spectral:1.2053e-02 L2_spectral:1.2039e-02 L3_spectral:1.2045e-02 L4_spectral:1.2044e-02 L5_spectral:1.2045e-02 L6_spectral:1.2064e-02 L7_spectral:1.2046e-02 L8_spectral:1.2043e-02 L9_spectral:1.2044e-02 L10_spectral:1.2044e-02 L11_spectral:1.2047e-02 L12_spectral:1.2049e-02 ip_v_neg_g:3.3598e-03 cos_v_neg_g:9.4301e-04 v_norm:2.4193e+00 g_norm:1.4726e+00 hv_norm:4.5219e-01 cos_v_hv:9.2094e-03 hg_norm:4.9019e+01 cos_g_hg:5.7990e-01 v_par:2.4227e-04 v_perp:2.4193e+00 L1_cos_v_neg_g:9.1482e-04 L1_v_norm:5.7056e-01 L2_cos_v_neg_g:4.6124e-03 L2_v_norm:5.2124e-01 L3_cos_v_neg_g:1.2015e-03 L3_v_norm:5.5420e-01 L4_cos_v_neg_g:7.5139e-04 L4_v_norm:5.8577e-01 L5_cos_v_neg_g:4.0203e-04 L5_v_norm:5.9185e-01 L6_cos_v_neg_g:2.8221e-03 L6_v_norm:5.9949e-01 L7_cos_v_neg_g:1.9392e-03 L7_v_norm:5.9353e-01 L8_cos_v_neg_g:2.3688e-03 L8_v_norm:5.9192e-01 L9_cos_v_neg_g:1.6927e-03 L9_v_norm:5.9211e-01 L10_cos_v_neg_g:8.3587e-04 L10_v_norm:5.9903e-01 L11_cos_v_neg_g:7.5948e-04 L11_v_norm:5.9519e-01 L12_cos_v_neg_g:2.5436e-03 L12_v_norm:6.0143e-01 diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/config.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..e68d4c6cdc1bbe80933161276ee3d5fd5365702b --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/muon_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 1, 
+ "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.02, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "f2be1a97-8e3d-4dc4-9e9d-6005dfc735d3", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..6f14c41623c1a17eb48eb229f7526963f63d6d22 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.295505046844482, + "total_l1_linf_norm": 35049.96484375, + "total_spectral_norm": 4.295505523681641, + "layer_1_update_fnorm": 1.1895675659179688, + "layer_1_max_l1_linf_norm": 0.8522477746009827, + "layer_1_max_spectral_norm": 0.024106845259666443, + "layer_2_update_fnorm": 1.0906567573547363, + "layer_2_max_l1_linf_norm": 0.791166365146637, + "layer_2_max_spectral_norm": 0.024072153493762016, + "layer_3_update_fnorm": 1.103026270866394, + "layer_3_max_l1_linf_norm": 0.7942429780960083, + "layer_3_max_spectral_norm": 0.02408246323466301, + "layer_4_update_fnorm": 1.1540448665618896, + "layer_4_max_l1_linf_norm": 0.8149966597557068, + "layer_4_max_spectral_norm": 0.02410593256354332, + "layer_5_update_fnorm": 1.18536376953125, + "layer_5_max_l1_linf_norm": 0.824933648109436, + "layer_5_max_spectral_norm": 0.024097422137856483, + "layer_6_update_fnorm": 1.20448637008667, + "layer_6_max_l1_linf_norm": 0.8242671489715576, + "layer_6_max_spectral_norm": 0.024087827652692795, + "layer_7_update_fnorm": 1.2038280963897705, + "layer_7_max_l1_linf_norm": 0.8176159262657166, + "layer_7_max_spectral_norm": 0.024093862622976303, + "layer_8_update_fnorm": 1.2067716121673584, + "layer_8_max_l1_linf_norm": 0.8172577619552612, + "layer_8_max_spectral_norm": 0.024086613208055496, + "layer_9_update_fnorm": 1.2052890062332153, + "layer_9_max_l1_linf_norm": 0.8081601858139038, + "layer_9_max_spectral_norm": 0.024093247950077057, + "layer_10_update_fnorm": 1.204973816871643, + "layer_10_max_l1_linf_norm": 0.8068233728408813, + "layer_10_max_spectral_norm": 0.024091379716992378, + "layer_11_update_fnorm": 1.200622320175171, + "layer_11_max_l1_linf_norm": 0.8027119636535645, + "layer_11_max_spectral_norm": 0.024087030440568924, + "layer_12_update_fnorm": 1.2034422159194946, + "layer_12_max_l1_linf_norm": 0.8021008968353271, + "layer_12_max_spectral_norm": 0.02409680373966694, + "total_sharpness": 0.0027732173912227154, + "ip_v_neg_g": 0.028729384765028954, + "cos_v_neg_g": 0.007829892449080944, + "v_norm": 4.295505046844482, + "g_norm": 0.8541935086250305, + "hv_norm": 0.6395063400268555, + "cos_v_hv": 0.01862744614481926, + "hg_norm": 9.151280403137207, + "cos_g_hg": 0.6293671727180481, + "v_parallel_norm": 0.003220626385882497, + "v_perp_norm": 4.295504093170166, + "layer_1_v_norm": 1.1895675659179688, + "layer_1_cos_v_neg_g": 0.009348832070827484, + "layer_2_v_norm": 1.0906567573547363, + "layer_2_cos_v_neg_g": 0.014545786194503307, + "layer_3_v_norm": 1.103026270866394, + "layer_3_cos_v_neg_g": 0.018983865156769753, + "layer_4_v_norm": 1.1540448665618896, + "layer_4_cos_v_neg_g": 
0.01788264326751232, + "layer_5_v_norm": 1.18536376953125, + "layer_5_cos_v_neg_g": 0.014909685589373112, + "layer_6_v_norm": 1.20448637008667, + "layer_6_cos_v_neg_g": 0.01151722390204668, + "layer_7_v_norm": 1.2038280963897705, + "layer_7_cos_v_neg_g": 0.013142546638846397, + "layer_8_v_norm": 1.2067716121673584, + "layer_8_cos_v_neg_g": 0.011766724288463593, + "layer_9_v_norm": 1.2052890062332153, + "layer_9_cos_v_neg_g": 0.012029540725052357, + "layer_10_v_norm": 1.204973816871643, + "layer_10_cos_v_neg_g": 0.010540807619690895, + "layer_11_v_norm": 1.200622320175171, + "layer_11_cos_v_neg_g": 0.009103951044380665, + "layer_12_v_norm": 1.2034422159194946, + "layer_12_cos_v_neg_g": 0.00857890397310257, + "layer_1_sharpness": 0.003715075785294175, + "layer_2_sharpness": 0.00038646519533358514, + "layer_3_sharpness": 0.0008538824622519314, + "layer_4_sharpness": 0.00040433675167150795, + "layer_5_sharpness": 0.00041128211887553334, + "layer_6_sharpness": 0.00035566685255616903, + "layer_7_sharpness": 0.00038549466989934444, + "layer_8_sharpness": 0.0002867528237402439, + "layer_9_sharpness": 0.00023958530800882727, + "layer_10_sharpness": 0.00020563007274176925, + "layer_11_sharpness": 0.00019293987134005874, + "layer_12_sharpness": 0.000229652039706707 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_10000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..08c44029068c35503c43e6b79fa63ded898e3d8b --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.223132610321045, + "total_l1_linf_norm": 34453.10546875, + "total_spectral_norm": 4.223132610321045, + "layer_1_update_fnorm": 1.0771087408065796, + "layer_1_max_l1_linf_norm": 0.881790041923523, + "layer_1_max_spectral_norm": 0.024099135771393776, + "layer_2_update_fnorm": 0.970719039440155, + "layer_2_max_l1_linf_norm": 0.7859847545623779, + "layer_2_max_spectral_norm": 0.024073980748653412, + "layer_3_update_fnorm": 1.083469033241272, + "layer_3_max_l1_linf_norm": 0.814354419708252, + "layer_3_max_spectral_norm": 0.024090804159641266, + "layer_4_update_fnorm": 1.1667438745498657, + "layer_4_max_l1_linf_norm": 0.8402940034866333, + "layer_4_max_spectral_norm": 0.0240989588201046, + "layer_5_update_fnorm": 1.1864008903503418, + "layer_5_max_l1_linf_norm": 0.8223483562469482, + "layer_5_max_spectral_norm": 0.02413015626370907, + "layer_6_update_fnorm": 1.1904637813568115, + "layer_6_max_l1_linf_norm": 0.8170406818389893, + "layer_6_max_spectral_norm": 0.0240958109498024, + "layer_7_update_fnorm": 1.1881213188171387, + "layer_7_max_l1_linf_norm": 0.8088265061378479, + "layer_7_max_spectral_norm": 0.02409244328737259, + "layer_8_update_fnorm": 1.196128010749817, + "layer_8_max_l1_linf_norm": 0.8034628629684448, + "layer_8_max_spectral_norm": 0.02408614568412304, + "layer_9_update_fnorm": 1.194312334060669, + "layer_9_max_l1_linf_norm": 0.8051966428756714, + "layer_9_max_spectral_norm": 0.02408827841281891, + "layer_10_update_fnorm": 1.2044538259506226, + "layer_10_max_l1_linf_norm": 0.8079794049263, + "layer_10_max_spectral_norm": 0.02408408187329769, + "layer_11_update_fnorm": 1.1977119445800781, + "layer_11_max_l1_linf_norm": 0.7960652709007263, + "layer_11_max_spectral_norm": 0.024088628590106964, + "layer_12_update_fnorm": 
1.204030990600586, + "layer_12_max_l1_linf_norm": 0.8358606696128845, + "layer_12_max_spectral_norm": 0.024083267897367477, + "total_sharpness": 0.0005768568371422589, + "ip_v_neg_g": 0.005907590966671705, + "cos_v_neg_g": 0.0010891967685893178, + "v_norm": 4.223132610321045, + "g_norm": 1.2843085527420044, + "hv_norm": 0.9076834917068481, + "cos_v_hv": 0.0026839126367121935, + "hg_norm": 53.07091522216797, + "cos_g_hg": 0.5359634160995483, + "v_parallel_norm": 0.0016012884443625808, + "v_perp_norm": 4.223132610321045, + "layer_1_v_norm": 1.0771087408065796, + "layer_1_cos_v_neg_g": 0.0025297191459685564, + "layer_2_v_norm": 0.970719039440155, + "layer_2_cos_v_neg_g": 0.002631994429975748, + "layer_3_v_norm": 1.083469033241272, + "layer_3_cos_v_neg_g": 0.00221060449257493, + "layer_4_v_norm": 1.1667438745498657, + "layer_4_cos_v_neg_g": 0.0012918057618662715, + "layer_5_v_norm": 1.1864008903503418, + "layer_5_cos_v_neg_g": 0.0004753757384605706, + "layer_6_v_norm": 1.1904637813568115, + "layer_6_cos_v_neg_g": 0.00030529574723914266, + "layer_7_v_norm": 1.1881213188171387, + "layer_7_cos_v_neg_g": 0.0026733740232884884, + "layer_8_v_norm": 1.196128010749817, + "layer_8_cos_v_neg_g": 0.0017005158588290215, + "layer_9_v_norm": 1.194312334060669, + "layer_9_cos_v_neg_g": 0.0016691487981006503, + "layer_10_v_norm": 1.2044538259506226, + "layer_10_cos_v_neg_g": 0.0022117625921964645, + "layer_11_v_norm": 1.1977118253707886, + "layer_11_cos_v_neg_g": 0.0019496979657560587, + "layer_12_v_norm": 1.204030990600586, + "layer_12_cos_v_neg_g": 0.0022867131046950817, + "layer_1_sharpness": 0.0017357236938551068, + "layer_2_sharpness": 0.00010531493899179623, + "layer_3_sharpness": 0.00019393593538552523, + "layer_4_sharpness": 5.336952381185256e-05, + "layer_5_sharpness": 6.346501322695985e-05, + "layer_6_sharpness": 9.022894664667547e-05, + "layer_7_sharpness": 0.00010142462997464463, + "layer_8_sharpness": 5.953930667601526e-05, + "layer_9_sharpness": 4.3062020267825574e-05, + "layer_10_sharpness": 3.231474329368211e-05, + "layer_11_sharpness": 2.9514772904803976e-05, + "layer_12_sharpness": 4.1053135646507144e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..1c5590706244c5c97d3f7ce6eeddfc76652f412d --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.261536121368408, + "total_l1_linf_norm": 34851.859375, + "total_spectral_norm": 4.261536598205566, + "layer_1_update_fnorm": 1.128785490989685, + "layer_1_max_l1_linf_norm": 0.8738886117935181, + "layer_1_max_spectral_norm": 0.024092204868793488, + "layer_2_update_fnorm": 1.0393009185791016, + "layer_2_max_l1_linf_norm": 0.7901930809020996, + "layer_2_max_spectral_norm": 0.024090170860290527, + "layer_3_update_fnorm": 1.068050742149353, + "layer_3_max_l1_linf_norm": 0.7936786413192749, + "layer_3_max_spectral_norm": 0.024086132645606995, + "layer_4_update_fnorm": 1.1467711925506592, + "layer_4_max_l1_linf_norm": 0.8334370851516724, + "layer_4_max_spectral_norm": 0.02410665713250637, + "layer_5_update_fnorm": 1.1871318817138672, + "layer_5_max_l1_linf_norm": 0.8341376185417175, + "layer_5_max_spectral_norm": 0.024085596203804016, + "layer_6_update_fnorm": 1.2030946016311646, + 
"layer_6_max_l1_linf_norm": 0.8377605080604553, + "layer_6_max_spectral_norm": 0.02409028261899948, + "layer_7_update_fnorm": 1.2035382986068726, + "layer_7_max_l1_linf_norm": 0.8245288133621216, + "layer_7_max_spectral_norm": 0.024095429107546806, + "layer_8_update_fnorm": 1.208513617515564, + "layer_8_max_l1_linf_norm": 0.815045952796936, + "layer_8_max_spectral_norm": 0.02408873476088047, + "layer_9_update_fnorm": 1.207280158996582, + "layer_9_max_l1_linf_norm": 0.8151836395263672, + "layer_9_max_spectral_norm": 0.024098355323076248, + "layer_10_update_fnorm": 1.2068238258361816, + "layer_10_max_l1_linf_norm": 0.8133822679519653, + "layer_10_max_spectral_norm": 0.02408648654818535, + "layer_11_update_fnorm": 1.2016401290893555, + "layer_11_max_l1_linf_norm": 0.8032059073448181, + "layer_11_max_spectral_norm": 0.024091627448797226, + "layer_12_update_fnorm": 1.205196499824524, + "layer_12_max_l1_linf_norm": 0.7938801050186157, + "layer_12_max_spectral_norm": 0.024091970175504684, + "total_sharpness": 0.002142194891348481, + "ip_v_neg_g": 0.017652561888098717, + "cos_v_neg_g": 0.0035088863223791122, + "v_norm": 4.261536121368408, + "g_norm": 1.1805170774459839, + "hv_norm": 1.0125919580459595, + "cos_v_hv": 0.009015519171953201, + "hg_norm": 42.666534423828125, + "cos_g_hg": 0.7056676745414734, + "v_parallel_norm": 0.003107417607679963, + "v_perp_norm": 4.261535167694092, + "layer_1_v_norm": 1.128785490989685, + "layer_1_cos_v_neg_g": 0.005666480865329504, + "layer_2_v_norm": 1.0393009185791016, + "layer_2_cos_v_neg_g": 0.009489904157817364, + "layer_3_v_norm": 1.068050742149353, + "layer_3_cos_v_neg_g": 0.009611827321350574, + "layer_4_v_norm": 1.1467711925506592, + "layer_4_cos_v_neg_g": 0.008481224998831749, + "layer_5_v_norm": 1.1871318817138672, + "layer_5_cos_v_neg_g": 0.00821459200233221, + "layer_6_v_norm": 1.2030946016311646, + "layer_6_cos_v_neg_g": 0.0070029255002737045, + "layer_7_v_norm": 1.2035382986068726, + "layer_7_cos_v_neg_g": 0.007475973106920719, + "layer_8_v_norm": 1.208513617515564, + "layer_8_cos_v_neg_g": 0.007840684615075588, + "layer_9_v_norm": 1.207280158996582, + "layer_9_cos_v_neg_g": 0.006429074797779322, + "layer_10_v_norm": 1.2068238258361816, + "layer_10_cos_v_neg_g": 0.005935494787991047, + "layer_11_v_norm": 1.2016401290893555, + "layer_11_cos_v_neg_g": 0.004737774841487408, + "layer_12_v_norm": 1.205196499824524, + "layer_12_cos_v_neg_g": 0.003371471306309104, + "layer_1_sharpness": 0.007286687847226858, + "layer_2_sharpness": 0.0002422940160613507, + "layer_3_sharpness": 0.00045749344280920923, + "layer_4_sharpness": 0.00015309674199670553, + "layer_5_sharpness": 0.00018154256395064294, + "layer_6_sharpness": 0.0001733355748001486, + "layer_7_sharpness": 0.0002470296749379486, + "layer_8_sharpness": 0.00019809845252893865, + "layer_9_sharpness": 0.00015683291712775826, + "layer_10_sharpness": 0.0001293870445806533, + "layer_11_sharpness": 0.00011919840471819043, + "layer_12_sharpness": 0.00012846142635680735 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..4582d54d0996388e6827909c3f2f528664bbb325 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.265693664550781, + "total_l1_linf_norm": 
34874.25, + "total_spectral_norm": 4.265693187713623, + "layer_1_update_fnorm": 1.130225658416748, + "layer_1_max_l1_linf_norm": 0.884100079536438, + "layer_1_max_spectral_norm": 0.024092571809887886, + "layer_2_update_fnorm": 1.020350694656372, + "layer_2_max_l1_linf_norm": 0.7954957485198975, + "layer_2_max_spectral_norm": 0.024081319570541382, + "layer_3_update_fnorm": 1.070670247077942, + "layer_3_max_l1_linf_norm": 0.8130753040313721, + "layer_3_max_spectral_norm": 0.02408263087272644, + "layer_4_update_fnorm": 1.15361750125885, + "layer_4_max_l1_linf_norm": 0.8453767895698547, + "layer_4_max_spectral_norm": 0.02409842424094677, + "layer_5_update_fnorm": 1.1944375038146973, + "layer_5_max_l1_linf_norm": 0.8478333950042725, + "layer_5_max_spectral_norm": 0.024084782227873802, + "layer_6_update_fnorm": 1.2023087739944458, + "layer_6_max_l1_linf_norm": 0.8328588008880615, + "layer_6_max_spectral_norm": 0.024089643731713295, + "layer_7_update_fnorm": 1.2039074897766113, + "layer_7_max_l1_linf_norm": 0.8331904411315918, + "layer_7_max_spectral_norm": 0.024089669808745384, + "layer_8_update_fnorm": 1.2091389894485474, + "layer_8_max_l1_linf_norm": 0.8237965703010559, + "layer_8_max_spectral_norm": 0.02408965677022934, + "layer_9_update_fnorm": 1.2088520526885986, + "layer_9_max_l1_linf_norm": 0.8187806606292725, + "layer_9_max_spectral_norm": 0.02408544532954693, + "layer_10_update_fnorm": 1.2097058296203613, + "layer_10_max_l1_linf_norm": 0.8120120763778687, + "layer_10_max_spectral_norm": 0.024088077247142792, + "layer_11_update_fnorm": 1.2068252563476562, + "layer_11_max_l1_linf_norm": 0.8053829073905945, + "layer_11_max_spectral_norm": 0.024091551080346107, + "layer_12_update_fnorm": 1.2064141035079956, + "layer_12_max_l1_linf_norm": 0.791347324848175, + "layer_12_max_spectral_norm": 0.024087408557534218, + "total_sharpness": 0.0020229059737175703, + "ip_v_neg_g": 0.018921537324786186, + "cos_v_neg_g": 0.0030310801230371, + "v_norm": 4.265693664550781, + "g_norm": 1.4634212255477905, + "hv_norm": 1.4224025011062622, + "cos_v_hv": 0.006066564936190844, + "hg_norm": 96.79609680175781, + "cos_g_hg": 0.7130823135375977, + "v_parallel_norm": 0.004097191616892815, + "v_perp_norm": 4.265691757202148, + "layer_1_v_norm": 1.130225658416748, + "layer_1_cos_v_neg_g": 0.00625920295715332, + "layer_2_v_norm": 1.020350694656372, + "layer_2_cos_v_neg_g": 0.010485871694982052, + "layer_3_v_norm": 1.0706701278686523, + "layer_3_cos_v_neg_g": 0.009960397146642208, + "layer_4_v_norm": 1.15361750125885, + "layer_4_cos_v_neg_g": 0.005710866767913103, + "layer_5_v_norm": 1.1944375038146973, + "layer_5_cos_v_neg_g": 0.005829895380884409, + "layer_6_v_norm": 1.2023086547851562, + "layer_6_cos_v_neg_g": 0.005839754361659288, + "layer_7_v_norm": 1.2039074897766113, + "layer_7_cos_v_neg_g": 0.006582001224160194, + "layer_8_v_norm": 1.2091389894485474, + "layer_8_cos_v_neg_g": 0.007673466112464666, + "layer_9_v_norm": 1.2088520526885986, + "layer_9_cos_v_neg_g": 0.006406011991202831, + "layer_10_v_norm": 1.2097058296203613, + "layer_10_cos_v_neg_g": 0.004904280416667461, + "layer_11_v_norm": 1.2068253755569458, + "layer_11_cos_v_neg_g": 0.002655604388564825, + "layer_12_v_norm": 1.2064141035079956, + "layer_12_cos_v_neg_g": 0.0014404619578272104, + "layer_1_sharpness": 0.009561036713421345, + "layer_2_sharpness": 0.0006067627691663802, + "layer_3_sharpness": 0.0004758873546961695, + "layer_4_sharpness": 9.901281009661034e-05, + "layer_5_sharpness": 9.570697147864848e-05, + "layer_6_sharpness": 
0.0001550757442601025, + "layer_7_sharpness": 0.00018420242122374475, + "layer_8_sharpness": 0.00017515555373393, + "layer_9_sharpness": 0.00013811993994750082, + "layer_10_sharpness": 8.828055433696136e-05, + "layer_11_sharpness": 8.410214650211856e-05, + "layer_12_sharpness": 7.979234214872122e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..864ee58bc0751745e870a0dc54df2d09f812d13a --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.239996433258057, + "total_l1_linf_norm": 34625.83984375, + "total_spectral_norm": 4.239996433258057, + "layer_1_update_fnorm": 1.108981966972351, + "layer_1_max_l1_linf_norm": 0.8913931846618652, + "layer_1_max_spectral_norm": 0.02409600280225277, + "layer_2_update_fnorm": 0.9972213506698608, + "layer_2_max_l1_linf_norm": 0.792195200920105, + "layer_2_max_spectral_norm": 0.024077484384179115, + "layer_3_update_fnorm": 1.0503407716751099, + "layer_3_max_l1_linf_norm": 0.8018472194671631, + "layer_3_max_spectral_norm": 0.02408653311431408, + "layer_4_update_fnorm": 1.1528465747833252, + "layer_4_max_l1_linf_norm": 0.8389598727226257, + "layer_4_max_spectral_norm": 0.024098319932818413, + "layer_5_update_fnorm": 1.191556692123413, + "layer_5_max_l1_linf_norm": 0.8431177735328674, + "layer_5_max_spectral_norm": 0.024095771834254265, + "layer_6_update_fnorm": 1.20026695728302, + "layer_6_max_l1_linf_norm": 0.8375368714332581, + "layer_6_max_spectral_norm": 0.024088911712169647, + "layer_7_update_fnorm": 1.1979748010635376, + "layer_7_max_l1_linf_norm": 0.8266521692276001, + "layer_7_max_spectral_norm": 0.02408839575946331, + "layer_8_update_fnorm": 1.2043951749801636, + "layer_8_max_l1_linf_norm": 0.8222706317901611, + "layer_8_max_spectral_norm": 0.024086102843284607, + "layer_9_update_fnorm": 1.2065256834030151, + "layer_9_max_l1_linf_norm": 0.8181887865066528, + "layer_9_max_spectral_norm": 0.02409224957227707, + "layer_10_update_fnorm": 1.2079153060913086, + "layer_10_max_l1_linf_norm": 0.8126410841941833, + "layer_10_max_spectral_norm": 0.024096928536891937, + "layer_11_update_fnorm": 1.2043548822402954, + "layer_11_max_l1_linf_norm": 0.8052427172660828, + "layer_11_max_spectral_norm": 0.024096298962831497, + "layer_12_update_fnorm": 1.2068365812301636, + "layer_12_max_l1_linf_norm": 0.7913501262664795, + "layer_12_max_spectral_norm": 0.024091003462672234, + "total_sharpness": 0.0019340469734743237, + "ip_v_neg_g": 0.020191632211208344, + "cos_v_neg_g": 0.003791560884565115, + "v_norm": 4.239996433258057, + "g_norm": 1.2559949159622192, + "hv_norm": 1.1375471353530884, + "cos_v_hv": 0.007208802737295628, + "hg_norm": 49.311134338378906, + "cos_g_hg": 0.7021319270133972, + "v_parallel_norm": 0.0041764103807508945, + "v_perp_norm": 4.239994525909424, + "layer_1_v_norm": 1.108981966972351, + "layer_1_cos_v_neg_g": 0.007032514549791813, + "layer_2_v_norm": 0.9972213506698608, + "layer_2_cos_v_neg_g": 0.00894855335354805, + "layer_3_v_norm": 1.0503407716751099, + "layer_3_cos_v_neg_g": 0.011234252713620663, + "layer_4_v_norm": 1.1528465747833252, + "layer_4_cos_v_neg_g": 0.008218840695917606, + "layer_5_v_norm": 1.191556692123413, + "layer_5_cos_v_neg_g": 0.007581302896142006, + "layer_6_v_norm": 
1.20026695728302, + "layer_6_cos_v_neg_g": 0.008315692655742168, + "layer_7_v_norm": 1.1979748010635376, + "layer_7_cos_v_neg_g": 0.00960561353713274, + "layer_8_v_norm": 1.2043951749801636, + "layer_8_cos_v_neg_g": 0.008530795574188232, + "layer_9_v_norm": 1.2065256834030151, + "layer_9_cos_v_neg_g": 0.007267598062753677, + "layer_10_v_norm": 1.2079153060913086, + "layer_10_cos_v_neg_g": 0.005271567963063717, + "layer_11_v_norm": 1.204355001449585, + "layer_11_cos_v_neg_g": 0.0033874185755848885, + "layer_12_v_norm": 1.2068365812301636, + "layer_12_cos_v_neg_g": 0.0024501935113221407, + "layer_1_sharpness": 0.005485727917402983, + "layer_2_sharpness": 0.0003239403013139963, + "layer_3_sharpness": 0.0004960164660587907, + "layer_4_sharpness": 0.00011031490430468693, + "layer_5_sharpness": 0.00012246178812347353, + "layer_6_sharpness": 0.00021890953939873725, + "layer_7_sharpness": 0.0003001931181643158, + "layer_8_sharpness": 0.00019956941832788289, + "layer_9_sharpness": 0.0001347630168311298, + "layer_10_sharpness": 9.512007818557322e-05, + "layer_11_sharpness": 7.844489300623536e-05, + "layer_12_sharpness": 7.370999810518697e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..28ecfc64d122d50f3317f4d4be94e8ebdb2810b9 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.228570938110352, + "total_l1_linf_norm": 34544.3515625, + "total_spectral_norm": 4.228570938110352, + "layer_1_update_fnorm": 1.1051840782165527, + "layer_1_max_l1_linf_norm": 0.8850706219673157, + "layer_1_max_spectral_norm": 0.024105718359351158, + "layer_2_update_fnorm": 0.9687039852142334, + "layer_2_max_l1_linf_norm": 0.7997164726257324, + "layer_2_max_spectral_norm": 0.02408033050596714, + "layer_3_update_fnorm": 1.0511208772659302, + "layer_3_max_l1_linf_norm": 0.8059271574020386, + "layer_3_max_spectral_norm": 0.02408767305314541, + "layer_4_update_fnorm": 1.1496161222457886, + "layer_4_max_l1_linf_norm": 0.8280234932899475, + "layer_4_max_spectral_norm": 0.02410087175667286, + "layer_5_update_fnorm": 1.1920430660247803, + "layer_5_max_l1_linf_norm": 0.8366743922233582, + "layer_5_max_spectral_norm": 0.024094197899103165, + "layer_6_update_fnorm": 1.1984729766845703, + "layer_6_max_l1_linf_norm": 0.8332718014717102, + "layer_6_max_spectral_norm": 0.024092910811305046, + "layer_7_update_fnorm": 1.1979656219482422, + "layer_7_max_l1_linf_norm": 0.8247737884521484, + "layer_7_max_spectral_norm": 0.024095674976706505, + "layer_8_update_fnorm": 1.2038687467575073, + "layer_8_max_l1_linf_norm": 0.8215838670730591, + "layer_8_max_spectral_norm": 0.024089233949780464, + "layer_9_update_fnorm": 1.2036923170089722, + "layer_9_max_l1_linf_norm": 0.8126082420349121, + "layer_9_max_spectral_norm": 0.02408760040998459, + "layer_10_update_fnorm": 1.2059253454208374, + "layer_10_max_l1_linf_norm": 0.8110203146934509, + "layer_10_max_spectral_norm": 0.02409772388637066, + "layer_11_update_fnorm": 1.20033860206604, + "layer_11_max_l1_linf_norm": 0.8017791509628296, + "layer_11_max_spectral_norm": 0.024096336215734482, + "layer_12_update_fnorm": 1.2042367458343506, + "layer_12_max_l1_linf_norm": 0.8009750247001648, + "layer_12_max_spectral_norm": 0.02409263513982296, + 
"total_sharpness": 0.0014788914704695344, + "ip_v_neg_g": 0.013763004913926125, + "cos_v_neg_g": 0.0029739956371486187, + "v_norm": 4.228570938110352, + "g_norm": 1.0944081544876099, + "hv_norm": 0.8713940978050232, + "cos_v_hv": 0.007176543120294809, + "hg_norm": 35.584693908691406, + "cos_g_hg": 0.6901260018348694, + "v_parallel_norm": 0.003237643511965871, + "v_perp_norm": 4.228569984436035, + "layer_1_v_norm": 1.1051840782165527, + "layer_1_cos_v_neg_g": 0.005970196798443794, + "layer_2_v_norm": 0.9687039852142334, + "layer_2_cos_v_neg_g": 0.0053915889002382755, + "layer_3_v_norm": 1.0511208772659302, + "layer_3_cos_v_neg_g": 0.006942134350538254, + "layer_4_v_norm": 1.1496161222457886, + "layer_4_cos_v_neg_g": 0.003883954370394349, + "layer_5_v_norm": 1.1920430660247803, + "layer_5_cos_v_neg_g": 0.0045860037207603455, + "layer_6_v_norm": 1.1984729766845703, + "layer_6_cos_v_neg_g": 0.00511749554425478, + "layer_7_v_norm": 1.1979656219482422, + "layer_7_cos_v_neg_g": 0.005630264524370432, + "layer_8_v_norm": 1.2038687467575073, + "layer_8_cos_v_neg_g": 0.005775631871074438, + "layer_9_v_norm": 1.2036923170089722, + "layer_9_cos_v_neg_g": 0.0064294347539544106, + "layer_10_v_norm": 1.2059253454208374, + "layer_10_cos_v_neg_g": 0.005900625139474869, + "layer_11_v_norm": 1.2003387212753296, + "layer_11_cos_v_neg_g": 0.0046503483317792416, + "layer_12_v_norm": 1.2042367458343506, + "layer_12_cos_v_neg_g": 0.003578340634703636, + "layer_1_sharpness": 0.006145262625068426, + "layer_2_sharpness": 0.0004524703836068511, + "layer_3_sharpness": 0.00036209006793797016, + "layer_4_sharpness": 8.768968109507114e-05, + "layer_5_sharpness": 9.440363646717742e-05, + "layer_6_sharpness": 0.00013191364996600896, + "layer_7_sharpness": 0.0001595944631844759, + "layer_8_sharpness": 0.00012899996363557875, + "layer_9_sharpness": 0.00010549865692155436, + "layer_10_sharpness": 7.318129792110994e-05, + "layer_11_sharpness": 6.63223909214139e-05, + "layer_12_sharpness": 7.446211384376511e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..f63f7dfdcb2ecf7e2ffa33f842e216d0ccdecbd4 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.225791931152344, + "total_l1_linf_norm": 34520.8125, + "total_spectral_norm": 4.225791931152344, + "layer_1_update_fnorm": 1.068838119506836, + "layer_1_max_l1_linf_norm": 0.8928619623184204, + "layer_1_max_spectral_norm": 0.024108802899718285, + "layer_2_update_fnorm": 0.9691649675369263, + "layer_2_max_l1_linf_norm": 0.7880566120147705, + "layer_2_max_spectral_norm": 0.024074729532003403, + "layer_3_update_fnorm": 1.0642658472061157, + "layer_3_max_l1_linf_norm": 0.8012442588806152, + "layer_3_max_spectral_norm": 0.024082591757178307, + "layer_4_update_fnorm": 1.1529593467712402, + "layer_4_max_l1_linf_norm": 0.8318055868148804, + "layer_4_max_spectral_norm": 0.02408958412706852, + "layer_5_update_fnorm": 1.190613031387329, + "layer_5_max_l1_linf_norm": 0.8420813083648682, + "layer_5_max_spectral_norm": 0.02410680241882801, + "layer_6_update_fnorm": 1.1998475790023804, + "layer_6_max_l1_linf_norm": 0.8326407670974731, + "layer_6_max_spectral_norm": 0.02409570850431919, + "layer_7_update_fnorm": 1.1962372064590454, + 
"layer_7_max_l1_linf_norm": 0.8269602060317993, + "layer_7_max_spectral_norm": 0.02408353053033352, + "layer_8_update_fnorm": 1.2029705047607422, + "layer_8_max_l1_linf_norm": 0.8164675235748291, + "layer_8_max_spectral_norm": 0.02409372292459011, + "layer_9_update_fnorm": 1.2041906118392944, + "layer_9_max_l1_linf_norm": 0.8136286735534668, + "layer_9_max_spectral_norm": 0.02408524975180626, + "layer_10_update_fnorm": 1.2085601091384888, + "layer_10_max_l1_linf_norm": 0.8133072257041931, + "layer_10_max_spectral_norm": 0.02408544532954693, + "layer_11_update_fnorm": 1.2038525342941284, + "layer_11_max_l1_linf_norm": 0.799075722694397, + "layer_11_max_spectral_norm": 0.024086998775601387, + "layer_12_update_fnorm": 1.2071319818496704, + "layer_12_max_l1_linf_norm": 0.7974528074264526, + "layer_12_max_spectral_norm": 0.02408619411289692, + "total_sharpness": 0.0011151679791510105, + "ip_v_neg_g": 0.010881859809160233, + "cos_v_neg_g": 0.002066944260150194, + "v_norm": 4.225791931152344, + "g_norm": 1.2458513975143433, + "hv_norm": 0.8878350853919983, + "cos_v_hv": 0.005307819228619337, + "hg_norm": 71.94905090332031, + "cos_g_hg": 0.33590179681777954, + "v_parallel_norm": 0.0017917872173711658, + "v_perp_norm": 4.2257914543151855, + "layer_1_v_norm": 1.068838119506836, + "layer_1_cos_v_neg_g": 0.003735549049451947, + "layer_2_v_norm": 0.9691649675369263, + "layer_2_cos_v_neg_g": 0.005097649060189724, + "layer_3_v_norm": 1.0642658472061157, + "layer_3_cos_v_neg_g": 0.006131357047706842, + "layer_4_v_norm": 1.1529593467712402, + "layer_4_cos_v_neg_g": 0.0032766687218099833, + "layer_5_v_norm": 1.190613031387329, + "layer_5_cos_v_neg_g": 0.00375136686488986, + "layer_6_v_norm": 1.1998475790023804, + "layer_6_cos_v_neg_g": 0.004496310371905565, + "layer_7_v_norm": 1.1962372064590454, + "layer_7_cos_v_neg_g": 0.004648137837648392, + "layer_8_v_norm": 1.2029703855514526, + "layer_8_cos_v_neg_g": 0.004411271773278713, + "layer_9_v_norm": 1.2041906118392944, + "layer_9_cos_v_neg_g": 0.006002461072057486, + "layer_10_v_norm": 1.2085601091384888, + "layer_10_cos_v_neg_g": 0.005596405826508999, + "layer_11_v_norm": 1.2038525342941284, + "layer_11_cos_v_neg_g": 0.004364924039691687, + "layer_12_v_norm": 1.2071319818496704, + "layer_12_cos_v_neg_g": 0.004670817404985428, + "layer_1_sharpness": 0.0043072146363556385, + "layer_2_sharpness": 0.00014702742919325829, + "layer_3_sharpness": 0.0003498529258649796, + "layer_4_sharpness": 8.272662671515718e-05, + "layer_5_sharpness": 9.953149856301025e-05, + "layer_6_sharpness": 0.00013513477460946888, + "layer_7_sharpness": 0.00021783090778626502, + "layer_8_sharpness": 0.00011321665078867227, + "layer_9_sharpness": 9.229444549418986e-05, + "layer_10_sharpness": 7.351191015914083e-05, + "layer_11_sharpness": 6.241389201022685e-05, + "layer_12_sharpness": 0.00011991817154921591 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..84a17edb86d7b44764eb03ebec73f8a7e7ec1f33 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.2416276931762695, + "total_l1_linf_norm": 34636.6171875, + "total_spectral_norm": 4.241628646850586, + "layer_1_update_fnorm": 1.1021486520767212, + "layer_1_max_l1_linf_norm": 
0.8920411467552185, + "layer_1_max_spectral_norm": 0.024096045643091202, + "layer_2_update_fnorm": 0.9949029088020325, + "layer_2_max_l1_linf_norm": 0.7880465388298035, + "layer_2_max_spectral_norm": 0.024084633216261864, + "layer_3_update_fnorm": 1.0623254776000977, + "layer_3_max_l1_linf_norm": 0.8067054152488708, + "layer_3_max_spectral_norm": 0.024083340540528297, + "layer_4_update_fnorm": 1.1579418182373047, + "layer_4_max_l1_linf_norm": 0.8292058706283569, + "layer_4_max_spectral_norm": 0.02410387247800827, + "layer_5_update_fnorm": 1.1922672986984253, + "layer_5_max_l1_linf_norm": 0.8358983397483826, + "layer_5_max_spectral_norm": 0.024106353521347046, + "layer_6_update_fnorm": 1.19930899143219, + "layer_6_max_l1_linf_norm": 0.8449423313140869, + "layer_6_max_spectral_norm": 0.024095289409160614, + "layer_7_update_fnorm": 1.197077989578247, + "layer_7_max_l1_linf_norm": 0.8248909711837769, + "layer_7_max_spectral_norm": 0.024092325940728188, + "layer_8_update_fnorm": 1.2032617330551147, + "layer_8_max_l1_linf_norm": 0.8154107928276062, + "layer_8_max_spectral_norm": 0.024087075144052505, + "layer_9_update_fnorm": 1.2033737897872925, + "layer_9_max_l1_linf_norm": 0.8159608244895935, + "layer_9_max_spectral_norm": 0.024089736863970757, + "layer_10_update_fnorm": 1.2071540355682373, + "layer_10_max_l1_linf_norm": 0.8150467872619629, + "layer_10_max_spectral_norm": 0.024083275347948074, + "layer_11_update_fnorm": 1.2047548294067383, + "layer_11_max_l1_linf_norm": 0.8025590777397156, + "layer_11_max_spectral_norm": 0.02409176528453827, + "layer_12_update_fnorm": 1.2062373161315918, + "layer_12_max_l1_linf_norm": 0.798753559589386, + "layer_12_max_spectral_norm": 0.024090787395834923, + "total_sharpness": 0.0009881852893158793, + "ip_v_neg_g": 0.00928591564297676, + "cos_v_neg_g": 0.0021293614991009235, + "v_norm": 4.2416276931762695, + "g_norm": 1.0281175374984741, + "hv_norm": 0.6123213171958923, + "cos_v_hv": 0.00684528611600399, + "hg_norm": 31.69651222229004, + "cos_g_hg": 0.6084635257720947, + "v_parallel_norm": 0.0025045855436474085, + "v_perp_norm": 4.241627216339111, + "layer_1_v_norm": 1.1021486520767212, + "layer_1_cos_v_neg_g": 0.004052259027957916, + "layer_2_v_norm": 0.9949029088020325, + "layer_2_cos_v_neg_g": 0.0033346449490636587, + "layer_3_v_norm": 1.0623254776000977, + "layer_3_cos_v_neg_g": 0.006491675972938538, + "layer_4_v_norm": 1.1579418182373047, + "layer_4_cos_v_neg_g": 0.003442175220698118, + "layer_5_v_norm": 1.1922672986984253, + "layer_5_cos_v_neg_g": 0.0033360186498612165, + "layer_6_v_norm": 1.19930899143219, + "layer_6_cos_v_neg_g": 0.004095854237675667, + "layer_7_v_norm": 1.197077989578247, + "layer_7_cos_v_neg_g": 0.004791993647813797, + "layer_8_v_norm": 1.2032617330551147, + "layer_8_cos_v_neg_g": 0.0035203846637159586, + "layer_9_v_norm": 1.2033737897872925, + "layer_9_cos_v_neg_g": 0.0036309543065726757, + "layer_10_v_norm": 1.2071540355682373, + "layer_10_cos_v_neg_g": 0.0031161438673734665, + "layer_11_v_norm": 1.2047548294067383, + "layer_11_cos_v_neg_g": 0.0026729987002909184, + "layer_12_v_norm": 1.2062373161315918, + "layer_12_cos_v_neg_g": 0.002441051648929715, + "layer_1_sharpness": 0.003546773921698332, + "layer_2_sharpness": 0.00013802808825857937, + "layer_3_sharpness": 0.0004536914639174938, + "layer_4_sharpness": 6.709004810545594e-05, + "layer_5_sharpness": 8.54669779073447e-05, + "layer_6_sharpness": 0.0001395725557813421, + "layer_7_sharpness": 0.0001756336132530123, + "layer_8_sharpness": 0.00010595512139843777, + 
"layer_9_sharpness": 8.667054498801008e-05, + "layer_10_sharpness": 6.091804607422091e-05, + "layer_11_sharpness": 5.492909622262232e-05, + "layer_12_sharpness": 5.641996904159896e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..ee1a0001c6568dfc23926f34dc3929153f2534fa --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.236464023590088, + "total_l1_linf_norm": 34613.7265625, + "total_spectral_norm": 4.236464500427246, + "layer_1_update_fnorm": 1.096647024154663, + "layer_1_max_l1_linf_norm": 0.888338029384613, + "layer_1_max_spectral_norm": 0.024106692522764206, + "layer_2_update_fnorm": 0.9719258546829224, + "layer_2_max_l1_linf_norm": 0.7981939911842346, + "layer_2_max_spectral_norm": 0.024069033563137054, + "layer_3_update_fnorm": 1.0724579095840454, + "layer_3_max_l1_linf_norm": 0.8057098984718323, + "layer_3_max_spectral_norm": 0.02408250980079174, + "layer_4_update_fnorm": 1.155230164527893, + "layer_4_max_l1_linf_norm": 0.8350226879119873, + "layer_4_max_spectral_norm": 0.024094244465231895, + "layer_5_update_fnorm": 1.1896718740463257, + "layer_5_max_l1_linf_norm": 0.8356372714042664, + "layer_5_max_spectral_norm": 0.024108244106173515, + "layer_6_update_fnorm": 1.1962077617645264, + "layer_6_max_l1_linf_norm": 0.8253604173660278, + "layer_6_max_spectral_norm": 0.02409534901380539, + "layer_7_update_fnorm": 1.1955891847610474, + "layer_7_max_l1_linf_norm": 0.8267492055892944, + "layer_7_max_spectral_norm": 0.024093488231301308, + "layer_8_update_fnorm": 1.2031095027923584, + "layer_8_max_l1_linf_norm": 0.8193756937980652, + "layer_8_max_spectral_norm": 0.024093540385365486, + "layer_9_update_fnorm": 1.2036465406417847, + "layer_9_max_l1_linf_norm": 0.8157405853271484, + "layer_9_max_spectral_norm": 0.024086372926831245, + "layer_10_update_fnorm": 1.2067022323608398, + "layer_10_max_l1_linf_norm": 0.8115507960319519, + "layer_10_max_spectral_norm": 0.02408296801149845, + "layer_11_update_fnorm": 1.2018629312515259, + "layer_11_max_l1_linf_norm": 0.7942612171173096, + "layer_11_max_spectral_norm": 0.024091221392154694, + "layer_12_update_fnorm": 1.203647494316101, + "layer_12_max_l1_linf_norm": 0.7923282384872437, + "layer_12_max_spectral_norm": 0.024093758314847946, + "total_sharpness": 0.0008888746961019933, + "ip_v_neg_g": 0.009075458161532879, + "cos_v_neg_g": 0.00218355655670166, + "v_norm": 4.236464023590088, + "g_norm": 0.9810712933540344, + "hv_norm": 0.5644016265869141, + "cos_v_hv": 0.00667199632152915, + "hg_norm": 23.601469039916992, + "cos_g_hg": 0.6325322985649109, + "v_parallel_norm": 0.0018790775211527944, + "v_perp_norm": 4.23646354675293, + "layer_1_v_norm": 1.096647024154663, + "layer_1_cos_v_neg_g": 0.0035705657210201025, + "layer_2_v_norm": 0.9719258546829224, + "layer_2_cos_v_neg_g": 0.004617450758814812, + "layer_3_v_norm": 1.0724579095840454, + "layer_3_cos_v_neg_g": 0.005384411197155714, + "layer_4_v_norm": 1.155230164527893, + "layer_4_cos_v_neg_g": 0.0038445256650447845, + "layer_5_v_norm": 1.1896718740463257, + "layer_5_cos_v_neg_g": 0.0036409031599760056, + "layer_6_v_norm": 1.1962077617645264, + "layer_6_cos_v_neg_g": 0.005385868716984987, + "layer_7_v_norm": 1.1955891847610474, + 
"layer_7_cos_v_neg_g": 0.004079924896359444, + "layer_8_v_norm": 1.2031095027923584, + "layer_8_cos_v_neg_g": 0.0037289580795913935, + "layer_9_v_norm": 1.2036465406417847, + "layer_9_cos_v_neg_g": 0.004232081584632397, + "layer_10_v_norm": 1.2067022323608398, + "layer_10_cos_v_neg_g": 0.003862569108605385, + "layer_11_v_norm": 1.2018629312515259, + "layer_11_cos_v_neg_g": 0.004157556686550379, + "layer_12_v_norm": 1.203647494316101, + "layer_12_cos_v_neg_g": 0.004011539742350578, + "layer_1_sharpness": 0.0032346947118639946, + "layer_2_sharpness": 0.00010391927207820117, + "layer_3_sharpness": 0.00015337624063249677, + "layer_4_sharpness": 7.596376235596836e-05, + "layer_5_sharpness": 8.049626194406301e-05, + "layer_6_sharpness": 0.00013381244207266718, + "layer_7_sharpness": 0.00014751177513971925, + "layer_8_sharpness": 8.921806875150651e-05, + "layer_9_sharpness": 7.858011667849496e-05, + "layer_10_sharpness": 5.873971531400457e-05, + "layer_11_sharpness": 6.1020484281471e-05, + "layer_12_sharpness": 6.96162533131428e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..58c03a038086a47fdc672e12de1f638d65587ade --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 3.077641725540161, + "total_l1_linf_norm": 25340.31640625, + "total_spectral_norm": 3.077641248703003, + "layer_1_update_fnorm": 0.869880735874176, + "layer_1_max_l1_linf_norm": 0.632656991481781, + "layer_1_max_spectral_norm": 0.017217176035046577, + "layer_2_update_fnorm": 0.8284845352172852, + "layer_2_max_l1_linf_norm": 0.6338022947311401, + "layer_2_max_spectral_norm": 0.017209213227033615, + "layer_3_update_fnorm": 0.8056959509849548, + "layer_3_max_l1_linf_norm": 0.6250077486038208, + "layer_3_max_spectral_norm": 0.017204055562615395, + "layer_4_update_fnorm": 0.809989869594574, + "layer_4_max_l1_linf_norm": 0.6083816289901733, + "layer_4_max_spectral_norm": 0.017214149236679077, + "layer_5_update_fnorm": 0.8347806930541992, + "layer_5_max_l1_linf_norm": 0.5921434164047241, + "layer_5_max_spectral_norm": 0.01722865365445614, + "layer_6_update_fnorm": 0.8433071970939636, + "layer_6_max_l1_linf_norm": 0.5860642194747925, + "layer_6_max_spectral_norm": 0.017198076471686363, + "layer_7_update_fnorm": 0.848206639289856, + "layer_7_max_l1_linf_norm": 0.5812393426895142, + "layer_7_max_spectral_norm": 0.017210770398378372, + "layer_8_update_fnorm": 0.8528771996498108, + "layer_8_max_l1_linf_norm": 0.5785003900527954, + "layer_8_max_spectral_norm": 0.017228273674845695, + "layer_9_update_fnorm": 0.8537211418151855, + "layer_9_max_l1_linf_norm": 0.5757372379302979, + "layer_9_max_spectral_norm": 0.017211059108376503, + "layer_10_update_fnorm": 0.8528174757957458, + "layer_10_max_l1_linf_norm": 0.5798228979110718, + "layer_10_max_spectral_norm": 0.017206067219376564, + "layer_11_update_fnorm": 0.8513349294662476, + "layer_11_max_l1_linf_norm": 0.5880016684532166, + "layer_11_max_spectral_norm": 0.017203696072101593, + "layer_12_update_fnorm": 0.8576872944831848, + "layer_12_max_l1_linf_norm": 0.577384352684021, + "layer_12_max_spectral_norm": 0.017231574282050133, + "total_sharpness": 0.008633490651845932, + "ip_v_neg_g": 0.03618292137980461, + "cos_v_neg_g": 
0.011758551001548767, + "v_norm": 3.077641725540161, + "g_norm": 0.9998428821563721, + "hv_norm": 0.8577008247375488, + "cos_v_hv": 0.030979091301560402, + "hg_norm": 9.979012489318848, + "cos_g_hg": 0.5745638608932495, + "v_parallel_norm": 0.0013885984662920237, + "v_perp_norm": 3.077641487121582, + "layer_1_v_norm": 0.869880735874176, + "layer_1_cos_v_neg_g": 0.014363839291036129, + "layer_2_v_norm": 0.8284845352172852, + "layer_2_cos_v_neg_g": 0.020675092935562134, + "layer_3_v_norm": 0.8056958913803101, + "layer_3_cos_v_neg_g": 0.02355700172483921, + "layer_4_v_norm": 0.809989869594574, + "layer_4_cos_v_neg_g": 0.02101176232099533, + "layer_5_v_norm": 0.8347806930541992, + "layer_5_cos_v_neg_g": 0.017882229760289192, + "layer_6_v_norm": 0.8433071374893188, + "layer_6_cos_v_neg_g": 0.016870057210326195, + "layer_7_v_norm": 0.848206639289856, + "layer_7_cos_v_neg_g": 0.015466217882931232, + "layer_8_v_norm": 0.8528771996498108, + "layer_8_cos_v_neg_g": 0.014560133218765259, + "layer_9_v_norm": 0.8537211418151855, + "layer_9_cos_v_neg_g": 0.012808021157979965, + "layer_10_v_norm": 0.8528174757957458, + "layer_10_cos_v_neg_g": 0.0127249825745821, + "layer_11_v_norm": 0.8513349890708923, + "layer_11_cos_v_neg_g": 0.009952896274626255, + "layer_12_v_norm": 0.8576872944831848, + "layer_12_cos_v_neg_g": 0.007306471932679415, + "layer_1_sharpness": 0.0073931170627474785, + "layer_2_sharpness": 0.0014796648174524307, + "layer_3_sharpness": 0.002069035777822137, + "layer_4_sharpness": 0.0014037651708349586, + "layer_5_sharpness": 0.0015575126744806767, + "layer_6_sharpness": 0.0013401723699644208, + "layer_7_sharpness": 0.0011710482649505138, + "layer_8_sharpness": 0.0009188554831780493, + "layer_9_sharpness": 0.0006110340473242104, + "layer_10_sharpness": 0.0005547762266360223, + "layer_11_sharpness": 0.0005060501280240715, + "layer_12_sharpness": 0.00040416777483187616 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..11fdcf3b1b0ec54e63041de9eb463ca4b1201ec1 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.225167274475098, + "total_l1_linf_norm": 34496.546875, + "total_spectral_norm": 4.225167274475098, + "layer_1_update_fnorm": 1.0863522291183472, + "layer_1_max_l1_linf_norm": 0.8927714824676514, + "layer_1_max_spectral_norm": 0.024105535820126534, + "layer_2_update_fnorm": 0.951998770236969, + "layer_2_max_l1_linf_norm": 0.7887965440750122, + "layer_2_max_spectral_norm": 0.024074871093034744, + "layer_3_update_fnorm": 1.073307752609253, + "layer_3_max_l1_linf_norm": 0.8048747777938843, + "layer_3_max_spectral_norm": 0.024088183417916298, + "layer_4_update_fnorm": 1.1589202880859375, + "layer_4_max_l1_linf_norm": 0.8301777839660645, + "layer_4_max_spectral_norm": 0.024101290851831436, + "layer_5_update_fnorm": 1.191796064376831, + "layer_5_max_l1_linf_norm": 0.8351926803588867, + "layer_5_max_spectral_norm": 0.024124903604388237, + "layer_6_update_fnorm": 1.1953336000442505, + "layer_6_max_l1_linf_norm": 0.8352260589599609, + "layer_6_max_spectral_norm": 0.02409578487277031, + "layer_7_update_fnorm": 1.1947087049484253, + "layer_7_max_l1_linf_norm": 0.8198702335357666, + "layer_7_max_spectral_norm": 0.02409202791750431, + 
"layer_8_update_fnorm": 1.2008097171783447, + "layer_8_max_l1_linf_norm": 0.814193844795227, + "layer_8_max_spectral_norm": 0.02408183366060257, + "layer_9_update_fnorm": 1.2000161409378052, + "layer_9_max_l1_linf_norm": 0.8096035718917847, + "layer_9_max_spectral_norm": 0.02408718131482601, + "layer_10_update_fnorm": 1.2064564228057861, + "layer_10_max_l1_linf_norm": 0.8113355040550232, + "layer_10_max_spectral_norm": 0.024089567363262177, + "layer_11_update_fnorm": 1.2009276151657104, + "layer_11_max_l1_linf_norm": 0.790952742099762, + "layer_11_max_spectral_norm": 0.02410082519054413, + "layer_12_update_fnorm": 1.2047268152236938, + "layer_12_max_l1_linf_norm": 0.7918331623077393, + "layer_12_max_spectral_norm": 0.024088062345981598, + "total_sharpness": 0.0010090017458423972, + "ip_v_neg_g": 0.009022959508001804, + "cos_v_neg_g": 0.0019793857354670763, + "v_norm": 4.225167274475098, + "g_norm": 1.0788837671279907, + "hv_norm": 0.7339672446250916, + "cos_v_hv": 0.005808435846120119, + "hg_norm": 39.76354217529297, + "cos_g_hg": 0.6085693836212158, + "v_parallel_norm": 0.002088333247229457, + "v_perp_norm": 4.2251667976379395, + "layer_1_v_norm": 1.0863522291183472, + "layer_1_cos_v_neg_g": 0.003271407214924693, + "layer_2_v_norm": 0.951998770236969, + "layer_2_cos_v_neg_g": 0.0029400503262877464, + "layer_3_v_norm": 1.073307752609253, + "layer_3_cos_v_neg_g": 0.004767054691910744, + "layer_4_v_norm": 1.1589202880859375, + "layer_4_cos_v_neg_g": 0.003463577013462782, + "layer_5_v_norm": 1.191796064376831, + "layer_5_cos_v_neg_g": 0.003730088472366333, + "layer_6_v_norm": 1.1953336000442505, + "layer_6_cos_v_neg_g": 0.0032212277874350548, + "layer_7_v_norm": 1.1947087049484253, + "layer_7_cos_v_neg_g": 0.0036253295838832855, + "layer_8_v_norm": 1.2008097171783447, + "layer_8_cos_v_neg_g": 0.004288895521312952, + "layer_9_v_norm": 1.2000161409378052, + "layer_9_cos_v_neg_g": 0.00419284263625741, + "layer_10_v_norm": 1.2064564228057861, + "layer_10_cos_v_neg_g": 0.005553028080612421, + "layer_11_v_norm": 1.2009276151657104, + "layer_11_cos_v_neg_g": 0.005378691479563713, + "layer_12_v_norm": 1.2047268152236938, + "layer_12_cos_v_neg_g": 0.005087799858301878, + "layer_1_sharpness": 0.004223665688186884, + "layer_2_sharpness": 0.00019369208894204348, + "layer_3_sharpness": 0.00022000200988259166, + "layer_4_sharpness": 9.75995062617585e-05, + "layer_5_sharpness": 9.708542347652838e-05, + "layer_6_sharpness": 0.0001478587946621701, + "layer_7_sharpness": 0.00017564855806995183, + "layer_8_sharpness": 0.00010463532817084342, + "layer_9_sharpness": 7.045592792565003e-05, + "layer_10_sharpness": 5.447738294606097e-05, + "layer_11_sharpness": 5.600060467259027e-05, + "layer_12_sharpness": 4.679607445723377e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..fff6c59e7d4a32a64db7a636e94ad9fec0771b27 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.235311985015869, + "total_l1_linf_norm": 34590.859375, + "total_spectral_norm": 4.235312461853027, + "layer_1_update_fnorm": 1.1029784679412842, + "layer_1_max_l1_linf_norm": 0.8839428424835205, + "layer_1_max_spectral_norm": 0.02410368248820305, + "layer_2_update_fnorm": 0.9836986660957336, 
+ "layer_2_max_l1_linf_norm": 0.7890403270721436, + "layer_2_max_spectral_norm": 0.024074936285614967, + "layer_3_update_fnorm": 1.0617496967315674, + "layer_3_max_l1_linf_norm": 0.808857798576355, + "layer_3_max_spectral_norm": 0.024076541885733604, + "layer_4_update_fnorm": 1.1606148481369019, + "layer_4_max_l1_linf_norm": 0.8367846608161926, + "layer_4_max_spectral_norm": 0.0240944717079401, + "layer_5_update_fnorm": 1.1865112781524658, + "layer_5_max_l1_linf_norm": 0.8309240341186523, + "layer_5_max_spectral_norm": 0.024117080494761467, + "layer_6_update_fnorm": 1.1951982975006104, + "layer_6_max_l1_linf_norm": 0.827896237373352, + "layer_6_max_spectral_norm": 0.024095486849546432, + "layer_7_update_fnorm": 1.1933753490447998, + "layer_7_max_l1_linf_norm": 0.8198659420013428, + "layer_7_max_spectral_norm": 0.024084879085421562, + "layer_8_update_fnorm": 1.2011022567749023, + "layer_8_max_l1_linf_norm": 0.8170076608657837, + "layer_8_max_spectral_norm": 0.02409113198518753, + "layer_9_update_fnorm": 1.2015786170959473, + "layer_9_max_l1_linf_norm": 0.8118948936462402, + "layer_9_max_spectral_norm": 0.024088652804493904, + "layer_10_update_fnorm": 1.20809805393219, + "layer_10_max_l1_linf_norm": 0.8256208896636963, + "layer_10_max_spectral_norm": 0.024084584787487984, + "layer_11_update_fnorm": 1.2036681175231934, + "layer_11_max_l1_linf_norm": 0.8005030155181885, + "layer_11_max_spectral_norm": 0.024090483784675598, + "layer_12_update_fnorm": 1.2062684297561646, + "layer_12_max_l1_linf_norm": 0.7866458892822266, + "layer_12_max_spectral_norm": 0.02409215457737446, + "total_sharpness": 0.0009984540520235896, + "ip_v_neg_g": 0.009751340374350548, + "cos_v_neg_g": 0.0011563256848603487, + "v_norm": 4.235311985015869, + "g_norm": 1.9911260604858398, + "hv_norm": 0.946230947971344, + "cos_v_hv": 0.004469062201678753, + "hg_norm": 146.39910888671875, + "cos_g_hg": 0.7096859216690063, + "v_parallel_norm": 0.0013904670486226678, + "v_perp_norm": 4.235311985015869, + "layer_1_v_norm": 1.1029784679412842, + "layer_1_cos_v_neg_g": 0.0025997296907007694, + "layer_2_v_norm": 0.9836986660957336, + "layer_2_cos_v_neg_g": 0.005173094570636749, + "layer_3_v_norm": 1.0617496967315674, + "layer_3_cos_v_neg_g": 0.004147448111325502, + "layer_4_v_norm": 1.1606148481369019, + "layer_4_cos_v_neg_g": 0.0021988877560943365, + "layer_5_v_norm": 1.1865112781524658, + "layer_5_cos_v_neg_g": 0.0023563429713249207, + "layer_6_v_norm": 1.1951984167099, + "layer_6_cos_v_neg_g": 0.002042863517999649, + "layer_7_v_norm": 1.1933753490447998, + "layer_7_cos_v_neg_g": 0.0018728416180238128, + "layer_8_v_norm": 1.2011022567749023, + "layer_8_cos_v_neg_g": 0.0015394623624160886, + "layer_9_v_norm": 1.2015786170959473, + "layer_9_cos_v_neg_g": -3.5321693303558277e-06, + "layer_10_v_norm": 1.20809805393219, + "layer_10_cos_v_neg_g": 0.001056745881214738, + "layer_11_v_norm": 1.2036681175231934, + "layer_11_cos_v_neg_g": 0.0011870019370689988, + "layer_12_v_norm": 1.2062684297561646, + "layer_12_cos_v_neg_g": 0.0024184463545680046, + "layer_1_sharpness": 0.0029766634106636047, + "layer_2_sharpness": 0.00041865534149110317, + "layer_3_sharpness": 0.0006710541783832014, + "layer_4_sharpness": 6.027163180988282e-05, + "layer_5_sharpness": 8.133818482747301e-05, + "layer_6_sharpness": 0.00010972548625431955, + "layer_7_sharpness": 0.00014134390221443027, + "layer_8_sharpness": 7.094454485923052e-05, + "layer_9_sharpness": 5.1395581976976246e-05, + "layer_10_sharpness": 4.366972643765621e-05, + "layer_11_sharpness": 
4.398605233291164e-05, + "layer_12_sharpness": 3.6811350582866e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..615463721d6bc3424dcd89157734ea8730a46ff7 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.241275310516357, + "total_l1_linf_norm": 34620.625, + "total_spectral_norm": 4.241275310516357, + "layer_1_update_fnorm": 1.0994982719421387, + "layer_1_max_l1_linf_norm": 0.895410418510437, + "layer_1_max_spectral_norm": 0.024106549099087715, + "layer_2_update_fnorm": 0.9806928634643555, + "layer_2_max_l1_linf_norm": 0.788495659828186, + "layer_2_max_spectral_norm": 0.024073775857686996, + "layer_3_update_fnorm": 1.079835057258606, + "layer_3_max_l1_linf_norm": 0.8163192272186279, + "layer_3_max_spectral_norm": 0.024086177349090576, + "layer_4_update_fnorm": 1.160789132118225, + "layer_4_max_l1_linf_norm": 0.840019166469574, + "layer_4_max_spectral_norm": 0.024097932502627373, + "layer_5_update_fnorm": 1.1910024881362915, + "layer_5_max_l1_linf_norm": 0.8298439979553223, + "layer_5_max_spectral_norm": 0.024120792746543884, + "layer_6_update_fnorm": 1.1954872608184814, + "layer_6_max_l1_linf_norm": 0.8273696303367615, + "layer_6_max_spectral_norm": 0.02408495731651783, + "layer_7_update_fnorm": 1.1964137554168701, + "layer_7_max_l1_linf_norm": 0.818583607673645, + "layer_7_max_spectral_norm": 0.024089915677905083, + "layer_8_update_fnorm": 1.2047282457351685, + "layer_8_max_l1_linf_norm": 0.819219708442688, + "layer_8_max_spectral_norm": 0.02407834306359291, + "layer_9_update_fnorm": 1.2039309740066528, + "layer_9_max_l1_linf_norm": 0.8136780261993408, + "layer_9_max_spectral_norm": 0.024091502651572227, + "layer_10_update_fnorm": 1.2083441019058228, + "layer_10_max_l1_linf_norm": 0.8158376216888428, + "layer_10_max_spectral_norm": 0.02408788353204727, + "layer_11_update_fnorm": 1.202123999595642, + "layer_11_max_l1_linf_norm": 0.7983935475349426, + "layer_11_max_spectral_norm": 0.024092189967632294, + "layer_12_update_fnorm": 1.2064552307128906, + "layer_12_max_l1_linf_norm": 0.7873327732086182, + "layer_12_max_spectral_norm": 0.024088535457849503, + "total_sharpness": 0.0008286154479719698, + "ip_v_neg_g": 0.006489698309451342, + "cos_v_neg_g": 0.0016359967412427068, + "v_norm": 4.241275310516357, + "g_norm": 0.9352885484695435, + "hv_norm": 0.826004147529602, + "cos_v_hv": 0.0042546833865344524, + "hg_norm": 25.998598098754883, + "cos_g_hg": 0.5463235378265381, + "v_parallel_norm": 0.0018450136994943023, + "v_perp_norm": 4.241274833679199, + "layer_1_v_norm": 1.0994982719421387, + "layer_1_cos_v_neg_g": 0.0034276554360985756, + "layer_2_v_norm": 0.9806928634643555, + "layer_2_cos_v_neg_g": 0.0027732648886740208, + "layer_3_v_norm": 1.079835057258606, + "layer_3_cos_v_neg_g": 0.0026829217094928026, + "layer_4_v_norm": 1.160789132118225, + "layer_4_cos_v_neg_g": 0.00268897064961493, + "layer_5_v_norm": 1.1910024881362915, + "layer_5_cos_v_neg_g": 0.0021998488809913397, + "layer_6_v_norm": 1.1954872608184814, + "layer_6_cos_v_neg_g": 0.002590066520497203, + "layer_7_v_norm": 1.1964137554168701, + "layer_7_cos_v_neg_g": 0.0029386484529823065, + "layer_8_v_norm": 1.2047282457351685, + "layer_8_cos_v_neg_g": 
0.003700926434248686, + "layer_9_v_norm": 1.2039309740066528, + "layer_9_cos_v_neg_g": 0.0033927594777196646, + "layer_10_v_norm": 1.2083441019058228, + "layer_10_cos_v_neg_g": 0.0034869685769081116, + "layer_11_v_norm": 1.2021241188049316, + "layer_11_cos_v_neg_g": 0.0029940688982605934, + "layer_12_v_norm": 1.2064552307128906, + "layer_12_cos_v_neg_g": 0.0015983968041837215, + "layer_1_sharpness": 0.003557139076292515, + "layer_2_sharpness": 9.61018304224126e-05, + "layer_3_sharpness": 0.00019798714492935687, + "layer_4_sharpness": 7.46916703064926e-05, + "layer_5_sharpness": 6.721886165905744e-05, + "layer_6_sharpness": 0.00010487997496966273, + "layer_7_sharpness": 0.00012255058391019702, + "layer_8_sharpness": 7.307873602258042e-05, + "layer_9_sharpness": 6.19667989667505e-05, + "layer_10_sharpness": 4.8823279939824715e-05, + "layer_11_sharpness": 4.3315714719938114e-05, + "layer_12_sharpness": 4.8166533815674484e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..3c42f8c91ec251d469b2e290ea1e06feeccce02f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.226624488830566, + "total_l1_linf_norm": 34548.703125, + "total_spectral_norm": 4.226624965667725, + "layer_1_update_fnorm": 1.0882418155670166, + "layer_1_max_l1_linf_norm": 0.8873822689056396, + "layer_1_max_spectral_norm": 0.024100128561258316, + "layer_2_update_fnorm": 0.9668118357658386, + "layer_2_max_l1_linf_norm": 0.7866858243942261, + "layer_2_max_spectral_norm": 0.024070464074611664, + "layer_3_update_fnorm": 1.0759546756744385, + "layer_3_max_l1_linf_norm": 0.806381344795227, + "layer_3_max_spectral_norm": 0.02408556640148163, + "layer_4_update_fnorm": 1.1566799879074097, + "layer_4_max_l1_linf_norm": 0.834004282951355, + "layer_4_max_spectral_norm": 0.024107152596116066, + "layer_5_update_fnorm": 1.1855716705322266, + "layer_5_max_l1_linf_norm": 0.8288432359695435, + "layer_5_max_spectral_norm": 0.024107344448566437, + "layer_6_update_fnorm": 1.1936882734298706, + "layer_6_max_l1_linf_norm": 0.8190489411354065, + "layer_6_max_spectral_norm": 0.024091124534606934, + "layer_7_update_fnorm": 1.1879240274429321, + "layer_7_max_l1_linf_norm": 0.8127843737602234, + "layer_7_max_spectral_norm": 0.024092290550470352, + "layer_8_update_fnorm": 1.1950201988220215, + "layer_8_max_l1_linf_norm": 0.8013387322425842, + "layer_8_max_spectral_norm": 0.024089301005005836, + "layer_9_update_fnorm": 1.1976912021636963, + "layer_9_max_l1_linf_norm": 0.8062009811401367, + "layer_9_max_spectral_norm": 0.02408808283507824, + "layer_10_update_fnorm": 1.2053433656692505, + "layer_10_max_l1_linf_norm": 0.8090764284133911, + "layer_10_max_spectral_norm": 0.024086009711027145, + "layer_11_update_fnorm": 1.201565146446228, + "layer_11_max_l1_linf_norm": 0.8016957640647888, + "layer_11_max_spectral_norm": 0.024089191108942032, + "layer_12_update_fnorm": 1.205007791519165, + "layer_12_max_l1_linf_norm": 0.7865544557571411, + "layer_12_max_spectral_norm": 0.02409714087843895, + "total_sharpness": 0.001037371577695012, + "ip_v_neg_g": 0.006860828958451748, + "cos_v_neg_g": 0.001777514349669218, + "v_norm": 4.226624488830566, + "g_norm": 0.9132081866264343, + "hv_norm": 0.6956170797348022, + 
"cos_v_hv": 0.006303152069449425, + "hg_norm": 18.938344955444336, + "cos_g_hg": 0.6193296313285828, + "v_parallel_norm": 0.002180566545575857, + "v_perp_norm": 4.226624011993408, + "layer_1_v_norm": 1.0882418155670166, + "layer_1_cos_v_neg_g": 0.004080008249729872, + "layer_2_v_norm": 0.9668118357658386, + "layer_2_cos_v_neg_g": 0.0033819801174104214, + "layer_3_v_norm": 1.075954794883728, + "layer_3_cos_v_neg_g": 0.0038290005177259445, + "layer_4_v_norm": 1.1566799879074097, + "layer_4_cos_v_neg_g": 0.0027716776821762323, + "layer_5_v_norm": 1.1855716705322266, + "layer_5_cos_v_neg_g": 0.003277139738202095, + "layer_6_v_norm": 1.1936882734298706, + "layer_6_cos_v_neg_g": 0.0035877141635864973, + "layer_7_v_norm": 1.1879240274429321, + "layer_7_cos_v_neg_g": 0.0020401536021381617, + "layer_8_v_norm": 1.1950201988220215, + "layer_8_cos_v_neg_g": 0.0026160553097724915, + "layer_9_v_norm": 1.1976912021636963, + "layer_9_cos_v_neg_g": 0.0017233287217095494, + "layer_10_v_norm": 1.2053433656692505, + "layer_10_cos_v_neg_g": 0.0028177739586681128, + "layer_11_v_norm": 1.2015652656555176, + "layer_11_cos_v_neg_g": 0.0024544394109398127, + "layer_12_v_norm": 1.205007791519165, + "layer_12_cos_v_neg_g": 0.002289868425577879, + "layer_1_sharpness": 0.0036181206814944744, + "layer_2_sharpness": 0.00011291824193904176, + "layer_3_sharpness": 0.00020347921235952526, + "layer_4_sharpness": 8.729760156711563e-05, + "layer_5_sharpness": 0.00010228226892650127, + "layer_6_sharpness": 0.00013168936129659414, + "layer_7_sharpness": 0.00023611958022229373, + "layer_8_sharpness": 0.00016603631956968457, + "layer_9_sharpness": 7.485519745387137e-05, + "layer_10_sharpness": 4.971603266312741e-05, + "layer_11_sharpness": 4.028256080346182e-05, + "layer_12_sharpness": 4.5215096179163083e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..fa1d50ad7ddabfc2377e813705c7169a9d8e349d --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.245537281036377, + "total_l1_linf_norm": 34680.78125, + "total_spectral_norm": 4.245537281036377, + "layer_1_update_fnorm": 1.091915249824524, + "layer_1_max_l1_linf_norm": 0.8898967504501343, + "layer_1_max_spectral_norm": 0.024104539304971695, + "layer_2_update_fnorm": 0.9876782298088074, + "layer_2_max_l1_linf_norm": 0.7950257062911987, + "layer_2_max_spectral_norm": 0.024079568684101105, + "layer_3_update_fnorm": 1.0885769128799438, + "layer_3_max_l1_linf_norm": 0.8215408325195312, + "layer_3_max_spectral_norm": 0.024090591818094254, + "layer_4_update_fnorm": 1.1675679683685303, + "layer_4_max_l1_linf_norm": 0.8350714445114136, + "layer_4_max_spectral_norm": 0.02410644292831421, + "layer_5_update_fnorm": 1.1940289735794067, + "layer_5_max_l1_linf_norm": 0.8326858282089233, + "layer_5_max_spectral_norm": 0.02411949262022972, + "layer_6_update_fnorm": 1.1952083110809326, + "layer_6_max_l1_linf_norm": 0.822238564491272, + "layer_6_max_spectral_norm": 0.02408970519900322, + "layer_7_update_fnorm": 1.1922060251235962, + "layer_7_max_l1_linf_norm": 0.8141418695449829, + "layer_7_max_spectral_norm": 0.024088764563202858, + "layer_8_update_fnorm": 1.2005870342254639, + "layer_8_max_l1_linf_norm": 0.8102567791938782, + 
"layer_8_max_spectral_norm": 0.02408423274755478, + "layer_9_update_fnorm": 1.200616717338562, + "layer_9_max_l1_linf_norm": 0.8092933893203735, + "layer_9_max_spectral_norm": 0.024088185280561447, + "layer_10_update_fnorm": 1.208848237991333, + "layer_10_max_l1_linf_norm": 0.8161365985870361, + "layer_10_max_spectral_norm": 0.02408529631793499, + "layer_11_update_fnorm": 1.203478217124939, + "layer_11_max_l1_linf_norm": 0.8017374277114868, + "layer_11_max_spectral_norm": 0.024088898673653603, + "layer_12_update_fnorm": 1.2057366371154785, + "layer_12_max_l1_linf_norm": 0.7886875867843628, + "layer_12_max_spectral_norm": 0.024092333391308784, + "total_sharpness": 0.0006963129853829741, + "ip_v_neg_g": 0.007883765734732151, + "cos_v_neg_g": 0.0017926241271197796, + "v_norm": 4.245537281036377, + "g_norm": 1.0358855724334717, + "hv_norm": 0.524837851524353, + "cos_v_hv": 0.005632639862596989, + "hg_norm": 75.6171646118164, + "cos_g_hg": 0.4556047022342682, + "v_parallel_norm": 0.001573158660903573, + "v_perp_norm": 4.245536804199219, + "layer_1_v_norm": 1.091915249824524, + "layer_1_cos_v_neg_g": 0.003127921372652054, + "layer_2_v_norm": 0.9876782298088074, + "layer_2_cos_v_neg_g": 0.0031869299709796906, + "layer_3_v_norm": 1.0885769128799438, + "layer_3_cos_v_neg_g": 0.0029724417254328728, + "layer_4_v_norm": 1.1675679683685303, + "layer_4_cos_v_neg_g": 0.0034895900171250105, + "layer_5_v_norm": 1.1940289735794067, + "layer_5_cos_v_neg_g": 0.0031871392857283354, + "layer_6_v_norm": 1.1952083110809326, + "layer_6_cos_v_neg_g": 0.0038935495540499687, + "layer_7_v_norm": 1.1922060251235962, + "layer_7_cos_v_neg_g": 0.005685661919414997, + "layer_8_v_norm": 1.2005870342254639, + "layer_8_cos_v_neg_g": 0.004725256003439426, + "layer_9_v_norm": 1.200616717338562, + "layer_9_cos_v_neg_g": 0.0037857077550143003, + "layer_10_v_norm": 1.208848237991333, + "layer_10_cos_v_neg_g": 0.003176552476361394, + "layer_11_v_norm": 1.2034780979156494, + "layer_11_cos_v_neg_g": 0.002712057903409004, + "layer_12_v_norm": 1.2057366371154785, + "layer_12_cos_v_neg_g": 0.00268190773203969, + "layer_1_sharpness": 0.0033408894669264555, + "layer_2_sharpness": 6.460881559178233e-05, + "layer_3_sharpness": 0.00013742568262387067, + "layer_4_sharpness": 6.7560700699687e-05, + "layer_5_sharpness": 6.728868174832314e-05, + "layer_6_sharpness": 0.00010471885616425425, + "layer_7_sharpness": 0.00013858232705388218, + "layer_8_sharpness": 8.369860734092072e-05, + "layer_9_sharpness": 5.9687543398467824e-05, + "layer_10_sharpness": 4.182332122582011e-05, + "layer_11_sharpness": 3.756885780603625e-05, + "layer_12_sharpness": 4.030976924696006e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..4d23de6832cb54736a19a33270d49204d077ed2e --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.23732852935791, + "total_l1_linf_norm": 34599.27734375, + "total_spectral_norm": 4.237329006195068, + "layer_1_update_fnorm": 1.0913715362548828, + "layer_1_max_l1_linf_norm": 0.887415885925293, + "layer_1_max_spectral_norm": 0.024105578660964966, + "layer_2_update_fnorm": 0.9829546809196472, + "layer_2_max_l1_linf_norm": 0.7940946817398071, + "layer_2_max_spectral_norm": 
0.024068031460046768, + "layer_3_update_fnorm": 1.0812349319458008, + "layer_3_max_l1_linf_norm": 0.8121098279953003, + "layer_3_max_spectral_norm": 0.02408529445528984, + "layer_4_update_fnorm": 1.1654425859451294, + "layer_4_max_l1_linf_norm": 0.8373784422874451, + "layer_4_max_spectral_norm": 0.02410629577934742, + "layer_5_update_fnorm": 1.1904442310333252, + "layer_5_max_l1_linf_norm": 0.8284645080566406, + "layer_5_max_spectral_norm": 0.024116551503539085, + "layer_6_update_fnorm": 1.1902490854263306, + "layer_6_max_l1_linf_norm": 0.822709858417511, + "layer_6_max_spectral_norm": 0.024091126397252083, + "layer_7_update_fnorm": 1.191638469696045, + "layer_7_max_l1_linf_norm": 0.8145323395729065, + "layer_7_max_spectral_norm": 0.024088051170110703, + "layer_8_update_fnorm": 1.2002593278884888, + "layer_8_max_l1_linf_norm": 0.8143464922904968, + "layer_8_max_spectral_norm": 0.024085210636258125, + "layer_9_update_fnorm": 1.2006745338439941, + "layer_9_max_l1_linf_norm": 0.8150333762168884, + "layer_9_max_spectral_norm": 0.024088062345981598, + "layer_10_update_fnorm": 1.2072737216949463, + "layer_10_max_l1_linf_norm": 0.8143396377563477, + "layer_10_max_spectral_norm": 0.024081284180283546, + "layer_11_update_fnorm": 1.19901704788208, + "layer_11_max_l1_linf_norm": 0.7927073240280151, + "layer_11_max_spectral_norm": 0.024096045643091202, + "layer_12_update_fnorm": 1.2055994272232056, + "layer_12_max_l1_linf_norm": 0.792071521282196, + "layer_12_max_spectral_norm": 0.024092385545372963, + "total_sharpness": 0.0006309311138466001, + "ip_v_neg_g": 0.005429267417639494, + "cos_v_neg_g": 0.0012838701950386167, + "v_norm": 4.23732852935791, + "g_norm": 0.9979941248893738, + "hv_norm": 0.5674511790275574, + "cos_v_hv": 0.004711351823061705, + "hg_norm": 24.022165298461914, + "cos_g_hg": 0.4250026047229767, + "v_parallel_norm": 0.0013454505242407322, + "v_perp_norm": 4.237328052520752, + "layer_1_v_norm": 1.0913715362548828, + "layer_1_cos_v_neg_g": 0.0025343934539705515, + "layer_2_v_norm": 0.9829546809196472, + "layer_2_cos_v_neg_g": 0.002461581025272608, + "layer_3_v_norm": 1.0812349319458008, + "layer_3_cos_v_neg_g": 0.0013362219324335456, + "layer_4_v_norm": 1.1654425859451294, + "layer_4_cos_v_neg_g": 0.0018222567159682512, + "layer_5_v_norm": 1.1904442310333252, + "layer_5_cos_v_neg_g": 0.0008571411017328501, + "layer_6_v_norm": 1.1902490854263306, + "layer_6_cos_v_neg_g": 0.0016884692013263702, + "layer_7_v_norm": 1.191638469696045, + "layer_7_cos_v_neg_g": 0.0015050111105665565, + "layer_8_v_norm": 1.2002593278884888, + "layer_8_cos_v_neg_g": 0.0018392268102616072, + "layer_9_v_norm": 1.2006745338439941, + "layer_9_cos_v_neg_g": 0.0029529710300266743, + "layer_10_v_norm": 1.2072737216949463, + "layer_10_cos_v_neg_g": 0.003925247583538294, + "layer_11_v_norm": 1.1990169286727905, + "layer_11_cos_v_neg_g": 0.0028830203227698803, + "layer_12_v_norm": 1.2055994272232056, + "layer_12_cos_v_neg_g": 0.003380567068234086, + "layer_1_sharpness": 0.0022542548831552267, + "layer_2_sharpness": 8.373675518669188e-05, + "layer_3_sharpness": 0.00026237740530632436, + "layer_4_sharpness": 6.048562499927357e-05, + "layer_5_sharpness": 6.718193617416546e-05, + "layer_6_sharpness": 0.00010477864270797, + "layer_7_sharpness": 0.00010898241453105584, + "layer_8_sharpness": 7.042376091703773e-05, + "layer_9_sharpness": 5.0872255087597296e-05, + "layer_10_sharpness": 3.9577182178618386e-05, + "layer_11_sharpness": 3.24826396536082e-05, + "layer_12_sharpness": 4.484244345803745e-05 +} \ No newline at end of 
file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_8000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..6ed0ea10fff8af31c7adaabb7d8e5344d16025f7 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.236710548400879, + "total_l1_linf_norm": 34570.140625, + "total_spectral_norm": 4.2367095947265625, + "layer_1_update_fnorm": 1.0914175510406494, + "layer_1_max_l1_linf_norm": 0.8827784061431885, + "layer_1_max_spectral_norm": 0.024098733440041542, + "layer_2_update_fnorm": 0.9786771535873413, + "layer_2_max_l1_linf_norm": 0.788241446018219, + "layer_2_max_spectral_norm": 0.02407369576394558, + "layer_3_update_fnorm": 1.0848404169082642, + "layer_3_max_l1_linf_norm": 0.819833517074585, + "layer_3_max_spectral_norm": 0.024086706340312958, + "layer_4_update_fnorm": 1.163871169090271, + "layer_4_max_l1_linf_norm": 0.8424252271652222, + "layer_4_max_spectral_norm": 0.024092527106404305, + "layer_5_update_fnorm": 1.1919387578964233, + "layer_5_max_l1_linf_norm": 0.8439692258834839, + "layer_5_max_spectral_norm": 0.024127241224050522, + "layer_6_update_fnorm": 1.19292414188385, + "layer_6_max_l1_linf_norm": 0.8269304037094116, + "layer_6_max_spectral_norm": 0.024091584607958794, + "layer_7_update_fnorm": 1.1928555965423584, + "layer_7_max_l1_linf_norm": 0.815254807472229, + "layer_7_max_spectral_norm": 0.024083593860268593, + "layer_8_update_fnorm": 1.200487732887268, + "layer_8_max_l1_linf_norm": 0.8115149736404419, + "layer_8_max_spectral_norm": 0.0240874532610178, + "layer_9_update_fnorm": 1.1996798515319824, + "layer_9_max_l1_linf_norm": 0.8095771074295044, + "layer_9_max_spectral_norm": 0.024084927514195442, + "layer_10_update_fnorm": 1.207271695137024, + "layer_10_max_l1_linf_norm": 0.8127947449684143, + "layer_10_max_spectral_norm": 0.02408405765891075, + "layer_11_update_fnorm": 1.2032477855682373, + "layer_11_max_l1_linf_norm": 0.7944660186767578, + "layer_11_max_spectral_norm": 0.024088220670819283, + "layer_12_update_fnorm": 1.2068476676940918, + "layer_12_max_l1_linf_norm": 0.7906976342201233, + "layer_12_max_spectral_norm": 0.024091433733701706, + "total_sharpness": 0.0005920046241953969, + "ip_v_neg_g": 0.006258437409996986, + "cos_v_neg_g": 0.0015959867741912603, + "v_norm": 4.236710548400879, + "g_norm": 0.9255669713020325, + "hv_norm": 0.5372405052185059, + "cos_v_hv": 0.004668583162128925, + "hg_norm": 37.55412292480469, + "cos_g_hg": 0.45844998955726624, + "v_parallel_norm": 0.0016425491776317358, + "v_perp_norm": 4.236710071563721, + "layer_1_v_norm": 1.0914175510406494, + "layer_1_cos_v_neg_g": 0.003017596434801817, + "layer_2_v_norm": 0.9786771535873413, + "layer_2_cos_v_neg_g": 0.005824689753353596, + "layer_3_v_norm": 1.0848404169082642, + "layer_3_cos_v_neg_g": 0.005820248741656542, + "layer_4_v_norm": 1.163871169090271, + "layer_4_cos_v_neg_g": 0.0021097988355904818, + "layer_5_v_norm": 1.1919387578964233, + "layer_5_cos_v_neg_g": 0.0018476282712072134, + "layer_6_v_norm": 1.1929242610931396, + "layer_6_cos_v_neg_g": 0.0020119331311434507, + "layer_7_v_norm": 1.1928555965423584, + "layer_7_cos_v_neg_g": 0.00320633128285408, + "layer_8_v_norm": 1.200487732887268, + "layer_8_cos_v_neg_g": 0.0032689967192709446, + "layer_9_v_norm": 1.1996798515319824, + "layer_9_cos_v_neg_g": 0.003242449602112174, 
+ "layer_10_v_norm": 1.207271695137024, + "layer_10_cos_v_neg_g": 0.002272488083690405, + "layer_11_v_norm": 1.2032476663589478, + "layer_11_cos_v_neg_g": 0.0014536959351971745, + "layer_12_v_norm": 1.2068476676940918, + "layer_12_cos_v_neg_g": 0.001382754067890346, + "layer_1_sharpness": 0.00194425112567842, + "layer_2_sharpness": 7.134096085792407e-05, + "layer_3_sharpness": 0.0002234476269222796, + "layer_4_sharpness": 7.009448745520785e-05, + "layer_5_sharpness": 6.676813791273162e-05, + "layer_6_sharpness": 9.741896792547777e-05, + "layer_7_sharpness": 0.00012496471754275262, + "layer_8_sharpness": 8.548005280317739e-05, + "layer_9_sharpness": 5.48658863408491e-05, + "layer_10_sharpness": 3.7006895581725985e-05, + "layer_11_sharpness": 3.289690721430816e-05, + "layer_12_sharpness": 3.5025743272854015e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..29a99428043ab4ea1e6429bea8949c892617776b --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.2108025550842285, + "total_l1_linf_norm": 34354.8671875, + "total_spectral_norm": 4.2108025550842285, + "layer_1_update_fnorm": 1.048841953277588, + "layer_1_max_l1_linf_norm": 0.8863624334335327, + "layer_1_max_spectral_norm": 0.02409515529870987, + "layer_2_update_fnorm": 0.9759672284126282, + "layer_2_max_l1_linf_norm": 0.7910943627357483, + "layer_2_max_spectral_norm": 0.024076325818896294, + "layer_3_update_fnorm": 1.0825939178466797, + "layer_3_max_l1_linf_norm": 0.8033270835876465, + "layer_3_max_spectral_norm": 0.024083979427814484, + "layer_4_update_fnorm": 1.1585708856582642, + "layer_4_max_l1_linf_norm": 0.8210045099258423, + "layer_4_max_spectral_norm": 0.02410263381898403, + "layer_5_update_fnorm": 1.1862423419952393, + "layer_5_max_l1_linf_norm": 0.8174630403518677, + "layer_5_max_spectral_norm": 0.02410750649869442, + "layer_6_update_fnorm": 1.1902992725372314, + "layer_6_max_l1_linf_norm": 0.8205758929252625, + "layer_6_max_spectral_norm": 0.024086231365799904, + "layer_7_update_fnorm": 1.1903879642486572, + "layer_7_max_l1_linf_norm": 0.8158804178237915, + "layer_7_max_spectral_norm": 0.024089526385068893, + "layer_8_update_fnorm": 1.1979641914367676, + "layer_8_max_l1_linf_norm": 0.813789963722229, + "layer_8_max_spectral_norm": 0.024082252755761147, + "layer_9_update_fnorm": 1.1954952478408813, + "layer_9_max_l1_linf_norm": 0.8014891147613525, + "layer_9_max_spectral_norm": 0.024094734340906143, + "layer_10_update_fnorm": 1.2011756896972656, + "layer_10_max_l1_linf_norm": 0.8104794025421143, + "layer_10_max_spectral_norm": 0.024089839309453964, + "layer_11_update_fnorm": 1.1914998292922974, + "layer_11_max_l1_linf_norm": 0.7910754084587097, + "layer_11_max_spectral_norm": 0.024097805842757225, + "layer_12_update_fnorm": 1.201328992843628, + "layer_12_max_l1_linf_norm": 0.8574815988540649, + "layer_12_max_spectral_norm": 0.02409774251282215, + "total_sharpness": 0.0006145337247289717, + "ip_v_neg_g": 0.006131739821285009, + "cos_v_neg_g": 0.001719201565720141, + "v_norm": 4.2108025550842285, + "g_norm": 0.8470168113708496, + "hv_norm": 0.4798772931098938, + "cos_v_hv": 0.005392379593104124, + "hg_norm": 38.435970306396484, + "cos_g_hg": 0.30718907713890076, + 
"v_parallel_norm": 0.0013298371341079473, + "v_perp_norm": 4.2108025550842285, + "layer_1_v_norm": 1.048841953277588, + "layer_1_cos_v_neg_g": 0.0029252099338918924, + "layer_2_v_norm": 0.9759672284126282, + "layer_2_cos_v_neg_g": 0.0027276072651147842, + "layer_3_v_norm": 1.0825939178466797, + "layer_3_cos_v_neg_g": 0.0037475957069545984, + "layer_4_v_norm": 1.1585708856582642, + "layer_4_cos_v_neg_g": 0.003372355829924345, + "layer_5_v_norm": 1.1862423419952393, + "layer_5_cos_v_neg_g": 0.0033281941432505846, + "layer_6_v_norm": 1.1902992725372314, + "layer_6_cos_v_neg_g": 0.0034422739408910275, + "layer_7_v_norm": 1.1903879642486572, + "layer_7_cos_v_neg_g": 0.003723171539604664, + "layer_8_v_norm": 1.1979641914367676, + "layer_8_cos_v_neg_g": 0.003604056779295206, + "layer_9_v_norm": 1.1954952478408813, + "layer_9_cos_v_neg_g": 0.003457283601164818, + "layer_10_v_norm": 1.2011756896972656, + "layer_10_cos_v_neg_g": 0.004104774910956621, + "layer_11_v_norm": 1.1914997100830078, + "layer_11_cos_v_neg_g": 0.0027423810679465532, + "layer_12_v_norm": 1.201328992843628, + "layer_12_cos_v_neg_g": 0.001515088020823896, + "layer_1_sharpness": 0.002445616526529193, + "layer_2_sharpness": 6.431240035453811e-05, + "layer_3_sharpness": 0.00014207912317942828, + "layer_4_sharpness": 7.683605508645996e-05, + "layer_5_sharpness": 7.448112592101097e-05, + "layer_6_sharpness": 0.00010486464452696964, + "layer_7_sharpness": 0.00010233177454210818, + "layer_8_sharpness": 7.514150638598949e-05, + "layer_9_sharpness": 6.0066089645260945e-05, + "layer_10_sharpness": 3.8746762584196404e-05, + "layer_11_sharpness": 3.6975183320464566e-05, + "layer_12_sharpness": 8.335509483003989e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..10be697bef043516fda38396c4bfc1b690f5c65e --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.237854957580566, + "total_l1_linf_norm": 34571.3671875, + "total_spectral_norm": 4.237854957580566, + "layer_1_update_fnorm": 1.090674638748169, + "layer_1_max_l1_linf_norm": 0.8855683207511902, + "layer_1_max_spectral_norm": 0.024100156500935555, + "layer_2_update_fnorm": 0.9675868153572083, + "layer_2_max_l1_linf_norm": 0.787796676158905, + "layer_2_max_spectral_norm": 0.024080568924546242, + "layer_3_update_fnorm": 1.0928179025650024, + "layer_3_max_l1_linf_norm": 0.8139245510101318, + "layer_3_max_spectral_norm": 0.02408374845981598, + "layer_4_update_fnorm": 1.1667211055755615, + "layer_4_max_l1_linf_norm": 0.844451904296875, + "layer_4_max_spectral_norm": 0.024090563878417015, + "layer_5_update_fnorm": 1.1906020641326904, + "layer_5_max_l1_linf_norm": 0.8383548259735107, + "layer_5_max_spectral_norm": 0.02413380891084671, + "layer_6_update_fnorm": 1.1946219205856323, + "layer_6_max_l1_linf_norm": 0.8209726214408875, + "layer_6_max_spectral_norm": 0.02409428358078003, + "layer_7_update_fnorm": 1.192524790763855, + "layer_7_max_l1_linf_norm": 0.8151543140411377, + "layer_7_max_spectral_norm": 0.024089988321065903, + "layer_8_update_fnorm": 1.2007962465286255, + "layer_8_max_l1_linf_norm": 0.8135681748390198, + "layer_8_max_spectral_norm": 0.024086089804768562, + "layer_9_update_fnorm": 1.2009469270706177, + 
"layer_9_max_l1_linf_norm": 0.8142385482788086, + "layer_9_max_spectral_norm": 0.024091722443699837, + "layer_10_update_fnorm": 1.207886815071106, + "layer_10_max_l1_linf_norm": 0.8168281316757202, + "layer_10_max_spectral_norm": 0.02408495545387268, + "layer_11_update_fnorm": 1.2022854089736938, + "layer_11_max_l1_linf_norm": 0.8004363179206848, + "layer_11_max_spectral_norm": 0.024087028577923775, + "layer_12_update_fnorm": 1.206278920173645, + "layer_12_max_l1_linf_norm": 0.7959768772125244, + "layer_12_max_spectral_norm": 0.024096013978123665, + "total_sharpness": 0.0005346984835341573, + "ip_v_neg_g": 0.004389361012727022, + "cos_v_neg_g": 0.0011355957249179482, + "v_norm": 4.237854957580566, + "g_norm": 0.9120769500732422, + "hv_norm": 0.5028352737426758, + "cos_v_hv": 0.004506394732743502, + "hg_norm": 29.01253890991211, + "cos_g_hg": 0.32397571206092834, + "v_parallel_norm": 0.00117926683742553, + "v_perp_norm": 4.237854480743408, + "layer_1_v_norm": 1.090674638748169, + "layer_1_cos_v_neg_g": 0.0023582347203046083, + "layer_2_v_norm": 0.9675868153572083, + "layer_2_cos_v_neg_g": 0.003311576321721077, + "layer_3_v_norm": 1.0928179025650024, + "layer_3_cos_v_neg_g": 0.002944051055237651, + "layer_4_v_norm": 1.1667211055755615, + "layer_4_cos_v_neg_g": 0.0011849332368001342, + "layer_5_v_norm": 1.1906020641326904, + "layer_5_cos_v_neg_g": 0.0011373891029506922, + "layer_6_v_norm": 1.1946219205856323, + "layer_6_cos_v_neg_g": 0.0016074113082140684, + "layer_7_v_norm": 1.192524790763855, + "layer_7_cos_v_neg_g": 0.002276201033964753, + "layer_8_v_norm": 1.2007962465286255, + "layer_8_cos_v_neg_g": 0.0017051617614924908, + "layer_9_v_norm": 1.2009469270706177, + "layer_9_cos_v_neg_g": 0.0016768414061516523, + "layer_10_v_norm": 1.207886815071106, + "layer_10_cos_v_neg_g": 0.0014834547182545066, + "layer_11_v_norm": 1.2022854089736938, + "layer_11_cos_v_neg_g": 0.0009940890595316887, + "layer_12_v_norm": 1.206278920173645, + "layer_12_cos_v_neg_g": 0.0008031711913645267, + "layer_1_sharpness": 0.0017671104287728667, + "layer_2_sharpness": 6.271320307860151e-05, + "layer_3_sharpness": 0.00011865556007251143, + "layer_4_sharpness": 6.11630966886878e-05, + "layer_5_sharpness": 6.323875277303159e-05, + "layer_6_sharpness": 8.48917625262402e-05, + "layer_7_sharpness": 0.00010245732119074091, + "layer_8_sharpness": 7.09248342900537e-05, + "layer_9_sharpness": 5.029115709476173e-05, + "layer_10_sharpness": 3.5030396247748286e-05, + "layer_11_sharpness": 2.984013372042682e-05, + "layer_12_sharpness": 4.783461554325186e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..b2bd9cedc6b6c05dda83254fb5f6ef0c10e9e672 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 4.235434532165527, + "total_l1_linf_norm": 34533.4609375, + "total_spectral_norm": 4.235434055328369, + "layer_1_update_fnorm": 1.0917234420776367, + "layer_1_max_l1_linf_norm": 0.8868430852890015, + "layer_1_max_spectral_norm": 0.024100586771965027, + "layer_2_update_fnorm": 0.9801827073097229, + "layer_2_max_l1_linf_norm": 0.7913304567337036, + "layer_2_max_spectral_norm": 0.024071909487247467, + "layer_3_update_fnorm": 1.0880229473114014, + "layer_3_max_l1_linf_norm": 
0.8185914754867554, + "layer_3_max_spectral_norm": 0.024084143340587616, + "layer_4_update_fnorm": 1.1693202257156372, + "layer_4_max_l1_linf_norm": 0.8420134782791138, + "layer_4_max_spectral_norm": 0.024096457287669182, + "layer_5_update_fnorm": 1.1910812854766846, + "layer_5_max_l1_linf_norm": 0.8282018899917603, + "layer_5_max_spectral_norm": 0.024128057062625885, + "layer_6_update_fnorm": 1.192160964012146, + "layer_6_max_l1_linf_norm": 0.8191239833831787, + "layer_6_max_spectral_norm": 0.024089736863970757, + "layer_7_update_fnorm": 1.1924806833267212, + "layer_7_max_l1_linf_norm": 0.8197081089019775, + "layer_7_max_spectral_norm": 0.024091137573122978, + "layer_8_update_fnorm": 1.1986515522003174, + "layer_8_max_l1_linf_norm": 0.8118258714675903, + "layer_8_max_spectral_norm": 0.02408442273736, + "layer_9_update_fnorm": 1.1966649293899536, + "layer_9_max_l1_linf_norm": 0.8089480996131897, + "layer_9_max_spectral_norm": 0.02408830262720585, + "layer_10_update_fnorm": 1.206947684288025, + "layer_10_max_l1_linf_norm": 0.8101253509521484, + "layer_10_max_spectral_norm": 0.024081146344542503, + "layer_11_update_fnorm": 1.1997677087783813, + "layer_11_max_l1_linf_norm": 0.7916897535324097, + "layer_11_max_spectral_norm": 0.02409047819674015, + "layer_12_update_fnorm": 1.2059637308120728, + "layer_12_max_l1_linf_norm": 0.8079574108123779, + "layer_12_max_spectral_norm": 0.02408917061984539, + "total_sharpness": 0.0005547560867853463, + "ip_v_neg_g": 0.005526806693524122, + "cos_v_neg_g": 0.0015210409183055162, + "v_norm": 4.235434532165527, + "g_norm": 0.8578975200653076, + "hv_norm": 0.4005899131298065, + "cos_v_hv": 0.005865432322025299, + "hg_norm": 17.189184188842773, + "cos_g_hg": 0.39521852135658264, + "v_parallel_norm": 0.0015370975015684962, + "v_perp_norm": 4.235434055328369, + "layer_1_v_norm": 1.0917234420776367, + "layer_1_cos_v_neg_g": 0.002704298822209239, + "layer_2_v_norm": 0.9801827073097229, + "layer_2_cos_v_neg_g": 0.002158282557502389, + "layer_3_v_norm": 1.088023066520691, + "layer_3_cos_v_neg_g": 0.003258299082517624, + "layer_4_v_norm": 1.1693202257156372, + "layer_4_cos_v_neg_g": 0.001595810055732727, + "layer_5_v_norm": 1.1910812854766846, + "layer_5_cos_v_neg_g": 0.0018819704419001937, + "layer_6_v_norm": 1.1921608448028564, + "layer_6_cos_v_neg_g": 0.002805036725476384, + "layer_7_v_norm": 1.1924806833267212, + "layer_7_cos_v_neg_g": 0.003265748731791973, + "layer_8_v_norm": 1.1986515522003174, + "layer_8_cos_v_neg_g": 0.004309972282499075, + "layer_9_v_norm": 1.1966649293899536, + "layer_9_cos_v_neg_g": 0.003923278767615557, + "layer_10_v_norm": 1.206947684288025, + "layer_10_cos_v_neg_g": 0.0037272630725055933, + "layer_11_v_norm": 1.1997677087783813, + "layer_11_cos_v_neg_g": 0.002658564131706953, + "layer_12_v_norm": 1.2059637308120728, + "layer_12_cos_v_neg_g": 0.002618009690195322, + "layer_1_sharpness": 0.0016515289898961782, + "layer_2_sharpness": 7.493883458664641e-05, + "layer_3_sharpness": 0.00021263465168885887, + "layer_4_sharpness": 4.6191307774279267e-05, + "layer_5_sharpness": 5.710752884624526e-05, + "layer_6_sharpness": 8.662306936457753e-05, + "layer_7_sharpness": 0.00010711009963415563, + "layer_8_sharpness": 6.244784162845463e-05, + "layer_9_sharpness": 4.9663311074255034e-05, + "layer_10_sharpness": 3.730924800038338e-05, + "layer_11_sharpness": 3.166172973578796e-05, + "layer_12_sharpness": 5.2915213018422946e-05 +} \ No newline at end of file diff --git 
a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/training_log.txt b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..c4a2b34f9be3ddaf978a9dff3a78b2e93a483f0f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.02_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + 
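+ # shard layout as implied by the reads in this loader (not a full format spec): a 256-entry
+ # int32 header where header[0] is the 20240520 magic, header[1] the version (must be 1), and
+ # header[2] the claimed token count; the remaining header entries are unused by this loader,
+ # and the rest of the file is the tokens themselves stored as uint16.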
assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + 
write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. 
+ # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. 
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
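A minimal, standalone sketch (illustrative, not part of the logged script) of the double-backward Hessian-vector-product pattern that the HVP and sharpness computation below relies on; params, v and the toy quadratic loss are stand-ins for model.parameters(), update_direction_v and the training loss:

import torch

params = [torch.randn(3, 3, requires_grad=True)]   # stand-in for model.parameters()
v = [torch.randn_like(p) for p in params]          # stand-in for update_direction_v
loss = sum((p ** 2).sum() for p in params)         # toy loss whose Hessian is 2*I
grads = torch.autograd.grad(loss, params, create_graph=True)   # 1st backward, keep graph
v_dot_g = sum((g * vi).sum() for g, vi in zip(grads, v))       # inner product <grad, v>
hv = torch.autograd.grad(v_dot_g, params)                      # 2nd backward -> H @ v
sharpness = sum((h * vi).sum() for h, vi in zip(hv, v)) / sum((vi * vi).sum() for vi in v)
print(sharpness.item())   # ~2.0 here, i.e. the Rayleigh quotient v^T H v / ||v||^2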
+ loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = ; cos_v_neg_g = / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = 
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
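For concreteness, a small illustrative sketch of where typical GPT-2 tensors land under this dim()-based rule (parameter names as used elsewhere in this file; the actual grouping code follows right after):

example_param_dims = {
    "transformer.wte.weight": 2,              # 2-D embedding matrix -> weight-decayed group
    "transformer.h.0.attn.c_attn.weight": 2,  # 2-D weight matrix -> weight-decayed group
    "transformer.h.0.ln_1.weight": 1,         # 1-D norm scale -> no-decay group
    "transformer.h.0.attn.c_attn.bias": 1,    # 1-D bias -> no-decay group
}
for name, ndim in example_param_dims.items():
    print(name, "-> decay" if ndim >= 2 else "-> no decay")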
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + 
device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
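A tiny runnable sketch of the sign convention used just below when the training update is captured; lr, grad and the plain-SGD step are for illustration only (the real run uses Adam/Muon, whose preconditioned step is what gets captured):

import torch

lr = 0.1
theta_before = torch.tensor([1.0, 2.0])
grad = torch.tensor([0.5, -0.25])
theta_after = theta_before - lr * grad    # an illustrative plain-SGD step
v = theta_before - theta_after            # same subtraction as last_training_update below
assert torch.allclose(v, lr * grad)       # v is minus the applied parameter change

The sharpness metrics computed above are then taken along this captured direction v rather than along the raw gradient.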
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026603 +step:0 train loss:11.019232 +step:1 train loss:11.021655 +step:2 train loss:11.009263 +step:3 train loss:10.997823 +step:4 train loss:10.980644 +step:5 train loss:10.958502 +step:6 train loss:10.931274 +step:7 train loss:10.899804 +step:8 train loss:10.870020 +step:9 train loss:10.827015 +step:10 train loss:10.785848 +step:11 train loss:10.745009 +step:12 train loss:10.687385 +step:13 train loss:10.633357 +step:14 train loss:10.578691 +step:15 train loss:10.519938 +step:16 train loss:10.461802 +step:17 train loss:10.402798 +step:18 train loss:10.346280 +step:19 train loss:10.278630 +step:20 train loss:10.213874 +step:21 train loss:10.154259 +step:22 train loss:10.059710 +step:23 train loss:10.011754 +step:24 train loss:9.925763 +step:25 train loss:9.875253 +step:26 train loss:9.797213 +step:27 train loss:9.716827 +step:28 train loss:9.669450 +step:29 train loss:9.598014 +step:30 train loss:9.536114 +step:31 train loss:9.443628 +step:32 train loss:9.367929 +step:33 train loss:9.307373 +step:34 train loss:9.268478 +step:35 train loss:9.170286 +step:36 train loss:9.113898 +step:37 train loss:9.026379 +step:38 train 
loss:8.999363 +step:39 train loss:8.918451 +step:40 train loss:8.864893 +step:41 train loss:8.773250 +step:42 train loss:8.746748 +step:43 train loss:8.636140 +step:44 train loss:8.576151 +step:45 train loss:8.534756 +step:46 train loss:8.481174 +step:47 train loss:8.437132 +step:48 train loss:8.342675 +step:49 train loss:8.283622 +step:50 train loss:8.184721 +step:51 train loss:8.159945 +step:52 train loss:8.106045 +step:53 train loss:8.057655 +step:54 train loss:7.990853 +step:55 train loss:7.931157 +step:56 train loss:7.849420 +step:57 train loss:7.834420 +step:58 train loss:7.741092 +step:59 train loss:7.723742 +step:60 train loss:7.663430 +step:61 train loss:7.611662 +step:62 train loss:7.561290 +step:63 train loss:7.573080 +step:64 train loss:7.452106 +step:65 train loss:7.452134 +step:66 train loss:7.413104 +step:67 train loss:7.410923 +step:68 train loss:7.339917 +step:69 train loss:7.302031 +step:70 train loss:7.244328 +step:71 train loss:7.215314 +step:72 train loss:7.219730 +step:73 train loss:7.161871 +step:74 train loss:7.162664 +step:75 train loss:7.090719 +step:76 train loss:7.172180 +step:77 train loss:7.089824 +step:78 train loss:6.850777 +step:79 train loss:7.005844 +step:80 train loss:6.970787 +step:81 train loss:7.037622 +step:82 train loss:6.993773 +step:83 train loss:6.951257 +step:84 train loss:6.903692 +step:85 train loss:6.877370 +step:86 train loss:6.860811 +step:87 train loss:6.824376 +step:88 train loss:6.821986 +step:89 train loss:6.772898 +step:90 train loss:6.814445 +step:91 train loss:6.812449 +step:92 train loss:6.809702 +step:93 train loss:6.750867 +step:94 train loss:6.710454 +step:95 train loss:6.647726 +step:96 train loss:6.747081 +step:97 train loss:6.685510 +step:98 train loss:6.665207 +step:99 train loss:6.636148 +step:100 train loss:6.645966 +step:101 train loss:6.565927 +step:102 train loss:6.576922 +step:103 train loss:6.564780 +step:104 train loss:6.587362 +step:105 train loss:6.648204 +step:106 train loss:6.587799 +step:107 train loss:6.533873 +step:108 train loss:6.557894 +step:109 train loss:6.591850 +step:110 train loss:6.513571 +step:111 train loss:6.527007 +step:112 train loss:6.526183 +step:113 train loss:6.471773 +step:114 train loss:6.541694 +step:115 train loss:6.488950 +step:116 train loss:6.467020 +step:117 train loss:6.406948 +step:118 train loss:6.459588 +step:119 train loss:6.417258 +step:120 train loss:6.430504 +step:121 train loss:6.346424 +step:122 train loss:6.445478 +step:123 train loss:6.366035 +step:124 train loss:6.348652 +step:125 train loss:6.326513 +step:126 train loss:6.424371 +step:127 train loss:6.337675 +step:128 train loss:6.386740 +step:129 train loss:6.363190 +step:130 train loss:6.391231 +step:131 train loss:6.339526 +step:132 train loss:6.271108 +step:133 train loss:6.321290 +step:134 train loss:6.306256 +step:135 train loss:6.211026 +step:136 train loss:6.256102 +step:137 train loss:6.265172 +step:138 train loss:6.209537 +step:139 train loss:6.281661 +step:140 train loss:6.196661 +step:141 train loss:6.295040 +step:142 train loss:6.243489 +step:143 train loss:6.255893 +step:144 train loss:6.227938 +step:145 train loss:6.159138 +step:146 train loss:6.179141 +step:147 train loss:6.228684 +step:148 train loss:6.239065 +step:149 train loss:6.196235 +step:150 train loss:6.195663 +step:151 train loss:6.107507 +step:152 train loss:6.142854 +step:153 train loss:6.132575 +step:154 train loss:6.201931 +step:155 train loss:6.187000 +step:156 train loss:6.209598 +step:157 train loss:6.121952 +step:158 train loss:6.111599 
+step:159 train loss:6.138769 +step:160 train loss:6.126405 +step:161 train loss:6.111537 +step:162 train loss:6.082345 +step:163 train loss:6.099191 +step:164 train loss:6.108845 +step:165 train loss:6.117552 +step:166 train loss:6.067913 +step:167 train loss:6.071652 +step:168 train loss:6.046464 +step:169 train loss:5.996686 +step:170 train loss:5.960359 +step:171 train loss:6.081887 +step:172 train loss:6.011221 +step:173 train loss:6.066416 +step:174 train loss:6.057883 +step:175 train loss:6.025960 +step:176 train loss:5.969724 +step:177 train loss:6.016577 +step:178 train loss:6.017296 +step:179 train loss:5.976398 +step:180 train loss:5.956113 +step:181 train loss:5.994598 +step:182 train loss:5.927024 +step:183 train loss:6.016754 +step:184 train loss:5.975765 +step:185 train loss:5.910710 +step:186 train loss:6.037906 +step:187 train loss:5.981493 +step:188 train loss:5.813349 +step:189 train loss:5.954745 +step:190 train loss:5.952596 +step:191 train loss:5.880865 +step:192 train loss:5.797435 +step:193 train loss:5.941958 +step:194 train loss:5.952391 +step:195 train loss:5.942472 +step:196 train loss:5.916616 +step:197 train loss:5.914452 +step:198 train loss:5.864516 +step:199 train loss:5.929754 +step:200 train loss:5.983355 +step:201 train loss:5.903522 +step:202 train loss:5.905573 +step:203 train loss:5.866755 +step:204 train loss:5.893661 +step:205 train loss:5.742725 +step:206 train loss:5.867303 +step:207 train loss:5.856214 +step:208 train loss:5.792190 +step:209 train loss:5.780315 +step:210 train loss:5.797502 +step:211 train loss:5.855114 +step:212 train loss:5.809889 +step:213 train loss:5.820871 +step:214 train loss:5.798539 +step:215 train loss:5.814361 +step:216 train loss:5.767396 +step:217 train loss:5.781216 +step:218 train loss:5.752649 +step:219 train loss:5.727314 +step:220 train loss:5.765015 +step:221 train loss:5.717752 +step:222 train loss:5.759455 +step:223 train loss:5.779016 +step:224 train loss:5.757864 +step:225 train loss:5.697119 +step:226 train loss:5.699236 +step:227 train loss:5.762202 +step:228 train loss:5.721572 +step:229 train loss:5.782977 +step:230 train loss:5.655300 +step:231 train loss:5.703286 +step:232 train loss:5.693062 +step:233 train loss:5.673464 +step:234 train loss:5.667894 +step:235 train loss:5.743582 +step:236 train loss:5.693943 +step:237 train loss:5.734467 +step:238 train loss:5.721028 +step:239 train loss:5.625730 +step:240 train loss:5.704776 +step:241 train loss:5.737849 +step:242 train loss:5.700397 +step:243 train loss:5.620467 +step:244 train loss:5.639238 +step:245 train loss:5.614025 +step:246 train loss:5.623811 +step:247 train loss:5.611925 +step:248 train loss:5.563097 +step:249 train loss:5.624404 +step:250 validation loss:5.594456 +step:250 train loss:5.579708 +step:251 train loss:5.628460 +step:252 train loss:5.569026 +step:253 train loss:5.581820 +step:254 train loss:5.536660 +step:255 train loss:5.576074 +step:256 train loss:5.562149 +step:257 train loss:5.626219 +step:258 train loss:5.528452 +step:259 train loss:5.535725 +step:260 train loss:5.508630 +step:261 train loss:5.506536 +step:262 train loss:5.563730 +step:263 train loss:5.533544 +step:264 train loss:5.496258 +step:265 train loss:5.517025 +step:266 train loss:5.494085 +step:267 train loss:5.515344 +step:268 train loss:5.457917 +step:269 train loss:5.483533 +step:270 train loss:5.502043 +step:271 train loss:5.517141 +step:272 train loss:5.436650 +step:273 train loss:5.520480 +step:274 train loss:5.429925 +step:275 train loss:5.478248 +step:276 
train loss:5.437775 +step:277 train loss:5.427423 +step:278 train loss:5.415054 +step:279 train loss:5.385396 +step:280 train loss:5.448050 +step:281 train loss:5.518520 +step:282 train loss:5.395461 +step:283 train loss:5.408242 +step:284 train loss:5.380117 +step:285 train loss:5.429893 +step:286 train loss:5.393060 +step:287 train loss:5.383167 +step:288 train loss:5.348028 +step:289 train loss:5.365934 +step:290 train loss:5.413138 +step:291 train loss:5.342585 +step:292 train loss:5.399089 +step:293 train loss:5.305979 +step:294 train loss:5.441177 +step:295 train loss:5.333262 +step:296 train loss:5.377320 +step:297 train loss:5.407780 +step:298 train loss:5.303634 +step:299 train loss:5.364233 +step:300 train loss:5.282683 +step:301 train loss:5.315650 +step:302 train loss:5.289977 +step:303 train loss:5.298913 +step:304 train loss:5.327638 +step:305 train loss:5.252287 +step:306 train loss:5.285244 +step:307 train loss:5.283059 +step:308 train loss:5.202029 +step:309 train loss:5.341836 +step:310 train loss:5.302066 +step:311 train loss:5.283934 +step:312 train loss:5.255469 +step:313 train loss:5.279747 +step:314 train loss:5.248095 +step:315 train loss:5.206810 +step:316 train loss:5.206870 +step:317 train loss:5.168325 +step:318 train loss:5.155809 +step:319 train loss:5.256345 +step:320 train loss:5.151668 +step:321 train loss:5.206855 +step:322 train loss:5.203126 +step:323 train loss:5.247980 +step:324 train loss:5.195230 +step:325 train loss:5.218602 +step:326 train loss:5.222706 +step:327 train loss:5.217251 +step:328 train loss:5.159993 +step:329 train loss:5.196847 +step:330 train loss:5.104710 +step:331 train loss:5.142657 +step:332 train loss:5.117283 +step:333 train loss:5.048094 +step:334 train loss:5.132650 +step:335 train loss:5.193320 +step:336 train loss:5.348453 +step:337 train loss:5.204396 +step:338 train loss:5.127210 +step:339 train loss:5.078508 +step:340 train loss:5.074308 +step:341 train loss:5.064361 +step:342 train loss:5.125271 +step:343 train loss:5.112091 +step:344 train loss:5.075082 +step:345 train loss:5.027518 +step:346 train loss:5.071740 +step:347 train loss:5.017936 +step:348 train loss:5.020864 +step:349 train loss:4.953472 +step:350 train loss:4.985855 +step:351 train loss:5.049023 +step:352 train loss:5.001809 +step:353 train loss:5.028961 +step:354 train loss:4.990820 +step:355 train loss:5.030932 +step:356 train loss:4.968828 +step:357 train loss:5.070166 +step:358 train loss:5.079027 +step:359 train loss:4.915638 +step:360 train loss:5.040018 +step:361 train loss:5.011711 +step:362 train loss:5.000504 +step:363 train loss:4.936211 +step:364 train loss:5.067396 +step:365 train loss:4.993537 +step:366 train loss:4.963888 +step:367 train loss:4.978697 +step:368 train loss:4.947906 +step:369 train loss:4.947626 +step:370 train loss:4.970317 +step:371 train loss:4.917515 +step:372 train loss:4.984482 +step:373 train loss:4.920418 +step:374 train loss:4.937189 +step:375 train loss:4.950474 +step:376 train loss:4.943281 +step:377 train loss:4.818109 +step:378 train loss:4.888410 +step:379 train loss:4.928543 +step:380 train loss:4.857214 +step:381 train loss:4.912773 +step:382 train loss:4.910247 +step:383 train loss:4.876593 +step:384 train loss:4.863260 +step:385 train loss:4.852130 +step:386 train loss:4.886736 +step:387 train loss:4.881340 +step:388 train loss:4.850965 +step:389 train loss:4.859573 +step:390 train loss:4.829292 +step:391 train loss:4.872973 +step:392 train loss:4.829090 +step:393 train loss:4.809503 +step:394 train 
loss:4.873837 +step:395 train loss:4.780084 +step:396 train loss:4.760675 +step:397 train loss:4.806224 +step:398 train loss:4.802253 +step:399 train loss:4.810917 +step:400 train loss:4.742546 +step:401 train loss:4.853045 +step:402 train loss:4.766191 +step:403 train loss:4.776017 +step:404 train loss:4.764410 +step:405 train loss:4.759227 +step:406 train loss:4.828425 +step:407 train loss:4.765104 +step:408 train loss:4.860874 +step:409 train loss:4.762811 +step:410 train loss:4.728078 +step:411 train loss:4.717740 +step:412 train loss:4.808178 +step:413 train loss:4.694237 +step:414 train loss:4.800920 +step:415 train loss:4.729106 +step:416 train loss:4.759060 +step:417 train loss:4.776624 +step:418 train loss:4.709530 +step:419 train loss:4.707010 +step:420 train loss:4.661662 +step:421 train loss:4.675571 +step:422 train loss:4.666328 +step:423 train loss:4.680393 +step:424 train loss:4.641619 +step:425 train loss:4.739073 +step:426 train loss:4.706244 +step:427 train loss:4.643860 +step:428 train loss:4.700892 +step:429 train loss:4.602239 +step:430 train loss:4.659476 +step:431 train loss:4.662356 +step:432 train loss:4.694710 +step:433 train loss:4.693341 +step:434 train loss:4.635943 +step:435 train loss:4.705174 +step:436 train loss:4.713682 +step:437 train loss:4.662605 +step:438 train loss:4.641874 +step:439 train loss:4.606505 +step:440 train loss:4.657217 +step:441 train loss:4.579539 +step:442 train loss:4.585183 +step:443 train loss:4.594455 +step:444 train loss:4.668791 +step:445 train loss:4.640676 +step:446 train loss:4.608103 +step:447 train loss:4.590695 +step:448 train loss:4.666684 +step:449 train loss:4.615016 +step:450 train loss:4.615367 +step:451 train loss:4.600721 +step:452 train loss:4.697629 +step:453 train loss:4.621389 +step:454 train loss:4.549997 +step:455 train loss:4.610658 +step:456 train loss:4.578763 +step:457 train loss:4.561759 +step:458 train loss:4.597271 +step:459 train loss:4.549066 +step:460 train loss:4.651323 +step:461 train loss:4.580786 +step:462 train loss:4.482083 +step:463 train loss:4.533595 +step:464 train loss:4.619718 +step:465 train loss:4.565005 +step:466 train loss:4.573915 +step:467 train loss:4.521369 +step:468 train loss:4.586803 +step:469 train loss:4.548391 +step:470 train loss:4.509581 +step:471 train loss:4.576293 +step:472 train loss:4.503158 +step:473 train loss:4.574629 +step:474 train loss:4.526296 +step:475 train loss:4.596470 +step:476 train loss:4.550441 +step:477 train loss:4.451679 +step:478 train loss:4.524792 +step:479 train loss:4.488942 +step:480 train loss:4.524129 +step:481 train loss:4.571594 +step:482 train loss:4.453499 +step:483 train loss:4.531694 +step:484 train loss:4.490668 +step:485 train loss:4.441874 +step:486 train loss:4.486030 +step:487 train loss:4.490386 +step:488 train loss:4.500597 +step:489 train loss:4.507006 +step:490 train loss:4.447761 +step:491 train loss:4.495181 +step:492 train loss:4.482989 +step:493 train loss:4.477684 +step:494 train loss:4.499195 +step:495 train loss:4.419053 +step:496 train loss:4.510170 +step:497 train loss:4.414053 +step:498 train loss:4.523651 +step:499 train loss:4.499296 +step:500 validation loss:4.431253 total_sharp:8.6335e-03 L1_sharp:7.3931e-03 L2_sharp:1.4797e-03 L3_sharp:2.0690e-03 L4_sharp:1.4038e-03 L5_sharp:1.5575e-03 L6_sharp:1.3402e-03 L7_sharp:1.1710e-03 L8_sharp:9.1886e-04 L9_sharp:6.1103e-04 L10_sharp:5.5478e-04 L11_sharp:5.0605e-04 L12_sharp:4.0417e-04 total_fnorm:3.0776e+00 total_l1_linf:2.5340e+04 total_spectral:3.0776e+00 
L1_fnorm:8.6988e-01 L2_fnorm:8.2848e-01 L3_fnorm:8.0570e-01 L4_fnorm:8.0999e-01 L5_fnorm:8.3478e-01 L6_fnorm:8.4331e-01 L7_fnorm:8.4821e-01 L8_fnorm:8.5288e-01 L9_fnorm:8.5372e-01 L10_fnorm:8.5282e-01 L11_fnorm:8.5133e-01 L12_fnorm:8.5769e-01 L1_l1linf:6.3266e-01 L2_l1linf:6.3380e-01 L3_l1linf:6.2501e-01 L4_l1linf:6.0838e-01 L5_l1linf:5.9214e-01 L6_l1linf:5.8606e-01 L7_l1linf:5.8124e-01 L8_l1linf:5.7850e-01 L9_l1linf:5.7574e-01 L10_l1linf:5.7982e-01 L11_l1linf:5.8800e-01 L12_l1linf:5.7738e-01 L1_spectral:1.7217e-02 L2_spectral:1.7209e-02 L3_spectral:1.7204e-02 L4_spectral:1.7214e-02 L5_spectral:1.7229e-02 L6_spectral:1.7198e-02 L7_spectral:1.7211e-02 L8_spectral:1.7228e-02 L9_spectral:1.7211e-02 L10_spectral:1.7206e-02 L11_spectral:1.7204e-02 L12_spectral:1.7232e-02 ip_v_neg_g:3.6183e-02 cos_v_neg_g:1.1759e-02 v_norm:3.0776e+00 g_norm:9.9984e-01 hv_norm:8.5770e-01 cos_v_hv:3.0979e-02 hg_norm:9.9790e+00 cos_g_hg:5.7456e-01 v_par:1.3886e-03 v_perp:3.0776e+00 L1_cos_v_neg_g:1.4364e-02 L1_v_norm:8.6988e-01 L2_cos_v_neg_g:2.0675e-02 L2_v_norm:8.2848e-01 L3_cos_v_neg_g:2.3557e-02 L3_v_norm:8.0570e-01 L4_cos_v_neg_g:2.1012e-02 L4_v_norm:8.0999e-01 L5_cos_v_neg_g:1.7882e-02 L5_v_norm:8.3478e-01 L6_cos_v_neg_g:1.6870e-02 L6_v_norm:8.4331e-01 L7_cos_v_neg_g:1.5466e-02 L7_v_norm:8.4821e-01 L8_cos_v_neg_g:1.4560e-02 L8_v_norm:8.5288e-01 L9_cos_v_neg_g:1.2808e-02 L9_v_norm:8.5372e-01 L10_cos_v_neg_g:1.2725e-02 L10_v_norm:8.5282e-01 L11_cos_v_neg_g:9.9529e-03 L11_v_norm:8.5133e-01 L12_cos_v_neg_g:7.3065e-03 L12_v_norm:8.5769e-01 +step:500 train loss:4.485492 +step:501 train loss:4.458755 +step:502 train loss:4.490247 +step:503 train loss:4.432498 +step:504 train loss:4.509131 +step:505 train loss:4.440861 +step:506 train loss:4.430757 +step:507 train loss:4.445710 +step:508 train loss:4.483635 +step:509 train loss:4.463071 +step:510 train loss:4.393577 +step:511 train loss:4.419890 +step:512 train loss:4.397531 +step:513 train loss:4.428125 +step:514 train loss:4.518470 +step:515 train loss:4.431087 +step:516 train loss:4.516364 +step:517 train loss:4.410833 +step:518 train loss:4.417101 +step:519 train loss:4.450653 +step:520 train loss:4.409064 +step:521 train loss:4.403409 +step:522 train loss:4.432385 +step:523 train loss:4.470524 +step:524 train loss:4.370225 +step:525 train loss:4.381260 +step:526 train loss:4.445680 +step:527 train loss:4.404539 +step:528 train loss:4.408668 +step:529 train loss:4.450392 +step:530 train loss:4.391537 +step:531 train loss:4.426472 +step:532 train loss:4.365205 +step:533 train loss:4.377807 +step:534 train loss:4.412398 +step:535 train loss:4.421371 +step:536 train loss:4.469388 +step:537 train loss:4.359797 +step:538 train loss:4.327005 +step:539 train loss:4.463629 +step:540 train loss:4.474464 +step:541 train loss:4.380047 +step:542 train loss:4.349895 +step:543 train loss:4.383911 +step:544 train loss:4.389013 +step:545 train loss:4.375110 +step:546 train loss:4.351277 +step:547 train loss:4.388233 +step:548 train loss:4.244905 +step:549 train loss:4.374163 +step:550 train loss:4.354871 +step:551 train loss:4.358880 +step:552 train loss:4.441035 +step:553 train loss:4.409262 +step:554 train loss:4.363292 +step:555 train loss:4.392729 +step:556 train loss:4.351229 +step:557 train loss:4.314526 +step:558 train loss:4.309362 +step:559 train loss:4.364257 +step:560 train loss:4.440414 +step:561 train loss:4.307414 +step:562 train loss:4.297817 +step:563 train loss:4.373486 +step:564 train loss:4.318361 +step:565 train loss:4.350532 +step:566 train loss:4.342379 
+step:567 train loss:4.347055 +step:568 train loss:4.403116 +step:569 train loss:4.354567 +step:570 train loss:4.280800 +step:571 train loss:4.334582 +step:572 train loss:4.289526 +step:573 train loss:4.341626 +step:574 train loss:4.397376 +step:575 train loss:4.317066 +step:576 train loss:4.345157 +step:577 train loss:4.334756 +step:578 train loss:4.344260 +step:579 train loss:4.371735 +step:580 train loss:4.305651 +step:581 train loss:4.358965 +step:582 train loss:4.340309 +step:583 train loss:4.350637 +step:584 train loss:4.326276 +step:585 train loss:4.289021 +step:586 train loss:4.317539 +step:587 train loss:4.389238 +step:588 train loss:4.304431 +step:589 train loss:4.354503 +step:590 train loss:4.383914 +step:591 train loss:4.290113 +step:592 train loss:4.292610 +step:593 train loss:4.289522 +step:594 train loss:4.260592 +step:595 train loss:4.337255 +step:596 train loss:4.318864 +step:597 train loss:4.315336 +step:598 train loss:4.282066 +step:599 train loss:4.323946 +step:600 train loss:4.260393 +step:601 train loss:4.273591 +step:602 train loss:4.287073 +step:603 train loss:4.292337 +step:604 train loss:4.308208 +step:605 train loss:4.335237 +step:606 train loss:4.278431 +step:607 train loss:4.268539 +step:608 train loss:4.292068 +step:609 train loss:4.272868 +step:610 train loss:4.256363 +step:611 train loss:4.279378 +step:612 train loss:4.280261 +step:613 train loss:4.202414 +step:614 train loss:4.257389 +step:615 train loss:4.315736 +step:616 train loss:4.245764 +step:617 train loss:4.288025 +step:618 train loss:4.236735 +step:619 train loss:4.282475 +step:620 train loss:4.321949 +step:621 train loss:4.233092 +step:622 train loss:4.339373 +step:623 train loss:4.295623 +step:624 train loss:4.256784 +step:625 train loss:4.301549 +step:626 train loss:4.264393 +step:627 train loss:4.281257 +step:628 train loss:4.280450 +step:629 train loss:4.205984 +step:630 train loss:4.268130 +step:631 train loss:4.219157 +step:632 train loss:4.237702 +step:633 train loss:4.264899 +step:634 train loss:4.255300 +step:635 train loss:4.214617 +step:636 train loss:4.281639 +step:637 train loss:4.207921 +step:638 train loss:4.155222 +step:639 train loss:4.280289 +step:640 train loss:4.225246 +step:641 train loss:4.242381 +step:642 train loss:4.294229 +step:643 train loss:4.174241 +step:644 train loss:4.291016 +step:645 train loss:4.228237 +step:646 train loss:4.243176 +step:647 train loss:4.255481 +step:648 train loss:4.331802 +step:649 train loss:4.236135 +step:650 train loss:4.247439 +step:651 train loss:4.200061 +step:652 train loss:4.194124 +step:653 train loss:4.187796 +step:654 train loss:4.214581 +step:655 train loss:4.253175 +step:656 train loss:4.206485 +step:657 train loss:4.256013 +step:658 train loss:4.176218 +step:659 train loss:4.272367 +step:660 train loss:4.247754 +step:661 train loss:4.283481 +step:662 train loss:4.268686 +step:663 train loss:4.263658 +step:664 train loss:4.171134 +step:665 train loss:4.192945 +step:666 train loss:4.190503 +step:667 train loss:4.240057 +step:668 train loss:4.238708 +step:669 train loss:4.208025 +step:670 train loss:4.247221 +step:671 train loss:4.215937 +step:672 train loss:4.176528 +step:673 train loss:4.264223 +step:674 train loss:4.229282 +step:675 train loss:4.172484 +step:676 train loss:4.258030 +step:677 train loss:4.197150 +step:678 train loss:4.173883 +step:679 train loss:4.206094 +step:680 train loss:4.185543 +step:681 train loss:4.241813 +step:682 train loss:4.143044 +step:683 train loss:4.218019 +step:684 train loss:4.271604 +step:685 
train loss:4.207297 +step:686 train loss:4.261958 +step:687 train loss:4.239134 +step:688 train loss:4.166011 +step:689 train loss:4.181551 +step:690 train loss:4.163019 +step:691 train loss:4.196305 +step:692 train loss:4.200396 +step:693 train loss:4.181867 +step:694 train loss:4.199818 +step:695 train loss:4.161341 +step:696 train loss:4.104625 +step:697 train loss:4.249981 +step:698 train loss:4.139003 +step:699 train loss:4.179069 +step:700 train loss:4.224339 +step:701 train loss:4.140019 +step:702 train loss:4.201189 +step:703 train loss:4.150867 +step:704 train loss:4.082996 +step:705 train loss:4.195289 +step:706 train loss:4.063519 +step:707 train loss:4.123132 +step:708 train loss:4.216667 +step:709 train loss:4.178437 +step:710 train loss:4.172176 +step:711 train loss:4.160710 +step:712 train loss:4.160275 +step:713 train loss:4.126146 +step:714 train loss:4.196084 +step:715 train loss:4.079988 +step:716 train loss:4.255291 +step:717 train loss:4.134084 +step:718 train loss:4.215342 +step:719 train loss:4.189520 +step:720 train loss:4.144076 +step:721 train loss:4.214973 +step:722 train loss:4.173645 +step:723 train loss:4.199195 +step:724 train loss:4.208450 +step:725 train loss:4.112005 +step:726 train loss:4.139594 +step:727 train loss:4.172347 +step:728 train loss:4.149498 +step:729 train loss:4.124593 +step:730 train loss:4.188378 +step:731 train loss:4.229476 +step:732 train loss:4.186770 +step:733 train loss:4.166195 +step:734 train loss:4.179884 +step:735 train loss:4.255881 +step:736 train loss:4.153864 +step:737 train loss:4.160261 +step:738 train loss:4.208628 +step:739 train loss:4.126040 +step:740 train loss:4.181959 +step:741 train loss:4.263440 +step:742 train loss:4.145278 +step:743 train loss:4.130907 +step:744 train loss:4.148962 +step:745 train loss:4.072715 +step:746 train loss:4.130414 +step:747 train loss:4.141311 +step:748 train loss:4.126824 +step:749 train loss:4.158556 +step:750 validation loss:4.085765 +step:750 train loss:4.083932 +step:751 train loss:4.155170 +step:752 train loss:4.081452 +step:753 train loss:4.148044 +step:754 train loss:4.136564 +step:755 train loss:4.202507 +step:756 train loss:4.162933 +step:757 train loss:4.224552 +step:758 train loss:4.131699 +step:759 train loss:4.143517 +step:760 train loss:4.099330 +step:761 train loss:4.137965 +step:762 train loss:4.099115 +step:763 train loss:4.103803 +step:764 train loss:4.096744 +step:765 train loss:4.104287 +step:766 train loss:4.170881 +step:767 train loss:4.300456 +step:768 train loss:4.128481 +step:769 train loss:4.140904 +step:770 train loss:4.185034 +step:771 train loss:4.246187 +step:772 train loss:4.155286 +step:773 train loss:4.097257 +step:774 train loss:4.131451 +step:775 train loss:4.117632 +step:776 train loss:4.138645 +step:777 train loss:4.112651 +step:778 train loss:4.054573 +step:779 train loss:4.087074 +step:780 train loss:4.152263 +step:781 train loss:4.094347 +step:782 train loss:4.107746 +step:783 train loss:4.085558 +step:784 train loss:4.092565 +step:785 train loss:4.085585 +step:786 train loss:4.084014 +step:787 train loss:4.043705 +step:788 train loss:4.136051 +step:789 train loss:4.104382 +step:790 train loss:4.075918 +step:791 train loss:4.146833 +step:792 train loss:4.177413 +step:793 train loss:4.135609 +step:794 train loss:4.117550 +step:795 train loss:4.088284 +step:796 train loss:4.386010 +step:797 train loss:4.099897 +step:798 train loss:4.090861 +step:799 train loss:4.102409 +step:800 train loss:4.157365 +step:801 train loss:4.089990 +step:802 train 
loss:4.230848 +step:803 train loss:4.114196 +step:804 train loss:4.065783 +step:805 train loss:4.124762 +step:806 train loss:4.077344 +step:807 train loss:4.085612 +step:808 train loss:4.101625 +step:809 train loss:4.072168 +step:810 train loss:4.043779 +step:811 train loss:4.126934 +step:812 train loss:4.089203 +step:813 train loss:4.109405 +step:814 train loss:4.185509 +step:815 train loss:4.153823 +step:816 train loss:4.065431 +step:817 train loss:4.116847 +step:818 train loss:4.078553 +step:819 train loss:4.072567 +step:820 train loss:4.066098 +step:821 train loss:4.031630 +step:822 train loss:4.035147 +step:823 train loss:4.112145 +step:824 train loss:4.010054 +step:825 train loss:3.989659 +step:826 train loss:4.075726 +step:827 train loss:3.980773 +step:828 train loss:4.050828 +step:829 train loss:4.052439 +step:830 train loss:4.059731 +step:831 train loss:4.098456 +step:832 train loss:4.140781 +step:833 train loss:4.097697 +step:834 train loss:4.089396 +step:835 train loss:4.053017 +step:836 train loss:4.055014 +step:837 train loss:4.044898 +step:838 train loss:4.023007 +step:839 train loss:4.037863 +step:840 train loss:4.070719 +step:841 train loss:4.074975 +step:842 train loss:4.064230 +step:843 train loss:4.070819 +step:844 train loss:4.022994 +step:845 train loss:4.008590 +step:846 train loss:4.113473 +step:847 train loss:4.079300 +step:848 train loss:4.034792 +step:849 train loss:4.057106 +step:850 train loss:4.080642 +step:851 train loss:4.038424 +step:852 train loss:4.147719 +step:853 train loss:4.031757 +step:854 train loss:4.054152 +step:855 train loss:4.061368 +step:856 train loss:4.013143 +step:857 train loss:4.062820 +step:858 train loss:4.093670 +step:859 train loss:3.999162 +step:860 train loss:4.030816 +step:861 train loss:4.069813 +step:862 train loss:4.012973 +step:863 train loss:4.022760 +step:864 train loss:4.008699 +step:865 train loss:4.031788 +step:866 train loss:4.061510 +step:867 train loss:4.185381 +step:868 train loss:4.028044 +step:869 train loss:4.043960 +step:870 train loss:3.985748 +step:871 train loss:3.977953 +step:872 train loss:4.058788 +step:873 train loss:4.024539 +step:874 train loss:4.047569 +step:875 train loss:3.975778 +step:876 train loss:4.055025 +step:877 train loss:3.996814 +step:878 train loss:4.099492 +step:879 train loss:3.985369 +step:880 train loss:4.103399 +step:881 train loss:4.018937 +step:882 train loss:3.986954 +step:883 train loss:4.037558 +step:884 train loss:4.063250 +step:885 train loss:3.993748 +step:886 train loss:4.014138 +step:887 train loss:4.020526 +step:888 train loss:4.130175 +step:889 train loss:4.056819 +step:890 train loss:4.002168 +step:891 train loss:3.968784 +step:892 train loss:3.957251 +step:893 train loss:4.031407 +step:894 train loss:3.994007 +step:895 train loss:3.981358 +step:896 train loss:4.066787 +step:897 train loss:3.990699 +step:898 train loss:4.019187 +step:899 train loss:4.022921 +step:900 train loss:4.056522 +step:901 train loss:3.978789 +step:902 train loss:4.009541 +step:903 train loss:4.117578 +step:904 train loss:4.126571 +step:905 train loss:4.001153 +step:906 train loss:4.013186 +step:907 train loss:4.048752 +step:908 train loss:4.050508 +step:909 train loss:3.996090 +step:910 train loss:4.041680 +step:911 train loss:4.155552 +step:912 train loss:3.959054 +step:913 train loss:4.027663 +step:914 train loss:3.974047 +step:915 train loss:4.012094 +step:916 train loss:4.068685 +step:917 train loss:4.022847 +step:918 train loss:4.099465 +step:919 train loss:4.191488 +step:920 train loss:3.926899 
+step:921 train loss:4.050330 +step:922 train loss:4.020065 +step:923 train loss:3.939059 +step:924 train loss:3.991750 +step:925 train loss:3.953993 +step:926 train loss:4.054363 +step:927 train loss:3.952144 +step:928 train loss:4.039192 +step:929 train loss:4.005657 +step:930 train loss:4.008892 +step:931 train loss:4.044004 +step:932 train loss:3.982248 +step:933 train loss:4.028984 +step:934 train loss:4.061998 +step:935 train loss:4.050508 +step:936 train loss:4.020059 +step:937 train loss:4.018885 +step:938 train loss:4.009090 +step:939 train loss:3.915923 +step:940 train loss:4.012803 +step:941 train loss:3.957534 +step:942 train loss:3.941157 +step:943 train loss:4.044952 +step:944 train loss:4.001117 +step:945 train loss:4.002785 +step:946 train loss:4.016947 +step:947 train loss:4.182015 +step:948 train loss:3.974950 +step:949 train loss:4.013824 +step:950 train loss:3.949146 +step:951 train loss:3.993567 +step:952 train loss:4.041732 +step:953 train loss:3.972228 +step:954 train loss:4.018444 +step:955 train loss:3.951267 +step:956 train loss:3.982934 +step:957 train loss:3.977380 +step:958 train loss:4.050637 +step:959 train loss:3.987327 +step:960 train loss:4.083629 +step:961 train loss:4.019118 +step:962 train loss:3.982616 +step:963 train loss:3.970280 +step:964 train loss:3.994504 +step:965 train loss:3.916994 +step:966 train loss:3.937085 +step:967 train loss:3.999460 +step:968 train loss:4.003260 +step:969 train loss:3.952200 +step:970 train loss:4.002769 +step:971 train loss:3.992296 +step:972 train loss:3.902668 +step:973 train loss:4.008469 +step:974 train loss:3.942632 +step:975 train loss:4.044232 +step:976 train loss:3.991693 +step:977 train loss:3.980547 +step:978 train loss:3.980944 +step:979 train loss:3.969186 +step:980 train loss:3.969376 +step:981 train loss:3.945069 +step:982 train loss:3.966747 +step:983 train loss:3.973705 +step:984 train loss:3.998477 +step:985 train loss:3.965694 +step:986 train loss:3.993650 +step:987 train loss:4.026561 +step:988 train loss:4.011799 +step:989 train loss:3.978219 +step:990 train loss:3.972255 +step:991 train loss:3.885073 +step:992 train loss:3.958714 +step:993 train loss:3.989586 +step:994 train loss:3.921316 +step:995 train loss:3.938484 +step:996 train loss:3.978827 +step:997 train loss:3.943009 +step:998 train loss:3.939395 +step:999 train loss:3.981697 +step:1000 validation loss:3.910584 total_sharp:2.7732e-03 L1_sharp:3.7151e-03 L2_sharp:3.8647e-04 L3_sharp:8.5388e-04 L4_sharp:4.0434e-04 L5_sharp:4.1128e-04 L6_sharp:3.5567e-04 L7_sharp:3.8549e-04 L8_sharp:2.8675e-04 L9_sharp:2.3959e-04 L10_sharp:2.0563e-04 L11_sharp:1.9294e-04 L12_sharp:2.2965e-04 total_fnorm:4.2955e+00 total_l1_linf:3.5050e+04 total_spectral:4.2955e+00 L1_fnorm:1.1896e+00 L2_fnorm:1.0907e+00 L3_fnorm:1.1030e+00 L4_fnorm:1.1540e+00 L5_fnorm:1.1854e+00 L6_fnorm:1.2045e+00 L7_fnorm:1.2038e+00 L8_fnorm:1.2068e+00 L9_fnorm:1.2053e+00 L10_fnorm:1.2050e+00 L11_fnorm:1.2006e+00 L12_fnorm:1.2034e+00 L1_l1linf:8.5225e-01 L2_l1linf:7.9117e-01 L3_l1linf:7.9424e-01 L4_l1linf:8.1500e-01 L5_l1linf:8.2493e-01 L6_l1linf:8.2427e-01 L7_l1linf:8.1762e-01 L8_l1linf:8.1726e-01 L9_l1linf:8.0816e-01 L10_l1linf:8.0682e-01 L11_l1linf:8.0271e-01 L12_l1linf:8.0210e-01 L1_spectral:2.4107e-02 L2_spectral:2.4072e-02 L3_spectral:2.4082e-02 L4_spectral:2.4106e-02 L5_spectral:2.4097e-02 L6_spectral:2.4088e-02 L7_spectral:2.4094e-02 L8_spectral:2.4087e-02 L9_spectral:2.4093e-02 L10_spectral:2.4091e-02 L11_spectral:2.4087e-02 L12_spectral:2.4097e-02 ip_v_neg_g:2.8729e-02 
cos_v_neg_g:7.8299e-03 v_norm:4.2955e+00 g_norm:8.5419e-01 hv_norm:6.3951e-01 cos_v_hv:1.8627e-02 hg_norm:9.1513e+00 cos_g_hg:6.2937e-01 v_par:3.2206e-03 v_perp:4.2955e+00 L1_cos_v_neg_g:9.3488e-03 L1_v_norm:1.1896e+00 L2_cos_v_neg_g:1.4546e-02 L2_v_norm:1.0907e+00 L3_cos_v_neg_g:1.8984e-02 L3_v_norm:1.1030e+00 L4_cos_v_neg_g:1.7883e-02 L4_v_norm:1.1540e+00 L5_cos_v_neg_g:1.4910e-02 L5_v_norm:1.1854e+00 L6_cos_v_neg_g:1.1517e-02 L6_v_norm:1.2045e+00 L7_cos_v_neg_g:1.3143e-02 L7_v_norm:1.2038e+00 L8_cos_v_neg_g:1.1767e-02 L8_v_norm:1.2068e+00 L9_cos_v_neg_g:1.2030e-02 L9_v_norm:1.2053e+00 L10_cos_v_neg_g:1.0541e-02 L10_v_norm:1.2050e+00 L11_cos_v_neg_g:9.1040e-03 L11_v_norm:1.2006e+00 L12_cos_v_neg_g:8.5789e-03 L12_v_norm:1.2034e+00 +step:1000 train loss:3.985999 +step:1001 train loss:3.986454 +step:1002 train loss:3.979855 +step:1003 train loss:3.950433 +step:1004 train loss:3.926511 +step:1005 train loss:3.942615 +step:1006 train loss:4.034457 +step:1007 train loss:3.959594 +step:1008 train loss:3.952739 +step:1009 train loss:4.017930 +step:1010 train loss:3.977951 +step:1011 train loss:4.000876 +step:1012 train loss:3.948805 +step:1013 train loss:3.918593 +step:1014 train loss:3.926535 +step:1015 train loss:3.959490 +step:1016 train loss:3.982812 +step:1017 train loss:3.929077 +step:1018 train loss:3.986354 +step:1019 train loss:3.933679 +step:1020 train loss:3.929251 +step:1021 train loss:4.024270 +step:1022 train loss:3.922442 +step:1023 train loss:3.934385 +step:1024 train loss:4.016016 +step:1025 train loss:3.975505 +step:1026 train loss:3.916440 +step:1027 train loss:3.951281 +step:1028 train loss:3.960762 +step:1029 train loss:3.908723 +step:1030 train loss:4.001119 +step:1031 train loss:3.984544 +step:1032 train loss:3.949665 +step:1033 train loss:3.914243 +step:1034 train loss:3.976331 +step:1035 train loss:3.981539 +step:1036 train loss:3.900599 +step:1037 train loss:3.957576 +step:1038 train loss:3.977787 +step:1039 train loss:4.131920 +step:1040 train loss:3.954412 +step:1041 train loss:3.930262 +step:1042 train loss:3.956682 +step:1043 train loss:3.963002 +step:1044 train loss:3.943293 +step:1045 train loss:3.963380 +step:1046 train loss:3.903869 +step:1047 train loss:3.936959 +step:1048 train loss:3.925988 +step:1049 train loss:3.985535 +step:1050 train loss:3.951585 +step:1051 train loss:3.917656 +step:1052 train loss:4.034389 +step:1053 train loss:3.922543 +step:1054 train loss:3.913136 +step:1055 train loss:3.990354 +step:1056 train loss:3.929268 +step:1057 train loss:3.833038 +step:1058 train loss:3.930547 +step:1059 train loss:3.913957 +step:1060 train loss:3.916436 +step:1061 train loss:3.965660 +step:1062 train loss:3.927974 +step:1063 train loss:3.936203 +step:1064 train loss:3.924850 +step:1065 train loss:3.934513 +step:1066 train loss:3.908529 +step:1067 train loss:3.940552 +step:1068 train loss:3.898218 +step:1069 train loss:3.915667 +step:1070 train loss:3.932438 +step:1071 train loss:3.943236 +step:1072 train loss:3.969468 +step:1073 train loss:3.888778 +step:1074 train loss:3.899378 +step:1075 train loss:3.893930 +step:1076 train loss:3.975119 +step:1077 train loss:3.901546 +step:1078 train loss:3.954276 +step:1079 train loss:3.996345 +step:1080 train loss:3.867487 +step:1081 train loss:3.941072 +step:1082 train loss:3.932688 +step:1083 train loss:3.898065 +step:1084 train loss:3.882357 +step:1085 train loss:3.943887 +step:1086 train loss:3.924201 +step:1087 train loss:3.923339 +step:1088 train loss:3.917709 +step:1089 train loss:3.927803 +step:1090 train 
loss:3.872256 +step:1091 train loss:3.868423 +step:1092 train loss:3.968208 +step:1093 train loss:3.855221 +step:1094 train loss:3.915345 +step:1095 train loss:3.963946 +step:1096 train loss:3.891838 +step:1097 train loss:3.897913 +step:1098 train loss:3.869887 +step:1099 train loss:3.920029 +step:1100 train loss:3.971456 +step:1101 train loss:3.956713 +step:1102 train loss:3.964557 +step:1103 train loss:3.892943 +step:1104 train loss:3.924743 +step:1105 train loss:3.976178 +step:1106 train loss:3.913608 +step:1107 train loss:4.040005 +step:1108 train loss:3.973044 +step:1109 train loss:3.947515 +step:1110 train loss:3.896260 +step:1111 train loss:3.950948 +step:1112 train loss:3.855789 +step:1113 train loss:3.853718 +step:1114 train loss:3.839904 +step:1115 train loss:3.878368 +step:1116 train loss:3.938191 +step:1117 train loss:3.979275 +step:1118 train loss:3.993668 +step:1119 train loss:3.920401 +step:1120 train loss:3.944476 +step:1121 train loss:3.916097 +step:1122 train loss:3.897834 +step:1123 train loss:4.008534 +step:1124 train loss:3.886553 +step:1125 train loss:3.895183 +step:1126 train loss:3.862302 +step:1127 train loss:3.893147 +step:1128 train loss:3.887431 +step:1129 train loss:3.946409 +step:1130 train loss:3.868295 +step:1131 train loss:3.957852 +step:1132 train loss:3.898865 +step:1133 train loss:3.914103 +step:1134 train loss:3.891196 +step:1135 train loss:3.932568 +step:1136 train loss:3.952752 +step:1137 train loss:3.875634 +step:1138 train loss:3.948173 +step:1139 train loss:3.893893 +step:1140 train loss:3.983892 +step:1141 train loss:3.931376 +step:1142 train loss:3.861896 +step:1143 train loss:3.943158 +step:1144 train loss:3.968794 +step:1145 train loss:3.914682 +step:1146 train loss:3.872093 +step:1147 train loss:3.884308 +step:1148 train loss:3.913851 +step:1149 train loss:3.968426 +step:1150 train loss:3.964579 +step:1151 train loss:3.974739 +step:1152 train loss:3.875004 +step:1153 train loss:3.875448 +step:1154 train loss:3.860421 +step:1155 train loss:3.960945 +step:1156 train loss:3.863671 +step:1157 train loss:3.891721 +step:1158 train loss:3.950928 +step:1159 train loss:3.948118 +step:1160 train loss:3.872696 +step:1161 train loss:3.961446 +step:1162 train loss:3.899195 +step:1163 train loss:3.887179 +step:1164 train loss:3.793961 +step:1165 train loss:3.933796 +step:1166 train loss:3.860313 +step:1167 train loss:3.869098 +step:1168 train loss:3.927940 +step:1169 train loss:3.885350 +step:1170 train loss:3.891463 +step:1171 train loss:3.919344 +step:1172 train loss:3.871799 +step:1173 train loss:3.911658 +step:1174 train loss:3.848119 +step:1175 train loss:3.883443 +step:1176 train loss:4.002244 +step:1177 train loss:3.844891 +step:1178 train loss:3.906237 +step:1179 train loss:3.851622 +step:1180 train loss:3.888997 +step:1181 train loss:3.873919 +step:1182 train loss:3.934071 +step:1183 train loss:3.905184 +step:1184 train loss:3.847840 +step:1185 train loss:3.880741 +step:1186 train loss:3.878515 +step:1187 train loss:3.848693 +step:1188 train loss:3.885121 +step:1189 train loss:3.823963 +step:1190 train loss:3.879138 +step:1191 train loss:3.934857 +step:1192 train loss:3.888444 +step:1193 train loss:3.886912 +step:1194 train loss:4.006311 +step:1195 train loss:3.980308 +step:1196 train loss:3.871512 +step:1197 train loss:3.893572 +step:1198 train loss:3.883118 +step:1199 train loss:3.878740 +step:1200 train loss:3.947232 +step:1201 train loss:3.918189 +step:1202 train loss:3.857025 +step:1203 train loss:3.845961 +step:1204 train loss:3.883658 
+step:1205 train loss:3.904772 +step:1206 train loss:3.834119 +step:1207 train loss:3.921688 +step:1208 train loss:3.891290 +step:1209 train loss:3.820208 +step:1210 train loss:3.921909 +step:1211 train loss:3.864765 +step:1212 train loss:3.892876 +step:1213 train loss:3.831997 +step:1214 train loss:3.904675 +step:1215 train loss:3.877148 +step:1216 train loss:3.899227 +step:1217 train loss:3.819900 +step:1218 train loss:3.893640 +step:1219 train loss:3.832865 +step:1220 train loss:3.862941 +step:1221 train loss:3.875661 +step:1222 train loss:3.925892 +step:1223 train loss:3.900576 +step:1224 train loss:3.866769 +step:1225 train loss:3.914196 +step:1226 train loss:3.860113 +step:1227 train loss:3.867686 +step:1228 train loss:3.870135 +step:1229 train loss:3.836497 +step:1230 train loss:3.834207 +step:1231 train loss:3.890479 +step:1232 train loss:3.841121 +step:1233 train loss:3.840171 +step:1234 train loss:3.923939 +step:1235 train loss:3.894213 +step:1236 train loss:3.802778 +step:1237 train loss:3.915868 +step:1238 train loss:3.856117 +step:1239 train loss:3.894040 +step:1240 train loss:3.819479 +step:1241 train loss:3.834270 +step:1242 train loss:3.867724 +step:1243 train loss:3.812078 +step:1244 train loss:3.939207 +step:1245 train loss:3.949461 +step:1246 train loss:3.882035 +step:1247 train loss:3.856673 +step:1248 train loss:3.884719 +step:1249 train loss:3.810860 +step:1250 validation loss:3.812842 +step:1250 train loss:3.834325 +step:1251 train loss:3.906398 +step:1252 train loss:3.860066 +step:1253 train loss:3.814522 +step:1254 train loss:3.845968 +step:1255 train loss:3.835512 +step:1256 train loss:3.882511 +step:1257 train loss:3.858536 +step:1258 train loss:3.915580 +step:1259 train loss:3.887315 +step:1260 train loss:3.803710 +step:1261 train loss:4.047054 +step:1262 train loss:3.880475 +step:1263 train loss:3.833821 +step:1264 train loss:3.857303 +step:1265 train loss:3.900570 +step:1266 train loss:3.849484 +step:1267 train loss:3.855643 +step:1268 train loss:3.865328 +step:1269 train loss:3.861006 +step:1270 train loss:3.787932 +step:1271 train loss:3.798984 +step:1272 train loss:3.831858 +step:1273 train loss:3.886961 +step:1274 train loss:3.852092 +step:1275 train loss:3.885321 +step:1276 train loss:3.878518 +step:1277 train loss:3.886804 +step:1278 train loss:3.828723 +step:1279 train loss:3.840073 +step:1280 train loss:3.853080 +step:1281 train loss:3.901846 +step:1282 train loss:3.839271 +step:1283 train loss:3.908348 +step:1284 train loss:3.849275 +step:1285 train loss:3.903346 +step:1286 train loss:3.801517 +step:1287 train loss:3.836557 +step:1288 train loss:3.866459 +step:1289 train loss:3.933419 +step:1290 train loss:3.887233 +step:1291 train loss:3.848115 +step:1292 train loss:3.828400 +step:1293 train loss:3.827388 +step:1294 train loss:3.872517 +step:1295 train loss:3.860395 +step:1296 train loss:3.898782 +step:1297 train loss:3.854376 +step:1298 train loss:3.876174 +step:1299 train loss:3.913353 +step:1300 train loss:3.835258 +step:1301 train loss:3.875875 +step:1302 train loss:3.831671 +step:1303 train loss:3.875414 +step:1304 train loss:3.901937 +step:1305 train loss:3.878231 +step:1306 train loss:3.881557 +step:1307 train loss:3.863267 +step:1308 train loss:3.810505 +step:1309 train loss:3.827482 +step:1310 train loss:3.816127 +step:1311 train loss:3.817706 +step:1312 train loss:3.896977 +step:1313 train loss:3.806222 +step:1314 train loss:3.817523 +step:1315 train loss:3.858420 +step:1316 train loss:3.837579 +step:1317 train loss:3.726537 +step:1318 
train loss:3.894098 +step:1319 train loss:3.923429 +step:1320 train loss:3.835183 +step:1321 train loss:3.823173 +step:1322 train loss:3.922337 +step:1323 train loss:3.870320 +step:1324 train loss:3.976714 +step:1325 train loss:3.846299 +step:1326 train loss:3.878666 +step:1327 train loss:3.896332 +step:1328 train loss:3.805110 +step:1329 train loss:3.828654 +step:1330 train loss:3.859808 +step:1331 train loss:3.758273 +step:1332 train loss:3.896071 +step:1333 train loss:3.872022 +step:1334 train loss:3.862742 +step:1335 train loss:3.890951 +step:1336 train loss:3.896844 +step:1337 train loss:3.860807 +step:1338 train loss:3.845417 +step:1339 train loss:3.920116 +step:1340 train loss:3.890521 +step:1341 train loss:3.869261 +step:1342 train loss:3.842537 +step:1343 train loss:3.834528 +step:1344 train loss:3.894837 +step:1345 train loss:3.850363 +step:1346 train loss:3.942219 +step:1347 train loss:3.865605 +step:1348 train loss:3.818172 +step:1349 train loss:3.765922 +step:1350 train loss:3.814911 +step:1351 train loss:3.874738 +step:1352 train loss:3.848452 +step:1353 train loss:3.828521 +step:1354 train loss:3.831344 +step:1355 train loss:3.902117 +step:1356 train loss:3.810781 +step:1357 train loss:3.840221 +step:1358 train loss:3.828934 +step:1359 train loss:3.827382 +step:1360 train loss:3.863491 +step:1361 train loss:3.987765 +step:1362 train loss:3.891001 +step:1363 train loss:3.778844 +step:1364 train loss:3.803713 +step:1365 train loss:3.795779 +step:1366 train loss:3.839191 +step:1367 train loss:3.769046 +step:1368 train loss:3.802635 +step:1369 train loss:3.838466 +step:1370 train loss:3.856849 +step:1371 train loss:3.821708 +step:1372 train loss:3.859346 +step:1373 train loss:3.889579 +step:1374 train loss:3.891304 +step:1375 train loss:3.839927 +step:1376 train loss:3.866440 +step:1377 train loss:3.854576 +step:1378 train loss:3.837671 +step:1379 train loss:3.812667 +step:1380 train loss:3.885725 +step:1381 train loss:3.836257 +step:1382 train loss:3.820404 +step:1383 train loss:3.798226 +step:1384 train loss:3.871742 +step:1385 train loss:3.775913 +step:1386 train loss:3.845638 +step:1387 train loss:3.852697 +step:1388 train loss:3.817892 +step:1389 train loss:3.788887 +step:1390 train loss:3.830621 +step:1391 train loss:3.855544 +step:1392 train loss:3.839648 +step:1393 train loss:3.886404 +step:1394 train loss:3.831096 +step:1395 train loss:3.859485 +step:1396 train loss:3.844980 +step:1397 train loss:3.863802 +step:1398 train loss:3.865888 +step:1399 train loss:3.837330 +step:1400 train loss:3.815417 +step:1401 train loss:3.807997 +step:1402 train loss:3.816355 +step:1403 train loss:3.779194 +step:1404 train loss:3.838042 +step:1405 train loss:3.803362 +step:1406 train loss:3.834016 +step:1407 train loss:3.825516 +step:1408 train loss:3.801248 +step:1409 train loss:3.793233 +step:1410 train loss:3.810722 +step:1411 train loss:3.846235 +step:1412 train loss:3.893309 +step:1413 train loss:3.821849 +step:1414 train loss:3.854354 +step:1415 train loss:3.821969 +step:1416 train loss:3.864774 +step:1417 train loss:3.833892 +step:1418 train loss:3.773613 +step:1419 train loss:3.791107 +step:1420 train loss:3.808910 +step:1421 train loss:3.852396 +step:1422 train loss:3.825585 +step:1423 train loss:3.923913 +step:1424 train loss:3.823104 +step:1425 train loss:3.785253 +step:1426 train loss:3.809559 +step:1427 train loss:3.795868 +step:1428 train loss:3.777897 +step:1429 train loss:3.802679 +step:1430 train loss:3.817853 +step:1431 train loss:3.835343 +step:1432 train loss:3.819417 
+step:1433 train loss:3.804123 +step:1434 train loss:3.780109 +step:1435 train loss:3.770593 +step:1436 train loss:3.850817 +step:1437 train loss:3.777654 +step:1438 train loss:3.767426 +step:1439 train loss:3.764477 +step:1440 train loss:3.794465 +step:1441 train loss:3.881466 +step:1442 train loss:3.837168 +step:1443 train loss:3.764427 +step:1444 train loss:3.781899 +step:1445 train loss:3.785111 +step:1446 train loss:3.834550 +step:1447 train loss:3.825114 +step:1448 train loss:3.791486 +step:1449 train loss:3.812873 +step:1450 train loss:3.826313 +step:1451 train loss:3.759698 +step:1452 train loss:3.811739 +step:1453 train loss:3.812875 +step:1454 train loss:3.790549 +step:1455 train loss:3.737734 +step:1456 train loss:3.805977 +step:1457 train loss:3.748293 +step:1458 train loss:3.883026 +step:1459 train loss:3.807256 +step:1460 train loss:3.773868 +step:1461 train loss:3.834797 +step:1462 train loss:3.844774 +step:1463 train loss:3.798780 +step:1464 train loss:3.789696 +step:1465 train loss:3.776222 +step:1466 train loss:3.743238 +step:1467 train loss:3.880941 +step:1468 train loss:3.768826 +step:1469 train loss:3.848814 +step:1470 train loss:3.785898 +step:1471 train loss:3.778996 +step:1472 train loss:3.785357 +step:1473 train loss:3.783053 +step:1474 train loss:3.727409 +step:1475 train loss:3.785816 +step:1476 train loss:3.865325 +step:1477 train loss:3.817628 +step:1478 train loss:3.748119 +step:1479 train loss:3.784237 +step:1480 train loss:3.776597 +step:1481 train loss:3.753229 +step:1482 train loss:3.818173 +step:1483 train loss:3.803884 +step:1484 train loss:3.834726 +step:1485 train loss:3.854924 +step:1486 train loss:3.783235 +step:1487 train loss:3.775875 +step:1488 train loss:3.776846 +step:1489 train loss:3.770148 +step:1490 train loss:3.825018 +step:1491 train loss:3.827340 +step:1492 train loss:3.812293 +step:1493 train loss:3.762985 +step:1494 train loss:3.794048 +step:1495 train loss:3.782921 +step:1496 train loss:3.745401 +step:1497 train loss:3.817729 +step:1498 train loss:3.729241 +step:1499 train loss:3.773768 +step:1500 validation loss:3.741905 total_sharp:2.1422e-03 L1_sharp:7.2867e-03 L2_sharp:2.4229e-04 L3_sharp:4.5749e-04 L4_sharp:1.5310e-04 L5_sharp:1.8154e-04 L6_sharp:1.7334e-04 L7_sharp:2.4703e-04 L8_sharp:1.9810e-04 L9_sharp:1.5683e-04 L10_sharp:1.2939e-04 L11_sharp:1.1920e-04 L12_sharp:1.2846e-04 total_fnorm:4.2615e+00 total_l1_linf:3.4852e+04 total_spectral:4.2615e+00 L1_fnorm:1.1288e+00 L2_fnorm:1.0393e+00 L3_fnorm:1.0681e+00 L4_fnorm:1.1468e+00 L5_fnorm:1.1871e+00 L6_fnorm:1.2031e+00 L7_fnorm:1.2035e+00 L8_fnorm:1.2085e+00 L9_fnorm:1.2073e+00 L10_fnorm:1.2068e+00 L11_fnorm:1.2016e+00 L12_fnorm:1.2052e+00 L1_l1linf:8.7389e-01 L2_l1linf:7.9019e-01 L3_l1linf:7.9368e-01 L4_l1linf:8.3344e-01 L5_l1linf:8.3414e-01 L6_l1linf:8.3776e-01 L7_l1linf:8.2453e-01 L8_l1linf:8.1505e-01 L9_l1linf:8.1518e-01 L10_l1linf:8.1338e-01 L11_l1linf:8.0321e-01 L12_l1linf:7.9388e-01 L1_spectral:2.4092e-02 L2_spectral:2.4090e-02 L3_spectral:2.4086e-02 L4_spectral:2.4107e-02 L5_spectral:2.4086e-02 L6_spectral:2.4090e-02 L7_spectral:2.4095e-02 L8_spectral:2.4089e-02 L9_spectral:2.4098e-02 L10_spectral:2.4086e-02 L11_spectral:2.4092e-02 L12_spectral:2.4092e-02 ip_v_neg_g:1.7653e-02 cos_v_neg_g:3.5089e-03 v_norm:4.2615e+00 g_norm:1.1805e+00 hv_norm:1.0126e+00 cos_v_hv:9.0155e-03 hg_norm:4.2667e+01 cos_g_hg:7.0567e-01 v_par:3.1074e-03 v_perp:4.2615e+00 L1_cos_v_neg_g:5.6665e-03 L1_v_norm:1.1288e+00 L2_cos_v_neg_g:9.4899e-03 L2_v_norm:1.0393e+00 L3_cos_v_neg_g:9.6118e-03 
L3_v_norm:1.0681e+00 L4_cos_v_neg_g:8.4812e-03 L4_v_norm:1.1468e+00 L5_cos_v_neg_g:8.2146e-03 L5_v_norm:1.1871e+00 L6_cos_v_neg_g:7.0029e-03 L6_v_norm:1.2031e+00 L7_cos_v_neg_g:7.4760e-03 L7_v_norm:1.2035e+00 L8_cos_v_neg_g:7.8407e-03 L8_v_norm:1.2085e+00 L9_cos_v_neg_g:6.4291e-03 L9_v_norm:1.2073e+00 L10_cos_v_neg_g:5.9355e-03 L10_v_norm:1.2068e+00 L11_cos_v_neg_g:4.7378e-03 L11_v_norm:1.2016e+00 L12_cos_v_neg_g:3.3715e-03 L12_v_norm:1.2052e+00 +step:1500 train loss:3.760307 +step:1501 train loss:3.796243 +step:1502 train loss:3.724252 +step:1503 train loss:3.786214 +step:1504 train loss:3.753769 +step:1505 train loss:3.728809 +step:1506 train loss:3.718485 +step:1507 train loss:3.729406 +step:1508 train loss:3.747895 +step:1509 train loss:3.790058 +step:1510 train loss:3.744013 +step:1511 train loss:3.768404 +step:1512 train loss:3.746319 +step:1513 train loss:3.809281 +step:1514 train loss:3.767708 +step:1515 train loss:3.819653 +step:1516 train loss:3.755202 +step:1517 train loss:3.757733 +step:1518 train loss:3.847819 +step:1519 train loss:3.801703 +step:1520 train loss:3.851119 +step:1521 train loss:3.747353 +step:1522 train loss:3.813118 +step:1523 train loss:3.804442 +step:1524 train loss:3.733807 +step:1525 train loss:3.815694 +step:1526 train loss:3.731161 +step:1527 train loss:3.779975 +step:1528 train loss:3.841164 +step:1529 train loss:3.787135 +step:1530 train loss:3.838712 +step:1531 train loss:3.753214 +step:1532 train loss:3.827885 +step:1533 train loss:3.793042 +step:1534 train loss:3.750126 +step:1535 train loss:3.795844 +step:1536 train loss:3.831391 +step:1537 train loss:3.775977 +step:1538 train loss:3.791326 +step:1539 train loss:3.775371 +step:1540 train loss:3.804847 +step:1541 train loss:3.754362 +step:1542 train loss:3.853041 +step:1543 train loss:3.882799 +step:1544 train loss:3.742106 +step:1545 train loss:3.730362 +step:1546 train loss:3.772414 +step:1547 train loss:3.755945 +step:1548 train loss:3.792020 +step:1549 train loss:3.716316 +step:1550 train loss:3.839336 +step:1551 train loss:3.770156 +step:1552 train loss:3.801516 +step:1553 train loss:3.812488 +step:1554 train loss:3.822752 +step:1555 train loss:3.774022 +step:1556 train loss:3.761215 +step:1557 train loss:3.765783 +step:1558 train loss:3.794733 +step:1559 train loss:3.759777 +step:1560 train loss:3.835277 +step:1561 train loss:3.808059 +step:1562 train loss:3.700218 +step:1563 train loss:3.687091 +step:1564 train loss:3.818434 +step:1565 train loss:3.789449 +step:1566 train loss:3.809525 +step:1567 train loss:3.802024 +step:1568 train loss:3.760452 +step:1569 train loss:3.752052 +step:1570 train loss:3.777877 +step:1571 train loss:3.743891 +step:1572 train loss:3.746863 +step:1573 train loss:3.790437 +step:1574 train loss:3.750237 +step:1575 train loss:3.773347 +step:1576 train loss:3.730591 +step:1577 train loss:3.760019 +step:1578 train loss:3.738824 +step:1579 train loss:3.813838 +step:1580 train loss:3.775092 +step:1581 train loss:3.805523 +step:1582 train loss:3.811452 +step:1583 train loss:3.779357 +step:1584 train loss:3.698009 +step:1585 train loss:3.781608 +step:1586 train loss:3.758468 +step:1587 train loss:3.768197 +step:1588 train loss:3.755819 +step:1589 train loss:3.804705 +step:1590 train loss:3.709565 +step:1591 train loss:3.761920 +step:1592 train loss:3.717862 +step:1593 train loss:3.753742 +step:1594 train loss:3.759721 +step:1595 train loss:3.757466 +step:1596 train loss:3.763161 +step:1597 train loss:3.691953 +step:1598 train loss:3.789715 +step:1599 train loss:3.803529 
+step:1600 train loss:3.674543 +step:1601 train loss:3.756937 +step:1602 train loss:3.816266 +step:1603 train loss:3.811525 +step:1604 train loss:3.733595 +step:1605 train loss:3.792913 +step:1606 train loss:3.836976 +step:1607 train loss:3.716657 +step:1608 train loss:3.751660 +step:1609 train loss:3.772706 +step:1610 train loss:3.826052 +step:1611 train loss:3.755172 +step:1612 train loss:3.675493 +step:1613 train loss:3.748086 +step:1614 train loss:3.854794 +step:1615 train loss:3.774943 +step:1616 train loss:3.781957 +step:1617 train loss:3.767695 +step:1618 train loss:3.765105 +step:1619 train loss:3.948251 +step:1620 train loss:3.730329 +step:1621 train loss:3.788386 +step:1622 train loss:3.712292 +step:1623 train loss:3.778523 +step:1624 train loss:3.753239 +step:1625 train loss:3.825937 +step:1626 train loss:3.711785 +step:1627 train loss:3.728399 +step:1628 train loss:3.743631 +step:1629 train loss:3.781968 +step:1630 train loss:3.789351 +step:1631 train loss:3.745569 +step:1632 train loss:3.715295 +step:1633 train loss:3.735431 +step:1634 train loss:3.794747 +step:1635 train loss:3.731035 +step:1636 train loss:3.719500 +step:1637 train loss:3.797392 +step:1638 train loss:3.898185 +step:1639 train loss:3.694871 +step:1640 train loss:3.788625 +step:1641 train loss:3.747640 +step:1642 train loss:3.845858 +step:1643 train loss:3.747132 +step:1644 train loss:3.747522 +step:1645 train loss:3.727929 +step:1646 train loss:3.812334 +step:1647 train loss:3.701156 +step:1648 train loss:3.765241 +step:1649 train loss:3.728517 +step:1650 train loss:3.744986 +step:1651 train loss:3.756914 +step:1652 train loss:3.778966 +step:1653 train loss:3.781488 +step:1654 train loss:3.783390 +step:1655 train loss:3.753540 +step:1656 train loss:3.744888 +step:1657 train loss:3.751854 +step:1658 train loss:3.719809 +step:1659 train loss:3.797456 +step:1660 train loss:3.700218 +step:1661 train loss:3.816534 +step:1662 train loss:3.744183 +step:1663 train loss:3.739694 +step:1664 train loss:3.832267 +step:1665 train loss:3.754293 +step:1666 train loss:3.765498 +step:1667 train loss:3.789343 +step:1668 train loss:3.755756 +step:1669 train loss:3.721371 +step:1670 train loss:3.782063 +step:1671 train loss:3.775941 +step:1672 train loss:3.772294 +step:1673 train loss:3.723661 +step:1674 train loss:3.728411 +step:1675 train loss:3.762127 +step:1676 train loss:4.038156 +step:1677 train loss:3.778705 +step:1678 train loss:3.691679 +step:1679 train loss:3.813319 +step:1680 train loss:3.741512 +step:1681 train loss:3.797042 +step:1682 train loss:3.752592 +step:1683 train loss:3.740048 +step:1684 train loss:3.693356 +step:1685 train loss:3.760277 +step:1686 train loss:3.743383 +step:1687 train loss:3.761870 +step:1688 train loss:3.737690 +step:1689 train loss:3.730561 +step:1690 train loss:3.749695 +step:1691 train loss:3.744836 +step:1692 train loss:3.760004 +step:1693 train loss:3.735436 +step:1694 train loss:3.682110 +step:1695 train loss:3.708016 +step:1696 train loss:3.717698 +step:1697 train loss:3.763667 +step:1698 train loss:3.760829 +step:1699 train loss:3.714581 +step:1700 train loss:3.797040 +step:1701 train loss:3.737425 +step:1702 train loss:3.730555 +step:1703 train loss:3.743954 +step:1704 train loss:3.755831 +step:1705 train loss:3.769091 +step:1706 train loss:3.775984 +step:1707 train loss:3.774925 +step:1708 train loss:3.696675 +step:1709 train loss:3.802344 +step:1710 train loss:3.714085 +step:1711 train loss:3.723885 +step:1712 train loss:3.751912 +step:1713 train loss:3.710920 +step:1714 train 
loss:4.085732 +step:1715 train loss:3.726939 +step:1716 train loss:3.716739 +step:1717 train loss:3.714229 +step:1718 train loss:3.797050 +step:1719 train loss:3.702877 +step:1720 train loss:3.788030 +step:1721 train loss:3.726891 +step:1722 train loss:3.697708 +step:1723 train loss:3.800546 +step:1724 train loss:3.747586 +step:1725 train loss:3.746814 +step:1726 train loss:3.740131 +step:1727 train loss:3.780633 +step:1728 train loss:3.789149 +step:1729 train loss:3.710999 +step:1730 train loss:3.783968 +step:1731 train loss:3.709191 +step:1732 train loss:3.727098 +step:1733 train loss:3.708539 +step:1734 train loss:3.770877 +step:1735 train loss:3.823407 +step:1736 train loss:3.740936 +step:1737 train loss:3.762416 +step:1738 train loss:3.722703 +step:1739 train loss:3.794014 +step:1740 train loss:3.781083 +step:1741 train loss:3.842320 +step:1742 train loss:3.817746 +step:1743 train loss:3.714754 +step:1744 train loss:3.724676 +step:1745 train loss:3.720063 +step:1746 train loss:3.701254 +step:1747 train loss:3.738657 +step:1748 train loss:3.673229 +step:1749 train loss:3.721096 +step:1750 validation loss:3.686637 +step:1750 train loss:3.753366 +step:1751 train loss:3.764333 +step:1752 train loss:3.742712 +step:1753 train loss:3.760848 +step:1754 train loss:3.754210 +step:1755 train loss:3.750348 +step:1756 train loss:3.769876 +step:1757 train loss:3.777212 +step:1758 train loss:3.689621 +step:1759 train loss:3.782583 +step:1760 train loss:3.734969 +step:1761 train loss:3.716970 +step:1762 train loss:3.708372 +step:1763 train loss:3.711735 +step:1764 train loss:4.014678 +step:1765 train loss:3.717822 +step:1766 train loss:3.812547 +step:1767 train loss:3.719657 +step:1768 train loss:3.705604 +step:1769 train loss:3.731909 +step:1770 train loss:3.737848 +step:1771 train loss:3.712118 +step:1772 train loss:3.822904 +step:1773 train loss:3.744001 +step:1774 train loss:3.749297 +step:1775 train loss:3.864592 +step:1776 train loss:3.738312 +step:1777 train loss:3.727078 +step:1778 train loss:3.780515 +step:1779 train loss:3.726072 +step:1780 train loss:3.772319 +step:1781 train loss:3.772323 +step:1782 train loss:3.808821 +step:1783 train loss:3.731010 +step:1784 train loss:3.826339 +step:1785 train loss:3.727150 +step:1786 train loss:3.724063 +step:1787 train loss:3.726018 +step:1788 train loss:3.750063 +step:1789 train loss:3.707222 +step:1790 train loss:3.715684 +step:1791 train loss:3.800435 +step:1792 train loss:3.797971 +step:1793 train loss:3.716205 +step:1794 train loss:3.762422 +step:1795 train loss:3.721823 +step:1796 train loss:3.711441 +step:1797 train loss:3.768263 +step:1798 train loss:3.712406 +step:1799 train loss:3.759268 +step:1800 train loss:3.792497 +step:1801 train loss:3.775931 +step:1802 train loss:3.781576 +step:1803 train loss:3.773087 +step:1804 train loss:3.770833 +step:1805 train loss:3.761953 +step:1806 train loss:3.768512 +step:1807 train loss:3.697438 +step:1808 train loss:3.758742 +step:1809 train loss:3.749120 +step:1810 train loss:3.736454 +step:1811 train loss:3.753633 +step:1812 train loss:3.736900 +step:1813 train loss:3.748459 +step:1814 train loss:3.817596 +step:1815 train loss:3.753098 +step:1816 train loss:3.710846 +step:1817 train loss:3.701231 +step:1818 train loss:3.753766 +step:1819 train loss:3.730530 +step:1820 train loss:3.756274 +step:1821 train loss:3.727464 +step:1822 train loss:3.704087 +step:1823 train loss:3.698107 +step:1824 train loss:3.772599 +step:1825 train loss:3.689393 +step:1826 train loss:3.732770 +step:1827 train loss:3.697174 
+step:1828 train loss:3.749955 +step:1829 train loss:3.715950 +step:1830 train loss:3.920046 +step:1831 train loss:3.662307 +step:1832 train loss:3.721771 +step:1833 train loss:3.756218 +step:1834 train loss:3.713840 +step:1835 train loss:3.711331 +step:1836 train loss:3.752874 +step:1837 train loss:3.682595 +step:1838 train loss:3.772760 +step:1839 train loss:3.764955 +step:1840 train loss:3.727566 +step:1841 train loss:3.755438 +step:1842 train loss:3.724738 +step:1843 train loss:3.674798 +step:1844 train loss:3.744985 +step:1845 train loss:3.707684 +step:1846 train loss:3.771549 +step:1847 train loss:3.813096 +step:1848 train loss:3.619143 +step:1849 train loss:3.704332 +step:1850 train loss:3.681738 +step:1851 train loss:3.724383 +step:1852 train loss:3.717289 +step:1853 train loss:3.767400 +step:1854 train loss:3.727324 +step:1855 train loss:3.719884 +step:1856 train loss:3.717243 +step:1857 train loss:3.726606 +step:1858 train loss:3.773647 +step:1859 train loss:3.719142 +step:1860 train loss:3.696289 +step:1861 train loss:3.706276 +step:1862 train loss:3.753425 +step:1863 train loss:3.785440 +step:1864 train loss:3.687249 +step:1865 train loss:3.705092 +step:1866 train loss:3.707722 +step:1867 train loss:3.747597 +step:1868 train loss:3.794425 +step:1869 train loss:3.711598 +step:1870 train loss:3.739163 +step:1871 train loss:3.677928 +step:1872 train loss:3.750916 +step:1873 train loss:3.807307 +step:1874 train loss:3.675527 +step:1875 train loss:3.746350 +step:1876 train loss:3.713737 +step:1877 train loss:3.756660 +step:1878 train loss:3.676357 +step:1879 train loss:3.743173 +step:1880 train loss:3.816765 +step:1881 train loss:3.747805 +step:1882 train loss:3.762134 +step:1883 train loss:3.786996 +step:1884 train loss:3.801815 +step:1885 train loss:3.747162 +step:1886 train loss:3.682118 +step:1887 train loss:3.696473 +step:1888 train loss:3.698731 +step:1889 train loss:3.724423 +step:1890 train loss:3.716013 +step:1891 train loss:3.658800 +step:1892 train loss:3.744354 +step:1893 train loss:3.670759 +step:1894 train loss:3.690480 +step:1895 train loss:3.724904 +step:1896 train loss:3.774084 +step:1897 train loss:3.663340 +step:1898 train loss:3.716648 +step:1899 train loss:3.732270 +step:1900 train loss:3.680787 +step:1901 train loss:3.769418 +step:1902 train loss:3.753482 +step:1903 train loss:3.698677 +step:1904 train loss:3.682415 +step:1905 train loss:3.678673 +step:1906 train loss:3.745742 +step:1907 train loss:3.683748 +step:1908 train loss:3.704979 +step:1909 train loss:3.799460 +step:1910 train loss:3.684784 +step:1911 train loss:3.695089 +step:1912 train loss:3.748137 +step:1913 train loss:3.683727 +step:1914 train loss:3.723335 +step:1915 train loss:3.681523 +step:1916 train loss:3.737450 +step:1917 train loss:3.718732 +step:1918 train loss:3.628199 +step:1919 train loss:3.782161 +step:1920 train loss:3.886526 +step:1921 train loss:3.670372 +step:1922 train loss:3.651946 +step:1923 train loss:3.741591 +step:1924 train loss:3.781253 +step:1925 train loss:3.722973 +step:1926 train loss:3.665895 +step:1927 train loss:3.748310 +step:1928 train loss:3.664214 +step:1929 train loss:3.690179 +step:1930 train loss:3.761403 +step:1931 train loss:3.673335 +step:1932 train loss:3.722905 +step:1933 train loss:3.720745 +step:1934 train loss:3.796823 +step:1935 train loss:3.748840 +step:1936 train loss:3.718215 +step:1937 train loss:3.653401 +step:1938 train loss:4.029174 +step:1939 train loss:3.761100 +step:1940 train loss:3.737234 +step:1941 train loss:3.745973 +step:1942 train 
loss:3.732170 +step:1943 train loss:3.729666 +step:1944 train loss:3.689056 +step:1945 train loss:3.693676 +step:1946 train loss:3.721369 +step:1947 train loss:3.738003 +step:1948 train loss:3.654141 +step:1949 train loss:3.759214 +step:1950 train loss:3.695895 +step:1951 train loss:3.721865 +step:1952 train loss:3.744537 +step:1953 train loss:3.674102 +step:1954 train loss:3.713622 +step:1955 train loss:3.665263 +step:1956 train loss:3.747143 +step:1957 train loss:3.771182 +step:1958 train loss:3.789425 +step:1959 train loss:3.657834 +step:1960 train loss:3.699342 +step:1961 train loss:3.734492 +step:1962 train loss:3.724857 +step:1963 train loss:3.699998 +step:1964 train loss:3.741517 +step:1965 train loss:3.771090 +step:1966 train loss:3.683991 +step:1967 train loss:3.742481 +step:1968 train loss:3.677039 +step:1969 train loss:3.698023 +step:1970 train loss:3.758111 +step:1971 train loss:3.659783 +step:1972 train loss:3.774588 +step:1973 train loss:3.668716 +step:1974 train loss:3.714382 +step:1975 train loss:3.670020 +step:1976 train loss:3.700293 +step:1977 train loss:3.739819 +step:1978 train loss:3.681388 +step:1979 train loss:3.663548 +step:1980 train loss:3.702865 +step:1981 train loss:3.684499 +step:1982 train loss:3.760937 +step:1983 train loss:3.708886 +step:1984 train loss:3.749162 +step:1985 train loss:3.732991 +step:1986 train loss:3.727974 +step:1987 train loss:3.679137 +step:1988 train loss:3.706927 +step:1989 train loss:3.855026 +step:1990 train loss:3.679658 +step:1991 train loss:3.678667 +step:1992 train loss:3.684838 +step:1993 train loss:3.716478 +step:1994 train loss:3.715942 +step:1995 train loss:3.660126 +step:1996 train loss:3.717894 +step:1997 train loss:3.723466 +step:1998 train loss:3.669908 +step:1999 train loss:3.792323 +step:2000 validation loss:3.650192 total_sharp:2.0229e-03 L1_sharp:9.5610e-03 L2_sharp:6.0676e-04 L3_sharp:4.7589e-04 L4_sharp:9.9013e-05 L5_sharp:9.5707e-05 L6_sharp:1.5508e-04 L7_sharp:1.8420e-04 L8_sharp:1.7516e-04 L9_sharp:1.3812e-04 L10_sharp:8.8281e-05 L11_sharp:8.4102e-05 L12_sharp:7.9792e-05 total_fnorm:4.2657e+00 total_l1_linf:3.4874e+04 total_spectral:4.2657e+00 L1_fnorm:1.1302e+00 L2_fnorm:1.0204e+00 L3_fnorm:1.0707e+00 L4_fnorm:1.1536e+00 L5_fnorm:1.1944e+00 L6_fnorm:1.2023e+00 L7_fnorm:1.2039e+00 L8_fnorm:1.2091e+00 L9_fnorm:1.2089e+00 L10_fnorm:1.2097e+00 L11_fnorm:1.2068e+00 L12_fnorm:1.2064e+00 L1_l1linf:8.8410e-01 L2_l1linf:7.9550e-01 L3_l1linf:8.1308e-01 L4_l1linf:8.4538e-01 L5_l1linf:8.4783e-01 L6_l1linf:8.3286e-01 L7_l1linf:8.3319e-01 L8_l1linf:8.2380e-01 L9_l1linf:8.1878e-01 L10_l1linf:8.1201e-01 L11_l1linf:8.0538e-01 L12_l1linf:7.9135e-01 L1_spectral:2.4093e-02 L2_spectral:2.4081e-02 L3_spectral:2.4083e-02 L4_spectral:2.4098e-02 L5_spectral:2.4085e-02 L6_spectral:2.4090e-02 L7_spectral:2.4090e-02 L8_spectral:2.4090e-02 L9_spectral:2.4085e-02 L10_spectral:2.4088e-02 L11_spectral:2.4092e-02 L12_spectral:2.4087e-02 ip_v_neg_g:1.8922e-02 cos_v_neg_g:3.0311e-03 v_norm:4.2657e+00 g_norm:1.4634e+00 hv_norm:1.4224e+00 cos_v_hv:6.0666e-03 hg_norm:9.6796e+01 cos_g_hg:7.1308e-01 v_par:4.0972e-03 v_perp:4.2657e+00 L1_cos_v_neg_g:6.2592e-03 L1_v_norm:1.1302e+00 L2_cos_v_neg_g:1.0486e-02 L2_v_norm:1.0204e+00 L3_cos_v_neg_g:9.9604e-03 L3_v_norm:1.0707e+00 L4_cos_v_neg_g:5.7109e-03 L4_v_norm:1.1536e+00 L5_cos_v_neg_g:5.8299e-03 L5_v_norm:1.1944e+00 L6_cos_v_neg_g:5.8398e-03 L6_v_norm:1.2023e+00 L7_cos_v_neg_g:6.5820e-03 L7_v_norm:1.2039e+00 L8_cos_v_neg_g:7.6735e-03 L8_v_norm:1.2091e+00 L9_cos_v_neg_g:6.4060e-03 L9_v_norm:1.2089e+00 
L10_cos_v_neg_g:4.9043e-03 L10_v_norm:1.2097e+00 L11_cos_v_neg_g:2.6556e-03 L11_v_norm:1.2068e+00 L12_cos_v_neg_g:1.4405e-03 L12_v_norm:1.2064e+00 +step:2000 train loss:3.744269 +step:2001 train loss:3.680766 +step:2002 train loss:3.775453 +step:2003 train loss:3.823271 +step:2004 train loss:3.695001 +step:2005 train loss:3.791661 +step:2006 train loss:3.675702 +step:2007 train loss:3.753327 +step:2008 train loss:3.699392 +step:2009 train loss:3.696924 +step:2010 train loss:3.826546 +step:2011 train loss:3.678349 +step:2012 train loss:3.709120 +step:2013 train loss:3.716534 +step:2014 train loss:3.625604 +step:2015 train loss:3.732408 +step:2016 train loss:3.710904 +step:2017 train loss:3.720111 +step:2018 train loss:3.680045 +step:2019 train loss:3.714314 +step:2020 train loss:3.721313 +step:2021 train loss:3.679883 +step:2022 train loss:3.729378 +step:2023 train loss:3.703007 +step:2024 train loss:3.758150 +step:2025 train loss:3.693656 +step:2026 train loss:3.673419 +step:2027 train loss:3.704147 +step:2028 train loss:3.634309 +step:2029 train loss:3.669294 +step:2030 train loss:3.672005 +step:2031 train loss:3.632629 +step:2032 train loss:3.687071 +step:2033 train loss:3.680493 +step:2034 train loss:3.685163 +step:2035 train loss:3.717927 +step:2036 train loss:3.712047 +step:2037 train loss:3.697063 +step:2038 train loss:3.692819 +step:2039 train loss:3.694533 +step:2040 train loss:3.716560 +step:2041 train loss:3.716613 +step:2042 train loss:3.650387 +step:2043 train loss:3.807362 +step:2044 train loss:3.673712 +step:2045 train loss:3.688399 +step:2046 train loss:3.699460 +step:2047 train loss:3.678803 +step:2048 train loss:3.716589 +step:2049 train loss:3.676918 +step:2050 train loss:3.698491 +step:2051 train loss:3.657534 +step:2052 train loss:3.716370 +step:2053 train loss:3.707264 +step:2054 train loss:3.689672 +step:2055 train loss:3.681444 +step:2056 train loss:3.725624 +step:2057 train loss:3.733916 +step:2058 train loss:3.696688 +step:2059 train loss:3.776959 +step:2060 train loss:3.721488 +step:2061 train loss:3.684078 +step:2062 train loss:3.710566 +step:2063 train loss:3.607149 +step:2064 train loss:3.731912 +step:2065 train loss:3.735825 +step:2066 train loss:3.593964 +step:2067 train loss:3.646715 +step:2068 train loss:3.748853 +step:2069 train loss:3.690352 +step:2070 train loss:3.693478 +step:2071 train loss:3.730422 +step:2072 train loss:3.663034 +step:2073 train loss:3.711070 +step:2074 train loss:3.693188 +step:2075 train loss:3.775372 +step:2076 train loss:3.709126 +step:2077 train loss:3.729666 +step:2078 train loss:3.687512 +step:2079 train loss:3.838082 +step:2080 train loss:3.658710 +step:2081 train loss:3.762927 +step:2082 train loss:3.695073 +step:2083 train loss:3.686213 +step:2084 train loss:3.662228 +step:2085 train loss:3.706824 +step:2086 train loss:3.718521 +step:2087 train loss:3.761996 +step:2088 train loss:3.627347 +step:2089 train loss:3.653135 +step:2090 train loss:3.688280 +step:2091 train loss:3.709219 +step:2092 train loss:3.690151 +step:2093 train loss:3.681465 +step:2094 train loss:3.719234 +step:2095 train loss:3.660540 +step:2096 train loss:3.653421 +step:2097 train loss:3.688042 +step:2098 train loss:3.686980 +step:2099 train loss:3.674417 +step:2100 train loss:3.729566 +step:2101 train loss:3.728100 +step:2102 train loss:3.693466 +step:2103 train loss:3.706172 +step:2104 train loss:3.691537 +step:2105 train loss:3.694221 +step:2106 train loss:3.689815 +step:2107 train loss:3.755260 +step:2108 train loss:3.673395 +step:2109 train 
loss:3.636907 +step:2110 train loss:3.729189 +step:2111 train loss:3.674602 +step:2112 train loss:3.742407 +step:2113 train loss:3.673795 +step:2114 train loss:3.681646 +step:2115 train loss:3.735489 +step:2116 train loss:3.672484 +step:2117 train loss:3.691402 +step:2118 train loss:3.671473 +step:2119 train loss:3.604831 +step:2120 train loss:3.690982 +step:2121 train loss:3.686089 +step:2122 train loss:3.697793 +step:2123 train loss:3.749515 +step:2124 train loss:3.760971 +step:2125 train loss:3.659654 +step:2126 train loss:3.665394 +step:2127 train loss:3.656243 +step:2128 train loss:3.654956 +step:2129 train loss:3.680435 +step:2130 train loss:3.684472 +step:2131 train loss:3.705501 +step:2132 train loss:3.635531 +step:2133 train loss:3.747561 +step:2134 train loss:3.697414 +step:2135 train loss:3.654117 +step:2136 train loss:3.745402 +step:2137 train loss:3.710222 +step:2138 train loss:3.670213 +step:2139 train loss:3.668935 +step:2140 train loss:3.668380 +step:2141 train loss:3.723675 +step:2142 train loss:3.690952 +step:2143 train loss:3.611346 +step:2144 train loss:3.719545 +step:2145 train loss:3.692974 +step:2146 train loss:3.724194 +step:2147 train loss:3.837349 +step:2148 train loss:3.639249 +step:2149 train loss:3.645722 +step:2150 train loss:3.668423 +step:2151 train loss:3.704892 +step:2152 train loss:3.703643 +step:2153 train loss:3.738331 +step:2154 train loss:3.658703 +step:2155 train loss:3.739239 +step:2156 train loss:3.667944 +step:2157 train loss:3.739836 +step:2158 train loss:3.783694 +step:2159 train loss:3.711914 +step:2160 train loss:3.774791 +step:2161 train loss:3.677475 +step:2162 train loss:3.678697 +step:2163 train loss:3.656510 +step:2164 train loss:3.677848 +step:2165 train loss:3.661954 +step:2166 train loss:3.772629 +step:2167 train loss:3.683764 +step:2168 train loss:3.693032 +step:2169 train loss:3.645580 +step:2170 train loss:3.796172 +step:2171 train loss:3.749981 +step:2172 train loss:3.682654 +step:2173 train loss:3.680380 +step:2174 train loss:3.739901 +step:2175 train loss:3.669493 +step:2176 train loss:3.755154 +step:2177 train loss:3.721154 +step:2178 train loss:3.651660 +step:2179 train loss:3.719181 +step:2180 train loss:3.738131 +step:2181 train loss:3.662396 +step:2182 train loss:3.710726 +step:2183 train loss:3.705535 +step:2184 train loss:3.657887 +step:2185 train loss:3.638454 +step:2186 train loss:3.678128 +step:2187 train loss:3.691845 +step:2188 train loss:3.735301 +step:2189 train loss:3.628918 +step:2190 train loss:3.673214 +step:2191 train loss:3.727634 +step:2192 train loss:3.657378 +step:2193 train loss:3.628484 +step:2194 train loss:3.631758 +step:2195 train loss:3.658795 +step:2196 train loss:3.665048 +step:2197 train loss:3.649819 +step:2198 train loss:3.676569 +step:2199 train loss:3.736775 +step:2200 train loss:3.672146 +step:2201 train loss:3.682542 +step:2202 train loss:3.640026 +step:2203 train loss:3.659639 +step:2204 train loss:3.692686 +step:2205 train loss:3.677238 +step:2206 train loss:3.675150 +step:2207 train loss:3.674480 +step:2208 train loss:3.647870 +step:2209 train loss:3.932364 +step:2210 train loss:3.700740 +step:2211 train loss:3.692641 +step:2212 train loss:3.670565 +step:2213 train loss:3.746446 +step:2214 train loss:3.741565 +step:2215 train loss:3.664242 +step:2216 train loss:3.632067 +step:2217 train loss:3.665556 +step:2218 train loss:3.663191 +step:2219 train loss:3.697634 +step:2220 train loss:3.638379 +step:2221 train loss:3.673523 +step:2222 train loss:3.689694 +step:2223 train loss:3.729957 
+step:2224 train loss:3.697156 +step:2225 train loss:3.644029 +step:2226 train loss:3.711718 +step:2227 train loss:3.710077 +step:2228 train loss:3.706245 +step:2229 train loss:3.648012 +step:2230 train loss:3.775073 +step:2231 train loss:3.690005 +step:2232 train loss:3.687006 +step:2233 train loss:3.729600 +step:2234 train loss:3.625072 +step:2235 train loss:3.715058 +step:2236 train loss:3.656709 +step:2237 train loss:3.793287 +step:2238 train loss:3.587847 +step:2239 train loss:3.675113 +step:2240 train loss:3.684270 +step:2241 train loss:3.599081 +step:2242 train loss:3.747242 +step:2243 train loss:3.776558 +step:2244 train loss:3.653767 +step:2245 train loss:3.661866 +step:2246 train loss:3.622472 +step:2247 train loss:3.632992 +step:2248 train loss:3.687769 +step:2249 train loss:3.661234 +step:2250 validation loss:3.616562 +step:2250 train loss:3.688024 +step:2251 train loss:3.642579 +step:2252 train loss:3.643800 +step:2253 train loss:3.675044 +step:2254 train loss:3.682157 +step:2255 train loss:3.635840 +step:2256 train loss:3.687261 +step:2257 train loss:3.670606 +step:2258 train loss:3.668574 +step:2259 train loss:3.688019 +step:2260 train loss:3.631845 +step:2261 train loss:3.712870 +step:2262 train loss:3.736267 +step:2263 train loss:3.687187 +step:2264 train loss:3.804903 +step:2265 train loss:3.649522 +step:2266 train loss:3.693365 +step:2267 train loss:3.651742 +step:2268 train loss:3.658096 +step:2269 train loss:3.658131 +step:2270 train loss:3.654501 +step:2271 train loss:3.668862 +step:2272 train loss:3.701699 +step:2273 train loss:3.622989 +step:2274 train loss:3.653942 +step:2275 train loss:3.606776 +step:2276 train loss:3.685686 +step:2277 train loss:3.701262 +step:2278 train loss:3.677211 +step:2279 train loss:3.662973 +step:2280 train loss:3.568283 +step:2281 train loss:3.716809 +step:2282 train loss:3.645963 +step:2283 train loss:3.630337 +step:2284 train loss:3.642370 +step:2285 train loss:3.702901 +step:2286 train loss:3.664030 +step:2287 train loss:3.694635 +step:2288 train loss:3.670721 +step:2289 train loss:3.671293 +step:2290 train loss:3.675329 +step:2291 train loss:3.658809 +step:2292 train loss:3.706362 +step:2293 train loss:3.679899 +step:2294 train loss:3.677717 +step:2295 train loss:3.733456 +step:2296 train loss:3.665788 +step:2297 train loss:3.639592 +step:2298 train loss:3.700218 +step:2299 train loss:3.674037 +step:2300 train loss:3.588915 +step:2301 train loss:3.688658 +step:2302 train loss:3.697643 +step:2303 train loss:3.674694 +step:2304 train loss:3.662808 +step:2305 train loss:3.701654 +step:2306 train loss:3.692060 +step:2307 train loss:3.674584 +step:2308 train loss:3.690163 +step:2309 train loss:3.645880 +step:2310 train loss:3.634308 +step:2311 train loss:3.619176 +step:2312 train loss:3.689557 +step:2313 train loss:3.604028 +step:2314 train loss:3.678084 +step:2315 train loss:3.690947 +step:2316 train loss:3.733378 +step:2317 train loss:3.596052 +step:2318 train loss:3.646614 +step:2319 train loss:3.698168 +step:2320 train loss:3.664661 +step:2321 train loss:3.634089 +step:2322 train loss:3.656064 +step:2323 train loss:3.650153 +step:2324 train loss:3.684285 +step:2325 train loss:3.613222 +step:2326 train loss:3.651911 +step:2327 train loss:3.764789 +step:2328 train loss:3.707313 +step:2329 train loss:3.662775 +step:2330 train loss:3.630127 +step:2331 train loss:3.662800 +step:2332 train loss:3.592198 +step:2333 train loss:3.649166 +step:2334 train loss:3.633109 +step:2335 train loss:3.619972 +step:2336 train loss:3.868877 +step:2337 
train loss:3.640697 +step:2338 train loss:3.688118 +step:2339 train loss:3.678555 +step:2340 train loss:3.698084 +step:2341 train loss:3.688650 +step:2342 train loss:3.639459 +step:2343 train loss:3.661802 +step:2344 train loss:3.708990 +step:2345 train loss:3.657158 +step:2346 train loss:3.685857 +step:2347 train loss:3.613486 +step:2348 train loss:3.668229 +step:2349 train loss:3.622667 +step:2350 train loss:3.679838 +step:2351 train loss:3.684459 +step:2352 train loss:3.687407 +step:2353 train loss:3.653609 +step:2354 train loss:3.696259 +step:2355 train loss:3.684422 +step:2356 train loss:3.723073 +step:2357 train loss:3.627693 +step:2358 train loss:3.644035 +step:2359 train loss:3.666734 +step:2360 train loss:3.692022 +step:2361 train loss:3.721229 +step:2362 train loss:3.557643 +step:2363 train loss:3.749079 +step:2364 train loss:3.696530 +step:2365 train loss:3.665208 +step:2366 train loss:3.611392 +step:2367 train loss:3.682964 +step:2368 train loss:3.672580 +step:2369 train loss:3.670393 +step:2370 train loss:3.676398 +step:2371 train loss:3.734144 +step:2372 train loss:3.589782 +step:2373 train loss:3.728392 +step:2374 train loss:3.709883 +step:2375 train loss:3.693214 +step:2376 train loss:3.683626 +step:2377 train loss:3.628241 +step:2378 train loss:3.676865 +step:2379 train loss:3.662426 +step:2380 train loss:3.721834 +step:2381 train loss:3.820979 +step:2382 train loss:3.604190 +step:2383 train loss:3.649456 +step:2384 train loss:3.683101 +step:2385 train loss:3.586742 +step:2386 train loss:3.740200 +step:2387 train loss:3.616381 +step:2388 train loss:3.672305 +step:2389 train loss:3.691692 +step:2390 train loss:3.643098 +step:2391 train loss:3.670971 +step:2392 train loss:3.695021 +step:2393 train loss:3.644727 +step:2394 train loss:3.671175 +step:2395 train loss:3.665072 +step:2396 train loss:3.666726 +step:2397 train loss:3.642756 +step:2398 train loss:3.703261 +step:2399 train loss:3.665635 +step:2400 train loss:3.641618 +step:2401 train loss:3.686259 +step:2402 train loss:3.635955 +step:2403 train loss:3.684976 +step:2404 train loss:3.643761 +step:2405 train loss:3.647272 +step:2406 train loss:3.669463 +step:2407 train loss:3.615834 +step:2408 train loss:3.660214 +step:2409 train loss:3.652509 +step:2410 train loss:3.645275 +step:2411 train loss:3.721944 +step:2412 train loss:3.704420 +step:2413 train loss:3.752389 +step:2414 train loss:3.637290 +step:2415 train loss:3.628923 +step:2416 train loss:3.643722 +step:2417 train loss:3.684627 +step:2418 train loss:3.698844 +step:2419 train loss:3.631161 +step:2420 train loss:3.648740 +step:2421 train loss:3.678227 +step:2422 train loss:3.727539 +step:2423 train loss:3.665253 +step:2424 train loss:3.629611 +step:2425 train loss:3.691107 +step:2426 train loss:3.631228 +step:2427 train loss:3.649164 +step:2428 train loss:3.737160 +step:2429 train loss:3.685629 +step:2430 train loss:3.780431 +step:2431 train loss:3.689111 +step:2432 train loss:3.661192 +step:2433 train loss:3.633251 +step:2434 train loss:3.623605 +step:2435 train loss:3.684244 +step:2436 train loss:3.635847 +step:2437 train loss:3.666734 +step:2438 train loss:3.714932 +step:2439 train loss:3.697757 +step:2440 train loss:3.638524 +step:2441 train loss:3.675947 +step:2442 train loss:3.670079 +step:2443 train loss:3.629209 +step:2444 train loss:3.670565 +step:2445 train loss:3.666303 +step:2446 train loss:3.636073 +step:2447 train loss:3.615687 +step:2448 train loss:3.671748 +step:2449 train loss:3.698025 +step:2450 train loss:3.655529 +step:2451 train loss:3.586809 
+step:2452 train loss:3.682324 +step:2453 train loss:3.648534 +step:2454 train loss:3.649072 +step:2455 train loss:3.697179 +step:2456 train loss:3.653504 +step:2457 train loss:3.715638 +step:2458 train loss:3.688972 +step:2459 train loss:3.661967 +step:2460 train loss:3.676042 +step:2461 train loss:3.696933 +step:2462 train loss:3.671037 +step:2463 train loss:3.648832 +step:2464 train loss:3.659009 +step:2465 train loss:3.743435 +step:2466 train loss:3.826405 +step:2467 train loss:3.729620 +step:2468 train loss:3.618863 +step:2469 train loss:3.698774 +step:2470 train loss:3.740000 +step:2471 train loss:3.735727 +step:2472 train loss:3.715440 +step:2473 train loss:3.660220 +step:2474 train loss:3.617512 +step:2475 train loss:3.673640 +step:2476 train loss:3.747566 +step:2477 train loss:3.665465 +step:2478 train loss:3.619151 +step:2479 train loss:3.662345 +step:2480 train loss:3.655377 +step:2481 train loss:3.855314 +step:2482 train loss:3.653899 +step:2483 train loss:3.681206 +step:2484 train loss:3.637988 +step:2485 train loss:3.631355 +step:2486 train loss:3.657173 +step:2487 train loss:3.696360 +step:2488 train loss:3.606241 +step:2489 train loss:3.716575 +step:2490 train loss:3.638368 +step:2491 train loss:3.652714 +step:2492 train loss:3.695874 +step:2493 train loss:3.732849 +step:2494 train loss:3.649479 +step:2495 train loss:3.685405 +step:2496 train loss:3.658041 +step:2497 train loss:3.675203 +step:2498 train loss:3.685727 +step:2499 train loss:3.677410 +step:2500 validation loss:3.592500 total_sharp:1.9340e-03 L1_sharp:5.4857e-03 L2_sharp:3.2394e-04 L3_sharp:4.9602e-04 L4_sharp:1.1031e-04 L5_sharp:1.2246e-04 L6_sharp:2.1891e-04 L7_sharp:3.0019e-04 L8_sharp:1.9957e-04 L9_sharp:1.3476e-04 L10_sharp:9.5120e-05 L11_sharp:7.8445e-05 L12_sharp:7.3710e-05 total_fnorm:4.2400e+00 total_l1_linf:3.4626e+04 total_spectral:4.2400e+00 L1_fnorm:1.1090e+00 L2_fnorm:9.9722e-01 L3_fnorm:1.0503e+00 L4_fnorm:1.1528e+00 L5_fnorm:1.1916e+00 L6_fnorm:1.2003e+00 L7_fnorm:1.1980e+00 L8_fnorm:1.2044e+00 L9_fnorm:1.2065e+00 L10_fnorm:1.2079e+00 L11_fnorm:1.2044e+00 L12_fnorm:1.2068e+00 L1_l1linf:8.9139e-01 L2_l1linf:7.9220e-01 L3_l1linf:8.0185e-01 L4_l1linf:8.3896e-01 L5_l1linf:8.4312e-01 L6_l1linf:8.3754e-01 L7_l1linf:8.2665e-01 L8_l1linf:8.2227e-01 L9_l1linf:8.1819e-01 L10_l1linf:8.1264e-01 L11_l1linf:8.0524e-01 L12_l1linf:7.9135e-01 L1_spectral:2.4096e-02 L2_spectral:2.4077e-02 L3_spectral:2.4087e-02 L4_spectral:2.4098e-02 L5_spectral:2.4096e-02 L6_spectral:2.4089e-02 L7_spectral:2.4088e-02 L8_spectral:2.4086e-02 L9_spectral:2.4092e-02 L10_spectral:2.4097e-02 L11_spectral:2.4096e-02 L12_spectral:2.4091e-02 ip_v_neg_g:2.0192e-02 cos_v_neg_g:3.7916e-03 v_norm:4.2400e+00 g_norm:1.2560e+00 hv_norm:1.1375e+00 cos_v_hv:7.2088e-03 hg_norm:4.9311e+01 cos_g_hg:7.0213e-01 v_par:4.1764e-03 v_perp:4.2400e+00 L1_cos_v_neg_g:7.0325e-03 L1_v_norm:1.1090e+00 L2_cos_v_neg_g:8.9486e-03 L2_v_norm:9.9722e-01 L3_cos_v_neg_g:1.1234e-02 L3_v_norm:1.0503e+00 L4_cos_v_neg_g:8.2188e-03 L4_v_norm:1.1528e+00 L5_cos_v_neg_g:7.5813e-03 L5_v_norm:1.1916e+00 L6_cos_v_neg_g:8.3157e-03 L6_v_norm:1.2003e+00 L7_cos_v_neg_g:9.6056e-03 L7_v_norm:1.1980e+00 L8_cos_v_neg_g:8.5308e-03 L8_v_norm:1.2044e+00 L9_cos_v_neg_g:7.2676e-03 L9_v_norm:1.2065e+00 L10_cos_v_neg_g:5.2716e-03 L10_v_norm:1.2079e+00 L11_cos_v_neg_g:3.3874e-03 L11_v_norm:1.2044e+00 L12_cos_v_neg_g:2.4502e-03 L12_v_norm:1.2068e+00 +step:2500 train loss:3.622671 +step:2501 train loss:3.692261 +step:2502 train loss:3.678833 +step:2503 train loss:3.600838 +step:2504 train 
loss:3.640771 +step:2505 train loss:3.657601 +step:2506 train loss:3.623820 +step:2507 train loss:3.654240 +step:2508 train loss:3.600766 +step:2509 train loss:3.625731 +step:2510 train loss:3.616712 +step:2511 train loss:3.661698 +step:2512 train loss:3.708490 +step:2513 train loss:3.658351 +step:2514 train loss:3.642781 +step:2515 train loss:3.798994 +step:2516 train loss:3.654701 +step:2517 train loss:3.726261 +step:2518 train loss:3.689587 +step:2519 train loss:3.664217 +step:2520 train loss:3.673016 +step:2521 train loss:3.648342 +step:2522 train loss:3.681074 +step:2523 train loss:3.600430 +step:2524 train loss:3.656631 +step:2525 train loss:3.648884 +step:2526 train loss:3.703680 +step:2527 train loss:3.696188 +step:2528 train loss:3.680256 +step:2529 train loss:3.693473 +step:2530 train loss:3.675737 +step:2531 train loss:3.608321 +step:2532 train loss:3.714927 +step:2533 train loss:3.611427 +step:2534 train loss:3.702529 +step:2535 train loss:3.655001 +step:2536 train loss:3.579155 +step:2537 train loss:3.692810 +step:2538 train loss:3.670496 +step:2539 train loss:3.692746 +step:2540 train loss:3.622516 +step:2541 train loss:3.659942 +step:2542 train loss:3.665879 +step:2543 train loss:3.654121 +step:2544 train loss:3.644242 +step:2545 train loss:3.627990 +step:2546 train loss:3.595476 +step:2547 train loss:3.643347 +step:2548 train loss:3.667219 +step:2549 train loss:3.668394 +step:2550 train loss:3.800266 +step:2551 train loss:3.874851 +step:2552 train loss:3.609282 +step:2553 train loss:3.637254 +step:2554 train loss:3.786319 +step:2555 train loss:3.669985 +step:2556 train loss:3.593433 +step:2557 train loss:3.691272 +step:2558 train loss:3.685492 +step:2559 train loss:3.639629 +step:2560 train loss:3.631410 +step:2561 train loss:3.717303 +step:2562 train loss:3.672045 +step:2563 train loss:3.612367 +step:2564 train loss:3.674834 +step:2565 train loss:3.656291 +step:2566 train loss:3.636536 +step:2567 train loss:3.617792 +step:2568 train loss:3.664720 +step:2569 train loss:3.681947 +step:2570 train loss:3.627077 +step:2571 train loss:3.712597 +step:2572 train loss:3.671517 +step:2573 train loss:3.607050 +step:2574 train loss:3.660759 +step:2575 train loss:3.703687 +step:2576 train loss:3.652030 +step:2577 train loss:3.616940 +step:2578 train loss:3.654438 +step:2579 train loss:3.629722 +step:2580 train loss:3.606771 +step:2581 train loss:3.617900 +step:2582 train loss:3.627548 +step:2583 train loss:3.650636 +step:2584 train loss:3.669794 +step:2585 train loss:3.633566 +step:2586 train loss:3.653125 +step:2587 train loss:3.585800 +step:2588 train loss:3.619617 +step:2589 train loss:3.694865 +step:2590 train loss:3.616848 +step:2591 train loss:3.679954 +step:2592 train loss:3.724192 +step:2593 train loss:3.679386 +step:2594 train loss:3.642845 +step:2595 train loss:3.654020 +step:2596 train loss:3.694475 +step:2597 train loss:3.576302 +step:2598 train loss:3.733820 +step:2599 train loss:3.684476 +step:2600 train loss:3.713533 +step:2601 train loss:3.650000 +step:2602 train loss:3.678583 +step:2603 train loss:3.673896 +step:2604 train loss:3.595739 +step:2605 train loss:3.726694 +step:2606 train loss:3.672016 +step:2607 train loss:3.624810 +step:2608 train loss:3.605700 +step:2609 train loss:3.628144 +step:2610 train loss:3.651291 +step:2611 train loss:3.691941 +step:2612 train loss:3.653039 +step:2613 train loss:3.627803 +step:2614 train loss:3.620162 +step:2615 train loss:3.608444 +step:2616 train loss:3.688319 +step:2617 train loss:3.650043 +step:2618 train loss:3.616916 
+step:2619 train loss:3.630828 +step:2620 train loss:3.624379 +step:2621 train loss:3.636839 +step:2622 train loss:3.710722 +step:2623 train loss:3.584661 +step:2624 train loss:3.608402 +step:2625 train loss:3.672976 +step:2626 train loss:3.664299 +step:2627 train loss:3.643624 +step:2628 train loss:3.700752 +step:2629 train loss:3.642403 +step:2630 train loss:3.640456 +step:2631 train loss:3.667881 +step:2632 train loss:3.635984 +step:2633 train loss:3.618565 +step:2634 train loss:3.664658 +step:2635 train loss:3.651077 +step:2636 train loss:3.698380 +step:2637 train loss:3.653336 +step:2638 train loss:3.639250 +step:2639 train loss:3.688625 +step:2640 train loss:3.608829 +step:2641 train loss:3.665090 +step:2642 train loss:3.586104 +step:2643 train loss:3.584100 +step:2644 train loss:3.672861 +step:2645 train loss:3.616363 +step:2646 train loss:3.647227 +step:2647 train loss:3.667549 +step:2648 train loss:3.702082 +step:2649 train loss:3.615599 +step:2650 train loss:3.601710 +step:2651 train loss:3.644071 +step:2652 train loss:3.620503 +step:2653 train loss:3.685103 +step:2654 train loss:3.641221 +step:2655 train loss:3.628832 +step:2656 train loss:3.650342 +step:2657 train loss:3.675895 +step:2658 train loss:3.685816 +step:2659 train loss:3.663289 +step:2660 train loss:3.650254 +step:2661 train loss:3.699293 +step:2662 train loss:3.672603 +step:2663 train loss:3.649498 +step:2664 train loss:3.653426 +step:2665 train loss:3.609399 +step:2666 train loss:3.638905 +step:2667 train loss:3.646882 +step:2668 train loss:3.623867 +step:2669 train loss:3.631096 +step:2670 train loss:3.658840 +step:2671 train loss:3.630908 +step:2672 train loss:3.654181 +step:2673 train loss:3.583851 +step:2674 train loss:3.682818 +step:2675 train loss:3.653889 +step:2676 train loss:3.673528 +step:2677 train loss:3.648384 +step:2678 train loss:3.643190 +step:2679 train loss:3.623518 +step:2680 train loss:3.602936 +step:2681 train loss:3.576901 +step:2682 train loss:3.662912 +step:2683 train loss:3.639010 +step:2684 train loss:3.670206 +step:2685 train loss:3.588195 +step:2686 train loss:3.601403 +step:2687 train loss:3.678452 +step:2688 train loss:3.692052 +step:2689 train loss:3.593264 +step:2690 train loss:3.683803 +step:2691 train loss:3.648897 +step:2692 train loss:3.676184 +step:2693 train loss:3.729023 +step:2694 train loss:3.629219 +step:2695 train loss:3.648005 +step:2696 train loss:3.648740 +step:2697 train loss:3.642838 +step:2698 train loss:3.656793 +step:2699 train loss:3.671470 +step:2700 train loss:3.641802 +step:2701 train loss:3.713231 +step:2702 train loss:3.645645 +step:2703 train loss:3.602980 +step:2704 train loss:3.680392 +step:2705 train loss:3.665463 +step:2706 train loss:3.601779 +step:2707 train loss:3.566696 +step:2708 train loss:3.665092 +step:2709 train loss:3.639879 +step:2710 train loss:3.646649 +step:2711 train loss:3.614357 +step:2712 train loss:3.682621 +step:2713 train loss:3.677733 +step:2714 train loss:3.621857 +step:2715 train loss:3.619178 +step:2716 train loss:3.690337 +step:2717 train loss:3.650263 +step:2718 train loss:3.650021 +step:2719 train loss:3.647163 +step:2720 train loss:3.614130 +step:2721 train loss:3.693103 +step:2722 train loss:3.621729 +step:2723 train loss:3.611926 +step:2724 train loss:3.630493 +step:2725 train loss:3.629880 +step:2726 train loss:3.607629 +step:2727 train loss:3.665850 +step:2728 train loss:3.604035 +step:2729 train loss:3.733648 +step:2730 train loss:3.680368 +step:2731 train loss:3.714354 +step:2732 train loss:3.624614 +step:2733 train 
loss:3.618420 +step:2734 train loss:3.673610 +step:2735 train loss:3.666190 +step:2736 train loss:3.593118 +step:2737 train loss:3.646098 +step:2738 train loss:3.706893 +step:2739 train loss:3.622418 +step:2740 train loss:3.623112 +step:2741 train loss:3.611781 +step:2742 train loss:3.542198 +step:2743 train loss:3.639349 +step:2744 train loss:3.666215 +step:2745 train loss:3.619354 +step:2746 train loss:3.635135 +step:2747 train loss:3.622079 +step:2748 train loss:3.581958 +step:2749 train loss:3.649630 +step:2750 validation loss:3.566550 +step:2750 train loss:3.656555 +step:2751 train loss:3.679764 +step:2752 train loss:3.664785 +step:2753 train loss:3.653230 +step:2754 train loss:3.596891 +step:2755 train loss:3.660994 +step:2756 train loss:3.635586 +step:2757 train loss:3.625507 +step:2758 train loss:3.653881 +step:2759 train loss:3.663926 +step:2760 train loss:3.574479 +step:2761 train loss:3.588599 +step:2762 train loss:3.605423 +step:2763 train loss:3.621268 +step:2764 train loss:3.571801 +step:2765 train loss:3.619668 +step:2766 train loss:3.711708 +step:2767 train loss:3.583920 +step:2768 train loss:3.648511 +step:2769 train loss:3.618326 +step:2770 train loss:3.638447 +step:2771 train loss:3.661972 +step:2772 train loss:3.628425 +step:2773 train loss:3.631208 +step:2774 train loss:3.623552 +step:2775 train loss:3.635108 +step:2776 train loss:3.587206 +step:2777 train loss:3.623304 +step:2778 train loss:3.628925 +step:2779 train loss:3.659308 +step:2780 train loss:3.632146 +step:2781 train loss:3.616530 +step:2782 train loss:3.603675 +step:2783 train loss:3.632544 +step:2784 train loss:3.641169 +step:2785 train loss:3.718000 +step:2786 train loss:3.679129 +step:2787 train loss:3.638732 +step:2788 train loss:3.635727 +step:2789 train loss:3.632966 +step:2790 train loss:3.570286 +step:2791 train loss:3.667957 +step:2792 train loss:3.658833 +step:2793 train loss:3.622771 +step:2794 train loss:3.634800 +step:2795 train loss:3.648089 +step:2796 train loss:3.637260 +step:2797 train loss:3.687613 +step:2798 train loss:3.670025 +step:2799 train loss:3.582539 +step:2800 train loss:3.625458 +step:2801 train loss:3.661730 +step:2802 train loss:3.688276 +step:2803 train loss:3.663348 +step:2804 train loss:3.595287 +step:2805 train loss:3.637887 +step:2806 train loss:3.626703 +step:2807 train loss:3.657868 +step:2808 train loss:3.597103 +step:2809 train loss:3.671886 +step:2810 train loss:3.657922 +step:2811 train loss:3.646016 +step:2812 train loss:3.702634 +step:2813 train loss:3.662937 +step:2814 train loss:3.651483 +step:2815 train loss:3.663815 +step:2816 train loss:3.669691 +step:2817 train loss:3.600412 +step:2818 train loss:3.711041 +step:2819 train loss:3.634238 +step:2820 train loss:3.630510 +step:2821 train loss:3.607357 +step:2822 train loss:3.652406 +step:2823 train loss:3.602459 +step:2824 train loss:3.501337 +step:2825 train loss:3.642457 +step:2826 train loss:3.639059 +step:2827 train loss:3.670905 +step:2828 train loss:3.659895 +step:2829 train loss:3.647155 +step:2830 train loss:3.675812 +step:2831 train loss:3.620790 +step:2832 train loss:3.587032 +step:2833 train loss:3.647808 +step:2834 train loss:3.597415 +step:2835 train loss:3.633132 +step:2836 train loss:3.639358 +step:2837 train loss:3.639518 +step:2838 train loss:3.581424 +step:2839 train loss:3.680003 +step:2840 train loss:3.640893 +step:2841 train loss:3.718124 +step:2842 train loss:3.663860 +step:2843 train loss:3.652723 +step:2844 train loss:3.679798 +step:2845 train loss:3.635685 +step:2846 train loss:3.586987 
+step:2847 train loss:3.685314 +step:2848 train loss:3.634643 +step:2849 train loss:3.625489 +step:2850 train loss:3.683329 +step:2851 train loss:3.634873 +step:2852 train loss:3.718053 +step:2853 train loss:3.634100 +step:2854 train loss:3.589695 +step:2855 train loss:3.655100 +step:2856 train loss:3.575089 +step:2857 train loss:3.682379 +step:2858 train loss:3.638976 +step:2859 train loss:3.627207 +step:2860 train loss:3.615891 +step:2861 train loss:3.593867 +step:2862 train loss:3.630594 +step:2863 train loss:3.610923 +step:2864 train loss:3.617442 +step:2865 train loss:3.693902 +step:2866 train loss:3.705762 +step:2867 train loss:3.646072 +step:2868 train loss:3.644733 +step:2869 train loss:3.602181 +step:2870 train loss:3.691261 +step:2871 train loss:3.689714 +step:2872 train loss:3.652730 +step:2873 train loss:3.658482 +step:2874 train loss:3.636999 +step:2875 train loss:3.590297 +step:2876 train loss:3.634999 +step:2877 train loss:3.619968 +step:2878 train loss:3.635658 +step:2879 train loss:3.600759 +step:2880 train loss:3.615340 +step:2881 train loss:3.612087 +step:2882 train loss:3.546863 +step:2883 train loss:3.627692 +step:2884 train loss:3.703236 +step:2885 train loss:3.593483 +step:2886 train loss:3.647158 +step:2887 train loss:3.670086 +step:2888 train loss:3.640643 +step:2889 train loss:3.625285 +step:2890 train loss:3.599303 +step:2891 train loss:3.639568 +step:2892 train loss:3.642114 +step:2893 train loss:3.622870 +step:2894 train loss:3.598273 +step:2895 train loss:3.649900 +step:2896 train loss:3.691296 +step:2897 train loss:3.671024 +step:2898 train loss:3.809107 +step:2899 train loss:3.561771 +step:2900 train loss:3.633043 +step:2901 train loss:3.585567 +step:2902 train loss:3.588282 +step:2903 train loss:3.604748 +step:2904 train loss:3.628827 +step:2905 train loss:3.689871 +step:2906 train loss:3.661588 +step:2907 train loss:3.837397 +step:2908 train loss:3.583451 +step:2909 train loss:3.660878 +step:2910 train loss:3.631820 +step:2911 train loss:3.661062 +step:2912 train loss:3.616122 +step:2913 train loss:3.649487 +step:2914 train loss:3.678145 +step:2915 train loss:3.672278 +step:2916 train loss:3.628671 +step:2917 train loss:3.670807 +step:2918 train loss:3.660138 +step:2919 train loss:3.603848 +step:2920 train loss:3.653426 +step:2921 train loss:3.610206 +step:2922 train loss:3.637800 +step:2923 train loss:3.700498 +step:2924 train loss:3.636909 +step:2925 train loss:3.588954 +step:2926 train loss:3.680403 +step:2927 train loss:3.587402 +step:2928 train loss:3.559694 +step:2929 train loss:3.574384 +step:2930 train loss:3.591752 +step:2931 train loss:3.750119 +step:2932 train loss:3.667118 +step:2933 train loss:3.631100 +step:2934 train loss:3.624130 +step:2935 train loss:3.647635 +step:2936 train loss:3.593869 +step:2937 train loss:3.615434 +step:2938 train loss:3.634042 +step:2939 train loss:3.709255 +step:2940 train loss:3.608990 +step:2941 train loss:3.644599 +step:2942 train loss:3.603760 +step:2943 train loss:3.888487 +step:2944 train loss:3.708089 +step:2945 train loss:3.665869 +step:2946 train loss:3.680033 +step:2947 train loss:3.635295 +step:2948 train loss:3.595913 +step:2949 train loss:3.680810 +step:2950 train loss:3.639081 +step:2951 train loss:3.535895 +step:2952 train loss:3.606688 +step:2953 train loss:3.518346 +step:2954 train loss:3.609446 +step:2955 train loss:3.680640 +step:2956 train loss:3.623777 +step:2957 train loss:3.631184 +step:2958 train loss:3.584517 +step:2959 train loss:3.604989 +step:2960 train loss:3.698149 +step:2961 train 
loss:3.562803 +step:2962 train loss:3.639228 +step:2963 train loss:3.635025 +step:2964 train loss:3.613080 +step:2965 train loss:3.641765 +step:2966 train loss:3.617775 +step:2967 train loss:3.614677 +step:2968 train loss:3.593474 +step:2969 train loss:3.598534 +step:2970 train loss:3.664340 +step:2971 train loss:3.596656 +step:2972 train loss:3.579405 +step:2973 train loss:3.571649 +step:2974 train loss:3.614054 +step:2975 train loss:3.582344 +step:2976 train loss:3.619030 +step:2977 train loss:3.607999 +step:2978 train loss:3.692645 +step:2979 train loss:3.674136 +step:2980 train loss:3.692389 +step:2981 train loss:3.633163 +step:2982 train loss:3.627828 +step:2983 train loss:3.576256 +step:2984 train loss:3.557500 +step:2985 train loss:3.666709 +step:2986 train loss:3.561689 +step:2987 train loss:3.693450 +step:2988 train loss:3.619612 +step:2989 train loss:3.645486 +step:2990 train loss:3.595938 +step:2991 train loss:3.666074 +step:2992 train loss:3.659519 +step:2993 train loss:3.625697 +step:2994 train loss:3.619500 +step:2995 train loss:3.687249 +step:2996 train loss:3.612922 +step:2997 train loss:3.521704 +step:2998 train loss:3.639902 +step:2999 train loss:3.679747 +step:3000 validation loss:3.550156 total_sharp:1.4789e-03 L1_sharp:6.1453e-03 L2_sharp:4.5247e-04 L3_sharp:3.6209e-04 L4_sharp:8.7690e-05 L5_sharp:9.4404e-05 L6_sharp:1.3191e-04 L7_sharp:1.5959e-04 L8_sharp:1.2900e-04 L9_sharp:1.0550e-04 L10_sharp:7.3181e-05 L11_sharp:6.6322e-05 L12_sharp:7.4462e-05 total_fnorm:4.2286e+00 total_l1_linf:3.4544e+04 total_spectral:4.2286e+00 L1_fnorm:1.1052e+00 L2_fnorm:9.6870e-01 L3_fnorm:1.0511e+00 L4_fnorm:1.1496e+00 L5_fnorm:1.1920e+00 L6_fnorm:1.1985e+00 L7_fnorm:1.1980e+00 L8_fnorm:1.2039e+00 L9_fnorm:1.2037e+00 L10_fnorm:1.2059e+00 L11_fnorm:1.2003e+00 L12_fnorm:1.2042e+00 L1_l1linf:8.8507e-01 L2_l1linf:7.9972e-01 L3_l1linf:8.0593e-01 L4_l1linf:8.2802e-01 L5_l1linf:8.3667e-01 L6_l1linf:8.3327e-01 L7_l1linf:8.2477e-01 L8_l1linf:8.2158e-01 L9_l1linf:8.1261e-01 L10_l1linf:8.1102e-01 L11_l1linf:8.0178e-01 L12_l1linf:8.0098e-01 L1_spectral:2.4106e-02 L2_spectral:2.4080e-02 L3_spectral:2.4088e-02 L4_spectral:2.4101e-02 L5_spectral:2.4094e-02 L6_spectral:2.4093e-02 L7_spectral:2.4096e-02 L8_spectral:2.4089e-02 L9_spectral:2.4088e-02 L10_spectral:2.4098e-02 L11_spectral:2.4096e-02 L12_spectral:2.4093e-02 ip_v_neg_g:1.3763e-02 cos_v_neg_g:2.9740e-03 v_norm:4.2286e+00 g_norm:1.0944e+00 hv_norm:8.7139e-01 cos_v_hv:7.1765e-03 hg_norm:3.5585e+01 cos_g_hg:6.9013e-01 v_par:3.2376e-03 v_perp:4.2286e+00 L1_cos_v_neg_g:5.9702e-03 L1_v_norm:1.1052e+00 L2_cos_v_neg_g:5.3916e-03 L2_v_norm:9.6870e-01 L3_cos_v_neg_g:6.9421e-03 L3_v_norm:1.0511e+00 L4_cos_v_neg_g:3.8840e-03 L4_v_norm:1.1496e+00 L5_cos_v_neg_g:4.5860e-03 L5_v_norm:1.1920e+00 L6_cos_v_neg_g:5.1175e-03 L6_v_norm:1.1985e+00 L7_cos_v_neg_g:5.6303e-03 L7_v_norm:1.1980e+00 L8_cos_v_neg_g:5.7756e-03 L8_v_norm:1.2039e+00 L9_cos_v_neg_g:6.4294e-03 L9_v_norm:1.2037e+00 L10_cos_v_neg_g:5.9006e-03 L10_v_norm:1.2059e+00 L11_cos_v_neg_g:4.6503e-03 L11_v_norm:1.2003e+00 L12_cos_v_neg_g:3.5783e-03 L12_v_norm:1.2042e+00 +step:3000 train loss:3.571825 +step:3001 train loss:3.619658 +step:3002 train loss:3.616324 +step:3003 train loss:3.614375 +step:3004 train loss:3.649233 +step:3005 train loss:3.539537 +step:3006 train loss:3.589140 +step:3007 train loss:3.623647 +step:3008 train loss:3.669962 +step:3009 train loss:3.623705 +step:3010 train loss:3.646976 +step:3011 train loss:3.624274 +step:3012 train loss:3.605519 +step:3013 train loss:3.645181 +step:3014 
train loss:3.605554 +step:3015 train loss:3.604907 +step:3016 train loss:3.621406 +step:3017 train loss:3.649249 +step:3018 train loss:3.577110 +step:3019 train loss:3.618184 +step:3020 train loss:3.637016 +step:3021 train loss:3.593859 +step:3022 train loss:3.691110 +step:3023 train loss:3.635941 +step:3024 train loss:3.622612 +step:3025 train loss:3.639865 +step:3026 train loss:3.605408 +step:3027 train loss:3.589872 +step:3028 train loss:3.639880 +step:3029 train loss:3.626515 +step:3030 train loss:3.595872 +step:3031 train loss:3.580039 +step:3032 train loss:3.572374 +step:3033 train loss:3.592642 +step:3034 train loss:3.644058 +step:3035 train loss:3.620334 +step:3036 train loss:3.584671 +step:3037 train loss:3.544866 +step:3038 train loss:3.661494 +step:3039 train loss:3.543426 +step:3040 train loss:3.522310 +step:3041 train loss:3.652549 +step:3042 train loss:3.589963 +step:3043 train loss:3.651227 +step:3044 train loss:3.552718 +step:3045 train loss:3.590920 +step:3046 train loss:3.570919 +step:3047 train loss:3.594509 +step:3048 train loss:3.568903 +step:3049 train loss:3.642412 +step:3050 train loss:3.529713 +step:3051 train loss:3.544780 +step:3052 train loss:3.568887 +step:3053 train loss:3.643259 +step:3054 train loss:3.710341 +step:3055 train loss:3.552299 +step:3056 train loss:3.577374 +step:3057 train loss:3.616554 +step:3058 train loss:3.565280 +step:3059 train loss:3.588717 +step:3060 train loss:3.589976 +step:3061 train loss:3.567104 +step:3062 train loss:3.627298 +step:3063 train loss:3.608133 +step:3064 train loss:3.640385 +step:3065 train loss:3.652390 +step:3066 train loss:3.545950 +step:3067 train loss:3.595561 +step:3068 train loss:3.646273 +step:3069 train loss:3.666796 +step:3070 train loss:3.592870 +step:3071 train loss:3.613734 +step:3072 train loss:3.613584 +step:3073 train loss:3.647959 +step:3074 train loss:3.582488 +step:3075 train loss:3.623317 +step:3076 train loss:3.555176 +step:3077 train loss:3.555774 +step:3078 train loss:3.589017 +step:3079 train loss:3.633902 +step:3080 train loss:3.624025 +step:3081 train loss:3.664399 +step:3082 train loss:3.644045 +step:3083 train loss:3.571434 +step:3084 train loss:3.654809 +step:3085 train loss:3.579723 +step:3086 train loss:3.644280 +step:3087 train loss:3.613701 +step:3088 train loss:3.688924 +step:3089 train loss:3.567920 +step:3090 train loss:3.637012 +step:3091 train loss:3.569194 +step:3092 train loss:3.581858 +step:3093 train loss:3.611152 +step:3094 train loss:3.595066 +step:3095 train loss:3.681219 +step:3096 train loss:3.608156 +step:3097 train loss:3.614682 +step:3098 train loss:3.598687 +step:3099 train loss:3.604844 +step:3100 train loss:3.633520 +step:3101 train loss:3.713526 +step:3102 train loss:3.638172 +step:3103 train loss:3.562576 +step:3104 train loss:3.642508 +step:3105 train loss:3.615978 +step:3106 train loss:3.611898 +step:3107 train loss:3.597521 +step:3108 train loss:3.567650 +step:3109 train loss:3.626729 +step:3110 train loss:3.552731 +step:3111 train loss:3.587694 +step:3112 train loss:3.525625 +step:3113 train loss:3.654425 +step:3114 train loss:3.570968 +step:3115 train loss:3.607882 +step:3116 train loss:3.493236 +step:3117 train loss:3.501667 +step:3118 train loss:3.607144 +step:3119 train loss:3.608617 +step:3120 train loss:3.612760 +step:3121 train loss:3.553997 +step:3122 train loss:3.638150 +step:3123 train loss:3.554873 +step:3124 train loss:3.619283 +step:3125 train loss:3.630508 +step:3126 train loss:3.736938 +step:3127 train loss:3.586059 +step:3128 train loss:3.613052 
+step:3129 train loss:3.598611 +step:3130 train loss:3.572076 +step:3131 train loss:3.650871 +step:3132 train loss:3.631276 +step:3133 train loss:3.608918 +step:3134 train loss:3.499999 +step:3135 train loss:3.599300 +step:3136 train loss:3.566777 +step:3137 train loss:3.705289 +step:3138 train loss:3.603287 +step:3139 train loss:3.584419 +step:3140 train loss:3.603119 +step:3141 train loss:3.606936 +step:3142 train loss:3.543076 +step:3143 train loss:3.628831 +step:3144 train loss:3.578701 +step:3145 train loss:3.562815 +step:3146 train loss:3.577332 +step:3147 train loss:3.685150 +step:3148 train loss:3.590086 +step:3149 train loss:3.646395 +step:3150 train loss:3.633256 +step:3151 train loss:3.597040 +step:3152 train loss:3.598770 +step:3153 train loss:3.552626 +step:3154 train loss:3.642848 +step:3155 train loss:3.577317 +step:3156 train loss:3.632660 +step:3157 train loss:3.632650 +step:3158 train loss:3.610838 +step:3159 train loss:3.545300 +step:3160 train loss:3.596317 +step:3161 train loss:3.571755 +step:3162 train loss:3.623060 +step:3163 train loss:3.606159 +step:3164 train loss:3.585330 +step:3165 train loss:3.600938 +step:3166 train loss:3.637115 +step:3167 train loss:3.599417 +step:3168 train loss:3.678243 +step:3169 train loss:3.590823 +step:3170 train loss:3.570518 +step:3171 train loss:3.563100 +step:3172 train loss:3.566887 +step:3173 train loss:3.513145 +step:3174 train loss:3.628653 +step:3175 train loss:3.592196 +step:3176 train loss:3.608079 +step:3177 train loss:3.573921 +step:3178 train loss:3.552198 +step:3179 train loss:3.628148 +step:3180 train loss:3.559682 +step:3181 train loss:3.641346 +step:3182 train loss:3.645732 +step:3183 train loss:3.588212 +step:3184 train loss:3.588139 +step:3185 train loss:3.643593 +step:3186 train loss:3.605289 +step:3187 train loss:3.623478 +step:3188 train loss:3.666933 +step:3189 train loss:3.604679 +step:3190 train loss:3.566604 +step:3191 train loss:3.567418 +step:3192 train loss:3.535814 +step:3193 train loss:3.608217 +step:3194 train loss:3.579400 +step:3195 train loss:3.558913 +step:3196 train loss:3.612123 +step:3197 train loss:3.574015 +step:3198 train loss:3.612265 +step:3199 train loss:3.588083 +step:3200 train loss:3.595490 +step:3201 train loss:3.559031 +step:3202 train loss:3.624679 +step:3203 train loss:3.685225 +step:3204 train loss:3.649863 +step:3205 train loss:3.495515 +step:3206 train loss:3.776501 +step:3207 train loss:3.531133 +step:3208 train loss:3.603655 +step:3209 train loss:3.591483 +step:3210 train loss:3.573032 +step:3211 train loss:3.602714 +step:3212 train loss:3.610703 +step:3213 train loss:3.551347 +step:3214 train loss:3.657339 +step:3215 train loss:3.664333 +step:3216 train loss:3.529329 +step:3217 train loss:3.616807 +step:3218 train loss:3.649060 +step:3219 train loss:3.569758 +step:3220 train loss:3.641095 +step:3221 train loss:3.555233 +step:3222 train loss:3.596986 +step:3223 train loss:3.614710 +step:3224 train loss:3.626493 +step:3225 train loss:3.551405 +step:3226 train loss:3.584182 +step:3227 train loss:3.610795 +step:3228 train loss:3.604158 +step:3229 train loss:3.639357 +step:3230 train loss:3.651671 +step:3231 train loss:3.591755 +step:3232 train loss:3.601242 +step:3233 train loss:3.574561 +step:3234 train loss:3.561366 +step:3235 train loss:3.561873 +step:3236 train loss:3.584527 +step:3237 train loss:3.584955 +step:3238 train loss:3.607298 +step:3239 train loss:3.505546 +step:3240 train loss:3.613222 +step:3241 train loss:3.612809 +step:3242 train loss:3.670372 +step:3243 train 
loss:3.606949 +step:3244 train loss:3.625037 +step:3245 train loss:3.527440 +step:3246 train loss:3.652331 +step:3247 train loss:3.600551 +step:3248 train loss:3.623471 +step:3249 train loss:3.563147 +step:3250 validation loss:3.529783 +step:3250 train loss:3.565294 +step:3251 train loss:3.681059 +step:3252 train loss:3.606334 +step:3253 train loss:3.608342 +step:3254 train loss:3.674800 +step:3255 train loss:3.617987 +step:3256 train loss:3.612580 +step:3257 train loss:3.595407 +step:3258 train loss:3.523497 +step:3259 train loss:3.503274 +step:3260 train loss:3.615928 +step:3261 train loss:3.595001 +step:3262 train loss:3.584474 +step:3263 train loss:3.572328 +step:3264 train loss:3.683609 +step:3265 train loss:3.590609 +step:3266 train loss:3.621211 +step:3267 train loss:3.584402 +step:3268 train loss:3.586774 +step:3269 train loss:3.599036 +step:3270 train loss:3.629599 +step:3271 train loss:3.586559 +step:3272 train loss:3.573802 +step:3273 train loss:3.576757 +step:3274 train loss:3.716385 +step:3275 train loss:3.586836 +step:3276 train loss:3.656781 +step:3277 train loss:3.592871 +step:3278 train loss:3.571041 +step:3279 train loss:3.589385 +step:3280 train loss:3.622492 +step:3281 train loss:3.543914 +step:3282 train loss:3.618278 +step:3283 train loss:3.587870 +step:3284 train loss:3.554987 +step:3285 train loss:3.565814 +step:3286 train loss:3.600095 +step:3287 train loss:3.533665 +step:3288 train loss:3.621487 +step:3289 train loss:3.560555 +step:3290 train loss:3.596715 +step:3291 train loss:3.551339 +step:3292 train loss:3.579905 +step:3293 train loss:3.620910 +step:3294 train loss:3.639562 +step:3295 train loss:3.543440 +step:3296 train loss:3.605286 +step:3297 train loss:3.559841 +step:3298 train loss:3.562177 +step:3299 train loss:3.691254 +step:3300 train loss:3.533274 +step:3301 train loss:3.607579 +step:3302 train loss:3.589530 +step:3303 train loss:3.593567 +step:3304 train loss:3.563264 +step:3305 train loss:3.651521 +step:3306 train loss:3.588728 +step:3307 train loss:3.605574 +step:3308 train loss:3.562667 +step:3309 train loss:3.619586 +step:3310 train loss:3.540583 +step:3311 train loss:3.590142 +step:3312 train loss:3.563917 +step:3313 train loss:3.597943 +step:3314 train loss:3.592945 +step:3315 train loss:3.669759 +step:3316 train loss:3.525604 +step:3317 train loss:3.610031 +step:3318 train loss:3.630284 +step:3319 train loss:3.549973 +step:3320 train loss:3.719552 +step:3321 train loss:3.616725 +step:3322 train loss:3.618859 +step:3323 train loss:3.720673 +step:3324 train loss:3.641987 +step:3325 train loss:3.611868 +step:3326 train loss:3.603557 +step:3327 train loss:3.614739 +step:3328 train loss:3.598042 +step:3329 train loss:3.594706 +step:3330 train loss:3.586022 +step:3331 train loss:3.633894 +step:3332 train loss:3.652984 +step:3333 train loss:3.618564 +step:3334 train loss:3.548934 +step:3335 train loss:3.570238 +step:3336 train loss:3.596755 +step:3337 train loss:3.600962 +step:3338 train loss:3.585775 +step:3339 train loss:3.584212 +step:3340 train loss:3.622442 +step:3341 train loss:3.563605 +step:3342 train loss:3.615312 +step:3343 train loss:3.554166 +step:3344 train loss:3.612426 +step:3345 train loss:3.563030 +step:3346 train loss:3.576118 +step:3347 train loss:3.578688 +step:3348 train loss:3.604228 +step:3349 train loss:3.585215 +step:3350 train loss:3.611995 +step:3351 train loss:3.664317 +step:3352 train loss:3.610108 +step:3353 train loss:3.708256 +step:3354 train loss:3.553270 +step:3355 train loss:3.656427 +step:3356 train loss:3.610305 
+step:3357 train loss:3.615882 +step:3358 train loss:3.557570 +step:3359 train loss:3.587323 +step:3360 train loss:3.582974 +step:3361 train loss:3.584096 +step:3362 train loss:3.574433 +step:3363 train loss:3.574138 +step:3364 train loss:3.557532 +step:3365 train loss:3.594621 +step:3366 train loss:3.626575 +step:3367 train loss:3.579340 +step:3368 train loss:3.674553 +step:3369 train loss:3.588982 +step:3370 train loss:3.676303 +step:3371 train loss:3.634790 +step:3372 train loss:3.597244 +step:3373 train loss:3.610191 +step:3374 train loss:3.658288 +step:3375 train loss:3.589841 +step:3376 train loss:3.602581 +step:3377 train loss:3.580991 +step:3378 train loss:3.559940 +step:3379 train loss:3.639313 +step:3380 train loss:3.618812 +step:3381 train loss:3.600071 +step:3382 train loss:3.622908 +step:3383 train loss:3.627059 +step:3384 train loss:3.560868 +step:3385 train loss:3.609302 +step:3386 train loss:3.589768 +step:3387 train loss:3.664227 +step:3388 train loss:3.563491 +step:3389 train loss:3.782182 +step:3390 train loss:3.503871 +step:3391 train loss:3.581527 +step:3392 train loss:3.565757 +step:3393 train loss:3.599802 +step:3394 train loss:3.553412 +step:3395 train loss:3.627022 +step:3396 train loss:3.541316 +step:3397 train loss:3.620385 +step:3398 train loss:3.585456 +step:3399 train loss:3.605504 +step:3400 train loss:3.552249 +step:3401 train loss:3.590426 +step:3402 train loss:3.750896 +step:3403 train loss:3.641084 +step:3404 train loss:3.753659 +step:3405 train loss:3.612543 +step:3406 train loss:3.581098 +step:3407 train loss:3.583194 +step:3408 train loss:3.562978 +step:3409 train loss:3.531555 +step:3410 train loss:3.559047 +step:3411 train loss:3.630153 +step:3412 train loss:3.551238 +step:3413 train loss:3.548495 +step:3414 train loss:3.580918 +step:3415 train loss:3.559706 +step:3416 train loss:3.565890 +step:3417 train loss:3.643263 +step:3418 train loss:3.647423 +step:3419 train loss:3.600325 +step:3420 train loss:3.581898 +step:3421 train loss:3.615871 +step:3422 train loss:3.628347 +step:3423 train loss:3.646535 +step:3424 train loss:3.527856 +step:3425 train loss:3.552977 +step:3426 train loss:3.547613 +step:3427 train loss:3.610237 +step:3428 train loss:3.529320 +step:3429 train loss:3.599597 +step:3430 train loss:3.565341 +step:3431 train loss:3.620375 +step:3432 train loss:3.601765 +step:3433 train loss:3.562479 +step:3434 train loss:3.642646 +step:3435 train loss:3.587349 +step:3436 train loss:3.674674 +step:3437 train loss:3.506890 +step:3438 train loss:3.609621 +step:3439 train loss:3.583184 +step:3440 train loss:3.683582 +step:3441 train loss:3.574964 +step:3442 train loss:3.643810 +step:3443 train loss:3.575279 +step:3444 train loss:3.596196 +step:3445 train loss:3.638754 +step:3446 train loss:3.546191 +step:3447 train loss:3.620474 +step:3448 train loss:3.572652 +step:3449 train loss:3.605699 +step:3450 train loss:3.513650 +step:3451 train loss:3.633394 +step:3452 train loss:3.580647 +step:3453 train loss:3.635951 +step:3454 train loss:3.659708 +step:3455 train loss:3.715418 +step:3456 train loss:3.656427 +step:3457 train loss:3.653157 +step:3458 train loss:3.580616 +step:3459 train loss:3.583809 +step:3460 train loss:3.535570 +step:3461 train loss:3.596872 +step:3462 train loss:3.598432 +step:3463 train loss:3.570611 +step:3464 train loss:3.626800 +step:3465 train loss:3.552807 +step:3466 train loss:3.621967 +step:3467 train loss:3.575125 +step:3468 train loss:3.593183 +step:3469 train loss:3.601994 +step:3470 train loss:3.588140 +step:3471 train 
loss:3.626736 +step:3472 train loss:3.510255 +step:3473 train loss:3.636976 +step:3474 train loss:3.529344 +step:3475 train loss:3.616385 +step:3476 train loss:3.583300 +step:3477 train loss:3.603833 +step:3478 train loss:3.577297 +step:3479 train loss:3.607811 +step:3480 train loss:3.627855 +step:3481 train loss:3.604792 +step:3482 train loss:3.593870 +step:3483 train loss:3.732478 +step:3484 train loss:3.573760 +step:3485 train loss:3.560524 +step:3486 train loss:3.613767 +step:3487 train loss:3.652628 +step:3488 train loss:3.561969 +step:3489 train loss:3.612263 +step:3490 train loss:3.573911 +step:3491 train loss:3.619982 +step:3492 train loss:3.651736 +step:3493 train loss:3.623271 +step:3494 train loss:3.616609 +step:3495 train loss:3.597301 +step:3496 train loss:3.561645 +step:3497 train loss:3.674264 +step:3498 train loss:3.619172 +step:3499 train loss:3.548490 +step:3500 validation loss:3.516309 total_sharp:1.1152e-03 L1_sharp:4.3072e-03 L2_sharp:1.4703e-04 L3_sharp:3.4985e-04 L4_sharp:8.2727e-05 L5_sharp:9.9531e-05 L6_sharp:1.3513e-04 L7_sharp:2.1783e-04 L8_sharp:1.1322e-04 L9_sharp:9.2294e-05 L10_sharp:7.3512e-05 L11_sharp:6.2414e-05 L12_sharp:1.1992e-04 total_fnorm:4.2258e+00 total_l1_linf:3.4521e+04 total_spectral:4.2258e+00 L1_fnorm:1.0688e+00 L2_fnorm:9.6916e-01 L3_fnorm:1.0643e+00 L4_fnorm:1.1530e+00 L5_fnorm:1.1906e+00 L6_fnorm:1.1998e+00 L7_fnorm:1.1962e+00 L8_fnorm:1.2030e+00 L9_fnorm:1.2042e+00 L10_fnorm:1.2086e+00 L11_fnorm:1.2039e+00 L12_fnorm:1.2071e+00 L1_l1linf:8.9286e-01 L2_l1linf:7.8806e-01 L3_l1linf:8.0124e-01 L4_l1linf:8.3181e-01 L5_l1linf:8.4208e-01 L6_l1linf:8.3264e-01 L7_l1linf:8.2696e-01 L8_l1linf:8.1647e-01 L9_l1linf:8.1363e-01 L10_l1linf:8.1331e-01 L11_l1linf:7.9908e-01 L12_l1linf:7.9745e-01 L1_spectral:2.4109e-02 L2_spectral:2.4075e-02 L3_spectral:2.4083e-02 L4_spectral:2.4090e-02 L5_spectral:2.4107e-02 L6_spectral:2.4096e-02 L7_spectral:2.4084e-02 L8_spectral:2.4094e-02 L9_spectral:2.4085e-02 L10_spectral:2.4085e-02 L11_spectral:2.4087e-02 L12_spectral:2.4086e-02 ip_v_neg_g:1.0882e-02 cos_v_neg_g:2.0669e-03 v_norm:4.2258e+00 g_norm:1.2459e+00 hv_norm:8.8784e-01 cos_v_hv:5.3078e-03 hg_norm:7.1949e+01 cos_g_hg:3.3590e-01 v_par:1.7918e-03 v_perp:4.2258e+00 L1_cos_v_neg_g:3.7355e-03 L1_v_norm:1.0688e+00 L2_cos_v_neg_g:5.0976e-03 L2_v_norm:9.6916e-01 L3_cos_v_neg_g:6.1314e-03 L3_v_norm:1.0643e+00 L4_cos_v_neg_g:3.2767e-03 L4_v_norm:1.1530e+00 L5_cos_v_neg_g:3.7514e-03 L5_v_norm:1.1906e+00 L6_cos_v_neg_g:4.4963e-03 L6_v_norm:1.1998e+00 L7_cos_v_neg_g:4.6481e-03 L7_v_norm:1.1962e+00 L8_cos_v_neg_g:4.4113e-03 L8_v_norm:1.2030e+00 L9_cos_v_neg_g:6.0025e-03 L9_v_norm:1.2042e+00 L10_cos_v_neg_g:5.5964e-03 L10_v_norm:1.2086e+00 L11_cos_v_neg_g:4.3649e-03 L11_v_norm:1.2039e+00 L12_cos_v_neg_g:4.6708e-03 L12_v_norm:1.2071e+00 +step:3500 train loss:3.570094 +step:3501 train loss:3.696784 +step:3502 train loss:3.673398 +step:3503 train loss:3.625780 +step:3504 train loss:3.582479 +step:3505 train loss:3.591419 +step:3506 train loss:3.496313 +step:3507 train loss:3.613516 +step:3508 train loss:3.553963 +step:3509 train loss:3.627117 +step:3510 train loss:3.554667 +step:3511 train loss:3.593492 +step:3512 train loss:3.729205 +step:3513 train loss:3.551449 +step:3514 train loss:3.566670 +step:3515 train loss:3.820729 +step:3516 train loss:3.614488 +step:3517 train loss:3.568940 +step:3518 train loss:3.576249 +step:3519 train loss:3.564406 +step:3520 train loss:3.599614 +step:3521 train loss:3.586774 +step:3522 train loss:3.500669 +step:3523 train loss:3.603631 +step:3524 
train loss:3.584249 +step:3525 train loss:3.577222 +step:3526 train loss:3.598401 +step:3527 train loss:3.545447 +step:3528 train loss:3.599786 +step:3529 train loss:3.579748 +step:3530 train loss:3.577173 +step:3531 train loss:3.563938 +step:3532 train loss:3.756777 +step:3533 train loss:3.570393 +step:3534 train loss:3.590421 +step:3535 train loss:3.563524 +step:3536 train loss:3.561160 +step:3537 train loss:3.577480 +step:3538 train loss:3.603615 +step:3539 train loss:3.552593 +step:3540 train loss:3.618313 +step:3541 train loss:3.589823 +step:3542 train loss:3.594490 +step:3543 train loss:3.518420 +step:3544 train loss:3.536125 +step:3545 train loss:3.539425 +step:3546 train loss:3.604236 +step:3547 train loss:3.610954 +step:3548 train loss:3.586526 +step:3549 train loss:3.583765 +step:3550 train loss:3.577734 +step:3551 train loss:3.600050 +step:3552 train loss:3.495175 +step:3553 train loss:3.615538 +step:3554 train loss:3.612778 +step:3555 train loss:3.594348 +step:3556 train loss:3.619690 +step:3557 train loss:3.609442 +step:3558 train loss:3.579151 +step:3559 train loss:3.526843 +step:3560 train loss:3.622901 +step:3561 train loss:3.611621 +step:3562 train loss:3.787580 +step:3563 train loss:3.644687 +step:3564 train loss:3.603568 +step:3565 train loss:3.605313 +step:3566 train loss:3.578449 +step:3567 train loss:3.517777 +step:3568 train loss:3.548220 +step:3569 train loss:3.630343 +step:3570 train loss:3.654590 +step:3571 train loss:3.631203 +step:3572 train loss:3.623724 +step:3573 train loss:3.580181 +step:3574 train loss:3.581738 +step:3575 train loss:3.572023 +step:3576 train loss:3.553367 +step:3577 train loss:3.565814 +step:3578 train loss:3.646715 +step:3579 train loss:3.555933 +step:3580 train loss:3.636566 +step:3581 train loss:3.580232 +step:3582 train loss:3.636451 +step:3583 train loss:3.571641 +step:3584 train loss:3.549232 +step:3585 train loss:3.594479 +step:3586 train loss:3.548341 +step:3587 train loss:3.638818 +step:3588 train loss:3.775406 +step:3589 train loss:3.603662 +step:3590 train loss:3.585703 +step:3591 train loss:3.594550 +step:3592 train loss:3.556040 +step:3593 train loss:3.528590 +step:3594 train loss:3.583181 +step:3595 train loss:3.556912 +step:3596 train loss:3.637093 +step:3597 train loss:3.607776 +step:3598 train loss:3.563246 +step:3599 train loss:3.614831 +step:3600 train loss:3.551463 +step:3601 train loss:3.571204 +step:3602 train loss:3.556170 +step:3603 train loss:3.574686 +step:3604 train loss:3.597155 +step:3605 train loss:3.706480 +step:3606 train loss:3.606452 +step:3607 train loss:3.589078 +step:3608 train loss:3.605138 +step:3609 train loss:3.586533 +step:3610 train loss:3.557974 +step:3611 train loss:3.555492 +step:3612 train loss:3.627764 +step:3613 train loss:3.593962 +step:3614 train loss:3.551457 +step:3615 train loss:3.578368 +step:3616 train loss:3.547134 +step:3617 train loss:3.611909 +step:3618 train loss:3.565313 +step:3619 train loss:3.558926 +step:3620 train loss:3.574895 +step:3621 train loss:3.534603 +step:3622 train loss:3.638464 +step:3623 train loss:3.633171 +step:3624 train loss:3.602382 +step:3625 train loss:3.578047 +step:3626 train loss:3.592201 +step:3627 train loss:3.589382 +step:3628 train loss:3.570689 +step:3629 train loss:3.575557 +step:3630 train loss:3.656520 +step:3631 train loss:3.581602 +step:3632 train loss:3.610177 +step:3633 train loss:3.571683 +step:3634 train loss:3.575408 +step:3635 train loss:3.565845 +step:3636 train loss:3.637669 +step:3637 train loss:3.712377 +step:3638 train loss:3.632594 
+step:3639 train loss:3.615194 +step:3640 train loss:3.622050 +step:3641 train loss:3.659199 +step:3642 train loss:3.555620 +step:3643 train loss:3.727958 +step:3644 train loss:3.614680 +step:3645 train loss:3.589914 +step:3646 train loss:3.714036 +step:3647 train loss:3.597637 +step:3648 train loss:3.592394 +step:3649 train loss:3.539765 +step:3650 train loss:3.583923 +step:3651 train loss:3.579344 +step:3652 train loss:3.563154 +step:3653 train loss:3.510817 +step:3654 train loss:3.558931 +step:3655 train loss:3.553772 +step:3656 train loss:3.584600 +step:3657 train loss:3.601026 +step:3658 train loss:3.595602 +step:3659 train loss:3.579801 +step:3660 train loss:3.558266 +step:3661 train loss:3.583496 +step:3662 train loss:3.557262 +step:3663 train loss:3.591107 +step:3664 train loss:3.549009 +step:3665 train loss:3.593910 +step:3666 train loss:3.625618 +step:3667 train loss:3.723554 +step:3668 train loss:3.602205 +step:3669 train loss:3.560089 +step:3670 train loss:3.610432 +step:3671 train loss:3.568840 +step:3672 train loss:3.603332 +step:3673 train loss:3.585475 +step:3674 train loss:3.601438 +step:3675 train loss:3.611695 +step:3676 train loss:3.578222 +step:3677 train loss:3.540482 +step:3678 train loss:3.597934 +step:3679 train loss:3.501276 +step:3680 train loss:3.604133 +step:3681 train loss:3.636449 +step:3682 train loss:3.617461 +step:3683 train loss:3.559650 +step:3684 train loss:3.558957 +step:3685 train loss:3.590175 +step:3686 train loss:3.614236 +step:3687 train loss:3.571818 +step:3688 train loss:3.544118 +step:3689 train loss:3.578238 +step:3690 train loss:3.569086 +step:3691 train loss:3.550999 +step:3692 train loss:3.616707 +step:3693 train loss:3.748289 +step:3694 train loss:3.560627 +step:3695 train loss:3.614691 +step:3696 train loss:3.578332 +step:3697 train loss:3.575227 +step:3698 train loss:3.511936 +step:3699 train loss:3.538327 +step:3700 train loss:3.569773 +step:3701 train loss:3.586771 +step:3702 train loss:3.609895 +step:3703 train loss:3.566991 +step:3704 train loss:3.610516 +step:3705 train loss:3.589866 +step:3706 train loss:3.545071 +step:3707 train loss:3.595327 +step:3708 train loss:3.576824 +step:3709 train loss:3.497586 +step:3710 train loss:3.618236 +step:3711 train loss:3.566282 +step:3712 train loss:3.604946 +step:3713 train loss:3.556132 +step:3714 train loss:3.574542 +step:3715 train loss:3.691682 +step:3716 train loss:3.599221 +step:3717 train loss:3.572451 +step:3718 train loss:3.578368 +step:3719 train loss:3.569785 +step:3720 train loss:3.581074 +step:3721 train loss:3.638733 +step:3722 train loss:3.654126 +step:3723 train loss:3.539353 +step:3724 train loss:3.595345 +step:3725 train loss:3.575294 +step:3726 train loss:3.595141 +step:3727 train loss:3.667413 +step:3728 train loss:3.635243 +step:3729 train loss:3.531676 +step:3730 train loss:3.547699 +step:3731 train loss:3.571129 +step:3732 train loss:3.730908 +step:3733 train loss:3.585951 +step:3734 train loss:3.585162 +step:3735 train loss:3.523021 +step:3736 train loss:3.581333 +step:3737 train loss:3.629706 +step:3738 train loss:3.656825 +step:3739 train loss:3.571296 +step:3740 train loss:3.480387 +step:3741 train loss:3.678749 +step:3742 train loss:3.593785 +step:3743 train loss:3.569060 +step:3744 train loss:3.605285 +step:3745 train loss:3.585758 +step:3746 train loss:3.552560 +step:3747 train loss:3.569735 +step:3748 train loss:3.614712 +step:3749 train loss:3.594732 +step:3750 validation loss:3.506105 +step:3750 train loss:3.603579 +step:3751 train loss:3.695070 +step:3752 
train loss:3.626892 +step:3753 train loss:3.548288 +step:3754 train loss:3.596081 +step:3755 train loss:3.776546 +step:3756 train loss:3.553535 +step:3757 train loss:3.547486 +step:3758 train loss:3.578232 +step:3759 train loss:3.523737 +step:3760 train loss:3.522631 +step:3761 train loss:3.576323 +step:3762 train loss:3.566280 +step:3763 train loss:3.573084 +step:3764 train loss:3.556738 +step:3765 train loss:3.562913 +step:3766 train loss:3.528739 +step:3767 train loss:3.613115 +step:3768 train loss:3.553996 +step:3769 train loss:3.820552 +step:3770 train loss:3.605955 +step:3771 train loss:3.615773 +step:3772 train loss:3.575116 +step:3773 train loss:3.569480 +step:3774 train loss:3.569695 +step:3775 train loss:3.569428 +step:3776 train loss:3.567596 +step:3777 train loss:3.528551 +step:3778 train loss:3.547167 +step:3779 train loss:3.529765 +step:3780 train loss:3.615771 +step:3781 train loss:3.575622 +step:3782 train loss:3.497699 +step:3783 train loss:3.604021 +step:3784 train loss:3.614172 +step:3785 train loss:3.524796 +step:3786 train loss:3.631819 +step:3787 train loss:3.543936 +step:3788 train loss:3.559400 +step:3789 train loss:3.479115 +step:3790 train loss:3.580770 +step:3791 train loss:3.601030 +step:3792 train loss:3.571578 +step:3793 train loss:3.574625 +step:3794 train loss:3.596135 +step:3795 train loss:3.570276 +step:3796 train loss:3.583763 +step:3797 train loss:3.563168 +step:3798 train loss:3.571119 +step:3799 train loss:3.578694 +step:3800 train loss:3.486225 +step:3801 train loss:3.603187 +step:3802 train loss:3.527156 +step:3803 train loss:3.613005 +step:3804 train loss:3.622974 +step:3805 train loss:3.584752 +step:3806 train loss:3.600776 +step:3807 train loss:3.619131 +step:3808 train loss:3.577905 +step:3809 train loss:3.587476 +step:3810 train loss:3.589148 +step:3811 train loss:3.577232 +step:3812 train loss:3.574988 +step:3813 train loss:3.531859 +step:3814 train loss:3.577954 +step:3815 train loss:3.579689 +step:3816 train loss:3.597513 +step:3817 train loss:3.612609 +step:3818 train loss:3.586046 +step:3819 train loss:3.598537 +step:3820 train loss:3.598828 +step:3821 train loss:3.555173 +step:3822 train loss:3.642011 +step:3823 train loss:3.530486 +step:3824 train loss:3.547513 +step:3825 train loss:3.553965 +step:3826 train loss:3.633759 +step:3827 train loss:3.640303 +step:3828 train loss:3.534575 +step:3829 train loss:3.553578 +step:3830 train loss:3.614764 +step:3831 train loss:3.546337 +step:3832 train loss:3.605143 +step:3833 train loss:3.546181 +step:3834 train loss:3.515242 +step:3835 train loss:3.558144 +step:3836 train loss:3.533349 +step:3837 train loss:3.601462 +step:3838 train loss:3.552842 +step:3839 train loss:3.599014 +step:3840 train loss:3.609801 +step:3841 train loss:3.561813 +step:3842 train loss:3.589457 +step:3843 train loss:3.599951 +step:3844 train loss:3.576402 +step:3845 train loss:3.599134 +step:3846 train loss:3.637108 +step:3847 train loss:3.535359 +step:3848 train loss:3.541173 +step:3849 train loss:3.565352 +step:3850 train loss:3.576026 +step:3851 train loss:3.721941 +step:3852 train loss:3.699379 +step:3853 train loss:3.590453 +step:3854 train loss:3.552082 +step:3855 train loss:3.601435 +step:3856 train loss:3.528584 +step:3857 train loss:3.590420 +step:3858 train loss:3.500041 +step:3859 train loss:3.549882 +step:3860 train loss:3.617725 +step:3861 train loss:3.589052 +step:3862 train loss:3.525877 +step:3863 train loss:3.577507 +step:3864 train loss:3.549504 +step:3865 train loss:3.584123 +step:3866 train loss:3.607922 
+step:3867 train loss:3.600929 +step:3868 train loss:3.555919 +step:3869 train loss:3.552117 +step:3870 train loss:3.526734 +step:3871 train loss:3.526040 +step:3872 train loss:3.659771 +step:3873 train loss:3.581434 +step:3874 train loss:3.593546 +step:3875 train loss:3.701729 +step:3876 train loss:3.581403 +step:3877 train loss:3.602944 +step:3878 train loss:3.628331 +step:3879 train loss:3.617956 +step:3880 train loss:3.700850 +step:3881 train loss:3.518859 +step:3882 train loss:3.554514 +step:3883 train loss:3.565845 +step:3884 train loss:3.561771 +step:3885 train loss:3.576712 +step:3886 train loss:3.637107 +step:3887 train loss:3.615480 +step:3888 train loss:3.577873 +step:3889 train loss:3.548714 +step:3890 train loss:3.587279 +step:3891 train loss:3.600278 +step:3892 train loss:3.512269 +step:3893 train loss:3.616268 +step:3894 train loss:3.568376 +step:3895 train loss:3.585045 +step:3896 train loss:3.578537 +step:3897 train loss:3.546613 +step:3898 train loss:3.605830 +step:3899 train loss:3.642864 +step:3900 train loss:3.599317 +step:3901 train loss:3.615394 +step:3902 train loss:3.538889 +step:3903 train loss:3.558778 +step:3904 train loss:3.589553 +step:3905 train loss:3.524348 +step:3906 train loss:3.559209 +step:3907 train loss:3.595283 +step:3908 train loss:3.670590 +step:3909 train loss:3.565315 +step:3910 train loss:3.590098 +step:3911 train loss:3.607295 +step:3912 train loss:3.554204 +step:3913 train loss:3.568873 +step:3914 train loss:3.588720 +step:3915 train loss:3.561229 +step:3916 train loss:3.596150 +step:3917 train loss:3.643152 +step:3918 train loss:3.610945 +step:3919 train loss:3.587302 +step:3920 train loss:3.566489 +step:3921 train loss:3.605029 +step:3922 train loss:3.607052 +step:3923 train loss:3.596058 +step:3924 train loss:3.534441 +step:3925 train loss:3.736606 +step:3926 train loss:3.578617 +step:3927 train loss:3.555748 +step:3928 train loss:3.641153 +step:3929 train loss:3.708188 +step:3930 train loss:3.594346 +step:3931 train loss:3.524958 +step:3932 train loss:3.582963 +step:3933 train loss:3.599934 +step:3934 train loss:3.550164 +step:3935 train loss:3.526686 +step:3936 train loss:3.616991 +step:3937 train loss:3.575908 +step:3938 train loss:3.587691 +step:3939 train loss:3.611780 +step:3940 train loss:3.565153 +step:3941 train loss:3.646496 +step:3942 train loss:3.609722 +step:3943 train loss:3.592955 +step:3944 train loss:3.643238 +step:3945 train loss:3.553122 +step:3946 train loss:3.498585 +step:3947 train loss:3.628510 +step:3948 train loss:3.596108 +step:3949 train loss:3.765700 +step:3950 train loss:3.565071 +step:3951 train loss:3.524635 +step:3952 train loss:3.448678 +step:3953 train loss:3.529605 +step:3954 train loss:3.576924 +step:3955 train loss:3.604335 +step:3956 train loss:3.567944 +step:3957 train loss:3.613477 +step:3958 train loss:3.594665 +step:3959 train loss:3.627143 +step:3960 train loss:3.555614 +step:3961 train loss:3.581709 +step:3962 train loss:3.584294 +step:3963 train loss:3.559636 +step:3964 train loss:3.539105 +step:3965 train loss:3.589738 +step:3966 train loss:3.547992 +step:3967 train loss:3.597985 +step:3968 train loss:3.609734 +step:3969 train loss:3.519844 +step:3970 train loss:3.627067 +step:3971 train loss:3.549313 +step:3972 train loss:3.575948 +step:3973 train loss:3.538625 +step:3974 train loss:3.634724 +step:3975 train loss:3.580799 +step:3976 train loss:3.537728 +step:3977 train loss:3.600006 +step:3978 train loss:3.561280 +step:3979 train loss:3.551803 +step:3980 train loss:3.624480 +step:3981 train 
loss:3.552845 +step:3982 train loss:3.573727 +step:3983 train loss:3.557974 +step:3984 train loss:3.592873 +step:3985 train loss:3.567109 +step:3986 train loss:3.581503 +step:3987 train loss:3.593077 +step:3988 train loss:3.526709 +step:3989 train loss:3.598492 +step:3990 train loss:3.594664 +step:3991 train loss:3.609200 +step:3992 train loss:3.564266 +step:3993 train loss:3.600323 +step:3994 train loss:3.550254 +step:3995 train loss:3.601154 +step:3996 train loss:3.517927 +step:3997 train loss:3.596728 +step:3998 train loss:3.478687 +step:3999 train loss:3.634486 +step:4000 validation loss:3.488828 total_sharp:9.8819e-04 L1_sharp:3.5468e-03 L2_sharp:1.3803e-04 L3_sharp:4.5369e-04 L4_sharp:6.7090e-05 L5_sharp:8.5467e-05 L6_sharp:1.3957e-04 L7_sharp:1.7563e-04 L8_sharp:1.0596e-04 L9_sharp:8.6671e-05 L10_sharp:6.0918e-05 L11_sharp:5.4929e-05 L12_sharp:5.6420e-05 total_fnorm:4.2416e+00 total_l1_linf:3.4637e+04 total_spectral:4.2416e+00 L1_fnorm:1.1021e+00 L2_fnorm:9.9490e-01 L3_fnorm:1.0623e+00 L4_fnorm:1.1579e+00 L5_fnorm:1.1923e+00 L6_fnorm:1.1993e+00 L7_fnorm:1.1971e+00 L8_fnorm:1.2033e+00 L9_fnorm:1.2034e+00 L10_fnorm:1.2072e+00 L11_fnorm:1.2048e+00 L12_fnorm:1.2062e+00 L1_l1linf:8.9204e-01 L2_l1linf:7.8805e-01 L3_l1linf:8.0671e-01 L4_l1linf:8.2921e-01 L5_l1linf:8.3590e-01 L6_l1linf:8.4494e-01 L7_l1linf:8.2489e-01 L8_l1linf:8.1541e-01 L9_l1linf:8.1596e-01 L10_l1linf:8.1505e-01 L11_l1linf:8.0256e-01 L12_l1linf:7.9875e-01 L1_spectral:2.4096e-02 L2_spectral:2.4085e-02 L3_spectral:2.4083e-02 L4_spectral:2.4104e-02 L5_spectral:2.4106e-02 L6_spectral:2.4095e-02 L7_spectral:2.4092e-02 L8_spectral:2.4087e-02 L9_spectral:2.4090e-02 L10_spectral:2.4083e-02 L11_spectral:2.4092e-02 L12_spectral:2.4091e-02 ip_v_neg_g:9.2859e-03 cos_v_neg_g:2.1294e-03 v_norm:4.2416e+00 g_norm:1.0281e+00 hv_norm:6.1232e-01 cos_v_hv:6.8453e-03 hg_norm:3.1697e+01 cos_g_hg:6.0846e-01 v_par:2.5046e-03 v_perp:4.2416e+00 L1_cos_v_neg_g:4.0523e-03 L1_v_norm:1.1021e+00 L2_cos_v_neg_g:3.3346e-03 L2_v_norm:9.9490e-01 L3_cos_v_neg_g:6.4917e-03 L3_v_norm:1.0623e+00 L4_cos_v_neg_g:3.4422e-03 L4_v_norm:1.1579e+00 L5_cos_v_neg_g:3.3360e-03 L5_v_norm:1.1923e+00 L6_cos_v_neg_g:4.0959e-03 L6_v_norm:1.1993e+00 L7_cos_v_neg_g:4.7920e-03 L7_v_norm:1.1971e+00 L8_cos_v_neg_g:3.5204e-03 L8_v_norm:1.2033e+00 L9_cos_v_neg_g:3.6310e-03 L9_v_norm:1.2034e+00 L10_cos_v_neg_g:3.1161e-03 L10_v_norm:1.2072e+00 L11_cos_v_neg_g:2.6730e-03 L11_v_norm:1.2048e+00 L12_cos_v_neg_g:2.4411e-03 L12_v_norm:1.2062e+00 +step:4000 train loss:3.515319 +step:4001 train loss:3.590908 +step:4002 train loss:3.570669 +step:4003 train loss:3.605346 +step:4004 train loss:3.513084 +step:4005 train loss:3.606688 +step:4006 train loss:3.613904 +step:4007 train loss:3.537465 +step:4008 train loss:3.493312 +step:4009 train loss:3.576653 +step:4010 train loss:3.552428 +step:4011 train loss:3.561215 +step:4012 train loss:3.574552 +step:4013 train loss:3.552722 +step:4014 train loss:3.563718 +step:4015 train loss:3.554290 +step:4016 train loss:3.564832 +step:4017 train loss:3.532610 +step:4018 train loss:3.474499 +step:4019 train loss:3.524666 +step:4020 train loss:3.589163 +step:4021 train loss:3.536925 +step:4022 train loss:3.538393 +step:4023 train loss:3.550467 +step:4024 train loss:3.469268 +step:4025 train loss:3.585772 +step:4026 train loss:3.577642 +step:4027 train loss:3.587843 +step:4028 train loss:3.599869 +step:4029 train loss:3.635565 +step:4030 train loss:3.551149 +step:4031 train loss:3.587092 +step:4032 train loss:3.547178 +step:4033 train loss:3.583512 +step:4034 
train loss:3.595174 +step:4035 train loss:3.575576 +step:4036 train loss:3.571768 +step:4037 train loss:3.591543 +step:4038 train loss:3.509810 +step:4039 train loss:3.564723 +step:4040 train loss:3.546875 +step:4041 train loss:3.537654 +step:4042 train loss:3.557487 +step:4043 train loss:3.542898 +step:4044 train loss:3.576716 +step:4045 train loss:3.581793 +step:4046 train loss:3.537657 +step:4047 train loss:3.565568 +step:4048 train loss:3.576449 +step:4049 train loss:3.544359 +step:4050 train loss:3.643423 +step:4051 train loss:3.552687 +step:4052 train loss:3.578110 +step:4053 train loss:3.625631 +step:4054 train loss:3.597877 +step:4055 train loss:3.614752 +step:4056 train loss:3.611676 +step:4057 train loss:3.546206 +step:4058 train loss:3.527685 +step:4059 train loss:3.612809 +step:4060 train loss:3.555730 +step:4061 train loss:3.522405 +step:4062 train loss:3.637133 +step:4063 train loss:3.587495 +step:4064 train loss:3.556520 +step:4065 train loss:3.539329 +step:4066 train loss:3.566449 +step:4067 train loss:3.594578 +step:4068 train loss:3.557280 +step:4069 train loss:3.617018 +step:4070 train loss:3.535143 +step:4071 train loss:3.509324 +step:4072 train loss:3.584059 +step:4073 train loss:3.518972 +step:4074 train loss:3.570590 +step:4075 train loss:3.635615 +step:4076 train loss:3.494688 +step:4077 train loss:3.571462 +step:4078 train loss:3.670701 +step:4079 train loss:3.614100 +step:4080 train loss:3.558311 +step:4081 train loss:3.529461 +step:4082 train loss:3.579376 +step:4083 train loss:3.516893 +step:4084 train loss:3.534086 +step:4085 train loss:3.779069 +step:4086 train loss:3.539702 +step:4087 train loss:3.581424 +step:4088 train loss:3.569168 +step:4089 train loss:3.555279 +step:4090 train loss:3.578254 +step:4091 train loss:3.605533 +step:4092 train loss:3.523273 +step:4093 train loss:3.556406 +step:4094 train loss:3.572270 +step:4095 train loss:3.530611 +step:4096 train loss:3.561545 +step:4097 train loss:3.565609 +step:4098 train loss:3.538301 +step:4099 train loss:3.538729 +step:4100 train loss:3.591440 +step:4101 train loss:3.515176 +step:4102 train loss:3.554402 +step:4103 train loss:3.763012 +step:4104 train loss:3.569378 +step:4105 train loss:3.539440 +step:4106 train loss:3.606324 +step:4107 train loss:3.531217 +step:4108 train loss:3.532341 +step:4109 train loss:3.586596 +step:4110 train loss:3.597187 +step:4111 train loss:3.570951 +step:4112 train loss:3.590304 +step:4113 train loss:3.548725 +step:4114 train loss:3.497677 +step:4115 train loss:3.533060 +step:4116 train loss:3.518864 +step:4117 train loss:3.538873 +step:4118 train loss:3.590520 +step:4119 train loss:3.616093 +step:4120 train loss:3.536589 +step:4121 train loss:3.529524 +step:4122 train loss:3.599832 +step:4123 train loss:3.609762 +step:4124 train loss:3.588022 +step:4125 train loss:3.627897 +step:4126 train loss:3.560094 +step:4127 train loss:3.579029 +step:4128 train loss:3.571696 +step:4129 train loss:3.616339 +step:4130 train loss:3.548816 +step:4131 train loss:3.580688 +step:4132 train loss:3.598903 +step:4133 train loss:3.547901 +step:4134 train loss:3.603336 +step:4135 train loss:3.537130 +step:4136 train loss:3.560775 +step:4137 train loss:3.531367 +step:4138 train loss:3.539553 +step:4139 train loss:3.590430 +step:4140 train loss:3.543499 +step:4141 train loss:3.504145 +step:4142 train loss:3.550529 +step:4143 train loss:3.587281 +step:4144 train loss:3.539995 +step:4145 train loss:3.505227 +step:4146 train loss:3.576891 +step:4147 train loss:3.550788 +step:4148 train loss:3.544171 
+step:4149 train loss:3.621759 +step:4150 train loss:3.586890 +step:4151 train loss:3.568468 +step:4152 train loss:3.589245 +step:4153 train loss:3.598104 +step:4154 train loss:3.604639 +step:4155 train loss:3.629808 +step:4156 train loss:3.503623 +step:4157 train loss:3.526507 +step:4158 train loss:3.582134 +step:4159 train loss:3.489515 +step:4160 train loss:3.576084 +step:4161 train loss:3.574225 +step:4162 train loss:3.485937 +step:4163 train loss:3.567020 +step:4164 train loss:3.515019 +step:4165 train loss:3.518561 +step:4166 train loss:3.584170 +step:4167 train loss:3.573764 +step:4168 train loss:3.568664 +step:4169 train loss:3.603312 +step:4170 train loss:3.718737 +step:4171 train loss:3.565517 +step:4172 train loss:3.585179 +step:4173 train loss:3.585563 +step:4174 train loss:3.546551 +step:4175 train loss:3.634684 +step:4176 train loss:3.553974 +step:4177 train loss:3.579419 +step:4178 train loss:3.557281 +step:4179 train loss:3.515169 +step:4180 train loss:3.510568 +step:4181 train loss:3.560285 +step:4182 train loss:3.547974 +step:4183 train loss:3.481771 +step:4184 train loss:3.551751 +step:4185 train loss:3.620670 +step:4186 train loss:3.595579 +step:4187 train loss:3.602783 +step:4188 train loss:3.579690 +step:4189 train loss:3.540081 +step:4190 train loss:3.578049 +step:4191 train loss:3.530792 +step:4192 train loss:3.622489 +step:4193 train loss:3.524836 +step:4194 train loss:3.512135 +step:4195 train loss:3.507846 +step:4196 train loss:3.574346 +step:4197 train loss:3.591908 +step:4198 train loss:3.514754 +step:4199 train loss:3.597496 +step:4200 train loss:3.557356 +step:4201 train loss:3.537934 +step:4202 train loss:3.553715 +step:4203 train loss:3.564502 +step:4204 train loss:3.558176 +step:4205 train loss:3.571094 +step:4206 train loss:3.587052 +step:4207 train loss:3.589909 +step:4208 train loss:3.551957 +step:4209 train loss:3.619427 +step:4210 train loss:3.648410 +step:4211 train loss:3.527356 +step:4212 train loss:3.574321 +step:4213 train loss:3.522659 +step:4214 train loss:3.532145 +step:4215 train loss:3.543655 +step:4216 train loss:3.517369 +step:4217 train loss:3.544827 +step:4218 train loss:3.582516 +step:4219 train loss:3.590692 +step:4220 train loss:3.658531 +step:4221 train loss:3.544423 +step:4222 train loss:3.608727 +step:4223 train loss:3.527278 +step:4224 train loss:3.600395 +step:4225 train loss:3.530514 +step:4226 train loss:3.590664 +step:4227 train loss:3.562248 +step:4228 train loss:3.538979 +step:4229 train loss:3.546945 +step:4230 train loss:3.528973 +step:4231 train loss:3.520021 +step:4232 train loss:3.570202 +step:4233 train loss:3.476599 +step:4234 train loss:3.559428 +step:4235 train loss:3.638920 +step:4236 train loss:3.606492 +step:4237 train loss:3.585037 +step:4238 train loss:3.597633 +step:4239 train loss:3.649574 +step:4240 train loss:3.556863 +step:4241 train loss:3.481049 +step:4242 train loss:3.605972 +step:4243 train loss:3.599612 +step:4244 train loss:3.615209 +step:4245 train loss:3.668175 +step:4246 train loss:3.545014 +step:4247 train loss:3.603988 +step:4248 train loss:3.551718 +step:4249 train loss:3.559442 +step:4250 validation loss:3.480431 +step:4250 train loss:3.538180 +step:4251 train loss:3.635341 +step:4252 train loss:3.544615 +step:4253 train loss:3.538524 +step:4254 train loss:3.547087 +step:4255 train loss:3.530582 +step:4256 train loss:3.546239 +step:4257 train loss:3.601522 +step:4258 train loss:3.465525 +step:4259 train loss:3.528466 +step:4260 train loss:3.594193 +step:4261 train loss:3.577370 +step:4262 
train loss:3.721097 +step:4263 train loss:3.648072 +step:4264 train loss:3.587531 +step:4265 train loss:3.587421 +step:4266 train loss:3.579881 +step:4267 train loss:3.579418 +step:4268 train loss:3.529509 +step:4269 train loss:3.622019 +step:4270 train loss:3.600582 +step:4271 train loss:3.516938 +step:4272 train loss:3.567626 +step:4273 train loss:3.543479 +step:4274 train loss:3.534026 +step:4275 train loss:3.553523 +step:4276 train loss:3.520106 +step:4277 train loss:3.658387 +step:4278 train loss:3.507903 +step:4279 train loss:3.531206 +step:4280 train loss:3.613628 +step:4281 train loss:3.600880 +step:4282 train loss:3.666656 +step:4283 train loss:3.521196 +step:4284 train loss:3.548498 +step:4285 train loss:3.550543 +step:4286 train loss:3.614399 +step:4287 train loss:3.614221 +step:4288 train loss:3.593212 +step:4289 train loss:3.547884 +step:4290 train loss:3.555502 +step:4291 train loss:3.512718 +step:4292 train loss:3.558882 +step:4293 train loss:3.573470 +step:4294 train loss:3.557731 +step:4295 train loss:3.494286 +step:4296 train loss:3.565931 +step:4297 train loss:3.547308 +step:4298 train loss:3.558653 +step:4299 train loss:3.554899 +step:4300 train loss:3.673164 +step:4301 train loss:3.491282 +step:4302 train loss:3.628718 +step:4303 train loss:3.509224 +step:4304 train loss:3.515038 +step:4305 train loss:3.533088 +step:4306 train loss:3.606555 +step:4307 train loss:3.521291 +step:4308 train loss:3.521178 +step:4309 train loss:3.588634 +step:4310 train loss:3.527296 +step:4311 train loss:3.586877 +step:4312 train loss:3.578283 +step:4313 train loss:3.573341 +step:4314 train loss:3.521515 +step:4315 train loss:3.549597 +step:4316 train loss:3.501545 +step:4317 train loss:3.553833 +step:4318 train loss:3.595418 +step:4319 train loss:3.546839 +step:4320 train loss:3.607438 +step:4321 train loss:3.589929 +step:4322 train loss:3.542560 +step:4323 train loss:3.480044 +step:4324 train loss:3.572911 +step:4325 train loss:3.551023 +step:4326 train loss:3.545161 +step:4327 train loss:3.649858 +step:4328 train loss:3.560138 +step:4329 train loss:3.511041 +step:4330 train loss:3.560015 +step:4331 train loss:3.571566 +step:4332 train loss:3.601490 +step:4333 train loss:3.563614 +step:4334 train loss:3.574368 +step:4335 train loss:3.574139 +step:4336 train loss:3.589876 +step:4337 train loss:3.551542 +step:4338 train loss:3.669034 +step:4339 train loss:3.576578 +step:4340 train loss:3.580456 +step:4341 train loss:3.551697 +step:4342 train loss:3.569620 +step:4343 train loss:3.685275 +step:4344 train loss:3.577169 +step:4345 train loss:3.590797 +step:4346 train loss:3.604930 +step:4347 train loss:3.613174 +step:4348 train loss:3.527855 +step:4349 train loss:3.611222 +step:4350 train loss:3.555703 +step:4351 train loss:3.504940 +step:4352 train loss:3.578912 +step:4353 train loss:3.524757 +step:4354 train loss:3.580952 +step:4355 train loss:3.541337 +step:4356 train loss:3.564628 +step:4357 train loss:3.550418 +step:4358 train loss:3.644013 +step:4359 train loss:3.595298 +step:4360 train loss:3.511133 +step:4361 train loss:3.562588 +step:4362 train loss:3.581984 +step:4363 train loss:3.597962 +step:4364 train loss:3.560789 +step:4365 train loss:3.543917 +step:4366 train loss:3.592447 +step:4367 train loss:3.608055 +step:4368 train loss:3.583289 +step:4369 train loss:3.450756 +step:4370 train loss:3.578594 +step:4371 train loss:3.487344 +step:4372 train loss:3.638869 +step:4373 train loss:3.572691 +step:4374 train loss:3.545094 +step:4375 train loss:3.590127 +step:4376 train loss:3.599645 
+step:4377 train loss:3.533703 +step:4378 train loss:3.548206 +step:4379 train loss:3.629172 +step:4380 train loss:3.608563 +step:4381 train loss:3.514004 +step:4382 train loss:3.557886 +step:4383 train loss:3.585010 +step:4384 train loss:3.581323 +step:4385 train loss:3.507076 +step:4386 train loss:3.564189 +step:4387 train loss:3.533806 +step:4388 train loss:3.554596 +step:4389 train loss:3.580569 +step:4390 train loss:3.625478 +step:4391 train loss:3.547081 +step:4392 train loss:3.620989 +step:4393 train loss:3.578609 +step:4394 train loss:3.520067 +step:4395 train loss:3.574831 +step:4396 train loss:3.551149 +step:4397 train loss:3.592733 +step:4398 train loss:3.539139 +step:4399 train loss:3.530281 +step:4400 train loss:3.537756 +step:4401 train loss:3.598091 +step:4402 train loss:3.592975 +step:4403 train loss:3.546302 +step:4404 train loss:3.577013 +step:4405 train loss:3.500613 +step:4406 train loss:3.576501 +step:4407 train loss:3.515205 +step:4408 train loss:3.604875 +step:4409 train loss:3.567500 +step:4410 train loss:3.571984 +step:4411 train loss:3.530855 +step:4412 train loss:3.649887 +step:4413 train loss:3.542576 +step:4414 train loss:3.549148 +step:4415 train loss:3.536191 +step:4416 train loss:3.526741 +step:4417 train loss:3.521717 +step:4418 train loss:3.594615 +step:4419 train loss:3.560600 +step:4420 train loss:3.576138 +step:4421 train loss:3.596195 +step:4422 train loss:3.612308 +step:4423 train loss:3.574419 +step:4424 train loss:3.555871 +step:4425 train loss:3.517411 +step:4426 train loss:3.594468 +step:4427 train loss:3.556496 +step:4428 train loss:3.494778 +step:4429 train loss:3.553916 +step:4430 train loss:3.594528 +step:4431 train loss:3.591561 +step:4432 train loss:3.490947 +step:4433 train loss:3.547585 +step:4434 train loss:3.544709 +step:4435 train loss:3.575728 +step:4436 train loss:3.511934 +step:4437 train loss:3.589273 +step:4438 train loss:3.557480 +step:4439 train loss:3.563345 +step:4440 train loss:3.563011 +step:4441 train loss:3.562963 +step:4442 train loss:3.613526 +step:4443 train loss:3.551918 +step:4444 train loss:3.633629 +step:4445 train loss:3.598556 +step:4446 train loss:3.529114 +step:4447 train loss:3.573737 +step:4448 train loss:3.596579 +step:4449 train loss:3.531243 +step:4450 train loss:3.550256 +step:4451 train loss:3.601857 +step:4452 train loss:3.660706 +step:4453 train loss:3.589820 +step:4454 train loss:3.563691 +step:4455 train loss:3.608909 +step:4456 train loss:3.550798 +step:4457 train loss:3.550982 +step:4458 train loss:3.564796 +step:4459 train loss:3.599123 +step:4460 train loss:3.507616 +step:4461 train loss:3.480035 +step:4462 train loss:3.540504 +step:4463 train loss:3.556575 +step:4464 train loss:3.526345 +step:4465 train loss:3.564270 +step:4466 train loss:3.660017 +step:4467 train loss:3.537224 +step:4468 train loss:3.534831 +step:4469 train loss:3.523331 +step:4470 train loss:3.502736 +step:4471 train loss:3.562873 +step:4472 train loss:3.491847 +step:4473 train loss:3.575028 +step:4474 train loss:3.597745 +step:4475 train loss:3.561034 +step:4476 train loss:3.520125 +step:4477 train loss:3.505833 +step:4478 train loss:3.566393 +step:4479 train loss:3.666577 +step:4480 train loss:3.502437 +step:4481 train loss:3.576668 +step:4482 train loss:3.537493 +step:4483 train loss:3.533739 +step:4484 train loss:3.587349 +step:4485 train loss:3.541480 +step:4486 train loss:3.642974 +step:4487 train loss:3.538487 +step:4488 train loss:3.535404 +step:4489 train loss:3.489321 +step:4490 train loss:3.575228 +step:4491 train 
loss:3.524284 +step:4492 train loss:3.554236 +step:4493 train loss:3.543095 +step:4494 train loss:3.540818 +step:4495 train loss:3.602302 +step:4496 train loss:3.545730 +step:4497 train loss:3.631353 +step:4498 train loss:3.522032 +step:4499 train loss:3.570237 +step:4500 validation loss:3.470874 total_sharp:8.8887e-04 L1_sharp:3.2347e-03 L2_sharp:1.0392e-04 L3_sharp:1.5338e-04 L4_sharp:7.5964e-05 L5_sharp:8.0496e-05 L6_sharp:1.3381e-04 L7_sharp:1.4751e-04 L8_sharp:8.9218e-05 L9_sharp:7.8580e-05 L10_sharp:5.8740e-05 L11_sharp:6.1020e-05 L12_sharp:6.9616e-05 total_fnorm:4.2365e+00 total_l1_linf:3.4614e+04 total_spectral:4.2365e+00 L1_fnorm:1.0966e+00 L2_fnorm:9.7193e-01 L3_fnorm:1.0725e+00 L4_fnorm:1.1552e+00 L5_fnorm:1.1897e+00 L6_fnorm:1.1962e+00 L7_fnorm:1.1956e+00 L8_fnorm:1.2031e+00 L9_fnorm:1.2036e+00 L10_fnorm:1.2067e+00 L11_fnorm:1.2019e+00 L12_fnorm:1.2036e+00 L1_l1linf:8.8834e-01 L2_l1linf:7.9819e-01 L3_l1linf:8.0571e-01 L4_l1linf:8.3502e-01 L5_l1linf:8.3564e-01 L6_l1linf:8.2536e-01 L7_l1linf:8.2675e-01 L8_l1linf:8.1938e-01 L9_l1linf:8.1574e-01 L10_l1linf:8.1155e-01 L11_l1linf:7.9426e-01 L12_l1linf:7.9233e-01 L1_spectral:2.4107e-02 L2_spectral:2.4069e-02 L3_spectral:2.4083e-02 L4_spectral:2.4094e-02 L5_spectral:2.4108e-02 L6_spectral:2.4095e-02 L7_spectral:2.4093e-02 L8_spectral:2.4094e-02 L9_spectral:2.4086e-02 L10_spectral:2.4083e-02 L11_spectral:2.4091e-02 L12_spectral:2.4094e-02 ip_v_neg_g:9.0755e-03 cos_v_neg_g:2.1836e-03 v_norm:4.2365e+00 g_norm:9.8107e-01 hv_norm:5.6440e-01 cos_v_hv:6.6720e-03 hg_norm:2.3601e+01 cos_g_hg:6.3253e-01 v_par:1.8791e-03 v_perp:4.2365e+00 L1_cos_v_neg_g:3.5706e-03 L1_v_norm:1.0966e+00 L2_cos_v_neg_g:4.6175e-03 L2_v_norm:9.7193e-01 L3_cos_v_neg_g:5.3844e-03 L3_v_norm:1.0725e+00 L4_cos_v_neg_g:3.8445e-03 L4_v_norm:1.1552e+00 L5_cos_v_neg_g:3.6409e-03 L5_v_norm:1.1897e+00 L6_cos_v_neg_g:5.3859e-03 L6_v_norm:1.1962e+00 L7_cos_v_neg_g:4.0799e-03 L7_v_norm:1.1956e+00 L8_cos_v_neg_g:3.7290e-03 L8_v_norm:1.2031e+00 L9_cos_v_neg_g:4.2321e-03 L9_v_norm:1.2036e+00 L10_cos_v_neg_g:3.8626e-03 L10_v_norm:1.2067e+00 L11_cos_v_neg_g:4.1576e-03 L11_v_norm:1.2019e+00 L12_cos_v_neg_g:4.0115e-03 L12_v_norm:1.2036e+00 +step:4500 train loss:3.479468 +step:4501 train loss:3.541214 +step:4502 train loss:3.663106 +step:4503 train loss:3.566932 +step:4504 train loss:3.578923 +step:4505 train loss:3.554807 +step:4506 train loss:3.530051 +step:4507 train loss:3.609237 +step:4508 train loss:3.545285 +step:4509 train loss:3.539264 +step:4510 train loss:3.574215 +step:4511 train loss:3.524922 +step:4512 train loss:3.551342 +step:4513 train loss:3.606425 +step:4514 train loss:3.514291 +step:4515 train loss:3.633324 +step:4516 train loss:3.604698 +step:4517 train loss:3.558990 +step:4518 train loss:3.496394 +step:4519 train loss:3.536602 +step:4520 train loss:3.547366 +step:4521 train loss:3.489633 +step:4522 train loss:3.546431 +step:4523 train loss:3.587193 +step:4524 train loss:3.574195 +step:4525 train loss:3.494153 +step:4526 train loss:3.538591 +step:4527 train loss:3.526032 +step:4528 train loss:3.558052 +step:4529 train loss:3.551788 +step:4530 train loss:3.646718 +step:4531 train loss:3.536209 +step:4532 train loss:3.558449 +step:4533 train loss:3.529917 +step:4534 train loss:3.624549 +step:4535 train loss:3.521250 +step:4536 train loss:3.593229 +step:4537 train loss:3.579955 +step:4538 train loss:3.554000 +step:4539 train loss:3.575738 +step:4540 train loss:3.549397 +step:4541 train loss:3.522512 +step:4542 train loss:3.566902 +step:4543 train loss:3.658667 +step:4544 
train loss:3.598205 +step:4545 train loss:3.538354 +step:4546 train loss:3.630857 +step:4547 train loss:3.588379 +step:4548 train loss:3.593874 +step:4549 train loss:3.550295 +step:4550 train loss:3.520721 +step:4551 train loss:3.535764 +step:4552 train loss:3.535634 +step:4553 train loss:3.623330 +step:4554 train loss:3.515464 +step:4555 train loss:3.625823 +step:4556 train loss:3.561041 +step:4557 train loss:3.493315 +step:4558 train loss:3.577516 +step:4559 train loss:3.585200 +step:4560 train loss:3.523035 +step:4561 train loss:3.513671 +step:4562 train loss:3.554629 +step:4563 train loss:3.506999 +step:4564 train loss:3.535972 +step:4565 train loss:3.532538 +step:4566 train loss:3.506085 +step:4567 train loss:3.533958 +step:4568 train loss:3.536559 +step:4569 train loss:3.518258 +step:4570 train loss:3.569003 +step:4571 train loss:3.548480 +step:4572 train loss:3.540028 +step:4573 train loss:3.547376 +step:4574 train loss:3.697682 +step:4575 train loss:3.523846 +step:4576 train loss:3.518912 +step:4577 train loss:3.553101 +step:4578 train loss:3.594016 +step:4579 train loss:3.545735 +step:4580 train loss:3.605030 +step:4581 train loss:3.542595 +step:4582 train loss:3.537107 +step:4583 train loss:3.541290 +step:4584 train loss:3.517685 +step:4585 train loss:3.597303 +step:4586 train loss:3.583575 +step:4587 train loss:3.485690 +step:4588 train loss:3.527367 +step:4589 train loss:3.598938 +step:4590 train loss:3.570965 +step:4591 train loss:3.509211 +step:4592 train loss:3.599905 +step:4593 train loss:3.517319 +step:4594 train loss:3.550148 +step:4595 train loss:3.571499 +step:4596 train loss:3.508250 +step:4597 train loss:3.643756 +step:4598 train loss:3.562831 +step:4599 train loss:3.521305 +step:4600 train loss:3.526814 +step:4601 train loss:3.548705 +step:4602 train loss:3.500327 +step:4603 train loss:3.510961 +step:4604 train loss:3.619334 +step:4605 train loss:3.536915 +step:4606 train loss:3.564847 +step:4607 train loss:3.545885 +step:4608 train loss:3.577134 +step:4609 train loss:3.537598 +step:4610 train loss:3.579694 +step:4611 train loss:3.607739 +step:4612 train loss:3.604032 +step:4613 train loss:3.586529 +step:4614 train loss:3.579255 +step:4615 train loss:3.521082 +step:4616 train loss:3.505786 +step:4617 train loss:3.547420 +step:4618 train loss:3.566674 +step:4619 train loss:3.522272 +step:4620 train loss:3.542995 +step:4621 train loss:3.539986 +step:4622 train loss:3.482890 +step:4623 train loss:3.588719 +step:4624 train loss:3.576062 +step:4625 train loss:3.533103 +step:4626 train loss:3.579847 +step:4627 train loss:3.546960 +step:4628 train loss:3.533936 +step:4629 train loss:3.574100 +step:4630 train loss:3.634661 +step:4631 train loss:3.634936 +step:4632 train loss:3.529903 +step:4633 train loss:3.537538 +step:4634 train loss:3.611523 +step:4635 train loss:3.577447 +step:4636 train loss:3.596394 +step:4637 train loss:3.527960 +step:4638 train loss:3.536721 +step:4639 train loss:3.529628 +step:4640 train loss:3.538265 +step:4641 train loss:3.543720 +step:4642 train loss:3.577506 +step:4643 train loss:3.537824 +step:4644 train loss:3.559127 +step:4645 train loss:3.575655 +step:4646 train loss:3.530917 +step:4647 train loss:3.515069 +step:4648 train loss:3.599969 +step:4649 train loss:3.608434 +step:4650 train loss:3.556605 +step:4651 train loss:3.559342 +step:4652 train loss:3.546975 +step:4653 train loss:3.605535 +step:4654 train loss:3.601702 +step:4655 train loss:3.505937 +step:4656 train loss:3.535506 +step:4657 train loss:3.588890 +step:4658 train loss:3.544291 
+step:4659 train loss:3.558761 +step:4660 train loss:3.603842 +step:4661 train loss:3.519935 +step:4662 train loss:3.536057 +step:4663 train loss:3.560403 +step:4664 train loss:3.597486 +step:4665 train loss:3.596095 +step:4666 train loss:3.591472 +step:4667 train loss:3.586847 +step:4668 train loss:3.545156 +step:4669 train loss:3.556611 +step:4670 train loss:3.586978 +step:4671 train loss:3.578819 +step:4672 train loss:3.460423 +step:4673 train loss:3.497534 +step:4674 train loss:3.624233 +step:4675 train loss:3.532146 +step:4676 train loss:3.490453 +step:4677 train loss:3.495861 +step:4678 train loss:3.467629 +step:4679 train loss:3.564404 +step:4680 train loss:3.507938 +step:4681 train loss:3.558080 +step:4682 train loss:3.505997 +step:4683 train loss:3.478617 +step:4684 train loss:3.603544 +step:4685 train loss:3.531012 +step:4686 train loss:3.545232 +step:4687 train loss:3.580640 +step:4688 train loss:3.510189 +step:4689 train loss:3.583123 +step:4690 train loss:3.527352 +step:4691 train loss:3.560192 +step:4692 train loss:3.493828 +step:4693 train loss:3.526092 +step:4694 train loss:3.570397 +step:4695 train loss:3.590336 +step:4696 train loss:3.573103 +step:4697 train loss:3.489637 +step:4698 train loss:3.506735 +step:4699 train loss:3.556726 +step:4700 train loss:3.528488 +step:4701 train loss:3.535147 +step:4702 train loss:3.490968 +step:4703 train loss:3.573172 +step:4704 train loss:3.559687 +step:4705 train loss:3.501222 +step:4706 train loss:3.509024 +step:4707 train loss:3.496580 +step:4708 train loss:3.562901 +step:4709 train loss:3.510367 +step:4710 train loss:3.520566 +step:4711 train loss:3.589060 +step:4712 train loss:3.482110 +step:4713 train loss:3.588610 +step:4714 train loss:3.488292 +step:4715 train loss:3.576598 +step:4716 train loss:3.545547 +step:4717 train loss:3.473291 +step:4718 train loss:3.569174 +step:4719 train loss:3.491024 +step:4720 train loss:3.591612 +step:4721 train loss:3.544078 +step:4722 train loss:3.601046 +step:4723 train loss:3.497458 +step:4724 train loss:3.547282 +step:4725 train loss:3.484774 +step:4726 train loss:3.528535 +step:4727 train loss:3.538170 +step:4728 train loss:3.539252 +step:4729 train loss:3.573514 +step:4730 train loss:3.470788 +step:4731 train loss:3.532232 +step:4732 train loss:3.489331 +step:4733 train loss:3.420434 +step:4734 train loss:3.559694 +step:4735 train loss:3.508061 +step:4736 train loss:3.553842 +step:4737 train loss:3.433595 +step:4738 train loss:3.580679 +step:4739 train loss:3.460916 +step:4740 train loss:3.568465 +step:4741 train loss:3.534865 +step:4742 train loss:3.498131 +step:4743 train loss:3.497061 +step:4744 train loss:3.539763 +step:4745 train loss:3.562893 +step:4746 train loss:3.594861 +step:4747 train loss:3.561384 +step:4748 train loss:3.458028 +step:4749 train loss:3.528555 +step:4750 validation loss:3.461462 +step:4750 train loss:3.475483 +step:4751 train loss:3.567330 +step:4752 train loss:3.499078 +step:4753 train loss:3.605553 +step:4754 train loss:3.476985 +step:4755 train loss:3.517210 +step:4756 train loss:3.593662 +step:4757 train loss:3.515565 +step:4758 train loss:3.535883 +step:4759 train loss:3.531093 +step:4760 train loss:3.562320 +step:4761 train loss:3.482650 +step:4762 train loss:3.516090 +step:4763 train loss:3.534459 +step:4764 train loss:3.596349 +step:4765 train loss:3.493945 +step:4766 train loss:3.509175 +step:4767 train loss:3.464365 +step:4768 train loss:3.519739 +step:4769 train loss:3.548627 +step:4770 train loss:3.502238 +step:4771 train loss:3.521820 +step:4772 
train loss:3.492620 +step:4773 train loss:3.530521 +step:4774 train loss:3.472808 +step:4775 train loss:3.601835 +step:4776 train loss:3.467556 +step:4777 train loss:3.539870 +step:4778 train loss:3.484383 +step:4779 train loss:3.528695 +step:4780 train loss:3.467294 +step:4781 train loss:3.473334 +step:4782 train loss:3.584396 +step:4783 train loss:3.570408 +step:4784 train loss:3.532569 +step:4785 train loss:3.527435 +step:4786 train loss:3.640230 +step:4787 train loss:3.473555 +step:4788 train loss:3.494996 +step:4789 train loss:3.519221 +step:4790 train loss:3.574354 +step:4791 train loss:3.538330 +step:4792 train loss:3.580732 +step:4793 train loss:3.497308 +step:4794 train loss:3.570928 +step:4795 train loss:3.518718 +step:4796 train loss:3.509794 +step:4797 train loss:3.516262 +step:4798 train loss:3.521400 +step:4799 train loss:3.521708 +step:4800 train loss:3.550790 +step:4801 train loss:3.546769 +step:4802 train loss:3.584138 +step:4803 train loss:3.565278 +step:4804 train loss:3.522030 +step:4805 train loss:3.518590 +step:4806 train loss:3.497192 +step:4807 train loss:3.601901 +step:4808 train loss:3.474627 +step:4809 train loss:3.576449 +step:4810 train loss:3.517027 +step:4811 train loss:3.535403 +step:4812 train loss:3.511814 +step:4813 train loss:3.466357 +step:4814 train loss:3.465158 +step:4815 train loss:3.455329 +step:4816 train loss:3.526624 +step:4817 train loss:3.459827 +step:4818 train loss:3.528087 +step:4819 train loss:3.519792 +step:4820 train loss:3.776227 +step:4821 train loss:3.549523 +step:4822 train loss:3.555224 +step:4823 train loss:3.491160 +step:4824 train loss:3.496588 +step:4825 train loss:3.477877 +step:4826 train loss:3.563919 +step:4827 train loss:3.511335 +step:4828 train loss:3.452493 +step:4829 train loss:3.558663 +step:4830 train loss:3.496327 +step:4831 train loss:3.646713 +step:4832 train loss:3.513641 +step:4833 train loss:3.551517 +step:4834 train loss:3.455783 +step:4835 train loss:3.544634 +step:4836 train loss:3.526424 +step:4837 train loss:3.553130 +step:4838 train loss:3.493683 +step:4839 train loss:3.557988 +step:4840 train loss:3.467235 +step:4841 train loss:3.561183 +step:4842 train loss:3.477850 +step:4843 train loss:3.554724 +step:4844 train loss:3.559120 +step:4845 train loss:3.493155 +step:4846 train loss:3.511766 +step:4847 train loss:3.494659 +step:4848 train loss:3.520101 +step:4849 train loss:3.472705 +step:4850 train loss:3.483364 +step:4851 train loss:3.475433 +step:4852 train loss:3.557159 +step:4853 train loss:3.528561 +step:4854 train loss:3.509231 +step:4855 train loss:3.571261 +step:4856 train loss:3.544174 +step:4857 train loss:3.547846 +step:4858 train loss:3.630888 +step:4859 train loss:3.475369 +step:4860 train loss:3.554170 +step:4861 train loss:3.524241 +step:4862 train loss:3.559758 +step:4863 train loss:3.495784 +step:4864 train loss:3.505316 +step:4865 train loss:3.502296 +step:4866 train loss:3.542304 +step:4867 train loss:3.514367 +step:4868 train loss:3.528443 +step:4869 train loss:3.482917 +step:4870 train loss:3.508652 +step:4871 train loss:3.596817 +step:4872 train loss:3.538879 +step:4873 train loss:3.539098 +step:4874 train loss:3.506281 +step:4875 train loss:3.478321 +step:4876 train loss:3.486683 +step:4877 train loss:3.491367 +step:4878 train loss:3.526427 +step:4879 train loss:3.492739 +step:4880 train loss:3.511222 +step:4881 train loss:3.466409 +step:4882 train loss:3.663578 +step:4883 train loss:3.475682 +step:4884 train loss:3.503941 +step:4885 train loss:3.478128 +step:4886 train loss:3.554147 
+step:4887 train loss:3.508005 +step:4888 train loss:3.518145 +step:4889 train loss:3.515687 +step:4890 train loss:3.548482 +step:4891 train loss:3.490327 +step:4892 train loss:3.492880 +step:4893 train loss:3.539331 +step:4894 train loss:3.476726 +step:4895 train loss:3.509065 +step:4896 train loss:3.489241 +step:4897 train loss:3.565072 +step:4898 train loss:3.514155 +step:4899 train loss:3.497511 +step:4900 train loss:3.540675 +step:4901 train loss:3.497270 +step:4902 train loss:3.484448 +step:4903 train loss:3.507545 +step:4904 train loss:3.516433 +step:4905 train loss:3.518542 +step:4906 train loss:3.520164 +step:4907 train loss:3.591486 +step:4908 train loss:3.494097 +step:4909 train loss:3.502726 +step:4910 train loss:3.521724 +step:4911 train loss:3.577597 +step:4912 train loss:3.548065 +step:4913 train loss:3.532218 +step:4914 train loss:3.518958 +step:4915 train loss:3.503154 +step:4916 train loss:3.441688 +step:4917 train loss:3.468777 +step:4918 train loss:3.498466 +step:4919 train loss:3.492475 +step:4920 train loss:3.497818 +step:4921 train loss:3.657791 +step:4922 train loss:3.543724 +step:4923 train loss:3.560961 +step:4924 train loss:3.561801 +step:4925 train loss:3.495937 +step:4926 train loss:3.487894 +step:4927 train loss:3.521767 +step:4928 train loss:3.556739 +step:4929 train loss:3.512170 +step:4930 train loss:3.491889 +step:4931 train loss:3.488786 +step:4932 train loss:3.501887 +step:4933 train loss:3.492459 +step:4934 train loss:3.557873 +step:4935 train loss:3.544050 +step:4936 train loss:3.507425 +step:4937 train loss:3.619417 +step:4938 train loss:3.607106 +step:4939 train loss:3.472818 +step:4940 train loss:3.551528 +step:4941 train loss:3.450674 +step:4942 train loss:3.491786 +step:4943 train loss:3.492676 +step:4944 train loss:3.499142 +step:4945 train loss:3.539374 +step:4946 train loss:3.518021 +step:4947 train loss:3.499864 +step:4948 train loss:3.542019 +step:4949 train loss:3.444935 +step:4950 train loss:3.527771 +step:4951 train loss:3.575827 +step:4952 train loss:3.516503 +step:4953 train loss:3.547532 +step:4954 train loss:3.455007 +step:4955 train loss:3.529557 +step:4956 train loss:3.556114 +step:4957 train loss:3.550398 +step:4958 train loss:3.464353 +step:4959 train loss:3.580123 +step:4960 train loss:3.510901 +step:4961 train loss:3.528293 +step:4962 train loss:3.493690 +step:4963 train loss:3.539100 +step:4964 train loss:3.488849 +step:4965 train loss:3.639211 +step:4966 train loss:3.491333 +step:4967 train loss:3.596822 +step:4968 train loss:3.488331 +step:4969 train loss:3.532494 +step:4970 train loss:3.518650 +step:4971 train loss:3.475055 +step:4972 train loss:3.512799 +step:4973 train loss:3.522996 +step:4974 train loss:3.510697 +step:4975 train loss:3.605156 +step:4976 train loss:3.574286 +step:4977 train loss:3.523504 +step:4978 train loss:3.512302 +step:4979 train loss:3.506936 +step:4980 train loss:3.619925 +step:4981 train loss:3.454512 +step:4982 train loss:3.537800 +step:4983 train loss:3.461805 +step:4984 train loss:3.647845 +step:4985 train loss:3.543808 +step:4986 train loss:3.486578 +step:4987 train loss:3.506976 +step:4988 train loss:3.710841 +step:4989 train loss:3.511814 +step:4990 train loss:3.502983 +step:4991 train loss:3.520108 +step:4992 train loss:3.506049 +step:4993 train loss:3.478732 +step:4994 train loss:3.595345 +step:4995 train loss:3.515815 +step:4996 train loss:3.601401 +step:4997 train loss:3.504796 +step:4998 train loss:3.506858 +step:4999 train loss:3.490842 +step:5000 validation loss:3.450073 
total_sharp:1.0090e-03 L1_sharp:4.2237e-03 L2_sharp:1.9369e-04 L3_sharp:2.2000e-04 L4_sharp:9.7600e-05 L5_sharp:9.7085e-05 L6_sharp:1.4786e-04 L7_sharp:1.7565e-04 L8_sharp:1.0464e-04 L9_sharp:7.0456e-05 L10_sharp:5.4477e-05 L11_sharp:5.6001e-05 L12_sharp:4.6796e-05 total_fnorm:4.2252e+00 total_l1_linf:3.4497e+04 total_spectral:4.2252e+00 L1_fnorm:1.0864e+00 L2_fnorm:9.5200e-01 L3_fnorm:1.0733e+00 L4_fnorm:1.1589e+00 L5_fnorm:1.1918e+00 L6_fnorm:1.1953e+00 L7_fnorm:1.1947e+00 L8_fnorm:1.2008e+00 L9_fnorm:1.2000e+00 L10_fnorm:1.2065e+00 L11_fnorm:1.2009e+00 L12_fnorm:1.2047e+00 L1_l1linf:8.9277e-01 L2_l1linf:7.8880e-01 L3_l1linf:8.0487e-01 L4_l1linf:8.3018e-01 L5_l1linf:8.3519e-01 L6_l1linf:8.3523e-01 L7_l1linf:8.1987e-01 L8_l1linf:8.1419e-01 L9_l1linf:8.0960e-01 L10_l1linf:8.1134e-01 L11_l1linf:7.9095e-01 L12_l1linf:7.9183e-01 L1_spectral:2.4106e-02 L2_spectral:2.4075e-02 L3_spectral:2.4088e-02 L4_spectral:2.4101e-02 L5_spectral:2.4125e-02 L6_spectral:2.4096e-02 L7_spectral:2.4092e-02 L8_spectral:2.4082e-02 L9_spectral:2.4087e-02 L10_spectral:2.4090e-02 L11_spectral:2.4101e-02 L12_spectral:2.4088e-02 ip_v_neg_g:9.0230e-03 cos_v_neg_g:1.9794e-03 v_norm:4.2252e+00 g_norm:1.0789e+00 hv_norm:7.3397e-01 cos_v_hv:5.8084e-03 hg_norm:3.9764e+01 cos_g_hg:6.0857e-01 v_par:2.0883e-03 v_perp:4.2252e+00 L1_cos_v_neg_g:3.2714e-03 L1_v_norm:1.0864e+00 L2_cos_v_neg_g:2.9401e-03 L2_v_norm:9.5200e-01 L3_cos_v_neg_g:4.7671e-03 L3_v_norm:1.0733e+00 L4_cos_v_neg_g:3.4636e-03 L4_v_norm:1.1589e+00 L5_cos_v_neg_g:3.7301e-03 L5_v_norm:1.1918e+00 L6_cos_v_neg_g:3.2212e-03 L6_v_norm:1.1953e+00 L7_cos_v_neg_g:3.6253e-03 L7_v_norm:1.1947e+00 L8_cos_v_neg_g:4.2889e-03 L8_v_norm:1.2008e+00 L9_cos_v_neg_g:4.1928e-03 L9_v_norm:1.2000e+00 L10_cos_v_neg_g:5.5530e-03 L10_v_norm:1.2065e+00 L11_cos_v_neg_g:5.3787e-03 L11_v_norm:1.2009e+00 L12_cos_v_neg_g:5.0878e-03 L12_v_norm:1.2047e+00 +step:5000 train loss:3.603752 +step:5001 train loss:3.469405 +step:5002 train loss:3.527042 +step:5003 train loss:3.518374 +step:5004 train loss:3.512598 +step:5005 train loss:3.510072 +step:5006 train loss:3.554727 +step:5007 train loss:3.555497 +step:5008 train loss:3.494826 +step:5009 train loss:3.535150 +step:5010 train loss:3.489026 +step:5011 train loss:3.519452 +step:5012 train loss:3.493509 +step:5013 train loss:3.596765 +step:5014 train loss:3.508661 +step:5015 train loss:3.582743 +step:5016 train loss:3.511089 +step:5017 train loss:3.558043 +step:5018 train loss:3.474870 +step:5019 train loss:3.511006 +step:5020 train loss:3.504829 +step:5021 train loss:3.517581 +step:5022 train loss:3.552263 +step:5023 train loss:3.526610 +step:5024 train loss:3.573083 +step:5025 train loss:3.461662 +step:5026 train loss:3.583795 +step:5027 train loss:3.519300 +step:5028 train loss:3.584467 +step:5029 train loss:3.476686 +step:5030 train loss:3.519111 +step:5031 train loss:3.505898 +step:5032 train loss:3.534427 +step:5033 train loss:3.517003 +step:5034 train loss:3.515136 +step:5035 train loss:3.599940 +step:5036 train loss:3.550138 +step:5037 train loss:3.498797 +step:5038 train loss:3.552017 +step:5039 train loss:3.561424 +step:5040 train loss:3.525490 +step:5041 train loss:3.540793 +step:5042 train loss:3.445908 +step:5043 train loss:3.585829 +step:5044 train loss:3.504356 +step:5045 train loss:3.554220 +step:5046 train loss:3.472995 +step:5047 train loss:3.551699 +step:5048 train loss:3.465602 +step:5049 train loss:3.603936 +step:5050 train loss:3.487480 +step:5051 train loss:3.535480 +step:5052 train loss:3.432682 +step:5053 train loss:3.620140 
+step:5054 train loss:3.503912 +step:5055 train loss:3.530566 +step:5056 train loss:3.561476 +step:5057 train loss:3.493701 +step:5058 train loss:3.521314 +step:5059 train loss:3.489302 +step:5060 train loss:3.533528 +step:5061 train loss:3.524796 +step:5062 train loss:3.499691 +step:5063 train loss:3.490826 +step:5064 train loss:3.500197 +step:5065 train loss:3.483019 +step:5066 train loss:3.548654 +step:5067 train loss:3.527089 +step:5068 train loss:3.511347 +step:5069 train loss:3.485182 +step:5070 train loss:3.511297 +step:5071 train loss:3.584167 +step:5072 train loss:3.473692 +step:5073 train loss:3.482816 +step:5074 train loss:3.431030 +step:5075 train loss:3.500184 +step:5076 train loss:3.434405 +step:5077 train loss:3.495564 +step:5078 train loss:3.512993 +step:5079 train loss:3.531276 +step:5080 train loss:3.508191 +step:5081 train loss:3.520939 +step:5082 train loss:3.511619 +step:5083 train loss:3.566758 +step:5084 train loss:3.544777 +step:5085 train loss:3.509341 +step:5086 train loss:3.583141 +step:5087 train loss:3.569480 +step:5088 train loss:3.486645 +step:5089 train loss:3.554120 +step:5090 train loss:3.499673 +step:5091 train loss:3.500571 +step:5092 train loss:3.600686 +step:5093 train loss:3.482240 +step:5094 train loss:3.480196 +step:5095 train loss:3.531881 +step:5096 train loss:3.502467 +step:5097 train loss:3.508676 +step:5098 train loss:3.511785 +step:5099 train loss:3.475605 +step:5100 train loss:3.492295 +step:5101 train loss:3.683796 +step:5102 train loss:3.526747 +step:5103 train loss:3.535809 +step:5104 train loss:3.584398 +step:5105 train loss:3.518900 +step:5106 train loss:3.476861 +step:5107 train loss:3.499027 +step:5108 train loss:3.488944 +step:5109 train loss:3.570270 +step:5110 train loss:3.478902 +step:5111 train loss:3.576119 +step:5112 train loss:3.482728 +step:5113 train loss:3.469824 +step:5114 train loss:3.511225 +step:5115 train loss:3.476412 +step:5116 train loss:3.531772 +step:5117 train loss:3.477598 +step:5118 train loss:3.501511 +step:5119 train loss:3.484956 +step:5120 train loss:3.530552 +step:5121 train loss:3.478236 +step:5122 train loss:3.490111 +step:5123 train loss:3.476984 +step:5124 train loss:3.435698 +step:5125 train loss:3.546237 +step:5126 train loss:3.531827 +step:5127 train loss:3.533928 +step:5128 train loss:3.546985 +step:5129 train loss:3.474982 +step:5130 train loss:3.486247 +step:5131 train loss:3.428455 +step:5132 train loss:3.543769 +step:5133 train loss:3.513359 +step:5134 train loss:3.515564 +step:5135 train loss:3.471672 +step:5136 train loss:3.537590 +step:5137 train loss:3.534147 +step:5138 train loss:3.515230 +step:5139 train loss:3.548109 +step:5140 train loss:3.522689 +step:5141 train loss:3.552076 +step:5142 train loss:3.503168 +step:5143 train loss:3.527067 +step:5144 train loss:3.528788 +step:5145 train loss:3.468956 +step:5146 train loss:3.461586 +step:5147 train loss:3.538767 +step:5148 train loss:3.467023 +step:5149 train loss:3.540762 +step:5150 train loss:3.516327 +step:5151 train loss:3.484293 +step:5152 train loss:3.526889 +step:5153 train loss:3.502784 +step:5154 train loss:3.512297 +step:5155 train loss:3.522291 +step:5156 train loss:3.494260 +step:5157 train loss:3.502073 +step:5158 train loss:3.516783 +step:5159 train loss:3.558090 +step:5160 train loss:3.625718 +step:5161 train loss:3.550537 +step:5162 train loss:3.571277 +step:5163 train loss:3.484608 +step:5164 train loss:3.551617 +step:5165 train loss:3.561152 +step:5166 train loss:3.504606 +step:5167 train loss:3.598141 +step:5168 train 
loss:3.516970 +step:5169 train loss:3.545535 +step:5170 train loss:3.527396 +step:5171 train loss:3.569886 +step:5172 train loss:3.488985 +step:5173 train loss:3.555341 +step:5174 train loss:3.487114 +step:5175 train loss:3.523917 +step:5176 train loss:3.511043 +step:5177 train loss:3.511445 +step:5178 train loss:3.576414 +step:5179 train loss:3.486499 +step:5180 train loss:3.567046 +step:5181 train loss:3.512047 +step:5182 train loss:3.570221 +step:5183 train loss:3.497828 +step:5184 train loss:3.478663 +step:5185 train loss:3.500971 +step:5186 train loss:3.561373 +step:5187 train loss:3.554046 +step:5188 train loss:3.484854 +step:5189 train loss:3.531661 +step:5190 train loss:3.511920 +step:5191 train loss:3.496476 +step:5192 train loss:3.476859 +step:5193 train loss:3.566873 +step:5194 train loss:3.514304 +step:5195 train loss:3.485889 +step:5196 train loss:3.557434 +step:5197 train loss:3.606011 +step:5198 train loss:3.517671 +step:5199 train loss:3.501133 +step:5200 train loss:3.529277 +step:5201 train loss:3.515348 +step:5202 train loss:3.528097 +step:5203 train loss:3.522831 +step:5204 train loss:3.497731 +step:5205 train loss:3.541521 +step:5206 train loss:3.478666 +step:5207 train loss:3.482347 +step:5208 train loss:3.544866 +step:5209 train loss:3.560251 +step:5210 train loss:3.467085 +step:5211 train loss:3.509703 +step:5212 train loss:3.527107 +step:5213 train loss:3.499911 +step:5214 train loss:3.548348 +step:5215 train loss:3.663851 +step:5216 train loss:3.513536 +step:5217 train loss:3.493612 +step:5218 train loss:3.495644 +step:5219 train loss:3.562121 +step:5220 train loss:3.477595 +step:5221 train loss:3.478118 +step:5222 train loss:3.560774 +step:5223 train loss:3.551725 +step:5224 train loss:3.451372 +step:5225 train loss:3.599571 +step:5226 train loss:3.512102 +step:5227 train loss:3.585162 +step:5228 train loss:3.555173 +step:5229 train loss:3.496850 +step:5230 train loss:3.511288 +step:5231 train loss:3.459445 +step:5232 train loss:3.582815 +step:5233 train loss:3.541584 +step:5234 train loss:3.547254 +step:5235 train loss:3.492978 +step:5236 train loss:3.571340 +step:5237 train loss:3.621548 +step:5238 train loss:3.524035 +step:5239 train loss:3.584327 +step:5240 train loss:3.469927 +step:5241 train loss:3.529462 +step:5242 train loss:3.495637 +step:5243 train loss:3.503850 +step:5244 train loss:3.503691 +step:5245 train loss:3.547184 +step:5246 train loss:3.590078 +step:5247 train loss:3.516067 +step:5248 train loss:3.488793 +step:5249 train loss:3.547014 +step:5250 validation loss:3.441225 +step:5250 train loss:3.514588 +step:5251 train loss:3.577092 +step:5252 train loss:3.467851 +step:5253 train loss:3.620984 +step:5254 train loss:3.495614 +step:5255 train loss:3.567759 +step:5256 train loss:3.479426 +step:5257 train loss:3.539368 +step:5258 train loss:3.530671 +step:5259 train loss:3.518265 +step:5260 train loss:3.515013 +step:5261 train loss:3.500985 +step:5262 train loss:3.546794 +step:5263 train loss:3.528322 +step:5264 train loss:3.483498 +step:5265 train loss:3.560207 +step:5266 train loss:3.477954 +step:5267 train loss:3.491906 +step:5268 train loss:3.469908 +step:5269 train loss:3.476389 +step:5270 train loss:3.526376 +step:5271 train loss:3.449886 +step:5272 train loss:3.546926 +step:5273 train loss:3.451572 +step:5274 train loss:3.504396 +step:5275 train loss:3.514005 +step:5276 train loss:3.641742 +step:5277 train loss:3.540026 +step:5278 train loss:3.489245 +step:5279 train loss:3.537966 +step:5280 train loss:3.510055 +step:5281 train loss:3.507083 
+step:5282 train loss:3.477256 +step:5283 train loss:3.479712 +step:5284 train loss:3.492005 +step:5285 train loss:3.551638 +step:5286 train loss:3.463683 +step:5287 train loss:3.564206 +step:5288 train loss:3.539240 +step:5289 train loss:3.509064 +step:5290 train loss:3.561682 +step:5291 train loss:3.514920 +step:5292 train loss:3.532974 +step:5293 train loss:3.501318 +step:5294 train loss:3.490074 +step:5295 train loss:3.497430 +step:5296 train loss:3.488116 +step:5297 train loss:3.508613 +step:5298 train loss:3.456560 +step:5299 train loss:3.546540 +step:5300 train loss:3.497391 +step:5301 train loss:3.564830 +step:5302 train loss:3.572350 +step:5303 train loss:3.433863 +step:5304 train loss:3.464008 +step:5305 train loss:3.445467 +step:5306 train loss:3.475658 +step:5307 train loss:3.481480 +step:5308 train loss:3.576437 +step:5309 train loss:3.524627 +step:5310 train loss:3.510729 +step:5311 train loss:3.577336 +step:5312 train loss:3.461874 +step:5313 train loss:3.549720 +step:5314 train loss:3.544997 +step:5315 train loss:3.503435 +step:5316 train loss:3.534266 +step:5317 train loss:3.552032 +step:5318 train loss:3.505033 +step:5319 train loss:3.533458 +step:5320 train loss:3.487907 +step:5321 train loss:3.606485 +step:5322 train loss:3.519512 +step:5323 train loss:3.519446 +step:5324 train loss:3.462882 +step:5325 train loss:3.549088 +step:5326 train loss:3.534743 +step:5327 train loss:3.425788 +step:5328 train loss:3.563032 +step:5329 train loss:3.527081 +step:5330 train loss:3.529163 +step:5331 train loss:3.575999 +step:5332 train loss:3.502672 +step:5333 train loss:3.565279 +step:5334 train loss:3.538110 +step:5335 train loss:3.603868 +step:5336 train loss:3.631664 +step:5337 train loss:3.467446 +step:5338 train loss:3.475248 +step:5339 train loss:3.502087 +step:5340 train loss:3.523102 +step:5341 train loss:3.534641 +step:5342 train loss:3.434992 +step:5343 train loss:3.594571 +step:5344 train loss:3.478608 +step:5345 train loss:3.480018 +step:5346 train loss:3.480141 +step:5347 train loss:3.505257 +step:5348 train loss:3.545871 +step:5349 train loss:3.483174 +step:5350 train loss:3.527202 +step:5351 train loss:3.599842 +step:5352 train loss:3.645467 +step:5353 train loss:3.550472 +step:5354 train loss:3.521449 +step:5355 train loss:3.488404 +step:5356 train loss:3.506804 +step:5357 train loss:3.492397 +step:5358 train loss:3.514300 +step:5359 train loss:3.521852 +step:5360 train loss:3.496874 +step:5361 train loss:3.500480 +step:5362 train loss:3.483218 +step:5363 train loss:3.478365 +step:5364 train loss:3.477286 +step:5365 train loss:3.514270 +step:5366 train loss:3.541054 +step:5367 train loss:3.470342 +step:5368 train loss:3.539908 +step:5369 train loss:3.556969 +step:5370 train loss:3.456075 +step:5371 train loss:3.503613 +step:5372 train loss:3.536002 +step:5373 train loss:3.568287 +step:5374 train loss:3.453328 +step:5375 train loss:3.499392 +step:5376 train loss:3.562078 +step:5377 train loss:3.502507 +step:5378 train loss:3.478183 +step:5379 train loss:3.479703 +step:5380 train loss:3.517042 +step:5381 train loss:3.553124 +step:5382 train loss:3.457374 +step:5383 train loss:3.529663 +step:5384 train loss:3.536904 +step:5385 train loss:3.535059 +step:5386 train loss:3.516970 +step:5387 train loss:3.527585 +step:5388 train loss:3.530689 +step:5389 train loss:3.464491 +step:5390 train loss:3.494798 +step:5391 train loss:3.435549 +step:5392 train loss:3.502933 +step:5393 train loss:3.481908 +step:5394 train loss:3.486489 +step:5395 train loss:3.557125 +step:5396 train 
loss:3.525078 +step:5397 train loss:3.544105 +step:5398 train loss:3.537156 +step:5399 train loss:3.569977 +step:5400 train loss:3.578328 +step:5401 train loss:3.536000 +step:5402 train loss:3.645185 +step:5403 train loss:3.547999 +step:5404 train loss:3.523964 +step:5405 train loss:3.594912 +step:5406 train loss:3.551994 +step:5407 train loss:3.484717 +step:5408 train loss:3.629523 +step:5409 train loss:3.468946 +step:5410 train loss:3.532788 +step:5411 train loss:3.516586 +step:5412 train loss:3.495983 +step:5413 train loss:3.544480 +step:5414 train loss:3.520860 +step:5415 train loss:3.496998 +step:5416 train loss:3.492773 +step:5417 train loss:3.563442 +step:5418 train loss:3.577000 +step:5419 train loss:3.481985 +step:5420 train loss:3.541715 +step:5421 train loss:3.511571 +step:5422 train loss:3.558325 +step:5423 train loss:3.533286 +step:5424 train loss:3.437198 +step:5425 train loss:3.503036 +step:5426 train loss:3.592432 +step:5427 train loss:3.487786 +step:5428 train loss:3.520577 +step:5429 train loss:3.445363 +step:5430 train loss:3.490026 +step:5431 train loss:3.546545 +step:5432 train loss:3.526699 +step:5433 train loss:3.530971 +step:5434 train loss:3.481860 +step:5435 train loss:3.478778 +step:5436 train loss:3.480278 +step:5437 train loss:3.520583 +step:5438 train loss:3.500532 +step:5439 train loss:3.509838 +step:5440 train loss:3.549773 +step:5441 train loss:3.567472 +step:5442 train loss:3.489651 +step:5443 train loss:3.489009 +step:5444 train loss:3.433774 +step:5445 train loss:3.521016 +step:5446 train loss:3.488645 +step:5447 train loss:3.528070 +step:5448 train loss:3.585913 +step:5449 train loss:3.473189 +step:5450 train loss:3.509115 +step:5451 train loss:3.500685 +step:5452 train loss:3.517957 +step:5453 train loss:3.572683 +step:5454 train loss:3.496601 +step:5455 train loss:3.482018 +step:5456 train loss:3.623926 +step:5457 train loss:3.509220 +step:5458 train loss:3.536610 +step:5459 train loss:3.484107 +step:5460 train loss:3.500229 +step:5461 train loss:3.502779 +step:5462 train loss:3.503778 +step:5463 train loss:3.517568 +step:5464 train loss:3.518882 +step:5465 train loss:3.462244 +step:5466 train loss:3.536104 +step:5467 train loss:3.516979 +step:5468 train loss:3.524133 +step:5469 train loss:3.619184 +step:5470 train loss:3.513727 +step:5471 train loss:3.587987 +step:5472 train loss:3.534535 +step:5473 train loss:3.438031 +step:5474 train loss:3.775452 +step:5475 train loss:3.446888 +step:5476 train loss:3.524886 +step:5477 train loss:3.522056 +step:5478 train loss:3.522909 +step:5479 train loss:3.664694 +step:5480 train loss:3.510259 +step:5481 train loss:3.571907 +step:5482 train loss:3.486594 +step:5483 train loss:3.523099 +step:5484 train loss:3.561627 +step:5485 train loss:3.479573 +step:5486 train loss:3.524853 +step:5487 train loss:3.526512 +step:5488 train loss:3.436573 +step:5489 train loss:3.543962 +step:5490 train loss:3.489005 +step:5491 train loss:3.593736 +step:5492 train loss:3.518520 +step:5493 train loss:3.451158 +step:5494 train loss:3.501796 +step:5495 train loss:3.479832 +step:5496 train loss:3.479874 +step:5497 train loss:3.596499 +step:5498 train loss:3.467020 +step:5499 train loss:3.604338 +step:5500 validation loss:3.436637 total_sharp:9.9845e-04 L1_sharp:2.9767e-03 L2_sharp:4.1866e-04 L3_sharp:6.7105e-04 L4_sharp:6.0272e-05 L5_sharp:8.1338e-05 L6_sharp:1.0973e-04 L7_sharp:1.4134e-04 L8_sharp:7.0945e-05 L9_sharp:5.1396e-05 L10_sharp:4.3670e-05 L11_sharp:4.3986e-05 L12_sharp:3.6811e-05 total_fnorm:4.2353e+00 
total_l1_linf:3.4591e+04 total_spectral:4.2353e+00 L1_fnorm:1.1030e+00 L2_fnorm:9.8370e-01 L3_fnorm:1.0617e+00 L4_fnorm:1.1606e+00 L5_fnorm:1.1865e+00 L6_fnorm:1.1952e+00 L7_fnorm:1.1934e+00 L8_fnorm:1.2011e+00 L9_fnorm:1.2016e+00 L10_fnorm:1.2081e+00 L11_fnorm:1.2037e+00 L12_fnorm:1.2063e+00 L1_l1linf:8.8394e-01 L2_l1linf:7.8904e-01 L3_l1linf:8.0886e-01 L4_l1linf:8.3678e-01 L5_l1linf:8.3092e-01 L6_l1linf:8.2790e-01 L7_l1linf:8.1987e-01 L8_l1linf:8.1701e-01 L9_l1linf:8.1189e-01 L10_l1linf:8.2562e-01 L11_l1linf:8.0050e-01 L12_l1linf:7.8665e-01 L1_spectral:2.4104e-02 L2_spectral:2.4075e-02 L3_spectral:2.4077e-02 L4_spectral:2.4094e-02 L5_spectral:2.4117e-02 L6_spectral:2.4095e-02 L7_spectral:2.4085e-02 L8_spectral:2.4091e-02 L9_spectral:2.4089e-02 L10_spectral:2.4085e-02 L11_spectral:2.4090e-02 L12_spectral:2.4092e-02 ip_v_neg_g:9.7513e-03 cos_v_neg_g:1.1563e-03 v_norm:4.2353e+00 g_norm:1.9911e+00 hv_norm:9.4623e-01 cos_v_hv:4.4691e-03 hg_norm:1.4640e+02 cos_g_hg:7.0969e-01 v_par:1.3905e-03 v_perp:4.2353e+00 L1_cos_v_neg_g:2.5997e-03 L1_v_norm:1.1030e+00 L2_cos_v_neg_g:5.1731e-03 L2_v_norm:9.8370e-01 L3_cos_v_neg_g:4.1474e-03 L3_v_norm:1.0617e+00 L4_cos_v_neg_g:2.1989e-03 L4_v_norm:1.1606e+00 L5_cos_v_neg_g:2.3563e-03 L5_v_norm:1.1865e+00 L6_cos_v_neg_g:2.0429e-03 L6_v_norm:1.1952e+00 L7_cos_v_neg_g:1.8728e-03 L7_v_norm:1.1934e+00 L8_cos_v_neg_g:1.5395e-03 L8_v_norm:1.2011e+00 L9_cos_v_neg_g:-3.5322e-06 L9_v_norm:1.2016e+00 L10_cos_v_neg_g:1.0567e-03 L10_v_norm:1.2081e+00 L11_cos_v_neg_g:1.1870e-03 L11_v_norm:1.2037e+00 L12_cos_v_neg_g:2.4184e-03 L12_v_norm:1.2063e+00 +step:5500 train loss:3.516912 +step:5501 train loss:3.592681 +step:5502 train loss:3.535393 +step:5503 train loss:3.504149 +step:5504 train loss:3.551354 +step:5505 train loss:3.510666 +step:5506 train loss:3.558764 +step:5507 train loss:3.541428 +step:5508 train loss:3.567057 +step:5509 train loss:3.581217 +step:5510 train loss:3.549804 +step:5511 train loss:3.541235 +step:5512 train loss:3.668874 +step:5513 train loss:3.465563 +step:5514 train loss:3.525855 +step:5515 train loss:3.552433 +step:5516 train loss:3.579082 +step:5517 train loss:3.534677 +step:5518 train loss:3.562083 +step:5519 train loss:3.598539 +step:5520 train loss:3.503236 +step:5521 train loss:3.513448 +step:5522 train loss:3.486383 +step:5523 train loss:3.531274 +step:5524 train loss:3.572312 +step:5525 train loss:3.482679 +step:5526 train loss:3.494853 +step:5527 train loss:3.522112 +step:5528 train loss:3.618978 +step:5529 train loss:3.585123 +step:5530 train loss:3.551558 +step:5531 train loss:3.487440 +step:5532 train loss:3.514919 +step:5533 train loss:3.545364 +step:5534 train loss:3.464631 +step:5535 train loss:3.513318 +step:5536 train loss:3.458267 +step:5537 train loss:3.500762 +step:5538 train loss:3.495974 +step:5539 train loss:3.437684 +step:5540 train loss:3.663123 +step:5541 train loss:3.473233 +step:5542 train loss:3.521105 +step:5543 train loss:3.513494 +step:5544 train loss:3.499521 +step:5545 train loss:3.488951 +step:5546 train loss:3.524738 +step:5547 train loss:3.458195 +step:5548 train loss:3.502267 +step:5549 train loss:3.508626 +step:5550 train loss:3.532776 +step:5551 train loss:3.532509 +step:5552 train loss:3.490899 +step:5553 train loss:3.522136 +step:5554 train loss:3.491901 +step:5555 train loss:3.499824 +step:5556 train loss:3.518372 +step:5557 train loss:3.578764 +step:5558 train loss:3.500793 +step:5559 train loss:3.510128 +step:5560 train loss:3.502656 +step:5561 train loss:3.538612 +step:5562 train loss:3.487928 
+step:5563 train loss:3.473443 +step:5564 train loss:3.506784 +step:5565 train loss:3.573219 +step:5566 train loss:3.473568 +step:5567 train loss:3.595778 +step:5568 train loss:3.712900 +step:5569 train loss:3.500902 +step:5570 train loss:3.435305 +step:5571 train loss:3.522560 +step:5572 train loss:3.461160 +step:5573 train loss:3.454693 +step:5574 train loss:3.420301 +step:5575 train loss:3.520928 +step:5576 train loss:3.501846 +step:5577 train loss:3.507594 +step:5578 train loss:3.539702 +step:5579 train loss:3.493726 +step:5580 train loss:3.517654 +step:5581 train loss:3.539352 +step:5582 train loss:3.513819 +step:5583 train loss:3.526415 +step:5584 train loss:3.646665 +step:5585 train loss:3.552321 +step:5586 train loss:3.490149 +step:5587 train loss:3.517525 +step:5588 train loss:3.532491 +step:5589 train loss:3.533778 +step:5590 train loss:3.592638 +step:5591 train loss:3.462624 +step:5592 train loss:3.656148 +step:5593 train loss:3.511426 +step:5594 train loss:3.518619 +step:5595 train loss:3.512892 +step:5596 train loss:3.463247 +step:5597 train loss:3.480139 +step:5598 train loss:3.486511 +step:5599 train loss:3.486891 +step:5600 train loss:3.533256 +step:5601 train loss:3.557009 +step:5602 train loss:3.490333 +step:5603 train loss:3.531599 +step:5604 train loss:3.527979 +step:5605 train loss:3.499297 +step:5606 train loss:3.507806 +step:5607 train loss:3.532151 +step:5608 train loss:3.476645 +step:5609 train loss:3.531882 +step:5610 train loss:3.487198 +step:5611 train loss:3.526147 +step:5612 train loss:3.556222 +step:5613 train loss:3.516137 +step:5614 train loss:3.482375 +step:5615 train loss:3.582813 +step:5616 train loss:3.479123 +step:5617 train loss:3.570869 +step:5618 train loss:3.554821 +step:5619 train loss:3.507757 +step:5620 train loss:3.509551 +step:5621 train loss:3.584112 +step:5622 train loss:3.466223 +step:5623 train loss:3.504679 +step:5624 train loss:3.492319 +step:5625 train loss:3.529243 +step:5626 train loss:3.524164 +step:5627 train loss:3.489716 +step:5628 train loss:3.538650 +step:5629 train loss:3.515549 +step:5630 train loss:3.445645 +step:5631 train loss:3.489461 +step:5632 train loss:3.530311 +step:5633 train loss:3.521000 +step:5634 train loss:3.479736 +step:5635 train loss:3.514292 +step:5636 train loss:3.493889 +step:5637 train loss:3.629136 +step:5638 train loss:3.538753 +step:5639 train loss:3.520066 +step:5640 train loss:3.523095 +step:5641 train loss:3.562885 +step:5642 train loss:3.498006 +step:5643 train loss:3.512540 +step:5644 train loss:3.595250 +step:5645 train loss:3.551652 +step:5646 train loss:3.546738 +step:5647 train loss:3.537235 +step:5648 train loss:3.526268 +step:5649 train loss:3.442998 +step:5650 train loss:3.445109 +step:5651 train loss:3.522082 +step:5652 train loss:3.523527 +step:5653 train loss:3.488959 +step:5654 train loss:3.619931 +step:5655 train loss:3.480098 +step:5656 train loss:3.503848 +step:5657 train loss:3.573295 +step:5658 train loss:3.476066 +step:5659 train loss:3.512628 +step:5660 train loss:3.558156 +step:5661 train loss:3.502810 +step:5662 train loss:3.543151 +step:5663 train loss:3.434386 +step:5664 train loss:3.404405 +step:5665 train loss:3.526047 +step:5666 train loss:3.529598 +step:5667 train loss:3.562508 +step:5668 train loss:3.492640 +step:5669 train loss:3.504662 +step:5670 train loss:3.509036 +step:5671 train loss:3.491071 +step:5672 train loss:3.545675 +step:5673 train loss:3.512545 +step:5674 train loss:3.575705 +step:5675 train loss:3.496447 +step:5676 train loss:3.643256 +step:5677 train 
loss:3.536830 +step:5678 train loss:3.519315 +step:5679 train loss:3.507359 +step:5680 train loss:3.537600 +step:5681 train loss:3.508687 +step:5682 train loss:3.520389 +step:5683 train loss:3.478998 +step:5684 train loss:3.493116 +step:5685 train loss:3.533353 +step:5686 train loss:3.550060 +step:5687 train loss:3.505005 +step:5688 train loss:3.583345 +step:5689 train loss:3.489265 +step:5690 train loss:3.647868 +step:5691 train loss:3.471034 +step:5692 train loss:3.464907 +step:5693 train loss:3.467262 +step:5694 train loss:3.488870 +step:5695 train loss:3.506237 +step:5696 train loss:3.554891 +step:5697 train loss:3.482172 +step:5698 train loss:3.499610 +step:5699 train loss:3.514223 +step:5700 train loss:3.510413 +step:5701 train loss:3.510228 +step:5702 train loss:3.570216 +step:5703 train loss:3.471843 +step:5704 train loss:3.515030 +step:5705 train loss:3.521312 +step:5706 train loss:3.546790 +step:5707 train loss:3.465509 +step:5708 train loss:3.551501 +step:5709 train loss:3.553435 +step:5710 train loss:3.546040 +step:5711 train loss:3.564397 +step:5712 train loss:3.548101 +step:5713 train loss:3.474384 +step:5714 train loss:3.558037 +step:5715 train loss:3.513361 +step:5716 train loss:3.520478 +step:5717 train loss:3.542609 +step:5718 train loss:3.489526 +step:5719 train loss:3.561182 +step:5720 train loss:3.532436 +step:5721 train loss:3.461719 +step:5722 train loss:3.477103 +step:5723 train loss:3.553703 +step:5724 train loss:3.473190 +step:5725 train loss:3.545737 +step:5726 train loss:3.537541 +step:5727 train loss:3.495452 +step:5728 train loss:3.499746 +step:5729 train loss:3.498328 +step:5730 train loss:3.579752 +step:5731 train loss:3.439247 +step:5732 train loss:3.497429 +step:5733 train loss:3.494203 +step:5734 train loss:3.507000 +step:5735 train loss:3.497517 +step:5736 train loss:3.501473 +step:5737 train loss:3.523146 +step:5738 train loss:3.486299 +step:5739 train loss:3.498965 +step:5740 train loss:3.538122 +step:5741 train loss:3.514866 +step:5742 train loss:3.568087 +step:5743 train loss:3.534320 +step:5744 train loss:3.493148 +step:5745 train loss:3.493115 +step:5746 train loss:3.522702 +step:5747 train loss:3.512492 +step:5748 train loss:3.554751 +step:5749 train loss:3.512178 +step:5750 validation loss:3.432825 +step:5750 train loss:3.519017 +step:5751 train loss:3.532122 +step:5752 train loss:3.519295 +step:5753 train loss:3.492698 +step:5754 train loss:3.499113 +step:5755 train loss:3.512789 +step:5756 train loss:3.503245 +step:5757 train loss:3.564182 +step:5758 train loss:3.498262 +step:5759 train loss:3.463110 +step:5760 train loss:3.543586 +step:5761 train loss:3.538133 +step:5762 train loss:3.499185 +step:5763 train loss:3.523491 +step:5764 train loss:3.487595 +step:5765 train loss:3.605509 +step:5766 train loss:3.515445 +step:5767 train loss:3.550195 +step:5768 train loss:3.486187 +step:5769 train loss:3.610149 +step:5770 train loss:3.531791 +step:5771 train loss:3.559401 +step:5772 train loss:3.505244 +step:5773 train loss:3.489350 +step:5774 train loss:3.497421 +step:5775 train loss:3.569476 +step:5776 train loss:3.549331 +step:5777 train loss:3.470834 +step:5778 train loss:3.574117 +step:5779 train loss:3.513963 +step:5780 train loss:3.489372 +step:5781 train loss:3.553279 +step:5782 train loss:3.509109 +step:5783 train loss:3.472259 +step:5784 train loss:3.574629 +step:5785 train loss:3.563572 +step:5786 train loss:3.476394 +step:5787 train loss:3.525386 +step:5788 train loss:3.532024 +step:5789 train loss:3.474105 +step:5790 train loss:3.578577 
+step:5791 train loss:3.501733 +step:5792 train loss:3.778383 +step:5793 train loss:3.549988 +step:5794 train loss:3.563101 +step:5795 train loss:3.559680 +step:5796 train loss:3.540982 +step:5797 train loss:3.521423 +step:5798 train loss:3.519653 +step:5799 train loss:3.491941 +step:5800 train loss:3.649075 +step:5801 train loss:3.524812 +step:5802 train loss:3.510977 +step:5803 train loss:3.522699 +step:5804 train loss:3.542230 +step:5805 train loss:3.508645 +step:5806 train loss:3.542905 +step:5807 train loss:3.466123 +step:5808 train loss:3.499477 +step:5809 train loss:3.511208 +step:5810 train loss:3.481121 +step:5811 train loss:3.499892 +step:5812 train loss:3.478189 +step:5813 train loss:3.490011 +step:5814 train loss:3.484192 +step:5815 train loss:3.493523 +step:5816 train loss:3.550430 +step:5817 train loss:3.563021 +step:5818 train loss:3.534808 +step:5819 train loss:3.584975 +step:5820 train loss:3.526647 +step:5821 train loss:3.519704 +step:5822 train loss:3.532735 +step:5823 train loss:3.540389 +step:5824 train loss:3.489494 +step:5825 train loss:3.583667 +step:5826 train loss:3.495763 +step:5827 train loss:3.462619 +step:5828 train loss:3.449157 +step:5829 train loss:3.510684 +step:5830 train loss:3.486081 +step:5831 train loss:3.459904 +step:5832 train loss:3.572625 +step:5833 train loss:3.549019 +step:5834 train loss:3.533120 +step:5835 train loss:3.482789 +step:5836 train loss:3.449000 +step:5837 train loss:3.570802 +step:5838 train loss:3.550001 +step:5839 train loss:3.525591 +step:5840 train loss:3.609411 +step:5841 train loss:3.530483 +step:5842 train loss:3.544724 +step:5843 train loss:3.487332 +step:5844 train loss:3.568660 +step:5845 train loss:3.464634 +step:5846 train loss:3.514982 +step:5847 train loss:3.544144 +step:5848 train loss:3.609217 +step:5849 train loss:3.505778 +step:5850 train loss:3.532053 +step:5851 train loss:3.499173 +step:5852 train loss:3.587915 +step:5853 train loss:3.680476 +step:5854 train loss:3.465156 +step:5855 train loss:3.526511 +step:5856 train loss:3.499008 +step:5857 train loss:3.511698 +step:5858 train loss:3.482946 +step:5859 train loss:3.489685 +step:5860 train loss:3.591449 +step:5861 train loss:3.476876 +step:5862 train loss:3.589794 +step:5863 train loss:3.529069 +step:5864 train loss:3.516284 +step:5865 train loss:3.518075 +step:5866 train loss:3.510285 +step:5867 train loss:3.597965 +step:5868 train loss:3.513381 +step:5869 train loss:3.540245 +step:5870 train loss:3.519863 +step:5871 train loss:3.494265 +step:5872 train loss:3.525265 +step:5873 train loss:3.506328 +step:5874 train loss:3.587894 +step:5875 train loss:3.514015 +step:5876 train loss:3.496030 +step:5877 train loss:3.506059 +step:5878 train loss:3.501429 +step:5879 train loss:3.475582 +step:5880 train loss:3.677542 +step:5881 train loss:3.514759 +step:5882 train loss:3.484946 +step:5883 train loss:3.490809 +step:5884 train loss:3.502717 +step:5885 train loss:3.502127 +step:5886 train loss:3.521355 +step:5887 train loss:3.520565 +step:5888 train loss:3.497734 +step:5889 train loss:3.481610 +step:5890 train loss:3.525689 +step:5891 train loss:3.472281 +step:5892 train loss:3.555404 +step:5893 train loss:3.474396 +step:5894 train loss:3.466708 +step:5895 train loss:3.476555 +step:5896 train loss:3.483170 +step:5897 train loss:3.549742 +step:5898 train loss:3.776698 +step:5899 train loss:3.497736 +step:5900 train loss:3.550459 +step:5901 train loss:3.502551 +step:5902 train loss:3.511142 +step:5903 train loss:3.500313 +step:5904 train loss:3.533834 +step:5905 train 
loss:3.640664 +step:5906 train loss:3.576756 +step:5907 train loss:3.522716 +step:5908 train loss:3.500828 +step:5909 train loss:3.494501 +step:5910 train loss:3.479603 +step:5911 train loss:3.496495 +step:5912 train loss:3.532880 +step:5913 train loss:3.532993 +step:5914 train loss:3.513249 +step:5915 train loss:3.638286 +step:5916 train loss:3.520241 +step:5917 train loss:3.488627 +step:5918 train loss:3.489346 +step:5919 train loss:3.515561 +step:5920 train loss:3.512003 +step:5921 train loss:3.485238 +step:5922 train loss:3.541142 +step:5923 train loss:3.535535 +step:5924 train loss:3.491478 +step:5925 train loss:3.616441 +step:5926 train loss:3.499056 +step:5927 train loss:3.476970 +step:5928 train loss:3.511611 +step:5929 train loss:3.533109 +step:5930 train loss:3.484737 +step:5931 train loss:3.467372 +step:5932 train loss:3.508981 +step:5933 train loss:3.563694 +step:5934 train loss:3.476227 +step:5935 train loss:3.503496 +step:5936 train loss:3.490414 +step:5937 train loss:3.468987 +step:5938 train loss:3.490832 +step:5939 train loss:3.464720 +step:5940 train loss:3.552908 +step:5941 train loss:3.483830 +step:5942 train loss:3.499907 +step:5943 train loss:3.504689 +step:5944 train loss:3.559318 +step:5945 train loss:3.494215 +step:5946 train loss:3.469161 +step:5947 train loss:3.483052 +step:5948 train loss:3.522671 +step:5949 train loss:3.568013 +step:5950 train loss:3.525711 +step:5951 train loss:3.526224 +step:5952 train loss:3.449903 +step:5953 train loss:3.491394 +step:5954 train loss:3.503605 +step:5955 train loss:3.508254 +step:5956 train loss:3.484826 +step:5957 train loss:3.453880 +step:5958 train loss:3.532367 +step:5959 train loss:3.486316 +step:5960 train loss:3.463146 +step:5961 train loss:3.485337 +step:5962 train loss:3.518402 +step:5963 train loss:3.552049 +step:5964 train loss:3.509471 +step:5965 train loss:3.529824 +step:5966 train loss:3.525352 +step:5967 train loss:3.490155 +step:5968 train loss:3.564147 +step:5969 train loss:3.502985 +step:5970 train loss:3.522769 +step:5971 train loss:3.469669 +step:5972 train loss:3.500264 +step:5973 train loss:3.487954 +step:5974 train loss:3.512717 +step:5975 train loss:3.482295 +step:5976 train loss:3.528302 +step:5977 train loss:3.483949 +step:5978 train loss:3.466750 +step:5979 train loss:3.501533 +step:5980 train loss:3.573364 +step:5981 train loss:3.466736 +step:5982 train loss:3.478226 +step:5983 train loss:3.542466 +step:5984 train loss:3.485080 +step:5985 train loss:3.533968 +step:5986 train loss:3.506871 +step:5987 train loss:3.491220 +step:5988 train loss:3.499782 +step:5989 train loss:3.519624 +step:5990 train loss:3.449606 +step:5991 train loss:3.511254 +step:5992 train loss:3.545121 +step:5993 train loss:3.497421 +step:5994 train loss:3.518565 +step:5995 train loss:3.409681 +step:5996 train loss:3.575624 +step:5997 train loss:3.556422 +step:5998 train loss:3.432693 +step:5999 train loss:3.461749 +step:6000 validation loss:3.426405 total_sharp:8.2862e-04 L1_sharp:3.5571e-03 L2_sharp:9.6102e-05 L3_sharp:1.9799e-04 L4_sharp:7.4692e-05 L5_sharp:6.7219e-05 L6_sharp:1.0488e-04 L7_sharp:1.2255e-04 L8_sharp:7.3079e-05 L9_sharp:6.1967e-05 L10_sharp:4.8823e-05 L11_sharp:4.3316e-05 L12_sharp:4.8167e-05 total_fnorm:4.2413e+00 total_l1_linf:3.4621e+04 total_spectral:4.2413e+00 L1_fnorm:1.0995e+00 L2_fnorm:9.8069e-01 L3_fnorm:1.0798e+00 L4_fnorm:1.1608e+00 L5_fnorm:1.1910e+00 L6_fnorm:1.1955e+00 L7_fnorm:1.1964e+00 L8_fnorm:1.2047e+00 L9_fnorm:1.2039e+00 L10_fnorm:1.2083e+00 L11_fnorm:1.2021e+00 L12_fnorm:1.2065e+00 
L1_l1linf:8.9541e-01 L2_l1linf:7.8850e-01 L3_l1linf:8.1632e-01 L4_l1linf:8.4002e-01 L5_l1linf:8.2984e-01 L6_l1linf:8.2737e-01 L7_l1linf:8.1858e-01 L8_l1linf:8.1922e-01 L9_l1linf:8.1368e-01 L10_l1linf:8.1584e-01 L11_l1linf:7.9839e-01 L12_l1linf:7.8733e-01 L1_spectral:2.4107e-02 L2_spectral:2.4074e-02 L3_spectral:2.4086e-02 L4_spectral:2.4098e-02 L5_spectral:2.4121e-02 L6_spectral:2.4085e-02 L7_spectral:2.4090e-02 L8_spectral:2.4078e-02 L9_spectral:2.4092e-02 L10_spectral:2.4088e-02 L11_spectral:2.4092e-02 L12_spectral:2.4089e-02 ip_v_neg_g:6.4897e-03 cos_v_neg_g:1.6360e-03 v_norm:4.2413e+00 g_norm:9.3529e-01 hv_norm:8.2600e-01 cos_v_hv:4.2547e-03 hg_norm:2.5999e+01 cos_g_hg:5.4632e-01 v_par:1.8450e-03 v_perp:4.2413e+00 L1_cos_v_neg_g:3.4277e-03 L1_v_norm:1.0995e+00 L2_cos_v_neg_g:2.7733e-03 L2_v_norm:9.8069e-01 L3_cos_v_neg_g:2.6829e-03 L3_v_norm:1.0798e+00 L4_cos_v_neg_g:2.6890e-03 L4_v_norm:1.1608e+00 L5_cos_v_neg_g:2.1998e-03 L5_v_norm:1.1910e+00 L6_cos_v_neg_g:2.5901e-03 L6_v_norm:1.1955e+00 L7_cos_v_neg_g:2.9386e-03 L7_v_norm:1.1964e+00 L8_cos_v_neg_g:3.7009e-03 L8_v_norm:1.2047e+00 L9_cos_v_neg_g:3.3928e-03 L9_v_norm:1.2039e+00 L10_cos_v_neg_g:3.4870e-03 L10_v_norm:1.2083e+00 L11_cos_v_neg_g:2.9941e-03 L11_v_norm:1.2021e+00 L12_cos_v_neg_g:1.5984e-03 L12_v_norm:1.2065e+00 +step:6000 train loss:3.508373 +step:6001 train loss:3.476258 +step:6002 train loss:3.503285 +step:6003 train loss:3.523397 +step:6004 train loss:3.475858 +step:6005 train loss:3.545211 +step:6006 train loss:3.454397 +step:6007 train loss:3.476960 +step:6008 train loss:3.487983 +step:6009 train loss:3.523725 +step:6010 train loss:3.515112 +step:6011 train loss:3.505219 +step:6012 train loss:3.472282 +step:6013 train loss:3.527707 +step:6014 train loss:3.549368 +step:6015 train loss:3.548280 +step:6016 train loss:3.515367 +step:6017 train loss:3.525614 +step:6018 train loss:3.463407 +step:6019 train loss:3.500602 +step:6020 train loss:3.486690 +step:6021 train loss:3.418239 +step:6022 train loss:3.530086 +step:6023 train loss:3.465301 +step:6024 train loss:3.541354 +step:6025 train loss:3.505970 +step:6026 train loss:3.479503 +step:6027 train loss:3.518628 +step:6028 train loss:3.435156 +step:6029 train loss:3.550895 +step:6030 train loss:3.519066 +step:6031 train loss:3.488269 +step:6032 train loss:3.455790 +step:6033 train loss:3.506729 +step:6034 train loss:3.535459 +step:6035 train loss:3.450615 +step:6036 train loss:3.425642 +step:6037 train loss:3.538361 +step:6038 train loss:3.543502 +step:6039 train loss:3.529215 +step:6040 train loss:3.484591 +step:6041 train loss:3.462409 +step:6042 train loss:3.446276 +step:6043 train loss:3.505746 +step:6044 train loss:3.625104 +step:6045 train loss:3.469741 +step:6046 train loss:3.478784 +step:6047 train loss:3.513158 +step:6048 train loss:3.523286 +step:6049 train loss:3.501853 +step:6050 train loss:3.469146 +step:6051 train loss:3.518250 +step:6052 train loss:3.493882 +step:6053 train loss:3.613984 +step:6054 train loss:3.653745 +step:6055 train loss:3.466480 +step:6056 train loss:3.458370 +step:6057 train loss:3.492748 +step:6058 train loss:3.520748 +step:6059 train loss:3.521899 +step:6060 train loss:3.531850 +step:6061 train loss:3.543810 +step:6062 train loss:3.497915 +step:6063 train loss:3.510192 +step:6064 train loss:3.505197 +step:6065 train loss:3.506712 +step:6066 train loss:3.491175 +step:6067 train loss:3.533477 +step:6068 train loss:3.477401 +step:6069 train loss:3.432734 +step:6070 train loss:3.584516 +step:6071 train loss:3.522651 +step:6072 train 
loss:3.464330 +step:6073 train loss:3.506583 +step:6074 train loss:3.586888 +step:6075 train loss:3.508511 +step:6076 train loss:3.517859 +step:6077 train loss:3.517728 +step:6078 train loss:3.453477 +step:6079 train loss:3.484121 +step:6080 train loss:3.487931 +step:6081 train loss:3.528287 +step:6082 train loss:3.478551 +step:6083 train loss:3.491626 +step:6084 train loss:3.559618 +step:6085 train loss:3.550646 +step:6086 train loss:3.452623 +step:6087 train loss:3.497125 +step:6088 train loss:3.483844 +step:6089 train loss:3.538563 +step:6090 train loss:3.545591 +step:6091 train loss:3.491506 +step:6092 train loss:3.451590 +step:6093 train loss:3.515072 +step:6094 train loss:3.430231 +step:6095 train loss:3.593909 +step:6096 train loss:3.463007 +step:6097 train loss:3.541958 +step:6098 train loss:3.513551 +step:6099 train loss:3.575842 +step:6100 train loss:3.565729 +step:6101 train loss:3.500411 +step:6102 train loss:3.614245 +step:6103 train loss:3.503351 +step:6104 train loss:3.613795 +step:6105 train loss:3.549362 +step:6106 train loss:3.487521 +step:6107 train loss:3.549400 +step:6108 train loss:3.513745 +step:6109 train loss:3.583917 +step:6110 train loss:3.517102 +step:6111 train loss:3.551054 +step:6112 train loss:3.490050 +step:6113 train loss:3.517732 +step:6114 train loss:3.488024 +step:6115 train loss:3.545457 +step:6116 train loss:3.489847 +step:6117 train loss:3.545321 +step:6118 train loss:3.528588 +step:6119 train loss:3.537762 +step:6120 train loss:3.686432 +step:6121 train loss:3.517769 +step:6122 train loss:3.528453 +step:6123 train loss:3.507196 +step:6124 train loss:3.483255 +step:6125 train loss:3.474835 +step:6126 train loss:3.492575 +step:6127 train loss:3.483040 +step:6128 train loss:3.457777 +step:6129 train loss:3.680768 +step:6130 train loss:3.465510 +step:6131 train loss:3.446074 +step:6132 train loss:3.518206 +step:6133 train loss:3.479694 +step:6134 train loss:3.511821 +step:6135 train loss:3.591287 +step:6136 train loss:3.613110 +step:6137 train loss:3.474218 +step:6138 train loss:3.530777 +step:6139 train loss:3.509801 +step:6140 train loss:3.508045 +step:6141 train loss:3.469220 +step:6142 train loss:3.533693 +step:6143 train loss:3.498511 +step:6144 train loss:3.517436 +step:6145 train loss:3.770475 +step:6146 train loss:3.602087 +step:6147 train loss:3.689230 +step:6148 train loss:3.451110 +step:6149 train loss:3.580469 +step:6150 train loss:3.533239 +step:6151 train loss:3.485607 +step:6152 train loss:3.486290 +step:6153 train loss:3.550832 +step:6154 train loss:3.635988 +step:6155 train loss:3.503760 +step:6156 train loss:3.604856 +step:6157 train loss:3.529621 +step:6158 train loss:3.518547 +step:6159 train loss:3.487085 +step:6160 train loss:3.650304 +step:6161 train loss:3.503443 +step:6162 train loss:3.519374 +step:6163 train loss:3.552353 +step:6164 train loss:3.468869 +step:6165 train loss:3.533984 +step:6166 train loss:3.528993 +step:6167 train loss:3.546407 +step:6168 train loss:3.524122 +step:6169 train loss:3.515337 +step:6170 train loss:3.517610 +step:6171 train loss:3.487823 +step:6172 train loss:3.476454 +step:6173 train loss:3.524256 +step:6174 train loss:3.452117 +step:6175 train loss:3.466882 +step:6176 train loss:3.448943 +step:6177 train loss:3.542037 +step:6178 train loss:3.491558 +step:6179 train loss:3.496756 +step:6180 train loss:3.505903 +step:6181 train loss:3.540209 +step:6182 train loss:3.421699 +step:6183 train loss:3.435413 +step:6184 train loss:3.548309 +step:6185 train loss:3.502598 +step:6186 train loss:3.465276 
+step:6187 train loss:3.506025 +step:6188 train loss:3.474362 +step:6189 train loss:3.512283 +step:6190 train loss:3.472647 +step:6191 train loss:3.503243 +step:6192 train loss:3.475384 +step:6193 train loss:3.537990 +step:6194 train loss:3.531358 +step:6195 train loss:3.513874 +step:6196 train loss:3.527381 +step:6197 train loss:3.548780 +step:6198 train loss:3.463150 +step:6199 train loss:3.488070 +step:6200 train loss:3.526586 +step:6201 train loss:3.571157 +step:6202 train loss:3.572803 +step:6203 train loss:3.572583 +step:6204 train loss:3.555830 +step:6205 train loss:3.493925 +step:6206 train loss:3.479057 +step:6207 train loss:3.538856 +step:6208 train loss:3.568913 +step:6209 train loss:3.530615 +step:6210 train loss:3.564752 +step:6211 train loss:3.483676 +step:6212 train loss:3.473790 +step:6213 train loss:3.487151 +step:6214 train loss:3.464624 +step:6215 train loss:3.644418 +step:6216 train loss:3.509702 +step:6217 train loss:3.568555 +step:6218 train loss:3.543955 +step:6219 train loss:3.559785 +step:6220 train loss:3.513391 +step:6221 train loss:3.479054 +step:6222 train loss:3.720222 +step:6223 train loss:3.477298 +step:6224 train loss:3.507228 +step:6225 train loss:3.489948 +step:6226 train loss:3.500148 +step:6227 train loss:3.506058 +step:6228 train loss:3.497985 +step:6229 train loss:3.536473 +step:6230 train loss:3.496529 +step:6231 train loss:3.605443 +step:6232 train loss:3.450532 +step:6233 train loss:3.488821 +step:6234 train loss:3.495618 +step:6235 train loss:3.524951 +step:6236 train loss:3.461355 +step:6237 train loss:3.483227 +step:6238 train loss:3.510813 +step:6239 train loss:3.495754 +step:6240 train loss:3.520061 +step:6241 train loss:3.498044 +step:6242 train loss:3.497449 +step:6243 train loss:3.534973 +step:6244 train loss:3.689983 +step:6245 train loss:3.486979 +step:6246 train loss:3.473725 +step:6247 train loss:3.469612 +step:6248 train loss:3.471593 +step:6249 train loss:3.413722 +step:6250 validation loss:3.420503 +step:6250 train loss:3.449924 +step:6251 train loss:3.468662 +step:6252 train loss:3.507517 +step:6253 train loss:3.520478 +step:6254 train loss:3.511118 +step:6255 train loss:3.476389 +step:6256 train loss:3.527283 +step:6257 train loss:3.525623 +step:6258 train loss:3.505829 +step:6259 train loss:3.511116 +step:6260 train loss:3.542539 +step:6261 train loss:3.560151 +step:6262 train loss:3.454260 +step:6263 train loss:3.487443 +step:6264 train loss:3.499788 +step:6265 train loss:3.488542 +step:6266 train loss:3.695710 +step:6267 train loss:3.492254 +step:6268 train loss:3.578209 +step:6269 train loss:3.455663 +step:6270 train loss:3.471050 +step:6271 train loss:3.513909 +step:6272 train loss:3.506793 +step:6273 train loss:3.708987 +step:6274 train loss:3.484698 +step:6275 train loss:3.520138 +step:6276 train loss:3.487710 +step:6277 train loss:3.471991 +step:6278 train loss:3.459030 +step:6279 train loss:3.513041 +step:6280 train loss:3.516906 +step:6281 train loss:3.452878 +step:6282 train loss:3.464820 +step:6283 train loss:3.550905 +step:6284 train loss:3.518397 +step:6285 train loss:3.518835 +step:6286 train loss:3.464368 +step:6287 train loss:3.496343 +step:6288 train loss:3.593744 +step:6289 train loss:3.456771 +step:6290 train loss:3.451552 +step:6291 train loss:3.486944 +step:6292 train loss:3.505932 +step:6293 train loss:3.495012 +step:6294 train loss:3.481652 +step:6295 train loss:3.500552 +step:6296 train loss:3.464252 +step:6297 train loss:3.593683 +step:6298 train loss:3.540049 +step:6299 train loss:3.433708 +step:6300 
train loss:3.513146 +step:6301 train loss:3.538360 +step:6302 train loss:3.523624 +step:6303 train loss:3.490597 +step:6304 train loss:3.511548 +step:6305 train loss:3.481075 +step:6306 train loss:3.490922 +step:6307 train loss:3.500909 +step:6308 train loss:3.475236 +step:6309 train loss:3.476120 +step:6310 train loss:3.527203 +step:6311 train loss:3.480714 +step:6312 train loss:3.519554 +step:6313 train loss:3.451509 +step:6314 train loss:3.475533 +step:6315 train loss:3.532146 +step:6316 train loss:3.454432 +step:6317 train loss:3.440399 +step:6318 train loss:3.560649 +step:6319 train loss:3.489496 +step:6320 train loss:3.504740 +step:6321 train loss:3.490294 +step:6322 train loss:3.492969 +step:6323 train loss:3.421891 +step:6324 train loss:3.434010 +step:6325 train loss:3.528389 +step:6326 train loss:3.446785 +step:6327 train loss:3.521676 +step:6328 train loss:3.499488 +step:6329 train loss:3.421991 +step:6330 train loss:3.449395 +step:6331 train loss:3.470066 +step:6332 train loss:3.607715 +step:6333 train loss:3.477660 +step:6334 train loss:3.455383 +step:6335 train loss:3.428379 +step:6336 train loss:3.460770 +step:6337 train loss:3.486207 +step:6338 train loss:3.440362 +step:6339 train loss:3.483598 +step:6340 train loss:3.464701 +step:6341 train loss:3.478558 +step:6342 train loss:3.477358 +step:6343 train loss:3.573333 +step:6344 train loss:3.424968 +step:6345 train loss:3.443819 +step:6346 train loss:3.518842 +step:6347 train loss:3.397910 +step:6348 train loss:3.486960 +step:6349 train loss:3.466908 +step:6350 train loss:3.437316 +step:6351 train loss:3.437833 +step:6352 train loss:3.457847 +step:6353 train loss:3.474088 +step:6354 train loss:3.485949 +step:6355 train loss:3.501214 +step:6356 train loss:3.509134 +step:6357 train loss:3.368189 +step:6358 train loss:3.455628 +step:6359 train loss:3.510832 +step:6360 train loss:3.424069 +step:6361 train loss:3.424007 +step:6362 train loss:3.468231 +step:6363 train loss:3.446068 +step:6364 train loss:3.430717 +step:6365 train loss:3.504338 +step:6366 train loss:3.514087 +step:6367 train loss:3.448966 +step:6368 train loss:3.487328 +step:6369 train loss:3.454568 +step:6370 train loss:3.503146 +step:6371 train loss:3.420254 +step:6372 train loss:3.450454 +step:6373 train loss:3.474697 +step:6374 train loss:3.506006 +step:6375 train loss:3.465484 +step:6376 train loss:3.487657 +step:6377 train loss:3.490891 +step:6378 train loss:3.434081 +step:6379 train loss:3.478745 +step:6380 train loss:3.523101 +step:6381 train loss:3.484538 +step:6382 train loss:3.442087 +step:6383 train loss:3.502026 +step:6384 train loss:3.480142 +step:6385 train loss:3.455125 +step:6386 train loss:3.487227 +step:6387 train loss:3.470695 +step:6388 train loss:3.507550 +step:6389 train loss:3.520123 +step:6390 train loss:3.467402 +step:6391 train loss:3.455017 +step:6392 train loss:3.441314 +step:6393 train loss:3.495724 +step:6394 train loss:3.486269 +step:6395 train loss:3.663725 +step:6396 train loss:3.485737 +step:6397 train loss:3.428091 +step:6398 train loss:3.500115 +step:6399 train loss:3.439665 +step:6400 train loss:3.514496 +step:6401 train loss:3.552615 +step:6402 train loss:3.481472 +step:6403 train loss:3.475644 +step:6404 train loss:3.454164 +step:6405 train loss:3.480032 +step:6406 train loss:3.483772 +step:6407 train loss:3.544803 +step:6408 train loss:3.437902 +step:6409 train loss:3.420585 +step:6410 train loss:3.555704 +step:6411 train loss:3.482641 +step:6412 train loss:3.483897 +step:6413 train loss:3.489167 +step:6414 train loss:3.438689 
+step:6415 train loss:3.504599 +step:6416 train loss:3.469152 +step:6417 train loss:3.438304 +step:6418 train loss:3.429423 +step:6419 train loss:3.512379 +step:6420 train loss:3.444171 +step:6421 train loss:3.469326 +step:6422 train loss:3.459467 +step:6423 train loss:3.466546 +step:6424 train loss:3.489069 +step:6425 train loss:3.488019 +step:6426 train loss:3.528530 +step:6427 train loss:3.493067 +step:6428 train loss:3.523784 +step:6429 train loss:3.495379 +step:6430 train loss:3.464871 +step:6431 train loss:3.444011 +step:6432 train loss:3.475317 +step:6433 train loss:3.490275 +step:6434 train loss:3.369894 +step:6435 train loss:3.562432 +step:6436 train loss:3.485212 +step:6437 train loss:3.444691 +step:6438 train loss:3.480567 +step:6439 train loss:3.447662 +step:6440 train loss:3.468410 +step:6441 train loss:3.460125 +step:6442 train loss:3.401773 +step:6443 train loss:3.460833 +step:6444 train loss:3.594677 +step:6445 train loss:3.499406 +step:6446 train loss:3.505958 +step:6447 train loss:3.484802 +step:6448 train loss:3.431622 +step:6449 train loss:3.457008 +step:6450 train loss:3.443799 +step:6451 train loss:3.426725 +step:6452 train loss:3.432795 +step:6453 train loss:3.473594 +step:6454 train loss:3.500837 +step:6455 train loss:3.489153 +step:6456 train loss:3.506887 +step:6457 train loss:3.485012 +step:6458 train loss:3.457769 +step:6459 train loss:3.439027 +step:6460 train loss:3.443738 +step:6461 train loss:3.446244 +step:6462 train loss:3.440831 +step:6463 train loss:3.537606 +step:6464 train loss:3.447090 +step:6465 train loss:3.487693 +step:6466 train loss:3.502134 +step:6467 train loss:3.426746 +step:6468 train loss:3.506047 +step:6469 train loss:3.413210 +step:6470 train loss:3.538063 +step:6471 train loss:3.442126 +step:6472 train loss:3.600634 +step:6473 train loss:3.485066 +step:6474 train loss:3.518396 +step:6475 train loss:3.463377 +step:6476 train loss:3.531446 +step:6477 train loss:3.470132 +step:6478 train loss:3.596113 +step:6479 train loss:3.510509 +step:6480 train loss:3.445550 +step:6481 train loss:3.499741 +step:6482 train loss:3.442278 +step:6483 train loss:3.503989 +step:6484 train loss:3.460088 +step:6485 train loss:3.522367 +step:6486 train loss:3.454885 +step:6487 train loss:3.449618 +step:6488 train loss:3.450451 +step:6489 train loss:3.450662 +step:6490 train loss:3.475529 +step:6491 train loss:3.446951 +step:6492 train loss:3.548571 +step:6493 train loss:3.453752 +step:6494 train loss:3.456693 +step:6495 train loss:3.455537 +step:6496 train loss:3.487044 +step:6497 train loss:3.504721 +step:6498 train loss:3.609877 +step:6499 train loss:3.588112 +step:6500 validation loss:3.413903 total_sharp:1.0374e-03 L1_sharp:3.6181e-03 L2_sharp:1.1292e-04 L3_sharp:2.0348e-04 L4_sharp:8.7298e-05 L5_sharp:1.0228e-04 L6_sharp:1.3169e-04 L7_sharp:2.3612e-04 L8_sharp:1.6604e-04 L9_sharp:7.4855e-05 L10_sharp:4.9716e-05 L11_sharp:4.0283e-05 L12_sharp:4.5215e-05 total_fnorm:4.2266e+00 total_l1_linf:3.4549e+04 total_spectral:4.2266e+00 L1_fnorm:1.0882e+00 L2_fnorm:9.6681e-01 L3_fnorm:1.0760e+00 L4_fnorm:1.1567e+00 L5_fnorm:1.1856e+00 L6_fnorm:1.1937e+00 L7_fnorm:1.1879e+00 L8_fnorm:1.1950e+00 L9_fnorm:1.1977e+00 L10_fnorm:1.2053e+00 L11_fnorm:1.2016e+00 L12_fnorm:1.2050e+00 L1_l1linf:8.8738e-01 L2_l1linf:7.8669e-01 L3_l1linf:8.0638e-01 L4_l1linf:8.3400e-01 L5_l1linf:8.2884e-01 L6_l1linf:8.1905e-01 L7_l1linf:8.1278e-01 L8_l1linf:8.0134e-01 L9_l1linf:8.0620e-01 L10_l1linf:8.0908e-01 L11_l1linf:8.0170e-01 L12_l1linf:7.8655e-01 L1_spectral:2.4100e-02 L2_spectral:2.4070e-02 
L3_spectral:2.4086e-02 L4_spectral:2.4107e-02 L5_spectral:2.4107e-02 L6_spectral:2.4091e-02 L7_spectral:2.4092e-02 L8_spectral:2.4089e-02 L9_spectral:2.4088e-02 L10_spectral:2.4086e-02 L11_spectral:2.4089e-02 L12_spectral:2.4097e-02 ip_v_neg_g:6.8608e-03 cos_v_neg_g:1.7775e-03 v_norm:4.2266e+00 g_norm:9.1321e-01 hv_norm:6.9562e-01 cos_v_hv:6.3032e-03 hg_norm:1.8938e+01 cos_g_hg:6.1933e-01 v_par:2.1806e-03 v_perp:4.2266e+00 L1_cos_v_neg_g:4.0800e-03 L1_v_norm:1.0882e+00 L2_cos_v_neg_g:3.3820e-03 L2_v_norm:9.6681e-01 L3_cos_v_neg_g:3.8290e-03 L3_v_norm:1.0760e+00 L4_cos_v_neg_g:2.7717e-03 L4_v_norm:1.1567e+00 L5_cos_v_neg_g:3.2771e-03 L5_v_norm:1.1856e+00 L6_cos_v_neg_g:3.5877e-03 L6_v_norm:1.1937e+00 L7_cos_v_neg_g:2.0402e-03 L7_v_norm:1.1879e+00 L8_cos_v_neg_g:2.6161e-03 L8_v_norm:1.1950e+00 L9_cos_v_neg_g:1.7233e-03 L9_v_norm:1.1977e+00 L10_cos_v_neg_g:2.8178e-03 L10_v_norm:1.2053e+00 L11_cos_v_neg_g:2.4544e-03 L11_v_norm:1.2016e+00 L12_cos_v_neg_g:2.2899e-03 L12_v_norm:1.2050e+00 +step:6500 train loss:3.431328 +step:6501 train loss:3.446864 +step:6502 train loss:3.469300 +step:6503 train loss:3.525545 +step:6504 train loss:3.474593 +step:6505 train loss:3.479971 +step:6506 train loss:3.443141 +step:6507 train loss:3.508038 +step:6508 train loss:3.479180 +step:6509 train loss:3.458736 +step:6510 train loss:3.468716 +step:6511 train loss:3.482930 +step:6512 train loss:3.424584 +step:6513 train loss:3.493122 +step:6514 train loss:3.368222 +step:6515 train loss:3.459009 +step:6516 train loss:3.508522 +step:6517 train loss:3.420716 +step:6518 train loss:3.462826 +step:6519 train loss:3.455499 +step:6520 train loss:3.541071 +step:6521 train loss:3.518779 +step:6522 train loss:3.526682 +step:6523 train loss:3.425090 +step:6524 train loss:3.506851 +step:6525 train loss:3.496211 +step:6526 train loss:3.428102 +step:6527 train loss:3.486140 +step:6528 train loss:3.507358 +step:6529 train loss:3.531684 +step:6530 train loss:3.433017 +step:6531 train loss:3.513747 +step:6532 train loss:3.445466 +step:6533 train loss:3.480548 +step:6534 train loss:3.489998 +step:6535 train loss:3.462978 +step:6536 train loss:3.603558 +step:6537 train loss:3.409855 +step:6538 train loss:3.515595 +step:6539 train loss:3.441213 +step:6540 train loss:3.551220 +step:6541 train loss:3.533893 +step:6542 train loss:3.490574 +step:6543 train loss:3.441726 +step:6544 train loss:3.425945 +step:6545 train loss:3.417762 +step:6546 train loss:3.471485 +step:6547 train loss:3.533625 +step:6548 train loss:3.472167 +step:6549 train loss:3.488305 +step:6550 train loss:3.598867 +step:6551 train loss:3.481259 +step:6552 train loss:3.472350 +step:6553 train loss:3.513680 +step:6554 train loss:3.404107 +step:6555 train loss:3.488487 +step:6556 train loss:3.357596 +step:6557 train loss:3.709535 +step:6558 train loss:3.539807 +step:6559 train loss:3.449308 +step:6560 train loss:3.493054 +step:6561 train loss:3.459939 +step:6562 train loss:3.483892 +step:6563 train loss:3.375481 +step:6564 train loss:3.478514 +step:6565 train loss:3.383419 +step:6566 train loss:3.496642 +step:6567 train loss:3.467334 +step:6568 train loss:3.512830 +step:6569 train loss:3.461528 +step:6570 train loss:3.497985 +step:6571 train loss:3.427314 +step:6572 train loss:3.503170 +step:6573 train loss:3.516888 +step:6574 train loss:3.500713 +step:6575 train loss:3.447041 +step:6576 train loss:3.436006 +step:6577 train loss:3.505641 +step:6578 train loss:3.375809 +step:6579 train loss:3.478552 +step:6580 train loss:3.433394 +step:6581 train loss:3.445182 +step:6582 
train loss:3.424366 +step:6583 train loss:3.526206 +step:6584 train loss:3.456084 +step:6585 train loss:3.491636 +step:6586 train loss:3.499132 +step:6587 train loss:3.508116 +step:6588 train loss:3.474512 +step:6589 train loss:3.504474 +step:6590 train loss:3.441999 +step:6591 train loss:3.493136 +step:6592 train loss:3.432108 +step:6593 train loss:3.445322 +step:6594 train loss:3.469084 +step:6595 train loss:3.452640 +step:6596 train loss:3.451843 +step:6597 train loss:3.473928 +step:6598 train loss:3.516591 +step:6599 train loss:3.408499 +step:6600 train loss:3.468290 +step:6601 train loss:3.523080 +step:6602 train loss:3.448374 +step:6603 train loss:3.473778 +step:6604 train loss:3.485781 +step:6605 train loss:3.465546 +step:6606 train loss:3.525558 +step:6607 train loss:3.445491 +step:6608 train loss:3.458524 +step:6609 train loss:3.429639 +step:6610 train loss:3.540731 +step:6611 train loss:3.463866 +step:6612 train loss:3.505892 +step:6613 train loss:3.421611 +step:6614 train loss:3.451662 +step:6615 train loss:3.451307 +step:6616 train loss:3.433738 +step:6617 train loss:3.471121 +step:6618 train loss:3.460467 +step:6619 train loss:3.436106 +step:6620 train loss:3.537426 +step:6621 train loss:3.414826 +step:6622 train loss:3.487881 +step:6623 train loss:3.419416 +step:6624 train loss:3.489643 +step:6625 train loss:3.535992 +step:6626 train loss:3.498245 +step:6627 train loss:3.444917 +step:6628 train loss:3.505811 +step:6629 train loss:3.405690 +step:6630 train loss:3.445175 +step:6631 train loss:3.481476 +step:6632 train loss:3.518053 +step:6633 train loss:3.472909 +step:6634 train loss:3.531109 +step:6635 train loss:3.434197 +step:6636 train loss:3.475259 +step:6637 train loss:3.445939 +step:6638 train loss:3.443190 +step:6639 train loss:3.458842 +step:6640 train loss:3.448767 +step:6641 train loss:3.461846 +step:6642 train loss:3.459002 +step:6643 train loss:3.541241 +step:6644 train loss:3.540130 +step:6645 train loss:3.413582 +step:6646 train loss:3.504565 +step:6647 train loss:3.462047 +step:6648 train loss:3.565349 +step:6649 train loss:3.492902 +step:6650 train loss:3.442273 +step:6651 train loss:3.490627 +step:6652 train loss:3.504611 +step:6653 train loss:3.448997 +step:6654 train loss:3.442986 +step:6655 train loss:3.483578 +step:6656 train loss:3.454009 +step:6657 train loss:3.480479 +step:6658 train loss:3.459227 +step:6659 train loss:3.614489 +step:6660 train loss:3.515236 +step:6661 train loss:3.436939 +step:6662 train loss:3.470841 +step:6663 train loss:3.402633 +step:6664 train loss:3.482754 +step:6665 train loss:3.491318 +step:6666 train loss:3.508639 +step:6667 train loss:3.422935 +step:6668 train loss:3.550710 +step:6669 train loss:3.434922 +step:6670 train loss:3.443561 +step:6671 train loss:3.525372 +step:6672 train loss:3.473420 +step:6673 train loss:3.484977 +step:6674 train loss:3.460226 +step:6675 train loss:3.478108 +step:6676 train loss:3.485562 +step:6677 train loss:3.442644 +step:6678 train loss:3.513359 +step:6679 train loss:3.550211 +step:6680 train loss:3.552697 +step:6681 train loss:3.504996 +step:6682 train loss:3.444340 +step:6683 train loss:3.467370 +step:6684 train loss:3.479925 +step:6685 train loss:3.493969 +step:6686 train loss:3.424554 +step:6687 train loss:3.445227 +step:6688 train loss:3.489603 +step:6689 train loss:3.498397 +step:6690 train loss:3.471156 +step:6691 train loss:3.507244 +step:6692 train loss:3.515142 +step:6693 train loss:3.546897 +step:6694 train loss:3.499160 +step:6695 train loss:3.473466 +step:6696 train loss:3.412511 
+step:6697 train loss:3.625155 +step:6698 train loss:3.471210 +step:6699 train loss:3.464942 +step:6700 train loss:3.478868 +step:6701 train loss:3.536774 +step:6702 train loss:3.427218 +step:6703 train loss:3.473876 +step:6704 train loss:3.459996 +step:6705 train loss:3.474205 +step:6706 train loss:3.448483 +step:6707 train loss:3.523913 +step:6708 train loss:3.474661 +step:6709 train loss:3.503660 +step:6710 train loss:3.493723 +step:6711 train loss:3.445085 +step:6712 train loss:3.431730 +step:6713 train loss:3.459869 +step:6714 train loss:3.502789 +step:6715 train loss:3.443749 +step:6716 train loss:3.525023 +step:6717 train loss:3.464418 +step:6718 train loss:3.491397 +step:6719 train loss:3.521551 +step:6720 train loss:3.452507 +step:6721 train loss:3.468492 +step:6722 train loss:3.447861 +step:6723 train loss:3.572690 +step:6724 train loss:3.433453 +step:6725 train loss:3.491448 +step:6726 train loss:3.447477 +step:6727 train loss:3.512368 +step:6728 train loss:3.608033 +step:6729 train loss:3.472234 +step:6730 train loss:3.464723 +step:6731 train loss:3.508165 +step:6732 train loss:3.384948 +step:6733 train loss:3.518566 +step:6734 train loss:3.451128 +step:6735 train loss:3.476072 +step:6736 train loss:3.474842 +step:6737 train loss:3.471828 +step:6738 train loss:3.501482 +step:6739 train loss:3.459411 +step:6740 train loss:3.408973 +step:6741 train loss:3.522366 +step:6742 train loss:3.482771 +step:6743 train loss:3.486233 +step:6744 train loss:3.375834 +step:6745 train loss:3.534424 +step:6746 train loss:3.458535 +step:6747 train loss:3.458394 +step:6748 train loss:3.527513 +step:6749 train loss:3.511812 +step:6750 validation loss:3.406737 +step:6750 train loss:3.428993 +step:6751 train loss:3.463667 +step:6752 train loss:3.464678 +step:6753 train loss:3.501152 +step:6754 train loss:3.482969 +step:6755 train loss:3.496663 +step:6756 train loss:3.432091 +step:6757 train loss:3.404536 +step:6758 train loss:3.577293 +step:6759 train loss:3.470040 +step:6760 train loss:3.526195 +step:6761 train loss:3.460677 +step:6762 train loss:3.480848 +step:6763 train loss:3.381580 +step:6764 train loss:3.460806 +step:6765 train loss:3.467011 +step:6766 train loss:3.462892 +step:6767 train loss:3.414734 +step:6768 train loss:3.419508 +step:6769 train loss:3.381057 +step:6770 train loss:3.467234 +step:6771 train loss:3.469232 +step:6772 train loss:3.482049 +step:6773 train loss:3.458057 +step:6774 train loss:3.481555 +step:6775 train loss:3.513854 +step:6776 train loss:3.469890 +step:6777 train loss:3.546422 +step:6778 train loss:3.436733 +step:6779 train loss:3.483530 +step:6780 train loss:3.415737 +step:6781 train loss:3.478660 +step:6782 train loss:3.396567 +step:6783 train loss:3.426366 +step:6784 train loss:3.457136 +step:6785 train loss:3.439984 +step:6786 train loss:3.457751 +step:6787 train loss:3.531940 +step:6788 train loss:3.472982 +step:6789 train loss:3.480549 +step:6790 train loss:3.476938 +step:6791 train loss:3.488294 +step:6792 train loss:3.489051 +step:6793 train loss:3.488517 +step:6794 train loss:3.460692 +step:6795 train loss:3.455947 +step:6796 train loss:3.461963 +step:6797 train loss:3.559558 +step:6798 train loss:3.462722 +step:6799 train loss:3.452338 +step:6800 train loss:3.419100 +step:6801 train loss:3.554691 +step:6802 train loss:3.501349 +step:6803 train loss:3.494200 +step:6804 train loss:3.520023 +step:6805 train loss:3.481049 +step:6806 train loss:3.411201 +step:6807 train loss:3.474799 +step:6808 train loss:3.457386 +step:6809 train loss:3.483402 +step:6810 
train loss:3.606299 +step:6811 train loss:3.509574 +step:6812 train loss:3.478815 +step:6813 train loss:3.495516 +step:6814 train loss:3.503436 +step:6815 train loss:3.547911 +step:6816 train loss:3.464927 +step:6817 train loss:3.493498 +step:6818 train loss:3.465117 +step:6819 train loss:3.452650 +step:6820 train loss:3.480691 +step:6821 train loss:3.443753 +step:6822 train loss:3.544780 +step:6823 train loss:3.527581 +step:6824 train loss:3.503390 +step:6825 train loss:3.453714 +step:6826 train loss:3.493095 +step:6827 train loss:3.486274 +step:6828 train loss:3.495443 +step:6829 train loss:3.482315 +step:6830 train loss:3.450924 +step:6831 train loss:3.413156 +step:6832 train loss:3.400491 +step:6833 train loss:3.414018 +step:6834 train loss:3.505680 +step:6835 train loss:3.473370 +step:6836 train loss:3.391367 +step:6837 train loss:3.459290 +step:6838 train loss:3.519894 +step:6839 train loss:3.603964 +step:6840 train loss:3.476231 +step:6841 train loss:3.427099 +step:6842 train loss:3.480403 +step:6843 train loss:3.586421 +step:6844 train loss:3.465609 +step:6845 train loss:3.517143 +step:6846 train loss:3.580300 +step:6847 train loss:3.515187 +step:6848 train loss:3.500580 +step:6849 train loss:3.527205 +step:6850 train loss:3.496993 +step:6851 train loss:3.426951 +step:6852 train loss:3.420797 +step:6853 train loss:3.411468 +step:6854 train loss:3.487545 +step:6855 train loss:3.459105 +step:6856 train loss:3.442348 +step:6857 train loss:3.495252 +step:6858 train loss:3.523218 +step:6859 train loss:3.432458 +step:6860 train loss:3.542381 +step:6861 train loss:3.569238 +step:6862 train loss:3.477319 +step:6863 train loss:3.474523 +step:6864 train loss:3.420953 +step:6865 train loss:3.490473 +step:6866 train loss:3.420200 +step:6867 train loss:3.598516 +step:6868 train loss:3.472764 +step:6869 train loss:3.505374 +step:6870 train loss:3.540218 +step:6871 train loss:3.460064 +step:6872 train loss:3.453609 +step:6873 train loss:3.472690 +step:6874 train loss:3.432709 +step:6875 train loss:3.435843 +step:6876 train loss:3.466043 +step:6877 train loss:3.508842 +step:6878 train loss:3.421397 +step:6879 train loss:3.468245 +step:6880 train loss:3.477504 +step:6881 train loss:3.438639 +step:6882 train loss:3.505108 +step:6883 train loss:3.496784 +step:6884 train loss:3.717579 +step:6885 train loss:3.485971 +step:6886 train loss:3.466831 +step:6887 train loss:3.409060 +step:6888 train loss:3.508831 +step:6889 train loss:3.391757 +step:6890 train loss:3.503009 +step:6891 train loss:3.510113 +step:6892 train loss:3.611059 +step:6893 train loss:3.442464 +step:6894 train loss:3.503581 +step:6895 train loss:3.501828 +step:6896 train loss:3.479080 +step:6897 train loss:3.435587 +step:6898 train loss:3.435531 +step:6899 train loss:3.522189 +step:6900 train loss:3.497955 +step:6901 train loss:3.444595 +step:6902 train loss:3.376741 +step:6903 train loss:3.422439 +step:6904 train loss:3.532020 +step:6905 train loss:3.574689 +step:6906 train loss:3.484492 +step:6907 train loss:3.503009 +step:6908 train loss:3.539381 +step:6909 train loss:3.532925 +step:6910 train loss:3.410033 +step:6911 train loss:3.539860 +step:6912 train loss:3.429905 +step:6913 train loss:3.469718 +step:6914 train loss:3.423474 +step:6915 train loss:3.453447 +step:6916 train loss:3.427822 +step:6917 train loss:3.552619 +step:6918 train loss:3.500950 +step:6919 train loss:3.492650 +step:6920 train loss:3.479085 +step:6921 train loss:3.542714 +step:6922 train loss:3.532310 +step:6923 train loss:3.397495 +step:6924 train loss:3.482104 
+step:6925 train loss:3.451584 +step:6926 train loss:3.493191 +step:6927 train loss:3.546321 +step:6928 train loss:3.432495 +step:6929 train loss:3.441760 +step:6930 train loss:3.478132 +step:6931 train loss:3.475730 +step:6932 train loss:3.717708 +step:6933 train loss:3.541863 +step:6934 train loss:3.482322 +step:6935 train loss:3.466679 +step:6936 train loss:3.505365 +step:6937 train loss:3.454771 +step:6938 train loss:3.512558 +step:6939 train loss:3.449597 +step:6940 train loss:3.504805 +step:6941 train loss:3.419636 +step:6942 train loss:3.507772 +step:6943 train loss:3.400257 +step:6944 train loss:3.492168 +step:6945 train loss:3.433106 +step:6946 train loss:3.523819 +step:6947 train loss:3.447497 +step:6948 train loss:3.442398 +step:6949 train loss:3.517224 +step:6950 train loss:3.507377 +step:6951 train loss:3.509184 +step:6952 train loss:3.440121 +step:6953 train loss:3.487997 +step:6954 train loss:3.550750 +step:6955 train loss:3.466676 +step:6956 train loss:3.501282 +step:6957 train loss:3.491857 +step:6958 train loss:3.452473 +step:6959 train loss:3.493114 +step:6960 train loss:3.457782 +step:6961 train loss:3.462370 +step:6962 train loss:3.444753 +step:6963 train loss:3.415700 +step:6964 train loss:3.458574 +step:6965 train loss:3.453461 +step:6966 train loss:3.493800 +step:6967 train loss:3.429851 +step:6968 train loss:3.472782 +step:6969 train loss:3.492197 +step:6970 train loss:3.467900 +step:6971 train loss:3.533031 +step:6972 train loss:3.480806 +step:6973 train loss:3.434011 +step:6974 train loss:3.565692 +step:6975 train loss:3.468257 +step:6976 train loss:3.442259 +step:6977 train loss:3.481552 +step:6978 train loss:3.472710 +step:6979 train loss:3.482917 +step:6980 train loss:3.458260 +step:6981 train loss:3.520170 +step:6982 train loss:3.472743 +step:6983 train loss:3.460012 +step:6984 train loss:3.580211 +step:6985 train loss:3.424027 +step:6986 train loss:3.416226 +step:6987 train loss:3.467349 +step:6988 train loss:3.470800 +step:6989 train loss:3.616568 +step:6990 train loss:3.478484 +step:6991 train loss:3.433958 +step:6992 train loss:3.486285 +step:6993 train loss:3.550802 +step:6994 train loss:3.497554 +step:6995 train loss:3.447958 +step:6996 train loss:3.453277 +step:6997 train loss:3.529988 +step:6998 train loss:3.427107 +step:6999 train loss:3.480247 +step:7000 validation loss:3.399752 total_sharp:6.9631e-04 L1_sharp:3.3409e-03 L2_sharp:6.4609e-05 L3_sharp:1.3743e-04 L4_sharp:6.7561e-05 L5_sharp:6.7289e-05 L6_sharp:1.0472e-04 L7_sharp:1.3858e-04 L8_sharp:8.3699e-05 L9_sharp:5.9688e-05 L10_sharp:4.1823e-05 L11_sharp:3.7569e-05 L12_sharp:4.0310e-05 total_fnorm:4.2455e+00 total_l1_linf:3.4681e+04 total_spectral:4.2455e+00 L1_fnorm:1.0919e+00 L2_fnorm:9.8768e-01 L3_fnorm:1.0886e+00 L4_fnorm:1.1676e+00 L5_fnorm:1.1940e+00 L6_fnorm:1.1952e+00 L7_fnorm:1.1922e+00 L8_fnorm:1.2006e+00 L9_fnorm:1.2006e+00 L10_fnorm:1.2088e+00 L11_fnorm:1.2035e+00 L12_fnorm:1.2057e+00 L1_l1linf:8.8990e-01 L2_l1linf:7.9503e-01 L3_l1linf:8.2154e-01 L4_l1linf:8.3507e-01 L5_l1linf:8.3269e-01 L6_l1linf:8.2224e-01 L7_l1linf:8.1414e-01 L8_l1linf:8.1026e-01 L9_l1linf:8.0929e-01 L10_l1linf:8.1614e-01 L11_l1linf:8.0174e-01 L12_l1linf:7.8869e-01 L1_spectral:2.4105e-02 L2_spectral:2.4080e-02 L3_spectral:2.4091e-02 L4_spectral:2.4106e-02 L5_spectral:2.4119e-02 L6_spectral:2.4090e-02 L7_spectral:2.4089e-02 L8_spectral:2.4084e-02 L9_spectral:2.4088e-02 L10_spectral:2.4085e-02 L11_spectral:2.4089e-02 L12_spectral:2.4092e-02 ip_v_neg_g:7.8838e-03 cos_v_neg_g:1.7926e-03 v_norm:4.2455e+00 
g_norm:1.0359e+00 hv_norm:5.2484e-01 cos_v_hv:5.6326e-03 hg_norm:7.5617e+01 cos_g_hg:4.5560e-01 v_par:1.5732e-03 v_perp:4.2455e+00 L1_cos_v_neg_g:3.1279e-03 L1_v_norm:1.0919e+00 L2_cos_v_neg_g:3.1869e-03 L2_v_norm:9.8768e-01 L3_cos_v_neg_g:2.9724e-03 L3_v_norm:1.0886e+00 L4_cos_v_neg_g:3.4896e-03 L4_v_norm:1.1676e+00 L5_cos_v_neg_g:3.1871e-03 L5_v_norm:1.1940e+00 L6_cos_v_neg_g:3.8935e-03 L6_v_norm:1.1952e+00 L7_cos_v_neg_g:5.6857e-03 L7_v_norm:1.1922e+00 L8_cos_v_neg_g:4.7253e-03 L8_v_norm:1.2006e+00 L9_cos_v_neg_g:3.7857e-03 L9_v_norm:1.2006e+00 L10_cos_v_neg_g:3.1766e-03 L10_v_norm:1.2088e+00 L11_cos_v_neg_g:2.7121e-03 L11_v_norm:1.2035e+00 L12_cos_v_neg_g:2.6819e-03 L12_v_norm:1.2057e+00 +step:7000 train loss:3.554022 +step:7001 train loss:3.461717 +step:7002 train loss:3.450749 +step:7003 train loss:3.475039 +step:7004 train loss:3.472384 +step:7005 train loss:3.453663 +step:7006 train loss:3.462275 +step:7007 train loss:3.511760 +step:7008 train loss:3.451795 +step:7009 train loss:3.491616 +step:7010 train loss:3.427978 +step:7011 train loss:3.482931 +step:7012 train loss:3.455326 +step:7013 train loss:3.532671 +step:7014 train loss:3.436408 +step:7015 train loss:3.496812 +step:7016 train loss:3.485318 +step:7017 train loss:3.451365 +step:7018 train loss:3.532572 +step:7019 train loss:3.453946 +step:7020 train loss:3.503706 +step:7021 train loss:3.446865 +step:7022 train loss:3.466375 +step:7023 train loss:3.477955 +step:7024 train loss:3.443373 +step:7025 train loss:3.491330 +step:7026 train loss:3.449019 +step:7027 train loss:3.513042 +step:7028 train loss:3.435461 +step:7029 train loss:3.427239 +step:7030 train loss:3.427961 +step:7031 train loss:3.485086 +step:7032 train loss:3.491376 +step:7033 train loss:3.464310 +step:7034 train loss:3.486427 +step:7035 train loss:3.535796 +step:7036 train loss:3.460510 +step:7037 train loss:3.482236 +step:7038 train loss:3.446362 +step:7039 train loss:3.500051 +step:7040 train loss:3.418615 +step:7041 train loss:3.512381 +step:7042 train loss:3.440454 +step:7043 train loss:3.416502 +step:7044 train loss:3.461373 +step:7045 train loss:3.461420 +step:7046 train loss:3.455791 +step:7047 train loss:3.491066 +step:7048 train loss:3.441196 +step:7049 train loss:3.450199 +step:7050 train loss:3.476206 +step:7051 train loss:3.491389 +step:7052 train loss:3.496188 +step:7053 train loss:3.456696 +step:7054 train loss:3.434198 +step:7055 train loss:3.506358 +step:7056 train loss:3.501131 +step:7057 train loss:3.428264 +step:7058 train loss:3.547446 +step:7059 train loss:3.453526 +step:7060 train loss:3.464663 +step:7061 train loss:3.436378 +step:7062 train loss:3.460382 +step:7063 train loss:3.521449 +step:7064 train loss:3.441371 +step:7065 train loss:3.491761 +step:7066 train loss:3.450989 +step:7067 train loss:3.490754 +step:7068 train loss:3.461944 +step:7069 train loss:3.427036 +step:7070 train loss:3.451417 +step:7071 train loss:3.423094 +step:7072 train loss:3.424057 +step:7073 train loss:3.419780 +step:7074 train loss:3.416438 +step:7075 train loss:3.434854 +step:7076 train loss:3.444317 +step:7077 train loss:3.453408 +step:7078 train loss:3.499815 +step:7079 train loss:3.510301 +step:7080 train loss:3.457881 +step:7081 train loss:3.475719 +step:7082 train loss:3.441372 +step:7083 train loss:3.475665 +step:7084 train loss:3.467005 +step:7085 train loss:3.429576 +step:7086 train loss:3.465735 +step:7087 train loss:3.441183 +step:7088 train loss:3.565659 +step:7089 train loss:3.459266 +step:7090 train loss:3.425166 +step:7091 train loss:3.438547 
+step:7092 train loss:3.416720 +step:7093 train loss:3.510795 +step:7094 train loss:3.431336 +step:7095 train loss:3.449740 +step:7096 train loss:3.462312 +step:7097 train loss:3.454764 +step:7098 train loss:3.475593 +step:7099 train loss:3.431920 +step:7100 train loss:3.466029 +step:7101 train loss:3.531908 +step:7102 train loss:3.427085 +step:7103 train loss:3.448916 +step:7104 train loss:3.481637 +step:7105 train loss:3.462506 +step:7106 train loss:3.444003 +step:7107 train loss:3.481640 +step:7108 train loss:3.548720 +step:7109 train loss:3.484071 +step:7110 train loss:3.506909 +step:7111 train loss:3.484197 +step:7112 train loss:3.476761 +step:7113 train loss:3.471747 +step:7114 train loss:3.486165 +step:7115 train loss:3.526509 +step:7116 train loss:3.452899 +step:7117 train loss:3.494831 +step:7118 train loss:3.507639 +step:7119 train loss:3.468417 +step:7120 train loss:3.521464 +step:7121 train loss:3.438081 +step:7122 train loss:3.441920 +step:7123 train loss:3.382617 +step:7124 train loss:3.537141 +step:7125 train loss:3.389088 +step:7126 train loss:3.558063 +step:7127 train loss:3.513731 +step:7128 train loss:3.460090 +step:7129 train loss:3.467495 +step:7130 train loss:3.458841 +step:7131 train loss:3.399934 +step:7132 train loss:3.440471 +step:7133 train loss:3.486499 +step:7134 train loss:3.412373 +step:7135 train loss:3.474313 +step:7136 train loss:3.453646 +step:7137 train loss:3.433330 +step:7138 train loss:3.422389 +step:7139 train loss:3.424859 +step:7140 train loss:3.459705 +step:7141 train loss:3.457136 +step:7142 train loss:3.456539 +step:7143 train loss:3.489536 +step:7144 train loss:3.437543 +step:7145 train loss:3.456657 +step:7146 train loss:3.465330 +step:7147 train loss:3.489339 +step:7148 train loss:3.489871 +step:7149 train loss:3.501463 +step:7150 train loss:3.474593 +step:7151 train loss:3.438168 +step:7152 train loss:3.410150 +step:7153 train loss:3.445659 +step:7154 train loss:3.466243 +step:7155 train loss:3.477153 +step:7156 train loss:3.449674 +step:7157 train loss:3.468987 +step:7158 train loss:3.425789 +step:7159 train loss:3.481030 +step:7160 train loss:3.488805 +step:7161 train loss:3.440095 +step:7162 train loss:3.485720 +step:7163 train loss:3.423042 +step:7164 train loss:3.458328 +step:7165 train loss:3.461906 +step:7166 train loss:3.520879 +step:7167 train loss:3.496964 +step:7168 train loss:3.475522 +step:7169 train loss:3.454976 +step:7170 train loss:3.482587 +step:7171 train loss:3.431813 +step:7172 train loss:3.596802 +step:7173 train loss:3.436686 +step:7174 train loss:3.481890 +step:7175 train loss:3.457110 +step:7176 train loss:3.467554 +step:7177 train loss:3.481977 +step:7178 train loss:3.482239 +step:7179 train loss:3.467465 +step:7180 train loss:3.467592 +step:7181 train loss:3.499321 +step:7182 train loss:3.446733 +step:7183 train loss:3.519366 +step:7184 train loss:3.608874 +step:7185 train loss:3.525276 +step:7186 train loss:3.464530 +step:7187 train loss:3.474845 +step:7188 train loss:3.459896 +step:7189 train loss:3.458952 +step:7190 train loss:3.463379 +step:7191 train loss:3.454713 +step:7192 train loss:3.483406 +step:7193 train loss:3.403843 +step:7194 train loss:3.467486 +step:7195 train loss:3.445461 +step:7196 train loss:3.493337 +step:7197 train loss:3.470495 +step:7198 train loss:3.533576 +step:7199 train loss:3.485902 +step:7200 train loss:3.475939 +step:7201 train loss:3.485804 +step:7202 train loss:3.459566 +step:7203 train loss:3.482044 +step:7204 train loss:3.446618 +step:7205 train loss:3.408015 +step:7206 train 
loss:3.434034 +step:7207 train loss:3.608700 +step:7208 train loss:3.440913 +step:7209 train loss:3.524572 +step:7210 train loss:3.461930 +step:7211 train loss:3.491718 +step:7212 train loss:3.572653 +step:7213 train loss:3.420792 +step:7214 train loss:3.491602 +step:7215 train loss:3.461668 +step:7216 train loss:3.509506 +step:7217 train loss:3.469720 +step:7218 train loss:3.556192 +step:7219 train loss:3.464313 +step:7220 train loss:3.545098 +step:7221 train loss:3.423984 +step:7222 train loss:3.504299 +step:7223 train loss:3.425144 +step:7224 train loss:3.485081 +step:7225 train loss:3.467361 +step:7226 train loss:3.432806 +step:7227 train loss:3.453469 +step:7228 train loss:3.442075 +step:7229 train loss:3.442924 +step:7230 train loss:3.430708 +step:7231 train loss:3.563177 +step:7232 train loss:3.433518 +step:7233 train loss:3.501115 +step:7234 train loss:3.488063 +step:7235 train loss:3.462949 +step:7236 train loss:3.499246 +step:7237 train loss:3.450406 +step:7238 train loss:3.491168 +step:7239 train loss:3.445278 +step:7240 train loss:3.442175 +step:7241 train loss:3.455294 +step:7242 train loss:3.438012 +step:7243 train loss:3.483819 +step:7244 train loss:3.454229 +step:7245 train loss:3.461484 +step:7246 train loss:3.501320 +step:7247 train loss:3.456768 +step:7248 train loss:3.496210 +step:7249 train loss:3.443768 +step:7250 validation loss:3.394631 +step:7250 train loss:3.467577 +step:7251 train loss:3.512626 +step:7252 train loss:3.423985 +step:7253 train loss:3.518010 +step:7254 train loss:3.451147 +step:7255 train loss:3.428576 +step:7256 train loss:3.466964 +step:7257 train loss:3.505976 +step:7258 train loss:3.462839 +step:7259 train loss:3.445977 +step:7260 train loss:3.528736 +step:7261 train loss:3.489315 +step:7262 train loss:3.443242 +step:7263 train loss:3.487793 +step:7264 train loss:3.470242 +step:7265 train loss:3.375193 +step:7266 train loss:3.501719 +step:7267 train loss:3.418909 +step:7268 train loss:3.481019 +step:7269 train loss:3.487538 +step:7270 train loss:3.443892 +step:7271 train loss:3.460080 +step:7272 train loss:3.466065 +step:7273 train loss:3.462061 +step:7274 train loss:3.439244 +step:7275 train loss:3.509473 +step:7276 train loss:3.417028 +step:7277 train loss:3.466406 +step:7278 train loss:3.435466 +step:7279 train loss:3.416034 +step:7280 train loss:3.485430 +step:7281 train loss:3.509669 +step:7282 train loss:3.508205 +step:7283 train loss:3.398783 +step:7284 train loss:3.439878 +step:7285 train loss:3.467950 +step:7286 train loss:3.601104 +step:7287 train loss:3.509332 +step:7288 train loss:3.461722 +step:7289 train loss:3.471848 +step:7290 train loss:3.514908 +step:7291 train loss:3.479508 +step:7292 train loss:3.546844 +step:7293 train loss:3.446765 +step:7294 train loss:3.530510 +step:7295 train loss:3.420086 +step:7296 train loss:3.416794 +step:7297 train loss:3.461985 +step:7298 train loss:3.436929 +step:7299 train loss:3.479788 +step:7300 train loss:3.462043 +step:7301 train loss:3.414472 +step:7302 train loss:3.560884 +step:7303 train loss:3.448524 +step:7304 train loss:3.393949 +step:7305 train loss:3.468679 +step:7306 train loss:3.502003 +step:7307 train loss:3.506717 +step:7308 train loss:3.456727 +step:7309 train loss:3.423402 +step:7310 train loss:3.452509 +step:7311 train loss:3.435871 +step:7312 train loss:3.477904 +step:7313 train loss:3.512529 +step:7314 train loss:3.410284 +step:7315 train loss:3.403082 +step:7316 train loss:3.547498 +step:7317 train loss:3.483549 +step:7318 train loss:3.424054 +step:7319 train loss:3.451118 
+step:7320 train loss:3.483240 +step:7321 train loss:3.508638 +step:7322 train loss:3.390531 +step:7323 train loss:3.445117 +step:7324 train loss:3.472566 +step:7325 train loss:3.435013 +step:7326 train loss:3.460966 +step:7327 train loss:3.441095 +step:7328 train loss:3.554890 +step:7329 train loss:3.400368 +step:7330 train loss:3.457766 +step:7331 train loss:3.447986 +step:7332 train loss:3.496982 +step:7333 train loss:3.473124 +step:7334 train loss:3.441531 +step:7335 train loss:3.442482 +step:7336 train loss:3.696148 +step:7337 train loss:3.478880 +step:7338 train loss:3.477565 +step:7339 train loss:3.486097 +step:7340 train loss:3.477211 +step:7341 train loss:3.464259 +step:7342 train loss:3.454108 +step:7343 train loss:3.470520 +step:7344 train loss:3.548058 +step:7345 train loss:3.409866 +step:7346 train loss:3.443915 +step:7347 train loss:3.435756 +step:7348 train loss:3.441764 +step:7349 train loss:3.542170 +step:7350 train loss:3.527181 +step:7351 train loss:3.462052 +step:7352 train loss:3.487535 +step:7353 train loss:3.475039 +step:7354 train loss:3.421815 +step:7355 train loss:3.603664 +step:7356 train loss:3.574519 +step:7357 train loss:3.496158 +step:7358 train loss:3.474815 +step:7359 train loss:3.449886 +step:7360 train loss:3.456656 +step:7361 train loss:3.411646 +step:7362 train loss:3.461355 +step:7363 train loss:3.471655 +step:7364 train loss:3.509245 +step:7365 train loss:3.490262 +step:7366 train loss:3.453926 +step:7367 train loss:3.531368 +step:7368 train loss:3.508529 +step:7369 train loss:3.503133 +step:7370 train loss:3.466744 +step:7371 train loss:3.423254 +step:7372 train loss:3.484370 +step:7373 train loss:3.504212 +step:7374 train loss:3.602651 +step:7375 train loss:3.425388 +step:7376 train loss:3.442395 +step:7377 train loss:3.486116 +step:7378 train loss:3.441405 +step:7379 train loss:3.565675 +step:7380 train loss:3.529045 +step:7381 train loss:3.491186 +step:7382 train loss:3.456165 +step:7383 train loss:3.549470 +step:7384 train loss:3.490746 +step:7385 train loss:3.450220 +step:7386 train loss:3.450757 +step:7387 train loss:3.498684 +step:7388 train loss:3.529945 +step:7389 train loss:3.474183 +step:7390 train loss:3.413644 +step:7391 train loss:3.450614 +step:7392 train loss:3.509389 +step:7393 train loss:3.476204 +step:7394 train loss:3.513139 +step:7395 train loss:3.402659 +step:7396 train loss:3.508739 +step:7397 train loss:3.430299 +step:7398 train loss:3.448104 +step:7399 train loss:3.492971 +step:7400 train loss:3.498381 +step:7401 train loss:3.415404 +step:7402 train loss:3.533756 +step:7403 train loss:3.419129 +step:7404 train loss:3.485214 +step:7405 train loss:3.613127 +step:7406 train loss:3.439018 +step:7407 train loss:3.482931 +step:7408 train loss:3.484443 +step:7409 train loss:3.454879 +step:7410 train loss:3.622868 +step:7411 train loss:3.466696 +step:7412 train loss:3.470275 +step:7413 train loss:3.525555 +step:7414 train loss:3.433142 +step:7415 train loss:3.492573 +step:7416 train loss:3.373325 +step:7417 train loss:3.499933 +step:7418 train loss:3.477938 +step:7419 train loss:3.446943 +step:7420 train loss:3.440155 +step:7421 train loss:3.472476 +step:7422 train loss:3.431491 +step:7423 train loss:3.568829 +step:7424 train loss:3.635532 +step:7425 train loss:3.521837 +step:7426 train loss:3.484673 +step:7427 train loss:3.457071 +step:7428 train loss:3.491248 +step:7429 train loss:3.495268 +step:7430 train loss:3.421619 +step:7431 train loss:3.426679 +step:7432 train loss:3.433392 +step:7433 train loss:3.533861 +step:7434 train 
loss:3.442879 +step:7435 train loss:3.532275 +step:7436 train loss:3.572515 +step:7437 train loss:3.396469 +step:7438 train loss:3.455739 +step:7439 train loss:3.464090 +step:7440 train loss:3.439705 +step:7441 train loss:3.411127 +step:7442 train loss:3.636464 +step:7443 train loss:3.457751 +step:7444 train loss:3.498395 +step:7445 train loss:3.431496 +step:7446 train loss:3.454007 +step:7447 train loss:3.379722 +step:7448 train loss:3.436019 +step:7449 train loss:3.446149 +step:7450 train loss:3.479949 +step:7451 train loss:3.513656 +step:7452 train loss:3.440677 +step:7453 train loss:3.466313 +step:7454 train loss:3.454047 +step:7455 train loss:3.460744 +step:7456 train loss:3.436710 +step:7457 train loss:3.449251 +step:7458 train loss:3.482693 +step:7459 train loss:3.461369 +step:7460 train loss:3.471245 +step:7461 train loss:3.503522 +step:7462 train loss:3.443198 +step:7463 train loss:3.506100 +step:7464 train loss:3.427241 +step:7465 train loss:3.436059 +step:7466 train loss:3.438483 +step:7467 train loss:3.447194 +step:7468 train loss:3.501476 +step:7469 train loss:3.429714 +step:7470 train loss:3.462738 +step:7471 train loss:3.452167 +step:7472 train loss:3.484371 +step:7473 train loss:3.428202 +step:7474 train loss:3.414041 +step:7475 train loss:3.441786 +step:7476 train loss:3.479603 +step:7477 train loss:3.453918 +step:7478 train loss:3.451321 +step:7479 train loss:3.463874 +step:7480 train loss:3.747817 +step:7481 train loss:3.394081 +step:7482 train loss:3.465282 +step:7483 train loss:3.461024 +step:7484 train loss:3.483420 +step:7485 train loss:3.467076 +step:7486 train loss:3.491749 +step:7487 train loss:3.484638 +step:7488 train loss:3.507955 +step:7489 train loss:3.503317 +step:7490 train loss:3.448193 +step:7491 train loss:3.469912 +step:7492 train loss:3.578775 +step:7493 train loss:3.552178 +step:7494 train loss:3.577250 +step:7495 train loss:3.445428 +step:7496 train loss:3.433228 +step:7497 train loss:3.536998 +step:7498 train loss:3.469845 +step:7499 train loss:3.505158 +step:7500 validation loss:3.393731 total_sharp:6.3093e-04 L1_sharp:2.2543e-03 L2_sharp:8.3737e-05 L3_sharp:2.6238e-04 L4_sharp:6.0486e-05 L5_sharp:6.7182e-05 L6_sharp:1.0478e-04 L7_sharp:1.0898e-04 L8_sharp:7.0424e-05 L9_sharp:5.0872e-05 L10_sharp:3.9577e-05 L11_sharp:3.2483e-05 L12_sharp:4.4842e-05 total_fnorm:4.2373e+00 total_l1_linf:3.4599e+04 total_spectral:4.2373e+00 L1_fnorm:1.0914e+00 L2_fnorm:9.8295e-01 L3_fnorm:1.0812e+00 L4_fnorm:1.1654e+00 L5_fnorm:1.1904e+00 L6_fnorm:1.1902e+00 L7_fnorm:1.1916e+00 L8_fnorm:1.2003e+00 L9_fnorm:1.2007e+00 L10_fnorm:1.2073e+00 L11_fnorm:1.1990e+00 L12_fnorm:1.2056e+00 L1_l1linf:8.8742e-01 L2_l1linf:7.9409e-01 L3_l1linf:8.1211e-01 L4_l1linf:8.3738e-01 L5_l1linf:8.2846e-01 L6_l1linf:8.2271e-01 L7_l1linf:8.1453e-01 L8_l1linf:8.1435e-01 L9_l1linf:8.1503e-01 L10_l1linf:8.1434e-01 L11_l1linf:7.9271e-01 L12_l1linf:7.9207e-01 L1_spectral:2.4106e-02 L2_spectral:2.4068e-02 L3_spectral:2.4085e-02 L4_spectral:2.4106e-02 L5_spectral:2.4117e-02 L6_spectral:2.4091e-02 L7_spectral:2.4088e-02 L8_spectral:2.4085e-02 L9_spectral:2.4088e-02 L10_spectral:2.4081e-02 L11_spectral:2.4096e-02 L12_spectral:2.4092e-02 ip_v_neg_g:5.4293e-03 cos_v_neg_g:1.2839e-03 v_norm:4.2373e+00 g_norm:9.9799e-01 hv_norm:5.6745e-01 cos_v_hv:4.7114e-03 hg_norm:2.4022e+01 cos_g_hg:4.2500e-01 v_par:1.3455e-03 v_perp:4.2373e+00 L1_cos_v_neg_g:2.5344e-03 L1_v_norm:1.0914e+00 L2_cos_v_neg_g:2.4616e-03 L2_v_norm:9.8295e-01 L3_cos_v_neg_g:1.3362e-03 L3_v_norm:1.0812e+00 L4_cos_v_neg_g:1.8223e-03 
L4_v_norm:1.1654e+00 L5_cos_v_neg_g:8.5714e-04 L5_v_norm:1.1904e+00 L6_cos_v_neg_g:1.6885e-03 L6_v_norm:1.1902e+00 L7_cos_v_neg_g:1.5050e-03 L7_v_norm:1.1916e+00 L8_cos_v_neg_g:1.8392e-03 L8_v_norm:1.2003e+00 L9_cos_v_neg_g:2.9530e-03 L9_v_norm:1.2007e+00 L10_cos_v_neg_g:3.9252e-03 L10_v_norm:1.2073e+00 L11_cos_v_neg_g:2.8830e-03 L11_v_norm:1.1990e+00 L12_cos_v_neg_g:3.3806e-03 L12_v_norm:1.2056e+00 +step:7500 train loss:3.450872 +step:7501 train loss:3.441400 +step:7502 train loss:3.434554 +step:7503 train loss:3.406860 +step:7504 train loss:3.433491 +step:7505 train loss:3.422834 +step:7506 train loss:3.484088 +step:7507 train loss:3.400420 +step:7508 train loss:3.469481 +step:7509 train loss:3.442556 +step:7510 train loss:3.470522 +step:7511 train loss:3.478540 +step:7512 train loss:3.747919 +step:7513 train loss:3.430495 +step:7514 train loss:3.458686 +step:7515 train loss:3.425268 +step:7516 train loss:3.438385 +step:7517 train loss:3.474730 +step:7518 train loss:3.446421 +step:7519 train loss:3.459008 +step:7520 train loss:3.524308 +step:7521 train loss:3.409107 +step:7522 train loss:3.466556 +step:7523 train loss:3.499408 +step:7524 train loss:3.445232 +step:7525 train loss:3.448925 +step:7526 train loss:3.398052 +step:7527 train loss:3.406280 +step:7528 train loss:3.506810 +step:7529 train loss:3.480763 +step:7530 train loss:3.429229 +step:7531 train loss:3.504960 +step:7532 train loss:3.492265 +step:7533 train loss:3.421464 +step:7534 train loss:3.484462 +step:7535 train loss:3.486360 +step:7536 train loss:3.518845 +step:7537 train loss:3.536309 +step:7538 train loss:3.565105 +step:7539 train loss:3.463358 +step:7540 train loss:3.447949 +step:7541 train loss:3.502737 +step:7542 train loss:3.465870 +step:7543 train loss:3.423572 +step:7544 train loss:3.465337 +step:7545 train loss:3.453383 +step:7546 train loss:3.408663 +step:7547 train loss:3.453962 +step:7548 train loss:3.469697 +step:7549 train loss:3.451530 +step:7550 train loss:3.445950 +step:7551 train loss:3.551017 +step:7552 train loss:3.461487 +step:7553 train loss:3.498411 +step:7554 train loss:3.422621 +step:7555 train loss:3.516803 +step:7556 train loss:3.418388 +step:7557 train loss:3.513326 +step:7558 train loss:3.500778 +step:7559 train loss:3.459950 +step:7560 train loss:3.553760 +step:7561 train loss:3.521887 +step:7562 train loss:3.429320 +step:7563 train loss:3.424569 +step:7564 train loss:3.476025 +step:7565 train loss:3.495625 +step:7566 train loss:3.487629 +step:7567 train loss:3.501607 +step:7568 train loss:3.447896 +step:7569 train loss:3.508489 +step:7570 train loss:3.489423 +step:7571 train loss:3.574008 +step:7572 train loss:3.424155 +step:7573 train loss:3.488885 +step:7574 train loss:3.452022 +step:7575 train loss:3.448762 +step:7576 train loss:3.454190 +step:7577 train loss:3.471659 +step:7578 train loss:3.528692 +step:7579 train loss:3.464592 +step:7580 train loss:3.453028 +step:7581 train loss:3.438883 +step:7582 train loss:3.495010 +step:7583 train loss:3.433254 +step:7584 train loss:3.415488 +step:7585 train loss:3.381409 +step:7586 train loss:3.420545 +step:7587 train loss:3.480836 +step:7588 train loss:3.613238 +step:7589 train loss:3.432678 +step:7590 train loss:3.497937 +step:7591 train loss:3.501677 +step:7592 train loss:3.462769 +step:7593 train loss:3.487691 +step:7594 train loss:3.484584 +step:7595 train loss:3.454296 +step:7596 train loss:3.504502 +step:7597 train loss:3.409288 +step:7598 train loss:3.475845 +step:7599 train loss:3.465983 +step:7600 train loss:3.425485 +step:7601 train 
loss:3.537921 +step:7602 train loss:3.478658 +step:7603 train loss:3.441399 +step:7604 train loss:3.584909 +step:7605 train loss:3.471879 +step:7606 train loss:3.503608 +step:7607 train loss:3.457926 +step:7608 train loss:3.466826 +step:7609 train loss:3.506775 +step:7610 train loss:3.462627 +step:7611 train loss:3.438327 +step:7612 train loss:3.384897 +step:7613 train loss:3.430497 +step:7614 train loss:3.499633 +step:7615 train loss:3.461304 +step:7616 train loss:3.526410 +step:7617 train loss:3.426221 +step:7618 train loss:3.513539 +step:7619 train loss:3.455053 +step:7620 train loss:3.442848 +step:7621 train loss:3.389711 +step:7622 train loss:3.669235 +step:7623 train loss:3.682551 +step:7624 train loss:3.495029 +step:7625 train loss:3.535209 +step:7626 train loss:3.452028 +step:7627 train loss:3.522216 +step:7628 train loss:3.402640 +step:7629 train loss:3.464632 +step:7630 train loss:3.475760 +step:7631 train loss:3.458933 +step:7632 train loss:3.508607 +step:7633 train loss:3.576517 +step:7634 train loss:3.535161 +step:7635 train loss:3.444981 +step:7636 train loss:3.470712 +step:7637 train loss:3.414312 +step:7638 train loss:3.527122 +step:7639 train loss:3.456142 +step:7640 train loss:3.437509 +step:7641 train loss:3.469249 +step:7642 train loss:3.808743 +step:7643 train loss:3.552687 +step:7644 train loss:3.480066 +step:7645 train loss:3.463278 +step:7646 train loss:3.453383 +step:7647 train loss:3.446626 +step:7648 train loss:3.479396 +step:7649 train loss:3.441937 +step:7650 train loss:3.488176 +step:7651 train loss:3.509201 +step:7652 train loss:3.389636 +step:7653 train loss:3.591547 +step:7654 train loss:3.442514 +step:7655 train loss:3.462128 +step:7656 train loss:3.437802 +step:7657 train loss:3.448618 +step:7658 train loss:3.406099 +step:7659 train loss:3.470993 +step:7660 train loss:3.405778 +step:7661 train loss:3.418872 +step:7662 train loss:3.421656 +step:7663 train loss:3.469599 +step:7664 train loss:3.431216 +step:7665 train loss:3.402465 +step:7666 train loss:3.507282 +step:7667 train loss:3.425982 +step:7668 train loss:3.534110 +step:7669 train loss:3.466504 +step:7670 train loss:3.423096 +step:7671 train loss:3.475038 +step:7672 train loss:3.495432 +step:7673 train loss:3.462092 +step:7674 train loss:3.498120 +step:7675 train loss:3.556496 +step:7676 train loss:3.521384 +step:7677 train loss:3.548680 +step:7678 train loss:3.491115 +step:7679 train loss:3.510250 +step:7680 train loss:3.515975 +step:7681 train loss:3.487082 +step:7682 train loss:3.454441 +step:7683 train loss:3.453780 +step:7684 train loss:3.428548 +step:7685 train loss:3.409420 +step:7686 train loss:3.530933 +step:7687 train loss:3.440395 +step:7688 train loss:3.410606 +step:7689 train loss:3.460239 +step:7690 train loss:3.425598 +step:7691 train loss:3.455546 +step:7692 train loss:3.487557 +step:7693 train loss:3.487572 +step:7694 train loss:3.545142 +step:7695 train loss:3.470200 +step:7696 train loss:3.443742 +step:7697 train loss:3.431138 +step:7698 train loss:3.492283 +step:7699 train loss:3.486853 +step:7700 train loss:3.389534 +step:7701 train loss:3.504174 +step:7702 train loss:3.445538 +step:7703 train loss:3.449600 +step:7704 train loss:3.500811 +step:7705 train loss:3.459133 +step:7706 train loss:3.396934 +step:7707 train loss:3.516790 +step:7708 train loss:3.454765 +step:7709 train loss:3.473499 +step:7710 train loss:3.534794 +step:7711 train loss:3.496147 +step:7712 train loss:3.442894 +step:7713 train loss:3.521303 +step:7714 train loss:3.469399 +step:7715 train loss:3.418336 
+step:7716 train loss:3.458617 +step:7717 train loss:3.484791 +step:7718 train loss:3.488672 +step:7719 train loss:3.446552 +step:7720 train loss:3.461015 +step:7721 train loss:3.500767 +step:7722 train loss:3.430166 +step:7723 train loss:3.806134 +step:7724 train loss:3.468727 +step:7725 train loss:3.389853 +step:7726 train loss:3.450426 +step:7727 train loss:3.485024 +step:7728 train loss:3.432231 +step:7729 train loss:3.439949 +step:7730 train loss:3.463246 +step:7731 train loss:3.493842 +step:7732 train loss:3.517226 +step:7733 train loss:3.424325 +step:7734 train loss:3.451650 +step:7735 train loss:3.541148 +step:7736 train loss:3.487095 +step:7737 train loss:3.501359 +step:7738 train loss:3.405598 +step:7739 train loss:3.480833 +step:7740 train loss:3.429077 +step:7741 train loss:3.464430 +step:7742 train loss:3.469737 +step:7743 train loss:3.415319 +step:7744 train loss:3.544175 +step:7745 train loss:3.431050 +step:7746 train loss:3.406013 +step:7747 train loss:3.501294 +step:7748 train loss:3.485044 +step:7749 train loss:3.407527 +step:7750 validation loss:3.390671 +step:7750 train loss:3.567186 +step:7751 train loss:3.450493 +step:7752 train loss:3.441930 +step:7753 train loss:3.443788 +step:7754 train loss:3.419345 +step:7755 train loss:3.484977 +step:7756 train loss:3.509272 +step:7757 train loss:3.458642 +step:7758 train loss:3.430650 +step:7759 train loss:3.459908 +step:7760 train loss:3.487578 +step:7761 train loss:3.477845 +step:7762 train loss:3.464163 +step:7763 train loss:3.448866 +step:7764 train loss:3.455641 +step:7765 train loss:3.406636 +step:7766 train loss:3.474558 +step:7767 train loss:3.474581 +step:7768 train loss:3.433451 +step:7769 train loss:3.495813 +step:7770 train loss:3.513480 +step:7771 train loss:3.485347 +step:7772 train loss:3.457287 +step:7773 train loss:3.522017 +step:7774 train loss:3.414863 +step:7775 train loss:3.404487 +step:7776 train loss:3.510506 +step:7777 train loss:3.461818 +step:7778 train loss:3.419171 +step:7779 train loss:3.461920 +step:7780 train loss:3.456669 +step:7781 train loss:3.467623 +step:7782 train loss:3.451478 +step:7783 train loss:3.432599 +step:7784 train loss:3.432704 +step:7785 train loss:3.471351 +step:7786 train loss:3.426946 +step:7787 train loss:3.507069 +step:7788 train loss:3.459021 +step:7789 train loss:3.394676 +step:7790 train loss:3.456874 +step:7791 train loss:3.485739 +step:7792 train loss:3.446182 +step:7793 train loss:3.470380 +step:7794 train loss:3.458081 +step:7795 train loss:3.489257 +step:7796 train loss:3.453249 +step:7797 train loss:3.472524 +step:7798 train loss:3.462591 +step:7799 train loss:3.452760 +step:7800 train loss:3.408066 +step:7801 train loss:3.471965 +step:7802 train loss:3.454083 +step:7803 train loss:3.504601 +step:7804 train loss:3.463630 +step:7805 train loss:3.461204 +step:7806 train loss:3.481541 +step:7807 train loss:3.554402 +step:7808 train loss:3.414716 +step:7809 train loss:3.393985 +step:7810 train loss:3.479636 +step:7811 train loss:3.414339 +step:7812 train loss:3.430279 +step:7813 train loss:3.522740 +step:7814 train loss:3.588663 +step:7815 train loss:3.402131 +step:7816 train loss:3.488202 +step:7817 train loss:3.518285 +step:7818 train loss:3.416446 +step:7819 train loss:3.468205 +step:7820 train loss:3.506607 +step:7821 train loss:3.442579 +step:7822 train loss:3.401662 +step:7823 train loss:3.544982 +step:7824 train loss:3.452256 +step:7825 train loss:3.439445 +step:7826 train loss:3.438591 +step:7827 train loss:3.480320 +step:7828 train loss:3.470733 +step:7829 
train loss:3.431314 +step:7830 train loss:3.438315 +step:7831 train loss:3.441362 +step:7832 train loss:3.510563 +step:7833 train loss:3.488292 +step:7834 train loss:3.453593 +step:7835 train loss:3.480469 +step:7836 train loss:3.588833 +step:7837 train loss:3.473454 +step:7838 train loss:3.444063 +step:7839 train loss:3.402703 +step:7840 train loss:3.418992 +step:7841 train loss:3.516350 +step:7842 train loss:3.502476 +step:7843 train loss:3.554472 +step:7844 train loss:3.484842 +step:7845 train loss:3.461099 +step:7846 train loss:3.573938 +step:7847 train loss:3.461662 +step:7848 train loss:3.471849 +step:7849 train loss:3.487461 +step:7850 train loss:3.455943 +step:7851 train loss:3.484264 +step:7852 train loss:3.458160 +step:7853 train loss:3.431177 +step:7854 train loss:3.460527 +step:7855 train loss:3.461949 +step:7856 train loss:3.464438 +step:7857 train loss:3.452512 +step:7858 train loss:3.461108 +step:7859 train loss:3.467798 +step:7860 train loss:3.504510 +step:7861 train loss:3.492115 +step:7862 train loss:3.436086 +step:7863 train loss:3.539198 +step:7864 train loss:3.378213 +step:7865 train loss:3.458236 +step:7866 train loss:3.431628 +step:7867 train loss:3.476307 +step:7868 train loss:3.457266 +step:7869 train loss:3.458046 +step:7870 train loss:3.374936 +step:7871 train loss:3.442984 +step:7872 train loss:3.430718 +step:7873 train loss:3.510032 +step:7874 train loss:3.452077 +step:7875 train loss:3.457034 +step:7876 train loss:3.478588 +step:7877 train loss:3.431344 +step:7878 train loss:3.468808 +step:7879 train loss:3.807315 +step:7880 train loss:3.461379 +step:7881 train loss:3.486741 +step:7882 train loss:3.565555 +step:7883 train loss:3.383092 +step:7884 train loss:3.470927 +step:7885 train loss:3.452600 +step:7886 train loss:3.453632 +step:7887 train loss:3.447222 +step:7888 train loss:3.479250 +step:7889 train loss:3.528018 +step:7890 train loss:3.433598 +step:7891 train loss:3.482418 +step:7892 train loss:3.455036 +step:7893 train loss:3.428558 +step:7894 train loss:3.451103 +step:7895 train loss:3.434051 +step:7896 train loss:3.434396 +step:7897 train loss:3.459300 +step:7898 train loss:3.467351 +step:7899 train loss:3.454322 +step:7900 train loss:3.423974 +step:7901 train loss:3.415165 +step:7902 train loss:3.563468 +step:7903 train loss:3.408270 +step:7904 train loss:3.455970 +step:7905 train loss:3.527242 +step:7906 train loss:3.420797 +step:7907 train loss:3.447710 +step:7908 train loss:3.500013 +step:7909 train loss:3.552597 +step:7910 train loss:3.430516 +step:7911 train loss:3.455923 +step:7912 train loss:3.456504 +step:7913 train loss:3.430225 +step:7914 train loss:3.467629 +step:7915 train loss:3.571178 +step:7916 train loss:3.440600 +step:7917 train loss:3.500064 +step:7918 train loss:3.441000 +step:7919 train loss:3.430387 +step:7920 train loss:3.472206 +step:7921 train loss:3.476226 +step:7922 train loss:3.451544 +step:7923 train loss:3.498970 +step:7924 train loss:3.462792 +step:7925 train loss:3.484157 +step:7926 train loss:3.386905 +step:7927 train loss:3.669847 +step:7928 train loss:3.493767 +step:7929 train loss:3.455274 +step:7930 train loss:3.415032 +step:7931 train loss:3.439021 +step:7932 train loss:3.463913 +step:7933 train loss:3.477369 +step:7934 train loss:3.572129 +step:7935 train loss:3.491293 +step:7936 train loss:3.463217 +step:7937 train loss:3.418699 +step:7938 train loss:3.426777 +step:7939 train loss:3.475792 +step:7940 train loss:3.458050 +step:7941 train loss:3.486402 +step:7942 train loss:3.476385 +step:7943 train loss:3.490476 
+step:7944 train loss:3.408056 +step:7945 train loss:3.512319 +step:7946 train loss:3.461899 +step:7947 train loss:3.474567 +step:7948 train loss:3.430706 +step:7949 train loss:3.489125 +step:7950 train loss:3.539918 +step:7951 train loss:3.502434 +step:7952 train loss:3.652861 +step:7953 train loss:3.544987 +step:7954 train loss:3.444795 +step:7955 train loss:3.432394 +step:7956 train loss:3.438606 +step:7957 train loss:3.513168 +step:7958 train loss:3.524279 +step:7959 train loss:3.477417 +step:7960 train loss:3.540193 +step:7961 train loss:3.448065 +step:7962 train loss:3.421366 +step:7963 train loss:3.456954 +step:7964 train loss:3.456093 +step:7965 train loss:3.465794 +step:7966 train loss:3.435647 +step:7967 train loss:3.461043 +step:7968 train loss:3.467920 +step:7969 train loss:3.426108 +step:7970 train loss:3.394269 +step:7971 train loss:3.482178 +step:7972 train loss:3.455873 +step:7973 train loss:3.431551 +step:7974 train loss:3.469959 +step:7975 train loss:3.456423 +step:7976 train loss:3.475317 +step:7977 train loss:3.504923 +step:7978 train loss:3.529001 +step:7979 train loss:3.476439 +step:7980 train loss:3.382103 +step:7981 train loss:3.416897 +step:7982 train loss:3.468178 +step:7983 train loss:3.484689 +step:7984 train loss:3.525042 +step:7985 train loss:3.450135 +step:7986 train loss:3.474001 +step:7987 train loss:3.525111 +step:7988 train loss:3.499229 +step:7989 train loss:3.403758 +step:7990 train loss:3.420675 +step:7991 train loss:3.433902 +step:7992 train loss:3.456911 +step:7993 train loss:3.441461 +step:7994 train loss:3.492913 +step:7995 train loss:3.497143 +step:7996 train loss:3.461929 +step:7997 train loss:3.481854 +step:7998 train loss:3.504625 +step:7999 train loss:3.432755 +step:8000 validation loss:3.385050 total_sharp:5.9200e-04 L1_sharp:1.9443e-03 L2_sharp:7.1341e-05 L3_sharp:2.2345e-04 L4_sharp:7.0094e-05 L5_sharp:6.6768e-05 L6_sharp:9.7419e-05 L7_sharp:1.2496e-04 L8_sharp:8.5480e-05 L9_sharp:5.4866e-05 L10_sharp:3.7007e-05 L11_sharp:3.2897e-05 L12_sharp:3.5026e-05 total_fnorm:4.2367e+00 total_l1_linf:3.4570e+04 total_spectral:4.2367e+00 L1_fnorm:1.0914e+00 L2_fnorm:9.7868e-01 L3_fnorm:1.0848e+00 L4_fnorm:1.1639e+00 L5_fnorm:1.1919e+00 L6_fnorm:1.1929e+00 L7_fnorm:1.1929e+00 L8_fnorm:1.2005e+00 L9_fnorm:1.1997e+00 L10_fnorm:1.2073e+00 L11_fnorm:1.2032e+00 L12_fnorm:1.2068e+00 L1_l1linf:8.8278e-01 L2_l1linf:7.8824e-01 L3_l1linf:8.1983e-01 L4_l1linf:8.4243e-01 L5_l1linf:8.4397e-01 L6_l1linf:8.2693e-01 L7_l1linf:8.1525e-01 L8_l1linf:8.1151e-01 L9_l1linf:8.0958e-01 L10_l1linf:8.1279e-01 L11_l1linf:7.9447e-01 L12_l1linf:7.9070e-01 L1_spectral:2.4099e-02 L2_spectral:2.4074e-02 L3_spectral:2.4087e-02 L4_spectral:2.4093e-02 L5_spectral:2.4127e-02 L6_spectral:2.4092e-02 L7_spectral:2.4084e-02 L8_spectral:2.4087e-02 L9_spectral:2.4085e-02 L10_spectral:2.4084e-02 L11_spectral:2.4088e-02 L12_spectral:2.4091e-02 ip_v_neg_g:6.2584e-03 cos_v_neg_g:1.5960e-03 v_norm:4.2367e+00 g_norm:9.2557e-01 hv_norm:5.3724e-01 cos_v_hv:4.6686e-03 hg_norm:3.7554e+01 cos_g_hg:4.5845e-01 v_par:1.6425e-03 v_perp:4.2367e+00 L1_cos_v_neg_g:3.0176e-03 L1_v_norm:1.0914e+00 L2_cos_v_neg_g:5.8247e-03 L2_v_norm:9.7868e-01 L3_cos_v_neg_g:5.8202e-03 L3_v_norm:1.0848e+00 L4_cos_v_neg_g:2.1098e-03 L4_v_norm:1.1639e+00 L5_cos_v_neg_g:1.8476e-03 L5_v_norm:1.1919e+00 L6_cos_v_neg_g:2.0119e-03 L6_v_norm:1.1929e+00 L7_cos_v_neg_g:3.2063e-03 L7_v_norm:1.1929e+00 L8_cos_v_neg_g:3.2690e-03 L8_v_norm:1.2005e+00 L9_cos_v_neg_g:3.2424e-03 L9_v_norm:1.1997e+00 L10_cos_v_neg_g:2.2725e-03 L10_v_norm:1.2073e+00 
L11_cos_v_neg_g:1.4537e-03 L11_v_norm:1.2032e+00 L12_cos_v_neg_g:1.3828e-03 L12_v_norm:1.2068e+00 +step:8000 train loss:3.504256 +step:8001 train loss:3.465539 +step:8002 train loss:3.485265 +step:8003 train loss:3.501182 +step:8004 train loss:3.477548 +step:8005 train loss:3.400204 +step:8006 train loss:3.477734 +step:8007 train loss:3.444259 +step:8008 train loss:3.473349 +step:8009 train loss:3.547855 +step:8010 train loss:3.773624 +step:8011 train loss:3.427583 +step:8012 train loss:3.511178 +step:8013 train loss:3.459738 +step:8014 train loss:3.477352 +step:8015 train loss:3.472269 +step:8016 train loss:3.462202 +step:8017 train loss:3.484240 +step:8018 train loss:3.444655 +step:8019 train loss:3.412972 +step:8020 train loss:3.453283 +step:8021 train loss:3.525219 +step:8022 train loss:3.444288 +step:8023 train loss:3.477528 +step:8024 train loss:3.355171 +step:8025 train loss:3.451396 +step:8026 train loss:3.462784 +step:8027 train loss:3.466808 +step:8028 train loss:3.525402 +step:8029 train loss:3.452958 +step:8030 train loss:3.414627 +step:8031 train loss:3.472643 +step:8032 train loss:3.457551 +step:8033 train loss:3.405725 +step:8034 train loss:3.446283 +step:8035 train loss:3.434277 +step:8036 train loss:3.424476 +step:8037 train loss:3.394865 +step:8038 train loss:3.406401 +step:8039 train loss:3.501989 +step:8040 train loss:3.435939 +step:8041 train loss:3.430645 +step:8042 train loss:3.469796 +step:8043 train loss:3.414619 +step:8044 train loss:3.424372 +step:8045 train loss:3.493742 +step:8046 train loss:3.421966 +step:8047 train loss:3.423244 +step:8048 train loss:3.455113 +step:8049 train loss:3.501609 +step:8050 train loss:3.442167 +step:8051 train loss:3.417738 +step:8052 train loss:3.481787 +step:8053 train loss:3.433697 +step:8054 train loss:3.468791 +step:8055 train loss:3.498919 +step:8056 train loss:3.466270 +step:8057 train loss:3.542820 +step:8058 train loss:3.445900 +step:8059 train loss:3.503823 +step:8060 train loss:3.476739 +step:8061 train loss:3.365571 +step:8062 train loss:3.497014 +step:8063 train loss:3.460806 +step:8064 train loss:3.417176 +step:8065 train loss:3.487913 +step:8066 train loss:3.443315 +step:8067 train loss:3.507921 +step:8068 train loss:3.434686 +step:8069 train loss:3.459023 +step:8070 train loss:3.425221 +step:8071 train loss:3.434833 +step:8072 train loss:3.475884 +step:8073 train loss:3.427548 +step:8074 train loss:3.442092 +step:8075 train loss:3.431123 +step:8076 train loss:3.475197 +step:8077 train loss:3.479938 +step:8078 train loss:3.424376 +step:8079 train loss:3.446652 +step:8080 train loss:3.432966 +step:8081 train loss:3.448729 +step:8082 train loss:3.468197 +step:8083 train loss:3.363638 +step:8084 train loss:3.502506 +step:8085 train loss:3.377079 +step:8086 train loss:3.500781 +step:8087 train loss:3.399696 +step:8088 train loss:3.443986 +step:8089 train loss:3.482612 +step:8090 train loss:3.505432 +step:8091 train loss:3.449558 +step:8092 train loss:3.429429 +step:8093 train loss:3.436919 +step:8094 train loss:3.439848 +step:8095 train loss:3.463099 +step:8096 train loss:3.468145 +step:8097 train loss:3.393371 +step:8098 train loss:3.406908 +step:8099 train loss:3.390734 +step:8100 train loss:3.451170 +step:8101 train loss:3.530046 +step:8102 train loss:3.461282 +step:8103 train loss:3.416052 +step:8104 train loss:3.466230 +step:8105 train loss:3.463399 +step:8106 train loss:3.427530 +step:8107 train loss:3.406169 +step:8108 train loss:3.423650 +step:8109 train loss:3.417084 +step:8110 train loss:3.480587 +step:8111 
train loss:3.405486 +step:8112 train loss:3.427327 +step:8113 train loss:3.416008 +step:8114 train loss:3.360052 +step:8115 train loss:3.413010 +step:8116 train loss:3.448821 +step:8117 train loss:3.423434 +step:8118 train loss:3.411662 +step:8119 train loss:3.456003 +step:8120 train loss:3.405065 +step:8121 train loss:3.465036 +step:8122 train loss:3.445084 +step:8123 train loss:3.449959 +step:8124 train loss:3.414662 +step:8125 train loss:3.395030 +step:8126 train loss:3.391417 +step:8127 train loss:3.481645 +step:8128 train loss:3.489283 +step:8129 train loss:3.410601 +step:8130 train loss:3.436210 +step:8131 train loss:3.407071 +step:8132 train loss:3.474312 +step:8133 train loss:3.402158 +step:8134 train loss:3.436084 +step:8135 train loss:3.429543 +step:8136 train loss:3.437662 +step:8137 train loss:3.500130 +step:8138 train loss:3.408966 +step:8139 train loss:3.482196 +step:8140 train loss:3.410824 +step:8141 train loss:3.433672 +step:8142 train loss:3.416157 +step:8143 train loss:3.467817 +step:8144 train loss:3.443365 +step:8145 train loss:3.407627 +step:8146 train loss:3.421551 +step:8147 train loss:3.443343 +step:8148 train loss:3.536915 +step:8149 train loss:3.448233 +step:8150 train loss:3.426167 +step:8151 train loss:3.419429 +step:8152 train loss:3.513809 +step:8153 train loss:3.391753 +step:8154 train loss:3.409239 +step:8155 train loss:3.437010 +step:8156 train loss:3.414100 +step:8157 train loss:3.438465 +step:8158 train loss:3.448280 +step:8159 train loss:3.465615 +step:8160 train loss:3.419102 +step:8161 train loss:3.460719 +step:8162 train loss:3.392841 +step:8163 train loss:3.450994 +step:8164 train loss:3.439587 +step:8165 train loss:3.487088 +step:8166 train loss:3.493971 +step:8167 train loss:3.395897 +step:8168 train loss:3.379607 +step:8169 train loss:3.423550 +step:8170 train loss:3.377662 +step:8171 train loss:3.434809 +step:8172 train loss:3.434508 +step:8173 train loss:3.433405 +step:8174 train loss:3.443765 +step:8175 train loss:3.404331 +step:8176 train loss:3.399790 +step:8177 train loss:3.447615 +step:8178 train loss:3.532271 +step:8179 train loss:3.440321 +step:8180 train loss:3.465271 +step:8181 train loss:3.460128 +step:8182 train loss:3.422680 +step:8183 train loss:3.409559 +step:8184 train loss:3.405051 +step:8185 train loss:3.439942 +step:8186 train loss:3.448745 +step:8187 train loss:3.457261 +step:8188 train loss:3.384975 +step:8189 train loss:3.531582 +step:8190 train loss:3.470185 +step:8191 train loss:3.468022 +step:8192 train loss:3.582803 +step:8193 train loss:3.448668 +step:8194 train loss:3.383492 +step:8195 train loss:3.478627 +step:8196 train loss:3.399288 +step:8197 train loss:3.425570 +step:8198 train loss:3.436729 +step:8199 train loss:3.435567 +step:8200 train loss:3.417724 +step:8201 train loss:3.530746 +step:8202 train loss:3.449119 +step:8203 train loss:3.469696 +step:8204 train loss:3.377756 +step:8205 train loss:3.385844 +step:8206 train loss:3.508545 +step:8207 train loss:3.434922 +step:8208 train loss:3.453677 +step:8209 train loss:3.497726 +step:8210 train loss:3.481995 +step:8211 train loss:3.413764 +step:8212 train loss:3.471380 +step:8213 train loss:3.481556 +step:8214 train loss:3.519484 +step:8215 train loss:3.497041 +step:8216 train loss:3.477040 +step:8217 train loss:3.456604 +step:8218 train loss:3.463912 +step:8219 train loss:3.597631 +step:8220 train loss:3.424215 +step:8221 train loss:3.447439 +step:8222 train loss:3.396697 +step:8223 train loss:3.418418 +step:8224 train loss:3.428350 +step:8225 train loss:3.477994 
+step:8226 train loss:3.408227 +step:8227 train loss:3.479418 +step:8228 train loss:3.365367 +step:8229 train loss:3.405490 +step:8230 train loss:3.423483 +step:8231 train loss:3.445937 +step:8232 train loss:3.448087 +step:8233 train loss:3.489795 +step:8234 train loss:3.487500 +step:8235 train loss:3.454620 +step:8236 train loss:3.442296 +step:8237 train loss:3.393425 +step:8238 train loss:3.648693 +step:8239 train loss:3.483261 +step:8240 train loss:3.427174 +step:8241 train loss:3.398564 +step:8242 train loss:3.434308 +step:8243 train loss:3.426997 +step:8244 train loss:3.438278 +step:8245 train loss:3.424320 +step:8246 train loss:3.487723 +step:8247 train loss:3.522612 +step:8248 train loss:3.438539 +step:8249 train loss:3.431014 +step:8250 validation loss:3.377947 +step:8250 train loss:3.420967 +step:8251 train loss:3.518328 +step:8252 train loss:3.454832 +step:8253 train loss:3.420032 +step:8254 train loss:3.394118 +step:8255 train loss:3.427636 +step:8256 train loss:3.409832 +step:8257 train loss:3.515540 +step:8258 train loss:3.436876 +step:8259 train loss:3.419289 +step:8260 train loss:3.419924 +step:8261 train loss:3.418197 +step:8262 train loss:3.430388 +step:8263 train loss:3.445713 +step:8264 train loss:3.414102 +step:8265 train loss:3.401401 +step:8266 train loss:3.412421 +step:8267 train loss:3.343618 +step:8268 train loss:3.467596 +step:8269 train loss:3.399420 +step:8270 train loss:3.453545 +step:8271 train loss:3.480033 +step:8272 train loss:3.507507 +step:8273 train loss:3.381846 +step:8274 train loss:3.442468 +step:8275 train loss:3.406849 +step:8276 train loss:3.440104 +step:8277 train loss:3.512395 +step:8278 train loss:3.526965 +step:8279 train loss:3.442134 +step:8280 train loss:3.424440 +step:8281 train loss:3.391640 +step:8282 train loss:3.453036 +step:8283 train loss:3.444143 +step:8284 train loss:3.421301 +step:8285 train loss:3.415592 +step:8286 train loss:3.526411 +step:8287 train loss:3.462580 +step:8288 train loss:3.430838 +step:8289 train loss:3.445933 +step:8290 train loss:3.386253 +step:8291 train loss:3.428766 +step:8292 train loss:3.452846 +step:8293 train loss:3.430272 +step:8294 train loss:3.399984 +step:8295 train loss:3.438351 +step:8296 train loss:3.503526 +step:8297 train loss:3.586391 +step:8298 train loss:3.404186 +step:8299 train loss:3.444427 +step:8300 train loss:3.450756 +step:8301 train loss:3.424229 +step:8302 train loss:3.481178 +step:8303 train loss:3.616333 +step:8304 train loss:3.424452 +step:8305 train loss:3.468761 +step:8306 train loss:3.445812 +step:8307 train loss:3.464047 +step:8308 train loss:3.462335 +step:8309 train loss:3.483082 +step:8310 train loss:3.400069 +step:8311 train loss:3.494339 +step:8312 train loss:3.481995 +step:8313 train loss:3.549821 +step:8314 train loss:3.420309 +step:8315 train loss:3.368839 +step:8316 train loss:3.424565 +step:8317 train loss:3.448414 +step:8318 train loss:3.438166 +step:8319 train loss:3.476205 +step:8320 train loss:3.499361 +step:8321 train loss:3.405073 +step:8322 train loss:3.420128 +step:8323 train loss:3.456499 +step:8324 train loss:3.433229 +step:8325 train loss:3.487642 +step:8326 train loss:3.454467 +step:8327 train loss:3.442348 +step:8328 train loss:3.516645 +step:8329 train loss:3.419485 +step:8330 train loss:3.465699 +step:8331 train loss:3.390310 +step:8332 train loss:3.491430 +step:8333 train loss:3.506798 +step:8334 train loss:3.376742 +step:8335 train loss:3.435485 +step:8336 train loss:3.533726 +step:8337 train loss:3.461541 +step:8338 train loss:3.431347 +step:8339 
train loss:3.408825 +step:8340 train loss:3.499271 +step:8341 train loss:3.400115 +step:8342 train loss:3.474898 +step:8343 train loss:3.386947 +step:8344 train loss:3.433625 +step:8345 train loss:3.465684 +step:8346 train loss:3.550255 +step:8347 train loss:3.437250 +step:8348 train loss:3.468079 +step:8349 train loss:3.436884 +step:8350 train loss:3.459273 +step:8351 train loss:3.396847 +step:8352 train loss:3.484465 +step:8353 train loss:3.438971 +step:8354 train loss:3.423017 +step:8355 train loss:3.421722 +step:8356 train loss:3.420088 +step:8357 train loss:3.431395 +step:8358 train loss:3.409699 +step:8359 train loss:3.400896 +step:8360 train loss:3.449529 +step:8361 train loss:3.461355 +step:8362 train loss:3.481785 +step:8363 train loss:3.479687 +step:8364 train loss:3.445350 +step:8365 train loss:3.590474 +step:8366 train loss:3.435353 +step:8367 train loss:3.405644 +step:8368 train loss:3.377946 +step:8369 train loss:3.405317 +step:8370 train loss:3.491374 +step:8371 train loss:3.458288 +step:8372 train loss:3.441672 +step:8373 train loss:3.448683 +step:8374 train loss:3.385630 +step:8375 train loss:3.443467 +step:8376 train loss:3.485248 +step:8377 train loss:3.311230 +step:8378 train loss:3.527213 +step:8379 train loss:3.388137 +step:8380 train loss:3.396963 +step:8381 train loss:3.402245 +step:8382 train loss:3.427087 +step:8383 train loss:3.391442 +step:8384 train loss:3.431700 +step:8385 train loss:3.445272 +step:8386 train loss:3.426484 +step:8387 train loss:3.584654 +step:8388 train loss:3.498046 +step:8389 train loss:3.465333 +step:8390 train loss:3.476827 +step:8391 train loss:3.405710 +step:8392 train loss:3.416476 +step:8393 train loss:3.371178 +step:8394 train loss:3.465234 +step:8395 train loss:3.468739 +step:8396 train loss:3.493557 +step:8397 train loss:3.430232 +step:8398 train loss:3.445839 +step:8399 train loss:3.414365 +step:8400 train loss:3.419873 +step:8401 train loss:3.420063 +step:8402 train loss:3.409124 +step:8403 train loss:3.430721 +step:8404 train loss:3.430186 +step:8405 train loss:3.386719 +step:8406 train loss:3.427993 +step:8407 train loss:3.469717 +step:8408 train loss:3.439618 +step:8409 train loss:3.362323 +step:8410 train loss:3.427916 +step:8411 train loss:3.455782 +step:8412 train loss:3.516133 +step:8413 train loss:3.488908 +step:8414 train loss:3.483069 +step:8415 train loss:3.403819 +step:8416 train loss:3.452568 +step:8417 train loss:3.369848 +step:8418 train loss:3.472706 +step:8419 train loss:3.429182 +step:8420 train loss:3.503689 +step:8421 train loss:3.422593 +step:8422 train loss:3.438735 +step:8423 train loss:3.456210 +step:8424 train loss:3.461201 +step:8425 train loss:3.517454 +step:8426 train loss:3.486410 +step:8427 train loss:3.405272 +step:8428 train loss:3.421079 +step:8429 train loss:3.479221 +step:8430 train loss:3.419022 +step:8431 train loss:3.424314 +step:8432 train loss:3.427705 +step:8433 train loss:3.425234 +step:8434 train loss:3.439189 +step:8435 train loss:3.356617 +step:8436 train loss:3.439684 +step:8437 train loss:3.481477 +step:8438 train loss:3.460087 +step:8439 train loss:3.401602 +step:8440 train loss:3.372855 +step:8441 train loss:3.427656 +step:8442 train loss:3.454096 +step:8443 train loss:3.409403 +step:8444 train loss:3.442372 +step:8445 train loss:3.392905 +step:8446 train loss:3.444333 +step:8447 train loss:3.456172 +step:8448 train loss:3.437122 +step:8449 train loss:3.427634 +step:8450 train loss:3.420223 +step:8451 train loss:3.447538 +step:8452 train loss:3.420414 +step:8453 train loss:3.404961 
+step:8454 train loss:3.452937 +step:8455 train loss:3.526520 +step:8456 train loss:3.505119 +step:8457 train loss:3.557266 +step:8458 train loss:3.445000 +step:8459 train loss:3.451544 +step:8460 train loss:3.383628 +step:8461 train loss:3.539483 +step:8462 train loss:3.411447 +step:8463 train loss:3.446243 +step:8464 train loss:3.461928 +step:8465 train loss:3.467049 +step:8466 train loss:3.443027 +step:8467 train loss:3.444252 +step:8468 train loss:3.702097 +step:8469 train loss:3.407389 +step:8470 train loss:3.400638 +step:8471 train loss:3.443776 +step:8472 train loss:3.466457 +step:8473 train loss:3.415366 +step:8474 train loss:3.545558 +step:8475 train loss:3.502247 +step:8476 train loss:3.451206 +step:8477 train loss:3.438719 +step:8478 train loss:3.424692 +step:8479 train loss:3.425211 +step:8480 train loss:3.536220 +step:8481 train loss:3.419548 +step:8482 train loss:3.416213 +step:8483 train loss:3.557601 +step:8484 train loss:3.443635 +step:8485 train loss:3.488365 +step:8486 train loss:3.399326 +step:8487 train loss:3.455045 +step:8488 train loss:3.399429 +step:8489 train loss:3.478383 +step:8490 train loss:3.465662 +step:8491 train loss:3.489754 +step:8492 train loss:3.443091 +step:8493 train loss:3.514486 +step:8494 train loss:3.375970 +step:8495 train loss:3.473811 +step:8496 train loss:3.418583 +step:8497 train loss:3.451923 +step:8498 train loss:3.471011 +step:8499 train loss:3.448421 +step:8500 validation loss:3.374953 total_sharp:6.1453e-04 L1_sharp:2.4456e-03 L2_sharp:6.4312e-05 L3_sharp:1.4208e-04 L4_sharp:7.6836e-05 L5_sharp:7.4481e-05 L6_sharp:1.0486e-04 L7_sharp:1.0233e-04 L8_sharp:7.5142e-05 L9_sharp:6.0066e-05 L10_sharp:3.8747e-05 L11_sharp:3.6975e-05 L12_sharp:8.3355e-05 total_fnorm:4.2108e+00 total_l1_linf:3.4355e+04 total_spectral:4.2108e+00 L1_fnorm:1.0488e+00 L2_fnorm:9.7597e-01 L3_fnorm:1.0826e+00 L4_fnorm:1.1586e+00 L5_fnorm:1.1862e+00 L6_fnorm:1.1903e+00 L7_fnorm:1.1904e+00 L8_fnorm:1.1980e+00 L9_fnorm:1.1955e+00 L10_fnorm:1.2012e+00 L11_fnorm:1.1915e+00 L12_fnorm:1.2013e+00 L1_l1linf:8.8636e-01 L2_l1linf:7.9109e-01 L3_l1linf:8.0333e-01 L4_l1linf:8.2100e-01 L5_l1linf:8.1746e-01 L6_l1linf:8.2058e-01 L7_l1linf:8.1588e-01 L8_l1linf:8.1379e-01 L9_l1linf:8.0149e-01 L10_l1linf:8.1048e-01 L11_l1linf:7.9108e-01 L12_l1linf:8.5748e-01 L1_spectral:2.4095e-02 L2_spectral:2.4076e-02 L3_spectral:2.4084e-02 L4_spectral:2.4103e-02 L5_spectral:2.4108e-02 L6_spectral:2.4086e-02 L7_spectral:2.4090e-02 L8_spectral:2.4082e-02 L9_spectral:2.4095e-02 L10_spectral:2.4090e-02 L11_spectral:2.4098e-02 L12_spectral:2.4098e-02 ip_v_neg_g:6.1317e-03 cos_v_neg_g:1.7192e-03 v_norm:4.2108e+00 g_norm:8.4702e-01 hv_norm:4.7988e-01 cos_v_hv:5.3924e-03 hg_norm:3.8436e+01 cos_g_hg:3.0719e-01 v_par:1.3298e-03 v_perp:4.2108e+00 L1_cos_v_neg_g:2.9252e-03 L1_v_norm:1.0488e+00 L2_cos_v_neg_g:2.7276e-03 L2_v_norm:9.7597e-01 L3_cos_v_neg_g:3.7476e-03 L3_v_norm:1.0826e+00 L4_cos_v_neg_g:3.3724e-03 L4_v_norm:1.1586e+00 L5_cos_v_neg_g:3.3282e-03 L5_v_norm:1.1862e+00 L6_cos_v_neg_g:3.4423e-03 L6_v_norm:1.1903e+00 L7_cos_v_neg_g:3.7232e-03 L7_v_norm:1.1904e+00 L8_cos_v_neg_g:3.6041e-03 L8_v_norm:1.1980e+00 L9_cos_v_neg_g:3.4573e-03 L9_v_norm:1.1955e+00 L10_cos_v_neg_g:4.1048e-03 L10_v_norm:1.2012e+00 L11_cos_v_neg_g:2.7424e-03 L11_v_norm:1.1915e+00 L12_cos_v_neg_g:1.5151e-03 L12_v_norm:1.2013e+00 +step:8500 train loss:3.438560 +step:8501 train loss:3.665262 +step:8502 train loss:3.679749 +step:8503 train loss:3.436948 +step:8504 train loss:3.430930 +step:8505 train loss:3.408899 +step:8506 train 
loss:3.478585 +step:8507 train loss:3.420985 +step:8508 train loss:3.451458 +step:8509 train loss:3.392721 +step:8510 train loss:3.414867 +step:8511 train loss:3.370909 +step:8512 train loss:3.473906 +step:8513 train loss:3.475575 +step:8514 train loss:3.424245 +step:8515 train loss:3.519270 +step:8516 train loss:3.434612 +step:8517 train loss:3.456218 +step:8518 train loss:3.347282 +step:8519 train loss:3.438782 +step:8520 train loss:3.408784 +step:8521 train loss:3.445154 +step:8522 train loss:3.341448 +step:8523 train loss:3.434073 +step:8524 train loss:3.425698 +step:8525 train loss:3.492569 +step:8526 train loss:3.473060 +step:8527 train loss:3.417777 +step:8528 train loss:3.498440 +step:8529 train loss:3.457822 +step:8530 train loss:3.487517 +step:8531 train loss:3.479297 +step:8532 train loss:3.517339 +step:8533 train loss:3.471379 +step:8534 train loss:3.466091 +step:8535 train loss:3.441424 +step:8536 train loss:3.529080 +step:8537 train loss:3.443437 +step:8538 train loss:3.514069 +step:8539 train loss:3.434600 +step:8540 train loss:3.460897 +step:8541 train loss:3.400918 +step:8542 train loss:3.468056 +step:8543 train loss:3.384277 +step:8544 train loss:3.382223 +step:8545 train loss:3.427019 +step:8546 train loss:3.384890 +step:8547 train loss:3.434784 +step:8548 train loss:3.408389 +step:8549 train loss:3.449802 +step:8550 train loss:3.403092 +step:8551 train loss:3.453098 +step:8552 train loss:3.450796 +step:8553 train loss:3.458460 +step:8554 train loss:3.428512 +step:8555 train loss:3.446908 +step:8556 train loss:3.519366 +step:8557 train loss:3.421703 +step:8558 train loss:3.458799 +step:8559 train loss:3.449847 +step:8560 train loss:3.430515 +step:8561 train loss:3.388446 +step:8562 train loss:3.411151 +step:8563 train loss:3.411670 +step:8564 train loss:3.482876 +step:8565 train loss:3.457668 +step:8566 train loss:3.479798 +step:8567 train loss:3.421736 +step:8568 train loss:3.440391 +step:8569 train loss:3.448476 +step:8570 train loss:3.393660 +step:8571 train loss:3.436298 +step:8572 train loss:3.453008 +step:8573 train loss:3.525745 +step:8574 train loss:3.455900 +step:8575 train loss:3.453644 +step:8576 train loss:3.489931 +step:8577 train loss:3.571872 +step:8578 train loss:3.484118 +step:8579 train loss:3.466248 +step:8580 train loss:3.399909 +step:8581 train loss:3.441568 +step:8582 train loss:3.447222 +step:8583 train loss:3.442937 +step:8584 train loss:3.435426 +step:8585 train loss:3.519320 +step:8586 train loss:3.433833 +step:8587 train loss:3.442702 +step:8588 train loss:3.486360 +step:8589 train loss:3.437466 +step:8590 train loss:3.429156 +step:8591 train loss:3.432429 +step:8592 train loss:3.390229 +step:8593 train loss:3.467437 +step:8594 train loss:3.494153 +step:8595 train loss:3.412844 +step:8596 train loss:3.460876 +step:8597 train loss:3.419286 +step:8598 train loss:3.474689 +step:8599 train loss:3.440323 +step:8600 train loss:3.451079 +step:8601 train loss:3.439312 +step:8602 train loss:3.412283 +step:8603 train loss:3.471071 +step:8604 train loss:3.415288 +step:8605 train loss:3.430618 +step:8606 train loss:3.439363 +step:8607 train loss:3.452369 +step:8608 train loss:3.491551 +step:8609 train loss:3.388739 +step:8610 train loss:3.462115 +step:8611 train loss:3.392855 +step:8612 train loss:3.473547 +step:8613 train loss:3.405397 +step:8614 train loss:3.468768 +step:8615 train loss:3.509448 +step:8616 train loss:3.392757 +step:8617 train loss:3.460264 +step:8618 train loss:3.436715 +step:8619 train loss:3.391662 +step:8620 train loss:3.432827 
+step:8621 train loss:3.465968 +step:8622 train loss:3.421743 +step:8623 train loss:3.436106 +step:8624 train loss:3.509261 +step:8625 train loss:3.431300 +step:8626 train loss:3.439409 +step:8627 train loss:3.433703 +step:8628 train loss:3.468179 +step:8629 train loss:3.378520 +step:8630 train loss:3.478593 +step:8631 train loss:3.418313 +step:8632 train loss:3.474051 +step:8633 train loss:3.420465 +step:8634 train loss:3.656280 +step:8635 train loss:3.448045 +step:8636 train loss:3.493678 +step:8637 train loss:3.419145 +step:8638 train loss:3.423373 +step:8639 train loss:3.476933 +step:8640 train loss:3.391287 +step:8641 train loss:3.490751 +step:8642 train loss:3.442278 +step:8643 train loss:3.550282 +step:8644 train loss:3.394643 +step:8645 train loss:3.464767 +step:8646 train loss:3.424924 +step:8647 train loss:3.454782 +step:8648 train loss:3.399588 +step:8649 train loss:3.486940 +step:8650 train loss:3.440716 +step:8651 train loss:3.452581 +step:8652 train loss:3.420089 +step:8653 train loss:3.453247 +step:8654 train loss:3.496476 +step:8655 train loss:3.426212 +step:8656 train loss:3.467281 +step:8657 train loss:3.470432 +step:8658 train loss:3.445091 +step:8659 train loss:3.433608 +step:8660 train loss:3.379217 +step:8661 train loss:3.438013 +step:8662 train loss:3.381537 +step:8663 train loss:3.453943 +step:8664 train loss:3.369183 +step:8665 train loss:3.394044 +step:8666 train loss:3.468173 +step:8667 train loss:3.359706 +step:8668 train loss:3.468735 +step:8669 train loss:3.508195 +step:8670 train loss:3.407279 +step:8671 train loss:3.406123 +step:8672 train loss:3.621461 +step:8673 train loss:3.386308 +step:8674 train loss:3.456761 +step:8675 train loss:3.495699 +step:8676 train loss:3.441905 +step:8677 train loss:3.465116 +step:8678 train loss:3.413959 +step:8679 train loss:3.470624 +step:8680 train loss:3.450243 +step:8681 train loss:3.452376 +step:8682 train loss:3.406706 +step:8683 train loss:3.425635 +step:8684 train loss:3.497034 +step:8685 train loss:3.443268 +step:8686 train loss:3.435041 +step:8687 train loss:3.390122 +step:8688 train loss:3.407091 +step:8689 train loss:3.475268 +step:8690 train loss:3.414461 +step:8691 train loss:3.491624 +step:8692 train loss:3.380310 +step:8693 train loss:3.468714 +step:8694 train loss:3.470581 +step:8695 train loss:3.454968 +step:8696 train loss:3.481473 +step:8697 train loss:3.435610 +step:8698 train loss:3.471951 +step:8699 train loss:3.424231 +step:8700 train loss:3.450084 +step:8701 train loss:3.413303 +step:8702 train loss:3.398888 +step:8703 train loss:3.412102 +step:8704 train loss:3.371147 +step:8705 train loss:3.449203 +step:8706 train loss:3.467252 +step:8707 train loss:3.464984 +step:8708 train loss:3.409296 +step:8709 train loss:3.473633 +step:8710 train loss:3.397589 +step:8711 train loss:3.453902 +step:8712 train loss:3.361291 +step:8713 train loss:3.439022 +step:8714 train loss:3.545618 +step:8715 train loss:3.403261 +step:8716 train loss:3.454226 +step:8717 train loss:3.426537 +step:8718 train loss:3.461879 +step:8719 train loss:3.432163 +step:8720 train loss:3.543988 +step:8721 train loss:3.434393 +step:8722 train loss:3.532461 +step:8723 train loss:3.398853 +step:8724 train loss:3.412493 +step:8725 train loss:3.438066 +step:8726 train loss:3.392190 +step:8727 train loss:3.472606 +step:8728 train loss:3.429470 +step:8729 train loss:3.433006 +step:8730 train loss:3.412393 +step:8731 train loss:3.414042 +step:8732 train loss:3.520032 +step:8733 train loss:3.439353 +step:8734 train loss:3.476202 +step:8735 train 
loss:3.546098 +step:8736 train loss:3.405790 +step:8737 train loss:3.431618 +step:8738 train loss:3.411065 +step:8739 train loss:3.473770 +step:8740 train loss:3.394198 +step:8741 train loss:3.447035 +step:8742 train loss:3.402853 +step:8743 train loss:3.443913 +step:8744 train loss:3.464100 +step:8745 train loss:3.505286 +step:8746 train loss:3.403885 +step:8747 train loss:3.509939 +step:8748 train loss:3.417459 +step:8749 train loss:3.454042 +step:8750 validation loss:3.366532 +step:8750 train loss:3.464002 +step:8751 train loss:3.505822 +step:8752 train loss:3.363492 +step:8753 train loss:3.408090 +step:8754 train loss:3.461859 +step:8755 train loss:3.445765 +step:8756 train loss:3.490340 +step:8757 train loss:3.399713 +step:8758 train loss:3.560183 +step:8759 train loss:3.403939 +step:8760 train loss:3.436689 +step:8761 train loss:3.511764 +step:8762 train loss:3.409772 +step:8763 train loss:3.380791 +step:8764 train loss:3.454196 +step:8765 train loss:3.521072 +step:8766 train loss:3.454011 +step:8767 train loss:3.410828 +step:8768 train loss:3.451750 +step:8769 train loss:3.425873 +step:8770 train loss:3.469986 +step:8771 train loss:3.441994 +step:8772 train loss:3.466078 +step:8773 train loss:3.423267 +step:8774 train loss:3.456714 +step:8775 train loss:3.455619 +step:8776 train loss:3.401003 +step:8777 train loss:3.437443 +step:8778 train loss:3.448096 +step:8779 train loss:3.472558 +step:8780 train loss:3.431444 +step:8781 train loss:3.437035 +step:8782 train loss:3.456402 +step:8783 train loss:3.436882 +step:8784 train loss:3.461195 +step:8785 train loss:3.446843 +step:8786 train loss:3.525419 +step:8787 train loss:3.468390 +step:8788 train loss:3.371246 +step:8789 train loss:3.470019 +step:8790 train loss:3.395705 +step:8791 train loss:3.447654 +step:8792 train loss:3.386994 +step:8793 train loss:3.476656 +step:8794 train loss:3.397238 +step:8795 train loss:3.469230 +step:8796 train loss:3.613498 +step:8797 train loss:3.358973 +step:8798 train loss:3.514446 +step:8799 train loss:3.432634 +step:8800 train loss:3.425727 +step:8801 train loss:3.448649 +step:8802 train loss:3.506735 +step:8803 train loss:3.463832 +step:8804 train loss:3.446303 +step:8805 train loss:3.464181 +step:8806 train loss:3.434175 +step:8807 train loss:3.425801 +step:8808 train loss:3.379643 +step:8809 train loss:3.506948 +step:8810 train loss:3.408107 +step:8811 train loss:3.396912 +step:8812 train loss:3.442125 +step:8813 train loss:3.353297 +step:8814 train loss:3.537813 +step:8815 train loss:3.385103 +step:8816 train loss:3.504170 +step:8817 train loss:3.438237 +step:8818 train loss:3.372282 +step:8819 train loss:3.488687 +step:8820 train loss:3.416786 +step:8821 train loss:3.445373 +step:8822 train loss:3.424702 +step:8823 train loss:3.440192 +step:8824 train loss:3.501394 +step:8825 train loss:3.476551 +step:8826 train loss:3.448578 +step:8827 train loss:3.406819 +step:8828 train loss:3.451061 +step:8829 train loss:3.427467 +step:8830 train loss:3.406446 +step:8831 train loss:3.484340 +step:8832 train loss:3.418959 +step:8833 train loss:3.453713 +step:8834 train loss:3.419070 +step:8835 train loss:3.357944 +step:8836 train loss:3.482833 +step:8837 train loss:3.386947 +step:8838 train loss:3.429623 +step:8839 train loss:3.415954 +step:8840 train loss:3.420078 +step:8841 train loss:3.430620 +step:8842 train loss:3.441782 +step:8843 train loss:3.454736 +step:8844 train loss:3.419369 +step:8845 train loss:3.442362 +step:8846 train loss:3.407484 +step:8847 train loss:3.448034 +step:8848 train loss:3.493522 
+step:8849 train loss:3.473380 +step:8850 train loss:3.466477 +step:8851 train loss:3.346871 +step:8852 train loss:3.452602 +step:8853 train loss:3.431705 +step:8854 train loss:3.403312 +step:8855 train loss:3.472391 +step:8856 train loss:3.464077 +step:8857 train loss:3.532444 +step:8858 train loss:3.397198 +step:8859 train loss:3.472830 +step:8860 train loss:3.428857 +step:8861 train loss:3.410039 +step:8862 train loss:3.410371 +step:8863 train loss:3.393223 +step:8864 train loss:3.462778 +step:8865 train loss:3.454123 +step:8866 train loss:3.339266 +step:8867 train loss:3.440992 +step:8868 train loss:3.468049 +step:8869 train loss:3.556156 +step:8870 train loss:3.428221 +step:8871 train loss:3.457321 +step:8872 train loss:3.439617 +step:8873 train loss:3.438559 +step:8874 train loss:3.495123 +step:8875 train loss:3.429410 +step:8876 train loss:3.466167 +step:8877 train loss:3.449768 +step:8878 train loss:3.498027 +step:8879 train loss:3.455719 +step:8880 train loss:3.405648 +step:8881 train loss:3.370535 +step:8882 train loss:3.439082 +step:8883 train loss:3.426509 +step:8884 train loss:3.517939 +step:8885 train loss:3.449871 +step:8886 train loss:3.455087 +step:8887 train loss:3.481142 +step:8888 train loss:3.438210 +step:8889 train loss:3.444328 +step:8890 train loss:3.434409 +step:8891 train loss:3.406789 +step:8892 train loss:3.488477 +step:8893 train loss:3.432918 +step:8894 train loss:3.448893 +step:8895 train loss:3.476416 +step:8896 train loss:3.394145 +step:8897 train loss:3.485344 +step:8898 train loss:3.418498 +step:8899 train loss:3.441675 +step:8900 train loss:3.408428 +step:8901 train loss:3.422019 +step:8902 train loss:3.464121 +step:8903 train loss:3.401643 +step:8904 train loss:3.452493 +step:8905 train loss:3.430049 +step:8906 train loss:3.419281 +step:8907 train loss:3.429631 +step:8908 train loss:3.498149 +step:8909 train loss:3.439298 +step:8910 train loss:3.403734 +step:8911 train loss:3.500392 +step:8912 train loss:3.397336 +step:8913 train loss:3.407725 +step:8914 train loss:3.500227 +step:8915 train loss:3.445638 +step:8916 train loss:3.471458 +step:8917 train loss:3.429114 +step:8918 train loss:3.433950 +step:8919 train loss:3.426173 +step:8920 train loss:3.450568 +step:8921 train loss:3.446472 +step:8922 train loss:3.424663 +step:8923 train loss:3.619832 +step:8924 train loss:3.505590 +step:8925 train loss:3.433750 +step:8926 train loss:3.446280 +step:8927 train loss:3.475919 +step:8928 train loss:3.432146 +step:8929 train loss:3.425895 +step:8930 train loss:3.483827 +step:8931 train loss:3.390910 +step:8932 train loss:3.495277 +step:8933 train loss:3.402469 +step:8934 train loss:3.443406 +step:8935 train loss:3.455596 +step:8936 train loss:3.491089 +step:8937 train loss:3.489790 +step:8938 train loss:3.430520 +step:8939 train loss:3.496686 +step:8940 train loss:3.449816 +step:8941 train loss:3.390674 +step:8942 train loss:3.472674 +step:8943 train loss:3.403383 +step:8944 train loss:3.453326 +step:8945 train loss:3.469873 +step:8946 train loss:3.324353 +step:8947 train loss:3.507587 +step:8948 train loss:3.354075 +step:8949 train loss:3.355557 +step:8950 train loss:3.397617 +step:8951 train loss:3.438170 +step:8952 train loss:3.456423 +step:8953 train loss:3.412271 +step:8954 train loss:3.518587 +step:8955 train loss:3.434071 +step:8956 train loss:3.461519 +step:8957 train loss:3.449381 +step:8958 train loss:3.428558 +step:8959 train loss:3.419813 +step:8960 train loss:3.382286 +step:8961 train loss:3.408548 +step:8962 train loss:3.460016 +step:8963 train 
loss:3.439043 +step:8964 train loss:3.422065 +step:8965 train loss:3.463196 +step:8966 train loss:3.424743 +step:8967 train loss:3.401989 +step:8968 train loss:3.385819 +step:8969 train loss:3.377086 +step:8970 train loss:3.455551 +step:8971 train loss:3.404796 +step:8972 train loss:3.610586 +step:8973 train loss:3.491707 +step:8974 train loss:3.450549 +step:8975 train loss:3.451742 +step:8976 train loss:3.412559 +step:8977 train loss:3.501724 +step:8978 train loss:3.483758 +step:8979 train loss:3.399126 +step:8980 train loss:3.498993 +step:8981 train loss:3.450789 +step:8982 train loss:3.420508 +step:8983 train loss:3.364950 +step:8984 train loss:3.489060 +step:8985 train loss:3.408926 +step:8986 train loss:3.445459 +step:8987 train loss:3.419080 +step:8988 train loss:3.467039 +step:8989 train loss:3.378615 +step:8990 train loss:3.516387 +step:8991 train loss:3.372303 +step:8992 train loss:3.426950 +step:8993 train loss:3.519895 +step:8994 train loss:3.420999 +step:8995 train loss:3.448515 +step:8996 train loss:3.419521 +step:8997 train loss:3.368037 +step:8998 train loss:3.371004 +step:8999 train loss:3.397888 +step:9000 validation loss:3.365124 total_sharp:5.3470e-04 L1_sharp:1.7671e-03 L2_sharp:6.2713e-05 L3_sharp:1.1866e-04 L4_sharp:6.1163e-05 L5_sharp:6.3239e-05 L6_sharp:8.4892e-05 L7_sharp:1.0246e-04 L8_sharp:7.0925e-05 L9_sharp:5.0291e-05 L10_sharp:3.5030e-05 L11_sharp:2.9840e-05 L12_sharp:4.7835e-05 total_fnorm:4.2379e+00 total_l1_linf:3.4571e+04 total_spectral:4.2379e+00 L1_fnorm:1.0907e+00 L2_fnorm:9.6759e-01 L3_fnorm:1.0928e+00 L4_fnorm:1.1667e+00 L5_fnorm:1.1906e+00 L6_fnorm:1.1946e+00 L7_fnorm:1.1925e+00 L8_fnorm:1.2008e+00 L9_fnorm:1.2009e+00 L10_fnorm:1.2079e+00 L11_fnorm:1.2023e+00 L12_fnorm:1.2063e+00 L1_l1linf:8.8557e-01 L2_l1linf:7.8780e-01 L3_l1linf:8.1392e-01 L4_l1linf:8.4445e-01 L5_l1linf:8.3835e-01 L6_l1linf:8.2097e-01 L7_l1linf:8.1515e-01 L8_l1linf:8.1357e-01 L9_l1linf:8.1424e-01 L10_l1linf:8.1683e-01 L11_l1linf:8.0044e-01 L12_l1linf:7.9598e-01 L1_spectral:2.4100e-02 L2_spectral:2.4081e-02 L3_spectral:2.4084e-02 L4_spectral:2.4091e-02 L5_spectral:2.4134e-02 L6_spectral:2.4094e-02 L7_spectral:2.4090e-02 L8_spectral:2.4086e-02 L9_spectral:2.4092e-02 L10_spectral:2.4085e-02 L11_spectral:2.4087e-02 L12_spectral:2.4096e-02 ip_v_neg_g:4.3894e-03 cos_v_neg_g:1.1356e-03 v_norm:4.2379e+00 g_norm:9.1208e-01 hv_norm:5.0284e-01 cos_v_hv:4.5064e-03 hg_norm:2.9013e+01 cos_g_hg:3.2398e-01 v_par:1.1793e-03 v_perp:4.2379e+00 L1_cos_v_neg_g:2.3582e-03 L1_v_norm:1.0907e+00 L2_cos_v_neg_g:3.3116e-03 L2_v_norm:9.6759e-01 L3_cos_v_neg_g:2.9441e-03 L3_v_norm:1.0928e+00 L4_cos_v_neg_g:1.1849e-03 L4_v_norm:1.1667e+00 L5_cos_v_neg_g:1.1374e-03 L5_v_norm:1.1906e+00 L6_cos_v_neg_g:1.6074e-03 L6_v_norm:1.1946e+00 L7_cos_v_neg_g:2.2762e-03 L7_v_norm:1.1925e+00 L8_cos_v_neg_g:1.7052e-03 L8_v_norm:1.2008e+00 L9_cos_v_neg_g:1.6768e-03 L9_v_norm:1.2009e+00 L10_cos_v_neg_g:1.4835e-03 L10_v_norm:1.2079e+00 L11_cos_v_neg_g:9.9409e-04 L11_v_norm:1.2023e+00 L12_cos_v_neg_g:8.0317e-04 L12_v_norm:1.2063e+00 +step:9000 train loss:3.481890 +step:9001 train loss:3.448638 +step:9002 train loss:3.458124 +step:9003 train loss:3.397087 +step:9004 train loss:3.395947 +step:9005 train loss:3.409287 +step:9006 train loss:3.413428 +step:9007 train loss:3.431133 +step:9008 train loss:3.386458 +step:9009 train loss:3.381310 +step:9010 train loss:3.420131 +step:9011 train loss:3.412107 +step:9012 train loss:3.530323 +step:9013 train loss:3.355465 +step:9014 train loss:3.426091 +step:9015 train loss:3.424603 +step:9016 
train loss:3.503099 +step:9017 train loss:3.444561 +step:9018 train loss:3.365095 +step:9019 train loss:3.453610 +step:9020 train loss:3.461441 +step:9021 train loss:3.419450 +step:9022 train loss:3.432072 +step:9023 train loss:3.427536 +step:9024 train loss:3.448545 +step:9025 train loss:3.429687 +step:9026 train loss:3.390338 +step:9027 train loss:3.434367 +step:9028 train loss:3.457261 +step:9029 train loss:3.475183 +step:9030 train loss:3.473105 +step:9031 train loss:3.436223 +step:9032 train loss:3.447504 +step:9033 train loss:3.434085 +step:9034 train loss:3.442165 +step:9035 train loss:3.445505 +step:9036 train loss:3.395789 +step:9037 train loss:3.388218 +step:9038 train loss:3.513780 +step:9039 train loss:3.417428 +step:9040 train loss:3.432142 +step:9041 train loss:3.480712 +step:9042 train loss:3.334412 +step:9043 train loss:3.431434 +step:9044 train loss:3.449018 +step:9045 train loss:3.397603 +step:9046 train loss:3.439804 +step:9047 train loss:3.432629 +step:9048 train loss:3.413227 +step:9049 train loss:3.448049 +step:9050 train loss:3.400921 +step:9051 train loss:3.440553 +step:9052 train loss:3.367991 +step:9053 train loss:3.497482 +step:9054 train loss:3.506308 +step:9055 train loss:3.430171 +step:9056 train loss:3.491824 +step:9057 train loss:3.346884 +step:9058 train loss:3.431731 +step:9059 train loss:3.504860 +step:9060 train loss:3.437630 +step:9061 train loss:3.465905 +step:9062 train loss:3.396226 +step:9063 train loss:3.530628 +step:9064 train loss:3.416896 +step:9065 train loss:3.425689 +step:9066 train loss:3.445690 +step:9067 train loss:3.407228 +step:9068 train loss:3.480052 +step:9069 train loss:3.438516 +step:9070 train loss:3.488302 +step:9071 train loss:3.424465 +step:9072 train loss:3.443256 +step:9073 train loss:3.401572 +step:9074 train loss:3.485734 +step:9075 train loss:3.429937 +step:9076 train loss:3.398908 +step:9077 train loss:3.473499 +step:9078 train loss:3.411749 +step:9079 train loss:3.460813 +step:9080 train loss:3.391458 +step:9081 train loss:3.430247 +step:9082 train loss:3.457862 +step:9083 train loss:3.485435 +step:9084 train loss:3.375294 +step:9085 train loss:3.448513 +step:9086 train loss:3.430002 +step:9087 train loss:3.378141 +step:9088 train loss:3.438296 +step:9089 train loss:3.456116 +step:9090 train loss:3.388590 +step:9091 train loss:3.491321 +step:9092 train loss:3.416854 +step:9093 train loss:3.413826 +step:9094 train loss:3.539210 +step:9095 train loss:3.407531 +step:9096 train loss:3.424550 +step:9097 train loss:3.413950 +step:9098 train loss:3.402628 +step:9099 train loss:3.525043 +step:9100 train loss:3.559632 +step:9101 train loss:3.475529 +step:9102 train loss:3.417833 +step:9103 train loss:3.425827 +step:9104 train loss:3.512529 +step:9105 train loss:3.371503 +step:9106 train loss:3.500454 +step:9107 train loss:3.432964 +step:9108 train loss:3.416341 +step:9109 train loss:3.442499 +step:9110 train loss:3.442046 +step:9111 train loss:3.425776 +step:9112 train loss:3.427859 +step:9113 train loss:3.465602 +step:9114 train loss:3.406173 +step:9115 train loss:3.431712 +step:9116 train loss:3.461861 +step:9117 train loss:3.469134 +step:9118 train loss:3.441287 +step:9119 train loss:3.358413 +step:9120 train loss:3.459031 +step:9121 train loss:3.491940 +step:9122 train loss:3.437977 +step:9123 train loss:3.457189 +step:9124 train loss:3.486094 +step:9125 train loss:3.443892 +step:9126 train loss:3.415415 +step:9127 train loss:3.447670 +step:9128 train loss:3.502636 +step:9129 train loss:3.457189 +step:9130 train loss:3.470042 
+step:9131 train loss:3.449576 +step:9132 train loss:3.458176 +step:9133 train loss:3.446998 +step:9134 train loss:3.418982 +step:9135 train loss:3.446867 +step:9136 train loss:3.445582 +step:9137 train loss:3.496109 +step:9138 train loss:3.416680 +step:9139 train loss:3.491717 +step:9140 train loss:3.413430 +step:9141 train loss:3.392287 +step:9142 train loss:3.570359 +step:9143 train loss:3.398221 +step:9144 train loss:3.490985 +step:9145 train loss:3.495899 +step:9146 train loss:3.410205 +step:9147 train loss:3.488016 +step:9148 train loss:3.503747 +step:9149 train loss:3.413424 +step:9150 train loss:3.440106 +step:9151 train loss:3.496645 +step:9152 train loss:3.453503 +step:9153 train loss:3.420251 +step:9154 train loss:3.435869 +step:9155 train loss:3.395711 +step:9156 train loss:3.403780 +step:9157 train loss:3.421762 +step:9158 train loss:3.400682 +step:9159 train loss:3.491270 +step:9160 train loss:3.374617 +step:9161 train loss:3.402613 +step:9162 train loss:3.488646 +step:9163 train loss:3.432847 +step:9164 train loss:3.406320 +step:9165 train loss:3.401309 +step:9166 train loss:3.456568 +step:9167 train loss:3.402278 +step:9168 train loss:3.440789 +step:9169 train loss:3.379413 +step:9170 train loss:3.400654 +step:9171 train loss:3.464243 +step:9172 train loss:3.390853 +step:9173 train loss:3.510452 +step:9174 train loss:3.439216 +step:9175 train loss:3.418409 +step:9176 train loss:3.399385 +step:9177 train loss:3.445347 +step:9178 train loss:3.393356 +step:9179 train loss:3.349509 +step:9180 train loss:3.444930 +step:9181 train loss:3.453895 +step:9182 train loss:3.424052 +step:9183 train loss:3.432203 +step:9184 train loss:3.427137 +step:9185 train loss:3.439957 +step:9186 train loss:3.402394 +step:9187 train loss:3.475787 +step:9188 train loss:3.518235 +step:9189 train loss:3.437548 +step:9190 train loss:3.440991 +step:9191 train loss:3.433043 +step:9192 train loss:3.446397 +step:9193 train loss:3.447059 +step:9194 train loss:3.382264 +step:9195 train loss:3.373347 +step:9196 train loss:3.426347 +step:9197 train loss:3.381987 +step:9198 train loss:3.454994 +step:9199 train loss:3.404063 +step:9200 train loss:3.430222 +step:9201 train loss:3.466751 +step:9202 train loss:3.453305 +step:9203 train loss:3.412391 +step:9204 train loss:3.610658 +step:9205 train loss:3.522346 +step:9206 train loss:3.436062 +step:9207 train loss:3.488358 +step:9208 train loss:3.463856 +step:9209 train loss:3.482161 +step:9210 train loss:3.377426 +step:9211 train loss:3.407400 +step:9212 train loss:3.406471 +step:9213 train loss:3.466035 +step:9214 train loss:3.409049 +step:9215 train loss:3.476682 +step:9216 train loss:3.438664 +step:9217 train loss:3.379909 +step:9218 train loss:3.471330 +step:9219 train loss:3.429242 +step:9220 train loss:3.476347 +step:9221 train loss:3.527663 +step:9222 train loss:3.472004 +step:9223 train loss:3.645425 +step:9224 train loss:3.477514 +step:9225 train loss:3.410089 +step:9226 train loss:3.428233 +step:9227 train loss:3.442770 +step:9228 train loss:3.445170 +step:9229 train loss:3.405205 +step:9230 train loss:3.466737 +step:9231 train loss:3.351725 +step:9232 train loss:3.410024 +step:9233 train loss:3.427989 +step:9234 train loss:3.485242 +step:9235 train loss:3.488795 +step:9236 train loss:3.396784 +step:9237 train loss:3.458965 +step:9238 train loss:3.430568 +step:9239 train loss:3.422366 +step:9240 train loss:3.392221 +step:9241 train loss:3.424369 +step:9242 train loss:3.433161 +step:9243 train loss:3.431860 +step:9244 train loss:3.405364 +step:9245 train 
loss:3.412780 +step:9246 train loss:3.412799 +step:9247 train loss:3.420357 +step:9248 train loss:3.434672 +step:9249 train loss:3.431585 +step:9250 validation loss:3.363134 +step:9250 train loss:3.470295 +step:9251 train loss:3.412668 +step:9252 train loss:3.479424 +step:9253 train loss:3.476999 +step:9254 train loss:3.404261 +step:9255 train loss:3.521910 +step:9256 train loss:3.397114 +step:9257 train loss:3.341583 +step:9258 train loss:3.425050 +step:9259 train loss:3.425505 +step:9260 train loss:3.522596 +step:9261 train loss:3.401833 +step:9262 train loss:3.474037 +step:9263 train loss:3.375712 +step:9264 train loss:3.520855 +step:9265 train loss:3.549850 +step:9266 train loss:3.478436 +step:9267 train loss:3.424319 +step:9268 train loss:3.423033 +step:9269 train loss:3.446484 +step:9270 train loss:3.369179 +step:9271 train loss:3.480433 +step:9272 train loss:3.419109 +step:9273 train loss:3.442523 +step:9274 train loss:3.442673 +step:9275 train loss:3.438765 +step:9276 train loss:3.467737 +step:9277 train loss:3.445124 +step:9278 train loss:3.455541 +step:9279 train loss:3.450764 +step:9280 train loss:3.448535 +step:9281 train loss:3.423158 +step:9282 train loss:3.542220 +step:9283 train loss:3.424980 +step:9284 train loss:3.391948 +step:9285 train loss:3.413167 +step:9286 train loss:3.462334 +step:9287 train loss:3.437494 +step:9288 train loss:3.443996 +step:9289 train loss:3.413723 +step:9290 train loss:3.440073 +step:9291 train loss:3.422079 +step:9292 train loss:3.456592 +step:9293 train loss:3.514507 +step:9294 train loss:3.437415 +step:9295 train loss:3.419506 +step:9296 train loss:3.372768 +step:9297 train loss:3.444829 +step:9298 train loss:3.382815 +step:9299 train loss:3.368442 +step:9300 train loss:3.472770 +step:9301 train loss:3.500787 +step:9302 train loss:3.437703 +step:9303 train loss:3.485449 +step:9304 train loss:3.405351 +step:9305 train loss:3.398284 +step:9306 train loss:3.400073 +step:9307 train loss:3.401189 +step:9308 train loss:3.376013 +step:9309 train loss:3.362587 +step:9310 train loss:3.419087 +step:9311 train loss:3.484885 +step:9312 train loss:3.430829 +step:9313 train loss:3.379743 +step:9314 train loss:3.409362 +step:9315 train loss:3.437696 +step:9316 train loss:3.424737 +step:9317 train loss:3.402712 +step:9318 train loss:3.486331 +step:9319 train loss:3.396873 +step:9320 train loss:3.420996 +step:9321 train loss:3.430528 +step:9322 train loss:3.438159 +step:9323 train loss:3.517671 +step:9324 train loss:3.457096 +step:9325 train loss:3.397369 +step:9326 train loss:3.474452 +step:9327 train loss:3.468546 +step:9328 train loss:3.472821 +step:9329 train loss:3.355792 +step:9330 train loss:3.530293 +step:9331 train loss:3.457340 +step:9332 train loss:3.479626 +step:9333 train loss:3.497899 +step:9334 train loss:3.433056 +step:9335 train loss:3.530617 +step:9336 train loss:3.488643 +step:9337 train loss:3.440421 +step:9338 train loss:3.497471 +step:9339 train loss:3.474106 +step:9340 train loss:3.434398 +step:9341 train loss:3.526705 +step:9342 train loss:3.419343 +step:9343 train loss:3.415406 +step:9344 train loss:3.417750 +step:9345 train loss:3.562231 +step:9346 train loss:3.396132 +step:9347 train loss:3.410119 +step:9348 train loss:3.436732 +step:9349 train loss:3.378030 +step:9350 train loss:3.456105 +step:9351 train loss:3.431185 +step:9352 train loss:3.417836 +step:9353 train loss:3.451695 +step:9354 train loss:3.416477 +step:9355 train loss:3.411695 +step:9356 train loss:3.462479 +step:9357 train loss:3.411418 +step:9358 train loss:3.447326 
+step:9359 train loss:3.388706 +step:9360 train loss:3.404397 +step:9361 train loss:3.405404 +step:9362 train loss:3.393124 +step:9363 train loss:3.454932 +step:9364 train loss:3.433782 +step:9365 train loss:3.438920 +step:9366 train loss:3.434074 +step:9367 train loss:3.447839 +step:9368 train loss:3.422834 +step:9369 train loss:3.421041 +step:9370 train loss:3.425335 +step:9371 train loss:3.452971 +step:9372 train loss:3.414169 +step:9373 train loss:3.397804 +step:9374 train loss:3.438243 +step:9375 train loss:3.445709 +step:9376 train loss:3.384371 +step:9377 train loss:3.461350 +step:9378 train loss:3.459743 +step:9379 train loss:3.489433 +step:9380 train loss:3.419159 +step:9381 train loss:3.427296 +step:9382 train loss:3.402432 +step:9383 train loss:3.400221 +step:9384 train loss:3.368256 +step:9385 train loss:3.444683 +step:9386 train loss:3.470133 +step:9387 train loss:3.448482 +step:9388 train loss:3.388702 +step:9389 train loss:3.402984 +step:9390 train loss:3.442674 +step:9391 train loss:3.452564 +step:9392 train loss:3.412311 +step:9393 train loss:3.406101 +step:9394 train loss:3.434827 +step:9395 train loss:3.428698 +step:9396 train loss:3.577741 +step:9397 train loss:3.467485 +step:9398 train loss:3.486412 +step:9399 train loss:3.441423 +step:9400 train loss:3.442277 +step:9401 train loss:3.432036 +step:9402 train loss:3.433984 +step:9403 train loss:3.365484 +step:9404 train loss:3.443310 +step:9405 train loss:3.402917 +step:9406 train loss:3.458788 +step:9407 train loss:3.396407 +step:9408 train loss:3.335584 +step:9409 train loss:3.400024 +step:9410 train loss:3.481586 +step:9411 train loss:3.440274 +step:9412 train loss:3.475281 +step:9413 train loss:3.487863 +step:9414 train loss:3.427678 +step:9415 train loss:3.421176 +step:9416 train loss:3.434196 +step:9417 train loss:3.391127 +step:9418 train loss:3.418731 +step:9419 train loss:3.385025 +step:9420 train loss:3.404109 +step:9421 train loss:3.453728 +step:9422 train loss:3.402514 +step:9423 train loss:3.468162 +step:9424 train loss:3.409110 +step:9425 train loss:3.449227 +step:9426 train loss:3.454962 +step:9427 train loss:3.429804 +step:9428 train loss:3.536540 +step:9429 train loss:3.422796 +step:9430 train loss:3.380111 +step:9431 train loss:3.468922 +step:9432 train loss:3.430752 +step:9433 train loss:3.472144 +step:9434 train loss:3.424109 +step:9435 train loss:3.449201 +step:9436 train loss:3.420604 +step:9437 train loss:3.433351 +step:9438 train loss:3.426856 +step:9439 train loss:3.426783 +step:9440 train loss:3.417181 +step:9441 train loss:3.427353 +step:9442 train loss:3.368779 +step:9443 train loss:3.422812 +step:9444 train loss:3.489084 +step:9445 train loss:3.421196 +step:9446 train loss:3.396986 +step:9447 train loss:3.462264 +step:9448 train loss:3.398283 +step:9449 train loss:3.421465 +step:9450 train loss:3.465593 +step:9451 train loss:3.379285 +step:9452 train loss:3.430800 +step:9453 train loss:3.411022 +step:9454 train loss:3.472638 +step:9455 train loss:3.455210 +step:9456 train loss:3.377183 +step:9457 train loss:3.426866 +step:9458 train loss:3.411430 +step:9459 train loss:3.407589 +step:9460 train loss:3.446447 +step:9461 train loss:3.474078 +step:9462 train loss:3.425287 +step:9463 train loss:3.455189 +step:9464 train loss:3.408835 +step:9465 train loss:3.501767 +step:9466 train loss:3.449020 +step:9467 train loss:3.470019 +step:9468 train loss:3.420669 +step:9469 train loss:3.407417 +step:9470 train loss:3.402955 +step:9471 train loss:3.444605 +step:9472 train loss:3.467266 +step:9473 train 
loss:3.458936 +step:9474 train loss:3.400790 +step:9475 train loss:3.395629 +step:9476 train loss:3.613138 +step:9477 train loss:3.486918 +step:9478 train loss:3.462720 +step:9479 train loss:3.559757 +step:9480 train loss:3.407340 +step:9481 train loss:3.442710 +step:9482 train loss:3.467741 +step:9483 train loss:3.422396 +step:9484 train loss:3.455302 +step:9485 train loss:3.375982 +step:9486 train loss:3.412306 +step:9487 train loss:3.446977 +step:9488 train loss:3.400498 +step:9489 train loss:3.443068 +step:9490 train loss:3.409892 +step:9491 train loss:3.455712 +step:9492 train loss:3.471032 +step:9493 train loss:3.446425 +step:9494 train loss:3.454904 +step:9495 train loss:3.407480 +step:9496 train loss:3.469694 +step:9497 train loss:3.483857 +step:9498 train loss:3.431870 +step:9499 train loss:3.480309 +step:9500 validation loss:3.366589 total_sharp:5.5476e-04 L1_sharp:1.6515e-03 L2_sharp:7.4939e-05 L3_sharp:2.1263e-04 L4_sharp:4.6191e-05 L5_sharp:5.7108e-05 L6_sharp:8.6623e-05 L7_sharp:1.0711e-04 L8_sharp:6.2448e-05 L9_sharp:4.9663e-05 L10_sharp:3.7309e-05 L11_sharp:3.1662e-05 L12_sharp:5.2915e-05 total_fnorm:4.2354e+00 total_l1_linf:3.4533e+04 total_spectral:4.2354e+00 L1_fnorm:1.0917e+00 L2_fnorm:9.8018e-01 L3_fnorm:1.0880e+00 L4_fnorm:1.1693e+00 L5_fnorm:1.1911e+00 L6_fnorm:1.1922e+00 L7_fnorm:1.1925e+00 L8_fnorm:1.1987e+00 L9_fnorm:1.1967e+00 L10_fnorm:1.2069e+00 L11_fnorm:1.1998e+00 L12_fnorm:1.2060e+00 L1_l1linf:8.8684e-01 L2_l1linf:7.9133e-01 L3_l1linf:8.1859e-01 L4_l1linf:8.4201e-01 L5_l1linf:8.2820e-01 L6_l1linf:8.1912e-01 L7_l1linf:8.1971e-01 L8_l1linf:8.1183e-01 L9_l1linf:8.0895e-01 L10_l1linf:8.1013e-01 L11_l1linf:7.9169e-01 L12_l1linf:8.0796e-01 L1_spectral:2.4101e-02 L2_spectral:2.4072e-02 L3_spectral:2.4084e-02 L4_spectral:2.4096e-02 L5_spectral:2.4128e-02 L6_spectral:2.4090e-02 L7_spectral:2.4091e-02 L8_spectral:2.4084e-02 L9_spectral:2.4088e-02 L10_spectral:2.4081e-02 L11_spectral:2.4090e-02 L12_spectral:2.4089e-02 ip_v_neg_g:5.5268e-03 cos_v_neg_g:1.5210e-03 v_norm:4.2354e+00 g_norm:8.5790e-01 hv_norm:4.0059e-01 cos_v_hv:5.8654e-03 hg_norm:1.7189e+01 cos_g_hg:3.9522e-01 v_par:1.5371e-03 v_perp:4.2354e+00 L1_cos_v_neg_g:2.7043e-03 L1_v_norm:1.0917e+00 L2_cos_v_neg_g:2.1583e-03 L2_v_norm:9.8018e-01 L3_cos_v_neg_g:3.2583e-03 L3_v_norm:1.0880e+00 L4_cos_v_neg_g:1.5958e-03 L4_v_norm:1.1693e+00 L5_cos_v_neg_g:1.8820e-03 L5_v_norm:1.1911e+00 L6_cos_v_neg_g:2.8050e-03 L6_v_norm:1.1922e+00 L7_cos_v_neg_g:3.2657e-03 L7_v_norm:1.1925e+00 L8_cos_v_neg_g:4.3100e-03 L8_v_norm:1.1987e+00 L9_cos_v_neg_g:3.9233e-03 L9_v_norm:1.1967e+00 L10_cos_v_neg_g:3.7273e-03 L10_v_norm:1.2069e+00 L11_cos_v_neg_g:2.6586e-03 L11_v_norm:1.1998e+00 L12_cos_v_neg_g:2.6180e-03 L12_v_norm:1.2060e+00 +step:9500 train loss:3.473296 +step:9501 train loss:3.451317 +step:9502 train loss:3.423625 +step:9503 train loss:3.438174 +step:9504 train loss:3.394041 +step:9505 train loss:3.416006 +step:9506 train loss:3.434221 +step:9507 train loss:3.418770 +step:9508 train loss:3.613908 +step:9509 train loss:3.431711 +step:9510 train loss:3.415773 +step:9511 train loss:3.443870 +step:9512 train loss:3.476667 +step:9513 train loss:3.462286 +step:9514 train loss:3.432738 +step:9515 train loss:3.333598 +step:9516 train loss:3.435151 +step:9517 train loss:3.469236 +step:9518 train loss:3.445759 +step:9519 train loss:3.455597 +step:9520 train loss:3.343600 +step:9521 train loss:3.336938 +step:9522 train loss:3.455800 +step:9523 train loss:3.450022 +step:9524 train loss:3.451023 +step:9525 train loss:3.497489 +step:9526 
train loss:3.513709 +step:9527 train loss:3.469112 +step:9528 train loss:3.401368 +step:9529 train loss:3.447285 +step:9530 train loss:3.491330 +step:9531 train loss:3.396645 +step:9532 train loss:3.450850 +step:9533 train loss:3.424215 +step:9534 train loss:3.502417 +step:9535 train loss:3.425012 +step:9536 train loss:3.403640 +step:9537 train loss:3.351396 +step:9538 train loss:3.368239 +step:9539 train loss:3.439401 +step:9540 train loss:3.359350 +step:9541 train loss:3.416782 +step:9542 train loss:3.545346 +step:9543 train loss:3.442775 +step:9544 train loss:3.484542 +step:9545 train loss:3.417011 +step:9546 train loss:3.443496 +step:9547 train loss:3.484308 +step:9548 train loss:3.426878 +step:9549 train loss:3.392975 +step:9550 train loss:3.423947 +step:9551 train loss:3.418010 +step:9552 train loss:3.444608 +step:9553 train loss:3.435468 +step:9554 train loss:3.484205 +step:9555 train loss:3.495212 +step:9556 train loss:3.396822 +step:9557 train loss:3.417679 +step:9558 train loss:3.480457 +step:9559 train loss:3.485271 +step:9560 train loss:3.401331 +step:9561 train loss:3.427270 +step:9562 train loss:3.465149 +step:9563 train loss:3.413139 +step:9564 train loss:3.448247 +step:9565 train loss:3.427049 +step:9566 train loss:3.395341 +step:9567 train loss:3.465343 +step:9568 train loss:3.434320 +step:9569 train loss:3.475994 +step:9570 train loss:3.370977 +step:9571 train loss:3.443726 +step:9572 train loss:3.388249 +step:9573 train loss:3.416994 +step:9574 train loss:3.396677 +step:9575 train loss:3.469337 +step:9576 train loss:3.358901 +step:9577 train loss:3.408521 +step:9578 train loss:3.411061 +step:9579 train loss:3.411970 +step:9580 train loss:3.473744 +step:9581 train loss:3.468429 +step:9582 train loss:3.431703 +step:9583 train loss:3.465269 +step:9584 train loss:3.399264 +step:9585 train loss:3.418504 +step:9586 train loss:3.474962 +step:9587 train loss:3.438712 +step:9588 train loss:3.424787 +step:9589 train loss:3.484942 +step:9590 train loss:3.448280 +step:9591 train loss:3.414369 +step:9592 train loss:3.436076 +step:9593 train loss:3.436902 +step:9594 train loss:3.451194 +step:9595 train loss:3.429246 +step:9596 train loss:3.514272 +step:9597 train loss:3.419894 +step:9598 train loss:3.381020 +step:9599 train loss:3.388977 +step:9600 train loss:3.475049 +step:9601 train loss:3.392257 +step:9602 train loss:3.476230 +step:9603 train loss:3.469531 +step:9604 train loss:3.351980 +step:9605 train loss:3.439287 +step:9606 train loss:3.496185 +step:9607 train loss:3.413759 +step:9608 train loss:3.422361 +step:9609 train loss:3.431354 +step:9610 train loss:3.474769 +step:9611 train loss:3.407116 +step:9612 train loss:3.417671 +step:9613 train loss:3.453001 +step:9614 train loss:3.423839 +step:9615 train loss:3.617594 +step:9616 train loss:3.427842 +step:9617 train loss:3.407591 +step:9618 train loss:3.366054 +step:9619 train loss:3.430162 +step:9620 train loss:3.486319 +step:9621 train loss:3.410194 +step:9622 train loss:3.423707 +step:9623 train loss:3.459463 +step:9624 train loss:3.446671 +step:9625 train loss:3.461262 +step:9626 train loss:3.432817 +step:9627 train loss:3.513420 +step:9628 train loss:3.478466 +step:9629 train loss:3.393462 +step:9630 train loss:3.452108 +step:9631 train loss:3.438333 +step:9632 train loss:3.406032 +step:9633 train loss:3.451236 +step:9634 train loss:3.516920 +step:9635 train loss:3.417601 +step:9636 train loss:3.369363 +step:9637 train loss:3.500351 +step:9638 train loss:3.381265 +step:9639 train loss:3.350523 +step:9640 train loss:3.476848 
+step:9641 train loss:3.447052 +step:9642 train loss:3.424171 +step:9643 train loss:3.427702 +step:9644 train loss:3.482874 +step:9645 train loss:3.411468 +step:9646 train loss:3.448150 +step:9647 train loss:3.460768 +step:9648 train loss:3.408440 +step:9649 train loss:3.380277 +step:9650 train loss:3.399037 +step:9651 train loss:3.490723 +step:9652 train loss:3.469876 +step:9653 train loss:3.414731 +step:9654 train loss:3.393772 +step:9655 train loss:3.390641 +step:9656 train loss:3.382860 +step:9657 train loss:3.410488 +step:9658 train loss:3.466736 +step:9659 train loss:3.579679 +step:9660 train loss:3.358978 +step:9661 train loss:3.377641 +step:9662 train loss:3.395062 +step:9663 train loss:3.439670 +step:9664 train loss:3.488168 +step:9665 train loss:3.331462 +step:9666 train loss:3.375382 +step:9667 train loss:3.511465 +step:9668 train loss:3.491529 +step:9669 train loss:3.510533 +step:9670 train loss:3.490209 +step:9671 train loss:3.489799 +step:9672 train loss:3.403195 +step:9673 train loss:3.423235 +step:9674 train loss:3.435019 +step:9675 train loss:3.433460 +step:9676 train loss:3.391308 +step:9677 train loss:3.398460 +step:9678 train loss:3.434873 +step:9679 train loss:3.426167 +step:9680 train loss:3.426675 +step:9681 train loss:3.410850 +step:9682 train loss:3.479529 +step:9683 train loss:3.454861 +step:9684 train loss:3.370705 +step:9685 train loss:3.458076 +step:9686 train loss:3.488098 +step:9687 train loss:3.394595 +step:9688 train loss:3.485292 +step:9689 train loss:3.582204 +step:9690 train loss:3.423562 +step:9691 train loss:3.411006 +step:9692 train loss:3.376163 +step:9693 train loss:3.370995 +step:9694 train loss:3.391479 +step:9695 train loss:3.502677 +step:9696 train loss:3.530769 +step:9697 train loss:3.438953 +step:9698 train loss:3.476483 +step:9699 train loss:3.437588 +step:9700 train loss:3.436767 +step:9701 train loss:3.484484 +step:9702 train loss:3.404040 +step:9703 train loss:3.428608 +step:9704 train loss:3.508768 +step:9705 train loss:3.404762 +step:9706 train loss:3.402445 +step:9707 train loss:3.450818 +step:9708 train loss:3.398890 +step:9709 train loss:3.420680 +step:9710 train loss:3.443191 +step:9711 train loss:3.411790 +step:9712 train loss:3.423963 +step:9713 train loss:3.474685 +step:9714 train loss:3.430697 +step:9715 train loss:3.449986 +step:9716 train loss:3.474766 +step:9717 train loss:3.394363 +step:9718 train loss:3.397755 +step:9719 train loss:3.483569 +step:9720 train loss:3.414491 +step:9721 train loss:3.404171 +step:9722 train loss:3.468817 +step:9723 train loss:3.416971 +step:9724 train loss:3.441502 +step:9725 train loss:3.494727 +step:9726 train loss:3.436760 +step:9727 train loss:3.414183 +step:9728 train loss:3.450303 +step:9729 train loss:3.480778 +step:9730 train loss:3.553137 +step:9731 train loss:3.468907 +step:9732 train loss:3.430255 +step:9733 train loss:3.473408 +step:9734 train loss:3.393233 +step:9735 train loss:3.500194 +step:9736 train loss:3.403545 +step:9737 train loss:3.460087 +step:9738 train loss:3.427905 +step:9739 train loss:3.499501 +step:9740 train loss:3.463469 +step:9741 train loss:3.402863 +step:9742 train loss:3.499741 +step:9743 train loss:3.368921 +step:9744 train loss:3.430802 +step:9745 train loss:3.391239 +step:9746 train loss:3.428096 +step:9747 train loss:3.418749 +step:9748 train loss:3.317655 +step:9749 train loss:3.417737 +step:9750 validation loss:3.358233 +step:9750 train loss:3.400009 +step:9751 train loss:3.541120 +step:9752 train loss:3.423127 +step:9753 train loss:3.380259 +step:9754 
train loss:3.409354 +step:9755 train loss:3.411150 +step:9756 train loss:3.409127 +step:9757 train loss:3.374742 +step:9758 train loss:3.366607 +step:9759 train loss:3.415766 +step:9760 train loss:3.357403 +step:9761 train loss:3.403974 +step:9762 train loss:3.395964 +step:9763 train loss:3.416247 +step:9764 train loss:3.405598 +step:9765 train loss:3.367310 +step:9766 train loss:3.456193 +step:9767 train loss:3.412681 +step:9768 train loss:3.424415 +step:9769 train loss:3.380167 +step:9770 train loss:3.378247 +step:9771 train loss:3.427499 +step:9772 train loss:3.437786 +step:9773 train loss:3.416893 +step:9774 train loss:3.389404 +step:9775 train loss:3.477408 +step:9776 train loss:3.475905 +step:9777 train loss:3.363074 +step:9778 train loss:3.372300 +step:9779 train loss:3.376433 +step:9780 train loss:3.378631 +step:9781 train loss:3.396666 +step:9782 train loss:3.471875 +step:9783 train loss:3.383257 +step:9784 train loss:3.408870 +step:9785 train loss:3.404075 +step:9786 train loss:3.435576 +step:9787 train loss:3.463410 +step:9788 train loss:3.387632 +step:9789 train loss:3.399993 +step:9790 train loss:3.360282 +step:9791 train loss:3.408437 +step:9792 train loss:3.423792 +step:9793 train loss:3.440004 +step:9794 train loss:3.417298 +step:9795 train loss:3.421102 +step:9796 train loss:3.406723 +step:9797 train loss:3.401972 +step:9798 train loss:3.419685 +step:9799 train loss:3.419344 +step:9800 train loss:3.491175 +step:9801 train loss:3.414340 +step:9802 train loss:3.475412 +step:9803 train loss:3.331155 +step:9804 train loss:3.427186 +step:9805 train loss:3.433117 +step:9806 train loss:3.405384 +step:9807 train loss:3.376575 +step:9808 train loss:3.294553 +step:9809 train loss:3.476630 +step:9810 train loss:3.433062 +step:9811 train loss:3.417426 +step:9812 train loss:3.392993 +step:9813 train loss:3.472312 +step:9814 train loss:3.463948 +step:9815 train loss:3.366141 +step:9816 train loss:3.371185 +step:9817 train loss:3.400214 +step:9818 train loss:3.426723 +step:9819 train loss:3.399282 +step:9820 train loss:3.465011 +step:9821 train loss:3.447059 +step:9822 train loss:3.419441 +step:9823 train loss:3.479962 +step:9824 train loss:3.384696 +step:9825 train loss:3.470875 +step:9826 train loss:3.465926 +step:9827 train loss:3.470843 +step:9828 train loss:3.386752 +step:9829 train loss:3.393863 +step:9830 train loss:3.381885 +step:9831 train loss:3.437949 +step:9832 train loss:3.452810 +step:9833 train loss:3.363355 +step:9834 train loss:3.419992 +step:9835 train loss:3.379518 +step:9836 train loss:3.450382 +step:9837 train loss:3.416212 +step:9838 train loss:3.455724 +step:9839 train loss:3.432607 +step:9840 train loss:3.400197 +step:9841 train loss:3.407454 +step:9842 train loss:3.467918 +step:9843 train loss:3.462452 +step:9844 train loss:3.409356 +step:9845 train loss:3.440166 +step:9846 train loss:3.374548 +step:9847 train loss:3.506569 +step:9848 train loss:3.427372 +step:9849 train loss:3.453249 +step:9850 train loss:3.373189 +step:9851 train loss:3.426767 +step:9852 train loss:3.389245 +step:9853 train loss:3.411336 +step:9854 train loss:3.425783 +step:9855 train loss:3.373619 +step:9856 train loss:3.376077 +step:9857 train loss:3.360966 +step:9858 train loss:3.428480 +step:9859 train loss:3.347551 +step:9860 train loss:3.587409 +step:9861 train loss:3.411514 +step:9862 train loss:3.376961 +step:9863 train loss:3.362568 +step:9864 train loss:3.485031 +step:9865 train loss:3.362739 +step:9866 train loss:3.402900 +step:9867 train loss:3.402159 +step:9868 train loss:3.460040 
+step:9869 train loss:3.426706 +step:9870 train loss:3.394149 +step:9871 train loss:3.437424 +step:9872 train loss:3.377667 +step:9873 train loss:3.433969 +step:9874 train loss:3.394281 +step:9875 train loss:3.400619 +step:9876 train loss:3.363049 +step:9877 train loss:3.415294 +step:9878 train loss:3.444955 +step:9879 train loss:3.447584 +step:9880 train loss:3.377416 +step:9881 train loss:3.433585 +step:9882 train loss:3.391799 +step:9883 train loss:3.403488 +step:9884 train loss:3.395910 +step:9885 train loss:3.461077 +step:9886 train loss:3.425855 +step:9887 train loss:3.425266 +step:9888 train loss:3.450300 +step:9889 train loss:3.480274 +step:9890 train loss:3.390996 +step:9891 train loss:3.395981 +step:9892 train loss:3.368425 +step:9893 train loss:3.487702 +step:9894 train loss:3.400838 +step:9895 train loss:3.335027 +step:9896 train loss:3.491695 +step:9897 train loss:3.367948 +step:9898 train loss:3.437990 +step:9899 train loss:3.417964 +step:9900 train loss:3.461430 +step:9901 train loss:3.380844 +step:9902 train loss:3.428196 +step:9903 train loss:3.398355 +step:9904 train loss:3.449869 +step:9905 train loss:3.353781 +step:9906 train loss:3.393912 +step:9907 train loss:3.402594 +step:9908 train loss:3.397499 +step:9909 train loss:3.415739 +step:9910 train loss:3.436985 +step:9911 train loss:3.523769 +step:9912 train loss:3.397384 +step:9913 train loss:3.401132 +step:9914 train loss:3.408094 +step:9915 train loss:3.411860 +step:9916 train loss:3.359489 +step:9917 train loss:3.399258 +step:9918 train loss:3.393071 +step:9919 train loss:3.557688 +step:9920 train loss:3.344420 +step:9921 train loss:3.435347 +step:9922 train loss:3.395231 +step:9923 train loss:3.453313 +step:9924 train loss:3.367747 +step:9925 train loss:3.426497 +step:9926 train loss:3.405738 +step:9927 train loss:3.448309 +step:9928 train loss:3.372897 +step:9929 train loss:3.412205 +step:9930 train loss:3.506305 +step:9931 train loss:3.467638 +step:9932 train loss:3.354091 +step:9933 train loss:3.449422 +step:9934 train loss:3.369021 +step:9935 train loss:3.483948 +step:9936 train loss:3.390242 +step:9937 train loss:3.416550 +step:9938 train loss:3.403451 +step:9939 train loss:3.468506 +step:9940 train loss:3.505218 +step:9941 train loss:3.377137 +step:9942 train loss:3.424334 +step:9943 train loss:3.549819 +step:9944 train loss:3.420865 +step:9945 train loss:3.444933 +step:9946 train loss:3.413363 +step:9947 train loss:3.362170 +step:9948 train loss:3.406816 +step:9949 train loss:3.303204 +step:9950 train loss:3.452210 +step:9951 train loss:3.372297 +step:9952 train loss:3.442294 +step:9953 train loss:3.404837 +step:9954 train loss:3.461728 +step:9955 train loss:3.436286 +step:9956 train loss:3.440163 +step:9957 train loss:3.415236 +step:9958 train loss:3.470475 +step:9959 train loss:3.369353 +step:9960 train loss:3.400101 +step:9961 train loss:3.409076 +step:9962 train loss:3.460430 +step:9963 train loss:3.349987 +step:9964 train loss:3.404254 +step:9965 train loss:3.410092 +step:9966 train loss:3.466323 +step:9967 train loss:3.379985 +step:9968 train loss:3.445220 +step:9969 train loss:3.357485 +step:9970 train loss:3.402909 +step:9971 train loss:3.442027 +step:9972 train loss:3.466405 +step:9973 train loss:3.441445 +step:9974 train loss:3.430167 +step:9975 train loss:3.398741 +step:9976 train loss:3.357835 +step:9977 train loss:3.407024 +step:9978 train loss:3.406669 +step:9979 train loss:3.417821 +step:9980 train loss:3.471786 +step:9981 train loss:3.381486 +step:9982 train loss:3.441509 +step:9983 train 
loss:3.361408 +step:9984 train loss:3.423268 +step:9985 train loss:3.369359 +step:9986 train loss:3.422101 +step:9987 train loss:3.464980 +step:9988 train loss:3.477841 +step:9989 train loss:3.370466 +step:9990 train loss:3.513119 +step:9991 train loss:3.362668 +step:9992 train loss:3.431866 +step:9993 train loss:3.424845 +step:9994 train loss:3.537915 +step:9995 train loss:3.477158 +step:9996 train loss:3.391445 +step:9997 train loss:3.434126 +step:9998 train loss:3.483756 +step:9999 train loss:3.450989 +step:10000 validation loss:3.354632 total_sharp:5.7686e-04 L1_sharp:1.7357e-03 L2_sharp:1.0531e-04 L3_sharp:1.9394e-04 L4_sharp:5.3370e-05 L5_sharp:6.3465e-05 L6_sharp:9.0229e-05 L7_sharp:1.0142e-04 L8_sharp:5.9539e-05 L9_sharp:4.3062e-05 L10_sharp:3.2315e-05 L11_sharp:2.9515e-05 L12_sharp:4.1053e-05 total_fnorm:4.2231e+00 total_l1_linf:3.4453e+04 total_spectral:4.2231e+00 L1_fnorm:1.0771e+00 L2_fnorm:9.7072e-01 L3_fnorm:1.0835e+00 L4_fnorm:1.1667e+00 L5_fnorm:1.1864e+00 L6_fnorm:1.1905e+00 L7_fnorm:1.1881e+00 L8_fnorm:1.1961e+00 L9_fnorm:1.1943e+00 L10_fnorm:1.2045e+00 L11_fnorm:1.1977e+00 L12_fnorm:1.2040e+00 L1_l1linf:8.8179e-01 L2_l1linf:7.8598e-01 L3_l1linf:8.1435e-01 L4_l1linf:8.4029e-01 L5_l1linf:8.2235e-01 L6_l1linf:8.1704e-01 L7_l1linf:8.0883e-01 L8_l1linf:8.0346e-01 L9_l1linf:8.0520e-01 L10_l1linf:8.0798e-01 L11_l1linf:7.9607e-01 L12_l1linf:8.3586e-01 L1_spectral:2.4099e-02 L2_spectral:2.4074e-02 L3_spectral:2.4091e-02 L4_spectral:2.4099e-02 L5_spectral:2.4130e-02 L6_spectral:2.4096e-02 L7_spectral:2.4092e-02 L8_spectral:2.4086e-02 L9_spectral:2.4088e-02 L10_spectral:2.4084e-02 L11_spectral:2.4089e-02 L12_spectral:2.4083e-02 ip_v_neg_g:5.9076e-03 cos_v_neg_g:1.0892e-03 v_norm:4.2231e+00 g_norm:1.2843e+00 hv_norm:9.0768e-01 cos_v_hv:2.6839e-03 hg_norm:5.3071e+01 cos_g_hg:5.3596e-01 v_par:1.6013e-03 v_perp:4.2231e+00 L1_cos_v_neg_g:2.5297e-03 L1_v_norm:1.0771e+00 L2_cos_v_neg_g:2.6320e-03 L2_v_norm:9.7072e-01 L3_cos_v_neg_g:2.2106e-03 L3_v_norm:1.0835e+00 L4_cos_v_neg_g:1.2918e-03 L4_v_norm:1.1667e+00 L5_cos_v_neg_g:4.7538e-04 L5_v_norm:1.1864e+00 L6_cos_v_neg_g:3.0530e-04 L6_v_norm:1.1905e+00 L7_cos_v_neg_g:2.6734e-03 L7_v_norm:1.1881e+00 L8_cos_v_neg_g:1.7005e-03 L8_v_norm:1.1961e+00 L9_cos_v_neg_g:1.6691e-03 L9_v_norm:1.1943e+00 L10_cos_v_neg_g:2.2118e-03 L10_v_norm:1.2045e+00 L11_cos_v_neg_g:1.9497e-03 L11_v_norm:1.1977e+00 L12_cos_v_neg_g:2.2867e-03 L12_v_norm:1.2040e+00 diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/config.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..4f292d8f316dba3aed118479403c217d78efef5a --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure/muon_lr_search", + "model": "d12", + "batch_size": 8, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 1.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "tensorcores": 1, + "device": "", + "compile": 1, 
+ "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.05, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 42, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500, + "sharpness_hvp_microbatches": 1 + }, + "run_uuid": "b440f935-a12f-441f-bea0-4bc7a308875b", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_1000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..e5f0084e07487674ae2eb820e5d668a0d6924114 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_1000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.33454704284668, + "total_l1_linf_norm": 77826.796875, + "total_spectral_norm": 10.33454704284668, + "layer_1_update_fnorm": 2.991765260696411, + "layer_1_max_l1_linf_norm": 2.1270835399627686, + "layer_1_max_spectral_norm": 0.06029663607478142, + "layer_2_update_fnorm": 2.7931439876556396, + "layer_2_max_l1_linf_norm": 1.971081256866455, + "layer_2_max_spectral_norm": 0.06021296605467796, + "layer_3_update_fnorm": 2.799121618270874, + "layer_3_max_l1_linf_norm": 1.9741045236587524, + "layer_3_max_spectral_norm": 0.0602300763130188, + "layer_4_update_fnorm": 2.883533000946045, + "layer_4_max_l1_linf_norm": 2.03788161277771, + "layer_4_max_spectral_norm": 0.06023121252655983, + "layer_5_update_fnorm": 2.968923568725586, + "layer_5_max_l1_linf_norm": 2.0416102409362793, + "layer_5_max_spectral_norm": 0.06020683795213699, + "layer_6_update_fnorm": 2.991908311843872, + "layer_6_max_l1_linf_norm": 2.049389362335205, + "layer_6_max_spectral_norm": 0.06020445004105568, + "layer_7_update_fnorm": 3.0107226371765137, + "layer_7_max_l1_linf_norm": 2.046029567718506, + "layer_7_max_spectral_norm": 0.06022712588310242, + "layer_8_update_fnorm": 3.012129306793213, + "layer_8_max_l1_linf_norm": 2.0374748706817627, + "layer_8_max_spectral_norm": 0.06022422015666962, + "layer_9_update_fnorm": 3.009828805923462, + "layer_9_max_l1_linf_norm": 2.02358341217041, + "layer_9_max_spectral_norm": 0.060223497450351715, + "layer_10_update_fnorm": 3.0112509727478027, + "layer_10_max_l1_linf_norm": 2.000934600830078, + "layer_10_max_spectral_norm": 0.06021516025066376, + "layer_11_update_fnorm": 3.0107336044311523, + "layer_11_max_l1_linf_norm": 2.00171160697937, + "layer_11_max_spectral_norm": 0.06022828072309494, + "layer_12_update_fnorm": 3.009204626083374, + "layer_12_max_l1_linf_norm": 2.002436399459839, + "layer_12_max_spectral_norm": 0.060219548642635345, + "total_sharpness": 0.00028611638117581606, + "ip_v_neg_g": 0.014625155366957188, + "cos_v_neg_g": 0.003784141270443797, + "v_norm": 10.33454704284668, + "g_norm": 0.3739742636680603, + "hv_norm": 0.11830062419176102, + "cos_v_hv": 0.02499465085566044, + "hg_norm": 0.24300947785377502, + "cos_g_hg": 0.3013781011104584, + "v_parallel_norm": 0.0017515498911961913, + "v_perp_norm": 10.33454704284668, + "layer_1_v_norm": 2.991765260696411, + "layer_1_cos_v_neg_g": 0.009414790198206902, + "layer_2_v_norm": 2.7931439876556396, + "layer_2_cos_v_neg_g": 0.008933525532484055, + "layer_3_v_norm": 2.799121856689453, + "layer_3_cos_v_neg_g": 0.011464258655905724, + "layer_4_v_norm": 2.883533000946045, + "layer_4_cos_v_neg_g": 0.00922361295670271, 
+ "layer_5_v_norm": 2.968923568725586, + "layer_5_cos_v_neg_g": 0.007222606800496578, + "layer_6_v_norm": 2.991908311843872, + "layer_6_cos_v_neg_g": 0.008027666248381138, + "layer_7_v_norm": 3.0107226371765137, + "layer_7_cos_v_neg_g": 0.007516634184867144, + "layer_8_v_norm": 3.012129306793213, + "layer_8_cos_v_neg_g": 0.006624249275773764, + "layer_9_v_norm": 3.009828805923462, + "layer_9_cos_v_neg_g": 0.007250070106238127, + "layer_10_v_norm": 3.0112509727478027, + "layer_10_cos_v_neg_g": 0.006866632029414177, + "layer_11_v_norm": 3.0107338428497314, + "layer_11_cos_v_neg_g": 0.006567369215190411, + "layer_12_v_norm": 3.009204626083374, + "layer_12_cos_v_neg_g": 0.0059259128756821156, + "layer_1_sharpness": 0.00010683138680178672, + "layer_2_sharpness": 2.7752732421504334e-05, + "layer_3_sharpness": 5.792267620563507e-05, + "layer_4_sharpness": 3.19515893352218e-05, + "layer_5_sharpness": 3.066187127842568e-05, + "layer_6_sharpness": 4.905784589936957e-05, + "layer_7_sharpness": 5.790920477011241e-05, + "layer_8_sharpness": 5.020068056182936e-05, + "layer_9_sharpness": 4.00006611016579e-05, + "layer_10_sharpness": 3.16172736347653e-05, + "layer_11_sharpness": 2.9737389922956936e-05, + "layer_12_sharpness": 2.801561822707299e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_10000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..b673a3103d22fd7ad18fe5cf9b4a52658570b177 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_10000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.346810340881348, + "total_l1_linf_norm": 77647.859375, + "total_spectral_norm": 10.346810340881348, + "layer_1_update_fnorm": 2.9230997562408447, + "layer_1_max_l1_linf_norm": 1.992537498474121, + "layer_1_max_spectral_norm": 0.0602024607360363, + "layer_2_update_fnorm": 2.779292345046997, + "layer_2_max_l1_linf_norm": 1.9572007656097412, + "layer_2_max_spectral_norm": 0.060224078595638275, + "layer_3_update_fnorm": 2.86344838142395, + "layer_3_max_l1_linf_norm": 2.043074607849121, + "layer_3_max_spectral_norm": 0.060232944786548615, + "layer_4_update_fnorm": 2.963432550430298, + "layer_4_max_l1_linf_norm": 2.070498466491699, + "layer_4_max_spectral_norm": 0.06028500944375992, + "layer_5_update_fnorm": 2.9789578914642334, + "layer_5_max_l1_linf_norm": 2.021000862121582, + "layer_5_max_spectral_norm": 0.060218825936317444, + "layer_6_update_fnorm": 2.998351573944092, + "layer_6_max_l1_linf_norm": 2.0227432250976562, + "layer_6_max_spectral_norm": 0.06020965799689293, + "layer_7_update_fnorm": 3.002851963043213, + "layer_7_max_l1_linf_norm": 2.0227103233337402, + "layer_7_max_spectral_norm": 0.06024203076958656, + "layer_8_update_fnorm": 3.002993106842041, + "layer_8_max_l1_linf_norm": 2.0106143951416016, + "layer_8_max_spectral_norm": 0.060217805206775665, + "layer_9_update_fnorm": 2.9937551021575928, + "layer_9_max_l1_linf_norm": 1.9767578840255737, + "layer_9_max_spectral_norm": 0.06024577096104622, + "layer_10_update_fnorm": 3.016070604324341, + "layer_10_max_l1_linf_norm": 2.0036799907684326, + "layer_10_max_spectral_norm": 0.060209278017282486, + "layer_11_update_fnorm": 2.998176097869873, + "layer_11_max_l1_linf_norm": 1.9682157039642334, + "layer_11_max_spectral_norm": 0.06022341176867485, + "layer_12_update_fnorm": 3.0147340297698975, + 
"layer_12_max_l1_linf_norm": 2.147092819213867, + "layer_12_max_spectral_norm": 0.060217101126909256, + "total_sharpness": 5.725035589421168e-05, + "ip_v_neg_g": 0.00403311662375927, + "cos_v_neg_g": 0.0011146728647872806, + "v_norm": 10.346810340881348, + "g_norm": 0.3496929407119751, + "hv_norm": 0.06812218576669693, + "cos_v_hv": 0.008695530705153942, + "hg_norm": 1.3023078441619873, + "cos_g_hg": 0.3646574914455414, + "v_parallel_norm": 0.0008931639604270458, + "v_perp_norm": 10.346810340881348, + "layer_1_v_norm": 2.9230997562408447, + "layer_1_cos_v_neg_g": 0.00484665809199214, + "layer_2_v_norm": 2.779292345046997, + "layer_2_cos_v_neg_g": 0.000999479554593563, + "layer_3_v_norm": 2.86344838142395, + "layer_3_cos_v_neg_g": 0.0011707786470651627, + "layer_4_v_norm": 2.963432550430298, + "layer_4_cos_v_neg_g": 0.002834196202456951, + "layer_5_v_norm": 2.9789578914642334, + "layer_5_cos_v_neg_g": 0.0027626287192106247, + "layer_6_v_norm": 2.998351573944092, + "layer_6_cos_v_neg_g": 0.002167647937312722, + "layer_7_v_norm": 3.002851963043213, + "layer_7_cos_v_neg_g": 0.0031827203929424286, + "layer_8_v_norm": 3.002992868423462, + "layer_8_cos_v_neg_g": 0.0037182243540883064, + "layer_9_v_norm": 2.9937551021575928, + "layer_9_cos_v_neg_g": 0.004622526932507753, + "layer_10_v_norm": 3.016070604324341, + "layer_10_cos_v_neg_g": 0.004014207515865564, + "layer_11_v_norm": 2.998175859451294, + "layer_11_cos_v_neg_g": 0.0039062085561454296, + "layer_12_v_norm": 3.0147340297698975, + "layer_12_cos_v_neg_g": 0.005831877700984478, + "layer_1_sharpness": 6.979742465773597e-05, + "layer_2_sharpness": 8.89570583240129e-06, + "layer_3_sharpness": 7.6063056440034416e-06, + "layer_4_sharpness": 4.878277650277596e-06, + "layer_5_sharpness": 6.741934612364275e-06, + "layer_6_sharpness": 7.5921871030004695e-06, + "layer_7_sharpness": 8.010244528122712e-06, + "layer_8_sharpness": 7.724802344455384e-06, + "layer_9_sharpness": 8.898628038878087e-06, + "layer_10_sharpness": 4.7485350478382315e-06, + "layer_11_sharpness": 4.335797257226659e-06, + "layer_12_sharpness": 1.0276074135617819e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_1500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..46798f2979c8ec57ecb6d827e3e8e9ea03f838a6 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_1500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.336840629577637, + "total_l1_linf_norm": 77810.125, + "total_spectral_norm": 10.336838722229004, + "layer_1_update_fnorm": 2.9505341053009033, + "layer_1_max_l1_linf_norm": 2.086972236633301, + "layer_1_max_spectral_norm": 0.060280825942754745, + "layer_2_update_fnorm": 2.7869203090667725, + "layer_2_max_l1_linf_norm": 1.965059518814087, + "layer_2_max_spectral_norm": 0.06020304560661316, + "layer_3_update_fnorm": 2.7940146923065186, + "layer_3_max_l1_linf_norm": 1.9853777885437012, + "layer_3_max_spectral_norm": 0.06026037409901619, + "layer_4_update_fnorm": 2.90000581741333, + "layer_4_max_l1_linf_norm": 2.0696423053741455, + "layer_4_max_spectral_norm": 0.060222044587135315, + "layer_5_update_fnorm": 2.9793546199798584, + "layer_5_max_l1_linf_norm": 2.0741395950317383, + "layer_5_max_spectral_norm": 0.06024770066142082, + "layer_6_update_fnorm": 2.993509531021118, + "layer_6_max_l1_linf_norm": 2.049083709716797, + 
"layer_6_max_spectral_norm": 0.06022505834698677, + "layer_7_update_fnorm": 3.0145201683044434, + "layer_7_max_l1_linf_norm": 2.0410900115966797, + "layer_7_max_spectral_norm": 0.0602291077375412, + "layer_8_update_fnorm": 3.0160598754882812, + "layer_8_max_l1_linf_norm": 2.027785301208496, + "layer_8_max_spectral_norm": 0.06022581458091736, + "layer_9_update_fnorm": 3.0164833068847656, + "layer_9_max_l1_linf_norm": 2.0250308513641357, + "layer_9_max_spectral_norm": 0.06021857261657715, + "layer_10_update_fnorm": 3.01774525642395, + "layer_10_max_l1_linf_norm": 2.016065835952759, + "layer_10_max_spectral_norm": 0.06021498888731003, + "layer_11_update_fnorm": 3.0129544734954834, + "layer_11_max_l1_linf_norm": 2.002223014831543, + "layer_11_max_spectral_norm": 0.06022496521472931, + "layer_12_update_fnorm": 3.0152738094329834, + "layer_12_max_l1_linf_norm": 2.019604444503784, + "layer_12_max_spectral_norm": 0.06022047623991966, + "total_sharpness": 0.0001973914768313989, + "ip_v_neg_g": 0.009979323484003544, + "cos_v_neg_g": 0.0025930432602763176, + "v_norm": 10.336840629577637, + "g_norm": 0.37230899930000305, + "hv_norm": 0.10508916527032852, + "cos_v_hv": 0.019415931776165962, + "hg_norm": 0.37476322054862976, + "cos_g_hg": 0.3260616064071655, + "v_parallel_norm": 0.0016789310611784458, + "v_perp_norm": 10.336840629577637, + "layer_1_v_norm": 2.9505341053009033, + "layer_1_cos_v_neg_g": 0.01053581852465868, + "layer_2_v_norm": 2.7869203090667725, + "layer_2_cos_v_neg_g": 0.004725215956568718, + "layer_3_v_norm": 2.7940146923065186, + "layer_3_cos_v_neg_g": 0.005605566315352917, + "layer_4_v_norm": 2.90000581741333, + "layer_4_cos_v_neg_g": 0.0045591313391923904, + "layer_5_v_norm": 2.9793546199798584, + "layer_5_cos_v_neg_g": 0.004063854459673166, + "layer_6_v_norm": 2.993509292602539, + "layer_6_cos_v_neg_g": 0.004910618532449007, + "layer_7_v_norm": 3.0145201683044434, + "layer_7_cos_v_neg_g": 0.007023794110864401, + "layer_8_v_norm": 3.0160598754882812, + "layer_8_cos_v_neg_g": 0.006327659357339144, + "layer_9_v_norm": 3.0164833068847656, + "layer_9_cos_v_neg_g": 0.0067749787122011185, + "layer_10_v_norm": 3.01774525642395, + "layer_10_cos_v_neg_g": 0.00601841090247035, + "layer_11_v_norm": 3.0129547119140625, + "layer_11_cos_v_neg_g": 0.004923614207655191, + "layer_12_v_norm": 3.0152738094329834, + "layer_12_cos_v_neg_g": 0.005051674786955118, + "layer_1_sharpness": 0.00015940252342261374, + "layer_2_sharpness": 2.2678079403704032e-05, + "layer_3_sharpness": 3.752677730517462e-05, + "layer_4_sharpness": 1.67786765814526e-05, + "layer_5_sharpness": 1.8604276192490943e-05, + "layer_6_sharpness": 2.886485162889585e-05, + "layer_7_sharpness": 4.090638685738668e-05, + "layer_8_sharpness": 3.527753142407164e-05, + "layer_9_sharpness": 2.9681259547942318e-05, + "layer_10_sharpness": 2.0980805857107043e-05, + "layer_11_sharpness": 1.6903904906939715e-05, + "layer_12_sharpness": 4.2966010369127616e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_2000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..4f21df6072b4eacd537a2764f0f86e1da484845e --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_2000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.324209213256836, + "total_l1_linf_norm": 77731.6875, + "total_spectral_norm": 
10.324210166931152, + "layer_1_update_fnorm": 2.9348104000091553, + "layer_1_max_l1_linf_norm": 2.093435764312744, + "layer_1_max_spectral_norm": 0.06028572842478752, + "layer_2_update_fnorm": 2.6956217288970947, + "layer_2_max_l1_linf_norm": 1.9680798053741455, + "layer_2_max_spectral_norm": 0.06019972264766693, + "layer_3_update_fnorm": 2.7832369804382324, + "layer_3_max_l1_linf_norm": 1.9917259216308594, + "layer_3_max_spectral_norm": 0.06028148904442787, + "layer_4_update_fnorm": 2.9194133281707764, + "layer_4_max_l1_linf_norm": 2.080990791320801, + "layer_4_max_spectral_norm": 0.06023050472140312, + "layer_5_update_fnorm": 2.9929168224334717, + "layer_5_max_l1_linf_norm": 2.0660195350646973, + "layer_5_max_spectral_norm": 0.06029122695326805, + "layer_6_update_fnorm": 2.9998481273651123, + "layer_6_max_l1_linf_norm": 2.0628161430358887, + "layer_6_max_spectral_norm": 0.06022323668003082, + "layer_7_update_fnorm": 3.0168001651763916, + "layer_7_max_l1_linf_norm": 2.06007719039917, + "layer_7_max_spectral_norm": 0.060225725173950195, + "layer_8_update_fnorm": 3.021390676498413, + "layer_8_max_l1_linf_norm": 2.0551483631134033, + "layer_8_max_spectral_norm": 0.060241345316171646, + "layer_9_update_fnorm": 3.019625663757324, + "layer_9_max_l1_linf_norm": 2.022749662399292, + "layer_9_max_spectral_norm": 0.060232870280742645, + "layer_10_update_fnorm": 3.0227344036102295, + "layer_10_max_l1_linf_norm": 2.0241665840148926, + "layer_10_max_spectral_norm": 0.06025122478604317, + "layer_11_update_fnorm": 3.016996383666992, + "layer_11_max_l1_linf_norm": 1.998108148574829, + "layer_11_max_spectral_norm": 0.06023428589105606, + "layer_12_update_fnorm": 3.0189716815948486, + "layer_12_max_l1_linf_norm": 2.0346922874450684, + "layer_12_max_spectral_norm": 0.06023367866873741, + "total_sharpness": 0.000271946337306872, + "ip_v_neg_g": 0.012263976968824863, + "cos_v_neg_g": 0.003085119416937232, + "v_norm": 10.324209213256836, + "g_norm": 0.38503706455230713, + "hv_norm": 0.21487198770046234, + "cos_v_hv": 0.013066527433693409, + "hg_norm": 0.4690684676170349, + "cos_g_hg": 0.35107865929603577, + "v_parallel_norm": 0.0017972083296626806, + "v_perp_norm": 10.324209213256836, + "layer_1_v_norm": 2.9348104000091553, + "layer_1_cos_v_neg_g": 0.015147095546126366, + "layer_2_v_norm": 2.6956217288970947, + "layer_2_cos_v_neg_g": 0.013097784481942654, + "layer_3_v_norm": 2.7832369804382324, + "layer_3_cos_v_neg_g": 0.007600827608257532, + "layer_4_v_norm": 2.9194133281707764, + "layer_4_cos_v_neg_g": 0.004926934838294983, + "layer_5_v_norm": 2.9929168224334717, + "layer_5_cos_v_neg_g": 0.005827128887176514, + "layer_6_v_norm": 2.9998481273651123, + "layer_6_cos_v_neg_g": 0.005495468154549599, + "layer_7_v_norm": 3.0168001651763916, + "layer_7_cos_v_neg_g": 0.005837690085172653, + "layer_8_v_norm": 3.021390438079834, + "layer_8_cos_v_neg_g": 0.006356037221848965, + "layer_9_v_norm": 3.019625663757324, + "layer_9_cos_v_neg_g": 0.007357536815106869, + "layer_10_v_norm": 3.0227344036102295, + "layer_10_cos_v_neg_g": 0.006330274045467377, + "layer_11_v_norm": 3.016996383666992, + "layer_11_cos_v_neg_g": 0.0048638381995260715, + "layer_12_v_norm": 3.0189716815948486, + "layer_12_cos_v_neg_g": 0.004512499552220106, + "layer_1_sharpness": 0.0005921559641137719, + "layer_2_sharpness": 0.00013289992057252675, + "layer_3_sharpness": 4.884836380369961e-05, + "layer_4_sharpness": 1.3425963516056072e-05, + "layer_5_sharpness": 1.6602019968559034e-05, + "layer_6_sharpness": 2.060793667624239e-05, + "layer_7_sharpness": 
2.914766446338035e-05, + "layer_8_sharpness": 2.563356247264892e-05, + "layer_9_sharpness": 2.4619157557026483e-05, + "layer_10_sharpness": 1.81620052899234e-05, + "layer_11_sharpness": 1.501112274127081e-05, + "layer_12_sharpness": 5.051535117672756e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_2500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..1dd6516047b5f936cb4c89215d9ea08660a343eb --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_2500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.354785919189453, + "total_l1_linf_norm": 77853.6875, + "total_spectral_norm": 10.35478687286377, + "layer_1_update_fnorm": 2.9365081787109375, + "layer_1_max_l1_linf_norm": 2.0452487468719482, + "layer_1_max_spectral_norm": 0.06021135672926903, + "layer_2_update_fnorm": 2.7553865909576416, + "layer_2_max_l1_linf_norm": 1.9748003482818604, + "layer_2_max_spectral_norm": 0.060222018510103226, + "layer_3_update_fnorm": 2.8192670345306396, + "layer_3_max_l1_linf_norm": 2.0076427459716797, + "layer_3_max_spectral_norm": 0.060270074754953384, + "layer_4_update_fnorm": 2.925912857055664, + "layer_4_max_l1_linf_norm": 2.065822124481201, + "layer_4_max_spectral_norm": 0.060235027223825455, + "layer_5_update_fnorm": 2.9948008060455322, + "layer_5_max_l1_linf_norm": 2.077671527862549, + "layer_5_max_spectral_norm": 0.06023312732577324, + "layer_6_update_fnorm": 3.006040573120117, + "layer_6_max_l1_linf_norm": 2.0530035495758057, + "layer_6_max_spectral_norm": 0.06023978814482689, + "layer_7_update_fnorm": 3.021069049835205, + "layer_7_max_l1_linf_norm": 2.044557809829712, + "layer_7_max_spectral_norm": 0.06023208051919937, + "layer_8_update_fnorm": 3.024500608444214, + "layer_8_max_l1_linf_norm": 2.0344958305358887, + "layer_8_max_spectral_norm": 0.0602128766477108, + "layer_9_update_fnorm": 3.021165132522583, + "layer_9_max_l1_linf_norm": 2.019634962081909, + "layer_9_max_spectral_norm": 0.0602385476231575, + "layer_10_update_fnorm": 3.0226168632507324, + "layer_10_max_l1_linf_norm": 2.0312414169311523, + "layer_10_max_spectral_norm": 0.06020618602633476, + "layer_11_update_fnorm": 3.0143914222717285, + "layer_11_max_l1_linf_norm": 2.003227949142456, + "layer_11_max_spectral_norm": 0.060223035514354706, + "layer_12_update_fnorm": 3.018547773361206, + "layer_12_max_l1_linf_norm": 1.9885761737823486, + "layer_12_max_spectral_norm": 0.060220763087272644, + "total_sharpness": 0.00014406190894078463, + "ip_v_neg_g": 0.006343775428831577, + "cos_v_neg_g": 0.0017089518951252103, + "v_norm": 10.354785919189453, + "g_norm": 0.35848984122276306, + "hv_norm": 0.09328746050596237, + "cos_v_hv": 0.015990683808922768, + "hg_norm": 0.34934914112091064, + "cos_g_hg": 0.35222122073173523, + "v_parallel_norm": 0.0012048883363604546, + "v_perp_norm": 10.354785919189453, + "layer_1_v_norm": 2.9365081787109375, + "layer_1_cos_v_neg_g": 0.007008650340139866, + "layer_2_v_norm": 2.7553865909576416, + "layer_2_cos_v_neg_g": 0.005208909045904875, + "layer_3_v_norm": 2.8192672729492188, + "layer_3_cos_v_neg_g": 0.003182315267622471, + "layer_4_v_norm": 2.925912857055664, + "layer_4_cos_v_neg_g": 0.0028978607151657343, + "layer_5_v_norm": 2.9948008060455322, + "layer_5_cos_v_neg_g": 0.002701493911445141, + "layer_6_v_norm": 3.006040573120117, + "layer_6_cos_v_neg_g": 
0.0041693695820868015, + "layer_7_v_norm": 3.021069049835205, + "layer_7_cos_v_neg_g": 0.005516762379556894, + "layer_8_v_norm": 3.024500608444214, + "layer_8_cos_v_neg_g": 0.006305410526692867, + "layer_9_v_norm": 3.021165132522583, + "layer_9_cos_v_neg_g": 0.005803355015814304, + "layer_10_v_norm": 3.0226168632507324, + "layer_10_cos_v_neg_g": 0.004704901482909918, + "layer_11_v_norm": 3.0143914222717285, + "layer_11_cos_v_neg_g": 0.003226835746318102, + "layer_12_v_norm": 3.018547773361206, + "layer_12_cos_v_neg_g": 0.004131023772060871, + "layer_1_sharpness": 0.00014812179142609239, + "layer_2_sharpness": 2.5966859539039433e-05, + "layer_3_sharpness": 2.452705302857794e-05, + "layer_4_sharpness": 1.1058227755711414e-05, + "layer_5_sharpness": 1.2361238077573944e-05, + "layer_6_sharpness": 1.9809051082120277e-05, + "layer_7_sharpness": 2.7056301405536942e-05, + "layer_8_sharpness": 2.2369897124008276e-05, + "layer_9_sharpness": 2.116275209118612e-05, + "layer_10_sharpness": 1.3352919268072583e-05, + "layer_11_sharpness": 1.2220370990689844e-05, + "layer_12_sharpness": 1.5956946299411356e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_3000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..78ac18d4ded83f9869af17fca29df8a1723f7b39 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_3000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.316059112548828, + "total_l1_linf_norm": 77550.0078125, + "total_spectral_norm": 10.316059112548828, + "layer_1_update_fnorm": 2.9205522537231445, + "layer_1_max_l1_linf_norm": 2.103289842605591, + "layer_1_max_spectral_norm": 0.060296062380075455, + "layer_2_update_fnorm": 2.710089683532715, + "layer_2_max_l1_linf_norm": 1.9594826698303223, + "layer_2_max_spectral_norm": 0.06020960584282875, + "layer_3_update_fnorm": 2.801417589187622, + "layer_3_max_l1_linf_norm": 1.9913270473480225, + "layer_3_max_spectral_norm": 0.06027764081954956, + "layer_4_update_fnorm": 2.9223709106445312, + "layer_4_max_l1_linf_norm": 2.0764245986938477, + "layer_4_max_spectral_norm": 0.06023840233683586, + "layer_5_update_fnorm": 2.9903128147125244, + "layer_5_max_l1_linf_norm": 2.0521652698516846, + "layer_5_max_spectral_norm": 0.0602271594107151, + "layer_6_update_fnorm": 2.997741222381592, + "layer_6_max_l1_linf_norm": 2.0327036380767822, + "layer_6_max_spectral_norm": 0.06021757423877716, + "layer_7_update_fnorm": 3.011845111846924, + "layer_7_max_l1_linf_norm": 2.026200532913208, + "layer_7_max_spectral_norm": 0.060224298387765884, + "layer_8_update_fnorm": 3.012334108352661, + "layer_8_max_l1_linf_norm": 2.0192484855651855, + "layer_8_max_spectral_norm": 0.06022465229034424, + "layer_9_update_fnorm": 3.011300563812256, + "layer_9_max_l1_linf_norm": 2.0145866870880127, + "layer_9_max_spectral_norm": 0.06022633612155914, + "layer_10_update_fnorm": 3.0204689502716064, + "layer_10_max_l1_linf_norm": 2.0150203704833984, + "layer_10_max_spectral_norm": 0.060216035693883896, + "layer_11_update_fnorm": 3.007394790649414, + "layer_11_max_l1_linf_norm": 1.9882709980010986, + "layer_11_max_spectral_norm": 0.0602201409637928, + "layer_12_update_fnorm": 3.015360116958618, + "layer_12_max_l1_linf_norm": 1.9960674047470093, + "layer_12_max_spectral_norm": 0.06022639945149422, + "total_sharpness": 0.00013987894635647535, + 
"ip_v_neg_g": 0.009146853350102901, + "cos_v_neg_g": 0.0025418479926884174, + "v_norm": 10.316059112548828, + "g_norm": 0.3488255739212036, + "hv_norm": 0.11237900704145432, + "cos_v_hv": 0.012840472161769867, + "hg_norm": 0.3917475938796997, + "cos_g_hg": 0.30953970551490784, + "v_parallel_norm": 0.0013547216076403856, + "v_perp_norm": 10.316059112548828, + "layer_1_v_norm": 2.9205522537231445, + "layer_1_cos_v_neg_g": 0.01296020857989788, + "layer_2_v_norm": 2.710089683532715, + "layer_2_cos_v_neg_g": 0.009511523880064487, + "layer_3_v_norm": 2.801417350769043, + "layer_3_cos_v_neg_g": 0.0057372963055968285, + "layer_4_v_norm": 2.9223709106445312, + "layer_4_cos_v_neg_g": 0.00491676339879632, + "layer_5_v_norm": 2.9903128147125244, + "layer_5_cos_v_neg_g": 0.005280393175780773, + "layer_6_v_norm": 2.997741222381592, + "layer_6_cos_v_neg_g": 0.006205710116773844, + "layer_7_v_norm": 3.011845111846924, + "layer_7_cos_v_neg_g": 0.0069270809181034565, + "layer_8_v_norm": 3.0123343467712402, + "layer_8_cos_v_neg_g": 0.007595643401145935, + "layer_9_v_norm": 3.011300563812256, + "layer_9_cos_v_neg_g": 0.00769050745293498, + "layer_10_v_norm": 3.0204689502716064, + "layer_10_cos_v_neg_g": 0.00627119792625308, + "layer_11_v_norm": 3.007395029067993, + "layer_11_cos_v_neg_g": 0.0051279449835419655, + "layer_12_v_norm": 3.015360116958618, + "layer_12_cos_v_neg_g": 0.00410967692732811, + "layer_1_sharpness": 0.00020782275532837957, + "layer_2_sharpness": 3.763059066841379e-05, + "layer_3_sharpness": 2.6370173145551234e-05, + "layer_4_sharpness": 1.038018581311917e-05, + "layer_5_sharpness": 1.1468065167719033e-05, + "layer_6_sharpness": 1.6889540347619914e-05, + "layer_7_sharpness": 2.0513109120656736e-05, + "layer_8_sharpness": 2.169553772546351e-05, + "layer_9_sharpness": 1.7582933651283383e-05, + "layer_10_sharpness": 1.0545509212533943e-05, + "layer_11_sharpness": 9.845436579780653e-06, + "layer_12_sharpness": 1.0833421583811287e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_3500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..9dded3d1e75f62721d582e319c03e151d485b504 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_3500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.328950881958008, + "total_l1_linf_norm": 77642.8671875, + "total_spectral_norm": 10.328951835632324, + "layer_1_update_fnorm": 2.9148623943328857, + "layer_1_max_l1_linf_norm": 2.0523452758789062, + "layer_1_max_spectral_norm": 0.06021912395954132, + "layer_2_update_fnorm": 2.7307627201080322, + "layer_2_max_l1_linf_norm": 1.958071231842041, + "layer_2_max_spectral_norm": 0.06021112576127052, + "layer_3_update_fnorm": 2.826514482498169, + "layer_3_max_l1_linf_norm": 2.0073535442352295, + "layer_3_max_spectral_norm": 0.06024545803666115, + "layer_4_update_fnorm": 2.906968355178833, + "layer_4_max_l1_linf_norm": 2.069676160812378, + "layer_4_max_spectral_norm": 0.060215163975954056, + "layer_5_update_fnorm": 2.9907376766204834, + "layer_5_max_l1_linf_norm": 2.0385217666625977, + "layer_5_max_spectral_norm": 0.06022817641496658, + "layer_6_update_fnorm": 2.995734691619873, + "layer_6_max_l1_linf_norm": 2.04512095451355, + "layer_6_max_spectral_norm": 0.06021915376186371, + "layer_7_update_fnorm": 3.0172278881073, + "layer_7_max_l1_linf_norm": 2.0463128089904785, + 
"layer_7_max_spectral_norm": 0.06021673232316971, + "layer_8_update_fnorm": 3.019176483154297, + "layer_8_max_l1_linf_norm": 2.029005527496338, + "layer_8_max_spectral_norm": 0.06020427495241165, + "layer_9_update_fnorm": 3.013409376144409, + "layer_9_max_l1_linf_norm": 2.0165228843688965, + "layer_9_max_spectral_norm": 0.060233958065509796, + "layer_10_update_fnorm": 3.0222856998443604, + "layer_10_max_l1_linf_norm": 2.042819023132324, + "layer_10_max_spectral_norm": 0.0602218322455883, + "layer_11_update_fnorm": 3.0157341957092285, + "layer_11_max_l1_linf_norm": 2.007230281829834, + "layer_11_max_spectral_norm": 0.06020619347691536, + "layer_12_update_fnorm": 3.0140011310577393, + "layer_12_max_l1_linf_norm": 1.9818906784057617, + "layer_12_max_spectral_norm": 0.06022857874631882, + "total_sharpness": 0.00010539984941715375, + "ip_v_neg_g": 0.0075712357647717, + "cos_v_neg_g": 0.001943419105373323, + "v_norm": 10.328950881958008, + "g_norm": 0.37717604637145996, + "hv_norm": 0.08246147632598877, + "cos_v_hv": 0.013202164322137833, + "hg_norm": 0.8169959783554077, + "cos_g_hg": 0.3994615077972412, + "v_parallel_norm": 0.00045176592539064586, + "v_perp_norm": 10.328950881958008, + "layer_1_v_norm": 2.9148623943328857, + "layer_1_cos_v_neg_g": 0.009312179870903492, + "layer_2_v_norm": 2.7307627201080322, + "layer_2_cos_v_neg_g": 0.006852539721876383, + "layer_3_v_norm": 2.826514720916748, + "layer_3_cos_v_neg_g": 0.005901413969695568, + "layer_4_v_norm": 2.906968355178833, + "layer_4_cos_v_neg_g": 0.005051041953265667, + "layer_5_v_norm": 2.9907376766204834, + "layer_5_cos_v_neg_g": 0.004804598167538643, + "layer_6_v_norm": 2.995734453201294, + "layer_6_cos_v_neg_g": 0.00528152147307992, + "layer_7_v_norm": 3.0172278881073, + "layer_7_cos_v_neg_g": 0.006393235642462969, + "layer_8_v_norm": 3.019176483154297, + "layer_8_cos_v_neg_g": 0.006418499629944563, + "layer_9_v_norm": 3.013409376144409, + "layer_9_cos_v_neg_g": 0.005923712160438299, + "layer_10_v_norm": 3.0222856998443604, + "layer_10_cos_v_neg_g": 0.005053692497313023, + "layer_11_v_norm": 3.0157341957092285, + "layer_11_cos_v_neg_g": 0.004518568515777588, + "layer_12_v_norm": 3.0140011310577393, + "layer_12_cos_v_neg_g": 0.0026503673288971186, + "layer_1_sharpness": 0.00010219166142633185, + "layer_2_sharpness": 2.3316446458920836e-05, + "layer_3_sharpness": 1.6436228179372847e-05, + "layer_4_sharpness": 9.747091098688543e-06, + "layer_5_sharpness": 1.0850700164155569e-05, + "layer_6_sharpness": 1.494382831879193e-05, + "layer_7_sharpness": 1.9010725736734457e-05, + "layer_8_sharpness": 1.7850003132480197e-05, + "layer_9_sharpness": 1.781273022061214e-05, + "layer_10_sharpness": 1.0692999239836354e-05, + "layer_11_sharpness": 9.99416079139337e-06, + "layer_12_sharpness": 1.1336802344885655e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_4000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..8e747e745b90a4596b17a18edbade7f7cb46169d --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_4000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.348401069641113, + "total_l1_linf_norm": 77788.6171875, + "total_spectral_norm": 10.348401069641113, + "layer_1_update_fnorm": 2.934330701828003, + "layer_1_max_l1_linf_norm": 2.0313079357147217, + "layer_1_max_spectral_norm": 
0.060223255306482315, + "layer_2_update_fnorm": 2.733582019805908, + "layer_2_max_l1_linf_norm": 1.9621913433074951, + "layer_2_max_spectral_norm": 0.06021852418780327, + "layer_3_update_fnorm": 2.8245315551757812, + "layer_3_max_l1_linf_norm": 2.032360553741455, + "layer_3_max_spectral_norm": 0.060236722230911255, + "layer_4_update_fnorm": 2.939460515975952, + "layer_4_max_l1_linf_norm": 2.07344651222229, + "layer_4_max_spectral_norm": 0.06021882966160774, + "layer_5_update_fnorm": 2.994554281234741, + "layer_5_max_l1_linf_norm": 2.052720546722412, + "layer_5_max_spectral_norm": 0.0602290965616703, + "layer_6_update_fnorm": 3.00321364402771, + "layer_6_max_l1_linf_norm": 2.055664539337158, + "layer_6_max_spectral_norm": 0.060219645500183105, + "layer_7_update_fnorm": 3.0194220542907715, + "layer_7_max_l1_linf_norm": 2.062178134918213, + "layer_7_max_spectral_norm": 0.06022050604224205, + "layer_8_update_fnorm": 3.019317626953125, + "layer_8_max_l1_linf_norm": 2.0318636894226074, + "layer_8_max_spectral_norm": 0.0602235421538353, + "layer_9_update_fnorm": 3.0166354179382324, + "layer_9_max_l1_linf_norm": 2.0123229026794434, + "layer_9_max_spectral_norm": 0.06021162495017052, + "layer_10_update_fnorm": 3.0225396156311035, + "layer_10_max_l1_linf_norm": 2.0116629600524902, + "layer_10_max_spectral_norm": 0.06021367013454437, + "layer_11_update_fnorm": 3.01132869720459, + "layer_11_max_l1_linf_norm": 2.00264310836792, + "layer_11_max_spectral_norm": 0.060222383588552475, + "layer_12_update_fnorm": 3.016014575958252, + "layer_12_max_l1_linf_norm": 1.9990850687026978, + "layer_12_max_spectral_norm": 0.0602303147315979, + "total_sharpness": 9.827216854318976e-05, + "ip_v_neg_g": 0.00441968347877264, + "cos_v_neg_g": 0.0012813159264624119, + "v_norm": 10.348401069641113, + "g_norm": 0.3333202302455902, + "hv_norm": 0.06698133051395416, + "cos_v_hv": 0.015182734467089176, + "hg_norm": 0.4820590913295746, + "cos_g_hg": 0.3214801549911499, + "v_parallel_norm": 0.0013198561500757933, + "v_perp_norm": 10.348401069641113, + "layer_1_v_norm": 2.934330701828003, + "layer_1_cos_v_neg_g": 0.0014626733027398586, + "layer_2_v_norm": 2.733582019805908, + "layer_2_cos_v_neg_g": 0.0005037454538978636, + "layer_3_v_norm": 2.8245315551757812, + "layer_3_cos_v_neg_g": 0.003083571558818221, + "layer_4_v_norm": 2.939460515975952, + "layer_4_cos_v_neg_g": 0.0006805075099691749, + "layer_5_v_norm": 2.994554281234741, + "layer_5_cos_v_neg_g": 0.002501394832506776, + "layer_6_v_norm": 3.00321364402771, + "layer_6_cos_v_neg_g": 0.0035839455667883158, + "layer_7_v_norm": 3.0194220542907715, + "layer_7_cos_v_neg_g": 0.004809858277440071, + "layer_8_v_norm": 3.019317626953125, + "layer_8_cos_v_neg_g": 0.006331359967589378, + "layer_9_v_norm": 3.0166354179382324, + "layer_9_cos_v_neg_g": 0.006120920646935701, + "layer_10_v_norm": 3.0225396156311035, + "layer_10_cos_v_neg_g": 0.005280105397105217, + "layer_11_v_norm": 3.01132869720459, + "layer_11_cos_v_neg_g": 0.004124554339796305, + "layer_12_v_norm": 3.016014575958252, + "layer_12_cos_v_neg_g": 0.004030234180390835, + "layer_1_sharpness": 3.7277077353792265e-05, + "layer_2_sharpness": 2.0280200260458514e-05, + "layer_3_sharpness": 2.2439637177740224e-05, + "layer_4_sharpness": 1.072205304808449e-05, + "layer_5_sharpness": 1.1622715646808501e-05, + "layer_6_sharpness": 1.4615761756431311e-05, + "layer_7_sharpness": 1.883139520941768e-05, + "layer_8_sharpness": 1.8492526578484103e-05, + "layer_9_sharpness": 1.7915799617185257e-05, + "layer_10_sharpness": 
1.0924016351054888e-05, + "layer_11_sharpness": 9.454397513763979e-06, + "layer_12_sharpness": 1.7076283256756142e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_4500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..3147f8a941d8d7332beb476c53a78fd356d06ee6 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_4500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.331624984741211, + "total_l1_linf_norm": 77645.9375, + "total_spectral_norm": 10.331623077392578, + "layer_1_update_fnorm": 2.8994827270507812, + "layer_1_max_l1_linf_norm": 2.054231643676758, + "layer_1_max_spectral_norm": 0.06023848056793213, + "layer_2_update_fnorm": 2.7128448486328125, + "layer_2_max_l1_linf_norm": 1.9593507051467896, + "layer_2_max_spectral_norm": 0.06020554527640343, + "layer_3_update_fnorm": 2.8244333267211914, + "layer_3_max_l1_linf_norm": 2.0198636054992676, + "layer_3_max_spectral_norm": 0.060224443674087524, + "layer_4_update_fnorm": 2.9455976486206055, + "layer_4_max_l1_linf_norm": 2.092961549758911, + "layer_4_max_spectral_norm": 0.060253728181123734, + "layer_5_update_fnorm": 2.9951064586639404, + "layer_5_max_l1_linf_norm": 2.0550732612609863, + "layer_5_max_spectral_norm": 0.06023678183555603, + "layer_6_update_fnorm": 2.9970450401306152, + "layer_6_max_l1_linf_norm": 2.0466551780700684, + "layer_6_max_spectral_norm": 0.06020340695977211, + "layer_7_update_fnorm": 3.014622688293457, + "layer_7_max_l1_linf_norm": 2.0319347381591797, + "layer_7_max_spectral_norm": 0.060220010578632355, + "layer_8_update_fnorm": 3.0185844898223877, + "layer_8_max_l1_linf_norm": 2.0352892875671387, + "layer_8_max_spectral_norm": 0.060215599834918976, + "layer_9_update_fnorm": 3.013399124145508, + "layer_9_max_l1_linf_norm": 2.0106582641601562, + "layer_9_max_spectral_norm": 0.0602150559425354, + "layer_10_update_fnorm": 3.022231340408325, + "layer_10_max_l1_linf_norm": 2.0266969203948975, + "layer_10_max_spectral_norm": 0.06022152304649353, + "layer_11_update_fnorm": 3.0121753215789795, + "layer_11_max_l1_linf_norm": 1.9944665431976318, + "layer_11_max_spectral_norm": 0.060209646821022034, + "layer_12_update_fnorm": 3.0150132179260254, + "layer_12_max_l1_linf_norm": 2.0164108276367188, + "layer_12_max_spectral_norm": 0.06022978201508522, + "total_sharpness": 9.314795897807926e-05, + "ip_v_neg_g": 0.003967730328440666, + "cos_v_neg_g": 0.0011401923839002848, + "v_norm": 10.331624984741211, + "g_norm": 0.33681806921958923, + "hv_norm": 0.0708978921175003, + "cos_v_hv": 0.013574025593698025, + "hg_norm": 0.5426754355430603, + "cos_g_hg": 0.34773239493370056, + "v_parallel_norm": 0.0007985440315678716, + "v_perp_norm": 10.331624984741211, + "layer_1_v_norm": 2.8994827270507812, + "layer_1_cos_v_neg_g": 0.00345728755928576, + "layer_2_v_norm": 2.7128448486328125, + "layer_2_cos_v_neg_g": 0.0028909624088555574, + "layer_3_v_norm": 2.8244335651397705, + "layer_3_cos_v_neg_g": 0.0033243054058402777, + "layer_4_v_norm": 2.9455976486206055, + "layer_4_cos_v_neg_g": 0.0025296579115092754, + "layer_5_v_norm": 2.9951064586639404, + "layer_5_cos_v_neg_g": 0.0030338787473738194, + "layer_6_v_norm": 2.9970450401306152, + "layer_6_cos_v_neg_g": 0.0032902483362704515, + "layer_7_v_norm": 3.014622688293457, + "layer_7_cos_v_neg_g": 0.003537764074280858, + "layer_8_v_norm": 
3.0185844898223877, + "layer_8_cos_v_neg_g": 0.0035307561047375202, + "layer_9_v_norm": 3.013399124145508, + "layer_9_cos_v_neg_g": 0.004810164216905832, + "layer_10_v_norm": 3.022231340408325, + "layer_10_cos_v_neg_g": 0.0037043425254523754, + "layer_11_v_norm": 3.0121753215789795, + "layer_11_cos_v_neg_g": 0.00306549109518528, + "layer_12_v_norm": 3.0150132179260254, + "layer_12_cos_v_neg_g": 0.002385771833360195, + "layer_1_sharpness": 8.708362292964011e-05, + "layer_2_sharpness": 1.621983028599061e-05, + "layer_3_sharpness": 1.758714279276319e-05, + "layer_4_sharpness": 7.1745566856407095e-06, + "layer_5_sharpness": 9.46692034631269e-06, + "layer_6_sharpness": 1.444195004296489e-05, + "layer_7_sharpness": 1.5966174032655545e-05, + "layer_8_sharpness": 1.5707033526268788e-05, + "layer_9_sharpness": 1.572873406985309e-05, + "layer_10_sharpness": 9.01129260455491e-06, + "layer_11_sharpness": 7.536216344306013e-06, + "layer_12_sharpness": 9.388216312800068e-06 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..14f420c47ce23b2028fe222332c5e714ded1ebfd --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 7.331729888916016, + "total_l1_linf_norm": 55683.88671875, + "total_spectral_norm": 7.331730842590332, + "layer_1_update_fnorm": 2.1895546913146973, + "layer_1_max_l1_linf_norm": 1.5714576244354248, + "layer_1_max_spectral_norm": 0.043016619980335236, + "layer_2_update_fnorm": 2.018921375274658, + "layer_2_max_l1_linf_norm": 1.5041680335998535, + "layer_2_max_spectral_norm": 0.0430598147213459, + "layer_3_update_fnorm": 2.0237977504730225, + "layer_3_max_l1_linf_norm": 1.4906482696533203, + "layer_3_max_spectral_norm": 0.04309007152915001, + "layer_4_update_fnorm": 2.0111124515533447, + "layer_4_max_l1_linf_norm": 1.4247353076934814, + "layer_4_max_spectral_norm": 0.043028224259614944, + "layer_5_update_fnorm": 2.0763800144195557, + "layer_5_max_l1_linf_norm": 1.426734447479248, + "layer_5_max_spectral_norm": 0.04303019493818283, + "layer_6_update_fnorm": 2.109553337097168, + "layer_6_max_l1_linf_norm": 1.4198546409606934, + "layer_6_max_spectral_norm": 0.043040741235017776, + "layer_7_update_fnorm": 2.1205172538757324, + "layer_7_max_l1_linf_norm": 1.4314837455749512, + "layer_7_max_spectral_norm": 0.04304181784391403, + "layer_8_update_fnorm": 2.128844738006592, + "layer_8_max_l1_linf_norm": 1.4384007453918457, + "layer_8_max_spectral_norm": 0.0430116131901741, + "layer_9_update_fnorm": 2.1121411323547363, + "layer_9_max_l1_linf_norm": 1.449373722076416, + "layer_9_max_spectral_norm": 0.04301077127456665, + "layer_10_update_fnorm": 2.123126268386841, + "layer_10_max_l1_linf_norm": 1.440932273864746, + "layer_10_max_spectral_norm": 0.04300875589251518, + "layer_11_update_fnorm": 2.1188106536865234, + "layer_11_max_l1_linf_norm": 1.4436668157577515, + "layer_11_max_spectral_norm": 0.04301471635699272, + "layer_12_update_fnorm": 2.1309378147125244, + "layer_12_max_l1_linf_norm": 1.4685804843902588, + "layer_12_max_spectral_norm": 0.043012671172618866, + "total_sharpness": 0.002341864164918661, + "ip_v_neg_g": 0.06434807926416397, + "cos_v_neg_g": 0.011437347158789635, + "v_norm": 7.331729888916016, + "g_norm": 0.7673682570457458, + 
"hv_norm": 0.8705049753189087, + "cos_v_hv": 0.019724087789654732, + "hg_norm": 5.203967571258545, + "cos_g_hg": 0.6226193308830261, + "v_parallel_norm": 0.00820439588278532, + "v_perp_norm": 7.331725120544434, + "layer_1_v_norm": 2.1895546913146973, + "layer_1_cos_v_neg_g": 0.018462462350726128, + "layer_2_v_norm": 2.018921375274658, + "layer_2_cos_v_neg_g": 0.02349133975803852, + "layer_3_v_norm": 2.0237979888916016, + "layer_3_cos_v_neg_g": 0.020598556846380234, + "layer_4_v_norm": 2.0111124515533447, + "layer_4_cos_v_neg_g": 0.018111063167452812, + "layer_5_v_norm": 2.0763800144195557, + "layer_5_cos_v_neg_g": 0.018819846212863922, + "layer_6_v_norm": 2.109553337097168, + "layer_6_cos_v_neg_g": 0.020892340689897537, + "layer_7_v_norm": 2.1205172538757324, + "layer_7_cos_v_neg_g": 0.019592314958572388, + "layer_8_v_norm": 2.128844738006592, + "layer_8_cos_v_neg_g": 0.018775667995214462, + "layer_9_v_norm": 2.1121411323547363, + "layer_9_cos_v_neg_g": 0.017138084396719933, + "layer_10_v_norm": 2.123126268386841, + "layer_10_cos_v_neg_g": 0.016064360737800598, + "layer_11_v_norm": 2.1188106536865234, + "layer_11_cos_v_neg_g": 0.012894885614514351, + "layer_12_v_norm": 2.1309378147125244, + "layer_12_cos_v_neg_g": 0.009603474289178848, + "layer_1_sharpness": 0.0036774540785700083, + "layer_2_sharpness": 0.00037500125472433865, + "layer_3_sharpness": 0.0002466813020873815, + "layer_4_sharpness": 0.00015485832409467548, + "layer_5_sharpness": 0.00021107982320245355, + "layer_6_sharpness": 0.00027368313749320805, + "layer_7_sharpness": 0.00024978554574772716, + "layer_8_sharpness": 0.00021345686400309205, + "layer_9_sharpness": 0.00016118063649628311, + "layer_10_sharpness": 0.00013713962107431144, + "layer_11_sharpness": 0.00010211201151832938, + "layer_12_sharpness": 8.872537728166208e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_5000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..47e6e198b58584125e2847fb3f89590c4af78d1f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_5000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.363164901733398, + "total_l1_linf_norm": 77860.21875, + "total_spectral_norm": 10.363165855407715, + "layer_1_update_fnorm": 2.9348440170288086, + "layer_1_max_l1_linf_norm": 2.0058720111846924, + "layer_1_max_spectral_norm": 0.060215551406145096, + "layer_2_update_fnorm": 2.7478227615356445, + "layer_2_max_l1_linf_norm": 1.9735572338104248, + "layer_2_max_spectral_norm": 0.06021424010396004, + "layer_3_update_fnorm": 2.8488521575927734, + "layer_3_max_l1_linf_norm": 2.0315942764282227, + "layer_3_max_spectral_norm": 0.060211699455976486, + "layer_4_update_fnorm": 2.9539334774017334, + "layer_4_max_l1_linf_norm": 2.081594467163086, + "layer_4_max_spectral_norm": 0.06026025116443634, + "layer_5_update_fnorm": 2.9922587871551514, + "layer_5_max_l1_linf_norm": 2.0555784702301025, + "layer_5_max_spectral_norm": 0.06022530794143677, + "layer_6_update_fnorm": 3.0028767585754395, + "layer_6_max_l1_linf_norm": 2.061370849609375, + "layer_6_max_spectral_norm": 0.06023354083299637, + "layer_7_update_fnorm": 3.020057439804077, + "layer_7_max_l1_linf_norm": 2.0365734100341797, + "layer_7_max_spectral_norm": 0.060225557535886765, + "layer_8_update_fnorm": 3.019970655441284, + "layer_8_max_l1_linf_norm": 
2.031071662902832, + "layer_8_max_spectral_norm": 0.060214728116989136, + "layer_9_update_fnorm": 3.016123056411743, + "layer_9_max_l1_linf_norm": 2.000364303588867, + "layer_9_max_spectral_norm": 0.06021498516201973, + "layer_10_update_fnorm": 3.0243234634399414, + "layer_10_max_l1_linf_norm": 2.017216682434082, + "layer_10_max_spectral_norm": 0.060212112963199615, + "layer_11_update_fnorm": 3.0138142108917236, + "layer_11_max_l1_linf_norm": 1.991135597229004, + "layer_11_max_spectral_norm": 0.06021055579185486, + "layer_12_update_fnorm": 3.0141775608062744, + "layer_12_max_l1_linf_norm": 1.9931191205978394, + "layer_12_max_spectral_norm": 0.06021863594651222, + "total_sharpness": 6.806657620472834e-05, + "ip_v_neg_g": 0.004140608943998814, + "cos_v_neg_g": 0.0010468577966094017, + "v_norm": 10.363164901733398, + "g_norm": 0.3816665709018707, + "hv_norm": 0.07099837809801102, + "cos_v_hv": 0.009935230016708374, + "hg_norm": 1.3577451705932617, + "cos_g_hg": 0.48530304431915283, + "v_parallel_norm": 0.002521896967664361, + "v_perp_norm": 10.363164901733398, + "layer_1_v_norm": 2.9348440170288086, + "layer_1_cos_v_neg_g": 0.0020761576015502214, + "layer_2_v_norm": 2.7478227615356445, + "layer_2_cos_v_neg_g": 0.0009689187281765044, + "layer_3_v_norm": 2.8488521575927734, + "layer_3_cos_v_neg_g": 0.0013693346409127116, + "layer_4_v_norm": 2.9539334774017334, + "layer_4_cos_v_neg_g": 0.003387704724445939, + "layer_5_v_norm": 2.9922587871551514, + "layer_5_cos_v_neg_g": 0.0035064811818301678, + "layer_6_v_norm": 3.0028765201568604, + "layer_6_cos_v_neg_g": 0.0022939841728657484, + "layer_7_v_norm": 3.020057439804077, + "layer_7_cos_v_neg_g": 0.00350518268533051, + "layer_8_v_norm": 3.0199708938598633, + "layer_8_cos_v_neg_g": 0.004425510298460722, + "layer_9_v_norm": 3.016123056411743, + "layer_9_cos_v_neg_g": 0.0039441632106900215, + "layer_10_v_norm": 3.0243234634399414, + "layer_10_cos_v_neg_g": 0.004202029202133417, + "layer_11_v_norm": 3.0138142108917236, + "layer_11_cos_v_neg_g": 0.004887246061116457, + "layer_12_v_norm": 3.0141775608062744, + "layer_12_cos_v_neg_g": 0.004709555767476559, + "layer_1_sharpness": 3.9267561078304425e-05, + "layer_2_sharpness": 1.4916669897502288e-05, + "layer_3_sharpness": 1.3436916560749523e-05, + "layer_4_sharpness": 1.0108051355928183e-05, + "layer_5_sharpness": 9.882244739856105e-06, + "layer_6_sharpness": 1.1005419764842372e-05, + "layer_7_sharpness": 1.337413505098084e-05, + "layer_8_sharpness": 1.225557571160607e-05, + "layer_9_sharpness": 1.1519075087562669e-05, + "layer_10_sharpness": 7.595045190100791e-06, + "layer_11_sharpness": 7.470356649719179e-06, + "layer_12_sharpness": 1.647648423386272e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_5500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..fe128d4e0ce6ad14726f8784ca65b26d667eb8d3 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_5500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.365283012390137, + "total_l1_linf_norm": 77861.5390625, + "total_spectral_norm": 10.36528205871582, + "layer_1_update_fnorm": 2.931364059448242, + "layer_1_max_l1_linf_norm": 1.996307134628296, + "layer_1_max_spectral_norm": 0.06020373851060867, + "layer_2_update_fnorm": 2.758185386657715, + "layer_2_max_l1_linf_norm": 1.954967737197876, + 
"layer_2_max_spectral_norm": 0.06022072583436966, + "layer_3_update_fnorm": 2.8436992168426514, + "layer_3_max_l1_linf_norm": 2.0446033477783203, + "layer_3_max_spectral_norm": 0.060206905007362366, + "layer_4_update_fnorm": 2.9595000743865967, + "layer_4_max_l1_linf_norm": 2.07705020904541, + "layer_4_max_spectral_norm": 0.06030258163809776, + "layer_5_update_fnorm": 2.9989070892333984, + "layer_5_max_l1_linf_norm": 2.043731927871704, + "layer_5_max_spectral_norm": 0.06023237854242325, + "layer_6_update_fnorm": 3.0038912296295166, + "layer_6_max_l1_linf_norm": 2.0433685779571533, + "layer_6_max_spectral_norm": 0.06019848585128784, + "layer_7_update_fnorm": 3.0172107219696045, + "layer_7_max_l1_linf_norm": 2.062579393386841, + "layer_7_max_spectral_norm": 0.060252465307712555, + "layer_8_update_fnorm": 3.0172371864318848, + "layer_8_max_l1_linf_norm": 2.0318236351013184, + "layer_8_max_spectral_norm": 0.06022519990801811, + "layer_9_update_fnorm": 3.0133512020111084, + "layer_9_max_l1_linf_norm": 2.0283493995666504, + "layer_9_max_spectral_norm": 0.06022423133254051, + "layer_10_update_fnorm": 3.0239546298980713, + "layer_10_max_l1_linf_norm": 2.022658109664917, + "layer_10_max_spectral_norm": 0.06021399050951004, + "layer_11_update_fnorm": 3.0124316215515137, + "layer_11_max_l1_linf_norm": 1.9894909858703613, + "layer_11_max_spectral_norm": 0.060224153101444244, + "layer_12_update_fnorm": 3.0166561603546143, + "layer_12_max_l1_linf_norm": 1.9923137426376343, + "layer_12_max_spectral_norm": 0.06022363901138306, + "total_sharpness": 5.651016908814199e-05, + "ip_v_neg_g": 0.0023024133406579494, + "cos_v_neg_g": 0.0003280999371781945, + "v_norm": 10.365283012390137, + "g_norm": 0.6770113706588745, + "hv_norm": 0.0751839429140091, + "cos_v_hv": 0.00779081042855978, + "hg_norm": 3.2603847980499268, + "cos_g_hg": 0.6447307467460632, + "v_parallel_norm": 0.0006596079911105335, + "v_perp_norm": 10.365283012390137, + "layer_1_v_norm": 2.931364059448242, + "layer_1_cos_v_neg_g": 0.002415919443592429, + "layer_2_v_norm": 2.758185386657715, + "layer_2_cos_v_neg_g": 0.0017965815495699644, + "layer_3_v_norm": 2.8436994552612305, + "layer_3_cos_v_neg_g": 0.0018232375150546432, + "layer_4_v_norm": 2.9595000743865967, + "layer_4_cos_v_neg_g": 0.0003031566448044032, + "layer_5_v_norm": 2.9989070892333984, + "layer_5_cos_v_neg_g": 0.0007006441592238843, + "layer_6_v_norm": 3.0038912296295166, + "layer_6_cos_v_neg_g": 0.0007883262587711215, + "layer_7_v_norm": 3.0172107219696045, + "layer_7_cos_v_neg_g": -4.0432725654682145e-05, + "layer_8_v_norm": 3.0172371864318848, + "layer_8_cos_v_neg_g": 0.0008109436021186411, + "layer_9_v_norm": 3.0133512020111084, + "layer_9_cos_v_neg_g": 0.0016983139794319868, + "layer_10_v_norm": 3.0239546298980713, + "layer_10_cos_v_neg_g": 0.0024948297068476677, + "layer_11_v_norm": 3.0124316215515137, + "layer_11_cos_v_neg_g": 0.002615882083773613, + "layer_12_v_norm": 3.0166561603546143, + "layer_12_cos_v_neg_g": 0.0007457879837602377, + "layer_1_sharpness": 2.1946068955003284e-05, + "layer_2_sharpness": 1.5582729247398674e-05, + "layer_3_sharpness": 1.484325639466988e-05, + "layer_4_sharpness": 4.607165010384051e-06, + "layer_5_sharpness": 5.262973900244106e-06, + "layer_6_sharpness": 8.0968311522156e-06, + "layer_7_sharpness": 9.343066267319955e-06, + "layer_8_sharpness": 9.341029908682685e-06, + "layer_9_sharpness": 8.02586419013096e-06, + "layer_10_sharpness": 5.513912583410274e-06, + "layer_11_sharpness": 5.1702554628718644e-06, + "layer_12_sharpness": 1.9065173546550795e-05 
+} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_6000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..0a3dd6332a584d5dc1f877db4e82dbdc4c953a9a --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_6000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.365272521972656, + "total_l1_linf_norm": 77858.5234375, + "total_spectral_norm": 10.365272521972656, + "layer_1_update_fnorm": 2.9321038722991943, + "layer_1_max_l1_linf_norm": 1.9946703910827637, + "layer_1_max_spectral_norm": 0.060213807970285416, + "layer_2_update_fnorm": 2.7500343322753906, + "layer_2_max_l1_linf_norm": 1.9608646631240845, + "layer_2_max_spectral_norm": 0.060213979333639145, + "layer_3_update_fnorm": 2.856989622116089, + "layer_3_max_l1_linf_norm": 2.040194034576416, + "layer_3_max_spectral_norm": 0.060220956802368164, + "layer_4_update_fnorm": 2.950761079788208, + "layer_4_max_l1_linf_norm": 2.078350782394409, + "layer_4_max_spectral_norm": 0.06027944013476372, + "layer_5_update_fnorm": 2.9941623210906982, + "layer_5_max_l1_linf_norm": 2.0372567176818848, + "layer_5_max_spectral_norm": 0.060225989669561386, + "layer_6_update_fnorm": 3.0048887729644775, + "layer_6_max_l1_linf_norm": 2.043527603149414, + "layer_6_max_spectral_norm": 0.060221027582883835, + "layer_7_update_fnorm": 3.0190932750701904, + "layer_7_max_l1_linf_norm": 2.0612001419067383, + "layer_7_max_spectral_norm": 0.06022115424275398, + "layer_8_update_fnorm": 3.0195021629333496, + "layer_8_max_l1_linf_norm": 2.0453147888183594, + "layer_8_max_spectral_norm": 0.060218214988708496, + "layer_9_update_fnorm": 3.014312505722046, + "layer_9_max_l1_linf_norm": 2.00875186920166, + "layer_9_max_spectral_norm": 0.060227397829294205, + "layer_10_update_fnorm": 3.0246422290802, + "layer_10_max_l1_linf_norm": 2.0171666145324707, + "layer_10_max_spectral_norm": 0.06021213158965111, + "layer_11_update_fnorm": 3.0140843391418457, + "layer_11_max_l1_linf_norm": 1.996981143951416, + "layer_11_max_spectral_norm": 0.06020773574709892, + "layer_12_update_fnorm": 3.0162575244903564, + "layer_12_max_l1_linf_norm": 1.9981757402420044, + "layer_12_max_spectral_norm": 0.0602165162563324, + "total_sharpness": 6.935034616617486e-05, + "ip_v_neg_g": 0.0034052347764372826, + "cos_v_neg_g": 0.0010108414571732283, + "v_norm": 10.365272521972656, + "g_norm": 0.32499992847442627, + "hv_norm": 0.06916853040456772, + "cos_v_hv": 0.010392517782747746, + "hg_norm": 0.4893680214881897, + "cos_g_hg": 0.36051684617996216, + "v_parallel_norm": 0.0008823833777569234, + "v_perp_norm": 10.365272521972656, + "layer_1_v_norm": 2.9321038722991943, + "layer_1_cos_v_neg_g": 0.0043886806815862656, + "layer_2_v_norm": 2.7500343322753906, + "layer_2_cos_v_neg_g": 0.001406188472174108, + "layer_3_v_norm": 2.8569893836975098, + "layer_3_cos_v_neg_g": 0.0024376234505325556, + "layer_4_v_norm": 2.950761079788208, + "layer_4_cos_v_neg_g": 0.002835198538377881, + "layer_5_v_norm": 2.9941623210906982, + "layer_5_cos_v_neg_g": 0.003199643222615123, + "layer_6_v_norm": 3.0048890113830566, + "layer_6_cos_v_neg_g": 0.0034518223255872726, + "layer_7_v_norm": 3.0190932750701904, + "layer_7_cos_v_neg_g": 0.005029976833611727, + "layer_8_v_norm": 3.0195021629333496, + "layer_8_cos_v_neg_g": 0.004149707965552807, + "layer_9_v_norm": 3.014312505722046, + 
"layer_9_cos_v_neg_g": 0.003997597843408585, + "layer_10_v_norm": 3.0246422290802, + "layer_10_cos_v_neg_g": 0.0025564765091985464, + "layer_11_v_norm": 3.0140843391418457, + "layer_11_cos_v_neg_g": 0.0023942862171679735, + "layer_12_v_norm": 3.0162575244903564, + "layer_12_cos_v_neg_g": 0.0006839759298600256, + "layer_1_sharpness": 3.198915874236263e-05, + "layer_2_sharpness": 1.5929772416711785e-05, + "layer_3_sharpness": 9.846216016740073e-06, + "layer_4_sharpness": 7.968078534759115e-06, + "layer_5_sharpness": 7.2850671131163836e-06, + "layer_6_sharpness": 1.08630274553434e-05, + "layer_7_sharpness": 1.3944351849204395e-05, + "layer_8_sharpness": 1.2334315215412062e-05, + "layer_9_sharpness": 1.2450605026970152e-05, + "layer_10_sharpness": 7.557465778518235e-06, + "layer_11_sharpness": 6.9587858888553455e-06, + "layer_12_sharpness": 7.674073458474595e-06 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_6500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..5ab01555a865af152ab20576cee60cbd5fbc9700 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_6500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.372478485107422, + "total_l1_linf_norm": 77962.125, + "total_spectral_norm": 10.372478485107422, + "layer_1_update_fnorm": 2.935403823852539, + "layer_1_max_l1_linf_norm": 1.988401174545288, + "layer_1_max_spectral_norm": 0.060206640511751175, + "layer_2_update_fnorm": 2.776731252670288, + "layer_2_max_l1_linf_norm": 1.9608488082885742, + "layer_2_max_spectral_norm": 0.06021181866526604, + "layer_3_update_fnorm": 2.852910280227661, + "layer_3_max_l1_linf_norm": 2.051774740219116, + "layer_3_max_spectral_norm": 0.06022441014647484, + "layer_4_update_fnorm": 2.9578697681427, + "layer_4_max_l1_linf_norm": 2.0977892875671387, + "layer_4_max_spectral_norm": 0.060281578451395035, + "layer_5_update_fnorm": 2.989614248275757, + "layer_5_max_l1_linf_norm": 2.053069591522217, + "layer_5_max_spectral_norm": 0.0602346807718277, + "layer_6_update_fnorm": 3.0036213397979736, + "layer_6_max_l1_linf_norm": 2.0540690422058105, + "layer_6_max_spectral_norm": 0.06022081524133682, + "layer_7_update_fnorm": 3.0196120738983154, + "layer_7_max_l1_linf_norm": 2.0547237396240234, + "layer_7_max_spectral_norm": 0.06022277846932411, + "layer_8_update_fnorm": 3.0180459022521973, + "layer_8_max_l1_linf_norm": 2.0238161087036133, + "layer_8_max_spectral_norm": 0.0602109432220459, + "layer_9_update_fnorm": 3.0137672424316406, + "layer_9_max_l1_linf_norm": 2.0132155418395996, + "layer_9_max_spectral_norm": 0.06022021546959877, + "layer_10_update_fnorm": 3.0250461101531982, + "layer_10_max_l1_linf_norm": 2.0282340049743652, + "layer_10_max_spectral_norm": 0.060197267681360245, + "layer_11_update_fnorm": 3.0111243724823, + "layer_11_max_l1_linf_norm": 1.994312047958374, + "layer_11_max_spectral_norm": 0.06021253019571304, + "layer_12_update_fnorm": 3.014195442199707, + "layer_12_max_l1_linf_norm": 1.9645934104919434, + "layer_12_max_spectral_norm": 0.06022528186440468, + "total_sharpness": 5.264536594040692e-05, + "ip_v_neg_g": 0.0032883696258068085, + "cos_v_neg_g": 0.000919878832064569, + "v_norm": 10.372478485107422, + "g_norm": 0.34464141726493835, + "hv_norm": 0.05878813564777374, + "cos_v_hv": 0.009288658387959003, + "hg_norm": 0.8488984704017639, + "cos_g_hg": 
0.3904614746570587, + "v_parallel_norm": 0.0012148679234087467, + "v_perp_norm": 10.372478485107422, + "layer_1_v_norm": 2.935403823852539, + "layer_1_cos_v_neg_g": 0.0024532186798751354, + "layer_2_v_norm": 2.776731252670288, + "layer_2_cos_v_neg_g": 0.0014358145417645574, + "layer_3_v_norm": 2.852910041809082, + "layer_3_cos_v_neg_g": 0.002091769129037857, + "layer_4_v_norm": 2.9578697681427, + "layer_4_cos_v_neg_g": 0.0026235608384013176, + "layer_5_v_norm": 2.989614248275757, + "layer_5_cos_v_neg_g": 0.003020777367055416, + "layer_6_v_norm": 3.0036213397979736, + "layer_6_cos_v_neg_g": 0.003504777094349265, + "layer_7_v_norm": 3.0196120738983154, + "layer_7_cos_v_neg_g": 0.0044710575602948666, + "layer_8_v_norm": 3.0180459022521973, + "layer_8_cos_v_neg_g": 0.003927411045879126, + "layer_9_v_norm": 3.0137672424316406, + "layer_9_cos_v_neg_g": 0.003851267509162426, + "layer_10_v_norm": 3.0250461101531982, + "layer_10_cos_v_neg_g": 0.0024489751085639, + "layer_11_v_norm": 3.0111243724823, + "layer_11_cos_v_neg_g": 0.002137943869456649, + "layer_12_v_norm": 3.014195442199707, + "layer_12_cos_v_neg_g": 0.0024948539212346077, + "layer_1_sharpness": 1.1236672435188666e-05, + "layer_2_sharpness": 8.263089512183797e-06, + "layer_3_sharpness": 9.402671821590047e-06, + "layer_4_sharpness": 4.981983693141956e-06, + "layer_5_sharpness": 8.530608283763286e-06, + "layer_6_sharpness": 9.351429980597459e-06, + "layer_7_sharpness": 1.1568021363927983e-05, + "layer_8_sharpness": 9.722537470224779e-06, + "layer_9_sharpness": 1.0626527000567876e-05, + "layer_10_sharpness": 5.825279458804289e-06, + "layer_11_sharpness": 5.045636498834938e-06, + "layer_12_sharpness": 5.680920367012732e-06 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_7000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..a35911df6707d8d4054b0416e2f159a821ec2e87 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_7000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.369246482849121, + "total_l1_linf_norm": 77917.6015625, + "total_spectral_norm": 10.369244575500488, + "layer_1_update_fnorm": 2.934870958328247, + "layer_1_max_l1_linf_norm": 2.0148212909698486, + "layer_1_max_spectral_norm": 0.06021542847156525, + "layer_2_update_fnorm": 2.756699562072754, + "layer_2_max_l1_linf_norm": 1.9544318914413452, + "layer_2_max_spectral_norm": 0.06024383753538132, + "layer_3_update_fnorm": 2.8549137115478516, + "layer_3_max_l1_linf_norm": 2.041808843612671, + "layer_3_max_spectral_norm": 0.060213394463062286, + "layer_4_update_fnorm": 2.95780611038208, + "layer_4_max_l1_linf_norm": 2.0875020027160645, + "layer_4_max_spectral_norm": 0.060312312096357346, + "layer_5_update_fnorm": 2.989424228668213, + "layer_5_max_l1_linf_norm": 2.052464723587036, + "layer_5_max_spectral_norm": 0.060231950134038925, + "layer_6_update_fnorm": 3.0029704570770264, + "layer_6_max_l1_linf_norm": 2.050601005554199, + "layer_6_max_spectral_norm": 0.06022723764181137, + "layer_7_update_fnorm": 3.0201027393341064, + "layer_7_max_l1_linf_norm": 2.0560905933380127, + "layer_7_max_spectral_norm": 0.06021130084991455, + "layer_8_update_fnorm": 3.017249345779419, + "layer_8_max_l1_linf_norm": 2.0258233547210693, + "layer_8_max_spectral_norm": 0.06023959442973137, + "layer_9_update_fnorm": 3.0161616802215576, + 
"layer_9_max_l1_linf_norm": 2.0229201316833496, + "layer_9_max_spectral_norm": 0.06022492051124573, + "layer_10_update_fnorm": 3.0275380611419678, + "layer_10_max_l1_linf_norm": 2.039430856704712, + "layer_10_max_spectral_norm": 0.06020672246813774, + "layer_11_update_fnorm": 3.015186071395874, + "layer_11_max_l1_linf_norm": 2.0080387592315674, + "layer_11_max_spectral_norm": 0.060217875987291336, + "layer_12_update_fnorm": 3.0144553184509277, + "layer_12_max_l1_linf_norm": 1.9738945960998535, + "layer_12_max_spectral_norm": 0.060228172689676285, + "total_sharpness": 5.5955202697077766e-05, + "ip_v_neg_g": 0.0028178817592561245, + "cos_v_neg_g": 0.0008157832198776305, + "v_norm": 10.369246482849121, + "g_norm": 0.33312007784843445, + "hv_norm": 0.05958912521600723, + "cos_v_hv": 0.009736897423863411, + "hg_norm": 0.475449800491333, + "cos_g_hg": 0.33834829926490784, + "v_parallel_norm": 0.0006689285510219634, + "v_perp_norm": 10.369246482849121, + "layer_1_v_norm": 2.934870958328247, + "layer_1_cos_v_neg_g": 0.0036709513515233994, + "layer_2_v_norm": 2.756699562072754, + "layer_2_cos_v_neg_g": 0.0021891999058425426, + "layer_3_v_norm": 2.8549137115478516, + "layer_3_cos_v_neg_g": 0.0022736506070941687, + "layer_4_v_norm": 2.95780611038208, + "layer_4_cos_v_neg_g": 0.0024886042810976505, + "layer_5_v_norm": 2.989424228668213, + "layer_5_cos_v_neg_g": 0.0021568096708506346, + "layer_6_v_norm": 3.0029702186584473, + "layer_6_cos_v_neg_g": 0.0024340548552572727, + "layer_7_v_norm": 3.0201027393341064, + "layer_7_cos_v_neg_g": 0.0028653109911829233, + "layer_8_v_norm": 3.017249345779419, + "layer_8_cos_v_neg_g": 0.0028022052720189095, + "layer_9_v_norm": 3.0161616802215576, + "layer_9_cos_v_neg_g": 0.0027415312360972166, + "layer_10_v_norm": 3.0275380611419678, + "layer_10_cos_v_neg_g": 0.0021993881091475487, + "layer_11_v_norm": 3.015186071395874, + "layer_11_cos_v_neg_g": 0.0029603270813822746, + "layer_12_v_norm": 3.0144553184509277, + "layer_12_cos_v_neg_g": 0.002376440679654479, + "layer_1_sharpness": 5.647567377309315e-05, + "layer_2_sharpness": 1.2132378287788015e-05, + "layer_3_sharpness": 9.84348571364535e-06, + "layer_4_sharpness": 6.468624178523896e-06, + "layer_5_sharpness": 6.7470687099557836e-06, + "layer_6_sharpness": 8.634837286081165e-06, + "layer_7_sharpness": 1.0060845852422062e-05, + "layer_8_sharpness": 1.008061008178629e-05, + "layer_9_sharpness": 9.208451047015842e-06, + "layer_10_sharpness": 5.927956408413593e-06, + "layer_11_sharpness": 4.937872745358618e-06, + "layer_12_sharpness": 6.651620424236171e-06 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_7500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..492e0e24604b5ebd8113de2904a0604f13e5f579 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_7500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.355579376220703, + "total_l1_linf_norm": 77775.546875, + "total_spectral_norm": 10.35558032989502, + "layer_1_update_fnorm": 2.93407940864563, + "layer_1_max_l1_linf_norm": 1.9970033168792725, + "layer_1_max_spectral_norm": 0.06020016595721245, + "layer_2_update_fnorm": 2.7681243419647217, + "layer_2_max_l1_linf_norm": 1.9430092573165894, + "layer_2_max_spectral_norm": 0.060234978795051575, + "layer_3_update_fnorm": 2.8552379608154297, + "layer_3_max_l1_linf_norm": 
2.034325122833252, + "layer_3_max_spectral_norm": 0.06022125855088234, + "layer_4_update_fnorm": 2.9548399448394775, + "layer_4_max_l1_linf_norm": 2.0754032135009766, + "layer_4_max_spectral_norm": 0.06031312420964241, + "layer_5_update_fnorm": 2.980842351913452, + "layer_5_max_l1_linf_norm": 2.032846689224243, + "layer_5_max_spectral_norm": 0.06021549925208092, + "layer_6_update_fnorm": 2.996636152267456, + "layer_6_max_l1_linf_norm": 2.0357205867767334, + "layer_6_max_spectral_norm": 0.06021393463015556, + "layer_7_update_fnorm": 3.0072553157806396, + "layer_7_max_l1_linf_norm": 2.0313727855682373, + "layer_7_max_spectral_norm": 0.06023299694061279, + "layer_8_update_fnorm": 3.013789176940918, + "layer_8_max_l1_linf_norm": 2.0279972553253174, + "layer_8_max_spectral_norm": 0.06024358421564102, + "layer_9_update_fnorm": 3.0086159706115723, + "layer_9_max_l1_linf_norm": 1.9985566139221191, + "layer_9_max_spectral_norm": 0.06022074073553085, + "layer_10_update_fnorm": 3.0196242332458496, + "layer_10_max_l1_linf_norm": 2.0192532539367676, + "layer_10_max_spectral_norm": 0.0602223202586174, + "layer_11_update_fnorm": 3.005265235900879, + "layer_11_max_l1_linf_norm": 1.9937021732330322, + "layer_11_max_spectral_norm": 0.06021600961685181, + "layer_12_update_fnorm": 3.017512559890747, + "layer_12_max_l1_linf_norm": 2.0124356746673584, + "layer_12_max_spectral_norm": 0.06022634357213974, + "total_sharpness": 5.924658398726024e-05, + "ip_v_neg_g": 0.0038886861875653267, + "cos_v_neg_g": 0.0010998980142176151, + "v_norm": 10.355579376220703, + "g_norm": 0.3414098620414734, + "hv_norm": 0.08002128452062607, + "cos_v_hv": 0.007667119149118662, + "hg_norm": 0.6552504897117615, + "cos_g_hg": 0.35116738080978394, + "v_parallel_norm": 0.0016010929830372334, + "v_perp_norm": 10.355579376220703, + "layer_1_v_norm": 2.93407940864563, + "layer_1_cos_v_neg_g": 0.0021781662944704294, + "layer_2_v_norm": 2.7681243419647217, + "layer_2_cos_v_neg_g": 0.004763895645737648, + "layer_3_v_norm": 2.8552377223968506, + "layer_3_cos_v_neg_g": 0.0029375941958278418, + "layer_4_v_norm": 2.9548399448394775, + "layer_4_cos_v_neg_g": 0.002174230059608817, + "layer_5_v_norm": 2.980842351913452, + "layer_5_cos_v_neg_g": 0.0023177838884294033, + "layer_6_v_norm": 2.996636152267456, + "layer_6_cos_v_neg_g": 0.0026830367278307676, + "layer_7_v_norm": 3.0072553157806396, + "layer_7_cos_v_neg_g": 0.0028664646670222282, + "layer_8_v_norm": 3.013789176940918, + "layer_8_cos_v_neg_g": 0.003921886440366507, + "layer_9_v_norm": 3.0086159706115723, + "layer_9_cos_v_neg_g": 0.004295050166547298, + "layer_10_v_norm": 3.0196242332458496, + "layer_10_cos_v_neg_g": 0.003873782232403755, + "layer_11_v_norm": 3.005265235900879, + "layer_11_cos_v_neg_g": 0.0041898153722286224, + "layer_12_v_norm": 3.017512559890747, + "layer_12_cos_v_neg_g": 0.0038377847522497177, + "layer_1_sharpness": 1.6115633115987293e-05, + "layer_2_sharpness": 1.2620155757758766e-05, + "layer_3_sharpness": 9.86624218057841e-06, + "layer_4_sharpness": 6.150253739178879e-06, + "layer_5_sharpness": 7.534349606430624e-06, + "layer_6_sharpness": 9.68326367001282e-06, + "layer_7_sharpness": 1.0980485058098566e-05, + "layer_8_sharpness": 1.0222283890470862e-05, + "layer_9_sharpness": 1.0365192792960443e-05, + "layer_10_sharpness": 6.150233275548089e-06, + "layer_11_sharpness": 5.486546342581278e-06, + "layer_12_sharpness": 1.162344869953813e-05 +} \ No newline at end of file diff --git 
a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_8000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..04a5285d01916c2ee3518da2fea29b85b35e470f --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_8000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.372862815856934, + "total_l1_linf_norm": 77857.6171875, + "total_spectral_norm": 10.372861862182617, + "layer_1_update_fnorm": 2.9407901763916016, + "layer_1_max_l1_linf_norm": 1.9806244373321533, + "layer_1_max_spectral_norm": 0.060218214988708496, + "layer_2_update_fnorm": 2.776862859725952, + "layer_2_max_l1_linf_norm": 1.9506957530975342, + "layer_2_max_spectral_norm": 0.060224611312150955, + "layer_3_update_fnorm": 2.8627994060516357, + "layer_3_max_l1_linf_norm": 2.0655245780944824, + "layer_3_max_spectral_norm": 0.06024022027850151, + "layer_4_update_fnorm": 2.963435411453247, + "layer_4_max_l1_linf_norm": 2.0680642127990723, + "layer_4_max_spectral_norm": 0.06025499850511551, + "layer_5_update_fnorm": 2.9906227588653564, + "layer_5_max_l1_linf_norm": 2.0340161323547363, + "layer_5_max_spectral_norm": 0.060221001505851746, + "layer_6_update_fnorm": 3.00384521484375, + "layer_6_max_l1_linf_norm": 2.0738728046417236, + "layer_6_max_spectral_norm": 0.06021520122885704, + "layer_7_update_fnorm": 3.00818133354187, + "layer_7_max_l1_linf_norm": 2.0304174423217773, + "layer_7_max_spectral_norm": 0.060221701860427856, + "layer_8_update_fnorm": 3.0141842365264893, + "layer_8_max_l1_linf_norm": 2.0284194946289062, + "layer_8_max_spectral_norm": 0.06022271141409874, + "layer_9_update_fnorm": 3.0084593296051025, + "layer_9_max_l1_linf_norm": 1.9974507093429565, + "layer_9_max_spectral_norm": 0.06021951138973236, + "layer_10_update_fnorm": 3.026662826538086, + "layer_10_max_l1_linf_norm": 2.0334901809692383, + "layer_10_max_spectral_norm": 0.06020507216453552, + "layer_11_update_fnorm": 3.0122387409210205, + "layer_11_max_l1_linf_norm": 2.0006144046783447, + "layer_11_max_spectral_norm": 0.060213759541511536, + "layer_12_update_fnorm": 3.018338680267334, + "layer_12_max_l1_linf_norm": 1.9843156337738037, + "layer_12_max_spectral_norm": 0.06023222953081131, + "total_sharpness": 5.4613250540569425e-05, + "ip_v_neg_g": 0.0031210274901241064, + "cos_v_neg_g": 0.0009286050335504115, + "v_norm": 10.372862815856934, + "g_norm": 0.32401707768440247, + "hv_norm": 0.05020178109407425, + "cos_v_hv": 0.011284374631941319, + "hg_norm": 0.4872681796550751, + "cos_g_hg": 0.31983935832977295, + "v_parallel_norm": 0.001321211690083146, + "v_perp_norm": 10.372862815856934, + "layer_1_v_norm": 2.9407901763916016, + "layer_1_cos_v_neg_g": 0.004433428402990103, + "layer_2_v_norm": 2.776862859725952, + "layer_2_cos_v_neg_g": 0.00037239244556985795, + "layer_3_v_norm": 2.8627991676330566, + "layer_3_cos_v_neg_g": 0.0015822008717805147, + "layer_4_v_norm": 2.963435411453247, + "layer_4_cos_v_neg_g": 0.0019400385208427906, + "layer_5_v_norm": 2.9906227588653564, + "layer_5_cos_v_neg_g": 0.0021369170863181353, + "layer_6_v_norm": 3.003845691680908, + "layer_6_cos_v_neg_g": 0.003233760828152299, + "layer_7_v_norm": 3.00818133354187, + "layer_7_cos_v_neg_g": 0.0041666580364108086, + "layer_8_v_norm": 3.01418399810791, + "layer_8_cos_v_neg_g": 0.0038345633074641228, + "layer_9_v_norm": 3.0084593296051025, + "layer_9_cos_v_neg_g": 0.0039902618154883385, + 
"layer_10_v_norm": 3.026662826538086, + "layer_10_cos_v_neg_g": 0.0030600191093981266, + "layer_11_v_norm": 3.0122387409210205, + "layer_11_cos_v_neg_g": 0.0022665727883577347, + "layer_12_v_norm": 3.018338680267334, + "layer_12_cos_v_neg_g": 0.0019316725665703416, + "layer_1_sharpness": 1.1883579645655118e-05, + "layer_2_sharpness": 7.2558527790533844e-06, + "layer_3_sharpness": 6.829159246990457e-06, + "layer_4_sharpness": 4.737818926514592e-06, + "layer_5_sharpness": 6.8779067987634335e-06, + "layer_6_sharpness": 1.11232675408246e-05, + "layer_7_sharpness": 1.358376357529778e-05, + "layer_8_sharpness": 1.175731358671328e-05, + "layer_9_sharpness": 1.1591652764764149e-05, + "layer_10_sharpness": 6.467319963121554e-06, + "layer_11_sharpness": 4.588890533341328e-06, + "layer_12_sharpness": 5.38483163836645e-06 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_8500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..912687ddf5992fdadaeaf72de64d6042f0fa2b40 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_8500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.294906616210938, + "total_l1_linf_norm": 77239.8046875, + "total_spectral_norm": 10.294905662536621, + "layer_1_update_fnorm": 2.9127869606018066, + "layer_1_max_l1_linf_norm": 2.020108222961426, + "layer_1_max_spectral_norm": 0.060222137719392776, + "layer_2_update_fnorm": 2.709855794906616, + "layer_2_max_l1_linf_norm": 1.9727630615234375, + "layer_2_max_spectral_norm": 0.06020483374595642, + "layer_3_update_fnorm": 2.82194185256958, + "layer_3_max_l1_linf_norm": 1.9970548152923584, + "layer_3_max_spectral_norm": 0.060217831283807755, + "layer_4_update_fnorm": 2.955022096633911, + "layer_4_max_l1_linf_norm": 2.040841579437256, + "layer_4_max_spectral_norm": 0.06029868125915527, + "layer_5_update_fnorm": 2.981551170349121, + "layer_5_max_l1_linf_norm": 2.0156002044677734, + "layer_5_max_spectral_norm": 0.06023038551211357, + "layer_6_update_fnorm": 2.9911086559295654, + "layer_6_max_l1_linf_norm": 2.020195960998535, + "layer_6_max_spectral_norm": 0.060228101909160614, + "layer_7_update_fnorm": 2.9945311546325684, + "layer_7_max_l1_linf_norm": 2.0117480754852295, + "layer_7_max_spectral_norm": 0.06022616848349571, + "layer_8_update_fnorm": 3.002511978149414, + "layer_8_max_l1_linf_norm": 2.001558780670166, + "layer_8_max_spectral_norm": 0.060234613716602325, + "layer_9_update_fnorm": 2.995405435562134, + "layer_9_max_l1_linf_norm": 1.9885910749435425, + "layer_9_max_spectral_norm": 0.06025093048810959, + "layer_10_update_fnorm": 3.0006613731384277, + "layer_10_max_l1_linf_norm": 1.989426612854004, + "layer_10_max_spectral_norm": 0.06025806814432144, + "layer_11_update_fnorm": 2.978327751159668, + "layer_11_max_l1_linf_norm": 1.9857001304626465, + "layer_11_max_spectral_norm": 0.060219161212444305, + "layer_12_update_fnorm": 3.0073342323303223, + "layer_12_max_l1_linf_norm": 2.2170610427856445, + "layer_12_max_spectral_norm": 0.06023967266082764, + "total_sharpness": 6.223243690328673e-05, + "ip_v_neg_g": 0.0032876741606742144, + "cos_v_neg_g": 0.0009299739613197744, + "v_norm": 10.294906616210938, + "g_norm": 0.34339627623558044, + "hv_norm": 0.0658588707447052, + "cos_v_hv": 0.009728031232953072, + "hg_norm": 0.8551756739616394, + "cos_g_hg": 0.4292328655719757, + 
"v_parallel_norm": 0.0005579082062467933, + "v_perp_norm": 10.294906616210938, + "layer_1_v_norm": 2.9127869606018066, + "layer_1_cos_v_neg_g": 0.005687134340405464, + "layer_2_v_norm": 2.709855794906616, + "layer_2_cos_v_neg_g": 0.002575343009084463, + "layer_3_v_norm": 2.821942090988159, + "layer_3_cos_v_neg_g": 0.0020724511705338955, + "layer_4_v_norm": 2.955022096633911, + "layer_4_cos_v_neg_g": 0.0030694804154336452, + "layer_5_v_norm": 2.981551170349121, + "layer_5_cos_v_neg_g": 0.003036116948351264, + "layer_6_v_norm": 2.9911086559295654, + "layer_6_cos_v_neg_g": 0.0033719143830239773, + "layer_7_v_norm": 2.9945311546325684, + "layer_7_cos_v_neg_g": 0.003904039738699794, + "layer_8_v_norm": 3.002511978149414, + "layer_8_cos_v_neg_g": 0.0035704413894563913, + "layer_9_v_norm": 2.995405435562134, + "layer_9_cos_v_neg_g": 0.005160605069249868, + "layer_10_v_norm": 3.0006613731384277, + "layer_10_cos_v_neg_g": 0.0031166868284344673, + "layer_11_v_norm": 2.978327751159668, + "layer_11_cos_v_neg_g": 0.0026822227519005537, + "layer_12_v_norm": 3.0073342323303223, + "layer_12_cos_v_neg_g": 0.0015517311403527856, + "layer_1_sharpness": 6.107302760938182e-05, + "layer_2_sharpness": 1.654717925703153e-05, + "layer_3_sharpness": 8.8752422016114e-06, + "layer_4_sharpness": 5.114293799124425e-06, + "layer_5_sharpness": 7.007052772678435e-06, + "layer_6_sharpness": 1.083153529179981e-05, + "layer_7_sharpness": 1.56164969666861e-05, + "layer_8_sharpness": 1.2279421753191855e-05, + "layer_9_sharpness": 1.0251650564896408e-05, + "layer_10_sharpness": 5.148818218003726e-06, + "layer_11_sharpness": 4.9442469389759935e-06, + "layer_12_sharpness": 1.7280179235967807e-05 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_9000.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..151bf45592b4572932d4817a659f44bdfccb3902 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_9000.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.370260238647461, + "total_l1_linf_norm": 77832.578125, + "total_spectral_norm": 10.370261192321777, + "layer_1_update_fnorm": 2.931673526763916, + "layer_1_max_l1_linf_norm": 1.9737356901168823, + "layer_1_max_spectral_norm": 0.06022128090262413, + "layer_2_update_fnorm": 2.7627272605895996, + "layer_2_max_l1_linf_norm": 1.9618525505065918, + "layer_2_max_spectral_norm": 0.06023211032152176, + "layer_3_update_fnorm": 2.8671681880950928, + "layer_3_max_l1_linf_norm": 2.0695934295654297, + "layer_3_max_spectral_norm": 0.06025315448641777, + "layer_4_update_fnorm": 2.9684321880340576, + "layer_4_max_l1_linf_norm": 2.066018581390381, + "layer_4_max_spectral_norm": 0.06028781086206436, + "layer_5_update_fnorm": 2.9894673824310303, + "layer_5_max_l1_linf_norm": 2.0186610221862793, + "layer_5_max_spectral_norm": 0.06023000180721283, + "layer_6_update_fnorm": 3.0052359104156494, + "layer_6_max_l1_linf_norm": 2.0382025241851807, + "layer_6_max_spectral_norm": 0.06024697795510292, + "layer_7_update_fnorm": 3.0141091346740723, + "layer_7_max_l1_linf_norm": 2.032663106918335, + "layer_7_max_spectral_norm": 0.06023440510034561, + "layer_8_update_fnorm": 3.0152668952941895, + "layer_8_max_l1_linf_norm": 2.0354228019714355, + "layer_8_max_spectral_norm": 0.06023360416293144, + "layer_9_update_fnorm": 3.010864496231079, + "layer_9_max_l1_linf_norm": 
1.9969086647033691, + "layer_9_max_spectral_norm": 0.060221992433071136, + "layer_10_update_fnorm": 3.0236897468566895, + "layer_10_max_l1_linf_norm": 2.0215914249420166, + "layer_10_max_spectral_norm": 0.06021515652537346, + "layer_11_update_fnorm": 3.010854959487915, + "layer_11_max_l1_linf_norm": 1.9871020317077637, + "layer_11_max_spectral_norm": 0.060214780271053314, + "layer_12_update_fnorm": 3.015779972076416, + "layer_12_max_l1_linf_norm": 2.0692946910858154, + "layer_12_max_spectral_norm": 0.06022864580154419, + "total_sharpness": 5.036958464188501e-05, + "ip_v_neg_g": 0.003214125521481037, + "cos_v_neg_g": 0.0008687610388733447, + "v_norm": 10.370260238647461, + "g_norm": 0.3567572832107544, + "hv_norm": 0.0791107565164566, + "cos_v_hv": 0.006602714769542217, + "hg_norm": 0.8503821492195129, + "cos_g_hg": 0.4498957395553589, + "v_parallel_norm": 0.002259273547679186, + "v_perp_norm": 10.370260238647461, + "layer_1_v_norm": 2.931673526763916, + "layer_1_cos_v_neg_g": 0.0042878626845777035, + "layer_2_v_norm": 2.7627272605895996, + "layer_2_cos_v_neg_g": 0.0001905265380628407, + "layer_3_v_norm": 2.8671681880950928, + "layer_3_cos_v_neg_g": 0.0005315932794474065, + "layer_4_v_norm": 2.9684321880340576, + "layer_4_cos_v_neg_g": 0.0007110726437531412, + "layer_5_v_norm": 2.9894673824310303, + "layer_5_cos_v_neg_g": 0.0013639439130201936, + "layer_6_v_norm": 3.0052356719970703, + "layer_6_cos_v_neg_g": 0.002500600414350629, + "layer_7_v_norm": 3.0141091346740723, + "layer_7_cos_v_neg_g": 0.002922181971371174, + "layer_8_v_norm": 3.0152668952941895, + "layer_8_cos_v_neg_g": 0.0032520578242838383, + "layer_9_v_norm": 3.010864496231079, + "layer_9_cos_v_neg_g": 0.004366799257695675, + "layer_10_v_norm": 3.0236897468566895, + "layer_10_cos_v_neg_g": 0.0035164286382496357, + "layer_11_v_norm": 3.010854959487915, + "layer_11_cos_v_neg_g": 0.0035190125927329063, + "layer_12_v_norm": 3.015779972076416, + "layer_12_cos_v_neg_g": 0.004558803979307413, + "layer_1_sharpness": 1.0052134712168481e-05, + "layer_2_sharpness": 1.3290319657244254e-05, + "layer_3_sharpness": 5.143328507983824e-06, + "layer_4_sharpness": 4.591680863086367e-06, + "layer_5_sharpness": 5.5142036217148416e-06, + "layer_6_sharpness": 8.91118725121487e-06, + "layer_7_sharpness": 9.024567589221988e-06, + "layer_8_sharpness": 8.1179659900954e-06, + "layer_9_sharpness": 8.406540473515633e-06, + "layer_10_sharpness": 5.239145593805006e-06, + "layer_11_sharpness": 4.7893095143081155e-06, + "layer_12_sharpness": 7.2754264692775905e-06 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_9500.json b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..fdd2a1f8c8c7668b822d5ec330bf1049f329eb42 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/sharpness_step_9500.json @@ -0,0 +1,88 @@ +{ + "total_update_fnorm": 10.348601341247559, + "total_l1_linf_norm": 77649.03125, + "total_spectral_norm": 10.348601341247559, + "layer_1_update_fnorm": 2.9266819953918457, + "layer_1_max_l1_linf_norm": 1.9806491136550903, + "layer_1_max_spectral_norm": 0.06022583693265915, + "layer_2_update_fnorm": 2.7825608253479004, + "layer_2_max_l1_linf_norm": 1.9502062797546387, + "layer_2_max_spectral_norm": 0.06022772192955017, + "layer_3_update_fnorm": 2.848968982696533, + "layer_3_max_l1_linf_norm": 2.0469188690185547, + 
"layer_3_max_spectral_norm": 0.06023130565881729, + "layer_4_update_fnorm": 2.9595508575439453, + "layer_4_max_l1_linf_norm": 2.0972564220428467, + "layer_4_max_spectral_norm": 0.06027458608150482, + "layer_5_update_fnorm": 2.981633424758911, + "layer_5_max_l1_linf_norm": 2.0050454139709473, + "layer_5_max_spectral_norm": 0.06024640426039696, + "layer_6_update_fnorm": 2.9934332370758057, + "layer_6_max_l1_linf_norm": 2.027113437652588, + "layer_6_max_spectral_norm": 0.06020457297563553, + "layer_7_update_fnorm": 2.997741937637329, + "layer_7_max_l1_linf_norm": 2.0091192722320557, + "layer_7_max_spectral_norm": 0.06021619960665703, + "layer_8_update_fnorm": 3.0043962001800537, + "layer_8_max_l1_linf_norm": 2.016721487045288, + "layer_8_max_spectral_norm": 0.06022634729743004, + "layer_9_update_fnorm": 3.0015525817871094, + "layer_9_max_l1_linf_norm": 1.999102234840393, + "layer_9_max_spectral_norm": 0.06021212786436081, + "layer_10_update_fnorm": 3.022432804107666, + "layer_10_max_l1_linf_norm": 2.0328786373138428, + "layer_10_max_spectral_norm": 0.06023037061095238, + "layer_11_update_fnorm": 3.0084547996520996, + "layer_11_max_l1_linf_norm": 1.9799830913543701, + "layer_11_max_spectral_norm": 0.060234468430280685, + "layer_12_update_fnorm": 3.015343189239502, + "layer_12_max_l1_linf_norm": 2.061410903930664, + "layer_12_max_spectral_norm": 0.06023411825299263, + "total_sharpness": 5.788726048194803e-05, + "ip_v_neg_g": 0.0019315517274662852, + "cos_v_neg_g": 0.0005598017596639693, + "v_norm": 10.348601341247559, + "g_norm": 0.3334190845489502, + "hv_norm": 0.07269970327615738, + "cos_v_hv": 0.008240091614425182, + "hg_norm": 0.8742169141769409, + "cos_g_hg": 0.4086393117904663, + "v_parallel_norm": 0.0008905631257221103, + "v_perp_norm": 10.348601341247559, + "layer_1_v_norm": 2.9266819953918457, + "layer_1_cos_v_neg_g": 0.0027921805158257484, + "layer_2_v_norm": 2.7825608253479004, + "layer_2_cos_v_neg_g": 0.002517570974305272, + "layer_3_v_norm": 2.848968982696533, + "layer_3_cos_v_neg_g": 0.0006571871927008033, + "layer_4_v_norm": 2.9595508575439453, + "layer_4_cos_v_neg_g": 0.0013401636388152838, + "layer_5_v_norm": 2.981633424758911, + "layer_5_cos_v_neg_g": 0.0015727262943983078, + "layer_6_v_norm": 2.9934332370758057, + "layer_6_cos_v_neg_g": 0.0026900458615273237, + "layer_7_v_norm": 2.997741937637329, + "layer_7_cos_v_neg_g": 0.0019168617436662316, + "layer_8_v_norm": 3.0043959617614746, + "layer_8_cos_v_neg_g": 0.002237132051959634, + "layer_9_v_norm": 3.0015525817871094, + "layer_9_cos_v_neg_g": 0.0026856204494833946, + "layer_10_v_norm": 3.022432804107666, + "layer_10_cos_v_neg_g": 0.0024965638294816017, + "layer_11_v_norm": 3.0084547996520996, + "layer_11_cos_v_neg_g": 0.0022530739661306143, + "layer_12_v_norm": 3.015343189239502, + "layer_12_cos_v_neg_g": 0.001950578996911645, + "layer_1_sharpness": 2.6812109354068525e-05, + "layer_2_sharpness": 8.123048246488906e-06, + "layer_3_sharpness": 1.1106136298622005e-05, + "layer_4_sharpness": 5.253222298051696e-06, + "layer_5_sharpness": 7.579880275443429e-06, + "layer_6_sharpness": 1.1060556971642654e-05, + "layer_7_sharpness": 1.1484572496556211e-05, + "layer_8_sharpness": 1.1626596460700966e-05, + "layer_9_sharpness": 9.891961781249847e-06, + "layer_10_sharpness": 5.286161467665806e-06, + "layer_11_sharpness": 4.50496145276702e-06, + "layer_12_sharpness": 7.77686909714248e-06 +} \ No newline at end of file diff --git a/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/training_log.txt 
b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..41dc4038eb50b3feeea87a39e0c5e64f249d33d1 --- /dev/null +++ b/logs_sharpness_pure/muon_lr_search/opt_muon_alr_0.001_mlr_0.05_seed_42/training_log.txt @@ -0,0 +1,11450 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer + +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +from nano_GPT_qkvo_pure_RMS import GPT, GPTConfig + + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens 
(claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + self.current_shard = (self.current_shard + 1) % len(self.files) + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, 
C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. + """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. 
+ # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? + grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" same updates) + # So we use AVG to keep the value unchanged (or could skip reduce entirely) + total_update_norm_sq = sum(torch.sum(v * v) for v in update_direction_v) + dist.all_reduce(total_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results["total_update_fnorm"] = torch.sqrt(total_update_norm_sq).item() + + # Calculate TOTAL update Max-of-Max and Spectral norms + print0(f"[Enhanced Sharpness @ Step {step}] Calculating total update Max-of-Max and Spectral norms...") + try: + all_updates_flat = torch.cat([v.flatten() for v in update_direction_v if v.numel() > 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. 
Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + # Access blocks via model.transformer.h (model is already unwrapped raw_model) + # nano_GPT_qkvo_pure.py structure: model.transformer.h is a ModuleList of Block + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # --- 4. Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + # Note: update_direction_v is identical across ranks in DDP, use AVG to keep value unchanged + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + # Keep model in original dtype (bfloat16/float16) to avoid dtype mismatch issues + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + + # Approximate the *training step objective* by averaging loss across N micro-batches. + # - If N == grad_accum_steps, this matches the training step objective scale. + # - If N is smaller (e.g. 1/2), it's a cheaper approximation that reduces sharpness overhead. 
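+ # The block below estimates curvature with a Pearlmutter-style double backward: the first-order grads are taken with create_graph=True, and differentiating <g, v> w.r.t. the parameters then yields Hv without ever materializing the Hessian. The reported sharpness values (total and per layer) are Rayleigh quotients v^T H v / ||v||^2 of this averaged loss, taken along the captured update direction v.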
+ loss_hvp_sum = None + shard_was_changed = False + n_hvp_micro = max(1, int(sharpness_hvp_microbatches)) + for _ in range(n_hvp_micro): + x_hvp, y_hvp = train_loader.next_batch() + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + loss_hvp_sum = loss_mb if loss_hvp_sum is None else (loss_hvp_sum + loss_mb) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + + # Restore train_loader state to avoid affecting subsequent training + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + loss_hvp = loss_hvp_sum / n_hvp_micro + grads_hvp = torch.autograd.grad(loss_hvp, model.parameters(), create_graph=True, allow_unused=True) + + # --- 6. Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_hvp, update_direction_v) if g is not None) + hvp_total_result = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_total_result, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + # ---- Alignment metrics between update v and (negative) gradient g ---- + # g is grads_hvp (first-order grads on the same batch) + # ip_v_neg_g = <v, -g>; cos_v_neg_g = <v, -g> / (||v||*||g||+eps) + eps = 1e-12 + ip_v_neg_g = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + v_norm = torch.sqrt(v_norm_sq_total + eps) + g_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + dist.all_reduce(ip_v_neg_g, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq, op=dist.ReduceOp.AVG) + g_norm = torch.sqrt(g_norm_sq + eps) + analysis_results["ip_v_neg_g"] = ip_v_neg_g.item() + analysis_results["cos_v_neg_g"] = (ip_v_neg_g / (v_norm * g_norm + eps)).item() + analysis_results["v_norm"] = v_norm.item() + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_total_result if hvp is not None) + dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg (does gradient align with curvature) ---- + hvp_g_result = torch.autograd.grad( + grads_hvp, + model.parameters(), + grad_outputs=[g if g is not None else torch.zeros_like(v) for g, v in zip(grads_hvp, update_direction_v)], + retain_graph=True, + allow_unused=True, + ) + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_result) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_result if hg is not None) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm =
torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(max(v_norm_sq_total - v_parallel_norm_sq, torch.tensor(0.0, device=device)) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + del hvp_total_result + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + v_dot_g_group = sum(torch.sum(grads_hvp[i] * update_direction_v[i]) + for i in indices if grads_hvp[i] is not None) + hvp_group_result = torch.autograd.grad(v_dot_g_group, model.parameters(), + retain_graph=True, allow_unused=True) + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # CRITICAL: Use AVG for both numerator and denominator to keep sharpness scale consistent + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + del hvp_group_result + torch.cuda.empty_cache() + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- 8. Cleanup --- + model.train() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del loss_hvp, v_dot_g_total, vhp_dot_v_total, v_norm_sq_total + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. 
+ """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'ip_v_neg_g' in results: + misc_parts.append(f"ip_v_neg_g:{results['ip_v_neg_g']:.4e}") + if 'cos_v_neg_g' in results: + misc_parts.append(f"cos_v_neg_g:{results['cos_v_neg_g']:.4e}") + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + if 'g_norm' in results: + misc_parts.append(f"g_norm:{results['g_norm']:.4e}") + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + 
return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") + parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon 
optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + parser.add_argument("--sharpness_hvp_microbatches", type=int, default=1, + help="How many micro-batches to average for HVP loss during sharpness analysis. " + "1 is fastest; set to grad_accum_steps for strict scale alignment with a training step objective.") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + FLASH = args.flash + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader(args.input_bin, B, T, ddp_rank, ddp_world_size) + val_loader = None + if args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests 
+ if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + last_training_update = None + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it,base_lr): + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + else: + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + # 
file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + 
device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + sharpness_hvp_microbatches=min(args.sharpness_hvp_microbatches, grad_accum_steps), + last_training_update=last_training_update # Pass the real update captured from training + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
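As the comments around the calculate_comprehensive_sharpness call note, the uncompiled model is used because the Hessian-vector products inside it need double backward, which the torch.compile/DDP wrappers do not support here. A minimal sketch of the double-backward HVP pattern and of one common directional-sharpness quantity, v^T H v / ||v||^2; whether this matches the exact numbers logged as total_sharp etc. depends on the helper's own implementation, and loss/params/vec are placeholders:

    import torch

    def hessian_vector_product(loss, params, vec):
        # Differentiate <grad(loss), vec> with respect to params (double backward).
        grads = torch.autograd.grad(loss, params, create_graph=True)
        dot = sum((g * v).sum() for g, v in zip(grads, vec))
        return torch.autograd.grad(dot, params)

    def directional_sharpness(loss, params, vec):
        # Curvature of the loss along vec: v^T H v / ||v||^2.
        hv = hessian_vector_product(loss, params, vec)
        num = sum((h * v).sum() for h, v in zip(hv, vec))
        den = sum((v * v).sum() for v in vec)
        return (num / den).item()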
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + if ddp: + # we want only the last micro-step to sync grads in a DDP model + # the official way to do this is with model.no_sync(), but that is a + # context manager that bloats the code, so we just toggle this variable + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + # we have to scale the loss to account for gradient accumulation, + # because the gradients just add on each successive backward(). + # addition of gradients corresponds to a SUM in the objective, but + # instead of a SUM we want MEAN, so we scale the loss here + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + norm = torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + # Capture params before optimizer step ONLY when sharpness analysis will run next + # This minimizes memory overhead while ensuring we have the correct update + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
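The micro-batch loop above scales each micro-loss by 1/grad_accum_steps and lets DDP all-reduce gradients only on the last micro-step. A compact sketch of the same pattern using DDP's documented no_sync() context manager rather than toggling require_backward_grad_sync directly; model, batches and the return_logits keyword follow this script's conventions, everything else is illustrative:

    import contextlib

    def accumulate_gradients(model, batches, grad_accum_steps, ddp=True):
        # Sum of the scaled micro-losses equals the mean loss over the effective batch.
        total = 0.0
        for i, (x, y) in enumerate(batches):
            sync = (i == grad_accum_steps - 1)  # only all-reduce on the final micro-step
            ctx = model.no_sync() if (ddp and not sync) else contextlib.nullcontext()
            with ctx:
                _, loss = model(x, y, return_logits=False)
                (loss / grad_accum_steps).backward()
            total += loss.item() / grad_accum_steps
        return total

The parameter snapshot taken immediately below, only on steps that precede a sharpness analysis, is what lets the script recover the realized update direction (the difference between parameters before and after the optimizer step) without paying that memory cost every iteration.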
+ params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + else: + params_before_optimizer_step = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + # Calculate and store the actual training update (only when needed) + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p_before - p.detach() + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.026602 +step:0 train loss:11.019230 +step:1 train loss:11.016842 +step:2 train loss:10.995049 +step:3 train loss:10.968683 +step:4 train loss:10.933098 +step:5 train loss:10.885777 +step:6 train loss:10.832063 +step:7 train loss:10.765014 +step:8 train loss:10.708132 +step:9 train loss:10.621178 +step:10 train loss:10.547351 +step:11 train loss:10.469767 +step:12 train loss:10.372145 +step:13 train loss:10.277294 +step:14 train loss:10.195881 +step:15 train loss:10.104308 +step:16 train loss:10.020298 +step:17 train loss:9.935476 +step:18 train loss:9.862999 +step:19 train loss:9.767285 +step:20 train loss:9.688955 +step:21 train loss:9.616671 +step:22 train loss:9.489807 +step:23 train loss:9.442591 +step:24 train loss:9.342997 +step:25 train loss:9.296923 +step:26 train loss:9.218692 +step:27 train loss:9.129695 +step:28 train loss:9.104359 +step:29 train loss:9.035501 +step:30 train loss:8.986980 +step:31 train loss:8.889496 +step:32 train loss:8.827362 +step:33 train loss:8.776212 +step:34 train loss:8.767909 +step:35 train loss:8.662621 +step:36 train loss:8.624932 +step:37 train loss:8.541353 +step:38 train 
loss:8.528305 +step:39 train loss:8.452913 +step:40 train loss:8.415079 +step:41 train loss:8.325428 +step:42 train loss:8.328585 +step:43 train loss:8.216320 +step:44 train loss:8.165991 +step:45 train loss:8.143925 +step:46 train loss:8.107559 +step:47 train loss:8.075018 +step:48 train loss:7.990629 +step:49 train loss:7.952958 +step:50 train loss:7.857046 +step:51 train loss:7.864212 +step:52 train loss:7.818543 +step:53 train loss:7.777942 +step:54 train loss:7.727575 +step:55 train loss:7.673155 +step:56 train loss:7.606488 +step:57 train loss:7.604925 +step:58 train loss:7.517015 +step:59 train loss:7.510260 +step:60 train loss:7.465492 +step:61 train loss:7.417307 +step:62 train loss:7.371323 +step:63 train loss:7.388793 +step:64 train loss:7.274023 +step:65 train loss:7.284825 +step:66 train loss:7.251123 +step:67 train loss:7.256741 +step:68 train loss:7.190883 +step:69 train loss:7.153601 +step:70 train loss:7.101793 +step:71 train loss:7.076150 +step:72 train loss:7.081464 +step:73 train loss:7.024012 +step:74 train loss:7.024529 +step:75 train loss:6.958201 +step:76 train loss:7.036306 +step:77 train loss:6.961497 +step:78 train loss:6.723775 +step:79 train loss:6.876229 +step:80 train loss:6.840139 +step:81 train loss:6.912534 +step:82 train loss:6.867518 +step:83 train loss:6.822413 +step:84 train loss:6.782495 +step:85 train loss:6.760051 +step:86 train loss:6.740115 +step:87 train loss:6.707001 +step:88 train loss:6.708761 +step:89 train loss:6.657172 +step:90 train loss:6.706742 +step:91 train loss:6.707008 +step:92 train loss:6.706146 +step:93 train loss:6.650307 +step:94 train loss:6.608302 +step:95 train loss:6.553095 +step:96 train loss:6.653161 +step:97 train loss:6.594356 +step:98 train loss:6.576741 +step:99 train loss:6.546688 +step:100 train loss:6.552728 +step:101 train loss:6.486161 +step:102 train loss:6.494625 +step:103 train loss:6.490488 +step:104 train loss:6.508456 +step:105 train loss:6.569762 +step:106 train loss:6.514599 +step:107 train loss:6.460530 +step:108 train loss:6.482088 +step:109 train loss:6.516082 +step:110 train loss:6.445399 +step:111 train loss:6.455543 +step:112 train loss:6.456206 +step:113 train loss:6.409086 +step:114 train loss:6.471372 +step:115 train loss:6.423970 +step:116 train loss:6.403830 +step:117 train loss:6.347845 +step:118 train loss:6.395722 +step:119 train loss:6.350479 +step:120 train loss:6.368654 +step:121 train loss:6.289583 +step:122 train loss:6.382555 +step:123 train loss:6.309586 +step:124 train loss:6.288268 +step:125 train loss:6.270524 +step:126 train loss:6.373686 +step:127 train loss:6.282867 +step:128 train loss:6.333426 +step:129 train loss:6.306376 +step:130 train loss:6.326632 +step:131 train loss:6.286069 +step:132 train loss:6.214277 +step:133 train loss:6.266850 +step:134 train loss:6.253755 +step:135 train loss:6.160698 +step:136 train loss:6.206943 +step:137 train loss:6.210795 +step:138 train loss:6.162172 +step:139 train loss:6.229280 +step:140 train loss:6.146708 +step:141 train loss:6.239738 +step:142 train loss:6.188169 +step:143 train loss:6.198811 +step:144 train loss:6.176382 +step:145 train loss:6.109290 +step:146 train loss:6.124591 +step:147 train loss:6.175075 +step:148 train loss:6.184193 +step:149 train loss:6.145918 +step:150 train loss:6.139909 +step:151 train loss:6.052360 +step:152 train loss:6.090998 +step:153 train loss:6.077403 +step:154 train loss:6.138703 +step:155 train loss:6.128596 +step:156 train loss:6.148760 +step:157 train loss:6.064642 +step:158 train loss:6.058288 
+step:159 train loss:6.083077 +step:160 train loss:6.066203 +step:161 train loss:6.069361 +step:162 train loss:6.045853 +step:163 train loss:6.051742 +step:164 train loss:6.051968 +step:165 train loss:6.070831 +step:166 train loss:6.018605 +step:167 train loss:6.023796 +step:168 train loss:5.994986 +step:169 train loss:5.949564 +step:170 train loss:5.905559 +step:171 train loss:6.033558 +step:172 train loss:5.953552 +step:173 train loss:6.012108 +step:174 train loss:6.000280 +step:175 train loss:5.963877 +step:176 train loss:5.913680 +step:177 train loss:5.959604 +step:178 train loss:5.964729 +step:179 train loss:5.923088 +step:180 train loss:5.895990 +step:181 train loss:5.939868 +step:182 train loss:5.874843 +step:183 train loss:5.962673 +step:184 train loss:5.914914 +step:185 train loss:5.848346 +step:186 train loss:5.988328 +step:187 train loss:5.929878 +step:188 train loss:5.762219 +step:189 train loss:5.902722 +step:190 train loss:5.900949 +step:191 train loss:5.812698 +step:192 train loss:5.758862 +step:193 train loss:5.892518 +step:194 train loss:5.892505 +step:195 train loss:5.889435 +step:196 train loss:5.861485 +step:197 train loss:5.844960 +step:198 train loss:5.796417 +step:199 train loss:5.860443 +step:200 train loss:5.923445 +step:201 train loss:5.841852 +step:202 train loss:5.834545 +step:203 train loss:5.799022 +step:204 train loss:5.821322 +step:205 train loss:5.676598 +step:206 train loss:5.802255 +step:207 train loss:5.786017 +step:208 train loss:5.729475 +step:209 train loss:5.714968 +step:210 train loss:5.722079 +step:211 train loss:5.797303 +step:212 train loss:5.756419 +step:213 train loss:5.758088 +step:214 train loss:5.736752 +step:215 train loss:5.744066 +step:216 train loss:5.698865 +step:217 train loss:5.716702 +step:218 train loss:5.681760 +step:219 train loss:5.651698 +step:220 train loss:5.705551 +step:221 train loss:5.649737 +step:222 train loss:5.683633 +step:223 train loss:5.721787 +step:224 train loss:5.684301 +step:225 train loss:5.617039 +step:226 train loss:5.623337 +step:227 train loss:5.690578 +step:228 train loss:5.652593 +step:229 train loss:5.708606 +step:230 train loss:5.587644 +step:231 train loss:5.632050 +step:232 train loss:5.624059 +step:233 train loss:5.595898 +step:234 train loss:5.596757 +step:235 train loss:5.675519 +step:236 train loss:5.615653 +step:237 train loss:5.661931 +step:238 train loss:5.629935 +step:239 train loss:5.532872 +step:240 train loss:5.616638 +step:241 train loss:5.660561 +step:242 train loss:5.620028 +step:243 train loss:5.540774 +step:244 train loss:5.561943 +step:245 train loss:5.527672 +step:246 train loss:5.538498 +step:247 train loss:5.525511 +step:248 train loss:5.479840 +step:249 train loss:5.549835 +step:250 validation loss:5.523815 +step:250 train loss:5.509972 +step:251 train loss:5.555039 +step:252 train loss:5.488088 +step:253 train loss:5.501012 +step:254 train loss:5.454296 +step:255 train loss:5.510621 +step:256 train loss:5.473822 +step:257 train loss:5.524505 +step:258 train loss:5.436719 +step:259 train loss:5.450497 +step:260 train loss:5.421419 +step:261 train loss:5.406533 +step:262 train loss:5.483159 +step:263 train loss:5.444955 +step:264 train loss:5.413115 +step:265 train loss:5.415565 +step:266 train loss:5.376533 +step:267 train loss:5.414658 +step:268 train loss:5.357946 +step:269 train loss:5.378525 +step:270 train loss:5.397746 +step:271 train loss:5.405222 +step:272 train loss:5.328747 +step:273 train loss:5.417106 +step:274 train loss:5.331805 +step:275 train loss:5.372510 +step:276 
train loss:5.354702 +step:277 train loss:5.323572 +step:278 train loss:5.326995 +step:279 train loss:5.287602 +step:280 train loss:5.346024 +step:281 train loss:5.422062 +step:282 train loss:5.300411 +step:283 train loss:5.321620 +step:284 train loss:5.263832 +step:285 train loss:5.342542 +step:286 train loss:5.285881 +step:287 train loss:5.271619 +step:288 train loss:5.253623 +step:289 train loss:5.292210 +step:290 train loss:5.325268 +step:291 train loss:5.248571 +step:292 train loss:5.304907 +step:293 train loss:5.218023 +step:294 train loss:5.344172 +step:295 train loss:5.230984 +step:296 train loss:5.283719 +step:297 train loss:5.316435 +step:298 train loss:5.207746 +step:299 train loss:5.265951 +step:300 train loss:5.195613 +step:301 train loss:5.219033 +step:302 train loss:5.190166 +step:303 train loss:5.197436 +step:304 train loss:5.220812 +step:305 train loss:5.157456 +step:306 train loss:5.178452 +step:307 train loss:5.199282 +step:308 train loss:5.113056 +step:309 train loss:5.248012 +step:310 train loss:5.231452 +step:311 train loss:5.195815 +step:312 train loss:5.153131 +step:313 train loss:5.184459 +step:314 train loss:5.141093 +step:315 train loss:5.107881 +step:316 train loss:5.112783 +step:317 train loss:5.080570 +step:318 train loss:5.075967 +step:319 train loss:5.157060 +step:320 train loss:5.067794 +step:321 train loss:5.119110 +step:322 train loss:5.107825 +step:323 train loss:5.154482 +step:324 train loss:5.101707 +step:325 train loss:5.120708 +step:326 train loss:5.127900 +step:327 train loss:5.148970 +step:328 train loss:5.085746 +step:329 train loss:5.103806 +step:330 train loss:5.019804 +step:331 train loss:5.061399 +step:332 train loss:5.035865 +step:333 train loss:4.974157 +step:334 train loss:5.054187 +step:335 train loss:5.115327 +step:336 train loss:5.266862 +step:337 train loss:5.110118 +step:338 train loss:5.015534 +step:339 train loss:4.985844 +step:340 train loss:4.980468 +step:341 train loss:4.974026 +step:342 train loss:5.037189 +step:343 train loss:5.022054 +step:344 train loss:4.987701 +step:345 train loss:4.920109 +step:346 train loss:4.994690 +step:347 train loss:4.934951 +step:348 train loss:4.937286 +step:349 train loss:4.850141 +step:350 train loss:4.898592 +step:351 train loss:4.963569 +step:352 train loss:4.928080 +step:353 train loss:4.945147 +step:354 train loss:4.882537 +step:355 train loss:4.946358 +step:356 train loss:4.872849 +step:357 train loss:4.966805 +step:358 train loss:4.973746 +step:359 train loss:4.805803 +step:360 train loss:4.929640 +step:361 train loss:4.920519 +step:362 train loss:4.911867 +step:363 train loss:4.829914 +step:364 train loss:4.985501 +step:365 train loss:4.892184 +step:366 train loss:4.870543 +step:367 train loss:4.881045 +step:368 train loss:4.838735 +step:369 train loss:4.856210 +step:370 train loss:4.876310 +step:371 train loss:4.836942 +step:372 train loss:4.902076 +step:373 train loss:4.829967 +step:374 train loss:4.850405 +step:375 train loss:4.864849 +step:376 train loss:4.871071 +step:377 train loss:4.730156 +step:378 train loss:4.810321 +step:379 train loss:4.854679 +step:380 train loss:4.767572 +step:381 train loss:4.818666 +step:382 train loss:4.835151 +step:383 train loss:4.798695 +step:384 train loss:4.790037 +step:385 train loss:4.769289 +step:386 train loss:4.813838 +step:387 train loss:4.800985 +step:388 train loss:4.764042 +step:389 train loss:4.789448 +step:390 train loss:4.770463 +step:391 train loss:4.783862 +step:392 train loss:4.754549 +step:393 train loss:4.740921 +step:394 train 
loss:4.799317 +step:395 train loss:4.705197 +step:396 train loss:4.698245 +step:397 train loss:4.742816 +step:398 train loss:4.735626 +step:399 train loss:4.747720 +step:400 train loss:4.675322 +step:401 train loss:4.796302 +step:402 train loss:4.701516 +step:403 train loss:4.717091 +step:404 train loss:4.702170 +step:405 train loss:4.688316 +step:406 train loss:4.770242 +step:407 train loss:4.704855 +step:408 train loss:4.807652 +step:409 train loss:4.712835 +step:410 train loss:4.667515 +step:411 train loss:4.664600 +step:412 train loss:4.753348 +step:413 train loss:4.639777 +step:414 train loss:4.738225 +step:415 train loss:4.692636 +step:416 train loss:4.712141 +step:417 train loss:4.718272 +step:418 train loss:4.661911 +step:419 train loss:4.655803 +step:420 train loss:4.624735 +step:421 train loss:4.639786 +step:422 train loss:4.615492 +step:423 train loss:4.636136 +step:424 train loss:4.595381 +step:425 train loss:4.690656 +step:426 train loss:4.655737 +step:427 train loss:4.604108 +step:428 train loss:4.671030 +step:429 train loss:4.556620 +step:430 train loss:4.622741 +step:431 train loss:4.621149 +step:432 train loss:4.648707 +step:433 train loss:4.640969 +step:434 train loss:4.595851 +step:435 train loss:4.668060 +step:436 train loss:4.685449 +step:437 train loss:4.644533 +step:438 train loss:4.613645 +step:439 train loss:4.574376 +step:440 train loss:4.624038 +step:441 train loss:4.545945 +step:442 train loss:4.553015 +step:443 train loss:4.563318 +step:444 train loss:4.637584 +step:445 train loss:4.606109 +step:446 train loss:4.573361 +step:447 train loss:4.557957 +step:448 train loss:4.641887 +step:449 train loss:4.584832 +step:450 train loss:4.586370 +step:451 train loss:4.571699 +step:452 train loss:4.664845 +step:453 train loss:4.596249 +step:454 train loss:4.530532 +step:455 train loss:4.580299 +step:456 train loss:4.541472 +step:457 train loss:4.535464 +step:458 train loss:4.569167 +step:459 train loss:4.525766 +step:460 train loss:4.634349 +step:461 train loss:4.549240 +step:462 train loss:4.462392 +step:463 train loss:4.532834 +step:464 train loss:4.592820 +step:465 train loss:4.549357 +step:466 train loss:4.561517 +step:467 train loss:4.503820 +step:468 train loss:4.565333 +step:469 train loss:4.529200 +step:470 train loss:4.494532 +step:471 train loss:4.557349 +step:472 train loss:4.475357 +step:473 train loss:4.555496 +step:474 train loss:4.511352 +step:475 train loss:4.577300 +step:476 train loss:4.544646 +step:477 train loss:4.451015 +step:478 train loss:4.514672 +step:479 train loss:4.484235 +step:480 train loss:4.509226 +step:481 train loss:4.565111 +step:482 train loss:4.435963 +step:483 train loss:4.523386 +step:484 train loss:4.480216 +step:485 train loss:4.432480 +step:486 train loss:4.479468 +step:487 train loss:4.480197 +step:488 train loss:4.488760 +step:489 train loss:4.483174 +step:490 train loss:4.442190 +step:491 train loss:4.489111 +step:492 train loss:4.483482 +step:493 train loss:4.463367 +step:494 train loss:4.491993 +step:495 train loss:4.408793 +step:496 train loss:4.506088 +step:497 train loss:4.398276 +step:498 train loss:4.517197 +step:499 train loss:4.493120 +step:500 validation loss:4.437368 total_sharp:2.3419e-03 L1_sharp:3.6775e-03 L2_sharp:3.7500e-04 L3_sharp:2.4668e-04 L4_sharp:1.5486e-04 L5_sharp:2.1108e-04 L6_sharp:2.7368e-04 L7_sharp:2.4979e-04 L8_sharp:2.1346e-04 L9_sharp:1.6118e-04 L10_sharp:1.3714e-04 L11_sharp:1.0211e-04 L12_sharp:8.8725e-05 total_fnorm:7.3317e+00 total_l1_linf:5.5684e+04 total_spectral:7.3317e+00 
L1_fnorm:2.1896e+00 L2_fnorm:2.0189e+00 L3_fnorm:2.0238e+00 L4_fnorm:2.0111e+00 L5_fnorm:2.0764e+00 L6_fnorm:2.1096e+00 L7_fnorm:2.1205e+00 L8_fnorm:2.1288e+00 L9_fnorm:2.1121e+00 L10_fnorm:2.1231e+00 L11_fnorm:2.1188e+00 L12_fnorm:2.1309e+00 L1_l1linf:1.5715e+00 L2_l1linf:1.5042e+00 L3_l1linf:1.4906e+00 L4_l1linf:1.4247e+00 L5_l1linf:1.4267e+00 L6_l1linf:1.4199e+00 L7_l1linf:1.4315e+00 L8_l1linf:1.4384e+00 L9_l1linf:1.4494e+00 L10_l1linf:1.4409e+00 L11_l1linf:1.4437e+00 L12_l1linf:1.4686e+00 L1_spectral:4.3017e-02 L2_spectral:4.3060e-02 L3_spectral:4.3090e-02 L4_spectral:4.3028e-02 L5_spectral:4.3030e-02 L6_spectral:4.3041e-02 L7_spectral:4.3042e-02 L8_spectral:4.3012e-02 L9_spectral:4.3011e-02 L10_spectral:4.3009e-02 L11_spectral:4.3015e-02 L12_spectral:4.3013e-02 ip_v_neg_g:6.4348e-02 cos_v_neg_g:1.1437e-02 v_norm:7.3317e+00 g_norm:7.6737e-01 hv_norm:8.7050e-01 cos_v_hv:1.9724e-02 hg_norm:5.2040e+00 cos_g_hg:6.2262e-01 v_par:8.2044e-03 v_perp:7.3317e+00 L1_cos_v_neg_g:1.8462e-02 L1_v_norm:2.1896e+00 L2_cos_v_neg_g:2.3491e-02 L2_v_norm:2.0189e+00 L3_cos_v_neg_g:2.0599e-02 L3_v_norm:2.0238e+00 L4_cos_v_neg_g:1.8111e-02 L4_v_norm:2.0111e+00 L5_cos_v_neg_g:1.8820e-02 L5_v_norm:2.0764e+00 L6_cos_v_neg_g:2.0892e-02 L6_v_norm:2.1096e+00 L7_cos_v_neg_g:1.9592e-02 L7_v_norm:2.1205e+00 L8_cos_v_neg_g:1.8776e-02 L8_v_norm:2.1288e+00 L9_cos_v_neg_g:1.7138e-02 L9_v_norm:2.1121e+00 L10_cos_v_neg_g:1.6064e-02 L10_v_norm:2.1231e+00 L11_cos_v_neg_g:1.2895e-02 L11_v_norm:2.1188e+00 L12_cos_v_neg_g:9.6035e-03 L12_v_norm:2.1309e+00 +step:500 train loss:4.489055 +step:501 train loss:4.483723 +step:502 train loss:4.488398 +step:503 train loss:4.436813 +step:504 train loss:4.518231 +step:505 train loss:4.452042 +step:506 train loss:4.442488 +step:507 train loss:4.439112 +step:508 train loss:4.477708 +step:509 train loss:4.463940 +step:510 train loss:4.396981 +step:511 train loss:4.413471 +step:512 train loss:4.404140 +step:513 train loss:4.432369 +step:514 train loss:4.521661 +step:515 train loss:4.436428 +step:516 train loss:4.517505 +step:517 train loss:4.418629 +step:518 train loss:4.426712 +step:519 train loss:4.461199 +step:520 train loss:4.423500 +step:521 train loss:4.426723 +step:522 train loss:4.443167 +step:523 train loss:4.467506 +step:524 train loss:4.377271 +step:525 train loss:4.390077 +step:526 train loss:4.443403 +step:527 train loss:4.408778 +step:528 train loss:4.415055 +step:529 train loss:4.461570 +step:530 train loss:4.397170 +step:531 train loss:4.435988 +step:532 train loss:4.384523 +step:533 train loss:4.384351 +step:534 train loss:4.435195 +step:535 train loss:4.440631 +step:536 train loss:4.482036 +step:537 train loss:4.347363 +step:538 train loss:4.339623 +step:539 train loss:4.465250 +step:540 train loss:4.478145 +step:541 train loss:4.385175 +step:542 train loss:4.354811 +step:543 train loss:4.397685 +step:544 train loss:4.407052 +step:545 train loss:4.394018 +step:546 train loss:4.387760 +step:547 train loss:4.391705 +step:548 train loss:4.274125 +step:549 train loss:4.386231 +step:550 train loss:4.375832 +step:551 train loss:4.376411 +step:552 train loss:4.453698 +step:553 train loss:4.426403 +step:554 train loss:4.392992 +step:555 train loss:4.422722 +step:556 train loss:4.376367 +step:557 train loss:4.339277 +step:558 train loss:4.327471 +step:559 train loss:4.374599 +step:560 train loss:4.449271 +step:561 train loss:4.320899 +step:562 train loss:4.311734 +step:563 train loss:4.386419 +step:564 train loss:4.327114 +step:565 train loss:4.374416 +step:566 train loss:4.354819 
+step:567 train loss:4.375188 +step:568 train loss:4.424186 +step:569 train loss:4.373223 +step:570 train loss:4.292404 +step:571 train loss:4.338232 +step:572 train loss:4.294530 +step:573 train loss:4.350414 +step:574 train loss:4.407497 +step:575 train loss:4.319729 +step:576 train loss:4.354228 +step:577 train loss:4.337408 +step:578 train loss:4.349201 +step:579 train loss:4.368579 +step:580 train loss:4.307527 +step:581 train loss:4.363498 +step:582 train loss:4.355890 +step:583 train loss:4.356581 +step:584 train loss:4.329460 +step:585 train loss:4.295768 +step:586 train loss:4.323140 +step:587 train loss:4.392348 +step:588 train loss:4.297305 +step:589 train loss:4.359037 +step:590 train loss:4.384889 +step:591 train loss:4.290686 +step:592 train loss:4.304377 +step:593 train loss:4.300120 +step:594 train loss:4.264189 +step:595 train loss:4.340060 +step:596 train loss:4.321500 +step:597 train loss:4.320705 +step:598 train loss:4.282798 +step:599 train loss:4.318070 +step:600 train loss:4.258442 +step:601 train loss:4.273018 +step:602 train loss:4.290850 +step:603 train loss:4.300101 +step:604 train loss:4.316366 +step:605 train loss:4.336895 +step:606 train loss:4.283623 +step:607 train loss:4.263372 +step:608 train loss:4.282060 +step:609 train loss:4.262081 +step:610 train loss:4.246036 +step:611 train loss:4.268420 +step:612 train loss:4.277333 +step:613 train loss:4.198451 +step:614 train loss:4.257893 +step:615 train loss:4.316385 +step:616 train loss:4.244470 +step:617 train loss:4.289387 +step:618 train loss:4.228994 +step:619 train loss:4.280823 +step:620 train loss:4.316908 +step:621 train loss:4.230052 +step:622 train loss:4.335729 +step:623 train loss:4.296035 +step:624 train loss:4.260523 +step:625 train loss:4.297451 +step:626 train loss:4.261090 +step:627 train loss:4.275384 +step:628 train loss:4.277388 +step:629 train loss:4.202805 +step:630 train loss:4.262407 +step:631 train loss:4.212914 +step:632 train loss:4.234683 +step:633 train loss:4.260346 +step:634 train loss:4.240124 +step:635 train loss:4.208423 +step:636 train loss:4.278463 +step:637 train loss:4.207165 +step:638 train loss:4.153646 +step:639 train loss:4.265876 +step:640 train loss:4.224596 +step:641 train loss:4.236328 +step:642 train loss:4.290268 +step:643 train loss:4.166564 +step:644 train loss:4.275841 +step:645 train loss:4.216033 +step:646 train loss:4.234182 +step:647 train loss:4.239448 +step:648 train loss:4.318014 +step:649 train loss:4.241402 +step:650 train loss:4.242766 +step:651 train loss:4.203205 +step:652 train loss:4.193803 +step:653 train loss:4.194286 +step:654 train loss:4.212738 +step:655 train loss:4.249966 +step:656 train loss:4.209688 +step:657 train loss:4.252794 +step:658 train loss:4.177151 +step:659 train loss:4.275734 +step:660 train loss:4.255520 +step:661 train loss:4.283406 +step:662 train loss:4.271948 +step:663 train loss:4.264124 +step:664 train loss:4.163496 +step:665 train loss:4.179641 +step:666 train loss:4.192325 +step:667 train loss:4.233069 +step:668 train loss:4.236113 +step:669 train loss:4.205164 +step:670 train loss:4.237762 +step:671 train loss:4.197436 +step:672 train loss:4.169070 +step:673 train loss:4.255929 +step:674 train loss:4.215593 +step:675 train loss:4.157157 +step:676 train loss:4.247635 +step:677 train loss:4.196477 +step:678 train loss:4.169695 +step:679 train loss:4.210481 +step:680 train loss:4.182174 +step:681 train loss:4.223158 +step:682 train loss:4.126608 +step:683 train loss:4.205385 +step:684 train loss:4.256568 +step:685 
train loss:4.193690 +step:686 train loss:4.252976 +step:687 train loss:4.231591 +step:688 train loss:4.158981 +step:689 train loss:4.173274 +step:690 train loss:4.150754 +step:691 train loss:4.192000 +step:692 train loss:4.192683 +step:693 train loss:4.175825 +step:694 train loss:4.196448 +step:695 train loss:4.151792 +step:696 train loss:4.096209 +step:697 train loss:4.238354 +step:698 train loss:4.125113 +step:699 train loss:4.166214 +step:700 train loss:4.200414 +step:701 train loss:4.121176 +step:702 train loss:4.160751 +step:703 train loss:4.140862 +step:704 train loss:4.080855 +step:705 train loss:4.177415 +step:706 train loss:4.052473 +step:707 train loss:4.110334 +step:708 train loss:4.204033 +step:709 train loss:4.162207 +step:710 train loss:4.159882 +step:711 train loss:4.149593 +step:712 train loss:4.144830 +step:713 train loss:4.109017 +step:714 train loss:4.179273 +step:715 train loss:4.064650 +step:716 train loss:4.239542 +step:717 train loss:4.123168 +step:718 train loss:4.198946 +step:719 train loss:4.171121 +step:720 train loss:4.130527 +step:721 train loss:4.202412 +step:722 train loss:4.141413 +step:723 train loss:4.179647 +step:724 train loss:4.187094 +step:725 train loss:4.095862 +step:726 train loss:4.118610 +step:727 train loss:4.152877 +step:728 train loss:4.130697 +step:729 train loss:4.117359 +step:730 train loss:4.174048 +step:731 train loss:4.215283 +step:732 train loss:4.175655 +step:733 train loss:4.149402 +step:734 train loss:4.163799 +step:735 train loss:4.239971 +step:736 train loss:4.136002 +step:737 train loss:4.141279 +step:738 train loss:4.192267 +step:739 train loss:4.112563 +step:740 train loss:4.166831 +step:741 train loss:4.253991 +step:742 train loss:4.133284 +step:743 train loss:4.114859 +step:744 train loss:4.135817 +step:745 train loss:4.052948 +step:746 train loss:4.112383 +step:747 train loss:4.128873 +step:748 train loss:4.115289 +step:749 train loss:4.147425 +step:750 validation loss:4.078153 +step:750 train loss:4.073788 +step:751 train loss:4.145113 +step:752 train loss:4.067729 +step:753 train loss:4.131545 +step:754 train loss:4.124329 +step:755 train loss:4.191380 +step:756 train loss:4.154409 +step:757 train loss:4.213683 +step:758 train loss:4.119862 +step:759 train loss:4.127343 +step:760 train loss:4.087887 +step:761 train loss:4.122102 +step:762 train loss:4.089477 +step:763 train loss:4.094728 +step:764 train loss:4.084894 +step:765 train loss:4.090030 +step:766 train loss:4.155619 +step:767 train loss:4.284857 +step:768 train loss:4.118707 +step:769 train loss:4.132616 +step:770 train loss:4.183265 +step:771 train loss:4.237845 +step:772 train loss:4.146807 +step:773 train loss:4.085426 +step:774 train loss:4.111135 +step:775 train loss:4.103884 +step:776 train loss:4.128996 +step:777 train loss:4.094871 +step:778 train loss:4.045309 +step:779 train loss:4.079671 +step:780 train loss:4.141860 +step:781 train loss:4.080627 +step:782 train loss:4.098072 +step:783 train loss:4.074716 +step:784 train loss:4.080390 +step:785 train loss:4.075516 +step:786 train loss:4.075004 +step:787 train loss:4.024874 +step:788 train loss:4.113203 +step:789 train loss:4.089170 +step:790 train loss:4.062627 +step:791 train loss:4.133430 +step:792 train loss:4.166065 +step:793 train loss:4.122806 +step:794 train loss:4.106853 +step:795 train loss:4.076246 +step:796 train loss:4.375081 +step:797 train loss:4.086089 +step:798 train loss:4.081967 +step:799 train loss:4.086510 +step:800 train loss:4.158555 +step:801 train loss:4.073812 +step:802 train 
loss:4.219740 +step:803 train loss:4.100093 +step:804 train loss:4.051765 +step:805 train loss:4.112286 +step:806 train loss:4.059684 +step:807 train loss:4.074108 +step:808 train loss:4.084969 +step:809 train loss:4.050708 +step:810 train loss:4.034783 +step:811 train loss:4.114230 +step:812 train loss:4.077749 +step:813 train loss:4.098079 +step:814 train loss:4.170286 +step:815 train loss:4.124863 +step:816 train loss:4.049701 +step:817 train loss:4.103784 +step:818 train loss:4.064847 +step:819 train loss:4.057217 +step:820 train loss:4.053211 +step:821 train loss:4.017838 +step:822 train loss:4.017449 +step:823 train loss:4.100307 +step:824 train loss:3.999857 +step:825 train loss:3.992190 +step:826 train loss:4.060775 +step:827 train loss:3.960773 +step:828 train loss:4.036592 +step:829 train loss:4.038446 +step:830 train loss:4.043599 +step:831 train loss:4.087218 +step:832 train loss:4.126682 +step:833 train loss:4.086742 +step:834 train loss:4.077688 +step:835 train loss:4.039445 +step:836 train loss:4.044899 +step:837 train loss:4.035289 +step:838 train loss:4.016366 +step:839 train loss:4.024003 +step:840 train loss:4.057091 +step:841 train loss:4.056367 +step:842 train loss:4.049126 +step:843 train loss:4.056326 +step:844 train loss:4.008350 +step:845 train loss:3.997601 +step:846 train loss:4.099724 +step:847 train loss:4.066889 +step:848 train loss:4.020936 +step:849 train loss:4.044546 +step:850 train loss:4.071160 +step:851 train loss:4.025957 +step:852 train loss:4.121360 +step:853 train loss:4.008657 +step:854 train loss:4.036890 +step:855 train loss:4.045507 +step:856 train loss:4.000508 +step:857 train loss:4.051320 +step:858 train loss:4.080930 +step:859 train loss:3.983250 +step:860 train loss:4.013064 +step:861 train loss:4.059271 +step:862 train loss:4.000508 +step:863 train loss:4.009342 +step:864 train loss:3.995455 +step:865 train loss:4.018122 +step:866 train loss:4.045135 +step:867 train loss:4.175035 +step:868 train loss:4.014534 +step:869 train loss:4.030530 +step:870 train loss:3.969755 +step:871 train loss:3.968455 +step:872 train loss:4.046268 +step:873 train loss:4.011401 +step:874 train loss:4.036581 +step:875 train loss:3.961965 +step:876 train loss:4.041189 +step:877 train loss:3.986070 +step:878 train loss:4.086269 +step:879 train loss:3.974730 +step:880 train loss:4.092977 +step:881 train loss:4.006375 +step:882 train loss:3.974873 +step:883 train loss:4.027119 +step:884 train loss:4.048497 +step:885 train loss:3.986591 +step:886 train loss:4.003667 +step:887 train loss:4.006945 +step:888 train loss:4.120807 +step:889 train loss:4.048384 +step:890 train loss:3.991861 +step:891 train loss:3.950947 +step:892 train loss:3.947635 +step:893 train loss:4.020017 +step:894 train loss:3.979447 +step:895 train loss:3.966125 +step:896 train loss:4.051744 +step:897 train loss:3.980666 +step:898 train loss:4.007622 +step:899 train loss:4.010964 +step:900 train loss:4.039936 +step:901 train loss:3.968346 +step:902 train loss:4.001377 +step:903 train loss:4.111106 +step:904 train loss:4.115350 +step:905 train loss:3.984630 +step:906 train loss:4.002529 +step:907 train loss:4.032111 +step:908 train loss:4.041161 +step:909 train loss:3.981278 +step:910 train loss:4.033139 +step:911 train loss:4.145383 +step:912 train loss:3.946864 +step:913 train loss:4.009315 +step:914 train loss:3.959220 +step:915 train loss:4.003520 +step:916 train loss:4.049969 +step:917 train loss:4.011939 +step:918 train loss:4.085815 +step:919 train loss:4.177729 +step:920 train loss:3.916944 
+step:921 train loss:4.038692 +step:922 train loss:4.009546 +step:923 train loss:3.928317 +step:924 train loss:3.976981 +step:925 train loss:3.940826 +step:926 train loss:4.040431 +step:927 train loss:3.938174 +step:928 train loss:4.024474 +step:929 train loss:3.994740 +step:930 train loss:3.997993 +step:931 train loss:4.033612 +step:932 train loss:3.969831 +step:933 train loss:4.017156 +step:934 train loss:4.053066 +step:935 train loss:4.043758 +step:936 train loss:4.008764 +step:937 train loss:4.008831 +step:938 train loss:3.998677 +step:939 train loss:3.906706 +step:940 train loss:4.004128 +step:941 train loss:3.947337 +step:942 train loss:3.933424 +step:943 train loss:4.033513 +step:944 train loss:3.988142 +step:945 train loss:3.991151 +step:946 train loss:4.000860 +step:947 train loss:4.172407 +step:948 train loss:3.962186 +step:949 train loss:4.004351 +step:950 train loss:3.940056 +step:951 train loss:3.984201 +step:952 train loss:4.026403 +step:953 train loss:3.955544 +step:954 train loss:4.002507 +step:955 train loss:3.935706 +step:956 train loss:3.969243 +step:957 train loss:3.964405 +step:958 train loss:4.038891 +step:959 train loss:3.972122 +step:960 train loss:4.070141 +step:961 train loss:4.008927 +step:962 train loss:3.973046 +step:963 train loss:3.961059 +step:964 train loss:3.985316 +step:965 train loss:3.907434 +step:966 train loss:3.926329 +step:967 train loss:3.986161 +step:968 train loss:3.991288 +step:969 train loss:3.944281 +step:970 train loss:3.989874 +step:971 train loss:3.977046 +step:972 train loss:3.893790 +step:973 train loss:4.000118 +step:974 train loss:3.932792 +step:975 train loss:4.031184 +step:976 train loss:3.981311 +step:977 train loss:3.972477 +step:978 train loss:3.969534 +step:979 train loss:3.957953 +step:980 train loss:3.961322 +step:981 train loss:3.937693 +step:982 train loss:3.954968 +step:983 train loss:3.964761 +step:984 train loss:3.984659 +step:985 train loss:3.951787 +step:986 train loss:3.982749 +step:987 train loss:4.016611 +step:988 train loss:3.999635 +step:989 train loss:3.965338 +step:990 train loss:3.960924 +step:991 train loss:3.874009 +step:992 train loss:3.948725 +step:993 train loss:3.974535 +step:994 train loss:3.908301 +step:995 train loss:3.924746 +step:996 train loss:3.969524 +step:997 train loss:3.932674 +step:998 train loss:3.928455 +step:999 train loss:3.969291 +step:1000 validation loss:3.892653 total_sharp:2.8612e-04 L1_sharp:1.0683e-04 L2_sharp:2.7753e-05 L3_sharp:5.7923e-05 L4_sharp:3.1952e-05 L5_sharp:3.0662e-05 L6_sharp:4.9058e-05 L7_sharp:5.7909e-05 L8_sharp:5.0201e-05 L9_sharp:4.0001e-05 L10_sharp:3.1617e-05 L11_sharp:2.9737e-05 L12_sharp:2.8016e-05 total_fnorm:1.0335e+01 total_l1_linf:7.7827e+04 total_spectral:1.0335e+01 L1_fnorm:2.9918e+00 L2_fnorm:2.7931e+00 L3_fnorm:2.7991e+00 L4_fnorm:2.8835e+00 L5_fnorm:2.9689e+00 L6_fnorm:2.9919e+00 L7_fnorm:3.0107e+00 L8_fnorm:3.0121e+00 L9_fnorm:3.0098e+00 L10_fnorm:3.0113e+00 L11_fnorm:3.0107e+00 L12_fnorm:3.0092e+00 L1_l1linf:2.1271e+00 L2_l1linf:1.9711e+00 L3_l1linf:1.9741e+00 L4_l1linf:2.0379e+00 L5_l1linf:2.0416e+00 L6_l1linf:2.0494e+00 L7_l1linf:2.0460e+00 L8_l1linf:2.0375e+00 L9_l1linf:2.0236e+00 L10_l1linf:2.0009e+00 L11_l1linf:2.0017e+00 L12_l1linf:2.0024e+00 L1_spectral:6.0297e-02 L2_spectral:6.0213e-02 L3_spectral:6.0230e-02 L4_spectral:6.0231e-02 L5_spectral:6.0207e-02 L6_spectral:6.0204e-02 L7_spectral:6.0227e-02 L8_spectral:6.0224e-02 L9_spectral:6.0223e-02 L10_spectral:6.0215e-02 L11_spectral:6.0228e-02 L12_spectral:6.0220e-02 ip_v_neg_g:1.4625e-02 
cos_v_neg_g:3.7841e-03 v_norm:1.0335e+01 g_norm:3.7397e-01 hv_norm:1.1830e-01 cos_v_hv:2.4995e-02 hg_norm:2.4301e-01 cos_g_hg:3.0138e-01 v_par:1.7515e-03 v_perp:1.0335e+01 L1_cos_v_neg_g:9.4148e-03 L1_v_norm:2.9918e+00 L2_cos_v_neg_g:8.9335e-03 L2_v_norm:2.7931e+00 L3_cos_v_neg_g:1.1464e-02 L3_v_norm:2.7991e+00 L4_cos_v_neg_g:9.2236e-03 L4_v_norm:2.8835e+00 L5_cos_v_neg_g:7.2226e-03 L5_v_norm:2.9689e+00 L6_cos_v_neg_g:8.0277e-03 L6_v_norm:2.9919e+00 L7_cos_v_neg_g:7.5166e-03 L7_v_norm:3.0107e+00 L8_cos_v_neg_g:6.6242e-03 L8_v_norm:3.0121e+00 L9_cos_v_neg_g:7.2501e-03 L9_v_norm:3.0098e+00 L10_cos_v_neg_g:6.8666e-03 L10_v_norm:3.0113e+00 L11_cos_v_neg_g:6.5674e-03 L11_v_norm:3.0107e+00 L12_cos_v_neg_g:5.9259e-03 L12_v_norm:3.0092e+00 +step:1000 train loss:3.973681 +step:1001 train loss:3.976792 +step:1002 train loss:3.971634 +step:1003 train loss:3.939967 +step:1004 train loss:3.917088 +step:1005 train loss:3.929641 +step:1006 train loss:4.015341 +step:1007 train loss:3.945551 +step:1008 train loss:3.940591 +step:1009 train loss:4.003099 +step:1010 train loss:3.961849 +step:1011 train loss:3.991208 +step:1012 train loss:3.935815 +step:1013 train loss:3.906040 +step:1014 train loss:3.914023 +step:1015 train loss:3.948249 +step:1016 train loss:3.966354 +step:1017 train loss:3.917596 +step:1018 train loss:3.973416 +step:1019 train loss:3.910897 +step:1020 train loss:3.920586 +step:1021 train loss:4.017889 +step:1022 train loss:3.917797 +step:1023 train loss:3.923684 +step:1024 train loss:4.009377 +step:1025 train loss:3.968543 +step:1026 train loss:3.910881 +step:1027 train loss:3.942959 +step:1028 train loss:3.949897 +step:1029 train loss:3.899129 +step:1030 train loss:3.990220 +step:1031 train loss:3.974174 +step:1032 train loss:3.942341 +step:1033 train loss:3.907715 +step:1034 train loss:3.965391 +step:1035 train loss:3.969933 +step:1036 train loss:3.888093 +step:1037 train loss:3.944204 +step:1038 train loss:3.964829 +step:1039 train loss:4.122180 +step:1040 train loss:3.945382 +step:1041 train loss:3.919092 +step:1042 train loss:3.945961 +step:1043 train loss:3.949084 +step:1044 train loss:3.933234 +step:1045 train loss:3.949723 +step:1046 train loss:3.886969 +step:1047 train loss:3.922342 +step:1048 train loss:3.920719 +step:1049 train loss:3.973531 +step:1050 train loss:3.940572 +step:1051 train loss:3.906023 +step:1052 train loss:4.022185 +step:1053 train loss:3.916065 +step:1054 train loss:3.902151 +step:1055 train loss:3.977558 +step:1056 train loss:3.917125 +step:1057 train loss:3.821465 +step:1058 train loss:3.917194 +step:1059 train loss:3.904283 +step:1060 train loss:3.906640 +step:1061 train loss:3.952878 +step:1062 train loss:3.914505 +step:1063 train loss:3.921960 +step:1064 train loss:3.913903 +step:1065 train loss:3.924056 +step:1066 train loss:3.897749 +step:1067 train loss:3.930492 +step:1068 train loss:3.882917 +step:1069 train loss:3.909421 +step:1070 train loss:3.920103 +step:1071 train loss:3.934679 +step:1072 train loss:3.957690 +step:1073 train loss:3.876651 +step:1074 train loss:3.892955 +step:1075 train loss:3.885334 +step:1076 train loss:3.958800 +step:1077 train loss:3.886351 +step:1078 train loss:3.942131 +step:1079 train loss:3.982605 +step:1080 train loss:3.860815 +step:1081 train loss:3.933554 +step:1082 train loss:3.924637 +step:1083 train loss:3.886428 +step:1084 train loss:3.872710 +step:1085 train loss:3.933205 +step:1086 train loss:3.910254 +step:1087 train loss:3.906569 +step:1088 train loss:3.904312 +step:1089 train loss:3.914136 +step:1090 train 
loss:3.862235 +step:1091 train loss:3.854696 +step:1092 train loss:3.958001 +step:1093 train loss:3.843251 +step:1094 train loss:3.903891 +step:1095 train loss:3.953204 +step:1096 train loss:3.882600 +step:1097 train loss:3.884700 +step:1098 train loss:3.855486 +step:1099 train loss:3.907553 +step:1100 train loss:3.961354 +step:1101 train loss:3.944027 +step:1102 train loss:3.956503 +step:1103 train loss:3.876382 +step:1104 train loss:3.912420 +step:1105 train loss:3.962283 +step:1106 train loss:3.902854 +step:1107 train loss:4.029941 +step:1108 train loss:3.961141 +step:1109 train loss:3.939207 +step:1110 train loss:3.887391 +step:1111 train loss:3.937698 +step:1112 train loss:3.841795 +step:1113 train loss:3.842697 +step:1114 train loss:3.828128 +step:1115 train loss:3.868214 +step:1116 train loss:3.925387 +step:1117 train loss:3.967483 +step:1118 train loss:3.982005 +step:1119 train loss:3.894867 +step:1120 train loss:3.928126 +step:1121 train loss:3.903874 +step:1122 train loss:3.885086 +step:1123 train loss:3.994139 +step:1124 train loss:3.872629 +step:1125 train loss:3.882253 +step:1126 train loss:3.849698 +step:1127 train loss:3.879674 +step:1128 train loss:3.876583 +step:1129 train loss:3.930650 +step:1130 train loss:3.854040 +step:1131 train loss:3.945114 +step:1132 train loss:3.888380 +step:1133 train loss:3.903007 +step:1134 train loss:3.876087 +step:1135 train loss:3.917135 +step:1136 train loss:3.938160 +step:1137 train loss:3.865396 +step:1138 train loss:3.937086 +step:1139 train loss:3.884759 +step:1140 train loss:3.963972 +step:1141 train loss:3.921776 +step:1142 train loss:3.854383 +step:1143 train loss:3.932456 +step:1144 train loss:3.957388 +step:1145 train loss:3.901371 +step:1146 train loss:3.858570 +step:1147 train loss:3.873580 +step:1148 train loss:3.903644 +step:1149 train loss:3.950082 +step:1150 train loss:3.953182 +step:1151 train loss:3.962274 +step:1152 train loss:3.863359 +step:1153 train loss:3.864389 +step:1154 train loss:3.849922 +step:1155 train loss:3.950002 +step:1156 train loss:3.852539 +step:1157 train loss:3.881771 +step:1158 train loss:3.930138 +step:1159 train loss:3.933945 +step:1160 train loss:3.858229 +step:1161 train loss:3.947310 +step:1162 train loss:3.887512 +step:1163 train loss:3.876321 +step:1164 train loss:3.786047 +step:1165 train loss:3.919631 +step:1166 train loss:3.845548 +step:1167 train loss:3.854792 +step:1168 train loss:3.910501 +step:1169 train loss:3.872411 +step:1170 train loss:3.878778 +step:1171 train loss:3.906542 +step:1172 train loss:3.860405 +step:1173 train loss:3.896949 +step:1174 train loss:3.838354 +step:1175 train loss:3.872010 +step:1176 train loss:3.991508 +step:1177 train loss:3.834540 +step:1178 train loss:3.892143 +step:1179 train loss:3.836440 +step:1180 train loss:3.875844 +step:1181 train loss:3.861855 +step:1182 train loss:3.915957 +step:1183 train loss:3.893891 +step:1184 train loss:3.837286 +step:1185 train loss:3.867381 +step:1186 train loss:3.867058 +step:1187 train loss:3.840040 +step:1188 train loss:3.872105 +step:1189 train loss:3.810322 +step:1190 train loss:3.862179 +step:1191 train loss:3.923661 +step:1192 train loss:3.874750 +step:1193 train loss:3.877256 +step:1194 train loss:3.994590 +step:1195 train loss:3.970061 +step:1196 train loss:3.856743 +step:1197 train loss:3.885309 +step:1198 train loss:3.871730 +step:1199 train loss:3.865421 +step:1200 train loss:3.935485 +step:1201 train loss:3.900818 +step:1202 train loss:3.838961 +step:1203 train loss:3.832926 +step:1204 train loss:3.871190 
+step:1205 train loss:3.894962 +step:1206 train loss:3.823576 +step:1207 train loss:3.906085 +step:1208 train loss:3.879178 +step:1209 train loss:3.813465 +step:1210 train loss:3.908940 +step:1211 train loss:3.851364 +step:1212 train loss:3.881238 +step:1213 train loss:3.820781 +step:1214 train loss:3.892446 +step:1215 train loss:3.862711 +step:1216 train loss:3.872061 +step:1217 train loss:3.807239 +step:1218 train loss:3.881912 +step:1219 train loss:3.821575 +step:1220 train loss:3.852125 +step:1221 train loss:3.863688 +step:1222 train loss:3.914502 +step:1223 train loss:3.886757 +step:1224 train loss:3.855336 +step:1225 train loss:3.903370 +step:1226 train loss:3.846959 +step:1227 train loss:3.849471 +step:1228 train loss:3.860285 +step:1229 train loss:3.826117 +step:1230 train loss:3.826199 +step:1231 train loss:3.878540 +step:1232 train loss:3.836691 +step:1233 train loss:3.825781 +step:1234 train loss:3.914746 +step:1235 train loss:3.882281 +step:1236 train loss:3.796126 +step:1237 train loss:3.901587 +step:1238 train loss:3.846116 +step:1239 train loss:3.886219 +step:1240 train loss:3.811376 +step:1241 train loss:3.822525 +step:1242 train loss:3.856703 +step:1243 train loss:3.803277 +step:1244 train loss:3.932677 +step:1245 train loss:3.934639 +step:1246 train loss:3.868254 +step:1247 train loss:3.845284 +step:1248 train loss:3.874583 +step:1249 train loss:3.798401 +step:1250 validation loss:3.797692 +step:1250 train loss:3.822700 +step:1251 train loss:3.894907 +step:1252 train loss:3.841676 +step:1253 train loss:3.790722 +step:1254 train loss:3.831125 +step:1255 train loss:3.822195 +step:1256 train loss:3.868824 +step:1257 train loss:3.845292 +step:1258 train loss:3.902384 +step:1259 train loss:3.875456 +step:1260 train loss:3.792183 +step:1261 train loss:4.033070 +step:1262 train loss:3.869904 +step:1263 train loss:3.821268 +step:1264 train loss:3.843138 +step:1265 train loss:3.885747 +step:1266 train loss:3.836119 +step:1267 train loss:3.843803 +step:1268 train loss:3.854753 +step:1269 train loss:3.851388 +step:1270 train loss:3.777375 +step:1271 train loss:3.791209 +step:1272 train loss:3.817388 +step:1273 train loss:3.874718 +step:1274 train loss:3.838386 +step:1275 train loss:3.867680 +step:1276 train loss:3.864056 +step:1277 train loss:3.873340 +step:1278 train loss:3.816979 +step:1279 train loss:3.826187 +step:1280 train loss:3.841400 +step:1281 train loss:3.892415 +step:1282 train loss:3.827548 +step:1283 train loss:3.895991 +step:1284 train loss:3.837453 +step:1285 train loss:3.887702 +step:1286 train loss:3.789836 +step:1287 train loss:3.826163 +step:1288 train loss:3.851754 +step:1289 train loss:3.919605 +step:1290 train loss:3.875004 +step:1291 train loss:3.835763 +step:1292 train loss:3.814631 +step:1293 train loss:3.816238 +step:1294 train loss:3.859231 +step:1295 train loss:3.845580 +step:1296 train loss:3.885424 +step:1297 train loss:3.844801 +step:1298 train loss:3.862542 +step:1299 train loss:3.899916 +step:1300 train loss:3.820037 +step:1301 train loss:3.864905 +step:1302 train loss:3.818486 +step:1303 train loss:3.863202 +step:1304 train loss:3.892313 +step:1305 train loss:3.866385 +step:1306 train loss:3.862625 +step:1307 train loss:3.842983 +step:1308 train loss:3.799881 +step:1309 train loss:3.817744 +step:1310 train loss:3.807178 +step:1311 train loss:3.809656 +step:1312 train loss:3.887050 +step:1313 train loss:3.797843 +step:1314 train loss:3.804573 +step:1315 train loss:3.846223 +step:1316 train loss:3.821949 +step:1317 train loss:3.715798 +step:1318 
train loss:3.879027 +step:1319 train loss:3.908194 +step:1320 train loss:3.823208 +step:1321 train loss:3.811281 +step:1322 train loss:3.912563 +step:1323 train loss:3.860215 +step:1324 train loss:3.966150 +step:1325 train loss:3.832141 +step:1326 train loss:3.865952 +step:1327 train loss:3.886947 +step:1328 train loss:3.792650 +step:1329 train loss:3.818777 +step:1330 train loss:3.844494 +step:1331 train loss:3.744969 +step:1332 train loss:3.876220 +step:1333 train loss:3.858285 +step:1334 train loss:3.848482 +step:1335 train loss:3.874699 +step:1336 train loss:3.882606 +step:1337 train loss:3.850271 +step:1338 train loss:3.832413 +step:1339 train loss:3.902891 +step:1340 train loss:3.874735 +step:1341 train loss:3.851324 +step:1342 train loss:3.826074 +step:1343 train loss:3.816288 +step:1344 train loss:3.883874 +step:1345 train loss:3.836564 +step:1346 train loss:3.924287 +step:1347 train loss:3.851173 +step:1348 train loss:3.805136 +step:1349 train loss:3.751666 +step:1350 train loss:3.799541 +step:1351 train loss:3.859693 +step:1352 train loss:3.836389 +step:1353 train loss:3.816749 +step:1354 train loss:3.817906 +step:1355 train loss:3.887683 +step:1356 train loss:3.796603 +step:1357 train loss:3.827731 +step:1358 train loss:3.817236 +step:1359 train loss:3.815269 +step:1360 train loss:3.850895 +step:1361 train loss:3.974701 +step:1362 train loss:3.881448 +step:1363 train loss:3.764854 +step:1364 train loss:3.789136 +step:1365 train loss:3.783008 +step:1366 train loss:3.823475 +step:1367 train loss:3.757637 +step:1368 train loss:3.790981 +step:1369 train loss:3.829427 +step:1370 train loss:3.849831 +step:1371 train loss:3.811406 +step:1372 train loss:3.846894 +step:1373 train loss:3.881035 +step:1374 train loss:3.877573 +step:1375 train loss:3.826096 +step:1376 train loss:3.854592 +step:1377 train loss:3.839821 +step:1378 train loss:3.825606 +step:1379 train loss:3.802243 +step:1380 train loss:3.873204 +step:1381 train loss:3.825962 +step:1382 train loss:3.805398 +step:1383 train loss:3.789751 +step:1384 train loss:3.857761 +step:1385 train loss:3.764361 +step:1386 train loss:3.838874 +step:1387 train loss:3.839034 +step:1388 train loss:3.810218 +step:1389 train loss:3.772147 +step:1390 train loss:3.818117 +step:1391 train loss:3.846861 +step:1392 train loss:3.828672 +step:1393 train loss:3.872643 +step:1394 train loss:3.812476 +step:1395 train loss:3.847643 +step:1396 train loss:3.831634 +step:1397 train loss:3.856331 +step:1398 train loss:3.853137 +step:1399 train loss:3.827477 +step:1400 train loss:3.805726 +step:1401 train loss:3.797873 +step:1402 train loss:3.803494 +step:1403 train loss:3.764204 +step:1404 train loss:3.827005 +step:1405 train loss:3.787012 +step:1406 train loss:3.820275 +step:1407 train loss:3.814604 +step:1408 train loss:3.790761 +step:1409 train loss:3.781295 +step:1410 train loss:3.801027 +step:1411 train loss:3.837593 +step:1412 train loss:3.880573 +step:1413 train loss:3.805978 +step:1414 train loss:3.841908 +step:1415 train loss:3.802764 +step:1416 train loss:3.851001 +step:1417 train loss:3.823086 +step:1418 train loss:3.759013 +step:1419 train loss:3.779852 +step:1420 train loss:3.798365 +step:1421 train loss:3.840017 +step:1422 train loss:3.812723 +step:1423 train loss:3.911522 +step:1424 train loss:3.815056 +step:1425 train loss:3.776044 +step:1426 train loss:3.796810 +step:1427 train loss:3.782853 +step:1428 train loss:3.767075 +step:1429 train loss:3.792030 +step:1430 train loss:3.805857 +step:1431 train loss:3.827751 +step:1432 train loss:3.810698 
+step:1433 train loss:3.792205 +step:1434 train loss:3.768351 +step:1435 train loss:3.758600 +step:1436 train loss:3.840008 +step:1437 train loss:3.763935 +step:1438 train loss:3.755111 +step:1439 train loss:3.752337 +step:1440 train loss:3.786132 +step:1441 train loss:3.868939 +step:1442 train loss:3.824935 +step:1443 train loss:3.755195 +step:1444 train loss:3.765414 +step:1445 train loss:3.757958 +step:1446 train loss:3.801665 +step:1447 train loss:3.813565 +step:1448 train loss:3.776868 +step:1449 train loss:3.800022 +step:1450 train loss:3.813334 +step:1451 train loss:3.747889 +step:1452 train loss:3.801769 +step:1453 train loss:3.796131 +step:1454 train loss:3.777835 +step:1455 train loss:3.727097 +step:1456 train loss:3.794960 +step:1457 train loss:3.735625 +step:1458 train loss:3.870291 +step:1459 train loss:3.797345 +step:1460 train loss:3.761186 +step:1461 train loss:3.822320 +step:1462 train loss:3.833690 +step:1463 train loss:3.787841 +step:1464 train loss:3.777220 +step:1465 train loss:3.766753 +step:1466 train loss:3.732371 +step:1467 train loss:3.871998 +step:1468 train loss:3.756498 +step:1469 train loss:3.828143 +step:1470 train loss:3.767848 +step:1471 train loss:3.762459 +step:1472 train loss:3.769882 +step:1473 train loss:3.761797 +step:1474 train loss:3.713309 +step:1475 train loss:3.769102 +step:1476 train loss:3.853165 +step:1477 train loss:3.801643 +step:1478 train loss:3.736952 +step:1479 train loss:3.773015 +step:1480 train loss:3.764959 +step:1481 train loss:3.741194 +step:1482 train loss:3.807246 +step:1483 train loss:3.793941 +step:1484 train loss:3.824909 +step:1485 train loss:3.843221 +step:1486 train loss:3.769197 +step:1487 train loss:3.764700 +step:1488 train loss:3.762997 +step:1489 train loss:3.761180 +step:1490 train loss:3.814130 +step:1491 train loss:3.816490 +step:1492 train loss:3.800186 +step:1493 train loss:3.750293 +step:1494 train loss:3.783502 +step:1495 train loss:3.775118 +step:1496 train loss:3.734265 +step:1497 train loss:3.810660 +step:1498 train loss:3.714216 +step:1499 train loss:3.761628 +step:1500 validation loss:3.725737 total_sharp:1.9739e-04 L1_sharp:1.5940e-04 L2_sharp:2.2678e-05 L3_sharp:3.7527e-05 L4_sharp:1.6779e-05 L5_sharp:1.8604e-05 L6_sharp:2.8865e-05 L7_sharp:4.0906e-05 L8_sharp:3.5278e-05 L9_sharp:2.9681e-05 L10_sharp:2.0981e-05 L11_sharp:1.6904e-05 L12_sharp:4.2966e-05 total_fnorm:1.0337e+01 total_l1_linf:7.7810e+04 total_spectral:1.0337e+01 L1_fnorm:2.9505e+00 L2_fnorm:2.7869e+00 L3_fnorm:2.7940e+00 L4_fnorm:2.9000e+00 L5_fnorm:2.9794e+00 L6_fnorm:2.9935e+00 L7_fnorm:3.0145e+00 L8_fnorm:3.0161e+00 L9_fnorm:3.0165e+00 L10_fnorm:3.0177e+00 L11_fnorm:3.0130e+00 L12_fnorm:3.0153e+00 L1_l1linf:2.0870e+00 L2_l1linf:1.9651e+00 L3_l1linf:1.9854e+00 L4_l1linf:2.0696e+00 L5_l1linf:2.0741e+00 L6_l1linf:2.0491e+00 L7_l1linf:2.0411e+00 L8_l1linf:2.0278e+00 L9_l1linf:2.0250e+00 L10_l1linf:2.0161e+00 L11_l1linf:2.0022e+00 L12_l1linf:2.0196e+00 L1_spectral:6.0281e-02 L2_spectral:6.0203e-02 L3_spectral:6.0260e-02 L4_spectral:6.0222e-02 L5_spectral:6.0248e-02 L6_spectral:6.0225e-02 L7_spectral:6.0229e-02 L8_spectral:6.0226e-02 L9_spectral:6.0219e-02 L10_spectral:6.0215e-02 L11_spectral:6.0225e-02 L12_spectral:6.0220e-02 ip_v_neg_g:9.9793e-03 cos_v_neg_g:2.5930e-03 v_norm:1.0337e+01 g_norm:3.7231e-01 hv_norm:1.0509e-01 cos_v_hv:1.9416e-02 hg_norm:3.7476e-01 cos_g_hg:3.2606e-01 v_par:1.6789e-03 v_perp:1.0337e+01 L1_cos_v_neg_g:1.0536e-02 L1_v_norm:2.9505e+00 L2_cos_v_neg_g:4.7252e-03 L2_v_norm:2.7869e+00 L3_cos_v_neg_g:5.6056e-03 
L3_v_norm:2.7940e+00 L4_cos_v_neg_g:4.5591e-03 L4_v_norm:2.9000e+00 L5_cos_v_neg_g:4.0639e-03 L5_v_norm:2.9794e+00 L6_cos_v_neg_g:4.9106e-03 L6_v_norm:2.9935e+00 L7_cos_v_neg_g:7.0238e-03 L7_v_norm:3.0145e+00 L8_cos_v_neg_g:6.3277e-03 L8_v_norm:3.0161e+00 L9_cos_v_neg_g:6.7750e-03 L9_v_norm:3.0165e+00 L10_cos_v_neg_g:6.0184e-03 L10_v_norm:3.0177e+00 L11_cos_v_neg_g:4.9236e-03 L11_v_norm:3.0130e+00 L12_cos_v_neg_g:5.0517e-03 L12_v_norm:3.0153e+00 +step:1500 train loss:3.750366 +step:1501 train loss:3.786495 +step:1502 train loss:3.714399 +step:1503 train loss:3.775463 +step:1504 train loss:3.743754 +step:1505 train loss:3.719720 +step:1506 train loss:3.706167 +step:1507 train loss:3.717763 +step:1508 train loss:3.738987 +step:1509 train loss:3.780457 +step:1510 train loss:3.730510 +step:1511 train loss:3.758500 +step:1512 train loss:3.732643 +step:1513 train loss:3.798175 +step:1514 train loss:3.758113 +step:1515 train loss:3.809525 +step:1516 train loss:3.745944 +step:1517 train loss:3.746386 +step:1518 train loss:3.834079 +step:1519 train loss:3.790258 +step:1520 train loss:3.840703 +step:1521 train loss:3.732559 +step:1522 train loss:3.795729 +step:1523 train loss:3.791611 +step:1524 train loss:3.721786 +step:1525 train loss:3.800930 +step:1526 train loss:3.719045 +step:1527 train loss:3.767545 +step:1528 train loss:3.830508 +step:1529 train loss:3.776671 +step:1530 train loss:3.824664 +step:1531 train loss:3.741672 +step:1532 train loss:3.817375 +step:1533 train loss:3.784637 +step:1534 train loss:3.740449 +step:1535 train loss:3.784159 +step:1536 train loss:3.817444 +step:1537 train loss:3.762131 +step:1538 train loss:3.782257 +step:1539 train loss:3.763556 +step:1540 train loss:3.786385 +step:1541 train loss:3.744825 +step:1542 train loss:3.836506 +step:1543 train loss:3.869532 +step:1544 train loss:3.731128 +step:1545 train loss:3.716832 +step:1546 train loss:3.761499 +step:1547 train loss:3.744405 +step:1548 train loss:3.780719 +step:1549 train loss:3.704368 +step:1550 train loss:3.830251 +step:1551 train loss:3.758607 +step:1552 train loss:3.788240 +step:1553 train loss:3.799099 +step:1554 train loss:3.807835 +step:1555 train loss:3.761132 +step:1556 train loss:3.746395 +step:1557 train loss:3.756829 +step:1558 train loss:3.782569 +step:1559 train loss:3.746956 +step:1560 train loss:3.824088 +step:1561 train loss:3.797193 +step:1562 train loss:3.687208 +step:1563 train loss:3.660583 +step:1564 train loss:3.806101 +step:1565 train loss:3.777068 +step:1566 train loss:3.799011 +step:1567 train loss:3.792381 +step:1568 train loss:3.750401 +step:1569 train loss:3.741882 +step:1570 train loss:3.763768 +step:1571 train loss:3.729391 +step:1572 train loss:3.735813 +step:1573 train loss:3.781087 +step:1574 train loss:3.740963 +step:1575 train loss:3.760950 +step:1576 train loss:3.719926 +step:1577 train loss:3.745281 +step:1578 train loss:3.729469 +step:1579 train loss:3.802290 +step:1580 train loss:3.762412 +step:1581 train loss:3.795340 +step:1582 train loss:3.802681 +step:1583 train loss:3.766396 +step:1584 train loss:3.691194 +step:1585 train loss:3.770929 +step:1586 train loss:3.746919 +step:1587 train loss:3.758444 +step:1588 train loss:3.744948 +step:1589 train loss:3.790750 +step:1590 train loss:3.705722 +step:1591 train loss:3.752384 +step:1592 train loss:3.713017 +step:1593 train loss:3.746141 +step:1594 train loss:3.749646 +step:1595 train loss:3.748597 +step:1596 train loss:3.750337 +step:1597 train loss:3.681553 +step:1598 train loss:3.778970 +step:1599 train loss:3.792954 
+step:1600 train loss:3.665322 +step:1601 train loss:3.747404 +step:1602 train loss:3.804618 +step:1603 train loss:3.800293 +step:1604 train loss:3.722745 +step:1605 train loss:3.778966 +step:1606 train loss:3.821994 +step:1607 train loss:3.706654 +step:1608 train loss:3.734357 +step:1609 train loss:3.758305 +step:1610 train loss:3.814873 +step:1611 train loss:3.742079 +step:1612 train loss:3.664693 +step:1613 train loss:3.737654 +step:1614 train loss:3.842814 +step:1615 train loss:3.764034 +step:1616 train loss:3.764833 +step:1617 train loss:3.755269 +step:1618 train loss:3.753264 +step:1619 train loss:3.933952 +step:1620 train loss:3.719922 +step:1621 train loss:3.776718 +step:1622 train loss:3.701118 +step:1623 train loss:3.766359 +step:1624 train loss:3.735890 +step:1625 train loss:3.814475 +step:1626 train loss:3.701189 +step:1627 train loss:3.714983 +step:1628 train loss:3.730994 +step:1629 train loss:3.771771 +step:1630 train loss:3.779888 +step:1631 train loss:3.733804 +step:1632 train loss:3.703565 +step:1633 train loss:3.724397 +step:1634 train loss:3.782057 +step:1635 train loss:3.721472 +step:1636 train loss:3.708205 +step:1637 train loss:3.781794 +step:1638 train loss:3.885866 +step:1639 train loss:3.687063 +step:1640 train loss:3.770715 +step:1641 train loss:3.732524 +step:1642 train loss:3.832977 +step:1643 train loss:3.732732 +step:1644 train loss:3.735894 +step:1645 train loss:3.714946 +step:1646 train loss:3.796706 +step:1647 train loss:3.690071 +step:1648 train loss:3.757519 +step:1649 train loss:3.717335 +step:1650 train loss:3.733563 +step:1651 train loss:3.746028 +step:1652 train loss:3.767452 +step:1653 train loss:3.771880 +step:1654 train loss:3.770955 +step:1655 train loss:3.741630 +step:1656 train loss:3.736594 +step:1657 train loss:3.740349 +step:1658 train loss:3.709192 +step:1659 train loss:3.786553 +step:1660 train loss:3.690742 +step:1661 train loss:3.804518 +step:1662 train loss:3.736834 +step:1663 train loss:3.729041 +step:1664 train loss:3.823946 +step:1665 train loss:3.745649 +step:1666 train loss:3.756466 +step:1667 train loss:3.777618 +step:1668 train loss:3.747248 +step:1669 train loss:3.708819 +step:1670 train loss:3.768558 +step:1671 train loss:3.764542 +step:1672 train loss:3.758867 +step:1673 train loss:3.714316 +step:1674 train loss:3.715019 +step:1675 train loss:3.752905 +step:1676 train loss:4.022598 +step:1677 train loss:3.764028 +step:1678 train loss:3.679758 +step:1679 train loss:3.804065 +step:1680 train loss:3.727362 +step:1681 train loss:3.783567 +step:1682 train loss:3.741439 +step:1683 train loss:3.732546 +step:1684 train loss:3.686367 +step:1685 train loss:3.752039 +step:1686 train loss:3.733620 +step:1687 train loss:3.751517 +step:1688 train loss:3.724239 +step:1689 train loss:3.719565 +step:1690 train loss:3.737421 +step:1691 train loss:3.734529 +step:1692 train loss:3.745421 +step:1693 train loss:3.724190 +step:1694 train loss:3.672678 +step:1695 train loss:3.698647 +step:1696 train loss:3.705472 +step:1697 train loss:3.754222 +step:1698 train loss:3.751297 +step:1699 train loss:3.704878 +step:1700 train loss:3.788005 +step:1701 train loss:3.725613 +step:1702 train loss:3.720903 +step:1703 train loss:3.735358 +step:1704 train loss:3.746999 +step:1705 train loss:3.762075 +step:1706 train loss:3.765334 +step:1707 train loss:3.771002 +step:1708 train loss:3.688154 +step:1709 train loss:3.792313 +step:1710 train loss:3.703390 +step:1711 train loss:3.714928 +step:1712 train loss:3.740821 +step:1713 train loss:3.700380 +step:1714 train 
loss:4.074000 +step:1715 train loss:3.715307 +step:1716 train loss:3.703742 +step:1717 train loss:3.704269 +step:1718 train loss:3.785930 +step:1719 train loss:3.688929 +step:1720 train loss:3.780258 +step:1721 train loss:3.716585 +step:1722 train loss:3.688932 +step:1723 train loss:3.789263 +step:1724 train loss:3.734859 +step:1725 train loss:3.735514 +step:1726 train loss:3.730159 +step:1727 train loss:3.769892 +step:1728 train loss:3.778432 +step:1729 train loss:3.698484 +step:1730 train loss:3.773104 +step:1731 train loss:3.701781 +step:1732 train loss:3.716085 +step:1733 train loss:3.700220 +step:1734 train loss:3.759535 +step:1735 train loss:3.809756 +step:1736 train loss:3.730714 +step:1737 train loss:3.753606 +step:1738 train loss:3.711085 +step:1739 train loss:3.785810 +step:1740 train loss:3.772367 +step:1741 train loss:3.831133 +step:1742 train loss:3.808035 +step:1743 train loss:3.705218 +step:1744 train loss:3.716014 +step:1745 train loss:3.705658 +step:1746 train loss:3.689631 +step:1747 train loss:3.727648 +step:1748 train loss:3.661204 +step:1749 train loss:3.707875 +step:1750 validation loss:3.677730 +step:1750 train loss:3.744228 +step:1751 train loss:3.753716 +step:1752 train loss:3.735308 +step:1753 train loss:3.748863 +step:1754 train loss:3.745044 +step:1755 train loss:3.737675 +step:1756 train loss:3.760927 +step:1757 train loss:3.766522 +step:1758 train loss:3.682644 +step:1759 train loss:3.774630 +step:1760 train loss:3.725611 +step:1761 train loss:3.707375 +step:1762 train loss:3.703752 +step:1763 train loss:3.704130 +step:1764 train loss:4.006930 +step:1765 train loss:3.708460 +step:1766 train loss:3.804330 +step:1767 train loss:3.711123 +step:1768 train loss:3.694533 +step:1769 train loss:3.714522 +step:1770 train loss:3.728266 +step:1771 train loss:3.702238 +step:1772 train loss:3.811598 +step:1773 train loss:3.736442 +step:1774 train loss:3.736524 +step:1775 train loss:3.851632 +step:1776 train loss:3.725847 +step:1777 train loss:3.715918 +step:1778 train loss:3.769950 +step:1779 train loss:3.714708 +step:1780 train loss:3.759398 +step:1781 train loss:3.759848 +step:1782 train loss:3.799206 +step:1783 train loss:3.718840 +step:1784 train loss:3.818951 +step:1785 train loss:3.719879 +step:1786 train loss:3.717666 +step:1787 train loss:3.714161 +step:1788 train loss:3.736887 +step:1789 train loss:3.694152 +step:1790 train loss:3.708834 +step:1791 train loss:3.791341 +step:1792 train loss:3.784568 +step:1793 train loss:3.705652 +step:1794 train loss:3.745564 +step:1795 train loss:3.700722 +step:1796 train loss:3.685473 +step:1797 train loss:3.745755 +step:1798 train loss:3.690333 +step:1799 train loss:3.743075 +step:1800 train loss:3.777537 +step:1801 train loss:3.762393 +step:1802 train loss:3.768272 +step:1803 train loss:3.757768 +step:1804 train loss:3.756701 +step:1805 train loss:3.748975 +step:1806 train loss:3.755653 +step:1807 train loss:3.680696 +step:1808 train loss:3.745333 +step:1809 train loss:3.735294 +step:1810 train loss:3.719180 +step:1811 train loss:3.742638 +step:1812 train loss:3.722508 +step:1813 train loss:3.735320 +step:1814 train loss:3.803801 +step:1815 train loss:3.739082 +step:1816 train loss:3.696620 +step:1817 train loss:3.690317 +step:1818 train loss:3.743659 +step:1819 train loss:3.719614 +step:1820 train loss:3.746785 +step:1821 train loss:3.715618 +step:1822 train loss:3.695383 +step:1823 train loss:3.686150 +step:1824 train loss:3.762213 +step:1825 train loss:3.678949 +step:1826 train loss:3.720934 +step:1827 train loss:3.689279 
+step:1828 train loss:3.740338 +step:1829 train loss:3.705095 +step:1830 train loss:3.908156 +step:1831 train loss:3.651036 +step:1832 train loss:3.710926 +step:1833 train loss:3.746315 +step:1834 train loss:3.702646 +step:1835 train loss:3.699246 +step:1836 train loss:3.741701 +step:1837 train loss:3.671524 +step:1838 train loss:3.765634 +step:1839 train loss:3.752555 +step:1840 train loss:3.716557 +step:1841 train loss:3.746705 +step:1842 train loss:3.714206 +step:1843 train loss:3.666809 +step:1844 train loss:3.732919 +step:1845 train loss:3.698537 +step:1846 train loss:3.762078 +step:1847 train loss:3.804343 +step:1848 train loss:3.613270 +step:1849 train loss:3.697319 +step:1850 train loss:3.674579 +step:1851 train loss:3.715852 +step:1852 train loss:3.701573 +step:1853 train loss:3.761079 +step:1854 train loss:3.719118 +step:1855 train loss:3.712111 +step:1856 train loss:3.707406 +step:1857 train loss:3.717781 +step:1858 train loss:3.764510 +step:1859 train loss:3.704709 +step:1860 train loss:3.686322 +step:1861 train loss:3.698346 +step:1862 train loss:3.742708 +step:1863 train loss:3.778704 +step:1864 train loss:3.679161 +step:1865 train loss:3.698834 +step:1866 train loss:3.695743 +step:1867 train loss:3.739046 +step:1868 train loss:3.781668 +step:1869 train loss:3.702217 +step:1870 train loss:3.731356 +step:1871 train loss:3.667909 +step:1872 train loss:3.737738 +step:1873 train loss:3.802505 +step:1874 train loss:3.667347 +step:1875 train loss:3.736976 +step:1876 train loss:3.702061 +step:1877 train loss:3.745942 +step:1878 train loss:3.668869 +step:1879 train loss:3.731790 +step:1880 train loss:3.806770 +step:1881 train loss:3.736287 +step:1882 train loss:3.752560 +step:1883 train loss:3.777738 +step:1884 train loss:3.788092 +step:1885 train loss:3.737669 +step:1886 train loss:3.675036 +step:1887 train loss:3.683156 +step:1888 train loss:3.691155 +step:1889 train loss:3.712162 +step:1890 train loss:3.704177 +step:1891 train loss:3.649833 +step:1892 train loss:3.736310 +step:1893 train loss:3.662494 +step:1894 train loss:3.686118 +step:1895 train loss:3.716769 +step:1896 train loss:3.767640 +step:1897 train loss:3.655663 +step:1898 train loss:3.709203 +step:1899 train loss:3.724239 +step:1900 train loss:3.674937 +step:1901 train loss:3.760351 +step:1902 train loss:3.743090 +step:1903 train loss:3.687291 +step:1904 train loss:3.671127 +step:1905 train loss:3.672560 +step:1906 train loss:3.738054 +step:1907 train loss:3.674736 +step:1908 train loss:3.693976 +step:1909 train loss:3.786341 +step:1910 train loss:3.672877 +step:1911 train loss:3.688152 +step:1912 train loss:3.738660 +step:1913 train loss:3.675148 +step:1914 train loss:3.711918 +step:1915 train loss:3.672222 +step:1916 train loss:3.728360 +step:1917 train loss:3.708347 +step:1918 train loss:3.617407 +step:1919 train loss:3.774183 +step:1920 train loss:3.870937 +step:1921 train loss:3.661452 +step:1922 train loss:3.640043 +step:1923 train loss:3.730513 +step:1924 train loss:3.771068 +step:1925 train loss:3.715436 +step:1926 train loss:3.655160 +step:1927 train loss:3.737689 +step:1928 train loss:3.652643 +step:1929 train loss:3.679730 +step:1930 train loss:3.750709 +step:1931 train loss:3.662831 +step:1932 train loss:3.713031 +step:1933 train loss:3.710310 +step:1934 train loss:3.784533 +step:1935 train loss:3.732487 +step:1936 train loss:3.706964 +step:1937 train loss:3.642201 +step:1938 train loss:4.020431 +step:1939 train loss:3.747990 +step:1940 train loss:3.725869 +step:1941 train loss:3.735072 +step:1942 train 
loss:3.724184 +step:1943 train loss:3.722661 +step:1944 train loss:3.679271 +step:1945 train loss:3.682160 +step:1946 train loss:3.709788 +step:1947 train loss:3.727637 +step:1948 train loss:3.642572 +step:1949 train loss:3.749538 +step:1950 train loss:3.686520 +step:1951 train loss:3.714856 +step:1952 train loss:3.732888 +step:1953 train loss:3.663972 +step:1954 train loss:3.704704 +step:1955 train loss:3.653825 +step:1956 train loss:3.740466 +step:1957 train loss:3.761684 +step:1958 train loss:3.782132 +step:1959 train loss:3.647661 +step:1960 train loss:3.685691 +step:1961 train loss:3.727105 +step:1962 train loss:3.712692 +step:1963 train loss:3.691500 +step:1964 train loss:3.733497 +step:1965 train loss:3.761592 +step:1966 train loss:3.671753 +step:1967 train loss:3.730127 +step:1968 train loss:3.667761 +step:1969 train loss:3.687001 +step:1970 train loss:3.749426 +step:1971 train loss:3.648222 +step:1972 train loss:3.765368 +step:1973 train loss:3.659258 +step:1974 train loss:3.701875 +step:1975 train loss:3.659461 +step:1976 train loss:3.689194 +step:1977 train loss:3.729558 +step:1978 train loss:3.670046 +step:1979 train loss:3.651826 +step:1980 train loss:3.694640 +step:1981 train loss:3.673479 +step:1982 train loss:3.753723 +step:1983 train loss:3.700654 +step:1984 train loss:3.739951 +step:1985 train loss:3.725987 +step:1986 train loss:3.718318 +step:1987 train loss:3.671508 +step:1988 train loss:3.700099 +step:1989 train loss:3.844468 +step:1990 train loss:3.670201 +step:1991 train loss:3.667501 +step:1992 train loss:3.675214 +step:1993 train loss:3.705508 +step:1994 train loss:3.707673 +step:1995 train loss:3.650462 +step:1996 train loss:3.709936 +step:1997 train loss:3.715327 +step:1998 train loss:3.659965 +step:1999 train loss:3.783683 +step:2000 validation loss:3.637595 total_sharp:2.7195e-04 L1_sharp:5.9216e-04 L2_sharp:1.3290e-04 L3_sharp:4.8848e-05 L4_sharp:1.3426e-05 L5_sharp:1.6602e-05 L6_sharp:2.0608e-05 L7_sharp:2.9148e-05 L8_sharp:2.5634e-05 L9_sharp:2.4619e-05 L10_sharp:1.8162e-05 L11_sharp:1.5011e-05 L12_sharp:5.0515e-05 total_fnorm:1.0324e+01 total_l1_linf:7.7732e+04 total_spectral:1.0324e+01 L1_fnorm:2.9348e+00 L2_fnorm:2.6956e+00 L3_fnorm:2.7832e+00 L4_fnorm:2.9194e+00 L5_fnorm:2.9929e+00 L6_fnorm:2.9998e+00 L7_fnorm:3.0168e+00 L8_fnorm:3.0214e+00 L9_fnorm:3.0196e+00 L10_fnorm:3.0227e+00 L11_fnorm:3.0170e+00 L12_fnorm:3.0190e+00 L1_l1linf:2.0934e+00 L2_l1linf:1.9681e+00 L3_l1linf:1.9917e+00 L4_l1linf:2.0810e+00 L5_l1linf:2.0660e+00 L6_l1linf:2.0628e+00 L7_l1linf:2.0601e+00 L8_l1linf:2.0551e+00 L9_l1linf:2.0227e+00 L10_l1linf:2.0242e+00 L11_l1linf:1.9981e+00 L12_l1linf:2.0347e+00 L1_spectral:6.0286e-02 L2_spectral:6.0200e-02 L3_spectral:6.0281e-02 L4_spectral:6.0231e-02 L5_spectral:6.0291e-02 L6_spectral:6.0223e-02 L7_spectral:6.0226e-02 L8_spectral:6.0241e-02 L9_spectral:6.0233e-02 L10_spectral:6.0251e-02 L11_spectral:6.0234e-02 L12_spectral:6.0234e-02 ip_v_neg_g:1.2264e-02 cos_v_neg_g:3.0851e-03 v_norm:1.0324e+01 g_norm:3.8504e-01 hv_norm:2.1487e-01 cos_v_hv:1.3067e-02 hg_norm:4.6907e-01 cos_g_hg:3.5108e-01 v_par:1.7972e-03 v_perp:1.0324e+01 L1_cos_v_neg_g:1.5147e-02 L1_v_norm:2.9348e+00 L2_cos_v_neg_g:1.3098e-02 L2_v_norm:2.6956e+00 L3_cos_v_neg_g:7.6008e-03 L3_v_norm:2.7832e+00 L4_cos_v_neg_g:4.9269e-03 L4_v_norm:2.9194e+00 L5_cos_v_neg_g:5.8271e-03 L5_v_norm:2.9929e+00 L6_cos_v_neg_g:5.4955e-03 L6_v_norm:2.9998e+00 L7_cos_v_neg_g:5.8377e-03 L7_v_norm:3.0168e+00 L8_cos_v_neg_g:6.3560e-03 L8_v_norm:3.0214e+00 L9_cos_v_neg_g:7.3575e-03 L9_v_norm:3.0196e+00 
L10_cos_v_neg_g:6.3303e-03 L10_v_norm:3.0227e+00 L11_cos_v_neg_g:4.8638e-03 L11_v_norm:3.0170e+00 L12_cos_v_neg_g:4.5125e-03 L12_v_norm:3.0190e+00 +step:2000 train loss:3.733330 +step:2001 train loss:3.671646 +step:2002 train loss:3.769193 +step:2003 train loss:3.815348 +step:2004 train loss:3.686826 +step:2005 train loss:3.784239 +step:2006 train loss:3.665243 +step:2007 train loss:3.744760 +step:2008 train loss:3.688178 +step:2009 train loss:3.685292 +step:2010 train loss:3.818569 +step:2011 train loss:3.669820 +step:2012 train loss:3.700881 +step:2013 train loss:3.707494 +step:2014 train loss:3.613854 +step:2015 train loss:3.724428 +step:2016 train loss:3.705416 +step:2017 train loss:3.708243 +step:2018 train loss:3.671008 +step:2019 train loss:3.703010 +step:2020 train loss:3.712466 +step:2021 train loss:3.671485 +step:2022 train loss:3.718212 +step:2023 train loss:3.695825 +step:2024 train loss:3.752617 +step:2025 train loss:3.685854 +step:2026 train loss:3.666639 +step:2027 train loss:3.696923 +step:2028 train loss:3.626485 +step:2029 train loss:3.662769 +step:2030 train loss:3.660640 +step:2031 train loss:3.622724 +step:2032 train loss:3.678109 +step:2033 train loss:3.671409 +step:2034 train loss:3.673480 +step:2035 train loss:3.711937 +step:2036 train loss:3.703356 +step:2037 train loss:3.690340 +step:2038 train loss:3.683602 +step:2039 train loss:3.681341 +step:2040 train loss:3.706908 +step:2041 train loss:3.706665 +step:2042 train loss:3.638699 +step:2043 train loss:3.792212 +step:2044 train loss:3.661716 +step:2045 train loss:3.675965 +step:2046 train loss:3.688670 +step:2047 train loss:3.668770 +step:2048 train loss:3.707921 +step:2049 train loss:3.664491 +step:2050 train loss:3.688481 +step:2051 train loss:3.647900 +step:2052 train loss:3.704822 +step:2053 train loss:3.695572 +step:2054 train loss:3.680894 +step:2055 train loss:3.669441 +step:2056 train loss:3.715744 +step:2057 train loss:3.725459 +step:2058 train loss:3.687961 +step:2059 train loss:3.769894 +step:2060 train loss:3.714576 +step:2061 train loss:3.674138 +step:2062 train loss:3.701058 +step:2063 train loss:3.598793 +step:2064 train loss:3.721382 +step:2065 train loss:3.726460 +step:2066 train loss:3.585623 +step:2067 train loss:3.636730 +step:2068 train loss:3.739640 +step:2069 train loss:3.682275 +step:2070 train loss:3.683719 +step:2071 train loss:3.716463 +step:2072 train loss:3.653317 +step:2073 train loss:3.704237 +step:2074 train loss:3.683089 +step:2075 train loss:3.765203 +step:2076 train loss:3.702059 +step:2077 train loss:3.720878 +step:2078 train loss:3.680034 +step:2079 train loss:3.827976 +step:2080 train loss:3.650711 +step:2081 train loss:3.755690 +step:2082 train loss:3.686643 +step:2083 train loss:3.677750 +step:2084 train loss:3.651738 +step:2085 train loss:3.697588 +step:2086 train loss:3.709055 +step:2087 train loss:3.752143 +step:2088 train loss:3.618603 +step:2089 train loss:3.643182 +step:2090 train loss:3.680367 +step:2091 train loss:3.700999 +step:2092 train loss:3.681402 +step:2093 train loss:3.675316 +step:2094 train loss:3.708678 +step:2095 train loss:3.649559 +step:2096 train loss:3.642096 +step:2097 train loss:3.678192 +step:2098 train loss:3.678971 +step:2099 train loss:3.664713 +step:2100 train loss:3.719321 +step:2101 train loss:3.716372 +step:2102 train loss:3.682153 +step:2103 train loss:3.699146 +step:2104 train loss:3.680524 +step:2105 train loss:3.682951 +step:2106 train loss:3.681185 +step:2107 train loss:3.747074 +step:2108 train loss:3.662535 +step:2109 train 
loss:3.625758 +step:2110 train loss:3.720140 +step:2111 train loss:3.665366 +step:2112 train loss:3.733232 +step:2113 train loss:3.665181 +step:2114 train loss:3.669592 +step:2115 train loss:3.725923 +step:2116 train loss:3.652746 +step:2117 train loss:3.676325 +step:2118 train loss:3.660630 +step:2119 train loss:3.597512 +step:2120 train loss:3.680981 +step:2121 train loss:3.673931 +step:2122 train loss:3.686172 +step:2123 train loss:3.737472 +step:2124 train loss:3.750094 +step:2125 train loss:3.649206 +step:2126 train loss:3.654660 +step:2127 train loss:3.643105 +step:2128 train loss:3.642537 +step:2129 train loss:3.669874 +step:2130 train loss:3.675466 +step:2131 train loss:3.694255 +step:2132 train loss:3.626528 +step:2133 train loss:3.735203 +step:2134 train loss:3.687070 +step:2135 train loss:3.641221 +step:2136 train loss:3.738116 +step:2137 train loss:3.700202 +step:2138 train loss:3.658626 +step:2139 train loss:3.658964 +step:2140 train loss:3.659866 +step:2141 train loss:3.715179 +step:2142 train loss:3.681129 +step:2143 train loss:3.601709 +step:2144 train loss:3.712051 +step:2145 train loss:3.681811 +step:2146 train loss:3.718059 +step:2147 train loss:3.827789 +step:2148 train loss:3.624135 +step:2149 train loss:3.638095 +step:2150 train loss:3.657012 +step:2151 train loss:3.694343 +step:2152 train loss:3.691652 +step:2153 train loss:3.730038 +step:2154 train loss:3.652721 +step:2155 train loss:3.730314 +step:2156 train loss:3.658547 +step:2157 train loss:3.728224 +step:2158 train loss:3.771430 +step:2159 train loss:3.699879 +step:2160 train loss:3.763981 +step:2161 train loss:3.665372 +step:2162 train loss:3.670821 +step:2163 train loss:3.643924 +step:2164 train loss:3.669371 +step:2165 train loss:3.651857 +step:2166 train loss:3.765673 +step:2167 train loss:3.676041 +step:2168 train loss:3.685016 +step:2169 train loss:3.637256 +step:2170 train loss:3.787508 +step:2171 train loss:3.741235 +step:2172 train loss:3.676255 +step:2173 train loss:3.673068 +step:2174 train loss:3.732762 +step:2175 train loss:3.659223 +step:2176 train loss:3.745253 +step:2177 train loss:3.713550 +step:2178 train loss:3.641918 +step:2179 train loss:3.710077 +step:2180 train loss:3.728856 +step:2181 train loss:3.653040 +step:2182 train loss:3.703286 +step:2183 train loss:3.695521 +step:2184 train loss:3.646836 +step:2185 train loss:3.629753 +step:2186 train loss:3.668021 +step:2187 train loss:3.679771 +step:2188 train loss:3.727800 +step:2189 train loss:3.619087 +step:2190 train loss:3.665309 +step:2191 train loss:3.720274 +step:2192 train loss:3.651586 +step:2193 train loss:3.619468 +step:2194 train loss:3.623639 +step:2195 train loss:3.650251 +step:2196 train loss:3.657575 +step:2197 train loss:3.640572 +step:2198 train loss:3.668810 +step:2199 train loss:3.724714 +step:2200 train loss:3.664135 +step:2201 train loss:3.673571 +step:2202 train loss:3.630166 +step:2203 train loss:3.651089 +step:2204 train loss:3.683710 +step:2205 train loss:3.667350 +step:2206 train loss:3.666178 +step:2207 train loss:3.664159 +step:2208 train loss:3.642174 +step:2209 train loss:3.924921 +step:2210 train loss:3.692124 +step:2211 train loss:3.685135 +step:2212 train loss:3.661557 +step:2213 train loss:3.738497 +step:2214 train loss:3.732405 +step:2215 train loss:3.658293 +step:2216 train loss:3.623250 +step:2217 train loss:3.657749 +step:2218 train loss:3.652928 +step:2219 train loss:3.690358 +step:2220 train loss:3.630190 +step:2221 train loss:3.664919 +step:2222 train loss:3.679615 +step:2223 train loss:3.723274 
+step:2224 train loss:3.691036 +step:2225 train loss:3.633245 +step:2226 train loss:3.703983 +step:2227 train loss:3.700000 +step:2228 train loss:3.697635 +step:2229 train loss:3.641003 +step:2230 train loss:3.764776 +step:2231 train loss:3.682942 +step:2232 train loss:3.676400 +step:2233 train loss:3.723942 +step:2234 train loss:3.617293 +step:2235 train loss:3.707150 +step:2236 train loss:3.646064 +step:2237 train loss:3.785700 +step:2238 train loss:3.580829 +step:2239 train loss:3.668395 +step:2240 train loss:3.676396 +step:2241 train loss:3.590890 +step:2242 train loss:3.739200 +step:2243 train loss:3.767902 +step:2244 train loss:3.646382 +step:2245 train loss:3.653445 +step:2246 train loss:3.612974 +step:2247 train loss:3.623494 +step:2248 train loss:3.678568 +step:2249 train loss:3.653636 +step:2250 validation loss:3.605272 +step:2250 train loss:3.670951 +step:2251 train loss:3.634331 +step:2252 train loss:3.633536 +step:2253 train loss:3.663986 +step:2254 train loss:3.673221 +step:2255 train loss:3.622880 +step:2256 train loss:3.679500 +step:2257 train loss:3.659663 +step:2258 train loss:3.658918 +step:2259 train loss:3.676742 +step:2260 train loss:3.620944 +step:2261 train loss:3.703046 +step:2262 train loss:3.728847 +step:2263 train loss:3.677922 +step:2264 train loss:3.797476 +step:2265 train loss:3.640555 +step:2266 train loss:3.684215 +step:2267 train loss:3.642941 +step:2268 train loss:3.652379 +step:2269 train loss:3.650247 +step:2270 train loss:3.639718 +step:2271 train loss:3.661459 +step:2272 train loss:3.694114 +step:2273 train loss:3.611780 +step:2274 train loss:3.649031 +step:2275 train loss:3.600208 +step:2276 train loss:3.676521 +step:2277 train loss:3.692255 +step:2278 train loss:3.668154 +step:2279 train loss:3.654056 +step:2280 train loss:3.559183 +step:2281 train loss:3.707052 +step:2282 train loss:3.636515 +step:2283 train loss:3.617378 +step:2284 train loss:3.635364 +step:2285 train loss:3.692772 +step:2286 train loss:3.653638 +step:2287 train loss:3.687516 +step:2288 train loss:3.661155 +step:2289 train loss:3.660930 +step:2290 train loss:3.668125 +step:2291 train loss:3.649231 +step:2292 train loss:3.697383 +step:2293 train loss:3.668876 +step:2294 train loss:3.668258 +step:2295 train loss:3.724144 +step:2296 train loss:3.657849 +step:2297 train loss:3.631059 +step:2298 train loss:3.692983 +step:2299 train loss:3.664808 +step:2300 train loss:3.580558 +step:2301 train loss:3.678523 +step:2302 train loss:3.688035 +step:2303 train loss:3.663188 +step:2304 train loss:3.650280 +step:2305 train loss:3.694455 +step:2306 train loss:3.680835 +step:2307 train loss:3.665624 +step:2308 train loss:3.682771 +step:2309 train loss:3.636950 +step:2310 train loss:3.627582 +step:2311 train loss:3.606625 +step:2312 train loss:3.683331 +step:2313 train loss:3.594781 +step:2314 train loss:3.668482 +step:2315 train loss:3.681035 +step:2316 train loss:3.727889 +step:2317 train loss:3.589404 +step:2318 train loss:3.638256 +step:2319 train loss:3.690031 +step:2320 train loss:3.654658 +step:2321 train loss:3.623971 +step:2322 train loss:3.646693 +step:2323 train loss:3.638507 +step:2324 train loss:3.671189 +step:2325 train loss:3.604465 +step:2326 train loss:3.644785 +step:2327 train loss:3.753894 +step:2328 train loss:3.696272 +step:2329 train loss:3.655701 +step:2330 train loss:3.616821 +step:2331 train loss:3.655862 +step:2332 train loss:3.583695 +step:2333 train loss:3.642834 +step:2334 train loss:3.621955 +step:2335 train loss:3.612112 +step:2336 train loss:3.859667 +step:2337 
train loss:3.633082 +step:2338 train loss:3.679065 +step:2339 train loss:3.672664 +step:2340 train loss:3.692478 +step:2341 train loss:3.680139 +step:2342 train loss:3.632262 +step:2343 train loss:3.653725 +step:2344 train loss:3.701313 +step:2345 train loss:3.648692 +step:2346 train loss:3.679334 +step:2347 train loss:3.607252 +step:2348 train loss:3.661877 +step:2349 train loss:3.611587 +step:2350 train loss:3.672337 +step:2351 train loss:3.674408 +step:2352 train loss:3.682453 +step:2353 train loss:3.643670 +step:2354 train loss:3.689517 +step:2355 train loss:3.679680 +step:2356 train loss:3.713205 +step:2357 train loss:3.619472 +step:2358 train loss:3.635907 +step:2359 train loss:3.659121 +step:2360 train loss:3.681510 +step:2361 train loss:3.710990 +step:2362 train loss:3.549065 +step:2363 train loss:3.741923 +step:2364 train loss:3.688677 +step:2365 train loss:3.655914 +step:2366 train loss:3.607132 +step:2367 train loss:3.678345 +step:2368 train loss:3.664392 +step:2369 train loss:3.661457 +step:2370 train loss:3.665907 +step:2371 train loss:3.727857 +step:2372 train loss:3.579762 +step:2373 train loss:3.719994 +step:2374 train loss:3.702101 +step:2375 train loss:3.685688 +step:2376 train loss:3.676871 +step:2377 train loss:3.619336 +step:2378 train loss:3.666568 +step:2379 train loss:3.654068 +step:2380 train loss:3.714015 +step:2381 train loss:3.814591 +step:2382 train loss:3.595583 +step:2383 train loss:3.640412 +step:2384 train loss:3.676950 +step:2385 train loss:3.577519 +step:2386 train loss:3.733035 +step:2387 train loss:3.610415 +step:2388 train loss:3.662561 +step:2389 train loss:3.685175 +step:2390 train loss:3.635366 +step:2391 train loss:3.661022 +step:2392 train loss:3.687268 +step:2393 train loss:3.638452 +step:2394 train loss:3.663667 +step:2395 train loss:3.656390 +step:2396 train loss:3.659532 +step:2397 train loss:3.633965 +step:2398 train loss:3.695153 +step:2399 train loss:3.660316 +step:2400 train loss:3.631477 +step:2401 train loss:3.676437 +step:2402 train loss:3.625894 +step:2403 train loss:3.674034 +step:2404 train loss:3.634119 +step:2405 train loss:3.636611 +step:2406 train loss:3.659897 +step:2407 train loss:3.606044 +step:2408 train loss:3.646503 +step:2409 train loss:3.641205 +step:2410 train loss:3.633934 +step:2411 train loss:3.715494 +step:2412 train loss:3.689973 +step:2413 train loss:3.743219 +step:2414 train loss:3.626304 +step:2415 train loss:3.619427 +step:2416 train loss:3.634289 +step:2417 train loss:3.676084 +step:2418 train loss:3.688641 +step:2419 train loss:3.620011 +step:2420 train loss:3.640900 +step:2421 train loss:3.669742 +step:2422 train loss:3.716350 +step:2423 train loss:3.655365 +step:2424 train loss:3.617813 +step:2425 train loss:3.681312 +step:2426 train loss:3.621089 +step:2427 train loss:3.640421 +step:2428 train loss:3.729447 +step:2429 train loss:3.677966 +step:2430 train loss:3.772487 +step:2431 train loss:3.682048 +step:2432 train loss:3.652982 +step:2433 train loss:3.626777 +step:2434 train loss:3.615077 +step:2435 train loss:3.676101 +step:2436 train loss:3.629331 +step:2437 train loss:3.661225 +step:2438 train loss:3.707434 +step:2439 train loss:3.691133 +step:2440 train loss:3.632609 +step:2441 train loss:3.670743 +step:2442 train loss:3.661318 +step:2443 train loss:3.618082 +step:2444 train loss:3.658937 +step:2445 train loss:3.656945 +step:2446 train loss:3.627677 +step:2447 train loss:3.607808 +step:2448 train loss:3.660652 +step:2449 train loss:3.687259 +step:2450 train loss:3.646169 +step:2451 train loss:3.579061 
+step:2452 train loss:3.668883 +step:2453 train loss:3.640451 +step:2454 train loss:3.638679 +step:2455 train loss:3.686898 +step:2456 train loss:3.641814 +step:2457 train loss:3.704709 +step:2458 train loss:3.679552 +step:2459 train loss:3.654898 +step:2460 train loss:3.665483 +step:2461 train loss:3.688253 +step:2462 train loss:3.659656 +step:2463 train loss:3.637338 +step:2464 train loss:3.651923 +step:2465 train loss:3.731318 +step:2466 train loss:3.817728 +step:2467 train loss:3.721259 +step:2468 train loss:3.610530 +step:2469 train loss:3.687511 +step:2470 train loss:3.731626 +step:2471 train loss:3.727902 +step:2472 train loss:3.703697 +step:2473 train loss:3.649734 +step:2474 train loss:3.609930 +step:2475 train loss:3.664930 +step:2476 train loss:3.738673 +step:2477 train loss:3.657264 +step:2478 train loss:3.612336 +step:2479 train loss:3.654428 +step:2480 train loss:3.645742 +step:2481 train loss:3.846428 +step:2482 train loss:3.645499 +step:2483 train loss:3.672634 +step:2484 train loss:3.627365 +step:2485 train loss:3.619216 +step:2486 train loss:3.651610 +step:2487 train loss:3.688814 +step:2488 train loss:3.596101 +step:2489 train loss:3.709011 +step:2490 train loss:3.632275 +step:2491 train loss:3.646681 +step:2492 train loss:3.687245 +step:2493 train loss:3.724516 +step:2494 train loss:3.645011 +step:2495 train loss:3.676950 +step:2496 train loss:3.648731 +step:2497 train loss:3.668831 +step:2498 train loss:3.678197 +step:2499 train loss:3.666831 +step:2500 validation loss:3.583427 total_sharp:1.4406e-04 L1_sharp:1.4812e-04 L2_sharp:2.5967e-05 L3_sharp:2.4527e-05 L4_sharp:1.1058e-05 L5_sharp:1.2361e-05 L6_sharp:1.9809e-05 L7_sharp:2.7056e-05 L8_sharp:2.2370e-05 L9_sharp:2.1163e-05 L10_sharp:1.3353e-05 L11_sharp:1.2220e-05 L12_sharp:1.5957e-05 total_fnorm:1.0355e+01 total_l1_linf:7.7854e+04 total_spectral:1.0355e+01 L1_fnorm:2.9365e+00 L2_fnorm:2.7554e+00 L3_fnorm:2.8193e+00 L4_fnorm:2.9259e+00 L5_fnorm:2.9948e+00 L6_fnorm:3.0060e+00 L7_fnorm:3.0211e+00 L8_fnorm:3.0245e+00 L9_fnorm:3.0212e+00 L10_fnorm:3.0226e+00 L11_fnorm:3.0144e+00 L12_fnorm:3.0185e+00 L1_l1linf:2.0452e+00 L2_l1linf:1.9748e+00 L3_l1linf:2.0076e+00 L4_l1linf:2.0658e+00 L5_l1linf:2.0777e+00 L6_l1linf:2.0530e+00 L7_l1linf:2.0446e+00 L8_l1linf:2.0345e+00 L9_l1linf:2.0196e+00 L10_l1linf:2.0312e+00 L11_l1linf:2.0032e+00 L12_l1linf:1.9886e+00 L1_spectral:6.0211e-02 L2_spectral:6.0222e-02 L3_spectral:6.0270e-02 L4_spectral:6.0235e-02 L5_spectral:6.0233e-02 L6_spectral:6.0240e-02 L7_spectral:6.0232e-02 L8_spectral:6.0213e-02 L9_spectral:6.0239e-02 L10_spectral:6.0206e-02 L11_spectral:6.0223e-02 L12_spectral:6.0221e-02 ip_v_neg_g:6.3438e-03 cos_v_neg_g:1.7090e-03 v_norm:1.0355e+01 g_norm:3.5849e-01 hv_norm:9.3287e-02 cos_v_hv:1.5991e-02 hg_norm:3.4935e-01 cos_g_hg:3.5222e-01 v_par:1.2049e-03 v_perp:1.0355e+01 L1_cos_v_neg_g:7.0087e-03 L1_v_norm:2.9365e+00 L2_cos_v_neg_g:5.2089e-03 L2_v_norm:2.7554e+00 L3_cos_v_neg_g:3.1823e-03 L3_v_norm:2.8193e+00 L4_cos_v_neg_g:2.8979e-03 L4_v_norm:2.9259e+00 L5_cos_v_neg_g:2.7015e-03 L5_v_norm:2.9948e+00 L6_cos_v_neg_g:4.1694e-03 L6_v_norm:3.0060e+00 L7_cos_v_neg_g:5.5168e-03 L7_v_norm:3.0211e+00 L8_cos_v_neg_g:6.3054e-03 L8_v_norm:3.0245e+00 L9_cos_v_neg_g:5.8034e-03 L9_v_norm:3.0212e+00 L10_cos_v_neg_g:4.7049e-03 L10_v_norm:3.0226e+00 L11_cos_v_neg_g:3.2268e-03 L11_v_norm:3.0144e+00 L12_cos_v_neg_g:4.1310e-03 L12_v_norm:3.0185e+00 +step:2500 train loss:3.615571 +step:2501 train loss:3.684216 +step:2502 train loss:3.667023 +step:2503 train loss:3.592083 +step:2504 train 
loss:3.631444 +step:2505 train loss:3.648578 +step:2506 train loss:3.617372 +step:2507 train loss:3.646100 +step:2508 train loss:3.589406 +step:2509 train loss:3.615730 +step:2510 train loss:3.607835 +step:2511 train loss:3.654523 +step:2512 train loss:3.699315 +step:2513 train loss:3.648675 +step:2514 train loss:3.631575 +step:2515 train loss:3.764119 +step:2516 train loss:3.646983 +step:2517 train loss:3.717255 +step:2518 train loss:3.675218 +step:2519 train loss:3.656844 +step:2520 train loss:3.661441 +step:2521 train loss:3.635475 +step:2522 train loss:3.672374 +step:2523 train loss:3.589925 +step:2524 train loss:3.649243 +step:2525 train loss:3.640763 +step:2526 train loss:3.694088 +step:2527 train loss:3.687862 +step:2528 train loss:3.670874 +step:2529 train loss:3.684820 +step:2530 train loss:3.661942 +step:2531 train loss:3.600479 +step:2532 train loss:3.704300 +step:2533 train loss:3.608331 +step:2534 train loss:3.692741 +step:2535 train loss:3.646291 +step:2536 train loss:3.572439 +step:2537 train loss:3.685919 +step:2538 train loss:3.662280 +step:2539 train loss:3.685212 +step:2540 train loss:3.614722 +step:2541 train loss:3.647688 +step:2542 train loss:3.658153 +step:2543 train loss:3.646482 +step:2544 train loss:3.635246 +step:2545 train loss:3.620563 +step:2546 train loss:3.587906 +step:2547 train loss:3.635033 +step:2548 train loss:3.660315 +step:2549 train loss:3.662654 +step:2550 train loss:3.790581 +step:2551 train loss:3.865999 +step:2552 train loss:3.600996 +step:2553 train loss:3.630442 +step:2554 train loss:3.776627 +step:2555 train loss:3.659303 +step:2556 train loss:3.583993 +step:2557 train loss:3.684992 +step:2558 train loss:3.676056 +step:2559 train loss:3.630304 +step:2560 train loss:3.616956 +step:2561 train loss:3.708994 +step:2562 train loss:3.664033 +step:2563 train loss:3.602247 +step:2564 train loss:3.666138 +step:2565 train loss:3.648299 +step:2566 train loss:3.628483 +step:2567 train loss:3.609008 +step:2568 train loss:3.662180 +step:2569 train loss:3.671852 +step:2570 train loss:3.621176 +step:2571 train loss:3.707186 +step:2572 train loss:3.664445 +step:2573 train loss:3.595583 +step:2574 train loss:3.651908 +step:2575 train loss:3.694448 +step:2576 train loss:3.642907 +step:2577 train loss:3.608642 +step:2578 train loss:3.645117 +step:2579 train loss:3.623391 +step:2580 train loss:3.600047 +step:2581 train loss:3.610765 +step:2582 train loss:3.619007 +step:2583 train loss:3.642211 +step:2584 train loss:3.657788 +step:2585 train loss:3.621438 +step:2586 train loss:3.642298 +step:2587 train loss:3.577831 +step:2588 train loss:3.611301 +step:2589 train loss:3.688335 +step:2590 train loss:3.611038 +step:2591 train loss:3.669236 +step:2592 train loss:3.715257 +step:2593 train loss:3.671863 +step:2594 train loss:3.637286 +step:2595 train loss:3.641396 +step:2596 train loss:3.680085 +step:2597 train loss:3.563948 +step:2598 train loss:3.723125 +step:2599 train loss:3.670152 +step:2600 train loss:3.705226 +step:2601 train loss:3.640117 +step:2602 train loss:3.666921 +step:2603 train loss:3.664567 +step:2604 train loss:3.586035 +step:2605 train loss:3.717497 +step:2606 train loss:3.658852 +step:2607 train loss:3.617930 +step:2608 train loss:3.595021 +step:2609 train loss:3.619717 +step:2610 train loss:3.642174 +step:2611 train loss:3.683342 +step:2612 train loss:3.644872 +step:2613 train loss:3.619257 +step:2614 train loss:3.606617 +step:2615 train loss:3.598481 +step:2616 train loss:3.681947 +step:2617 train loss:3.640685 +step:2618 train loss:3.608208 
+step:2619 train loss:3.622793 +step:2620 train loss:3.616292 +step:2621 train loss:3.628788 +step:2622 train loss:3.704281 +step:2623 train loss:3.574944 +step:2624 train loss:3.592482 +step:2625 train loss:3.662360 +step:2626 train loss:3.655673 +step:2627 train loss:3.634005 +step:2628 train loss:3.692657 +step:2629 train loss:3.632983 +step:2630 train loss:3.631995 +step:2631 train loss:3.660450 +step:2632 train loss:3.625534 +step:2633 train loss:3.610660 +step:2634 train loss:3.658000 +step:2635 train loss:3.646614 +step:2636 train loss:3.691203 +step:2637 train loss:3.644817 +step:2638 train loss:3.629422 +step:2639 train loss:3.677622 +step:2640 train loss:3.596835 +step:2641 train loss:3.654505 +step:2642 train loss:3.578321 +step:2643 train loss:3.573802 +step:2644 train loss:3.666529 +step:2645 train loss:3.611045 +step:2646 train loss:3.638064 +step:2647 train loss:3.658741 +step:2648 train loss:3.692098 +step:2649 train loss:3.605969 +step:2650 train loss:3.594750 +step:2651 train loss:3.637203 +step:2652 train loss:3.612664 +step:2653 train loss:3.677121 +step:2654 train loss:3.635506 +step:2655 train loss:3.622190 +step:2656 train loss:3.642516 +step:2657 train loss:3.670682 +step:2658 train loss:3.677253 +step:2659 train loss:3.658751 +step:2660 train loss:3.642314 +step:2661 train loss:3.690910 +step:2662 train loss:3.665168 +step:2663 train loss:3.641989 +step:2664 train loss:3.651587 +step:2665 train loss:3.601965 +step:2666 train loss:3.632624 +step:2667 train loss:3.639700 +step:2668 train loss:3.615572 +step:2669 train loss:3.620062 +step:2670 train loss:3.647444 +step:2671 train loss:3.622653 +step:2672 train loss:3.644549 +step:2673 train loss:3.575736 +step:2674 train loss:3.674480 +step:2675 train loss:3.645831 +step:2676 train loss:3.661942 +step:2677 train loss:3.643077 +step:2678 train loss:3.632389 +step:2679 train loss:3.615207 +step:2680 train loss:3.595289 +step:2681 train loss:3.567986 +step:2682 train loss:3.656093 +step:2683 train loss:3.629522 +step:2684 train loss:3.661884 +step:2685 train loss:3.571921 +step:2686 train loss:3.589990 +step:2687 train loss:3.668622 +step:2688 train loss:3.682682 +step:2689 train loss:3.584741 +step:2690 train loss:3.674589 +step:2691 train loss:3.641259 +step:2692 train loss:3.669856 +step:2693 train loss:3.718327 +step:2694 train loss:3.622978 +step:2695 train loss:3.639407 +step:2696 train loss:3.639728 +step:2697 train loss:3.636322 +step:2698 train loss:3.650844 +step:2699 train loss:3.663540 +step:2700 train loss:3.633350 +step:2701 train loss:3.703192 +step:2702 train loss:3.637340 +step:2703 train loss:3.590597 +step:2704 train loss:3.674618 +step:2705 train loss:3.657194 +step:2706 train loss:3.593032 +step:2707 train loss:3.558284 +step:2708 train loss:3.657154 +step:2709 train loss:3.633719 +step:2710 train loss:3.637386 +step:2711 train loss:3.606543 +step:2712 train loss:3.676101 +step:2713 train loss:3.670040 +step:2714 train loss:3.616163 +step:2715 train loss:3.611287 +step:2716 train loss:3.683119 +step:2717 train loss:3.641845 +step:2718 train loss:3.641274 +step:2719 train loss:3.637763 +step:2720 train loss:3.605865 +step:2721 train loss:3.685688 +step:2722 train loss:3.610291 +step:2723 train loss:3.605339 +step:2724 train loss:3.622914 +step:2725 train loss:3.625605 +step:2726 train loss:3.601432 +step:2727 train loss:3.658245 +step:2728 train loss:3.597027 +step:2729 train loss:3.724850 +step:2730 train loss:3.670859 +step:2731 train loss:3.709383 +step:2732 train loss:3.618697 +step:2733 train 
loss:3.610991 +step:2734 train loss:3.663137 +step:2735 train loss:3.662503 +step:2736 train loss:3.584487 +step:2737 train loss:3.638113 +step:2738 train loss:3.699108 +step:2739 train loss:3.617424 +step:2740 train loss:3.615890 +step:2741 train loss:3.605413 +step:2742 train loss:3.533301 +step:2743 train loss:3.633359 +step:2744 train loss:3.660428 +step:2745 train loss:3.613062 +step:2746 train loss:3.627518 +step:2747 train loss:3.612480 +step:2748 train loss:3.573995 +step:2749 train loss:3.642871 +step:2750 validation loss:3.561663 +step:2750 train loss:3.648835 +step:2751 train loss:3.673555 +step:2752 train loss:3.655134 +step:2753 train loss:3.645479 +step:2754 train loss:3.587313 +step:2755 train loss:3.651250 +step:2756 train loss:3.628599 +step:2757 train loss:3.619810 +step:2758 train loss:3.646459 +step:2759 train loss:3.655949 +step:2760 train loss:3.567113 +step:2761 train loss:3.581829 +step:2762 train loss:3.599309 +step:2763 train loss:3.613042 +step:2764 train loss:3.560752 +step:2765 train loss:3.610450 +step:2766 train loss:3.703681 +step:2767 train loss:3.574080 +step:2768 train loss:3.638452 +step:2769 train loss:3.606205 +step:2770 train loss:3.628107 +step:2771 train loss:3.651378 +step:2772 train loss:3.616935 +step:2773 train loss:3.620016 +step:2774 train loss:3.614263 +step:2775 train loss:3.626948 +step:2776 train loss:3.579475 +step:2777 train loss:3.616083 +step:2778 train loss:3.621151 +step:2779 train loss:3.652339 +step:2780 train loss:3.624094 +step:2781 train loss:3.608266 +step:2782 train loss:3.597377 +step:2783 train loss:3.624038 +step:2784 train loss:3.631808 +step:2785 train loss:3.708660 +step:2786 train loss:3.669883 +step:2787 train loss:3.628947 +step:2788 train loss:3.626370 +step:2789 train loss:3.623482 +step:2790 train loss:3.561610 +step:2791 train loss:3.660401 +step:2792 train loss:3.650487 +step:2793 train loss:3.614527 +step:2794 train loss:3.624942 +step:2795 train loss:3.639565 +step:2796 train loss:3.629860 +step:2797 train loss:3.678198 +step:2798 train loss:3.664155 +step:2799 train loss:3.573428 +step:2800 train loss:3.617362 +step:2801 train loss:3.652501 +step:2802 train loss:3.681145 +step:2803 train loss:3.654989 +step:2804 train loss:3.587276 +step:2805 train loss:3.630762 +step:2806 train loss:3.618947 +step:2807 train loss:3.651468 +step:2808 train loss:3.589228 +step:2809 train loss:3.664490 +step:2810 train loss:3.649725 +step:2811 train loss:3.637691 +step:2812 train loss:3.690772 +step:2813 train loss:3.657944 +step:2814 train loss:3.642145 +step:2815 train loss:3.657554 +step:2816 train loss:3.660641 +step:2817 train loss:3.596882 +step:2818 train loss:3.702763 +step:2819 train loss:3.624174 +step:2820 train loss:3.622872 +step:2821 train loss:3.599102 +step:2822 train loss:3.643446 +step:2823 train loss:3.594082 +step:2824 train loss:3.490903 +step:2825 train loss:3.634399 +step:2826 train loss:3.629818 +step:2827 train loss:3.663799 +step:2828 train loss:3.658436 +step:2829 train loss:3.640055 +step:2830 train loss:3.669078 +step:2831 train loss:3.611164 +step:2832 train loss:3.581803 +step:2833 train loss:3.639137 +step:2834 train loss:3.591051 +step:2835 train loss:3.623988 +step:2836 train loss:3.629929 +step:2837 train loss:3.632106 +step:2838 train loss:3.573848 +step:2839 train loss:3.673578 +step:2840 train loss:3.630179 +step:2841 train loss:3.709655 +step:2842 train loss:3.652544 +step:2843 train loss:3.646996 +step:2844 train loss:3.670507 +step:2845 train loss:3.623843 +step:2846 train loss:3.578857 
+step:2847 train loss:3.673724 +step:2848 train loss:3.625979 +step:2849 train loss:3.616316 +step:2850 train loss:3.670920 +step:2851 train loss:3.627880 +step:2852 train loss:3.710663 +step:2853 train loss:3.623226 +step:2854 train loss:3.574492 +step:2855 train loss:3.641990 +step:2856 train loss:3.563505 +step:2857 train loss:3.672822 +step:2858 train loss:3.629382 +step:2859 train loss:3.608339 +step:2860 train loss:3.607311 +step:2861 train loss:3.586597 +step:2862 train loss:3.619966 +step:2863 train loss:3.601079 +step:2864 train loss:3.606983 +step:2865 train loss:3.685556 +step:2866 train loss:3.694405 +step:2867 train loss:3.634974 +step:2868 train loss:3.634864 +step:2869 train loss:3.594563 +step:2870 train loss:3.682430 +step:2871 train loss:3.681782 +step:2872 train loss:3.644495 +step:2873 train loss:3.648972 +step:2874 train loss:3.627413 +step:2875 train loss:3.583519 +step:2876 train loss:3.625738 +step:2877 train loss:3.606729 +step:2878 train loss:3.621580 +step:2879 train loss:3.590138 +step:2880 train loss:3.606577 +step:2881 train loss:3.601986 +step:2882 train loss:3.535869 +step:2883 train loss:3.618462 +step:2884 train loss:3.694125 +step:2885 train loss:3.584875 +step:2886 train loss:3.638970 +step:2887 train loss:3.661792 +step:2888 train loss:3.627861 +step:2889 train loss:3.617548 +step:2890 train loss:3.588245 +step:2891 train loss:3.632128 +step:2892 train loss:3.635571 +step:2893 train loss:3.615899 +step:2894 train loss:3.589839 +step:2895 train loss:3.641657 +step:2896 train loss:3.684120 +step:2897 train loss:3.664729 +step:2898 train loss:3.802627 +step:2899 train loss:3.553003 +step:2900 train loss:3.626708 +step:2901 train loss:3.579240 +step:2902 train loss:3.579921 +step:2903 train loss:3.595930 +step:2904 train loss:3.621355 +step:2905 train loss:3.681061 +step:2906 train loss:3.658080 +step:2907 train loss:3.829866 +step:2908 train loss:3.573882 +step:2909 train loss:3.652839 +step:2910 train loss:3.623850 +step:2911 train loss:3.651155 +step:2912 train loss:3.606325 +step:2913 train loss:3.646676 +step:2914 train loss:3.673585 +step:2915 train loss:3.666248 +step:2916 train loss:3.622897 +step:2917 train loss:3.664979 +step:2918 train loss:3.652093 +step:2919 train loss:3.598817 +step:2920 train loss:3.647094 +step:2921 train loss:3.603525 +step:2922 train loss:3.626995 +step:2923 train loss:3.694049 +step:2924 train loss:3.629268 +step:2925 train loss:3.582282 +step:2926 train loss:3.672814 +step:2927 train loss:3.582376 +step:2928 train loss:3.551310 +step:2929 train loss:3.567591 +step:2930 train loss:3.586900 +step:2931 train loss:3.744283 +step:2932 train loss:3.661704 +step:2933 train loss:3.623711 +step:2934 train loss:3.619205 +step:2935 train loss:3.641036 +step:2936 train loss:3.586650 +step:2937 train loss:3.609967 +step:2938 train loss:3.627341 +step:2939 train loss:3.702827 +step:2940 train loss:3.600813 +step:2941 train loss:3.637296 +step:2942 train loss:3.596685 +step:2943 train loss:3.881208 +step:2944 train loss:3.702357 +step:2945 train loss:3.660306 +step:2946 train loss:3.671729 +step:2947 train loss:3.631410 +step:2948 train loss:3.585855 +step:2949 train loss:3.674892 +step:2950 train loss:3.632800 +step:2951 train loss:3.529177 +step:2952 train loss:3.598408 +step:2953 train loss:3.509897 +step:2954 train loss:3.604828 +step:2955 train loss:3.670974 +step:2956 train loss:3.620246 +step:2957 train loss:3.624643 +step:2958 train loss:3.573426 +step:2959 train loss:3.600580 +step:2960 train loss:3.690420 +step:2961 train 
loss:3.550703 +step:2962 train loss:3.630067 +step:2963 train loss:3.628381 +step:2964 train loss:3.607054 +step:2965 train loss:3.635277 +step:2966 train loss:3.608873 +step:2967 train loss:3.607980 +step:2968 train loss:3.584259 +step:2969 train loss:3.589501 +step:2970 train loss:3.656636 +step:2971 train loss:3.589118 +step:2972 train loss:3.572069 +step:2973 train loss:3.567834 +step:2974 train loss:3.607140 +step:2975 train loss:3.571502 +step:2976 train loss:3.613807 +step:2977 train loss:3.603726 +step:2978 train loss:3.688267 +step:2979 train loss:3.667572 +step:2980 train loss:3.680670 +step:2981 train loss:3.623278 +step:2982 train loss:3.618499 +step:2983 train loss:3.569753 +step:2984 train loss:3.549414 +step:2985 train loss:3.661395 +step:2986 train loss:3.555393 +step:2987 train loss:3.685480 +step:2988 train loss:3.611248 +step:2989 train loss:3.638540 +step:2990 train loss:3.593076 +step:2991 train loss:3.658543 +step:2992 train loss:3.656225 +step:2993 train loss:3.616458 +step:2994 train loss:3.610344 +step:2995 train loss:3.680177 +step:2996 train loss:3.603318 +step:2997 train loss:3.513524 +step:2998 train loss:3.627552 +step:2999 train loss:3.674362 +step:3000 validation loss:3.542896 total_sharp:1.3988e-04 L1_sharp:2.0782e-04 L2_sharp:3.7631e-05 L3_sharp:2.6370e-05 L4_sharp:1.0380e-05 L5_sharp:1.1468e-05 L6_sharp:1.6890e-05 L7_sharp:2.0513e-05 L8_sharp:2.1696e-05 L9_sharp:1.7583e-05 L10_sharp:1.0546e-05 L11_sharp:9.8454e-06 L12_sharp:1.0833e-05 total_fnorm:1.0316e+01 total_l1_linf:7.7550e+04 total_spectral:1.0316e+01 L1_fnorm:2.9206e+00 L2_fnorm:2.7101e+00 L3_fnorm:2.8014e+00 L4_fnorm:2.9224e+00 L5_fnorm:2.9903e+00 L6_fnorm:2.9977e+00 L7_fnorm:3.0118e+00 L8_fnorm:3.0123e+00 L9_fnorm:3.0113e+00 L10_fnorm:3.0205e+00 L11_fnorm:3.0074e+00 L12_fnorm:3.0154e+00 L1_l1linf:2.1033e+00 L2_l1linf:1.9595e+00 L3_l1linf:1.9913e+00 L4_l1linf:2.0764e+00 L5_l1linf:2.0522e+00 L6_l1linf:2.0327e+00 L7_l1linf:2.0262e+00 L8_l1linf:2.0192e+00 L9_l1linf:2.0146e+00 L10_l1linf:2.0150e+00 L11_l1linf:1.9883e+00 L12_l1linf:1.9961e+00 L1_spectral:6.0296e-02 L2_spectral:6.0210e-02 L3_spectral:6.0278e-02 L4_spectral:6.0238e-02 L5_spectral:6.0227e-02 L6_spectral:6.0218e-02 L7_spectral:6.0224e-02 L8_spectral:6.0225e-02 L9_spectral:6.0226e-02 L10_spectral:6.0216e-02 L11_spectral:6.0220e-02 L12_spectral:6.0226e-02 ip_v_neg_g:9.1469e-03 cos_v_neg_g:2.5418e-03 v_norm:1.0316e+01 g_norm:3.4883e-01 hv_norm:1.1238e-01 cos_v_hv:1.2840e-02 hg_norm:3.9175e-01 cos_g_hg:3.0954e-01 v_par:1.3547e-03 v_perp:1.0316e+01 L1_cos_v_neg_g:1.2960e-02 L1_v_norm:2.9206e+00 L2_cos_v_neg_g:9.5115e-03 L2_v_norm:2.7101e+00 L3_cos_v_neg_g:5.7373e-03 L3_v_norm:2.8014e+00 L4_cos_v_neg_g:4.9168e-03 L4_v_norm:2.9224e+00 L5_cos_v_neg_g:5.2804e-03 L5_v_norm:2.9903e+00 L6_cos_v_neg_g:6.2057e-03 L6_v_norm:2.9977e+00 L7_cos_v_neg_g:6.9271e-03 L7_v_norm:3.0118e+00 L8_cos_v_neg_g:7.5956e-03 L8_v_norm:3.0123e+00 L9_cos_v_neg_g:7.6905e-03 L9_v_norm:3.0113e+00 L10_cos_v_neg_g:6.2712e-03 L10_v_norm:3.0205e+00 L11_cos_v_neg_g:5.1279e-03 L11_v_norm:3.0074e+00 L12_cos_v_neg_g:4.1097e-03 L12_v_norm:3.0154e+00 +step:3000 train loss:3.564735 +step:3001 train loss:3.610337 +step:3002 train loss:3.610014 +step:3003 train loss:3.607356 +step:3004 train loss:3.638021 +step:3005 train loss:3.534058 +step:3006 train loss:3.581774 +step:3007 train loss:3.615830 +step:3008 train loss:3.664053 +step:3009 train loss:3.618769 +step:3010 train loss:3.640225 +step:3011 train loss:3.618638 +step:3012 train loss:3.598782 +step:3013 train loss:3.639469 +step:3014 
train loss:3.600167 +step:3015 train loss:3.597172 +step:3016 train loss:3.611760 +step:3017 train loss:3.642245 +step:3018 train loss:3.570453 +step:3019 train loss:3.608779 +step:3020 train loss:3.630190 +step:3021 train loss:3.589366 +step:3022 train loss:3.682982 +step:3023 train loss:3.631495 +step:3024 train loss:3.617588 +step:3025 train loss:3.632367 +step:3026 train loss:3.597825 +step:3027 train loss:3.581375 +step:3028 train loss:3.630490 +step:3029 train loss:3.618957 +step:3030 train loss:3.590804 +step:3031 train loss:3.572464 +step:3032 train loss:3.568669 +step:3033 train loss:3.588500 +step:3034 train loss:3.636618 +step:3035 train loss:3.612326 +step:3036 train loss:3.578738 +step:3037 train loss:3.537715 +step:3038 train loss:3.654936 +step:3039 train loss:3.536179 +step:3040 train loss:3.516884 +step:3041 train loss:3.646244 +step:3042 train loss:3.583703 +step:3043 train loss:3.643559 +step:3044 train loss:3.539322 +step:3045 train loss:3.584829 +step:3046 train loss:3.563588 +step:3047 train loss:3.585434 +step:3048 train loss:3.560414 +step:3049 train loss:3.633666 +step:3050 train loss:3.524594 +step:3051 train loss:3.534450 +step:3052 train loss:3.556060 +step:3053 train loss:3.632185 +step:3054 train loss:3.700184 +step:3055 train loss:3.542124 +step:3056 train loss:3.567123 +step:3057 train loss:3.606614 +step:3058 train loss:3.555305 +step:3059 train loss:3.579576 +step:3060 train loss:3.583309 +step:3061 train loss:3.559790 +step:3062 train loss:3.620123 +step:3063 train loss:3.599574 +step:3064 train loss:3.629355 +step:3065 train loss:3.644410 +step:3066 train loss:3.534990 +step:3067 train loss:3.588725 +step:3068 train loss:3.639866 +step:3069 train loss:3.657785 +step:3070 train loss:3.581070 +step:3071 train loss:3.606240 +step:3072 train loss:3.604128 +step:3073 train loss:3.641228 +step:3074 train loss:3.575471 +step:3075 train loss:3.612795 +step:3076 train loss:3.546884 +step:3077 train loss:3.543645 +step:3078 train loss:3.580067 +step:3079 train loss:3.626721 +step:3080 train loss:3.615865 +step:3081 train loss:3.655908 +step:3082 train loss:3.634912 +step:3083 train loss:3.562761 +step:3084 train loss:3.647894 +step:3085 train loss:3.571266 +step:3086 train loss:3.635165 +step:3087 train loss:3.603020 +step:3088 train loss:3.681597 +step:3089 train loss:3.558812 +step:3090 train loss:3.629938 +step:3091 train loss:3.561033 +step:3092 train loss:3.574763 +step:3093 train loss:3.602202 +step:3094 train loss:3.587655 +step:3095 train loss:3.672388 +step:3096 train loss:3.599494 +step:3097 train loss:3.605724 +step:3098 train loss:3.592598 +step:3099 train loss:3.597812 +step:3100 train loss:3.628103 +step:3101 train loss:3.706492 +step:3102 train loss:3.633164 +step:3103 train loss:3.554724 +step:3104 train loss:3.635899 +step:3105 train loss:3.608723 +step:3106 train loss:3.605877 +step:3107 train loss:3.587829 +step:3108 train loss:3.560472 +step:3109 train loss:3.618906 +step:3110 train loss:3.545163 +step:3111 train loss:3.582601 +step:3112 train loss:3.520507 +step:3113 train loss:3.640263 +step:3114 train loss:3.552909 +step:3115 train loss:3.594276 +step:3116 train loss:3.478272 +step:3117 train loss:3.486285 +step:3118 train loss:3.594882 +step:3119 train loss:3.597269 +step:3120 train loss:3.603665 +step:3121 train loss:3.542140 +step:3122 train loss:3.629846 +step:3123 train loss:3.543748 +step:3124 train loss:3.608591 +step:3125 train loss:3.622261 +step:3126 train loss:3.727482 +step:3127 train loss:3.575811 +step:3128 train loss:3.604354 
+step:3129 train loss:3.585784 +step:3130 train loss:3.568442 +step:3131 train loss:3.639966 +step:3132 train loss:3.624043 +step:3133 train loss:3.599623 +step:3134 train loss:3.489932 +step:3135 train loss:3.590710 +step:3136 train loss:3.556481 +step:3137 train loss:3.698859 +step:3138 train loss:3.595716 +step:3139 train loss:3.575174 +step:3140 train loss:3.595200 +step:3141 train loss:3.599149 +step:3142 train loss:3.533724 +step:3143 train loss:3.620667 +step:3144 train loss:3.569126 +step:3145 train loss:3.554681 +step:3146 train loss:3.572387 +step:3147 train loss:3.679453 +step:3148 train loss:3.584257 +step:3149 train loss:3.638594 +step:3150 train loss:3.625563 +step:3151 train loss:3.589090 +step:3152 train loss:3.592865 +step:3153 train loss:3.546699 +step:3154 train loss:3.632976 +step:3155 train loss:3.569730 +step:3156 train loss:3.624240 +step:3157 train loss:3.625641 +step:3158 train loss:3.601222 +step:3159 train loss:3.538731 +step:3160 train loss:3.586650 +step:3161 train loss:3.566435 +step:3162 train loss:3.615503 +step:3163 train loss:3.601229 +step:3164 train loss:3.576297 +step:3165 train loss:3.594945 +step:3166 train loss:3.629924 +step:3167 train loss:3.593594 +step:3168 train loss:3.672135 +step:3169 train loss:3.583098 +step:3170 train loss:3.564860 +step:3171 train loss:3.556571 +step:3172 train loss:3.559925 +step:3173 train loss:3.506609 +step:3174 train loss:3.621775 +step:3175 train loss:3.585710 +step:3176 train loss:3.597496 +step:3177 train loss:3.566730 +step:3178 train loss:3.544223 +step:3179 train loss:3.622973 +step:3180 train loss:3.550552 +step:3181 train loss:3.633110 +step:3182 train loss:3.637474 +step:3183 train loss:3.583077 +step:3184 train loss:3.580267 +step:3185 train loss:3.638073 +step:3186 train loss:3.595263 +step:3187 train loss:3.615858 +step:3188 train loss:3.660942 +step:3189 train loss:3.599370 +step:3190 train loss:3.562932 +step:3191 train loss:3.559854 +step:3192 train loss:3.530341 +step:3193 train loss:3.604255 +step:3194 train loss:3.572424 +step:3195 train loss:3.552295 +step:3196 train loss:3.607275 +step:3197 train loss:3.566886 +step:3198 train loss:3.602340 +step:3199 train loss:3.581930 +step:3200 train loss:3.590671 +step:3201 train loss:3.551653 +step:3202 train loss:3.619931 +step:3203 train loss:3.680680 +step:3204 train loss:3.641333 +step:3205 train loss:3.491618 +step:3206 train loss:3.772335 +step:3207 train loss:3.526463 +step:3208 train loss:3.592023 +step:3209 train loss:3.583608 +step:3210 train loss:3.567565 +step:3211 train loss:3.598196 +step:3212 train loss:3.603435 +step:3213 train loss:3.545351 +step:3214 train loss:3.649412 +step:3215 train loss:3.658323 +step:3216 train loss:3.524068 +step:3217 train loss:3.609672 +step:3218 train loss:3.643228 +step:3219 train loss:3.565162 +step:3220 train loss:3.631855 +step:3221 train loss:3.545540 +step:3222 train loss:3.592043 +step:3223 train loss:3.607992 +step:3224 train loss:3.614112 +step:3225 train loss:3.544290 +step:3226 train loss:3.575727 +step:3227 train loss:3.603369 +step:3228 train loss:3.598761 +step:3229 train loss:3.630822 +step:3230 train loss:3.643464 +step:3231 train loss:3.584547 +step:3232 train loss:3.592153 +step:3233 train loss:3.565371 +step:3234 train loss:3.554726 +step:3235 train loss:3.554011 +step:3236 train loss:3.576981 +step:3237 train loss:3.578651 +step:3238 train loss:3.590314 +step:3239 train loss:3.494202 +step:3240 train loss:3.605080 +step:3241 train loss:3.605379 +step:3242 train loss:3.662083 +step:3243 train 
loss:3.600105 +step:3244 train loss:3.619787 +step:3245 train loss:3.523089 +step:3246 train loss:3.647058 +step:3247 train loss:3.591911 +step:3248 train loss:3.614360 +step:3249 train loss:3.556915 +step:3250 validation loss:3.522217 +step:3250 train loss:3.554914 +step:3251 train loss:3.670556 +step:3252 train loss:3.597983 +step:3253 train loss:3.600195 +step:3254 train loss:3.667440 +step:3255 train loss:3.610578 +step:3256 train loss:3.603149 +step:3257 train loss:3.585612 +step:3258 train loss:3.516299 +step:3259 train loss:3.497117 +step:3260 train loss:3.611767 +step:3261 train loss:3.587519 +step:3262 train loss:3.579327 +step:3263 train loss:3.566952 +step:3264 train loss:3.672367 +step:3265 train loss:3.583625 +step:3266 train loss:3.613625 +step:3267 train loss:3.576446 +step:3268 train loss:3.580084 +step:3269 train loss:3.593046 +step:3270 train loss:3.622520 +step:3271 train loss:3.581293 +step:3272 train loss:3.564924 +step:3273 train loss:3.570960 +step:3274 train loss:3.703873 +step:3275 train loss:3.577651 +step:3276 train loss:3.648155 +step:3277 train loss:3.583120 +step:3278 train loss:3.560282 +step:3279 train loss:3.581242 +step:3280 train loss:3.615400 +step:3281 train loss:3.536371 +step:3282 train loss:3.611608 +step:3283 train loss:3.580336 +step:3284 train loss:3.546593 +step:3285 train loss:3.560533 +step:3286 train loss:3.592062 +step:3287 train loss:3.527529 +step:3288 train loss:3.613729 +step:3289 train loss:3.552695 +step:3290 train loss:3.588153 +step:3291 train loss:3.543166 +step:3292 train loss:3.572079 +step:3293 train loss:3.612785 +step:3294 train loss:3.630185 +step:3295 train loss:3.534414 +step:3296 train loss:3.596476 +step:3297 train loss:3.550919 +step:3298 train loss:3.555399 +step:3299 train loss:3.682521 +step:3300 train loss:3.526153 +step:3301 train loss:3.602002 +step:3302 train loss:3.578367 +step:3303 train loss:3.584855 +step:3304 train loss:3.557946 +step:3305 train loss:3.645557 +step:3306 train loss:3.577562 +step:3307 train loss:3.595729 +step:3308 train loss:3.556663 +step:3309 train loss:3.612549 +step:3310 train loss:3.533234 +step:3311 train loss:3.583792 +step:3312 train loss:3.556666 +step:3313 train loss:3.589729 +step:3314 train loss:3.585391 +step:3315 train loss:3.665422 +step:3316 train loss:3.518600 +step:3317 train loss:3.605725 +step:3318 train loss:3.620913 +step:3319 train loss:3.543715 +step:3320 train loss:3.711119 +step:3321 train loss:3.608499 +step:3322 train loss:3.611044 +step:3323 train loss:3.714043 +step:3324 train loss:3.634609 +step:3325 train loss:3.600163 +step:3326 train loss:3.595428 +step:3327 train loss:3.609630 +step:3328 train loss:3.589899 +step:3329 train loss:3.587449 +step:3330 train loss:3.579773 +step:3331 train loss:3.627324 +step:3332 train loss:3.644758 +step:3333 train loss:3.610586 +step:3334 train loss:3.542235 +step:3335 train loss:3.562380 +step:3336 train loss:3.592288 +step:3337 train loss:3.594345 +step:3338 train loss:3.577177 +step:3339 train loss:3.575700 +step:3340 train loss:3.618058 +step:3341 train loss:3.555112 +step:3342 train loss:3.608782 +step:3343 train loss:3.546052 +step:3344 train loss:3.605486 +step:3345 train loss:3.554268 +step:3346 train loss:3.569377 +step:3347 train loss:3.569792 +step:3348 train loss:3.598834 +step:3349 train loss:3.575640 +step:3350 train loss:3.604033 +step:3351 train loss:3.657657 +step:3352 train loss:3.601359 +step:3353 train loss:3.702571 +step:3354 train loss:3.545178 +step:3355 train loss:3.649735 +step:3356 train loss:3.600409 
+step:3357 train loss:3.610523 +step:3358 train loss:3.550145 +step:3359 train loss:3.580801 +step:3360 train loss:3.576881 +step:3361 train loss:3.579439 +step:3362 train loss:3.566814 +step:3363 train loss:3.568865 +step:3364 train loss:3.548803 +step:3365 train loss:3.588135 +step:3366 train loss:3.619566 +step:3367 train loss:3.573376 +step:3368 train loss:3.668157 +step:3369 train loss:3.581872 +step:3370 train loss:3.676365 +step:3371 train loss:3.625499 +step:3372 train loss:3.588996 +step:3373 train loss:3.602171 +step:3374 train loss:3.650323 +step:3375 train loss:3.581456 +step:3376 train loss:3.588779 +step:3377 train loss:3.575717 +step:3378 train loss:3.553362 +step:3379 train loss:3.632497 +step:3380 train loss:3.613461 +step:3381 train loss:3.594051 +step:3382 train loss:3.616527 +step:3383 train loss:3.620782 +step:3384 train loss:3.555031 +step:3385 train loss:3.600931 +step:3386 train loss:3.584917 +step:3387 train loss:3.655807 +step:3388 train loss:3.558559 +step:3389 train loss:3.774552 +step:3390 train loss:3.489626 +step:3391 train loss:3.575472 +step:3392 train loss:3.558383 +step:3393 train loss:3.589005 +step:3394 train loss:3.549680 +step:3395 train loss:3.619191 +step:3396 train loss:3.532876 +step:3397 train loss:3.612322 +step:3398 train loss:3.578941 +step:3399 train loss:3.599187 +step:3400 train loss:3.544647 +step:3401 train loss:3.584479 +step:3402 train loss:3.742899 +step:3403 train loss:3.634483 +step:3404 train loss:3.747451 +step:3405 train loss:3.604537 +step:3406 train loss:3.574657 +step:3407 train loss:3.575235 +step:3408 train loss:3.555387 +step:3409 train loss:3.521246 +step:3410 train loss:3.555530 +step:3411 train loss:3.623707 +step:3412 train loss:3.543478 +step:3413 train loss:3.541416 +step:3414 train loss:3.577882 +step:3415 train loss:3.550869 +step:3416 train loss:3.558677 +step:3417 train loss:3.634006 +step:3418 train loss:3.637835 +step:3419 train loss:3.593693 +step:3420 train loss:3.571154 +step:3421 train loss:3.604400 +step:3422 train loss:3.620349 +step:3423 train loss:3.638241 +step:3424 train loss:3.519193 +step:3425 train loss:3.543144 +step:3426 train loss:3.539951 +step:3427 train loss:3.601199 +step:3428 train loss:3.521634 +step:3429 train loss:3.586714 +step:3430 train loss:3.556597 +step:3431 train loss:3.612550 +step:3432 train loss:3.593461 +step:3433 train loss:3.556022 +step:3434 train loss:3.635809 +step:3435 train loss:3.577806 +step:3436 train loss:3.667584 +step:3437 train loss:3.494157 +step:3438 train loss:3.603571 +step:3439 train loss:3.575847 +step:3440 train loss:3.676134 +step:3441 train loss:3.570287 +step:3442 train loss:3.638225 +step:3443 train loss:3.567981 +step:3444 train loss:3.588414 +step:3445 train loss:3.632450 +step:3446 train loss:3.538956 +step:3447 train loss:3.612830 +step:3448 train loss:3.567270 +step:3449 train loss:3.598757 +step:3450 train loss:3.505477 +step:3451 train loss:3.626231 +step:3452 train loss:3.572008 +step:3453 train loss:3.630263 +step:3454 train loss:3.654619 +step:3455 train loss:3.707856 +step:3456 train loss:3.645887 +step:3457 train loss:3.646722 +step:3458 train loss:3.570235 +step:3459 train loss:3.576760 +step:3460 train loss:3.523436 +step:3461 train loss:3.589726 +step:3462 train loss:3.591524 +step:3463 train loss:3.564474 +step:3464 train loss:3.618099 +step:3465 train loss:3.546903 +step:3466 train loss:3.618060 +step:3467 train loss:3.570172 +step:3468 train loss:3.585895 +step:3469 train loss:3.595001 +step:3470 train loss:3.579267 +step:3471 train 
loss:3.621433 +step:3472 train loss:3.504894 +step:3473 train loss:3.631653 +step:3474 train loss:3.523266 +step:3475 train loss:3.609721 +step:3476 train loss:3.574426 +step:3477 train loss:3.597033 +step:3478 train loss:3.570894 +step:3479 train loss:3.599597 +step:3480 train loss:3.620086 +step:3481 train loss:3.598120 +step:3482 train loss:3.584298 +step:3483 train loss:3.723600 +step:3484 train loss:3.565786 +step:3485 train loss:3.552816 +step:3486 train loss:3.605827 +step:3487 train loss:3.647835 +step:3488 train loss:3.551277 +step:3489 train loss:3.604379 +step:3490 train loss:3.567617 +step:3491 train loss:3.612564 +step:3492 train loss:3.643373 +step:3493 train loss:3.617774 +step:3494 train loss:3.609267 +step:3495 train loss:3.587720 +step:3496 train loss:3.554174 +step:3497 train loss:3.667474 +step:3498 train loss:3.609008 +step:3499 train loss:3.539048 +step:3500 validation loss:3.508062 total_sharp:1.0540e-04 L1_sharp:1.0219e-04 L2_sharp:2.3316e-05 L3_sharp:1.6436e-05 L4_sharp:9.7471e-06 L5_sharp:1.0851e-05 L6_sharp:1.4944e-05 L7_sharp:1.9011e-05 L8_sharp:1.7850e-05 L9_sharp:1.7813e-05 L10_sharp:1.0693e-05 L11_sharp:9.9942e-06 L12_sharp:1.1337e-05 total_fnorm:1.0329e+01 total_l1_linf:7.7643e+04 total_spectral:1.0329e+01 L1_fnorm:2.9149e+00 L2_fnorm:2.7308e+00 L3_fnorm:2.8265e+00 L4_fnorm:2.9070e+00 L5_fnorm:2.9907e+00 L6_fnorm:2.9957e+00 L7_fnorm:3.0172e+00 L8_fnorm:3.0192e+00 L9_fnorm:3.0134e+00 L10_fnorm:3.0223e+00 L11_fnorm:3.0157e+00 L12_fnorm:3.0140e+00 L1_l1linf:2.0523e+00 L2_l1linf:1.9581e+00 L3_l1linf:2.0074e+00 L4_l1linf:2.0697e+00 L5_l1linf:2.0385e+00 L6_l1linf:2.0451e+00 L7_l1linf:2.0463e+00 L8_l1linf:2.0290e+00 L9_l1linf:2.0165e+00 L10_l1linf:2.0428e+00 L11_l1linf:2.0072e+00 L12_l1linf:1.9819e+00 L1_spectral:6.0219e-02 L2_spectral:6.0211e-02 L3_spectral:6.0245e-02 L4_spectral:6.0215e-02 L5_spectral:6.0228e-02 L6_spectral:6.0219e-02 L7_spectral:6.0217e-02 L8_spectral:6.0204e-02 L9_spectral:6.0234e-02 L10_spectral:6.0222e-02 L11_spectral:6.0206e-02 L12_spectral:6.0229e-02 ip_v_neg_g:7.5712e-03 cos_v_neg_g:1.9434e-03 v_norm:1.0329e+01 g_norm:3.7718e-01 hv_norm:8.2461e-02 cos_v_hv:1.3202e-02 hg_norm:8.1700e-01 cos_g_hg:3.9946e-01 v_par:4.5177e-04 v_perp:1.0329e+01 L1_cos_v_neg_g:9.3122e-03 L1_v_norm:2.9149e+00 L2_cos_v_neg_g:6.8525e-03 L2_v_norm:2.7308e+00 L3_cos_v_neg_g:5.9014e-03 L3_v_norm:2.8265e+00 L4_cos_v_neg_g:5.0510e-03 L4_v_norm:2.9070e+00 L5_cos_v_neg_g:4.8046e-03 L5_v_norm:2.9907e+00 L6_cos_v_neg_g:5.2815e-03 L6_v_norm:2.9957e+00 L7_cos_v_neg_g:6.3932e-03 L7_v_norm:3.0172e+00 L8_cos_v_neg_g:6.4185e-03 L8_v_norm:3.0192e+00 L9_cos_v_neg_g:5.9237e-03 L9_v_norm:3.0134e+00 L10_cos_v_neg_g:5.0537e-03 L10_v_norm:3.0223e+00 L11_cos_v_neg_g:4.5186e-03 L11_v_norm:3.0157e+00 L12_cos_v_neg_g:2.6504e-03 L12_v_norm:3.0140e+00 +step:3500 train loss:3.561845 +step:3501 train loss:3.688154 +step:3502 train loss:3.670327 +step:3503 train loss:3.617340 +step:3504 train loss:3.573695 +step:3505 train loss:3.585200 +step:3506 train loss:3.489451 +step:3507 train loss:3.606003 +step:3508 train loss:3.546103 +step:3509 train loss:3.619847 +step:3510 train loss:3.550118 +step:3511 train loss:3.587196 +step:3512 train loss:3.723949 +step:3513 train loss:3.547492 +step:3514 train loss:3.561388 +step:3515 train loss:3.814226 +step:3516 train loss:3.604956 +step:3517 train loss:3.561183 +step:3518 train loss:3.569594 +step:3519 train loss:3.559664 +step:3520 train loss:3.593826 +step:3521 train loss:3.578590 +step:3522 train loss:3.497391 +step:3523 train loss:3.597044 +step:3524 
train loss:3.579160 +step:3525 train loss:3.571929 +step:3526 train loss:3.591944 +step:3527 train loss:3.542269 +step:3528 train loss:3.595108 +step:3529 train loss:3.572926 +step:3530 train loss:3.570359 +step:3531 train loss:3.557096 +step:3532 train loss:3.751457 +step:3533 train loss:3.564029 +step:3534 train loss:3.582114 +step:3535 train loss:3.556818 +step:3536 train loss:3.555647 +step:3537 train loss:3.571763 +step:3538 train loss:3.595716 +step:3539 train loss:3.547278 +step:3540 train loss:3.611440 +step:3541 train loss:3.580853 +step:3542 train loss:3.587867 +step:3543 train loss:3.512649 +step:3544 train loss:3.529992 +step:3545 train loss:3.533431 +step:3546 train loss:3.598627 +step:3547 train loss:3.606010 +step:3548 train loss:3.579882 +step:3549 train loss:3.576310 +step:3550 train loss:3.571202 +step:3551 train loss:3.592766 +step:3552 train loss:3.485896 +step:3553 train loss:3.608877 +step:3554 train loss:3.604664 +step:3555 train loss:3.587990 +step:3556 train loss:3.612225 +step:3557 train loss:3.601306 +step:3558 train loss:3.572911 +step:3559 train loss:3.520739 +step:3560 train loss:3.613157 +step:3561 train loss:3.605919 +step:3562 train loss:3.780215 +step:3563 train loss:3.637259 +step:3564 train loss:3.596967 +step:3565 train loss:3.597827 +step:3566 train loss:3.570327 +step:3567 train loss:3.508969 +step:3568 train loss:3.539522 +step:3569 train loss:3.622439 +step:3570 train loss:3.650490 +step:3571 train loss:3.624096 +step:3572 train loss:3.616451 +step:3573 train loss:3.573053 +step:3574 train loss:3.573538 +step:3575 train loss:3.564453 +step:3576 train loss:3.546471 +step:3577 train loss:3.558553 +step:3578 train loss:3.637995 +step:3579 train loss:3.548689 +step:3580 train loss:3.631299 +step:3581 train loss:3.572038 +step:3582 train loss:3.631111 +step:3583 train loss:3.564158 +step:3584 train loss:3.541124 +step:3585 train loss:3.585097 +step:3586 train loss:3.542575 +step:3587 train loss:3.631530 +step:3588 train loss:3.768026 +step:3589 train loss:3.594889 +step:3590 train loss:3.577839 +step:3591 train loss:3.588427 +step:3592 train loss:3.548899 +step:3593 train loss:3.521874 +step:3594 train loss:3.577182 +step:3595 train loss:3.547981 +step:3596 train loss:3.627815 +step:3597 train loss:3.601764 +step:3598 train loss:3.556216 +step:3599 train loss:3.609296 +step:3600 train loss:3.544481 +step:3601 train loss:3.566394 +step:3602 train loss:3.547033 +step:3603 train loss:3.568721 +step:3604 train loss:3.590454 +step:3605 train loss:3.700397 +step:3606 train loss:3.598839 +step:3607 train loss:3.579073 +step:3608 train loss:3.597574 +step:3609 train loss:3.580631 +step:3610 train loss:3.551673 +step:3611 train loss:3.551996 +step:3612 train loss:3.621843 +step:3613 train loss:3.587031 +step:3614 train loss:3.537244 +step:3615 train loss:3.572618 +step:3616 train loss:3.524348 +step:3617 train loss:3.604606 +step:3618 train loss:3.558489 +step:3619 train loss:3.549263 +step:3620 train loss:3.564306 +step:3621 train loss:3.525628 +step:3622 train loss:3.631127 +step:3623 train loss:3.621856 +step:3624 train loss:3.593600 +step:3625 train loss:3.567871 +step:3626 train loss:3.583732 +step:3627 train loss:3.579529 +step:3628 train loss:3.561240 +step:3629 train loss:3.568156 +step:3630 train loss:3.649074 +step:3631 train loss:3.574632 +step:3632 train loss:3.603370 +step:3633 train loss:3.564945 +step:3634 train loss:3.567708 +step:3635 train loss:3.560678 +step:3636 train loss:3.629236 +step:3637 train loss:3.703939 +step:3638 train loss:3.621255 
+step:3639 train loss:3.609074 +step:3640 train loss:3.615303 +step:3641 train loss:3.652392 +step:3642 train loss:3.549400 +step:3643 train loss:3.719231 +step:3644 train loss:3.607984 +step:3645 train loss:3.583338 +step:3646 train loss:3.701936 +step:3647 train loss:3.588651 +step:3648 train loss:3.583472 +step:3649 train loss:3.531659 +step:3650 train loss:3.575041 +step:3651 train loss:3.573700 +step:3652 train loss:3.558614 +step:3653 train loss:3.498685 +step:3654 train loss:3.554296 +step:3655 train loss:3.546816 +step:3656 train loss:3.577031 +step:3657 train loss:3.595868 +step:3658 train loss:3.589291 +step:3659 train loss:3.573912 +step:3660 train loss:3.549870 +step:3661 train loss:3.576784 +step:3662 train loss:3.547818 +step:3663 train loss:3.585527 +step:3664 train loss:3.542208 +step:3665 train loss:3.585979 +step:3666 train loss:3.620656 +step:3667 train loss:3.717311 +step:3668 train loss:3.593698 +step:3669 train loss:3.554435 +step:3670 train loss:3.602207 +step:3671 train loss:3.560507 +step:3672 train loss:3.597609 +step:3673 train loss:3.579000 +step:3674 train loss:3.593787 +step:3675 train loss:3.607242 +step:3676 train loss:3.570411 +step:3677 train loss:3.531791 +step:3678 train loss:3.591631 +step:3679 train loss:3.494264 +step:3680 train loss:3.597446 +step:3681 train loss:3.631248 +step:3682 train loss:3.610982 +step:3683 train loss:3.552824 +step:3684 train loss:3.551556 +step:3685 train loss:3.582812 +step:3686 train loss:3.607838 +step:3687 train loss:3.566031 +step:3688 train loss:3.539351 +step:3689 train loss:3.571097 +step:3690 train loss:3.561097 +step:3691 train loss:3.543912 +step:3692 train loss:3.609478 +step:3693 train loss:3.741866 +step:3694 train loss:3.555679 +step:3695 train loss:3.608737 +step:3696 train loss:3.572891 +step:3697 train loss:3.564813 +step:3698 train loss:3.505461 +step:3699 train loss:3.531529 +step:3700 train loss:3.559966 +step:3701 train loss:3.580900 +step:3702 train loss:3.601542 +step:3703 train loss:3.558716 +step:3704 train loss:3.604004 +step:3705 train loss:3.582944 +step:3706 train loss:3.537916 +step:3707 train loss:3.588010 +step:3708 train loss:3.567595 +step:3709 train loss:3.487522 +step:3710 train loss:3.610073 +step:3711 train loss:3.559546 +step:3712 train loss:3.598208 +step:3713 train loss:3.549175 +step:3714 train loss:3.567062 +step:3715 train loss:3.686802 +step:3716 train loss:3.589354 +step:3717 train loss:3.566114 +step:3718 train loss:3.569985 +step:3719 train loss:3.564552 +step:3720 train loss:3.572590 +step:3721 train loss:3.631875 +step:3722 train loss:3.645504 +step:3723 train loss:3.533136 +step:3724 train loss:3.589468 +step:3725 train loss:3.569730 +step:3726 train loss:3.588643 +step:3727 train loss:3.662336 +step:3728 train loss:3.626345 +step:3729 train loss:3.522162 +step:3730 train loss:3.540458 +step:3731 train loss:3.565892 +step:3732 train loss:3.720747 +step:3733 train loss:3.577974 +step:3734 train loss:3.579031 +step:3735 train loss:3.518483 +step:3736 train loss:3.575959 +step:3737 train loss:3.623671 +step:3738 train loss:3.648179 +step:3739 train loss:3.563151 +step:3740 train loss:3.469711 +step:3741 train loss:3.671136 +step:3742 train loss:3.581536 +step:3743 train loss:3.560323 +step:3744 train loss:3.562576 +step:3745 train loss:3.573930 +step:3746 train loss:3.539768 +step:3747 train loss:3.556869 +step:3748 train loss:3.603048 +step:3749 train loss:3.582787 +step:3750 validation loss:3.496610 +step:3750 train loss:3.592801 +step:3751 train loss:3.685481 +step:3752 
train loss:3.625598 +step:3753 train loss:3.534815 +step:3754 train loss:3.586984 +step:3755 train loss:3.767694 +step:3756 train loss:3.542145 +step:3757 train loss:3.537157 +step:3758 train loss:3.572237 +step:3759 train loss:3.515644 +step:3760 train loss:3.514433 +step:3761 train loss:3.566114 +step:3762 train loss:3.557595 +step:3763 train loss:3.563872 +step:3764 train loss:3.549177 +step:3765 train loss:3.553898 +step:3766 train loss:3.520318 +step:3767 train loss:3.605853 +step:3768 train loss:3.544843 +step:3769 train loss:3.810992 +step:3770 train loss:3.599599 +step:3771 train loss:3.605129 +step:3772 train loss:3.568137 +step:3773 train loss:3.561486 +step:3774 train loss:3.561364 +step:3775 train loss:3.561610 +step:3776 train loss:3.557516 +step:3777 train loss:3.521678 +step:3778 train loss:3.539415 +step:3779 train loss:3.522699 +step:3780 train loss:3.607758 +step:3781 train loss:3.570601 +step:3782 train loss:3.491361 +step:3783 train loss:3.598487 +step:3784 train loss:3.607695 +step:3785 train loss:3.517077 +step:3786 train loss:3.625031 +step:3787 train loss:3.537231 +step:3788 train loss:3.551294 +step:3789 train loss:3.458040 +step:3790 train loss:3.574697 +step:3791 train loss:3.593598 +step:3792 train loss:3.567207 +step:3793 train loss:3.565746 +step:3794 train loss:3.594695 +step:3795 train loss:3.564195 +step:3796 train loss:3.578556 +step:3797 train loss:3.555281 +step:3798 train loss:3.564034 +step:3799 train loss:3.570378 +step:3800 train loss:3.481440 +step:3801 train loss:3.593912 +step:3802 train loss:3.518581 +step:3803 train loss:3.605843 +step:3804 train loss:3.616938 +step:3805 train loss:3.575333 +step:3806 train loss:3.593298 +step:3807 train loss:3.610896 +step:3808 train loss:3.570383 +step:3809 train loss:3.581764 +step:3810 train loss:3.581202 +step:3811 train loss:3.567902 +step:3812 train loss:3.568141 +step:3813 train loss:3.525693 +step:3814 train loss:3.570152 +step:3815 train loss:3.573534 +step:3816 train loss:3.588237 +step:3817 train loss:3.605648 +step:3818 train loss:3.579905 +step:3819 train loss:3.590880 +step:3820 train loss:3.591895 +step:3821 train loss:3.553287 +step:3822 train loss:3.633557 +step:3823 train loss:3.523511 +step:3824 train loss:3.540499 +step:3825 train loss:3.545974 +step:3826 train loss:3.657062 +step:3827 train loss:3.632113 +step:3828 train loss:3.524650 +step:3829 train loss:3.546308 +step:3830 train loss:3.602461 +step:3831 train loss:3.539762 +step:3832 train loss:3.600462 +step:3833 train loss:3.541778 +step:3834 train loss:3.509229 +step:3835 train loss:3.551821 +step:3836 train loss:3.525887 +step:3837 train loss:3.592091 +step:3838 train loss:3.544703 +step:3839 train loss:3.594308 +step:3840 train loss:3.602907 +step:3841 train loss:3.552915 +step:3842 train loss:3.579868 +step:3843 train loss:3.594924 +step:3844 train loss:3.566760 +step:3845 train loss:3.592968 +step:3846 train loss:3.630070 +step:3847 train loss:3.526760 +step:3848 train loss:3.535084 +step:3849 train loss:3.551056 +step:3850 train loss:3.566869 +step:3851 train loss:3.712288 +step:3852 train loss:3.690726 +step:3853 train loss:3.583378 +step:3854 train loss:3.546479 +step:3855 train loss:3.595094 +step:3856 train loss:3.520573 +step:3857 train loss:3.583180 +step:3858 train loss:3.494934 +step:3859 train loss:3.542873 +step:3860 train loss:3.607635 +step:3861 train loss:3.582178 +step:3862 train loss:3.520658 +step:3863 train loss:3.570907 +step:3864 train loss:3.544618 +step:3865 train loss:3.576749 +step:3866 train loss:3.598579 
+step:3867 train loss:3.594579 +step:3868 train loss:3.540727 +step:3869 train loss:3.545429 +step:3870 train loss:3.517895 +step:3871 train loss:3.518765 +step:3872 train loss:3.652589 +step:3873 train loss:3.574291 +step:3874 train loss:3.588537 +step:3875 train loss:3.696131 +step:3876 train loss:3.571511 +step:3877 train loss:3.597721 +step:3878 train loss:3.620800 +step:3879 train loss:3.610006 +step:3880 train loss:3.693782 +step:3881 train loss:3.513520 +step:3882 train loss:3.549945 +step:3883 train loss:3.561685 +step:3884 train loss:3.556429 +step:3885 train loss:3.572005 +step:3886 train loss:3.628328 +step:3887 train loss:3.608295 +step:3888 train loss:3.573826 +step:3889 train loss:3.542409 +step:3890 train loss:3.580594 +step:3891 train loss:3.593444 +step:3892 train loss:3.504772 +step:3893 train loss:3.610773 +step:3894 train loss:3.560710 +step:3895 train loss:3.578804 +step:3896 train loss:3.571036 +step:3897 train loss:3.540966 +step:3898 train loss:3.599323 +step:3899 train loss:3.634965 +step:3900 train loss:3.591617 +step:3901 train loss:3.607652 +step:3902 train loss:3.531069 +step:3903 train loss:3.550251 +step:3904 train loss:3.580966 +step:3905 train loss:3.519598 +step:3906 train loss:3.553455 +step:3907 train loss:3.588748 +step:3908 train loss:3.666219 +step:3909 train loss:3.560661 +step:3910 train loss:3.580950 +step:3911 train loss:3.598543 +step:3912 train loss:3.549117 +step:3913 train loss:3.560893 +step:3914 train loss:3.581791 +step:3915 train loss:3.554632 +step:3916 train loss:3.587987 +step:3917 train loss:3.634862 +step:3918 train loss:3.602507 +step:3919 train loss:3.584228 +step:3920 train loss:3.557750 +step:3921 train loss:3.597503 +step:3922 train loss:3.599477 +step:3923 train loss:3.587615 +step:3924 train loss:3.527268 +step:3925 train loss:3.729738 +step:3926 train loss:3.571137 +step:3927 train loss:3.549588 +step:3928 train loss:3.628940 +step:3929 train loss:3.694198 +step:3930 train loss:3.586734 +step:3931 train loss:3.517617 +step:3932 train loss:3.575106 +step:3933 train loss:3.590481 +step:3934 train loss:3.541469 +step:3935 train loss:3.520195 +step:3936 train loss:3.610323 +step:3937 train loss:3.570016 +step:3938 train loss:3.581189 +step:3939 train loss:3.607772 +step:3940 train loss:3.557703 +step:3941 train loss:3.641431 +step:3942 train loss:3.601723 +step:3943 train loss:3.587798 +step:3944 train loss:3.633991 +step:3945 train loss:3.545017 +step:3946 train loss:3.490402 +step:3947 train loss:3.623425 +step:3948 train loss:3.588385 +step:3949 train loss:3.758432 +step:3950 train loss:3.557704 +step:3951 train loss:3.509601 +step:3952 train loss:3.446125 +step:3953 train loss:3.521077 +step:3954 train loss:3.566266 +step:3955 train loss:3.597944 +step:3956 train loss:3.559304 +step:3957 train loss:3.604100 +step:3958 train loss:3.587480 +step:3959 train loss:3.621085 +step:3960 train loss:3.545860 +step:3961 train loss:3.573041 +step:3962 train loss:3.575166 +step:3963 train loss:3.553047 +step:3964 train loss:3.535186 +step:3965 train loss:3.581977 +step:3966 train loss:3.540028 +step:3967 train loss:3.591356 +step:3968 train loss:3.603471 +step:3969 train loss:3.511909 +step:3970 train loss:3.619308 +step:3971 train loss:3.541947 +step:3972 train loss:3.569772 +step:3973 train loss:3.532619 +step:3974 train loss:3.628621 +step:3975 train loss:3.572593 +step:3976 train loss:3.531056 +step:3977 train loss:3.591681 +step:3978 train loss:3.553590 +step:3979 train loss:3.545230 +step:3980 train loss:3.612535 +step:3981 train 
loss:3.544159 +step:3982 train loss:3.567474 +step:3983 train loss:3.548977 +step:3984 train loss:3.585782 +step:3985 train loss:3.559277 +step:3986 train loss:3.574108 +step:3987 train loss:3.585324 +step:3988 train loss:3.517799 +step:3989 train loss:3.592107 +step:3990 train loss:3.586170 +step:3991 train loss:3.603733 +step:3992 train loss:3.556672 +step:3993 train loss:3.589438 +step:3994 train loss:3.541862 +step:3995 train loss:3.594130 +step:3996 train loss:3.512160 +step:3997 train loss:3.587857 +step:3998 train loss:3.470013 +step:3999 train loss:3.627649 +step:4000 validation loss:3.484882 total_sharp:9.8272e-05 L1_sharp:3.7277e-05 L2_sharp:2.0280e-05 L3_sharp:2.2440e-05 L4_sharp:1.0722e-05 L5_sharp:1.1623e-05 L6_sharp:1.4616e-05 L7_sharp:1.8831e-05 L8_sharp:1.8493e-05 L9_sharp:1.7916e-05 L10_sharp:1.0924e-05 L11_sharp:9.4544e-06 L12_sharp:1.7076e-05 total_fnorm:1.0348e+01 total_l1_linf:7.7789e+04 total_spectral:1.0348e+01 L1_fnorm:2.9343e+00 L2_fnorm:2.7336e+00 L3_fnorm:2.8245e+00 L4_fnorm:2.9395e+00 L5_fnorm:2.9946e+00 L6_fnorm:3.0032e+00 L7_fnorm:3.0194e+00 L8_fnorm:3.0193e+00 L9_fnorm:3.0166e+00 L10_fnorm:3.0225e+00 L11_fnorm:3.0113e+00 L12_fnorm:3.0160e+00 L1_l1linf:2.0313e+00 L2_l1linf:1.9622e+00 L3_l1linf:2.0324e+00 L4_l1linf:2.0734e+00 L5_l1linf:2.0527e+00 L6_l1linf:2.0557e+00 L7_l1linf:2.0622e+00 L8_l1linf:2.0319e+00 L9_l1linf:2.0123e+00 L10_l1linf:2.0117e+00 L11_l1linf:2.0026e+00 L12_l1linf:1.9991e+00 L1_spectral:6.0223e-02 L2_spectral:6.0219e-02 L3_spectral:6.0237e-02 L4_spectral:6.0219e-02 L5_spectral:6.0229e-02 L6_spectral:6.0220e-02 L7_spectral:6.0221e-02 L8_spectral:6.0224e-02 L9_spectral:6.0212e-02 L10_spectral:6.0214e-02 L11_spectral:6.0222e-02 L12_spectral:6.0230e-02 ip_v_neg_g:4.4197e-03 cos_v_neg_g:1.2813e-03 v_norm:1.0348e+01 g_norm:3.3332e-01 hv_norm:6.6981e-02 cos_v_hv:1.5183e-02 hg_norm:4.8206e-01 cos_g_hg:3.2148e-01 v_par:1.3199e-03 v_perp:1.0348e+01 L1_cos_v_neg_g:1.4627e-03 L1_v_norm:2.9343e+00 L2_cos_v_neg_g:5.0375e-04 L2_v_norm:2.7336e+00 L3_cos_v_neg_g:3.0836e-03 L3_v_norm:2.8245e+00 L4_cos_v_neg_g:6.8051e-04 L4_v_norm:2.9395e+00 L5_cos_v_neg_g:2.5014e-03 L5_v_norm:2.9946e+00 L6_cos_v_neg_g:3.5839e-03 L6_v_norm:3.0032e+00 L7_cos_v_neg_g:4.8099e-03 L7_v_norm:3.0194e+00 L8_cos_v_neg_g:6.3314e-03 L8_v_norm:3.0193e+00 L9_cos_v_neg_g:6.1209e-03 L9_v_norm:3.0166e+00 L10_cos_v_neg_g:5.2801e-03 L10_v_norm:3.0225e+00 L11_cos_v_neg_g:4.1246e-03 L11_v_norm:3.0113e+00 L12_cos_v_neg_g:4.0302e-03 L12_v_norm:3.0160e+00 +step:4000 train loss:3.509164 +step:4001 train loss:3.583244 +step:4002 train loss:3.562148 +step:4003 train loss:3.598618 +step:4004 train loss:3.507316 +step:4005 train loss:3.599037 +step:4006 train loss:3.606813 +step:4007 train loss:3.530087 +step:4008 train loss:3.488467 +step:4009 train loss:3.569570 +step:4010 train loss:3.546197 +step:4011 train loss:3.552947 +step:4012 train loss:3.569133 +step:4013 train loss:3.544203 +step:4014 train loss:3.558346 +step:4015 train loss:3.547052 +step:4016 train loss:3.560083 +step:4017 train loss:3.519881 +step:4018 train loss:3.466298 +step:4019 train loss:3.518007 +step:4020 train loss:3.584486 +step:4021 train loss:3.529199 +step:4022 train loss:3.532578 +step:4023 train loss:3.547139 +step:4024 train loss:3.461267 +step:4025 train loss:3.581961 +step:4026 train loss:3.571161 +step:4027 train loss:3.582091 +step:4028 train loss:3.594020 +step:4029 train loss:3.628615 +step:4030 train loss:3.544500 +step:4031 train loss:3.583105 +step:4032 train loss:3.540261 +step:4033 train loss:3.575574 +step:4034 
train loss:3.587927 +step:4035 train loss:3.566786 +step:4036 train loss:3.564245 +step:4037 train loss:3.580141 +step:4038 train loss:3.500056 +step:4039 train loss:3.556500 +step:4040 train loss:3.536515 +step:4041 train loss:3.529020 +step:4042 train loss:3.550023 +step:4043 train loss:3.536091 +step:4044 train loss:3.569616 +step:4045 train loss:3.573711 +step:4046 train loss:3.531292 +step:4047 train loss:3.557676 +step:4048 train loss:3.570444 +step:4049 train loss:3.532400 +step:4050 train loss:3.636734 +step:4051 train loss:3.546751 +step:4052 train loss:3.571378 +step:4053 train loss:3.620239 +step:4054 train loss:3.588397 +step:4055 train loss:3.609340 +step:4056 train loss:3.604487 +step:4057 train loss:3.538816 +step:4058 train loss:3.521585 +step:4059 train loss:3.606190 +step:4060 train loss:3.545774 +step:4061 train loss:3.515353 +step:4062 train loss:3.628834 +step:4063 train loss:3.578561 +step:4064 train loss:3.549807 +step:4065 train loss:3.531939 +step:4066 train loss:3.559819 +step:4067 train loss:3.585610 +step:4068 train loss:3.552167 +step:4069 train loss:3.609521 +step:4070 train loss:3.528144 +step:4071 train loss:3.501205 +step:4072 train loss:3.577758 +step:4073 train loss:3.512897 +step:4074 train loss:3.564588 +step:4075 train loss:3.628307 +step:4076 train loss:3.487736 +step:4077 train loss:3.563640 +step:4078 train loss:3.664269 +step:4079 train loss:3.605221 +step:4080 train loss:3.550753 +step:4081 train loss:3.521002 +step:4082 train loss:3.574090 +step:4083 train loss:3.509190 +step:4084 train loss:3.527640 +step:4085 train loss:3.771873 +step:4086 train loss:3.532135 +step:4087 train loss:3.577509 +step:4088 train loss:3.564166 +step:4089 train loss:3.550042 +step:4090 train loss:3.571373 +step:4091 train loss:3.596974 +step:4092 train loss:3.516842 +step:4093 train loss:3.546803 +step:4094 train loss:3.566614 +step:4095 train loss:3.521754 +step:4096 train loss:3.557667 +step:4097 train loss:3.560136 +step:4098 train loss:3.534477 +step:4099 train loss:3.533386 +step:4100 train loss:3.583474 +step:4101 train loss:3.509269 +step:4102 train loss:3.546455 +step:4103 train loss:3.754373 +step:4104 train loss:3.564973 +step:4105 train loss:3.532167 +step:4106 train loss:3.598170 +step:4107 train loss:3.521126 +step:4108 train loss:3.525805 +step:4109 train loss:3.579404 +step:4110 train loss:3.591345 +step:4111 train loss:3.563348 +step:4112 train loss:3.584114 +step:4113 train loss:3.544356 +step:4114 train loss:3.490937 +step:4115 train loss:3.527528 +step:4116 train loss:3.511628 +step:4117 train loss:3.532650 +step:4118 train loss:3.584216 +step:4119 train loss:3.608161 +step:4120 train loss:3.529563 +step:4121 train loss:3.522767 +step:4122 train loss:3.593115 +step:4123 train loss:3.600152 +step:4124 train loss:3.580214 +step:4125 train loss:3.618726 +step:4126 train loss:3.552087 +step:4127 train loss:3.570405 +step:4128 train loss:3.563183 +step:4129 train loss:3.609883 +step:4130 train loss:3.540103 +step:4131 train loss:3.574406 +step:4132 train loss:3.591132 +step:4133 train loss:3.540654 +step:4134 train loss:3.594714 +step:4135 train loss:3.529605 +step:4136 train loss:3.552769 +step:4137 train loss:3.524506 +step:4138 train loss:3.532063 +step:4139 train loss:3.584435 +step:4140 train loss:3.536309 +step:4141 train loss:3.499177 +step:4142 train loss:3.546955 +step:4143 train loss:3.580632 +step:4144 train loss:3.535398 +step:4145 train loss:3.502479 +step:4146 train loss:3.570656 +step:4147 train loss:3.546082 +step:4148 train loss:3.538251 
+step:4149 train loss:3.618825 +step:4150 train loss:3.580727 +step:4151 train loss:3.563039 +step:4152 train loss:3.583801 +step:4153 train loss:3.591413 +step:4154 train loss:3.600103 +step:4155 train loss:3.622449 +step:4156 train loss:3.497729 +step:4157 train loss:3.522023 +step:4158 train loss:3.575964 +step:4159 train loss:3.479589 +step:4160 train loss:3.568829 +step:4161 train loss:3.568554 +step:4162 train loss:3.481071 +step:4163 train loss:3.560712 +step:4164 train loss:3.509054 +step:4165 train loss:3.510730 +step:4166 train loss:3.577651 +step:4167 train loss:3.570667 +step:4168 train loss:3.561705 +step:4169 train loss:3.597242 +step:4170 train loss:3.711631 +step:4171 train loss:3.560083 +step:4172 train loss:3.577711 +step:4173 train loss:3.577945 +step:4174 train loss:3.537941 +step:4175 train loss:3.628875 +step:4176 train loss:3.549237 +step:4177 train loss:3.572491 +step:4178 train loss:3.550371 +step:4179 train loss:3.511568 +step:4180 train loss:3.501912 +step:4181 train loss:3.553420 +step:4182 train loss:3.541389 +step:4183 train loss:3.474304 +step:4184 train loss:3.543360 +step:4185 train loss:3.616252 +step:4186 train loss:3.589194 +step:4187 train loss:3.598125 +step:4188 train loss:3.574705 +step:4189 train loss:3.535114 +step:4190 train loss:3.572574 +step:4191 train loss:3.523939 +step:4192 train loss:3.615096 +step:4193 train loss:3.520139 +step:4194 train loss:3.504774 +step:4195 train loss:3.503293 +step:4196 train loss:3.568562 +step:4197 train loss:3.583300 +step:4198 train loss:3.507948 +step:4199 train loss:3.592782 +step:4200 train loss:3.550397 +step:4201 train loss:3.534451 +step:4202 train loss:3.547434 +step:4203 train loss:3.559239 +step:4204 train loss:3.551792 +step:4205 train loss:3.564091 +step:4206 train loss:3.583157 +step:4207 train loss:3.583540 +step:4208 train loss:3.547667 +step:4209 train loss:3.612991 +step:4210 train loss:3.644744 +step:4211 train loss:3.520830 +step:4212 train loss:3.564624 +step:4213 train loss:3.514299 +step:4214 train loss:3.525323 +step:4215 train loss:3.539564 +step:4216 train loss:3.512370 +step:4217 train loss:3.539279 +step:4218 train loss:3.575164 +step:4219 train loss:3.577230 +step:4220 train loss:3.650060 +step:4221 train loss:3.537824 +step:4222 train loss:3.598206 +step:4223 train loss:3.519344 +step:4224 train loss:3.593606 +step:4225 train loss:3.521580 +step:4226 train loss:3.579910 +step:4227 train loss:3.553612 +step:4228 train loss:3.531329 +step:4229 train loss:3.539447 +step:4230 train loss:3.521865 +step:4231 train loss:3.512299 +step:4232 train loss:3.561021 +step:4233 train loss:3.470277 +step:4234 train loss:3.553961 +step:4235 train loss:3.632041 +step:4236 train loss:3.597440 +step:4237 train loss:3.579954 +step:4238 train loss:3.590586 +step:4239 train loss:3.642986 +step:4240 train loss:3.548147 +step:4241 train loss:3.474159 +step:4242 train loss:3.594935 +step:4243 train loss:3.593310 +step:4244 train loss:3.609848 +step:4245 train loss:3.664360 +step:4246 train loss:3.537463 +step:4247 train loss:3.596918 +step:4248 train loss:3.544597 +step:4249 train loss:3.550403 +step:4250 validation loss:3.473215 +step:4250 train loss:3.530976 +step:4251 train loss:3.630234 +step:4252 train loss:3.535990 +step:4253 train loss:3.532428 +step:4254 train loss:3.537765 +step:4255 train loss:3.523201 +step:4256 train loss:3.538070 +step:4257 train loss:3.594493 +step:4258 train loss:3.455749 +step:4259 train loss:3.522262 +step:4260 train loss:3.586354 +step:4261 train loss:3.569360 +step:4262 
train loss:3.712761 +step:4263 train loss:3.638547 +step:4264 train loss:3.582801 +step:4265 train loss:3.577214 +step:4266 train loss:3.572485 +step:4267 train loss:3.572208 +step:4268 train loss:3.521188 +step:4269 train loss:3.614013 +step:4270 train loss:3.593998 +step:4271 train loss:3.508071 +step:4272 train loss:3.562058 +step:4273 train loss:3.536159 +step:4274 train loss:3.526521 +step:4275 train loss:3.548142 +step:4276 train loss:3.511446 +step:4277 train loss:3.651203 +step:4278 train loss:3.499828 +step:4279 train loss:3.526743 +step:4280 train loss:3.607822 +step:4281 train loss:3.593548 +step:4282 train loss:3.659905 +step:4283 train loss:3.512820 +step:4284 train loss:3.542774 +step:4285 train loss:3.546782 +step:4286 train loss:3.606576 +step:4287 train loss:3.608708 +step:4288 train loss:3.588101 +step:4289 train loss:3.543886 +step:4290 train loss:3.547012 +step:4291 train loss:3.505772 +step:4292 train loss:3.554809 +step:4293 train loss:3.565051 +step:4294 train loss:3.551832 +step:4295 train loss:3.484593 +step:4296 train loss:3.558910 +step:4297 train loss:3.537758 +step:4298 train loss:3.552222 +step:4299 train loss:3.547784 +step:4300 train loss:3.664306 +step:4301 train loss:3.485065 +step:4302 train loss:3.621376 +step:4303 train loss:3.502665 +step:4304 train loss:3.508020 +step:4305 train loss:3.526189 +step:4306 train loss:3.599425 +step:4307 train loss:3.512383 +step:4308 train loss:3.515127 +step:4309 train loss:3.581939 +step:4310 train loss:3.522576 +step:4311 train loss:3.577821 +step:4312 train loss:3.572703 +step:4313 train loss:3.565460 +step:4314 train loss:3.510635 +step:4315 train loss:3.541704 +step:4316 train loss:3.489679 +step:4317 train loss:3.545149 +step:4318 train loss:3.588049 +step:4319 train loss:3.537525 +step:4320 train loss:3.598520 +step:4321 train loss:3.581496 +step:4322 train loss:3.533551 +step:4323 train loss:3.473450 +step:4324 train loss:3.564961 +step:4325 train loss:3.542720 +step:4326 train loss:3.531944 +step:4327 train loss:3.642581 +step:4328 train loss:3.548892 +step:4329 train loss:3.504424 +step:4330 train loss:3.549456 +step:4331 train loss:3.564119 +step:4332 train loss:3.593316 +step:4333 train loss:3.557033 +step:4334 train loss:3.567094 +step:4335 train loss:3.566088 +step:4336 train loss:3.582222 +step:4337 train loss:3.544176 +step:4338 train loss:3.662230 +step:4339 train loss:3.566428 +step:4340 train loss:3.576930 +step:4341 train loss:3.542856 +step:4342 train loss:3.559403 +step:4343 train loss:3.677517 +step:4344 train loss:3.568142 +step:4345 train loss:3.581444 +step:4346 train loss:3.596757 +step:4347 train loss:3.605023 +step:4348 train loss:3.520223 +step:4349 train loss:3.603872 +step:4350 train loss:3.541080 +step:4351 train loss:3.499305 +step:4352 train loss:3.570338 +step:4353 train loss:3.517952 +step:4354 train loss:3.574075 +step:4355 train loss:3.534668 +step:4356 train loss:3.558957 +step:4357 train loss:3.541577 +step:4358 train loss:3.638481 +step:4359 train loss:3.585113 +step:4360 train loss:3.502945 +step:4361 train loss:3.552959 +step:4362 train loss:3.569941 +step:4363 train loss:3.588102 +step:4364 train loss:3.552032 +step:4365 train loss:3.533033 +step:4366 train loss:3.582278 +step:4367 train loss:3.598084 +step:4368 train loss:3.571265 +step:4369 train loss:3.444033 +step:4370 train loss:3.567099 +step:4371 train loss:3.479427 +step:4372 train loss:3.630011 +step:4373 train loss:3.565520 +step:4374 train loss:3.535525 +step:4375 train loss:3.582871 +step:4376 train loss:3.593191 
+step:4377 train loss:3.525524 +step:4378 train loss:3.539987 +step:4379 train loss:3.621183 +step:4380 train loss:3.599751 +step:4381 train loss:3.503585 +step:4382 train loss:3.548042 +step:4383 train loss:3.577693 +step:4384 train loss:3.572272 +step:4385 train loss:3.500613 +step:4386 train loss:3.557301 +step:4387 train loss:3.526952 +step:4388 train loss:3.546301 +step:4389 train loss:3.572321 +step:4390 train loss:3.613349 +step:4391 train loss:3.541043 +step:4392 train loss:3.615139 +step:4393 train loss:3.571888 +step:4394 train loss:3.512056 +step:4395 train loss:3.567955 +step:4396 train loss:3.545202 +step:4397 train loss:3.584932 +step:4398 train loss:3.531457 +step:4399 train loss:3.525752 +step:4400 train loss:3.530663 +step:4401 train loss:3.590676 +step:4402 train loss:3.588022 +step:4403 train loss:3.539762 +step:4404 train loss:3.571470 +step:4405 train loss:3.493364 +step:4406 train loss:3.570403 +step:4407 train loss:3.506075 +step:4408 train loss:3.596978 +step:4409 train loss:3.559455 +step:4410 train loss:3.566121 +step:4411 train loss:3.521028 +step:4412 train loss:3.639126 +step:4413 train loss:3.534820 +step:4414 train loss:3.541518 +step:4415 train loss:3.527246 +step:4416 train loss:3.518076 +step:4417 train loss:3.514097 +step:4418 train loss:3.586635 +step:4419 train loss:3.552995 +step:4420 train loss:3.568651 +step:4421 train loss:3.591201 +step:4422 train loss:3.603761 +step:4423 train loss:3.564959 +step:4424 train loss:3.546552 +step:4425 train loss:3.509958 +step:4426 train loss:3.587612 +step:4427 train loss:3.550279 +step:4428 train loss:3.487626 +step:4429 train loss:3.550688 +step:4430 train loss:3.587290 +step:4431 train loss:3.582775 +step:4432 train loss:3.486117 +step:4433 train loss:3.539513 +step:4434 train loss:3.538614 +step:4435 train loss:3.566943 +step:4436 train loss:3.505715 +step:4437 train loss:3.582746 +step:4438 train loss:3.551401 +step:4439 train loss:3.557095 +step:4440 train loss:3.556782 +step:4441 train loss:3.558771 +step:4442 train loss:3.607111 +step:4443 train loss:3.545189 +step:4444 train loss:3.627207 +step:4445 train loss:3.592396 +step:4446 train loss:3.523193 +step:4447 train loss:3.567412 +step:4448 train loss:3.588229 +step:4449 train loss:3.526124 +step:4450 train loss:3.544053 +step:4451 train loss:3.594500 +step:4452 train loss:3.659471 +step:4453 train loss:3.582629 +step:4454 train loss:3.558813 +step:4455 train loss:3.599931 +step:4456 train loss:3.546637 +step:4457 train loss:3.543589 +step:4458 train loss:3.559829 +step:4459 train loss:3.592288 +step:4460 train loss:3.504208 +step:4461 train loss:3.474089 +step:4462 train loss:3.533823 +step:4463 train loss:3.548676 +step:4464 train loss:3.524536 +step:4465 train loss:3.555894 +step:4466 train loss:3.653762 +step:4467 train loss:3.532807 +step:4468 train loss:3.529260 +step:4469 train loss:3.516697 +step:4470 train loss:3.496958 +step:4471 train loss:3.557585 +step:4472 train loss:3.483641 +step:4473 train loss:3.571158 +step:4474 train loss:3.591136 +step:4475 train loss:3.557820 +step:4476 train loss:3.515663 +step:4477 train loss:3.501250 +step:4478 train loss:3.560955 +step:4479 train loss:3.659085 +step:4480 train loss:3.496916 +step:4481 train loss:3.570297 +step:4482 train loss:3.528308 +step:4483 train loss:3.525212 +step:4484 train loss:3.575088 +step:4485 train loss:3.529631 +step:4486 train loss:3.632271 +step:4487 train loss:3.528867 +step:4488 train loss:3.523874 +step:4489 train loss:3.481820 +step:4490 train loss:3.565589 +step:4491 train 
loss:3.515845 +step:4492 train loss:3.546445 +step:4493 train loss:3.534881 +step:4494 train loss:3.532327 +step:4495 train loss:3.594368 +step:4496 train loss:3.537504 +step:4497 train loss:3.623714 +step:4498 train loss:3.515393 +step:4499 train loss:3.563603 +step:4500 validation loss:3.462655 total_sharp:9.3148e-05 L1_sharp:8.7084e-05 L2_sharp:1.6220e-05 L3_sharp:1.7587e-05 L4_sharp:7.1746e-06 L5_sharp:9.4669e-06 L6_sharp:1.4442e-05 L7_sharp:1.5966e-05 L8_sharp:1.5707e-05 L9_sharp:1.5729e-05 L10_sharp:9.0113e-06 L11_sharp:7.5362e-06 L12_sharp:9.3882e-06 total_fnorm:1.0332e+01 total_l1_linf:7.7646e+04 total_spectral:1.0332e+01 L1_fnorm:2.8995e+00 L2_fnorm:2.7128e+00 L3_fnorm:2.8244e+00 L4_fnorm:2.9456e+00 L5_fnorm:2.9951e+00 L6_fnorm:2.9970e+00 L7_fnorm:3.0146e+00 L8_fnorm:3.0186e+00 L9_fnorm:3.0134e+00 L10_fnorm:3.0222e+00 L11_fnorm:3.0122e+00 L12_fnorm:3.0150e+00 L1_l1linf:2.0542e+00 L2_l1linf:1.9594e+00 L3_l1linf:2.0199e+00 L4_l1linf:2.0930e+00 L5_l1linf:2.0551e+00 L6_l1linf:2.0467e+00 L7_l1linf:2.0319e+00 L8_l1linf:2.0353e+00 L9_l1linf:2.0107e+00 L10_l1linf:2.0267e+00 L11_l1linf:1.9945e+00 L12_l1linf:2.0164e+00 L1_spectral:6.0238e-02 L2_spectral:6.0206e-02 L3_spectral:6.0224e-02 L4_spectral:6.0254e-02 L5_spectral:6.0237e-02 L6_spectral:6.0203e-02 L7_spectral:6.0220e-02 L8_spectral:6.0216e-02 L9_spectral:6.0215e-02 L10_spectral:6.0222e-02 L11_spectral:6.0210e-02 L12_spectral:6.0230e-02 ip_v_neg_g:3.9677e-03 cos_v_neg_g:1.1402e-03 v_norm:1.0332e+01 g_norm:3.3682e-01 hv_norm:7.0898e-02 cos_v_hv:1.3574e-02 hg_norm:5.4268e-01 cos_g_hg:3.4773e-01 v_par:7.9854e-04 v_perp:1.0332e+01 L1_cos_v_neg_g:3.4573e-03 L1_v_norm:2.8995e+00 L2_cos_v_neg_g:2.8910e-03 L2_v_norm:2.7128e+00 L3_cos_v_neg_g:3.3243e-03 L3_v_norm:2.8244e+00 L4_cos_v_neg_g:2.5297e-03 L4_v_norm:2.9456e+00 L5_cos_v_neg_g:3.0339e-03 L5_v_norm:2.9951e+00 L6_cos_v_neg_g:3.2902e-03 L6_v_norm:2.9970e+00 L7_cos_v_neg_g:3.5378e-03 L7_v_norm:3.0146e+00 L8_cos_v_neg_g:3.5308e-03 L8_v_norm:3.0186e+00 L9_cos_v_neg_g:4.8102e-03 L9_v_norm:3.0134e+00 L10_cos_v_neg_g:3.7043e-03 L10_v_norm:3.0222e+00 L11_cos_v_neg_g:3.0655e-03 L11_v_norm:3.0122e+00 L12_cos_v_neg_g:2.3858e-03 L12_v_norm:3.0150e+00 +step:4500 train loss:3.471009 +step:4501 train loss:3.531696 +step:4502 train loss:3.656549 +step:4503 train loss:3.557580 +step:4504 train loss:3.570240 +step:4505 train loss:3.547654 +step:4506 train loss:3.522437 +step:4507 train loss:3.600042 +step:4508 train loss:3.537407 +step:4509 train loss:3.532534 +step:4510 train loss:3.568147 +step:4511 train loss:3.517855 +step:4512 train loss:3.545412 +step:4513 train loss:3.599105 +step:4514 train loss:3.505433 +step:4515 train loss:3.627535 +step:4516 train loss:3.598528 +step:4517 train loss:3.550746 +step:4518 train loss:3.490670 +step:4519 train loss:3.527411 +step:4520 train loss:3.539586 +step:4521 train loss:3.482655 +step:4522 train loss:3.534461 +step:4523 train loss:3.580808 +step:4524 train loss:3.567080 +step:4525 train loss:3.486910 +step:4526 train loss:3.530080 +step:4527 train loss:3.517771 +step:4528 train loss:3.550768 +step:4529 train loss:3.543611 +step:4530 train loss:3.641060 +step:4531 train loss:3.529958 +step:4532 train loss:3.549314 +step:4533 train loss:3.525865 +step:4534 train loss:3.618340 +step:4535 train loss:3.516486 +step:4536 train loss:3.586375 +step:4537 train loss:3.571005 +step:4538 train loss:3.549491 +step:4539 train loss:3.570160 +step:4540 train loss:3.541346 +step:4541 train loss:3.517234 +step:4542 train loss:3.559507 +step:4543 train loss:3.649229 +step:4544 
train loss:3.591226 +step:4545 train loss:3.528481 +step:4546 train loss:3.623380 +step:4547 train loss:3.581684 +step:4548 train loss:3.584260 +step:4549 train loss:3.544103 +step:4550 train loss:3.510392 +step:4551 train loss:3.527259 +step:4552 train loss:3.531502 +step:4553 train loss:3.615277 +step:4554 train loss:3.509778 +step:4555 train loss:3.619235 +step:4556 train loss:3.555418 +step:4557 train loss:3.486146 +step:4558 train loss:3.572932 +step:4559 train loss:3.579936 +step:4560 train loss:3.517073 +step:4561 train loss:3.506619 +step:4562 train loss:3.547708 +step:4563 train loss:3.500613 +step:4564 train loss:3.527327 +step:4565 train loss:3.527574 +step:4566 train loss:3.501357 +step:4567 train loss:3.525703 +step:4568 train loss:3.527153 +step:4569 train loss:3.511065 +step:4570 train loss:3.559628 +step:4571 train loss:3.542819 +step:4572 train loss:3.532543 +step:4573 train loss:3.541237 +step:4574 train loss:3.691027 +step:4575 train loss:3.518380 +step:4576 train loss:3.509400 +step:4577 train loss:3.548395 +step:4578 train loss:3.587692 +step:4579 train loss:3.539999 +step:4580 train loss:3.599021 +step:4581 train loss:3.536632 +step:4582 train loss:3.529905 +step:4583 train loss:3.537332 +step:4584 train loss:3.510077 +step:4585 train loss:3.590151 +step:4586 train loss:3.576456 +step:4587 train loss:3.475053 +step:4588 train loss:3.521578 +step:4589 train loss:3.592617 +step:4590 train loss:3.565248 +step:4591 train loss:3.504322 +step:4592 train loss:3.592466 +step:4593 train loss:3.511283 +step:4594 train loss:3.540920 +step:4595 train loss:3.565058 +step:4596 train loss:3.501807 +step:4597 train loss:3.635551 +step:4598 train loss:3.556310 +step:4599 train loss:3.514217 +step:4600 train loss:3.519406 +step:4601 train loss:3.541933 +step:4602 train loss:3.490547 +step:4603 train loss:3.505550 +step:4604 train loss:3.611999 +step:4605 train loss:3.531883 +step:4606 train loss:3.559536 +step:4607 train loss:3.541290 +step:4608 train loss:3.571904 +step:4609 train loss:3.533574 +step:4610 train loss:3.572464 +step:4611 train loss:3.603956 +step:4612 train loss:3.598684 +step:4613 train loss:3.581262 +step:4614 train loss:3.575361 +step:4615 train loss:3.514439 +step:4616 train loss:3.498993 +step:4617 train loss:3.540949 +step:4618 train loss:3.557511 +step:4619 train loss:3.516400 +step:4620 train loss:3.534742 +step:4621 train loss:3.533134 +step:4622 train loss:3.475129 +step:4623 train loss:3.581601 +step:4624 train loss:3.565214 +step:4625 train loss:3.527296 +step:4626 train loss:3.566532 +step:4627 train loss:3.533525 +step:4628 train loss:3.520323 +step:4629 train loss:3.563337 +step:4630 train loss:3.620228 +step:4631 train loss:3.623463 +step:4632 train loss:3.515666 +step:4633 train loss:3.529073 +step:4634 train loss:3.600594 +step:4635 train loss:3.565465 +step:4636 train loss:3.580517 +step:4637 train loss:3.520100 +step:4638 train loss:3.523532 +step:4639 train loss:3.518718 +step:4640 train loss:3.529730 +step:4641 train loss:3.536554 +step:4642 train loss:3.570819 +step:4643 train loss:3.527952 +step:4644 train loss:3.554859 +step:4645 train loss:3.567246 +step:4646 train loss:3.523266 +step:4647 train loss:3.481113 +step:4648 train loss:3.588930 +step:4649 train loss:3.598502 +step:4650 train loss:3.549107 +step:4651 train loss:3.551105 +step:4652 train loss:3.539229 +step:4653 train loss:3.596725 +step:4654 train loss:3.592002 +step:4655 train loss:3.497467 +step:4656 train loss:3.532593 +step:4657 train loss:3.582075 +step:4658 train loss:3.539208 
+step:4659 train loss:3.551405 +step:4660 train loss:3.593691 +step:4661 train loss:3.512393 +step:4662 train loss:3.528409 +step:4663 train loss:3.542514 +step:4664 train loss:3.588036 +step:4665 train loss:3.585680 +step:4666 train loss:3.583765 +step:4667 train loss:3.577903 +step:4668 train loss:3.538881 +step:4669 train loss:3.549645 +step:4670 train loss:3.577525 +step:4671 train loss:3.583115 +step:4672 train loss:3.451639 +step:4673 train loss:3.489637 +step:4674 train loss:3.616401 +step:4675 train loss:3.523920 +step:4676 train loss:3.483949 +step:4677 train loss:3.488431 +step:4678 train loss:3.461392 +step:4679 train loss:3.558842 +step:4680 train loss:3.501264 +step:4681 train loss:3.550151 +step:4682 train loss:3.497772 +step:4683 train loss:3.469125 +step:4684 train loss:3.580712 +step:4685 train loss:3.523473 +step:4686 train loss:3.532076 +step:4687 train loss:3.574302 +step:4688 train loss:3.501805 +step:4689 train loss:3.575747 +step:4690 train loss:3.516394 +step:4691 train loss:3.549854 +step:4692 train loss:3.482350 +step:4693 train loss:3.518847 +step:4694 train loss:3.558221 +step:4695 train loss:3.582242 +step:4696 train loss:3.564240 +step:4697 train loss:3.482101 +step:4698 train loss:3.498141 +step:4699 train loss:3.545805 +step:4700 train loss:3.519756 +step:4701 train loss:3.527761 +step:4702 train loss:3.483012 +step:4703 train loss:3.563572 +step:4704 train loss:3.550514 +step:4705 train loss:3.492387 +step:4706 train loss:3.500387 +step:4707 train loss:3.488009 +step:4708 train loss:3.554018 +step:4709 train loss:3.501401 +step:4710 train loss:3.515441 +step:4711 train loss:3.578010 +step:4712 train loss:3.475126 +step:4713 train loss:3.582059 +step:4714 train loss:3.479095 +step:4715 train loss:3.569624 +step:4716 train loss:3.540164 +step:4717 train loss:3.466219 +step:4718 train loss:3.561466 +step:4719 train loss:3.485703 +step:4720 train loss:3.585578 +step:4721 train loss:3.537368 +step:4722 train loss:3.593836 +step:4723 train loss:3.489944 +step:4724 train loss:3.542535 +step:4725 train loss:3.477316 +step:4726 train loss:3.521530 +step:4727 train loss:3.530151 +step:4728 train loss:3.533368 +step:4729 train loss:3.565924 +step:4730 train loss:3.462381 +step:4731 train loss:3.523824 +step:4732 train loss:3.479269 +step:4733 train loss:3.411908 +step:4734 train loss:3.550453 +step:4735 train loss:3.502177 +step:4736 train loss:3.545569 +step:4737 train loss:3.425106 +step:4738 train loss:3.575929 +step:4739 train loss:3.452134 +step:4740 train loss:3.560332 +step:4741 train loss:3.528608 +step:4742 train loss:3.492903 +step:4743 train loss:3.490632 +step:4744 train loss:3.530551 +step:4745 train loss:3.551763 +step:4746 train loss:3.589737 +step:4747 train loss:3.553495 +step:4748 train loss:3.450348 +step:4749 train loss:3.521068 +step:4750 validation loss:3.454335 +step:4750 train loss:3.467847 +step:4751 train loss:3.559515 +step:4752 train loss:3.493179 +step:4753 train loss:3.597378 +step:4754 train loss:3.468336 +step:4755 train loss:3.509043 +step:4756 train loss:3.588688 +step:4757 train loss:3.507371 +step:4758 train loss:3.531069 +step:4759 train loss:3.525759 +step:4760 train loss:3.557424 +step:4761 train loss:3.474298 +step:4762 train loss:3.504373 +step:4763 train loss:3.530556 +step:4764 train loss:3.588815 +step:4765 train loss:3.488039 +step:4766 train loss:3.504882 +step:4767 train loss:3.457824 +step:4768 train loss:3.512356 +step:4769 train loss:3.542280 +step:4770 train loss:3.499752 +step:4771 train loss:3.514409 +step:4772 
train loss:3.484367 +step:4773 train loss:3.522604 +step:4774 train loss:3.467078 +step:4775 train loss:3.595123 +step:4776 train loss:3.461471 +step:4777 train loss:3.532812 +step:4778 train loss:3.476988 +step:4779 train loss:3.521849 +step:4780 train loss:3.459628 +step:4781 train loss:3.465835 +step:4782 train loss:3.576401 +step:4783 train loss:3.562263 +step:4784 train loss:3.525450 +step:4785 train loss:3.520689 +step:4786 train loss:3.632312 +step:4787 train loss:3.464952 +step:4788 train loss:3.489372 +step:4789 train loss:3.510605 +step:4790 train loss:3.566195 +step:4791 train loss:3.528993 +step:4792 train loss:3.573171 +step:4793 train loss:3.489353 +step:4794 train loss:3.564380 +step:4795 train loss:3.511511 +step:4796 train loss:3.501494 +step:4797 train loss:3.508762 +step:4798 train loss:3.514824 +step:4799 train loss:3.515423 +step:4800 train loss:3.543528 +step:4801 train loss:3.539098 +step:4802 train loss:3.575512 +step:4803 train loss:3.559170 +step:4804 train loss:3.514405 +step:4805 train loss:3.511936 +step:4806 train loss:3.490472 +step:4807 train loss:3.596577 +step:4808 train loss:3.465724 +step:4809 train loss:3.570015 +step:4810 train loss:3.510718 +step:4811 train loss:3.528054 +step:4812 train loss:3.506450 +step:4813 train loss:3.461523 +step:4814 train loss:3.461530 +step:4815 train loss:3.451073 +step:4816 train loss:3.521620 +step:4817 train loss:3.451687 +step:4818 train loss:3.520930 +step:4819 train loss:3.512453 +step:4820 train loss:3.770807 +step:4821 train loss:3.541874 +step:4822 train loss:3.549412 +step:4823 train loss:3.483646 +step:4824 train loss:3.487344 +step:4825 train loss:3.471294 +step:4826 train loss:3.556930 +step:4827 train loss:3.505290 +step:4828 train loss:3.443883 +step:4829 train loss:3.552148 +step:4830 train loss:3.490874 +step:4831 train loss:3.640485 +step:4832 train loss:3.508283 +step:4833 train loss:3.545017 +step:4834 train loss:3.447982 +step:4835 train loss:3.539685 +step:4836 train loss:3.520230 +step:4837 train loss:3.548209 +step:4838 train loss:3.490140 +step:4839 train loss:3.553620 +step:4840 train loss:3.460584 +step:4841 train loss:3.554453 +step:4842 train loss:3.471699 +step:4843 train loss:3.547523 +step:4844 train loss:3.550006 +step:4845 train loss:3.487772 +step:4846 train loss:3.503895 +step:4847 train loss:3.485505 +step:4848 train loss:3.513952 +step:4849 train loss:3.464886 +step:4850 train loss:3.478652 +step:4851 train loss:3.466583 +step:4852 train loss:3.549563 +step:4853 train loss:3.524033 +step:4854 train loss:3.503510 +step:4855 train loss:3.565742 +step:4856 train loss:3.537654 +step:4857 train loss:3.541726 +step:4858 train loss:3.624892 +step:4859 train loss:3.469181 +step:4860 train loss:3.547066 +step:4861 train loss:3.518863 +step:4862 train loss:3.552917 +step:4863 train loss:3.489011 +step:4864 train loss:3.499354 +step:4865 train loss:3.493334 +step:4866 train loss:3.537595 +step:4867 train loss:3.508355 +step:4868 train loss:3.522925 +step:4869 train loss:3.477559 +step:4870 train loss:3.503439 +step:4871 train loss:3.590971 +step:4872 train loss:3.531110 +step:4873 train loss:3.534064 +step:4874 train loss:3.499766 +step:4875 train loss:3.469677 +step:4876 train loss:3.480173 +step:4877 train loss:3.484652 +step:4878 train loss:3.520626 +step:4879 train loss:3.486229 +step:4880 train loss:3.506376 +step:4881 train loss:3.457435 +step:4882 train loss:3.654061 +step:4883 train loss:3.469311 +step:4884 train loss:3.495876 +step:4885 train loss:3.472077 +step:4886 train loss:3.549292 
+step:4887 train loss:3.501986 +step:4888 train loss:3.510454 +step:4889 train loss:3.510897 +step:4890 train loss:3.542572 +step:4891 train loss:3.482072 +step:4892 train loss:3.487846 +step:4893 train loss:3.535210 +step:4894 train loss:3.470510 +step:4895 train loss:3.502269 +step:4896 train loss:3.482747 +step:4897 train loss:3.559614 +step:4898 train loss:3.507179 +step:4899 train loss:3.491952 +step:4900 train loss:3.536643 +step:4901 train loss:3.490018 +step:4902 train loss:3.478793 +step:4903 train loss:3.502090 +step:4904 train loss:3.511209 +step:4905 train loss:3.512290 +step:4906 train loss:3.509936 +step:4907 train loss:3.583274 +step:4908 train loss:3.487953 +step:4909 train loss:3.498776 +step:4910 train loss:3.515071 +step:4911 train loss:3.571907 +step:4912 train loss:3.544147 +step:4913 train loss:3.526061 +step:4914 train loss:3.512606 +step:4915 train loss:3.497506 +step:4916 train loss:3.435930 +step:4917 train loss:3.461752 +step:4918 train loss:3.493147 +step:4919 train loss:3.487302 +step:4920 train loss:3.484626 +step:4921 train loss:3.650967 +step:4922 train loss:3.536469 +step:4923 train loss:3.555749 +step:4924 train loss:3.555881 +step:4925 train loss:3.489420 +step:4926 train loss:3.477584 +step:4927 train loss:3.516157 +step:4928 train loss:3.552280 +step:4929 train loss:3.505181 +step:4930 train loss:3.488684 +step:4931 train loss:3.480548 +step:4932 train loss:3.494365 +step:4933 train loss:3.486248 +step:4934 train loss:3.551175 +step:4935 train loss:3.538750 +step:4936 train loss:3.500706 +step:4937 train loss:3.614324 +step:4938 train loss:3.600672 +step:4939 train loss:3.464907 +step:4940 train loss:3.546089 +step:4941 train loss:3.443693 +step:4942 train loss:3.486407 +step:4943 train loss:3.484551 +step:4944 train loss:3.490154 +step:4945 train loss:3.532635 +step:4946 train loss:3.511385 +step:4947 train loss:3.493843 +step:4948 train loss:3.533495 +step:4949 train loss:3.440000 +step:4950 train loss:3.520470 +step:4951 train loss:3.570575 +step:4952 train loss:3.511050 +step:4953 train loss:3.537918 +step:4954 train loss:3.448265 +step:4955 train loss:3.522885 +step:4956 train loss:3.552468 +step:4957 train loss:3.543056 +step:4958 train loss:3.459599 +step:4959 train loss:3.574494 +step:4960 train loss:3.506666 +step:4961 train loss:3.521966 +step:4962 train loss:3.484906 +step:4963 train loss:3.532706 +step:4964 train loss:3.483752 +step:4965 train loss:3.635005 +step:4966 train loss:3.485463 +step:4967 train loss:3.590323 +step:4968 train loss:3.480994 +step:4969 train loss:3.523272 +step:4970 train loss:3.512825 +step:4971 train loss:3.468835 +step:4972 train loss:3.508966 +step:4973 train loss:3.518091 +step:4974 train loss:3.506484 +step:4975 train loss:3.594846 +step:4976 train loss:3.566918 +step:4977 train loss:3.517780 +step:4978 train loss:3.502870 +step:4979 train loss:3.502872 +step:4980 train loss:3.610287 +step:4981 train loss:3.448462 +step:4982 train loss:3.530309 +step:4983 train loss:3.453721 +step:4984 train loss:3.638499 +step:4985 train loss:3.535805 +step:4986 train loss:3.479516 +step:4987 train loss:3.498514 +step:4988 train loss:3.701300 +step:4989 train loss:3.502890 +step:4990 train loss:3.492234 +step:4991 train loss:3.508971 +step:4992 train loss:3.495236 +step:4993 train loss:3.473642 +step:4994 train loss:3.586209 +step:4995 train loss:3.509127 +step:4996 train loss:3.590351 +step:4997 train loss:3.495769 +step:4998 train loss:3.498976 +step:4999 train loss:3.485507 +step:5000 validation loss:3.444327 
total_sharp:6.8067e-05 L1_sharp:3.9268e-05 L2_sharp:1.4917e-05 L3_sharp:1.3437e-05 L4_sharp:1.0108e-05 L5_sharp:9.8822e-06 L6_sharp:1.1005e-05 L7_sharp:1.3374e-05 L8_sharp:1.2256e-05 L9_sharp:1.1519e-05 L10_sharp:7.5950e-06 L11_sharp:7.4704e-06 L12_sharp:1.6476e-05 total_fnorm:1.0363e+01 total_l1_linf:7.7860e+04 total_spectral:1.0363e+01 L1_fnorm:2.9348e+00 L2_fnorm:2.7478e+00 L3_fnorm:2.8489e+00 L4_fnorm:2.9539e+00 L5_fnorm:2.9923e+00 L6_fnorm:3.0029e+00 L7_fnorm:3.0201e+00 L8_fnorm:3.0200e+00 L9_fnorm:3.0161e+00 L10_fnorm:3.0243e+00 L11_fnorm:3.0138e+00 L12_fnorm:3.0142e+00 L1_l1linf:2.0059e+00 L2_l1linf:1.9736e+00 L3_l1linf:2.0316e+00 L4_l1linf:2.0816e+00 L5_l1linf:2.0556e+00 L6_l1linf:2.0614e+00 L7_l1linf:2.0366e+00 L8_l1linf:2.0311e+00 L9_l1linf:2.0004e+00 L10_l1linf:2.0172e+00 L11_l1linf:1.9911e+00 L12_l1linf:1.9931e+00 L1_spectral:6.0216e-02 L2_spectral:6.0214e-02 L3_spectral:6.0212e-02 L4_spectral:6.0260e-02 L5_spectral:6.0225e-02 L6_spectral:6.0234e-02 L7_spectral:6.0226e-02 L8_spectral:6.0215e-02 L9_spectral:6.0215e-02 L10_spectral:6.0212e-02 L11_spectral:6.0211e-02 L12_spectral:6.0219e-02 ip_v_neg_g:4.1406e-03 cos_v_neg_g:1.0469e-03 v_norm:1.0363e+01 g_norm:3.8167e-01 hv_norm:7.0998e-02 cos_v_hv:9.9352e-03 hg_norm:1.3577e+00 cos_g_hg:4.8530e-01 v_par:2.5219e-03 v_perp:1.0363e+01 L1_cos_v_neg_g:2.0762e-03 L1_v_norm:2.9348e+00 L2_cos_v_neg_g:9.6892e-04 L2_v_norm:2.7478e+00 L3_cos_v_neg_g:1.3693e-03 L3_v_norm:2.8489e+00 L4_cos_v_neg_g:3.3877e-03 L4_v_norm:2.9539e+00 L5_cos_v_neg_g:3.5065e-03 L5_v_norm:2.9923e+00 L6_cos_v_neg_g:2.2940e-03 L6_v_norm:3.0029e+00 L7_cos_v_neg_g:3.5052e-03 L7_v_norm:3.0201e+00 L8_cos_v_neg_g:4.4255e-03 L8_v_norm:3.0200e+00 L9_cos_v_neg_g:3.9442e-03 L9_v_norm:3.0161e+00 L10_cos_v_neg_g:4.2020e-03 L10_v_norm:3.0243e+00 L11_cos_v_neg_g:4.8872e-03 L11_v_norm:3.0138e+00 L12_cos_v_neg_g:4.7096e-03 L12_v_norm:3.0142e+00 +step:5000 train loss:3.597816 +step:5001 train loss:3.463439 +step:5002 train loss:3.520002 +step:5003 train loss:3.510319 +step:5004 train loss:3.505483 +step:5005 train loss:3.502825 +step:5006 train loss:3.545577 +step:5007 train loss:3.546831 +step:5008 train loss:3.487175 +step:5009 train loss:3.529362 +step:5010 train loss:3.482935 +step:5011 train loss:3.509768 +step:5012 train loss:3.483819 +step:5013 train loss:3.586943 +step:5014 train loss:3.499388 +step:5015 train loss:3.575495 +step:5016 train loss:3.503558 +step:5017 train loss:3.552090 +step:5018 train loss:3.469091 +step:5019 train loss:3.503665 +step:5020 train loss:3.497337 +step:5021 train loss:3.512381 +step:5022 train loss:3.543952 +step:5023 train loss:3.517654 +step:5024 train loss:3.565345 +step:5025 train loss:3.455611 +step:5026 train loss:3.577261 +step:5027 train loss:3.511151 +step:5028 train loss:3.575940 +step:5029 train loss:3.472121 +step:5030 train loss:3.511583 +step:5031 train loss:3.498924 +step:5032 train loss:3.526230 +step:5033 train loss:3.510446 +step:5034 train loss:3.506874 +step:5035 train loss:3.589636 +step:5036 train loss:3.542092 +step:5037 train loss:3.490448 +step:5038 train loss:3.542921 +step:5039 train loss:3.554811 +step:5040 train loss:3.517450 +step:5041 train loss:3.532483 +step:5042 train loss:3.438687 +step:5043 train loss:3.576937 +step:5044 train loss:3.497234 +step:5045 train loss:3.548243 +step:5046 train loss:3.464770 +step:5047 train loss:3.546320 +step:5048 train loss:3.459838 +step:5049 train loss:3.595177 +step:5050 train loss:3.481078 +step:5051 train loss:3.528980 +step:5052 train loss:3.424785 +step:5053 train loss:3.612696 
+step:5054 train loss:3.493778 +step:5055 train loss:3.520928 +step:5056 train loss:3.554427 +step:5057 train loss:3.487081 +step:5058 train loss:3.514951 +step:5059 train loss:3.480236 +step:5060 train loss:3.526430 +step:5061 train loss:3.518467 +step:5062 train loss:3.491030 +step:5063 train loss:3.483649 +step:5064 train loss:3.494999 +step:5065 train loss:3.476609 +step:5066 train loss:3.541047 +step:5067 train loss:3.521269 +step:5068 train loss:3.504609 +step:5069 train loss:3.477544 +step:5070 train loss:3.505841 +step:5071 train loss:3.578251 +step:5072 train loss:3.467255 +step:5073 train loss:3.476093 +step:5074 train loss:3.423231 +step:5075 train loss:3.493810 +step:5076 train loss:3.423932 +step:5077 train loss:3.488851 +step:5078 train loss:3.492519 +step:5079 train loss:3.524975 +step:5080 train loss:3.505256 +step:5081 train loss:3.513278 +step:5082 train loss:3.502495 +step:5083 train loss:3.559019 +step:5084 train loss:3.540737 +step:5085 train loss:3.501514 +step:5086 train loss:3.577051 +step:5087 train loss:3.563069 +step:5088 train loss:3.481932 +step:5089 train loss:3.548474 +step:5090 train loss:3.494902 +step:5091 train loss:3.496897 +step:5092 train loss:3.595650 +step:5093 train loss:3.476391 +step:5094 train loss:3.475643 +step:5095 train loss:3.525908 +step:5096 train loss:3.498082 +step:5097 train loss:3.502141 +step:5098 train loss:3.507365 +step:5099 train loss:3.466259 +step:5100 train loss:3.483730 +step:5101 train loss:3.674691 +step:5102 train loss:3.518159 +step:5103 train loss:3.527774 +step:5104 train loss:3.578815 +step:5105 train loss:3.511597 +step:5106 train loss:3.468788 +step:5107 train loss:3.489498 +step:5108 train loss:3.480881 +step:5109 train loss:3.564214 +step:5110 train loss:3.473531 +step:5111 train loss:3.569767 +step:5112 train loss:3.477072 +step:5113 train loss:3.463097 +step:5114 train loss:3.503370 +step:5115 train loss:3.469586 +step:5116 train loss:3.523910 +step:5117 train loss:3.468824 +step:5118 train loss:3.497877 +step:5119 train loss:3.476928 +step:5120 train loss:3.525576 +step:5121 train loss:3.467831 +step:5122 train loss:3.483537 +step:5123 train loss:3.466238 +step:5124 train loss:3.428612 +step:5125 train loss:3.537213 +step:5126 train loss:3.524712 +step:5127 train loss:3.528402 +step:5128 train loss:3.539096 +step:5129 train loss:3.468129 +step:5130 train loss:3.479077 +step:5131 train loss:3.421047 +step:5132 train loss:3.537211 +step:5133 train loss:3.506915 +step:5134 train loss:3.507529 +step:5135 train loss:3.461969 +step:5136 train loss:3.531282 +step:5137 train loss:3.525533 +step:5138 train loss:3.507312 +step:5139 train loss:3.539869 +step:5140 train loss:3.517103 +step:5141 train loss:3.544387 +step:5142 train loss:3.498847 +step:5143 train loss:3.520490 +step:5144 train loss:3.519691 +step:5145 train loss:3.460987 +step:5146 train loss:3.454976 +step:5147 train loss:3.532286 +step:5148 train loss:3.458258 +step:5149 train loss:3.533522 +step:5150 train loss:3.508310 +step:5151 train loss:3.476229 +step:5152 train loss:3.519365 +step:5153 train loss:3.493900 +step:5154 train loss:3.505893 +step:5155 train loss:3.512281 +step:5156 train loss:3.490774 +step:5157 train loss:3.492975 +step:5158 train loss:3.510012 +step:5159 train loss:3.552544 +step:5160 train loss:3.617445 +step:5161 train loss:3.544916 +step:5162 train loss:3.563378 +step:5163 train loss:3.477863 +step:5164 train loss:3.546902 +step:5165 train loss:3.554910 +step:5166 train loss:3.489979 +step:5167 train loss:3.592305 +step:5168 train 
loss:3.509253 +step:5169 train loss:3.538486 +step:5170 train loss:3.520663 +step:5171 train loss:3.562377 +step:5172 train loss:3.482939 +step:5173 train loss:3.545520 +step:5174 train loss:3.478445 +step:5175 train loss:3.513730 +step:5176 train loss:3.502745 +step:5177 train loss:3.504739 +step:5178 train loss:3.567168 +step:5179 train loss:3.477819 +step:5180 train loss:3.560015 +step:5181 train loss:3.502256 +step:5182 train loss:3.561337 +step:5183 train loss:3.490931 +step:5184 train loss:3.472537 +step:5185 train loss:3.494754 +step:5186 train loss:3.552370 +step:5187 train loss:3.546018 +step:5188 train loss:3.478697 +step:5189 train loss:3.525862 +step:5190 train loss:3.506187 +step:5191 train loss:3.489810 +step:5192 train loss:3.472605 +step:5193 train loss:3.559192 +step:5194 train loss:3.505734 +step:5195 train loss:3.479835 +step:5196 train loss:3.549850 +step:5197 train loss:3.596547 +step:5198 train loss:3.510489 +step:5199 train loss:3.492572 +step:5200 train loss:3.523644 +step:5201 train loss:3.505930 +step:5202 train loss:3.516258 +step:5203 train loss:3.515547 +step:5204 train loss:3.490339 +step:5205 train loss:3.531873 +step:5206 train loss:3.470425 +step:5207 train loss:3.475786 +step:5208 train loss:3.535228 +step:5209 train loss:3.552277 +step:5210 train loss:3.455908 +step:5211 train loss:3.502378 +step:5212 train loss:3.518412 +step:5213 train loss:3.493735 +step:5214 train loss:3.541480 +step:5215 train loss:3.655299 +step:5216 train loss:3.505497 +step:5217 train loss:3.484468 +step:5218 train loss:3.489369 +step:5219 train loss:3.551189 +step:5220 train loss:3.472013 +step:5221 train loss:3.473173 +step:5222 train loss:3.553312 +step:5223 train loss:3.544311 +step:5224 train loss:3.444250 +step:5225 train loss:3.591215 +step:5226 train loss:3.504240 +step:5227 train loss:3.579997 +step:5228 train loss:3.546922 +step:5229 train loss:3.489854 +step:5230 train loss:3.501286 +step:5231 train loss:3.449664 +step:5232 train loss:3.573693 +step:5233 train loss:3.532390 +step:5234 train loss:3.540247 +step:5235 train loss:3.483883 +step:5236 train loss:3.564614 +step:5237 train loss:3.613722 +step:5238 train loss:3.518108 +step:5239 train loss:3.576084 +step:5240 train loss:3.463334 +step:5241 train loss:3.522534 +step:5242 train loss:3.490607 +step:5243 train loss:3.496858 +step:5244 train loss:3.495149 +step:5245 train loss:3.539422 +step:5246 train loss:3.580686 +step:5247 train loss:3.508838 +step:5248 train loss:3.482721 +step:5249 train loss:3.540125 +step:5250 validation loss:3.433647 +step:5250 train loss:3.506425 +step:5251 train loss:3.572054 +step:5252 train loss:3.460154 +step:5253 train loss:3.613912 +step:5254 train loss:3.487392 +step:5255 train loss:3.560764 +step:5256 train loss:3.471797 +step:5257 train loss:3.530971 +step:5258 train loss:3.524025 +step:5259 train loss:3.510296 +step:5260 train loss:3.506843 +step:5261 train loss:3.496248 +step:5262 train loss:3.539689 +step:5263 train loss:3.523615 +step:5264 train loss:3.477700 +step:5265 train loss:3.553936 +step:5266 train loss:3.471747 +step:5267 train loss:3.482448 +step:5268 train loss:3.463669 +step:5269 train loss:3.470313 +step:5270 train loss:3.519114 +step:5271 train loss:3.442412 +step:5272 train loss:3.538225 +step:5273 train loss:3.444839 +step:5274 train loss:3.494938 +step:5275 train loss:3.506924 +step:5276 train loss:3.634551 +step:5277 train loss:3.533564 +step:5278 train loss:3.483266 +step:5279 train loss:3.530290 +step:5280 train loss:3.503540 +step:5281 train loss:3.499650 
+step:5282 train loss:3.472676 +step:5283 train loss:3.469927 +step:5284 train loss:3.482473 +step:5285 train loss:3.545317 +step:5286 train loss:3.456868 +step:5287 train loss:3.556230 +step:5288 train loss:3.534064 +step:5289 train loss:3.502003 +step:5290 train loss:3.556674 +step:5291 train loss:3.507618 +step:5292 train loss:3.526017 +step:5293 train loss:3.496829 +step:5294 train loss:3.485428 +step:5295 train loss:3.490813 +step:5296 train loss:3.482782 +step:5297 train loss:3.501805 +step:5298 train loss:3.451401 +step:5299 train loss:3.539417 +step:5300 train loss:3.489425 +step:5301 train loss:3.560211 +step:5302 train loss:3.566637 +step:5303 train loss:3.426044 +step:5304 train loss:3.456231 +step:5305 train loss:3.440951 +step:5306 train loss:3.469467 +step:5307 train loss:3.475572 +step:5308 train loss:3.567608 +step:5309 train loss:3.517455 +step:5310 train loss:3.504330 +step:5311 train loss:3.569606 +step:5312 train loss:3.454219 +step:5313 train loss:3.543049 +step:5314 train loss:3.538184 +step:5315 train loss:3.496312 +step:5316 train loss:3.526293 +step:5317 train loss:3.544937 +step:5318 train loss:3.499938 +step:5319 train loss:3.528751 +step:5320 train loss:3.479132 +step:5321 train loss:3.602671 +step:5322 train loss:3.512368 +step:5323 train loss:3.514622 +step:5324 train loss:3.457805 +step:5325 train loss:3.543244 +step:5326 train loss:3.526963 +step:5327 train loss:3.420386 +step:5328 train loss:3.558791 +step:5329 train loss:3.520025 +step:5330 train loss:3.520895 +step:5331 train loss:3.569741 +step:5332 train loss:3.495136 +step:5333 train loss:3.557577 +step:5334 train loss:3.530064 +step:5335 train loss:3.595754 +step:5336 train loss:3.626292 +step:5337 train loss:3.461130 +step:5338 train loss:3.467021 +step:5339 train loss:3.492961 +step:5340 train loss:3.516459 +step:5341 train loss:3.527544 +step:5342 train loss:3.430731 +step:5343 train loss:3.591247 +step:5344 train loss:3.470924 +step:5345 train loss:3.475011 +step:5346 train loss:3.475672 +step:5347 train loss:3.499232 +step:5348 train loss:3.540944 +step:5349 train loss:3.478029 +step:5350 train loss:3.520886 +step:5351 train loss:3.592725 +step:5352 train loss:3.639538 +step:5353 train loss:3.543047 +step:5354 train loss:3.514848 +step:5355 train loss:3.478939 +step:5356 train loss:3.502491 +step:5357 train loss:3.485307 +step:5358 train loss:3.506419 +step:5359 train loss:3.519164 +step:5360 train loss:3.491548 +step:5361 train loss:3.494347 +step:5362 train loss:3.474761 +step:5363 train loss:3.473111 +step:5364 train loss:3.472632 +step:5365 train loss:3.510079 +step:5366 train loss:3.535527 +step:5367 train loss:3.464993 +step:5368 train loss:3.532632 +step:5369 train loss:3.550390 +step:5370 train loss:3.448461 +step:5371 train loss:3.498473 +step:5372 train loss:3.526063 +step:5373 train loss:3.559592 +step:5374 train loss:3.447536 +step:5375 train loss:3.490813 +step:5376 train loss:3.555862 +step:5377 train loss:3.496531 +step:5378 train loss:3.472066 +step:5379 train loss:3.470302 +step:5380 train loss:3.510644 +step:5381 train loss:3.548386 +step:5382 train loss:3.453635 +step:5383 train loss:3.513775 +step:5384 train loss:3.531919 +step:5385 train loss:3.526658 +step:5386 train loss:3.510529 +step:5387 train loss:3.518301 +step:5388 train loss:3.524922 +step:5389 train loss:3.460366 +step:5390 train loss:3.488887 +step:5391 train loss:3.428400 +step:5392 train loss:3.494795 +step:5393 train loss:3.488351 +step:5394 train loss:3.478814 +step:5395 train loss:3.549129 +step:5396 train 
loss:3.518450 +step:5397 train loss:3.537852 +step:5398 train loss:3.534795 +step:5399 train loss:3.565110 +step:5400 train loss:3.573599 +step:5401 train loss:3.531276 +step:5402 train loss:3.637244 +step:5403 train loss:3.543330 +step:5404 train loss:3.517497 +step:5405 train loss:3.590223 +step:5406 train loss:3.547604 +step:5407 train loss:3.480906 +step:5408 train loss:3.621893 +step:5409 train loss:3.461991 +step:5410 train loss:3.526269 +step:5411 train loss:3.509539 +step:5412 train loss:3.490349 +step:5413 train loss:3.538268 +step:5414 train loss:3.516276 +step:5415 train loss:3.492734 +step:5416 train loss:3.485179 +step:5417 train loss:3.557560 +step:5418 train loss:3.570101 +step:5419 train loss:3.475965 +step:5420 train loss:3.533386 +step:5421 train loss:3.506328 +step:5422 train loss:3.549214 +step:5423 train loss:3.527501 +step:5424 train loss:3.428645 +step:5425 train loss:3.497484 +step:5426 train loss:3.584861 +step:5427 train loss:3.479166 +step:5428 train loss:3.512906 +step:5429 train loss:3.464901 +step:5430 train loss:3.481672 +step:5431 train loss:3.541683 +step:5432 train loss:3.519879 +step:5433 train loss:3.525765 +step:5434 train loss:3.475824 +step:5435 train loss:3.473073 +step:5436 train loss:3.473360 +step:5437 train loss:3.514684 +step:5438 train loss:3.492875 +step:5439 train loss:3.500257 +step:5440 train loss:3.543303 +step:5441 train loss:3.563896 +step:5442 train loss:3.480157 +step:5443 train loss:3.482369 +step:5444 train loss:3.427161 +step:5445 train loss:3.516375 +step:5446 train loss:3.482741 +step:5447 train loss:3.521193 +step:5448 train loss:3.576634 +step:5449 train loss:3.468763 +step:5450 train loss:3.502511 +step:5451 train loss:3.493263 +step:5452 train loss:3.511037 +step:5453 train loss:3.566570 +step:5454 train loss:3.492815 +step:5455 train loss:3.479494 +step:5456 train loss:3.618645 +step:5457 train loss:3.502706 +step:5458 train loss:3.530612 +step:5459 train loss:3.477672 +step:5460 train loss:3.492344 +step:5461 train loss:3.496994 +step:5462 train loss:3.499057 +step:5463 train loss:3.509065 +step:5464 train loss:3.509431 +step:5465 train loss:3.455606 +step:5466 train loss:3.527797 +step:5467 train loss:3.510508 +step:5468 train loss:3.517625 +step:5469 train loss:3.614595 +step:5470 train loss:3.504841 +step:5471 train loss:3.579374 +step:5472 train loss:3.526554 +step:5473 train loss:3.429918 +step:5474 train loss:3.767942 +step:5475 train loss:3.440217 +step:5476 train loss:3.518492 +step:5477 train loss:3.515593 +step:5478 train loss:3.517638 +step:5479 train loss:3.657155 +step:5480 train loss:3.505313 +step:5481 train loss:3.562462 +step:5482 train loss:3.478734 +step:5483 train loss:3.518374 +step:5484 train loss:3.554458 +step:5485 train loss:3.473411 +step:5486 train loss:3.520451 +step:5487 train loss:3.520880 +step:5488 train loss:3.432648 +step:5489 train loss:3.534181 +step:5490 train loss:3.482279 +step:5491 train loss:3.585510 +step:5492 train loss:3.511142 +step:5493 train loss:3.443668 +step:5494 train loss:3.494424 +step:5495 train loss:3.471000 +step:5496 train loss:3.473488 +step:5497 train loss:3.589614 +step:5498 train loss:3.459815 +step:5499 train loss:3.598500 +step:5500 validation loss:3.430192 total_sharp:5.6510e-05 L1_sharp:2.1946e-05 L2_sharp:1.5583e-05 L3_sharp:1.4843e-05 L4_sharp:4.6072e-06 L5_sharp:5.2630e-06 L6_sharp:8.0968e-06 L7_sharp:9.3431e-06 L8_sharp:9.3410e-06 L9_sharp:8.0259e-06 L10_sharp:5.5139e-06 L11_sharp:5.1703e-06 L12_sharp:1.9065e-05 total_fnorm:1.0365e+01 
total_l1_linf:7.7862e+04 total_spectral:1.0365e+01 L1_fnorm:2.9314e+00 L2_fnorm:2.7582e+00 L3_fnorm:2.8437e+00 L4_fnorm:2.9595e+00 L5_fnorm:2.9989e+00 L6_fnorm:3.0039e+00 L7_fnorm:3.0172e+00 L8_fnorm:3.0172e+00 L9_fnorm:3.0134e+00 L10_fnorm:3.0240e+00 L11_fnorm:3.0124e+00 L12_fnorm:3.0167e+00 L1_l1linf:1.9963e+00 L2_l1linf:1.9550e+00 L3_l1linf:2.0446e+00 L4_l1linf:2.0771e+00 L5_l1linf:2.0437e+00 L6_l1linf:2.0434e+00 L7_l1linf:2.0626e+00 L8_l1linf:2.0318e+00 L9_l1linf:2.0283e+00 L10_l1linf:2.0227e+00 L11_l1linf:1.9895e+00 L12_l1linf:1.9923e+00 L1_spectral:6.0204e-02 L2_spectral:6.0221e-02 L3_spectral:6.0207e-02 L4_spectral:6.0303e-02 L5_spectral:6.0232e-02 L6_spectral:6.0198e-02 L7_spectral:6.0252e-02 L8_spectral:6.0225e-02 L9_spectral:6.0224e-02 L10_spectral:6.0214e-02 L11_spectral:6.0224e-02 L12_spectral:6.0224e-02 ip_v_neg_g:2.3024e-03 cos_v_neg_g:3.2810e-04 v_norm:1.0365e+01 g_norm:6.7701e-01 hv_norm:7.5184e-02 cos_v_hv:7.7908e-03 hg_norm:3.2604e+00 cos_g_hg:6.4473e-01 v_par:6.5961e-04 v_perp:1.0365e+01 L1_cos_v_neg_g:2.4159e-03 L1_v_norm:2.9314e+00 L2_cos_v_neg_g:1.7966e-03 L2_v_norm:2.7582e+00 L3_cos_v_neg_g:1.8232e-03 L3_v_norm:2.8437e+00 L4_cos_v_neg_g:3.0316e-04 L4_v_norm:2.9595e+00 L5_cos_v_neg_g:7.0064e-04 L5_v_norm:2.9989e+00 L6_cos_v_neg_g:7.8833e-04 L6_v_norm:3.0039e+00 L7_cos_v_neg_g:-4.0433e-05 L7_v_norm:3.0172e+00 L8_cos_v_neg_g:8.1094e-04 L8_v_norm:3.0172e+00 L9_cos_v_neg_g:1.6983e-03 L9_v_norm:3.0134e+00 L10_cos_v_neg_g:2.4948e-03 L10_v_norm:3.0240e+00 L11_cos_v_neg_g:2.6159e-03 L11_v_norm:3.0124e+00 L12_cos_v_neg_g:7.4579e-04 L12_v_norm:3.0167e+00 +step:5500 train loss:3.511051 +step:5501 train loss:3.587795 +step:5502 train loss:3.531318 +step:5503 train loss:3.496736 +step:5504 train loss:3.544870 +step:5505 train loss:3.506038 +step:5506 train loss:3.549620 +step:5507 train loss:3.530709 +step:5508 train loss:3.559159 +step:5509 train loss:3.572779 +step:5510 train loss:3.536886 +step:5511 train loss:3.533420 +step:5512 train loss:3.659350 +step:5513 train loss:3.457591 +step:5514 train loss:3.519327 +step:5515 train loss:3.546209 +step:5516 train loss:3.570235 +step:5517 train loss:3.527229 +step:5518 train loss:3.553358 +step:5519 train loss:3.590766 +step:5520 train loss:3.493811 +step:5521 train loss:3.506356 +step:5522 train loss:3.479187 +step:5523 train loss:3.521597 +step:5524 train loss:3.563595 +step:5525 train loss:3.473816 +step:5526 train loss:3.485777 +step:5527 train loss:3.515036 +step:5528 train loss:3.611446 +step:5529 train loss:3.579385 +step:5530 train loss:3.543298 +step:5531 train loss:3.479857 +step:5532 train loss:3.505713 +step:5533 train loss:3.538432 +step:5534 train loss:3.456288 +step:5535 train loss:3.505780 +step:5536 train loss:3.443570 +step:5537 train loss:3.494242 +step:5538 train loss:3.484854 +step:5539 train loss:3.431000 +step:5540 train loss:3.654718 +step:5541 train loss:3.464983 +step:5542 train loss:3.513420 +step:5543 train loss:3.505671 +step:5544 train loss:3.490999 +step:5545 train loss:3.484649 +step:5546 train loss:3.518644 +step:5547 train loss:3.451072 +step:5548 train loss:3.497593 +step:5549 train loss:3.499409 +step:5550 train loss:3.524700 +step:5551 train loss:3.527521 +step:5552 train loss:3.483697 +step:5553 train loss:3.515285 +step:5554 train loss:3.484517 +step:5555 train loss:3.492408 +step:5556 train loss:3.512293 +step:5557 train loss:3.570793 +step:5558 train loss:3.493046 +step:5559 train loss:3.503189 +step:5560 train loss:3.494606 +step:5561 train loss:3.528900 +step:5562 train loss:3.482379 
+step:5563 train loss:3.466067 +step:5564 train loss:3.501628 +step:5565 train loss:3.567172 +step:5566 train loss:3.468320 +step:5567 train loss:3.591090 +step:5568 train loss:3.705100 +step:5569 train loss:3.494067 +step:5570 train loss:3.428514 +step:5571 train loss:3.514824 +step:5572 train loss:3.454829 +step:5573 train loss:3.448033 +step:5574 train loss:3.411651 +step:5575 train loss:3.514344 +step:5576 train loss:3.494826 +step:5577 train loss:3.501157 +step:5578 train loss:3.532041 +step:5579 train loss:3.485986 +step:5580 train loss:3.508849 +step:5581 train loss:3.533171 +step:5582 train loss:3.509689 +step:5583 train loss:3.521634 +step:5584 train loss:3.640320 +step:5585 train loss:3.544443 +step:5586 train loss:3.479882 +step:5587 train loss:3.509452 +step:5588 train loss:3.527263 +step:5589 train loss:3.526823 +step:5590 train loss:3.586145 +step:5591 train loss:3.455255 +step:5592 train loss:3.638155 +step:5593 train loss:3.502847 +step:5594 train loss:3.511966 +step:5595 train loss:3.504553 +step:5596 train loss:3.453558 +step:5597 train loss:3.470176 +step:5598 train loss:3.479236 +step:5599 train loss:3.477833 +step:5600 train loss:3.525614 +step:5601 train loss:3.548135 +step:5602 train loss:3.484228 +step:5603 train loss:3.522716 +step:5604 train loss:3.520804 +step:5605 train loss:3.493155 +step:5606 train loss:3.498348 +step:5607 train loss:3.526070 +step:5608 train loss:3.469943 +step:5609 train loss:3.523273 +step:5610 train loss:3.477507 +step:5611 train loss:3.518181 +step:5612 train loss:3.549357 +step:5613 train loss:3.508394 +step:5614 train loss:3.474846 +step:5615 train loss:3.574330 +step:5616 train loss:3.474011 +step:5617 train loss:3.561691 +step:5618 train loss:3.547936 +step:5619 train loss:3.500200 +step:5620 train loss:3.504620 +step:5621 train loss:3.577309 +step:5622 train loss:3.460263 +step:5623 train loss:3.498245 +step:5624 train loss:3.484117 +step:5625 train loss:3.518913 +step:5626 train loss:3.515826 +step:5627 train loss:3.486360 +step:5628 train loss:3.528819 +step:5629 train loss:3.508158 +step:5630 train loss:3.437668 +step:5631 train loss:3.482688 +step:5632 train loss:3.521580 +step:5633 train loss:3.514627 +step:5634 train loss:3.470081 +step:5635 train loss:3.506634 +step:5636 train loss:3.484826 +step:5637 train loss:3.621699 +step:5638 train loss:3.531942 +step:5639 train loss:3.511714 +step:5640 train loss:3.513725 +step:5641 train loss:3.555515 +step:5642 train loss:3.489470 +step:5643 train loss:3.505612 +step:5644 train loss:3.585011 +step:5645 train loss:3.544143 +step:5646 train loss:3.541463 +step:5647 train loss:3.531077 +step:5648 train loss:3.520344 +step:5649 train loss:3.432659 +step:5650 train loss:3.440523 +step:5651 train loss:3.514507 +step:5652 train loss:3.514004 +step:5653 train loss:3.482743 +step:5654 train loss:3.610685 +step:5655 train loss:3.472973 +step:5656 train loss:3.497200 +step:5657 train loss:3.564130 +step:5658 train loss:3.469810 +step:5659 train loss:3.505429 +step:5660 train loss:3.554447 +step:5661 train loss:3.494816 +step:5662 train loss:3.534327 +step:5663 train loss:3.424923 +step:5664 train loss:3.395802 +step:5665 train loss:3.518720 +step:5666 train loss:3.523332 +step:5667 train loss:3.556648 +step:5668 train loss:3.485931 +step:5669 train loss:3.499679 +step:5670 train loss:3.501124 +step:5671 train loss:3.485300 +step:5672 train loss:3.534040 +step:5673 train loss:3.503517 +step:5674 train loss:3.572015 +step:5675 train loss:3.486632 +step:5676 train loss:3.635845 +step:5677 train 
loss:3.529691 +step:5678 train loss:3.509656 +step:5679 train loss:3.498690 +step:5680 train loss:3.528484 +step:5681 train loss:3.499909 +step:5682 train loss:3.513315 +step:5683 train loss:3.470148 +step:5684 train loss:3.482091 +step:5685 train loss:3.526960 +step:5686 train loss:3.542827 +step:5687 train loss:3.504901 +step:5688 train loss:3.576976 +step:5689 train loss:3.484927 +step:5690 train loss:3.633841 +step:5691 train loss:3.465874 +step:5692 train loss:3.458631 +step:5693 train loss:3.460116 +step:5694 train loss:3.480237 +step:5695 train loss:3.497859 +step:5696 train loss:3.545799 +step:5697 train loss:3.473138 +step:5698 train loss:3.490481 +step:5699 train loss:3.505681 +step:5700 train loss:3.501935 +step:5701 train loss:3.498553 +step:5702 train loss:3.564245 +step:5703 train loss:3.465152 +step:5704 train loss:3.508084 +step:5705 train loss:3.514955 +step:5706 train loss:3.539783 +step:5707 train loss:3.458142 +step:5708 train loss:3.544051 +step:5709 train loss:3.544881 +step:5710 train loss:3.537446 +step:5711 train loss:3.557296 +step:5712 train loss:3.540945 +step:5713 train loss:3.466553 +step:5714 train loss:3.549108 +step:5715 train loss:3.505549 +step:5716 train loss:3.514222 +step:5717 train loss:3.537248 +step:5718 train loss:3.482152 +step:5719 train loss:3.555400 +step:5720 train loss:3.525951 +step:5721 train loss:3.454812 +step:5722 train loss:3.469271 +step:5723 train loss:3.548635 +step:5724 train loss:3.466478 +step:5725 train loss:3.537448 +step:5726 train loss:3.530833 +step:5727 train loss:3.490715 +step:5728 train loss:3.494461 +step:5729 train loss:3.492921 +step:5730 train loss:3.569368 +step:5731 train loss:3.433168 +step:5732 train loss:3.492476 +step:5733 train loss:3.484633 +step:5734 train loss:3.501739 +step:5735 train loss:3.491623 +step:5736 train loss:3.494152 +step:5737 train loss:3.516792 +step:5738 train loss:3.481663 +step:5739 train loss:3.492118 +step:5740 train loss:3.531502 +step:5741 train loss:3.507465 +step:5742 train loss:3.557453 +step:5743 train loss:3.526841 +step:5744 train loss:3.486221 +step:5745 train loss:3.483892 +step:5746 train loss:3.515816 +step:5747 train loss:3.505870 +step:5748 train loss:3.548271 +step:5749 train loss:3.504283 +step:5750 validation loss:3.425339 +step:5750 train loss:3.512266 +step:5751 train loss:3.525836 +step:5752 train loss:3.512047 +step:5753 train loss:3.485085 +step:5754 train loss:3.491980 +step:5755 train loss:3.507128 +step:5756 train loss:3.497941 +step:5757 train loss:3.556947 +step:5758 train loss:3.493093 +step:5759 train loss:3.455662 +step:5760 train loss:3.536686 +step:5761 train loss:3.531279 +step:5762 train loss:3.490010 +step:5763 train loss:3.519720 +step:5764 train loss:3.481593 +step:5765 train loss:3.599118 +step:5766 train loss:3.508370 +step:5767 train loss:3.542680 +step:5768 train loss:3.479068 +step:5769 train loss:3.602180 +step:5770 train loss:3.525289 +step:5771 train loss:3.552725 +step:5772 train loss:3.499064 +step:5773 train loss:3.484808 +step:5774 train loss:3.488074 +step:5775 train loss:3.560359 +step:5776 train loss:3.544042 +step:5777 train loss:3.463099 +step:5778 train loss:3.556539 +step:5779 train loss:3.504875 +step:5780 train loss:3.480782 +step:5781 train loss:3.547068 +step:5782 train loss:3.504665 +step:5783 train loss:3.463266 +step:5784 train loss:3.567939 +step:5785 train loss:3.555071 +step:5786 train loss:3.467458 +step:5787 train loss:3.518103 +step:5788 train loss:3.524817 +step:5789 train loss:3.466798 +step:5790 train loss:3.569860 
+step:5791 train loss:3.496716 +step:5792 train loss:3.771273 +step:5793 train loss:3.541827 +step:5794 train loss:3.556946 +step:5795 train loss:3.553547 +step:5796 train loss:3.533118 +step:5797 train loss:3.513680 +step:5798 train loss:3.513908 +step:5799 train loss:3.485050 +step:5800 train loss:3.643727 +step:5801 train loss:3.516690 +step:5802 train loss:3.505793 +step:5803 train loss:3.516840 +step:5804 train loss:3.533659 +step:5805 train loss:3.502079 +step:5806 train loss:3.536890 +step:5807 train loss:3.460621 +step:5808 train loss:3.490297 +step:5809 train loss:3.504979 +step:5810 train loss:3.476315 +step:5811 train loss:3.494363 +step:5812 train loss:3.471192 +step:5813 train loss:3.483679 +step:5814 train loss:3.478906 +step:5815 train loss:3.482512 +step:5816 train loss:3.544248 +step:5817 train loss:3.554649 +step:5818 train loss:3.525007 +step:5819 train loss:3.578535 +step:5820 train loss:3.517090 +step:5821 train loss:3.511316 +step:5822 train loss:3.527072 +step:5823 train loss:3.533004 +step:5824 train loss:3.481228 +step:5825 train loss:3.575093 +step:5826 train loss:3.487613 +step:5827 train loss:3.455304 +step:5828 train loss:3.441399 +step:5829 train loss:3.504694 +step:5830 train loss:3.479695 +step:5831 train loss:3.451160 +step:5832 train loss:3.565653 +step:5833 train loss:3.540537 +step:5834 train loss:3.526330 +step:5835 train loss:3.477282 +step:5836 train loss:3.442720 +step:5837 train loss:3.562770 +step:5838 train loss:3.542178 +step:5839 train loss:3.519023 +step:5840 train loss:3.602949 +step:5841 train loss:3.522318 +step:5842 train loss:3.538121 +step:5843 train loss:3.478237 +step:5844 train loss:3.548484 +step:5845 train loss:3.458079 +step:5846 train loss:3.507169 +step:5847 train loss:3.532638 +step:5848 train loss:3.601041 +step:5849 train loss:3.496668 +step:5850 train loss:3.526774 +step:5851 train loss:3.487613 +step:5852 train loss:3.578213 +step:5853 train loss:3.673213 +step:5854 train loss:3.460186 +step:5855 train loss:3.520121 +step:5856 train loss:3.493474 +step:5857 train loss:3.503478 +step:5858 train loss:3.476683 +step:5859 train loss:3.484660 +step:5860 train loss:3.586010 +step:5861 train loss:3.468205 +step:5862 train loss:3.582633 +step:5863 train loss:3.521653 +step:5864 train loss:3.509576 +step:5865 train loss:3.511459 +step:5866 train loss:3.502793 +step:5867 train loss:3.589246 +step:5868 train loss:3.506342 +step:5869 train loss:3.534355 +step:5870 train loss:3.512585 +step:5871 train loss:3.489953 +step:5872 train loss:3.521106 +step:5873 train loss:3.500244 +step:5874 train loss:3.581175 +step:5875 train loss:3.508507 +step:5876 train loss:3.490454 +step:5877 train loss:3.498472 +step:5878 train loss:3.496470 +step:5879 train loss:3.468575 +step:5880 train loss:3.667074 +step:5881 train loss:3.504446 +step:5882 train loss:3.478903 +step:5883 train loss:3.479403 +step:5884 train loss:3.495703 +step:5885 train loss:3.494326 +step:5886 train loss:3.514015 +step:5887 train loss:3.513193 +step:5888 train loss:3.494932 +step:5889 train loss:3.473103 +step:5890 train loss:3.518442 +step:5891 train loss:3.464907 +step:5892 train loss:3.546852 +step:5893 train loss:3.468360 +step:5894 train loss:3.459929 +step:5895 train loss:3.468112 +step:5896 train loss:3.473747 +step:5897 train loss:3.544492 +step:5898 train loss:3.764163 +step:5899 train loss:3.489560 +step:5900 train loss:3.543522 +step:5901 train loss:3.496650 +step:5902 train loss:3.503282 +step:5903 train loss:3.494119 +step:5904 train loss:3.526337 +step:5905 train 
loss:3.630970 +step:5906 train loss:3.573284 +step:5907 train loss:3.514721 +step:5908 train loss:3.490855 +step:5909 train loss:3.487331 +step:5910 train loss:3.471219 +step:5911 train loss:3.490242 +step:5912 train loss:3.505460 +step:5913 train loss:3.526863 +step:5914 train loss:3.507450 +step:5915 train loss:3.630188 +step:5916 train loss:3.510916 +step:5917 train loss:3.480648 +step:5918 train loss:3.481022 +step:5919 train loss:3.506322 +step:5920 train loss:3.503792 +step:5921 train loss:3.477150 +step:5922 train loss:3.532358 +step:5923 train loss:3.528685 +step:5924 train loss:3.484496 +step:5925 train loss:3.608458 +step:5926 train loss:3.491865 +step:5927 train loss:3.472246 +step:5928 train loss:3.507064 +step:5929 train loss:3.527020 +step:5930 train loss:3.477090 +step:5931 train loss:3.459194 +step:5932 train loss:3.503596 +step:5933 train loss:3.558579 +step:5934 train loss:3.464314 +step:5935 train loss:3.496149 +step:5936 train loss:3.482503 +step:5937 train loss:3.463010 +step:5938 train loss:3.482197 +step:5939 train loss:3.459948 +step:5940 train loss:3.542030 +step:5941 train loss:3.478634 +step:5942 train loss:3.493932 +step:5943 train loss:3.497926 +step:5944 train loss:3.554203 +step:5945 train loss:3.487279 +step:5946 train loss:3.464004 +step:5947 train loss:3.474908 +step:5948 train loss:3.516096 +step:5949 train loss:3.559603 +step:5950 train loss:3.519022 +step:5951 train loss:3.519728 +step:5952 train loss:3.442998 +step:5953 train loss:3.482316 +step:5954 train loss:3.497411 +step:5955 train loss:3.503983 +step:5956 train loss:3.479974 +step:5957 train loss:3.448398 +step:5958 train loss:3.522697 +step:5959 train loss:3.479674 +step:5960 train loss:3.457407 +step:5961 train loss:3.481001 +step:5962 train loss:3.511791 +step:5963 train loss:3.545480 +step:5964 train loss:3.502769 +step:5965 train loss:3.520512 +step:5966 train loss:3.518254 +step:5967 train loss:3.482159 +step:5968 train loss:3.555996 +step:5969 train loss:3.494399 +step:5970 train loss:3.515486 +step:5971 train loss:3.463102 +step:5972 train loss:3.492509 +step:5973 train loss:3.479045 +step:5974 train loss:3.507799 +step:5975 train loss:3.474767 +step:5976 train loss:3.523638 +step:5977 train loss:3.474570 +step:5978 train loss:3.461284 +step:5979 train loss:3.495877 +step:5980 train loss:3.570421 +step:5981 train loss:3.458797 +step:5982 train loss:3.472905 +step:5983 train loss:3.536744 +step:5984 train loss:3.480568 +step:5985 train loss:3.526862 +step:5986 train loss:3.498542 +step:5987 train loss:3.484197 +step:5988 train loss:3.491355 +step:5989 train loss:3.514330 +step:5990 train loss:3.444494 +step:5991 train loss:3.504869 +step:5992 train loss:3.540045 +step:5993 train loss:3.490053 +step:5994 train loss:3.512526 +step:5995 train loss:3.400281 +step:5996 train loss:3.569281 +step:5997 train loss:3.550529 +step:5998 train loss:3.424092 +step:5999 train loss:3.454696 +step:6000 validation loss:3.417757 total_sharp:6.9350e-05 L1_sharp:3.1989e-05 L2_sharp:1.5930e-05 L3_sharp:9.8462e-06 L4_sharp:7.9681e-06 L5_sharp:7.2851e-06 L6_sharp:1.0863e-05 L7_sharp:1.3944e-05 L8_sharp:1.2334e-05 L9_sharp:1.2451e-05 L10_sharp:7.5575e-06 L11_sharp:6.9588e-06 L12_sharp:7.6741e-06 total_fnorm:1.0365e+01 total_l1_linf:7.7859e+04 total_spectral:1.0365e+01 L1_fnorm:2.9321e+00 L2_fnorm:2.7500e+00 L3_fnorm:2.8570e+00 L4_fnorm:2.9508e+00 L5_fnorm:2.9942e+00 L6_fnorm:3.0049e+00 L7_fnorm:3.0191e+00 L8_fnorm:3.0195e+00 L9_fnorm:3.0143e+00 L10_fnorm:3.0246e+00 L11_fnorm:3.0141e+00 L12_fnorm:3.0163e+00 
L1_l1linf:1.9947e+00 L2_l1linf:1.9609e+00 L3_l1linf:2.0402e+00 L4_l1linf:2.0784e+00 L5_l1linf:2.0373e+00 L6_l1linf:2.0435e+00 L7_l1linf:2.0612e+00 L8_l1linf:2.0453e+00 L9_l1linf:2.0088e+00 L10_l1linf:2.0172e+00 L11_l1linf:1.9970e+00 L12_l1linf:1.9982e+00 L1_spectral:6.0214e-02 L2_spectral:6.0214e-02 L3_spectral:6.0221e-02 L4_spectral:6.0279e-02 L5_spectral:6.0226e-02 L6_spectral:6.0221e-02 L7_spectral:6.0221e-02 L8_spectral:6.0218e-02 L9_spectral:6.0227e-02 L10_spectral:6.0212e-02 L11_spectral:6.0208e-02 L12_spectral:6.0217e-02 ip_v_neg_g:3.4052e-03 cos_v_neg_g:1.0108e-03 v_norm:1.0365e+01 g_norm:3.2500e-01 hv_norm:6.9169e-02 cos_v_hv:1.0393e-02 hg_norm:4.8937e-01 cos_g_hg:3.6052e-01 v_par:8.8238e-04 v_perp:1.0365e+01 L1_cos_v_neg_g:4.3887e-03 L1_v_norm:2.9321e+00 L2_cos_v_neg_g:1.4062e-03 L2_v_norm:2.7500e+00 L3_cos_v_neg_g:2.4376e-03 L3_v_norm:2.8570e+00 L4_cos_v_neg_g:2.8352e-03 L4_v_norm:2.9508e+00 L5_cos_v_neg_g:3.1996e-03 L5_v_norm:2.9942e+00 L6_cos_v_neg_g:3.4518e-03 L6_v_norm:3.0049e+00 L7_cos_v_neg_g:5.0300e-03 L7_v_norm:3.0191e+00 L8_cos_v_neg_g:4.1497e-03 L8_v_norm:3.0195e+00 L9_cos_v_neg_g:3.9976e-03 L9_v_norm:3.0143e+00 L10_cos_v_neg_g:2.5565e-03 L10_v_norm:3.0246e+00 L11_cos_v_neg_g:2.3943e-03 L11_v_norm:3.0141e+00 L12_cos_v_neg_g:6.8398e-04 L12_v_norm:3.0163e+00 +step:6000 train loss:3.504063 +step:6001 train loss:3.468220 +step:6002 train loss:3.496801 +step:6003 train loss:3.515063 +step:6004 train loss:3.468033 +step:6005 train loss:3.540139 +step:6006 train loss:3.446676 +step:6007 train loss:3.471660 +step:6008 train loss:3.479406 +step:6009 train loss:3.517873 +step:6010 train loss:3.506545 +step:6011 train loss:3.497928 +step:6012 train loss:3.465384 +step:6013 train loss:3.523361 +step:6014 train loss:3.542335 +step:6015 train loss:3.542959 +step:6016 train loss:3.508394 +step:6017 train loss:3.518138 +step:6018 train loss:3.457450 +step:6019 train loss:3.495809 +step:6020 train loss:3.481187 +step:6021 train loss:3.410329 +step:6022 train loss:3.523492 +step:6023 train loss:3.456575 +step:6024 train loss:3.535062 +step:6025 train loss:3.497913 +step:6026 train loss:3.472363 +step:6027 train loss:3.511812 +step:6028 train loss:3.428913 +step:6029 train loss:3.542891 +step:6030 train loss:3.511727 +step:6031 train loss:3.484888 +step:6032 train loss:3.446920 +step:6033 train loss:3.499440 +step:6034 train loss:3.527879 +step:6035 train loss:3.441986 +step:6036 train loss:3.418408 +step:6037 train loss:3.533368 +step:6038 train loss:3.536765 +step:6039 train loss:3.521727 +step:6040 train loss:3.478308 +step:6041 train loss:3.458611 +step:6042 train loss:3.439418 +step:6043 train loss:3.498144 +step:6044 train loss:3.617590 +step:6045 train loss:3.463035 +step:6046 train loss:3.471884 +step:6047 train loss:3.505852 +step:6048 train loss:3.518576 +step:6049 train loss:3.494974 +step:6050 train loss:3.462593 +step:6051 train loss:3.513324 +step:6052 train loss:3.487111 +step:6053 train loss:3.604978 +step:6054 train loss:3.644974 +step:6055 train loss:3.457883 +step:6056 train loss:3.450605 +step:6057 train loss:3.484073 +step:6058 train loss:3.513038 +step:6059 train loss:3.514392 +step:6060 train loss:3.522039 +step:6061 train loss:3.534856 +step:6062 train loss:3.491635 +step:6063 train loss:3.502793 +step:6064 train loss:3.498201 +step:6065 train loss:3.500534 +step:6066 train loss:3.486575 +step:6067 train loss:3.525334 +step:6068 train loss:3.469751 +step:6069 train loss:3.426635 +step:6070 train loss:3.580125 +step:6071 train loss:3.515427 +step:6072 train 
loss:3.457098 +step:6073 train loss:3.500133 +step:6074 train loss:3.580880 +step:6075 train loss:3.504544 +step:6076 train loss:3.512446 +step:6077 train loss:3.510805 +step:6078 train loss:3.447959 +step:6079 train loss:3.479311 +step:6080 train loss:3.484188 +step:6081 train loss:3.521960 +step:6082 train loss:3.469990 +step:6083 train loss:3.484833 +step:6084 train loss:3.552005 +step:6085 train loss:3.543447 +step:6086 train loss:3.445861 +step:6087 train loss:3.490578 +step:6088 train loss:3.476121 +step:6089 train loss:3.532443 +step:6090 train loss:3.536564 +step:6091 train loss:3.486776 +step:6092 train loss:3.445922 +step:6093 train loss:3.508309 +step:6094 train loss:3.421847 +step:6095 train loss:3.590391 +step:6096 train loss:3.460546 +step:6097 train loss:3.534124 +step:6098 train loss:3.506729 +step:6099 train loss:3.570137 +step:6100 train loss:3.561124 +step:6101 train loss:3.496233 +step:6102 train loss:3.615009 +step:6103 train loss:3.496090 +step:6104 train loss:3.606906 +step:6105 train loss:3.543892 +step:6106 train loss:3.482224 +step:6107 train loss:3.545031 +step:6108 train loss:3.508024 +step:6109 train loss:3.578986 +step:6110 train loss:3.510480 +step:6111 train loss:3.547033 +step:6112 train loss:3.485361 +step:6113 train loss:3.511499 +step:6114 train loss:3.482923 +step:6115 train loss:3.540648 +step:6116 train loss:3.485693 +step:6117 train loss:3.538914 +step:6118 train loss:3.524015 +step:6119 train loss:3.530613 +step:6120 train loss:3.676412 +step:6121 train loss:3.512404 +step:6122 train loss:3.520596 +step:6123 train loss:3.498151 +step:6124 train loss:3.476937 +step:6125 train loss:3.468283 +step:6126 train loss:3.485768 +step:6127 train loss:3.472053 +step:6128 train loss:3.443612 +step:6129 train loss:3.668973 +step:6130 train loss:3.458359 +step:6131 train loss:3.437035 +step:6132 train loss:3.509282 +step:6133 train loss:3.472554 +step:6134 train loss:3.504644 +step:6135 train loss:3.584729 +step:6136 train loss:3.603975 +step:6137 train loss:3.466986 +step:6138 train loss:3.523431 +step:6139 train loss:3.505111 +step:6140 train loss:3.501191 +step:6141 train loss:3.463139 +step:6142 train loss:3.525148 +step:6143 train loss:3.493367 +step:6144 train loss:3.515506 +step:6145 train loss:3.759011 +step:6146 train loss:3.593584 +step:6147 train loss:3.683107 +step:6148 train loss:3.444348 +step:6149 train loss:3.575330 +step:6150 train loss:3.526478 +step:6151 train loss:3.479622 +step:6152 train loss:3.477908 +step:6153 train loss:3.546072 +step:6154 train loss:3.629706 +step:6155 train loss:3.498876 +step:6156 train loss:3.598634 +step:6157 train loss:3.523706 +step:6158 train loss:3.512085 +step:6159 train loss:3.480853 +step:6160 train loss:3.645201 +step:6161 train loss:3.497401 +step:6162 train loss:3.516160 +step:6163 train loss:3.547674 +step:6164 train loss:3.462859 +step:6165 train loss:3.525968 +step:6166 train loss:3.524928 +step:6167 train loss:3.538912 +step:6168 train loss:3.515769 +step:6169 train loss:3.507688 +step:6170 train loss:3.511841 +step:6171 train loss:3.482032 +step:6172 train loss:3.469970 +step:6173 train loss:3.520814 +step:6174 train loss:3.446569 +step:6175 train loss:3.459945 +step:6176 train loss:3.443178 +step:6177 train loss:3.534518 +step:6178 train loss:3.483709 +step:6179 train loss:3.494361 +step:6180 train loss:3.498505 +step:6181 train loss:3.534162 +step:6182 train loss:3.415709 +step:6183 train loss:3.425751 +step:6184 train loss:3.538575 +step:6185 train loss:3.496527 +step:6186 train loss:3.458456 
+step:6187 train loss:3.501149 +step:6188 train loss:3.467850 +step:6189 train loss:3.506551 +step:6190 train loss:3.466920 +step:6191 train loss:3.498523 +step:6192 train loss:3.467515 +step:6193 train loss:3.532453 +step:6194 train loss:3.524226 +step:6195 train loss:3.507229 +step:6196 train loss:3.519528 +step:6197 train loss:3.541702 +step:6198 train loss:3.457007 +step:6199 train loss:3.478621 +step:6200 train loss:3.522838 +step:6201 train loss:3.563089 +step:6202 train loss:3.567732 +step:6203 train loss:3.565436 +step:6204 train loss:3.548506 +step:6205 train loss:3.484424 +step:6206 train loss:3.471759 +step:6207 train loss:3.531953 +step:6208 train loss:3.556710 +step:6209 train loss:3.525260 +step:6210 train loss:3.556585 +step:6211 train loss:3.474660 +step:6212 train loss:3.466375 +step:6213 train loss:3.480641 +step:6214 train loss:3.457122 +step:6215 train loss:3.636132 +step:6216 train loss:3.502980 +step:6217 train loss:3.559216 +step:6218 train loss:3.536201 +step:6219 train loss:3.550097 +step:6220 train loss:3.503715 +step:6221 train loss:3.470957 +step:6222 train loss:3.711855 +step:6223 train loss:3.469354 +step:6224 train loss:3.502793 +step:6225 train loss:3.482685 +step:6226 train loss:3.493686 +step:6227 train loss:3.498388 +step:6228 train loss:3.492904 +step:6229 train loss:3.529702 +step:6230 train loss:3.489483 +step:6231 train loss:3.597501 +step:6232 train loss:3.443918 +step:6233 train loss:3.482338 +step:6234 train loss:3.488572 +step:6235 train loss:3.520634 +step:6236 train loss:3.454053 +step:6237 train loss:3.479253 +step:6238 train loss:3.503814 +step:6239 train loss:3.489405 +step:6240 train loss:3.512700 +step:6241 train loss:3.492783 +step:6242 train loss:3.492223 +step:6243 train loss:3.529723 +step:6244 train loss:3.683633 +step:6245 train loss:3.479322 +step:6246 train loss:3.470098 +step:6247 train loss:3.461208 +step:6248 train loss:3.465117 +step:6249 train loss:3.406109 +step:6250 validation loss:3.414579 +step:6250 train loss:3.442339 +step:6251 train loss:3.462057 +step:6252 train loss:3.503546 +step:6253 train loss:3.514067 +step:6254 train loss:3.505119 +step:6255 train loss:3.469752 +step:6256 train loss:3.519553 +step:6257 train loss:3.519046 +step:6258 train loss:3.500798 +step:6259 train loss:3.506912 +step:6260 train loss:3.535684 +step:6261 train loss:3.556376 +step:6262 train loss:3.447775 +step:6263 train loss:3.481817 +step:6264 train loss:3.491945 +step:6265 train loss:3.482383 +step:6266 train loss:3.689039 +step:6267 train loss:3.486645 +step:6268 train loss:3.573137 +step:6269 train loss:3.447619 +step:6270 train loss:3.461535 +step:6271 train loss:3.509425 +step:6272 train loss:3.500227 +step:6273 train loss:3.699522 +step:6274 train loss:3.476639 +step:6275 train loss:3.512788 +step:6276 train loss:3.481378 +step:6277 train loss:3.466432 +step:6278 train loss:3.451914 +step:6279 train loss:3.507319 +step:6280 train loss:3.510847 +step:6281 train loss:3.445879 +step:6282 train loss:3.457761 +step:6283 train loss:3.544658 +step:6284 train loss:3.511573 +step:6285 train loss:3.512939 +step:6286 train loss:3.460196 +step:6287 train loss:3.489469 +step:6288 train loss:3.587506 +step:6289 train loss:3.449379 +step:6290 train loss:3.448352 +step:6291 train loss:3.480367 +step:6292 train loss:3.498118 +step:6293 train loss:3.485537 +step:6294 train loss:3.472034 +step:6295 train loss:3.493627 +step:6296 train loss:3.457968 +step:6297 train loss:3.585957 +step:6298 train loss:3.532040 +step:6299 train loss:3.423984 +step:6300 
train loss:3.505236 +step:6301 train loss:3.532001 +step:6302 train loss:3.516100 +step:6303 train loss:3.483997 +step:6304 train loss:3.504292 +step:6305 train loss:3.472876 +step:6306 train loss:3.485871 +step:6307 train loss:3.493085 +step:6308 train loss:3.469478 +step:6309 train loss:3.467479 +step:6310 train loss:3.521466 +step:6311 train loss:3.473986 +step:6312 train loss:3.512888 +step:6313 train loss:3.443557 +step:6314 train loss:3.470605 +step:6315 train loss:3.524731 +step:6316 train loss:3.445479 +step:6317 train loss:3.437521 +step:6318 train loss:3.553671 +step:6319 train loss:3.483784 +step:6320 train loss:3.496842 +step:6321 train loss:3.484074 +step:6322 train loss:3.487169 +step:6323 train loss:3.414083 +step:6324 train loss:3.426944 +step:6325 train loss:3.523862 +step:6326 train loss:3.439661 +step:6327 train loss:3.520019 +step:6328 train loss:3.492539 +step:6329 train loss:3.415066 +step:6330 train loss:3.441931 +step:6331 train loss:3.461764 +step:6332 train loss:3.596524 +step:6333 train loss:3.472690 +step:6334 train loss:3.449863 +step:6335 train loss:3.419593 +step:6336 train loss:3.452927 +step:6337 train loss:3.480725 +step:6338 train loss:3.430285 +step:6339 train loss:3.476976 +step:6340 train loss:3.452725 +step:6341 train loss:3.473917 +step:6342 train loss:3.470056 +step:6343 train loss:3.567950 +step:6344 train loss:3.421100 +step:6345 train loss:3.434324 +step:6346 train loss:3.511982 +step:6347 train loss:3.389106 +step:6348 train loss:3.481691 +step:6349 train loss:3.460011 +step:6350 train loss:3.435669 +step:6351 train loss:3.433706 +step:6352 train loss:3.451644 +step:6353 train loss:3.466406 +step:6354 train loss:3.481727 +step:6355 train loss:3.493170 +step:6356 train loss:3.502880 +step:6357 train loss:3.360897 +step:6358 train loss:3.448969 +step:6359 train loss:3.504131 +step:6360 train loss:3.416943 +step:6361 train loss:3.415356 +step:6362 train loss:3.459673 +step:6363 train loss:3.439596 +step:6364 train loss:3.424147 +step:6365 train loss:3.496701 +step:6366 train loss:3.508648 +step:6367 train loss:3.441194 +step:6368 train loss:3.479554 +step:6369 train loss:3.446855 +step:6370 train loss:3.499356 +step:6371 train loss:3.414921 +step:6372 train loss:3.444042 +step:6373 train loss:3.473181 +step:6374 train loss:3.499456 +step:6375 train loss:3.459795 +step:6376 train loss:3.480343 +step:6377 train loss:3.485573 +step:6378 train loss:3.426537 +step:6379 train loss:3.471886 +step:6380 train loss:3.517485 +step:6381 train loss:3.477151 +step:6382 train loss:3.436086 +step:6383 train loss:3.493676 +step:6384 train loss:3.473491 +step:6385 train loss:3.448343 +step:6386 train loss:3.482176 +step:6387 train loss:3.463428 +step:6388 train loss:3.502091 +step:6389 train loss:3.512146 +step:6390 train loss:3.461332 +step:6391 train loss:3.450917 +step:6392 train loss:3.436106 +step:6393 train loss:3.488505 +step:6394 train loss:3.480141 +step:6395 train loss:3.654758 +step:6396 train loss:3.479214 +step:6397 train loss:3.420313 +step:6398 train loss:3.492628 +step:6399 train loss:3.432564 +step:6400 train loss:3.507156 +step:6401 train loss:3.547634 +step:6402 train loss:3.473853 +step:6403 train loss:3.470150 +step:6404 train loss:3.447438 +step:6405 train loss:3.472817 +step:6406 train loss:3.479698 +step:6407 train loss:3.539127 +step:6408 train loss:3.430609 +step:6409 train loss:3.413240 +step:6410 train loss:3.547129 +step:6411 train loss:3.473630 +step:6412 train loss:3.479940 +step:6413 train loss:3.483165 +step:6414 train loss:3.431245 
+step:6415 train loss:3.497180 +step:6416 train loss:3.460724 +step:6417 train loss:3.434189 +step:6418 train loss:3.424155 +step:6419 train loss:3.507512 +step:6420 train loss:3.435565 +step:6421 train loss:3.462906 +step:6422 train loss:3.455373 +step:6423 train loss:3.459646 +step:6424 train loss:3.485160 +step:6425 train loss:3.479080 +step:6426 train loss:3.520918 +step:6427 train loss:3.486809 +step:6428 train loss:3.518248 +step:6429 train loss:3.486742 +step:6430 train loss:3.461157 +step:6431 train loss:3.438172 +step:6432 train loss:3.469595 +step:6433 train loss:3.480719 +step:6434 train loss:3.366135 +step:6435 train loss:3.545839 +step:6436 train loss:3.478533 +step:6437 train loss:3.439807 +step:6438 train loss:3.473608 +step:6439 train loss:3.439388 +step:6440 train loss:3.459425 +step:6441 train loss:3.451780 +step:6442 train loss:3.394369 +step:6443 train loss:3.453697 +step:6444 train loss:3.587903 +step:6445 train loss:3.493053 +step:6446 train loss:3.496429 +step:6447 train loss:3.479183 +step:6448 train loss:3.426558 +step:6449 train loss:3.449645 +step:6450 train loss:3.435378 +step:6451 train loss:3.421061 +step:6452 train loss:3.426909 +step:6453 train loss:3.466603 +step:6454 train loss:3.493793 +step:6455 train loss:3.481086 +step:6456 train loss:3.501356 +step:6457 train loss:3.477149 +step:6458 train loss:3.450803 +step:6459 train loss:3.433144 +step:6460 train loss:3.438381 +step:6461 train loss:3.441532 +step:6462 train loss:3.434218 +step:6463 train loss:3.532077 +step:6464 train loss:3.440038 +step:6465 train loss:3.478776 +step:6466 train loss:3.496520 +step:6467 train loss:3.418639 +step:6468 train loss:3.499071 +step:6469 train loss:3.405700 +step:6470 train loss:3.530739 +step:6471 train loss:3.437558 +step:6472 train loss:3.595677 +step:6473 train loss:3.478032 +step:6474 train loss:3.511161 +step:6475 train loss:3.457320 +step:6476 train loss:3.524493 +step:6477 train loss:3.458072 +step:6478 train loss:3.586898 +step:6479 train loss:3.503590 +step:6480 train loss:3.438197 +step:6481 train loss:3.494255 +step:6482 train loss:3.434577 +step:6483 train loss:3.496879 +step:6484 train loss:3.454685 +step:6485 train loss:3.515017 +step:6486 train loss:3.447455 +step:6487 train loss:3.443382 +step:6488 train loss:3.441736 +step:6489 train loss:3.442846 +step:6490 train loss:3.469457 +step:6491 train loss:3.438382 +step:6492 train loss:3.540268 +step:6493 train loss:3.447465 +step:6494 train loss:3.448142 +step:6495 train loss:3.450405 +step:6496 train loss:3.478776 +step:6497 train loss:3.497293 +step:6498 train loss:3.604038 +step:6499 train loss:3.578751 +step:6500 validation loss:3.405749 total_sharp:5.2645e-05 L1_sharp:1.1237e-05 L2_sharp:8.2631e-06 L3_sharp:9.4027e-06 L4_sharp:4.9820e-06 L5_sharp:8.5306e-06 L6_sharp:9.3514e-06 L7_sharp:1.1568e-05 L8_sharp:9.7225e-06 L9_sharp:1.0627e-05 L10_sharp:5.8253e-06 L11_sharp:5.0456e-06 L12_sharp:5.6809e-06 total_fnorm:1.0372e+01 total_l1_linf:7.7962e+04 total_spectral:1.0372e+01 L1_fnorm:2.9354e+00 L2_fnorm:2.7767e+00 L3_fnorm:2.8529e+00 L4_fnorm:2.9579e+00 L5_fnorm:2.9896e+00 L6_fnorm:3.0036e+00 L7_fnorm:3.0196e+00 L8_fnorm:3.0180e+00 L9_fnorm:3.0138e+00 L10_fnorm:3.0250e+00 L11_fnorm:3.0111e+00 L12_fnorm:3.0142e+00 L1_l1linf:1.9884e+00 L2_l1linf:1.9608e+00 L3_l1linf:2.0518e+00 L4_l1linf:2.0978e+00 L5_l1linf:2.0531e+00 L6_l1linf:2.0541e+00 L7_l1linf:2.0547e+00 L8_l1linf:2.0238e+00 L9_l1linf:2.0132e+00 L10_l1linf:2.0282e+00 L11_l1linf:1.9943e+00 L12_l1linf:1.9646e+00 L1_spectral:6.0207e-02 L2_spectral:6.0212e-02 
L3_spectral:6.0224e-02 L4_spectral:6.0282e-02 L5_spectral:6.0235e-02 L6_spectral:6.0221e-02 L7_spectral:6.0223e-02 L8_spectral:6.0211e-02 L9_spectral:6.0220e-02 L10_spectral:6.0197e-02 L11_spectral:6.0213e-02 L12_spectral:6.0225e-02 ip_v_neg_g:3.2884e-03 cos_v_neg_g:9.1988e-04 v_norm:1.0372e+01 g_norm:3.4464e-01 hv_norm:5.8788e-02 cos_v_hv:9.2887e-03 hg_norm:8.4890e-01 cos_g_hg:3.9046e-01 v_par:1.2149e-03 v_perp:1.0372e+01 L1_cos_v_neg_g:2.4532e-03 L1_v_norm:2.9354e+00 L2_cos_v_neg_g:1.4358e-03 L2_v_norm:2.7767e+00 L3_cos_v_neg_g:2.0918e-03 L3_v_norm:2.8529e+00 L4_cos_v_neg_g:2.6236e-03 L4_v_norm:2.9579e+00 L5_cos_v_neg_g:3.0208e-03 L5_v_norm:2.9896e+00 L6_cos_v_neg_g:3.5048e-03 L6_v_norm:3.0036e+00 L7_cos_v_neg_g:4.4711e-03 L7_v_norm:3.0196e+00 L8_cos_v_neg_g:3.9274e-03 L8_v_norm:3.0180e+00 L9_cos_v_neg_g:3.8513e-03 L9_v_norm:3.0138e+00 L10_cos_v_neg_g:2.4490e-03 L10_v_norm:3.0250e+00 L11_cos_v_neg_g:2.1379e-03 L11_v_norm:3.0111e+00 L12_cos_v_neg_g:2.4949e-03 L12_v_norm:3.0142e+00 +step:6500 train loss:3.421971 +step:6501 train loss:3.440390 +step:6502 train loss:3.461768 +step:6503 train loss:3.520000 +step:6504 train loss:3.469344 +step:6505 train loss:3.474018 +step:6506 train loss:3.436688 +step:6507 train loss:3.501891 +step:6508 train loss:3.472787 +step:6509 train loss:3.452800 +step:6510 train loss:3.461767 +step:6511 train loss:3.476847 +step:6512 train loss:3.417882 +step:6513 train loss:3.485548 +step:6514 train loss:3.363298 +step:6515 train loss:3.452793 +step:6516 train loss:3.502291 +step:6517 train loss:3.414034 +step:6518 train loss:3.457696 +step:6519 train loss:3.447529 +step:6520 train loss:3.535644 +step:6521 train loss:3.512325 +step:6522 train loss:3.521260 +step:6523 train loss:3.417192 +step:6524 train loss:3.500147 +step:6525 train loss:3.489382 +step:6526 train loss:3.419823 +step:6527 train loss:3.480771 +step:6528 train loss:3.499586 +step:6529 train loss:3.525074 +step:6530 train loss:3.428788 +step:6531 train loss:3.506957 +step:6532 train loss:3.436249 +step:6533 train loss:3.477334 +step:6534 train loss:3.482282 +step:6535 train loss:3.457530 +step:6536 train loss:3.591197 +step:6537 train loss:3.398166 +step:6538 train loss:3.507381 +step:6539 train loss:3.433003 +step:6540 train loss:3.544316 +step:6541 train loss:3.525575 +step:6542 train loss:3.482657 +step:6543 train loss:3.435124 +step:6544 train loss:3.418786 +step:6545 train loss:3.409288 +step:6546 train loss:3.465586 +step:6547 train loss:3.527259 +step:6548 train loss:3.463440 +step:6549 train loss:3.481832 +step:6550 train loss:3.596246 +step:6551 train loss:3.475195 +step:6552 train loss:3.466336 +step:6553 train loss:3.505437 +step:6554 train loss:3.396719 +step:6555 train loss:3.484332 +step:6556 train loss:3.353698 +step:6557 train loss:3.701488 +step:6558 train loss:3.532129 +step:6559 train loss:3.447169 +step:6560 train loss:3.485185 +step:6561 train loss:3.454858 +step:6562 train loss:3.478570 +step:6563 train loss:3.365047 +step:6564 train loss:3.475379 +step:6565 train loss:3.377692 +step:6566 train loss:3.491148 +step:6567 train loss:3.459853 +step:6568 train loss:3.506329 +step:6569 train loss:3.452994 +step:6570 train loss:3.491647 +step:6571 train loss:3.419896 +step:6572 train loss:3.495607 +step:6573 train loss:3.509694 +step:6574 train loss:3.495646 +step:6575 train loss:3.441655 +step:6576 train loss:3.430409 +step:6577 train loss:3.499563 +step:6578 train loss:3.369609 +step:6579 train loss:3.470775 +step:6580 train loss:3.425137 +step:6581 train loss:3.441079 +step:6582 
train loss:3.416278 +step:6583 train loss:3.518741 +step:6584 train loss:3.447800 +step:6585 train loss:3.486080 +step:6586 train loss:3.492654 +step:6587 train loss:3.502028 +step:6588 train loss:3.466872 +step:6589 train loss:3.495682 +step:6590 train loss:3.434728 +step:6591 train loss:3.486358 +step:6592 train loss:3.426321 +step:6593 train loss:3.435983 +step:6594 train loss:3.462880 +step:6595 train loss:3.443541 +step:6596 train loss:3.441079 +step:6597 train loss:3.466871 +step:6598 train loss:3.509626 +step:6599 train loss:3.404415 +step:6600 train loss:3.459094 +step:6601 train loss:3.515099 +step:6602 train loss:3.441446 +step:6603 train loss:3.467157 +step:6604 train loss:3.479754 +step:6605 train loss:3.461151 +step:6606 train loss:3.520962 +step:6607 train loss:3.438878 +step:6608 train loss:3.451178 +step:6609 train loss:3.427602 +step:6610 train loss:3.533040 +step:6611 train loss:3.458114 +step:6612 train loss:3.500982 +step:6613 train loss:3.416305 +step:6614 train loss:3.444546 +step:6615 train loss:3.448085 +step:6616 train loss:3.429662 +step:6617 train loss:3.463494 +step:6618 train loss:3.454205 +step:6619 train loss:3.426431 +step:6620 train loss:3.531627 +step:6621 train loss:3.406030 +step:6622 train loss:3.481846 +step:6623 train loss:3.411729 +step:6624 train loss:3.484199 +step:6625 train loss:3.527774 +step:6626 train loss:3.490123 +step:6627 train loss:3.439353 +step:6628 train loss:3.497665 +step:6629 train loss:3.400482 +step:6630 train loss:3.439068 +step:6631 train loss:3.473198 +step:6632 train loss:3.511390 +step:6633 train loss:3.464001 +step:6634 train loss:3.524288 +step:6635 train loss:3.424742 +step:6636 train loss:3.467935 +step:6637 train loss:3.436000 +step:6638 train loss:3.436215 +step:6639 train loss:3.446600 +step:6640 train loss:3.435711 +step:6641 train loss:3.453477 +step:6642 train loss:3.446274 +step:6643 train loss:3.530753 +step:6644 train loss:3.531194 +step:6645 train loss:3.405563 +step:6646 train loss:3.496435 +step:6647 train loss:3.452821 +step:6648 train loss:3.556746 +step:6649 train loss:3.485250 +step:6650 train loss:3.433642 +step:6651 train loss:3.482698 +step:6652 train loss:3.495286 +step:6653 train loss:3.440941 +step:6654 train loss:3.432564 +step:6655 train loss:3.475999 +step:6656 train loss:3.445633 +step:6657 train loss:3.472339 +step:6658 train loss:3.454991 +step:6659 train loss:3.606606 +step:6660 train loss:3.507012 +step:6661 train loss:3.430967 +step:6662 train loss:3.465022 +step:6663 train loss:3.397510 +step:6664 train loss:3.475451 +step:6665 train loss:3.486330 +step:6666 train loss:3.502044 +step:6667 train loss:3.417210 +step:6668 train loss:3.542266 +step:6669 train loss:3.427610 +step:6670 train loss:3.435646 +step:6671 train loss:3.519382 +step:6672 train loss:3.469684 +step:6673 train loss:3.479161 +step:6674 train loss:3.452304 +step:6675 train loss:3.472950 +step:6676 train loss:3.480255 +step:6677 train loss:3.435286 +step:6678 train loss:3.504816 +step:6679 train loss:3.544288 +step:6680 train loss:3.545616 +step:6681 train loss:3.496021 +step:6682 train loss:3.436979 +step:6683 train loss:3.460001 +step:6684 train loss:3.473516 +step:6685 train loss:3.486743 +step:6686 train loss:3.419103 +step:6687 train loss:3.438077 +step:6688 train loss:3.484303 +step:6689 train loss:3.492419 +step:6690 train loss:3.464329 +step:6691 train loss:3.499768 +step:6692 train loss:3.507490 +step:6693 train loss:3.538598 +step:6694 train loss:3.491461 +step:6695 train loss:3.465661 +step:6696 train loss:3.402737 
+step:6697 train loss:3.619795 +step:6698 train loss:3.464010 +step:6699 train loss:3.461183 +step:6700 train loss:3.473576 +step:6701 train loss:3.530777 +step:6702 train loss:3.419022 +step:6703 train loss:3.469437 +step:6704 train loss:3.452235 +step:6705 train loss:3.466789 +step:6706 train loss:3.442410 +step:6707 train loss:3.516968 +step:6708 train loss:3.468404 +step:6709 train loss:3.498361 +step:6710 train loss:3.487698 +step:6711 train loss:3.439214 +step:6712 train loss:3.427249 +step:6713 train loss:3.451720 +step:6714 train loss:3.496377 +step:6715 train loss:3.436339 +step:6716 train loss:3.518176 +step:6717 train loss:3.458121 +step:6718 train loss:3.482221 +step:6719 train loss:3.514477 +step:6720 train loss:3.447807 +step:6721 train loss:3.463525 +step:6722 train loss:3.440639 +step:6723 train loss:3.567404 +step:6724 train loss:3.427648 +step:6725 train loss:3.485688 +step:6726 train loss:3.442559 +step:6727 train loss:3.504863 +step:6728 train loss:3.602704 +step:6729 train loss:3.463472 +step:6730 train loss:3.458159 +step:6731 train loss:3.500522 +step:6732 train loss:3.376391 +step:6733 train loss:3.512205 +step:6734 train loss:3.445967 +step:6735 train loss:3.467179 +step:6736 train loss:3.466941 +step:6737 train loss:3.466558 +step:6738 train loss:3.495393 +step:6739 train loss:3.454665 +step:6740 train loss:3.402305 +step:6741 train loss:3.514909 +step:6742 train loss:3.473224 +step:6743 train loss:3.480604 +step:6744 train loss:3.368921 +step:6745 train loss:3.528800 +step:6746 train loss:3.450909 +step:6747 train loss:3.450342 +step:6748 train loss:3.521194 +step:6749 train loss:3.504965 +step:6750 validation loss:3.399230 +step:6750 train loss:3.421961 +step:6751 train loss:3.458292 +step:6752 train loss:3.459006 +step:6753 train loss:3.496423 +step:6754 train loss:3.475849 +step:6755 train loss:3.490100 +step:6756 train loss:3.429346 +step:6757 train loss:3.397233 +step:6758 train loss:3.573267 +step:6759 train loss:3.465338 +step:6760 train loss:3.521113 +step:6761 train loss:3.452079 +step:6762 train loss:3.472742 +step:6763 train loss:3.375326 +step:6764 train loss:3.456459 +step:6765 train loss:3.462573 +step:6766 train loss:3.455313 +step:6767 train loss:3.407144 +step:6768 train loss:3.413244 +step:6769 train loss:3.374810 +step:6770 train loss:3.462204 +step:6771 train loss:3.463628 +step:6772 train loss:3.474916 +step:6773 train loss:3.451808 +step:6774 train loss:3.474223 +step:6775 train loss:3.508017 +step:6776 train loss:3.465143 +step:6777 train loss:3.540037 +step:6778 train loss:3.427281 +step:6779 train loss:3.476610 +step:6780 train loss:3.411193 +step:6781 train loss:3.474575 +step:6782 train loss:3.391606 +step:6783 train loss:3.419985 +step:6784 train loss:3.449967 +step:6785 train loss:3.432738 +step:6786 train loss:3.452026 +step:6787 train loss:3.525512 +step:6788 train loss:3.464470 +step:6789 train loss:3.473523 +step:6790 train loss:3.470854 +step:6791 train loss:3.483416 +step:6792 train loss:3.481651 +step:6793 train loss:3.480400 +step:6794 train loss:3.447745 +step:6795 train loss:3.450907 +step:6796 train loss:3.457114 +step:6797 train loss:3.551348 +step:6798 train loss:3.455425 +step:6799 train loss:3.445093 +step:6800 train loss:3.412366 +step:6801 train loss:3.548723 +step:6802 train loss:3.496515 +step:6803 train loss:3.485946 +step:6804 train loss:3.509476 +step:6805 train loss:3.474954 +step:6806 train loss:3.407305 +step:6807 train loss:3.466437 +step:6808 train loss:3.452186 +step:6809 train loss:3.478342 +step:6810 
train loss:3.600660 +step:6811 train loss:3.502563 +step:6812 train loss:3.471697 +step:6813 train loss:3.488197 +step:6814 train loss:3.495679 +step:6815 train loss:3.544345 +step:6816 train loss:3.456578 +step:6817 train loss:3.485175 +step:6818 train loss:3.461481 +step:6819 train loss:3.446290 +step:6820 train loss:3.474154 +step:6821 train loss:3.437719 +step:6822 train loss:3.539673 +step:6823 train loss:3.522242 +step:6824 train loss:3.499049 +step:6825 train loss:3.445807 +step:6826 train loss:3.488545 +step:6827 train loss:3.479128 +step:6828 train loss:3.491497 +step:6829 train loss:3.477735 +step:6830 train loss:3.444928 +step:6831 train loss:3.408061 +step:6832 train loss:3.393299 +step:6833 train loss:3.409204 +step:6834 train loss:3.496744 +step:6835 train loss:3.468070 +step:6836 train loss:3.386753 +step:6837 train loss:3.453358 +step:6838 train loss:3.513466 +step:6839 train loss:3.597470 +step:6840 train loss:3.471497 +step:6841 train loss:3.422434 +step:6842 train loss:3.473201 +step:6843 train loss:3.577936 +step:6844 train loss:3.457477 +step:6845 train loss:3.511001 +step:6846 train loss:3.572614 +step:6847 train loss:3.507380 +step:6848 train loss:3.492794 +step:6849 train loss:3.519325 +step:6850 train loss:3.488631 +step:6851 train loss:3.420320 +step:6852 train loss:3.414369 +step:6853 train loss:3.404935 +step:6854 train loss:3.478289 +step:6855 train loss:3.451151 +step:6856 train loss:3.436233 +step:6857 train loss:3.489415 +step:6858 train loss:3.518030 +step:6859 train loss:3.426828 +step:6860 train loss:3.534433 +step:6861 train loss:3.566096 +step:6862 train loss:3.471587 +step:6863 train loss:3.468445 +step:6864 train loss:3.414680 +step:6865 train loss:3.484622 +step:6866 train loss:3.412640 +step:6867 train loss:3.591426 +step:6868 train loss:3.465262 +step:6869 train loss:3.495649 +step:6870 train loss:3.533491 +step:6871 train loss:3.454939 +step:6872 train loss:3.446701 +step:6873 train loss:3.465937 +step:6874 train loss:3.425176 +step:6875 train loss:3.429318 +step:6876 train loss:3.459241 +step:6877 train loss:3.501457 +step:6878 train loss:3.419078 +step:6879 train loss:3.460308 +step:6880 train loss:3.471173 +step:6881 train loss:3.430845 +step:6882 train loss:3.499508 +step:6883 train loss:3.506392 +step:6884 train loss:3.708210 +step:6885 train loss:3.478997 +step:6886 train loss:3.461520 +step:6887 train loss:3.402725 +step:6888 train loss:3.503529 +step:6889 train loss:3.384902 +step:6890 train loss:3.495532 +step:6891 train loss:3.505502 +step:6892 train loss:3.605704 +step:6893 train loss:3.435878 +step:6894 train loss:3.495888 +step:6895 train loss:3.496430 +step:6896 train loss:3.473538 +step:6897 train loss:3.427601 +step:6898 train loss:3.428070 +step:6899 train loss:3.514121 +step:6900 train loss:3.488750 +step:6901 train loss:3.438565 +step:6902 train loss:3.371199 +step:6903 train loss:3.414000 +step:6904 train loss:3.524161 +step:6905 train loss:3.563731 +step:6906 train loss:3.477576 +step:6907 train loss:3.498013 +step:6908 train loss:3.531395 +step:6909 train loss:3.526522 +step:6910 train loss:3.403405 +step:6911 train loss:3.534374 +step:6912 train loss:3.423932 +step:6913 train loss:3.461785 +step:6914 train loss:3.418062 +step:6915 train loss:3.447821 +step:6916 train loss:3.421562 +step:6917 train loss:3.547011 +step:6918 train loss:3.493358 +step:6919 train loss:3.485105 +step:6920 train loss:3.471191 +step:6921 train loss:3.535359 +step:6922 train loss:3.523285 +step:6923 train loss:3.392294 +step:6924 train loss:3.473242 
+step:6925 train loss:3.445364 +step:6926 train loss:3.486140 +step:6927 train loss:3.540824 +step:6928 train loss:3.426213 +step:6929 train loss:3.437241 +step:6930 train loss:3.471201 +step:6931 train loss:3.469349 +step:6932 train loss:3.704580 +step:6933 train loss:3.534073 +step:6934 train loss:3.476333 +step:6935 train loss:3.460769 +step:6936 train loss:3.497748 +step:6937 train loss:3.446333 +step:6938 train loss:3.507614 +step:6939 train loss:3.442560 +step:6940 train loss:3.498693 +step:6941 train loss:3.413495 +step:6942 train loss:3.500969 +step:6943 train loss:3.393926 +step:6944 train loss:3.484616 +step:6945 train loss:3.433303 +step:6946 train loss:3.515548 +step:6947 train loss:3.441033 +step:6948 train loss:3.433903 +step:6949 train loss:3.509699 +step:6950 train loss:3.502381 +step:6951 train loss:3.502780 +step:6952 train loss:3.433823 +step:6953 train loss:3.480413 +step:6954 train loss:3.544323 +step:6955 train loss:3.460269 +step:6956 train loss:3.495680 +step:6957 train loss:3.483859 +step:6958 train loss:3.445122 +step:6959 train loss:3.484914 +step:6960 train loss:3.449502 +step:6961 train loss:3.457287 +step:6962 train loss:3.439550 +step:6963 train loss:3.410858 +step:6964 train loss:3.450553 +step:6965 train loss:3.446165 +step:6966 train loss:3.485849 +step:6967 train loss:3.425546 +step:6968 train loss:3.466772 +step:6969 train loss:3.486168 +step:6970 train loss:3.458540 +step:6971 train loss:3.525276 +step:6972 train loss:3.471781 +step:6973 train loss:3.425029 +step:6974 train loss:3.560803 +step:6975 train loss:3.460116 +step:6976 train loss:3.436437 +step:6977 train loss:3.472513 +step:6978 train loss:3.465351 +step:6979 train loss:3.476148 +step:6980 train loss:3.450600 +step:6981 train loss:3.514023 +step:6982 train loss:3.464320 +step:6983 train loss:3.456852 +step:6984 train loss:3.572990 +step:6985 train loss:3.418478 +step:6986 train loss:3.409678 +step:6987 train loss:3.460933 +step:6988 train loss:3.464363 +step:6989 train loss:3.610550 +step:6990 train loss:3.473530 +step:6991 train loss:3.429039 +step:6992 train loss:3.478749 +step:6993 train loss:3.545663 +step:6994 train loss:3.488937 +step:6995 train loss:3.443005 +step:6996 train loss:3.442339 +step:6997 train loss:3.523721 +step:6998 train loss:3.422472 +step:6999 train loss:3.474438 +step:7000 validation loss:3.392740 total_sharp:5.5955e-05 L1_sharp:5.6476e-05 L2_sharp:1.2132e-05 L3_sharp:9.8435e-06 L4_sharp:6.4686e-06 L5_sharp:6.7471e-06 L6_sharp:8.6348e-06 L7_sharp:1.0061e-05 L8_sharp:1.0081e-05 L9_sharp:9.2085e-06 L10_sharp:5.9280e-06 L11_sharp:4.9379e-06 L12_sharp:6.6516e-06 total_fnorm:1.0369e+01 total_l1_linf:7.7918e+04 total_spectral:1.0369e+01 L1_fnorm:2.9349e+00 L2_fnorm:2.7567e+00 L3_fnorm:2.8549e+00 L4_fnorm:2.9578e+00 L5_fnorm:2.9894e+00 L6_fnorm:3.0030e+00 L7_fnorm:3.0201e+00 L8_fnorm:3.0172e+00 L9_fnorm:3.0162e+00 L10_fnorm:3.0275e+00 L11_fnorm:3.0152e+00 L12_fnorm:3.0145e+00 L1_l1linf:2.0148e+00 L2_l1linf:1.9544e+00 L3_l1linf:2.0418e+00 L4_l1linf:2.0875e+00 L5_l1linf:2.0525e+00 L6_l1linf:2.0506e+00 L7_l1linf:2.0561e+00 L8_l1linf:2.0258e+00 L9_l1linf:2.0229e+00 L10_l1linf:2.0394e+00 L11_l1linf:2.0080e+00 L12_l1linf:1.9739e+00 L1_spectral:6.0215e-02 L2_spectral:6.0244e-02 L3_spectral:6.0213e-02 L4_spectral:6.0312e-02 L5_spectral:6.0232e-02 L6_spectral:6.0227e-02 L7_spectral:6.0211e-02 L8_spectral:6.0240e-02 L9_spectral:6.0225e-02 L10_spectral:6.0207e-02 L11_spectral:6.0218e-02 L12_spectral:6.0228e-02 ip_v_neg_g:2.8179e-03 cos_v_neg_g:8.1578e-04 v_norm:1.0369e+01 
g_norm:3.3312e-01 hv_norm:5.9589e-02 cos_v_hv:9.7369e-03 hg_norm:4.7545e-01 cos_g_hg:3.3835e-01 v_par:6.6893e-04 v_perp:1.0369e+01 L1_cos_v_neg_g:3.6710e-03 L1_v_norm:2.9349e+00 L2_cos_v_neg_g:2.1892e-03 L2_v_norm:2.7567e+00 L3_cos_v_neg_g:2.2737e-03 L3_v_norm:2.8549e+00 L4_cos_v_neg_g:2.4886e-03 L4_v_norm:2.9578e+00 L5_cos_v_neg_g:2.1568e-03 L5_v_norm:2.9894e+00 L6_cos_v_neg_g:2.4341e-03 L6_v_norm:3.0030e+00 L7_cos_v_neg_g:2.8653e-03 L7_v_norm:3.0201e+00 L8_cos_v_neg_g:2.8022e-03 L8_v_norm:3.0172e+00 L9_cos_v_neg_g:2.7415e-03 L9_v_norm:3.0162e+00 L10_cos_v_neg_g:2.1994e-03 L10_v_norm:3.0275e+00 L11_cos_v_neg_g:2.9603e-03 L11_v_norm:3.0152e+00 L12_cos_v_neg_g:2.3764e-03 L12_v_norm:3.0145e+00 +step:7000 train loss:3.547664 +step:7001 train loss:3.467212 +step:7002 train loss:3.442405 +step:7003 train loss:3.467737 +step:7004 train loss:3.462889 +step:7005 train loss:3.447407 +step:7006 train loss:3.452934 +step:7007 train loss:3.504307 +step:7008 train loss:3.446679 +step:7009 train loss:3.487723 +step:7010 train loss:3.422742 +step:7011 train loss:3.477485 +step:7012 train loss:3.446977 +step:7013 train loss:3.524684 +step:7014 train loss:3.430978 +step:7015 train loss:3.492960 +step:7016 train loss:3.479625 +step:7017 train loss:3.445759 +step:7018 train loss:3.525419 +step:7019 train loss:3.447392 +step:7020 train loss:3.497278 +step:7021 train loss:3.442413 +step:7022 train loss:3.458904 +step:7023 train loss:3.472375 +step:7024 train loss:3.436325 +step:7025 train loss:3.486795 +step:7026 train loss:3.445445 +step:7027 train loss:3.507048 +step:7028 train loss:3.430506 +step:7029 train loss:3.420073 +step:7030 train loss:3.424389 +step:7031 train loss:3.478518 +step:7032 train loss:3.484479 +step:7033 train loss:3.459098 +step:7034 train loss:3.479882 +step:7035 train loss:3.530535 +step:7036 train loss:3.454347 +step:7037 train loss:3.476243 +step:7038 train loss:3.441843 +step:7039 train loss:3.492196 +step:7040 train loss:3.412282 +step:7041 train loss:3.506653 +step:7042 train loss:3.434425 +step:7043 train loss:3.407307 +step:7044 train loss:3.456477 +step:7045 train loss:3.455394 +step:7046 train loss:3.449418 +step:7047 train loss:3.485694 +step:7048 train loss:3.435861 +step:7049 train loss:3.444045 +step:7050 train loss:3.470309 +step:7051 train loss:3.484324 +step:7052 train loss:3.489134 +step:7053 train loss:3.448222 +step:7054 train loss:3.426861 +step:7055 train loss:3.495556 +step:7056 train loss:3.493700 +step:7057 train loss:3.421245 +step:7058 train loss:3.540096 +step:7059 train loss:3.440231 +step:7060 train loss:3.455982 +step:7061 train loss:3.428356 +step:7062 train loss:3.454198 +step:7063 train loss:3.511277 +step:7064 train loss:3.433319 +step:7065 train loss:3.486372 +step:7066 train loss:3.443918 +step:7067 train loss:3.483897 +step:7068 train loss:3.455139 +step:7069 train loss:3.420096 +step:7070 train loss:3.444469 +step:7071 train loss:3.416674 +step:7072 train loss:3.416200 +step:7073 train loss:3.410713 +step:7074 train loss:3.408314 +step:7075 train loss:3.425409 +step:7076 train loss:3.436610 +step:7077 train loss:3.444870 +step:7078 train loss:3.492604 +step:7079 train loss:3.504209 +step:7080 train loss:3.449839 +step:7081 train loss:3.468654 +step:7082 train loss:3.434935 +step:7083 train loss:3.468464 +step:7084 train loss:3.460275 +step:7085 train loss:3.420077 +step:7086 train loss:3.459929 +step:7087 train loss:3.435965 +step:7088 train loss:3.561672 +step:7089 train loss:3.449491 +step:7090 train loss:3.419403 +step:7091 train loss:3.430580 
+step:7092 train loss:3.410462 +step:7093 train loss:3.503982 +step:7094 train loss:3.425246 +step:7095 train loss:3.441959 +step:7096 train loss:3.456107 +step:7097 train loss:3.445482 +step:7098 train loss:3.471202 +step:7099 train loss:3.425235 +step:7100 train loss:3.458644 +step:7101 train loss:3.527651 +step:7102 train loss:3.419417 +step:7103 train loss:3.443431 +step:7104 train loss:3.476591 +step:7105 train loss:3.457808 +step:7106 train loss:3.439736 +step:7107 train loss:3.474044 +step:7108 train loss:3.541376 +step:7109 train loss:3.472052 +step:7110 train loss:3.500038 +step:7111 train loss:3.476658 +step:7112 train loss:3.469828 +step:7113 train loss:3.462814 +step:7114 train loss:3.479314 +step:7115 train loss:3.518292 +step:7116 train loss:3.449578 +step:7117 train loss:3.485434 +step:7118 train loss:3.499790 +step:7119 train loss:3.459044 +step:7120 train loss:3.514158 +step:7121 train loss:3.430749 +step:7122 train loss:3.433589 +step:7123 train loss:3.374658 +step:7124 train loss:3.530957 +step:7125 train loss:3.381193 +step:7126 train loss:3.551624 +step:7127 train loss:3.508049 +step:7128 train loss:3.456631 +step:7129 train loss:3.460140 +step:7130 train loss:3.450387 +step:7131 train loss:3.392168 +step:7132 train loss:3.430452 +step:7133 train loss:3.478158 +step:7134 train loss:3.407476 +step:7135 train loss:3.468000 +step:7136 train loss:3.447058 +step:7137 train loss:3.428679 +step:7138 train loss:3.416061 +step:7139 train loss:3.419316 +step:7140 train loss:3.452201 +step:7141 train loss:3.451442 +step:7142 train loss:3.447302 +step:7143 train loss:3.483536 +step:7144 train loss:3.433219 +step:7145 train loss:3.451610 +step:7146 train loss:3.460523 +step:7147 train loss:3.485307 +step:7148 train loss:3.483245 +step:7149 train loss:3.494000 +step:7150 train loss:3.467799 +step:7151 train loss:3.432784 +step:7152 train loss:3.402636 +step:7153 train loss:3.436194 +step:7154 train loss:3.459455 +step:7155 train loss:3.471603 +step:7156 train loss:3.443456 +step:7157 train loss:3.459430 +step:7158 train loss:3.419350 +step:7159 train loss:3.476267 +step:7160 train loss:3.482008 +step:7161 train loss:3.433258 +step:7162 train loss:3.478068 +step:7163 train loss:3.414713 +step:7164 train loss:3.452862 +step:7165 train loss:3.455957 +step:7166 train loss:3.512907 +step:7167 train loss:3.489904 +step:7168 train loss:3.467832 +step:7169 train loss:3.448488 +step:7170 train loss:3.475451 +step:7171 train loss:3.424171 +step:7172 train loss:3.589069 +step:7173 train loss:3.429501 +step:7174 train loss:3.473875 +step:7175 train loss:3.447697 +step:7176 train loss:3.460177 +step:7177 train loss:3.473162 +step:7178 train loss:3.475646 +step:7179 train loss:3.461361 +step:7180 train loss:3.460451 +step:7181 train loss:3.491583 +step:7182 train loss:3.439027 +step:7183 train loss:3.512137 +step:7184 train loss:3.599488 +step:7185 train loss:3.516447 +step:7186 train loss:3.457647 +step:7187 train loss:3.468000 +step:7188 train loss:3.454218 +step:7189 train loss:3.453013 +step:7190 train loss:3.455201 +step:7191 train loss:3.446907 +step:7192 train loss:3.475328 +step:7193 train loss:3.398366 +step:7194 train loss:3.459640 +step:7195 train loss:3.439830 +step:7196 train loss:3.484431 +step:7197 train loss:3.463153 +step:7198 train loss:3.522427 +step:7199 train loss:3.478699 +step:7200 train loss:3.470420 +step:7201 train loss:3.479467 +step:7202 train loss:3.455753 +step:7203 train loss:3.474258 +step:7204 train loss:3.441787 +step:7205 train loss:3.399678 +step:7206 train 
loss:3.426952 +step:7207 train loss:3.600369 +step:7208 train loss:3.436715 +step:7209 train loss:3.516294 +step:7210 train loss:3.455463 +step:7211 train loss:3.485691 +step:7212 train loss:3.568179 +step:7213 train loss:3.414517 +step:7214 train loss:3.486608 +step:7215 train loss:3.454017 +step:7216 train loss:3.502534 +step:7217 train loss:3.461273 +step:7218 train loss:3.549129 +step:7219 train loss:3.459155 +step:7220 train loss:3.539661 +step:7221 train loss:3.416652 +step:7222 train loss:3.499922 +step:7223 train loss:3.418545 +step:7224 train loss:3.479037 +step:7225 train loss:3.460291 +step:7226 train loss:3.425463 +step:7227 train loss:3.447183 +step:7228 train loss:3.436249 +step:7229 train loss:3.436603 +step:7230 train loss:3.424507 +step:7231 train loss:3.557421 +step:7232 train loss:3.426312 +step:7233 train loss:3.495547 +step:7234 train loss:3.480672 +step:7235 train loss:3.456622 +step:7236 train loss:3.489434 +step:7237 train loss:3.444940 +step:7238 train loss:3.484015 +step:7239 train loss:3.437943 +step:7240 train loss:3.437074 +step:7241 train loss:3.449453 +step:7242 train loss:3.431530 +step:7243 train loss:3.474868 +step:7244 train loss:3.446914 +step:7245 train loss:3.456060 +step:7246 train loss:3.495106 +step:7247 train loss:3.448061 +step:7248 train loss:3.489776 +step:7249 train loss:3.438293 +step:7250 validation loss:3.390396 +step:7250 train loss:3.461649 +step:7251 train loss:3.506496 +step:7252 train loss:3.416698 +step:7253 train loss:3.508584 +step:7254 train loss:3.444391 +step:7255 train loss:3.420837 +step:7256 train loss:3.458764 +step:7257 train loss:3.497337 +step:7258 train loss:3.455591 +step:7259 train loss:3.437034 +step:7260 train loss:3.521539 +step:7261 train loss:3.482310 +step:7262 train loss:3.437295 +step:7263 train loss:3.479730 +step:7264 train loss:3.463723 +step:7265 train loss:3.370064 +step:7266 train loss:3.492767 +step:7267 train loss:3.411473 +step:7268 train loss:3.476536 +step:7269 train loss:3.478804 +step:7270 train loss:3.437233 +step:7271 train loss:3.454080 +step:7272 train loss:3.458654 +step:7273 train loss:3.455288 +step:7274 train loss:3.432177 +step:7275 train loss:3.503069 +step:7276 train loss:3.410814 +step:7277 train loss:3.458073 +step:7278 train loss:3.431344 +step:7279 train loss:3.410632 +step:7280 train loss:3.480898 +step:7281 train loss:3.505631 +step:7282 train loss:3.501196 +step:7283 train loss:3.392330 +step:7284 train loss:3.433925 +step:7285 train loss:3.462604 +step:7286 train loss:3.594761 +step:7287 train loss:3.501955 +step:7288 train loss:3.456256 +step:7289 train loss:3.464404 +step:7290 train loss:3.507688 +step:7291 train loss:3.473065 +step:7292 train loss:3.539271 +step:7293 train loss:3.440052 +step:7294 train loss:3.524847 +step:7295 train loss:3.412261 +step:7296 train loss:3.411592 +step:7297 train loss:3.455583 +step:7298 train loss:3.432816 +step:7299 train loss:3.475131 +step:7300 train loss:3.459153 +step:7301 train loss:3.407023 +step:7302 train loss:3.553190 +step:7303 train loss:3.444257 +step:7304 train loss:3.389185 +step:7305 train loss:3.464591 +step:7306 train loss:3.494706 +step:7307 train loss:3.499047 +step:7308 train loss:3.450123 +step:7309 train loss:3.415435 +step:7310 train loss:3.444837 +step:7311 train loss:3.430294 +step:7312 train loss:3.470570 +step:7313 train loss:3.507138 +step:7314 train loss:3.401513 +step:7315 train loss:3.396942 +step:7316 train loss:3.539917 +step:7317 train loss:3.475717 +step:7318 train loss:3.416373 +step:7319 train loss:3.444346 
+step:7320 train loss:3.476532 +step:7321 train loss:3.503273 +step:7322 train loss:3.382941 +step:7323 train loss:3.441410 +step:7324 train loss:3.466487 +step:7325 train loss:3.429677 +step:7326 train loss:3.457438 +step:7327 train loss:3.434700 +step:7328 train loss:3.550359 +step:7329 train loss:3.392336 +step:7330 train loss:3.450974 +step:7331 train loss:3.444340 +step:7332 train loss:3.488688 +step:7333 train loss:3.467219 +step:7334 train loss:3.433629 +step:7335 train loss:3.435600 +step:7336 train loss:3.687936 +step:7337 train loss:3.473387 +step:7338 train loss:3.472710 +step:7339 train loss:3.479305 +step:7340 train loss:3.470501 +step:7341 train loss:3.458834 +step:7342 train loss:3.448160 +step:7343 train loss:3.462780 +step:7344 train loss:3.540963 +step:7345 train loss:3.402582 +step:7346 train loss:3.438752 +step:7347 train loss:3.431391 +step:7348 train loss:3.435652 +step:7349 train loss:3.534258 +step:7350 train loss:3.522273 +step:7351 train loss:3.456499 +step:7352 train loss:3.482039 +step:7353 train loss:3.467700 +step:7354 train loss:3.415136 +step:7355 train loss:3.595770 +step:7356 train loss:3.566606 +step:7357 train loss:3.488099 +step:7358 train loss:3.469277 +step:7359 train loss:3.441114 +step:7360 train loss:3.451991 +step:7361 train loss:3.405397 +step:7362 train loss:3.452737 +step:7363 train loss:3.465101 +step:7364 train loss:3.501566 +step:7365 train loss:3.484199 +step:7366 train loss:3.447128 +step:7367 train loss:3.525633 +step:7368 train loss:3.503794 +step:7369 train loss:3.496288 +step:7370 train loss:3.459404 +step:7371 train loss:3.418407 +step:7372 train loss:3.476822 +step:7373 train loss:3.498050 +step:7374 train loss:3.594506 +step:7375 train loss:3.416511 +step:7376 train loss:3.434573 +step:7377 train loss:3.480619 +step:7378 train loss:3.434028 +step:7379 train loss:3.558160 +step:7380 train loss:3.519961 +step:7381 train loss:3.484702 +step:7382 train loss:3.449212 +step:7383 train loss:3.544540 +step:7384 train loss:3.484348 +step:7385 train loss:3.443227 +step:7386 train loss:3.444176 +step:7387 train loss:3.492200 +step:7388 train loss:3.523389 +step:7389 train loss:3.466669 +step:7390 train loss:3.407832 +step:7391 train loss:3.443569 +step:7392 train loss:3.502997 +step:7393 train loss:3.468523 +step:7394 train loss:3.507825 +step:7395 train loss:3.396671 +step:7396 train loss:3.499897 +step:7397 train loss:3.423885 +step:7398 train loss:3.441217 +step:7399 train loss:3.486515 +step:7400 train loss:3.490997 +step:7401 train loss:3.408903 +step:7402 train loss:3.524135 +step:7403 train loss:3.412497 +step:7404 train loss:3.476572 +step:7405 train loss:3.605294 +step:7406 train loss:3.427207 +step:7407 train loss:3.475343 +step:7408 train loss:3.475092 +step:7409 train loss:3.449020 +step:7410 train loss:3.616435 +step:7411 train loss:3.460438 +step:7412 train loss:3.462751 +step:7413 train loss:3.518756 +step:7414 train loss:3.427677 +step:7415 train loss:3.485176 +step:7416 train loss:3.369263 +step:7417 train loss:3.490910 +step:7418 train loss:3.468088 +step:7419 train loss:3.438826 +step:7420 train loss:3.432924 +step:7421 train loss:3.464858 +step:7422 train loss:3.425961 +step:7423 train loss:3.565065 +step:7424 train loss:3.628380 +step:7425 train loss:3.511619 +step:7426 train loss:3.479262 +step:7427 train loss:3.449715 +step:7428 train loss:3.494446 +step:7429 train loss:3.485698 +step:7430 train loss:3.413855 +step:7431 train loss:3.419429 +step:7432 train loss:3.428305 +step:7433 train loss:3.525815 +step:7434 train 
loss:3.438029 +step:7435 train loss:3.525460 +step:7436 train loss:3.566070 +step:7437 train loss:3.388525 +step:7438 train loss:3.447071 +step:7439 train loss:3.459531 +step:7440 train loss:3.432482 +step:7441 train loss:3.403340 +step:7442 train loss:3.628431 +step:7443 train loss:3.450556 +step:7444 train loss:3.491222 +step:7445 train loss:3.425058 +step:7446 train loss:3.446809 +step:7447 train loss:3.372736 +step:7448 train loss:3.430217 +step:7449 train loss:3.439697 +step:7450 train loss:3.473666 +step:7451 train loss:3.508484 +step:7452 train loss:3.434139 +step:7453 train loss:3.459759 +step:7454 train loss:3.445518 +step:7455 train loss:3.457975 +step:7456 train loss:3.431054 +step:7457 train loss:3.440363 +step:7458 train loss:3.476260 +step:7459 train loss:3.452956 +step:7460 train loss:3.466040 +step:7461 train loss:3.498620 +step:7462 train loss:3.436728 +step:7463 train loss:3.498960 +step:7464 train loss:3.421132 +step:7465 train loss:3.430674 +step:7466 train loss:3.432775 +step:7467 train loss:3.441867 +step:7468 train loss:3.496263 +step:7469 train loss:3.423439 +step:7470 train loss:3.456293 +step:7471 train loss:3.444922 +step:7472 train loss:3.477408 +step:7473 train loss:3.420879 +step:7474 train loss:3.406326 +step:7475 train loss:3.436615 +step:7476 train loss:3.470273 +step:7477 train loss:3.449009 +step:7478 train loss:3.442138 +step:7479 train loss:3.456706 +step:7480 train loss:3.739024 +step:7481 train loss:3.387478 +step:7482 train loss:3.459278 +step:7483 train loss:3.453209 +step:7484 train loss:3.475458 +step:7485 train loss:3.460744 +step:7486 train loss:3.486866 +step:7487 train loss:3.477778 +step:7488 train loss:3.500093 +step:7489 train loss:3.494777 +step:7490 train loss:3.440696 +step:7491 train loss:3.462851 +step:7492 train loss:3.572567 +step:7493 train loss:3.545182 +step:7494 train loss:3.569369 +step:7495 train loss:3.439151 +step:7496 train loss:3.426657 +step:7497 train loss:3.526939 +step:7498 train loss:3.459233 +step:7499 train loss:3.498897 +step:7500 validation loss:3.386141 total_sharp:5.9247e-05 L1_sharp:1.6116e-05 L2_sharp:1.2620e-05 L3_sharp:9.8662e-06 L4_sharp:6.1503e-06 L5_sharp:7.5343e-06 L6_sharp:9.6833e-06 L7_sharp:1.0980e-05 L8_sharp:1.0222e-05 L9_sharp:1.0365e-05 L10_sharp:6.1502e-06 L11_sharp:5.4865e-06 L12_sharp:1.1623e-05 total_fnorm:1.0356e+01 total_l1_linf:7.7776e+04 total_spectral:1.0356e+01 L1_fnorm:2.9341e+00 L2_fnorm:2.7681e+00 L3_fnorm:2.8552e+00 L4_fnorm:2.9548e+00 L5_fnorm:2.9808e+00 L6_fnorm:2.9966e+00 L7_fnorm:3.0073e+00 L8_fnorm:3.0138e+00 L9_fnorm:3.0086e+00 L10_fnorm:3.0196e+00 L11_fnorm:3.0053e+00 L12_fnorm:3.0175e+00 L1_l1linf:1.9970e+00 L2_l1linf:1.9430e+00 L3_l1linf:2.0343e+00 L4_l1linf:2.0754e+00 L5_l1linf:2.0328e+00 L6_l1linf:2.0357e+00 L7_l1linf:2.0314e+00 L8_l1linf:2.0280e+00 L9_l1linf:1.9986e+00 L10_l1linf:2.0193e+00 L11_l1linf:1.9937e+00 L12_l1linf:2.0124e+00 L1_spectral:6.0200e-02 L2_spectral:6.0235e-02 L3_spectral:6.0221e-02 L4_spectral:6.0313e-02 L5_spectral:6.0215e-02 L6_spectral:6.0214e-02 L7_spectral:6.0233e-02 L8_spectral:6.0244e-02 L9_spectral:6.0221e-02 L10_spectral:6.0222e-02 L11_spectral:6.0216e-02 L12_spectral:6.0226e-02 ip_v_neg_g:3.8887e-03 cos_v_neg_g:1.0999e-03 v_norm:1.0356e+01 g_norm:3.4141e-01 hv_norm:8.0021e-02 cos_v_hv:7.6671e-03 hg_norm:6.5525e-01 cos_g_hg:3.5117e-01 v_par:1.6011e-03 v_perp:1.0356e+01 L1_cos_v_neg_g:2.1782e-03 L1_v_norm:2.9341e+00 L2_cos_v_neg_g:4.7639e-03 L2_v_norm:2.7681e+00 L3_cos_v_neg_g:2.9376e-03 L3_v_norm:2.8552e+00 L4_cos_v_neg_g:2.1742e-03 
L4_v_norm:2.9548e+00 L5_cos_v_neg_g:2.3178e-03 L5_v_norm:2.9808e+00 L6_cos_v_neg_g:2.6830e-03 L6_v_norm:2.9966e+00 L7_cos_v_neg_g:2.8665e-03 L7_v_norm:3.0073e+00 L8_cos_v_neg_g:3.9219e-03 L8_v_norm:3.0138e+00 L9_cos_v_neg_g:4.2951e-03 L9_v_norm:3.0086e+00 L10_cos_v_neg_g:3.8738e-03 L10_v_norm:3.0196e+00 L11_cos_v_neg_g:4.1898e-03 L11_v_norm:3.0053e+00 L12_cos_v_neg_g:3.8378e-03 L12_v_norm:3.0175e+00 +step:7500 train loss:3.443788 +step:7501 train loss:3.434137 +step:7502 train loss:3.426566 +step:7503 train loss:3.403780 +step:7504 train loss:3.427262 +step:7505 train loss:3.417666 +step:7506 train loss:3.476957 +step:7507 train loss:3.394376 +step:7508 train loss:3.465478 +step:7509 train loss:3.436340 +step:7510 train loss:3.463823 +step:7511 train loss:3.470068 +step:7512 train loss:3.739335 +step:7513 train loss:3.421946 +step:7514 train loss:3.453113 +step:7515 train loss:3.417526 +step:7516 train loss:3.431136 +step:7517 train loss:3.465542 +step:7518 train loss:3.440221 +step:7519 train loss:3.452932 +step:7520 train loss:3.517054 +step:7521 train loss:3.402560 +step:7522 train loss:3.460895 +step:7523 train loss:3.494097 +step:7524 train loss:3.441605 +step:7525 train loss:3.443339 +step:7526 train loss:3.392183 +step:7527 train loss:3.401372 +step:7528 train loss:3.499434 +step:7529 train loss:3.475392 +step:7530 train loss:3.422025 +step:7531 train loss:3.498979 +step:7532 train loss:3.486670 +step:7533 train loss:3.414438 +step:7534 train loss:3.478168 +step:7535 train loss:3.478347 +step:7536 train loss:3.512086 +step:7537 train loss:3.539296 +step:7538 train loss:3.560002 +step:7539 train loss:3.457487 +step:7540 train loss:3.443659 +step:7541 train loss:3.497847 +step:7542 train loss:3.460458 +step:7543 train loss:3.415930 +step:7544 train loss:3.459349 +step:7545 train loss:3.447216 +step:7546 train loss:3.403822 +step:7547 train loss:3.447443 +step:7548 train loss:3.463853 +step:7549 train loss:3.446407 +step:7550 train loss:3.441903 +step:7551 train loss:3.545709 +step:7552 train loss:3.454949 +step:7553 train loss:3.491386 +step:7554 train loss:3.416399 +step:7555 train loss:3.511251 +step:7556 train loss:3.411721 +step:7557 train loss:3.505311 +step:7558 train loss:3.495972 +step:7559 train loss:3.451365 +step:7560 train loss:3.546181 +step:7561 train loss:3.514738 +step:7562 train loss:3.423698 +step:7563 train loss:3.418409 +step:7564 train loss:3.469073 +step:7565 train loss:3.487549 +step:7566 train loss:3.483365 +step:7567 train loss:3.494023 +step:7568 train loss:3.440934 +step:7569 train loss:3.502616 +step:7570 train loss:3.482306 +step:7571 train loss:3.565093 +step:7572 train loss:3.419031 +step:7573 train loss:3.484232 +step:7574 train loss:3.445468 +step:7575 train loss:3.441434 +step:7576 train loss:3.447194 +step:7577 train loss:3.464158 +step:7578 train loss:3.522683 +step:7579 train loss:3.456613 +step:7580 train loss:3.447835 +step:7581 train loss:3.432554 +step:7582 train loss:3.488645 +step:7583 train loss:3.422818 +step:7584 train loss:3.406436 +step:7585 train loss:3.375977 +step:7586 train loss:3.412444 +step:7587 train loss:3.473456 +step:7588 train loss:3.606535 +step:7589 train loss:3.425421 +step:7590 train loss:3.491817 +step:7591 train loss:3.496178 +step:7592 train loss:3.454302 +step:7593 train loss:3.479627 +step:7594 train loss:3.479807 +step:7595 train loss:3.448029 +step:7596 train loss:3.500239 +step:7597 train loss:3.405055 +step:7598 train loss:3.468757 +step:7599 train loss:3.456055 +step:7600 train loss:3.419099 +step:7601 train 
loss:3.529460 +step:7602 train loss:3.470554 +step:7603 train loss:3.432981 +step:7604 train loss:3.579935 +step:7605 train loss:3.463587 +step:7606 train loss:3.497545 +step:7607 train loss:3.449840 +step:7608 train loss:3.461668 +step:7609 train loss:3.499523 +step:7610 train loss:3.454674 +step:7611 train loss:3.432633 +step:7612 train loss:3.376931 +step:7613 train loss:3.424990 +step:7614 train loss:3.495593 +step:7615 train loss:3.453427 +step:7616 train loss:3.519848 +step:7617 train loss:3.419313 +step:7618 train loss:3.508075 +step:7619 train loss:3.448118 +step:7620 train loss:3.438868 +step:7621 train loss:3.383849 +step:7622 train loss:3.661064 +step:7623 train loss:3.673562 +step:7624 train loss:3.487590 +step:7625 train loss:3.526130 +step:7626 train loss:3.444625 +step:7627 train loss:3.515044 +step:7628 train loss:3.396854 +step:7629 train loss:3.457635 +step:7630 train loss:3.468924 +step:7631 train loss:3.449450 +step:7632 train loss:3.502669 +step:7633 train loss:3.569507 +step:7634 train loss:3.530203 +step:7635 train loss:3.437604 +step:7636 train loss:3.462560 +step:7637 train loss:3.407877 +step:7638 train loss:3.521095 +step:7639 train loss:3.450150 +step:7640 train loss:3.430079 +step:7641 train loss:3.460608 +step:7642 train loss:3.797840 +step:7643 train loss:3.547847 +step:7644 train loss:3.473767 +step:7645 train loss:3.456871 +step:7646 train loss:3.445548 +step:7647 train loss:3.439063 +step:7648 train loss:3.472416 +step:7649 train loss:3.431388 +step:7650 train loss:3.481750 +step:7651 train loss:3.503385 +step:7652 train loss:3.381372 +step:7653 train loss:3.585975 +step:7654 train loss:3.433104 +step:7655 train loss:3.456082 +step:7656 train loss:3.430655 +step:7657 train loss:3.442050 +step:7658 train loss:3.398003 +step:7659 train loss:3.462762 +step:7660 train loss:3.395658 +step:7661 train loss:3.413428 +step:7662 train loss:3.414168 +step:7663 train loss:3.462721 +step:7664 train loss:3.424417 +step:7665 train loss:3.395123 +step:7666 train loss:3.503188 +step:7667 train loss:3.418821 +step:7668 train loss:3.526957 +step:7669 train loss:3.462802 +step:7670 train loss:3.415595 +step:7671 train loss:3.469431 +step:7672 train loss:3.488294 +step:7673 train loss:3.454010 +step:7674 train loss:3.491485 +step:7675 train loss:3.548463 +step:7676 train loss:3.516898 +step:7677 train loss:3.541485 +step:7678 train loss:3.481566 +step:7679 train loss:3.502589 +step:7680 train loss:3.510227 +step:7681 train loss:3.475552 +step:7682 train loss:3.447014 +step:7683 train loss:3.447672 +step:7684 train loss:3.421282 +step:7685 train loss:3.401465 +step:7686 train loss:3.523248 +step:7687 train loss:3.435050 +step:7688 train loss:3.403779 +step:7689 train loss:3.453084 +step:7690 train loss:3.419269 +step:7691 train loss:3.447669 +step:7692 train loss:3.481368 +step:7693 train loss:3.482223 +step:7694 train loss:3.534343 +step:7695 train loss:3.461795 +step:7696 train loss:3.436916 +step:7697 train loss:3.425983 +step:7698 train loss:3.484871 +step:7699 train loss:3.481501 +step:7700 train loss:3.380228 +step:7701 train loss:3.497177 +step:7702 train loss:3.439467 +step:7703 train loss:3.442275 +step:7704 train loss:3.494654 +step:7705 train loss:3.453537 +step:7706 train loss:3.389248 +step:7707 train loss:3.510223 +step:7708 train loss:3.447910 +step:7709 train loss:3.468100 +step:7710 train loss:3.528456 +step:7711 train loss:3.489861 +step:7712 train loss:3.436058 +step:7713 train loss:3.514437 +step:7714 train loss:3.460919 +step:7715 train loss:3.412632 
+step:7716 train loss:3.452806 +step:7717 train loss:3.476480 +step:7718 train loss:3.482270 +step:7719 train loss:3.439729 +step:7720 train loss:3.453726 +step:7721 train loss:3.494319 +step:7722 train loss:3.425766 +step:7723 train loss:3.797969 +step:7724 train loss:3.460331 +step:7725 train loss:3.370954 +step:7726 train loss:3.444825 +step:7727 train loss:3.472950 +step:7728 train loss:3.423781 +step:7729 train loss:3.431312 +step:7730 train loss:3.456897 +step:7731 train loss:3.484871 +step:7732 train loss:3.510307 +step:7733 train loss:3.416150 +step:7734 train loss:3.444532 +step:7735 train loss:3.531236 +step:7736 train loss:3.477749 +step:7737 train loss:3.492513 +step:7738 train loss:3.394778 +step:7739 train loss:3.474796 +step:7740 train loss:3.422892 +step:7741 train loss:3.456827 +step:7742 train loss:3.458290 +step:7743 train loss:3.410191 +step:7744 train loss:3.536481 +step:7745 train loss:3.423188 +step:7746 train loss:3.396224 +step:7747 train loss:3.493137 +step:7748 train loss:3.474725 +step:7749 train loss:3.399175 +step:7750 validation loss:3.381389 +step:7750 train loss:3.557483 +step:7751 train loss:3.440490 +step:7752 train loss:3.433155 +step:7753 train loss:3.437994 +step:7754 train loss:3.409067 +step:7755 train loss:3.478141 +step:7756 train loss:3.504403 +step:7757 train loss:3.452583 +step:7758 train loss:3.423293 +step:7759 train loss:3.452409 +step:7760 train loss:3.478756 +step:7761 train loss:3.471116 +step:7762 train loss:3.454860 +step:7763 train loss:3.439843 +step:7764 train loss:3.445263 +step:7765 train loss:3.401187 +step:7766 train loss:3.465534 +step:7767 train loss:3.467343 +step:7768 train loss:3.423173 +step:7769 train loss:3.489429 +step:7770 train loss:3.504991 +step:7771 train loss:3.479272 +step:7772 train loss:3.451099 +step:7773 train loss:3.512829 +step:7774 train loss:3.409567 +step:7775 train loss:3.396302 +step:7776 train loss:3.503098 +step:7777 train loss:3.454872 +step:7778 train loss:3.413373 +step:7779 train loss:3.454753 +step:7780 train loss:3.449446 +step:7781 train loss:3.460987 +step:7782 train loss:3.445095 +step:7783 train loss:3.425366 +step:7784 train loss:3.423933 +step:7785 train loss:3.466031 +step:7786 train loss:3.421150 +step:7787 train loss:3.501209 +step:7788 train loss:3.453314 +step:7789 train loss:3.389080 +step:7790 train loss:3.450369 +step:7791 train loss:3.479528 +step:7792 train loss:3.439626 +step:7793 train loss:3.460592 +step:7794 train loss:3.450082 +step:7795 train loss:3.479391 +step:7796 train loss:3.447125 +step:7797 train loss:3.464161 +step:7798 train loss:3.456278 +step:7799 train loss:3.445083 +step:7800 train loss:3.402683 +step:7801 train loss:3.464880 +step:7802 train loss:3.447244 +step:7803 train loss:3.498316 +step:7804 train loss:3.459993 +step:7805 train loss:3.456034 +step:7806 train loss:3.473165 +step:7807 train loss:3.545019 +step:7808 train loss:3.404531 +step:7809 train loss:3.382449 +step:7810 train loss:3.471502 +step:7811 train loss:3.402132 +step:7812 train loss:3.421936 +step:7813 train loss:3.509576 +step:7814 train loss:3.581414 +step:7815 train loss:3.393457 +step:7816 train loss:3.477887 +step:7817 train loss:3.508139 +step:7818 train loss:3.406270 +step:7819 train loss:3.458283 +step:7820 train loss:3.500422 +step:7821 train loss:3.434190 +step:7822 train loss:3.393878 +step:7823 train loss:3.469103 +step:7824 train loss:3.444160 +step:7825 train loss:3.431139 +step:7826 train loss:3.429749 +step:7827 train loss:3.471446 +step:7828 train loss:3.461226 +step:7829 
train loss:3.418716 +step:7830 train loss:3.428156 +step:7831 train loss:3.429581 +step:7832 train loss:3.500401 +step:7833 train loss:3.477870 +step:7834 train loss:3.441911 +step:7835 train loss:3.467811 +step:7836 train loss:3.573660 +step:7837 train loss:3.464278 +step:7838 train loss:3.434043 +step:7839 train loss:3.393527 +step:7840 train loss:3.409287 +step:7841 train loss:3.506567 +step:7842 train loss:3.491581 +step:7843 train loss:3.543327 +step:7844 train loss:3.470972 +step:7845 train loss:3.449248 +step:7846 train loss:3.563272 +step:7847 train loss:3.454035 +step:7848 train loss:3.465009 +step:7849 train loss:3.476362 +step:7850 train loss:3.448861 +step:7851 train loss:3.476341 +step:7852 train loss:3.449752 +step:7853 train loss:3.420911 +step:7854 train loss:3.451149 +step:7855 train loss:3.450515 +step:7856 train loss:3.455927 +step:7857 train loss:3.445425 +step:7858 train loss:3.451160 +step:7859 train loss:3.460098 +step:7860 train loss:3.495785 +step:7861 train loss:3.483003 +step:7862 train loss:3.427672 +step:7863 train loss:3.529685 +step:7864 train loss:3.370562 +step:7865 train loss:3.449038 +step:7866 train loss:3.422705 +step:7867 train loss:3.470424 +step:7868 train loss:3.446693 +step:7869 train loss:3.446537 +step:7870 train loss:3.365236 +step:7871 train loss:3.434893 +step:7872 train loss:3.422016 +step:7873 train loss:3.502367 +step:7874 train loss:3.444383 +step:7875 train loss:3.450231 +step:7876 train loss:3.468855 +step:7877 train loss:3.422674 +step:7878 train loss:3.457950 +step:7879 train loss:3.801811 +step:7880 train loss:3.451804 +step:7881 train loss:3.477571 +step:7882 train loss:3.557072 +step:7883 train loss:3.373286 +step:7884 train loss:3.462759 +step:7885 train loss:3.444725 +step:7886 train loss:3.443691 +step:7887 train loss:3.440310 +step:7888 train loss:3.471885 +step:7889 train loss:3.519881 +step:7890 train loss:3.424659 +step:7891 train loss:3.473482 +step:7892 train loss:3.448193 +step:7893 train loss:3.421174 +step:7894 train loss:3.441437 +step:7895 train loss:3.427614 +step:7896 train loss:3.428699 +step:7897 train loss:3.449878 +step:7898 train loss:3.460741 +step:7899 train loss:3.445999 +step:7900 train loss:3.417460 +step:7901 train loss:3.406721 +step:7902 train loss:3.555784 +step:7903 train loss:3.399722 +step:7904 train loss:3.450889 +step:7905 train loss:3.520183 +step:7906 train loss:3.415274 +step:7907 train loss:3.440315 +step:7908 train loss:3.492430 +step:7909 train loss:3.544086 +step:7910 train loss:3.423584 +step:7911 train loss:3.446818 +step:7912 train loss:3.449578 +step:7913 train loss:3.423867 +step:7914 train loss:3.459528 +step:7915 train loss:3.562888 +step:7916 train loss:3.432176 +step:7917 train loss:3.490599 +step:7918 train loss:3.434751 +step:7919 train loss:3.425114 +step:7920 train loss:3.464516 +step:7921 train loss:3.467878 +step:7922 train loss:3.444317 +step:7923 train loss:3.491230 +step:7924 train loss:3.452869 +step:7925 train loss:3.475331 +step:7926 train loss:3.379581 +step:7927 train loss:3.659175 +step:7928 train loss:3.483648 +step:7929 train loss:3.445741 +step:7930 train loss:3.407639 +step:7931 train loss:3.432412 +step:7932 train loss:3.456211 +step:7933 train loss:3.465583 +step:7934 train loss:3.563270 +step:7935 train loss:3.485960 +step:7936 train loss:3.455628 +step:7937 train loss:3.408848 +step:7938 train loss:3.418961 +step:7939 train loss:3.469072 +step:7940 train loss:3.451247 +step:7941 train loss:3.477426 +step:7942 train loss:3.469725 +step:7943 train loss:3.481252 
+step:7944 train loss:3.400443 +step:7945 train loss:3.502789 +step:7946 train loss:3.453948 +step:7947 train loss:3.464235 +step:7948 train loss:3.422058 +step:7949 train loss:3.479826 +step:7950 train loss:3.531339 +step:7951 train loss:3.502356 +step:7952 train loss:3.645452 +step:7953 train loss:3.537114 +step:7954 train loss:3.438021 +step:7955 train loss:3.423397 +step:7956 train loss:3.430195 +step:7957 train loss:3.506669 +step:7958 train loss:3.518379 +step:7959 train loss:3.472030 +step:7960 train loss:3.531798 +step:7961 train loss:3.442278 +step:7962 train loss:3.413398 +step:7963 train loss:3.450692 +step:7964 train loss:3.447540 +step:7965 train loss:3.460582 +step:7966 train loss:3.429567 +step:7967 train loss:3.454457 +step:7968 train loss:3.462108 +step:7969 train loss:3.419690 +step:7970 train loss:3.387618 +step:7971 train loss:3.473242 +step:7972 train loss:3.450669 +step:7973 train loss:3.423772 +step:7974 train loss:3.463753 +step:7975 train loss:3.447314 +step:7976 train loss:3.470748 +step:7977 train loss:3.499450 +step:7978 train loss:3.522078 +step:7979 train loss:3.468473 +step:7980 train loss:3.374911 +step:7981 train loss:3.410998 +step:7982 train loss:3.460654 +step:7983 train loss:3.478663 +step:7984 train loss:3.516578 +step:7985 train loss:3.445310 +step:7986 train loss:3.466279 +step:7987 train loss:3.517984 +step:7988 train loss:3.492096 +step:7989 train loss:3.396358 +step:7990 train loss:3.414610 +step:7991 train loss:3.427677 +step:7992 train loss:3.452098 +step:7993 train loss:3.435293 +step:7994 train loss:3.486942 +step:7995 train loss:3.489631 +step:7996 train loss:3.456137 +step:7997 train loss:3.475921 +step:7998 train loss:3.497573 +step:7999 train loss:3.426493 +step:8000 validation loss:3.378914 total_sharp:5.4613e-05 L1_sharp:1.1884e-05 L2_sharp:7.2559e-06 L3_sharp:6.8292e-06 L4_sharp:4.7378e-06 L5_sharp:6.8779e-06 L6_sharp:1.1123e-05 L7_sharp:1.3584e-05 L8_sharp:1.1757e-05 L9_sharp:1.1592e-05 L10_sharp:6.4673e-06 L11_sharp:4.5889e-06 L12_sharp:5.3848e-06 total_fnorm:1.0373e+01 total_l1_linf:7.7858e+04 total_spectral:1.0373e+01 L1_fnorm:2.9408e+00 L2_fnorm:2.7769e+00 L3_fnorm:2.8628e+00 L4_fnorm:2.9634e+00 L5_fnorm:2.9906e+00 L6_fnorm:3.0038e+00 L7_fnorm:3.0082e+00 L8_fnorm:3.0142e+00 L9_fnorm:3.0085e+00 L10_fnorm:3.0267e+00 L11_fnorm:3.0122e+00 L12_fnorm:3.0183e+00 L1_l1linf:1.9806e+00 L2_l1linf:1.9507e+00 L3_l1linf:2.0655e+00 L4_l1linf:2.0681e+00 L5_l1linf:2.0340e+00 L6_l1linf:2.0739e+00 L7_l1linf:2.0304e+00 L8_l1linf:2.0284e+00 L9_l1linf:1.9975e+00 L10_l1linf:2.0335e+00 L11_l1linf:2.0006e+00 L12_l1linf:1.9843e+00 L1_spectral:6.0218e-02 L2_spectral:6.0225e-02 L3_spectral:6.0240e-02 L4_spectral:6.0255e-02 L5_spectral:6.0221e-02 L6_spectral:6.0215e-02 L7_spectral:6.0222e-02 L8_spectral:6.0223e-02 L9_spectral:6.0220e-02 L10_spectral:6.0205e-02 L11_spectral:6.0214e-02 L12_spectral:6.0232e-02 ip_v_neg_g:3.1210e-03 cos_v_neg_g:9.2861e-04 v_norm:1.0373e+01 g_norm:3.2402e-01 hv_norm:5.0202e-02 cos_v_hv:1.1284e-02 hg_norm:4.8727e-01 cos_g_hg:3.1984e-01 v_par:1.3212e-03 v_perp:1.0373e+01 L1_cos_v_neg_g:4.4334e-03 L1_v_norm:2.9408e+00 L2_cos_v_neg_g:3.7239e-04 L2_v_norm:2.7769e+00 L3_cos_v_neg_g:1.5822e-03 L3_v_norm:2.8628e+00 L4_cos_v_neg_g:1.9400e-03 L4_v_norm:2.9634e+00 L5_cos_v_neg_g:2.1369e-03 L5_v_norm:2.9906e+00 L6_cos_v_neg_g:3.2338e-03 L6_v_norm:3.0038e+00 L7_cos_v_neg_g:4.1667e-03 L7_v_norm:3.0082e+00 L8_cos_v_neg_g:3.8346e-03 L8_v_norm:3.0142e+00 L9_cos_v_neg_g:3.9903e-03 L9_v_norm:3.0085e+00 L10_cos_v_neg_g:3.0600e-03 L10_v_norm:3.0267e+00 
L11_cos_v_neg_g:2.2666e-03 L11_v_norm:3.0122e+00 L12_cos_v_neg_g:1.9317e-03 L12_v_norm:3.0183e+00 +step:8000 train loss:3.498610 +step:8001 train loss:3.458370 +step:8002 train loss:3.477392 +step:8003 train loss:3.495461 +step:8004 train loss:3.469723 +step:8005 train loss:3.395027 +step:8006 train loss:3.472446 +step:8007 train loss:3.439454 +step:8008 train loss:3.464785 +step:8009 train loss:3.537661 +step:8010 train loss:3.759096 +step:8011 train loss:3.418653 +step:8012 train loss:3.499755 +step:8013 train loss:3.450015 +step:8014 train loss:3.466570 +step:8015 train loss:3.463334 +step:8016 train loss:3.452160 +step:8017 train loss:3.473349 +step:8018 train loss:3.436038 +step:8019 train loss:3.404832 +step:8020 train loss:3.443795 +step:8021 train loss:3.518836 +step:8022 train loss:3.434474 +step:8023 train loss:3.465574 +step:8024 train loss:3.337814 +step:8025 train loss:3.442456 +step:8026 train loss:3.453135 +step:8027 train loss:3.456957 +step:8028 train loss:3.515725 +step:8029 train loss:3.442873 +step:8030 train loss:3.404676 +step:8031 train loss:3.464466 +step:8032 train loss:3.446806 +step:8033 train loss:3.398084 +step:8034 train loss:3.434557 +step:8035 train loss:3.423669 +step:8036 train loss:3.414581 +step:8037 train loss:3.385572 +step:8038 train loss:3.399410 +step:8039 train loss:3.491474 +step:8040 train loss:3.425508 +step:8041 train loss:3.423506 +step:8042 train loss:3.461728 +step:8043 train loss:3.405723 +step:8044 train loss:3.416535 +step:8045 train loss:3.486882 +step:8046 train loss:3.411537 +step:8047 train loss:3.417348 +step:8048 train loss:3.448401 +step:8049 train loss:3.492955 +step:8050 train loss:3.430750 +step:8051 train loss:3.407624 +step:8052 train loss:3.473967 +step:8053 train loss:3.427152 +step:8054 train loss:3.460158 +step:8055 train loss:3.489891 +step:8056 train loss:3.459332 +step:8057 train loss:3.534453 +step:8058 train loss:3.436307 +step:8059 train loss:3.498144 +step:8060 train loss:3.469434 +step:8061 train loss:3.357843 +step:8062 train loss:3.488431 +step:8063 train loss:3.451610 +step:8064 train loss:3.409131 +step:8065 train loss:3.478430 +step:8066 train loss:3.435798 +step:8067 train loss:3.501167 +step:8068 train loss:3.428623 +step:8069 train loss:3.450817 +step:8070 train loss:3.416936 +step:8071 train loss:3.427197 +step:8072 train loss:3.470289 +step:8073 train loss:3.421383 +step:8074 train loss:3.429710 +step:8075 train loss:3.426671 +step:8076 train loss:3.464788 +step:8077 train loss:3.473857 +step:8078 train loss:3.417298 +step:8079 train loss:3.438416 +step:8080 train loss:3.424951 +step:8081 train loss:3.440491 +step:8082 train loss:3.462098 +step:8083 train loss:3.359555 +step:8084 train loss:3.495908 +step:8085 train loss:3.372004 +step:8086 train loss:3.496749 +step:8087 train loss:3.394381 +step:8088 train loss:3.439623 +step:8089 train loss:3.477575 +step:8090 train loss:3.499800 +step:8091 train loss:3.444634 +step:8092 train loss:3.421489 +step:8093 train loss:3.428954 +step:8094 train loss:3.436037 +step:8095 train loss:3.456065 +step:8096 train loss:3.459680 +step:8097 train loss:3.385823 +step:8098 train loss:3.399600 +step:8099 train loss:3.390157 +step:8100 train loss:3.441531 +step:8101 train loss:3.522892 +step:8102 train loss:3.454832 +step:8103 train loss:3.408920 +step:8104 train loss:3.461292 +step:8105 train loss:3.455539 +step:8106 train loss:3.420475 +step:8107 train loss:3.401377 +step:8108 train loss:3.419519 +step:8109 train loss:3.413037 +step:8110 train loss:3.476887 +step:8111 
train loss:3.399215 +step:8112 train loss:3.420347 +step:8113 train loss:3.409665 +step:8114 train loss:3.352171 +step:8115 train loss:3.408486 +step:8116 train loss:3.442558 +step:8117 train loss:3.415913 +step:8118 train loss:3.406003 +step:8119 train loss:3.449842 +step:8120 train loss:3.396903 +step:8121 train loss:3.455608 +step:8122 train loss:3.438675 +step:8123 train loss:3.444159 +step:8124 train loss:3.407813 +step:8125 train loss:3.390229 +step:8126 train loss:3.382550 +step:8127 train loss:3.476694 +step:8128 train loss:3.481469 +step:8129 train loss:3.405305 +step:8130 train loss:3.428018 +step:8131 train loss:3.400770 +step:8132 train loss:3.469052 +step:8133 train loss:3.393149 +step:8134 train loss:3.428948 +step:8135 train loss:3.423384 +step:8136 train loss:3.429473 +step:8137 train loss:3.494909 +step:8138 train loss:3.403520 +step:8139 train loss:3.477675 +step:8140 train loss:3.404160 +step:8141 train loss:3.427486 +step:8142 train loss:3.409418 +step:8143 train loss:3.460113 +step:8144 train loss:3.436914 +step:8145 train loss:3.401117 +step:8146 train loss:3.413872 +step:8147 train loss:3.437449 +step:8148 train loss:3.528727 +step:8149 train loss:3.439434 +step:8150 train loss:3.422955 +step:8151 train loss:3.411173 +step:8152 train loss:3.506438 +step:8153 train loss:3.384823 +step:8154 train loss:3.402370 +step:8155 train loss:3.427881 +step:8156 train loss:3.409095 +step:8157 train loss:3.430680 +step:8158 train loss:3.441041 +step:8159 train loss:3.459168 +step:8160 train loss:3.410411 +step:8161 train loss:3.453892 +step:8162 train loss:3.385700 +step:8163 train loss:3.445083 +step:8164 train loss:3.431389 +step:8165 train loss:3.481096 +step:8166 train loss:3.485734 +step:8167 train loss:3.390532 +step:8168 train loss:3.371789 +step:8169 train loss:3.415556 +step:8170 train loss:3.368469 +step:8171 train loss:3.429252 +step:8172 train loss:3.425864 +step:8173 train loss:3.428624 +step:8174 train loss:3.437805 +step:8175 train loss:3.397453 +step:8176 train loss:3.391563 +step:8177 train loss:3.439246 +step:8178 train loss:3.527249 +step:8179 train loss:3.431665 +step:8180 train loss:3.457661 +step:8181 train loss:3.454204 +step:8182 train loss:3.413655 +step:8183 train loss:3.401876 +step:8184 train loss:3.396560 +step:8185 train loss:3.433828 +step:8186 train loss:3.440378 +step:8187 train loss:3.449250 +step:8188 train loss:3.377643 +step:8189 train loss:3.523390 +step:8190 train loss:3.456492 +step:8191 train loss:3.460625 +step:8192 train loss:3.574864 +step:8193 train loss:3.444027 +step:8194 train loss:3.378498 +step:8195 train loss:3.473612 +step:8196 train loss:3.389566 +step:8197 train loss:3.417790 +step:8198 train loss:3.428698 +step:8199 train loss:3.428409 +step:8200 train loss:3.411309 +step:8201 train loss:3.521730 +step:8202 train loss:3.439276 +step:8203 train loss:3.458164 +step:8204 train loss:3.370862 +step:8205 train loss:3.376404 +step:8206 train loss:3.502038 +step:8207 train loss:3.427280 +step:8208 train loss:3.445823 +step:8209 train loss:3.488552 +step:8210 train loss:3.474438 +step:8211 train loss:3.404307 +step:8212 train loss:3.461629 +step:8213 train loss:3.474592 +step:8214 train loss:3.510233 +step:8215 train loss:3.485858 +step:8216 train loss:3.469657 +step:8217 train loss:3.448477 +step:8218 train loss:3.454443 +step:8219 train loss:3.591027 +step:8220 train loss:3.418320 +step:8221 train loss:3.440282 +step:8222 train loss:3.391344 +step:8223 train loss:3.413882 +step:8224 train loss:3.421087 +step:8225 train loss:3.472270 
+step:8226 train loss:3.401680 +step:8227 train loss:3.470383 +step:8228 train loss:3.357949 +step:8229 train loss:3.400158 +step:8230 train loss:3.415957 +step:8231 train loss:3.438257 +step:8232 train loss:3.438871 +step:8233 train loss:3.482756 +step:8234 train loss:3.481571 +step:8235 train loss:3.449125 +step:8236 train loss:3.435187 +step:8237 train loss:3.389707 +step:8238 train loss:3.641510 +step:8239 train loss:3.475238 +step:8240 train loss:3.417834 +step:8241 train loss:3.391506 +step:8242 train loss:3.429861 +step:8243 train loss:3.419472 +step:8244 train loss:3.429883 +step:8245 train loss:3.414894 +step:8246 train loss:3.481977 +step:8247 train loss:3.515239 +step:8248 train loss:3.433386 +step:8249 train loss:3.424821 +step:8250 validation loss:3.370884 +step:8250 train loss:3.413738 +step:8251 train loss:3.510059 +step:8252 train loss:3.447486 +step:8253 train loss:3.413892 +step:8254 train loss:3.386487 +step:8255 train loss:3.418278 +step:8256 train loss:3.402206 +step:8257 train loss:3.507783 +step:8258 train loss:3.429678 +step:8259 train loss:3.411083 +step:8260 train loss:3.411224 +step:8261 train loss:3.408622 +step:8262 train loss:3.426293 +step:8263 train loss:3.438917 +step:8264 train loss:3.406654 +step:8265 train loss:3.394976 +step:8266 train loss:3.404671 +step:8267 train loss:3.338335 +step:8268 train loss:3.462356 +step:8269 train loss:3.388716 +step:8270 train loss:3.447102 +step:8271 train loss:3.474248 +step:8272 train loss:3.500137 +step:8273 train loss:3.374653 +step:8274 train loss:3.436632 +step:8275 train loss:3.399425 +step:8276 train loss:3.433681 +step:8277 train loss:3.504881 +step:8278 train loss:3.519339 +step:8279 train loss:3.430033 +step:8280 train loss:3.417575 +step:8281 train loss:3.385854 +step:8282 train loss:3.446425 +step:8283 train loss:3.433403 +step:8284 train loss:3.414546 +step:8285 train loss:3.407898 +step:8286 train loss:3.515553 +step:8287 train loss:3.454890 +step:8288 train loss:3.425622 +step:8289 train loss:3.439879 +step:8290 train loss:3.379029 +step:8291 train loss:3.420930 +step:8292 train loss:3.448145 +step:8293 train loss:3.426163 +step:8294 train loss:3.393513 +step:8295 train loss:3.432095 +step:8296 train loss:3.495130 +step:8297 train loss:3.579964 +step:8298 train loss:3.399257 +step:8299 train loss:3.438639 +step:8300 train loss:3.443893 +step:8301 train loss:3.418049 +step:8302 train loss:3.475655 +step:8303 train loss:3.611711 +step:8304 train loss:3.416865 +step:8305 train loss:3.462825 +step:8306 train loss:3.437536 +step:8307 train loss:3.456456 +step:8308 train loss:3.454340 +step:8309 train loss:3.478261 +step:8310 train loss:3.394419 +step:8311 train loss:3.488005 +step:8312 train loss:3.476722 +step:8313 train loss:3.540579 +step:8314 train loss:3.412452 +step:8315 train loss:3.364106 +step:8316 train loss:3.419779 +step:8317 train loss:3.442731 +step:8318 train loss:3.432142 +step:8319 train loss:3.469663 +step:8320 train loss:3.492296 +step:8321 train loss:3.398012 +step:8322 train loss:3.411789 +step:8323 train loss:3.450954 +step:8324 train loss:3.426539 +step:8325 train loss:3.480935 +step:8326 train loss:3.448971 +step:8327 train loss:3.436528 +step:8328 train loss:3.509112 +step:8329 train loss:3.416664 +step:8330 train loss:3.459473 +step:8331 train loss:3.382327 +step:8332 train loss:3.486282 +step:8333 train loss:3.500049 +step:8334 train loss:3.369393 +step:8335 train loss:3.427099 +step:8336 train loss:3.527060 +step:8337 train loss:3.454922 +step:8338 train loss:3.424702 +step:8339 
train loss:3.401924 +step:8340 train loss:3.493365 +step:8341 train loss:3.392826 +step:8342 train loss:3.468908 +step:8343 train loss:3.382181 +step:8344 train loss:3.426816 +step:8345 train loss:3.461306 +step:8346 train loss:3.545712 +step:8347 train loss:3.431981 +step:8348 train loss:3.460923 +step:8349 train loss:3.431427 +step:8350 train loss:3.453320 +step:8351 train loss:3.390450 +step:8352 train loss:3.478581 +step:8353 train loss:3.430497 +step:8354 train loss:3.416735 +step:8355 train loss:3.413537 +step:8356 train loss:3.413801 +step:8357 train loss:3.425033 +step:8358 train loss:3.401851 +step:8359 train loss:3.395177 +step:8360 train loss:3.446130 +step:8361 train loss:3.456705 +step:8362 train loss:3.476426 +step:8363 train loss:3.474651 +step:8364 train loss:3.439792 +step:8365 train loss:3.584088 +step:8366 train loss:3.428928 +step:8367 train loss:3.399403 +step:8368 train loss:3.371328 +step:8369 train loss:3.398308 +step:8370 train loss:3.483276 +step:8371 train loss:3.453219 +step:8372 train loss:3.433794 +step:8373 train loss:3.439736 +step:8374 train loss:3.376550 +step:8375 train loss:3.435701 +step:8376 train loss:3.478955 +step:8377 train loss:3.302693 +step:8378 train loss:3.520920 +step:8379 train loss:3.381920 +step:8380 train loss:3.390577 +step:8381 train loss:3.396187 +step:8382 train loss:3.420851 +step:8383 train loss:3.383063 +step:8384 train loss:3.425843 +step:8385 train loss:3.435978 +step:8386 train loss:3.419256 +step:8387 train loss:3.577726 +step:8388 train loss:3.489430 +step:8389 train loss:3.467112 +step:8390 train loss:3.471004 +step:8391 train loss:3.396557 +step:8392 train loss:3.410254 +step:8393 train loss:3.363939 +step:8394 train loss:3.458022 +step:8395 train loss:3.462722 +step:8396 train loss:3.487412 +step:8397 train loss:3.422614 +step:8398 train loss:3.439039 +step:8399 train loss:3.407314 +step:8400 train loss:3.411250 +step:8401 train loss:3.418250 +step:8402 train loss:3.401221 +step:8403 train loss:3.418723 +step:8404 train loss:3.423930 +step:8405 train loss:3.378105 +step:8406 train loss:3.419294 +step:8407 train loss:3.462425 +step:8408 train loss:3.433056 +step:8409 train loss:3.354719 +step:8410 train loss:3.419829 +step:8411 train loss:3.447248 +step:8412 train loss:3.506090 +step:8413 train loss:3.480736 +step:8414 train loss:3.477225 +step:8415 train loss:3.399085 +step:8416 train loss:3.445481 +step:8417 train loss:3.361257 +step:8418 train loss:3.465192 +step:8419 train loss:3.421000 +step:8420 train loss:3.496732 +step:8421 train loss:3.413400 +step:8422 train loss:3.433221 +step:8423 train loss:3.448642 +step:8424 train loss:3.453610 +step:8425 train loss:3.512204 +step:8426 train loss:3.480394 +step:8427 train loss:3.399188 +step:8428 train loss:3.413268 +step:8429 train loss:3.474921 +step:8430 train loss:3.414114 +step:8431 train loss:3.417468 +step:8432 train loss:3.421206 +step:8433 train loss:3.392890 +step:8434 train loss:3.433241 +step:8435 train loss:3.349549 +step:8436 train loss:3.430521 +step:8437 train loss:3.473825 +step:8438 train loss:3.453321 +step:8439 train loss:3.394622 +step:8440 train loss:3.363894 +step:8441 train loss:3.421692 +step:8442 train loss:3.447016 +step:8443 train loss:3.402377 +step:8444 train loss:3.437145 +step:8445 train loss:3.385180 +step:8446 train loss:3.438410 +step:8447 train loss:3.447289 +step:8448 train loss:3.429187 +step:8449 train loss:3.422011 +step:8450 train loss:3.413533 +step:8451 train loss:3.441492 +step:8452 train loss:3.414783 +step:8453 train loss:3.397666 
+step:8454 train loss:3.447206 +step:8455 train loss:3.518987 +step:8456 train loss:3.499505 +step:8457 train loss:3.551167 +step:8458 train loss:3.440546 +step:8459 train loss:3.447499 +step:8460 train loss:3.378182 +step:8461 train loss:3.532267 +step:8462 train loss:3.404216 +step:8463 train loss:3.437866 +step:8464 train loss:3.455399 +step:8465 train loss:3.459723 +step:8466 train loss:3.435104 +step:8467 train loss:3.437083 +step:8468 train loss:3.692189 +step:8469 train loss:3.400670 +step:8470 train loss:3.392709 +step:8471 train loss:3.435633 +step:8472 train loss:3.458647 +step:8473 train loss:3.410694 +step:8474 train loss:3.539599 +step:8475 train loss:3.493346 +step:8476 train loss:3.443658 +step:8477 train loss:3.435366 +step:8478 train loss:3.417077 +step:8479 train loss:3.417680 +step:8480 train loss:3.506624 +step:8481 train loss:3.411536 +step:8482 train loss:3.408535 +step:8483 train loss:3.550647 +step:8484 train loss:3.434638 +step:8485 train loss:3.480400 +step:8486 train loss:3.391185 +step:8487 train loss:3.448104 +step:8488 train loss:3.392436 +step:8489 train loss:3.471201 +step:8490 train loss:3.458592 +step:8491 train loss:3.480942 +step:8492 train loss:3.434313 +step:8493 train loss:3.508315 +step:8494 train loss:3.369208 +step:8495 train loss:3.466486 +step:8496 train loss:3.413064 +step:8497 train loss:3.446309 +step:8498 train loss:3.462154 +step:8499 train loss:3.440013 +step:8500 validation loss:3.369772 total_sharp:6.2232e-05 L1_sharp:6.1073e-05 L2_sharp:1.6547e-05 L3_sharp:8.8752e-06 L4_sharp:5.1143e-06 L5_sharp:7.0071e-06 L6_sharp:1.0832e-05 L7_sharp:1.5616e-05 L8_sharp:1.2279e-05 L9_sharp:1.0252e-05 L10_sharp:5.1488e-06 L11_sharp:4.9442e-06 L12_sharp:1.7280e-05 total_fnorm:1.0295e+01 total_l1_linf:7.7240e+04 total_spectral:1.0295e+01 L1_fnorm:2.9128e+00 L2_fnorm:2.7099e+00 L3_fnorm:2.8219e+00 L4_fnorm:2.9550e+00 L5_fnorm:2.9816e+00 L6_fnorm:2.9911e+00 L7_fnorm:2.9945e+00 L8_fnorm:3.0025e+00 L9_fnorm:2.9954e+00 L10_fnorm:3.0007e+00 L11_fnorm:2.9783e+00 L12_fnorm:3.0073e+00 L1_l1linf:2.0201e+00 L2_l1linf:1.9728e+00 L3_l1linf:1.9971e+00 L4_l1linf:2.0408e+00 L5_l1linf:2.0156e+00 L6_l1linf:2.0202e+00 L7_l1linf:2.0117e+00 L8_l1linf:2.0016e+00 L9_l1linf:1.9886e+00 L10_l1linf:1.9894e+00 L11_l1linf:1.9857e+00 L12_l1linf:2.2171e+00 L1_spectral:6.0222e-02 L2_spectral:6.0205e-02 L3_spectral:6.0218e-02 L4_spectral:6.0299e-02 L5_spectral:6.0230e-02 L6_spectral:6.0228e-02 L7_spectral:6.0226e-02 L8_spectral:6.0235e-02 L9_spectral:6.0251e-02 L10_spectral:6.0258e-02 L11_spectral:6.0219e-02 L12_spectral:6.0240e-02 ip_v_neg_g:3.2877e-03 cos_v_neg_g:9.2997e-04 v_norm:1.0295e+01 g_norm:3.4340e-01 hv_norm:6.5859e-02 cos_v_hv:9.7280e-03 hg_norm:8.5518e-01 cos_g_hg:4.2923e-01 v_par:5.5791e-04 v_perp:1.0295e+01 L1_cos_v_neg_g:5.6871e-03 L1_v_norm:2.9128e+00 L2_cos_v_neg_g:2.5753e-03 L2_v_norm:2.7099e+00 L3_cos_v_neg_g:2.0725e-03 L3_v_norm:2.8219e+00 L4_cos_v_neg_g:3.0695e-03 L4_v_norm:2.9550e+00 L5_cos_v_neg_g:3.0361e-03 L5_v_norm:2.9816e+00 L6_cos_v_neg_g:3.3719e-03 L6_v_norm:2.9911e+00 L7_cos_v_neg_g:3.9040e-03 L7_v_norm:2.9945e+00 L8_cos_v_neg_g:3.5704e-03 L8_v_norm:3.0025e+00 L9_cos_v_neg_g:5.1606e-03 L9_v_norm:2.9954e+00 L10_cos_v_neg_g:3.1167e-03 L10_v_norm:3.0007e+00 L11_cos_v_neg_g:2.6822e-03 L11_v_norm:2.9783e+00 L12_cos_v_neg_g:1.5517e-03 L12_v_norm:3.0073e+00 +step:8500 train loss:3.434445 +step:8501 train loss:3.659800 +step:8502 train loss:3.665383 +step:8503 train loss:3.428439 +step:8504 train loss:3.422880 +step:8505 train loss:3.401672 +step:8506 train 
loss:3.473217 +step:8507 train loss:3.411107 +step:8508 train loss:3.444379 +step:8509 train loss:3.383394 +step:8510 train loss:3.408790 +step:8511 train loss:3.365086 +step:8512 train loss:3.464648 +step:8513 train loss:3.468357 +step:8514 train loss:3.419327 +step:8515 train loss:3.510150 +step:8516 train loss:3.427604 +step:8517 train loss:3.448221 +step:8518 train loss:3.340626 +step:8519 train loss:3.433764 +step:8520 train loss:3.399386 +step:8521 train loss:3.440015 +step:8522 train loss:3.335238 +step:8523 train loss:3.427457 +step:8524 train loss:3.418436 +step:8525 train loss:3.487084 +step:8526 train loss:3.469142 +step:8527 train loss:3.408791 +step:8528 train loss:3.489464 +step:8529 train loss:3.449434 +step:8530 train loss:3.479781 +step:8531 train loss:3.469066 +step:8532 train loss:3.511013 +step:8533 train loss:3.463178 +step:8534 train loss:3.460587 +step:8535 train loss:3.432865 +step:8536 train loss:3.520525 +step:8537 train loss:3.435870 +step:8538 train loss:3.505645 +step:8539 train loss:3.427295 +step:8540 train loss:3.456676 +step:8541 train loss:3.394370 +step:8542 train loss:3.461302 +step:8543 train loss:3.376160 +step:8544 train loss:3.373757 +step:8545 train loss:3.422727 +step:8546 train loss:3.376672 +step:8547 train loss:3.427150 +step:8548 train loss:3.401729 +step:8549 train loss:3.444589 +step:8550 train loss:3.396940 +step:8551 train loss:3.446782 +step:8552 train loss:3.443444 +step:8553 train loss:3.452382 +step:8554 train loss:3.423266 +step:8555 train loss:3.437172 +step:8556 train loss:3.513980 +step:8557 train loss:3.415310 +step:8558 train loss:3.452232 +step:8559 train loss:3.443225 +step:8560 train loss:3.424613 +step:8561 train loss:3.380380 +step:8562 train loss:3.406054 +step:8563 train loss:3.405889 +step:8564 train loss:3.475541 +step:8565 train loss:3.448122 +step:8566 train loss:3.471468 +step:8567 train loss:3.415735 +step:8568 train loss:3.435003 +step:8569 train loss:3.440923 +step:8570 train loss:3.386761 +step:8571 train loss:3.429204 +step:8572 train loss:3.445238 +step:8573 train loss:3.517811 +step:8574 train loss:3.451193 +step:8575 train loss:3.446649 +step:8576 train loss:3.483416 +step:8577 train loss:3.562508 +step:8578 train loss:3.475965 +step:8579 train loss:3.459266 +step:8580 train loss:3.394218 +step:8581 train loss:3.435879 +step:8582 train loss:3.438740 +step:8583 train loss:3.439144 +step:8584 train loss:3.430418 +step:8585 train loss:3.509780 +step:8586 train loss:3.426457 +step:8587 train loss:3.438088 +step:8588 train loss:3.481927 +step:8589 train loss:3.428502 +step:8590 train loss:3.422843 +step:8591 train loss:3.425532 +step:8592 train loss:3.384010 +step:8593 train loss:3.461330 +step:8594 train loss:3.487366 +step:8595 train loss:3.404832 +step:8596 train loss:3.454465 +step:8597 train loss:3.412959 +step:8598 train loss:3.467220 +step:8599 train loss:3.445390 +step:8600 train loss:3.445581 +step:8601 train loss:3.432278 +step:8602 train loss:3.405087 +step:8603 train loss:3.462867 +step:8604 train loss:3.408514 +step:8605 train loss:3.422525 +step:8606 train loss:3.432936 +step:8607 train loss:3.444401 +step:8608 train loss:3.482386 +step:8609 train loss:3.382159 +step:8610 train loss:3.455016 +step:8611 train loss:3.385489 +step:8612 train loss:3.464841 +step:8613 train loss:3.398232 +step:8614 train loss:3.464556 +step:8615 train loss:3.502867 +step:8616 train loss:3.385205 +step:8617 train loss:3.451622 +step:8618 train loss:3.432307 +step:8619 train loss:3.383945 +step:8620 train loss:3.425031 
+step:8621 train loss:3.457103 +step:8622 train loss:3.416042 +step:8623 train loss:3.429255 +step:8624 train loss:3.499442 +step:8625 train loss:3.424366 +step:8626 train loss:3.430686 +step:8627 train loss:3.429372 +step:8628 train loss:3.461026 +step:8629 train loss:3.371389 +step:8630 train loss:3.469204 +step:8631 train loss:3.410848 +step:8632 train loss:3.463759 +step:8633 train loss:3.414088 +step:8634 train loss:3.647342 +step:8635 train loss:3.441999 +step:8636 train loss:3.488859 +step:8637 train loss:3.413026 +step:8638 train loss:3.415508 +step:8639 train loss:3.468269 +step:8640 train loss:3.385580 +step:8641 train loss:3.481843 +step:8642 train loss:3.433954 +step:8643 train loss:3.544331 +step:8644 train loss:3.387489 +step:8645 train loss:3.457164 +step:8646 train loss:3.418486 +step:8647 train loss:3.447384 +step:8648 train loss:3.394760 +step:8649 train loss:3.478294 +step:8650 train loss:3.432043 +step:8651 train loss:3.444117 +step:8652 train loss:3.414668 +step:8653 train loss:3.444917 +step:8654 train loss:3.488964 +step:8655 train loss:3.418919 +step:8656 train loss:3.462320 +step:8657 train loss:3.462766 +step:8658 train loss:3.434480 +step:8659 train loss:3.429722 +step:8660 train loss:3.372089 +step:8661 train loss:3.432471 +step:8662 train loss:3.375304 +step:8663 train loss:3.446757 +step:8664 train loss:3.359176 +step:8665 train loss:3.386562 +step:8666 train loss:3.460006 +step:8667 train loss:3.353481 +step:8668 train loss:3.461465 +step:8669 train loss:3.501850 +step:8670 train loss:3.398328 +step:8671 train loss:3.398030 +step:8672 train loss:3.615269 +step:8673 train loss:3.378359 +step:8674 train loss:3.450090 +step:8675 train loss:3.486646 +step:8676 train loss:3.435357 +step:8677 train loss:3.456540 +step:8678 train loss:3.405769 +step:8679 train loss:3.464632 +step:8680 train loss:3.439976 +step:8681 train loss:3.445391 +step:8682 train loss:3.399234 +step:8683 train loss:3.418486 +step:8684 train loss:3.491797 +step:8685 train loss:3.437314 +step:8686 train loss:3.428455 +step:8687 train loss:3.382111 +step:8688 train loss:3.399896 +step:8689 train loss:3.468346 +step:8690 train loss:3.408398 +step:8691 train loss:3.484342 +step:8692 train loss:3.374932 +step:8693 train loss:3.462384 +step:8694 train loss:3.462841 +step:8695 train loss:3.449815 +step:8696 train loss:3.472467 +step:8697 train loss:3.429729 +step:8698 train loss:3.466292 +step:8699 train loss:3.417599 +step:8700 train loss:3.443418 +step:8701 train loss:3.406185 +step:8702 train loss:3.392161 +step:8703 train loss:3.403139 +step:8704 train loss:3.363128 +step:8705 train loss:3.440225 +step:8706 train loss:3.461865 +step:8707 train loss:3.458862 +step:8708 train loss:3.402110 +step:8709 train loss:3.463689 +step:8710 train loss:3.393637 +step:8711 train loss:3.447087 +step:8712 train loss:3.355834 +step:8713 train loss:3.432795 +step:8714 train loss:3.538933 +step:8715 train loss:3.395790 +step:8716 train loss:3.447380 +step:8717 train loss:3.421072 +step:8718 train loss:3.458960 +step:8719 train loss:3.427076 +step:8720 train loss:3.536283 +step:8721 train loss:3.426722 +step:8722 train loss:3.522102 +step:8723 train loss:3.391412 +step:8724 train loss:3.405192 +step:8725 train loss:3.432918 +step:8726 train loss:3.388684 +step:8727 train loss:3.466908 +step:8728 train loss:3.422377 +step:8729 train loss:3.427082 +step:8730 train loss:3.404796 +step:8731 train loss:3.408485 +step:8732 train loss:3.509955 +step:8733 train loss:3.431917 +step:8734 train loss:3.471489 +step:8735 train 
loss:3.540021 +step:8736 train loss:3.398649 +step:8737 train loss:3.425083 +step:8738 train loss:3.404289 +step:8739 train loss:3.467638 +step:8740 train loss:3.387576 +step:8741 train loss:3.441414 +step:8742 train loss:3.397369 +step:8743 train loss:3.437181 +step:8744 train loss:3.458623 +step:8745 train loss:3.497264 +step:8746 train loss:3.399383 +step:8747 train loss:3.500437 +step:8748 train loss:3.412765 +step:8749 train loss:3.449256 +step:8750 validation loss:3.363884 +step:8750 train loss:3.457278 +step:8751 train loss:3.499775 +step:8752 train loss:3.357588 +step:8753 train loss:3.401587 +step:8754 train loss:3.453958 +step:8755 train loss:3.437016 +step:8756 train loss:3.481593 +step:8757 train loss:3.394469 +step:8758 train loss:3.551458 +step:8759 train loss:3.397211 +step:8760 train loss:3.430065 +step:8761 train loss:3.506933 +step:8762 train loss:3.403068 +step:8763 train loss:3.376339 +step:8764 train loss:3.448569 +step:8765 train loss:3.515373 +step:8766 train loss:3.447144 +step:8767 train loss:3.406050 +step:8768 train loss:3.446252 +step:8769 train loss:3.417680 +step:8770 train loss:3.465214 +step:8771 train loss:3.435200 +step:8772 train loss:3.457723 +step:8773 train loss:3.417317 +step:8774 train loss:3.451072 +step:8775 train loss:3.448746 +step:8776 train loss:3.393921 +step:8777 train loss:3.429814 +step:8778 train loss:3.440544 +step:8779 train loss:3.460436 +step:8780 train loss:3.425931 +step:8781 train loss:3.430861 +step:8782 train loss:3.450344 +step:8783 train loss:3.430176 +step:8784 train loss:3.456005 +step:8785 train loss:3.441636 +step:8786 train loss:3.519915 +step:8787 train loss:3.460950 +step:8788 train loss:3.362025 +step:8789 train loss:3.461903 +step:8790 train loss:3.389566 +step:8791 train loss:3.441263 +step:8792 train loss:3.378289 +step:8793 train loss:3.470993 +step:8794 train loss:3.391116 +step:8795 train loss:3.460487 +step:8796 train loss:3.605221 +step:8797 train loss:3.354484 +step:8798 train loss:3.507338 +step:8799 train loss:3.425155 +step:8800 train loss:3.419517 +step:8801 train loss:3.442877 +step:8802 train loss:3.499388 +step:8803 train loss:3.456391 +step:8804 train loss:3.439715 +step:8805 train loss:3.456677 +step:8806 train loss:3.426798 +step:8807 train loss:3.418876 +step:8808 train loss:3.372918 +step:8809 train loss:3.502390 +step:8810 train loss:3.400194 +step:8811 train loss:3.390325 +step:8812 train loss:3.434772 +step:8813 train loss:3.345440 +step:8814 train loss:3.533848 +step:8815 train loss:3.378978 +step:8816 train loss:3.496011 +step:8817 train loss:3.434676 +step:8818 train loss:3.366170 +step:8819 train loss:3.483361 +step:8820 train loss:3.411932 +step:8821 train loss:3.436859 +step:8822 train loss:3.419790 +step:8823 train loss:3.436562 +step:8824 train loss:3.495702 +step:8825 train loss:3.471960 +step:8826 train loss:3.441699 +step:8827 train loss:3.402150 +step:8828 train loss:3.445526 +step:8829 train loss:3.422247 +step:8830 train loss:3.401301 +step:8831 train loss:3.476684 +step:8832 train loss:3.412481 +step:8833 train loss:3.448303 +step:8834 train loss:3.413191 +step:8835 train loss:3.349809 +step:8836 train loss:3.478098 +step:8837 train loss:3.383278 +step:8838 train loss:3.422999 +step:8839 train loss:3.409916 +step:8840 train loss:3.413511 +step:8841 train loss:3.425540 +step:8842 train loss:3.434686 +step:8843 train loss:3.449000 +step:8844 train loss:3.412734 +step:8845 train loss:3.434516 +step:8846 train loss:3.400972 +step:8847 train loss:3.442356 +step:8848 train loss:3.488443 
+step:8849 train loss:3.467194 +step:8850 train loss:3.459760 +step:8851 train loss:3.343495 +step:8852 train loss:3.444691 +step:8853 train loss:3.426157 +step:8854 train loss:3.398145 +step:8855 train loss:3.468068 +step:8856 train loss:3.457452 +step:8857 train loss:3.525741 +step:8858 train loss:3.391587 +step:8859 train loss:3.466596 +step:8860 train loss:3.422339 +step:8861 train loss:3.406070 +step:8862 train loss:3.403832 +step:8863 train loss:3.387540 +step:8864 train loss:3.458516 +step:8865 train loss:3.448839 +step:8866 train loss:3.333838 +step:8867 train loss:3.435721 +step:8868 train loss:3.460922 +step:8869 train loss:3.549111 +step:8870 train loss:3.423540 +step:8871 train loss:3.450764 +step:8872 train loss:3.434566 +step:8873 train loss:3.433929 +step:8874 train loss:3.488300 +step:8875 train loss:3.422097 +step:8876 train loss:3.458282 +step:8877 train loss:3.440214 +step:8878 train loss:3.489322 +step:8879 train loss:3.451698 +step:8880 train loss:3.398540 +step:8881 train loss:3.363804 +step:8882 train loss:3.433928 +step:8883 train loss:3.421580 +step:8884 train loss:3.510659 +step:8885 train loss:3.444245 +step:8886 train loss:3.448170 +step:8887 train loss:3.475007 +step:8888 train loss:3.433611 +step:8889 train loss:3.437032 +step:8890 train loss:3.425819 +step:8891 train loss:3.399698 +step:8892 train loss:3.481483 +step:8893 train loss:3.425418 +step:8894 train loss:3.442487 +step:8895 train loss:3.472660 +step:8896 train loss:3.387561 +step:8897 train loss:3.479602 +step:8898 train loss:3.410539 +step:8899 train loss:3.432661 +step:8900 train loss:3.401653 +step:8901 train loss:3.418360 +step:8902 train loss:3.455587 +step:8903 train loss:3.396178 +step:8904 train loss:3.449070 +step:8905 train loss:3.424225 +step:8906 train loss:3.412469 +step:8907 train loss:3.425485 +step:8908 train loss:3.492371 +step:8909 train loss:3.433051 +step:8910 train loss:3.396596 +step:8911 train loss:3.493560 +step:8912 train loss:3.390045 +step:8913 train loss:3.402519 +step:8914 train loss:3.495106 +step:8915 train loss:3.438216 +step:8916 train loss:3.464275 +step:8917 train loss:3.423816 +step:8918 train loss:3.426797 +step:8919 train loss:3.418923 +step:8920 train loss:3.443204 +step:8921 train loss:3.437025 +step:8922 train loss:3.417518 +step:8923 train loss:3.605782 +step:8924 train loss:3.496384 +step:8925 train loss:3.424359 +step:8926 train loss:3.438643 +step:8927 train loss:3.468985 +step:8928 train loss:3.421093 +step:8929 train loss:3.417770 +step:8930 train loss:3.471069 +step:8931 train loss:3.380853 +step:8932 train loss:3.486819 +step:8933 train loss:3.391529 +step:8934 train loss:3.433642 +step:8935 train loss:3.445704 +step:8936 train loss:3.482417 +step:8937 train loss:3.476034 +step:8938 train loss:3.417835 +step:8939 train loss:3.486505 +step:8940 train loss:3.440050 +step:8941 train loss:3.384109 +step:8942 train loss:3.461527 +step:8943 train loss:3.392067 +step:8944 train loss:3.444962 +step:8945 train loss:3.459640 +step:8946 train loss:3.310125 +step:8947 train loss:3.497490 +step:8948 train loss:3.344346 +step:8949 train loss:3.346526 +step:8950 train loss:3.389958 +step:8951 train loss:3.430737 +step:8952 train loss:3.449105 +step:8953 train loss:3.404469 +step:8954 train loss:3.510315 +step:8955 train loss:3.425828 +step:8956 train loss:3.453031 +step:8957 train loss:3.441875 +step:8958 train loss:3.418330 +step:8959 train loss:3.412837 +step:8960 train loss:3.378584 +step:8961 train loss:3.401220 +step:8962 train loss:3.453642 +step:8963 train 
loss:3.430118 +step:8964 train loss:3.416314 +step:8965 train loss:3.455739 +step:8966 train loss:3.418947 +step:8967 train loss:3.394462 +step:8968 train loss:3.379405 +step:8969 train loss:3.370152 +step:8970 train loss:3.447675 +step:8971 train loss:3.398014 +step:8972 train loss:3.600708 +step:8973 train loss:3.483098 +step:8974 train loss:3.442271 +step:8975 train loss:3.445712 +step:8976 train loss:3.406950 +step:8977 train loss:3.490923 +step:8978 train loss:3.477412 +step:8979 train loss:3.395000 +step:8980 train loss:3.492155 +step:8981 train loss:3.443369 +step:8982 train loss:3.414968 +step:8983 train loss:3.358930 +step:8984 train loss:3.483709 +step:8985 train loss:3.400816 +step:8986 train loss:3.436606 +step:8987 train loss:3.411690 +step:8988 train loss:3.461047 +step:8989 train loss:3.370152 +step:8990 train loss:3.510108 +step:8991 train loss:3.365204 +step:8992 train loss:3.420864 +step:8993 train loss:3.511034 +step:8994 train loss:3.416733 +step:8995 train loss:3.441433 +step:8996 train loss:3.409942 +step:8997 train loss:3.362482 +step:8998 train loss:3.364776 +step:8999 train loss:3.387893 +step:9000 validation loss:3.360429 total_sharp:5.0370e-05 L1_sharp:1.0052e-05 L2_sharp:1.3290e-05 L3_sharp:5.1433e-06 L4_sharp:4.5917e-06 L5_sharp:5.5142e-06 L6_sharp:8.9112e-06 L7_sharp:9.0246e-06 L8_sharp:8.1180e-06 L9_sharp:8.4065e-06 L10_sharp:5.2391e-06 L11_sharp:4.7893e-06 L12_sharp:7.2754e-06 total_fnorm:1.0370e+01 total_l1_linf:7.7833e+04 total_spectral:1.0370e+01 L1_fnorm:2.9317e+00 L2_fnorm:2.7627e+00 L3_fnorm:2.8672e+00 L4_fnorm:2.9684e+00 L5_fnorm:2.9895e+00 L6_fnorm:3.0052e+00 L7_fnorm:3.0141e+00 L8_fnorm:3.0153e+00 L9_fnorm:3.0109e+00 L10_fnorm:3.0237e+00 L11_fnorm:3.0109e+00 L12_fnorm:3.0158e+00 L1_l1linf:1.9737e+00 L2_l1linf:1.9619e+00 L3_l1linf:2.0696e+00 L4_l1linf:2.0660e+00 L5_l1linf:2.0187e+00 L6_l1linf:2.0382e+00 L7_l1linf:2.0327e+00 L8_l1linf:2.0354e+00 L9_l1linf:1.9969e+00 L10_l1linf:2.0216e+00 L11_l1linf:1.9871e+00 L12_l1linf:2.0693e+00 L1_spectral:6.0221e-02 L2_spectral:6.0232e-02 L3_spectral:6.0253e-02 L4_spectral:6.0288e-02 L5_spectral:6.0230e-02 L6_spectral:6.0247e-02 L7_spectral:6.0234e-02 L8_spectral:6.0234e-02 L9_spectral:6.0222e-02 L10_spectral:6.0215e-02 L11_spectral:6.0215e-02 L12_spectral:6.0229e-02 ip_v_neg_g:3.2141e-03 cos_v_neg_g:8.6876e-04 v_norm:1.0370e+01 g_norm:3.5676e-01 hv_norm:7.9111e-02 cos_v_hv:6.6027e-03 hg_norm:8.5038e-01 cos_g_hg:4.4990e-01 v_par:2.2593e-03 v_perp:1.0370e+01 L1_cos_v_neg_g:4.2879e-03 L1_v_norm:2.9317e+00 L2_cos_v_neg_g:1.9053e-04 L2_v_norm:2.7627e+00 L3_cos_v_neg_g:5.3159e-04 L3_v_norm:2.8672e+00 L4_cos_v_neg_g:7.1107e-04 L4_v_norm:2.9684e+00 L5_cos_v_neg_g:1.3639e-03 L5_v_norm:2.9895e+00 L6_cos_v_neg_g:2.5006e-03 L6_v_norm:3.0052e+00 L7_cos_v_neg_g:2.9222e-03 L7_v_norm:3.0141e+00 L8_cos_v_neg_g:3.2521e-03 L8_v_norm:3.0153e+00 L9_cos_v_neg_g:4.3668e-03 L9_v_norm:3.0109e+00 L10_cos_v_neg_g:3.5164e-03 L10_v_norm:3.0237e+00 L11_cos_v_neg_g:3.5190e-03 L11_v_norm:3.0109e+00 L12_cos_v_neg_g:4.5588e-03 L12_v_norm:3.0158e+00 +step:9000 train loss:3.473858 +step:9001 train loss:3.445076 +step:9002 train loss:3.450618 +step:9003 train loss:3.390636 +step:9004 train loss:3.390569 +step:9005 train loss:3.404057 +step:9006 train loss:3.404457 +step:9007 train loss:3.424641 +step:9008 train loss:3.378505 +step:9009 train loss:3.375559 +step:9010 train loss:3.415415 +step:9011 train loss:3.407432 +step:9012 train loss:3.522154 +step:9013 train loss:3.348839 +step:9014 train loss:3.421192 +step:9015 train loss:3.419868 +step:9016 
train loss:3.495324 +step:9017 train loss:3.436624 +step:9018 train loss:3.359863 +step:9019 train loss:3.447680 +step:9020 train loss:3.452391 +step:9021 train loss:3.412394 +step:9022 train loss:3.424161 +step:9023 train loss:3.420978 +step:9024 train loss:3.441299 +step:9025 train loss:3.424817 +step:9026 train loss:3.382689 +step:9027 train loss:3.426885 +step:9028 train loss:3.448964 +step:9029 train loss:3.467086 +step:9030 train loss:3.464032 +step:9031 train loss:3.429190 +step:9032 train loss:3.441602 +step:9033 train loss:3.426151 +step:9034 train loss:3.434941 +step:9035 train loss:3.438576 +step:9036 train loss:3.387348 +step:9037 train loss:3.381976 +step:9038 train loss:3.505792 +step:9039 train loss:3.410657 +step:9040 train loss:3.425852 +step:9041 train loss:3.472591 +step:9042 train loss:3.330548 +step:9043 train loss:3.423540 +step:9044 train loss:3.443808 +step:9045 train loss:3.388205 +step:9046 train loss:3.433908 +step:9047 train loss:3.426253 +step:9048 train loss:3.407643 +step:9049 train loss:3.440762 +step:9050 train loss:3.395041 +step:9051 train loss:3.429233 +step:9052 train loss:3.361686 +step:9053 train loss:3.490256 +step:9054 train loss:3.499141 +step:9055 train loss:3.421415 +step:9056 train loss:3.484391 +step:9057 train loss:3.340986 +step:9058 train loss:3.424187 +step:9059 train loss:3.499002 +step:9060 train loss:3.430463 +step:9061 train loss:3.457027 +step:9062 train loss:3.387630 +step:9063 train loss:3.524055 +step:9064 train loss:3.407297 +step:9065 train loss:3.418581 +step:9066 train loss:3.438013 +step:9067 train loss:3.400669 +step:9068 train loss:3.470340 +step:9069 train loss:3.430381 +step:9070 train loss:3.478053 +step:9071 train loss:3.417628 +step:9072 train loss:3.437358 +step:9073 train loss:3.396200 +step:9074 train loss:3.478443 +step:9075 train loss:3.423324 +step:9076 train loss:3.391260 +step:9077 train loss:3.468569 +step:9078 train loss:3.405382 +step:9079 train loss:3.453272 +step:9080 train loss:3.385202 +step:9081 train loss:3.424114 +step:9082 train loss:3.451556 +step:9083 train loss:3.478638 +step:9084 train loss:3.369617 +step:9085 train loss:3.443014 +step:9086 train loss:3.424874 +step:9087 train loss:3.372371 +step:9088 train loss:3.433367 +step:9089 train loss:3.447075 +step:9090 train loss:3.382860 +step:9091 train loss:3.482836 +step:9092 train loss:3.408445 +step:9093 train loss:3.407560 +step:9094 train loss:3.532356 +step:9095 train loss:3.401112 +step:9096 train loss:3.416404 +step:9097 train loss:3.403831 +step:9098 train loss:3.393567 +step:9099 train loss:3.518287 +step:9100 train loss:3.552642 +step:9101 train loss:3.468499 +step:9102 train loss:3.411067 +step:9103 train loss:3.419436 +step:9104 train loss:3.506179 +step:9105 train loss:3.366234 +step:9106 train loss:3.492698 +step:9107 train loss:3.426482 +step:9108 train loss:3.408284 +step:9109 train loss:3.436655 +step:9110 train loss:3.436465 +step:9111 train loss:3.420031 +step:9112 train loss:3.418196 +step:9113 train loss:3.444105 +step:9114 train loss:3.396549 +step:9115 train loss:3.423639 +step:9116 train loss:3.450521 +step:9117 train loss:3.458291 +step:9118 train loss:3.428910 +step:9119 train loss:3.348511 +step:9120 train loss:3.451137 +step:9121 train loss:3.478866 +step:9122 train loss:3.424624 +step:9123 train loss:3.445099 +step:9124 train loss:3.475994 +step:9125 train loss:3.425668 +step:9126 train loss:3.403483 +step:9127 train loss:3.435249 +step:9128 train loss:3.492344 +step:9129 train loss:3.446671 +step:9130 train loss:3.458551 
+step:9131 train loss:3.440048 +step:9132 train loss:3.447829 +step:9133 train loss:3.437338 +step:9134 train loss:3.408699 +step:9135 train loss:3.438650 +step:9136 train loss:3.435847 +step:9137 train loss:3.487230 +step:9138 train loss:3.406090 +step:9139 train loss:3.481283 +step:9140 train loss:3.407118 +step:9141 train loss:3.382038 +step:9142 train loss:3.562644 +step:9143 train loss:3.389639 +step:9144 train loss:3.483467 +step:9145 train loss:3.489513 +step:9146 train loss:3.401793 +step:9147 train loss:3.476348 +step:9148 train loss:3.495721 +step:9149 train loss:3.405851 +step:9150 train loss:3.429136 +step:9151 train loss:3.486009 +step:9152 train loss:3.445267 +step:9153 train loss:3.414586 +step:9154 train loss:3.426744 +step:9155 train loss:3.390651 +step:9156 train loss:3.396028 +step:9157 train loss:3.415382 +step:9158 train loss:3.390994 +step:9159 train loss:3.483331 +step:9160 train loss:3.366346 +step:9161 train loss:3.394999 +step:9162 train loss:3.481562 +step:9163 train loss:3.425952 +step:9164 train loss:3.398202 +step:9165 train loss:3.392122 +step:9166 train loss:3.450415 +step:9167 train loss:3.392314 +step:9168 train loss:3.434454 +step:9169 train loss:3.374244 +step:9170 train loss:3.393111 +step:9171 train loss:3.457712 +step:9172 train loss:3.382289 +step:9173 train loss:3.503286 +step:9174 train loss:3.431323 +step:9175 train loss:3.409893 +step:9176 train loss:3.390729 +step:9177 train loss:3.437690 +step:9178 train loss:3.383275 +step:9179 train loss:3.342559 +step:9180 train loss:3.436196 +step:9181 train loss:3.445456 +step:9182 train loss:3.417938 +step:9183 train loss:3.422920 +step:9184 train loss:3.420962 +step:9185 train loss:3.431800 +step:9186 train loss:3.392865 +step:9187 train loss:3.468091 +step:9188 train loss:3.507561 +step:9189 train loss:3.430414 +step:9190 train loss:3.436158 +step:9191 train loss:3.424394 +step:9192 train loss:3.440035 +step:9193 train loss:3.438956 +step:9194 train loss:3.374571 +step:9195 train loss:3.367999 +step:9196 train loss:3.416761 +step:9197 train loss:3.376310 +step:9198 train loss:3.448781 +step:9199 train loss:3.397832 +step:9200 train loss:3.423962 +step:9201 train loss:3.457749 +step:9202 train loss:3.444660 +step:9203 train loss:3.400542 +step:9204 train loss:3.600992 +step:9205 train loss:3.518173 +step:9206 train loss:3.428058 +step:9207 train loss:3.481407 +step:9208 train loss:3.457781 +step:9209 train loss:3.476966 +step:9210 train loss:3.372151 +step:9211 train loss:3.396374 +step:9212 train loss:3.400029 +step:9213 train loss:3.458885 +step:9214 train loss:3.400845 +step:9215 train loss:3.470308 +step:9216 train loss:3.432231 +step:9217 train loss:3.372170 +step:9218 train loss:3.463562 +step:9219 train loss:3.421286 +step:9220 train loss:3.467577 +step:9221 train loss:3.519477 +step:9222 train loss:3.464501 +step:9223 train loss:3.635115 +step:9224 train loss:3.471487 +step:9225 train loss:3.402349 +step:9226 train loss:3.421528 +step:9227 train loss:3.434543 +step:9228 train loss:3.439975 +step:9229 train loss:3.395145 +step:9230 train loss:3.457158 +step:9231 train loss:3.345545 +step:9232 train loss:3.403104 +step:9233 train loss:3.421194 +step:9234 train loss:3.477938 +step:9235 train loss:3.482883 +step:9236 train loss:3.388608 +step:9237 train loss:3.451605 +step:9238 train loss:3.423897 +step:9239 train loss:3.417185 +step:9240 train loss:3.384682 +step:9241 train loss:3.416515 +step:9242 train loss:3.427539 +step:9243 train loss:3.423636 +step:9244 train loss:3.398810 +step:9245 train 
loss:3.405705 +step:9246 train loss:3.404744 +step:9247 train loss:3.413735 +step:9248 train loss:3.426799 +step:9249 train loss:3.426346 +step:9250 validation loss:3.356544 +step:9250 train loss:3.462314 +step:9251 train loss:3.402717 +step:9252 train loss:3.474216 +step:9253 train loss:3.465195 +step:9254 train loss:3.396893 +step:9255 train loss:3.514839 +step:9256 train loss:3.393875 +step:9257 train loss:3.335651 +step:9258 train loss:3.417389 +step:9259 train loss:3.418127 +step:9260 train loss:3.512785 +step:9261 train loss:3.395997 +step:9262 train loss:3.466537 +step:9263 train loss:3.369750 +step:9264 train loss:3.512624 +step:9265 train loss:3.541992 +step:9266 train loss:3.470500 +step:9267 train loss:3.417911 +step:9268 train loss:3.414065 +step:9269 train loss:3.441356 +step:9270 train loss:3.362628 +step:9271 train loss:3.472042 +step:9272 train loss:3.413484 +step:9273 train loss:3.435652 +step:9274 train loss:3.436238 +step:9275 train loss:3.433881 +step:9276 train loss:3.459195 +step:9277 train loss:3.435383 +step:9278 train loss:3.447372 +step:9279 train loss:3.441650 +step:9280 train loss:3.441350 +step:9281 train loss:3.415045 +step:9282 train loss:3.535235 +step:9283 train loss:3.418730 +step:9284 train loss:3.382800 +step:9285 train loss:3.407181 +step:9286 train loss:3.458063 +step:9287 train loss:3.431520 +step:9288 train loss:3.435106 +step:9289 train loss:3.404935 +step:9290 train loss:3.436041 +step:9291 train loss:3.412874 +step:9292 train loss:3.451226 +step:9293 train loss:3.505658 +step:9294 train loss:3.428453 +step:9295 train loss:3.416260 +step:9296 train loss:3.366519 +step:9297 train loss:3.437209 +step:9298 train loss:3.376241 +step:9299 train loss:3.361358 +step:9300 train loss:3.465681 +step:9301 train loss:3.490649 +step:9302 train loss:3.431443 +step:9303 train loss:3.478369 +step:9304 train loss:3.397814 +step:9305 train loss:3.389874 +step:9306 train loss:3.395286 +step:9307 train loss:3.393990 +step:9308 train loss:3.369334 +step:9309 train loss:3.357918 +step:9310 train loss:3.413367 +step:9311 train loss:3.477574 +step:9312 train loss:3.426092 +step:9313 train loss:3.374248 +step:9314 train loss:3.404015 +step:9315 train loss:3.432279 +step:9316 train loss:3.417902 +step:9317 train loss:3.394470 +step:9318 train loss:3.482373 +step:9319 train loss:3.390634 +step:9320 train loss:3.410551 +step:9321 train loss:3.425028 +step:9322 train loss:3.433114 +step:9323 train loss:3.510628 +step:9324 train loss:3.450715 +step:9325 train loss:3.391234 +step:9326 train loss:3.469426 +step:9327 train loss:3.461239 +step:9328 train loss:3.464041 +step:9329 train loss:3.349613 +step:9330 train loss:3.521641 +step:9331 train loss:3.449844 +step:9332 train loss:3.473071 +step:9333 train loss:3.492552 +step:9334 train loss:3.426813 +step:9335 train loss:3.523118 +step:9336 train loss:3.479745 +step:9337 train loss:3.433728 +step:9338 train loss:3.491385 +step:9339 train loss:3.467010 +step:9340 train loss:3.427404 +step:9341 train loss:3.518498 +step:9342 train loss:3.412836 +step:9343 train loss:3.409211 +step:9344 train loss:3.409258 +step:9345 train loss:3.549664 +step:9346 train loss:3.390750 +step:9347 train loss:3.404690 +step:9348 train loss:3.431103 +step:9349 train loss:3.373116 +step:9350 train loss:3.450121 +step:9351 train loss:3.425326 +step:9352 train loss:3.412271 +step:9353 train loss:3.446053 +step:9354 train loss:3.411240 +step:9355 train loss:3.405670 +step:9356 train loss:3.454785 +step:9357 train loss:3.404623 +step:9358 train loss:3.440056 
+step:9359 train loss:3.381972 +step:9360 train loss:3.397410 +step:9361 train loss:3.399767 +step:9362 train loss:3.385532 +step:9363 train loss:3.448310 +step:9364 train loss:3.426950 +step:9365 train loss:3.433370 +step:9366 train loss:3.425753 +step:9367 train loss:3.441086 +step:9368 train loss:3.413855 +step:9369 train loss:3.414567 +step:9370 train loss:3.420775 +step:9371 train loss:3.445233 +step:9372 train loss:3.409114 +step:9373 train loss:3.392089 +step:9374 train loss:3.431125 +step:9375 train loss:3.442608 +step:9376 train loss:3.379748 +step:9377 train loss:3.455257 +step:9378 train loss:3.453230 +step:9379 train loss:3.482911 +step:9380 train loss:3.413467 +step:9381 train loss:3.421227 +step:9382 train loss:3.397343 +step:9383 train loss:3.392505 +step:9384 train loss:3.363695 +step:9385 train loss:3.435825 +step:9386 train loss:3.465362 +step:9387 train loss:3.441540 +step:9388 train loss:3.381050 +step:9389 train loss:3.396945 +step:9390 train loss:3.437644 +step:9391 train loss:3.444874 +step:9392 train loss:3.405023 +step:9393 train loss:3.400070 +step:9394 train loss:3.427525 +step:9395 train loss:3.423372 +step:9396 train loss:3.570076 +step:9397 train loss:3.459374 +step:9398 train loss:3.479730 +step:9399 train loss:3.431974 +step:9400 train loss:3.434103 +step:9401 train loss:3.423599 +step:9402 train loss:3.426436 +step:9403 train loss:3.358128 +step:9404 train loss:3.434220 +step:9405 train loss:3.394854 +step:9406 train loss:3.448903 +step:9407 train loss:3.387578 +step:9408 train loss:3.329954 +step:9409 train loss:3.390837 +step:9410 train loss:3.474678 +step:9411 train loss:3.436078 +step:9412 train loss:3.468052 +step:9413 train loss:3.482402 +step:9414 train loss:3.419945 +step:9415 train loss:3.416598 +step:9416 train loss:3.427909 +step:9417 train loss:3.383635 +step:9418 train loss:3.411167 +step:9419 train loss:3.375782 +step:9420 train loss:3.398009 +step:9421 train loss:3.447434 +step:9422 train loss:3.397304 +step:9423 train loss:3.461673 +step:9424 train loss:3.399639 +step:9425 train loss:3.442354 +step:9426 train loss:3.446179 +step:9427 train loss:3.421641 +step:9428 train loss:3.525069 +step:9429 train loss:3.416842 +step:9430 train loss:3.373611 +step:9431 train loss:3.461117 +step:9432 train loss:3.425249 +step:9433 train loss:3.464343 +step:9434 train loss:3.417830 +step:9435 train loss:3.440850 +step:9436 train loss:3.414860 +step:9437 train loss:3.424020 +step:9438 train loss:3.419911 +step:9439 train loss:3.419432 +step:9440 train loss:3.411009 +step:9441 train loss:3.421875 +step:9442 train loss:3.362202 +step:9443 train loss:3.416979 +step:9444 train loss:3.483545 +step:9445 train loss:3.412911 +step:9446 train loss:3.389898 +step:9447 train loss:3.457726 +step:9448 train loss:3.392247 +step:9449 train loss:3.414671 +step:9450 train loss:3.457530 +step:9451 train loss:3.371701 +step:9452 train loss:3.423844 +step:9453 train loss:3.404115 +step:9454 train loss:3.464936 +step:9455 train loss:3.447539 +step:9456 train loss:3.371716 +step:9457 train loss:3.419689 +step:9458 train loss:3.406781 +step:9459 train loss:3.401648 +step:9460 train loss:3.438642 +step:9461 train loss:3.468771 +step:9462 train loss:3.419707 +step:9463 train loss:3.447205 +step:9464 train loss:3.402401 +step:9465 train loss:3.492424 +step:9466 train loss:3.441944 +step:9467 train loss:3.462944 +step:9468 train loss:3.412532 +step:9469 train loss:3.400259 +step:9470 train loss:3.396789 +step:9471 train loss:3.437340 +step:9472 train loss:3.461228 +step:9473 train 
loss:3.451737 +step:9474 train loss:3.395229 +step:9475 train loss:3.388017 +step:9476 train loss:3.605912 +step:9477 train loss:3.478190 +step:9478 train loss:3.454695 +step:9479 train loss:3.551812 +step:9480 train loss:3.400014 +step:9481 train loss:3.436794 +step:9482 train loss:3.458946 +step:9483 train loss:3.419461 +step:9484 train loss:3.446758 +step:9485 train loss:3.368485 +step:9486 train loss:3.404599 +step:9487 train loss:3.439324 +step:9488 train loss:3.392295 +step:9489 train loss:3.437353 +step:9490 train loss:3.404494 +step:9491 train loss:3.444688 +step:9492 train loss:3.465116 +step:9493 train loss:3.436222 +step:9494 train loss:3.447794 +step:9495 train loss:3.402448 +step:9496 train loss:3.461212 +step:9497 train loss:3.477779 +step:9498 train loss:3.423410 +step:9499 train loss:3.474310 +step:9500 validation loss:3.358457 total_sharp:5.7887e-05 L1_sharp:2.6812e-05 L2_sharp:8.1230e-06 L3_sharp:1.1106e-05 L4_sharp:5.2532e-06 L5_sharp:7.5799e-06 L6_sharp:1.1061e-05 L7_sharp:1.1485e-05 L8_sharp:1.1627e-05 L9_sharp:9.8920e-06 L10_sharp:5.2862e-06 L11_sharp:4.5050e-06 L12_sharp:7.7769e-06 total_fnorm:1.0349e+01 total_l1_linf:7.7649e+04 total_spectral:1.0349e+01 L1_fnorm:2.9267e+00 L2_fnorm:2.7826e+00 L3_fnorm:2.8490e+00 L4_fnorm:2.9596e+00 L5_fnorm:2.9816e+00 L6_fnorm:2.9934e+00 L7_fnorm:2.9977e+00 L8_fnorm:3.0044e+00 L9_fnorm:3.0016e+00 L10_fnorm:3.0224e+00 L11_fnorm:3.0085e+00 L12_fnorm:3.0153e+00 L1_l1linf:1.9806e+00 L2_l1linf:1.9502e+00 L3_l1linf:2.0469e+00 L4_l1linf:2.0973e+00 L5_l1linf:2.0050e+00 L6_l1linf:2.0271e+00 L7_l1linf:2.0091e+00 L8_l1linf:2.0167e+00 L9_l1linf:1.9991e+00 L10_l1linf:2.0329e+00 L11_l1linf:1.9800e+00 L12_l1linf:2.0614e+00 L1_spectral:6.0226e-02 L2_spectral:6.0228e-02 L3_spectral:6.0231e-02 L4_spectral:6.0275e-02 L5_spectral:6.0246e-02 L6_spectral:6.0205e-02 L7_spectral:6.0216e-02 L8_spectral:6.0226e-02 L9_spectral:6.0212e-02 L10_spectral:6.0230e-02 L11_spectral:6.0234e-02 L12_spectral:6.0234e-02 ip_v_neg_g:1.9316e-03 cos_v_neg_g:5.5980e-04 v_norm:1.0349e+01 g_norm:3.3342e-01 hv_norm:7.2700e-02 cos_v_hv:8.2401e-03 hg_norm:8.7422e-01 cos_g_hg:4.0864e-01 v_par:8.9056e-04 v_perp:1.0349e+01 L1_cos_v_neg_g:2.7922e-03 L1_v_norm:2.9267e+00 L2_cos_v_neg_g:2.5176e-03 L2_v_norm:2.7826e+00 L3_cos_v_neg_g:6.5719e-04 L3_v_norm:2.8490e+00 L4_cos_v_neg_g:1.3402e-03 L4_v_norm:2.9596e+00 L5_cos_v_neg_g:1.5727e-03 L5_v_norm:2.9816e+00 L6_cos_v_neg_g:2.6900e-03 L6_v_norm:2.9934e+00 L7_cos_v_neg_g:1.9169e-03 L7_v_norm:2.9977e+00 L8_cos_v_neg_g:2.2371e-03 L8_v_norm:3.0044e+00 L9_cos_v_neg_g:2.6856e-03 L9_v_norm:3.0016e+00 L10_cos_v_neg_g:2.4966e-03 L10_v_norm:3.0224e+00 L11_cos_v_neg_g:2.2531e-03 L11_v_norm:3.0085e+00 L12_cos_v_neg_g:1.9506e-03 L12_v_norm:3.0153e+00 +step:9500 train loss:3.465797 +step:9501 train loss:3.443627 +step:9502 train loss:3.414629 +step:9503 train loss:3.432250 +step:9504 train loss:3.385485 +step:9505 train loss:3.411399 +step:9506 train loss:3.426618 +step:9507 train loss:3.411451 +step:9508 train loss:3.609382 +step:9509 train loss:3.423380 +step:9510 train loss:3.409522 +step:9511 train loss:3.436996 +step:9512 train loss:3.468566 +step:9513 train loss:3.456288 +step:9514 train loss:3.426494 +step:9515 train loss:3.325473 +step:9516 train loss:3.429001 +step:9517 train loss:3.460858 +step:9518 train loss:3.438836 +step:9519 train loss:3.448379 +step:9520 train loss:3.335387 +step:9521 train loss:3.330473 +step:9522 train loss:3.449448 +step:9523 train loss:3.444283 +step:9524 train loss:3.445741 +step:9525 train loss:3.490710 +step:9526 
train loss:3.505357 +step:9527 train loss:3.463588 +step:9528 train loss:3.396388 +step:9529 train loss:3.441540 +step:9530 train loss:3.486123 +step:9531 train loss:3.392516 +step:9532 train loss:3.444916 +step:9533 train loss:3.415720 +step:9534 train loss:3.495922 +step:9535 train loss:3.419919 +step:9536 train loss:3.398559 +step:9537 train loss:3.343305 +step:9538 train loss:3.361673 +step:9539 train loss:3.431326 +step:9540 train loss:3.350250 +step:9541 train loss:3.410528 +step:9542 train loss:3.538236 +step:9543 train loss:3.437589 +step:9544 train loss:3.476640 +step:9545 train loss:3.410587 +step:9546 train loss:3.436280 +step:9547 train loss:3.478687 +step:9548 train loss:3.421640 +step:9549 train loss:3.386391 +step:9550 train loss:3.416927 +step:9551 train loss:3.413126 +step:9552 train loss:3.437340 +step:9553 train loss:3.430876 +step:9554 train loss:3.474744 +step:9555 train loss:3.480033 +step:9556 train loss:3.391744 +step:9557 train loss:3.409925 +step:9558 train loss:3.473234 +step:9559 train loss:3.478651 +step:9560 train loss:3.395364 +step:9561 train loss:3.418589 +step:9562 train loss:3.455657 +step:9563 train loss:3.407714 +step:9564 train loss:3.439454 +step:9565 train loss:3.420395 +step:9566 train loss:3.390447 +step:9567 train loss:3.457633 +step:9568 train loss:3.426790 +step:9569 train loss:3.468244 +step:9570 train loss:3.364117 +step:9571 train loss:3.436777 +step:9572 train loss:3.382004 +step:9573 train loss:3.412373 +step:9574 train loss:3.390544 +step:9575 train loss:3.459613 +step:9576 train loss:3.353858 +step:9577 train loss:3.401108 +step:9578 train loss:3.405653 +step:9579 train loss:3.404672 +step:9580 train loss:3.466273 +step:9581 train loss:3.460006 +step:9582 train loss:3.424270 +step:9583 train loss:3.456034 +step:9584 train loss:3.393266 +step:9585 train loss:3.410385 +step:9586 train loss:3.466877 +step:9587 train loss:3.434341 +step:9588 train loss:3.419371 +step:9589 train loss:3.477421 +step:9590 train loss:3.441073 +step:9591 train loss:3.407562 +step:9592 train loss:3.427869 +step:9593 train loss:3.430656 +step:9594 train loss:3.446982 +step:9595 train loss:3.420816 +step:9596 train loss:3.506744 +step:9597 train loss:3.411861 +step:9598 train loss:3.374655 +step:9599 train loss:3.382309 +step:9600 train loss:3.467633 +step:9601 train loss:3.383373 +step:9602 train loss:3.470323 +step:9603 train loss:3.465077 +step:9604 train loss:3.346182 +step:9605 train loss:3.432315 +step:9606 train loss:3.490194 +step:9607 train loss:3.405506 +step:9608 train loss:3.414715 +step:9609 train loss:3.426006 +step:9610 train loss:3.466714 +step:9611 train loss:3.398047 +step:9612 train loss:3.408618 +step:9613 train loss:3.448892 +step:9614 train loss:3.417242 +step:9615 train loss:3.609684 +step:9616 train loss:3.420280 +step:9617 train loss:3.399927 +step:9618 train loss:3.358541 +step:9619 train loss:3.423028 +step:9620 train loss:3.478762 +step:9621 train loss:3.403460 +step:9622 train loss:3.416203 +step:9623 train loss:3.452510 +step:9624 train loss:3.440939 +step:9625 train loss:3.455726 +step:9626 train loss:3.425625 +step:9627 train loss:3.506050 +step:9628 train loss:3.471679 +step:9629 train loss:3.386425 +step:9630 train loss:3.444737 +step:9631 train loss:3.430116 +step:9632 train loss:3.401971 +step:9633 train loss:3.444164 +step:9634 train loss:3.512319 +step:9635 train loss:3.412559 +step:9636 train loss:3.361934 +step:9637 train loss:3.493847 +step:9638 train loss:3.373958 +step:9639 train loss:3.346552 +step:9640 train loss:3.469688 
+step:9641 train loss:3.438686 +step:9642 train loss:3.417632 +step:9643 train loss:3.422352 +step:9644 train loss:3.475136 +step:9645 train loss:3.402642 +step:9646 train loss:3.441603 +step:9647 train loss:3.451293 +step:9648 train loss:3.404140 +step:9649 train loss:3.375924 +step:9650 train loss:3.392011 +step:9651 train loss:3.481214 +step:9652 train loss:3.461650 +step:9653 train loss:3.407365 +step:9654 train loss:3.387141 +step:9655 train loss:3.382580 +step:9656 train loss:3.377113 +step:9657 train loss:3.404275 +step:9658 train loss:3.459153 +step:9659 train loss:3.568831 +step:9660 train loss:3.353482 +step:9661 train loss:3.372135 +step:9662 train loss:3.390834 +step:9663 train loss:3.430755 +step:9664 train loss:3.481045 +step:9665 train loss:3.326554 +step:9666 train loss:3.367946 +step:9667 train loss:3.505055 +step:9668 train loss:3.485484 +step:9669 train loss:3.505025 +step:9670 train loss:3.483389 +step:9671 train loss:3.484197 +step:9672 train loss:3.396879 +step:9673 train loss:3.416435 +step:9674 train loss:3.429410 +step:9675 train loss:3.427959 +step:9676 train loss:3.383288 +step:9677 train loss:3.394145 +step:9678 train loss:3.428343 +step:9679 train loss:3.419416 +step:9680 train loss:3.417727 +step:9681 train loss:3.403789 +step:9682 train loss:3.472603 +step:9683 train loss:3.446738 +step:9684 train loss:3.363419 +step:9685 train loss:3.448244 +step:9686 train loss:3.481200 +step:9687 train loss:3.388907 +step:9688 train loss:3.474884 +step:9689 train loss:3.573691 +step:9690 train loss:3.417860 +step:9691 train loss:3.404783 +step:9692 train loss:3.372008 +step:9693 train loss:3.363579 +step:9694 train loss:3.384450 +step:9695 train loss:3.495754 +step:9696 train loss:3.525203 +step:9697 train loss:3.433366 +step:9698 train loss:3.469530 +step:9699 train loss:3.430892 +step:9700 train loss:3.430701 +step:9701 train loss:3.479698 +step:9702 train loss:3.395985 +step:9703 train loss:3.420580 +step:9704 train loss:3.502829 +step:9705 train loss:3.399805 +step:9706 train loss:3.394924 +step:9707 train loss:3.442626 +step:9708 train loss:3.393286 +step:9709 train loss:3.414411 +step:9710 train loss:3.433955 +step:9711 train loss:3.405351 +step:9712 train loss:3.419022 +step:9713 train loss:3.467554 +step:9714 train loss:3.425526 +step:9715 train loss:3.441995 +step:9716 train loss:3.467379 +step:9717 train loss:3.386932 +step:9718 train loss:3.392040 +step:9719 train loss:3.472988 +step:9720 train loss:3.407514 +step:9721 train loss:3.396396 +step:9722 train loss:3.463567 +step:9723 train loss:3.407940 +step:9724 train loss:3.435559 +step:9725 train loss:3.487082 +step:9726 train loss:3.427727 +step:9727 train loss:3.405909 +step:9728 train loss:3.446065 +step:9729 train loss:3.475288 +step:9730 train loss:3.544981 +step:9731 train loss:3.460851 +step:9732 train loss:3.424647 +step:9733 train loss:3.466054 +step:9734 train loss:3.387009 +step:9735 train loss:3.493433 +step:9736 train loss:3.394677 +step:9737 train loss:3.453011 +step:9738 train loss:3.420109 +step:9739 train loss:3.495975 +step:9740 train loss:3.456695 +step:9741 train loss:3.397244 +step:9742 train loss:3.493104 +step:9743 train loss:3.363231 +step:9744 train loss:3.424074 +step:9745 train loss:3.384775 +step:9746 train loss:3.419381 +step:9747 train loss:3.412902 +step:9748 train loss:3.309423 +step:9749 train loss:3.409960 +step:9750 validation loss:3.353874 +step:9750 train loss:3.390600 +step:9751 train loss:3.526495 +step:9752 train loss:3.417410 +step:9753 train loss:3.372472 +step:9754 
train loss:3.403386 +step:9755 train loss:3.402317 +step:9756 train loss:3.404016 +step:9757 train loss:3.369348 +step:9758 train loss:3.360124 +step:9759 train loss:3.408678 +step:9760 train loss:3.350891 +step:9761 train loss:3.396065 +step:9762 train loss:3.390679 +step:9763 train loss:3.409402 +step:9764 train loss:3.397038 +step:9765 train loss:3.358827 +step:9766 train loss:3.449847 +step:9767 train loss:3.405960 +step:9768 train loss:3.416964 +step:9769 train loss:3.373530 +step:9770 train loss:3.371368 +step:9771 train loss:3.421381 +step:9772 train loss:3.431893 +step:9773 train loss:3.409578 +step:9774 train loss:3.381204 +step:9775 train loss:3.470150 +step:9776 train loss:3.468942 +step:9777 train loss:3.355489 +step:9778 train loss:3.367008 +step:9779 train loss:3.368600 +step:9780 train loss:3.369869 +step:9781 train loss:3.389476 +step:9782 train loss:3.465560 +step:9783 train loss:3.378060 +step:9784 train loss:3.400703 +step:9785 train loss:3.397217 +step:9786 train loss:3.428891 +step:9787 train loss:3.456154 +step:9788 train loss:3.381641 +step:9789 train loss:3.392922 +step:9790 train loss:3.351874 +step:9791 train loss:3.400962 +step:9792 train loss:3.416203 +step:9793 train loss:3.435663 +step:9794 train loss:3.407883 +step:9795 train loss:3.414679 +step:9796 train loss:3.398728 +step:9797 train loss:3.396441 +step:9798 train loss:3.411724 +step:9799 train loss:3.410060 +step:9800 train loss:3.484337 +step:9801 train loss:3.408661 +step:9802 train loss:3.465362 +step:9803 train loss:3.327442 +step:9804 train loss:3.419203 +step:9805 train loss:3.425465 +step:9806 train loss:3.399616 +step:9807 train loss:3.370135 +step:9808 train loss:3.284605 +step:9809 train loss:3.469794 +step:9810 train loss:3.425739 +step:9811 train loss:3.407688 +step:9812 train loss:3.386092 +step:9813 train loss:3.461685 +step:9814 train loss:3.456232 +step:9815 train loss:3.357859 +step:9816 train loss:3.363749 +step:9817 train loss:3.393663 +step:9818 train loss:3.420031 +step:9819 train loss:3.391680 +step:9820 train loss:3.456084 +step:9821 train loss:3.438541 +step:9822 train loss:3.410369 +step:9823 train loss:3.471963 +step:9824 train loss:3.377068 +step:9825 train loss:3.464401 +step:9826 train loss:3.459493 +step:9827 train loss:3.463298 +step:9828 train loss:3.381476 +step:9829 train loss:3.388623 +step:9830 train loss:3.375437 +step:9831 train loss:3.431071 +step:9832 train loss:3.442703 +step:9833 train loss:3.357177 +step:9834 train loss:3.409943 +step:9835 train loss:3.374816 +step:9836 train loss:3.442354 +step:9837 train loss:3.408565 +step:9838 train loss:3.449980 +step:9839 train loss:3.422589 +step:9840 train loss:3.395035 +step:9841 train loss:3.400508 +step:9842 train loss:3.461261 +step:9843 train loss:3.455162 +step:9844 train loss:3.402636 +step:9845 train loss:3.432872 +step:9846 train loss:3.367461 +step:9847 train loss:3.497439 +step:9848 train loss:3.419607 +step:9849 train loss:3.444639 +step:9850 train loss:3.365205 +step:9851 train loss:3.418705 +step:9852 train loss:3.383943 +step:9853 train loss:3.404906 +step:9854 train loss:3.419905 +step:9855 train loss:3.360960 +step:9856 train loss:3.367986 +step:9857 train loss:3.356532 +step:9858 train loss:3.421038 +step:9859 train loss:3.341599 +step:9860 train loss:3.579372 +step:9861 train loss:3.405087 +step:9862 train loss:3.370788 +step:9863 train loss:3.354312 +step:9864 train loss:3.478713 +step:9865 train loss:3.355803 +step:9866 train loss:3.396189 +step:9867 train loss:3.395492 +step:9868 train loss:3.456181 
+step:9869 train loss:3.418846 +step:9870 train loss:3.387955 +step:9871 train loss:3.430650 +step:9872 train loss:3.369890 +step:9873 train loss:3.426920 +step:9874 train loss:3.388916 +step:9875 train loss:3.392466 +step:9876 train loss:3.356571 +step:9877 train loss:3.408692 +step:9878 train loss:3.438391 +step:9879 train loss:3.440411 +step:9880 train loss:3.372516 +step:9881 train loss:3.424562 +step:9882 train loss:3.386451 +step:9883 train loss:3.396524 +step:9884 train loss:3.388387 +step:9885 train loss:3.452845 +step:9886 train loss:3.417079 +step:9887 train loss:3.415426 +step:9888 train loss:3.441643 +step:9889 train loss:3.472813 +step:9890 train loss:3.384806 +step:9891 train loss:3.390622 +step:9892 train loss:3.361110 +step:9893 train loss:3.483175 +step:9894 train loss:3.394864 +step:9895 train loss:3.329326 +step:9896 train loss:3.483793 +step:9897 train loss:3.360343 +step:9898 train loss:3.429520 +step:9899 train loss:3.409713 +step:9900 train loss:3.453232 +step:9901 train loss:3.374158 +step:9902 train loss:3.421300 +step:9903 train loss:3.393058 +step:9904 train loss:3.442906 +step:9905 train loss:3.347236 +step:9906 train loss:3.385958 +step:9907 train loss:3.396593 +step:9908 train loss:3.391962 +step:9909 train loss:3.409886 +step:9910 train loss:3.431887 +step:9911 train loss:3.516477 +step:9912 train loss:3.390661 +step:9913 train loss:3.393815 +step:9914 train loss:3.402527 +step:9915 train loss:3.404746 +step:9916 train loss:3.355025 +step:9917 train loss:3.391817 +step:9918 train loss:3.384680 +step:9919 train loss:3.552193 +step:9920 train loss:3.341294 +step:9921 train loss:3.428762 +step:9922 train loss:3.387197 +step:9923 train loss:3.448612 +step:9924 train loss:3.360078 +step:9925 train loss:3.419948 +step:9926 train loss:3.399777 +step:9927 train loss:3.441870 +step:9928 train loss:3.366030 +step:9929 train loss:3.406019 +step:9930 train loss:3.497142 +step:9931 train loss:3.462874 +step:9932 train loss:3.347658 +step:9933 train loss:3.440674 +step:9934 train loss:3.363220 +step:9935 train loss:3.477062 +step:9936 train loss:3.385164 +step:9937 train loss:3.409231 +step:9938 train loss:3.394845 +step:9939 train loss:3.460085 +step:9940 train loss:3.495258 +step:9941 train loss:3.369171 +step:9942 train loss:3.414428 +step:9943 train loss:3.541254 +step:9944 train loss:3.414142 +step:9945 train loss:3.432764 +step:9946 train loss:3.406276 +step:9947 train loss:3.354633 +step:9948 train loss:3.400584 +step:9949 train loss:3.293963 +step:9950 train loss:3.448636 +step:9951 train loss:3.365092 +step:9952 train loss:3.434535 +step:9953 train loss:3.398375 +step:9954 train loss:3.453262 +step:9955 train loss:3.430956 +step:9956 train loss:3.431322 +step:9957 train loss:3.408191 +step:9958 train loss:3.463920 +step:9959 train loss:3.361345 +step:9960 train loss:3.394567 +step:9961 train loss:3.400434 +step:9962 train loss:3.450864 +step:9963 train loss:3.340813 +step:9964 train loss:3.399800 +step:9965 train loss:3.402675 +step:9966 train loss:3.459095 +step:9967 train loss:3.376339 +step:9968 train loss:3.440068 +step:9969 train loss:3.351322 +step:9970 train loss:3.393093 +step:9971 train loss:3.436981 +step:9972 train loss:3.457076 +step:9973 train loss:3.434890 +step:9974 train loss:3.422951 +step:9975 train loss:3.391045 +step:9976 train loss:3.350561 +step:9977 train loss:3.400651 +step:9978 train loss:3.400943 +step:9979 train loss:3.410976 +step:9980 train loss:3.464288 +step:9981 train loss:3.374392 +step:9982 train loss:3.434722 +step:9983 train 
loss:3.352834 +step:9984 train loss:3.416546 +step:9985 train loss:3.361543 +step:9986 train loss:3.414767 +step:9987 train loss:3.459517 +step:9988 train loss:3.471148 +step:9989 train loss:3.365945 +step:9990 train loss:3.503857 +step:9991 train loss:3.354418 +step:9992 train loss:3.425226 +step:9993 train loss:3.417563 +step:9994 train loss:3.530218 +step:9995 train loss:3.470543 +step:9996 train loss:3.384546 +step:9997 train loss:3.425981 +step:9998 train loss:3.476012 +step:9999 train loss:3.444618 +step:10000 validation loss:3.350428 total_sharp:5.7250e-05 L1_sharp:6.9797e-05 L2_sharp:8.8957e-06 L3_sharp:7.6063e-06 L4_sharp:4.8783e-06 L5_sharp:6.7419e-06 L6_sharp:7.5922e-06 L7_sharp:8.0102e-06 L8_sharp:7.7248e-06 L9_sharp:8.8986e-06 L10_sharp:4.7485e-06 L11_sharp:4.3358e-06 L12_sharp:1.0276e-05 total_fnorm:1.0347e+01 total_l1_linf:7.7648e+04 total_spectral:1.0347e+01 L1_fnorm:2.9231e+00 L2_fnorm:2.7793e+00 L3_fnorm:2.8634e+00 L4_fnorm:2.9634e+00 L5_fnorm:2.9790e+00 L6_fnorm:2.9984e+00 L7_fnorm:3.0029e+00 L8_fnorm:3.0030e+00 L9_fnorm:2.9938e+00 L10_fnorm:3.0161e+00 L11_fnorm:2.9982e+00 L12_fnorm:3.0147e+00 L1_l1linf:1.9925e+00 L2_l1linf:1.9572e+00 L3_l1linf:2.0431e+00 L4_l1linf:2.0705e+00 L5_l1linf:2.0210e+00 L6_l1linf:2.0227e+00 L7_l1linf:2.0227e+00 L8_l1linf:2.0106e+00 L9_l1linf:1.9768e+00 L10_l1linf:2.0037e+00 L11_l1linf:1.9682e+00 L12_l1linf:2.1471e+00 L1_spectral:6.0202e-02 L2_spectral:6.0224e-02 L3_spectral:6.0233e-02 L4_spectral:6.0285e-02 L5_spectral:6.0219e-02 L6_spectral:6.0210e-02 L7_spectral:6.0242e-02 L8_spectral:6.0218e-02 L9_spectral:6.0246e-02 L10_spectral:6.0209e-02 L11_spectral:6.0223e-02 L12_spectral:6.0217e-02 ip_v_neg_g:4.0331e-03 cos_v_neg_g:1.1147e-03 v_norm:1.0347e+01 g_norm:3.4969e-01 hv_norm:6.8122e-02 cos_v_hv:8.6955e-03 hg_norm:1.3023e+00 cos_g_hg:3.6466e-01 v_par:8.9316e-04 v_perp:1.0347e+01 L1_cos_v_neg_g:4.8467e-03 L1_v_norm:2.9231e+00 L2_cos_v_neg_g:9.9948e-04 L2_v_norm:2.7793e+00 L3_cos_v_neg_g:1.1708e-03 L3_v_norm:2.8634e+00 L4_cos_v_neg_g:2.8342e-03 L4_v_norm:2.9634e+00 L5_cos_v_neg_g:2.7626e-03 L5_v_norm:2.9790e+00 L6_cos_v_neg_g:2.1676e-03 L6_v_norm:2.9984e+00 L7_cos_v_neg_g:3.1827e-03 L7_v_norm:3.0029e+00 L8_cos_v_neg_g:3.7182e-03 L8_v_norm:3.0030e+00 L9_cos_v_neg_g:4.6225e-03 L9_v_norm:2.9938e+00 L10_cos_v_neg_g:4.0142e-03 L10_v_norm:3.0161e+00 L11_cos_v_neg_g:3.9062e-03 L11_v_norm:2.9982e+00 L12_cos_v_neg_g:5.8319e-03 L12_v_norm:3.0147e+00